From c8e3bbf73f056380431a951e956557f6de9ecfec Mon Sep 17 00:00:00 2001 From: Florian BRUNIAUX Date: Fri, 20 Mar 2026 14:22:35 +0100 Subject: [PATCH 1/6] feat(changelog): add fragment-based changelog system - package.json with changelog:add/validate/assemble/audit scripts - changelog/schema.yml documenting the YAML fragment format - changelog/fragments/.gitkeep placeholder directory - changelog/scripts/add.ts: interactive CLI to create fragments - changelog/scripts/validate.ts: CI validation with PR/filename consistency checks - changelog/scripts/assemble.ts: release assembler with CHANGELOG.md injection and fragment archival - changelog/scripts/audit.ts: merged PR coverage audit via gh CLI Co-Authored-By: Claude Sonnet 4.6 Signed-off-by: Florian BRUNIAUX --- changelog/fragments/.gitkeep | 0 changelog/schema.yml | 21 ++ changelog/scripts/add.ts | 100 ++++++++++ changelog/scripts/assemble.ts | 201 +++++++++++++++++++ changelog/scripts/audit.ts | 184 ++++++++++++++++++ changelog/scripts/validate.ts | 140 ++++++++++++++ package.json | 17 ++ pnpm-lock.yaml | 352 ++++++++++++++++++++++++++++++++++ 8 files changed, 1015 insertions(+) create mode 100644 changelog/fragments/.gitkeep create mode 100644 changelog/schema.yml create mode 100644 changelog/scripts/add.ts create mode 100644 changelog/scripts/assemble.ts create mode 100644 changelog/scripts/audit.ts create mode 100644 changelog/scripts/validate.ts create mode 100644 package.json create mode 100644 pnpm-lock.yaml diff --git a/changelog/fragments/.gitkeep b/changelog/fragments/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/changelog/schema.yml b/changelog/schema.yml new file mode 100644 index 00000000..507b9980 --- /dev/null +++ b/changelog/schema.yml @@ -0,0 +1,21 @@ +# RTK Changelog Fragment Schema +# One file per PR — never edit manually, use `pnpm changelog:add` + +# Required fields +pr: 123 # PR number (must match filename prefix) +type: feat # feat | fix | perf | refactor | security | docs 
| chore +scope: "hook" # Functional scope (free text) +title: "Short title < 80 chars" # Will appear in CHANGELOG.md + +# Optional fields +description: | + User-facing impact in 1-2 sentences (Markdown). +breaking: false # true → appears in Breaking Changes section +migration: false # true → shows ⚠️ Migration DB warning +scripts: [] # Post-deploy commands (array of strings) + # Example: + # scripts: + # - "node scripts/migrate-data.js --execute" + +# Naming rule: {PR_NUMBER}-{slug-kebab-case}.yml +# The PR number in the filename MUST match the `pr` field. diff --git a/changelog/scripts/add.ts b/changelog/scripts/add.ts new file mode 100644 index 00000000..d038eae8 --- /dev/null +++ b/changelog/scripts/add.ts @@ -0,0 +1,100 @@ +#!/usr/bin/env tsx +/** + * changelog:add — Interactive CLI to create a changelog fragment + * Usage: pnpm changelog:add + */ +import * as readline from "readline"; +import * as fs from "fs"; +import * as path from "path"; + +const TYPES = ["feat", "fix", "perf", "refactor", "security", "docs", "chore"] as const; +type FragmentType = (typeof TYPES)[number]; + +const FRAGMENTS_DIR = path.resolve(process.cwd(), "changelog/fragments"); + +function slugify(text: string, maxLen = 40): string { + return text + .normalize("NFD") + .replace(/[\u0300-\u036f]/g, "") + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-+|-+$/g, "") + .slice(0, maxLen); +} + +function ask(rl: readline.Interface, question: string): Promise { + return new Promise((resolve) => rl.question(question, resolve)); +} + +async function main() { + const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); + + console.log("\n📝 Create a new changelog fragment\n"); + + const prStr = await ask(rl, "PR number: "); + const pr = parseInt(prStr.trim(), 10); + if (isNaN(pr) || pr <= 0) { + console.error("❌ Invalid PR number"); + process.exit(1); + } + + console.log(`Types: ${TYPES.join(" | ")}`); + const typeInput = (await ask(rl, "Type: ")).trim() as 
FragmentType;
+  if (!TYPES.includes(typeInput)) {
+    console.error(`❌ Invalid type. Must be one of: ${TYPES.join(", ")}`);
+    process.exit(1);
+  }
+
+  const scope = (await ask(rl, "Scope (e.g. hook, git, permissions): ")).trim();
+  if (!scope) {
+    console.error("❌ Scope is required");
+    process.exit(1);
+  }
+
+  const title = (await ask(rl, "Title (< 80 chars): ")).trim();
+  if (!title) {
+    console.error("❌ Title is required");
+    process.exit(1);
+  }
+  if (title.length > 80) {
+    console.error(`❌ Title too long: ${title.length} chars (max 80)`);
+    process.exit(1);
+  }
+
+  rl.close();
+
+  const slug = slugify(title);
+  const filename = `${pr}-${slug}.yml`;
+  const filepath = path.join(FRAGMENTS_DIR, filename);
+
+  if (fs.existsSync(filepath)) {
+    console.error(`❌ File already exists: changelog/fragments/${filename}`);
+    process.exit(1);
+  }
+
+  const content = [
+    `pr: ${pr}`,
+    `type: ${typeInput}`,
+    `scope: "${scope}"`,
+    `title: "${title}"`,
+    `description: |`,
+    `  TODO: describe the user-facing impact in 1-2 sentences.`,
+    `breaking: false`,
+    `migration: false`,
+    `scripts: []`,
+  ].join("\n") + "\n";
+
+  fs.mkdirSync(FRAGMENTS_DIR, { recursive: true });
+  fs.writeFileSync(filepath, content, "utf8");
+
+  console.log(`\n✅ Created: changelog/fragments/${filename}`);
+  console.log("\nNext steps:");
+  console.log(`  1. Edit the description in changelog/fragments/${filename}`);
+  console.log(`  2. git add changelog/fragments/${filename}`);
+  console.log(`  3. 
git commit -s -m "docs(changelog): add fragment for PR #${pr}"`); +} + +main().catch((e) => { + console.error(e); + process.exit(1); +}); diff --git a/changelog/scripts/assemble.ts b/changelog/scripts/assemble.ts new file mode 100644 index 00000000..a59c6ef1 --- /dev/null +++ b/changelog/scripts/assemble.ts @@ -0,0 +1,201 @@ +#!/usr/bin/env tsx +/** + * changelog:assemble — Assemble fragments into a versioned CHANGELOG section + * Usage: pnpm changelog:assemble --version 1.3.0 [--date 2026-04-01] [--dry-run] + */ +import * as fs from "fs"; +import * as path from "path"; +import { parse } from "yaml"; + +const FRAGMENTS_DIR = path.resolve(process.cwd(), "changelog/fragments"); +const CHANGELOG_PATH = path.resolve(process.cwd(), "CHANGELOG.md"); + +const TYPE_ORDER = ["feat", "fix", "perf", "refactor", "security", "docs", "chore"] as const; + +const TYPE_LABELS: Record = { + feat: "✨ Nouvelles Fonctionnalités", + fix: "🔧 Corrections de Bugs", + perf: "⚡ Performances", + refactor: "🔄 Refactoring", + security: "🔒 Sécurité", + docs: "📚 Documentation", + chore: "🛠️ Technique", +}; + +interface Fragment { + pr: number; + type: string; + scope: string; + title: string; + description?: string; + breaking?: boolean; + migration?: boolean; + scripts?: string[]; +} + +function loadFragments(): Array<{ fragment: Fragment; file: string }> { + if (!fs.existsSync(FRAGMENTS_DIR)) return []; + return fs + .readdirSync(FRAGMENTS_DIR) + .filter((f) => f.endsWith(".yml")) + .map((f) => { + const filepath = path.join(FRAGMENTS_DIR, f); + const raw = fs.readFileSync(filepath, "utf8"); + const fragment = parse(raw) as Fragment; + return { fragment, file: f }; + }); +} + +function formatLine(fragment: Fragment): string { + let line = `- **${fragment.title} (#${fragment.pr})**`; + if (fragment.description) { + const desc = fragment.description.trim().replace(/\n/g, " "); + line += ` — ${desc}`; + } + if (fragment.migration) { + line += " ⚠️ Migration DB."; + } + return line; +} + +function 
buildSection(version: string, date: string, fragments: Array<{ fragment: Fragment; file: string }>): string { + const breaking = fragments.filter((f) => f.fragment.breaking); + const withScripts = fragments.filter( + (f) => f.fragment.scripts && f.fragment.scripts.length > 0 + ); + + const lines: string[] = [`## [${version}] - ${date}`, ""]; + + // Breaking changes first + if (breaking.length > 0) { + lines.push("### 🔨 Breaking Changes", ""); + breaking.forEach(({ fragment }) => lines.push(formatLine(fragment))); + lines.push(""); + } + + // Group by type in order + for (const type of TYPE_ORDER) { + const group = fragments.filter( + (f) => f.fragment.type === type && (!f.fragment.breaking || breaking.length === 0) + ); + if (group.length === 0) continue; + lines.push(`### ${TYPE_LABELS[type] ?? type}`, ""); + group.forEach(({ fragment }) => lines.push(formatLine(fragment))); + lines.push(""); + } + + // Scripts section + if (withScripts.length > 0) { + lines.push("### 🔧 Scripts Post-Deploy", ""); + withScripts.forEach(({ fragment }) => { + lines.push(`**PR #${fragment.pr} — ${fragment.title}:**`); + fragment.scripts!.forEach((s) => lines.push("```bash", s, "```")); + lines.push(""); + }); + } + + lines.push("---", ""); + return lines.join("\n"); +} + +function parseArgs() { + const args = process.argv.slice(2); + const get = (flag: string) => { + const i = args.indexOf(flag); + return i !== -1 ? args[i + 1] : undefined; + }; + return { + version: get("--version"), + date: get("--date") ?? 
new Date().toISOString().slice(0, 10), + dryRun: args.includes("--dry-run"), + }; +} + +const { version, date, dryRun } = parseArgs(); + +if (!version) { + console.error("Usage: pnpm changelog:assemble --version [--date YYYY-MM-DD] [--dry-run]"); + process.exit(1); +} + +const entries = loadFragments(); +if (entries.length === 0) { + console.log("No fragments found in changelog/fragments/ — nothing to assemble."); + process.exit(0); +} + +console.log(`Assembling ${entries.length} fragment(s) into version ${version}...`); + +const section = buildSection(version, date, entries); + +if (dryRun) { + console.log("\n--- DRY RUN OUTPUT ---\n"); + console.log(section); + console.log("--- END DRY RUN ---\n"); + console.log("No files modified (--dry-run)."); + process.exit(0); +} + +// Update CHANGELOG.md +const NEXT_RELEASE_MARKER = "## [Next Release]"; +const NEXT_RELEASE_PLACEHOLDER = [ + "## [Next Release]", + "", + "", + "", + "---", + "", +].join("\n"); + +let changelog = fs.readFileSync(CHANGELOG_PATH, "utf8"); + +if (!changelog.includes(NEXT_RELEASE_MARKER)) { + // Inject placeholder at top after title block + const firstRelease = changelog.search(/^## \[/m); + if (firstRelease !== -1) { + changelog = + changelog.slice(0, firstRelease) + + NEXT_RELEASE_PLACEHOLDER + + "\n" + + changelog.slice(firstRelease); + } +} + +// Replace ## [Next Release] section with the new versioned section + a fresh placeholder +const markerIdx = changelog.indexOf(NEXT_RELEASE_MARKER); +if (markerIdx !== -1) { + // Find end of the Next Release section (next ## or EOF) + const afterMarker = changelog.indexOf("\n## ", markerIdx + 1); + const nextSectionStart = afterMarker !== -1 ? 
afterMarker + 1 : changelog.length; + changelog = + changelog.slice(0, markerIdx) + + NEXT_RELEASE_PLACEHOLDER + + "\n" + + section + + changelog.slice(nextSectionStart); +} + +fs.writeFileSync(CHANGELOG_PATH, changelog, "utf8"); +console.log(`✅ CHANGELOG.md updated with version ${version}`); + +// Archive fragments +const archiveDir = path.join(FRAGMENTS_DIR, "released", version); +fs.mkdirSync(archiveDir, { recursive: true }); + +entries.forEach(({ file }) => { + const src = path.join(FRAGMENTS_DIR, file); + const dst = path.join(archiveDir, file); + fs.renameSync(src, dst); +}); + +// Recreate .gitkeep if fragments/ is now empty +const remaining = fs.readdirSync(FRAGMENTS_DIR).filter((f) => f.endsWith(".yml")); +if (remaining.length === 0) { + fs.writeFileSync(path.join(FRAGMENTS_DIR, ".gitkeep"), "", "utf8"); +} + +console.log(`✅ ${entries.length} fragment(s) archived to changelog/fragments/released/${version}/`); +console.log("\nNext steps:"); +console.log(" git add CHANGELOG.md changelog/"); +console.log(` git commit -s -m "chore(release): assemble changelog for v${version}"`); +console.log(` git tag v${version}`); diff --git a/changelog/scripts/audit.ts b/changelog/scripts/audit.ts new file mode 100644 index 00000000..fa048f9a --- /dev/null +++ b/changelog/scripts/audit.ts @@ -0,0 +1,184 @@ +#!/usr/bin/env tsx +/** + * changelog:audit — Detect merged PRs without a changelog fragment + * Usage: pnpm changelog:audit [--since v1.2.0] [--repo owner/repo] [--json] + * Exit 0 if all covered, exit 1 if gaps found. 
+ */ +import { execSync } from "child_process"; +import * as fs from "fs"; +import * as path from "path"; +import { parse } from "yaml"; + +const FRAGMENTS_DIR = path.resolve(process.cwd(), "changelog/fragments"); +const BYPASS_LABELS = ["skip-changelog", "dependencies", "release", "chore: deps"]; + +interface PrInfo { + number: number; + title: string; + mergedAt: string; + labels: Array<{ name: string }>; + url: string; +} + +interface Fragment { + pr: number; + [key: string]: unknown; +} + +function run(cmd: string): string { + return execSync(cmd, { encoding: "utf8" }).trim(); +} + +function parseArgs() { + const args = process.argv.slice(2); + const get = (flag: string) => { + const i = args.indexOf(flag); + return i !== -1 ? args[i + 1] : undefined; + }; + return { + since: get("--since"), + repo: get("--repo"), + json: args.includes("--json"), + }; +} + +function detectRepo(argRepo?: string): string { + if (argRepo) return argRepo; + try { + const remote = run("git remote get-url origin"); + const match = remote.match(/github\.com[:/]([^/]+\/[^/.]+)/); + if (match) return match[1]; + } catch { + // ignore + } + console.error("❌ Could not detect repo. 
Use --repo owner/repo"); + process.exit(1); +} + +function detectSinceDate(sinceTag?: string): string { + if (sinceTag) { + try { + return run(`git log -1 --format=%aI ${sinceTag}`); + } catch { + console.error(`❌ Tag not found: ${sinceTag}`); + process.exit(1); + } + } + try { + const lastTag = run("git describe --tags --abbrev=0"); + return run(`git log -1 --format=%aI ${lastTag}`); + } catch { + // No tags — use epoch + return "1970-01-01T00:00:00Z"; + } +} + +function loadAllFragmentPrs(): Set { + const prNums = new Set(); + const scanDir = (dir: string) => { + if (!fs.existsSync(dir)) return; + for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { + if (entry.isDirectory()) { + scanDir(path.join(dir, entry.name)); + } else if (entry.name.endsWith(".yml")) { + try { + const raw = fs.readFileSync(path.join(dir, entry.name), "utf8"); + const frag = parse(raw) as Partial; + if (typeof frag.pr === "number") prNums.add(frag.pr); + } catch { + // ignore malformed fragments + } + } + } + }; + scanDir(FRAGMENTS_DIR); + return prNums; +} + +async function main() { + const { since, repo: argRepo, json } = parseArgs(); + const repo = detectRepo(argRepo); + const sinceDate = detectSinceDate(since); + + if (!json) console.log(`🔍 Auditing merged PRs in ${repo} since ${sinceDate}...\n`); + + // Fetch merged PRs via gh CLI + let prs: PrInfo[]; + try { + const raw = run( + `gh pr list --repo ${repo} --state merged --base develop --json number,title,mergedAt,labels,url --limit 200` + ); + const all: PrInfo[] = JSON.parse(raw); + prs = all.filter((pr) => pr.mergedAt >= sinceDate); + } catch (e) { + console.error(`❌ Failed to fetch PRs: ${e}`); + process.exit(1); + } + + const coveredPrs = loadAllFragmentPrs(); + + const covered: PrInfo[] = []; + const bypassed: PrInfo[] = []; + const missing: PrInfo[] = []; + const orphans: number[] = []; + + for (const pr of prs) { + const labels = pr.labels.map((l) => l.name); + if (coveredPrs.has(pr.number)) { + covered.push(pr); 
+ } else if (BYPASS_LABELS.some((b) => labels.includes(b))) { + bypassed.push(pr); + } else { + missing.push(pr); + } + } + + // Orphan detection: fragments referencing PRs not in the merged list + const mergedNums = new Set(prs.map((p) => p.number)); + for (const pr of coveredPrs) { + if (!mergedNums.has(pr)) orphans.push(pr); + } + + if (json) { + console.log( + JSON.stringify( + { + summary: { + total: prs.length, + covered: covered.length, + bypassed: bypassed.length, + missing: missing.length, + orphans: orphans.length, + }, + missing: missing.map((p) => ({ number: p.number, title: p.title, url: p.url })), + orphans, + }, + null, + 2 + ) + ); + } else { + console.log(`Total PRs merged: ${prs.length}`); + console.log(` ✅ Covered: ${covered.length}`); + console.log(` ⏭️ Bypassed: ${bypassed.length}`); + console.log(` ❌ Missing: ${missing.length}`); + if (orphans.length > 0) { + console.log(` ⚠️ Orphan fragments (no matching PR): ${orphans.join(", ")}`); + } + + if (missing.length > 0) { + console.log("\n❌ PRs without fragment:"); + missing.forEach((p) => console.log(` #${p.number} — ${p.title}\n ${p.url}`)); + console.log("\nFix: pnpm changelog:add (then commit the fragment on the PR branch)"); + } else { + console.log("\n✅ All merged PRs have changelog coverage."); + } + } + + process.exit(missing.length > 0 ? 1 : 0); +} + +main().catch((e) => { + console.error(e); + process.exit(1); +}); diff --git a/changelog/scripts/validate.ts b/changelog/scripts/validate.ts new file mode 100644 index 00000000..2a761271 --- /dev/null +++ b/changelog/scripts/validate.ts @@ -0,0 +1,140 @@ +#!/usr/bin/env tsx +/** + * changelog:validate — Validate a changelog fragment file + * Usage: pnpm changelog:validate changelog/fragments/123-my-feature.yml + * Exit 0 if valid, exit 1 with error messages if not. 
+ */ +import * as fs from "fs"; +import * as path from "path"; +import { parse } from "yaml"; + +const TYPES = ["feat", "fix", "perf", "refactor", "security", "docs", "chore"]; +const FRAGMENTS_DIR = path.resolve(process.cwd(), "changelog/fragments"); + +interface Fragment { + pr: number; + type: string; + scope: string; + title: string; + description?: string; + breaking?: boolean; + migration?: boolean; + scripts?: string[]; +} + +function validateFragment(filepath: string): string[] { + const errors: string[] = []; + + let raw: string; + try { + raw = fs.readFileSync(filepath, "utf8"); + } catch { + return [`Cannot read file: ${filepath}`]; + } + + let fragment: Partial; + try { + fragment = parse(raw) as Partial; + } catch (e) { + return [`Invalid YAML: ${e}`]; + } + + // Required fields + if (fragment.pr === undefined || fragment.pr === null) { + errors.push("Missing required field: pr"); + } else if (typeof fragment.pr !== "number" || fragment.pr <= 0) { + errors.push(`Field 'pr' must be a positive number, got: ${fragment.pr}`); + } + + if (!fragment.type) { + errors.push("Missing required field: type"); + } else if (!TYPES.includes(fragment.type)) { + errors.push(`Field 'type' must be one of: ${TYPES.join(", ")}, got: ${fragment.type}`); + } + + if (!fragment.scope || String(fragment.scope).trim() === "") { + errors.push("Missing required field: scope"); + } + + if (!fragment.title || String(fragment.title).trim() === "") { + errors.push("Missing required field: title"); + } else if (fragment.title.length > 80) { + errors.push(`Field 'title' exceeds 80 chars: ${fragment.title.length} chars`); + } + + // Validate scripts if present + if (fragment.scripts !== undefined) { + if (!Array.isArray(fragment.scripts)) { + errors.push("Field 'scripts' must be an array"); + } else { + fragment.scripts.forEach((s, i) => { + if (typeof s !== "string" || s.trim() === "") { + errors.push(`Field 'scripts[${i}]' must be a non-empty string`); + } + }); + } + } + + // Check PR 
number matches filename + const basename = path.basename(filepath); + const match = basename.match(/^(\d+)-/); + if (match) { + const filenamePr = parseInt(match[1], 10); + if (fragment.pr !== undefined && fragment.pr !== filenamePr) { + errors.push( + `PR number mismatch: filename says ${filenamePr}, field 'pr' says ${fragment.pr}` + ); + } + } else { + errors.push(`Filename doesn't start with PR number: ${basename}`); + } + + return errors; +} + +function checkDuplicates(targetFile: string): string[] { + const warnings: string[] = []; + const targetBasename = path.basename(targetFile); + const targetMatch = targetBasename.match(/^(\d+)-/); + if (!targetMatch) return warnings; + const targetPr = parseInt(targetMatch[1], 10); + + try { + const files = fs.readdirSync(FRAGMENTS_DIR).filter((f) => f.endsWith(".yml")); + const duplicates = files.filter((f) => { + if (f === targetBasename) return false; + const m = f.match(/^(\d+)-/); + return m && parseInt(m[1], 10) === targetPr; + }); + if (duplicates.length > 0) { + warnings.push( + `Warning: duplicate PR #${targetPr} in fragments: ${duplicates.join(", ")}` + ); + } + } catch { + // FRAGMENTS_DIR may not exist yet, ignore + } + + return warnings; +} + +const filepath = process.argv[2]; +if (!filepath) { + console.error("Usage: pnpm changelog:validate "); + process.exit(1); +} + +const absPath = path.resolve(process.cwd(), filepath); +const errors = validateFragment(absPath); +const warnings = checkDuplicates(absPath); + +warnings.forEach((w) => console.warn(`⚠️ ${w}`)); + +if (errors.length > 0) { + console.error(`❌ Validation failed for ${path.basename(filepath)}:`); + errors.forEach((e) => console.error(` - ${e}`)); + process.exit(1); +} + +console.log(`✅ Valid: ${path.basename(filepath)}`); +process.exit(0); diff --git a/package.json b/package.json new file mode 100644 index 00000000..9f4ef454 --- /dev/null +++ b/package.json @@ -0,0 +1,17 @@ +{ + "name": "rtk-changelog-tools", + "version": "1.0.0", + "private": 
true, + "scripts": { + "changelog:add": "tsx changelog/scripts/add.ts", + "changelog:validate": "tsx changelog/scripts/validate.ts", + "changelog:assemble": "tsx changelog/scripts/assemble.ts", + "changelog:audit": "tsx changelog/scripts/audit.ts" + }, + "devDependencies": { + "tsx": "^4.19.2", + "typescript": "^5.7.3", + "yaml": "^2.7.0", + "@types/node": "^22.13.10" + } +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml new file mode 100644 index 00000000..8dd2a999 --- /dev/null +++ b/pnpm-lock.yaml @@ -0,0 +1,352 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + devDependencies: + '@types/node': + specifier: ^22.13.10 + version: 22.19.15 + tsx: + specifier: ^4.19.2 + version: 4.21.0 + typescript: + specifier: ^5.7.3 + version: 5.9.3 + yaml: + specifier: ^2.7.0 + version: 2.8.2 + +packages: + + '@esbuild/aix-ppc64@0.27.4': + resolution: {integrity: sha512-cQPwL2mp2nSmHHJlCyoXgHGhbEPMrEEU5xhkcy3Hs/O7nGZqEpZ2sUtLaL9MORLtDfRvVl2/3PAuEkYZH0Ty8Q==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.27.4': + resolution: {integrity: sha512-gdLscB7v75wRfu7QSm/zg6Rx29VLdy9eTr2t44sfTW7CxwAtQghZ4ZnqHk3/ogz7xao0QAgrkradbBzcqFPasw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.27.4': + resolution: {integrity: sha512-X9bUgvxiC8CHAGKYufLIHGXPJWnr0OCdR0anD2e21vdvgCI8lIfqFbnoeOz7lBjdrAGUhqLZLcQo6MLhTO2DKQ==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.27.4': + resolution: {integrity: sha512-PzPFnBNVF292sfpfhiyiXCGSn9HZg5BcAz+ivBuSsl6Rk4ga1oEXAamhOXRFyMcjwr2DVtm40G65N3GLeH1Lvw==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.27.4': + resolution: {integrity: sha512-b7xaGIwdJlht8ZFCvMkpDN6uiSmnxxK56N2GDTMYPr2/gzvfdQN8rTfBsvVKmIVY/X7EM+/hJKEIbbHs9oA4tQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.27.4': + resolution: {integrity: 
sha512-sR+OiKLwd15nmCdqpXMnuJ9W2kpy0KigzqScqHI3Hqwr7IXxBp3Yva+yJwoqh7rE8V77tdoheRYataNKL4QrPw==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.27.4': + resolution: {integrity: sha512-jnfpKe+p79tCnm4GVav68A7tUFeKQwQyLgESwEAUzyxk/TJr4QdGog9sqWNcUbr/bZt/O/HXouspuQDd9JxFSw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.27.4': + resolution: {integrity: sha512-2kb4ceA/CpfUrIcTUl1wrP/9ad9Atrp5J94Lq69w7UwOMolPIGrfLSvAKJp0RTvkPPyn6CIWrNy13kyLikZRZQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.27.4': + resolution: {integrity: sha512-7nQOttdzVGth1iz57kxg9uCz57dxQLHWxopL6mYuYthohPKEK0vU0C3O21CcBK6KDlkYVcnDXY099HcCDXd9dA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.27.4': + resolution: {integrity: sha512-aBYgcIxX/wd5n2ys0yESGeYMGF+pv6g0DhZr3G1ZG4jMfruU9Tl1i2Z+Wnj9/KjGz1lTLCcorqE2viePZqj4Eg==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.27.4': + resolution: {integrity: sha512-oPtixtAIzgvzYcKBQM/qZ3R+9TEUd1aNJQu0HhGyqtx6oS7qTpvjheIWBbes4+qu1bNlo2V4cbkISr8q6gRBFA==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.27.4': + resolution: {integrity: sha512-8mL/vh8qeCoRcFH2nM8wm5uJP+ZcVYGGayMavi8GmRJjuI3g1v6Z7Ni0JJKAJW+m0EtUuARb6Lmp4hMjzCBWzA==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.27.4': + resolution: {integrity: sha512-1RdrWFFiiLIW7LQq9Q2NES+HiD4NyT8Itj9AUeCl0IVCA459WnPhREKgwrpaIfTOe+/2rdntisegiPWn/r/aAw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.27.4': + resolution: {integrity: sha512-tLCwNG47l3sd9lpfyx9LAGEGItCUeRCWeAx6x2Jmbav65nAwoPXfewtAdtbtit/pJFLUWOhpv0FpS6GQAmPrHA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.27.4': + resolution: {integrity: 
sha512-BnASypppbUWyqjd1KIpU4AUBiIhVr6YlHx/cnPgqEkNoVOhHg+YiSVxM1RLfiy4t9cAulbRGTNCKOcqHrEQLIw==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.27.4': + resolution: {integrity: sha512-+eUqgb/Z7vxVLezG8bVB9SfBie89gMueS+I0xYh2tJdw3vqA/0ImZJ2ROeWwVJN59ihBeZ7Tu92dF/5dy5FttA==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.27.4': + resolution: {integrity: sha512-S5qOXrKV8BQEzJPVxAwnryi2+Iq5pB40gTEIT69BQONqR7JH1EPIcQ/Uiv9mCnn05jff9umq/5nqzxlqTOg9NA==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.27.4': + resolution: {integrity: sha512-xHT8X4sb0GS8qTqiwzHqpY00C95DPAq7nAwX35Ie/s+LO9830hrMd3oX0ZMKLvy7vsonee73x0lmcdOVXFzd6Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.27.4': + resolution: {integrity: sha512-RugOvOdXfdyi5Tyv40kgQnI0byv66BFgAqjdgtAKqHoZTbTF2QqfQrFwa7cHEORJf6X2ht+l9ABLMP0dnKYsgg==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.27.4': + resolution: {integrity: sha512-2MyL3IAaTX+1/qP0O1SwskwcwCoOI4kV2IBX1xYnDDqthmq5ArrW94qSIKCAuRraMgPOmG0RDTA74mzYNQA9ow==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.27.4': + resolution: {integrity: sha512-u8fg/jQ5aQDfsnIV6+KwLOf1CmJnfu1ShpwqdwC0uA7ZPwFws55Ngc12vBdeUdnuWoQYx/SOQLGDcdlfXhYmXQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.27.4': + resolution: {integrity: sha512-JkTZrl6VbyO8lDQO3yv26nNr2RM2yZzNrNHEsj9bm6dOwwu9OYN28CjzZkH57bh4w0I2F7IodpQvUAEd1mbWXg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.27.4': + resolution: {integrity: sha512-/gOzgaewZJfeJTlsWhvUEmUG4tWEY2Spp5M20INYRg2ZKl9QPO3QEEgPeRtLjEWSW8FilRNacPOg8R1uaYkA6g==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.27.4': + resolution: {integrity: 
sha512-Z9SExBg2y32smoDQdf1HRwHRt6vAHLXcxD2uGgO/v2jK7Y718Ix4ndsbNMU/+1Qiem9OiOdaqitioZwxivhXYg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.27.4': + resolution: {integrity: sha512-DAyGLS0Jz5G5iixEbMHi5KdiApqHBWMGzTtMiJ72ZOLhbu/bzxgAe8Ue8CTS3n3HbIUHQz/L51yMdGMeoxXNJw==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.27.4': + resolution: {integrity: sha512-+knoa0BDoeXgkNvvV1vvbZX4+hizelrkwmGJBdT17t8FNPwG2lKemmuMZlmaNQ3ws3DKKCxpb4zRZEIp3UxFCg==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@types/node@22.19.15': + resolution: {integrity: sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==} + + esbuild@0.27.4: + resolution: {integrity: sha512-Rq4vbHnYkK5fws5NF7MYTU68FPRE1ajX7heQ/8QXXWqNgqqJ/GkmmyxIzUnf2Sr/bakf8l54716CcMGHYhMrrQ==} + engines: {node: '>=18'} + hasBin: true + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + get-tsconfig@4.13.6: + resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==} + + resolve-pkg-maps@1.0.0: + resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + + tsx@4.21.0: + resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==} + engines: {node: '>=18.0.0'} + hasBin: true + + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + + yaml@2.8.2: + resolution: {integrity: 
sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==} + engines: {node: '>= 14.6'} + hasBin: true + +snapshots: + + '@esbuild/aix-ppc64@0.27.4': + optional: true + + '@esbuild/android-arm64@0.27.4': + optional: true + + '@esbuild/android-arm@0.27.4': + optional: true + + '@esbuild/android-x64@0.27.4': + optional: true + + '@esbuild/darwin-arm64@0.27.4': + optional: true + + '@esbuild/darwin-x64@0.27.4': + optional: true + + '@esbuild/freebsd-arm64@0.27.4': + optional: true + + '@esbuild/freebsd-x64@0.27.4': + optional: true + + '@esbuild/linux-arm64@0.27.4': + optional: true + + '@esbuild/linux-arm@0.27.4': + optional: true + + '@esbuild/linux-ia32@0.27.4': + optional: true + + '@esbuild/linux-loong64@0.27.4': + optional: true + + '@esbuild/linux-mips64el@0.27.4': + optional: true + + '@esbuild/linux-ppc64@0.27.4': + optional: true + + '@esbuild/linux-riscv64@0.27.4': + optional: true + + '@esbuild/linux-s390x@0.27.4': + optional: true + + '@esbuild/linux-x64@0.27.4': + optional: true + + '@esbuild/netbsd-arm64@0.27.4': + optional: true + + '@esbuild/netbsd-x64@0.27.4': + optional: true + + '@esbuild/openbsd-arm64@0.27.4': + optional: true + + '@esbuild/openbsd-x64@0.27.4': + optional: true + + '@esbuild/openharmony-arm64@0.27.4': + optional: true + + '@esbuild/sunos-x64@0.27.4': + optional: true + + '@esbuild/win32-arm64@0.27.4': + optional: true + + '@esbuild/win32-ia32@0.27.4': + optional: true + + '@esbuild/win32-x64@0.27.4': + optional: true + + '@types/node@22.19.15': + dependencies: + undici-types: 6.21.0 + + esbuild@0.27.4: + optionalDependencies: + '@esbuild/aix-ppc64': 0.27.4 + '@esbuild/android-arm': 0.27.4 + '@esbuild/android-arm64': 0.27.4 + '@esbuild/android-x64': 0.27.4 + '@esbuild/darwin-arm64': 0.27.4 + '@esbuild/darwin-x64': 0.27.4 + '@esbuild/freebsd-arm64': 0.27.4 + '@esbuild/freebsd-x64': 0.27.4 + '@esbuild/linux-arm': 0.27.4 + '@esbuild/linux-arm64': 0.27.4 + '@esbuild/linux-ia32': 0.27.4 + 
'@esbuild/linux-loong64': 0.27.4 + '@esbuild/linux-mips64el': 0.27.4 + '@esbuild/linux-ppc64': 0.27.4 + '@esbuild/linux-riscv64': 0.27.4 + '@esbuild/linux-s390x': 0.27.4 + '@esbuild/linux-x64': 0.27.4 + '@esbuild/netbsd-arm64': 0.27.4 + '@esbuild/netbsd-x64': 0.27.4 + '@esbuild/openbsd-arm64': 0.27.4 + '@esbuild/openbsd-x64': 0.27.4 + '@esbuild/openharmony-arm64': 0.27.4 + '@esbuild/sunos-x64': 0.27.4 + '@esbuild/win32-arm64': 0.27.4 + '@esbuild/win32-ia32': 0.27.4 + '@esbuild/win32-x64': 0.27.4 + + fsevents@2.3.3: + optional: true + + get-tsconfig@4.13.6: + dependencies: + resolve-pkg-maps: 1.0.0 + + resolve-pkg-maps@1.0.0: {} + + tsx@4.21.0: + dependencies: + esbuild: 0.27.4 + get-tsconfig: 4.13.6 + optionalDependencies: + fsevents: 2.3.3 + + typescript@5.9.3: {} + + undici-types@6.21.0: {} + + yaml@2.8.2: {} From 4f6359f4b7d51844850b854c373fb1be06abcba2 Mon Sep 17 00:00:00 2001 From: Florian BRUNIAUX Date: Fri, 20 Mar 2026 14:23:55 +0100 Subject: [PATCH 2/6] chore(changelog): add tsconfig.json for changelog scripts type resolution Signed-off-by: Florian BRUNIAUX --- tsconfig.json | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 tsconfig.json diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..e51b6745 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "CommonJS", + "moduleResolution": "node", + "lib": ["ES2022"], + "types": ["node"], + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "outDir": "dist" + }, + "include": ["changelog/scripts/**/*.ts"], + "exclude": ["node_modules", "dist"] +} From 95b8cd53706c70ecfc88b8d0aeb6d121a4750f8e Mon Sep 17 00:00:00 2001 From: Florian BRUNIAUX Date: Fri, 20 Mar 2026 14:32:42 +0100 Subject: [PATCH 3/6] test(changelog): add unit tests for validate and assemble scripts Export validateFragment/checkDuplicates from validate.ts and buildSection/Fragment from assemble.ts. 
Guard CLI entrypoints with require.main === module so imports don't trigger process.exit. Add validate.test.ts (10 tests) and assemble.test.ts (9 tests) using node:test + assert. Add test:changelog script to package.json. Co-Authored-By: Claude Sonnet 4.6 Signed-off-by: Florian BRUNIAUX --- changelog/scripts/assemble.test.ts | 105 ++++++++++++++++++++ changelog/scripts/assemble.ts | 150 +++++++++++++++-------------- changelog/scripts/validate.test.ts | 145 ++++++++++++++++++++++++++++ changelog/scripts/validate.ts | 42 ++++---- package.json | 3 +- 5 files changed, 350 insertions(+), 95 deletions(-) create mode 100644 changelog/scripts/assemble.test.ts create mode 100644 changelog/scripts/validate.test.ts diff --git a/changelog/scripts/assemble.test.ts b/changelog/scripts/assemble.test.ts new file mode 100644 index 00000000..4622bb89 --- /dev/null +++ b/changelog/scripts/assemble.test.ts @@ -0,0 +1,105 @@ +/** + * Unit tests for changelog/scripts/assemble.ts + * Run with: pnpm test:changelog + */ +import { describe, it } from "node:test"; +import assert from "node:assert/strict"; +import { buildSection, Fragment } from "./assemble.js"; + +function entry(fragment: Fragment, file = `${fragment.pr}-fragment.yml`) { + return { fragment, file }; +} + +const BASE_FEAT: Fragment = { + pr: 1, + type: "feat", + scope: "hook", + title: "Add hook filtering support", +}; + +const BASE_FIX: Fragment = { + pr: 2, + type: "fix", + scope: "git", + title: "Fix log filter", +}; + +describe("buildSection", () => { + it("empty fragments list returns section header with no type groups", () => { + const result = buildSection("1.0.0", "2026-01-01", []); + assert.ok(result.includes("## [1.0.0] - 2026-01-01"), "Should contain version header"); + assert.ok(!result.includes("###"), "Should have no type sections for empty input"); + }); + + it("single feat fragment appears under Nouvelles Fonctionnalités", () => { + const result = buildSection("1.0.0", "2026-01-01", [entry(BASE_FEAT)]); + 
assert.ok(result.includes("✨ Nouvelles Fonctionnalités"), "Should include feat section label"); + assert.ok(result.includes("Add hook filtering support"), "Should include fragment title"); + assert.ok(result.includes("(#1)"), "Should include PR number"); + }); + + it("single fix fragment appears under Corrections de Bugs", () => { + const result = buildSection("1.0.0", "2026-01-01", [entry(BASE_FIX)]); + assert.ok(result.includes("🔧 Corrections de Bugs"), "Should include fix section label"); + assert.ok(result.includes("Fix log filter"), "Should include fragment title"); + assert.ok(result.includes("(#2)"), "Should include PR number"); + }); + + it("breaking: true fragment appears under Breaking Changes", () => { + const fragment: Fragment = { ...BASE_FEAT, breaking: true }; + const result = buildSection("2.0.0", "2026-02-01", [entry(fragment)]); + assert.ok(result.includes("🔨 Breaking Changes"), "Should include breaking changes section"); + assert.ok(result.includes("Add hook filtering support"), "Should include fragment title"); + }); + + it("migration: true fragment line includes migration warning", () => { + const fragment: Fragment = { ...BASE_FIX, migration: true }; + const result = buildSection("1.1.0", "2026-01-15", [entry(fragment)]); + assert.ok(result.includes("⚠️ Migration DB."), "Should include migration warning marker"); + }); + + it("scripts non-empty causes fragment to appear under Scripts Post-Deploy", () => { + const fragment: Fragment = { + ...BASE_FIX, + scripts: ["psql -d mydb -c 'ALTER TABLE ...'"], + }; + const result = buildSection("1.1.0", "2026-01-15", [entry(fragment)]); + assert.ok(result.includes("🔧 Scripts Post-Deploy"), "Should include scripts section"); + assert.ok(result.includes("psql -d mydb"), "Should include the script content"); + }); + + it("type ordering: feat before fix before security in output", () => { + const secFragment: Fragment = { pr: 3, type: "security", scope: "auth", title: "Patch XSS" }; + const entries = 
[entry(BASE_FIX), entry(secFragment), entry(BASE_FEAT)]; + const result = buildSection("1.2.0", "2026-03-01", entries); + + const featIdx = result.indexOf("✨ Nouvelles Fonctionnalités"); + const fixIdx = result.indexOf("🔧 Corrections de Bugs"); + const secIdx = result.indexOf("🔒 Sécurité"); + + assert.ok(featIdx !== -1, "feat section should be present"); + assert.ok(fixIdx !== -1, "fix section should be present"); + assert.ok(secIdx !== -1, "security section should be present"); + assert.ok(featIdx < fixIdx, "feat should come before fix"); + assert.ok(fixIdx < secIdx, "fix should come before security"); + }); + + it("multiple fragments of same type all appear in the same section", () => { + const fix2: Fragment = { pr: 10, type: "fix", scope: "cargo", title: "Fix cargo output" }; + const result = buildSection("1.0.1", "2026-01-20", [entry(BASE_FIX), entry(fix2)]); + + assert.equal( + (result.match(/🔧 Corrections de Bugs/g) ?? []).length, + 1, + "Should have exactly one fix section heading" + ); + assert.ok(result.includes("Fix log filter"), "Should include first fix fragment"); + assert.ok(result.includes("Fix cargo output"), "Should include second fix fragment"); + }); + + it("PR number appears in output as (#123)", () => { + const fragment: Fragment = { pr: 123, type: "feat", scope: "core", title: "Big feature" }; + const result = buildSection("1.5.0", "2026-04-01", [entry(fragment)]); + assert.ok(result.includes("(#123)"), "Should format PR as (#123)"); + }); +}); diff --git a/changelog/scripts/assemble.ts b/changelog/scripts/assemble.ts index a59c6ef1..66d8c10d 100644 --- a/changelog/scripts/assemble.ts +++ b/changelog/scripts/assemble.ts @@ -22,7 +22,7 @@ const TYPE_LABELS: Record = { chore: "🛠️ Technique", }; -interface Fragment { +export interface Fragment { pr: number; type: string; scope: string; @@ -58,7 +58,7 @@ function formatLine(fragment: Fragment): string { return line; } -function buildSection(version: string, date: string, fragments: Array<{ 
fragment: Fragment; file: string }>): string { +export function buildSection(version: string, date: string, fragments: Array<{ fragment: Fragment; file: string }>): string { const breaking = fragments.filter((f) => f.fragment.breaking); const withScripts = fragments.filter( (f) => f.fragment.scripts && f.fragment.scripts.length > 0 @@ -111,91 +111,93 @@ function parseArgs() { }; } -const { version, date, dryRun } = parseArgs(); +if (require.main === module) { + const { version, date, dryRun } = parseArgs(); -if (!version) { - console.error("Usage: pnpm changelog:assemble --version [--date YYYY-MM-DD] [--dry-run]"); - process.exit(1); -} + if (!version) { + console.error("Usage: pnpm changelog:assemble --version [--date YYYY-MM-DD] [--dry-run]"); + process.exit(1); + } -const entries = loadFragments(); -if (entries.length === 0) { - console.log("No fragments found in changelog/fragments/ — nothing to assemble."); - process.exit(0); -} + const entries = loadFragments(); + if (entries.length === 0) { + console.log("No fragments found in changelog/fragments/ — nothing to assemble."); + process.exit(0); + } -console.log(`Assembling ${entries.length} fragment(s) into version ${version}...`); + console.log(`Assembling ${entries.length} fragment(s) into version ${version}...`); -const section = buildSection(version, date, entries); + const section = buildSection(version, date, entries); -if (dryRun) { - console.log("\n--- DRY RUN OUTPUT ---\n"); - console.log(section); - console.log("--- END DRY RUN ---\n"); - console.log("No files modified (--dry-run)."); - process.exit(0); -} + if (dryRun) { + console.log("\n--- DRY RUN OUTPUT ---\n"); + console.log(section); + console.log("--- END DRY RUN ---\n"); + console.log("No files modified (--dry-run)."); + process.exit(0); + } -// Update CHANGELOG.md -const NEXT_RELEASE_MARKER = "## [Next Release]"; -const NEXT_RELEASE_PLACEHOLDER = [ - "## [Next Release]", - "", - "", - "", - "---", - "", -].join("\n"); - -let changelog = 
fs.readFileSync(CHANGELOG_PATH, "utf8"); - -if (!changelog.includes(NEXT_RELEASE_MARKER)) { - // Inject placeholder at top after title block - const firstRelease = changelog.search(/^## \[/m); - if (firstRelease !== -1) { + // Update CHANGELOG.md + const NEXT_RELEASE_MARKER = "## [Next Release]"; + const NEXT_RELEASE_PLACEHOLDER = [ + "## [Next Release]", + "", + "", + "", + "---", + "", + ].join("\n"); + + let changelog = fs.readFileSync(CHANGELOG_PATH, "utf8"); + + if (!changelog.includes(NEXT_RELEASE_MARKER)) { + // Inject placeholder at top after title block + const firstRelease = changelog.search(/^## \[/m); + if (firstRelease !== -1) { + changelog = + changelog.slice(0, firstRelease) + + NEXT_RELEASE_PLACEHOLDER + + "\n" + + changelog.slice(firstRelease); + } + } + + // Replace ## [Next Release] section with the new versioned section + a fresh placeholder + const markerIdx = changelog.indexOf(NEXT_RELEASE_MARKER); + if (markerIdx !== -1) { + // Find end of the Next Release section (next ## or EOF) + const afterMarker = changelog.indexOf("\n## ", markerIdx + 1); + const nextSectionStart = afterMarker !== -1 ? afterMarker + 1 : changelog.length; changelog = - changelog.slice(0, firstRelease) + + changelog.slice(0, markerIdx) + NEXT_RELEASE_PLACEHOLDER + "\n" + - changelog.slice(firstRelease); + section + + changelog.slice(nextSectionStart); } -} -// Replace ## [Next Release] section with the new versioned section + a fresh placeholder -const markerIdx = changelog.indexOf(NEXT_RELEASE_MARKER); -if (markerIdx !== -1) { - // Find end of the Next Release section (next ## or EOF) - const afterMarker = changelog.indexOf("\n## ", markerIdx + 1); - const nextSectionStart = afterMarker !== -1 ? 
afterMarker + 1 : changelog.length; - changelog = - changelog.slice(0, markerIdx) + - NEXT_RELEASE_PLACEHOLDER + - "\n" + - section + - changelog.slice(nextSectionStart); -} + fs.writeFileSync(CHANGELOG_PATH, changelog, "utf8"); + console.log(`✅ CHANGELOG.md updated with version ${version}`); -fs.writeFileSync(CHANGELOG_PATH, changelog, "utf8"); -console.log(`✅ CHANGELOG.md updated with version ${version}`); + // Archive fragments + const archiveDir = path.join(FRAGMENTS_DIR, "released", version); + fs.mkdirSync(archiveDir, { recursive: true }); -// Archive fragments -const archiveDir = path.join(FRAGMENTS_DIR, "released", version); -fs.mkdirSync(archiveDir, { recursive: true }); + entries.forEach(({ file }) => { + const src = path.join(FRAGMENTS_DIR, file); + const dst = path.join(archiveDir, file); + fs.renameSync(src, dst); + }); -entries.forEach(({ file }) => { - const src = path.join(FRAGMENTS_DIR, file); - const dst = path.join(archiveDir, file); - fs.renameSync(src, dst); -}); + // Recreate .gitkeep if fragments/ is now empty + const remaining = fs.readdirSync(FRAGMENTS_DIR).filter((f) => f.endsWith(".yml")); + if (remaining.length === 0) { + fs.writeFileSync(path.join(FRAGMENTS_DIR, ".gitkeep"), "", "utf8"); + } -// Recreate .gitkeep if fragments/ is now empty -const remaining = fs.readdirSync(FRAGMENTS_DIR).filter((f) => f.endsWith(".yml")); -if (remaining.length === 0) { - fs.writeFileSync(path.join(FRAGMENTS_DIR, ".gitkeep"), "", "utf8"); + console.log(`✅ ${entries.length} fragment(s) archived to changelog/fragments/released/${version}/`); + console.log("\nNext steps:"); + console.log(" git add CHANGELOG.md changelog/"); + console.log(` git commit -s -m "chore(release): assemble changelog for v${version}"`); + console.log(` git tag v${version}`); } - -console.log(`✅ ${entries.length} fragment(s) archived to changelog/fragments/released/${version}/`); -console.log("\nNext steps:"); -console.log(" git add CHANGELOG.md changelog/"); -console.log(` git 
commit -s -m "chore(release): assemble changelog for v${version}"`); -console.log(` git tag v${version}`); diff --git a/changelog/scripts/validate.test.ts b/changelog/scripts/validate.test.ts new file mode 100644 index 00000000..f757656a --- /dev/null +++ b/changelog/scripts/validate.test.ts @@ -0,0 +1,145 @@ +/** + * Unit tests for changelog/scripts/validate.ts + * Run with: pnpm test:changelog + */ +import { describe, it } from "node:test"; +import assert from "node:assert/strict"; +import * as fs from "fs"; +import * as os from "os"; +import * as path from "path"; +import { validateFragment } from "./validate.js"; + +function writeTmp(filename: string, content: string): string { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), "rtk-validate-")); + const filepath = path.join(dir, filename); + fs.writeFileSync(filepath, content, "utf8"); + return filepath; +} + +describe("validateFragment", () => { + it("valid fragment passes with no errors", () => { + const fp = writeTmp( + "123-add-feature.yml", + `pr: 123\ntype: feat\nscope: "hook"\ntitle: "Add support for hook filtering"\nbreaking: false\nmigration: false\nscripts: []\n` + ); + const errors = validateFragment(fp); + assert.deepEqual(errors, []); + }); + + it("missing pr field returns error", () => { + const fp = writeTmp( + "456-fix-bug.yml", + `type: fix\nscope: "git"\ntitle: "Fix log filter"\n` + ); + const errors = validateFragment(fp); + assert.ok( + errors.some((e) => e.includes("pr")), + `Expected error about 'pr', got: ${JSON.stringify(errors)}` + ); + }); + + it("missing type field returns error", () => { + const fp = writeTmp( + "789-fix-bug.yml", + `pr: 789\nscope: "git"\ntitle: "Fix log filter"\n` + ); + const errors = validateFragment(fp); + assert.ok( + errors.some((e) => e.includes("type")), + `Expected error about 'type', got: ${JSON.stringify(errors)}` + ); + }); + + it("missing scope field returns error", () => { + const fp = writeTmp( + "101-feat.yml", + `pr: 101\ntype: feat\ntitle: "Some 
feature"\n` + ); + const errors = validateFragment(fp); + assert.ok( + errors.some((e) => e.includes("scope")), + `Expected error about 'scope', got: ${JSON.stringify(errors)}` + ); + }); + + it("missing title field returns error", () => { + const fp = writeTmp( + "202-feat.yml", + `pr: 202\ntype: feat\nscope: "core"\n` + ); + const errors = validateFragment(fp); + assert.ok( + errors.some((e) => e.includes("title")), + `Expected error about 'title', got: ${JSON.stringify(errors)}` + ); + }); + + it("type not in enum returns error", () => { + const fp = writeTmp( + "303-stuff.yml", + `pr: 303\ntype: stuff\nscope: "core"\ntitle: "Something"\n` + ); + const errors = validateFragment(fp); + assert.ok( + errors.some((e) => e.includes("type")), + `Expected error about 'type' enum, got: ${JSON.stringify(errors)}` + ); + }); + + it("title longer than 80 chars returns error", () => { + const longTitle = "A".repeat(81); + const fp = writeTmp( + "404-feat.yml", + `pr: 404\ntype: feat\nscope: "core"\ntitle: "${longTitle}"\n` + ); + const errors = validateFragment(fp); + assert.ok( + errors.some((e) => e.includes("title") && e.includes("80")), + `Expected error about title length, got: ${JSON.stringify(errors)}` + ); + }); + + it("PR number in filename doesn't match pr field returns error", () => { + const fp = writeTmp( + "500-feat.yml", + `pr: 999\ntype: feat\nscope: "core"\ntitle: "Something"\n` + ); + const errors = validateFragment(fp); + assert.ok( + errors.some((e) => e.toLowerCase().includes("mismatch") || e.includes("500") || e.includes("999")), + `Expected PR mismatch error, got: ${JSON.stringify(errors)}` + ); + }); + + it("scripts field with non-string items returns error", () => { + const fp = writeTmp( + "600-feat.yml", + `pr: 600\ntype: feat\nscope: "db"\ntitle: "Migrate schema"\nscripts:\n - 42\n - true\n` + ); + const errors = validateFragment(fp); + assert.ok( + errors.some((e) => e.includes("scripts")), + `Expected error about scripts, got: 
${JSON.stringify(errors)}` + ); + }); + + it("valid fragment with all optional fields passes", () => { + const fp = writeTmp( + "700-migration.yml", + [ + "pr: 700", + 'type: fix', + 'scope: "db"', + 'title: "Run schema migration"', + "description: |", + " Adds a new index to the users table.", + "breaking: false", + "migration: true", + "scripts:", + " - psql -d mydb -c 'CREATE INDEX ...'", + ].join("\n") + "\n" + ); + const errors = validateFragment(fp); + assert.deepEqual(errors, []); + }); +}); diff --git a/changelog/scripts/validate.ts b/changelog/scripts/validate.ts index 2a761271..c4a4d10c 100644 --- a/changelog/scripts/validate.ts +++ b/changelog/scripts/validate.ts @@ -8,10 +8,10 @@ import * as fs from "fs"; import * as path from "path"; import { parse } from "yaml"; -const TYPES = ["feat", "fix", "perf", "refactor", "security", "docs", "chore"]; +export const TYPES = ["feat", "fix", "perf", "refactor", "security", "docs", "chore"]; const FRAGMENTS_DIR = path.resolve(process.cwd(), "changelog/fragments"); -interface Fragment { +export interface Fragment { pr: number; type: string; scope: string; @@ -22,7 +22,7 @@ interface Fragment { scripts?: string[]; } -function validateFragment(filepath: string): string[] { +export function validateFragment(filepath: string): string[] { const errors: string[] = []; let raw: string; @@ -92,7 +92,7 @@ function validateFragment(filepath: string): string[] { return errors; } -function checkDuplicates(targetFile: string): string[] { +export function checkDuplicates(targetFile: string): string[] { const warnings: string[] = []; const targetBasename = path.basename(targetFile); const targetMatch = targetBasename.match(/^(\d+)-/); @@ -118,23 +118,25 @@ function checkDuplicates(targetFile: string): string[] { return warnings; } -const filepath = process.argv[2]; -if (!filepath) { - console.error("Usage: pnpm changelog:validate "); - process.exit(1); -} +if (require.main === module) { + const filepath = process.argv[2]; + if 
(!filepath) { + console.error("Usage: pnpm changelog:validate "); + process.exit(1); + } -const absPath = path.resolve(process.cwd(), filepath); -const errors = validateFragment(absPath); -const warnings = checkDuplicates(absPath); + const absPath = path.resolve(process.cwd(), filepath); + const errors = validateFragment(absPath); + const warnings = checkDuplicates(absPath); -warnings.forEach((w) => console.warn(`⚠️ ${w}`)); + warnings.forEach((w) => console.warn(`⚠️ ${w}`)); -if (errors.length > 0) { - console.error(`❌ Validation failed for ${path.basename(filepath)}:`); - errors.forEach((e) => console.error(` - ${e}`)); - process.exit(1); -} + if (errors.length > 0) { + console.error(`❌ Validation failed for ${path.basename(filepath)}:`); + errors.forEach((e) => console.error(` - ${e}`)); + process.exit(1); + } -console.log(`✅ Valid: ${path.basename(filepath)}`); -process.exit(0); + console.log(`✅ Valid: ${path.basename(filepath)}`); + process.exit(0); +} diff --git a/package.json b/package.json index 9f4ef454..80cdfbfe 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,8 @@ "changelog:add": "tsx changelog/scripts/add.ts", "changelog:validate": "tsx changelog/scripts/validate.ts", "changelog:assemble": "tsx changelog/scripts/assemble.ts", - "changelog:audit": "tsx changelog/scripts/audit.ts" + "changelog:audit": "tsx changelog/scripts/audit.ts", + "test:changelog": "tsx --test changelog/scripts/validate.test.ts changelog/scripts/assemble.test.ts" }, "devDependencies": { "tsx": "^4.19.2", From b7842b5101adb834f56662260610dd0f1185ba99 Mon Sep 17 00:00:00 2001 From: Florian BRUNIAUX Date: Fri, 20 Mar 2026 14:32:47 +0100 Subject: [PATCH 4/6] docs: document changelog fragment workflow in CONTRIBUTING and CLAUDE.md Add "Changelog Fragments" section to CONTRIBUTING.md (before Pull Request Process) covering the why, workflow, bypass labels, and schema reference. 
Add a concise "Changelog Fragments" section to CLAUDE.md with the three key commands and a reminder not to edit CHANGELOG.md directly. Co-Authored-By: Claude Sonnet 4.6 Signed-off-by: Florian BRUNIAUX --- CLAUDE.md | 146 ++++++++++++++++++++++++++++++++++++++++++++++++ CONTRIBUTING.md | 43 ++++++++++++++ 2 files changed, 189 insertions(+) diff --git a/CLAUDE.md b/CLAUDE.md index 35ff19ed..c04fa508 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -22,6 +22,18 @@ rtk gain # Should show token savings stats (NOT "command not found") If `rtk gain` fails, you have the wrong package installed. +## Changelog Fragments + +Every PR needs a fragment in `changelog/fragments/{PR}-{slug}.yml`. Do NOT edit CHANGELOG.md directly. + +```bash +pnpm changelog:add # Interactive — creates the fragment file +pnpm changelog:validate # Validate a specific fragment +pnpm changelog:assemble --version x.y.z # Release time only — assembles into CHANGELOG.md +``` + +Fragment format documented in `changelog/schema.yml`. Required fields: `pr`, `type`, `scope`, `title`. + ## Development Commands > **Note**: If rtk is installed, prefer `rtk ` over raw commands for token-optimized output. @@ -615,3 +627,137 @@ rtk newcmd args # 7. Document # Update README.md, CHANGELOG.md, this file ``` + + +# RTK (Rust Token Killer) - Token-Optimized Commands + +## Golden Rule + +**Always prefix commands with `rtk`**. If RTK has a dedicated filter, it uses it. If not, it passes through unchanged. This means RTK is always safe to use. + +**Important**: Even in command chains with `&&`, use `rtk`: +```bash +# ❌ Wrong +git add . && git commit -m "msg" && git push + +# ✅ Correct +rtk git add . 
&& rtk git commit -m "msg" && rtk git push +``` + +## RTK Commands by Workflow + +### Build & Compile (80-90% savings) +```bash +rtk cargo build # Cargo build output +rtk cargo check # Cargo check output +rtk cargo clippy # Clippy warnings grouped by file (80%) +rtk tsc # TypeScript errors grouped by file/code (83%) +rtk lint # ESLint/Biome violations grouped (84%) +rtk prettier --check # Files needing format only (70%) +rtk next build # Next.js build with route metrics (87%) +``` + +### Test (90-99% savings) +```bash +rtk cargo test # Cargo test failures only (90%) +rtk vitest run # Vitest failures only (99.5%) +rtk playwright test # Playwright failures only (94%) +rtk test # Generic test wrapper - failures only +``` + +### Git (59-80% savings) +```bash +rtk git status # Compact status +rtk git log # Compact log (works with all git flags) +rtk git diff # Compact diff (80%) +rtk git show # Compact show (80%) +rtk git add # Ultra-compact confirmations (59%) +rtk git commit # Ultra-compact confirmations (59%) +rtk git push # Ultra-compact confirmations +rtk git pull # Ultra-compact confirmations +rtk git branch # Compact branch list +rtk git fetch # Compact fetch +rtk git stash # Compact stash +rtk git worktree # Compact worktree +``` + +Note: Git passthrough works for ALL subcommands, even those not explicitly listed. + +### GitHub (26-87% savings) +```bash +rtk gh pr view # Compact PR view (87%) +rtk gh pr checks # Compact PR checks (79%) +rtk gh run list # Compact workflow runs (82%) +rtk gh issue list # Compact issue list (80%) +rtk gh api # Compact API responses (26%) +``` + +### JavaScript/TypeScript Tooling (70-90% savings) +```bash +rtk pnpm list # Compact dependency tree (70%) +rtk pnpm outdated # Compact outdated packages (80%) +rtk pnpm install # Compact install output (90%) +rtk npm run