From a3c3c5a318d68bdb521ae68476955620ffa3eea2 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 09:23:11 +0530
Subject: [PATCH 01/18] feat(ctx): add CTX AI context engine integration
---
demo/ctx/.gitignore | 136 ++++++++++++++
demo/ctx/README.md | 362 +++++++++++++++++++++++++++++++++++++
demo/ctx/agent.ts | 290 +++++++++++++++++++++++++++++
demo/ctx/package.json | 17 ++
demo/ctx/src/agent-loop.ts | 112 ++++++++++++
demo/ctx/src/cli.ts | 108 +++++++++++
demo/ctx/src/files.ts | 65 +++++++
demo/ctx/src/i18n.ts | 77 ++++++++
demo/ctx/src/jsonc.ts | 103 +++++++++++
demo/ctx/src/research.ts | 172 ++++++++++++++++++
demo/ctx/src/state.ts | 57 ++++++
demo/ctx/src/ui.ts | 91 ++++++++++
demo/ctx/src/voices.ts | 92 ++++++++++
pnpm-lock.yaml | 39 ++++
14 files changed, 1721 insertions(+)
create mode 100644 demo/ctx/.gitignore
create mode 100644 demo/ctx/README.md
create mode 100755 demo/ctx/agent.ts
create mode 100644 demo/ctx/package.json
create mode 100644 demo/ctx/src/agent-loop.ts
create mode 100644 demo/ctx/src/cli.ts
create mode 100644 demo/ctx/src/files.ts
create mode 100644 demo/ctx/src/i18n.ts
create mode 100644 demo/ctx/src/jsonc.ts
create mode 100644 demo/ctx/src/research.ts
create mode 100644 demo/ctx/src/state.ts
create mode 100644 demo/ctx/src/ui.ts
create mode 100644 demo/ctx/src/voices.ts
diff --git a/demo/ctx/.gitignore b/demo/ctx/.gitignore
new file mode 100644
index 000000000..1170717c1
--- /dev/null
+++ b/demo/ctx/.gitignore
@@ -0,0 +1,136 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# Snowpack dependency directory (https://snowpack.dev/)
+web_modules/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional stylelint cache
+.stylelintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variable files
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+.parcel-cache
+
+# Next.js build output
+.next
+out
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+.temp
+.cache
+
+# vitepress build output
+**/.vitepress/dist
+
+# vitepress cache directory
+**/.vitepress/cache
+
+# Docusaurus cache and generated files
+.docusaurus
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+.vscode-test
+
+# yarn v2
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
diff --git a/demo/ctx/README.md b/demo/ctx/README.md
new file mode 100644
index 000000000..24a5c131a
--- /dev/null
+++ b/demo/ctx/README.md
@@ -0,0 +1,362 @@
+
+
+
+
+
+ ctx — AI context engine for lingo.dev
+
+
+
+ Your AI translator knows grammar. ctx teaches it your product.
+
+
+
+
+
+ Problem •
+ What It Fixes •
+ How It Works •
+ Brand Voices •
+ Agentic Pipeline •
+ Install •
+ Usage •
+ JSONC Notes
+
+
+
+
+
+
+
+
+---
+
+## The Problem
+
+lingo.dev is genuinely great — fast, cheap, AI-powered translation that plugs straight into your codebase. But out of the box, it has no idea who *you* are. It'll translate "ship it" differently every time. It'll switch between formal and informal mid-product. It'll call your core feature something different in every locale. The translations are correct — they're just not *yours*.
+
+> "ship" → translated as "enviar" (to mail/send) instead of "lanzar" (to launch/deploy)
+> "fly solo" → translated literally instead of "trabajar solo"
+> tú vs vos inconsistency across files because no one wrote down the register rule
+
+lingo.dev solves this with [`lingo-context.md`](https://lingo.dev/en/translator-context) — a global context file it injects into every translation prompt. But writing it by hand takes hours, and keeping it current as your codebase grows is easy to forget.
+
+**ctx automates that entirely.** It reads your project, understands your product, and generates a precise, structured `lingo-context.md`. Then it keeps it in sync as your source files change — file by file, cheaply, only processing what actually changed.
+
+---
+
+## What ctx Actually Fixes
+
+lingo handles the translation. ctx makes sure every translation sounds like it came from the same company, in the same voice, on the same product. It's the difference between *translated* and *localized*.
+
+- **Pronoun consistency** — picks `tú` or `usted` once, enforces it everywhere. No more mixed register in the same product.
+- **Grammar conventions** — locale-specific rules baked in. German compound nouns, French gender agreements, Japanese politeness levels — defined once, applied always.
+- **Repeated terms** — your product's vocabulary is locked. "Workspace" is always "Workspace", not "Space", "Area", or "Room" depending on which string Claude saw first.
+- **On-brand tone** — not just "be professional" (useless), but "use informal du, keep CTAs under 4 words, never use exclamation marks".
+
+---
+
+## How It Works
+
+```
+your lingo.dev project
+├── i18n.json ← ctx reads this: locales, bucket paths, provider config
+├── lingo-context.md ← ctx generates and maintains this
+└── app/locales/
+ ├── en.tsx ← source locale files ctx reads and analyses
+ ├── en.jsonc ← ctx injects per-key translator notes here
+ └── en/
+ └── getting-started.md
+```
+
+ctx reads `i18n.json` to discover your bucket files, analyses only the source locale, and writes a context file that covers:
+
+- **App** — what the product does, factual, no marketing copy
+- **Tone & Voice** — explicit dos and don'ts the translator must follow
+- **Audience** — who reads these strings and in what context
+- **Languages** — per-language pitfalls: pronoun register, dialect, length warnings
+- **Tricky Terms** — every ambiguous, idiomatic, or domain-specific term with exact guidance
+- **Files** — per-file rules for files that need them
+
+Once written, ctx injects the full context into `i18n.json` as the provider prompt so lingo.dev carries it into every translation automatically.
+
+---
+
+## UI
+
+ctx has a minimal terminal UI designed to stay out of your way. Every stage is clearly labelled, tool calls are shown inline, and you always know where you are.
+
+**Fresh scan — first run:**
+```
+ ctx /your-project
+ lingo-context.md · claude-sonnet-4-6 · en → es fr de
+
+ ◆ Research
+ scanning project + searching web
+
+ ↳ web_search lingo crypto exchange localization
+ ↳ read_file en.jsonc
+ ↳ list_files app/locales
+ ↳ read_file package.json
+
+ ◆ Context Generation
+ writing lingo-context.md
+
+ ↳ read_file en.jsonc
+ ↳ read_file en/getting-started.md
+ ↳ write_file lingo-context.md
+
+ ◆ JSONC Injection
+ ↳ annotate en.jsonc
+
+ ┌─ Review: en.jsonc ──────────────────────────────────────┐
+
+ {
+ // Buy/sell action — use financial verb, not generic "send"
+ "trade.submit": "Place order",
+
+ // Shown on empty portfolio — encouraging, not alarming
+ "portfolio.empty": "No assets yet"
+ }
+
+ └──────────────────────────────────────────────────────────┘
+
+ ❯ Accept
+ Request changes
+ Skip
+
+ ◆ Provider Sync
+
+ ✓ Done
+```
+
+**Update run — after changing a file:**
+```
+ ctx /your-project
+ lingo-context.md · claude-sonnet-4-6 · en → es fr de
+
+ [1/2] en.jsonc
+ ↳ write_file lingo-context.md
+
+ [2/2] en/getting-started.md
+ ↳ write_file lingo-context.md
+
+ ~ Tricky Terms (+2 terms)
+ ~ Files (getting-started.md updated)
+
+ ◆ JSONC Injection
+ ↳ annotate en.jsonc
+
+ ◆ Provider Sync
+
+ ✓ Done
+```
+
+**No changes:**
+```
+ ✓ No new changes (uncommitted) — context is up to date.
+
+ ❯ No, exit
+ Yes, regenerate
+```
+
+**Brand voices (`--voices`):**
+```
+ ◆ Brand Voices
+ generating voice for es
+
+ ┌─ Review: voice · es ────────────────────────────────────┐
+
+ Write in Spanish using informal tú throughout — never usted.
+ Tone is direct and confident, like a senior dev talking to
+ a peer. Avoid exclamation marks. Keep CTAs under 4 words.
+ Financial terms use standard Latin American conventions.
+
+ └──────────────────────────────────────────────────────────┘
+
+ ❯ Accept
+ Request changes
+ Skip
+```
+
+---
+
+## Brand Voices
+
+Beyond the global context, ctx can generate a **brand voice** per locale — a concise prose brief that tells the translator exactly how your product sounds in that language.
+
+```bash
+ctx ./my-app --voices
+```
+
+A brand voice covers pronoun register (tú/usted, du/Sie, tu/vous), tone, audience context, and locale-specific conventions — all derived from your existing `lingo-context.md`. Voices are written into `i18n.json` under `provider.voices` and picked up by lingo.dev automatically.
+
+Each voice goes through a review loop — accept, skip, or give feedback and the agent revises.
+
+---
+
+## Agentic Pipeline
+
+ctx runs as a multi-step agentic pipeline. Each step is a separate Claude call with a focused job — not one big prompt trying to do everything.
+
+```
+ctx run
+ │
+ ├── Step 1: Research (first run only, optional)
+ │ Claude searches the web + reads your project files
+ │ Produces a product brief: market, audience, tone conventions
+ │ Or: answer 4 quick questions yourself
+ │
+ ├── Step 2: Fresh scan (first run only)
+ │ Claude agent explores the project using tools:
+ │ list_files → read_file → write_file
+ │ Reads: i18n.json + bucket files + package.json + README
+ │ Writes: lingo-context.md
+ │
+ ├── Step 3: Per-file update (subsequent runs)
+ │ For each changed source file — one Claude call per file:
+ │ Reads: current lingo-context.md + one changed file
+ │ Updates: only the sections affected by that file
+ │ Records: file hash so it won't re-process unless content changes
+ │
+ ├── Step 4: JSONC comment injection (for .jsonc buckets)
+ │ One Claude call per .jsonc source file:
+ │ Reads: lingo-context.md + source file
+ │ Writes: per-key // translator notes inline in the file
+ │ lingo.dev reads these natively during translation
+ │
+ └── Step 5: Provider sync
+ Writes the full lingo-context.md into i18n.json provider.prompt
+ so lingo.dev uses it automatically — no manual step needed
+```
+
+**Why per-file?** Sending all changed files in one prompt crushes context and produces shallow analysis. Processing one file at a time keeps the window focused — Claude can deeply scan every string for tricky terms rather than skimming.
+
+**Human in the loop:** Every write shows a preview and waits for approval. You can request changes and the agent revises with full context, or skip a step entirely.
+
+---
+
+## Install
+
+**Requirements:** [Bun](https://bun.sh) and an Anthropic API key.
+
+```bash
+git clone https://github.com/bhavya031/ctx
+cd ctx
+bun install
+bun link
+```
+
+```bash
+export ANTHROPIC_API_KEY=your_key_here
+```
+
+---
+
+## Usage
+
+```bash
+# Run in your lingo.dev project
+ctx ./my-app
+
+# With a focus prompt
+ctx ./my-app -p "B2B SaaS, formal tone, legal/compliance domain"
+
+# Preview what would run without writing anything
+ctx ./my-app --dry-run
+
+# Use files changed in last 3 commits
+ctx ./my-app --commits 3
+
+# Generate brand voices for all target locales
+ctx ./my-app --voices
+```
+
+**Options:**
+
+| Flag | Short | Default | Description |
+|------|-------|---------|-------------|
+| `--prompt` | `-p` | interactive | What the agent should focus on |
+| `--out` | `-o` | `lingo-context.md` | Output file path |
+| `--model` | `-m` | `claude-haiku-4-5` | Claude model to use |
+| `--commits` | `-c` | — | Use files changed in last N commits |
+| `--dry-run` | `-d` | `false` | Preview what would run, write nothing |
+| `--voices` | `-V` | `false` | Generate brand voices only |
+| `--help` | `-h` | — | Show help |
+
+---
+
+## Modes
+
+| Mode | Trigger | What runs |
+|------|---------|-----------|
+| **Fresh** | No `lingo-context.md` yet | Research → full agent scan → writes context from scratch |
+| **Update** | Context exists, files changed | Per-file update — one agent call per changed bucket file |
+| **Commits** | `--commits <N>` | Same as update but diffs against last N commits instead of uncommitted |
+
+State is tracked via content hashes in `~/.ctx/state/` — only genuinely new or changed files are processed. Hashes are saved only after the full pipeline completes, so cancelling mid-run leaves state untouched and the same changes are detected next run.
+
+---
+
+## JSONC Translator Notes
+
+For `.jsonc` bucket files, ctx injects per-key translator notes directly into the source:
+
+```jsonc
+{
+ // Navigation item in the top header — keep under 12 characters
+ "nav.dashboard": "Dashboard",
+
+ // Button that triggers payment — not just "submit", implies money changing hands
+ "checkout.pay": "Pay now",
+
+ // Shown when session expires — urgent but not alarming, avoid exclamation marks
+ "auth.session_expired": "Your session has ended"
+}
+```
+
+lingo.dev reads these `//` comments natively and passes them to the LLM alongside the string. Notes are generated from `lingo-context.md` so they stay consistent with your global rules. Only changed `.jsonc` files get re-annotated on update runs.
+
+---
+
+## Review Before Writing
+
+ctx never writes silently. Every write — context file, JSONC comments, or brand voices — shows a preview first:
+
+```
+ ┌─ Review: lingo-context.md ──────────────────────────────┐
+
+ ## App
+ A B2B SaaS tool for managing compliance workflows...
+
+ ## Tone & Voice
+ Formal, precise. Use "you" not "we"...
+ … 42 more lines
+
+ └──────────────────────────────────────────────────────────┘
+
+ ❯ Accept
+ Request changes
+ Skip
+```
+
+Choose **Request changes**, describe what's wrong, and the agent revises with full context and shows you the result again.
+
+---
+
+## Tested On
+
+- [lingo-crypto](https://github.com/Bhavya031/lingo-crypto) — crypto exchange UI
+- [others](https://github.com/Bhavya031/others) — mixed project types
+
+---
+
+## Requirements
+
+- [Bun](https://bun.sh) v1.0+
+- `ANTHROPIC_API_KEY`
+- A lingo.dev project with `i18n.json`
+
+---
+
+*Built for the lingo.dev hackathon.*
diff --git a/demo/ctx/agent.ts b/demo/ctx/agent.ts
new file mode 100755
index 000000000..180419f0d
--- /dev/null
+++ b/demo/ctx/agent.ts
@@ -0,0 +1,290 @@
+#!/usr/bin/env tsx
+import Anthropic from "@anthropic-ai/sdk";
+import fs from "fs";
+import path from "path";
+import { values, positionals, selectMenu, textPrompt, die } from "./src/cli.ts";
+import { loadState, clearState, fileHash, filterNewFiles, recordFiles, type FileEntry } from "./src/state.ts";
+import { readFile, listFiles, getChangedFiles, formatFileBlock } from "./src/files.ts";
+import { allTools, writeOnlyTools, runAgent } from "./src/agent-loop.ts";
+import { runJsoncInjection } from "./src/jsonc.ts";
+import { printUpdateSummary, updateI18nProvider } from "./src/i18n.ts";
+import { runResearch } from "./src/research.ts";
+import { runVoices } from "./src/voices.ts";
+import { printHeader, phase, progress, fileItem, ok, warn, fail, info } from "./src/ui.ts";
+
+const targetDir = path.resolve(positionals[0] ?? process.cwd());
+const outPath = path.resolve(targetDir, values.out!);
+const model = values.model!;
+const commitCount = values.commits ? parseInt(values.commits, 10) : null;
+const dryRun = values["dry-run"]!;
+const voicesOnly = values["voices"]!;
+const debug = values["debug"]!;
+
+const dbg = (...args: any[]) => { if (debug) console.log("\x1B[2m[debug]", ...args, "\x1B[0m"); };
+const SKIPPED_MSG = `lingo-context.md was not written — skipping JSONC injection and provider update.`;
+
+async function run() {
+ if (!fs.existsSync(targetDir)) {
+ die(` ✗ Target folder not found: ${targetDir}`);
+ }
+
+ const i18nPath = path.join(targetDir, "i18n.json");
+ if (!fs.existsSync(i18nPath)) {
+ die(` ! No i18n.json found — is this a lingo project?`, ` Run: npx lingo.dev@latest init`);
+ }
+
+ const client = new Anthropic();
+ const hasContext = fs.existsSync(outPath);
+ const isUpdateMode = hasContext && commitCount === null;
+ const isCommitMode = hasContext && commitCount !== null;
+ const isFreshMode = !hasContext;
+
+ const i18nContent = readFile(i18nPath);
+ const i18nBlock = `\n--- i18n.json ---\n${i18nContent}\n`;
+
+ let sourceLocale = "en";
+ let targetLocales: string[] = [];
+ let bucketIncludes: string[] = [];
+ let jsoncSourceFiles: string[] = [];
+ try {
+ const i18n = JSON.parse(i18nContent);
+ sourceLocale = i18n.locale?.source ?? i18n.locale ?? "en";
+ targetLocales = (i18n.locale?.targets ?? i18n.locales ?? []).filter((l: string) => l !== sourceLocale);
+ bucketIncludes = Object.values(i18n.buckets ?? {})
+ .flatMap((b: any) => b.include ?? [])
+ .map((p: string) => p.replace("[locale]", sourceLocale));
+ jsoncSourceFiles = Object.values(i18n.buckets ?? {})
+ .flatMap((b: any) => b.include ?? [])
+ .filter((p: string) => p.endsWith(".jsonc"))
+ .map((p: string) => path.resolve(targetDir, p.replace("[locale]", sourceLocale)))
+ .filter((f: string) => fs.existsSync(f));
+ } catch {}
+
+ function matchesBucket(filePath: string): boolean {
+ return bucketIncludes.some((pattern) => {
+ const abs = path.resolve(targetDir, pattern);
+ return filePath === abs || filePath.endsWith(pattern);
+ });
+ }
+
+ function resolveBucketFiles(): string[] {
+ const results: string[] = [];
+ for (const p of bucketIncludes) {
+ if (p.includes("*")) {
+ const dir = path.resolve(targetDir, path.dirname(p));
+ const ext = path.extname(p);
+ try {
+ for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
+ if (entry.isFile() && (!ext || entry.name.endsWith(ext))) {
+ results.push(path.join(dir, entry.name));
+ }
+ }
+ } catch {}
+ } else {
+ const abs = path.resolve(targetDir, p);
+ try { if (fs.statSync(abs).isFile()) results.push(abs); } catch {}
+ }
+ }
+ return results;
+ }
+
+ const agent = (system: string, message: string, tools: Anthropic.Tool[], review = false) =>
+ runAgent(client, model, system, message, tools, listFiles, review);
+
+ const printDone = () => fs.existsSync(outPath) ? ok(`Done → ${outPath}`) : warn(`Output file was not created`);
+ const modeLabel = isCommitMode ? `last ${commitCount} commit(s)` : "uncommitted";
+
+ printHeader({ targetDir, outPath, model, source: sourceLocale, targets: targetLocales });
+
+ if (voicesOnly) {
+ await runVoices(client, model, outPath, i18nPath, targetLocales);
+ return;
+ }
+
+ dbg(`hasContext=${hasContext} isFreshMode=${isFreshMode} isUpdateMode=${isUpdateMode} isCommitMode=${isCommitMode}`);
+ dbg(`bucketIncludes:`, bucketIncludes);
+ dbg(`jsoncSourceFiles:`, jsoncSourceFiles);
+ dbg(`outPath exists:`, hasContext);
+
+ const freshSystem = `You are a localization context agent. Generate lingo-context.md so an AI translator produces accurate, consistent translations.
+
+Read: i18n.json (provided) → source bucket files → package.json + README. Stop there unless something is still unclear.
+
+Rules:
+- Every rule must be actionable. Bad: "be careful with tone". Good: "use tú not usted — never vos".
+- App section: what it does and who uses it. No marketing language.
+- Language sections: named pitfalls only, no generic advice. Include pronoun register (tú/usted/vos), script/dialect notes, and length warnings.
+- Tricky Terms: flag every ambiguous, idiomatic, or domain-specific term. For tech terms, name the wrong translation risk explicitly (e.g. "ship" — mistranslated as mail/send, means deploy/launch).
+
+Structure (use exactly):
+
+## App
+## Tone & Voice
+## Audience
+## Languages
+Source: ${sourceLocale}
+Targets: ${targetLocales.join(", ") || "none specified"}
+###
+-
+
+## Tricky Terms
+| Term | Risk | Guidance |
+|------|------|----------|
+
+## Files
+###
+What / Tone / Priority
+
+You MUST call write_file to write lingo-context.md. Do NOT output the file content as text — call write_file.`;
+
+ const freshMessage = (prompt: string, brief?: string | null) => [
+ `Instructions:\n${prompt}`,
+ brief ? `\n${brief}` : "",
+ i18nBlock,
+ `Target folder: ${targetDir}`,
+ `Output file: ${outPath}`,
+ `\nExplore the project and write lingo-context.md.`,
+ ].join("\n");
+
+ // --- Update / Commit mode: detect changes BEFORE asking for instructions ---
+ let earlyChangedFiles: ReturnType<typeof filterNewFiles> | null = null;
+ if (isUpdateMode || isCommitMode) {
+ const state = loadState(outPath);
+ const gitChanged = getChangedFiles(targetDir, commitCount);
+ dbg(`gitChanged:`, gitChanged);
+ const allBucket = resolveBucketFiles();
+ dbg(`resolveBucketFiles:`, allBucket);
+ const candidates = [...new Set([...gitChanged.filter(matchesBucket), ...allBucket])];
+ dbg(`candidates:`, candidates);
+ earlyChangedFiles = filterNewFiles(candidates, state);
+ dbg(`earlyChangedFiles:`, earlyChangedFiles.map(([f]) => f));
+
+ if (earlyChangedFiles.length === 0) {
+ ok(`No new changes (${modeLabel}) — context is up to date.`);
+ const choice = await selectMenu("Regenerate anyway?", ["No, exit", "Yes, regenerate"], 0);
+ if (choice === 0) return;
+
+ const override = values.prompt ?? await textPrompt("What should the full regeneration cover?", "blank for default");
+ const regen = override || "Generate a comprehensive lingo-context.md for this project.";
+ clearState(outPath);
+ await agent(freshSystem, freshMessage(regen), allTools, true);
+ if (!fs.existsSync(outPath)) { warn(SKIPPED_MSG); return; }
+ phase("JSONC Injection");
+ const jsoncEntries1 = await runJsoncInjection(client, model, jsoncSourceFiles, outPath, true);
+ phase("Provider Sync");
+ await updateI18nProvider(i18nPath, outPath);
+ recordFiles([...allBucket.map((f) => [f, fileHash(f)] as FileEntry), ...jsoncEntries1, [i18nPath, fileHash(i18nPath)]], outPath);
+ return printDone();
+ }
+ }
+
+ // --- Dry run ---
+ if (dryRun) {
+ if (isFreshMode) {
+ phase("Fresh Scan", "would generate lingo-context.md");
+ if (jsoncSourceFiles.length) {
+ info(`JSONC inject ${jsoncSourceFiles.length} file(s)`);
+ jsoncSourceFiles.forEach((f) => fileItem(path.relative(targetDir, f)));
+ }
+ } else if (earlyChangedFiles && earlyChangedFiles.length > 0) {
+ phase("Update", `${earlyChangedFiles.length} file(s) from ${modeLabel}`);
+ earlyChangedFiles.forEach(([f]) => fileItem(path.relative(targetDir, f)));
+ const jsonc = earlyChangedFiles.map(([f]) => f).filter((f) => jsoncSourceFiles.includes(f));
+ if (jsonc.length) {
+ info(`JSONC inject ${jsonc.length} file(s)`);
+ jsonc.forEach((f) => fileItem(path.relative(targetDir, f)));
+ }
+ } else {
+ ok(`Up to date — nothing to do`);
+ }
+ warn(`dry-run — no files written`);
+ return;
+ }
+
+ // Get instructions
+ let instructions = values.prompt;
+ if (!instructions) {
+ const question = hasContext ? "What changed or what should the update cover?" : "What should the context summary include?";
+ const defaultInstr = hasContext ? "Update lingo-context.md to reflect any recent changes." : "Generate a comprehensive lingo-context.md for this project.";
+ instructions = await textPrompt(question, "blank for default");
+ if (!instructions) instructions = defaultInstr;
+ }
+
+ // --- Fresh mode ---
+ if (isFreshMode) {
+ const brief = await runResearch(client, targetDir, i18nBlock);
+ clearState(outPath);
+ dbg(`ensuring output dir:`, path.dirname(outPath));
+ fs.mkdirSync(path.dirname(outPath), { recursive: true });
+ phase("Context Generation", `writing ${path.basename(outPath)}`);
+ await agent(freshSystem, freshMessage(instructions, brief), allTools, true);
+ dbg(`after agent — outPath exists:`, fs.existsSync(outPath));
+ if (!fs.existsSync(outPath)) { warn(SKIPPED_MSG); return; }
+ const bucketFiles = resolveBucketFiles();
+ phase("JSONC Injection");
+ const jsoncEntries2 = await runJsoncInjection(client, model, jsoncSourceFiles, outPath, true);
+ phase("Provider Sync");
+ await updateI18nProvider(i18nPath, outPath);
+ // Record all hashes last — after everything completes, so a cancel leaves state unchanged
+ recordFiles([...bucketFiles.map((f) => [f, fileHash(f)] as FileEntry), ...jsoncEntries2, [i18nPath, fileHash(i18nPath)]], outPath);
+ return printDone();
+ }
+
+ // --- Update / Commit mode ---
+ if ((isUpdateMode || isCommitMode) && earlyChangedFiles && earlyChangedFiles.length > 0) {
+ const changedFiles = earlyChangedFiles;
+ phase("Context Update", `${changedFiles.length} changed file(s) from ${modeLabel}`);
+
+ const updateSystem = `You are a localization context updater. One file at a time.
+
+Given: existing lingo-context.md + one changed source file. Update the context to reflect it.
+
+Rules:
+- Touch only what this file changes. Leave all other sections as-is.
+- Tricky Terms: scan every string. Add any term that is ambiguous, idiomatic, or has a wrong-translation risk:
+ - Tech verbs with non-obvious meaning (ship = deploy not mail, run = execute not jog, push = git push not shove)
+ - Idioms that fail literally ("off to the races", "bang your head against the wall")
+ - Pronoun/register traps — if the file uses a pronoun register, note it and enforce consistency (e.g. tú throughout — never vos)
+ - Cultural references that don't map across regions
+- Language section: if a new consistency rule emerges from this file, add it.
+
+You MUST call write_file with the full updated lingo-context.md. Do NOT output the content as text.`;
+
+ const beforeContext = readFile(outPath);
+
+ for (let i = 0; i < changedFiles.length; i++) {
+ const [filePath] = changedFiles[i];
+ const fileName = path.relative(targetDir, filePath);
+ progress(i + 1, changedFiles.length, fileName);
+
+ const currentContext = readFile(outPath);
+ const updateMessage = [
+ `Instructions:\n${instructions}`,
+ `\n--- Existing context ---\n${currentContext}`,
+ `\n--- File to process ---${formatFileBlock(filePath)}`,
+ `\nUpdate the context file at: ${outPath}`,
+ ].join("\n");
+
+ await agent(updateSystem, updateMessage, writeOnlyTools, true);
+ }
+
+ printUpdateSummary(beforeContext, readFile(outPath));
+
+ const changedJsonc = changedFiles.map(([f]) => f).filter((f) => jsoncSourceFiles.includes(f));
+ phase("JSONC Injection");
+ const jsoncEntries3 = await runJsoncInjection(client, model, changedJsonc, outPath, true);
+ phase("Provider Sync");
+ await updateI18nProvider(i18nPath, outPath);
+
+ // Record all hashes last — after everything completes, so a cancel leaves state unchanged
+ recordFiles([
+ ...changedFiles.map(([f]) => [f, fileHash(f)] as FileEntry),
+ ...jsoncEntries3,
+ [i18nPath, fileHash(i18nPath)],
+ ], outPath);
+ }
+
+ printDone();
+}
+
+run().catch((e) => die(` ✗ ${e.message}`));
diff --git a/demo/ctx/package.json b/demo/ctx/package.json
new file mode 100644
index 000000000..684f9d7c4
--- /dev/null
+++ b/demo/ctx/package.json
@@ -0,0 +1,17 @@
+{
+ "name": "lingo-agent",
+ "version": "1.0.0",
+ "type": "module",
+ "bin": {
+ "ctx": "./agent.ts"
+ },
+ "scripts": {
+ "agent": "tsx agent.ts"
+ },
+ "dependencies": {
+ "@anthropic-ai/sdk": "^0.78.0"
+ },
+ "devDependencies": {
+ "tsx": "^4.19.3"
+ }
+}
diff --git a/demo/ctx/src/agent-loop.ts b/demo/ctx/src/agent-loop.ts
new file mode 100644
index 000000000..a00f040ff
--- /dev/null
+++ b/demo/ctx/src/agent-loop.ts
@@ -0,0 +1,112 @@
+import Anthropic from "@anthropic-ai/sdk";
+import path from "path";
+import { selectMenu, textPrompt } from "./cli.ts";
+import { readFile, writeFile } from "./files.ts";
+import { toolCall, reviewBox } from "./ui.ts";
+
+export const allTools: Anthropic.Tool[] = [
+ {
+ name: "list_files",
+ description: "List all files in a directory (ignores node_modules, .next, .git, dist)",
+ input_schema: {
+ type: "object",
+ properties: { directory: { type: "string" } },
+ required: ["directory"],
+ additionalProperties: false,
+ },
+ },
+ {
+ name: "read_file",
+ description: "Read the contents of a file",
+ input_schema: {
+ type: "object",
+ properties: { file_path: { type: "string" } },
+ required: ["file_path"],
+ additionalProperties: false,
+ },
+ },
+ {
+ name: "write_file",
+ description: "Write content to a file",
+ input_schema: {
+ type: "object",
+ properties: {
+ file_path: { type: "string" },
+ content: { type: "string" },
+ },
+ required: ["file_path", "content"],
+ additionalProperties: false,
+ },
+ },
+];
+
+export const writeOnlyTools: Anthropic.Tool[] = [allTools[2]];
+
+function executeTool(name: string, input: Record<string, any>, listFilesFn: (dir: string) => string[]): string {
+ switch (name) {
+ case "list_files": return JSON.stringify(listFilesFn(input.directory));
+ case "read_file": return readFile(input.file_path);
+ case "write_file": return writeFile(input.file_path, input.content);
+ default: return `Unknown tool: ${name}`;
+ }
+}
+
+export async function reviewContent(label: string, content: string): Promise<"accept" | "skip" | string> {
+ reviewBox(label, content);
+ const choice = await selectMenu("", ["Accept", "Request changes", "Skip"], 0);
+ if (choice === 0) return "accept";
+ if (choice === 2) return "skip";
+ const feedback = await textPrompt("What should be changed?");
+ return feedback || "skip";
+}
+
+export async function runAgent(
+ client: Anthropic,
+ model: string,
+ system: string,
+ userMessage: string,
+ tools: Anthropic.Tool[],
+ listFilesFn: (dir: string) => string[],
+ review = false,
+) {
+ const messages: Anthropic.MessageParam[] = [{ role: "user", content: userMessage }];
+
+ while (true) {
+ const response = await client.messages.create({ model, max_tokens: 16000, system, tools, messages });
+
+ for (const block of response.content) {
+ if (block.type === "text" && block.text.trim()) {
+ // dim agent reasoning — it's secondary to the tool calls
+ process.stdout.write(`\x1B[2m ${block.text.trim()}\x1B[0m\n`);
+ }
+ }
+
+ if (response.stop_reason !== "tool_use") break;
+
+ const toolUses = response.content.filter((b): b is Anthropic.ToolUseBlock => b.type === "tool_use");
+ messages.push({ role: "assistant", content: response.content });
+
+ const toolResults: Anthropic.ToolResultBlockParam[] = [];
+ for (const tool of toolUses) {
+ const input = tool.input as Record<string, any>;
+
+ if (review && tool.name === "write_file") {
+ const label = path.basename(input.file_path);
+ const result = await reviewContent(label, input.content);
+ if (result === "accept") {
+ toolCall("write_file", input);
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: writeFile(input.file_path, input.content) });
+ } else if (result === "skip") {
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: "User skipped this write — do not write this file." });
+ } else {
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: `User requested changes: ${result}\nPlease revise and call write_file again with the updated content.` });
+ }
+ } else {
+ toolCall(tool.name, input);
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: executeTool(tool.name, input, listFilesFn) });
+ }
+ }
+
+ messages.push({ role: "user", content: toolResults });
+ }
+}
diff --git a/demo/ctx/src/cli.ts b/demo/ctx/src/cli.ts
new file mode 100644
index 000000000..fcfcaee6d
--- /dev/null
+++ b/demo/ctx/src/cli.ts
@@ -0,0 +1,108 @@
+import { parseArgs } from "util";
+
// CLI flag parsing via node:util parseArgs, evaluated at module load so
// every importer shares one canonical view of the command line.
// `values` holds the named flags, `positionals` the free arguments
// (the optional target folder).
export const { values, positionals } = parseArgs({
  args: process.argv.slice(2),
  options: {
    prompt: { type: "string", short: "p" },
    out: { type: "string", short: "o", default: "lingo-context.md" },
    model: { type: "string", short: "m", default: "claude-haiku-4-5" },
    commits: { type: "string", short: "c" },
    "dry-run": { type: "boolean", short: "d", default: false },
    voices: { type: "boolean", short: "V", default: false },
    debug: { type: "boolean", short: "D", default: false },
    help: { type: "boolean", short: "h", default: false },
  },
  allowPositionals: true,
});
+
// Print usage and exit 0 when -h/--help was passed. The template literal
// below is user-facing output — it is emitted verbatim, so its internal
// indentation is intentional and must not be reformatted.
if (values.help) {
  console.log(`
Usage: ctx [folder] [options]

Arguments:
  folder                 Folder to analyse (default: current directory)

Options:
  -p, --prompt           What the agent should focus on
  -o, --out              Output file (default: lingo-context.md)
  -m, --model            Claude model (default: claude-haiku-4-5)
  -c, --commits          Use files changed in last N commits instead of uncommitted
  -d, --dry-run          Show what would run without writing anything
  -V, --voices           Generate per-locale brand voices into i18n.json (requires lingo-context.md)
  -D, --debug            Verbose logging — show all state, tool calls, and file paths
  -h, --help             Show this help

Modes:
  Fresh    lingo-context.md absent → full project scan via agent tools
  Update   lingo-context.md exists → only changed files sent to LLM (uncommitted)
  Commits  --commits → only files changed in last N commits sent to LLM

Examples:
  ctx ./lingo-app -p "B2B SaaS, formal tone"
  ctx ./lingo-app -p "consumer app, friendly and casual"
  ctx ./lingo-app --out lingo-context.md
  ctx --commits 3
`);
  process.exit(0);
}
+
// Arrow-key driven single-select menu rendered inline in the terminal.
// Resolves with the index of the chosen option.
// NOTE(review): the return type annotation appears truncated in this copy
// (`Promise`); presumably Promise<number> — confirm against the repo.
// NOTE(review): setRawMode throws when stdin is not a TTY; a guard is
// added by a later patch in this series.
export async function selectMenu(question: string, options: string[], defaultIndex = 0): Promise {
  let selected = defaultIndex;
  const render = () => {
    // \x1B[?25l hides the cursor while the menu is interactive.
    process.stdout.write("\x1B[?25l");
    process.stdout.write(`\n${question}\n`);
    for (let i = 0; i < options.length; i++) {
      // Highlighted row gets a cyan pointer; others are plain, indented.
      process.stdout.write(i === selected
        ? `\x1B[36m❯ ${options[i]}\x1B[0m\n`
        : `  ${options[i]}\n`
      );
    }
  };
  // Move up over everything render() printed (blank line + question +
  // one line per option) and erase to end of screen.
  const clear = () => process.stdout.write(`\x1B[${options.length + 2}A\x1B[0J`);

  render();

  return new Promise((resolve) => {
    // Raw mode so each keypress arrives immediately, without Enter.
    process.stdin.setRawMode(true);
    process.stdin.resume();
    process.stdin.setEncoding("utf-8");

    const onKey = (key: string) => {
      // \x1B[A / \x1B[B are the up / down arrow escape sequences.
      if (key === "\x1B[A" && selected > 0) { clear(); selected--; render(); }
      else if (key === "\x1B[B" && selected < options.length - 1) { clear(); selected++; render(); }
      else if (key === "\r" || key === "\n") {
        // Enter: restore cursor (\x1B[?25h), detach, resolve selection.
        process.stdout.write("\x1B[?25h");
        process.stdin.setRawMode(false);
        process.stdin.pause();
        process.stdin.off("data", onKey);
        process.stdout.write("\n");
        resolve(selected);
      } else if (key === "\x03") {
        // Ctrl-C: restore cursor before exiting, since raw mode disabled
        // the default SIGINT handling.
        process.stdout.write("\x1B[?25h");
        process.exit(0);
      }
    };

    process.stdin.on("data", onKey);
  });
}
+
+export async function textPrompt(question: string, placeholder = ""): Promise {
+ process.stdout.write(question);
+ if (placeholder) process.stdout.write(` \x1B[2m(${placeholder})\x1B[0m`);
+ process.stdout.write("\n\x1B[36m❯ \x1B[0m");
+
+ return new Promise((resolve) => {
+ process.stdin.resume();
+ process.stdin.setEncoding("utf-8");
+ process.stdin.once("data", (data: string) => {
+ process.stdin.pause();
+ resolve(data.trim());
+ });
+ });
+}
+
+export function die(...lines: string[]): never {
+ for (const line of lines) console.error(line);
+ process.exit(1);
+}
diff --git a/demo/ctx/src/files.ts b/demo/ctx/src/files.ts
new file mode 100644
index 000000000..197f800df
--- /dev/null
+++ b/demo/ctx/src/files.ts
@@ -0,0 +1,65 @@
+import fs from "fs";
+import path from "path";
+import { execSync } from "child_process";
+
// Directory names skipped entirely during recursive listing
// (dependencies, build output, VCS metadata).
const IGNORE = new Set(["node_modules", ".next", ".git", "dist", ".turbo"]);
+
+export function readFile(filePath: string): string {
+ try {
+ const buf = fs.readFileSync(filePath);
+ if (buf.byteLength > 50_000) return `[File too large: ${buf.byteLength} bytes]`;
+ return buf.toString("utf-8");
+ } catch (e) {
+ return `[Error: ${e}]`;
+ }
+}
+
+export function writeFile(filePath: string, content: string): string {
+ fs.mkdirSync(path.dirname(filePath), { recursive: true });
+ fs.writeFileSync(filePath, content, "utf-8");
+ return `Written to ${filePath}`;
+}
+
+export function listFiles(dir: string): string[] {
+ const results: string[] = [];
+ function walk(current: string) {
+ for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
+ if (IGNORE.has(entry.name)) continue;
+ const full = path.join(current, entry.name);
+ entry.isDirectory() ? walk(full) : results.push(full);
+ }
+ }
+ walk(dir);
+ return results;
+}
+
+export function git(cmd: string, cwd: string): string {
+ try { return execSync(cmd, { cwd, encoding: "utf-8" }).trim(); }
+ catch { return ""; }
+}
+
+export function getChangedFiles(cwd: string, commits: number | null): string[] {
+ let output: string;
+ if (commits !== null) {
+ output = git(`git diff HEAD~${commits} --name-only`, cwd);
+ } else {
+ output = git("git status --porcelain", cwd)
+ .split("\n").filter(Boolean).map((l) => l.slice(3).trim()).join("\n");
+ }
+ const paths = output.split("\n").map((f) => f.trim()).filter(Boolean)
+ .map((f) => path.join(cwd, f));
+
+ const files: string[] = [];
+ for (const p of paths) {
+ try {
+ const stat = fs.statSync(p);
+ if (stat.isFile()) files.push(p);
+ else if (stat.isDirectory()) files.push(...listFiles(p));
+ } catch {}
+ }
+ return files;
+}
+
+export function formatFileBlock(filePath: string): string {
+ return `\n--- ${filePath} ---\n${readFile(filePath)}\n`;
+}
diff --git a/demo/ctx/src/i18n.ts b/demo/ctx/src/i18n.ts
new file mode 100644
index 000000000..e82c98592
--- /dev/null
+++ b/demo/ctx/src/i18n.ts
@@ -0,0 +1,77 @@
+import fs from "fs";
+import { readFile } from "./files.ts";
+import { selectMenu } from "./cli.ts";
+import { summaryLine, info } from "./ui.ts";
+
+export function parseSections(content: string): Record {
+ const sections: Record = {};
+ const parts = content.split(/^(## .+)$/m);
+ for (let i = 1; i < parts.length; i += 2) {
+ sections[parts[i].trim()] = parts[i + 1]?.trim() ?? "";
+ }
+ return sections;
+}
+
/**
 * Print a per-section diff summary between two versions of
 * lingo-context.md: "+" for new sections, "-" for removed, "~" for
 * changed. For the "Tricky Terms" and "Files" sections a count of added
 * rows/entries is appended when positive.
 */
export function printUpdateSummary(before: string, after: string): void {
  const prev = parseSections(before);
  const next = parseSections(after);
  const allKeys = new Set([...Object.keys(prev), ...Object.keys(next)]);
  const lines: string[] = [];

  for (const key of allKeys) {
    const label = key.replace("## ", "");
    if (!prev[key]) {
      lines.push(`  + ${label} (new section)`);
    } else if (!next[key]) {
      lines.push(`  - ${label} (removed)`);
    } else if (prev[key] !== next[key]) {
      const pluralize = (n: number, word: string) => `${n} ${word}${n !== 1 ? "s" : ""}`;
      if (label === "Tricky Terms") {
        // Count markdown table data rows: "| …" lines that are neither
        // the separator ("---") nor the header ("Term |").
        const countRows = (s: string) => s.split("\n").filter(l => l.startsWith("| ") && !l.includes("---") && !l.includes("Term |")).length;
        const added = countRows(next[key]) - countRows(prev[key]);
        const suffix = added > 0 ? ` (+${pluralize(added, "term")})` : "";
        lines.push(`  ~ ${label}${suffix}`);
      } else if (label === "Files") {
        // Each documented file is a "### " subheading.
        const countFiles = (s: string) => (s.match(/^### /gm) ?? []).length;
        const added = countFiles(next[key]) - countFiles(prev[key]);
        const suffix = added > 0 ? ` (+${pluralize(added, "file")})` : "";
        lines.push(`  ~ ${label}${suffix}`);
      } else {
        lines.push(`  ~ ${label}`);
      }
    }
  }

  if (lines.length) {
    console.log();
    for (const l of lines) {
      // Re-parse each formatted line back into (prefix, label, detail)
      // so summaryLine can colorize the prefix and dim the detail.
      const prefix = l.trimStart()[0] as "+" | "-" | "~";
      const rest = l.replace(/^\s*[+\-~]\s*/, "");
      const [label, detail] = rest.split(/\s*\((.+)\)$/);
      summaryLine(prefix, label.trim(), detail);
    }
  }
}
+
+export async function updateI18nProvider(i18nPath: string, contextPath: string): Promise {
+ const context = readFile(contextPath);
+ const i18nRaw = fs.readFileSync(i18nPath, "utf-8");
+ const i18n = JSON.parse(i18nRaw);
+
+ const newProvider = {
+ id: "anthropic",
+ model: "claude-haiku-4-5",
+ prompt: `Translate from {source} to {target}.\n\n${context}`,
+ ...(i18n.provider?.voices ? { voices: i18n.provider.voices } : {}),
+ };
+
+ if (i18n.provider) {
+ info(`provider: ${i18n.provider.id} · ${i18n.provider.model}`);
+ const choice = await selectMenu("Overwrite provider with updated context?", ["Update", "Keep existing"], 1);
+ if (choice === 1) return;
+ }
+
+ i18n.provider = newProvider;
+ fs.writeFileSync(i18nPath, JSON.stringify(i18n, null, 2), "utf-8");
+ info(`updated provider in i18n.json`);
+}
diff --git a/demo/ctx/src/jsonc.ts b/demo/ctx/src/jsonc.ts
new file mode 100644
index 000000000..5cda4e0ba
--- /dev/null
+++ b/demo/ctx/src/jsonc.ts
@@ -0,0 +1,103 @@
+import Anthropic from "@anthropic-ai/sdk";
+import fs from "fs";
+import path from "path";
+import { readFile } from "./files.ts";
+import { reviewContent } from "./agent-loop.ts";
+import { fileHash, type FileEntry } from "./state.ts";
+import { toolCall } from "./ui.ts";
+
/**
 * Ask the model for one-line translator notes for every key in a JSONC
 * localization file. Returns a flat key→note map; returns {} when the
 * response contains no parseable JSON object.
 * `feedback`, when set, is injected as revision instructions from a
 * previous review round.
 * NOTE(review): the return annotation appears truncated in this copy
 * (`Promise>`); presumably Promise<Record<string, string>> — confirm.
 */
export async function generateJsoncComments(
  client: Anthropic,
  model: string,
  sourceFile: string,
  lingoContext: string,
  feedback = "",
): Promise> {
  const content = readFile(sourceFile);
  const feedbackBlock = feedback ? `\nUser feedback on previous attempt:\n${feedback}\nPlease revise accordingly.\n` : "";
  const response = await client.messages.create({
    model,
    max_tokens: 4096,
    messages: [{
      role: "user",
      content: `You are generating translator notes for a JSONC localization file.

Localization context:
${lingoContext}

Source file (${path.basename(sourceFile)}):
${content}
${feedbackBlock}
For each key, write a short one-line translator note that tells the translator:
- What UI element or context the string appears in
- Any ambiguity, idiom, or special meaning to watch out for
- Length or tone constraints if relevant

Return ONLY a flat JSON object mapping each key to its note. No nesting, no explanation.
Example: {"nav.home": "Navigation item in top header bar", "checkout.submit": "Button — triggers payment, keep short"}`,
    }],
  });

  // Take the first text block, then extract the outermost {...} span —
  // tolerates prose the model may wrap around the JSON.
  const text = response.content.find((b): b is Anthropic.TextBlock => b.type === "text")?.text ?? "{}";
  const match = text.match(/\{[\s\S]*\}/);
  if (!match) return {};
  try { return JSON.parse(match[0]); } catch { return {}; }
}
+
+export function injectJsoncComments(filePath: string, comments: Record): void {
+ const lines = fs.readFileSync(filePath, "utf-8").split("\n");
+ const result: string[] = [];
+
+ for (const line of lines) {
+ const keyMatch = line.match(/^(\s*)"([^"]+)"\s*:/);
+ if (keyMatch) {
+ const indent = keyMatch[1];
+ const key = keyMatch[2];
+ if (result.length > 0 && result[result.length - 1].trimStart().startsWith("//")) {
+ result.pop();
+ }
+ if (comments[key]) result.push(`${indent}// ${comments[key]}`);
+ }
+ result.push(line);
+ }
+
+ fs.writeFileSync(filePath, result.join("\n"), "utf-8");
+}
+
/**
 * Generate and inject translator notes into each JSONC file, optionally
 * routing every batch of comments through interactive review.
 * Review outcomes: "accept" writes as-is, "skip" drops the file, any
 * other string is user feedback fed back into regeneration.
 * Returns the [path, hash] entries of files actually modified.
 * NOTE(review): the return annotation appears truncated in this copy
 * (`Promise`); presumably Promise<FileEntry[]> — confirm.
 */
export async function runJsoncInjection(
  client: Anthropic,
  model: string,
  files: string[],
  contextPath: string,
  review = false,
): Promise {
  if (files.length === 0) return [];
  const injected: FileEntry[] = [];
  const lingoContext = readFile(contextPath);

  for (const file of files) {
    let comments: Record = {};
    let extraContext = "";

    // Regenerate until the reviewer accepts/skips, or the model returns
    // nothing. extraContext carries the latest reviewer feedback.
    while (true) {
      toolCall("annotate", { file_path: path.basename(file) + (extraContext ? " (revised)" : "") });
      comments = await generateJsoncComments(client, model, file, lingoContext, extraContext);
      if (Object.keys(comments).length === 0) break;

      if (!review) break;

      const preview = Object.entries(comments).map(([k, v]) => `  "${k}": "${v}"`).join("\n");
      const result = await reviewContent(`comments for ${path.basename(file)}`, preview);
      if (result === "accept") break;
      if (result === "skip") { comments = {}; break; }
      extraContext = result;
    }

    // Only write and record files that ended the loop with comments.
    if (Object.keys(comments).length > 0) {
      injectJsoncComments(file, comments);
      injected.push([file, fileHash(file)]);
    }
  }

  return injected;
}
diff --git a/demo/ctx/src/research.ts b/demo/ctx/src/research.ts
new file mode 100644
index 000000000..056c1a9c1
--- /dev/null
+++ b/demo/ctx/src/research.ts
@@ -0,0 +1,172 @@
+import Anthropic from "@anthropic-ai/sdk";
+import { selectMenu, textPrompt } from "./cli.ts";
+import { readFile, listFiles } from "./files.ts";
+import { phase, toolCall, dim } from "./ui.ts";
+
+// Research agent uses Sonnet — needs web search + stronger reasoning
+// web_search_20250305 requires Sonnet — falls back gracefully if unavailable
const RESEARCH_MODEL = "claude-sonnet-4-6";

// System prompt for the research agent. It constrains the output to a
// short, factual brief ending in concrete translation rules, returned as
// plain text (no write_file) so the caller can embed it directly.
const researchSystem = `You are a product research analyst. Research a software product and produce a concise brief that will help an AI translation engine understand the product's market, audience, and tone.

Steps:
1. Read the project files — README, package.json, landing page copy, app strings
2. Search the web for the product/company name to understand market position, competitors, and industry tone conventions
3. Search for "[product category] localization best practices" or "[industry] translation tone" if useful

Produce a brief covering:
- What the product does and what problem it solves
- Target customers (role, industry, technical level)
- Market segment (B2B SaaS, consumer, devtools, etc.)
- Tone conventions in this space — what competitors use, what the market expects
- Domain-specific terms with known translation risks in this market
- Recommended tone register and pronoun form per language

Rules:
- Be specific and factual. No marketing language.
- Under 300 words.
- End with "Translation implications:" — concrete rules derived from market and audience research.

Respond with the brief as plain text. Do not use write_file.`;
+
+// --- Research agent with file + web access ---
+
/**
 * Agentic research loop: the model may list/read project files and use
 * the server-side web_search tool, then emits a product brief as text.
 * Returns the brief wrapped in delimiters, or null when the model
 * produced no text.
 * NOTE(review): the return annotation appears truncated in this copy
 * (`Promise`); presumably Promise<string | null> — confirm.
 */
export async function runResearchAgent(
  client: Anthropic,
  targetDir: string,
  i18nBlock: string,
): Promise {
  phase("Research", "scanning project + searching web");

  const messages: Anthropic.MessageParam[] = [{
    role: "user",
    content: [
      `Research this project and produce a product brief.`,
      i18nBlock,
      `Project folder: ${targetDir}`,
      `\nExplore the project files and search the web as needed.`,
    ].join("\n"),
  }];

  const tools = [
    // Server-executed web search tool (type web_search_20250305); the
    // SDK typings may not cover it, hence the `as any` on the request.
    {
      type: "web_search_20250305" as const,
      name: "web_search" as const,
    },
    {
      name: "list_files",
      description: "List all files in a directory",
      input_schema: {
        type: "object" as const,
        properties: { directory: { type: "string" } },
        required: ["directory"],
        additionalProperties: false,
      },
    },
    {
      name: "read_file",
      description: "Read the contents of a file",
      input_schema: {
        type: "object" as const,
        properties: { file_path: { type: "string" } },
        required: ["file_path"],
        additionalProperties: false,
      },
    },
  ];

  let brief = "";

  while (true) {
    const response = await client.messages.create({
      model: RESEARCH_MODEL,
      max_tokens: 2048,
      system: researchSystem,
      tools,
      messages,
    } as any);

    // Keep the LAST text block seen across iterations — intermediate
    // turns are reasoning; the final turn is the brief.
    for (const block of response.content) {
      if (block.type === "text" && block.text.trim()) {
        brief = block.text.trim();
        process.stdout.write(`\x1B[2m  ${brief}\x1B[0m\n`);
      }
    }

    if (response.stop_reason !== "tool_use") break;

    const toolUses = response.content.filter((b): b is Anthropic.ToolUseBlock => b.type === "tool_use");
    messages.push({ role: "assistant", content: response.content });

    const toolResults: Anthropic.ToolResultBlockParam[] = [];
    for (const tool of toolUses) {
      const input = tool.input as Record;
      if (tool.name === "web_search") {
        // Search executes server-side; an empty client-side tool_result
        // is sent back — presumably the API fills in results. Confirm.
        toolCall("web_search", { query: input.query });
        toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: "" });
      } else if (tool.name === "list_files") {
        toolCall("list_files", input);
        toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: JSON.stringify(listFiles(input.directory)) });
      } else if (tool.name === "read_file") {
        toolCall("read_file", input);
        toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: readFile(input.file_path) });
      }
    }

    messages.push({ role: "user", content: toolResults });
  }

  if (!brief) return null;
  return `--- Product Research Brief ---\n${brief}\n--- End Brief ---`;
}
+
+// --- Quick questionnaire ---
+
// Preset tone choices for the questionnaire; the selected label is
// written verbatim into the product brief.
const TONE_OPTIONS = [
  "Formal & professional",
  "Friendly & conversational",
  "Technical & precise",
  "Playful & energetic",
  "Neutral — let the code speak",
];
+
+async function runQuestionnaire(): Promise {
+ console.log("\n Answer a few questions — blank to skip any.\n");
+
+ const product = await textPrompt("What does your product do?", "e.g. task manager for remote teams");
+ const users = await textPrompt("Who are your target users?", "e.g. developers, small business owners");
+ const market = await textPrompt("What industry or market?", "e.g. B2B SaaS, consumer, fintech");
+ const toneIdx = await selectMenu("What tone should translations use?", TONE_OPTIONS, 0);
+ const extra = await textPrompt("Anything else translators should know?", "e.g. never translate brand name");
+
+ const lines = ["Product brief from user interview:"];
+ if (product) lines.push(`- Product: ${product}`);
+ if (users) lines.push(`- Target users: ${users}`);
+ if (market) lines.push(`- Market: ${market}`);
+ lines.push(`- Tone: ${TONE_OPTIONS[toneIdx]}`);
+ if (extra) lines.push(`- Notes: ${extra}`);
+
+ return lines.join("\n");
+}
+
+// --- Entry point: let user pick ---
+
+export async function runResearch(
+ client: Anthropic,
+ targetDir: string,
+ i18nBlock: string,
+): Promise {
+ const choice = await selectMenu(
+ "No lingo-context.md found. How should we gather product context?",
+ [
+ "Research agent — Claude searches the web + reads your project",
+ "Quick interview — answer 4 questions yourself",
+ "Skip — let the agent figure it out from code",
+ ],
+ 0,
+ );
+
+ if (choice === 0) return runResearchAgent(client, targetDir, i18nBlock);
+ if (choice === 1) return runQuestionnaire();
+ return null;
+}
diff --git a/demo/ctx/src/state.ts b/demo/ctx/src/state.ts
new file mode 100644
index 000000000..34930c88a
--- /dev/null
+++ b/demo/ctx/src/state.ts
@@ -0,0 +1,57 @@
+import fs from "fs";
+import path from "path";
+import { createHash } from "crypto";
+
// Persistent per-target state: a map of processed file path → md5 hash
// of the content last seen. NOTE(review): the Record's type parameters
// appear truncated in this copy — presumably Record<string, string>.
export type State = { processedFiles: Record };
// One processed file: [path, content hash]. Hash "" means unreadable.
export type FileEntry = [path: string, hash: string];
+
// Lazily created, cached directory (~/.ctx/state) for per-target state
// files. NOTE(review): process.env.HOME! is undefined on Windows —
// os.homedir() would be portable; a later patch in this series fixes it.
let _stateDir: string | undefined;
function getStateDir(): string {
  if (!_stateDir) {
    _stateDir = path.join(process.env.HOME!, ".ctx", "state");
    fs.mkdirSync(_stateDir, { recursive: true });
  }
  return _stateDir;
}
+
+export function md5(data: Buffer | string): string {
+ return createHash("md5").update(data).digest("hex");
+}
+
+function stateFile(p: string) {
+ return path.join(getStateDir(), `${md5(p)}.json`);
+}
+
+export function loadState(p: string): State {
+ try { return JSON.parse(fs.readFileSync(stateFile(p), "utf-8")); }
+ catch { return { processedFiles: {} }; }
+}
+
+export function saveState(p: string, state: State) {
+ fs.writeFileSync(stateFile(p), JSON.stringify(state, null, 2));
+}
+
+export function fileHash(f: string): string {
+ try { return md5(fs.readFileSync(f)); }
+ catch { return ""; }
+}
+
+export function filterNewFiles(files: string[], state: State): FileEntry[] {
+ return files.flatMap((f) => {
+ const hash = fileHash(f);
+ if (!hash) return []; // skip unreadable files
+ return hash !== state.processedFiles[f] ? [[f, hash]] : [];
+ });
+}
+
+export function recordFiles(entries: FileEntry[], p: string) {
+ const state = loadState(p);
+ for (const [f, hash] of entries) {
+ if (hash) state.processedFiles[f] = hash; // never store empty hash
+ }
+ saveState(p, state);
+}
+
+export function clearState(p: string) {
+ try { fs.unlinkSync(stateFile(p)); } catch {}
+}
diff --git a/demo/ctx/src/ui.ts b/demo/ctx/src/ui.ts
new file mode 100644
index 000000000..7e2be21fc
--- /dev/null
+++ b/demo/ctx/src/ui.ts
@@ -0,0 +1,91 @@
// ─── ANSI ────────────────────────────────────────────────────────────────────
// Raw escape codes; every styled string must end with R (reset).
const R = "\x1B[0m";
const B = "\x1B[1m"; // bold
const D = "\x1B[2m"; // dim
const CY = "\x1B[36m"; // cyan
const GR = "\x1B[32m"; // green
const YL = "\x1B[33m"; // yellow
const RD = "\x1B[31m"; // red
const MG = "\x1B[35m"; // magenta

// ─── Building blocks ─────────────────────────────────────────────────────────
// Wrap a string in one ANSI style, always resetting afterwards.
export const dim = (s: string) => `${D}${s}${R}`;
export const bold = (s: string) => `${B}${s}${R}`;
export const cyan = (s: string) => `${CY}${s}${R}`;
export const green = (s: string) => `${GR}${s}${R}`;
+
+// ─── Header ──────────────────────────────────────────────────────────────────
+export function printHeader(opts: {
+ targetDir: string;
+ outPath: string;
+ model: string;
+ source: string;
+ targets: string[];
+}) {
+ const rel = opts.outPath.replace(opts.targetDir + "/", "");
+ const arrow = opts.targets.length ? `${opts.source} → ${opts.targets.join(" ")}` : opts.source;
+ console.log();
+ console.log(` ${B}${CY}ctx${R} ${B}${opts.targetDir}${R}`);
+ console.log(` ${D}${rel} · ${opts.model} · ${arrow}${R}`);
+}
+
+// ─── Phase header — big transition between stages ────────────────────────────
+export function phase(label: string, sub?: string) {
+ console.log();
+ console.log(` ${B}${CY}◆${R} ${B}${label}${R}`);
+ if (sub) console.log(` ${D}${sub}${R}`);
+}
+
+// ─── Tool call — compact, no full paths ──────────────────────────────────────
+export function toolCall(name: string, input: Record) {
+ const arg = input.file_path ?? input.directory ?? Object.values(input)[0] ?? "";
+ // show only the last 2 path segments to keep it readable
+ const short = arg.split("/").slice(-2).join("/");
+ const color = name === "write_file" ? `${GR}` : `${D}`;
+ console.log(` ${color}↳ ${name.padEnd(12)}${short}${R}`);
+}
+
+// ─── File item (dry-run / update list) ───────────────────────────────────────
+export function fileItem(name: string) {
+ console.log(` ${D}· ${name}${R}`);
+}
+
+// ─── Progress counter (update loop) ──────────────────────────────────────────
+export function progress(i: number, total: number, label: string) {
+ console.log();
+ console.log(` ${D}[${i}/${total}]${R} ${B}${label}${R}`);
+}
+
+// ─── Status lines ─────────────────────────────────────────────────────────────
+export function ok(msg: string) { console.log(`\n ${GR}✓${R} ${msg}`); }
+export function warn(msg: string) { console.log(`\n ${YL}!${R} ${msg}`); }
+export function fail(msg: string) { console.log(`\n ${RD}✗${R} ${msg}`); }
+export function info(msg: string) { console.log(` ${D}${msg}${R}`); }
+
+// ─── Summary line (section changed/added/removed) ─────────────────────────────
+export function summaryLine(prefix: "+" | "-" | "~", label: string, detail = "") {
+ const color = prefix === "+" ? GR : prefix === "-" ? RD : YL;
+ console.log(` ${color}${prefix}${R} ${label}${detail ? ` ${D}${detail}${R}` : ""}`);
+}
+
+// ─── Review box ───────────────────────────────────────────────────────────────
+const PREVIEW_LINES = 50;
+const WIDTH = 62;
+
+export function reviewBox(label: string, content: string) {
+ const lines = content.split("\n");
+ const preview = lines.slice(0, PREVIEW_LINES).join("\n");
+ const truncated = lines.length > PREVIEW_LINES;
+ const title = ` ${label} `;
+ const pad = WIDTH - title.length - 2;
+ const hr = `${D}${"─".repeat(WIDTH)}${R}`;
+
+ console.log();
+ console.log(` ${D}┌─${R}${B}${title}${R}${D}${"─".repeat(Math.max(0, pad))}┐${R}`);
+ console.log();
+ // indent content
+ for (const line of preview.split("\n")) console.log(` ${line}`);
+ if (truncated) console.log(`\n ${D}… ${lines.length - PREVIEW_LINES} more lines${R}`);
+ console.log();
+ console.log(` ${D}└${"─".repeat(WIDTH)}┘${R}`);
+}
diff --git a/demo/ctx/src/voices.ts b/demo/ctx/src/voices.ts
new file mode 100644
index 000000000..fa3777447
--- /dev/null
+++ b/demo/ctx/src/voices.ts
@@ -0,0 +1,92 @@
+import Anthropic from "@anthropic-ai/sdk";
+import fs from "fs";
+import { readFile } from "./files.ts";
+import { reviewContent } from "./agent-loop.ts";
+import { phase, info, warn, fail } from "./ui.ts";
+
// System prompt for per-locale brand-voice generation: forces short,
// actionable, locale-specific prose grounded only in lingo-context.md.
const voiceSystem = `You are a brand voice writer for software localization.

Given: lingo-context.md describing the product, tone, audience, and language-specific rules.
Task: Write a brand voice for one target locale — concise natural language instructions for the LLM translator.

A brand voice must cover:
- Pronoun register: formal vs informal (du/Sie, tu/vous, tú/usted, etc.)
- Tone: professional, conversational, technical, playful — be specific
- Audience context if it changes word choice
- Any critical conventions from the lingo-context.md for this locale (length, script, idioms)

Rules:
- 3–6 sentences. No bullet points. Plain prose.
- Actionable only — no generic advice like "be natural". Every sentence must constrain a decision.
- Pull from the lingo-context.md language section for this locale. Do not invent rules not in the file.
- Write in English.`;
+
+async function generateVoice(
+ client: Anthropic,
+ model: string,
+ locale: string,
+ context: string,
+ feedback?: string,
+ previous?: string,
+): Promise {
+ const messages: Anthropic.MessageParam[] = [
+ { role: "user", content: `Target locale: ${locale}\n\n${context}` },
+ ];
+ if (previous && feedback) {
+ messages.push({ role: "assistant", content: previous });
+ messages.push({ role: "user", content: `Please revise: ${feedback}` });
+ }
+
+ const response = await client.messages.create({
+ model,
+ max_tokens: 512,
+ system: voiceSystem,
+ messages,
+ });
+
+ return response.content.find((b): b is Anthropic.TextBlock => b.type === "text")?.text.trim() ?? "";
+}
+
/**
 * Generate a brand voice per target locale, route each through
 * interactive review (accept / skip / revise-with-feedback), and write
 * the accepted voices into i18n.json under provider.voices.
 * Requires an existing lingo-context.md; bails out early otherwise.
 * Pre-existing voices are preserved and only overwritten on accept.
 * NOTE(review): the return annotation appears truncated in this copy
 * (`Promise`); presumably Promise<void> — confirm.
 */
export async function runVoices(
  client: Anthropic,
  model: string,
  contextPath: string,
  i18nPath: string,
  targetLocales: string[],
): Promise {
  if (!fs.existsSync(contextPath)) {
    fail(`lingo-context.md not found — run ctx first, then re-run with --voices.`);
    return;
  }

  if (targetLocales.length === 0) {
    warn(`No target locales in i18n.json — nothing to generate.`);
    return;
  }

  const context = readFile(contextPath);
  const i18nRaw = fs.readFileSync(i18nPath, "utf-8");
  const i18n = JSON.parse(i18nRaw);
  // Start from any voices already present so accepted ones merge in.
  const voices: Record = { ...(i18n.provider?.voices ?? {}) };

  phase("Brand Voices", targetLocales.join(" "));

  for (const locale of targetLocales) {
    info(`[${locale}] generating...`);
    let text = await generateVoice(client, model, locale, context);
    if (!text) { warn(`[${locale}] no output — skipped`); continue; }

    // Review loop: any result other than accept/skip is treated as
    // revision feedback; `|| text` keeps the last draft if the
    // revision round produced no output.
    while (true) {
      const result = await reviewContent(`Brand voice · ${locale}`, text);
      if (result === "accept") { voices[locale] = text; break; }
      if (result === "skip") { info(`[${locale}] skipped`); break; }
      info(`[${locale}] revising...`);
      text = await generateVoice(client, model, locale, context, result, text) || text;
    }
  }

  // Create a minimal provider if none exists yet, then attach voices.
  if (!i18n.provider) i18n.provider = { id: "anthropic", model };
  i18n.provider.voices = voices;
  fs.writeFileSync(i18nPath, JSON.stringify(i18n, null, 2), "utf-8");
  info(`wrote ${Object.keys(voices).length} brand voice(s) to i18n.json`);
}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index fd423f13c..c4eb4ad11 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -55,6 +55,16 @@ importers:
specifier: 2.6.1
version: 2.6.1
+ demo/ctx:
+ dependencies:
+ '@anthropic-ai/sdk':
+ specifier: ^0.78.0
+ version: 0.78.0(zod@4.1.12)
+ devDependencies:
+ tsx:
+ specifier: ^4.19.3
+ version: 4.21.0
+
demo/new-compiler-next16:
dependencies:
'@lingo.dev/compiler':
@@ -1175,6 +1185,15 @@ packages:
resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
engines: {node: '>=6.0.0'}
+ '@anthropic-ai/sdk@0.78.0':
+ resolution: {integrity: sha512-PzQhR715td/m1UaaN5hHXjYB8Gl2lF9UVhrrGrZeysiF6Rb74Wc9GCB8hzLdzmQtBd1qe89F9OptgB9Za1Ib5w==}
+ hasBin: true
+ peerDependencies:
+ zod: ^3.25.0 || ^4.0.0
+ peerDependenciesMeta:
+ zod:
+ optional: true
+
'@asamuzakjp/css-color@3.2.0':
resolution: {integrity: sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw==}
@@ -6838,6 +6857,10 @@ packages:
json-parse-even-better-errors@2.3.1:
resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==}
+ json-schema-to-ts@3.1.1:
+ resolution: {integrity: sha512-+DWg8jCJG2TEnpy7kOm/7/AxaYoaRbjVB4LFZLySZlWn8exGs3A4OLJR966cVvU26N7X9TWxl+Jsw7dzAqKT6g==}
+ engines: {node: '>=16'}
+
json-schema-traverse@0.4.1:
resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==}
@@ -9045,6 +9068,9 @@ packages:
trough@2.2.0:
resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==}
+ ts-algebra@2.0.0:
+ resolution: {integrity: sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw==}
+
ts-api-utils@2.1.0:
resolution: {integrity: sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==}
engines: {node: '>=18.12'}
@@ -10079,6 +10105,12 @@ snapshots:
'@jridgewell/gen-mapping': 0.3.13
'@jridgewell/trace-mapping': 0.3.31
+ '@anthropic-ai/sdk@0.78.0(zod@4.1.12)':
+ dependencies:
+ json-schema-to-ts: 3.1.1
+ optionalDependencies:
+ zod: 4.1.12
+
'@asamuzakjp/css-color@3.2.0':
dependencies:
'@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)
@@ -16790,6 +16822,11 @@ snapshots:
json-parse-even-better-errors@2.3.1: {}
+ json-schema-to-ts@3.1.1:
+ dependencies:
+ '@babel/runtime': 7.28.4
+ ts-algebra: 2.0.0
+
json-schema-traverse@0.4.1: {}
json-schema-traverse@1.0.0: {}
@@ -19499,6 +19536,8 @@ snapshots:
trough@2.2.0: {}
+ ts-algebra@2.0.0: {}
+
ts-api-utils@2.1.0(typescript@5.9.3):
dependencies:
typescript: 5.9.3
From edf5269c9fbd13f14a2b3918c906fb29c4f8d858 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:14:00 +0530
Subject: [PATCH 02/18] fix(demo/ctx): move tsx to dependencies for global
install support
tsx was in devDependencies, which are not installed for global consumers
of the bin entry; moving it to dependencies ensures the shebang resolves.
Made-with: Cursor
---
demo/ctx/package.json | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/demo/ctx/package.json b/demo/ctx/package.json
index 684f9d7c4..f67793d46 100644
--- a/demo/ctx/package.json
+++ b/demo/ctx/package.json
@@ -9,9 +9,7 @@
"agent": "tsx agent.ts"
},
"dependencies": {
- "@anthropic-ai/sdk": "^0.78.0"
- },
- "devDependencies": {
+ "@anthropic-ai/sdk": "^0.78.0",
"tsx": "^4.19.3"
}
}
From 716bee8e1f09a1b953f419259b9d515e278b9833 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:14:05 +0530
Subject: [PATCH 03/18] docs(demo/ctx): replace Bun references with Node.js/tsx
The project uses tsx/Node, not Bun. Updated the badge, install
requirements section, and the requirements list to reflect the
actual runtime.
Made-with: Cursor
---
demo/ctx/README.md | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/demo/ctx/README.md b/demo/ctx/README.md
index 24a5c131a..5634307b8 100644
--- a/demo/ctx/README.md
+++ b/demo/ctx/README.md
@@ -24,7 +24,7 @@
-
+
@@ -238,7 +238,7 @@ ctx run
## Install
-**Requirements:** [Bun](https://bun.sh) and an Anthropic API key.
+**Requirements:** [Node.js](https://nodejs.org) (with `tsx`) and an Anthropic API key.
```bash
git clone https://github.com/bhavya031/ctx
@@ -353,7 +353,7 @@ Choose **Request changes**, describe what's wrong, and the agent revises with fu
## Requirements
-- [Bun](https://bun.sh) v1.0+
+- [Node.js](https://nodejs.org) v18+ with [tsx](https://github.com/privatenumber/tsx)
- `ANTHROPIC_API_KEY`
- A lingo.dev project with `i18n.json`
From b37ac294df2316532b2c14d77443476a55b80e5a Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:14:43 +0530
Subject: [PATCH 04/18] fix(demo/ctx): use os.homedir() instead of
process.env.HOME! in state dir
Made-with: Cursor
---
demo/ctx/src/state.ts | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/demo/ctx/src/state.ts b/demo/ctx/src/state.ts
index 34930c88a..6edc40bbf 100644
--- a/demo/ctx/src/state.ts
+++ b/demo/ctx/src/state.ts
@@ -1,4 +1,5 @@
import fs from "fs";
+import os from "os";
import path from "path";
import { createHash } from "crypto";
@@ -8,7 +9,7 @@ export type FileEntry = [path: string, hash: string];
let _stateDir: string | undefined;
function getStateDir(): string {
if (!_stateDir) {
- _stateDir = path.join(process.env.HOME!, ".ctx", "state");
+ _stateDir = path.join(os.homedir(), ".ctx", "state");
fs.mkdirSync(_stateDir, { recursive: true });
}
return _stateDir;
From ab12215cc06c7e9c74b3a7cdab1156112f9ac90b Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:14:46 +0530
Subject: [PATCH 05/18] fix(demo/ctx): guard setRawMode behind isTTY check to
avoid crash in non-TTY
Made-with: Cursor
---
demo/ctx/src/cli.ts | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/demo/ctx/src/cli.ts b/demo/ctx/src/cli.ts
index fcfcaee6d..c5716d83c 100644
--- a/demo/ctx/src/cli.ts
+++ b/demo/ctx/src/cli.ts
@@ -63,6 +63,11 @@ export async function selectMenu(question: string, options: string[], defaultInd
render();
return new Promise((resolve) => {
+ if (!process.stdin.isTTY) {
+ resolve(defaultIndex);
+ return;
+ }
+
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.setEncoding("utf-8");
From 1e80ec6df62ed6d5d7f731929a780a41c11b8590 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:14:49 +0530
Subject: [PATCH 06/18] fix(demo/ctx): extract new path from rename entries in
git status --porcelain
Made-with: Cursor
---
demo/ctx/src/files.ts | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/demo/ctx/src/files.ts b/demo/ctx/src/files.ts
index 197f800df..0322babbd 100644
--- a/demo/ctx/src/files.ts
+++ b/demo/ctx/src/files.ts
@@ -44,7 +44,10 @@ export function getChangedFiles(cwd: string, commits: number | null): string[] {
output = git(`git diff HEAD~${commits} --name-only`, cwd);
} else {
output = git("git status --porcelain", cwd)
- .split("\n").filter(Boolean).map((l) => l.slice(3).trim()).join("\n");
+ .split("\n").filter(Boolean).map((l) => {
+ const entry = l.slice(3).trim();
+ return entry.includes(" -> ") ? entry.split(" -> ")[1].trim() : entry;
+ }).join("\n");
}
const paths = output.split("\n").map((f) => f.trim()).filter(Boolean)
.map((f) => path.join(cwd, f));
From fcf19ae9daba0d2313170f0eb1431520056458bd Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:14:53 +0530
Subject: [PATCH 07/18] fix(demo/ctx): scope JSONC comment replacement to //
CTX: prefix to preserve user comments
Made-with: Cursor
---
demo/ctx/src/jsonc.ts | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/demo/ctx/src/jsonc.ts b/demo/ctx/src/jsonc.ts
index 5cda4e0ba..810fc7541 100644
--- a/demo/ctx/src/jsonc.ts
+++ b/demo/ctx/src/jsonc.ts
@@ -53,10 +53,10 @@ export function injectJsoncComments(filePath: string, comments: Record 0 && result[result.length - 1].trimStart().startsWith("//")) {
+ if (result.length > 0 && result[result.length - 1].trimStart().startsWith("// CTX:")) {
result.pop();
}
- if (comments[key]) result.push(`${indent}// ${comments[key]}`);
+ if (comments[key]) result.push(`${indent}// CTX: ${comments[key]}`);
}
result.push(line);
}
From 73768047d51afdc14b2176c04195e4c3c87ebdd2 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:14:56 +0530
Subject: [PATCH 08/18] fix(demo/ctx): only apply path-shortening in toolCall
when arg is a filesystem path
Made-with: Cursor
---
demo/ctx/src/ui.ts | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/demo/ctx/src/ui.ts b/demo/ctx/src/ui.ts
index 7e2be21fc..8d9be9a45 100644
--- a/demo/ctx/src/ui.ts
+++ b/demo/ctx/src/ui.ts
@@ -39,10 +39,12 @@ export function phase(label: string, sub?: string) {
// ─── Tool call — compact, no full paths ──────────────────────────────────────
export function toolCall(name: string, input: Record) {
const arg = input.file_path ?? input.directory ?? Object.values(input)[0] ?? "";
- // show only the last 2 path segments to keep it readable
- const short = arg.split("/").slice(-2).join("/");
+ const isPath = arg.startsWith("/") || arg.startsWith("./") || arg.startsWith("../") || /^[a-zA-Z]:[/\\]/.test(arg) || (arg.includes("/") && !/\s/.test(arg));
+ const display = isPath
+ ? arg.split("/").slice(-2).join("/")
+ : arg.length > 60 ? `${arg.slice(0, 57)}…` : arg;
const color = name === "write_file" ? `${GR}` : `${D}`;
- console.log(` ${color}↳ ${name.padEnd(12)}${short}${R}`);
+ console.log(` ${color}↳ ${name.padEnd(12)}${display}${R}`);
}
// ─── File item (dry-run / update list) ───────────────────────────────────────
From 7603f2a50db0860ce7f77c1dfa8294c1623bb0a3 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:14:59 +0530
Subject: [PATCH 09/18] fix(demo/ctx): merge into existing provider instead of
overwriting to preserve custom fields
Made-with: Cursor
---
demo/ctx/src/i18n.ts | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/demo/ctx/src/i18n.ts b/demo/ctx/src/i18n.ts
index e82c98592..7b88a9da6 100644
--- a/demo/ctx/src/i18n.ts
+++ b/demo/ctx/src/i18n.ts
@@ -58,20 +58,20 @@ export async function updateI18nProvider(i18nPath: string, contextPath: string):
const i18nRaw = fs.readFileSync(i18nPath, "utf-8");
const i18n = JSON.parse(i18nRaw);
- const newProvider = {
- id: "anthropic",
- model: "claude-haiku-4-5",
- prompt: `Translate from {source} to {target}.\n\n${context}`,
- ...(i18n.provider?.voices ? { voices: i18n.provider.voices } : {}),
- };
-
if (i18n.provider) {
info(`provider: ${i18n.provider.id} · ${i18n.provider.model}`);
const choice = await selectMenu("Overwrite provider with updated context?", ["Update", "Keep existing"], 1);
if (choice === 1) return;
}
- i18n.provider = newProvider;
+ const mergedProvider = {
+ ...(i18n.provider ?? {}),
+ id: "anthropic",
+ model: "claude-haiku-4-5",
+ prompt: `Translate from {source} to {target}.\n\n${context}`,
+ };
+
+ i18n.provider = mergedProvider;
fs.writeFileSync(i18nPath, JSON.stringify(i18n, null, 2), "utf-8");
info(`updated provider in i18n.json`);
}
From dd2ebb70c427d3a136b7b1831d86293e543ad815 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:15:03 +0530
Subject: [PATCH 10/18] fix(demo/ctx): add error handling for readFile,
readFileSync, and JSON.parse in voices
Made-with: Cursor
---
demo/ctx/src/voices.ts | 22 ++++++++++++++++++++--
1 file changed, 20 insertions(+), 2 deletions(-)
diff --git a/demo/ctx/src/voices.ts b/demo/ctx/src/voices.ts
index fa3777447..464b0556d 100644
--- a/demo/ctx/src/voices.ts
+++ b/demo/ctx/src/voices.ts
@@ -65,8 +65,26 @@ export async function runVoices(
}
const context = readFile(contextPath);
- const i18nRaw = fs.readFileSync(i18nPath, "utf-8");
- const i18n = JSON.parse(i18nRaw);
+ if (context.startsWith("[Error:")) {
+ fail(`Cannot read context file: ${contextPath}`);
+ return;
+ }
+
+ let i18nRaw: string;
+ try {
+ i18nRaw = fs.readFileSync(i18nPath, "utf-8");
+ } catch (e) {
+ fail(`Cannot read i18n file: ${i18nPath}\n${e}`);
+ return;
+ }
+
+ let i18n: Record;
+ try {
+ i18n = JSON.parse(i18nRaw);
+ } catch (e) {
+ fail(`Malformed JSON in ${i18nPath}: ${e}`);
+ return;
+ }
const voices: Record = { ...(i18n.provider?.voices ?? {}) };
phase("Brand Voices", targetLocales.join(" "));
From f1ec7b1b50294e925a1b2e8ef2df918ea5ff4c0b Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:15:06 +0530
Subject: [PATCH 11/18] fix(demo/ctx): handle pause_turn stop reason and add
path traversal guard in research agent
Made-with: Cursor
---
demo/ctx/src/research.ts | 28 ++++++++++++++++++++++++----
1 file changed, 24 insertions(+), 4 deletions(-)
diff --git a/demo/ctx/src/research.ts b/demo/ctx/src/research.ts
index 056c1a9c1..7c2042fc1 100644
--- a/demo/ctx/src/research.ts
+++ b/demo/ctx/src/research.ts
@@ -1,4 +1,5 @@
import Anthropic from "@anthropic-ai/sdk";
+import path from "path";
import { selectMenu, textPrompt } from "./cli.ts";
import { readFile, listFiles } from "./files.ts";
import { phase, toolCall, dim } from "./ui.ts";
@@ -93,6 +94,11 @@ export async function runResearchAgent(
}
}
+ if (response.stop_reason === "pause_turn") {
+ messages.push({ role: "assistant", content: response.content });
+ continue;
+ }
+
if (response.stop_reason !== "tool_use") break;
const toolUses = response.content.filter((b): b is Anthropic.ToolUseBlock => b.type === "tool_use");
@@ -105,11 +111,25 @@ export async function runResearchAgent(
toolCall("web_search", { query: input.query });
toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: "" });
} else if (tool.name === "list_files") {
- toolCall("list_files", input);
- toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: JSON.stringify(listFiles(input.directory)) });
+ const resolvedDir = path.resolve(targetDir, input.directory);
+ const relDir = path.relative(targetDir, resolvedDir);
+ if (relDir.startsWith("..") || path.isAbsolute(relDir)) {
+ toolCall("list_files", { directory: input.directory });
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: "Error: Path outside project root" });
+ } else {
+ toolCall("list_files", { directory: resolvedDir });
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: JSON.stringify(listFiles(resolvedDir)) });
+ }
} else if (tool.name === "read_file") {
- toolCall("read_file", input);
- toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: readFile(input.file_path) });
+ const resolvedFile = path.resolve(targetDir, input.file_path);
+ const relFile = path.relative(targetDir, resolvedFile);
+ if (relFile.startsWith("..") || path.isAbsolute(relFile)) {
+ toolCall("read_file", { file_path: input.file_path });
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: "Error: Path outside project root" });
+ } else {
+ toolCall("read_file", { file_path: resolvedFile });
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: readFile(resolvedFile) });
+ }
}
}
From e0b74032e3d0f46f9de7268413bb745d9396bd6c Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:15:09 +0530
Subject: [PATCH 12/18] fix(demo/ctx): add resolveSafe path traversal guard for
all tool calls in agent loop
Made-with: Cursor
---
demo/ctx/src/agent-loop.ts | 53 ++++++++++++++++++++++++++++----------
1 file changed, 39 insertions(+), 14 deletions(-)
diff --git a/demo/ctx/src/agent-loop.ts b/demo/ctx/src/agent-loop.ts
index a00f040ff..2dde00f6a 100644
--- a/demo/ctx/src/agent-loop.ts
+++ b/demo/ctx/src/agent-loop.ts
@@ -42,12 +42,31 @@ export const allTools: Anthropic.Tool[] = [
export const writeOnlyTools: Anthropic.Tool[] = [allTools[2]];
-function executeTool(name: string, input: Record, listFilesFn: (dir: string) => string[]): string {
+function resolveSafe(allowedRoot: string, rawPath: string): string | null {
+ const root = path.resolve(allowedRoot);
+ const resolved = path.resolve(root, rawPath);
+ if (resolved !== root && !resolved.startsWith(root + path.sep)) return null;
+ return resolved;
+}
+
+function executeTool(name: string, input: Record, listFilesFn: (dir: string) => string[], allowedRoot: string): string {
switch (name) {
- case "list_files": return JSON.stringify(listFilesFn(input.directory));
- case "read_file": return readFile(input.file_path);
- case "write_file": return writeFile(input.file_path, input.content);
- default: return `Unknown tool: ${name}`;
+ case "list_files": {
+ const dir = resolveSafe(allowedRoot, input.directory);
+ if (!dir) return "Error: Path outside project root";
+ return JSON.stringify(listFilesFn(dir));
+ }
+ case "read_file": {
+ const file = resolveSafe(allowedRoot, input.file_path);
+ if (!file) return "Error: Path outside project root";
+ return readFile(file);
+ }
+ case "write_file": {
+ const file = resolveSafe(allowedRoot, input.file_path);
+ if (!file) return "Error: Path outside project root";
+ return writeFile(file, input.content);
+ }
+ default: return `Unknown tool: ${name}`;
}
}
@@ -67,6 +86,7 @@ export async function runAgent(
userMessage: string,
tools: Anthropic.Tool[],
listFilesFn: (dir: string) => string[],
+ allowedRoot: string,
review = false,
) {
const messages: Anthropic.MessageParam[] = [{ role: "user", content: userMessage }];
@@ -91,19 +111,24 @@ export async function runAgent(
const input = tool.input as Record;
if (review && tool.name === "write_file") {
- const label = path.basename(input.file_path);
- const result = await reviewContent(label, input.content);
- if (result === "accept") {
- toolCall("write_file", input);
- toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: writeFile(input.file_path, input.content) });
- } else if (result === "skip") {
- toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: "User skipped this write — do not write this file." });
+ const resolvedPath = resolveSafe(allowedRoot, input.file_path);
+ if (!resolvedPath) {
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: "Error: Path outside project root" });
} else {
- toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: `User requested changes: ${result}\nPlease revise and call write_file again with the updated content.` });
+ const label = path.basename(resolvedPath);
+ const result = await reviewContent(label, input.content);
+ if (result === "accept") {
+ toolCall("write_file", input);
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: writeFile(resolvedPath, input.content) });
+ } else if (result === "skip") {
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: "User skipped this write — do not write this file." });
+ } else {
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: `User requested changes: ${result}\nPlease revise and call write_file again with the updated content.` });
+ }
}
} else {
toolCall(tool.name, input);
- toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: executeTool(tool.name, input, listFilesFn) });
+ toolResults.push({ type: "tool_result", tool_use_id: tool.id, content: executeTool(tool.name, input, listFilesFn, allowedRoot) });
}
}
From 674652e9209621ec3d411ef99724798e882b0c92 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 28 Mar 2026 12:15:16 +0530
Subject: [PATCH 13/18] fix(demo/ctx): include i18n.json in candidates and
defer clearState until rewrite confirmed
Made-with: Cursor
---
demo/ctx/agent.ts | 10 ++++++----
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/demo/ctx/agent.ts b/demo/ctx/agent.ts
index 180419f0d..cb236da9f 100755
--- a/demo/ctx/agent.ts
+++ b/demo/ctx/agent.ts
@@ -89,7 +89,7 @@ async function run() {
}
const agent = (system: string, message: string, tools: Anthropic.Tool[], review = false) =>
- runAgent(client, model, system, message, tools, listFiles, review);
+ runAgent(client, model, system, message, tools, listFiles, targetDir, review);
const printDone = () => fs.existsSync(outPath) ? ok(`Done → ${outPath}`) : warn(`Output file was not created`);
const modeLabel = isCommitMode ? `last ${commitCount} commit(s)` : "uncommitted";
@@ -154,7 +154,7 @@ You MUST call write_file to write lingo-context.md. Do NOT output the file conte
dbg(`gitChanged:`, gitChanged);
const allBucket = resolveBucketFiles();
dbg(`resolveBucketFiles:`, allBucket);
- const candidates = [...new Set([...gitChanged.filter(matchesBucket), ...allBucket])];
+ const candidates = [...new Set([...gitChanged.filter(f => matchesBucket(f) || path.basename(f) === 'i18n.json'), ...allBucket])];
dbg(`candidates:`, candidates);
earlyChangedFiles = filterNewFiles(candidates, state);
dbg(`earlyChangedFiles:`, earlyChangedFiles.map(([f]) => f));
@@ -166,9 +166,11 @@ You MUST call write_file to write lingo-context.md. Do NOT output the file conte
const override = values.prompt ?? await textPrompt("What should the full regeneration cover?", "blank for default");
const regen = override || "Generate a comprehensive lingo-context.md for this project.";
- clearState(outPath);
+ const prevMtime = fs.existsSync(outPath) ? fs.statSync(outPath).mtimeMs : null;
await agent(freshSystem, freshMessage(regen), allTools, true);
- if (!fs.existsSync(outPath)) { warn(SKIPPED_MSG); return; }
+ const newMtime = fs.existsSync(outPath) ? fs.statSync(outPath).mtimeMs : null;
+ if (newMtime === null || newMtime === prevMtime) { warn(SKIPPED_MSG); return; }
+ clearState(outPath);
phase("JSONC Injection");
const jsoncEntries1 = await runJsoncInjection(client, model, jsoncSourceFiles, outPath, true);
phase("Provider Sync");
From 306d6c9a4b79483dd940c2bd8ca39ddd46f98224 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sun, 29 Mar 2026 18:55:02 +0530
Subject: [PATCH 14/18] docs(demo/ctx): update install instructions to point to
lingodotdev/lingo.dev monorepo
Made-with: Cursor
---
demo/ctx/README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/demo/ctx/README.md b/demo/ctx/README.md
index 5634307b8..a384c465a 100644
--- a/demo/ctx/README.md
+++ b/demo/ctx/README.md
@@ -241,8 +241,8 @@ ctx run
**Requirements:** [Node.js](https://nodejs.org) (with `tsx`) and an Anthropic API key.
```bash
-git clone https://github.com/bhavya031/ctx
-cd ctx
+git clone https://github.com/lingodotdev/lingo.dev
+cd lingo.dev/demo/ctx
bun install
bun link
```
From 9b8f699d1c55ca87f249b3401429a853765c921f Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Wed, 1 Apr 2026 12:52:21 +0530
Subject: [PATCH 15/18] chore(demo/ctx): rename package from lingo-agent to ctx
Made-with: Cursor
---
.changeset/ctx-agent-initial.md | 5 +++++
demo/ctx/package.json | 2 +-
2 files changed, 6 insertions(+), 1 deletion(-)
create mode 100644 .changeset/ctx-agent-initial.md
diff --git a/.changeset/ctx-agent-initial.md b/.changeset/ctx-agent-initial.md
new file mode 100644
index 000000000..798aaed72
--- /dev/null
+++ b/.changeset/ctx-agent-initial.md
@@ -0,0 +1,5 @@
+---
+"ctx": minor
+---
+
+Add CTX AI context engine to generate and maintain lingo-context.md
diff --git a/demo/ctx/package.json b/demo/ctx/package.json
index f67793d46..1d5f33cc7 100644
--- a/demo/ctx/package.json
+++ b/demo/ctx/package.json
@@ -1,5 +1,5 @@
{
- "name": "lingo-agent",
+ "name": "ctx",
"version": "1.0.0",
"type": "module",
"bin": {
From c2351bb45c7e1065b72c55c096bf1b88a6b86b0a Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 4 Apr 2026 11:50:25 +0530
Subject: [PATCH 16/18] revert(demo/ctx): remove manually created changeset
file
---
.changeset/ctx-agent-initial.md | 5 -----
1 file changed, 5 deletions(-)
delete mode 100644 .changeset/ctx-agent-initial.md
diff --git a/.changeset/ctx-agent-initial.md b/.changeset/ctx-agent-initial.md
deleted file mode 100644
index 798aaed72..000000000
--- a/.changeset/ctx-agent-initial.md
+++ /dev/null
@@ -1,5 +0,0 @@
----
-"ctx": minor
----
-
-Add CTX AI context engine to generate and maintain lingo-context.md
From de22c57dde04e66a36d4acef693c5d1552a1afe7 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Sat, 4 Apr 2026 11:57:30 +0530
Subject: [PATCH 17/18] chore(demo/ctx): add changeset for ctx minor release
---
.changeset/slow-groups-find.md | 5 +++++
1 file changed, 5 insertions(+)
create mode 100644 .changeset/slow-groups-find.md
diff --git a/.changeset/slow-groups-find.md b/.changeset/slow-groups-find.md
new file mode 100644
index 000000000..798aaed72
--- /dev/null
+++ b/.changeset/slow-groups-find.md
@@ -0,0 +1,5 @@
+---
+"ctx": minor
+---
+
+Add CTX AI context engine to generate and maintain lingo-context.md
From 52392ad674b3af284671f5fe4af63efdfc1e0319 Mon Sep 17 00:00:00 2001
From: Bhavya Patel
Date: Mon, 6 Apr 2026 14:16:43 +0530
Subject: [PATCH 18/18] chore: sync pnpm-lock.yaml with tsx moved from
 devDependencies to dependencies in demo/ctx
---
pnpm-lock.yaml | 1 -
1 file changed, 1 deletion(-)
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index c4eb4ad11..c7bf02d68 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -60,7 +60,6 @@ importers:
'@anthropic-ai/sdk':
specifier: ^0.78.0
version: 0.78.0(zod@4.1.12)
- devDependencies:
tsx:
specifier: ^4.19.3
version: 4.21.0