diff --git a/package.json b/package.json index 94f9a02..d239bad 100644 --- a/package.json +++ b/package.json @@ -46,9 +46,7 @@ "type": "git", "url": "https://github.com/AgentWorkforce/trajectories" }, - "files": [ - "dist" - ], + "files": ["dist"], "engines": { "node": ">=20.0.0" }, diff --git a/src/core/id.ts b/src/core/id.ts index ca896d2..529fc83 100644 --- a/src/core/id.ts +++ b/src/core/id.ts @@ -51,7 +51,10 @@ export function generateChapterId(): string { * @returns True if valid format */ export function isValidTrajectoryId(id: string): boolean { - return /^traj_[a-z0-9]{12}$/.test(id); + // Accept the canonical 12-char form generated by this library AND the + // legacy `traj__` form produced by external writers + // like the workforce workflow runner. + return /^traj_[a-z0-9_]+$/.test(id); } /** diff --git a/src/core/schema.ts b/src/core/schema.ts index eae1c7d..8246116 100644 --- a/src/core/schema.ts +++ b/src/core/schema.ts @@ -121,7 +121,7 @@ export const DecisionSchema = z.object({ */ export const AgentParticipationSchema = z.object({ name: z.string().min(1, "Agent name is required"), - role: z.enum(["lead", "contributor", "reviewer"]), + role: z.string().min(1, "Agent role is required"), joinedAt: z.string().datetime(), leftAt: z.string().datetime().optional(), }); @@ -231,7 +231,7 @@ export const TrajectoryTraceRefSchema = z.object({ * Full trajectory schema */ export const TrajectorySchema = z.object({ - id: z.string().regex(/^traj_[a-z0-9]+$/, "Invalid trajectory ID format"), + id: z.string().regex(/^traj_[a-z0-9_]+$/, "Invalid trajectory ID format"), version: z.literal(1), task: TaskReferenceSchema, status: TrajectoryStatusSchema, @@ -240,11 +240,11 @@ export const TrajectorySchema = z.object({ agents: z.array(AgentParticipationSchema), chapters: z.array(ChapterSchema), retrospective: RetrospectiveSchema.optional(), - commits: z.array(z.string()), - filesChanged: z.array(z.string()), - projectId: z.string(), + commits: 
z.array(z.string()).default([]), + filesChanged: z.array(z.string()).default([]), + projectId: z.string().optional(), workflowId: z.string().optional(), - tags: z.array(z.string()), + tags: z.array(z.string()).default([]), _trace: TrajectoryTraceRefSchema.optional(), }); diff --git a/src/core/trailers.ts b/src/core/trailers.ts index ac33af3..ef15dad 100644 --- a/src/core/trailers.ts +++ b/src/core/trailers.ts @@ -34,8 +34,12 @@ export function parseTrajectoryFromMessage( ): string | null { const lines = commitMessage.split("\n"); for (const line of lines) { + // Character class must include `_` to match legacy + // `traj__` ids produced by the workforce workflow runner + // in addition to the canonical `traj_<12hex>` shape. This must stay in + // sync with the regex in src/core/schema.ts and src/core/id.ts. const match = line.match( - new RegExp(`^${TRAJECTORY_TRAILER_KEY}:\\s*(traj_[a-z0-9]+)$`), + new RegExp(`^${TRAJECTORY_TRAILER_KEY}:\\s*(traj_[a-z0-9_]+)$`), ); if (match) { return match[1]; @@ -196,8 +200,11 @@ if [ -z "$ACTIVE_FILE" ]; then exit 0 fi -# Extract trajectory ID (grep for the "id" field) -TRAJ_ID=$(grep -o '"id"[[:space:]]*:[[:space:]]*"traj_[a-z0-9]*"' "$ACTIVE_FILE" | head -1 | grep -o 'traj_[a-z0-9]*') +# Extract trajectory ID (grep for the "id" field). Character class must +# include underscore to match legacy traj__ ids -- without +# it, grep -o silently truncates at the first internal underscore and +# emits a wrong (shorter) id into the commit trailer. +TRAJ_ID=$(grep -o '"id"[[:space:]]*:[[:space:]]*"traj_[a-z0-9_]*"' "$ACTIVE_FILE" | head -1 | grep -o 'traj_[a-z0-9_]*') if [ -z "$TRAJ_ID" ]; then exit 0 fi diff --git a/src/core/types.ts b/src/core/types.ts index 3e60150..d78fd96 100644 --- a/src/core/types.ts +++ b/src/core/types.ts @@ -60,7 +60,12 @@ export type TrajectoryEventType = | "finding" | "reflection" | "note" - | "error"; + | "error" + // Permissive fallback for event types produced by other tools + // (e.g. 
agent-relay's "completion-evidence"). The zod schema already + // accepts these via z.union([...literals, z.string()]); this keeps + // the TS type aligned so readers can assign freely. + | (string & {}); /** * Significance level for events @@ -149,8 +154,13 @@ export interface Finding { export interface AgentParticipation { /** Agent identifier */ name: string; - /** Role in the trajectory */ - role: "lead" | "contributor" | "reviewer"; + /** + * Role in the trajectory. Common values are "lead", "contributor", + * "reviewer", but this is intentionally open-ended — the workforce + * workflow runner emits domain-specific roles like "workflow-runner" + * and "specialist" that we want to read without rejecting. + */ + role: string; /** When the agent joined */ joinedAt: string; /** When the agent left (if applicable) */ @@ -223,8 +233,8 @@ export interface Trajectory { commits: string[]; /** Files that were modified */ filesChanged: string[]; - /** Project identifier */ - projectId: string; + /** Project identifier. Optional — legacy trajectories may omit it. */ + projectId?: string; /** Opaque id set by the workflow runner via TRAJECTORIES_WORKFLOW_ID env var. Lets trail compact --workflow collate all trajectories from a single workflow run. */ workflowId?: string; /** User-defined tags */ diff --git a/src/storage/file.ts b/src/storage/file.ts index 87420cc..2603681 100644 --- a/src/storage/file.ts +++ b/src/storage/file.ts @@ -5,7 +5,7 @@ * Active trajectories go in active/, completed in completed/YYYY-MM/. */ -import { existsSync } from "node:fs"; +import { type Dirent, existsSync } from "node:fs"; import { mkdir, readFile, readdir, unlink, writeFile } from "node:fs/promises"; import { join } from "node:path"; import { validateTrajectory } from "../core/schema.js"; @@ -69,6 +69,35 @@ interface TrajectoryIndex { >; } +/** + * Tagged result from reading a trajectory file. 
Lets callers distinguish + * missing files, malformed JSON, and schema violations so they can pick + * their own policy (reconcile counts and moves on; `get()` returns null; + * a future `getStrict()` could throw). + */ +export type ReadTrajectoryResult = + | { ok: true; trajectory: Trajectory } + | { + ok: false; + reason: "malformed_json" | "schema_violation" | "io_error"; + path: string; + error: unknown; + }; + +/** + * Aggregated counts emitted by reconcileIndex for observability. Exposed + * on the return value so tests and callers can assert on counts without + * parsing log output. + */ +export interface ReconcileSummary { + scanned: number; + added: number; + alreadyIndexed: number; + skippedMalformedJson: number; + skippedSchemaViolation: number; + skippedIoError: number; +} + /** * File system storage adapter */ @@ -112,12 +141,161 @@ export class FileStorage implements StorageAdapter { trajectories: {}, }); } + + // Reconcile on-disk trajectories with the index. Self-heals cases where + // files were written by a different process or an older layout that + // bypassed updateIndex. + await this.reconcileIndex(); + } + + /** + * Scan active/ and completed/ recursively and add any trajectory files + * missing from the index. Existing entries are preserved — reconcile + * only adds, never removes. + * + * Handles three on-disk layouts in completed/: + * - flat: completed/{id}.json (legacy workforce data) + * - monthly: completed/YYYY-MM/{id}.json (current save() writes) + * - nested: completed/.../{id}.json (defensive — any depth) + * + * Returns a ReconcileSummary so tests and CLI wrappers can observe + * outcomes without parsing logs. Only writes the index if anything was + * added. 
+ */ + async reconcileIndex(): Promise { + const summary: ReconcileSummary = { + scanned: 0, + added: 0, + alreadyIndexed: 0, + skippedMalformedJson: 0, + skippedSchemaViolation: 0, + skippedIoError: 0, + }; + + const index = await this.loadIndex(); + const before = Object.keys(index.trajectories).length; + + const discovered: string[] = []; + + // Walk active/ — intentionally NOT recursive; active trajectories + // always live at the flat root. + try { + const activeFiles = await readdir(this.activeDir); + for (const file of activeFiles) { + if (!file.endsWith(".json")) continue; + discovered.push(join(this.activeDir, file)); + } + } catch (error) { + if ((error as NodeJS.ErrnoException).code !== "ENOENT") throw error; + } + + // Walk completed/ recursively so we transparently support every + // historical layout without guessing depth. + await this.walkJsonFilesInto(this.completedDir, discovered); + + for (const filePath of discovered) { + summary.scanned += 1; + const result = await this.readTrajectoryFile(filePath); + if (!result.ok) { + if (result.reason === "malformed_json") { + summary.skippedMalformedJson += 1; + } else if (result.reason === "schema_violation") { + summary.skippedSchemaViolation += 1; + } else { + summary.skippedIoError += 1; + } + continue; + } + const trajectory = result.trajectory; + if (index.trajectories[trajectory.id]) { + summary.alreadyIndexed += 1; + continue; + } + index.trajectories[trajectory.id] = { + title: trajectory.task.title, + status: trajectory.status, + startedAt: trajectory.startedAt, + completedAt: trajectory.completedAt, + path: filePath, + }; + summary.added += 1; + } + + if (Object.keys(index.trajectories).length !== before) { + await this.saveIndex(index); + } + + // Only log when something interesting happened. Noise is worse than + // silence here — the CLI spinner is the user's feedback. 
+ const hadSkips = + summary.skippedMalformedJson + + summary.skippedSchemaViolation + + summary.skippedIoError > + 0; + if (summary.added > 0 || hadSkips) { + const parts = [`reconciled ${summary.added}/${summary.scanned}`]; + if (summary.skippedMalformedJson > 0) { + parts.push(`malformed: ${summary.skippedMalformedJson}`); + } + if (summary.skippedSchemaViolation > 0) { + parts.push(`invalid: ${summary.skippedSchemaViolation}`); + } + if (summary.skippedIoError > 0) { + parts.push(`io: ${summary.skippedIoError}`); + } + console.warn(`[trajectories] ${parts.join(", ")}`); + } + + return summary; } /** - * Save a trajectory + * Recursively collect all .json file paths under `dir` into `out`. + * Silently treats a missing directory as empty. */ - async save(trajectory: Trajectory): Promise { + private async walkJsonFilesInto(dir: string, out: string[]): Promise { + let entries: Dirent[]; + try { + entries = await readdir(dir, { withFileTypes: true }); + } catch (error) { + if ((error as NodeJS.ErrnoException).code === "ENOENT") return; + throw error; + } + + for (const entry of entries) { + const entryPath = join(dir, entry.name); + if (entry.isDirectory()) { + await this.walkJsonFilesInto(entryPath, out); + } else if (entry.isFile() && entry.name.endsWith(".json")) { + out.push(entryPath); + } + } + } + + /** + * Save a trajectory. + * + * Validates the input against the trajectory schema before touching + * disk. Closes the historical read/write asymmetry where save() would + * happily write data that the reader then rejected, producing files + * that could never be loaded back. + */ + async save(input: Trajectory): Promise { + const validation = validateTrajectory(input); + if (!validation.success) { + const issues = + validation.errors?.issues + .map((issue) => { + const path = issue.path.length > 0 ? issue.path.join(".") : "root"; + return `${path}: ${issue.message}`; + }) + .join("; ") ?? 
"unknown validation error"; + throw new Error(`Cannot save invalid trajectory: ${issues}`); + } + // Use the parsed (defaulted) trajectory so newly-written files + // always carry normalized fields like commits/filesChanged/tags. + const trajectory = validation.data as Trajectory; + const isCompleted = trajectory.status === "completed" || trajectory.status === "abandoned"; @@ -160,23 +338,29 @@ export class FileStorage implements StorageAdapter { // Check active first const activePath = join(this.activeDir, `${id}.json`); if (existsSync(activePath)) { - return this.readTrajectoryFile(activePath); + return this.readTrajectoryOrNull(activePath); } // Check completed (need to search subdirectories) const index = await this.loadIndex(); const entry = index.trajectories[id]; if (entry?.path && existsSync(entry.path)) { - return this.readTrajectoryFile(entry.path); + return this.readTrajectoryOrNull(entry.path); } - // Search completed directories manually if not in index + // Search completed directories manually if not in index. Handles both + // the flat `completed/{id}.json` layout (legacy) and the nested + // `completed/YYYY-MM/{id}.json` layout written by save(). 
try { + const flatPath = join(this.completedDir, `${id}.json`); + if (existsSync(flatPath)) { + return this.readTrajectoryOrNull(flatPath); + } const months = await readdir(this.completedDir); for (const month of months) { const filePath = join(this.completedDir, month, `${id}.json`); if (existsSync(filePath)) { - return this.readTrajectoryFile(filePath); + return this.readTrajectoryOrNull(filePath); } } } catch (error) { @@ -206,7 +390,7 @@ export class FileStorage implements StorageAdapter { let mostRecentTime = 0; for (const file of jsonFiles) { - const trajectory = await this.readTrajectoryFile( + const trajectory = await this.readTrajectoryOrNull( join(this.activeDir, file), ); if (trajectory) { @@ -382,20 +566,49 @@ export class FileStorage implements StorageAdapter { // Private helpers - private async readTrajectoryFile(path: string): Promise { + /** + * Read a trajectory file and return a tagged result so callers can + * distinguish missing files, malformed JSON, and schema violations. + * + * Does NOT log. Callers choose whether to warn, swallow, or throw. 
+ */ + private async readTrajectoryFile( + path: string, + ): Promise { + let content: string; try { - const content = await readFile(path, "utf-8"); - const data = JSON.parse(content); - const validation = validateTrajectory(data); - if (validation.success) { - return validation.data as Trajectory; - } - console.error(`Invalid trajectory at ${path}:`, validation.errors); - return null; + content = await readFile(path, "utf-8"); } catch (error) { - console.error(`Failed to read trajectory at ${path}:`, error); - return null; + return { ok: false, reason: "io_error", path, error }; } + + let data: unknown; + try { + data = JSON.parse(content); + } catch (error) { + return { ok: false, reason: "malformed_json", path, error }; + } + + const validation = validateTrajectory(data); + if (validation.success) { + return { ok: true, trajectory: validation.data as Trajectory }; + } + return { + ok: false, + reason: "schema_violation", + path, + error: validation.errors, + }; + } + + /** + * Convenience wrapper for callers that only care whether they got a + * trajectory. Returns null for any failure and writes nothing to the + * console — so nothing leaks into test output or the CLI spinner. + */ + private async readTrajectoryOrNull(path: string): Promise { + const result = await this.readTrajectoryFile(path); + return result.ok ? result.trajectory : null; } private async loadIndex(): Promise { diff --git a/tests/core/trailers.test.ts b/tests/core/trailers.test.ts index 92e9724..0adf37d 100644 --- a/tests/core/trailers.test.ts +++ b/tests/core/trailers.test.ts @@ -92,6 +92,26 @@ Signed-off-by: Dev `; const message = "Trajectory: traj_simple123456"; expect(parseTrajectoryFromMessage(message)).toBe("traj_simple123456"); }); + + it("should parse legacy timestamp-hex ids with internal underscores", () => { + // Legacy `traj__` format emitted by the workforce + // workflow runner via @agent-relay/sdk. The parser must return the + // FULL id, not a truncated prefix. 
+ const message = "Commit\n\nTrajectory: traj_1775734701264_ba65c69b"; + expect(parseTrajectoryFromMessage(message)).toBe( + "traj_1775734701264_ba65c69b", + ); + }); + + it("should parse legacy id even when other trailers follow", () => { + const message = `Fix thing + +Trajectory: traj_1775832005024_c2cf5052 +Co-authored-by: Someone `; + expect(parseTrajectoryFromMessage(message)).toBe( + "traj_1775832005024_c2cf5052", + ); + }); }); describe("getTrajectoryFromCommit", () => { @@ -277,6 +297,50 @@ Signed-off-by: Dev `; const script = generateHookScript(); expect(script).toContain("TRAJECTORIES_DATA_DIR"); }); + + it("should use an id character class that accepts legacy underscores", () => { + // Regression lock: if someone relaxes the id regex in schema.ts/id.ts + // but forgets to propagate here, `grep -o` silently truncates legacy + // `traj__` ids at the first internal underscore. This + // keeps the hook script's character class aligned with the schema. + const script = generateHookScript(); + expect(script).toContain("traj_[a-z0-9_]*"); + expect(script).not.toMatch(/traj_\[a-z0-9\]\*/); + }); + + it("should extract the full legacy id from a trajectory file via real grep", async () => { + // Behavioral regression test: runs the exact extraction command from + // the hook script against a fixture file and asserts it returns the + // full legacy id, not a truncated prefix. + const { spawnSync } = + await vi.importActual( + "node:child_process", + ); + const { mkdtempSync, writeFileSync, rmSync } = + await vi.importActual("node:fs"); + const { join } = + await vi.importActual("node:path"); + const { tmpdir } = + await vi.importActual("node:os"); + + const dir = mkdtempSync(join(tmpdir(), "trail-hook-legacy-")); + const fixtureFile = join(dir, "traj_1775734701264_ba65c69b.json"); + writeFileSync( + fixtureFile, + '{\n "id": "traj_1775734701264_ba65c69b",\n "version": 1\n}\n', + "utf-8", + ); + + try { + // Mirrors the hook's extraction pipeline exactly. 
+ const cmd = `grep -o '"id"[[:space:]]*:[[:space:]]*"traj_[a-z0-9_]*"' "${fixtureFile}" | head -1 | grep -o 'traj_[a-z0-9_]*'`; + const result = spawnSync("sh", ["-c", cmd], { encoding: "utf-8" }); + expect(result.status).toBe(0); + expect(result.stdout.trim()).toBe("traj_1775734701264_ba65c69b"); + } finally { + rmSync(dir, { recursive: true, force: true }); + } + }); }); describe("detectExistingHook", () => { diff --git a/tests/fixtures/workforce-trajectories/README.md b/tests/fixtures/workforce-trajectories/README.md new file mode 100644 index 0000000..9f189ae --- /dev/null +++ b/tests/fixtures/workforce-trajectories/README.md @@ -0,0 +1,18 @@ +# Workforce trajectory fixtures + +Hand-crafted fixtures modeled after real-world trajectory JSON files +produced by the workforce workflow runner. Each fixture intentionally +exercises at least one constraint that the original trajectory schema +rejected, so that relaxing the schema without breaking legacy-read +support is detectable via test. + +## Layout constraints being tested + +| Fixture | Layout | What makes it "legacy" | +|---|---|---| +| `completed/traj_1775734701264_ba65c69b.json` | Flat root | Flat `completed/` root (no `YYYY-MM/` subdir); timestamp-hex id with underscore; `role: "workflow-runner"`/`"specialist"`; `source.system: "workflow-runner"`; omitted `commits`/`filesChanged`/`projectId`/`tags` | +| `completed/2026-04/traj_1775832005024_c2cf5052.json` | `YYYY-MM` subdir | Same legacy shape but in a month subdirectory — verifies both layouts reconcile | + +If you add new fixtures, keep them ~2 KB or less. The point is to lock +down real-world schema violations as a contract, not to mirror the full +workforce corpus. 
diff --git a/tests/fixtures/workforce-trajectories/completed/2026-04/traj_1775832005024_c2cf5052.json b/tests/fixtures/workforce-trajectories/completed/2026-04/traj_1775832005024_c2cf5052.json new file mode 100644 index 0000000..0a8c215 --- /dev/null +++ b/tests/fixtures/workforce-trajectories/completed/2026-04/traj_1775832005024_c2cf5052.json @@ -0,0 +1,43 @@ +{ + "id": "traj_1775832005024_c2cf5052", + "version": 1, + "task": { + "title": "align-trajectory-writer-with-schema", + "source": { + "system": "workflow-runner", + "id": "2a41e1bb7c0e9fd5e3bfa701" + } + }, + "status": "completed", + "startedAt": "2026-04-10T14:40:05.024Z", + "completedAt": "2026-04-10T14:52:11.003Z", + "agents": [ + { + "name": "orchestrator", + "role": "workflow-runner", + "joinedAt": "2026-04-10T14:40:05.024Z" + }, + { + "name": "aligner", + "role": "specialist", + "joinedAt": "2026-04-10T14:40:08.441Z" + } + ], + "chapters": [ + { + "id": "ch_1b0c36aa", + "title": "Analysis", + "agentName": "orchestrator", + "startedAt": "2026-04-10T14:40:05.024Z", + "events": [ + { + "ts": 1775832005024, + "type": "reflection", + "content": "Scope: audit zod constraints, decide relaxation strategy.", + "significance": "high" + } + ], + "endedAt": "2026-04-10T14:40:08.441Z" + } + ] +} diff --git a/tests/fixtures/workforce-trajectories/completed/traj_1775734701264_ba65c69b.json b/tests/fixtures/workforce-trajectories/completed/traj_1775734701264_ba65c69b.json new file mode 100644 index 0000000..6475a1a --- /dev/null +++ b/tests/fixtures/workforce-trajectories/completed/traj_1775734701264_ba65c69b.json @@ -0,0 +1,67 @@ +{ + "id": "traj_1775734701264_ba65c69b", + "version": 1, + "task": { + "title": "finish-npm-provenance-persona-workflow", + "source": { + "system": "workflow-runner", + "id": "051e6f8a822765938d8f29fc" + } + }, + "status": "completed", + "startedAt": "2026-04-09T11:38:21.264Z", + "completedAt": "2026-04-09T11:42:17.910Z", + "agents": [ + { + "name": "orchestrator", + "role": 
"workflow-runner", + "joinedAt": "2026-04-09T11:38:21.264Z" + }, + { + "name": "docs", + "role": "specialist", + "joinedAt": "2026-04-09T11:38:24.613Z" + }, + { + "name": "publisher", + "role": "specialist", + "joinedAt": "2026-04-09T11:38:24.614Z" + } + ], + "chapters": [ + { + "id": "ch_efda3e2f", + "title": "Planning", + "agentName": "orchestrator", + "startedAt": "2026-04-09T11:38:21.264Z", + "events": [ + { + "ts": 1775734701264, + "type": "note", + "content": "Purpose: Document persona skills + npm-provenance persona." + } + ], + "endedAt": "2026-04-09T11:38:24.583Z" + }, + { + "id": "ch_a8a6a1a8", + "title": "Execution: update-readme, create-publish-workflow", + "agentName": "orchestrator", + "startedAt": "2026-04-09T11:38:24.613Z", + "events": [ + { + "ts": 1775734734733, + "type": "completion-evidence", + "content": "update-readme verification-based completion — Verification passed", + "significance": "medium", + "raw": { + "stepName": "update-readme", + "completionMode": "verification", + "exitCode": 0 + } + } + ], + "endedAt": "2026-04-09T11:42:17.910Z" + } + ] +} diff --git a/tests/storage/reconcile-real-data.test.ts b/tests/storage/reconcile-real-data.test.ts new file mode 100644 index 0000000..8021506 --- /dev/null +++ b/tests/storage/reconcile-real-data.test.ts @@ -0,0 +1,184 @@ +/** + * Fixture-based reconcile tests. + * + * Every test in this file uses REAL-SHAPE trajectory data (hand-redacted + * from workforce) committed under tests/fixtures/workforce-trajectories/. + * The point is to lock down the legacy data contract: the reader must + * accept real-world role values, id shapes, and layouts without + * rejecting the data, and reconcileIndex must populate the index from + * both flat-root and YYYY-MM subdir layouts. + * + * If a future refactor breaks reconcile for legacy data, these tests + * fail in ~50ms — long before any E2E gate fires. 
+ */ + +import { cp, mkdir, mkdtemp, readFile, rm } from "node:fs/promises"; +import { tmpdir } from "node:os"; +import { dirname, join } from "node:path"; +import { fileURLToPath } from "node:url"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); +const FIXTURE_ROOT = join( + __dirname, + "..", + "fixtures", + "workforce-trajectories", +); + +async function seedFixtureInto(tempDir: string): Promise { + const trajRoot = join(tempDir, ".trajectories"); + await mkdir(join(trajRoot, "active"), { recursive: true }); + await mkdir(join(trajRoot, "completed"), { recursive: true }); + await cp(join(FIXTURE_ROOT, "completed"), join(trajRoot, "completed"), { + recursive: true, + }); +} + +describe("FileStorage reconcile — real workforce fixtures", () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), "trail-fixture-")); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + it("reconciles the legacy flat-root layout without rejecting it", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + await seedFixtureInto(tempDir); + + const storage = new FileStorage(tempDir); + await storage.initialize(); + + const summaries = await storage.list({ status: "completed" }); + const ids = summaries.map((s) => s.id); + expect(ids).toContain("traj_1775734701264_ba65c69b"); + expect(ids).toContain("traj_1775832005024_c2cf5052"); + }); + + it("accepts legacy role values like 'workflow-runner' and 'specialist'", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + await seedFixtureInto(tempDir); + + const storage = new FileStorage(tempDir); + await storage.initialize(); + const trajectory = await storage.get("traj_1775734701264_ba65c69b"); + + expect(trajectory).not.toBeNull(); + const roles = trajectory?.agents.map((a) => 
a.role) ?? []; + expect(roles).toContain("workflow-runner"); + expect(roles).toContain("specialist"); + }); + + it("accepts legacy timestamp-hex trajectory ids", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + await seedFixtureInto(tempDir); + + const storage = new FileStorage(tempDir); + await storage.initialize(); + const trajectory = await storage.get("traj_1775734701264_ba65c69b"); + + expect(trajectory?.id).toBe("traj_1775734701264_ba65c69b"); + }); + + it("defaults missing commits/filesChanged/tags arrays on read", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + await seedFixtureInto(tempDir); + + const storage = new FileStorage(tempDir); + await storage.initialize(); + const trajectory = await storage.get("traj_1775734701264_ba65c69b"); + + expect(trajectory?.commits).toEqual([]); + expect(trajectory?.filesChanged).toEqual([]); + expect(trajectory?.tags).toEqual([]); + expect(trajectory?.projectId).toBeUndefined(); + }); + + it("populates index.json with both fixtures after reconcile", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + await seedFixtureInto(tempDir); + + const storage = new FileStorage(tempDir); + await storage.initialize(); + + const indexRaw = await readFile( + join(tempDir, ".trajectories", "index.json"), + "utf-8", + ); + const index = JSON.parse(indexRaw); + expect(Object.keys(index.trajectories ?? 
{}).sort()).toEqual([ + "traj_1775734701264_ba65c69b", + "traj_1775832005024_c2cf5052", + ]); + expect(index.trajectories.traj_1775734701264_ba65c69b.path).toContain( + "completed/traj_1775734701264_ba65c69b.json", + ); + expect(index.trajectories.traj_1775832005024_c2cf5052.path).toContain( + "completed/2026-04/traj_1775832005024_c2cf5052.json", + ); + }); + + it("reconcileIndex reports a structured summary", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + await seedFixtureInto(tempDir); + + const storage = new FileStorage(tempDir); + await storage.initialize(); + + // A second reconcile on an already-reconciled index should scan 2 + // files, count them as already indexed, and add nothing. + const summary = await storage.reconcileIndex(); + expect(summary.scanned).toBe(2); + expect(summary.added).toBe(0); + expect(summary.alreadyIndexed).toBe(2); + expect(summary.skippedMalformedJson).toBe(0); + expect(summary.skippedSchemaViolation).toBe(0); + }); + + it("counts malformed JSON fixtures under skippedMalformedJson", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + const { writeFile } = await import("node:fs/promises"); + + const trajRoot = join(tempDir, ".trajectories"); + await mkdir(join(trajRoot, "completed"), { recursive: true }); + await writeFile( + join(trajRoot, "completed", "traj_broken0000_deadbeef.json"), + "{ not valid json", + "utf-8", + ); + + const storage = new FileStorage(tempDir); + const summary = await storage.reconcileIndex(); + + expect(summary.scanned).toBe(1); + expect(summary.added).toBe(0); + expect(summary.skippedMalformedJson).toBe(1); + expect(summary.skippedSchemaViolation).toBe(0); + }); + + it("counts schema-violating fixtures under skippedSchemaViolation", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + const { writeFile } = await import("node:fs/promises"); + + const trajRoot = join(tempDir, ".trajectories"); + await 
mkdir(join(trajRoot, "completed"), { recursive: true }); + await writeFile( + join(trajRoot, "completed", "traj_nothing0000_0000.json"), + JSON.stringify({ id: "traj_nothing0000_0000", version: 1 }), + "utf-8", + ); + + const storage = new FileStorage(tempDir); + const summary = await storage.reconcileIndex(); + + expect(summary.scanned).toBe(1); + expect(summary.added).toBe(0); + expect(summary.skippedMalformedJson).toBe(0); + expect(summary.skippedSchemaViolation).toBe(1); + }); +}); diff --git a/tests/storage/storage.test.ts b/tests/storage/storage.test.ts index cdc6eda..8298ef9 100644 --- a/tests/storage/storage.test.ts +++ b/tests/storage/storage.test.ts @@ -397,6 +397,88 @@ describe("FileStorage", () => { expect(index.trajectories[trajectory.id]).toBeDefined(); }); }); + + describe("reconcileIndex", () => { + it("picks up completed trajectories dropped in the flat completed/ root", async () => { + // Arrange: write a trajectory file directly into completed/ without + // touching index.json — mimics a stale index produced by an older + // writer or an out-of-band process. + const { FileStorage } = await import("../../src/storage/file.js"); + const { createTrajectory } = await import("../../src/core/trajectory.js"); + const { mkdir, writeFile } = await import("node:fs/promises"); + + const bootstrap = new FileStorage(tempDir); + await bootstrap.initialize(); + + const completedDir = join(tempDir, ".trajectories", "completed"); + await mkdir(completedDir, { recursive: true }); + const trajectory = { + ...createTrajectory({ title: "Stale flat file" }), + status: "completed" as const, + completedAt: new Date().toISOString(), + }; + await writeFile( + join(completedDir, `${trajectory.id}.json`), + JSON.stringify(trajectory, null, 2), + "utf-8", + ); + + // Act: initialize a fresh FileStorage; reconcile runs inside initialize(). 
+ const storage = new FileStorage(tempDir); + await storage.initialize(); + const summaries = await storage.list({ status: "completed" }); + + // Assert + expect(summaries.map((s) => s.id)).toContain(trajectory.id); + }); + + it("picks up completed trajectories inside YYYY-MM subdirectories", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + const { createTrajectory } = await import("../../src/core/trajectory.js"); + const { mkdir, writeFile } = await import("node:fs/promises"); + + const bootstrap = new FileStorage(tempDir); + await bootstrap.initialize(); + + const monthDir = join(tempDir, ".trajectories", "completed", "2026-04"); + await mkdir(monthDir, { recursive: true }); + const trajectory = { + ...createTrajectory({ title: "Stale month file" }), + status: "completed" as const, + completedAt: "2026-04-01T00:00:00.000Z", + }; + await writeFile( + join(monthDir, `${trajectory.id}.json`), + JSON.stringify(trajectory, null, 2), + "utf-8", + ); + + const storage = new FileStorage(tempDir); + await storage.initialize(); + const summaries = await storage.list({ status: "completed" }); + + expect(summaries.map((s) => s.id)).toContain(trajectory.id); + }); + + it("does not overwrite existing index entries", async () => { + const { FileStorage } = await import("../../src/storage/file.js"); + const { createTrajectory } = await import("../../src/core/trajectory.js"); + + const storage = new FileStorage(tempDir); + await storage.initialize(); + const trajectory = createTrajectory({ title: "Originally saved" }); + await storage.save(trajectory); + + // Running reconcile again should be a no-op for this entry. 
+ await storage.reconcileIndex(); + + const { readFileSync } = await import("node:fs"); + const index = JSON.parse( + readFileSync(join(tempDir, ".trajectories", "index.json"), "utf-8"), + ); + expect(index.trajectories[trajectory.id].title).toBe("Originally saved"); + }); + }); }); describe("StorageAdapter Interface", () => { diff --git a/workflows/fix-trajectory-schema/.gitignore b/workflows/fix-trajectory-schema/.gitignore new file mode 100644 index 0000000..f0a5150 --- /dev/null +++ b/workflows/fix-trajectory-schema/.gitignore @@ -0,0 +1,3 @@ +DECISION.md +investigation-findings.md +PR_URLS.txt diff --git a/workflows/fix-trajectory-schema/01-investigate.ts b/workflows/fix-trajectory-schema/01-investigate.ts new file mode 100644 index 0000000..11eff03 --- /dev/null +++ b/workflows/fix-trajectory-schema/01-investigate.ts @@ -0,0 +1,291 @@ +/** + * 01-investigate.ts + * + * Phase 1 of the trajectory-schema comprehensive fix. + * + * Produces TWO files on disk and hands control back to the master executor: + * - investigation-findings.md — full audit, options, recommendation + * - DECISION.md — auto-picked by the lead (no human gate) + * + * The lead synthesizer is responsible for writing both files so the pipeline + * can chain directly into 02-implement.ts without human intervention. The + * human checkpoint is the PR review at the end, not the middle of the run. + * + * Normally invoked by run-all.sh, but can be run standalone: + * agent-relay run workflows/fix-trajectory-schema/01-investigate.ts + */ + +import { workflow } from "@agent-relay/sdk/workflows"; + +const TRAJ_ROOT = + process.env.TRAJ_ROOT ?? + "/Users/khaliqgant/Projects/AgentWorkforce/trajectories"; +const WORKFORCE_ROOT = + process.env.WORKFORCE_ROOT ?? 
+ "/Users/khaliqgant/Projects/AgentWorkforce/workforce"; +const OUTPUT_DIR = `${TRAJ_ROOT}/workflows/fix-trajectory-schema`; +const FINDINGS_PATH = `${OUTPUT_DIR}/investigation-findings.md`; +const DECISION_PATH = `${OUTPUT_DIR}/DECISION.md`; + +async function main() { + const result = await workflow("fix-trajectory-schema-investigate") + .description( + "Investigate trajectory schema mismatch between trajectories lib and workforce writers", + ) + .pattern("dag") + .channel("wf-fix-traj-schema-investigate") + .maxConcurrency(4) + .timeout(1_800_000) + + .agent("lead", { + cli: "claude", + role: "Lead architect — synthesizes findings into a design doc", + }) + .agent("writer-auditor", { + cli: "codex", + preset: "worker", + role: "Audits workforce code for trajectory write paths", + }) + .agent("schema-auditor", { + cli: "codex", + preset: "worker", + role: "Audits trajectories schema constraints and history", + }) + + // ---------- Phase 1a: parallel data collection ---------- + + .step("dump-workforce-data", { + type: "deterministic", + command: ` +set -e +echo "=== FILE LIST ===" +ls -1 ${WORKFORCE_ROOT}/.trajectories/completed/*.json 2>/dev/null || true +find ${WORKFORCE_ROOT}/.trajectories/completed -mindepth 2 -name "*.json" 2>/dev/null || true +echo "" +echo "=== COUNT ===" +find ${WORKFORCE_ROOT}/.trajectories/completed -name "*.json" -type f | wc -l +echo "" +echo "=== INDEX.JSON ===" +cat ${WORKFORCE_ROOT}/.trajectories/index.json 2>/dev/null || echo "(missing)" +echo "" +echo "=== UNIQUE TRAJECTORY IDS ===" +find ${WORKFORCE_ROOT}/.trajectories -name "traj_*.json" -type f -exec basename {} .json \\; | sort -u +echo "" +echo "=== UNIQUE AGENT ROLES ===" +find ${WORKFORCE_ROOT}/.trajectories/completed -name "*.json" -type f -exec grep -h '"role"' {} \\; | sort -u +echo "" +echo "=== UNIQUE SOURCE.SYSTEM VALUES ===" +find ${WORKFORCE_ROOT}/.trajectories/completed -name "*.json" -type f -exec grep -hA1 '"source"' {} \\; | grep '"system"' | sort -u +echo "" 
+echo "=== STATUS VALUES ===" +find ${WORKFORCE_ROOT}/.trajectories/completed -name "*.json" -type f -exec grep -h '"status"' {} \\; | sort -u | head -20 +echo "" +echo "=== SAMPLE FIRST FILE (first 60 lines) ===" +find ${WORKFORCE_ROOT}/.trajectories/completed -name "*.json" -type f | head -1 | xargs head -60 +`.trim(), + captureOutput: true, + failOnError: false, + }) + + .step("dump-trajectories-schema", { + type: "deterministic", + command: ` +set -e +echo "=== schema.ts ===" +cat ${TRAJ_ROOT}/src/core/schema.ts +echo "" +echo "=== id.ts ===" +cat ${TRAJ_ROOT}/src/core/id.ts +echo "" +echo "=== types.ts (relevant types only) ===" +cat ${TRAJ_ROOT}/src/core/types.ts +echo "" +echo "=== validateTrajectory callsites ===" +grep -rn "validateTrajectory\\|readTrajectoryFile" ${TRAJ_ROOT}/src --include="*.ts" +echo "" +echo "=== git log on schema.ts (last 10) ===" +git -C ${TRAJ_ROOT} log --oneline -10 -- src/core/schema.ts +echo "" +echo "=== git log on id.ts (last 10) ===" +git -C ${TRAJ_ROOT} log --oneline -10 -- src/core/id.ts +`.trim(), + captureOutput: true, + failOnError: false, + }) + + .step("audit-writer", { + agent: "writer-auditor", + dependsOn: ["dump-workforce-data"], + task: ` +Find the code in workforce that produces the trajectory JSON files. + +Observed data from disk: +{{steps.dump-workforce-data.output}} + +Your job: +1. Search ${WORKFORCE_ROOT}/packages for any code that writes to .trajectories/ + or imports from 'agent-trajectories'. Use grep/rg. +2. Search ${WORKFORCE_ROOT}/node_modules/agent-trajectories/dist for the code + paths that emit files with role values like "workflow-runner", "specialist", + "orchestrator" — are these user-supplied roles or produced by the SDK? +3. Identify the exact file(s) and function(s) that construct the Trajectory + object before calling storage.save(). +4. 
Determine whether the writer: + (a) bypasses validateTrajectory() entirely + (b) uses a looser internal type than the zod schema + (c) lives in workforce source (fixable here) + (d) lives in an installed package like agent-trajectories or another dep + +Output format (plain markdown, no preamble): +### Writer location +- File path(s): +- Function name(s): + +### Role values — origin +- Are roles hard-coded? User-supplied? Mapped from persona configs? Cite the line. + +### ID generation — origin +- Where is traj_{timestamp}_{hex} generated? Cite the function. + +### Classification +- Circle one: (a) bypasses validation / (b) loose internal type / (c) workforce source / (d) external dep + +### Recommended source fix +- One paragraph: what minimal writer change aligns with the trajectories schema, + OR a statement that the schema is wrong and the writer is right. +`.trim(), + verification: { type: "exit_code" }, + }) + + .step("audit-schema", { + agent: "schema-auditor", + dependsOn: ["dump-trajectories-schema"], + task: ` +Enumerate every strict enum and regex in the trajectories schema that could +reject real-world data. You have the source already: + +{{steps.dump-trajectories-schema.output}} + +Your job: produce a complete constraint inventory. + +For each constraint, report: +- Field path (e.g., agents[].role, id, chapters[].events[].significance) +- Current constraint (enum values, regex) +- Blast radius: which callers read/write this field +- Risk if relaxed: what guarantees would be lost +- Observed real values that violate the constraint (cross-reference with + any values you can see from the workforce audit — the data is at + ${WORKFORCE_ROOT}/.trajectories/completed) + +Also answer: +- Is validateTrajectory called on save() or only on read()? (Look at + storage/file.ts.) If asymmetric, that's a bug — flag it. +- Has the schema been loosened before? Check git log. 
+ +Output format (plain markdown): +### Constraint inventory +| Field | Current | Blast radius | Violated by | +|---|---|---|---| +| ... | ... | ... | ... | + +### Read/write asymmetry +- Findings on save() vs read() validation. + +### Schema loosening history +- Notable past changes, if any. + +### Recommendation +- For each problematic constraint, one line on preferred fix: + relax-regex | open-string | superset-enum | discriminated-union | leave-as-is. +`.trim(), + verification: { type: "exit_code" }, + }) + + // ---------- Phase 1b: synthesis ---------- + + .step("synthesize-findings", { + agent: "lead", + dependsOn: ["audit-writer", "audit-schema", "dump-workforce-data"], + task: ` +Produce TWO files on disk — do not echo them to stdout. + +File 1: ${FINDINGS_PATH} +File 2: ${DECISION_PATH} + +Inputs: + +=== Workforce data dump === +{{steps.dump-workforce-data.output}} + +=== Writer audit === +{{steps.audit-writer.output}} + +=== Schema audit === +{{steps.audit-schema.output}} + +--- File 1: investigation-findings.md --- + +Required sections (in this order): + +1. # Trajectory Schema Fix — Investigation Findings +2. ## Problem statement (3-5 lines, no preamble) +3. ## What the writer produces + - Exact data shape, cite unique role/status/source values +4. ## Constraint inventory + - Table from the schema audit +5. ## Read/write asymmetry + - Does save() validate? Does read() reject? Call the sub-bug out. +6. ## Options + For each option: name, change set, pros, cons, blast radius. + Include at minimum: + - Option A: Relax schema (open-string role + loosened ID regex) + - Option B: Superset enum for role + typed ID unions + - Option C: Fix at source (align workforce writer to current schema) +7. ## Recommendation + - One paragraph. State the pick and why. 
+ +--- File 2: DECISION.md --- + +The pipeline will read this file directly in 02-implement.ts — use exactly +this format (one key=value per line, no backticks, no extra prose): + +SCHEMA_POLICY= +VALIDATE_ON_SAVE= +FIX_WORKFORCE_WRITER= +ID_REGEX= +ROLE_POLICY= +STATUS_POLICY= +SOURCE_POLICY= +NOTES= + +Rules for the auto-decision: +- Pick the option that unblocks the real workforce data with the smallest + schema blast radius. Bias toward Option A unless the audit surfaced a + stronger reason. +- If the writer code lives outside workforce's control (e.g., inside the + installed agent-trajectories package), FIX_WORKFORCE_WRITER=no — fix + it in trajectories repo only. +- If save() does not currently validate but read() does, + VALIDATE_ON_SAVE=yes (close the asymmetry). +- Your choice must reflect the EVIDENCE in the audits, not a default. + +After writing both files, ls -la them to confirm both exist. +`.trim(), + verification: { type: "file_exists", value: DECISION_PATH }, + }) + + .onError("fail-fast") + .run({ + cwd: TRAJ_ROOT, + onEvent: (e) => console.log(`[${e.type}]${e.step ? ` ${e.step}` : ""}`), + }); + + console.log("\nInvestigation complete:", result.status); + console.log(`Findings: ${FINDINGS_PATH}`); + console.log(`Decision: ${DECISION_PATH}`); +} + +main().catch((error) => { + console.error(error); + process.exitCode = 1; +}); diff --git a/workflows/fix-trajectory-schema/02-implement.ts b/workflows/fix-trajectory-schema/02-implement.ts new file mode 100644 index 0000000..2a55874 --- /dev/null +++ b/workflows/fix-trajectory-schema/02-implement.ts @@ -0,0 +1,485 @@ +/** + * 02-implement.ts + * + * Phase 3 + 4 of the trajectory-schema comprehensive fix. + * + * Prerequisites (validated by require-decision-file step): + * - investigation-findings.md and DECISION.md exist at OUTPUT_DIR + * - TRAJ_WORKTREE and WORKFORCE_WORKTREE env vars point to existing + * git worktrees created by run-all.sh + * + * This workflow: + * 1. 
Reads DECISION.md + findings + current source context from the + * trajectories worktree + * 2. Implements schema/reader/save-validation changes inside TRAJ_WORKTREE + * (stacked on top of the reconcile work already present on that branch) + * 3. Conditionally aligns workforce writer inside WORKFORCE_WORKTREE + * 4. Builds + runs unit tests from TRAJ_WORKTREE + * 5. E2E verification: runs the worktree-built CLI against a copy of + * workforce/.trajectories/, gating on 3 hard signals + * 6. Lead commits in both worktrees, pushes, opens 2 PRs, prints URLs + * + * Normally invoked by run-all.sh, but can be run standalone IF you set + * TRAJ_WORKTREE + WORKFORCE_WORKTREE manually first. + */ + +import { workflow } from "@agent-relay/sdk/workflows"; + +const TRAJ_ROOT = + process.env.TRAJ_ROOT ?? + "/Users/khaliqgant/Projects/AgentWorkforce/trajectories"; +const WORKFORCE_ROOT = + process.env.WORKFORCE_ROOT ?? + "/Users/khaliqgant/Projects/AgentWorkforce/workforce"; +const TRAJ_WORKTREE = process.env.TRAJ_WORKTREE ?? "/tmp/wt-traj-schema"; +const WORKFORCE_WORKTREE = + process.env.WORKFORCE_WORKTREE ?? "/tmp/wt-workforce-writer"; +const TRAJ_BRANCH = + process.env.TRAJ_BRANCH ?? "fix/trajectory-schema-comprehensive"; +const WORKFORCE_BRANCH = + process.env.WORKFORCE_BRANCH ?? "fix/trajectory-writer-align"; + +const WORKFLOW_DIR = `${TRAJ_ROOT}/workflows/fix-trajectory-schema`; +const FINDINGS_PATH = `${WORKFLOW_DIR}/investigation-findings.md`; +const DECISION_PATH = `${WORKFLOW_DIR}/DECISION.md`; +// The lead writes PR URL(s) here as its durable completion artifact. +// Verification gates on the file existing instead of on the lead printing +// a specific string to stdout — relay channel messages don't count as +// stdout and caused earlier runs to time out even after the PR was opened. 
+const PR_URLS_PATH = `${WORKFLOW_DIR}/PR_URLS.txt`; +const E2E_SCRATCH = "/tmp/traj-schema-e2e"; + +async function main() { + const result = await workflow("fix-trajectory-schema-implement") + .description( + "Implement schema/reader/writer fix in worktrees, E2E-gate against real data, open PRs", + ) + .pattern("dag") + .channel("wf-fix-traj-schema-implement") + .maxConcurrency(4) + .timeout(3_600_000) + + .agent("lead", { + cli: "claude", + role: "Lead coordinator — reviews outputs, commits in worktrees, opens PRs", + }) + .agent("trajectories-worker", { + cli: "codex", + preset: "worker", + role: "Implements schema + reader + save-validation in trajectories worktree", + }) + .agent("workforce-worker", { + cli: "codex", + preset: "worker", + role: "Aligns trajectory writer in workforce worktree (conditional)", + }) + + // ---------- Phase A: validate prereqs + gather context ---------- + + .step("require-decision-file", { + type: "deterministic", + command: ` +set -e +if [ ! -f "${FINDINGS_PATH}" ]; then + echo "FATAL: ${FINDINGS_PATH} missing. Run 01-investigate.ts first." >&2 + exit 1 +fi +if [ ! -f "${DECISION_PATH}" ]; then + echo "FATAL: ${DECISION_PATH} missing." >&2 + exit 1 +fi +if [ ! -d "${TRAJ_WORKTREE}" ]; then + echo "FATAL: TRAJ_WORKTREE=${TRAJ_WORKTREE} is not a directory." >&2 + echo "Run run-all.sh which sets up the worktrees, or create it manually:" >&2 + echo " git -C ${TRAJ_ROOT} worktree add ${TRAJ_WORKTREE} -b ${TRAJ_BRANCH}" >&2 + exit 1 +fi +if [ ! -d "${WORKFORCE_WORKTREE}" ]; then + echo "FATAL: WORKFORCE_WORKTREE=${WORKFORCE_WORKTREE} is not a directory." 
>&2 + exit 1 +fi +echo "=== DECISION ===" +cat ${DECISION_PATH} +echo "" +echo "=== FINDINGS (context injection) ===" +cat ${FINDINGS_PATH} +echo "" +echo "=== TRAJ_WORKTREE branch ===" +git -C ${TRAJ_WORKTREE} branch --show-current +echo "" +echo "=== WORKFORCE_WORKTREE branch ===" +git -C ${WORKFORCE_WORKTREE} branch --show-current +`.trim(), + captureOutput: true, + failOnError: true, + }) + + .step("read-trajectories-source", { + type: "deterministic", + dependsOn: ["require-decision-file"], + command: ` +set -e +echo "=== src/core/schema.ts ===" +cat ${TRAJ_WORKTREE}/src/core/schema.ts +echo "" +echo "=== src/core/id.ts ===" +cat ${TRAJ_WORKTREE}/src/core/id.ts +echo "" +echo "=== src/core/types.ts ===" +cat ${TRAJ_WORKTREE}/src/core/types.ts +echo "" +echo "=== src/storage/file.ts (full — worker must read but not modify storage internals) ===" +cat ${TRAJ_WORKTREE}/src/storage/file.ts +`.trim(), + captureOutput: true, + failOnError: true, + }) + + // Baseline vitest count — lets impl-trajectories assert monotonicity. + // If the post-impl pass count drops below baseline, the workflow knows + // the worker introduced a regression at vitest-layer (fast) rather + // than waiting for the E2E gate (8 minutes). + .step("vitest-baseline", { + type: "deterministic", + dependsOn: ["require-decision-file"], + command: ` +set -e +cd ${TRAJ_WORKTREE} +if [ ! 
-e node_modules ]; then + ln -sf ${TRAJ_ROOT}/node_modules ${TRAJ_WORKTREE}/node_modules +fi +OUTPUT=$(npx vitest run 2>&1 || true) +echo "$OUTPUT" | tail -10 +PASSED=$(echo "$OUTPUT" | grep -Eo "Tests [0-9]+ passed" | grep -Eo "[0-9]+" | head -1) +if [ -z "$PASSED" ]; then + echo "BASELINE_FAIL: could not parse vitest output" >&2 + exit 1 +fi +echo "BASELINE_PASSED=$PASSED" +`.trim(), + captureOutput: true, + failOnError: true, + }) + + // ---------- Phase B: parallel implementation in worktrees ---------- + + .step("impl-trajectories", { + agent: "trajectories-worker", + dependsOn: ["read-trajectories-source", "vitest-baseline"], + task: ` +Implement the trajectory schema loosening inside the trajectories WORKTREE. +All paths below are in the worktree, NOT the primary checkout. + +Worktree: ${TRAJ_WORKTREE} +Branch: ${TRAJ_BRANCH} + +=== HARD CONSTRAINTS — read these before touching anything === + +1. The files below are COMPLETE AND CORRECT at this branch. DO NOT modify + them. They are load-bearing for legacy-data compatibility. + - src/storage/file.ts (reconcileIndex, readTrajectoryFile, + readTrajectoryOrNull, walkJsonFilesInto, ReconcileSummary, the + save() path's validateTrajectory call — ALL of this is done) + - tests/storage/storage.test.ts (all existing reconcile tests) + - tests/storage/reconcile-real-data.test.ts (fixture-based locks) + - tests/fixtures/workforce-trajectories/** (fixtures) + + If you find yourself about to write a line in any of those files, + STOP. That is a regression. Exit with a non-zero status and report + exactly which file tempted you to edit. + +2. 
Your work is scoped to these files ONLY: + - src/core/schema.ts (constraint loosening per DECISION) + - src/core/id.ts (isValidTrajectoryId regex) + - src/core/types.ts (mirror TS types to zod shape) + - tests/core/schema.test.ts (NEW unit tests for loosened constraints) + + If a deliverable below seems to require editing outside this list, it + is already done by a previous step — move on. + +3. Run \`vitest run\` BEFORE making any edits and record the pass count. + The baseline from the vitest-baseline step is: + {{steps.vitest-baseline.output}} + After your edits, run vitest again. The post-impl pass count MUST be + >= the baseline pass count. A drop is a hard fail — revert and retry. + +=== DECISION + FINDINGS === +{{steps.require-decision-file.output}} + +=== CURRENT SOURCE (for reference — DO NOT EDIT storage/file.ts) === +{{steps.read-trajectories-source.output}} + +=== DELIVERABLES === + +1. src/core/schema.ts — apply ID_REGEX, ROLE_POLICY, and any other + constraint the DECISION flags. If a constraint you'd change is already + loose enough for the DECISION, leave it alone. + +2. src/core/id.ts — if ID_REGEX changed, update isValidTrajectoryId so + both the canonical form AND the legacy timestamp-hex form validate. + +3. src/core/types.ts — mirror the schema changes at the TS type level so + existing TS consumers don't break. Prefer \`string\` over literal + unions where the zod side has been opened up. + +4. tests/core/schema.test.ts — add (or update) unit tests that assert: + - Legacy ids like "traj_1775734701264_ba65c69b" pass validateTrajectory + - Legacy role values like "workflow-runner" pass + - Optional/default fields (commits, filesChanged, tags) default to [] + - projectId is optional + If the file doesn't exist, create it in the same style as + tests/core/trailers.test.ts. + +5. Run \`cd ${TRAJ_WORKTREE} && npx vitest run\`. Report: + - POST_IMPL_PASSED= + - Whether POST_IMPL_PASSED >= baseline (yes/no) + If not yes, exit non-zero. 
+ +=== CONSTRAINTS === + - Do NOT git commit + - Do NOT touch workflows/, node_modules, dist/, or package.json + - Do NOT add new dependencies + - Keep the diff minimal + +Exit 0 on success (all tests pass, pass count monotonically non-decreasing). +`.trim(), + verification: { type: "exit_code", value: "0" }, + retries: 1, + }) + + .step("impl-workforce", { + agent: "workforce-worker", + dependsOn: ["require-decision-file"], + task: ` +Conditional on FIX_WORKFORCE_WRITER in DECISION.md. + +DECISION + FINDINGS: +{{steps.require-decision-file.output}} + +If the DECISION says FIX_WORKFORCE_WRITER=no, print exactly: + SKIPPED: no workforce changes requested +and exit 0. Do nothing else. + +Otherwise, all work happens inside the workforce WORKTREE: + +Worktree: ${WORKFORCE_WORKTREE} +Branch: ${WORKFORCE_BRANCH} + +1. Find the writer code path cited in findings "Writer location". +2. Align it with the new trajectories schema per ROLE_POLICY and ID_REGEX. + Preserve domain meaning — map "workflow-runner" to an equivalent in + the new shape rather than dropping it. +3. If the workforce package.json pins agent-trajectories to a specific + version AND the DECISION requires a version bump, update it. Otherwise + leave the dep alone. +4. Run the workforce test command (check package.json scripts — likely + \`npm test\` or \`pnpm test\`). Paste the summary. +5. Do NOT git commit — the lead commits. + +Exit 0 on clean skip or clean success. +`.trim(), + verification: { type: "exit_code", value: "0" }, + retries: 1, + }) + + // ---------- Phase C: build + unit tests in worktree ---------- + + .step("build-and-test", { + type: "deterministic", + dependsOn: ["impl-trajectories"], + command: ` +set -e +cd ${TRAJ_WORKTREE} +echo "=== npm install (worktree may need linked node_modules) ===" +if [ ! 
-d node_modules ]; then + ln -sf ${TRAJ_ROOT}/node_modules ${TRAJ_WORKTREE}/node_modules +fi +echo "=== npm run build ===" +npm run build +echo "" +echo "=== vitest ===" +npx vitest run 2>&1 | tail -40 +`.trim(), + captureOutput: true, + failOnError: true, + }) + + // ---------- Phase D: real-data E2E gate ---------- + + .step("prep-e2e-fixture", { + type: "deterministic", + dependsOn: ["build-and-test"], + command: ` +set -e +rm -rf ${E2E_SCRATCH} +mkdir -p ${E2E_SCRATCH} +cp -R ${WORKFORCE_ROOT}/.trajectories ${E2E_SCRATCH}/.trajectories +echo "=== fixture files ===" +find ${E2E_SCRATCH}/.trajectories -type f -name "*.json" | sort +echo "" +echo "=== count ===" +find ${E2E_SCRATCH}/.trajectories -type f -name "*.json" | wc -l +`.trim(), + captureOutput: true, + failOnError: true, + }) + + .step("e2e-compact", { + type: "deterministic", + dependsOn: ["prep-e2e-fixture"], + command: ` +set -e +# Use a SCRATCH copy of the fixture — cp brought workforce's original +# index.json with it, which would make reconcile look like it did +# nothing (everything appears pre-indexed). Rebuild from just the +# on-disk files so the reconcile summary reflects actual work. 
+FRESH=${E2E_SCRATCH}-fresh +rm -rf "$FRESH" +mkdir -p "$FRESH/.trajectories/completed" +if ls ${E2E_SCRATCH}/.trajectories/completed/*.json >/dev/null 2>&1; then + cp ${E2E_SCRATCH}/.trajectories/completed/*.json "$FRESH/.trajectories/completed/" || true +fi +if [ -d ${E2E_SCRATCH}/.trajectories/completed ]; then + find ${E2E_SCRATCH}/.trajectories/completed -mindepth 2 -name "*.json" -print0 2>/dev/null | while IFS= read -r -d '' f; do + rel=\${f#${E2E_SCRATCH}/.trajectories/completed/} + dir=$(dirname "$rel") + mkdir -p "$FRESH/.trajectories/completed/$dir" + cp "$f" "$FRESH/.trajectories/completed/$rel" + done +fi + +FILES_ON_DISK=$(find "$FRESH/.trajectories" -type f -name "*.json" 2>/dev/null | wc -l | tr -d ' ') +echo "=== fixture file count: $FILES_ON_DISK ===" + +cd "$FRESH" +LOG=$(mktemp) +node ${TRAJ_WORKTREE}/dist/cli/index.js compact --all --dry-run 2>&1 | tee "$LOG" +echo "" +echo "=== GATES ===" + +# Gate 1: NO ZodError / "Invalid trajectory" / generic schema-violation text +if grep -Eq "ZodError|Invalid trajectory|validation error" "$LOG"; then + echo "GATE_FAIL: schema validation errors present in output" + exit 1 +fi + +# Gate 2: reconcile's own structured log must show N/N where N equals +# files-on-disk. This is the real signal — compact's own status filter +# only shows \`completed\` trajectories, so the reconcile log is the only +# place that proves every file on disk was successfully read. 
+RECON=$(grep -Eo "reconciled [0-9]+/[0-9]+" "$LOG" | head -1 || true) +if [ -z "$RECON" ]; then + echo "GATE_FAIL: reconcile summary log not found — reconcile may not have run" + exit 1 +fi +RECON_ADDED=$(echo "$RECON" | grep -Eo "^reconciled [0-9]+" | grep -Eo "[0-9]+") +RECON_TOTAL=$(echo "$RECON" | grep -Eo "/[0-9]+" | grep -Eo "[0-9]+") +if [ "$RECON_ADDED" != "$RECON_TOTAL" ]; then + echo "GATE_FAIL: reconcile skipped files — $RECON (check reason counts in output)" + exit 1 +fi +if [ "$RECON_TOTAL" != "$FILES_ON_DISK" ]; then + echo "GATE_FAIL: reconcile scanned $RECON_TOTAL but disk has $FILES_ON_DISK" + exit 1 +fi + +# Gate 3: compact must surface at least one trajectory it can process. +# N may be small because workforce data has mostly status=abandoned; +# that's correct filter behavior. Just assert non-zero. +if grep -q "No trajectories found" "$LOG"; then + echo "GATE_FAIL: compact reported no trajectories" + exit 1 +fi +if ! grep -Eq "Compacting [0-9]+ trajectories" "$LOG"; then + echo "GATE_FAIL: no compaction header found" + exit 1 +fi +N=$(grep -Eo "Compacting [0-9]+ trajectories" "$LOG" | grep -Eo "[0-9]+" | head -1) +if [ "$N" -lt 1 ]; then + echo "GATE_FAIL: compact header present but count is 0" + exit 1 +fi + +echo "ALL_GATES_PASSED (reconciled $RECON, compacted $N)" +`.trim(), + captureOutput: true, + failOnError: true, + }) + + // ---------- Phase E: commit, push, open PRs ---------- + + .step("open-prs", { + agent: "lead", + dependsOn: ["e2e-compact", "impl-workforce"], + task: ` +E2E verification passed. Commit in the worktrees, push, open PRs. + +E2E output: +{{steps.e2e-compact.output}} + +Build/test summary: +{{steps.build-and-test.output}} + +Workforce impl result (may be SKIPPED): +{{steps.impl-workforce.output}} + +DECISION context: +{{steps.require-decision-file.output}} + +Steps (stop on first failure): + +1. Trajectories PR — in ${TRAJ_WORKTREE}: + a. 
git status — confirm branch is ${TRAJ_BRANCH} with expected modified
+     files (schema.ts, id.ts, types.ts, tests)
+  b. git add changed src/ and tests/ files by explicit name — no -A
+  c. git commit with a HEREDOC message that:
+     - Summarizes the schema relaxation per DECISION
+     - Calls out the read/write asymmetry fix if VALIDATE_ON_SAVE=yes
+     - Lists the new test cases
+     - Ends with the standard Claude Opus 4.6 Co-Authored-By trailer
+  d. git push -u origin ${TRAJ_BRANCH}
+  e. gh pr create with title <70 chars, Summary bullets, and a Test plan
+     section including the E2E gate output lines. Capture the PR URL.
+
+2. Workforce PR — ONLY if impl-workforce did NOT print SKIPPED. In
+   ${WORKFORCE_WORKTREE}:
+   a. git status — confirm branch is ${WORKFORCE_BRANCH}
+   b. git add changed files by explicit name
+   c. git commit with a clear message + Co-Authored-By trailer
+   d. git push -u origin ${WORKFORCE_BRANCH}
+   e. gh pr create — reference the trajectories PR URL in the body
+
+3. Write the PR URL(s) to ${PR_URLS_PATH}, one per line, no other
+   content. Create the file even if only one PR was opened (workforce
+   track was SKIPPED). Example file contents:
+   https://github.com/AgentWorkforce/trajectories/pull/123
+   https://github.com/AgentWorkforce/workforce/pull/456
+
+   NOTE: This file is the workflow's completion signal. The verification
+   gate watches it on disk — not stdout and not relay channel messages.
+   You can still print the URLs to your own output for logging, but it
+   is the FILE that matters.
+
+4. Do NOT merge either PR.
+
+Exit 0 on success.
+`.trim(),
+    verification: { type: "file_exists", value: PR_URLS_PATH },
+  })
+
+  .onError("fail-fast")
+  .run({
+    cwd: TRAJ_ROOT,
+    onEvent: (e) => {
+      const stepName = "stepName" in e ? e.stepName : "";
+      console.log(`[${e.type}]${stepName ? 
` ${stepName}` : ""}`); + }, + }); + + console.log("\nImplementation workflow complete:", result.status); +} + +main().catch((error) => { + console.error(error); + process.exitCode = 1; +}); diff --git a/workflows/fix-trajectory-schema/README.md b/workflows/fix-trajectory-schema/README.md new file mode 100644 index 0000000..2331010 --- /dev/null +++ b/workflows/fix-trajectory-schema/README.md @@ -0,0 +1,106 @@ +# fix-trajectory-schema workflow + +Comprehensive, end-to-end fix for the `trail compact --all` → "No trajectories +found" bug in `workforce/`. Two layers: + +1. Stale `index.json` (addressed by `FileStorage.reconcileIndex()` on branch + `fix/reconcile-stale-index`). +2. Schema mismatch — workforce writes `role` values and ID shapes the + trajectories zod schema rejects on read. `save()` doesn't validate, + `readTrajectoryFile()` does (asymmetry). + +## Running it + +One command. Master executor handles worktrees, chaining both workflows, +and clean-up instructions. + +```bash +./workflows/fix-trajectory-schema/run-all.sh +``` + +That script: +1. Preflight-checks `agent-relay`, `gh`, and base branches +2. Creates two fresh git worktrees (idempotent — recreates on each run): + - `/tmp/wt-traj-schema` off `fix/reconcile-stale-index`, new branch + `fix/trajectory-schema-comprehensive` + - `/tmp/wt-workforce-writer` off workforce `main`, new branch + `fix/trajectory-writer-align` +3. Symlinks `node_modules` into the trajectories worktree +4. Runs `01-investigate.ts` — Claude lead writes both + `investigation-findings.md` AND `DECISION.md` automatically +5. Runs `02-implement.ts` — codex workers implement in the worktrees, + deterministic E2E gate against a copy of real workforce data, Claude + lead commits, pushes, and opens both PRs +6. Leaves worktrees in place for inspection and prints the cleanup commands + +## Files in this directory + +| File | Purpose | +|---|---| +| `run-all.sh` | Master executor. One command kicks off the whole thing. 
| +| `01-investigate.ts` | Phase 1: 3-wave investigation DAG. Writes findings + DECISION. | +| `02-implement.ts` | Phase 2: 5-wave implementation DAG with E2E gate + PR open. | +| `investigation-findings.md` | Produced by phase 1 (not checked in). | +| `DECISION.md` | Produced by phase 1 (not checked in). Key=value format. | + +## DECISION.md format (auto-generated) + +``` +SCHEMA_POLICY=A|B|C|D +VALIDATE_ON_SAVE=yes|no +FIX_WORKFORCE_WRITER=yes|no +ID_REGEX= +ROLE_POLICY= +STATUS_POLICY= +SOURCE_POLICY= +NOTES= +``` + +The lead agent picks the option with the smallest blast radius that +unblocks the real workforce data. You see the decision when the master +script prints the file contents before phase 2. + +## Env overrides + +All optional — defaults work for this machine. + +| Var | Default | +|---|---| +| `TRAJ_ROOT` | `/Users/khaliqgant/Projects/AgentWorkforce/trajectories` | +| `WORKFORCE_ROOT` | `/Users/khaliqgant/Projects/AgentWorkforce/workforce` | +| `TRAJ_WORKTREE` | `/tmp/wt-traj-schema` | +| `WORKFORCE_WORKTREE` | `/tmp/wt-workforce-writer` | +| `TRAJ_BASE_BRANCH` | `fix/reconcile-stale-index` | +| `TRAJ_BRANCH` | `fix/trajectory-schema-comprehensive` | +| `WORKFORCE_BASE_BRANCH` | `main` | +| `WORKFORCE_BRANCH` | `fix/trajectory-writer-align` | +| `SKIP_INVESTIGATE=1` | reuse existing findings + DECISION from disk | +| `SKIP_WORKTREES=1` | reuse existing worktrees instead of recreating | + +`SKIP_INVESTIGATE=1` is useful when iterating on `02-implement.ts` without +paying for another investigation pass. + +## E2E gate (the real completion signal) + +Phase 2 wave 4 is a deterministic shell step that: +1. Copies the actual `workforce/.trajectories/` to `/tmp/traj-schema-e2e/` +2. Runs the worktree-built CLI: `node /dist/cli/index.js compact --all --dry-run` +3. 
Gates on 3 hard signals:
+   - No `ZodError` or "Invalid trajectory" in output
+   - Reconcile log reports `reconciled N/N`, with N equal to the on-disk file count
+   - `Compacting N trajectories` header with N ≥ 1 (and no "No trajectories found")
+
+If any gate fails, the PR step is never reached and the run exits non-zero.
+
+## Worktree lifecycle
+
+Worktrees are idempotent — every `run-all.sh` invocation tears down and
+recreates them. They're NOT auto-removed after success, so you can inspect
+the diff, check the branches, and manually clean up when you're ready:
+
+```bash
+git -C /Users/khaliqgant/Projects/AgentWorkforce/trajectories \
+  worktree remove /tmp/wt-traj-schema
+git -C /Users/khaliqgant/Projects/AgentWorkforce/workforce \
+  worktree remove /tmp/wt-workforce-writer
+```
diff --git a/workflows/fix-trajectory-schema/run-all.sh b/workflows/fix-trajectory-schema/run-all.sh
new file mode 100755
index 0000000..9e62a2f
--- /dev/null
+++ b/workflows/fix-trajectory-schema/run-all.sh
@@ -0,0 +1,149 @@
+#!/usr/bin/env bash
+#
+# run-all.sh — master executor for the trajectory-schema comprehensive fix
+#
+# Sets up git worktrees for both repos, runs 01-investigate, then
+# 02-implement. The lead agent in 01 auto-writes DECISION.md so there's no
+# human gate mid-pipeline. The human checkpoint is reviewing the PRs at
+# the end. 
+# +# Worktrees (both are fresh on every run — idempotent): +# /tmp/wt-traj-schema branched from fix/reconcile-stale-index +# new branch: fix/trajectory-schema-comprehensive +# /tmp/wt-workforce-writer branched from workforce's main +# new branch: fix/trajectory-writer-align +# +# Env overrides (all optional — sensible defaults): +# TRAJ_ROOT base trajectories checkout +# WORKFORCE_ROOT base workforce checkout +# TRAJ_WORKTREE trajectories worktree path +# WORKFORCE_WORKTREE workforce worktree path +# TRAJ_BASE_BRANCH branch to worktree off (default: fix/reconcile-stale-index) +# TRAJ_BRANCH new branch name in the worktree +# WORKFORCE_BASE_BRANCH branch to worktree off (default: main) +# WORKFORCE_BRANCH new branch name in the workforce worktree +# SKIP_INVESTIGATE=1 skip phase 1 (reuse existing findings/DECISION) +# SKIP_WORKTREES=1 reuse existing worktrees instead of recreating +# +# Usage: +# ./workflows/fix-trajectory-schema/run-all.sh + +set -euo pipefail + +# ---- config --------------------------------------------------------------- + +TRAJ_ROOT="${TRAJ_ROOT:-/Users/khaliqgant/Projects/AgentWorkforce/trajectories}" +WORKFORCE_ROOT="${WORKFORCE_ROOT:-/Users/khaliqgant/Projects/AgentWorkforce/workforce}" +TRAJ_WORKTREE="${TRAJ_WORKTREE:-/tmp/wt-traj-schema}" +WORKFORCE_WORKTREE="${WORKFORCE_WORKTREE:-/tmp/wt-workforce-writer}" +TRAJ_BASE_BRANCH="${TRAJ_BASE_BRANCH:-fix/reconcile-stale-index}" +TRAJ_BRANCH="${TRAJ_BRANCH:-fix/trajectory-schema-comprehensive}" +WORKFORCE_BASE_BRANCH="${WORKFORCE_BASE_BRANCH:-main}" +WORKFORCE_BRANCH="${WORKFORCE_BRANCH:-fix/trajectory-writer-align}" + +WORKFLOW_DIR="${TRAJ_ROOT}/workflows/fix-trajectory-schema" +INVESTIGATE="${WORKFLOW_DIR}/01-investigate.ts" +IMPLEMENT="${WORKFLOW_DIR}/02-implement.ts" + +# ---- helpers -------------------------------------------------------------- + +log() { + printf '\n\033[1;36m[run-all]\033[0m %s\n' "$*" +} + +die() { + printf '\n\033[1;31m[run-all] FATAL:\033[0m %s\n' "$*" >&2 + exit 1 +} 
+ +# ---- preflight ------------------------------------------------------------ + +log "Preflight checks" + +command -v agent-relay >/dev/null || die "agent-relay not on PATH" +command -v gh >/dev/null || die "gh (github cli) not on PATH" +[[ -d "$TRAJ_ROOT/.git" ]] || die "TRAJ_ROOT is not a git repo: $TRAJ_ROOT" +[[ -d "$WORKFORCE_ROOT/.git" ]] || die "WORKFORCE_ROOT is not a git repo: $WORKFORCE_ROOT" +[[ -f "$INVESTIGATE" ]] || die "missing: $INVESTIGATE" +[[ -f "$IMPLEMENT" ]] || die "missing: $IMPLEMENT" + +# Ensure the trajectories base branch actually exists locally +if ! git -C "$TRAJ_ROOT" show-ref --verify --quiet "refs/heads/${TRAJ_BASE_BRANCH}"; then + die "TRAJ_BASE_BRANCH=${TRAJ_BASE_BRANCH} does not exist in $TRAJ_ROOT" +fi + +# ---- worktree setup ------------------------------------------------------- + +setup_worktree() { + local repo="$1" wt="$2" base="$3" branch="$4" + + if [[ "${SKIP_WORKTREES:-0}" == "1" ]]; then + [[ -d "$wt" ]] || die "SKIP_WORKTREES=1 but $wt does not exist" + log "reusing worktree $wt" + return + fi + + # Tear down any existing worktree at the same path + if git -C "$repo" worktree list | grep -q "$wt"; then + log "removing stale worktree at $wt" + git -C "$repo" worktree remove --force "$wt" || true + fi + rm -rf "$wt" + + # Delete the branch if it already exists locally so -b can recreate it + if git -C "$repo" show-ref --verify --quiet "refs/heads/${branch}"; then + log "deleting stale local branch $branch in $repo" + git -C "$repo" branch -D "$branch" + fi + + log "creating worktree $wt (branch $branch from $base)" + git -C "$repo" worktree add "$wt" -b "$branch" "$base" +} + +setup_worktree "$TRAJ_ROOT" "$TRAJ_WORKTREE" "$TRAJ_BASE_BRANCH" "$TRAJ_BRANCH" +setup_worktree "$WORKFORCE_ROOT" "$WORKFORCE_WORKTREE" "$WORKFORCE_BASE_BRANCH" "$WORKFORCE_BRANCH" + +# Link node_modules into the trajectories worktree so build/test work +if [[ ! 
-e "$TRAJ_WORKTREE/node_modules" && -d "$TRAJ_ROOT/node_modules" ]]; then + log "symlinking node_modules into $TRAJ_WORKTREE" + ln -sf "$TRAJ_ROOT/node_modules" "$TRAJ_WORKTREE/node_modules" +fi + +# ---- export for downstream workflows -------------------------------------- + +export TRAJ_ROOT WORKFORCE_ROOT +export TRAJ_WORKTREE WORKFORCE_WORKTREE +export TRAJ_BRANCH WORKFORCE_BRANCH + +# ---- phase 1: investigate ------------------------------------------------- + +if [[ "${SKIP_INVESTIGATE:-0}" == "1" ]]; then + log "SKIP_INVESTIGATE=1 — reusing existing findings + DECISION" + [[ -f "$WORKFLOW_DIR/investigation-findings.md" ]] || die "findings missing" + [[ -f "$WORKFLOW_DIR/DECISION.md" ]] || die "DECISION.md missing" +else + log "running phase 1: investigate" + agent-relay run "$INVESTIGATE" +fi + +[[ -f "$WORKFLOW_DIR/investigation-findings.md" ]] || die "investigate did not produce findings" +[[ -f "$WORKFLOW_DIR/DECISION.md" ]] || die "investigate did not produce DECISION.md" + +log "DECISION.md contents:" +cat "$WORKFLOW_DIR/DECISION.md" + +# ---- phase 2: implement --------------------------------------------------- + +log "running phase 2: implement" +agent-relay run "$IMPLEMENT" + +# ---- done ----------------------------------------------------------------- + +log "all phases complete" +echo "" +echo "Trajectories worktree: $TRAJ_WORKTREE (branch $TRAJ_BRANCH)" +echo "Workforce worktree: $WORKFORCE_WORKTREE (branch $WORKFORCE_BRANCH)" +echo "" +echo "Worktrees are left in place for inspection. Remove with:" +echo " git -C $TRAJ_ROOT worktree remove $TRAJ_WORKTREE" +echo " git -C $WORKFORCE_ROOT worktree remove $WORKFORCE_WORKTREE"