From 0fe8d29b2f0f4a968d8bf9ddc82a41c1240306e4 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 19:04:47 +0300 Subject: [PATCH 01/31] Add mongo-migration-authoring project spec and design reference Scaffold project for adding a TypeScript migration authoring surface to the Mongo target. Factory functions produce operations directly, migration files are self-executing via a Migration base class, and strategies are plain function composition over the factories. --- .../assets/migration-authoring-design.md | 328 ++++++++++++++++++ projects/mongo-migration-authoring/spec.md | 168 +++++++++ 2 files changed, 496 insertions(+) create mode 100644 projects/mongo-migration-authoring/assets/migration-authoring-design.md create mode 100644 projects/mongo-migration-authoring/spec.md diff --git a/projects/mongo-migration-authoring/assets/migration-authoring-design.md b/projects/mongo-migration-authoring/assets/migration-authoring-design.md new file mode 100644 index 000000000..ec07fc3c8 --- /dev/null +++ b/projects/mongo-migration-authoring/assets/migration-authoring-design.md @@ -0,0 +1,328 @@ +# Migration Authoring — Design Proposal + +## Overview + +Migrations are authored as TypeScript files. Each migration file exports a class that extends `Migration` and defines a `plan()` method returning a list of operations. The file is self-contained — run it directly with `node migration.ts` to produce `ops.json`, or let the CLI import it. + +This is similar in spirit to Active Record Migrations, where a migration is a Ruby file that subclasses `ActiveRecord::Migration` and expresses schema changes as method calls (`add_column`, `remove_column`, etc.). Our equivalent is a `Migration` subclass whose `plan()` method composes TypeScript factory functions that produce serializable operation objects. + +The design has three layers: + +1. **Operation factories** — atomic primitives (`addColumn`, `setNotNull`, etc.), each producing a single operation +2. 
**Strategies** — plain functions that compose the primitives into correct multi-step sequences +3. **Migration class** — the file's export, providing the operation list and a self-executing entrypoint + +Everything downstream — the runner, attestation, the migration graph — consumes the same `ops.json` format. + +--- + +## Migration files + +A migration file exports a class and makes itself runnable: + +```typescript +import { Migration, addColumn } from "@prisma-next/target-postgres/migration" + +export default class extends Migration { + plan() { + return [ + addColumn("users", "display_name", { type: "varchar", nullable: true }), + ] + } +} + +Migration.run(import.meta) +``` + +`node migration.ts` calls `plan()`, serializes the result, and writes `ops.json`. `node migration.ts --dry-run` prints the operations without writing. The CLI can also import the file, get the class, and call `plan()` directly — `Migration.run()` detects that it's not the entrypoint and is a no-op. + +The `Migration` base class owns the lifecycle: argument parsing, serialization, output. The author's only job is to return operations from `plan()`. + +## Operation factories + +Each factory function produces a single `SqlMigrationPlanOperation` — a plain object with `id`, `label`, `operationClass`, and `precheck`/`execute`/`postcheck` arrays containing `{ description, sql }` steps. + +```typescript +export default class extends Migration { + plan() { + return [ + addColumn("users", "display_name", { type: "varchar", nullable: true }), + setNotNull("users", "display_name"), + ] + } +} +``` + +The library provides factories for common DDL: `addColumn`, `dropColumn`, `renameColumn`, `addTable`, `dropTable`, `setNotNull`, `dropNotNull`, `setDefault`, `dropDefault`, `addIndex`, `dropIndex`, `addUnique`, `addForeignKey`, `createEnumType`, and so on. + +Each factory returns a plain object. `JSON.stringify()` is the serializer. 
+ +## Strategies are functions + +Some schema changes require multiple operations in a specific order. A strategy is a regular TypeScript function that composes the atomic factories: + +```typescript +function nonNullBackfill(table: string, column: string, backfillExpr: string) { + return [ + addColumn(table, column, { nullable: true }), + dataTransform(`backfill-${table}-${column}`, { + check: (db) => /* ... */, + run: (db) => /* ... */, + }), + setNotNull(table, column), + ] +} +``` + +It takes parameters, calls the primitives, returns `SqlMigrationPlanOperation[]`. The ordering is correct by construction. + +A user writes a migration with it: + +```typescript +export default class extends Migration { + plan() { + return nonNullBackfill("users", "displayName", "'unnamed'") + } +} + +Migration.run(import.meta) +``` + +A `renameColumnSafe` strategy does expand-and-contract: + +```typescript +export default class extends Migration { + plan() { + return renameColumnSafe("users", "name", "full_name") + // Internally produces: addColumn("full_name") → copyData → dropColumn("name") + } +} + +Migration.run(import.meta) +``` + +Users write their own strategies the same way — compose the atomic primitives, return the same operation type. A `columnSplit` strategy, a `typeChange` strategy, a `tableExtraction` strategy — each one encapsulates the correct operation sequence for its scenario and asks the user only for the information gap (how to derive the new values from the old). + +## Data transforms + +A data transform is an operation that modifies data rather than schema. 
It has a name (its invariant identity for ledger recording and routing), plus a check/run pair: + +```typescript +export default class extends Migration { + plan() { + return [ + addColumn("users", "first_name", { type: "varchar", nullable: true }), + addColumn("users", "last_name", { type: "varchar", nullable: true }), + dataTransform("split-user-name", { + check: (db) => db.query("SELECT 1 FROM users WHERE first_name IS NULL LIMIT 1"), + run: (db) => db.query( + "UPDATE users SET first_name = split_part(name, ' ', 1), " + + "last_name = split_part(name, ' ', 2) WHERE first_name IS NULL" + ), + }), + setNotNull("users", "first_name"), + setNotNull("users", "last_name"), + dropColumn("users", "name"), + ] + } +} + +Migration.run(import.meta) +``` + +The name (`"split-user-name"`) is the invariant. The ledger records it on successful completion; routing can require it via environment refs. Check runs before run (retry safety) and after run (validation). Data transforms serialize to JSON ASTs and appear in the operation chain wherever they need to. + +### A strategy for column splits + +The manual composition above is verbose. A `columnSplit` strategy encapsulates the pattern: + +```typescript +export default class extends Migration { + plan() { + return columnSplit("users", "name", ["first_name", "last_name"], (db) => + db.users.update({ + firstName: expr("split_part(name, ' ', 1)"), + lastName: expr("split_part(name, ' ', 2)"), + }) + ) + } +} + +Migration.run(import.meta) +``` + +`columnSplit` internally produces the same six operations — add columns, backfill via the user's expression, tighten constraints, drop the old column. The ordering is correct by construction. The user provides only the derivation logic. + +## The planner + +The planner's job is scenario detection and strategy selection: + +1. Detect which scenario applies (column added as NOT NULL without default, non-widening type change, etc.) +2. Pick the strategy function +3. 
Call it with the right arguments + +The planner calls the exact same functions that users call when authoring migrations by hand. It can either call the strategy directly to produce operations, or scaffold a `migration.ts` that calls it — the result is the same. + +Each strategy handles its own ordering internally. Adding support for a new scenario means writing a new strategy function. + +## Typed query builder access mid-chain + +If a data transform appears partway through an operation chain, the user may want typed query builder access against the schema state at that point. This is a hard problem in general — it would require manipulating TypeScript types through an arbitrary preceding operation sequence. + +The practical answer for v1: the user provides an intermediate contract definition. The tools for this already exist — copy the contract authoring surface (PSL or TS builders) into the migration directory, modify it to describe the schema at the point you care about, and emit it: + +``` +migrations/0003-split-name/ +├── migration.ts # the migration itself +├── intermediate.psl # schema at the mid-point (after additive ops) +├── intermediate.json # emitted contract +└── intermediate.d.ts # emitted types +``` + +Then import it: + +```typescript +import type { Contract } from "./intermediate.d" +import intermediateJson from "./intermediate.json" + +export default class extends Migration { + plan() { + return [ + addColumn("users", "first_name", { type: "varchar", nullable: true }), + addColumn("users", "last_name", { type: "varchar", nullable: true }), + dataTransform({ + contract: intermediateJson, + check: (db) => db.users.findFirst({ where: { firstName: null } }), + run: (db) => db.users.update({ + firstName: expr("split_part(name, ' ', 1)"), + lastName: expr("split_part(name, ' ', 2)"), + }), + }), + setNotNull("users", "first_name"), + setNotNull("users", "last_name"), + dropColumn("users", "name"), + ] + } +} + +Migration.run(import.meta) +``` + 
+Multiple intermediate contracts are supported — one per data transform if a complex migration needs them. + +## Transactions + +The operation chain can carry transaction annotations. A `transaction()` wrapper tells the runner to execute a sequence of operations atomically: + +```typescript +export default class extends Migration { + plan() { + return transaction([ + addColumn("users", "first_name", { type: "varchar", nullable: true }), + addColumn("users", "last_name", { type: "varchar", nullable: true }), + dataTransform({ /* ... */ }), + setNotNull("users", "first_name"), + setNotNull("users", "last_name"), + dropColumn("users", "name"), + ]) + } +} + +Migration.run(import.meta) +``` + +Transaction boundaries are the user's decision — they know whether the table is small enough for a single transaction or whether they need to break it up. + +## Multi-migration deployments + +In production, you'll often want to deploy application updates between migration steps. A column split is conceptually one change, but in a blue-green / rolling deployment it happens over days: + +1. **Deploy migration 1** — add nullable columns +2. **Deploy app** — application dual-writes to old and new columns +3. **Deploy migration 2** — backfill existing rows, tighten constraints, drop old column +4. **Deploy app** — application reads from new columns + +Each step is a separate migration file, a separate edge in the graph, applied at the user's pace. The intermediate state (nullable columns exist, not yet backfilled) is a real deployment state — the application may run against it for hours. 
+ +```typescript +// Migration 1 (deploy first) +export default class extends Migration { + plan() { + return [ + addColumn("users", "first_name", { type: "varchar", nullable: true }), + addColumn("users", "last_name", { type: "varchar", nullable: true }), + ] + } +} + +Migration.run(import.meta) +``` + +```typescript +// Migration 2 (deploy after app update) +export default class extends Migration { + plan() { + return [ + dataTransform("split-user-name", { + check: (db) => db.query("SELECT 1 FROM users WHERE first_name IS NULL LIMIT 1"), + run: (db) => db.query( + "UPDATE users SET first_name = split_part(name, ' ', 1), " + + "last_name = split_part(name, ' ', 2) WHERE first_name IS NULL" + ), + }), + setNotNull("users", "first_name"), + setNotNull("users", "last_name"), + dropColumn("users", "name"), + ] + } +} + +Migration.run(import.meta) +``` + +The user knows their deployment process; they decide the granularity. + +## Serialization model + +Operations are plain objects. `SqlMigrationPlanOperation` is an interface with `id`, `label`, `operationClass`, plus `precheck`/`execute`/`postcheck` arrays containing `{ description, sql }` steps. Factory functions return these directly. `JSON.stringify(operations)` is the serializer. + +The Mongo migration system already follows this pattern — command classes (`CreateIndexCommand`, `DropIndexCommand`, etc.) store everything as public readonly properties, and `JSON.stringify()` produces the on-disk format directly: + +```typescript +function serializeMongoOps(ops) { + return JSON.stringify(ops, null, 2); +} +``` + +The SQL case is the same principle with even simpler data — plain interfaces rather than class instances. + +## Self-contained files + +The migration file is self-contained. Two things happen in every file: + +1. **Declare** — export a `Migration` subclass with a `plan()` method that returns operations +2. 
**Run** — `Migration.run(import.meta)` makes the file directly executable + +`node migration.ts` produces `ops.json`. The CLI can also import the file, get the class, and call `plan()` — `Migration.run()` detects it's not the entrypoint and is a no-op. The `Migration` base class handles argument parsing (`--dry-run`, `--help`), serialization, and output. + +The file is a pure declaration of what the migration produces, plus one line that makes it runnable. The framework owns everything else. + +--- + +## Summary + +1. **Migration class** — each file exports a `Migration` subclass with a `plan()` method. `Migration.run(import.meta)` makes it directly executable. +2. **Operation factories** — atomic primitives (`addColumn`, `dropColumn`, etc.) that each produce a single `SqlMigrationPlanOperation` +3. **Strategies as functions** — regular TypeScript functions that compose the primitives into correct operation sequences (`nonNullBackfill`, `columnSplit`, `typeChange`). Users write their own the same way. The planner calls the same functions. +4. **Data transforms** — operations in the chain with check/run semantics and an invariant name for ledger/routing +5. **Direct serialization** — operations are plain objects that serialize to JSON via `JSON.stringify` +6. **Intermediate contracts** provided by the user when typed query builder access is needed mid-chain +7. **Transaction annotations** as a composable primitive +8. **Multi-migration deployments** when the user needs app updates between steps + +The planner detects scenarios and calls strategy functions. The strategy encapsulates ordering. The runner consumes `ops.json`. Everything composes through plain functions and plain JSON. + +## Open questions + +1. **What's the minimal strategy set for VP1?** Probably just the manual composition path (raw operations) to prove the model, plus one strategy (`columnSplit` or `nonNullBackfill`) to demonstrate the pattern. +2. 
**Should the planner ever produce multi-migration sequences automatically?** Or is splitting into multiple migrations always a manual decision? Leaning toward manual — the planner scaffolds a single migration, the user splits when their deployment process requires it. diff --git a/projects/mongo-migration-authoring/spec.md b/projects/mongo-migration-authoring/spec.md new file mode 100644 index 000000000..4aec6b7ec --- /dev/null +++ b/projects/mongo-migration-authoring/spec.md @@ -0,0 +1,168 @@ +# Summary + +Users can author Mongo migrations by hand in TypeScript. A migration file exports a class, runs as a standalone script, and produces `ops.json` that the existing runner consumes unchanged. + +# Description + +## What a migration file looks like + +```typescript +import { Migration, createIndex, createCollection } + from "@prisma-next/target-mongo/migration" + +export default class extends Migration { + plan() { + return [ + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + validationLevel: "strict", + }), + createIndex("users", [{ field: "email", direction: 1 }], { unique: true }), + ] + } +} + +Migration.run(import.meta) +``` + +`node migration.ts` produces `ops.json`. The existing `MongoMigrationRunner` consumes it unchanged. That's the entire authoring workflow. + +## How it works + +The file has two parts: + +1. **The class** — exports a `Migration` subclass with a `plan()` method. `plan()` returns an array of `MongoMigrationPlanOperation` objects built by factory functions. +2. **The run line** — `Migration.run(import.meta)` makes the file self-executing. When run directly (`node migration.ts`), it calls `plan()`, serializes the result, and writes `ops.json`. When imported by the CLI or a test, it's a no-op. 
+## Operation factories
+
+Each factory function produces a single `MongoMigrationPlanOperation` — a plain object containing the operation's identity, its DDL command, and its pre/postchecks:
+
+- `createIndex(collection, keys, options?)` — adds an index with a precheck that it doesn't already exist
+- `dropIndex(collection, keys)` — removes an index with a precheck that it exists
+- `createCollection(collection, options?)` — creates a collection with optional validator, collation, capped settings, etc.
+- `dropCollection(collection)` — drops a collection
+- `collMod(collection, options)` — modifies collection options (validator, changeStreamPreAndPostImages, etc.)
+
+The factories produce the same output as the existing `MongoMigrationPlanner`. The runner cannot distinguish between planner-generated and hand-authored operations.
+
+## Composing operations into strategies
+
+A strategy is a plain function that composes the atomic factories:
+
+```typescript
+function validatedCollection(
+  name: string,
+  schema: Record<string, unknown>,
+  indexes: Array<{ keys: MongoIndexKey[]; unique?: boolean }>,
+) {
+  return [
+    createCollection(name, {
+      validator: { $jsonSchema: schema },
+      validationLevel: "strict",
+      validationAction: "error",
+    }),
+    ...indexes.map(idx => createIndex(name, idx.keys, { unique: idx.unique })),
+  ]
+}
+```
+
+Used in a migration:
+
+```typescript
+export default class extends Migration {
+  plan() {
+    return validatedCollection("users",
+      { required: ["email", "name"] },
+      [{ keys: [{ field: "email", direction: 1 }], unique: true }],
+    )
+  }
+}
+```
+
+Strategies are regular functions. Users write their own the same way — compose factories, return operations. The planner could call the same functions to produce its output (though refactoring the planner is out of scope here).
+
+## Serialization
+
+The Mongo command classes (`CreateIndexCommand`, `DropIndexCommand`, etc.) store all data as public readonly properties. 
`JSON.stringify()` serializes them directly — the existing `serializeMongoOps` is literally `JSON.stringify(ops, null, 2)`. The factory functions produce the same command class instances the planner does, so serialization works identically. + +## Why Mongo + +Mongo is a good starting point for this pattern because: + +- The operation set is small and well-defined (5 DDL commands) +- The command classes already serialize via `JSON.stringify` +- The planner already produces `MongoMigrationPlanOperation[]` directly — the architecture is already aligned +- It's a self-contained family, so this work doesn't touch other targets + +The pattern is designed to generalize to SQL migrations, where factory functions like `addColumn`, `setNotNull`, etc. would produce `SqlMigrationPlanOperation` objects the same way. See [the design proposal](assets/migration-authoring-design.md) for the full cross-target vision. + +# Requirements + +## Functional Requirements + +- Factory functions for each Mongo DDL operation (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod`) that produce `MongoMigrationPlanOperation` objects with correct prechecks, commands, and postchecks. Factory functions and planner are co-located in `packages/3-mongo-target/1-mongo-target`, exported from `@prisma-next/target-mongo/migration`. 
+- A `Migration` base class with: + - An abstract `plan()` method returning `MongoMigrationPlanOperation[]` + - A static `Migration.run(import.meta)` method that handles self-execution (entrypoint detection, serialization, file writing) + - `--dry-run` flag support (print operations without writing) + - `--help` flag support +- At least one compound strategy function demonstrating composition of multiple factories +- Factory output that serializes identically to planner output — the runner consumes both without distinction + +## Non-Functional Requirements + +- The `Migration` base class interface is target-agnostic (so the SQL target can provide its own version later). **Assumption:** a generic `Migration` base in the framework, with a Mongo-specific alias that fixes the type parameter. +- No changes to the existing `MongoMigrationPlanner` or `MongoMigrationRunner` + +## Non-goals + +- Rewriting the Mongo planner to use factory functions internally — the planner works; refactoring it is separate +- Data transform support for Mongo migrations +- CLI integration (`prisma migration new/plan/verify` for Mongo) — future work +- Scaffolding tooling (auto-generating `migration.ts` from planner output) — future work +- Transaction support for Mongo migrations + +# Acceptance Criteria + +## Authoring a migration + +- [ ] A migration file with `export default class extends Migration` and factory function calls in `plan()` type-checks and runs with `node migration.ts` +- [ ] The file produces `ops.json` in its own directory +- [ ] Running with `--dry-run` prints operations to stdout without writing `ops.json` +- [ ] Running with `--help` prints usage information + +## Importing a migration + +- [ ] When imported (not run directly), `Migration.run(import.meta)` is a no-op +- [ ] The default export class can be instantiated and `plan()` called directly (for CLI and test use) + +## Operation correctness + +- [ ] Each factory (`createIndex`, `dropIndex`, `createCollection`, 
`dropCollection`, `collMod`) produces a `MongoMigrationPlanOperation` with the correct prechecks, commands, and postchecks +- [ ] Factory output serializes identically to planner output for the same operation (verified by test comparing JSON output) +- [ ] Round-trip works: factory → `JSON.stringify` → `deserializeMongoOps` → runner execution + +## Composition + +- [ ] At least one compound strategy function composes multiple factories and returns a flat operation list +- [ ] The strategy is a plain exported function — users compose operations the same way + +# References + +- [Migration Authoring Design Proposal](assets/migration-authoring-design.md) +- Existing Mongo migration system: + - `packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts` — planner + - `packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts` — serializer/deserializer + - `packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts` — runner +- Mongo query AST (command classes, filter expressions): + - `packages/2-mongo-family/4-query/query-ast/src/ddl-commands.ts` + - `packages/2-mongo-family/4-query/query-ast/src/inspection-commands.ts` + - `packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts` + - `packages/2-mongo-family/4-query/query-ast/src/migration-operation-types.ts` + +# Decisions + +1. **Factory functions and planner co-located in `packages/3-mongo-target/1-mongo-target`.** Exported from `@prisma-next/target-mongo/migration`. Users import everything from one place: `import { Migration, createIndex } from "@prisma-next/target-mongo/migration"`. + +2. **Factory signatures are an implementation detail.** The only constraint is that the planner can depend on the factory functions it uses. Since both live in the same package, the signatures can evolve freely. 
From 199e30717e0229dce890ae42f43663a14709fdd7 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 19:37:26 +0300 Subject: [PATCH 02/31] Add Mongo migration factory functions Five factory functions (createIndex, dropIndex, createCollection, dropCollection, collMod) that produce MongoMigrationPlanOperation objects identical to what the planner generates. Extracted from the planner's inline logic, reusing existing AST primitives and helpers. Exported from @prisma-next/target-mongo/migration. --- .../1-mongo-target/package.json | 1 + .../src/core/migration-factories.ts | 171 ++++++++ .../1-mongo-target/src/exports/migration.ts | 7 + .../test/migration-factories.test.ts | 405 ++++++++++++++++++ .../1-mongo-target/tsdown.config.ts | 7 +- 5 files changed, 590 insertions(+), 1 deletion(-) create mode 100644 packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts create mode 100644 packages/3-mongo-target/1-mongo-target/src/exports/migration.ts create mode 100644 packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts diff --git a/packages/3-mongo-target/1-mongo-target/package.json b/packages/3-mongo-target/1-mongo-target/package.json index 1e21b0172..b3dc8e8bc 100644 --- a/packages/3-mongo-target/1-mongo-target/package.json +++ b/packages/3-mongo-target/1-mongo-target/package.json @@ -36,6 +36,7 @@ "exports": { "./codec-types": "./dist/codec-types.mjs", "./control": "./dist/control.mjs", + "./migration": "./dist/migration.mjs", "./pack": "./dist/pack.mjs", "./package.json": "./package.json" }, diff --git a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts new file mode 100644 index 000000000..5cb583c88 --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts @@ -0,0 +1,171 @@ +import type { MongoIndexKey } from '@prisma-next/mongo-query-ast/control'; +import { + buildIndexOpId, + CollModCommand, + type 
CollModOptions,
+  CreateCollectionCommand,
+  type CreateCollectionOptions,
+  CreateIndexCommand,
+  type CreateIndexOptions,
+  DropCollectionCommand,
+  DropIndexCommand,
+  defaultMongoIndexName,
+  keysToKeySpec,
+  ListCollectionsCommand,
+  ListIndexesCommand,
+  MongoAndExpr,
+  MongoFieldFilter,
+  type MongoMigrationPlanOperation,
+} from '@prisma-next/mongo-query-ast/control';
+
+function formatKeys(keys: ReadonlyArray<MongoIndexKey>): string {
+  return keys.map((k) => `${k.field}:${k.direction}`).join(', ');
+}
+
+function isTextIndex(keys: ReadonlyArray<MongoIndexKey>): boolean {
+  return keys.some((k) => k.direction === 'text');
+}
+
+function keyFilter(collection: string, keys: ReadonlyArray<MongoIndexKey>) {
+  return isTextIndex(keys)
+    ? MongoFieldFilter.eq('key._fts', 'text')
+    : MongoFieldFilter.eq('key', keysToKeySpec(keys));
+}
+
+export function createIndex(
+  collection: string,
+  keys: ReadonlyArray<MongoIndexKey>,
+  options?: CreateIndexOptions,
+): MongoMigrationPlanOperation {
+  const name = defaultMongoIndexName(keys);
+  const filter = keyFilter(collection, keys);
+  const fullFilter = options?.unique
+    ? 
MongoAndExpr.of([filter, MongoFieldFilter.eq('unique', true)])
+    : filter;
+
+  return {
+    id: buildIndexOpId('create', collection, keys),
+    label: `Create index on ${collection} (${formatKeys(keys)})`,
+    operationClass: 'additive',
+    precheck: [
+      {
+        description: `index does not already exist on ${collection}`,
+        source: new ListIndexesCommand(collection),
+        filter,
+        expect: 'notExists',
+      },
+    ],
+    execute: [
+      {
+        description: `create index on ${collection}`,
+        command: new CreateIndexCommand(collection, keys, {
+          ...options,
+          unique: options?.unique || undefined,
+          name,
+        }),
+      },
+    ],
+    postcheck: [
+      {
+        description: `index exists on ${collection}`,
+        source: new ListIndexesCommand(collection),
+        filter: fullFilter,
+        expect: 'exists',
+      },
+    ],
+  };
+}
+
+export function dropIndex(
+  collection: string,
+  keys: ReadonlyArray<MongoIndexKey>,
+): MongoMigrationPlanOperation {
+  const indexName = defaultMongoIndexName(keys);
+  const filter = keyFilter(collection, keys);
+
+  return {
+    id: buildIndexOpId('drop', collection, keys),
+    label: `Drop index on ${collection} (${formatKeys(keys)})`,
+    operationClass: 'destructive',
+    precheck: [
+      {
+        description: `index exists on ${collection}`,
+        source: new ListIndexesCommand(collection),
+        filter,
+        expect: 'exists',
+      },
+    ],
+    execute: [
+      {
+        description: `drop index on ${collection}`,
+        command: new DropIndexCommand(collection, indexName),
+      },
+    ],
+    postcheck: [
+      {
+        description: `index no longer exists on ${collection}`,
+        source: new ListIndexesCommand(collection),
+        filter,
+        expect: 'notExists',
+      },
+    ],
+  };
+}
+
+export function createCollection(
+  collection: string,
+  options?: CreateCollectionOptions,
+): MongoMigrationPlanOperation {
+  return {
+    id: `collection.${collection}.create`,
+    label: `Create collection ${collection}`,
+    operationClass: 'additive',
+    precheck: [
+      {
+        description: `collection ${collection} does not exist`,
+        source: new ListCollectionsCommand(),
+        filter: MongoFieldFilter.eq('name', 
collection), + expect: 'notExists', + }, + ], + execute: [ + { + description: `create collection ${collection}`, + command: new CreateCollectionCommand(collection, options), + }, + ], + postcheck: [], + }; +} + +export function dropCollection(collection: string): MongoMigrationPlanOperation { + return { + id: `collection.${collection}.drop`, + label: `Drop collection ${collection}`, + operationClass: 'destructive', + precheck: [], + execute: [ + { + description: `drop collection ${collection}`, + command: new DropCollectionCommand(collection), + }, + ], + postcheck: [], + }; +} + +export function collMod(collection: string, options: CollModOptions): MongoMigrationPlanOperation { + return { + id: `collMod.${collection}`, + label: `Modify collection ${collection}`, + operationClass: 'destructive', + precheck: [], + execute: [ + { + description: `modify collection ${collection}`, + command: new CollModCommand(collection, options), + }, + ], + postcheck: [], + }; +} diff --git a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts new file mode 100644 index 000000000..9d214e66b --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts @@ -0,0 +1,7 @@ +export { + collMod, + createCollection, + createIndex, + dropCollection, + dropIndex, +} from '../core/migration-factories'; diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts new file mode 100644 index 000000000..d873cf2b1 --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts @@ -0,0 +1,405 @@ +import { + buildIndexOpId, + CollModCommand, + CreateCollectionCommand, + CreateIndexCommand, + DropCollectionCommand, + DropIndexCommand, + defaultMongoIndexName, + keysToKeySpec, + ListCollectionsCommand, + ListIndexesCommand, + MongoAndExpr, + MongoFieldFilter, + type 
MongoMigrationPlanOperation, +} from '@prisma-next/mongo-query-ast/control'; +import { describe, expect, it } from 'vitest'; +import { + collMod, + createCollection, + createIndex, + dropCollection, + dropIndex, +} from '../src/core/migration-factories'; + +describe('createIndex', () => { + const keys = [{ field: 'email', direction: 1 as const }]; + + it('produces correct operation structure', () => { + const op = createIndex('users', keys); + + expect(op.id).toBe(buildIndexOpId('create', 'users', keys)); + expect(op.label).toBe('Create index on users (email:1)'); + expect(op.operationClass).toBe('additive'); + }); + + it('includes precheck that index does not exist', () => { + const op = createIndex('users', keys); + + expect(op.precheck).toHaveLength(1); + expect(op.precheck[0]!.expect).toBe('notExists'); + expect(op.precheck[0]!.source).toBeInstanceOf(ListIndexesCommand); + expect((op.precheck[0]!.source as ListIndexesCommand).collection).toBe('users'); + expect(op.precheck[0]!.filter).toBeInstanceOf(MongoFieldFilter); + }); + + it('includes execute with CreateIndexCommand', () => { + const op = createIndex('users', keys, { unique: true }); + + expect(op.execute).toHaveLength(1); + const cmd = op.execute[0]!.command as CreateIndexCommand; + expect(cmd).toBeInstanceOf(CreateIndexCommand); + expect(cmd.collection).toBe('users'); + expect(cmd.keys).toEqual(keys); + expect(cmd.unique).toBe(true); + expect(cmd.name).toBe(defaultMongoIndexName(keys)); + }); + + it('includes postcheck that index exists', () => { + const op = createIndex('users', keys); + + expect(op.postcheck).toHaveLength(1); + expect(op.postcheck[0]!.expect).toBe('exists'); + }); + + it('adds unique filter to postcheck when unique: true', () => { + const op = createIndex('users', keys, { unique: true }); + + expect(op.postcheck[0]!.filter).toBeInstanceOf(MongoAndExpr); + const andExpr = op.postcheck[0]!.filter as MongoAndExpr; + expect(andExpr.exprs).toHaveLength(2); + }); + + it('uses key._fts 
filter for text indexes', () => { + const textKeys = [{ field: 'content', direction: 'text' as const }]; + const op = createIndex('posts', textKeys); + + const preFilter = op.precheck[0]!.filter as MongoFieldFilter; + expect(preFilter.field).toBe('key._fts'); + expect(preFilter.value).toBe('text'); + }); + + it('passes through all index options', () => { + const op = createIndex('users', keys, { + sparse: true, + expireAfterSeconds: 3600, + collation: { locale: 'en' }, + }); + + const cmd = op.execute[0]!.command as CreateIndexCommand; + expect(cmd.sparse).toBe(true); + expect(cmd.expireAfterSeconds).toBe(3600); + expect(cmd.collation).toEqual({ locale: 'en' }); + }); + + it('uses key spec filter for non-text indexes', () => { + const op = createIndex('users', keys); + + const preFilter = op.precheck[0]!.filter as MongoFieldFilter; + expect(preFilter.field).toBe('key'); + expect(preFilter.value).toEqual(keysToKeySpec(keys)); + }); + + it('handles compound keys', () => { + const compoundKeys = [ + { field: 'email', direction: 1 as const }, + { field: 'name', direction: -1 as const }, + ]; + const op = createIndex('users', compoundKeys); + + expect(op.label).toBe('Create index on users (email:1, name:-1)'); + expect(op.id).toBe(buildIndexOpId('create', 'users', compoundKeys)); + }); +}); + +describe('dropIndex', () => { + const keys = [{ field: 'email', direction: 1 as const }]; + + it('produces correct operation structure', () => { + const op = dropIndex('users', keys); + + expect(op.id).toBe(buildIndexOpId('drop', 'users', keys)); + expect(op.label).toBe('Drop index on users (email:1)'); + expect(op.operationClass).toBe('destructive'); + }); + + it('includes precheck that index exists', () => { + const op = dropIndex('users', keys); + + expect(op.precheck).toHaveLength(1); + expect(op.precheck[0]!.expect).toBe('exists'); + }); + + it('includes execute with DropIndexCommand using derived name', () => { + const op = dropIndex('users', keys); + + const cmd = 
op.execute[0]!.command as DropIndexCommand; + expect(cmd).toBeInstanceOf(DropIndexCommand); + expect(cmd.collection).toBe('users'); + expect(cmd.name).toBe(defaultMongoIndexName(keys)); + }); + + it('includes postcheck that index no longer exists', () => { + const op = dropIndex('users', keys); + + expect(op.postcheck).toHaveLength(1); + expect(op.postcheck[0]!.expect).toBe('notExists'); + }); + + it('uses key._fts filter for text indexes', () => { + const textKeys = [{ field: 'content', direction: 'text' as const }]; + const op = dropIndex('posts', textKeys); + + const preFilter = op.precheck[0]!.filter as MongoFieldFilter; + expect(preFilter.field).toBe('key._fts'); + }); +}); + +describe('createCollection', () => { + it('produces correct operation structure', () => { + const op = createCollection('users'); + + expect(op.id).toBe('collection.users.create'); + expect(op.label).toBe('Create collection users'); + expect(op.operationClass).toBe('additive'); + }); + + it('includes precheck that collection does not exist', () => { + const op = createCollection('users'); + + expect(op.precheck).toHaveLength(1); + expect(op.precheck[0]!.expect).toBe('notExists'); + expect(op.precheck[0]!.source).toBeInstanceOf(ListCollectionsCommand); + const filter = op.precheck[0]!.filter as MongoFieldFilter; + expect(filter.field).toBe('name'); + expect(filter.value).toBe('users'); + }); + + it('includes execute with CreateCollectionCommand', () => { + const op = createCollection('users'); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd).toBeInstanceOf(CreateCollectionCommand); + expect(cmd.collection).toBe('users'); + }); + + it('passes through validator options', () => { + const op = createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + validationAction: 'error', + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.validator).toEqual({ $jsonSchema: { required: 
['email'] } }); + expect(cmd.validationLevel).toBe('strict'); + expect(cmd.validationAction).toBe('error'); + }); + + it('passes through capped options', () => { + const op = createCollection('logs', { + capped: true, + size: 1000000, + max: 5000, + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.capped).toBe(true); + expect(cmd.size).toBe(1000000); + expect(cmd.max).toBe(5000); + }); + + it('passes through timeseries options', () => { + const op = createCollection('metrics', { + timeseries: { timeField: 'timestamp', metaField: 'source', granularity: 'minutes' }, + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.timeseries).toEqual({ + timeField: 'timestamp', + metaField: 'source', + granularity: 'minutes', + }); + }); + + it('passes through collation and clusteredIndex', () => { + const op = createCollection('users', { + collation: { locale: 'en', strength: 2 }, + clusteredIndex: { key: { _id: 1 }, unique: true }, + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.collation).toEqual({ locale: 'en', strength: 2 }); + expect(cmd.clusteredIndex).toEqual({ key: { _id: 1 }, unique: true }); + }); + + it('passes through changeStreamPreAndPostImages', () => { + const op = createCollection('events', { + changeStreamPreAndPostImages: { enabled: true }, + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.changeStreamPreAndPostImages).toEqual({ enabled: true }); + }); + + it('has empty postcheck', () => { + const op = createCollection('users'); + expect(op.postcheck).toHaveLength(0); + }); +}); + +describe('dropCollection', () => { + it('produces correct operation structure', () => { + const op = dropCollection('users'); + + expect(op.id).toBe('collection.users.drop'); + expect(op.label).toBe('Drop collection users'); + expect(op.operationClass).toBe('destructive'); + }); + + it('includes execute with DropCollectionCommand', () => { + 
const op = dropCollection('users'); + + const cmd = op.execute[0]!.command as DropCollectionCommand; + expect(cmd).toBeInstanceOf(DropCollectionCommand); + expect(cmd.collection).toBe('users'); + }); + + it('has empty precheck and postcheck', () => { + const op = dropCollection('users'); + expect(op.precheck).toHaveLength(0); + expect(op.postcheck).toHaveLength(0); + }); +}); + +describe('collMod', () => { + it('produces correct operation structure', () => { + const op = collMod('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + }); + + expect(op.id).toBe('collMod.users'); + expect(op.label).toBe('Modify collection users'); + expect(op.operationClass).toBe('destructive'); + }); + + it('includes execute with CollModCommand', () => { + const op = collMod('users', { + validator: { $jsonSchema: { required: ['email'] } }, + }); + + const cmd = op.execute[0]!.command as CollModCommand; + expect(cmd).toBeInstanceOf(CollModCommand); + expect(cmd.collection).toBe('users'); + expect(cmd.validator).toEqual({ $jsonSchema: { required: ['email'] } }); + }); + + it('passes through changeStreamPreAndPostImages', () => { + const op = collMod('users', { + changeStreamPreAndPostImages: { enabled: true }, + }); + + const cmd = op.execute[0]!.command as CollModCommand; + expect(cmd.changeStreamPreAndPostImages).toEqual({ enabled: true }); + }); + + it('has empty precheck and postcheck', () => { + const op = collMod('users', { validator: {} }); + expect(op.precheck).toHaveLength(0); + expect(op.postcheck).toHaveLength(0); + }); +}); + +describe('serialization round-trip', () => { + it('createIndex round-trips through JSON', () => { + const op = createIndex('users', [{ field: 'email', direction: 1 }], { unique: true }); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.id).toBe(op.id); + expect(json.label).toBe(op.label); + expect(json.operationClass).toBe(op.operationClass); + expect(json.precheck).toHaveLength(1); + 
expect(json.execute).toHaveLength(1); + expect(json.postcheck).toHaveLength(1); + expect(json.execute[0].command.kind).toBe('createIndex'); + }); + + it('dropIndex round-trips through JSON', () => { + const op = dropIndex('users', [{ field: 'email', direction: 1 }]); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.execute[0].command.kind).toBe('dropIndex'); + expect(json.precheck[0].source.kind).toBe('listIndexes'); + }); + + it('createCollection round-trips through JSON', () => { + const op = createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + }); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.execute[0].command.kind).toBe('createCollection'); + expect(json.execute[0].command.validator).toEqual({ $jsonSchema: { required: ['email'] } }); + }); + + it('dropCollection round-trips through JSON', () => { + const op = dropCollection('users'); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.execute[0].command.kind).toBe('dropCollection'); + }); + + it('collMod round-trips through JSON', () => { + const op = collMod('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + }); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.execute[0].command.kind).toBe('collMod'); + expect(json.execute[0].command.validationLevel).toBe('strict'); + }); + + it('factory output matches planner-equivalent createIndex structure', () => { + const keys = [{ field: 'email', direction: 1 as const }]; + const factoryOp = createIndex('users', keys, { unique: true }); + + const plannerOp: MongoMigrationPlanOperation = { + id: buildIndexOpId('create', 'users', keys), + label: 'Create index on users (email:1)', + operationClass: 'additive', + precheck: [ + { + description: 'index does not already exist on users', + source: new ListIndexesCommand('users'), + filter: MongoFieldFilter.eq('key', keysToKeySpec(keys)), + expect: 'notExists', + }, + 
], + execute: [ + { + description: 'create index on users', + command: new CreateIndexCommand('users', keys, { + unique: true, + name: defaultMongoIndexName(keys), + }), + }, + ], + postcheck: [ + { + description: 'index exists on users', + source: new ListIndexesCommand('users'), + filter: MongoAndExpr.of([ + MongoFieldFilter.eq('key', keysToKeySpec(keys)), + MongoFieldFilter.eq('unique', true), + ]), + expect: 'exists', + }, + ], + }; + + expect(JSON.stringify(factoryOp)).toBe(JSON.stringify(plannerOp)); + }); +}); diff --git a/packages/3-mongo-target/1-mongo-target/tsdown.config.ts b/packages/3-mongo-target/1-mongo-target/tsdown.config.ts index 2d45076b9..467efd577 100644 --- a/packages/3-mongo-target/1-mongo-target/tsdown.config.ts +++ b/packages/3-mongo-target/1-mongo-target/tsdown.config.ts @@ -1,5 +1,10 @@ import { defineConfig } from '@prisma-next/tsdown'; export default defineConfig({ - entry: ['src/exports/pack.ts', 'src/exports/codec-types.ts', 'src/exports/control.ts'], + entry: [ + 'src/exports/pack.ts', + 'src/exports/codec-types.ts', + 'src/exports/control.ts', + 'src/exports/migration.ts', + ], }); From 59e73cb76432ed0aaa5799c3750b500cf8ad12d8 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 19:44:19 +0300 Subject: [PATCH 03/31] Add target-agnostic Migration base class Generic Migration with abstract plan() and static run() for self-executing migration files. Entrypoint detection uses import.meta.url (not import.meta object) for tsx compatibility. Supports --dry-run and --help flags. Exported from @prisma-next/migration-tools/migration. 
--- .../3-tooling/migration/package.json | 4 + .../migration/src/exports/migration.ts | 1 + .../3-tooling/migration/src/migration-base.ts | 94 ++++++++++++ .../migration/test/migration-base.test.ts | 136 ++++++++++++++++++ .../3-tooling/migration/tsdown.config.ts | 1 + 5 files changed, 236 insertions(+) create mode 100644 packages/1-framework/3-tooling/migration/src/exports/migration.ts create mode 100644 packages/1-framework/3-tooling/migration/src/migration-base.ts create mode 100644 packages/1-framework/3-tooling/migration/test/migration-base.test.ts diff --git a/packages/1-framework/3-tooling/migration/package.json b/packages/1-framework/3-tooling/migration/package.json index 749776ff3..ad257e956 100644 --- a/packages/1-framework/3-tooling/migration/package.json +++ b/packages/1-framework/3-tooling/migration/package.json @@ -64,6 +64,10 @@ "types": "./dist/exports/migration-ts.d.mts", "import": "./dist/exports/migration-ts.mjs" }, + "./migration": { + "types": "./dist/exports/migration.d.mts", + "import": "./dist/exports/migration.mjs" + }, "./package.json": "./package.json" }, "repository": { diff --git a/packages/1-framework/3-tooling/migration/src/exports/migration.ts b/packages/1-framework/3-tooling/migration/src/exports/migration.ts new file mode 100644 index 000000000..a57233b79 --- /dev/null +++ b/packages/1-framework/3-tooling/migration/src/exports/migration.ts @@ -0,0 +1 @@ +export { Migration } from '../migration-base'; diff --git a/packages/1-framework/3-tooling/migration/src/migration-base.ts b/packages/1-framework/3-tooling/migration/src/migration-base.ts new file mode 100644 index 000000000..28a650272 --- /dev/null +++ b/packages/1-framework/3-tooling/migration/src/migration-base.ts @@ -0,0 +1,94 @@ +import { realpathSync, writeFileSync } from 'node:fs'; +import { fileURLToPath } from 'node:url'; +import { dirname, join } from 'pathe'; + +export abstract class Migration<TOperation = unknown> { + abstract plan(): TOperation[]; + + /** + * Makes the migration file 
self-executing. Call at module scope: + * + * Migration.run(import.meta.url) + * + * When the file is the entrypoint, calls plan(), serializes, and writes ops.json. + * When imported by another module, this is a no-op. + */ + static run(importMetaUrl: string): void { + if (!importMetaUrl) return; + + const metaFilename = fileURLToPath(importMetaUrl); + const argv1 = process.argv[1]; + if (!argv1) return; + + let isEntrypoint: boolean; + try { + isEntrypoint = realpathSync(metaFilename) === realpathSync(argv1); + } catch { + return; + } + if (!isEntrypoint) return; + + const args = process.argv.slice(2); + + if (args.includes('--help')) { + printHelp(); + return; + } + + const dryRun = args.includes('--dry-run'); + const migrationDir = dirname(metaFilename); + const outputPath = join(migrationDir, 'ops.json'); + + executeMigration(importMetaUrl, outputPath, dryRun).catch((err) => { + process.stderr.write(`${err instanceof Error ? err.message : String(err)}\n`); + process.exitCode = 1; + }); + } +} + +function printHelp(): void { + process.stdout.write( + [ + 'Usage: node <migration-file> [options]', + '', + 'Options:', + ' --dry-run Print operations to stdout without writing ops.json', + ' --help Show this help message', + '', + ].join('\n'), + ); +} + +async function executeMigration( + fileUrl: string, + outputPath: string, + dryRun: boolean, +): Promise<void> { + const mod = await import(fileUrl); + const MigrationClass = mod.default; + + if (!MigrationClass || typeof MigrationClass !== 'function') { + throw new Error('Migration file must have a default export class'); + } + + const instance = new MigrationClass(); + + if (typeof instance.plan !== 'function') { + throw new Error('Migration class must implement plan()'); + } + + const ops = instance.plan(); + + if (!Array.isArray(ops)) { + throw new Error('plan() must return an array of operations'); + } + + const serialized = JSON.stringify(ops, null, 2); + + if (dryRun) { + process.stdout.write(`${serialized}\n`); + return; + } + + 
writeFileSync(outputPath, serialized); +} diff --git a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts new file mode 100644 index 000000000..b68c35137 --- /dev/null +++ b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts @@ -0,0 +1,136 @@ +import { execFile } from 'node:child_process'; +import { mkdtemp, readFile, rm, writeFile } from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import { promisify } from 'node:util'; +import { join, resolve } from 'pathe'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; +import { Migration } from '../src/migration-base'; + +const execFileAsync = promisify(execFile); +const packageRoot = resolve(import.meta.dirname, '..'); +const repoRoot = resolve(packageRoot, '../../../..'); + +describe('Migration', () => { + describe('plan() contract', () => { + it('can be subclassed and plan() called directly', () => { + class TestMigration extends Migration<{ id: string }> { + plan() { + return [{ id: 'op1' }, { id: 'op2' }]; + } + } + + const m = new TestMigration(); + const ops = m.plan(); + expect(ops).toEqual([{ id: 'op1' }, { id: 'op2' }]); + }); + }); +}); + +describe('Migration.run() subprocess', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'migration-run-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + const migrationBasePath = join(packageRoot, 'src/migration-base.ts').replace(/\\/g, '/'); + + function migrationScript(planReturn: string): string { + return [ + `import { Migration } from '${migrationBasePath}';`, + '', + 'export default class extends Migration {', + ' plan() {', + ` return ${planReturn};`, + ' }', + '}', + '', + 'Migration.run(import.meta.url);', + ].join('\n'); + } + + async function runMigration( + filename: string, + args: string[] = [], + ): Promise<{ stdout: string; 
stderr: string; exitCode: number }> { + const filePath = join(tmpDir, filename); + const tsxPath = join(repoRoot, 'node_modules/.bin/tsx'); + try { + const result = await execFileAsync(tsxPath, [filePath, ...args], { cwd: tmpDir }); + return { stdout: result.stdout, stderr: result.stderr, exitCode: 0 }; + } catch (error) { + const e = error as { stdout: string; stderr: string; code: number }; + return { stdout: e.stdout || '', stderr: e.stderr || '', exitCode: e.code || 1 }; + } + } + + it('writes ops.json when run as entrypoint', async () => { + const script = migrationScript('[{ id: "op1", label: "Test op" }]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).toBe(0); + + const opsJson = await readFile(join(tmpDir, 'ops.json'), 'utf-8'); + const ops = JSON.parse(opsJson); + expect(ops).toEqual([{ id: 'op1', label: 'Test op' }]); + }); + + it('prints operations with --dry-run and does not write ops.json', async () => { + const script = migrationScript('[{ id: "op1", label: "Dry run op" }]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts', ['--dry-run']); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('op1'); + expect(result.stdout).toContain('Dry run op'); + + const opsExists = await readFile(join(tmpDir, 'ops.json'), 'utf-8').catch(() => null); + expect(opsExists).toBeNull(); + }); + + it('prints usage with --help', async () => { + const script = migrationScript('[]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts', ['--help']); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('--dry-run'); + expect(result.stdout).toContain('--help'); + }); + + it('is a no-op when the file is imported', async () => { + const migrationFile = migrationScript('[{ id: "op1" }]'); + await writeFile(join(tmpDir, 'migration.ts'), 
migrationFile); + + const importerScript = [ + `import Migration from '${join(tmpDir, 'migration.ts').replace(/\\/g, '/')}';`, + 'const m = new Migration();', + 'const ops = m.plan();', + 'console.log(JSON.stringify(ops));', + ].join('\n'); + await writeFile(join(tmpDir, 'importer.ts'), importerScript); + + const result = await runMigration('importer.ts'); + expect(result.exitCode).toBe(0); + + const opsExists = await readFile(join(tmpDir, 'ops.json'), 'utf-8').catch(() => null); + expect(opsExists).toBeNull(); + + const importedOps = JSON.parse(result.stdout.trim()); + expect(importedOps).toEqual([{ id: 'op1' }]); + }); + + it('exits with error when plan() returns non-array', async () => { + const script = migrationScript('"not an array"'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).not.toBe(0); + expect(result.stderr).toContain('plan()'); + }); +}); diff --git a/packages/1-framework/3-tooling/migration/tsdown.config.ts b/packages/1-framework/3-tooling/migration/tsdown.config.ts index b834cd40c..13fff3542 100644 --- a/packages/1-framework/3-tooling/migration/tsdown.config.ts +++ b/packages/1-framework/3-tooling/migration/tsdown.config.ts @@ -9,6 +9,7 @@ export default defineConfig({ 'exports/refs': 'src/exports/refs.ts', 'exports/constants': 'src/exports/constants.ts', 'exports/migration-ts': 'src/exports/migration-ts.ts', + 'exports/migration': 'src/exports/migration.ts', }, exports: { enabled: false }, }); From b3cbc56a49601207b93a447fd249c323439d9588 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 19:45:07 +0300 Subject: [PATCH 04/31] Add Mongo-specific Migration alias MongoMigration extends Migration so users get a single import for both Migration and factory functions from @prisma-next/target-mongo/migration. 
--- packages/3-mongo-target/1-mongo-target/package.json | 1 + .../3-mongo-target/1-mongo-target/src/core/mongo-migration.ts | 4 ++++ .../3-mongo-target/1-mongo-target/src/exports/migration.ts | 2 ++ pnpm-lock.yaml | 3 +++ 4 files changed, 10 insertions(+) create mode 100644 packages/3-mongo-target/1-mongo-target/src/core/mongo-migration.ts diff --git a/packages/3-mongo-target/1-mongo-target/package.json b/packages/3-mongo-target/1-mongo-target/package.json index b3dc8e8bc..f89b838f2 100644 --- a/packages/3-mongo-target/1-mongo-target/package.json +++ b/packages/3-mongo-target/1-mongo-target/package.json @@ -17,6 +17,7 @@ "dependencies": { "@prisma-next/contract": "workspace:*", "@prisma-next/framework-components": "workspace:*", + "@prisma-next/migration-tools": "workspace:*", "@prisma-next/mongo-query-ast": "workspace:*", "mongodb": "catalog:" }, diff --git a/packages/3-mongo-target/1-mongo-target/src/core/mongo-migration.ts b/packages/3-mongo-target/1-mongo-target/src/core/mongo-migration.ts new file mode 100644 index 000000000..c4b110854 --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/src/core/mongo-migration.ts @@ -0,0 +1,4 @@ +import { Migration } from '@prisma-next/migration-tools/migration'; +import type { MongoMigrationPlanOperation } from '@prisma-next/mongo-query-ast/control'; + +export abstract class MongoMigration extends Migration<MongoMigrationPlanOperation> {} diff --git a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts index 9d214e66b..aaa1fd703 100644 --- a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts +++ b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts @@ -5,3 +5,5 @@ export { dropCollection, dropIndex, } from '../core/migration-factories'; + +export { MongoMigration as Migration } from '../core/mongo-migration'; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3fa00ae90..4ef4a4f78 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -2234,6 
+2234,9 @@ importers: '@prisma-next/framework-components': specifier: workspace:* version: link:../../1-framework/1-core/framework-components + '@prisma-next/migration-tools': + specifier: workspace:* + version: link:../../1-framework/3-tooling/migration '@prisma-next/mongo-query-ast': specifier: workspace:* version: link:../../2-mongo-family/4-query/query-ast From 16bd7792409df71fc3a117c1258cbd2fc329623d Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 19:48:15 +0300 Subject: [PATCH 05/31] Add validatedCollection strategy and end-to-end tests validatedCollection composes createCollection (with validator) + createIndex calls into a flat operation list. E2E tests verify the full pipeline: write migration.ts, run with tsx, validate ops.json structure, --dry-run output, and strategy composition. --- .../src/core/migration-strategies.ts | 20 ++ .../1-mongo-target/src/exports/migration.ts | 2 +- .../1-mongo-target/test/migration-e2e.test.ts | 184 ++++++++++++++++++ .../test/migration-strategies.test.ts | 67 +++++++ 4 files changed, 272 insertions(+), 1 deletion(-) create mode 100644 packages/3-mongo-target/1-mongo-target/src/core/migration-strategies.ts create mode 100644 packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts create mode 100644 packages/3-mongo-target/1-mongo-target/test/migration-strategies.test.ts diff --git a/packages/3-mongo-target/1-mongo-target/src/core/migration-strategies.ts b/packages/3-mongo-target/1-mongo-target/src/core/migration-strategies.ts new file mode 100644 index 000000000..8e5112954 --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/src/core/migration-strategies.ts @@ -0,0 +1,20 @@ +import type { + MongoIndexKey, + MongoMigrationPlanOperation, +} from '@prisma-next/mongo-query-ast/control'; +import { createCollection, createIndex } from './migration-factories'; + +export function validatedCollection( + name: string, + schema: Record<string, unknown>, + indexes: ReadonlyArray<{ keys: MongoIndexKey[]; unique?: 
boolean }>, +): MongoMigrationPlanOperation[] { + return [ + createCollection(name, { + validator: { $jsonSchema: schema }, + validationLevel: 'strict', + validationAction: 'error', + }), + ...indexes.map((idx) => createIndex(name, idx.keys, { unique: idx.unique })), + ]; +} diff --git a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts index aaa1fd703..159881a00 100644 --- a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts +++ b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts @@ -5,5 +5,5 @@ export { dropCollection, dropIndex, } from '../core/migration-factories'; - +export { validatedCollection } from '../core/migration-strategies'; export { MongoMigration as Migration } from '../core/mongo-migration'; diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts new file mode 100644 index 000000000..81f5ca81c --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts @@ -0,0 +1,184 @@ +import { execFile } from 'node:child_process'; +import { mkdtemp, readFile, rm, writeFile } from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import { join, resolve } from 'node:path'; +import { promisify } from 'node:util'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; + +const execFileAsync = promisify(execFile); +const packageRoot = resolve(import.meta.dirname, '..'); +const repoRoot = resolve(packageRoot, '../../..'); +const tsxPath = join(repoRoot, 'node_modules/.bin/tsx'); + +const migrationExports = join(packageRoot, 'src/exports/migration.ts').replace(/\\/g, '/'); + +describe('migration file E2E', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'migration-e2e-')); + await writeFile(join(tmpDir, 'package.json'), '{"type":"module"}'); + }); + + afterEach(async () => { + 
await rm(tmpDir, { recursive: true, force: true }); + }); + + async function runFile( + filename: string, + args: string[] = [], + ): Promise<{ stdout: string; stderr: string; exitCode: number }> { + const filePath = join(tmpDir, filename); + try { + const result = await execFileAsync(tsxPath, [filePath, ...args], { cwd: tmpDir }); + return { stdout: result.stdout, stderr: result.stderr, exitCode: 0 }; + } catch (error) { + const e = error as { stdout: string; stderr: string; code: number }; + return { stdout: e.stdout || '', stderr: e.stderr || '', exitCode: e.code || 1 }; + } + } + + describe('factory-based migration', () => { + const factoryMigration = [ + `import { Migration, createIndex, createCollection } from '${migrationExports}';`, + '', + 'export default class extends Migration {', + ' plan() {', + ' return [', + ' createCollection("users", {', + ' validator: { $jsonSchema: { required: ["email"] } },', + ' validationLevel: "strict",', + ' }),', + ' createIndex("users", [{ field: "email", direction: 1 }], { unique: true }),', + ' ];', + ' }', + '}', + '', + 'Migration.run(import.meta.url);', + ].join('\n'); + + it('produces ops.json with correct structure', async () => { + await writeFile(join(tmpDir, 'migration.ts'), factoryMigration); + + const result = await runFile('migration.ts'); + expect(result.exitCode).toBe(0); + + const opsJson = await readFile(join(tmpDir, 'ops.json'), 'utf-8'); + const ops = JSON.parse(opsJson); + + expect(ops).toHaveLength(2); + expect(ops[0].id).toBe('collection.users.create'); + expect(ops[0].operationClass).toBe('additive'); + expect(ops[0].execute[0].command.kind).toBe('createCollection'); + + expect(ops[1].id).toContain('index.users.create'); + expect(ops[1].execute[0].command.kind).toBe('createIndex'); + expect(ops[1].execute[0].command.unique).toBe(true); + }); + + it('prints operations with --dry-run and does not write ops.json', async () => { + await writeFile(join(tmpDir, 'migration.ts'), factoryMigration); + + const 
result = await runFile('migration.ts', ['--dry-run']); + expect(result.exitCode).toBe(0); + + const parsed = JSON.parse(result.stdout); + expect(parsed).toHaveLength(2); + expect(parsed[0].id).toBe('collection.users.create'); + + const opsExists = await readFile(join(tmpDir, 'ops.json'), 'utf-8').catch(() => null); + expect(opsExists).toBeNull(); + }); + }); + + describe('strategy-based migration', () => { + const strategyMigration = [ + `import { Migration, validatedCollection } from '${migrationExports}';`, + '', + 'export default class extends Migration {', + ' plan() {', + ' return validatedCollection(', + ' "users",', + ' { required: ["email", "name"] },', + ' [{ keys: [{ field: "email", direction: 1 }], unique: true }],', + ' );', + ' }', + '}', + '', + 'Migration.run(import.meta.url);', + ].join('\n'); + + it('produces ops.json from strategy composition', async () => { + await writeFile(join(tmpDir, 'migration.ts'), strategyMigration); + + const result = await runFile('migration.ts'); + expect(result.exitCode).toBe(0); + + const opsJson = await readFile(join(tmpDir, 'ops.json'), 'utf-8'); + const ops = JSON.parse(opsJson); + + expect(ops).toHaveLength(2); + + expect(ops[0].id).toBe('collection.users.create'); + expect(ops[0].execute[0].command.validator).toEqual({ + $jsonSchema: { required: ['email', 'name'] }, + }); + expect(ops[0].execute[0].command.validationLevel).toBe('strict'); + + expect(ops[1].id).toContain('index.users.create'); + expect(ops[1].execute[0].command.unique).toBe(true); + }); + }); + + describe('serialization format', () => { + it('produces JSON that the runner can consume (correct kind discriminants)', async () => { + const migration = [ + `import { Migration, createIndex, dropIndex, createCollection, dropCollection, collMod } from '${migrationExports}';`, + '', + 'export default class extends Migration {', + ' plan() {', + ' return [', + ' createCollection("users"),', + ' createIndex("users", [{ field: "email", direction: 1 }]),', + ' 
collMod("users", { validator: { $jsonSchema: { required: ["email"] } } }),', + ' dropIndex("users", [{ field: "email", direction: 1 }]),', + ' dropCollection("users"),', + ' ];', + ' }', + '}', + '', + 'Migration.run(import.meta.url);', + ].join('\n'); + + await writeFile(join(tmpDir, 'migration.ts'), migration); + + const result = await runFile('migration.ts'); + expect(result.exitCode).toBe(0); + + const ops = JSON.parse(await readFile(join(tmpDir, 'ops.json'), 'utf-8')); + expect(ops).toHaveLength(5); + + const commandKinds = ops.map((op: Record<string, unknown>) => + (op['execute'] as Record<string, unknown>[]).map( + (s) => (s as Record<string, Record<string, unknown>>)['command']['kind'], + ), + ); + expect(commandKinds).toEqual([ + ['createCollection'], + ['createIndex'], + ['collMod'], + ['dropIndex'], + ['dropCollection'], + ]); + + for (const op of ops) { + expect(op).toHaveProperty('id'); + expect(op).toHaveProperty('label'); + expect(op).toHaveProperty('operationClass'); + expect(op).toHaveProperty('precheck'); + expect(op).toHaveProperty('execute'); + expect(op).toHaveProperty('postcheck'); + } + }); + }); +}); diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-strategies.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-strategies.test.ts new file mode 100644 index 000000000..f31ff2254 --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/test/migration-strategies.test.ts @@ -0,0 +1,67 @@ +import { CreateCollectionCommand, CreateIndexCommand } from '@prisma-next/mongo-query-ast/control'; +import { describe, expect, it } from 'vitest'; +import { validatedCollection } from '../src/core/migration-strategies'; + +describe('validatedCollection', () => { + it('returns a createCollection op followed by createIndex ops', () => { + const ops = validatedCollection('users', { required: ['email', 'name'] }, [ + { keys: [{ field: 'email', direction: 1 }], unique: true }, + { keys: [{ field: 'name', direction: 1 }] }, + ]); + + expect(ops).toHaveLength(3); + 
expect(ops[0]!.id).toBe('collection.users.create'); + expect(ops[1]!.id).toContain('index.users.create'); + expect(ops[2]!.id).toContain('index.users.create'); + }); + + it('produces correct createCollection with validator', () => { + const ops = validatedCollection('users', { required: ['email'] }, []); + + expect(ops).toHaveLength(1); + const cmd = ops[0]!.execute[0]!.command as CreateCollectionCommand; + expect(cmd).toBeInstanceOf(CreateCollectionCommand); + expect(cmd.validator).toEqual({ $jsonSchema: { required: ['email'] } }); + expect(cmd.validationLevel).toBe('strict'); + expect(cmd.validationAction).toBe('error'); + }); + + it('passes index options through to createIndex', () => { + const ops = validatedCollection('users', { required: ['email'] }, [ + { keys: [{ field: 'email', direction: 1 }], unique: true }, + ]); + + const indexCmd = ops[1]!.execute[0]!.command as CreateIndexCommand; + expect(indexCmd).toBeInstanceOf(CreateIndexCommand); + expect(indexCmd.unique).toBe(true); + }); + + it('returns a flat array', () => { + const ops = validatedCollection('users', { required: ['email'] }, [ + { keys: [{ field: 'email', direction: 1 }] }, + ]); + + expect(Array.isArray(ops)).toBe(true); + for (const op of ops) { + expect(op).toHaveProperty('id'); + expect(op).toHaveProperty('execute'); + } + }); + + it('handles empty indexes array', () => { + const ops = validatedCollection('users', { required: ['email'] }, []); + + expect(ops).toHaveLength(1); + expect(ops[0]!.id).toBe('collection.users.create'); + }); + + it('all operations are additive', () => { + const ops = validatedCollection('users', { required: ['email'] }, [ + { keys: [{ field: 'email', direction: 1 }] }, + ]); + + for (const op of ops) { + expect(op.operationClass).toBe('additive'); + } + }); +}); From a65957fb716944f6684151cb8a05726f85df0158 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 19:49:01 +0300 Subject: [PATCH 06/31] Document migration authoring in target-mongo README 
Add migration entrypoint, usage example, factory and strategy reference to the package README. --- .../3-mongo-target/1-mongo-target/README.md | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/packages/3-mongo-target/1-mongo-target/README.md b/packages/3-mongo-target/1-mongo-target/README.md index 8870912d9..40d8f8ecb 100644 --- a/packages/3-mongo-target/1-mongo-target/README.md +++ b/packages/3-mongo-target/1-mongo-target/README.md @@ -7,14 +7,18 @@ MongoDB target pack for Prisma Next. - **Target pack assembly**: Exports the MongoDB target pack for authoring and family composition - **Target metadata**: Defines the stable Mongo target identity (`kind`, `familyId`, `targetId`, `version`, `capabilities`) - **Codec type surface**: Exposes the base Mongo codec type map used by authoring-time type composition +- **Migration authoring**: Factory functions and strategies for hand-authored MongoDB migrations ## Entrypoints - `./pack`: pure target pack ref used by `@prisma-next/family-mongo` and `@prisma-next/mongo-contract-ts` - `./codec-types`: base Mongo codec type map +- `./migration`: migration authoring — `Migration` base class, factory functions, strategies ## Usage +### Contract definition + ```typescript import mongoFamily from '@prisma-next/family-mongo/pack'; import { defineContract } from '@prisma-next/mongo-contract-ts/contract-builder'; @@ -25,3 +29,38 @@ const contract = defineContract({ target: mongoTarget, }); ``` + +### Migration authoring + +```typescript +import { Migration, createIndex, createCollection } + from "@prisma-next/target-mongo/migration" + +export default class extends Migration { + plan() { + return [ + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + validationLevel: "strict", + }), + createIndex("users", [{ field: "email", direction: 1 }], { unique: true }), + ] + } +} + +Migration.run(import.meta.url) +``` + +Run `node migration.ts` to produce `ops.json`. 
Use `--dry-run` to preview without writing. + +### Available factories + +- `createIndex(collection, keys, options?)` — create an index +- `dropIndex(collection, keys)` — drop an index +- `createCollection(collection, options?)` — create a collection +- `dropCollection(collection)` — drop a collection +- `collMod(collection, options)` — modify collection options + +### Strategies + +- `validatedCollection(name, schema, indexes)` — create a collection with a JSON Schema validator and indexes From a49285c33ef3f0dcd01d7b36e27394da4f791c90 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 20:05:52 +0300 Subject: [PATCH 07/31] Add migration.json output to Migration base class Migration files can now implement describe() to provide metadata (from, to, kind, labels). When present, Migration.run() writes both ops.json and migration.json. The manifest is a draft (migrationId: null) with createdAt auto-set. Dry-run prints both files to stdout. --- .../migration/src/exports/migration.ts | 2 +- .../3-tooling/migration/src/migration-base.ts | 45 +++++-- .../migration/test/migration-base.test.ts | 112 ++++++++++++++++++ 3 files changed, 151 insertions(+), 8 deletions(-) diff --git a/packages/1-framework/3-tooling/migration/src/exports/migration.ts b/packages/1-framework/3-tooling/migration/src/exports/migration.ts index a57233b79..a42ef7feb 100644 --- a/packages/1-framework/3-tooling/migration/src/exports/migration.ts +++ b/packages/1-framework/3-tooling/migration/src/exports/migration.ts @@ -1 +1 @@ -export { Migration } from '../migration-base'; +export { Migration, type MigrationMeta } from '../migration-base'; diff --git a/packages/1-framework/3-tooling/migration/src/migration-base.ts b/packages/1-framework/3-tooling/migration/src/migration-base.ts index 28a650272..4d853b35b 100644 --- a/packages/1-framework/3-tooling/migration/src/migration-base.ts +++ b/packages/1-framework/3-tooling/migration/src/migration-base.ts @@ -2,9 +2,20 @@ import { realpathSync, 
writeFileSync } from 'node:fs'; import { fileURLToPath } from 'node:url'; import { dirname, join } from 'pathe'; +export interface MigrationMeta { + readonly from: string; + readonly to: string; + readonly kind?: 'regular' | 'baseline'; + readonly labels?: readonly string[]; +} + export abstract class Migration { abstract plan(): TOperation[]; + describe(): MigrationMeta | undefined { + return undefined; + } + /** * Makes the migration file self-executing. Call at module scope: * @@ -37,9 +48,8 @@ export abstract class Migration { const dryRun = args.includes('--dry-run'); const migrationDir = dirname(metaFilename); - const outputPath = join(migrationDir, 'ops.json'); - executeMigration(importMetaUrl, outputPath, dryRun).catch((err) => { + executeMigration(importMetaUrl, migrationDir, dryRun).catch((err) => { process.stderr.write(`${err instanceof Error ? err.message : String(err)}\n`); process.exitCode = 1; }); @@ -52,16 +62,27 @@ function printHelp(): void { 'Usage: node [options]', '', 'Options:', - ' --dry-run Print operations to stdout without writing ops.json', + ' --dry-run Print operations to stdout without writing files', ' --help Show this help message', '', ].join('\n'), ); } +function buildManifest(meta: MigrationMeta): Record { + return { + migrationId: null, + from: meta.from, + to: meta.to, + kind: meta.kind ?? 'regular', + labels: meta.labels ?? [], + createdAt: new Date().toISOString(), + }; +} + async function executeMigration( fileUrl: string, - outputPath: string, + migrationDir: string, dryRun: boolean, ): Promise { const mod = await import(fileUrl); @@ -83,12 +104,22 @@ async function executeMigration( throw new Error('plan() must return an array of operations'); } - const serialized = JSON.stringify(ops, null, 2); + const serializedOps = JSON.stringify(ops, null, 2); + const meta: MigrationMeta | undefined = + typeof instance.describe === 'function' ? instance.describe() : undefined; + const manifest = meta ? 
buildManifest(meta) : undefined; if (dryRun) { - process.stdout.write(`${serialized}\n`); + if (manifest) { + process.stdout.write(`--- migration.json ---\n${JSON.stringify(manifest, null, 2)}\n`); + process.stdout.write('--- ops.json ---\n'); + } + process.stdout.write(`${serializedOps}\n`); return; } - writeFileSync(outputPath, serialized); + writeFileSync(join(migrationDir, 'ops.json'), serializedOps); + if (manifest) { + writeFileSync(join(migrationDir, 'migration.json'), JSON.stringify(manifest, null, 2)); + } } diff --git a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts index b68c35137..60c78c03e 100644 --- a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts +++ b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts @@ -24,6 +24,33 @@ describe('Migration', () => { expect(ops).toEqual([{ id: 'op1' }, { id: 'op2' }]); }); }); + + describe('describe() contract', () => { + it('returns undefined by default', () => { + class TestMigration extends Migration { + plan() { + return []; + } + } + + const m = new TestMigration(); + expect(m.describe()).toBeUndefined(); + }); + + it('can be overridden to provide migration metadata', () => { + class TestMigration extends Migration { + describe() { + return { from: 'abc', to: 'def', labels: ['test'] }; + } + plan() { + return []; + } + } + + const m = new TestMigration(); + expect(m.describe()).toEqual({ from: 'abc', to: 'def', labels: ['test'] }); + }); + }); }); describe('Migration.run() subprocess', () => { @@ -133,4 +160,89 @@ describe('Migration.run() subprocess', () => { expect(result.exitCode).not.toBe(0); expect(result.stderr).toContain('plan()'); }); + + describe('migration.json output', () => { + function migrationWithDescribe(meta: string, planReturn: string): string { + return [ + `import { Migration } from '${migrationBasePath}';`, + '', + 'export default class extends Migration {', + 
' describe() {', + ` return ${meta};`, + ' }', + ' plan() {', + ` return ${planReturn};`, + ' }', + '}', + '', + 'Migration.run(import.meta.url);', + ].join('\n'); + } + + it('writes migration.json when describe() is implemented', async () => { + const script = migrationWithDescribe( + '{ from: "abc123", to: "def456", labels: ["add-users"] }', + '[{ id: "op1" }]', + ); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).toBe(0); + + const manifest = JSON.parse(await readFile(join(tmpDir, 'migration.json'), 'utf-8')); + expect(manifest.from).toBe('abc123'); + expect(manifest.to).toBe('def456'); + expect(manifest.labels).toEqual(['add-users']); + expect(manifest.migrationId).toBeNull(); + expect(manifest.kind).toBe('regular'); + expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + }); + + it('does not write migration.json when describe() is absent', async () => { + const script = migrationScript('[{ id: "op1" }]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).toBe(0); + + expect(await readFile(join(tmpDir, 'ops.json'), 'utf-8')).toBeTruthy(); + const manifestExists = await readFile(join(tmpDir, 'migration.json'), 'utf-8').catch( + () => null, + ); + expect(manifestExists).toBeNull(); + }); + + it('defaults kind to regular and labels to empty', async () => { + const script = migrationWithDescribe('{ from: "abc", to: "def" }', '[]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).toBe(0); + + const manifest = JSON.parse(await readFile(join(tmpDir, 'migration.json'), 'utf-8')); + expect(manifest.kind).toBe('regular'); + expect(manifest.labels).toEqual([]); + }); + + it('includes migration.json content in --dry-run output', async () => { + const script = migrationWithDescribe( + '{ from: "abc", to: 
"def", labels: ["test"] }', + '[{ id: "op1" }]', + ); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts', ['--dry-run']); + expect(result.exitCode).toBe(0); + + const output = result.stdout; + expect(output).toContain('"from"'); + expect(output).toContain('"to"'); + expect(output).toContain('"op1"'); + + const manifestExists = await readFile(join(tmpDir, 'migration.json'), 'utf-8').catch( + () => null, + ); + expect(manifestExists).toBeNull(); + }); + }); }); From d12b3e656bddd882f612dc0d3f233de4df934c2b Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 20:09:06 +0300 Subject: [PATCH 08/31] Move MongoMigration from target-mongo to family-mongo MongoMigration is a family-level concern (not target-specific). Canonical import is now @prisma-next/family-mongo/migration which re-exports factories and strategies from @prisma-next/target-mongo/migration. --- packages/2-mongo-family/9-family/package.json | 3 +++ .../9-family}/src/core/mongo-migration.ts | 0 .../2-mongo-family/9-family/src/exports/migration.ts | 9 +++++++++ packages/2-mongo-family/9-family/tsdown.config.ts | 2 +- packages/3-mongo-target/1-mongo-target/package.json | 1 - .../1-mongo-target/src/exports/migration.ts | 1 - .../1-mongo-target/test/migration-e2e.test.ts | 3 ++- pnpm-lock.yaml | 9 ++++++--- 8 files changed, 21 insertions(+), 7 deletions(-) rename packages/{3-mongo-target/1-mongo-target => 2-mongo-family/9-family}/src/core/mongo-migration.ts (100%) create mode 100644 packages/2-mongo-family/9-family/src/exports/migration.ts diff --git a/packages/2-mongo-family/9-family/package.json b/packages/2-mongo-family/9-family/package.json index e056cd9ec..ed9780e3e 100644 --- a/packages/2-mongo-family/9-family/package.json +++ b/packages/2-mongo-family/9-family/package.json @@ -16,8 +16,10 @@ "@prisma-next/contract": "workspace:*", "@prisma-next/emitter": "workspace:*", "@prisma-next/framework-components": "workspace:*", + 
"@prisma-next/migration-tools": "workspace:*", "@prisma-next/mongo-contract": "workspace:*", "@prisma-next/mongo-emitter": "workspace:*", + "@prisma-next/mongo-query-ast": "workspace:*", "@prisma-next/mongo-schema-ir": "workspace:*", "@prisma-next/target-mongo": "workspace:*", "@prisma-next/utils": "workspace:*", @@ -45,6 +47,7 @@ "types": "./dist/control.d.mts", "exports": { "./control": "./dist/control.mjs", + "./migration": "./dist/migration.mjs", "./pack": "./dist/pack.mjs", "./package.json": "./package.json" } diff --git a/packages/3-mongo-target/1-mongo-target/src/core/mongo-migration.ts b/packages/2-mongo-family/9-family/src/core/mongo-migration.ts similarity index 100% rename from packages/3-mongo-target/1-mongo-target/src/core/mongo-migration.ts rename to packages/2-mongo-family/9-family/src/core/mongo-migration.ts diff --git a/packages/2-mongo-family/9-family/src/exports/migration.ts b/packages/2-mongo-family/9-family/src/exports/migration.ts new file mode 100644 index 000000000..bac08fa31 --- /dev/null +++ b/packages/2-mongo-family/9-family/src/exports/migration.ts @@ -0,0 +1,9 @@ +export { + collMod, + createCollection, + createIndex, + dropCollection, + dropIndex, + validatedCollection, +} from '@prisma-next/target-mongo/migration'; +export { MongoMigration as Migration } from '../core/mongo-migration'; diff --git a/packages/2-mongo-family/9-family/tsdown.config.ts b/packages/2-mongo-family/9-family/tsdown.config.ts index 8fdb3adc9..cce916918 100644 --- a/packages/2-mongo-family/9-family/tsdown.config.ts +++ b/packages/2-mongo-family/9-family/tsdown.config.ts @@ -1,5 +1,5 @@ import { defineConfig } from '@prisma-next/tsdown'; export default defineConfig({ - entry: ['src/exports/control.ts', 'src/exports/pack.ts'], + entry: ['src/exports/control.ts', 'src/exports/pack.ts', 'src/exports/migration.ts'], }); diff --git a/packages/3-mongo-target/1-mongo-target/package.json b/packages/3-mongo-target/1-mongo-target/package.json index f89b838f2..b3dc8e8bc 
100644 --- a/packages/3-mongo-target/1-mongo-target/package.json +++ b/packages/3-mongo-target/1-mongo-target/package.json @@ -17,7 +17,6 @@ "dependencies": { "@prisma-next/contract": "workspace:*", "@prisma-next/framework-components": "workspace:*", - "@prisma-next/migration-tools": "workspace:*", "@prisma-next/mongo-query-ast": "workspace:*", "mongodb": "catalog:" }, diff --git a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts index 159881a00..11df525b7 100644 --- a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts +++ b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts @@ -6,4 +6,3 @@ export { dropIndex, } from '../core/migration-factories'; export { validatedCollection } from '../core/migration-strategies'; -export { MongoMigration as Migration } from '../core/mongo-migration'; diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts index 81f5ca81c..1ce0f5dfe 100644 --- a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts +++ b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts @@ -10,7 +10,8 @@ const packageRoot = resolve(import.meta.dirname, '..'); const repoRoot = resolve(packageRoot, '../../..'); const tsxPath = join(repoRoot, 'node_modules/.bin/tsx'); -const migrationExports = join(packageRoot, 'src/exports/migration.ts').replace(/\\/g, '/'); +const familyMongoRoot = resolve(repoRoot, 'packages/2-mongo-family/9-family'); +const migrationExports = join(familyMongoRoot, 'src/exports/migration.ts').replace(/\\/g, '/'); describe('migration file E2E', () => { let tmpDir: string; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4ef4a4f78..e95b9ebea 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1389,12 +1389,18 @@ importers: '@prisma-next/framework-components': specifier: workspace:* version: 
link:../../1-framework/1-core/framework-components + '@prisma-next/migration-tools': + specifier: workspace:* + version: link:../../1-framework/3-tooling/migration '@prisma-next/mongo-contract': specifier: workspace:* version: link:../1-foundation/mongo-contract '@prisma-next/mongo-emitter': specifier: workspace:* version: link:../3-tooling/emitter + '@prisma-next/mongo-query-ast': + specifier: workspace:* + version: link:../4-query/query-ast '@prisma-next/mongo-schema-ir': specifier: workspace:* version: link:../3-tooling/mongo-schema-ir @@ -2234,9 +2240,6 @@ importers: '@prisma-next/framework-components': specifier: workspace:* version: link:../../1-framework/1-core/framework-components - '@prisma-next/migration-tools': - specifier: workspace:* - version: link:../../1-framework/3-tooling/migration '@prisma-next/mongo-query-ast': specifier: workspace:* version: link:../../2-mongo-family/4-query/query-ast From f2d62768db95067862b298d5b92280aae6730918 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 20:10:50 +0300 Subject: [PATCH 09/31] Add E2E tests for migration authoring against real MongoDB MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Covers all five factory functions (createCollection, createIndex, dropIndex, dropCollection, collMod) and the validatedCollection strategy. Verifies the full round-trip: factory → JSON.stringify → deserializeMongoOps → MongoMigrationRunner → verify DDL in MongoDB. Also tests a multi-step create → modify → drop lifecycle. 
--- .../mongo/migration-authoring-e2e.test.ts | 364 ++++++++++++++++++ 1 file changed, 364 insertions(+) create mode 100644 test/integration/test/mongo/migration-authoring-e2e.test.ts diff --git a/test/integration/test/mongo/migration-authoring-e2e.test.ts b/test/integration/test/mongo/migration-authoring-e2e.test.ts new file mode 100644 index 000000000..1e2326051 --- /dev/null +++ b/test/integration/test/mongo/migration-authoring-e2e.test.ts @@ -0,0 +1,364 @@ +import { + deserializeMongoOps, + MongoMigrationRunner, + serializeMongoOps, +} from '@prisma-next/adapter-mongo/control'; +import mongoControlDriver from '@prisma-next/driver-mongo/control'; +import type { MongoMigrationPlanOperation } from '@prisma-next/mongo-query-ast/control'; +import { + collMod, + createCollection, + createIndex, + dropCollection, + dropIndex, + validatedCollection, +} from '@prisma-next/target-mongo/migration'; +import { timeouts } from '@prisma-next/test-utils'; +import { type Db, MongoClient } from 'mongodb'; +import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; + +const ALL_POLICY = { + allowedOperationClasses: ['additive', 'widening', 'destructive'] as const, +}; + +describe( + 'Migration authoring round-trip (factory → serialize → deserialize → runner → DB)', + { timeout: timeouts.spinUpMongoMemoryServer }, + () => { + let replSet: MongoMemoryReplSet; + let client: MongoClient; + let db: Db; + const dbName = 'authoring_e2e_test'; + + beforeAll(async () => { + replSet = await MongoMemoryReplSet.create({ + instanceOpts: [ + { launchTimeout: timeouts.spinUpMongoMemoryServer, storageEngine: 'wiredTiger' }, + ], + replSet: { count: 1, storageEngine: 'wiredTiger' }, + }); + client = new MongoClient(replSet.getUri()); + await client.connect(); + db = client.db(dbName); + }, timeouts.spinUpMongoMemoryServer); + + beforeEach(async () => { + await db.dropDatabase(); + }); + + afterAll(async () => { + 
try { + await client?.close(); + await replSet?.stop(); + } catch { + // ignore cleanup errors + } + }, timeouts.spinUpMongoMemoryServer); + + async function runOps(ops: readonly MongoMigrationPlanOperation[]): Promise<{ + operationsPlanned: number; + operationsExecuted: number; + }> { + const serialized = JSON.parse(serializeMongoOps(ops)); + const controlDriver = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new MongoMigrationRunner(); + const result = await runner.execute({ + plan: { + targetId: 'mongo', + destination: { storageHash: 'authoring-test' }, + operations: serialized, + }, + driver: controlDriver, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + if (!result.ok) throw new Error(`Runner failed: ${result.error.summary}`); + return result.value; + } finally { + await controlDriver.close(); + } + } + + describe('createCollection', () => { + it('creates the collection in MongoDB', async () => { + const ops = [createCollection('users')]; + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(1); + + const collections = await db.listCollections({ name: 'users' }).toArray(); + expect(collections).toHaveLength(1); + }); + + it('creates a collection with JSON schema validation', async () => { + const ops = [ + createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + validationAction: 'error', + }), + ]; + await runOps(ops); + + const info = await db.listCollections({ name: 'users' }).toArray(); + const options = info[0]!['options'] as Record; + expect(options['validator']).toEqual({ $jsonSchema: { required: ['email'] } }); + }); + }); + + describe('createIndex', () => { + it('creates an index on the collection', async () => { + await db.createCollection('users'); + const ops = [createIndex('users', [{ field: 'email', direction: 1 as const }])]; + const result = await runOps(ops); + 
expect(result.operationsExecuted).toBe(1); + + const indexes = await db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeDefined(); + }); + + it('creates a unique index', async () => { + await db.createCollection('users'); + const ops = [ + createIndex('users', [{ field: 'email', direction: 1 as const }], { unique: true }), + ]; + await runOps(ops); + + const indexes = await db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeDefined(); + expect(emailIndex!['unique']).toBe(true); + }); + }); + + describe('dropIndex', () => { + it('drops an existing index', async () => { + await db.createCollection('users'); + await db.collection('users').createIndex({ email: 1 }, { name: 'email_1' }); + + const ops = [dropIndex('users', [{ field: 'email', direction: 1 as const }])]; + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(1); + + const indexes = await db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeUndefined(); + }); + }); + + describe('dropCollection', () => { + it('drops an existing collection', async () => { + await db.createCollection('users'); + const ops = [dropCollection('users')]; + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(1); + + const collections = await db.listCollections({ name: 'users' }).toArray(); + expect(collections).toHaveLength(0); + }); + }); + + describe('collMod', () => { + it('modifies collection validation', async () => { + await db.createCollection('users'); + const ops = [ + collMod('users', { + validator: { $jsonSchema: { required: ['email', 'name'] } }, + validationLevel: 'strict', + }), + ]; + const result = 
await runOps(ops); + expect(result.operationsExecuted).toBe(1); + + const info = await db.listCollections({ name: 'users' }).toArray(); + const options = info[0]!['options'] as Record; + expect(options['validator']).toEqual({ + $jsonSchema: { required: ['email', 'name'] }, + }); + }); + }); + + describe('round-trip serialization', () => { + it('factory → JSON.stringify → deserializeMongoOps produces equivalent ops', () => { + const original = [ + createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + }), + createIndex('users', [{ field: 'email', direction: 1 as const }], { unique: true }), + dropIndex('users', [{ field: 'email', direction: 1 as const }]), + collMod('users', { validator: { $jsonSchema: { required: ['email', 'name'] } } }), + dropCollection('users'), + ]; + + const json = JSON.stringify(original); + const deserialized = deserializeMongoOps(JSON.parse(json)); + + expect(deserialized).toHaveLength(5); + for (let i = 0; i < original.length; i++) { + expect(deserialized[i]!.id).toBe(original[i]!.id); + expect(deserialized[i]!.label).toBe(original[i]!.label); + expect(deserialized[i]!.operationClass).toBe(original[i]!.operationClass); + } + }); + + it('deserialized ops execute successfully against the DB', async () => { + const original = [ + createCollection('users'), + createIndex('users', [{ field: 'email', direction: 1 as const }], { unique: true }), + ]; + + const json = JSON.stringify(original); + const deserialized = deserializeMongoOps(JSON.parse(json)); + + const result = await runOps(deserialized); + expect(result.operationsExecuted).toBe(2); + + const collections = await db.listCollections({ name: 'users' }).toArray(); + expect(collections).toHaveLength(1); + + const indexes = await db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeDefined(); + 
expect(emailIndex!['unique']).toBe(true); + }); + }); + + describe('validatedCollection strategy', () => { + it('creates collection with schema validation and indexes', async () => { + const ops = validatedCollection('users', { required: ['email', 'name'] }, [ + { keys: [{ field: 'email', direction: 1 }], unique: true }, + { keys: [{ field: 'name', direction: 1 }] }, + ]); + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(3); + + const info = await db.listCollections({ name: 'users' }).toArray(); + expect(info).toHaveLength(1); + const options = info[0]!['options'] as Record; + expect(options['validator']).toEqual({ + $jsonSchema: { required: ['email', 'name'] }, + }); + + const indexes = await db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeDefined(); + expect(emailIndex!['unique']).toBe(true); + + const nameIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['name'] === 1, + ); + expect(nameIndex).toBeDefined(); + }); + + it('round-trips through serialization and runs against the DB', async () => { + const ops = validatedCollection('posts', { required: ['title'] }, [ + { keys: [{ field: 'title', direction: 1 }] }, + ]); + + const json = JSON.stringify(ops); + const deserialized = deserializeMongoOps(JSON.parse(json)); + const result = await runOps(deserialized); + expect(result.operationsExecuted).toBe(2); + + const collections = await db.listCollections({ name: 'posts' }).toArray(); + expect(collections).toHaveLength(1); + }); + }); + + describe('multi-step migration lifecycle', () => { + it('applies a full create → modify → drop lifecycle', async () => { + const step1 = [ + createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + }), + createIndex('users', [{ field: 'email', direction: 1 as const }], { unique: true }), + ]; + await 
runOps(step1); + + let collections = await db.listCollections({ name: 'users' }).toArray(); + expect(collections).toHaveLength(1); + const indexes = await db.collection('users').listIndexes().toArray(); + expect(indexes.some((idx) => (idx['key'] as Record)?.['email'] === 1)).toBe( + true, + ); + + const step2 = [ + collMod('users', { + validator: { $jsonSchema: { required: ['email', 'name'] } }, + }), + ]; + + const serialized2 = JSON.parse(serializeMongoOps(step2)); + const controlDriver2 = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new MongoMigrationRunner(); + const result2 = await runner.execute({ + plan: { + targetId: 'mongo', + origin: { storageHash: 'authoring-test' }, + destination: { storageHash: 'authoring-test-v2' }, + operations: serialized2, + }, + driver: controlDriver2, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + expect(result2.ok).toBe(true); + } finally { + await controlDriver2.close(); + } + + const info = await db.listCollections({ name: 'users' }).toArray(); + const options = info[0]!['options'] as Record; + expect(options['validator']).toEqual({ + $jsonSchema: { required: ['email', 'name'] }, + }); + + const step3 = [ + dropIndex('users', [{ field: 'email', direction: 1 as const }]), + dropCollection('users'), + ]; + + const serialized3 = JSON.parse(serializeMongoOps(step3)); + const controlDriver3 = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new MongoMigrationRunner(); + const result3 = await runner.execute({ + plan: { + targetId: 'mongo', + origin: { storageHash: 'authoring-test-v2' }, + destination: { storageHash: 'authoring-test-v3' }, + operations: serialized3, + }, + driver: controlDriver3, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + expect(result3.ok).toBe(true); + } finally { + await controlDriver3.close(); + } + + collections = await db.listCollections({ name: 'users' 
}).toArray(); + expect(collections).toHaveLength(0); + }); + }); + }, +); From e1307cd33e9c6fcc064c106d0133f6fdc2b0dd8d Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 20:12:03 +0300 Subject: [PATCH 10/31] Update READMEs to reflect migration authoring entrypoint move Canonical import for migration authoring is now @prisma-next/family-mongo/migration. Updated both target-mongo and family-mongo READMEs with correct import paths and describe() example. --- packages/2-mongo-family/9-family/README.md | 34 ++++++++++++++++++- .../3-mongo-target/1-mongo-target/README.md | 10 +++--- 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/packages/2-mongo-family/9-family/README.md b/packages/2-mongo-family/9-family/README.md index 940de2007..e11734a1c 100644 --- a/packages/2-mongo-family/9-family/README.md +++ b/packages/2-mongo-family/9-family/README.md @@ -21,6 +21,7 @@ This package is the Mongo family integration point for both control-plane assemb ## Entrypoints - `./control`: control-plane entrypoint exporting `mongoFamilyDescriptor`, `mongoTargetDescriptor`, `createMongoFamilyInstance`, and `MongoControlFamilyInstance` +- `./migration`: migration authoring — `Migration` class, factory functions, and strategies (re-exported from `@prisma-next/target-mongo/migration`) - `./pack`: pure pack ref for TypeScript authoring flows such as `@prisma-next/mongo-contract-ts/contract-builder` ## Usage @@ -95,18 +96,49 @@ export const contract = defineContract({ The current `contract.ts` slice supports roots and collections, typed reference relations, owned models with `storage.relations`, value objects, and discriminator-based polymorphism. 
+### Migration authoring + +```typescript +import { Migration, createIndex, createCollection } + from "@prisma-next/family-mongo/migration" + +export default class extends Migration { + describe() { + return { from: "abc123", to: "def456", labels: ["add-users"] } + } + + plan() { + return [ + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + validationLevel: "strict", + }), + createIndex("users", [{ field: "email", direction: 1 }], { unique: true }), + ] + } +} + +Migration.run(import.meta.url) +``` + +Run `node migration.ts` to produce `ops.json` and `migration.json`. Use `--dry-run` to preview without writing. + ## Package Structure - `src/core/control-descriptor.ts`: `MongoFamilyDescriptor` implementation - `src/core/control-instance.ts`: `createMongoFamilyInstance()` and `MongoControlFamilyInstance` - `src/core/mongo-target-descriptor.ts`: pre-built control target descriptor derived from `@prisma-next/target-mongo/pack` +- `src/core/mongo-migration.ts`: `MongoMigration` class (fixes the `Migration` type parameter to `MongoMigrationPlanOperation`) - `src/exports/control.ts`: control-plane entrypoint +- `src/exports/migration.ts`: migration authoring entrypoint - `src/exports/pack.ts`: authoring-time family pack ref ## Dependencies - `@prisma-next/framework-components`: control-plane types and stack assembly +- `@prisma-next/migration-tools`: generic `Migration` base class - `@prisma-next/mongo-contract`: Mongo contract validation and types - `@prisma-next/mongo-contract-ts`: Mongo `contract.ts` authoring surface - `@prisma-next/mongo-emitter`: Mongo family emission hook -- `@prisma-next/target-mongo`: Mongo target pack metadata +- `@prisma-next/mongo-query-ast`: Mongo command AST types (`MongoMigrationPlanOperation`) +- `@prisma-next/target-mongo`: Mongo target pack metadata and migration factories diff --git a/packages/3-mongo-target/1-mongo-target/README.md b/packages/3-mongo-target/1-mongo-target/README.md index 
40d8f8ecb..c407e6e24 100644 --- a/packages/3-mongo-target/1-mongo-target/README.md +++ b/packages/3-mongo-target/1-mongo-target/README.md @@ -7,13 +7,13 @@ MongoDB target pack for Prisma Next. - **Target pack assembly**: Exports the MongoDB target pack for authoring and family composition - **Target metadata**: Defines the stable Mongo target identity (`kind`, `familyId`, `targetId`, `version`, `capabilities`) - **Codec type surface**: Exposes the base Mongo codec type map used by authoring-time type composition -- **Migration authoring**: Factory functions and strategies for hand-authored MongoDB migrations +- **Migration factories and strategies**: Atomic factory functions and compound strategies for MongoDB migration operations ## Entrypoints - `./pack`: pure target pack ref used by `@prisma-next/family-mongo` and `@prisma-next/mongo-contract-ts` - `./codec-types`: base Mongo codec type map -- `./migration`: migration authoring — `Migration` base class, factory functions, strategies +- `./migration`: factory functions and strategies (the `Migration` base class is in `@prisma-next/family-mongo/migration`) ## Usage @@ -32,9 +32,11 @@ const contract = defineContract({ ### Migration authoring +Import from `@prisma-next/family-mongo/migration` for the full authoring experience (Migration class + factories + strategies): + ```typescript import { Migration, createIndex, createCollection } - from "@prisma-next/target-mongo/migration" + from "@prisma-next/family-mongo/migration" export default class extends Migration { plan() { @@ -51,7 +53,7 @@ export default class extends Migration { Migration.run(import.meta.url) ``` -Run `node migration.ts` to produce `ops.json`. Use `--dry-run` to preview without writing. +Run `node migration.ts` to produce `ops.json` and `migration.json` (when `describe()` is implemented). Use `--dry-run` to preview without writing. 
### Available factories From f7632feeac90b5c3e82be69c42413d9736ac8de5 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 20:18:31 +0300 Subject: [PATCH 11/31] Add hand-authored TS migration to mongo-demo Demonstrates the migration authoring workflow: a TypeScript migration file using factory functions and describe() to produce both ops.json and migration.json. Test verifies the migration imports, runs against a real MongoDB, and produces valid artifacts. --- .../migration.json | 8 ++ .../migration.ts | 23 +++ .../20260415_add-posts-author-index/ops.json | 122 ++++++++++++++++ .../mongo-demo/test/manual-migration.test.ts | 132 ++++++++++++++++++ 4 files changed, 285 insertions(+) create mode 100644 examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.json create mode 100644 examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts create mode 100644 examples/mongo-demo/migrations/20260415_add-posts-author-index/ops.json create mode 100644 examples/mongo-demo/test/manual-migration.test.ts diff --git a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.json b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.json new file mode 100644 index 000000000..22ec0f993 --- /dev/null +++ b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.json @@ -0,0 +1,8 @@ +{ + "migrationId": null, + "from": "sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe", + "to": "sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe", + "kind": "regular", + "labels": ["add-posts-author-index"], + "createdAt": "2026-04-15T17:17:30.570Z" +} diff --git a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts new file mode 100644 index 000000000..76cc3e560 --- /dev/null +++ b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts 
@@ -0,0 +1,23 @@ +import { createIndex, Migration } from '@prisma-next/family-mongo/migration'; + +export default class extends Migration { + describe() { + return { + from: 'sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe', + to: 'sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe', + labels: ['add-posts-author-index'], + }; + } + + plan() { + return [ + createIndex('posts', [{ field: 'authorId', direction: 1 }]), + createIndex('posts', [ + { field: 'createdAt', direction: -1 }, + { field: 'authorId', direction: 1 }, + ]), + ]; + } +} + +Migration.run(import.meta.url); diff --git a/examples/mongo-demo/migrations/20260415_add-posts-author-index/ops.json b/examples/mongo-demo/migrations/20260415_add-posts-author-index/ops.json new file mode 100644 index 000000000..3a2b1c7ea --- /dev/null +++ b/examples/mongo-demo/migrations/20260415_add-posts-author-index/ops.json @@ -0,0 +1,122 @@ +[ + { + "id": "index.posts.create(authorId:1)", + "label": "Create index on posts (authorId:1)", + "operationClass": "additive", + "precheck": [ + { + "description": "index does not already exist on posts", + "source": { + "kind": "listIndexes", + "collection": "posts" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "authorId": 1 + } + }, + "expect": "notExists" + } + ], + "execute": [ + { + "description": "create index on posts", + "command": { + "kind": "createIndex", + "collection": "posts", + "keys": [ + { + "field": "authorId", + "direction": 1 + } + ], + "name": "authorId_1" + } + } + ], + "postcheck": [ + { + "description": "index exists on posts", + "source": { + "kind": "listIndexes", + "collection": "posts" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "authorId": 1 + } + }, + "expect": "exists" + } + ] + }, + { + "id": "index.posts.create(createdAt:-1,authorId:1)", + "label": "Create index on posts (createdAt:-1, authorId:1)", + "operationClass": 
"additive", + "precheck": [ + { + "description": "index does not already exist on posts", + "source": { + "kind": "listIndexes", + "collection": "posts" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "createdAt": -1, + "authorId": 1 + } + }, + "expect": "notExists" + } + ], + "execute": [ + { + "description": "create index on posts", + "command": { + "kind": "createIndex", + "collection": "posts", + "keys": [ + { + "field": "createdAt", + "direction": -1 + }, + { + "field": "authorId", + "direction": 1 + } + ], + "name": "createdAt_-1_authorId_1" + } + } + ], + "postcheck": [ + { + "description": "index exists on posts", + "source": { + "kind": "listIndexes", + "collection": "posts" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "createdAt": -1, + "authorId": 1 + } + }, + "expect": "exists" + } + ] + } +] diff --git a/examples/mongo-demo/test/manual-migration.test.ts b/examples/mongo-demo/test/manual-migration.test.ts new file mode 100644 index 000000000..13e672908 --- /dev/null +++ b/examples/mongo-demo/test/manual-migration.test.ts @@ -0,0 +1,132 @@ +import { readFileSync } from 'node:fs'; +import { resolve } from 'node:path'; +import { deserializeMongoOps, MongoMigrationRunner } from '@prisma-next/adapter-mongo/control'; +import mongoControlDriver from '@prisma-next/driver-mongo/control'; +import { Migration } from '@prisma-next/family-mongo/migration'; +import { timeouts } from '@prisma-next/test-utils'; +import { type Db, MongoClient } from 'mongodb'; +import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; + +const ALL_POLICY = { + allowedOperationClasses: ['additive', 'widening', 'destructive'] as const, +}; + +const migrationDir = resolve(import.meta.dirname, '../migrations/20260415_add-posts-author-index'); + +describe( + 'hand-authored migration (20260415_add-posts-author-index)', + { timeout: 
timeouts.spinUpMongoMemoryServer }, + () => { + let replSet: MongoMemoryReplSet; + let client: MongoClient; + let db: Db; + const dbName = 'manual_migration_test'; + + beforeAll(async () => { + replSet = await MongoMemoryReplSet.create({ + instanceOpts: [ + { launchTimeout: timeouts.spinUpMongoMemoryServer, storageEngine: 'wiredTiger' }, + ], + replSet: { count: 1, storageEngine: 'wiredTiger' }, + }); + client = new MongoClient(replSet.getUri()); + await client.connect(); + db = client.db(dbName); + }, timeouts.spinUpMongoMemoryServer); + + beforeEach(async () => { + await db.dropDatabase(); + }); + + afterAll(async () => { + try { + await client?.close(); + await replSet?.stop(); + } catch { + // ignore cleanup errors + } + }, timeouts.spinUpMongoMemoryServer); + + it('migration.ts can be imported and plan() called directly', async () => { + const mod = await import('../migrations/20260415_add-posts-author-index/migration.ts'); + const MigrationClass = mod.default; + const instance = new MigrationClass(); + + const ops = instance.plan(); + expect(ops).toHaveLength(2); + expect(ops[0].id).toBe('index.posts.create(authorId:1)'); + expect(ops[1].id).toBe('index.posts.create(createdAt:-1,authorId:1)'); + }); + + it('migration.ts describe() returns correct metadata', async () => { + const mod = await import('../migrations/20260415_add-posts-author-index/migration.ts'); + const instance = new mod.default(); + const meta = instance.describe(); + expect(meta.labels).toEqual(['add-posts-author-index']); + }); + + it('ops.json deserializes and applies against real MongoDB', async () => { + await db.createCollection('posts'); + + const opsJson = readFileSync(resolve(migrationDir, 'ops.json'), 'utf-8'); + const ops = deserializeMongoOps(JSON.parse(opsJson)); + expect(ops).toHaveLength(2); + + const controlDriver = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new MongoMigrationRunner(); + const result = await runner.execute({ + plan: { + 
targetId: 'mongo', + destination: { + storageHash: + 'sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe', + }, + operations: JSON.parse(opsJson), + }, + driver: controlDriver, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + + expect(result.ok).toBe(true); + if (!result.ok) return; + expect(result.value.operationsExecuted).toBe(2); + + const indexes = await db.collection('posts').listIndexes().toArray(); + + const authorIdIndex = indexes.find( + (idx) => + idx['key'] && + (idx['key'] as Record)['authorId'] === 1 && + !('createdAt' in (idx['key'] as Record)), + ); + expect(authorIdIndex).toBeDefined(); + + const compoundIndex = indexes.find( + (idx) => + idx['key'] && + (idx['key'] as Record)['createdAt'] === -1 && + (idx['key'] as Record)['authorId'] === 1, + ); + expect(compoundIndex).toBeDefined(); + } finally { + await controlDriver.close(); + } + }); + + it('migration.json exists and has expected structure', () => { + const manifestJson = readFileSync(resolve(migrationDir, 'migration.json'), 'utf-8'); + const manifest = JSON.parse(manifestJson); + + expect(manifest.migrationId).toBeNull(); + expect(manifest.kind).toBe('regular'); + expect(manifest.labels).toEqual(['add-posts-author-index']); + expect(manifest.from).toMatch(/^sha256:/); + expect(manifest.to).toMatch(/^sha256:/); + expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + }); + }, +); From b4d7f9df32115a73898b70c646bb0ec83a21b531 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 20:19:51 +0300 Subject: [PATCH 12/31] Add hand-authored TS migration to retail-store Demonstrates collMod (JSON Schema validation on products) and compound index creation. Test verifies the migration can be imported, run against a real MongoDB, and produces valid artifacts. 
--- .../migration.json | 8 ++ .../migration.ts | 37 ++++++ .../20260415_add-product-validation/ops.json | 101 ++++++++++++++ .../test/manual-migration.test.ts | 125 ++++++++++++++++++ 4 files changed, 271 insertions(+) create mode 100644 examples/retail-store/migrations/20260415_add-product-validation/migration.json create mode 100644 examples/retail-store/migrations/20260415_add-product-validation/migration.ts create mode 100644 examples/retail-store/migrations/20260415_add-product-validation/ops.json create mode 100644 examples/retail-store/test/manual-migration.test.ts diff --git a/examples/retail-store/migrations/20260415_add-product-validation/migration.json b/examples/retail-store/migrations/20260415_add-product-validation/migration.json new file mode 100644 index 000000000..f42f6470e --- /dev/null +++ b/examples/retail-store/migrations/20260415_add-product-validation/migration.json @@ -0,0 +1,8 @@ +{ + "migrationId": null, + "from": "sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d", + "to": "sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d", + "kind": "regular", + "labels": ["add-product-validation"], + "createdAt": "2026-04-15T17:19:07.772Z" +} diff --git a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts new file mode 100644 index 000000000..618f6f157 --- /dev/null +++ b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts @@ -0,0 +1,37 @@ +import { collMod, createIndex, Migration } from '@prisma-next/family-mongo/migration'; + +export default class extends Migration { + describe() { + return { + from: 'sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d', + to: 'sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d', + labels: ['add-product-validation'], + }; + } + + plan() { + return [ + collMod('products', { + validator: { + $jsonSchema: 
{ + bsonType: 'object', + required: ['name', 'price', 'category'], + properties: { + name: { bsonType: 'string' }, + price: { bsonType: 'number', minimum: 0 }, + category: { bsonType: 'string' }, + }, + }, + }, + validationLevel: 'moderate', + validationAction: 'warn', + }), + createIndex('products', [ + { field: 'category', direction: 1 }, + { field: 'price', direction: 1 }, + ]), + ]; + } +} + +Migration.run(import.meta.url); diff --git a/examples/retail-store/migrations/20260415_add-product-validation/ops.json b/examples/retail-store/migrations/20260415_add-product-validation/ops.json new file mode 100644 index 000000000..83836be00 --- /dev/null +++ b/examples/retail-store/migrations/20260415_add-product-validation/ops.json @@ -0,0 +1,101 @@ +[ + { + "id": "collMod.products", + "label": "Modify collection products", + "operationClass": "destructive", + "precheck": [], + "execute": [ + { + "description": "modify collection products", + "command": { + "kind": "collMod", + "collection": "products", + "validator": { + "$jsonSchema": { + "bsonType": "object", + "required": ["name", "price", "category"], + "properties": { + "name": { + "bsonType": "string" + }, + "price": { + "bsonType": "number", + "minimum": 0 + }, + "category": { + "bsonType": "string" + } + } + } + }, + "validationLevel": "moderate", + "validationAction": "warn" + } + } + ], + "postcheck": [] + }, + { + "id": "index.products.create(category:1,price:1)", + "label": "Create index on products (category:1, price:1)", + "operationClass": "additive", + "precheck": [ + { + "description": "index does not already exist on products", + "source": { + "kind": "listIndexes", + "collection": "products" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "category": 1, + "price": 1 + } + }, + "expect": "notExists" + } + ], + "execute": [ + { + "description": "create index on products", + "command": { + "kind": "createIndex", + "collection": "products", + "keys": [ + { + "field": 
"category", + "direction": 1 + }, + { + "field": "price", + "direction": 1 + } + ], + "name": "category_1_price_1" + } + } + ], + "postcheck": [ + { + "description": "index exists on products", + "source": { + "kind": "listIndexes", + "collection": "products" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "category": 1, + "price": 1 + } + }, + "expect": "exists" + } + ] + } +] diff --git a/examples/retail-store/test/manual-migration.test.ts b/examples/retail-store/test/manual-migration.test.ts new file mode 100644 index 000000000..04c6cc568 --- /dev/null +++ b/examples/retail-store/test/manual-migration.test.ts @@ -0,0 +1,125 @@ +import { readFileSync } from 'node:fs'; +import { resolve } from 'node:path'; +import { deserializeMongoOps, MongoMigrationRunner } from '@prisma-next/adapter-mongo/control'; +import mongoControlDriver from '@prisma-next/driver-mongo/control'; +import { timeouts } from '@prisma-next/test-utils'; +import { type Db, MongoClient } from 'mongodb'; +import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; + +const ALL_POLICY = { + allowedOperationClasses: ['additive', 'widening', 'destructive'] as const, +}; + +const migrationDir = resolve(import.meta.dirname, '../migrations/20260415_add-product-validation'); + +describe( + 'hand-authored migration (20260415_add-product-validation)', + { timeout: timeouts.spinUpMongoMemoryServer }, + () => { + let replSet: MongoMemoryReplSet; + let client: MongoClient; + let db: Db; + const dbName = 'manual_migration_test'; + + beforeAll(async () => { + replSet = await MongoMemoryReplSet.create({ + instanceOpts: [ + { launchTimeout: timeouts.spinUpMongoMemoryServer, storageEngine: 'wiredTiger' }, + ], + replSet: { count: 1, storageEngine: 'wiredTiger' }, + }); + client = new MongoClient(replSet.getUri()); + await client.connect(); + db = client.db(dbName); + }, 
timeouts.spinUpMongoMemoryServer); + + beforeEach(async () => { + await db.dropDatabase(); + }); + + afterAll(async () => { + try { + await client?.close(); + await replSet?.stop(); + } catch { + // ignore cleanup errors + } + }, timeouts.spinUpMongoMemoryServer); + + it('migration.ts can be imported and plan() called directly', async () => { + const mod = await import('../migrations/20260415_add-product-validation/migration.ts'); + const instance = new mod.default(); + + const ops = instance.plan(); + expect(ops).toHaveLength(2); + expect(ops[0].id).toBe('collMod.products'); + expect(ops[1].id).toContain('index.products.create'); + }); + + it('migration.ts describe() returns correct metadata', async () => { + const mod = await import('../migrations/20260415_add-product-validation/migration.ts'); + const instance = new mod.default(); + const meta = instance.describe(); + expect(meta.labels).toEqual(['add-product-validation']); + }); + + it('ops.json deserializes and applies against real MongoDB', async () => { + await db.createCollection('products'); + + const opsJson = readFileSync(resolve(migrationDir, 'ops.json'), 'utf-8'); + const ops = deserializeMongoOps(JSON.parse(opsJson)); + expect(ops).toHaveLength(2); + + const controlDriver = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new MongoMigrationRunner(); + const result = await runner.execute({ + plan: { + targetId: 'mongo', + destination: { + storageHash: + 'sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d', + }, + operations: JSON.parse(opsJson), + }, + driver: controlDriver, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + + expect(result.ok).toBe(true); + if (!result.ok) return; + expect(result.value.operationsExecuted).toBe(2); + + const info = await db.listCollections({ name: 'products' }).toArray(); + const options = info[0]!['options'] as Record; + expect(options['validator']).toBeDefined(); + + const indexes 
= await db.collection('products').listIndexes().toArray(); + const categoryPriceIndex = indexes.find( + (idx) => + idx['key'] && + (idx['key'] as Record)['category'] === 1 && + (idx['key'] as Record)['price'] === 1, + ); + expect(categoryPriceIndex).toBeDefined(); + } finally { + await controlDriver.close(); + } + }); + + it('migration.json exists and has expected structure', () => { + const manifestJson = readFileSync(resolve(migrationDir, 'migration.json'), 'utf-8'); + const manifest = JSON.parse(manifestJson); + + expect(manifest.migrationId).toBeNull(); + expect(manifest.kind).toBe('regular'); + expect(manifest.labels).toEqual(['add-product-validation']); + expect(manifest.from).toMatch(/^sha256:/); + expect(manifest.to).toMatch(/^sha256:/); + expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + }); + }, +); From a7976920c355998ee3db4ffff953dac435072ee7 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 20:44:38 +0300 Subject: [PATCH 13/31] fix(mongo-migration): add override modifier to Migration subclass methods noImplicitOverride is enabled in the shared tsconfig base, requiring the override keyword on methods that override base class members. 
--- .../20260415_add-posts-author-index/migration.ts | 4 ++-- .../20260415_add-product-validation/migration.ts | 4 ++-- .../3-tooling/migration/test/migration-base.test.ts | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts index 76cc3e560..8504b162d 100644 --- a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts +++ b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts @@ -1,7 +1,7 @@ import { createIndex, Migration } from '@prisma-next/family-mongo/migration'; export default class extends Migration { - describe() { + override describe() { return { from: 'sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe', to: 'sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe', @@ -9,7 +9,7 @@ export default class extends Migration { }; } - plan() { + override plan() { return [ createIndex('posts', [{ field: 'authorId', direction: 1 }]), createIndex('posts', [ diff --git a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts index 618f6f157..60cfc1f15 100644 --- a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts +++ b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts @@ -1,7 +1,7 @@ import { collMod, createIndex, Migration } from '@prisma-next/family-mongo/migration'; export default class extends Migration { - describe() { + override describe() { return { from: 'sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d', to: 'sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d', @@ -9,7 +9,7 @@ export default class extends Migration { }; } - plan() { + override plan() { return [ collMod('products', { validator: { diff 
--git a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts index 60c78c03e..a4fc5d7db 100644 --- a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts +++ b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts @@ -14,7 +14,7 @@ describe('Migration', () => { describe('plan() contract', () => { it('can be subclassed and plan() called directly', () => { class TestMigration extends Migration<{ id: string }> { - plan() { + override plan() { return [{ id: 'op1' }, { id: 'op2' }]; } } @@ -28,7 +28,7 @@ describe('Migration', () => { describe('describe() contract', () => { it('returns undefined by default', () => { class TestMigration extends Migration { - plan() { + override plan() { return []; } } @@ -39,10 +39,10 @@ describe('Migration', () => { it('can be overridden to provide migration metadata', () => { class TestMigration extends Migration { - describe() { + override describe() { return { from: 'abc', to: 'def', labels: ['test'] }; } - plan() { + override plan() { return []; } } From 04ef6e87d8c8321c79cf38128f304619f0589e46 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 21:01:19 +0300 Subject: [PATCH 14/31] feat(migration): print confirmation after writing migration files --- packages/1-framework/3-tooling/migration/src/migration-base.ts | 3 +++ .../3-tooling/migration/test/migration-base.test.ts | 2 ++ 2 files changed, 5 insertions(+) diff --git a/packages/1-framework/3-tooling/migration/src/migration-base.ts b/packages/1-framework/3-tooling/migration/src/migration-base.ts index 4d853b35b..642a2abd1 100644 --- a/packages/1-framework/3-tooling/migration/src/migration-base.ts +++ b/packages/1-framework/3-tooling/migration/src/migration-base.ts @@ -122,4 +122,7 @@ async function executeMigration( if (manifest) { writeFileSync(join(migrationDir, 'migration.json'), JSON.stringify(manifest, null, 2)); } + + const files = manifest ? 
'ops.json + migration.json' : 'ops.json'; + process.stdout.write(`Wrote ${files} to ${migrationDir}\n`); } diff --git a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts index a4fc5d7db..afce9bb54 100644 --- a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts +++ b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts @@ -101,6 +101,7 @@ describe('Migration.run() subprocess', () => { const result = await runMigration('migration.ts'); expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('Wrote ops.json'); const opsJson = await readFile(join(tmpDir, 'ops.json'), 'utf-8'); const ops = JSON.parse(opsJson); @@ -188,6 +189,7 @@ describe('Migration.run() subprocess', () => { const result = await runMigration('migration.ts'); expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('ops.json + migration.json'); const manifest = JSON.parse(await readFile(join(tmpDir, 'migration.json'), 'utf-8')); expect(manifest.from).toBe('abc123'); From b14e76900e1bff98569bdd3507bc504a72683e9c Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 21:29:18 +0300 Subject: [PATCH 15/31] docs: add specs for migration subsystem refactor and planner dual output Two task specs under the mongo-migration-authoring project: 1. migration-subsystem-refactor: move planner, runner, serializer, and supporting modules from adapter-mongo to target-mongo; refactor runner to accept abstract visitor interfaces instead of Db. 2. planner-dual-output: introduce OpFactoryCall intermediate representation, with renderers for raw operations (existing behavior) and TypeScript migration files. 
--- .../migration-subsystem-refactor.spec.md | 138 +++++++++++++++++ .../specs/planner-dual-output.spec.md | 146 ++++++++++++++++++ 2 files changed, 284 insertions(+) create mode 100644 projects/mongo-migration-authoring/specs/migration-subsystem-refactor.spec.md create mode 100644 projects/mongo-migration-authoring/specs/planner-dual-output.spec.md diff --git a/projects/mongo-migration-authoring/specs/migration-subsystem-refactor.spec.md b/projects/mongo-migration-authoring/specs/migration-subsystem-refactor.spec.md new file mode 100644 index 000000000..bae6fdc3e --- /dev/null +++ b/projects/mongo-migration-authoring/specs/migration-subsystem-refactor.spec.md @@ -0,0 +1,138 @@ +# Summary + +Move the Mongo migration subsystem (planner, runner, serializer, and supporting modules) from `@prisma-next/adapter-mongo` to `@prisma-next/target-mongo`, and refactor the runner to accept abstract visitor interfaces instead of depending directly on the `mongodb` driver. This corrects the layering: the planner, runner, and serializer are migration-plane concerns that belong in the target package, not the adapter. + +# Description + +The Mongo migration subsystem currently lives in `packages/3-mongo-target/2-mongo-adapter/src/core/`. Most of these modules have no dependency on the `mongodb` driver — they operate purely on AST types from `@prisma-next/mongo-query-ast` and schema IR from `@prisma-next/mongo-schema-ir`. They were placed in the adapter historically, but they belong in the target layer. + +The one module that does touch `mongodb` is the runner (`mongo-runner.ts`), but only through two concrete executor classes (`MongoCommandExecutor`, `MongoInspectionExecutor`). These implement visitor interfaces (`MongoDdlCommandVisitor`, `MongoInspectionCommandVisitor`) already defined in the family layer (`@prisma-next/mongo-query-ast`). The runner itself only calls `command.accept(executor)` — it never uses `Db` directly for DDL execution. 
By accepting the visitor interfaces as injected dependencies, the runner can move to the target package while the concrete executor implementations stay in the adapter.
+
+This follows the same adapter/driver pattern used for query execution: the orchestrator operates on abstract interfaces, and the adapter provides the concrete backing.
+
+## Modules to move
+
+From `packages/3-mongo-target/2-mongo-adapter/src/core/` to `packages/3-mongo-target/1-mongo-target/src/core/`:
+
+| Module | Rationale |
+|---|---|
+| `mongo-planner.ts` | Pure diffing logic; depends on query-ast and schema-ir, not `mongodb` |
+| `mongo-ops-serializer.ts` | Serialization/deserialization of AST; depends on query-ast and arktype, not `mongodb` |
+| `contract-to-schema.ts` | Contract-to-schema-IR conversion; depends on mongo-contract and schema-ir, not `mongodb` |
+| `ddl-formatter.ts` | Human-readable DDL formatting via visitor pattern; no `mongodb` dependency |
+| `filter-evaluator.ts` | Pure filter evaluation logic; depends on query-ast filter types, not `mongodb` |
+
+## Modules to refactor
+
+| Module | Change |
+|---|---|
+| `mongo-runner.ts` | Refactor to accept `MongoDdlCommandVisitor` and `MongoInspectionCommandVisitor` as injected dependencies instead of constructing them from `Db`. Moves to target. |
+
+## Modules that stay in adapter
+
+| Module | Rationale |
+|---|---|
+| `command-executor.ts` | Concrete `MongoDdlCommandVisitor` and `MongoInspectionCommandVisitor` implementations that use `mongodb` driver's `Db` type |
+| `mongo-control-driver.ts` | Creates/manages `mongodb` connection, exposes `Db` |
+| `introspect-schema.ts` | Directly queries `mongodb` to build schema IR from live database |
+
+## Wiring changes
+
+The `mongoTargetDescriptor` in `packages/2-mongo-family/9-family/src/core/mongo-target-descriptor.ts` currently imports `MongoMigrationPlanner`, `MongoMigrationRunner`, and `contractToMongoSchemaIR` from `@prisma-next/adapter-mongo/control`. 
After the move, these imports come from `@prisma-next/target-mongo/control`.
+
+The `createRunner` factory on the target descriptor will need to wire the concrete executor implementations from the adapter into the runner. This can be achieved by either:
+- Having the family instance (passed to `createRunner`) provide the executors, or
+- Passing the adapter's executor factory to the target descriptor at composition time
+
+**Assumption:** The simplest approach is to have `createRunner` accept the family instance (which already has access to the driver) and construct the concrete executors there. The runner's `execute` method signature changes to accept executor instances rather than extracting `Db` internally.
+
+# Requirements
+
+## Functional Requirements
+
+- All six modules listed above move from `adapter-mongo` to `target-mongo`, with their corresponding test files
+- The runner's `execute` method accepts abstract visitor interfaces (`MongoDdlCommandVisitor` and `MongoInspectionCommandVisitor`) rather than constructing them internally from a `Db` handle
+- `@prisma-next/adapter-mongo/control` re-exports the moved symbols for backward compatibility during the transition (the adapter already re-exports `target-mongo/control` symbols like `initMarker`, `readMarker`, etc.) 
+- `@prisma-next/target-mongo/control` exports the planner, runner, serializer, contract-to-schema converter, DDL formatter, and filter evaluator
+- The `mongoTargetDescriptor` in `9-family` imports planner, runner, and `contractToMongoSchemaIR` from `@prisma-next/target-mongo/control` instead of `@prisma-next/adapter-mongo/control`
+- All existing tests pass without behavioral changes
+- New dependencies added to `@prisma-next/target-mongo`: `@prisma-next/mongo-contract`, `@prisma-next/mongo-schema-ir`, `@prisma-next/utils`, `arktype`
+
+## Non-Functional Requirements
+
+- No behavioral changes — this is a pure structural refactoring
+- Package layering validation (`pnpm lint:deps`) passes after the move
+- The adapter's `mongodb` dependency does not leak into the target package
+
+## Non-goals
+
+- Refactoring the planner's internal logic (that's spec 2)
+- Changing the `MigrationRunner` framework interface
+- Moving `introspect-schema.ts` to the target (it genuinely needs the driver)
+- Removing the backward-compat re-exports from `adapter-mongo/control` in this change
+
+# Acceptance Criteria
+
+## Module relocation
+
+- [ ] `mongo-planner.ts`, `mongo-ops-serializer.ts`, `contract-to-schema.ts`, `ddl-formatter.ts`, `filter-evaluator.ts` live in `packages/3-mongo-target/1-mongo-target/src/core/`
+- [ ] `mongo-runner.ts` lives in `packages/3-mongo-target/1-mongo-target/src/core/`
+- [ ] Their test files move to `packages/3-mongo-target/1-mongo-target/test/`
+- [ ] `@prisma-next/target-mongo/control` exports all moved symbols
+
+## Runner abstraction
+
+- [ ] `MongoMigrationRunner.execute()` accepts `MongoDdlCommandVisitor` and `MongoInspectionCommandVisitor` as parameters (or via constructor injection)
+- [ ] The runner has no `import ... 
from 'mongodb'` statement +- [ ] `MongoCommandExecutor` and `MongoInspectionExecutor` remain in `adapter-mongo` and are wired into the runner at composition time + +## Backward compatibility + +- [ ] `@prisma-next/adapter-mongo/control` re-exports all moved symbols so existing consumers are not broken +- [ ] `mongoTargetDescriptor` in `9-family` imports from `@prisma-next/target-mongo/control` + +## Validation + +- [ ] All existing tests pass (`pnpm test:packages`) +- [ ] Package layering passes (`pnpm lint:deps`) +- [ ] `@prisma-next/target-mongo` does not depend on `mongodb` for the moved modules (the existing marker-ledger dependency is acceptable) +- [ ] E2E and integration tests pass (`pnpm test:e2e`, `pnpm test:integration`) + +# Other Considerations + +## Security + +Not applicable — pure internal refactoring, no new public API surface. + +## Cost + +No runtime cost impact. Build/CI times unchanged. + +## Observability + +Not applicable. + +## Data Protection + +Not applicable. + +## Analytics + +Not applicable. 
+ +# References + +- Current planner: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts) +- Current runner: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts) +- Current serializer: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts) +- Visitor interfaces (family layer): [`packages/2-mongo-family/4-query/query-ast/src/ddl-visitors.ts`](packages/2-mongo-family/4-query/query-ast/src/ddl-visitors.ts) +- Concrete executors (stay in adapter): [`packages/3-mongo-target/2-mongo-adapter/src/core/command-executor.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/command-executor.ts) +- Target descriptor (consumer): [`packages/2-mongo-family/9-family/src/core/mongo-target-descriptor.ts`](packages/2-mongo-family/9-family/src/core/mongo-target-descriptor.ts) +- Framework `MigrationRunner` interface: [`packages/1-framework/1-core/framework-components/src/control-migration-types.ts`](packages/1-framework/1-core/framework-components/src/control-migration-types.ts) +- Parent project spec: [`projects/mongo-migration-authoring/spec.md`](../spec.md) + +# Open Questions + +1. **Runner dependency injection style**: Should the runner accept executors via constructor injection (set once, reused across `execute` calls) or as parameters to each `execute` call? Constructor injection is simpler if the runner is created once per session; parameter injection is more flexible. **Default assumption:** Constructor injection, since `createRunner` already creates a fresh instance per session. + +2. **`target-mongo` already depends on `mongodb`** (for marker-ledger operations which use `Db`). Should the marker-ledger also be refactored to accept an abstract interface, or is that a separate concern? 
**Default assumption:** Out of scope; the marker operations are small and isolated, and refactoring them can happen independently. diff --git a/projects/mongo-migration-authoring/specs/planner-dual-output.spec.md b/projects/mongo-migration-authoring/specs/planner-dual-output.spec.md new file mode 100644 index 000000000..f2cd4715c --- /dev/null +++ b/projects/mongo-migration-authoring/specs/planner-dual-output.spec.md @@ -0,0 +1,146 @@ +# Summary + +Refactor the `MongoMigrationPlanner` to produce an intermediate `OpFactoryCall[]` representation instead of constructing `MongoMigrationPlanOperation[]` directly. Add two renderers: one that materializes `OpFactoryCall[]` into `MongoMigrationPlanOperation[]` (preserving current behavior), and one that renders `OpFactoryCall[]` into TypeScript migration files that call the existing factory functions. + +# Description + +Today the `MongoMigrationPlanner` has inline `planCreateIndex`, `planDropIndex`, `planCreateCollection`, `planDropCollection`, `planValidatorDiff`, and `planMutableOptionsDiff` helper functions that directly construct `MongoMigrationPlanOperation` objects with AST command classes, filter expressions, and check structures. The same logic is duplicated in the hand-authored migration factory functions (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod`). + +The goal is to make the planner produce a higher-level representation — an array of `OpFactoryCall` objects — that describes *which factory to call with which arguments*, rather than the fully-expanded operation. Two renderers then interpret this representation: + +1. **Operation renderer** (`renderOps`): Calls the factory functions to produce `MongoMigrationPlanOperation[]`. This is the existing behavior, preserved for `db init`, `db update`, and any other path that needs raw operations. + +2. 
**TypeScript renderer** (`renderTypeScript`): Generates a complete TypeScript migration file (`migration.ts`) that imports the factory functions and calls them in `plan()`. This enables `prisma migration plan` to produce editable migration files that users can modify before applying.
+
+## `OpFactoryCall` type
+
+A discriminated union where each variant corresponds to one factory function:
+
+```typescript
+type OpFactoryCall =
+  | { readonly factory: 'createIndex'; readonly collection: string; readonly keys: ReadonlyArray<MongoIndexKey>; readonly options?: CreateIndexOptions }
+  | { readonly factory: 'dropIndex'; readonly collection: string; readonly keys: ReadonlyArray<MongoIndexKey> }
+  | { readonly factory: 'createCollection'; readonly collection: string; readonly options?: CreateCollectionOptions }
+  | { readonly factory: 'dropCollection'; readonly collection: string }
+  | { readonly factory: 'collMod'; readonly collection: string; readonly options: CollModOptions };
+```
+
+Each variant captures exactly the arguments of the corresponding factory function. The planner produces these instead of fully-expanded operations.
+
+## Factory alignment
+
+The existing planner helper functions and the hand-authored factory functions have slightly different signatures and behaviors in some cases (e.g., the planner's `planCreateCollection` maps `MongoSchemaCollectionOptions` to `CreateCollectionOptions`, while the factory takes `CreateCollectionOptions` directly). As part of this work, the factory signatures are aligned so the planner can produce `OpFactoryCall` values that map 1:1 to factory calls. Since the factories were just created for the migration authoring project, their signatures can be adjusted freely.
+
+The planner's `planValidatorDiff` function currently produces `collMod` operations directly. 
After alignment, this maps to `OpFactoryCall` with `factory: 'collMod'`, and the operation-class classification logic (`classifyValidatorUpdate`) moves to a helper that the operation renderer calls (since `OpFactoryCall` doesn't carry `operationClass`). + +**Assumption:** `operationClass` is derived by the operation renderer, not stored in `OpFactoryCall`. The renderer has enough context (factory type + arguments) to determine the class. + +## Conflict detection stays in the planner + +The planner's conflict detection logic (immutable option changes, policy violations) operates on the schema diff, not on the generated operations. This stays in the planner and runs before `OpFactoryCall[]` generation. + +# Requirements + +## Functional Requirements + +- `OpFactoryCall` discriminated union type defined in `@prisma-next/target-mongo/control`, covering all five factory functions +- `MongoMigrationPlanner.plan()` internally produces `OpFactoryCall[]` and passes it through the operation renderer to return `MigrationPlannerResult` (preserving the existing interface) +- A new `MongoMigrationPlanner.planCalls()` method (or similar) returns the raw `OpFactoryCall[]` for consumers that need the intermediate representation +- An operation renderer function (`renderOps(calls: OpFactoryCall[]): MongoMigrationPlanOperation[]`) that calls the factory functions to produce operations +- A TypeScript renderer function (`renderTypeScript(calls: OpFactoryCall[], meta?: MigrationMeta): string`) that produces a complete, runnable migration file +- Factory function signatures in `migration-factories.ts` are aligned with `OpFactoryCall` argument shapes so the mapping is 1:1 +- The operation renderer assigns `operationClass` based on factory type and arguments (same classification logic the planner uses today) +- The TypeScript renderer generates valid TypeScript that imports from `@prisma-next/target-mongo/migration` and calls the factory functions + +## Non-Functional Requirements + +- 
The `plan()` method's external behavior is unchanged — consumers (CLI, target descriptor, tests) see the same `MigrationPlannerResult` +- The TypeScript renderer produces readable, idiomatic code (proper formatting, minimal boilerplate) +- The `OpFactoryCall` type is serializable (no class instances, only plain data) + +## Non-goals + +- CLI integration for `prisma migration plan --emit-ts` (future work — the renderer is the building block) +- Automatic migration file scaffolding from the CLI +- Supporting SQL targets with this specific `OpFactoryCall` type (each target will have its own factory call type) +- Data transform operations in the generated TypeScript + +# Acceptance Criteria + +## OpFactoryCall type + +- [ ] `OpFactoryCall` is a discriminated union with variants for `createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod` +- [ ] Each variant's fields match the aligned factory function's parameters exactly +- [ ] The type is exported from `@prisma-next/target-mongo/control` + +## Planner refactoring + +- [ ] `MongoMigrationPlanner` internally produces `OpFactoryCall[]` from the schema diff +- [ ] `plan()` returns the same `MigrationPlannerResult` as before (behavioral equivalence verified by existing tests) +- [ ] A method or function exposes the raw `OpFactoryCall[]` for downstream consumers +- [ ] Conflict detection (immutable options, policy violations) is preserved + +## Operation renderer + +- [ ] `renderOps(calls)` produces `MongoMigrationPlanOperation[]` identical to the current planner output for the same inputs +- [ ] Round-trip equivalence: for any schema diff, `renderOps(planner.planCalls(...))` produces the same operations as the current `planner.plan(...)` (verified by test comparing JSON output) +- [ ] `operationClass` is correctly derived for each factory call + +## TypeScript renderer + +- [ ] `renderTypeScript(calls)` produces a syntactically valid TypeScript file +- [ ] The generated file imports from 
`@prisma-next/target-mongo/migration` +- [ ] The generated file can be executed with `tsx` to produce `ops.json` +- [ ] The generated `ops.json` is identical to what `renderOps(calls)` produces when serialized (round-trip equivalence) +- [ ] When `meta` is provided, the generated file includes a `describe()` method returning the metadata + +## Factory alignment + +- [ ] Factory function signatures in `migration-factories.ts` align with `OpFactoryCall` argument shapes +- [ ] The planner's `planCreateCollection` mapping from `MongoSchemaCollectionOptions` to `CreateCollectionOptions` is extracted to a reusable helper + +# Other Considerations + +## Security + +Not applicable — no new external API surface; the TypeScript renderer produces source code that is written to disk by the CLI. + +## Cost + +No runtime cost impact. The intermediate representation adds negligible overhead (one extra array allocation per plan). + +## Observability + +Not applicable. + +## Data Protection + +Not applicable. + +## Analytics + +Not applicable. + +# References + +- Current planner: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts) (will move to `target-mongo` per spec 1) +- Factory functions: [`packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts`](packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts) +- `Migration` base class: [`packages/1-framework/3-tooling/migration/src/migration-base.ts`](packages/1-framework/3-tooling/migration/src/migration-base.ts) +- Migration authoring spec (parent): [`projects/mongo-migration-authoring/spec.md`](../spec.md) +- Migration subsystem refactor spec (prerequisite): [`projects/mongo-migration-authoring/specs/migration-subsystem-refactor.spec.md`](migration-subsystem-refactor.spec.md) + +# Decisions + +1. 
**`OpFactoryCall` is plain data, not class instances.** This makes it serializable and testable without constructing AST nodes. The factory functions handle AST construction. + +2. **`operationClass` is not part of `OpFactoryCall`.** It's a derived property that the operation renderer computes. This keeps the intermediate representation simple and avoids duplicating classification logic. + +3. **Prerequisite: migration subsystem refactor.** This spec assumes the planner and factories are co-located in `@prisma-next/target-mongo` (per the migration-subsystem-refactor spec). The planner needs to import and call the factory functions, which requires them to be in the same package or a dependency. + +# Open Questions + +1. **How should `operationClass` be derived for `collMod` calls?** Today the planner classifies validator updates as `widening` or `destructive` based on comparing origin and destination validators. The operation renderer needs access to this context (origin validator state) to make the same determination. Should the `collMod` variant of `OpFactoryCall` carry an explicit `operationClass` override, or should the renderer receive the origin schema as context? **Default assumption:** The `collMod` variant carries an optional `operationClass` field that the planner sets when it has the context to determine it; if omitted, the renderer defaults to `destructive`. + +2. **Should `renderTypeScript` produce the `Migration.run(import.meta)` line?** The hand-authored migration pattern includes this line at the bottom. The rendered file should include it so it's immediately runnable. **Default assumption:** Yes, include it. + +3. **Should `planCalls()` be a separate method on the planner, or should `plan()` return a richer result that includes both `OpFactoryCall[]` and the rendered operations?** **Default assumption:** A separate `planCalls()` method that returns `{ kind: 'success'; calls: OpFactoryCall[] } | { kind: 'failure'; conflicts: ... 
}`, sharing the same conflict-detection logic as `plan()`. From 07fb8bd9d49e316c23bbade68e767461e399e14e Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 21:30:50 +0300 Subject: [PATCH 16/31] Add plan.md --- projects/mongo-migration-authoring/plan.md | 129 +++++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 projects/mongo-migration-authoring/plan.md diff --git a/projects/mongo-migration-authoring/plan.md b/projects/mongo-migration-authoring/plan.md new file mode 100644 index 000000000..96bca2a58 --- /dev/null +++ b/projects/mongo-migration-authoring/plan.md @@ -0,0 +1,129 @@ +# Mongo Migration Authoring — Plan + +## What we're building + +A migration file that looks like this: + +```typescript +import { Migration, createIndex, createCollection } + from "@prisma-next/target-mongo/migration" + +export default class extends Migration { + plan() { + return [ + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + validationLevel: "strict", + }), + createIndex("users", [{ field: "email", direction: 1 }], { unique: true }), + ] + } +} + +Migration.run(import.meta) +``` + +`node migration.ts` produces `ops.json`. The existing `MongoMigrationRunner` consumes it unchanged. + +**Spec:** `projects/mongo-migration-authoring/spec.md` + +## How we get there + +Three milestones, each building on the last: + +1. **Factories** — implement the five operation factory functions (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod`). After this milestone, you can call `createIndex(...)` in a test and get a valid `MongoMigrationPlanOperation`. + +2. **Runnable migration files** — implement the `Migration` base class with `plan()` and `Migration.run(import.meta)`. After this milestone, you can write a migration.ts file, run it with `node`, and get `ops.json`. + +3. 
**Composition and end-to-end validation** — implement a compound strategy function, validate the full pipeline against a real MongoDB instance, and close out the project. + +--- + +## Milestone 1: Factories + +Implement the five factory functions in `packages/3-mongo-target/1-mongo-target`. Each one produces a `MongoMigrationPlanOperation` with the correct command, prechecks, and postchecks — identical to what the planner produces. + +The factories are extracted from the planner's existing inline logic (`planCreateIndex`, `planDropIndex`, etc.) using the same helpers (`buildIndexOpId`, `defaultMongoIndexName`, `keysToKeySpec`, filter expression assembly). + +**Tasks:** + +- [ ] `createIndex(collection, keys, options?)` — test and implement. Verify: output structure, text index handling, JSON serialization, round-trip through `deserializeMongoOps`, comparison against planner output for equivalent operation. +- [ ] `dropIndex(collection, keys)` — test and implement. Same verification pattern. +- [ ] `createCollection(collection, options?)` — test and implement. Cover: basic creation, validator options, capped/timeseries/collation/clusteredIndex options. +- [ ] `dropCollection(collection)` — test and implement. +- [ ] `collMod(collection, options)` — test and implement. Cover: validator update, changeStreamPreAndPostImages update. +- [ ] Create `src/exports/migration.ts` in `packages/3-mongo-target/1-mongo-target`. Configure the `@prisma-next/target-mongo/migration` export path in `package.json` and tsdown config. Export the five factories. + +**Acceptance criteria covered:** + +- Each factory produces correct prechecks/commands/postchecks +- Factory output serializes identically to planner output +- Round-trip: factory → `JSON.stringify` → `deserializeMongoOps` + +## Milestone 2: Runnable migration files + +Implement the `Migration` base class. 
After this, a `.ts` file with `export default class extends Migration` and `Migration.run(import.meta)` can be run directly to produce `ops.json`. + +**Tasks:** + +- [ ] Implement `Migration` base class in the framework layer (`packages/1-framework`). Abstract `plan()` method, generic over operation type. Static `run(meta: ImportMeta)` handling entrypoint detection, arg parsing, serialization, and file output. +- [ ] Entrypoint detection: check `import.meta.main` (Bun/Deno), fall back to `import.meta.filename` vs `resolve(process.argv[1])` (Node). +- [ ] `--dry-run` flag: print serialized operations to stdout without writing. +- [ ] `--help` flag: print usage information. +- [ ] Create Mongo-specific alias that fixes the type parameter to `MongoMigrationPlanOperation`. Re-export from `@prisma-next/target-mongo/migration` alongside the factory functions. + +**Tests:** + +- [ ] `Migration.run(import.meta)` is a no-op when the file is imported (not run directly) +- [ ] `ops.json` is written when the file is run as entrypoint +- [ ] `--dry-run` prints to stdout, does not write `ops.json` +- [ ] `--help` prints usage +- [ ] Default export class can be instantiated and `plan()` called directly (for CLI and test use) +- [ ] Error handling: non-array return from `plan()` + +**Acceptance criteria covered:** + +- Migration file type-checks and runs with `node migration.ts` +- File produces `ops.json` in its own directory +- `--dry-run` prints operations without writing +- `--help` prints usage +- `Migration.run(import.meta)` is no-op when imported +- Default export can be instantiated and `plan()` called directly + +## Milestone 3: Composition and end-to-end validation + +Implement a compound strategy function to demonstrate that strategies are plain function composition. Run full end-to-end tests against a real MongoDB instance to validate the entire pipeline: author → run → serialize → deserialize → runner. 
+ +**Tasks:** + +- [ ] Implement `validatedCollection(name, schema, indexes)` — composes `createCollection` with validator + `createIndex` for each index. Returns a flat operation list. +- [ ] Export the strategy from `@prisma-next/target-mongo/migration`. +- [ ] End-to-end test with factory functions: write a migration.ts file, run it with `node`, verify `ops.json`, deserialize with `deserializeMongoOps`, execute against MongoDB memory server. +- [ ] End-to-end test with strategy function: same pipeline, using `validatedCollection` in the migration file. + +**Acceptance criteria covered:** + +- Strategy composes multiple factories and returns a flat operation list +- Strategy is a plain exported function +- Round-trip through runner execution (E2E) + +**Close-out:** + +- [ ] Verify all acceptance criteria from the spec are met +- [ ] Migrate any long-lived documentation into `docs/` +- [ ] Strip repo-wide references to `projects/mongo-migration-authoring/` +- [ ] Delete `projects/mongo-migration-authoring/` + +--- + +## Collaborators + +| Role | Person/Team | Context | +|---|---|---| +| Maker | TBD | Drives execution | +| Reviewer | TBD | Architectural review — particularly the `Migration` base class design | + +## Open items + +- **Where in the framework should `Migration` live?** It's target-agnostic. Candidates: `@prisma-next/framework-components` (where `MigrationPlanOperation` already lives) or a new package. Decide during M2. +- **Entrypoint detection portability.** Node lacks `import.meta.main`. The fallback needs testing for edge cases (symlinks, path normalization). 
From bc3daf748a5117202e626e12d533de15c43f3c7f Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 21:38:45 +0300 Subject: [PATCH 17/31] fix(mongo-migration): address code review findings F01-F06 - Replace node:path with pathe in E2E and demo test files (F01, F05) - Remove unused collection param from keyFilter helper (F02) - Add optional operationClass override to collMod factory (F03) - Remove factory re-exports from family-mongo/migration (F06) - Split demo migration imports: Migration from family-mongo, factories from target-mongo --- .../20260415_add-posts-author-index/migration.ts | 3 ++- examples/mongo-demo/package.json | 4 +++- examples/mongo-demo/test/manual-migration.test.ts | 2 +- .../20260415_add-product-validation/migration.ts | 3 ++- examples/retail-store/package.json | 4 +++- .../retail-store/test/manual-migration.test.ts | 2 +- .../9-family/src/exports/migration.ts | 8 -------- .../3-mongo-target/1-mongo-target/package.json | 1 + .../src/core/migration-factories.ts | 15 ++++++++++----- .../1-mongo-target/test/migration-e2e.test.ts | 14 +++++++++----- .../test/migration-factories.test.ts | 10 ++++++++++ pnpm-lock.yaml | 15 +++++++++++++++ 12 files changed, 57 insertions(+), 24 deletions(-) diff --git a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts index 8504b162d..9fdc07c87 100644 --- a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts +++ b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts @@ -1,4 +1,5 @@ -import { createIndex, Migration } from '@prisma-next/family-mongo/migration'; +import { Migration } from '@prisma-next/family-mongo/migration'; +import { createIndex } from '@prisma-next/target-mongo/migration'; export default class extends Migration { override describe() { diff --git a/examples/mongo-demo/package.json b/examples/mongo-demo/package.json index 
2141bf20e..a56eaa392 100644 --- a/examples/mongo-demo/package.json +++ b/examples/mongo-demo/package.json @@ -17,9 +17,10 @@ }, "dependencies": { "@prisma-next/adapter-mongo": "workspace:*", + "@prisma-next/target-mongo": "workspace:*", "@prisma-next/contract": "workspace:*", - "@prisma-next/middleware-telemetry": "workspace:*", "@prisma-next/driver-mongo": "workspace:*", + "@prisma-next/middleware-telemetry": "workspace:*", "@prisma-next/mongo-contract": "workspace:*", "@prisma-next/mongo-orm": "workspace:*", "@prisma-next/mongo-pipeline-builder": "workspace:*", @@ -40,6 +41,7 @@ "@types/react-dom": "^19.2.3", "@vitejs/plugin-react-swc": "^4.2.3", "mongodb-memory-server": "catalog:", + "pathe": "^2.0.3", "tsx": "^4.19.2", "typescript": "catalog:", "vite": "catalog:", diff --git a/examples/mongo-demo/test/manual-migration.test.ts b/examples/mongo-demo/test/manual-migration.test.ts index 13e672908..2bbef8d49 100644 --- a/examples/mongo-demo/test/manual-migration.test.ts +++ b/examples/mongo-demo/test/manual-migration.test.ts @@ -1,11 +1,11 @@ import { readFileSync } from 'node:fs'; -import { resolve } from 'node:path'; import { deserializeMongoOps, MongoMigrationRunner } from '@prisma-next/adapter-mongo/control'; import mongoControlDriver from '@prisma-next/driver-mongo/control'; import { Migration } from '@prisma-next/family-mongo/migration'; import { timeouts } from '@prisma-next/test-utils'; import { type Db, MongoClient } from 'mongodb'; import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { resolve } from 'pathe'; import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; const ALL_POLICY = { diff --git a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts index 60cfc1f15..4337530ab 100644 --- a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts +++ 
b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts @@ -1,4 +1,5 @@ -import { collMod, createIndex, Migration } from '@prisma-next/family-mongo/migration'; +import { Migration } from '@prisma-next/family-mongo/migration'; +import { collMod, createIndex } from '@prisma-next/target-mongo/migration'; export default class extends Migration { override describe() { diff --git a/examples/retail-store/package.json b/examples/retail-store/package.json index a5b6bc84a..d5afe08da 100644 --- a/examples/retail-store/package.json +++ b/examples/retail-store/package.json @@ -18,9 +18,10 @@ }, "dependencies": { "@prisma-next/adapter-mongo": "workspace:*", + "@prisma-next/target-mongo": "workspace:*", "@prisma-next/contract": "workspace:*", - "@prisma-next/middleware-telemetry": "workspace:*", "@prisma-next/driver-mongo": "workspace:*", + "@prisma-next/middleware-telemetry": "workspace:*", "@prisma-next/mongo-contract": "workspace:*", "@prisma-next/mongo-orm": "workspace:*", "@prisma-next/mongo-pipeline-builder": "workspace:*", @@ -55,6 +56,7 @@ "@types/react": "^19.2.14", "@types/react-dom": "^19.2.3", "mongodb-memory-server": "catalog:", + "pathe": "^2.0.3", "tsx": "^4.19.2", "typescript": "catalog:", "vitest": "catalog:" diff --git a/examples/retail-store/test/manual-migration.test.ts b/examples/retail-store/test/manual-migration.test.ts index 04c6cc568..d44c91e6b 100644 --- a/examples/retail-store/test/manual-migration.test.ts +++ b/examples/retail-store/test/manual-migration.test.ts @@ -1,10 +1,10 @@ import { readFileSync } from 'node:fs'; -import { resolve } from 'node:path'; import { deserializeMongoOps, MongoMigrationRunner } from '@prisma-next/adapter-mongo/control'; import mongoControlDriver from '@prisma-next/driver-mongo/control'; import { timeouts } from '@prisma-next/test-utils'; import { type Db, MongoClient } from 'mongodb'; import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { resolve } from 'pathe'; import { afterAll, 
beforeAll, beforeEach, describe, expect, it } from 'vitest'; const ALL_POLICY = { diff --git a/packages/2-mongo-family/9-family/src/exports/migration.ts b/packages/2-mongo-family/9-family/src/exports/migration.ts index bac08fa31..915e9bc6f 100644 --- a/packages/2-mongo-family/9-family/src/exports/migration.ts +++ b/packages/2-mongo-family/9-family/src/exports/migration.ts @@ -1,9 +1 @@ -export { - collMod, - createCollection, - createIndex, - dropCollection, - dropIndex, - validatedCollection, -} from '@prisma-next/target-mongo/migration'; export { MongoMigration as Migration } from '../core/mongo-migration'; diff --git a/packages/3-mongo-target/1-mongo-target/package.json b/packages/3-mongo-target/1-mongo-target/package.json index b3dc8e8bc..6aa42c0f8 100644 --- a/packages/3-mongo-target/1-mongo-target/package.json +++ b/packages/3-mongo-target/1-mongo-target/package.json @@ -25,6 +25,7 @@ "@prisma-next/tsconfig": "workspace:*", "@prisma-next/tsdown": "workspace:*", "mongodb-memory-server": "catalog:", + "pathe": "^2.0.3", "tsdown": "catalog:", "typescript": "catalog:", "vitest": "catalog:" diff --git a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts index 5cb583c88..5a1d9d5a9 100644 --- a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts +++ b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts @@ -1,3 +1,4 @@ +import type { MigrationOperationClass } from '@prisma-next/framework-components/control'; import type { MongoIndexKey } from '@prisma-next/mongo-query-ast/control'; import { buildIndexOpId, @@ -26,7 +27,7 @@ function isTextIndex(keys: ReadonlyArray): boolean { return keys.some((k) => k.direction === 'text'); } -function keyFilter(collection: string, keys: ReadonlyArray) { +function keyFilter(keys: ReadonlyArray) { return isTextIndex(keys) ? 
MongoFieldFilter.eq('key._fts', 'text') : MongoFieldFilter.eq('key', keysToKeySpec(keys)); @@ -38,7 +39,7 @@ export function createIndex( options?: CreateIndexOptions, ): MongoMigrationPlanOperation { const name = defaultMongoIndexName(keys); - const filter = keyFilter(collection, keys); + const filter = keyFilter(keys); const fullFilter = options?.unique ? MongoAndExpr.of([filter, MongoFieldFilter.eq('unique', true)]) : filter; @@ -81,7 +82,7 @@ export function dropIndex( keys: ReadonlyArray, ): MongoMigrationPlanOperation { const indexName = defaultMongoIndexName(keys); - const filter = keyFilter(collection, keys); + const filter = keyFilter(keys); return { id: buildIndexOpId('drop', collection, keys), @@ -154,11 +155,15 @@ export function dropCollection(collection: string): MongoMigrationPlanOperation }; } -export function collMod(collection: string, options: CollModOptions): MongoMigrationPlanOperation { +export function collMod( + collection: string, + options: CollModOptions, + overrides?: { operationClass?: MigrationOperationClass }, +): MongoMigrationPlanOperation { return { id: `collMod.${collection}`, label: `Modify collection ${collection}`, - operationClass: 'destructive', + operationClass: overrides?.operationClass ?? 
'destructive', precheck: [], execute: [ { diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts index 1ce0f5dfe..7d5af07fe 100644 --- a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts +++ b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts @@ -1,8 +1,8 @@ import { execFile } from 'node:child_process'; import { mkdtemp, readFile, rm, writeFile } from 'node:fs/promises'; import { tmpdir } from 'node:os'; -import { join, resolve } from 'node:path'; import { promisify } from 'node:util'; +import { join, resolve } from 'pathe'; import { afterEach, beforeEach, describe, expect, it } from 'vitest'; const execFileAsync = promisify(execFile); @@ -11,7 +11,8 @@ const repoRoot = resolve(packageRoot, '../../..'); const tsxPath = join(repoRoot, 'node_modules/.bin/tsx'); const familyMongoRoot = resolve(repoRoot, 'packages/2-mongo-family/9-family'); -const migrationExports = join(familyMongoRoot, 'src/exports/migration.ts').replace(/\\/g, '/'); +const migrationExport = join(familyMongoRoot, 'src/exports/migration.ts').replace(/\\/g, '/'); +const factoryExport = join(packageRoot, 'src/exports/migration.ts').replace(/\\/g, '/'); describe('migration file E2E', () => { let tmpDir: string; @@ -41,7 +42,8 @@ describe('migration file E2E', () => { describe('factory-based migration', () => { const factoryMigration = [ - `import { Migration, createIndex, createCollection } from '${migrationExports}';`, + `import { Migration } from '${migrationExport}';`, + `import { createIndex, createCollection } from '${factoryExport}';`, '', 'export default class extends Migration {', ' plan() {', @@ -94,7 +96,8 @@ describe('migration file E2E', () => { describe('strategy-based migration', () => { const strategyMigration = [ - `import { Migration, validatedCollection } from '${migrationExports}';`, + `import { Migration } from '${migrationExport}';`, + `import { 
validatedCollection } from '${factoryExport}';`, '', 'export default class extends Migration {', ' plan() {', @@ -134,7 +137,8 @@ describe('migration file E2E', () => { describe('serialization format', () => { it('produces JSON that the runner can consume (correct kind discriminants)', async () => { const migration = [ - `import { Migration, createIndex, dropIndex, createCollection, dropCollection, collMod } from '${migrationExports}';`, + `import { Migration } from '${migrationExport}';`, + `import { createIndex, dropIndex, createCollection, dropCollection, collMod } from '${factoryExport}';`, '', 'export default class extends Migration {', ' plan() {', diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts index d873cf2b1..2393b183e 100644 --- a/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts +++ b/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts @@ -309,6 +309,16 @@ describe('collMod', () => { expect(op.precheck).toHaveLength(0); expect(op.postcheck).toHaveLength(0); }); + + it('defaults operationClass to destructive', () => { + const op = collMod('users', { validator: {} }); + expect(op.operationClass).toBe('destructive'); + }); + + it('accepts operationClass override', () => { + const op = collMod('users', { validator: {} }, { operationClass: 'widening' }); + expect(op.operationClass).toBe('widening'); + }); }); describe('serialization round-trip', () => { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e95b9ebea..720f13c14 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -124,6 +124,9 @@ importers: '@prisma-next/mongo-runtime': specifier: workspace:* version: link:../../packages/2-mongo-family/7-runtime + '@prisma-next/target-mongo': + specifier: workspace:* + version: link:../../packages/3-mongo-target/1-mongo-target mongodb: specifier: 'catalog:' version: 6.21.0 @@ -164,6 +167,9 @@ importers: 
mongodb-memory-server: specifier: 'catalog:' version: 10.4.3 + pathe: + specifier: ^2.0.3 + version: 2.0.3 tsx: specifier: ^4.19.2 version: 4.20.6 @@ -339,6 +345,9 @@ importers: '@prisma-next/mongo-value': specifier: workspace:* version: link:../../packages/2-mongo-family/1-foundation/mongo-value + '@prisma-next/target-mongo': + specifier: workspace:* + version: link:../../packages/3-mongo-target/1-mongo-target '@radix-ui/react-dropdown-menu': specifier: ^2.1.16 version: 2.1.16(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -418,6 +427,9 @@ importers: mongodb-memory-server: specifier: 'catalog:' version: 10.4.3 + pathe: + specifier: ^2.0.3 + version: 2.0.3 tsx: specifier: ^4.19.2 version: 4.20.6 @@ -2259,6 +2271,9 @@ importers: mongodb-memory-server: specifier: 'catalog:' version: 10.4.3 + pathe: + specifier: ^2.0.3 + version: 2.0.3 tsdown: specifier: 'catalog:' version: 0.18.4(typescript@5.9.3) From 987f46675f0267c7791b5459a1b8f8e375f3f0e0 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 21:38:56 +0300 Subject: [PATCH 18/31] docs: update spec to reflect implementation decisions - Migration.run(import.meta) -> Migration.run(import.meta.url) - Document describe() and migration.json as optional features - Document collMod operationClass override parameter --- projects/mongo-migration-authoring/spec.md | 25 +++++++++++++++------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/projects/mongo-migration-authoring/spec.md b/projects/mongo-migration-authoring/spec.md index 4aec6b7ec..f97fb203a 100644 --- a/projects/mongo-migration-authoring/spec.md +++ b/projects/mongo-migration-authoring/spec.md @@ -11,7 +11,15 @@ import { Migration, createIndex, createCollection } from "@prisma-next/target-mongo/migration" export default class extends Migration { - plan() { + override describe() { + return { + from: 'sha256:abc123...', + to: 'sha256:def456...', + labels: 
['add-user-email-index'], + } + } + + override plan() { return [ createCollection("users", { validator: { $jsonSchema: { required: ["email"] } }, @@ -22,17 +30,17 @@ export default class extends Migration { } } -Migration.run(import.meta) +Migration.run(import.meta.url) ``` -`node migration.ts` produces `ops.json`. The existing `MongoMigrationRunner` consumes it unchanged. That's the entire authoring workflow. +`node migration.ts` produces `ops.json` and (if `describe()` is implemented) `migration.json`. The existing `MongoMigrationRunner` consumes `ops.json` unchanged. That's the entire authoring workflow. ## How it works The file has two parts: -1. **The class** — exports a `Migration` subclass with a `plan()` method. `plan()` returns an array of `MongoMigrationPlanOperation` objects built by factory functions. -2. **The run line** — `Migration.run(import.meta)` makes the file self-executing. When run directly (`node migration.ts`), it calls `plan()`, serializes the result, and writes `ops.json`. When imported by the CLI or a test, it's a no-op. +1. **The class** — exports a `Migration` subclass with a `plan()` method that returns an array of `MongoMigrationPlanOperation` objects built by factory functions. Optionally overrides `describe()` to provide migration metadata (origin/destination hashes, labels). +2. **The run line** — `Migration.run(import.meta.url)` makes the file self-executing. When run directly (`node migration.ts`), it calls `plan()`, serializes the result, and writes `ops.json`. If `describe()` returns metadata, it also writes `migration.json`. When imported by the CLI or a test, it's a no-op. ## Operation factories @@ -42,7 +50,7 @@ Each factory function produces a single `MongoMigrationPlanOperation` — a plai - `dropIndex(collection, keys)` — removes an index with a precheck that it exists - `createCollection(collection, options?)` — creates a collection with optional validator, collation, capped settings, etc. 
- `dropCollection(collection)` — drops a collection -- `collMod(collection, options)` — modifies collection options (validator, changeStreamPreAndPostImages, etc.) +- `collMod(collection, options, overrides?)` — modifies collection options (validator, changeStreamPreAndPostImages, etc.). Accepts an optional `overrides` parameter with `operationClass` to classify the operation as `widening` or `destructive` (defaults to `destructive`). The factories produce the same output as the existing `MongoMigrationPlanner`. The runner cannot distinguish between planner-generated and hand-authored operations. @@ -104,7 +112,8 @@ The pattern is designed to generalize to SQL migrations, where factory functions - Factory functions for each Mongo DDL operation (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod`) that produce `MongoMigrationPlanOperation` objects with correct prechecks, commands, and postchecks. Factory functions and planner are co-located in `packages/3-mongo-target/1-mongo-target`, exported from `@prisma-next/target-mongo/migration`. 
- A `Migration` base class with: - An abstract `plan()` method returning `MongoMigrationPlanOperation[]` - - A static `Migration.run(import.meta)` method that handles self-execution (entrypoint detection, serialization, file writing) + - An optional `describe()` method returning `MigrationMeta` (origin/destination hashes, kind, labels) for `migration.json` generation + - A static `Migration.run(import.meta.url)` method that handles self-execution (entrypoint detection, serialization, file writing) - `--dry-run` flag support (print operations without writing) - `--help` flag support - At least one compound strategy function demonstrating composition of multiple factories @@ -134,7 +143,7 @@ The pattern is designed to generalize to SQL migrations, where factory functions ## Importing a migration -- [ ] When imported (not run directly), `Migration.run(import.meta)` is a no-op +- [ ] When imported (not run directly), `Migration.run(import.meta.url)` is a no-op - [ ] The default export class can be instantiated and `plan()` called directly (for CLI and test use) ## Operation correctness From 2a174b7ce472986eb466416dc313a1407a049d0c Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 21:47:07 +0300 Subject: [PATCH 19/31] refactor(mongo-migration): replace collMod with setValidation, merge strategies - Remove thin collMod wrapper; add setValidation factory with schema-level API - Move validatedCollection from separate strategies file into factories - Delete migration-strategies.ts and its test file - Update README to remove atomic/non-atomic differentiation - Update demo migrations and all tests to use new API --- .../migration.json | 2 +- .../migration.ts | 26 ++--- .../20260415_add-product-validation/ops.json | 6 +- .../test/manual-migration.test.ts | 2 +- .../3-mongo-target/1-mongo-target/README.md | 17 +-- .../src/core/migration-factories.ts | 37 ++++-- .../src/core/migration-strategies.ts | 20 ---- .../1-mongo-target/src/exports/migration.ts | 4 +- 
.../1-mongo-target/test/migration-e2e.test.ts | 4 +- .../test/migration-factories.test.ts | 105 ++++++++++++------ .../test/migration-strategies.test.ts | 67 ----------- .../mongo/migration-authoring-e2e.test.ts | 19 +--- 12 files changed, 129 insertions(+), 180 deletions(-) delete mode 100644 packages/3-mongo-target/1-mongo-target/src/core/migration-strategies.ts delete mode 100644 packages/3-mongo-target/1-mongo-target/test/migration-strategies.test.ts diff --git a/examples/retail-store/migrations/20260415_add-product-validation/migration.json b/examples/retail-store/migrations/20260415_add-product-validation/migration.json index f42f6470e..2ed42e273 100644 --- a/examples/retail-store/migrations/20260415_add-product-validation/migration.json +++ b/examples/retail-store/migrations/20260415_add-product-validation/migration.json @@ -4,5 +4,5 @@ "to": "sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d", "kind": "regular", "labels": ["add-product-validation"], - "createdAt": "2026-04-15T17:19:07.772Z" + "createdAt": "2026-04-15T18:46:18.776Z" } diff --git a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts index 4337530ab..7f1a8c2de 100644 --- a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts +++ b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts @@ -1,5 +1,5 @@ import { Migration } from '@prisma-next/family-mongo/migration'; -import { collMod, createIndex } from '@prisma-next/target-mongo/migration'; +import { createIndex, setValidation } from '@prisma-next/target-mongo/migration'; export default class extends Migration { override describe() { @@ -12,21 +12,19 @@ export default class extends Migration { override plan() { return [ - collMod('products', { - validator: { - $jsonSchema: { - bsonType: 'object', - required: ['name', 'price', 'category'], - properties: { - name: { 
bsonType: 'string' }, - price: { bsonType: 'number', minimum: 0 }, - category: { bsonType: 'string' }, - }, + setValidation( + 'products', + { + bsonType: 'object', + required: ['name', 'price', 'category'], + properties: { + name: { bsonType: 'string' }, + price: { bsonType: 'number', minimum: 0 }, + category: { bsonType: 'string' }, }, }, - validationLevel: 'moderate', - validationAction: 'warn', - }), + { validationLevel: 'moderate', validationAction: 'warn' }, + ), createIndex('products', [ { field: 'category', direction: 1 }, { field: 'price', direction: 1 }, diff --git a/examples/retail-store/migrations/20260415_add-product-validation/ops.json b/examples/retail-store/migrations/20260415_add-product-validation/ops.json index 83836be00..f3a945173 100644 --- a/examples/retail-store/migrations/20260415_add-product-validation/ops.json +++ b/examples/retail-store/migrations/20260415_add-product-validation/ops.json @@ -1,12 +1,12 @@ [ { - "id": "collMod.products", - "label": "Modify collection products", + "id": "collection.products.setValidation", + "label": "Set validation on products", "operationClass": "destructive", "precheck": [], "execute": [ { - "description": "modify collection products", + "description": "set validation on products", "command": { "kind": "collMod", "collection": "products", diff --git a/examples/retail-store/test/manual-migration.test.ts b/examples/retail-store/test/manual-migration.test.ts index d44c91e6b..73e71617b 100644 --- a/examples/retail-store/test/manual-migration.test.ts +++ b/examples/retail-store/test/manual-migration.test.ts @@ -53,7 +53,7 @@ describe( const ops = instance.plan(); expect(ops).toHaveLength(2); - expect(ops[0].id).toBe('collMod.products'); + expect(ops[0].id).toBe('collection.products.setValidation'); expect(ops[1].id).toContain('index.products.create'); }); diff --git a/packages/3-mongo-target/1-mongo-target/README.md b/packages/3-mongo-target/1-mongo-target/README.md index c407e6e24..42995decb 100644 --- 
a/packages/3-mongo-target/1-mongo-target/README.md +++ b/packages/3-mongo-target/1-mongo-target/README.md @@ -7,13 +7,13 @@ MongoDB target pack for Prisma Next. - **Target pack assembly**: Exports the MongoDB target pack for authoring and family composition - **Target metadata**: Defines the stable Mongo target identity (`kind`, `familyId`, `targetId`, `version`, `capabilities`) - **Codec type surface**: Exposes the base Mongo codec type map used by authoring-time type composition -- **Migration factories and strategies**: Atomic factory functions and compound strategies for MongoDB migration operations +- **Migration operation factories**: Factory functions for MongoDB migration operations ## Entrypoints - `./pack`: pure target pack ref used by `@prisma-next/family-mongo` and `@prisma-next/mongo-contract-ts` - `./codec-types`: base Mongo codec type map -- `./migration`: factory functions and strategies (the `Migration` base class is in `@prisma-next/family-mongo/migration`) +- `./migration`: factory functions (the `Migration` base class is in `@prisma-next/family-mongo/migration`) ## Usage @@ -32,11 +32,9 @@ const contract = defineContract({ ### Migration authoring -Import from `@prisma-next/family-mongo/migration` for the full authoring experience (Migration class + factories + strategies): - ```typescript -import { Migration, createIndex, createCollection } - from "@prisma-next/family-mongo/migration" +import { Migration } from '@prisma-next/family-mongo/migration'; +import { createIndex, createCollection } from '@prisma-next/target-mongo/migration'; export default class extends Migration { plan() { @@ -53,7 +51,7 @@ export default class extends Migration { Migration.run(import.meta.url) ``` -Run `node migration.ts` to produce `ops.json` and `migration.json` (when `describe()` is implemented). Use `--dry-run` to preview without writing. +Run `tsx migration.ts` to produce `ops.json` and `migration.json` (when `describe()` is implemented). 
Use `--dry-run` to preview without writing. ### Available factories @@ -61,8 +59,5 @@ Run `node migration.ts` to produce `ops.json` and `migration.json` (when `descri - `dropIndex(collection, keys)` — drop an index - `createCollection(collection, options?)` — create a collection - `dropCollection(collection)` — drop a collection -- `collMod(collection, options)` — modify collection options - -### Strategies - +- `setValidation(collection, schema, options?)` — set document validation on a collection - `validatedCollection(name, schema, indexes)` — create a collection with a JSON Schema validator and indexes diff --git a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts index 5a1d9d5a9..0c34d2604 100644 --- a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts +++ b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts @@ -1,9 +1,7 @@ -import type { MigrationOperationClass } from '@prisma-next/framework-components/control'; import type { MongoIndexKey } from '@prisma-next/mongo-query-ast/control'; import { buildIndexOpId, CollModCommand, - type CollModOptions, CreateCollectionCommand, type CreateCollectionOptions, CreateIndexCommand, @@ -155,22 +153,41 @@ export function dropCollection(collection: string): MongoMigrationPlanOperation }; } -export function collMod( +export function setValidation( collection: string, - options: CollModOptions, - overrides?: { operationClass?: MigrationOperationClass }, + schema: Record, + options?: { validationLevel?: string; validationAction?: string }, ): MongoMigrationPlanOperation { return { - id: `collMod.${collection}`, - label: `Modify collection ${collection}`, - operationClass: overrides?.operationClass ?? 
'destructive', + id: `collection.${collection}.setValidation`, + label: `Set validation on ${collection}`, + operationClass: 'destructive', precheck: [], execute: [ { - description: `modify collection ${collection}`, - command: new CollModCommand(collection, options), + description: `set validation on ${collection}`, + command: new CollModCommand(collection, { + validator: { $jsonSchema: schema }, + validationLevel: options?.validationLevel, + validationAction: options?.validationAction, + }), }, ], postcheck: [], }; } + +export function validatedCollection( + name: string, + schema: Record, + indexes: ReadonlyArray<{ keys: MongoIndexKey[]; unique?: boolean }>, +): MongoMigrationPlanOperation[] { + return [ + createCollection(name, { + validator: { $jsonSchema: schema }, + validationLevel: 'strict', + validationAction: 'error', + }), + ...indexes.map((idx) => createIndex(name, idx.keys, { unique: idx.unique })), + ]; +} diff --git a/packages/3-mongo-target/1-mongo-target/src/core/migration-strategies.ts b/packages/3-mongo-target/1-mongo-target/src/core/migration-strategies.ts deleted file mode 100644 index 8e5112954..000000000 --- a/packages/3-mongo-target/1-mongo-target/src/core/migration-strategies.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { - MongoIndexKey, - MongoMigrationPlanOperation, -} from '@prisma-next/mongo-query-ast/control'; -import { createCollection, createIndex } from './migration-factories'; - -export function validatedCollection( - name: string, - schema: Record, - indexes: ReadonlyArray<{ keys: MongoIndexKey[]; unique?: boolean }>, -): MongoMigrationPlanOperation[] { - return [ - createCollection(name, { - validator: { $jsonSchema: schema }, - validationLevel: 'strict', - validationAction: 'error', - }), - ...indexes.map((idx) => createIndex(name, idx.keys, { unique: idx.unique })), - ]; -} diff --git a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts index 
11df525b7..65a0c2412 100644 --- a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts +++ b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts @@ -1,8 +1,8 @@ export { - collMod, createCollection, createIndex, dropCollection, dropIndex, + setValidation, + validatedCollection, } from '../core/migration-factories'; -export { validatedCollection } from '../core/migration-strategies'; diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts index 7d5af07fe..4180320c5 100644 --- a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts +++ b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts @@ -138,14 +138,14 @@ describe('migration file E2E', () => { it('produces JSON that the runner can consume (correct kind discriminants)', async () => { const migration = [ `import { Migration } from '${migrationExport}';`, - `import { createIndex, dropIndex, createCollection, dropCollection, collMod } from '${factoryExport}';`, + `import { createIndex, dropIndex, createCollection, dropCollection, setValidation } from '${factoryExport}';`, '', 'export default class extends Migration {', ' plan() {', ' return [', ' createCollection("users"),', ' createIndex("users", [{ field: "email", direction: 1 }]),', - ' collMod("users", { validator: { $jsonSchema: { required: ["email"] } } }),', + ' setValidation("users", { required: ["email"] }),', ' dropIndex("users", [{ field: "email", direction: 1 }]),', ' dropCollection("users"),', ' ];', diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts index 2393b183e..9ec6bdbe8 100644 --- a/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts +++ b/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts @@ -15,11 +15,12 @@ import { } from 
'@prisma-next/mongo-query-ast/control'; import { describe, expect, it } from 'vitest'; import { - collMod, createCollection, createIndex, dropCollection, dropIndex, + setValidation, + validatedCollection, } from '../src/core/migration-factories'; describe('createIndex', () => { @@ -272,52 +273,95 @@ describe('dropCollection', () => { }); }); -describe('collMod', () => { +describe('setValidation', () => { it('produces correct operation structure', () => { - const op = collMod('users', { - validator: { $jsonSchema: { required: ['email'] } }, - validationLevel: 'strict', - }); + const op = setValidation('users', { required: ['email'] }); - expect(op.id).toBe('collMod.users'); - expect(op.label).toBe('Modify collection users'); + expect(op.id).toBe('collection.users.setValidation'); + expect(op.label).toBe('Set validation on users'); expect(op.operationClass).toBe('destructive'); }); - it('includes execute with CollModCommand', () => { - const op = collMod('users', { - validator: { $jsonSchema: { required: ['email'] } }, - }); + it('wraps schema in $jsonSchema validator', () => { + const schema = { required: ['email'], properties: { email: { bsonType: 'string' } } }; + const op = setValidation('users', schema); const cmd = op.execute[0]!.command as CollModCommand; expect(cmd).toBeInstanceOf(CollModCommand); expect(cmd.collection).toBe('users'); - expect(cmd.validator).toEqual({ $jsonSchema: { required: ['email'] } }); + expect(cmd.validator).toEqual({ $jsonSchema: schema }); }); - it('passes through changeStreamPreAndPostImages', () => { - const op = collMod('users', { - changeStreamPreAndPostImages: { enabled: true }, - }); + it('passes through validationLevel and validationAction', () => { + const op = setValidation( + 'users', + { required: ['email'] }, + { + validationLevel: 'moderate', + validationAction: 'warn', + }, + ); const cmd = op.execute[0]!.command as CollModCommand; - expect(cmd.changeStreamPreAndPostImages).toEqual({ enabled: true }); + 
expect(cmd.validationLevel).toBe('moderate'); + expect(cmd.validationAction).toBe('warn'); }); it('has empty precheck and postcheck', () => { - const op = collMod('users', { validator: {} }); + const op = setValidation('users', { required: ['email'] }); expect(op.precheck).toHaveLength(0); expect(op.postcheck).toHaveLength(0); }); - it('defaults operationClass to destructive', () => { - const op = collMod('users', { validator: {} }); - expect(op.operationClass).toBe('destructive'); + it('round-trips through JSON', () => { + const op = setValidation('users', { required: ['email'] }, { validationLevel: 'strict' }); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.execute[0].command.kind).toBe('collMod'); + expect(json.execute[0].command.validator).toEqual({ + $jsonSchema: { required: ['email'] }, + }); }); +}); + +describe('validatedCollection', () => { + it('creates collection with schema validation', () => { + const ops = validatedCollection('users', { required: ['email'] }, []); - it('accepts operationClass override', () => { - const op = collMod('users', { validator: {} }, { operationClass: 'widening' }); - expect(op.operationClass).toBe('widening'); + expect(ops).toHaveLength(1); + expect(ops[0]!.id).toBe('collection.users.create'); + + const cmd = ops[0]!.execute[0]!.command as CreateCollectionCommand; + expect(cmd.validator).toEqual({ $jsonSchema: { required: ['email'] } }); + expect(cmd.validationLevel).toBe('strict'); + expect(cmd.validationAction).toBe('error'); + }); + + it('includes indexes after collection creation', () => { + const ops = validatedCollection('users', { required: ['email'] }, [ + { keys: [{ field: 'email', direction: 1 }], unique: true }, + { keys: [{ field: 'name', direction: 1 }] }, + ]); + + expect(ops).toHaveLength(3); + expect(ops[0]!.id).toBe('collection.users.create'); + + const idx1 = ops[1]!.execute[0]!.command as CreateIndexCommand; + expect(idx1.collection).toBe('users'); + expect(idx1.unique).toBe(true); + + 
const idx2 = ops[2]!.execute[0]!.command as CreateIndexCommand; + expect(idx2.collection).toBe('users'); + expect(idx2.unique).toBeUndefined(); + }); + + it('returns flat array of operations', () => { + const ops = validatedCollection('users', { required: ['email'] }, [ + { keys: [{ field: 'email', direction: 1 }] }, + ]); + + expect(Array.isArray(ops)).toBe(true); + expect(ops.every((op) => 'id' in op && 'execute' in op)).toBe(true); }); }); @@ -361,17 +405,6 @@ describe('serialization round-trip', () => { expect(json.execute[0].command.kind).toBe('dropCollection'); }); - it('collMod round-trips through JSON', () => { - const op = collMod('users', { - validator: { $jsonSchema: { required: ['email'] } }, - validationLevel: 'strict', - }); - const json = JSON.parse(JSON.stringify(op)); - - expect(json.execute[0].command.kind).toBe('collMod'); - expect(json.execute[0].command.validationLevel).toBe('strict'); - }); - it('factory output matches planner-equivalent createIndex structure', () => { const keys = [{ field: 'email', direction: 1 as const }]; const factoryOp = createIndex('users', keys, { unique: true }); diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-strategies.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-strategies.test.ts deleted file mode 100644 index f31ff2254..000000000 --- a/packages/3-mongo-target/1-mongo-target/test/migration-strategies.test.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { CreateCollectionCommand, CreateIndexCommand } from '@prisma-next/mongo-query-ast/control'; -import { describe, expect, it } from 'vitest'; -import { validatedCollection } from '../src/core/migration-strategies'; - -describe('validatedCollection', () => { - it('returns a createCollection op followed by createIndex ops', () => { - const ops = validatedCollection('users', { required: ['email', 'name'] }, [ - { keys: [{ field: 'email', direction: 1 }], unique: true }, - { keys: [{ field: 'name', direction: 1 }] }, - ]); - - 
expect(ops).toHaveLength(3); - expect(ops[0]!.id).toBe('collection.users.create'); - expect(ops[1]!.id).toContain('index.users.create'); - expect(ops[2]!.id).toContain('index.users.create'); - }); - - it('produces correct createCollection with validator', () => { - const ops = validatedCollection('users', { required: ['email'] }, []); - - expect(ops).toHaveLength(1); - const cmd = ops[0]!.execute[0]!.command as CreateCollectionCommand; - expect(cmd).toBeInstanceOf(CreateCollectionCommand); - expect(cmd.validator).toEqual({ $jsonSchema: { required: ['email'] } }); - expect(cmd.validationLevel).toBe('strict'); - expect(cmd.validationAction).toBe('error'); - }); - - it('passes index options through to createIndex', () => { - const ops = validatedCollection('users', { required: ['email'] }, [ - { keys: [{ field: 'email', direction: 1 }], unique: true }, - ]); - - const indexCmd = ops[1]!.execute[0]!.command as CreateIndexCommand; - expect(indexCmd).toBeInstanceOf(CreateIndexCommand); - expect(indexCmd.unique).toBe(true); - }); - - it('returns a flat array', () => { - const ops = validatedCollection('users', { required: ['email'] }, [ - { keys: [{ field: 'email', direction: 1 }] }, - ]); - - expect(Array.isArray(ops)).toBe(true); - for (const op of ops) { - expect(op).toHaveProperty('id'); - expect(op).toHaveProperty('execute'); - } - }); - - it('handles empty indexes array', () => { - const ops = validatedCollection('users', { required: ['email'] }, []); - - expect(ops).toHaveLength(1); - expect(ops[0]!.id).toBe('collection.users.create'); - }); - - it('all operations are additive', () => { - const ops = validatedCollection('users', { required: ['email'] }, [ - { keys: [{ field: 'email', direction: 1 }] }, - ]); - - for (const op of ops) { - expect(op.operationClass).toBe('additive'); - } - }); -}); diff --git a/test/integration/test/mongo/migration-authoring-e2e.test.ts b/test/integration/test/mongo/migration-authoring-e2e.test.ts index 1e2326051..7b6e18a1a 100644 --- 
a/test/integration/test/mongo/migration-authoring-e2e.test.ts +++ b/test/integration/test/mongo/migration-authoring-e2e.test.ts @@ -6,11 +6,11 @@ import { import mongoControlDriver from '@prisma-next/driver-mongo/control'; import type { MongoMigrationPlanOperation } from '@prisma-next/mongo-query-ast/control'; import { - collMod, createCollection, createIndex, dropCollection, dropIndex, + setValidation, validatedCollection, } from '@prisma-next/target-mongo/migration'; import { timeouts } from '@prisma-next/test-utils'; @@ -167,14 +167,11 @@ describe( }); }); - describe('collMod', () => { + describe('setValidation', () => { it('modifies collection validation', async () => { await db.createCollection('users'); const ops = [ - collMod('users', { - validator: { $jsonSchema: { required: ['email', 'name'] } }, - validationLevel: 'strict', - }), + setValidation('users', { required: ['email', 'name'] }, { validationLevel: 'strict' }), ]; const result = await runOps(ops); expect(result.operationsExecuted).toBe(1); @@ -196,7 +193,7 @@ describe( }), createIndex('users', [{ field: 'email', direction: 1 as const }], { unique: true }), dropIndex('users', [{ field: 'email', direction: 1 as const }]), - collMod('users', { validator: { $jsonSchema: { required: ['email', 'name'] } } }), + setValidation('users', { required: ['email', 'name'] }), dropCollection('users'), ]; @@ -235,7 +232,7 @@ describe( }); }); - describe('validatedCollection strategy', () => { + describe('validatedCollection', () => { it('creates collection with schema validation and indexes', async () => { const ops = validatedCollection('users', { required: ['email', 'name'] }, [ { keys: [{ field: 'email', direction: 1 }], unique: true }, @@ -297,11 +294,7 @@ describe( true, ); - const step2 = [ - collMod('users', { - validator: { $jsonSchema: { required: ['email', 'name'] } }, - }), - ]; + const step2 = [setValidation('users', { required: ['email', 'name'] })]; const serialized2 = 
JSON.parse(serializeMongoOps(step2)); const controlDriver2 = await mongoControlDriver.create(replSet.getUri(dbName)); From be9e5960a12acc7e58f4a1c119f1ed3cb48708f9 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 21:48:24 +0300 Subject: [PATCH 20/31] fix(migration): improve Migration.run() API and add describe() validation - Rewrite doc comment to clearly explain entrypoint guard behavior - Rename executeMigration to serializeMigration (it serializes, not executes) - Update help text from node to tsx - Validate describe() return with arktype before building manifest --- .../3-tooling/migration/src/migration-base.ts | 40 ++++++++++++++----- .../migration/test/migration-base.test.ts | 10 +++++ 2 files changed, 40 insertions(+), 10 deletions(-) diff --git a/packages/1-framework/3-tooling/migration/src/migration-base.ts b/packages/1-framework/3-tooling/migration/src/migration-base.ts index 642a2abd1..3c5a0f404 100644 --- a/packages/1-framework/3-tooling/migration/src/migration-base.ts +++ b/packages/1-framework/3-tooling/migration/src/migration-base.ts @@ -1,5 +1,6 @@ import { realpathSync, writeFileSync } from 'node:fs'; import { fileURLToPath } from 'node:url'; +import { type } from 'arktype'; import { dirname, join } from 'pathe'; export interface MigrationMeta { @@ -9,6 +10,13 @@ export interface MigrationMeta { readonly labels?: readonly string[]; } +const MigrationMetaSchema = type({ + from: 'string', + to: 'string', + 'kind?': "'regular' | 'baseline'", + 'labels?': 'string[]', +}); + export abstract class Migration { abstract plan(): TOperation[]; @@ -17,12 +25,15 @@ export abstract class Migration { } /** - * Makes the migration file self-executing. Call at module scope: + * Entrypoint guard for migration files. When called at module scope, + * detects whether the file is being run directly (e.g. `tsx migration.ts`) + * and if so, serializes the migration plan to `ops.json` (and optionally + * `migration.json`) in the same directory. 
When the file is imported by + * another module, this is a no-op. * - * Migration.run(import.meta.url) + * Usage (at module scope, after the class definition): * - * When the file is the entrypoint, calls plan(), serializes, and writes ops.json. - * When imported by another module, this is a no-op. + * Migration.run(import.meta.url) */ static run(importMetaUrl: string): void { if (!importMetaUrl) return; @@ -49,7 +60,7 @@ export abstract class Migration { const dryRun = args.includes('--dry-run'); const migrationDir = dirname(metaFilename); - executeMigration(importMetaUrl, migrationDir, dryRun).catch((err) => { + serializeMigration(importMetaUrl, migrationDir, dryRun).catch((err) => { process.stderr.write(`${err instanceof Error ? err.message : String(err)}\n`); process.exitCode = 1; }); @@ -59,7 +70,7 @@ export abstract class Migration { function printHelp(): void { process.stdout.write( [ - 'Usage: node [options]', + 'Usage: tsx [options]', '', 'Options:', ' --dry-run Print operations to stdout without writing files', @@ -80,7 +91,7 @@ function buildManifest(meta: MigrationMeta): Record { }; } -async function executeMigration( +async function serializeMigration( fileUrl: string, migrationDir: string, dryRun: boolean, @@ -105,9 +116,18 @@ async function executeMigration( } const serializedOps = JSON.stringify(ops, null, 2); - const meta: MigrationMeta | undefined = - typeof instance.describe === 'function' ? instance.describe() : undefined; - const manifest = meta ? 
buildManifest(meta) : undefined; + + let manifest: Record | undefined; + if (typeof instance.describe === 'function') { + const rawMeta: unknown = instance.describe(); + if (rawMeta !== undefined) { + const parsed = MigrationMetaSchema(rawMeta); + if (parsed instanceof type.errors) { + throw new Error(`describe() returned invalid metadata: ${parsed.summary}`); + } + manifest = buildManifest(parsed); + } + } if (dryRun) { if (manifest) { diff --git a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts index afce9bb54..c26c8c842 100644 --- a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts +++ b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts @@ -226,6 +226,16 @@ describe('Migration.run() subprocess', () => { expect(manifest.labels).toEqual([]); }); + it('rejects invalid describe() return with clear error', async () => { + const script = migrationWithDescribe('{ bad: true }', '[{ id: "op1" }]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).not.toBe(0); + expect(result.stderr).toContain('describe()'); + expect(result.stderr).toContain('invalid'); + }); + it('includes migration.json content in --dry-run output', async () => { const script = migrationWithDescribe( '{ from: "abc", to: "def", labels: ["test"] }', From 3c022339a7b57363aae6f740b6970cc49735d4b1 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 21:49:50 +0300 Subject: [PATCH 21/31] fix(demo-tests): use static imports and load artifacts from disk - Replace dynamic await import() with static imports for migration classes - Load migration.json from disk to verify structure - Remove unused Migration import from mongo-demo test --- .../mongo-demo/test/manual-migration.test.ts | 35 +++++++------------ .../test/manual-migration.test.ts | 33 +++++++---------- 2 files changed, 24 
insertions(+), 44 deletions(-) diff --git a/examples/mongo-demo/test/manual-migration.test.ts b/examples/mongo-demo/test/manual-migration.test.ts index 2bbef8d49..492c2b7bf 100644 --- a/examples/mongo-demo/test/manual-migration.test.ts +++ b/examples/mongo-demo/test/manual-migration.test.ts @@ -1,12 +1,12 @@ import { readFileSync } from 'node:fs'; import { deserializeMongoOps, MongoMigrationRunner } from '@prisma-next/adapter-mongo/control'; import mongoControlDriver from '@prisma-next/driver-mongo/control'; -import { Migration } from '@prisma-next/family-mongo/migration'; import { timeouts } from '@prisma-next/test-utils'; import { type Db, MongoClient } from 'mongodb'; import { MongoMemoryReplSet } from 'mongodb-memory-server'; import { resolve } from 'pathe'; import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import AddPostsAuthorIndex from '../migrations/20260415_add-posts-author-index/migration'; const ALL_POLICY = { allowedOperationClasses: ['additive', 'widening', 'destructive'] as const, @@ -48,22 +48,23 @@ describe( } }, timeouts.spinUpMongoMemoryServer); - it('migration.ts can be imported and plan() called directly', async () => { - const mod = await import('../migrations/20260415_add-posts-author-index/migration.ts'); - const MigrationClass = mod.default; - const instance = new MigrationClass(); - + it('migration class can be imported and plan() called directly', () => { + const instance = new AddPostsAuthorIndex(); const ops = instance.plan(); expect(ops).toHaveLength(2); expect(ops[0].id).toBe('index.posts.create(authorId:1)'); expect(ops[1].id).toBe('index.posts.create(createdAt:-1,authorId:1)'); }); - it('migration.ts describe() returns correct metadata', async () => { - const mod = await import('../migrations/20260415_add-posts-author-index/migration.ts'); - const instance = new mod.default(); - const meta = instance.describe(); - expect(meta.labels).toEqual(['add-posts-author-index']); + it('migration.json has expected 
structure', () => { + const manifest = JSON.parse(readFileSync(resolve(migrationDir, 'migration.json'), 'utf-8')); + + expect(manifest.migrationId).toBeNull(); + expect(manifest.kind).toBe('regular'); + expect(manifest.labels).toEqual(['add-posts-author-index']); + expect(manifest.from).toMatch(/^sha256:/); + expect(manifest.to).toMatch(/^sha256:/); + expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); }); it('ops.json deserializes and applies against real MongoDB', async () => { @@ -116,17 +117,5 @@ describe( await controlDriver.close(); } }); - - it('migration.json exists and has expected structure', () => { - const manifestJson = readFileSync(resolve(migrationDir, 'migration.json'), 'utf-8'); - const manifest = JSON.parse(manifestJson); - - expect(manifest.migrationId).toBeNull(); - expect(manifest.kind).toBe('regular'); - expect(manifest.labels).toEqual(['add-posts-author-index']); - expect(manifest.from).toMatch(/^sha256:/); - expect(manifest.to).toMatch(/^sha256:/); - expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); - }); }, ); diff --git a/examples/retail-store/test/manual-migration.test.ts b/examples/retail-store/test/manual-migration.test.ts index 73e71617b..deb0cab33 100644 --- a/examples/retail-store/test/manual-migration.test.ts +++ b/examples/retail-store/test/manual-migration.test.ts @@ -6,6 +6,7 @@ import { type Db, MongoClient } from 'mongodb'; import { MongoMemoryReplSet } from 'mongodb-memory-server'; import { resolve } from 'pathe'; import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import AddProductValidation from '../migrations/20260415_add-product-validation/migration'; const ALL_POLICY = { allowedOperationClasses: ['additive', 'widening', 'destructive'] as const, @@ -47,21 +48,23 @@ describe( } }, timeouts.spinUpMongoMemoryServer); - it('migration.ts can be imported and plan() called directly', async () => { - const mod = await import('../migrations/20260415_add-product-validation/migration.ts'); 
- const instance = new mod.default(); - + it('migration class can be imported and plan() called directly', () => { + const instance = new AddProductValidation(); const ops = instance.plan(); expect(ops).toHaveLength(2); expect(ops[0].id).toBe('collection.products.setValidation'); expect(ops[1].id).toContain('index.products.create'); }); - it('migration.ts describe() returns correct metadata', async () => { - const mod = await import('../migrations/20260415_add-product-validation/migration.ts'); - const instance = new mod.default(); - const meta = instance.describe(); - expect(meta.labels).toEqual(['add-product-validation']); + it('migration.json has expected structure', () => { + const manifest = JSON.parse(readFileSync(resolve(migrationDir, 'migration.json'), 'utf-8')); + + expect(manifest.migrationId).toBeNull(); + expect(manifest.kind).toBe('regular'); + expect(manifest.labels).toEqual(['add-product-validation']); + expect(manifest.from).toMatch(/^sha256:/); + expect(manifest.to).toMatch(/^sha256:/); + expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); }); it('ops.json deserializes and applies against real MongoDB', async () => { @@ -109,17 +112,5 @@ describe( await controlDriver.close(); } }); - - it('migration.json exists and has expected structure', () => { - const manifestJson = readFileSync(resolve(migrationDir, 'migration.json'), 'utf-8'); - const manifest = JSON.parse(manifestJson); - - expect(manifest.migrationId).toBeNull(); - expect(manifest.kind).toBe('regular'); - expect(manifest.labels).toEqual(['add-product-validation']); - expect(manifest.from).toMatch(/^sha256:/); - expect(manifest.to).toMatch(/^sha256:/); - expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); - }); }, ); From 1a5a5275790e6a88c60fde925971d29b1a835fdf Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 21:55:25 +0300 Subject: [PATCH 22/31] refactor(migration): pass class constructor to Migration.run() instead of re-importing Eliminates the dynamic 
import() in serializeMigration by accepting the migration class directly. This makes the dependency graph static and the serialization fully synchronous. API change: Migration.run(import.meta.url, MyMigration) --- .../migration.ts | 5 ++-- .../migration.ts | 5 ++-- .../3-tooling/migration/src/migration-base.ts | 29 +++++++------------ .../migration/test/migration-base.test.ts | 14 +++++---- .../1-mongo-target/test/migration-e2e.test.ts | 15 ++++++---- 5 files changed, 34 insertions(+), 34 deletions(-) diff --git a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts index 9fdc07c87..fd99122bd 100644 --- a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts +++ b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts @@ -1,7 +1,7 @@ import { Migration } from '@prisma-next/family-mongo/migration'; import { createIndex } from '@prisma-next/target-mongo/migration'; -export default class extends Migration { +class AddPostsAuthorIndex extends Migration { override describe() { return { from: 'sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe', @@ -21,4 +21,5 @@ export default class extends Migration { } } -Migration.run(import.meta.url); +export default AddPostsAuthorIndex; +Migration.run(import.meta.url, AddPostsAuthorIndex); diff --git a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts index 7f1a8c2de..2693013c7 100644 --- a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts +++ b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts @@ -1,7 +1,7 @@ import { Migration } from '@prisma-next/family-mongo/migration'; import { createIndex, setValidation } from '@prisma-next/target-mongo/migration'; -export default class extends Migration { +class 
AddProductValidation extends Migration { override describe() { return { from: 'sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d', @@ -33,4 +33,5 @@ export default class extends Migration { } } -Migration.run(import.meta.url); +export default AddProductValidation; +Migration.run(import.meta.url, AddProductValidation); diff --git a/packages/1-framework/3-tooling/migration/src/migration-base.ts b/packages/1-framework/3-tooling/migration/src/migration-base.ts index 3c5a0f404..918a8b6ab 100644 --- a/packages/1-framework/3-tooling/migration/src/migration-base.ts +++ b/packages/1-framework/3-tooling/migration/src/migration-base.ts @@ -33,9 +33,11 @@ export abstract class Migration { * * Usage (at module scope, after the class definition): * - * Migration.run(import.meta.url) + * class MyMigration extends Migration { ... } + * export default MyMigration; + * Migration.run(import.meta.url, MyMigration); */ - static run(importMetaUrl: string): void { + static run(importMetaUrl: string, MigrationClass: new () => Migration): void { if (!importMetaUrl) return; const metaFilename = fileURLToPath(importMetaUrl); @@ -60,10 +62,12 @@ export abstract class Migration { const dryRun = args.includes('--dry-run'); const migrationDir = dirname(metaFilename); - serializeMigration(importMetaUrl, migrationDir, dryRun).catch((err) => { + try { + serializeMigration(MigrationClass, migrationDir, dryRun); + } catch (err) { process.stderr.write(`${err instanceof Error ? 
err.message : String(err)}\n`); process.exitCode = 1; - }); + } } } @@ -91,24 +95,13 @@ function buildManifest(meta: MigrationMeta): Record { }; } -async function serializeMigration( - fileUrl: string, +function serializeMigration( + MigrationClass: new () => Migration, migrationDir: string, dryRun: boolean, -): Promise { - const mod = await import(fileUrl); - const MigrationClass = mod.default; - - if (!MigrationClass || typeof MigrationClass !== 'function') { - throw new Error('Migration file must have a default export class'); - } - +): void { const instance = new MigrationClass(); - if (typeof instance.plan !== 'function') { - throw new Error('Migration class must implement plan()'); - } - const ops = instance.plan(); if (!Array.isArray(ops)) { diff --git a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts index c26c8c842..d156f172b 100644 --- a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts +++ b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts @@ -70,13 +70,14 @@ describe('Migration.run() subprocess', () => { return [ `import { Migration } from '${migrationBasePath}';`, '', - 'export default class extends Migration {', + 'class M extends Migration {', ' plan() {', ` return ${planReturn};`, ' }', '}', + 'export default M;', '', - 'Migration.run(import.meta.url);', + 'Migration.run(import.meta.url, M);', ].join('\n'); } @@ -136,8 +137,8 @@ describe('Migration.run() subprocess', () => { await writeFile(join(tmpDir, 'migration.ts'), migrationFile); const importerScript = [ - `import Migration from '${join(tmpDir, 'migration.ts').replace(/\\/g, '/')}';`, - 'const m = new Migration();', + `import M from '${join(tmpDir, 'migration.ts').replace(/\\/g, '/')}';`, + 'const m = new M();', 'const ops = m.plan();', 'console.log(JSON.stringify(ops));', ].join('\n'); @@ -167,7 +168,7 @@ describe('Migration.run() subprocess', () => { return [ 
`import { Migration } from '${migrationBasePath}';`, '', - 'export default class extends Migration {', + 'class M extends Migration {', ' describe() {', ` return ${meta};`, ' }', @@ -175,8 +176,9 @@ describe('Migration.run() subprocess', () => { ` return ${planReturn};`, ' }', '}', + 'export default M;', '', - 'Migration.run(import.meta.url);', + 'Migration.run(import.meta.url, M);', ].join('\n'); } diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts index 4180320c5..6ea4b93ae 100644 --- a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts +++ b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts @@ -45,7 +45,7 @@ describe('migration file E2E', () => { `import { Migration } from '${migrationExport}';`, `import { createIndex, createCollection } from '${factoryExport}';`, '', - 'export default class extends Migration {', + 'class M extends Migration {', ' plan() {', ' return [', ' createCollection("users", {', @@ -56,8 +56,9 @@ describe('migration file E2E', () => { ' ];', ' }', '}', + 'export default M;', '', - 'Migration.run(import.meta.url);', + 'Migration.run(import.meta.url, M);', ].join('\n'); it('produces ops.json with correct structure', async () => { @@ -99,7 +100,7 @@ describe('migration file E2E', () => { `import { Migration } from '${migrationExport}';`, `import { validatedCollection } from '${factoryExport}';`, '', - 'export default class extends Migration {', + 'class M extends Migration {', ' plan() {', ' return validatedCollection(', ' "users",', @@ -108,8 +109,9 @@ describe('migration file E2E', () => { ' );', ' }', '}', + 'export default M;', '', - 'Migration.run(import.meta.url);', + 'Migration.run(import.meta.url, M);', ].join('\n'); it('produces ops.json from strategy composition', async () => { @@ -140,7 +142,7 @@ describe('migration file E2E', () => { `import { Migration } from '${migrationExport}';`, `import { 
createIndex, dropIndex, createCollection, dropCollection, setValidation } from '${factoryExport}';`, '', - 'export default class extends Migration {', + 'class M extends Migration {', ' plan() {', ' return [', ' createCollection("users"),', @@ -151,8 +153,9 @@ describe('migration file E2E', () => { ' ];', ' }', '}', + 'export default M;', '', - 'Migration.run(import.meta.url);', + 'Migration.run(import.meta.url, M);', ].join('\n'); await writeFile(join(tmpDir, 'migration.ts'), migration); From 7096833486fce1733cabc08db85160c9e5b2151e Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 22:05:04 +0300 Subject: [PATCH 23/31] fix(target-mongo): narrow setValidation option types to match CollModCommand --- .../1-mongo-target/src/core/migration-factories.ts | 2 +- .../3-mongo-target/1-mongo-target/test/migration-e2e.test.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts index 0c34d2604..46f279bbc 100644 --- a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts +++ b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts @@ -156,7 +156,7 @@ export function dropCollection(collection: string): MongoMigrationPlanOperation export function setValidation( collection: string, schema: Record, - options?: { validationLevel?: string; validationAction?: string }, + options?: { validationLevel?: 'strict' | 'moderate'; validationAction?: 'error' | 'warn' }, ): MongoMigrationPlanOperation { return { id: `collection.${collection}.setValidation`, diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts index 6ea4b93ae..fb84f2bb3 100644 --- a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts +++ b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts @@ -168,7 
+168,7 @@ describe('migration file E2E', () => { const commandKinds = ops.map((op: Record) => (op['execute'] as Record[]).map( - (s) => (s as Record>)['command']['kind'], + (s: Record) => (s['command'] as Record)['kind'], ), ); expect(commandKinds).toEqual([ From 41626d30ce043fd45b7c59508fc7049e4a9aad24 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 22:14:59 +0300 Subject: [PATCH 24/31] fix(integration): fix typecheck errors in migration-authoring E2E test Use Result.failure (not .error) and cast CollectionInfo to Record for options access. --- .../test/mongo/migration-authoring-e2e.test.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/integration/test/mongo/migration-authoring-e2e.test.ts b/test/integration/test/mongo/migration-authoring-e2e.test.ts index 7b6e18a1a..6a3c9fe4d 100644 --- a/test/integration/test/mongo/migration-authoring-e2e.test.ts +++ b/test/integration/test/mongo/migration-authoring-e2e.test.ts @@ -75,7 +75,7 @@ describe( policy: ALL_POLICY, frameworkComponents: [], }); - if (!result.ok) throw new Error(`Runner failed: ${result.error.summary}`); + if (!result.ok) throw new Error(`Runner failed: ${result.failure.summary}`); return result.value; } finally { await controlDriver.close(); @@ -103,7 +103,7 @@ describe( await runOps(ops); const info = await db.listCollections({ name: 'users' }).toArray(); - const options = info[0]!['options'] as Record; + const options = (info[0] as Record)['options'] as Record; expect(options['validator']).toEqual({ $jsonSchema: { required: ['email'] } }); }); }); @@ -177,7 +177,7 @@ describe( expect(result.operationsExecuted).toBe(1); const info = await db.listCollections({ name: 'users' }).toArray(); - const options = info[0]!['options'] as Record; + const options = (info[0] as Record)['options'] as Record; expect(options['validator']).toEqual({ $jsonSchema: { required: ['email', 'name'] }, }); @@ -243,7 +243,7 @@ describe( const info = await db.listCollections({ 
name: 'users' }).toArray(); expect(info).toHaveLength(1); - const options = info[0]!['options'] as Record; + const options = (info[0] as Record)['options'] as Record; expect(options['validator']).toEqual({ $jsonSchema: { required: ['email', 'name'] }, }); @@ -318,7 +318,7 @@ describe( } const info = await db.listCollections({ name: 'users' }).toArray(); - const options = info[0]!['options'] as Record; + const options = (info[0] as Record)['options'] as Record; expect(options['validator']).toEqual({ $jsonSchema: { required: ['email', 'name'] }, }); From d5c7a78ede5afbe4c0e37b01c8b30fed093f904c Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 22:19:32 +0300 Subject: [PATCH 25/31] fix(examples): fix typecheck errors in mongo-demo and retail-store tests --- examples/mongo-demo/test/manual-migration.test.ts | 4 ++-- examples/retail-store/test/manual-migration.test.ts | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/mongo-demo/test/manual-migration.test.ts b/examples/mongo-demo/test/manual-migration.test.ts index 492c2b7bf..9151e3591 100644 --- a/examples/mongo-demo/test/manual-migration.test.ts +++ b/examples/mongo-demo/test/manual-migration.test.ts @@ -52,8 +52,8 @@ describe( const instance = new AddPostsAuthorIndex(); const ops = instance.plan(); expect(ops).toHaveLength(2); - expect(ops[0].id).toBe('index.posts.create(authorId:1)'); - expect(ops[1].id).toBe('index.posts.create(createdAt:-1,authorId:1)'); + expect(ops[0]!.id).toBe('index.posts.create(authorId:1)'); + expect(ops[1]!.id).toBe('index.posts.create(createdAt:-1,authorId:1)'); }); it('migration.json has expected structure', () => { diff --git a/examples/retail-store/test/manual-migration.test.ts b/examples/retail-store/test/manual-migration.test.ts index deb0cab33..59f3bd054 100644 --- a/examples/retail-store/test/manual-migration.test.ts +++ b/examples/retail-store/test/manual-migration.test.ts @@ -52,8 +52,8 @@ describe( const instance = new 
AddProductValidation(); const ops = instance.plan(); expect(ops).toHaveLength(2); - expect(ops[0].id).toBe('collection.products.setValidation'); - expect(ops[1].id).toContain('index.products.create'); + expect(ops[0]!.id).toBe('collection.products.setValidation'); + expect(ops[1]!.id).toContain('index.products.create'); }); it('migration.json has expected structure', () => { @@ -97,7 +97,7 @@ describe( expect(result.value.operationsExecuted).toBe(2); const info = await db.listCollections({ name: 'products' }).toArray(); - const options = info[0]!['options'] as Record; + const options = (info[0] as Record)['options'] as Record; expect(options['validator']).toBeDefined(); const indexes = await db.collection('products').listIndexes().toArray(); From 67327c568d9e5ae8d7a438d2de699d3b32ff3f7f Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 22:29:10 +0300 Subject: [PATCH 26/31] docs(mongo-migration-authoring): add data migrations spec and plan Spec and plan for adding data transform support to MongoDB migrations. Users author transforms using existing query builders (mongoRaw, mongoPipeline), which produce MongoQueryPlan ASTs from a scaffolded contract. Plans serialize to ops.json as JSON using the same kind-based pattern as DDL commands and execute at apply time via MongoAdapter. 
--- .../plans/data-migrations-plan.md | 116 ++++++++ .../specs/data-migrations.spec.md | 250 ++++++++++++++++++ 2 files changed, 366 insertions(+) create mode 100644 projects/mongo-migration-authoring/plans/data-migrations-plan.md create mode 100644 projects/mongo-migration-authoring/specs/data-migrations.spec.md diff --git a/projects/mongo-migration-authoring/plans/data-migrations-plan.md b/projects/mongo-migration-authoring/plans/data-migrations-plan.md new file mode 100644 index 000000000..8e1d71afd --- /dev/null +++ b/projects/mongo-migration-authoring/plans/data-migrations-plan.md @@ -0,0 +1,116 @@ +# Mongo Data Migrations — Plan + +## Summary + +Add data transform support to MongoDB migrations. Users author transforms using the existing `mongoRaw` and `mongoPipeline` query builders, which produce `MongoQueryPlan` ASTs from a scaffolded contract. The plans serialize to `ops.json` as JSON (same pattern as DDL commands) and execute at apply time via `MongoAdapter` → `MongoDriver`. No TypeScript runs at apply time. + +**Spec:** `projects/mongo-migration-authoring/specs/data-migrations.spec.md` + +## Collaborators + +| Role | Person/Team | Context | +|---|---|---| +| Maker | TBD | Drives execution | +| Reviewer | TBD | Architectural review — serialization model, runner extension | + +## Milestones + +### Milestone 1: DML command serialization + +Extend the serialization layer to handle DML commands (`RawMongoCommand` kinds and typed `AggregateCommand`). After this milestone, a `MongoQueryPlan` containing any supported command kind round-trips through `JSON.stringify` → deserialize → rehydrated AST. 
+ +**Tasks:** + +- [ ] Define arktype schemas for each `RawMongoCommand` kind: `rawUpdateMany`, `rawUpdateOne`, `rawInsertOne`, `rawInsertMany`, `rawDeleteMany`, `rawDeleteOne`, `rawAggregate`, `rawFindOneAndUpdate`, `rawFindOneAndDelete` +- [ ] Define arktype schema for typed `AggregateCommand` (and the pipeline stage subset needed for `check` queries: `$match`, `$limit`, `$sort`, `$project`) +- [ ] Implement `deserializeDmlCommand(json)` — switch on `kind`, validate with arktype, reconstruct the command class instance +- [ ] Implement `deserializeMongoQueryPlan(json)` — deserializes the full `MongoQueryPlan` envelope (collection, command, meta) +- [ ] Tests: round-trip every supported command kind through serialize → deserialize; verify rehydrated AST matches original +- [ ] Tests: error cases — unknown `kind`, missing required fields, invalid field types + +### Milestone 2: `dataTransform` factory and operation type + +Implement the `dataTransform` factory function and define the data transform operation shape in `ops.json`. After this milestone, a migration file can include `dataTransform(...)` calls that produce serializable operations. + +**Tasks:** + +- [ ] Define the data transform operation type — extends `MigrationPlanOperation` with `operationClass: 'data'`, `check` (serialized `MongoQueryPlan` | boolean), `run` (serialized `MongoQueryPlan[]`) +- [ ] Implement `dataTransform(name, { check, run })` factory function in `@prisma-next/target-mongo/migration`. Accepts closures returning `Buildable` or `MongoQueryPlan`. Calls `.build()` on `Buildable` returns. 
+- [ ] Implement `TODO` sentinel support — a `dataTransform` with `TODO` placeholders prevents attestation +- [ ] Support `check: false` (always run) and `check: true` (always skip) +- [ ] Export `dataTransform` from `@prisma-next/target-mongo/migration` +- [ ] Tests: `dataTransform` produces correct operation shape +- [ ] Tests: `.build()` is called on `Buildable` returns +- [ ] Tests: `TODO` sentinel prevents attestation +- [ ] Tests: `check: false` and `check: true` produce correct serialized output +- [ ] Tests: data transform operations serialize to JSON and deserialize correctly + +### Milestone 3: Runner DML execution + +Extend the migration runner to execute data transform operations via the `MongoAdapter` → `MongoDriver` path. After this milestone, `migration apply` can execute a migration containing data transforms against a real MongoDB instance. + +**Tasks:** + +- [ ] Extend the runner's operation dispatch to recognize `operationClass: 'data'` operations +- [ ] Implement the check → (skip or run) → check again → (fail or proceed) execution sequence +- [ ] Wire DML execution through `MongoAdapter.lower()` → `MongoDriver.execute()` (distinct from the DDL `MongoCommandExecutor` path) +- [ ] Handle `check: false` (always run) and `check: true` (always skip) in the runner +- [ ] Add logging for data transform start/completion/failure with the migration name +- [ ] Tests: runner executes data transform operations in sequence with DDL operations +- [ ] Tests: check → skip when check returns empty result (already applied) +- [ ] Tests: check → run → check again → fail when violations remain +- [ ] Tests: retry safety — re-running a completed data transform skips via check + +### Milestone 4: Contract scaffolding and end-to-end + +Wire contract scaffolding into the migration directory and validate the full pipeline end-to-end against a real MongoDB instance. 
+ +**Tasks:** + +- [ ] Extend migration scaffolding to dump `contract.json` and `contract.d.ts` into the migration directory +- [ ] E2E test: author a migration with DDL + `dataTransform`, verify (serialize), apply (deserialize + execute) against MongoDB +- [ ] E2E test: migration with an intermediate contract — two query builder contexts in the same file +- [ ] E2E test: retry safety — apply a migration that was partially applied, verify check skips completed transforms +- [ ] E2E test: check failure — data transform whose `run` doesn't fix all violations, verify runner fails with diagnostic +- [ ] Verify all acceptance criteria from the spec are met + +**Close-out:** + +- [ ] Verify all acceptance criteria from the spec are met +- [ ] Update project documentation if needed + +## Test Coverage + +| Acceptance Criterion | Test Type | Milestone | Notes | +|---|---|---|---| +| Migration file with `dataTransform` type-checks and verifies | Unit | M2 | Factory produces correct operation shape | +| Closures use module-scoped query builders (no injected params) | Unit | M2 | Verified by API — no `db` parameter | +| Resolver calls `.build()` on `Buildable` returns | Unit | M2 | | +| `TODO` sentinel prevents attestation | Unit | M2 | | +| `check: false` and `check: true` supported | Unit | M2 | | +| `MongoQueryPlan` round-trips through serialize → deserialize | Unit | M1 | All command kinds | +| All `RawMongoCommand` kinds handled | Unit | M1 | 9 command kinds | +| Typed `aggregate` command handled | Unit | M1 | Pipeline stage subset | +| Deserialization validates with arktype | Unit | M1 | | +| Data transform ops appear in `ops.json` | Unit | M2 | Serialization of data transform envelope | +| Runner: check → skip or run → check → fail or proceed | Unit | M3 | | +| DML via `MongoAdapter.lower()` → `MongoDriver.execute()` | Integration | M3 | | +| Retry: check determines whether to skip | Unit | M3 | | +| Check violations after run → migration fails | Unit | M3 | | +| 
Contract scaffolded into migration directory | Integration | M4 | | +| Intermediate contracts for complex migrations | E2E | M4 | | +| Full round-trip: author → verify → apply | E2E | M4 | Against real MongoDB | +| Mixed DDL + data transform in sequence | E2E | M4 | | +| Intermediate contract with mid-chain queries | E2E | M4 | | + +## Open Items + +1. **Operation type shape in ops.json**: The spec proposes `operationClass: 'data'` as discriminant with `check`/`run` fields instead of `precheck`/`execute`/`postcheck`. This needs to be validated against the framework's `MigrationPlanOperation` base type — it may need to be extended or the data transform may need its own type. Resolve during M2. + +2. **Aggregation pipeline stage deserialization scope**: The pipeline builder produces ~25 stage kinds. For v1, implementing deserialization for the subset needed by `check` queries (`$match`, `$limit`, `$sort`, `$project`) plus common data transform patterns (`$addFields`, `$merge`, `$lookup`) is likely sufficient. Extend as users hit gaps. + +3. **Where the serializer lives**: The existing DDL serializer is in `mongo-ops-serializer.ts` in the adapter package. If the migration-subsystem-refactor spec is implemented first (moving the serializer to `target-mongo`), the DML serializer goes there too. Otherwise, it goes in the adapter alongside the existing serializer for now and moves later. + +4. **Runner architecture**: The runner currently only handles DDL via `MongoCommandExecutor` (visitor pattern). Data transforms need a different execution path (`MongoAdapter` + `MongoDriver`). The runner needs access to both. If the migration-subsystem-refactor spec is done first (runner accepts injected executors), the adapter/driver can be injected alongside the DDL executors. Otherwise, the runner needs to be extended to accept the adapter/driver as additional dependencies. + +5. 
**Filter expression serialization for typed commands**: The `mongoPipeline` builder produces typed `MongoPipelineStage` and `MongoFilterExpr` objects. The existing `mongo-ops-serializer` already handles `MongoFilterExpr` deserialization (for DDL prechecks/postchecks). Pipeline stages need new deserialization logic, but the pattern is identical. diff --git a/projects/mongo-migration-authoring/specs/data-migrations.spec.md b/projects/mongo-migration-authoring/specs/data-migrations.spec.md new file mode 100644 index 000000000..a4839311c --- /dev/null +++ b/projects/mongo-migration-authoring/specs/data-migrations.spec.md @@ -0,0 +1,250 @@ +# Summary + +Users can author data transformations in MongoDB migrations using the existing query builders (`mongoRaw`, `mongoPipeline`). A `dataTransform` operation slots into the migration operation chain alongside structural DDL factories. Query builders produce `MongoQueryPlan` AST objects from a contract — no database connection needed at authoring time. The plans serialize to JSON in `ops.json` using the same `kind`-based pattern as DDL commands, and the runner executes them via the existing `MongoAdapter` → `MongoDriver` pipeline. + +# Description + +## Context + +Manual Mongo migration authoring is implemented: factory functions (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `setValidation`) produce `MongoMigrationPlanOperation` objects containing DDL commands. The `Migration` base class makes files self-executing. The runner consumes `ops.json` and executes DDL via `MongoCommandExecutor`. + +Data migrations for Postgres are also implemented: `dataTransform` accepts typed callbacks that produce SQL query plans, which are lowered to `{ sql, params }` at verify time and stored in `ops.json`. No TypeScript runs at apply time. + +MongoDB needs the same capability: data transformations expressed as migration operations, serialized as JSON, executed at apply time without loading user code. 
## How it works

The key insight is that MongoDB commands are natively JSON. Unlike SQL (where an AST must be lowered to text), MongoDB command ASTs serialize directly via `JSON.stringify` and deserialize via `kind`-based rehydration — the same mechanism already used for DDL commands. The existing query builders (`mongoRaw`, `mongoPipeline`) produce `MongoQueryPlan` objects statically from a contract, with no runtime or database connection required.

### Migration file

```typescript
// migrations/0002_backfill-status/migration.ts
import type { Contract } from './contract.d'
import contractJson from './contract.json' with { type: 'json' }
import { Migration, createCollection, setValidation, dataTransform } from '@prisma-next/target-mongo/migration'
import { mongoRaw } from '@prisma-next/mongo-orm'
import { mongoPipeline } from '@prisma-next/mongo-pipeline-builder'

const raw = mongoRaw({ contract: contractJson as Contract })
const agg = mongoPipeline({ contractJson })

export default class extends Migration {
  plan() {
    return [
      createCollection("users", { ... }),
      dataTransform("backfill-status", {
        check: () => agg.from('users')
          .match({ status: { $exists: false } })
          .limit(1),
        run: () => raw.collection('users')
          .updateMany({ status: { $exists: false } }, { $set: { status: "active" } }),
      }),
      setValidation("users", { ... }),
    ]
  }
}

Migration.run(import.meta.url, exports.default)
```

The query builders are constructed at module scope from the contract. The `check` and `run` closures use them to produce `Buildable` objects (query chains before `.build()`). The resolver calls `.build()` to capture `MongoQueryPlan` ASTs.

### Contract in the migration folder

When a migration is scaffolded, `contract.json` and `contract.d.ts` are dumped into the migration directory. This gives the query builders their type information without depending on a path outside the migration folder.
+ +### Intermediate contracts + +Complex migrations need queries typed against an intermediate schema state (e.g., after adding nullable columns but before tightening to NOT NULL). The user copies their authoring surface (e.g., `schema.prisma`) into the migration folder, modifies it to reflect the intermediate state, and runs `contract emit` to produce a second `contract.json` + `contract.d.ts`: + +``` +migrations/0003_split-name/ +├── migration.ts +├── contract.json # destination contract (scaffolded) +├── contract.d.ts +├── intermediate.prisma # intermediate schema (user-authored) +├── intermediate.json # emitted from intermediate.prisma +└── intermediate.d.ts +``` + +```typescript +import type { Contract } from './contract.d' +import type { IntermediateContract } from './intermediate.d' +import contractJson from './contract.json' with { type: 'json' } +import intermediateJson from './intermediate.json' with { type: 'json' } + +const finalRaw = mongoRaw({ contract: contractJson as Contract }) +const intermediateRaw = mongoRaw({ contract: intermediateJson as IntermediateContract }) +``` + +Multiple intermediate contracts are supported — one per data transform if needed. + +### Serialization lifecycle + +1. **Scaffold (Draft)**: `migration plan` or `migration new` produces the migration directory with `contract.json`, `contract.d.ts`, and `migration.ts`. If a data transform is needed, `migration.ts` includes a `dataTransform` with TODO placeholders. +2. **Author (Draft)**: User fills in `check`/`run` using the query builders. Still draft. +3. **Verify/Attest**: `migration verify` evaluates the TypeScript, calls `.build()` on the `Buildable` objects returned by `check`/`run`, serializes the resulting `MongoQueryPlan` ASTs to `ops.json`. The package is now attested. +4. **Apply**: `migration apply` reads `ops.json`, deserializes the command ASTs via `kind`-based rehydration, and executes them via `MongoAdapter.lower()` → `MongoDriver.execute()`. 
No TypeScript is loaded. + +### Representation in ops.json + +A data transform operation in `ops.json` follows the same three-phase envelope as DDL operations, with the `check` and `run` fields carrying serialized `MongoQueryPlan` command ASTs: + +```json +{ + "id": "data_migration.backfill-status", + "label": "Data transform: backfill-status", + "operationClass": "data", + "check": { + "collection": "users", + "command": { + "kind": "aggregate", + "collection": "users", + "pipeline": [ + { "kind": "match", "filter": { "kind": "field", "field": "status", "op": "$exists", "value": false } }, + { "kind": "limit", "count": 1 } + ] + }, + "meta": { "target": "mongo", "storageHash": "...", "lane": "mongo-pipeline", "paramDescriptors": [] } + }, + "run": [{ + "collection": "users", + "command": { + "kind": "rawUpdateMany", + "collection": "users", + "filter": { "status": { "$exists": false } }, + "update": { "$set": { "status": "active" } } + }, + "meta": { "target": "mongo", "storageHash": "...", "lane": "mongo-raw", "paramDescriptors": [] } + }] +} +``` + +The command `kind` discriminant (`"aggregate"`, `"rawUpdateMany"`, `"rawInsertOne"`, etc.) drives deserialization — the same pattern used for DDL commands (`"createIndex"`, `"dropCollection"`, etc.). + +### Runner execution model + +The runner processes data transform operations with the check → run → check sequence: + +1. **Check**: deserialize and execute the `check` query. Empty result = already applied (skip `run`). Non-empty = needs to run. `check: false` means always run; `check: true` means always skip. +2. **Run**: deserialize and execute each `run` command sequentially. +3. **Check again**: re-execute the `check` query. If violations remain, the migration fails before subsequent tightening operations. + +DML commands are executed via the same `MongoAdapter.lower()` → `MongoDriver.execute()` path used for runtime queries, not via `MongoCommandExecutor` (which handles DDL only). 
### Query builder typing

`mongoRaw` provides type-safe collection name access (constrained to `keyof TContract['roots']`) but untyped filter/update documents (`Document` = `Record<string, unknown>`). `mongoPipeline` provides richer typing via field proxies and filter proxies.

A strongly typed query builder that validates field names and update operators against the contract can be added later. It will slot in transparently — same contract input, same `MongoQueryPlan` output. The migration infrastructure does not change.

# Requirements

## Functional Requirements

- A `dataTransform(name, { check, run })` factory function that produces a data transform operation for MongoDB migrations. `check` and `run` are closures returning `Buildable` (or `MongoQueryPlan`) objects.
- The resolver calls `.build()` on `Buildable` returns and serializes the `MongoQueryPlan` ASTs to `ops.json`.
- DML command serialization: `MongoQueryPlan` command ASTs (all `RawMongoCommand` and `AnyMongoCommand` kinds) serialize via `JSON.stringify` and deserialize via `kind`-based rehydration with arktype validation, following the existing DDL command serialization pattern in `mongo-ops-serializer.ts`.
- The migration runner executes data transform operations: deserializes `MongoQueryPlan` from ops.json, lowers via `MongoAdapter`, executes via `MongoDriver`.
- The check → run → check execution sequence matches the Postgres data migration pattern.
- `check` supports three modes: a closure returning a `Buildable`/`MongoQueryPlan` (the common case — empty result = done), `false` (always run), `true` (always skip).
- Migration scaffolding dumps `contract.json` and `contract.d.ts` into the migration directory.
- Users can create intermediate contracts for complex migrations by emitting from a modified schema in the migration directory.
- A `TODO` sentinel in `dataTransform` prevents attestation (same as Postgres).

## Non-Functional Requirements

- No TypeScript is executed at apply time.
Only serialized command ASTs from `ops.json` are loaded and executed. +- DML command serialization/deserialization is consistent with the existing DDL pattern — same `kind`-based dispatch, same arktype validation schemas, same module (`mongo-ops-serializer.ts` or equivalent). +- No changes to the existing DDL factory functions, the DDL runner path, or the `MongoCommandExecutor`. + +## Non-goals + +- A strongly typed Mongo query builder that validates field names and operators against the contract. The existing `mongoRaw` (untyped documents) and `mongoPipeline` (typed aggregation) are sufficient for v1. A typed builder can be added later and will work transparently. +- Auto-detection of data migration needs from contract diffs (planner integration). For v1, data transforms are manually authored via `migration new`. +- Scaffolding `dataTransform` with TODO placeholders from the planner. This requires planner integration and is future work. +- Transaction/session support for data transforms. MongoDB multi-document transactions are orthogonal and can be layered on later. +- Graph integration (invariant tracking, invariant-aware routing, ledger recording of data migration names). This is the same scope as the Postgres graph integration work and is deferred. +- Aggregation pipeline mutations via `$merge`/`$out` as a first-class pattern. Users can express these via `mongoPipeline` already; no special infrastructure needed. 
+ +# Acceptance Criteria + +## Authoring + +- [ ] A migration file with `dataTransform` using `mongoRaw` for `run` and `mongoPipeline` for `check` type-checks and can be verified +- [ ] `check` and `run` closures receive no injected parameters — they use module-scoped query builders +- [ ] The resolver calls `.build()` on `Buildable` returns from `check`/`run` +- [ ] A `TODO` sentinel in `dataTransform` prevents attestation +- [ ] `check: false` (always run) and `check: true` (always skip) are supported + +## Serialization + +- [ ] `MongoQueryPlan` command ASTs round-trip through `JSON.stringify` → deserialize via `kind`-based rehydration +- [ ] All `RawMongoCommand` kinds are handled: `rawUpdateMany`, `rawUpdateOne`, `rawInsertOne`, `rawInsertMany`, `rawDeleteMany`, `rawDeleteOne`, `rawAggregate`, `rawFindOneAndUpdate`, `rawFindOneAndDelete` +- [ ] Typed command kinds are handled: `aggregate` (from `mongoPipeline`) +- [ ] Deserialization validates each command shape with arktype schemas +- [ ] Data transform operations appear in `ops.json` with serialized `check` and `run` fields + +## Execution + +- [ ] The runner executes data transform operations: check → (skip or run) → check again → (fail or proceed) +- [ ] DML commands are executed via `MongoAdapter.lower()` → `MongoDriver.execute()` +- [ ] On retry, `check` determines whether to skip the data transform's `run` +- [ ] If `check` returns violations after `run`, the migration fails with a diagnostic + +## Scaffolding + +- [ ] Migration scaffolding produces `contract.json` and `contract.d.ts` in the migration directory +- [ ] Users can create intermediate contracts in the migration directory for complex migrations + +## End-to-end + +- [ ] A data transform migration round-trips: author → verify (serialize) → apply (deserialize + execute) against a real MongoDB instance +- [ ] A migration with both DDL operations and a data transform executes correctly in sequence +- [ ] A migration with an intermediate contract 
for mid-chain typed queries works end-to-end + +# Other Considerations + +## Security + +No change from existing model. No TypeScript is executed at apply time — only serialized command ASTs from `ops.json`. Data migration commands run with the same database permissions as the migration runner. + +## Observability + +The runner logs data transform start/completion/failure with the migration name, matching the Postgres runner behavior. + +# References + +- Parent project spec: [`projects/mongo-migration-authoring/spec.md`](../spec.md) +- Cross-target data migrations spec: [`projects/graph-based-migrations/specs/data-migrations-spec.md`](../../graph-based-migrations/specs/data-migrations-spec.md) +- Existing Mongo DDL factories: [`packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts`](../../../packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts) +- Existing DDL serializer: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts`](../../../packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts) +- DML command AST: [`packages/2-mongo-family/4-query/query-ast/src/commands.ts`](../../../packages/2-mongo-family/4-query/query-ast/src/commands.ts) +- Raw command AST: [`packages/2-mongo-family/4-query/query-ast/src/raw-commands.ts`](../../../packages/2-mongo-family/4-query/query-ast/src/raw-commands.ts) +- `mongoRaw` client: [`packages/2-mongo-family/5-query-builders/orm/src/mongo-raw.ts`](../../../packages/2-mongo-family/5-query-builders/orm/src/mongo-raw.ts) +- `mongoPipeline` builder: [`packages/2-mongo-family/5-query-builders/pipeline-builder/src/pipeline.ts`](../../../packages/2-mongo-family/5-query-builders/pipeline-builder/src/pipeline.ts) +- Postgres data transform implementation: [`packages/3-targets/3-targets/postgres/src/core/migrations/operation-descriptors.ts`](../../../packages/3-targets/3-targets/postgres/src/core/migrations/operation-descriptors.ts) +- Mongo runner: 
[`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts`](../../../packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts) +- Mongo adapter (DML lowering): [`packages/3-mongo-target/2-mongo-adapter/src/mongo-adapter.ts`](../../../packages/3-mongo-target/2-mongo-adapter/src/mongo-adapter.ts) +- ADR 188 — MongoDB migration operation model +- ADR 191 — Generic three-phase migration operation envelope +- ADR 176 — Data migrations as invariant-guarded transitions + +# Decisions + +1. **No callback injection.** Unlike Postgres `dataTransform` where the resolver creates a `Db` and passes it to callbacks, Mongo data transforms use module-scoped query builders constructed from the scaffolded contract. The closures capture these builders via closure scope. This is simpler because the Mongo query builders are fully static — they need only a contract, not a runtime context. + +2. **Same serialization pattern as DDL.** DML commands serialize/deserialize using the same `kind`-based rehydration as DDL commands. No separate serialization mechanism. The `mongo-ops-serializer` is extended (or a parallel module added) to handle DML command kinds. + +3. **`MongoAdapter` + `MongoDriver` for DML execution.** Data transform commands are executed through the existing runtime execution path (`MongoAdapter.lower()` → `MongoDriver.execute()`), not through `MongoCommandExecutor` (which handles DDL only). This reuses proven infrastructure without extending the DDL executor. + +4. **Contract scaffolded into migration directory.** The contract is co-located with the migration rather than referenced by path. This makes migrations self-contained and avoids breakage when the source schema evolves. + +# Open Questions + +1. **Operation type in ops.json**: Data transform operations don't fit the existing `MongoMigrationPlanOperation` shape (which has `precheck`/`execute`/`postcheck` containing DDL commands). 
Options: (a) extend the union to include a data transform variant with `check`/`run` fields, (b) use a separate operation discriminant (like Postgres's `operationClass: 'data'`), (c) use a new top-level type. **Default assumption:** Option (b) — use `operationClass: 'data'` as the discriminant, with `check` and `run` fields instead of `precheck`/`execute`/`postcheck`. + +2. **Where does `dataTransform` live?** The DDL factories are in `@prisma-next/target-mongo/migration`. `dataTransform` could go there too, or in `@prisma-next/family-mongo/migration` alongside `MongoMigration`. **Default assumption:** In `@prisma-next/target-mongo/migration` alongside the DDL factories, since it produces an operation that goes into the same `ops.json` and is consumed by the same runner. + +3. **Aggregation pipeline stage serialization**: The typed `MongoPipelineStage` classes (from `mongoPipeline`) are `MongoAstNode` subclasses that serialize via `JSON.stringify`. Deserialization needs to reconstruct the stage class instances from `kind` discriminants. There are ~25 stage kinds. Is it worth implementing full rehydration for all of them for v1, or should we start with the most common subset (`$match`, `$limit`, `$sort`, `$project`, `$addFields`, `$lookup`, `$merge`)? **Default assumption:** Start with a subset that covers the check query pattern (match + limit) and the most common data transform patterns, and extend as needed. 
From 00d2d8bdec736faae324c6152c5079c2d84e5791 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 22:30:48 +0300 Subject: [PATCH 27/31] fix(migration-tools): increase subprocess test timeout for CI --- .../1-framework/3-tooling/migration/test/migration-base.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts index d156f172b..f985bc1c3 100644 --- a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts +++ b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts @@ -53,7 +53,7 @@ describe('Migration', () => { }); }); -describe('Migration.run() subprocess', () => { +describe('Migration.run() subprocess', { timeout: 15_000 }, () => { let tmpDir: string; beforeEach(async () => { From ae9bf642e538f8e7e21b192beb9e4f66e79d7c66 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 22:31:28 +0300 Subject: [PATCH 28/31] docs(mongo-migration-authoring): rewrite data migrations spec for clarity MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Restructure to lead with the problem and a grounding example, build the narrative progressively (authoring → serialization → execution), move decisions before implementation details, and push alternatives considered to the end. 
--- .../specs/data-migrations.spec.md | 228 +++++++----------- 1 file changed, 92 insertions(+), 136 deletions(-) diff --git a/projects/mongo-migration-authoring/specs/data-migrations.spec.md b/projects/mongo-migration-authoring/specs/data-migrations.spec.md index a4839311c..aa0489b73 100644 --- a/projects/mongo-migration-authoring/specs/data-migrations.spec.md +++ b/projects/mongo-migration-authoring/specs/data-migrations.spec.md @@ -1,28 +1,23 @@ # Summary -Users can author data transformations in MongoDB migrations using the existing query builders (`mongoRaw`, `mongoPipeline`). A `dataTransform` operation slots into the migration operation chain alongside structural DDL factories. Query builders produce `MongoQueryPlan` AST objects from a contract — no database connection needed at authoring time. The plans serialize to JSON in `ops.json` using the same `kind`-based pattern as DDL commands, and the runner executes them via the existing `MongoAdapter` → `MongoDriver` pipeline. +Users can express data transformations in MongoDB migrations — backfilling fields, reshaping documents, fixing constraint violations — alongside structural operations in the same migration file. # Description -## Context +## The problem -Manual Mongo migration authoring is implemented: factory functions (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `setValidation`) produce `MongoMigrationPlanOperation` objects containing DDL commands. The `Migration` base class makes files self-executing. The runner consumes `ops.json` and executes DDL via `MongoCommandExecutor`. +MongoDB migrations today can only express structural DDL: create/drop collections, create/drop indexes, set validation rules. But schema evolution often requires changing data too. Adding a required `status` field to a `users` collection means you need to backfill `"active"` into every existing document before you can enforce the validator. 
Today, there's no way to express that backfill as part of the migration. -Data migrations for Postgres are also implemented: `dataTransform` accepts typed callbacks that produce SQL query plans, which are lowered to `{ sql, params }` at verify time and stored in `ops.json`. No TypeScript runs at apply time. +## What it looks like -MongoDB needs the same capability: data transformations expressed as migration operations, serialized as JSON, executed at apply time without loading user code. - -## How it works - -The key insight is that MongoDB commands are natively JSON. Unlike SQL (where an AST must be lowered to text), MongoDB command ASTs serialize directly via `JSON.stringify` and deserialize via `kind`-based rehydration — the same mechanism already used for DDL commands. The existing query builders (`mongoRaw`, `mongoPipeline`) produce `MongoQueryPlan` objects statically from a contract, with no runtime or database connection required. - -### Migration file +A data transform is an operation in the migration's operation chain, alongside structural operations: ```typescript // migrations/0002_backfill-status/migration.ts import type { Contract } from './contract.d' import contractJson from './contract.json' with { type: 'json' } -import { createCollection, setValidation, dataTransform } from '@prisma-next/target-mongo/migration' +import { Migration, createCollection, setValidation, dataTransform } + from '@prisma-next/target-mongo/migration' import { mongoRaw } from '@prisma-next/mongo-orm' import { mongoPipeline } from '@prisma-next/mongo-pipeline-builder' @@ -32,7 +27,10 @@ const agg = mongoPipeline({ contractJson }) export default class extends Migration { plan() { return [ - createCollection("users", { ... 
}), + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + }), + dataTransform("backfill-status", { check: () => agg.from('users') .match({ status: { $exists: false } }) @@ -40,7 +38,10 @@ export default class extends Migration { run: () => raw.collection('users') .updateMany({ status: { $exists: false } }, { $set: { status: "active" } }), }), - setValidation("users", { ... }), + + setValidation("users", { + $jsonSchema: { required: ["email", "status"] }, + }), ] } } @@ -48,15 +49,38 @@ export default class extends Migration { Migration.run(import.meta.url, exports.default) ``` -The query builders are constructed at module scope from the contract. The `check` and `run` closures use them to produce `Buildable` objects (query chains before `.build()`). The resolver calls `.build()` to capture `MongoQueryPlan` ASTs. +The ordering matters: create the collection, backfill the data, *then* tighten the validator. The `dataTransform` sits between structural operations exactly where the data needs to be in the right shape. + +## How authoring works + +The query builders (`mongoRaw`, `mongoPipeline`) are the existing tools for building MongoDB queries. They take a contract and produce `MongoQueryPlan` objects — static command descriptions, no database connection required. + +The user constructs these builders at the top of the migration file from the scaffolded contract. The `check` and `run` closures use them to describe what the migration should do: + +- **`check`** describes a query for "violation" documents — rows that still need the transform. If the result is empty, the transform has already been applied. This gives retry safety: if a migration fails partway through and is re-run, completed transforms are skipped. +- **`run`** describes the actual data modification — an `updateMany`, `insertMany`, `deleteMany`, or aggregation pipeline. + +`check` also runs *after* `run` to verify the transform worked. 
If violations remain, the migration fails with a diagnostic *before* the subsequent `setValidation` would produce a cryptic database error. -### Contract in the migration folder +`check` also accepts `false` (always run — for idempotent-by-construction cases) or `true` (always skip). -When a migration is scaffolded, `contract.json` and `contract.d.ts` are dumped into the migration directory. This gives the query builders their type information without depending on a path outside the migration folder. +## How serialization works -### Intermediate contracts +This is the key constraint: **no TypeScript runs at apply time**. The migration file is evaluated once during `migration verify`, and the resulting command descriptions are written to `ops.json` as JSON. At `migration apply`, only the JSON is loaded and executed. -Complex migrations need queries typed against an intermediate schema state (e.g., after adding nullable columns but before tightening to NOT NULL). The user copies their authoring surface (e.g., `schema.prisma`) into the migration folder, modifies it to reflect the intermediate state, and runs `contract emit` to produce a second `contract.json` + `contract.d.ts`: +This works naturally for MongoDB because MongoDB commands *are* JSON. The query builders produce AST objects (`UpdateManyCommand`, `AggregateCommand`, etc.) that are `MongoAstNode` subclasses. These serialize directly via `JSON.stringify` — every node has a `kind` discriminant and public readonly properties. Deserialization reconstructs the class instances from the `kind` field, validated by arktype schemas. This is exactly the same mechanism already used for DDL commands (`CreateIndexCommand`, `CollModCommand`, etc.) in the existing migration serializer. + +The lifecycle: + +1. **Author**: User writes `migration.ts` with structural ops and data transforms. +2. 
**Verify**: `migration verify` evaluates the TypeScript, calls `.build()` on the query chain objects to produce `MongoQueryPlan` ASTs, and writes them to `ops.json`. +3. **Apply**: `migration apply` reads `ops.json`, deserializes the command ASTs, and executes them. DDL commands go through `MongoCommandExecutor` (existing path). DML commands go through `MongoAdapter.lower()` → `MongoDriver.execute()` (the existing runtime query execution path). + +## Contract in the migration folder + +When a migration is scaffolded, `contract.json` and `contract.d.ts` are copied into the migration directory. This gives the query builders their type information and makes the migration self-contained — it doesn't break if the source schema evolves after the migration is written. + +For complex migrations that need queries typed against an intermediate schema state (e.g., after adding a nullable field but before tightening to NOT NULL), the user copies their schema authoring surface into the migration folder, modifies it, and runs `contract emit` to produce a second contract: ``` migrations/0003_split-name/ @@ -68,183 +92,115 @@ migrations/0003_split-name/ └── intermediate.d.ts ``` -```typescript -import type { Contract } from './contract.d' -import type { IntermediateContract } from './intermediate.d' -import contractJson from './contract.json' with { type: 'json' } -import intermediateJson from './intermediate.json' with { type: 'json' } +The user creates a second set of query builders from the intermediate contract and uses them for the data transform that operates against that schema state. Multiple intermediate contracts are supported. -const finalRaw = mongoRaw({ contract: contractJson as Contract }) -const intermediateRaw = mongoRaw({ contract: intermediateJson as IntermediateContract }) -``` - -Multiple intermediate contracts are supported — one per data transform if needed. - -### Serialization lifecycle - -1. 
**Scaffold (Draft)**: `migration plan` or `migration new` produces the migration directory with `contract.json`, `contract.d.ts`, and `migration.ts`. If a data transform is needed, `migration.ts` includes a `dataTransform` with TODO placeholders. -2. **Author (Draft)**: User fills in `check`/`run` using the query builders. Still draft. -3. **Verify/Attest**: `migration verify` evaluates the TypeScript, calls `.build()` on the `Buildable` objects returned by `check`/`run`, serializes the resulting `MongoQueryPlan` ASTs to `ops.json`. The package is now attested. -4. **Apply**: `migration apply` reads `ops.json`, deserializes the command ASTs via `kind`-based rehydration, and executes them via `MongoAdapter.lower()` → `MongoDriver.execute()`. No TypeScript is loaded. - -### Representation in ops.json - -A data transform operation in `ops.json` follows the same three-phase envelope as DDL operations, with the `check` and `run` fields carrying serialized `MongoQueryPlan` command ASTs: - -```json -{ - "id": "data_migration.backfill-status", - "label": "Data transform: backfill-status", - "operationClass": "data", - "check": { - "collection": "users", - "command": { - "kind": "aggregate", - "collection": "users", - "pipeline": [ - { "kind": "match", "filter": { "kind": "field", "field": "status", "op": "$exists", "value": false } }, - { "kind": "limit", "count": 1 } - ] - }, - "meta": { "target": "mongo", "storageHash": "...", "lane": "mongo-pipeline", "paramDescriptors": [] } - }, - "run": [{ - "collection": "users", - "command": { - "kind": "rawUpdateMany", - "collection": "users", - "filter": { "status": { "$exists": false } }, - "update": { "$set": { "status": "active" } } - }, - "meta": { "target": "mongo", "storageHash": "...", "lane": "mongo-raw", "paramDescriptors": [] } - }] -} -``` - -The command `kind` discriminant (`"aggregate"`, `"rawUpdateMany"`, `"rawInsertOne"`, etc.) 
drives deserialization — the same pattern used for DDL commands (`"createIndex"`, `"dropCollection"`, etc.). - -### Runner execution model - -The runner processes data transform operations with the check → run → check sequence: +# Decisions -1. **Check**: deserialize and execute the `check` query. Empty result = already applied (skip `run`). Non-empty = needs to run. `check: false` means always run; `check: true` means always skip. -2. **Run**: deserialize and execute each `run` command sequentially. -3. **Check again**: re-execute the `check` query. If violations remain, the migration fails before subsequent tightening operations. +1. **Use existing query builders, not a new abstraction.** `mongoRaw` and `mongoPipeline` already produce `MongoQueryPlan` objects from a contract. The `dataTransform` factory consumes these — no migration-specific query API is needed. A future strongly typed query builder (validating field names and operators against the contract) will slot in transparently because it produces the same `MongoQueryPlan` output. -DML commands are executed via the same `MongoAdapter.lower()` → `MongoDriver.execute()` path used for runtime queries, not via `MongoCommandExecutor` (which handles DDL only). +2. **Module-scoped builders, not injected callbacks.** The Postgres `dataTransform` injects a `Db` client into callbacks because the SQL query builder needs a runtime execution context. The Mongo query builders are fully static — they need only a contract. So the user constructs them at module scope and the closures capture them via closure. Simpler, and no resolver infrastructure needed. -### Query builder typing +3. **Same serialization pattern as DDL.** DML commands (`updateMany`, `aggregate`, etc.) serialize and deserialize using the same `kind`-based rehydration mechanism as DDL commands (`createIndex`, `collMod`, etc.). The existing `mongo-ops-serializer` is extended with DML command kinds. No separate serialization mechanism. 
-`mongoRaw` provides type-safe collection name access (constrained to `keyof TContract['roots']`) but untyped filter/update documents (`Document` = `Record`). `mongoPipeline` provides richer typing via field proxies and filter proxies. +4. **DML execution via `MongoAdapter` + `MongoDriver`.** Data transform commands execute through the existing runtime query path, not through `MongoCommandExecutor` (which handles DDL only). This reuses proven infrastructure. -A strongly typed query builder that validates field names and update operators against the contract can be added later. It will slot in transparently — same contract input, same `MongoQueryPlan` output. The migration infrastructure does not change. +5. **Contract co-located with the migration.** The contract is scaffolded into the migration directory rather than referenced by path. Makes migrations self-contained and resilient to schema evolution after authoring. # Requirements ## Functional Requirements -- A `dataTransform(name, { check, run })` factory function that produces a data transform operation for MongoDB migrations. `check` and `run` are closures returning `Buildable` (or `MongoQueryPlan`) objects. -- The resolver calls `.build()` on `Buildable` returns and serializes the `MongoQueryPlan` ASTs to `ops.json`. -- DML command serialization: `MongoQueryPlan` command ASTs (all `RawMongoCommand` and `AnyMongoCommand` kinds) serialize via `JSON.stringify` and deserialize via `kind`-based rehydration with arktype validation, following the existing DDL command serialization pattern in `mongo-ops-serializer.ts`. -- The migration runner executes data transform operations: deserializes `MongoQueryPlan` from ops.json, lowers via `MongoAdapter`, executes via `MongoDriver`. -- The check → run → check execution sequence matches the Postgres data migration pattern. 
-- `check` supports three modes: a closure returning a `Buildable`/`MongoQueryPlan` (the common case — empty result = done), `false` (always run), `true` (always skip). -- Migration scaffolding dumps `contract.json` and `contract.d.ts` into the migration directory. -- Users can create intermediate contracts for complex migrations by emitting from a modified schema in the migration directory. -- A `TODO` sentinel in `dataTransform` prevents attestation (same as Postgres). +- A `dataTransform(name, { check, run })` factory that produces a data transform migration operation. `check` and `run` are closures returning `Buildable` or `MongoQueryPlan` objects. The resolver calls `.build()` on `Buildable` returns. +- DML command serialization: all `RawMongoCommand` kinds and typed `AggregateCommand` serialize via `JSON.stringify` and deserialize via `kind`-based rehydration with arktype validation, following the existing DDL pattern. +- The migration runner executes data transform operations with the check → (skip or run) → check → (fail or proceed) sequence. +- `check` supports three modes: a closure returning a query (empty result = done), `false` (always run), `true` (always skip). +- A `TODO` sentinel in `dataTransform` prevents attestation. +- Migration scaffolding copies `contract.json` and `contract.d.ts` into the migration directory. ## Non-Functional Requirements -- No TypeScript is executed at apply time. Only serialized command ASTs from `ops.json` are loaded and executed. -- DML command serialization/deserialization is consistent with the existing DDL pattern — same `kind`-based dispatch, same arktype validation schemas, same module (`mongo-ops-serializer.ts` or equivalent). -- No changes to the existing DDL factory functions, the DDL runner path, or the `MongoCommandExecutor`. +- No TypeScript is executed at apply time. +- DML serialization is consistent with the existing DDL pattern — same module, same dispatch mechanism. 
+- No changes to existing DDL factories, DDL runner path, or `MongoCommandExecutor`. ## Non-goals -- A strongly typed Mongo query builder that validates field names and operators against the contract. The existing `mongoRaw` (untyped documents) and `mongoPipeline` (typed aggregation) are sufficient for v1. A typed builder can be added later and will work transparently. -- Auto-detection of data migration needs from contract diffs (planner integration). For v1, data transforms are manually authored via `migration new`. -- Scaffolding `dataTransform` with TODO placeholders from the planner. This requires planner integration and is future work. -- Transaction/session support for data transforms. MongoDB multi-document transactions are orthogonal and can be layered on later. -- Graph integration (invariant tracking, invariant-aware routing, ledger recording of data migration names). This is the same scope as the Postgres graph integration work and is deferred. -- Aggregation pipeline mutations via `$merge`/`$out` as a first-class pattern. Users can express these via `mongoPipeline` already; no special infrastructure needed. +- **Strongly typed Mongo query builder.** `mongoRaw` has untyped filter/update documents; `mongoPipeline` has richer typing for aggregations. A fully typed builder validating field names against the contract is future work and will plug in transparently. +- **Planner integration.** Auto-detecting data migration needs from contract diffs and scaffolding `dataTransform` with TODO placeholders. For v1, data transforms are manually authored. +- **Transaction/session support.** MongoDB multi-document transactions are orthogonal and can be layered on. +- **Graph integration.** Invariant tracking, invariant-aware routing, and ledger recording of data migration names are deferred (same scope as the Postgres graph integration work). 
# Acceptance Criteria ## Authoring - [ ] A migration file with `dataTransform` using `mongoRaw` for `run` and `mongoPipeline` for `check` type-checks and can be verified -- [ ] `check` and `run` closures receive no injected parameters — they use module-scoped query builders -- [ ] The resolver calls `.build()` on `Buildable` returns from `check`/`run` +- [ ] The resolver calls `.build()` on `Buildable` returns from `check`/`run` closures - [ ] A `TODO` sentinel in `dataTransform` prevents attestation - [ ] `check: false` (always run) and `check: true` (always skip) are supported ## Serialization -- [ ] `MongoQueryPlan` command ASTs round-trip through `JSON.stringify` → deserialize via `kind`-based rehydration +- [ ] `MongoQueryPlan` command ASTs round-trip through `JSON.stringify` → `kind`-based deserialization - [ ] All `RawMongoCommand` kinds are handled: `rawUpdateMany`, `rawUpdateOne`, `rawInsertOne`, `rawInsertMany`, `rawDeleteMany`, `rawDeleteOne`, `rawAggregate`, `rawFindOneAndUpdate`, `rawFindOneAndDelete` -- [ ] Typed command kinds are handled: `aggregate` (from `mongoPipeline`) +- [ ] Typed `aggregate` command (from `mongoPipeline`) is handled - [ ] Deserialization validates each command shape with arktype schemas -- [ ] Data transform operations appear in `ops.json` with serialized `check` and `run` fields ## Execution - [ ] The runner executes data transform operations: check → (skip or run) → check again → (fail or proceed) -- [ ] DML commands are executed via `MongoAdapter.lower()` → `MongoDriver.execute()` +- [ ] DML commands execute via `MongoAdapter.lower()` → `MongoDriver.execute()` - [ ] On retry, `check` determines whether to skip the data transform's `run` - [ ] If `check` returns violations after `run`, the migration fails with a diagnostic -## Scaffolding - -- [ ] Migration scaffolding produces `contract.json` and `contract.d.ts` in the migration directory -- [ ] Users can create intermediate contracts in the migration directory for complex 
migrations - ## End-to-end -- [ ] A data transform migration round-trips: author → verify (serialize) → apply (deserialize + execute) against a real MongoDB instance +- [ ] A data transform migration round-trips: author → verify → apply against a real MongoDB instance - [ ] A migration with both DDL operations and a data transform executes correctly in sequence -- [ ] A migration with an intermediate contract for mid-chain typed queries works end-to-end +- [ ] Migration scaffolding produces `contract.json` and `contract.d.ts` in the migration directory # Other Considerations ## Security -No change from existing model. No TypeScript is executed at apply time — only serialized command ASTs from `ops.json`. Data migration commands run with the same database permissions as the migration runner. +No change from existing model. No TypeScript is executed at apply time. Data migration commands run with the same database permissions as the migration runner. ## Observability -The runner logs data transform start/completion/failure with the migration name, matching the Postgres runner behavior. +The runner logs data transform start/completion/failure with the migration name. + +# Alternatives considered + +## Callback injection (Postgres pattern) + +The Postgres `dataTransform` injects a typed `Db` client into `check`/`run` callbacks. This is necessary for SQL because the query builder needs a runtime execution context (contract + query operation types + adapter) to construct queries. For MongoDB, the query builders are fully static — they need only a contract — so injection adds complexity without benefit. + +## Migration-specific query builders (`createMongoBuilders`) + +A `createMongoBuilders()` helper (analogous to Postgres's `createBuilders()`) that returns data-transform-specific builder functions. Rejected because it restricts what operations the user can express and duplicates the existing query builder API surface. 
Using the general-purpose query builders directly is simpler and more flexible. + +## Direct `MongoQueryPlan` construction (no closures) + +Since the query builders are static, `check`/`run` could accept `MongoQueryPlan` objects directly instead of closures. Closures are marginally better because they defer `.build()` to the resolver (consistent with the Postgres pattern) and allow the resolver to call `.build()` automatically rather than requiring the user to write it. # References - Parent project spec: [`projects/mongo-migration-authoring/spec.md`](../spec.md) - Cross-target data migrations spec: [`projects/graph-based-migrations/specs/data-migrations-spec.md`](../../graph-based-migrations/specs/data-migrations-spec.md) - Existing Mongo DDL factories: [`packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts`](../../../packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts) -- Existing DDL serializer: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts`](../../../packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts) +- DDL serializer: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts`](../../../packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts) - DML command AST: [`packages/2-mongo-family/4-query/query-ast/src/commands.ts`](../../../packages/2-mongo-family/4-query/query-ast/src/commands.ts) - Raw command AST: [`packages/2-mongo-family/4-query/query-ast/src/raw-commands.ts`](../../../packages/2-mongo-family/4-query/query-ast/src/raw-commands.ts) -- `mongoRaw` client: [`packages/2-mongo-family/5-query-builders/orm/src/mongo-raw.ts`](../../../packages/2-mongo-family/5-query-builders/orm/src/mongo-raw.ts) -- `mongoPipeline` builder: [`packages/2-mongo-family/5-query-builders/pipeline-builder/src/pipeline.ts`](../../../packages/2-mongo-family/5-query-builders/pipeline-builder/src/pipeline.ts) -- Postgres data transform implementation: 
[`packages/3-targets/3-targets/postgres/src/core/migrations/operation-descriptors.ts`](../../../packages/3-targets/3-targets/postgres/src/core/migrations/operation-descriptors.ts) -- Mongo runner: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts`](../../../packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts) -- Mongo adapter (DML lowering): [`packages/3-mongo-target/2-mongo-adapter/src/mongo-adapter.ts`](../../../packages/3-mongo-target/2-mongo-adapter/src/mongo-adapter.ts) +- `mongoRaw`: [`packages/2-mongo-family/5-query-builders/orm/src/mongo-raw.ts`](../../../packages/2-mongo-family/5-query-builders/orm/src/mongo-raw.ts) +- `mongoPipeline`: [`packages/2-mongo-family/5-query-builders/pipeline-builder/src/pipeline.ts`](../../../packages/2-mongo-family/5-query-builders/pipeline-builder/src/pipeline.ts) +- Postgres data transform: [`packages/3-targets/3-targets/postgres/src/core/migrations/operation-descriptors.ts`](../../../packages/3-targets/3-targets/postgres/src/core/migrations/operation-descriptors.ts) - ADR 188 — MongoDB migration operation model - ADR 191 — Generic three-phase migration operation envelope -- ADR 176 — Data migrations as invariant-guarded transitions - -# Decisions - -1. **No callback injection.** Unlike Postgres `dataTransform` where the resolver creates a `Db` and passes it to callbacks, Mongo data transforms use module-scoped query builders constructed from the scaffolded contract. The closures capture these builders via closure scope. This is simpler because the Mongo query builders are fully static — they need only a contract, not a runtime context. - -2. **Same serialization pattern as DDL.** DML commands serialize/deserialize using the same `kind`-based rehydration as DDL commands. No separate serialization mechanism. The `mongo-ops-serializer` is extended (or a parallel module added) to handle DML command kinds. - -3. 
**`MongoAdapter` + `MongoDriver` for DML execution.** Data transform commands are executed through the existing runtime execution path (`MongoAdapter.lower()` → `MongoDriver.execute()`), not through `MongoCommandExecutor` (which handles DDL only). This reuses proven infrastructure without extending the DDL executor. - -4. **Contract scaffolded into migration directory.** The contract is co-located with the migration rather than referenced by path. This makes migrations self-contained and avoids breakage when the source schema evolves. # Open Questions -1. **Operation type in ops.json**: Data transform operations don't fit the existing `MongoMigrationPlanOperation` shape (which has `precheck`/`execute`/`postcheck` containing DDL commands). Options: (a) extend the union to include a data transform variant with `check`/`run` fields, (b) use a separate operation discriminant (like Postgres's `operationClass: 'data'`), (c) use a new top-level type. **Default assumption:** Option (b) — use `operationClass: 'data'` as the discriminant, with `check` and `run` fields instead of `precheck`/`execute`/`postcheck`. +1. **Operation type shape in ops.json.** Data transform operations don't fit the existing `MongoMigrationPlanOperation` shape (which has `precheck`/`execute`/`postcheck` containing DDL commands). **Default assumption:** use `operationClass: 'data'` as discriminant, with `check`/`run` fields instead of `precheck`/`execute`/`postcheck`. -2. **Where does `dataTransform` live?** The DDL factories are in `@prisma-next/target-mongo/migration`. `dataTransform` could go there too, or in `@prisma-next/family-mongo/migration` alongside `MongoMigration`. **Default assumption:** In `@prisma-next/target-mongo/migration` alongside the DDL factories, since it produces an operation that goes into the same `ops.json` and is consumed by the same runner. +2. 
**Where does `dataTransform` live?** **Default assumption:** in `@prisma-next/target-mongo/migration` alongside the DDL factories, since it produces an operation consumed by the same runner and serialized to the same `ops.json`. -3. **Aggregation pipeline stage serialization**: The typed `MongoPipelineStage` classes (from `mongoPipeline`) are `MongoAstNode` subclasses that serialize via `JSON.stringify`. Deserialization needs to reconstruct the stage class instances from `kind` discriminants. There are ~25 stage kinds. Is it worth implementing full rehydration for all of them for v1, or should we start with the most common subset (`$match`, `$limit`, `$sort`, `$project`, `$addFields`, `$lookup`, `$merge`)? **Default assumption:** Start with a subset that covers the check query pattern (match + limit) and the most common data transform patterns, and extend as needed. +3. **Aggregation pipeline stage deserialization scope.** The typed `MongoPipelineStage` classes have ~25 `kind` values. **Default assumption:** implement the subset needed for `check` queries (`$match`, `$limit`, `$sort`, `$project`) and common transform patterns (`$addFields`, `$lookup`, `$merge`); extend as needed. 
From e8830a3734590324d9be70890d9fa9b3c64a0022 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 22:47:13 +0300 Subject: [PATCH 29/31] docs(data-migrations): use typed filter proxy in check example --- .../mongo-migration-authoring/specs/data-migrations.spec.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/projects/mongo-migration-authoring/specs/data-migrations.spec.md b/projects/mongo-migration-authoring/specs/data-migrations.spec.md index aa0489b73..2547c5608 100644 --- a/projects/mongo-migration-authoring/specs/data-migrations.spec.md +++ b/projects/mongo-migration-authoring/specs/data-migrations.spec.md @@ -33,7 +33,7 @@ export default class extends Migration { dataTransform("backfill-status", { check: () => agg.from('users') - .match({ status: { $exists: false } }) + .match((f) => f.status.exists(false)) .limit(1), run: () => raw.collection('users') .updateMany({ status: { $exists: false } }, { $set: { status: "active" } }), From d42276f2161b253b72c05eef4cb10434ab3379e5 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 23:03:38 +0300 Subject: [PATCH 30/31] fix(target-mongo): preserve explicit unique:false in createIndex factory Replace || with ?? to avoid coercing false to undefined. --- .../1-mongo-target/src/core/migration-factories.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts index 46f279bbc..b23bc4748 100644 --- a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts +++ b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts @@ -59,7 +59,7 @@ export function createIndex( description: `create index on ${collection}`, command: new CreateIndexCommand(collection, keys, { ...options, - unique: options?.unique || undefined, + unique: options?.unique ?? 
undefined, name, }), }, From 4ecabe157684a5c2b9051231fec5872758048869 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 23:04:46 +0300 Subject: [PATCH 31/31] fix(integration): use Promise.allSettled in afterAll teardown Ensures replSet.stop() runs even if client.close() throws. --- .../test/mongo/migration-authoring-e2e.test.ts | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/test/integration/test/mongo/migration-authoring-e2e.test.ts b/test/integration/test/mongo/migration-authoring-e2e.test.ts index 6a3c9fe4d..cdd65e791 100644 --- a/test/integration/test/mongo/migration-authoring-e2e.test.ts +++ b/test/integration/test/mongo/migration-authoring-e2e.test.ts @@ -48,12 +48,10 @@ describe( }); afterAll(async () => { - try { - await client?.close(); - await replSet?.stop(); - } catch { - // ignore cleanup errors - } + await Promise.allSettled([ + client?.close() ?? Promise.resolve(), + replSet?.stop() ?? Promise.resolve(), + ]); }, timeouts.spinUpMongoMemoryServer); async function runOps(ops: readonly MongoMigrationPlanOperation[]): Promise<{