diff --git a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.json b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.json new file mode 100644 index 0000000000..22ec0f993f --- /dev/null +++ b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.json @@ -0,0 +1,8 @@ +{ + "migrationId": null, + "from": "sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe", + "to": "sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe", + "kind": "regular", + "labels": ["add-posts-author-index"], + "createdAt": "2026-04-15T17:17:30.570Z" +} diff --git a/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts new file mode 100644 index 0000000000..fd99122bd3 --- /dev/null +++ b/examples/mongo-demo/migrations/20260415_add-posts-author-index/migration.ts @@ -0,0 +1,25 @@ +import { Migration } from '@prisma-next/family-mongo/migration'; +import { createIndex } from '@prisma-next/target-mongo/migration'; + +class AddPostsAuthorIndex extends Migration { + override describe() { + return { + from: 'sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe', + to: 'sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe', + labels: ['add-posts-author-index'], + }; + } + + override plan() { + return [ + createIndex('posts', [{ field: 'authorId', direction: 1 }]), + createIndex('posts', [ + { field: 'createdAt', direction: -1 }, + { field: 'authorId', direction: 1 }, + ]), + ]; + } +} + +export default AddPostsAuthorIndex; +Migration.run(import.meta.url, AddPostsAuthorIndex); diff --git a/examples/mongo-demo/migrations/20260415_add-posts-author-index/ops.json b/examples/mongo-demo/migrations/20260415_add-posts-author-index/ops.json new file mode 100644 index 0000000000..3a2b1c7ea9 --- /dev/null +++ 
b/examples/mongo-demo/migrations/20260415_add-posts-author-index/ops.json @@ -0,0 +1,122 @@ +[ + { + "id": "index.posts.create(authorId:1)", + "label": "Create index on posts (authorId:1)", + "operationClass": "additive", + "precheck": [ + { + "description": "index does not already exist on posts", + "source": { + "kind": "listIndexes", + "collection": "posts" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "authorId": 1 + } + }, + "expect": "notExists" + } + ], + "execute": [ + { + "description": "create index on posts", + "command": { + "kind": "createIndex", + "collection": "posts", + "keys": [ + { + "field": "authorId", + "direction": 1 + } + ], + "name": "authorId_1" + } + } + ], + "postcheck": [ + { + "description": "index exists on posts", + "source": { + "kind": "listIndexes", + "collection": "posts" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "authorId": 1 + } + }, + "expect": "exists" + } + ] + }, + { + "id": "index.posts.create(createdAt:-1,authorId:1)", + "label": "Create index on posts (createdAt:-1, authorId:1)", + "operationClass": "additive", + "precheck": [ + { + "description": "index does not already exist on posts", + "source": { + "kind": "listIndexes", + "collection": "posts" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "createdAt": -1, + "authorId": 1 + } + }, + "expect": "notExists" + } + ], + "execute": [ + { + "description": "create index on posts", + "command": { + "kind": "createIndex", + "collection": "posts", + "keys": [ + { + "field": "createdAt", + "direction": -1 + }, + { + "field": "authorId", + "direction": 1 + } + ], + "name": "createdAt_-1_authorId_1" + } + } + ], + "postcheck": [ + { + "description": "index exists on posts", + "source": { + "kind": "listIndexes", + "collection": "posts" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "createdAt": -1, + "authorId": 1 + } + 
}, + "expect": "exists" + } + ] + } +] diff --git a/examples/mongo-demo/package.json b/examples/mongo-demo/package.json index 2141bf20ea..a56eaa392d 100644 --- a/examples/mongo-demo/package.json +++ b/examples/mongo-demo/package.json @@ -17,9 +17,10 @@ }, "dependencies": { "@prisma-next/adapter-mongo": "workspace:*", + "@prisma-next/target-mongo": "workspace:*", "@prisma-next/contract": "workspace:*", - "@prisma-next/middleware-telemetry": "workspace:*", "@prisma-next/driver-mongo": "workspace:*", + "@prisma-next/middleware-telemetry": "workspace:*", "@prisma-next/mongo-contract": "workspace:*", "@prisma-next/mongo-orm": "workspace:*", "@prisma-next/mongo-pipeline-builder": "workspace:*", @@ -40,6 +41,7 @@ "@types/react-dom": "^19.2.3", "@vitejs/plugin-react-swc": "^4.2.3", "mongodb-memory-server": "catalog:", + "pathe": "^2.0.3", "tsx": "^4.19.2", "typescript": "catalog:", "vite": "catalog:", diff --git a/examples/mongo-demo/test/manual-migration.test.ts b/examples/mongo-demo/test/manual-migration.test.ts new file mode 100644 index 0000000000..9151e35917 --- /dev/null +++ b/examples/mongo-demo/test/manual-migration.test.ts @@ -0,0 +1,121 @@ +import { readFileSync } from 'node:fs'; +import { deserializeMongoOps, MongoMigrationRunner } from '@prisma-next/adapter-mongo/control'; +import mongoControlDriver from '@prisma-next/driver-mongo/control'; +import { timeouts } from '@prisma-next/test-utils'; +import { type Db, MongoClient } from 'mongodb'; +import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { resolve } from 'pathe'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import AddPostsAuthorIndex from '../migrations/20260415_add-posts-author-index/migration'; + +const ALL_POLICY = { + allowedOperationClasses: ['additive', 'widening', 'destructive'] as const, +}; + +const migrationDir = resolve(import.meta.dirname, '../migrations/20260415_add-posts-author-index'); + +describe( + 'hand-authored migration 
(20260415_add-posts-author-index)', + { timeout: timeouts.spinUpMongoMemoryServer }, + () => { + let replSet: MongoMemoryReplSet; + let client: MongoClient; + let db: Db; + const dbName = 'manual_migration_test'; + + beforeAll(async () => { + replSet = await MongoMemoryReplSet.create({ + instanceOpts: [ + { launchTimeout: timeouts.spinUpMongoMemoryServer, storageEngine: 'wiredTiger' }, + ], + replSet: { count: 1, storageEngine: 'wiredTiger' }, + }); + client = new MongoClient(replSet.getUri()); + await client.connect(); + db = client.db(dbName); + }, timeouts.spinUpMongoMemoryServer); + + beforeEach(async () => { + await db.dropDatabase(); + }); + + afterAll(async () => { + try { + await client?.close(); + await replSet?.stop(); + } catch { + // ignore cleanup errors + } + }, timeouts.spinUpMongoMemoryServer); + + it('migration class can be imported and plan() called directly', () => { + const instance = new AddPostsAuthorIndex(); + const ops = instance.plan(); + expect(ops).toHaveLength(2); + expect(ops[0]!.id).toBe('index.posts.create(authorId:1)'); + expect(ops[1]!.id).toBe('index.posts.create(createdAt:-1,authorId:1)'); + }); + + it('migration.json has expected structure', () => { + const manifest = JSON.parse(readFileSync(resolve(migrationDir, 'migration.json'), 'utf-8')); + + expect(manifest.migrationId).toBeNull(); + expect(manifest.kind).toBe('regular'); + expect(manifest.labels).toEqual(['add-posts-author-index']); + expect(manifest.from).toMatch(/^sha256:/); + expect(manifest.to).toMatch(/^sha256:/); + expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + }); + + it('ops.json deserializes and applies against real MongoDB', async () => { + await db.createCollection('posts'); + + const opsJson = readFileSync(resolve(migrationDir, 'ops.json'), 'utf-8'); + const ops = deserializeMongoOps(JSON.parse(opsJson)); + expect(ops).toHaveLength(2); + + const controlDriver = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new 
MongoMigrationRunner(); + const result = await runner.execute({ + plan: { + targetId: 'mongo', + destination: { + storageHash: + 'sha256:358522152ebe3ca9db3d573471c656778c1845f4cdd424caf06632352b9772fe', + }, + operations: JSON.parse(opsJson), + }, + driver: controlDriver, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + + expect(result.ok).toBe(true); + if (!result.ok) return; + expect(result.value.operationsExecuted).toBe(2); + + const indexes = await db.collection('posts').listIndexes().toArray(); + + const authorIdIndex = indexes.find( + (idx) => + idx['key'] && + (idx['key'] as Record)['authorId'] === 1 && + !('createdAt' in (idx['key'] as Record)), + ); + expect(authorIdIndex).toBeDefined(); + + const compoundIndex = indexes.find( + (idx) => + idx['key'] && + (idx['key'] as Record)['createdAt'] === -1 && + (idx['key'] as Record)['authorId'] === 1, + ); + expect(compoundIndex).toBeDefined(); + } finally { + await controlDriver.close(); + } + }); + }, +); diff --git a/examples/retail-store/migrations/20260415_add-product-validation/migration.json b/examples/retail-store/migrations/20260415_add-product-validation/migration.json new file mode 100644 index 0000000000..2ed42e2730 --- /dev/null +++ b/examples/retail-store/migrations/20260415_add-product-validation/migration.json @@ -0,0 +1,8 @@ +{ + "migrationId": null, + "from": "sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d", + "to": "sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d", + "kind": "regular", + "labels": ["add-product-validation"], + "createdAt": "2026-04-15T18:46:18.776Z" +} diff --git a/examples/retail-store/migrations/20260415_add-product-validation/migration.ts b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts new file mode 100644 index 0000000000..2693013c77 --- /dev/null +++ b/examples/retail-store/migrations/20260415_add-product-validation/migration.ts @@ -0,0 +1,37 @@ +import { 
Migration } from '@prisma-next/family-mongo/migration'; +import { createIndex, setValidation } from '@prisma-next/target-mongo/migration'; + +class AddProductValidation extends Migration { + override describe() { + return { + from: 'sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d', + to: 'sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d', + labels: ['add-product-validation'], + }; + } + + override plan() { + return [ + setValidation( + 'products', + { + bsonType: 'object', + required: ['name', 'price', 'category'], + properties: { + name: { bsonType: 'string' }, + price: { bsonType: 'number', minimum: 0 }, + category: { bsonType: 'string' }, + }, + }, + { validationLevel: 'moderate', validationAction: 'warn' }, + ), + createIndex('products', [ + { field: 'category', direction: 1 }, + { field: 'price', direction: 1 }, + ]), + ]; + } +} + +export default AddProductValidation; +Migration.run(import.meta.url, AddProductValidation); diff --git a/examples/retail-store/migrations/20260415_add-product-validation/ops.json b/examples/retail-store/migrations/20260415_add-product-validation/ops.json new file mode 100644 index 0000000000..f3a945173a --- /dev/null +++ b/examples/retail-store/migrations/20260415_add-product-validation/ops.json @@ -0,0 +1,101 @@ +[ + { + "id": "collection.products.setValidation", + "label": "Set validation on products", + "operationClass": "destructive", + "precheck": [], + "execute": [ + { + "description": "set validation on products", + "command": { + "kind": "collMod", + "collection": "products", + "validator": { + "$jsonSchema": { + "bsonType": "object", + "required": ["name", "price", "category"], + "properties": { + "name": { + "bsonType": "string" + }, + "price": { + "bsonType": "number", + "minimum": 0 + }, + "category": { + "bsonType": "string" + } + } + } + }, + "validationLevel": "moderate", + "validationAction": "warn" + } + } + ], + "postcheck": [] + }, + { + "id": 
"index.products.create(category:1,price:1)", + "label": "Create index on products (category:1, price:1)", + "operationClass": "additive", + "precheck": [ + { + "description": "index does not already exist on products", + "source": { + "kind": "listIndexes", + "collection": "products" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "category": 1, + "price": 1 + } + }, + "expect": "notExists" + } + ], + "execute": [ + { + "description": "create index on products", + "command": { + "kind": "createIndex", + "collection": "products", + "keys": [ + { + "field": "category", + "direction": 1 + }, + { + "field": "price", + "direction": 1 + } + ], + "name": "category_1_price_1" + } + } + ], + "postcheck": [ + { + "description": "index exists on products", + "source": { + "kind": "listIndexes", + "collection": "products" + }, + "filter": { + "kind": "field", + "field": "key", + "op": "$eq", + "value": { + "category": 1, + "price": 1 + } + }, + "expect": "exists" + } + ] + } +] diff --git a/examples/retail-store/package.json b/examples/retail-store/package.json index a5b6bc84a6..d5afe08daa 100644 --- a/examples/retail-store/package.json +++ b/examples/retail-store/package.json @@ -18,9 +18,10 @@ }, "dependencies": { "@prisma-next/adapter-mongo": "workspace:*", + "@prisma-next/target-mongo": "workspace:*", "@prisma-next/contract": "workspace:*", - "@prisma-next/middleware-telemetry": "workspace:*", "@prisma-next/driver-mongo": "workspace:*", + "@prisma-next/middleware-telemetry": "workspace:*", "@prisma-next/mongo-contract": "workspace:*", "@prisma-next/mongo-orm": "workspace:*", "@prisma-next/mongo-pipeline-builder": "workspace:*", @@ -55,6 +56,7 @@ "@types/react": "^19.2.14", "@types/react-dom": "^19.2.3", "mongodb-memory-server": "catalog:", + "pathe": "^2.0.3", "tsx": "^4.19.2", "typescript": "catalog:", "vitest": "catalog:" diff --git a/examples/retail-store/test/manual-migration.test.ts 
b/examples/retail-store/test/manual-migration.test.ts new file mode 100644 index 0000000000..59f3bd0546 --- /dev/null +++ b/examples/retail-store/test/manual-migration.test.ts @@ -0,0 +1,116 @@ +import { readFileSync } from 'node:fs'; +import { deserializeMongoOps, MongoMigrationRunner } from '@prisma-next/adapter-mongo/control'; +import mongoControlDriver from '@prisma-next/driver-mongo/control'; +import { timeouts } from '@prisma-next/test-utils'; +import { type Db, MongoClient } from 'mongodb'; +import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { resolve } from 'pathe'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import AddProductValidation from '../migrations/20260415_add-product-validation/migration'; + +const ALL_POLICY = { + allowedOperationClasses: ['additive', 'widening', 'destructive'] as const, +}; + +const migrationDir = resolve(import.meta.dirname, '../migrations/20260415_add-product-validation'); + +describe( + 'hand-authored migration (20260415_add-product-validation)', + { timeout: timeouts.spinUpMongoMemoryServer }, + () => { + let replSet: MongoMemoryReplSet; + let client: MongoClient; + let db: Db; + const dbName = 'manual_migration_test'; + + beforeAll(async () => { + replSet = await MongoMemoryReplSet.create({ + instanceOpts: [ + { launchTimeout: timeouts.spinUpMongoMemoryServer, storageEngine: 'wiredTiger' }, + ], + replSet: { count: 1, storageEngine: 'wiredTiger' }, + }); + client = new MongoClient(replSet.getUri()); + await client.connect(); + db = client.db(dbName); + }, timeouts.spinUpMongoMemoryServer); + + beforeEach(async () => { + await db.dropDatabase(); + }); + + afterAll(async () => { + try { + await client?.close(); + await replSet?.stop(); + } catch { + // ignore cleanup errors + } + }, timeouts.spinUpMongoMemoryServer); + + it('migration class can be imported and plan() called directly', () => { + const instance = new AddProductValidation(); + const ops = instance.plan(); + 
expect(ops).toHaveLength(2); + expect(ops[0]!.id).toBe('collection.products.setValidation'); + expect(ops[1]!.id).toContain('index.products.create'); + }); + + it('migration.json has expected structure', () => { + const manifest = JSON.parse(readFileSync(resolve(migrationDir, 'migration.json'), 'utf-8')); + + expect(manifest.migrationId).toBeNull(); + expect(manifest.kind).toBe('regular'); + expect(manifest.labels).toEqual(['add-product-validation']); + expect(manifest.from).toMatch(/^sha256:/); + expect(manifest.to).toMatch(/^sha256:/); + expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + }); + + it('ops.json deserializes and applies against real MongoDB', async () => { + await db.createCollection('products'); + + const opsJson = readFileSync(resolve(migrationDir, 'ops.json'), 'utf-8'); + const ops = deserializeMongoOps(JSON.parse(opsJson)); + expect(ops).toHaveLength(2); + + const controlDriver = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new MongoMigrationRunner(); + const result = await runner.execute({ + plan: { + targetId: 'mongo', + destination: { + storageHash: + 'sha256:e5cfc21670435e53a4af14a665d61d8ba716d5e2e67b63c1443affdcad86985d', + }, + operations: JSON.parse(opsJson), + }, + driver: controlDriver, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + + expect(result.ok).toBe(true); + if (!result.ok) return; + expect(result.value.operationsExecuted).toBe(2); + + const info = await db.listCollections({ name: 'products' }).toArray(); + const options = (info[0] as Record)['options'] as Record; + expect(options['validator']).toBeDefined(); + + const indexes = await db.collection('products').listIndexes().toArray(); + const categoryPriceIndex = indexes.find( + (idx) => + idx['key'] && + (idx['key'] as Record)['category'] === 1 && + (idx['key'] as Record)['price'] === 1, + ); + expect(categoryPriceIndex).toBeDefined(); + } finally { + await controlDriver.close(); + } + }); + }, 
+); diff --git a/packages/1-framework/3-tooling/migration/package.json b/packages/1-framework/3-tooling/migration/package.json index 749776ff3e..ad257e9566 100644 --- a/packages/1-framework/3-tooling/migration/package.json +++ b/packages/1-framework/3-tooling/migration/package.json @@ -64,6 +64,10 @@ "types": "./dist/exports/migration-ts.d.mts", "import": "./dist/exports/migration-ts.mjs" }, + "./migration": { + "types": "./dist/exports/migration.d.mts", + "import": "./dist/exports/migration.mjs" + }, "./package.json": "./package.json" }, "repository": { diff --git a/packages/1-framework/3-tooling/migration/src/exports/migration.ts b/packages/1-framework/3-tooling/migration/src/exports/migration.ts new file mode 100644 index 0000000000..a42ef7feba --- /dev/null +++ b/packages/1-framework/3-tooling/migration/src/exports/migration.ts @@ -0,0 +1 @@ +export { Migration, type MigrationMeta } from '../migration-base'; diff --git a/packages/1-framework/3-tooling/migration/src/migration-base.ts b/packages/1-framework/3-tooling/migration/src/migration-base.ts new file mode 100644 index 0000000000..918a8b6ab7 --- /dev/null +++ b/packages/1-framework/3-tooling/migration/src/migration-base.ts @@ -0,0 +1,141 @@ +import { realpathSync, writeFileSync } from 'node:fs'; +import { fileURLToPath } from 'node:url'; +import { type } from 'arktype'; +import { dirname, join } from 'pathe'; + +export interface MigrationMeta { + readonly from: string; + readonly to: string; + readonly kind?: 'regular' | 'baseline'; + readonly labels?: readonly string[]; +} + +const MigrationMetaSchema = type({ + from: 'string', + to: 'string', + 'kind?': "'regular' | 'baseline'", + 'labels?': 'string[]', +}); + +export abstract class Migration { + abstract plan(): TOperation[]; + + describe(): MigrationMeta | undefined { + return undefined; + } + + /** + * Entrypoint guard for migration files. When called at module scope, + * detects whether the file is being run directly (e.g. 
`tsx migration.ts`) + * and if so, serializes the migration plan to `ops.json` (and optionally + * `migration.json`) in the same directory. When the file is imported by + * another module, this is a no-op. + * + * Usage (at module scope, after the class definition): + * + * class MyMigration extends Migration { ... } + * export default MyMigration; + * Migration.run(import.meta.url, MyMigration); + */ + static run(importMetaUrl: string, MigrationClass: new () => Migration): void { + if (!importMetaUrl) return; + + const metaFilename = fileURLToPath(importMetaUrl); + const argv1 = process.argv[1]; + if (!argv1) return; + + let isEntrypoint: boolean; + try { + isEntrypoint = realpathSync(metaFilename) === realpathSync(argv1); + } catch { + return; + } + if (!isEntrypoint) return; + + const args = process.argv.slice(2); + + if (args.includes('--help')) { + printHelp(); + return; + } + + const dryRun = args.includes('--dry-run'); + const migrationDir = dirname(metaFilename); + + try { + serializeMigration(MigrationClass, migrationDir, dryRun); + } catch (err) { + process.stderr.write(`${err instanceof Error ? err.message : String(err)}\n`); + process.exitCode = 1; + } + } +} + +function printHelp(): void { + process.stdout.write( + [ + 'Usage: tsx [options]', + '', + 'Options:', + ' --dry-run Print operations to stdout without writing files', + ' --help Show this help message', + '', + ].join('\n'), + ); +} + +function buildManifest(meta: MigrationMeta): Record { + return { + migrationId: null, + from: meta.from, + to: meta.to, + kind: meta.kind ?? 'regular', + labels: meta.labels ?? 
[], + createdAt: new Date().toISOString(), + }; +} + +function serializeMigration( + MigrationClass: new () => Migration, + migrationDir: string, + dryRun: boolean, +): void { + const instance = new MigrationClass(); + + const ops = instance.plan(); + + if (!Array.isArray(ops)) { + throw new Error('plan() must return an array of operations'); + } + + const serializedOps = JSON.stringify(ops, null, 2); + + let manifest: Record | undefined; + if (typeof instance.describe === 'function') { + const rawMeta: unknown = instance.describe(); + if (rawMeta !== undefined) { + const parsed = MigrationMetaSchema(rawMeta); + if (parsed instanceof type.errors) { + throw new Error(`describe() returned invalid metadata: ${parsed.summary}`); + } + manifest = buildManifest(parsed); + } + } + + if (dryRun) { + if (manifest) { + process.stdout.write(`--- migration.json ---\n${JSON.stringify(manifest, null, 2)}\n`); + process.stdout.write('--- ops.json ---\n'); + } + process.stdout.write(`${serializedOps}\n`); + return; + } + + writeFileSync(join(migrationDir, 'ops.json'), serializedOps); + if (manifest) { + writeFileSync(join(migrationDir, 'migration.json'), JSON.stringify(manifest, null, 2)); + } + + const files = manifest ? 
'ops.json + migration.json' : 'ops.json'; + process.stdout.write(`Wrote ${files} to ${migrationDir}\n`); +} diff --git a/packages/1-framework/3-tooling/migration/test/migration-base.test.ts b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts new file mode 100644 index 0000000000..f985bc1c3f --- /dev/null +++ b/packages/1-framework/3-tooling/migration/test/migration-base.test.ts @@ -0,0 +1,262 @@ +import { execFile } from 'node:child_process'; +import { mkdtemp, readFile, rm, writeFile } from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import { promisify } from 'node:util'; +import { join, resolve } from 'pathe'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; +import { Migration } from '../src/migration-base'; + +const execFileAsync = promisify(execFile); +const packageRoot = resolve(import.meta.dirname, '..'); +const repoRoot = resolve(packageRoot, '../../../..'); + +describe('Migration', () => { + describe('plan() contract', () => { + it('can be subclassed and plan() called directly', () => { + class TestMigration extends Migration<{ id: string }> { + override plan() { + return [{ id: 'op1' }, { id: 'op2' }]; + } + } + + const m = new TestMigration(); + const ops = m.plan(); + expect(ops).toEqual([{ id: 'op1' }, { id: 'op2' }]); + }); + }); + + describe('describe() contract', () => { + it('returns undefined by default', () => { + class TestMigration extends Migration { + override plan() { + return []; + } + } + + const m = new TestMigration(); + expect(m.describe()).toBeUndefined(); + }); + + it('can be overridden to provide migration metadata', () => { + class TestMigration extends Migration { + override describe() { + return { from: 'abc', to: 'def', labels: ['test'] }; + } + override plan() { + return []; + } + } + + const m = new TestMigration(); + expect(m.describe()).toEqual({ from: 'abc', to: 'def', labels: ['test'] }); + }); + }); +}); + +describe('Migration.run() subprocess', { timeout: 15_000 }, () 
=> { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'migration-run-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + const migrationBasePath = join(packageRoot, 'src/migration-base.ts').replace(/\\/g, '/'); + + function migrationScript(planReturn: string): string { + return [ + `import { Migration } from '${migrationBasePath}';`, + '', + 'class M extends Migration {', + ' plan() {', + ` return ${planReturn};`, + ' }', + '}', + 'export default M;', + '', + 'Migration.run(import.meta.url, M);', + ].join('\n'); + } + + async function runMigration( + filename: string, + args: string[] = [], + ): Promise<{ stdout: string; stderr: string; exitCode: number }> { + const filePath = join(tmpDir, filename); + const tsxPath = join(repoRoot, 'node_modules/.bin/tsx'); + try { + const result = await execFileAsync(tsxPath, [filePath, ...args], { cwd: tmpDir }); + return { stdout: result.stdout, stderr: result.stderr, exitCode: 0 }; + } catch (error) { + const e = error as { stdout: string; stderr: string; code: number }; + return { stdout: e.stdout || '', stderr: e.stderr || '', exitCode: e.code || 1 }; + } + } + + it('writes ops.json when run as entrypoint', async () => { + const script = migrationScript('[{ id: "op1", label: "Test op" }]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('Wrote ops.json'); + + const opsJson = await readFile(join(tmpDir, 'ops.json'), 'utf-8'); + const ops = JSON.parse(opsJson); + expect(ops).toEqual([{ id: 'op1', label: 'Test op' }]); + }); + + it('prints operations with --dry-run and does not write ops.json', async () => { + const script = migrationScript('[{ id: "op1", label: "Dry run op" }]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts', ['--dry-run']); 
+ expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('op1'); + expect(result.stdout).toContain('Dry run op'); + + const opsExists = await readFile(join(tmpDir, 'ops.json'), 'utf-8').catch(() => null); + expect(opsExists).toBeNull(); + }); + + it('prints usage with --help', async () => { + const script = migrationScript('[]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts', ['--help']); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('--dry-run'); + expect(result.stdout).toContain('--help'); + }); + + it('is a no-op when the file is imported', async () => { + const migrationFile = migrationScript('[{ id: "op1" }]'); + await writeFile(join(tmpDir, 'migration.ts'), migrationFile); + + const importerScript = [ + `import M from '${join(tmpDir, 'migration.ts').replace(/\\/g, '/')}';`, + 'const m = new M();', + 'const ops = m.plan();', + 'console.log(JSON.stringify(ops));', + ].join('\n'); + await writeFile(join(tmpDir, 'importer.ts'), importerScript); + + const result = await runMigration('importer.ts'); + expect(result.exitCode).toBe(0); + + const opsExists = await readFile(join(tmpDir, 'ops.json'), 'utf-8').catch(() => null); + expect(opsExists).toBeNull(); + + const importedOps = JSON.parse(result.stdout.trim()); + expect(importedOps).toEqual([{ id: 'op1' }]); + }); + + it('exits with error when plan() returns non-array', async () => { + const script = migrationScript('"not an array"'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).not.toBe(0); + expect(result.stderr).toContain('plan()'); + }); + + describe('migration.json output', () => { + function migrationWithDescribe(meta: string, planReturn: string): string { + return [ + `import { Migration } from '${migrationBasePath}';`, + '', + 'class M extends Migration {', + ' describe() {', + ` return ${meta};`, + ' }', + ' plan() 
{', + ` return ${planReturn};`, + ' }', + '}', + 'export default M;', + '', + 'Migration.run(import.meta.url, M);', + ].join('\n'); + } + + it('writes migration.json when describe() is implemented', async () => { + const script = migrationWithDescribe( + '{ from: "abc123", to: "def456", labels: ["add-users"] }', + '[{ id: "op1" }]', + ); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('ops.json + migration.json'); + + const manifest = JSON.parse(await readFile(join(tmpDir, 'migration.json'), 'utf-8')); + expect(manifest.from).toBe('abc123'); + expect(manifest.to).toBe('def456'); + expect(manifest.labels).toEqual(['add-users']); + expect(manifest.migrationId).toBeNull(); + expect(manifest.kind).toBe('regular'); + expect(manifest.createdAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + }); + + it('does not write migration.json when describe() is absent', async () => { + const script = migrationScript('[{ id: "op1" }]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).toBe(0); + + expect(await readFile(join(tmpDir, 'ops.json'), 'utf-8')).toBeTruthy(); + const manifestExists = await readFile(join(tmpDir, 'migration.json'), 'utf-8').catch( + () => null, + ); + expect(manifestExists).toBeNull(); + }); + + it('defaults kind to regular and labels to empty', async () => { + const script = migrationWithDescribe('{ from: "abc", to: "def" }', '[]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).toBe(0); + + const manifest = JSON.parse(await readFile(join(tmpDir, 'migration.json'), 'utf-8')); + expect(manifest.kind).toBe('regular'); + expect(manifest.labels).toEqual([]); + }); + + it('rejects invalid describe() return with clear error', async () => { + const script = 
migrationWithDescribe('{ bad: true }', '[{ id: "op1" }]'); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts'); + expect(result.exitCode).not.toBe(0); + expect(result.stderr).toContain('describe()'); + expect(result.stderr).toContain('invalid'); + }); + + it('includes migration.json content in --dry-run output', async () => { + const script = migrationWithDescribe( + '{ from: "abc", to: "def", labels: ["test"] }', + '[{ id: "op1" }]', + ); + await writeFile(join(tmpDir, 'migration.ts'), script); + + const result = await runMigration('migration.ts', ['--dry-run']); + expect(result.exitCode).toBe(0); + + const output = result.stdout; + expect(output).toContain('"from"'); + expect(output).toContain('"to"'); + expect(output).toContain('"op1"'); + + const manifestExists = await readFile(join(tmpDir, 'migration.json'), 'utf-8').catch( + () => null, + ); + expect(manifestExists).toBeNull(); + }); + }); +}); diff --git a/packages/1-framework/3-tooling/migration/tsdown.config.ts b/packages/1-framework/3-tooling/migration/tsdown.config.ts index b834cd40cd..13fff35420 100644 --- a/packages/1-framework/3-tooling/migration/tsdown.config.ts +++ b/packages/1-framework/3-tooling/migration/tsdown.config.ts @@ -9,6 +9,7 @@ export default defineConfig({ 'exports/refs': 'src/exports/refs.ts', 'exports/constants': 'src/exports/constants.ts', 'exports/migration-ts': 'src/exports/migration-ts.ts', + 'exports/migration': 'src/exports/migration.ts', }, exports: { enabled: false }, }); diff --git a/packages/2-mongo-family/9-family/README.md b/packages/2-mongo-family/9-family/README.md index 940de2007c..e11734a1c8 100644 --- a/packages/2-mongo-family/9-family/README.md +++ b/packages/2-mongo-family/9-family/README.md @@ -21,6 +21,7 @@ This package is the Mongo family integration point for both control-plane assemb ## Entrypoints - `./control`: control-plane entrypoint exporting `mongoFamilyDescriptor`, `mongoTargetDescriptor`, 
`createMongoFamilyInstance`, and `MongoControlFamilyInstance` +- `./migration`: migration authoring — `Migration` class, factory functions, and strategies (re-exported from `@prisma-next/target-mongo/migration`) - `./pack`: pure pack ref for TypeScript authoring flows such as `@prisma-next/mongo-contract-ts/contract-builder` ## Usage @@ -95,18 +96,49 @@ export const contract = defineContract({ The current `contract.ts` slice supports roots and collections, typed reference relations, owned models with `storage.relations`, value objects, and discriminator-based polymorphism. +### Migration authoring + +```typescript +import { Migration, createIndex, createCollection } + from "@prisma-next/family-mongo/migration" + +export default class extends Migration { + describe() { + return { from: "abc123", to: "def456", labels: ["add-users"] } + } + + plan() { + return [ + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + validationLevel: "strict", + }), + createIndex("users", [{ field: "email", direction: 1 }], { unique: true }), + ] + } +} + +Migration.run(import.meta.url) +``` + +Run `node migration.ts` to produce `ops.json` and `migration.json`. Use `--dry-run` to preview without writing. 
+ ## Package Structure - `src/core/control-descriptor.ts`: `MongoFamilyDescriptor` implementation - `src/core/control-instance.ts`: `createMongoFamilyInstance()` and `MongoControlFamilyInstance` - `src/core/mongo-target-descriptor.ts`: pre-built control target descriptor derived from `@prisma-next/target-mongo/pack` +- `src/core/mongo-migration.ts`: `MongoMigration` class (fixes the `Migration` type parameter to `MongoMigrationPlanOperation`) - `src/exports/control.ts`: control-plane entrypoint +- `src/exports/migration.ts`: migration authoring entrypoint - `src/exports/pack.ts`: authoring-time family pack ref ## Dependencies - `@prisma-next/framework-components`: control-plane types and stack assembly +- `@prisma-next/migration-tools`: generic `Migration` base class - `@prisma-next/mongo-contract`: Mongo contract validation and types - `@prisma-next/mongo-contract-ts`: Mongo `contract.ts` authoring surface - `@prisma-next/mongo-emitter`: Mongo family emission hook -- `@prisma-next/target-mongo`: Mongo target pack metadata +- `@prisma-next/mongo-query-ast`: Mongo command AST types (`MongoMigrationPlanOperation`) +- `@prisma-next/target-mongo`: Mongo target pack metadata and migration factories diff --git a/packages/2-mongo-family/9-family/package.json b/packages/2-mongo-family/9-family/package.json index e056cd9ec5..ed9780e3eb 100644 --- a/packages/2-mongo-family/9-family/package.json +++ b/packages/2-mongo-family/9-family/package.json @@ -16,8 +16,10 @@ "@prisma-next/contract": "workspace:*", "@prisma-next/emitter": "workspace:*", "@prisma-next/framework-components": "workspace:*", + "@prisma-next/migration-tools": "workspace:*", "@prisma-next/mongo-contract": "workspace:*", "@prisma-next/mongo-emitter": "workspace:*", + "@prisma-next/mongo-query-ast": "workspace:*", "@prisma-next/mongo-schema-ir": "workspace:*", "@prisma-next/target-mongo": "workspace:*", "@prisma-next/utils": "workspace:*", @@ -45,6 +47,7 @@ "types": "./dist/control.d.mts", "exports": { 
"./control": "./dist/control.mjs", + "./migration": "./dist/migration.mjs", "./pack": "./dist/pack.mjs", "./package.json": "./package.json" } diff --git a/packages/2-mongo-family/9-family/src/core/mongo-migration.ts b/packages/2-mongo-family/9-family/src/core/mongo-migration.ts new file mode 100644 index 0000000000..c4b110854a --- /dev/null +++ b/packages/2-mongo-family/9-family/src/core/mongo-migration.ts @@ -0,0 +1,4 @@ +import { Migration } from '@prisma-next/migration-tools/migration'; +import type { MongoMigrationPlanOperation } from '@prisma-next/mongo-query-ast/control'; + +export abstract class MongoMigration extends Migration {} diff --git a/packages/2-mongo-family/9-family/src/exports/migration.ts b/packages/2-mongo-family/9-family/src/exports/migration.ts new file mode 100644 index 0000000000..915e9bc6f3 --- /dev/null +++ b/packages/2-mongo-family/9-family/src/exports/migration.ts @@ -0,0 +1 @@ +export { MongoMigration as Migration } from '../core/mongo-migration'; diff --git a/packages/2-mongo-family/9-family/tsdown.config.ts b/packages/2-mongo-family/9-family/tsdown.config.ts index 8fdb3adc90..cce9169182 100644 --- a/packages/2-mongo-family/9-family/tsdown.config.ts +++ b/packages/2-mongo-family/9-family/tsdown.config.ts @@ -1,5 +1,5 @@ import { defineConfig } from '@prisma-next/tsdown'; export default defineConfig({ - entry: ['src/exports/control.ts', 'src/exports/pack.ts'], + entry: ['src/exports/control.ts', 'src/exports/pack.ts', 'src/exports/migration.ts'], }); diff --git a/packages/3-mongo-target/1-mongo-target/README.md b/packages/3-mongo-target/1-mongo-target/README.md index 8870912d99..42995decbd 100644 --- a/packages/3-mongo-target/1-mongo-target/README.md +++ b/packages/3-mongo-target/1-mongo-target/README.md @@ -7,14 +7,18 @@ MongoDB target pack for Prisma Next. 
- **Target pack assembly**: Exports the MongoDB target pack for authoring and family composition - **Target metadata**: Defines the stable Mongo target identity (`kind`, `familyId`, `targetId`, `version`, `capabilities`) - **Codec type surface**: Exposes the base Mongo codec type map used by authoring-time type composition +- **Migration operation factories**: Factory functions for MongoDB migration operations ## Entrypoints - `./pack`: pure target pack ref used by `@prisma-next/family-mongo` and `@prisma-next/mongo-contract-ts` - `./codec-types`: base Mongo codec type map +- `./migration`: factory functions (the `Migration` base class is in `@prisma-next/family-mongo/migration`) ## Usage +### Contract definition + ```typescript import mongoFamily from '@prisma-next/family-mongo/pack'; import { defineContract } from '@prisma-next/mongo-contract-ts/contract-builder'; @@ -25,3 +29,35 @@ const contract = defineContract({ target: mongoTarget, }); ``` + +### Migration authoring + +```typescript +import { Migration } from '@prisma-next/family-mongo/migration'; +import { createIndex, createCollection } from '@prisma-next/target-mongo/migration'; + +export default class extends Migration { + plan() { + return [ + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + validationLevel: "strict", + }), + createIndex("users", [{ field: "email", direction: 1 }], { unique: true }), + ] + } +} + +Migration.run(import.meta.url) +``` + +Run `tsx migration.ts` to produce `ops.json` and `migration.json` (when `describe()` is implemented). Use `--dry-run` to preview without writing. 
+ +### Available factories + +- `createIndex(collection, keys, options?)` — create an index +- `dropIndex(collection, keys)` — drop an index +- `createCollection(collection, options?)` — create a collection +- `dropCollection(collection)` — drop a collection +- `setValidation(collection, schema, options?)` — set document validation on a collection +- `validatedCollection(name, schema, indexes)` — create a collection with a JSON Schema validator and indexes diff --git a/packages/3-mongo-target/1-mongo-target/package.json b/packages/3-mongo-target/1-mongo-target/package.json index 1e21b01724..6aa42c0f83 100644 --- a/packages/3-mongo-target/1-mongo-target/package.json +++ b/packages/3-mongo-target/1-mongo-target/package.json @@ -25,6 +25,7 @@ "@prisma-next/tsconfig": "workspace:*", "@prisma-next/tsdown": "workspace:*", "mongodb-memory-server": "catalog:", + "pathe": "^2.0.3", "tsdown": "catalog:", "typescript": "catalog:", "vitest": "catalog:" @@ -36,6 +37,7 @@ "exports": { "./codec-types": "./dist/codec-types.mjs", "./control": "./dist/control.mjs", + "./migration": "./dist/migration.mjs", "./pack": "./dist/pack.mjs", "./package.json": "./package.json" }, diff --git a/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts new file mode 100644 index 0000000000..b23bc4748f --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts @@ -0,0 +1,193 @@ +import type { MongoIndexKey } from '@prisma-next/mongo-query-ast/control'; +import { + buildIndexOpId, + CollModCommand, + CreateCollectionCommand, + type CreateCollectionOptions, + CreateIndexCommand, + type CreateIndexOptions, + DropCollectionCommand, + DropIndexCommand, + defaultMongoIndexName, + keysToKeySpec, + ListCollectionsCommand, + ListIndexesCommand, + MongoAndExpr, + MongoFieldFilter, + type MongoMigrationPlanOperation, +} from '@prisma-next/mongo-query-ast/control'; + +function 
formatKeys(keys: ReadonlyArray): string { + return keys.map((k) => `${k.field}:${k.direction}`).join(', '); +} + +function isTextIndex(keys: ReadonlyArray): boolean { + return keys.some((k) => k.direction === 'text'); +} + +function keyFilter(keys: ReadonlyArray) { + return isTextIndex(keys) + ? MongoFieldFilter.eq('key._fts', 'text') + : MongoFieldFilter.eq('key', keysToKeySpec(keys)); +} + +export function createIndex( + collection: string, + keys: ReadonlyArray, + options?: CreateIndexOptions, +): MongoMigrationPlanOperation { + const name = defaultMongoIndexName(keys); + const filter = keyFilter(keys); + const fullFilter = options?.unique + ? MongoAndExpr.of([filter, MongoFieldFilter.eq('unique', true)]) + : filter; + + return { + id: buildIndexOpId('create', collection, keys), + label: `Create index on ${collection} (${formatKeys(keys)})`, + operationClass: 'additive', + precheck: [ + { + description: `index does not already exist on ${collection}`, + source: new ListIndexesCommand(collection), + filter, + expect: 'notExists', + }, + ], + execute: [ + { + description: `create index on ${collection}`, + command: new CreateIndexCommand(collection, keys, { + ...options, + unique: options?.unique ?? 
undefined, + name, + }), + }, + ], + postcheck: [ + { + description: `index exists on ${collection}`, + source: new ListIndexesCommand(collection), + filter: fullFilter, + expect: 'exists', + }, + ], + }; +} + +export function dropIndex( + collection: string, + keys: ReadonlyArray, +): MongoMigrationPlanOperation { + const indexName = defaultMongoIndexName(keys); + const filter = keyFilter(keys); + + return { + id: buildIndexOpId('drop', collection, keys), + label: `Drop index on ${collection} (${formatKeys(keys)})`, + operationClass: 'destructive', + precheck: [ + { + description: `index exists on ${collection}`, + source: new ListIndexesCommand(collection), + filter, + expect: 'exists', + }, + ], + execute: [ + { + description: `drop index on ${collection}`, + command: new DropIndexCommand(collection, indexName), + }, + ], + postcheck: [ + { + description: `index no longer exists on ${collection}`, + source: new ListIndexesCommand(collection), + filter, + expect: 'notExists', + }, + ], + }; +} + +export function createCollection( + collection: string, + options?: CreateCollectionOptions, +): MongoMigrationPlanOperation { + return { + id: `collection.${collection}.create`, + label: `Create collection ${collection}`, + operationClass: 'additive', + precheck: [ + { + description: `collection ${collection} does not exist`, + source: new ListCollectionsCommand(), + filter: MongoFieldFilter.eq('name', collection), + expect: 'notExists', + }, + ], + execute: [ + { + description: `create collection ${collection}`, + command: new CreateCollectionCommand(collection, options), + }, + ], + postcheck: [], + }; +} + +export function dropCollection(collection: string): MongoMigrationPlanOperation { + return { + id: `collection.${collection}.drop`, + label: `Drop collection ${collection}`, + operationClass: 'destructive', + precheck: [], + execute: [ + { + description: `drop collection ${collection}`, + command: new DropCollectionCommand(collection), + }, + ], + postcheck: [], + 
}; +} + +export function setValidation( + collection: string, + schema: Record, + options?: { validationLevel?: 'strict' | 'moderate'; validationAction?: 'error' | 'warn' }, +): MongoMigrationPlanOperation { + return { + id: `collection.${collection}.setValidation`, + label: `Set validation on ${collection}`, + operationClass: 'destructive', + precheck: [], + execute: [ + { + description: `set validation on ${collection}`, + command: new CollModCommand(collection, { + validator: { $jsonSchema: schema }, + validationLevel: options?.validationLevel, + validationAction: options?.validationAction, + }), + }, + ], + postcheck: [], + }; +} + +export function validatedCollection( + name: string, + schema: Record, + indexes: ReadonlyArray<{ keys: MongoIndexKey[]; unique?: boolean }>, +): MongoMigrationPlanOperation[] { + return [ + createCollection(name, { + validator: { $jsonSchema: schema }, + validationLevel: 'strict', + validationAction: 'error', + }), + ...indexes.map((idx) => createIndex(name, idx.keys, { unique: idx.unique })), + ]; +} diff --git a/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts new file mode 100644 index 0000000000..65a0c2412c --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/src/exports/migration.ts @@ -0,0 +1,8 @@ +export { + createCollection, + createIndex, + dropCollection, + dropIndex, + setValidation, + validatedCollection, +} from '../core/migration-factories'; diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts new file mode 100644 index 0000000000..fb84f2bb36 --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/test/migration-e2e.test.ts @@ -0,0 +1,192 @@ +import { execFile } from 'node:child_process'; +import { mkdtemp, readFile, rm, writeFile } from 'node:fs/promises'; +import { tmpdir } from 'node:os'; +import { promisify } from 'node:util'; 
+import { join, resolve } from 'pathe'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; + +const execFileAsync = promisify(execFile); +const packageRoot = resolve(import.meta.dirname, '..'); +const repoRoot = resolve(packageRoot, '../../..'); +const tsxPath = join(repoRoot, 'node_modules/.bin/tsx'); + +const familyMongoRoot = resolve(repoRoot, 'packages/2-mongo-family/9-family'); +const migrationExport = join(familyMongoRoot, 'src/exports/migration.ts').replace(/\\/g, '/'); +const factoryExport = join(packageRoot, 'src/exports/migration.ts').replace(/\\/g, '/'); + +describe('migration file E2E', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'migration-e2e-')); + await writeFile(join(tmpDir, 'package.json'), '{"type":"module"}'); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + async function runFile( + filename: string, + args: string[] = [], + ): Promise<{ stdout: string; stderr: string; exitCode: number }> { + const filePath = join(tmpDir, filename); + try { + const result = await execFileAsync(tsxPath, [filePath, ...args], { cwd: tmpDir }); + return { stdout: result.stdout, stderr: result.stderr, exitCode: 0 }; + } catch (error) { + const e = error as { stdout: string; stderr: string; code: number }; + return { stdout: e.stdout || '', stderr: e.stderr || '', exitCode: e.code || 1 }; + } + } + + describe('factory-based migration', () => { + const factoryMigration = [ + `import { Migration } from '${migrationExport}';`, + `import { createIndex, createCollection } from '${factoryExport}';`, + '', + 'class M extends Migration {', + ' plan() {', + ' return [', + ' createCollection("users", {', + ' validator: { $jsonSchema: { required: ["email"] } },', + ' validationLevel: "strict",', + ' }),', + ' createIndex("users", [{ field: "email", direction: 1 }], { unique: true }),', + ' ];', + ' }', + '}', + 'export default M;', + '', + 
'Migration.run(import.meta.url, M);', + ].join('\n'); + + it('produces ops.json with correct structure', async () => { + await writeFile(join(tmpDir, 'migration.ts'), factoryMigration); + + const result = await runFile('migration.ts'); + expect(result.exitCode).toBe(0); + + const opsJson = await readFile(join(tmpDir, 'ops.json'), 'utf-8'); + const ops = JSON.parse(opsJson); + + expect(ops).toHaveLength(2); + expect(ops[0].id).toBe('collection.users.create'); + expect(ops[0].operationClass).toBe('additive'); + expect(ops[0].execute[0].command.kind).toBe('createCollection'); + + expect(ops[1].id).toContain('index.users.create'); + expect(ops[1].execute[0].command.kind).toBe('createIndex'); + expect(ops[1].execute[0].command.unique).toBe(true); + }); + + it('prints operations with --dry-run and does not write ops.json', async () => { + await writeFile(join(tmpDir, 'migration.ts'), factoryMigration); + + const result = await runFile('migration.ts', ['--dry-run']); + expect(result.exitCode).toBe(0); + + const parsed = JSON.parse(result.stdout); + expect(parsed).toHaveLength(2); + expect(parsed[0].id).toBe('collection.users.create'); + + const opsExists = await readFile(join(tmpDir, 'ops.json'), 'utf-8').catch(() => null); + expect(opsExists).toBeNull(); + }); + }); + + describe('strategy-based migration', () => { + const strategyMigration = [ + `import { Migration } from '${migrationExport}';`, + `import { validatedCollection } from '${factoryExport}';`, + '', + 'class M extends Migration {', + ' plan() {', + ' return validatedCollection(', + ' "users",', + ' { required: ["email", "name"] },', + ' [{ keys: [{ field: "email", direction: 1 }], unique: true }],', + ' );', + ' }', + '}', + 'export default M;', + '', + 'Migration.run(import.meta.url, M);', + ].join('\n'); + + it('produces ops.json from strategy composition', async () => { + await writeFile(join(tmpDir, 'migration.ts'), strategyMigration); + + const result = await runFile('migration.ts'); + 
expect(result.exitCode).toBe(0); + + const opsJson = await readFile(join(tmpDir, 'ops.json'), 'utf-8'); + const ops = JSON.parse(opsJson); + + expect(ops).toHaveLength(2); + + expect(ops[0].id).toBe('collection.users.create'); + expect(ops[0].execute[0].command.validator).toEqual({ + $jsonSchema: { required: ['email', 'name'] }, + }); + expect(ops[0].execute[0].command.validationLevel).toBe('strict'); + + expect(ops[1].id).toContain('index.users.create'); + expect(ops[1].execute[0].command.unique).toBe(true); + }); + }); + + describe('serialization format', () => { + it('produces JSON that the runner can consume (correct kind discriminants)', async () => { + const migration = [ + `import { Migration } from '${migrationExport}';`, + `import { createIndex, dropIndex, createCollection, dropCollection, setValidation } from '${factoryExport}';`, + '', + 'class M extends Migration {', + ' plan() {', + ' return [', + ' createCollection("users"),', + ' createIndex("users", [{ field: "email", direction: 1 }]),', + ' setValidation("users", { required: ["email"] }),', + ' dropIndex("users", [{ field: "email", direction: 1 }]),', + ' dropCollection("users"),', + ' ];', + ' }', + '}', + 'export default M;', + '', + 'Migration.run(import.meta.url, M);', + ].join('\n'); + + await writeFile(join(tmpDir, 'migration.ts'), migration); + + const result = await runFile('migration.ts'); + expect(result.exitCode).toBe(0); + + const ops = JSON.parse(await readFile(join(tmpDir, 'ops.json'), 'utf-8')); + expect(ops).toHaveLength(5); + + const commandKinds = ops.map((op: Record) => + (op['execute'] as Record[]).map( + (s: Record) => (s['command'] as Record)['kind'], + ), + ); + expect(commandKinds).toEqual([ + ['createCollection'], + ['createIndex'], + ['collMod'], + ['dropIndex'], + ['dropCollection'], + ]); + + for (const op of ops) { + expect(op).toHaveProperty('id'); + expect(op).toHaveProperty('label'); + expect(op).toHaveProperty('operationClass'); + 
expect(op).toHaveProperty('precheck'); + expect(op).toHaveProperty('execute'); + expect(op).toHaveProperty('postcheck'); + } + }); + }); +}); diff --git a/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts b/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts new file mode 100644 index 0000000000..9ec6bdbe8a --- /dev/null +++ b/packages/3-mongo-target/1-mongo-target/test/migration-factories.test.ts @@ -0,0 +1,448 @@ +import { + buildIndexOpId, + CollModCommand, + CreateCollectionCommand, + CreateIndexCommand, + DropCollectionCommand, + DropIndexCommand, + defaultMongoIndexName, + keysToKeySpec, + ListCollectionsCommand, + ListIndexesCommand, + MongoAndExpr, + MongoFieldFilter, + type MongoMigrationPlanOperation, +} from '@prisma-next/mongo-query-ast/control'; +import { describe, expect, it } from 'vitest'; +import { + createCollection, + createIndex, + dropCollection, + dropIndex, + setValidation, + validatedCollection, +} from '../src/core/migration-factories'; + +describe('createIndex', () => { + const keys = [{ field: 'email', direction: 1 as const }]; + + it('produces correct operation structure', () => { + const op = createIndex('users', keys); + + expect(op.id).toBe(buildIndexOpId('create', 'users', keys)); + expect(op.label).toBe('Create index on users (email:1)'); + expect(op.operationClass).toBe('additive'); + }); + + it('includes precheck that index does not exist', () => { + const op = createIndex('users', keys); + + expect(op.precheck).toHaveLength(1); + expect(op.precheck[0]!.expect).toBe('notExists'); + expect(op.precheck[0]!.source).toBeInstanceOf(ListIndexesCommand); + expect((op.precheck[0]!.source as ListIndexesCommand).collection).toBe('users'); + expect(op.precheck[0]!.filter).toBeInstanceOf(MongoFieldFilter); + }); + + it('includes execute with CreateIndexCommand', () => { + const op = createIndex('users', keys, { unique: true }); + + expect(op.execute).toHaveLength(1); + const cmd = 
op.execute[0]!.command as CreateIndexCommand; + expect(cmd).toBeInstanceOf(CreateIndexCommand); + expect(cmd.collection).toBe('users'); + expect(cmd.keys).toEqual(keys); + expect(cmd.unique).toBe(true); + expect(cmd.name).toBe(defaultMongoIndexName(keys)); + }); + + it('includes postcheck that index exists', () => { + const op = createIndex('users', keys); + + expect(op.postcheck).toHaveLength(1); + expect(op.postcheck[0]!.expect).toBe('exists'); + }); + + it('adds unique filter to postcheck when unique: true', () => { + const op = createIndex('users', keys, { unique: true }); + + expect(op.postcheck[0]!.filter).toBeInstanceOf(MongoAndExpr); + const andExpr = op.postcheck[0]!.filter as MongoAndExpr; + expect(andExpr.exprs).toHaveLength(2); + }); + + it('uses key._fts filter for text indexes', () => { + const textKeys = [{ field: 'content', direction: 'text' as const }]; + const op = createIndex('posts', textKeys); + + const preFilter = op.precheck[0]!.filter as MongoFieldFilter; + expect(preFilter.field).toBe('key._fts'); + expect(preFilter.value).toBe('text'); + }); + + it('passes through all index options', () => { + const op = createIndex('users', keys, { + sparse: true, + expireAfterSeconds: 3600, + collation: { locale: 'en' }, + }); + + const cmd = op.execute[0]!.command as CreateIndexCommand; + expect(cmd.sparse).toBe(true); + expect(cmd.expireAfterSeconds).toBe(3600); + expect(cmd.collation).toEqual({ locale: 'en' }); + }); + + it('uses key spec filter for non-text indexes', () => { + const op = createIndex('users', keys); + + const preFilter = op.precheck[0]!.filter as MongoFieldFilter; + expect(preFilter.field).toBe('key'); + expect(preFilter.value).toEqual(keysToKeySpec(keys)); + }); + + it('handles compound keys', () => { + const compoundKeys = [ + { field: 'email', direction: 1 as const }, + { field: 'name', direction: -1 as const }, + ]; + const op = createIndex('users', compoundKeys); + + expect(op.label).toBe('Create index on users (email:1, 
name:-1)'); + expect(op.id).toBe(buildIndexOpId('create', 'users', compoundKeys)); + }); +}); + +describe('dropIndex', () => { + const keys = [{ field: 'email', direction: 1 as const }]; + + it('produces correct operation structure', () => { + const op = dropIndex('users', keys); + + expect(op.id).toBe(buildIndexOpId('drop', 'users', keys)); + expect(op.label).toBe('Drop index on users (email:1)'); + expect(op.operationClass).toBe('destructive'); + }); + + it('includes precheck that index exists', () => { + const op = dropIndex('users', keys); + + expect(op.precheck).toHaveLength(1); + expect(op.precheck[0]!.expect).toBe('exists'); + }); + + it('includes execute with DropIndexCommand using derived name', () => { + const op = dropIndex('users', keys); + + const cmd = op.execute[0]!.command as DropIndexCommand; + expect(cmd).toBeInstanceOf(DropIndexCommand); + expect(cmd.collection).toBe('users'); + expect(cmd.name).toBe(defaultMongoIndexName(keys)); + }); + + it('includes postcheck that index no longer exists', () => { + const op = dropIndex('users', keys); + + expect(op.postcheck).toHaveLength(1); + expect(op.postcheck[0]!.expect).toBe('notExists'); + }); + + it('uses key._fts filter for text indexes', () => { + const textKeys = [{ field: 'content', direction: 'text' as const }]; + const op = dropIndex('posts', textKeys); + + const preFilter = op.precheck[0]!.filter as MongoFieldFilter; + expect(preFilter.field).toBe('key._fts'); + }); +}); + +describe('createCollection', () => { + it('produces correct operation structure', () => { + const op = createCollection('users'); + + expect(op.id).toBe('collection.users.create'); + expect(op.label).toBe('Create collection users'); + expect(op.operationClass).toBe('additive'); + }); + + it('includes precheck that collection does not exist', () => { + const op = createCollection('users'); + + expect(op.precheck).toHaveLength(1); + expect(op.precheck[0]!.expect).toBe('notExists'); + 
expect(op.precheck[0]!.source).toBeInstanceOf(ListCollectionsCommand); + const filter = op.precheck[0]!.filter as MongoFieldFilter; + expect(filter.field).toBe('name'); + expect(filter.value).toBe('users'); + }); + + it('includes execute with CreateCollectionCommand', () => { + const op = createCollection('users'); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd).toBeInstanceOf(CreateCollectionCommand); + expect(cmd.collection).toBe('users'); + }); + + it('passes through validator options', () => { + const op = createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + validationAction: 'error', + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.validator).toEqual({ $jsonSchema: { required: ['email'] } }); + expect(cmd.validationLevel).toBe('strict'); + expect(cmd.validationAction).toBe('error'); + }); + + it('passes through capped options', () => { + const op = createCollection('logs', { + capped: true, + size: 1000000, + max: 5000, + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.capped).toBe(true); + expect(cmd.size).toBe(1000000); + expect(cmd.max).toBe(5000); + }); + + it('passes through timeseries options', () => { + const op = createCollection('metrics', { + timeseries: { timeField: 'timestamp', metaField: 'source', granularity: 'minutes' }, + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.timeseries).toEqual({ + timeField: 'timestamp', + metaField: 'source', + granularity: 'minutes', + }); + }); + + it('passes through collation and clusteredIndex', () => { + const op = createCollection('users', { + collation: { locale: 'en', strength: 2 }, + clusteredIndex: { key: { _id: 1 }, unique: true }, + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.collation).toEqual({ locale: 'en', strength: 2 }); + expect(cmd.clusteredIndex).toEqual({ 
key: { _id: 1 }, unique: true }); + }); + + it('passes through changeStreamPreAndPostImages', () => { + const op = createCollection('events', { + changeStreamPreAndPostImages: { enabled: true }, + }); + + const cmd = op.execute[0]!.command as CreateCollectionCommand; + expect(cmd.changeStreamPreAndPostImages).toEqual({ enabled: true }); + }); + + it('has empty postcheck', () => { + const op = createCollection('users'); + expect(op.postcheck).toHaveLength(0); + }); +}); + +describe('dropCollection', () => { + it('produces correct operation structure', () => { + const op = dropCollection('users'); + + expect(op.id).toBe('collection.users.drop'); + expect(op.label).toBe('Drop collection users'); + expect(op.operationClass).toBe('destructive'); + }); + + it('includes execute with DropCollectionCommand', () => { + const op = dropCollection('users'); + + const cmd = op.execute[0]!.command as DropCollectionCommand; + expect(cmd).toBeInstanceOf(DropCollectionCommand); + expect(cmd.collection).toBe('users'); + }); + + it('has empty precheck and postcheck', () => { + const op = dropCollection('users'); + expect(op.precheck).toHaveLength(0); + expect(op.postcheck).toHaveLength(0); + }); +}); + +describe('setValidation', () => { + it('produces correct operation structure', () => { + const op = setValidation('users', { required: ['email'] }); + + expect(op.id).toBe('collection.users.setValidation'); + expect(op.label).toBe('Set validation on users'); + expect(op.operationClass).toBe('destructive'); + }); + + it('wraps schema in $jsonSchema validator', () => { + const schema = { required: ['email'], properties: { email: { bsonType: 'string' } } }; + const op = setValidation('users', schema); + + const cmd = op.execute[0]!.command as CollModCommand; + expect(cmd).toBeInstanceOf(CollModCommand); + expect(cmd.collection).toBe('users'); + expect(cmd.validator).toEqual({ $jsonSchema: schema }); + }); + + it('passes through validationLevel and validationAction', () => { + const op = 
setValidation( + 'users', + { required: ['email'] }, + { + validationLevel: 'moderate', + validationAction: 'warn', + }, + ); + + const cmd = op.execute[0]!.command as CollModCommand; + expect(cmd.validationLevel).toBe('moderate'); + expect(cmd.validationAction).toBe('warn'); + }); + + it('has empty precheck and postcheck', () => { + const op = setValidation('users', { required: ['email'] }); + expect(op.precheck).toHaveLength(0); + expect(op.postcheck).toHaveLength(0); + }); + + it('round-trips through JSON', () => { + const op = setValidation('users', { required: ['email'] }, { validationLevel: 'strict' }); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.execute[0].command.kind).toBe('collMod'); + expect(json.execute[0].command.validator).toEqual({ + $jsonSchema: { required: ['email'] }, + }); + }); +}); + +describe('validatedCollection', () => { + it('creates collection with schema validation', () => { + const ops = validatedCollection('users', { required: ['email'] }, []); + + expect(ops).toHaveLength(1); + expect(ops[0]!.id).toBe('collection.users.create'); + + const cmd = ops[0]!.execute[0]!.command as CreateCollectionCommand; + expect(cmd.validator).toEqual({ $jsonSchema: { required: ['email'] } }); + expect(cmd.validationLevel).toBe('strict'); + expect(cmd.validationAction).toBe('error'); + }); + + it('includes indexes after collection creation', () => { + const ops = validatedCollection('users', { required: ['email'] }, [ + { keys: [{ field: 'email', direction: 1 }], unique: true }, + { keys: [{ field: 'name', direction: 1 }] }, + ]); + + expect(ops).toHaveLength(3); + expect(ops[0]!.id).toBe('collection.users.create'); + + const idx1 = ops[1]!.execute[0]!.command as CreateIndexCommand; + expect(idx1.collection).toBe('users'); + expect(idx1.unique).toBe(true); + + const idx2 = ops[2]!.execute[0]!.command as CreateIndexCommand; + expect(idx2.collection).toBe('users'); + expect(idx2.unique).toBeUndefined(); + }); + + it('returns flat array of 
operations', () => { + const ops = validatedCollection('users', { required: ['email'] }, [ + { keys: [{ field: 'email', direction: 1 }] }, + ]); + + expect(Array.isArray(ops)).toBe(true); + expect(ops.every((op) => 'id' in op && 'execute' in op)).toBe(true); + }); +}); + +describe('serialization round-trip', () => { + it('createIndex round-trips through JSON', () => { + const op = createIndex('users', [{ field: 'email', direction: 1 }], { unique: true }); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.id).toBe(op.id); + expect(json.label).toBe(op.label); + expect(json.operationClass).toBe(op.operationClass); + expect(json.precheck).toHaveLength(1); + expect(json.execute).toHaveLength(1); + expect(json.postcheck).toHaveLength(1); + expect(json.execute[0].command.kind).toBe('createIndex'); + }); + + it('dropIndex round-trips through JSON', () => { + const op = dropIndex('users', [{ field: 'email', direction: 1 }]); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.execute[0].command.kind).toBe('dropIndex'); + expect(json.precheck[0].source.kind).toBe('listIndexes'); + }); + + it('createCollection round-trips through JSON', () => { + const op = createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + }); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.execute[0].command.kind).toBe('createCollection'); + expect(json.execute[0].command.validator).toEqual({ $jsonSchema: { required: ['email'] } }); + }); + + it('dropCollection round-trips through JSON', () => { + const op = dropCollection('users'); + const json = JSON.parse(JSON.stringify(op)); + + expect(json.execute[0].command.kind).toBe('dropCollection'); + }); + + it('factory output matches planner-equivalent createIndex structure', () => { + const keys = [{ field: 'email', direction: 1 as const }]; + const factoryOp = createIndex('users', keys, { unique: true }); + + const plannerOp: MongoMigrationPlanOperation = { + 
id: buildIndexOpId('create', 'users', keys), + label: 'Create index on users (email:1)', + operationClass: 'additive', + precheck: [ + { + description: 'index does not already exist on users', + source: new ListIndexesCommand('users'), + filter: MongoFieldFilter.eq('key', keysToKeySpec(keys)), + expect: 'notExists', + }, + ], + execute: [ + { + description: 'create index on users', + command: new CreateIndexCommand('users', keys, { + unique: true, + name: defaultMongoIndexName(keys), + }), + }, + ], + postcheck: [ + { + description: 'index exists on users', + source: new ListIndexesCommand('users'), + filter: MongoAndExpr.of([ + MongoFieldFilter.eq('key', keysToKeySpec(keys)), + MongoFieldFilter.eq('unique', true), + ]), + expect: 'exists', + }, + ], + }; + + expect(JSON.stringify(factoryOp)).toBe(JSON.stringify(plannerOp)); + }); +}); diff --git a/packages/3-mongo-target/1-mongo-target/tsdown.config.ts b/packages/3-mongo-target/1-mongo-target/tsdown.config.ts index 2d45076b99..467efd5779 100644 --- a/packages/3-mongo-target/1-mongo-target/tsdown.config.ts +++ b/packages/3-mongo-target/1-mongo-target/tsdown.config.ts @@ -1,5 +1,10 @@ import { defineConfig } from '@prisma-next/tsdown'; export default defineConfig({ - entry: ['src/exports/pack.ts', 'src/exports/codec-types.ts', 'src/exports/control.ts'], + entry: [ + 'src/exports/pack.ts', + 'src/exports/codec-types.ts', + 'src/exports/control.ts', + 'src/exports/migration.ts', + ], }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f012e32db0..05446c2404 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -124,6 +124,9 @@ importers: '@prisma-next/mongo-runtime': specifier: workspace:* version: link:../../packages/2-mongo-family/7-runtime + '@prisma-next/target-mongo': + specifier: workspace:* + version: link:../../packages/3-mongo-target/1-mongo-target mongodb: specifier: 'catalog:' version: 6.21.0 @@ -164,6 +167,9 @@ importers: mongodb-memory-server: specifier: 'catalog:' version: 10.4.3 + pathe: + specifier: 
^2.0.3 + version: 2.0.3 tsx: specifier: ^4.19.2 version: 4.20.6 @@ -339,6 +345,9 @@ importers: '@prisma-next/mongo-value': specifier: workspace:* version: link:../../packages/2-mongo-family/1-foundation/mongo-value + '@prisma-next/target-mongo': + specifier: workspace:* + version: link:../../packages/3-mongo-target/1-mongo-target '@radix-ui/react-dropdown-menu': specifier: ^2.1.16 version: 2.1.16(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -418,6 +427,9 @@ importers: mongodb-memory-server: specifier: 'catalog:' version: 10.4.3 + pathe: + specifier: ^2.0.3 + version: 2.0.3 tsx: specifier: ^4.19.2 version: 4.20.6 @@ -1392,12 +1404,18 @@ importers: '@prisma-next/framework-components': specifier: workspace:* version: link:../../1-framework/1-core/framework-components + '@prisma-next/migration-tools': + specifier: workspace:* + version: link:../../1-framework/3-tooling/migration '@prisma-next/mongo-contract': specifier: workspace:* version: link:../1-foundation/mongo-contract '@prisma-next/mongo-emitter': specifier: workspace:* version: link:../3-tooling/emitter + '@prisma-next/mongo-query-ast': + specifier: workspace:* + version: link:../4-query/query-ast '@prisma-next/mongo-schema-ir': specifier: workspace:* version: link:../3-tooling/mongo-schema-ir @@ -2335,6 +2353,9 @@ importers: mongodb-memory-server: specifier: 'catalog:' version: 10.4.3 + pathe: + specifier: ^2.0.3 + version: 2.0.3 tsdown: specifier: 'catalog:' version: 0.18.4(typescript@5.9.3) diff --git a/projects/mongo-migration-authoring/assets/migration-authoring-design.md b/projects/mongo-migration-authoring/assets/migration-authoring-design.md new file mode 100644 index 0000000000..ec07fc3c8a --- /dev/null +++ b/projects/mongo-migration-authoring/assets/migration-authoring-design.md @@ -0,0 +1,328 @@ +# Migration Authoring — Design Proposal + +## Overview + +Migrations are authored as TypeScript files. 
Each migration file exports a class that extends `Migration` and defines a `plan()` method returning a list of operations. The file is self-contained — run it directly with `node migration.ts` to produce `ops.json`, or let the CLI import it. + +This is similar in spirit to Active Record Migrations, where a migration is a Ruby file that subclasses `ActiveRecord::Migration` and expresses schema changes as method calls (`add_column`, `remove_column`, etc.). Our equivalent is a `Migration` subclass whose `plan()` method composes TypeScript factory functions that produce serializable operation objects. + +The design has three layers: + +1. **Operation factories** — atomic primitives (`addColumn`, `setNotNull`, etc.), each producing a single operation +2. **Strategies** — plain functions that compose the primitives into correct multi-step sequences +3. **Migration class** — the file's export, providing the operation list and a self-executing entrypoint + +Everything downstream — the runner, attestation, the migration graph — consumes the same `ops.json` format. + +--- + +## Migration files + +A migration file exports a class and makes itself runnable: + +```typescript +import { Migration, addColumn } from "@prisma-next/target-postgres/migration" + +export default class extends Migration { + plan() { + return [ + addColumn("users", "display_name", { type: "varchar", nullable: true }), + ] + } +} + +Migration.run(import.meta) +``` + +`node migration.ts` calls `plan()`, serializes the result, and writes `ops.json`. `node migration.ts --dry-run` prints the operations without writing. The CLI can also import the file, get the class, and call `plan()` directly — `Migration.run()` detects that it's not the entrypoint and is a no-op. + +The `Migration` base class owns the lifecycle: argument parsing, serialization, output. The author's only job is to return operations from `plan()`. 
+ +## Operation factories + +Each factory function produces a single `SqlMigrationPlanOperation` — a plain object with `id`, `label`, `operationClass`, and `precheck`/`execute`/`postcheck` arrays containing `{ description, sql }` steps. + +```typescript +export default class extends Migration { + plan() { + return [ + addColumn("users", "display_name", { type: "varchar", nullable: true }), + setNotNull("users", "display_name"), + ] + } +} +``` + +The library provides factories for common DDL: `addColumn`, `dropColumn`, `renameColumn`, `addTable`, `dropTable`, `setNotNull`, `dropNotNull`, `setDefault`, `dropDefault`, `addIndex`, `dropIndex`, `addUnique`, `addForeignKey`, `createEnumType`, and so on. + +Each factory returns a plain object. `JSON.stringify()` is the serializer. + +## Strategies are functions + +Some schema changes require multiple operations in a specific order. A strategy is a regular TypeScript function that composes the atomic factories: + +```typescript +function nonNullBackfill(table: string, column: string, backfillExpr: string) { + return [ + addColumn(table, column, { nullable: true }), + dataTransform(`backfill-${table}-${column}`, { + check: (db) => /* ... */, + run: (db) => /* ... */, + }), + setNotNull(table, column), + ] +} +``` + +It takes parameters, calls the primitives, returns `SqlMigrationPlanOperation[]`. The ordering is correct by construction. 
+ +A user writes a migration with it: + +```typescript +export default class extends Migration { + plan() { + return nonNullBackfill("users", "display_name", "'unnamed'") + } +} + +Migration.run(import.meta) +``` + +A `renameColumnSafe` strategy does expand-and-contract: + +```typescript +export default class extends Migration { + plan() { + return renameColumnSafe("users", "name", "full_name") + // Internally produces: addColumn("full_name") → copyData → dropColumn("name") + } +} + +Migration.run(import.meta) +``` + +Users write their own strategies the same way — compose the atomic primitives, return the same operation type. A `columnSplit` strategy, a `typeChange` strategy, a `tableExtraction` strategy — each one encapsulates the correct operation sequence for its scenario and asks the user only for the information gap (how to derive the new values from the old). + +## Data transforms + +A data transform is an operation that modifies data rather than schema. It has a name (its invariant identity for ledger recording and routing), plus a check/run pair: + +```typescript +export default class extends Migration { + plan() { + return [ + addColumn("users", "first_name", { type: "varchar", nullable: true }), + addColumn("users", "last_name", { type: "varchar", nullable: true }), + dataTransform("split-user-name", { + check: (db) => db.query("SELECT 1 FROM users WHERE first_name IS NULL LIMIT 1"), + run: (db) => db.query( + "UPDATE users SET first_name = split_part(name, ' ', 1), " + + "last_name = split_part(name, ' ', 2) WHERE first_name IS NULL" + ), + }), + setNotNull("users", "first_name"), + setNotNull("users", "last_name"), + dropColumn("users", "name"), + ] + } +} + +Migration.run(import.meta) +``` + +The name (`"split-user-name"`) is the invariant. The ledger records it on successful completion; routing can require it via environment refs. Check runs before run (retry safety) and after run (validation).
Data transforms serialize to JSON ASTs and appear in the operation chain wherever they need to. + +### A strategy for column splits + +The manual composition above is verbose. A `columnSplit` strategy encapsulates the pattern: + +```typescript +export default class extends Migration { + plan() { + return columnSplit("users", "name", ["first_name", "last_name"], (db) => + db.users.update({ + firstName: expr("split_part(name, ' ', 1)"), + lastName: expr("split_part(name, ' ', 2)"), + }) + ) + } +} + +Migration.run(import.meta) +``` + +`columnSplit` internally produces the same six operations — add columns, backfill via the user's expression, tighten constraints, drop the old column. The ordering is correct by construction. The user provides only the derivation logic. + +## The planner + +The planner's job is scenario detection and strategy selection: + +1. Detect which scenario applies (column added as NOT NULL without default, non-widening type change, etc.) +2. Pick the strategy function +3. Call it with the right arguments + +The planner calls the exact same functions that users call when authoring migrations by hand. It can either call the strategy directly to produce operations, or scaffold a `migration.ts` that calls it — the result is the same. + +Each strategy handles its own ordering internally. Adding support for a new scenario means writing a new strategy function. + +## Typed query builder access mid-chain + +If a data transform appears partway through an operation chain, the user may want typed query builder access against the schema state at that point. This is a hard problem in general — it would require manipulating TypeScript types through an arbitrary preceding operation sequence. + +The practical answer for v1: the user provides an intermediate contract definition. 
The tools for this already exist — copy the contract authoring surface (PSL or TS builders) into the migration directory, modify it to describe the schema at the point you care about, and emit it: + +``` +migrations/0003-split-name/ +├── migration.ts # the migration itself +├── intermediate.psl # schema at the mid-point (after additive ops) +├── intermediate.json # emitted contract +└── intermediate.d.ts # emitted types +``` + +Then import it: + +```typescript +import type { Contract } from "./intermediate.d" +import intermediateJson from "./intermediate.json" + +export default class extends Migration { + plan() { + return [ + addColumn("users", "first_name", { type: "varchar", nullable: true }), + addColumn("users", "last_name", { type: "varchar", nullable: true }), + dataTransform("split-user-name", { + contract: intermediateJson, + check: (db) => db.users.findFirst({ where: { firstName: null } }), + run: (db) => db.users.update({ + firstName: expr("split_part(name, ' ', 1)"), + lastName: expr("split_part(name, ' ', 2)"), + }), + }), + setNotNull("users", "first_name"), + setNotNull("users", "last_name"), + dropColumn("users", "name"), + ] + } +} + +Migration.run(import.meta) +``` + +Multiple intermediate contracts are supported — one per data transform if a complex migration needs them. + +## Transactions + +The operation chain can carry transaction annotations. A `transaction()` wrapper tells the runner to execute a sequence of operations atomically: + +```typescript +export default class extends Migration { + plan() { + return transaction([ + addColumn("users", "first_name", { type: "varchar", nullable: true }), + addColumn("users", "last_name", { type: "varchar", nullable: true }), + dataTransform("split-user-name", { /* ... 
*/ }), + setNotNull("users", "first_name"), + setNotNull("users", "last_name"), + dropColumn("users", "name"), + ]) + } +} + +Migration.run(import.meta) +``` + +Transaction boundaries are the user's decision — they know whether the table is small enough for a single transaction or whether they need to break it up. + +## Multi-migration deployments + +In production, you'll often want to deploy application updates between migration steps. A column split is conceptually one change, but in a blue-green / rolling deployment it happens over days: + +1. **Deploy migration 1** — add nullable columns +2. **Deploy app** — application dual-writes to old and new columns +3. **Deploy migration 2** — backfill existing rows, tighten constraints, drop old column +4. **Deploy app** — application reads from new columns + +Each step is a separate migration file, a separate edge in the graph, applied at the user's pace. The intermediate state (nullable columns exist, not yet backfilled) is a real deployment state — the application may run against it for hours. + +```typescript +// Migration 1 (deploy first) +export default class extends Migration { + plan() { + return [ + addColumn("users", "first_name", { type: "varchar", nullable: true }), + addColumn("users", "last_name", { type: "varchar", nullable: true }), + ] + } +} + +Migration.run(import.meta) +``` + +```typescript +// Migration 2 (deploy after app update) +export default class extends Migration { + plan() { + return [ + dataTransform("split-user-name", { + check: (db) => db.query("SELECT 1 FROM users WHERE first_name IS NULL LIMIT 1"), + run: (db) => db.query( + "UPDATE users SET first_name = split_part(name, ' ', 1), " + + "last_name = split_part(name, ' ', 2) WHERE first_name IS NULL" + ), + }), + setNotNull("users", "first_name"), + setNotNull("users", "last_name"), + dropColumn("users", "name"), + ] + } +} + +Migration.run(import.meta) +``` + +The user knows their deployment process; they decide the granularity. 
+ +## Serialization model + +Operations are plain objects. `SqlMigrationPlanOperation` is an interface with `id`, `label`, `operationClass`, plus `precheck`/`execute`/`postcheck` arrays containing `{ description, sql }` steps. Factory functions return these directly. `JSON.stringify(operations)` is the serializer. + +The Mongo migration system already follows this pattern — command classes (`CreateIndexCommand`, `DropIndexCommand`, etc.) store everything as public readonly properties, and `JSON.stringify()` produces the on-disk format directly: + +```typescript +function serializeMongoOps(ops) { + return JSON.stringify(ops, null, 2); +} +``` + +The SQL case is the same principle with even simpler data — plain interfaces rather than class instances. + +## Self-contained files + +The migration file is self-contained. Two things happen in every file: + +1. **Declare** — export a `Migration` subclass with a `plan()` method that returns operations +2. **Run** — `Migration.run(import.meta)` makes the file directly executable + +`node migration.ts` produces `ops.json`. The CLI can also import the file, get the class, and call `plan()` — `Migration.run()` detects it's not the entrypoint and is a no-op. The `Migration` base class handles argument parsing (`--dry-run`, `--help`), serialization, and output. + +The file is a pure declaration of what the migration produces, plus one line that makes it runnable. The framework owns everything else. + +--- + +## Summary + +1. **Migration class** — each file exports a `Migration` subclass with a `plan()` method. `Migration.run(import.meta)` makes it directly executable. +2. **Operation factories** — atomic primitives (`addColumn`, `dropColumn`, etc.) that each produce a single `SqlMigrationPlanOperation` +3. **Strategies as functions** — regular TypeScript functions that compose the primitives into correct operation sequences (`nonNullBackfill`, `columnSplit`, `typeChange`). Users write their own the same way. 
The planner calls the same functions. +4. **Data transforms** — operations in the chain with check/run semantics and an invariant name for ledger/routing +5. **Direct serialization** — operations are plain objects that serialize to JSON via `JSON.stringify` +6. **Intermediate contracts** — provided by the user when typed query builder access is needed mid-chain +7. **Transaction annotations** — a composable primitive +8. **Multi-migration deployments** — when the user needs app updates between steps + +The planner detects scenarios and calls strategy functions. The strategy encapsulates ordering. The runner consumes `ops.json`. Everything composes through plain functions and plain JSON. + +## Open questions + +1. **What's the minimal strategy set for v1?** Probably just the manual composition path (raw operations) to prove the model, plus one strategy (`columnSplit` or `nonNullBackfill`) to demonstrate the pattern. +2. **Should the planner ever produce multi-migration sequences automatically?** Or is splitting into multiple migrations always a manual decision? Leaning toward manual — the planner scaffolds a single migration, the user splits when their deployment process requires it. 
diff --git a/projects/mongo-migration-authoring/plan.md b/projects/mongo-migration-authoring/plan.md new file mode 100644 index 0000000000..96bca2a583 --- /dev/null +++ b/projects/mongo-migration-authoring/plan.md @@ -0,0 +1,129 @@ +# Mongo Migration Authoring — Plan + +## What we're building + +A migration file that looks like this: + +```typescript +import { Migration, createIndex, createCollection } + from "@prisma-next/target-mongo/migration" + +export default class extends Migration { + plan() { + return [ + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + validationLevel: "strict", + }), + createIndex("users", [{ field: "email", direction: 1 }], { unique: true }), + ] + } +} + +Migration.run(import.meta) +``` + +`node migration.ts` produces `ops.json`. The existing `MongoMigrationRunner` consumes it unchanged. + +**Spec:** `projects/mongo-migration-authoring/spec.md` + +## How we get there + +Three milestones, each building on the last: + +1. **Factories** — implement the five operation factory functions (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod`). After this milestone, you can call `createIndex(...)` in a test and get a valid `MongoMigrationPlanOperation`. + +2. **Runnable migration files** — implement the `Migration` base class with `plan()` and `Migration.run(import.meta)`. After this milestone, you can write a migration.ts file, run it with `node`, and get `ops.json`. + +3. **Composition and end-to-end validation** — implement a compound strategy function, validate the full pipeline against a real MongoDB instance, and close out the project. + +--- + +## Milestone 1: Factories + +Implement the five factory functions in `packages/3-mongo-target/1-mongo-target`. Each one produces a `MongoMigrationPlanOperation` with the correct command, prechecks, and postchecks — identical to what the planner produces. 
+ +The factories are extracted from the planner's existing inline logic (`planCreateIndex`, `planDropIndex`, etc.) using the same helpers (`buildIndexOpId`, `defaultMongoIndexName`, `keysToKeySpec`, filter expression assembly). + +**Tasks:** + +- [ ] `createIndex(collection, keys, options?)` — test and implement. Verify: output structure, text index handling, JSON serialization, round-trip through `deserializeMongoOps`, comparison against planner output for equivalent operation. +- [ ] `dropIndex(collection, keys)` — test and implement. Same verification pattern. +- [ ] `createCollection(collection, options?)` — test and implement. Cover: basic creation, validator options, capped/timeseries/collation/clusteredIndex options. +- [ ] `dropCollection(collection)` — test and implement. +- [ ] `collMod(collection, options)` — test and implement. Cover: validator update, changeStreamPreAndPostImages update. +- [ ] Create `src/exports/migration.ts` in `packages/3-mongo-target/1-mongo-target`. Configure the `@prisma-next/target-mongo/migration` export path in `package.json` and tsdown config. Export the five factories. + +**Acceptance criteria covered:** + +- Each factory produces correct prechecks/commands/postchecks +- Factory output serializes identically to planner output +- Round-trip: factory → `JSON.stringify` → `deserializeMongoOps` + +## Milestone 2: Runnable migration files + +Implement the `Migration` base class. After this, a `.ts` file with `export default class extends Migration` and `Migration.run(import.meta)` can be run directly to produce `ops.json`. + +**Tasks:** + +- [ ] Implement `Migration` base class in the framework layer (`packages/1-framework`). Abstract `plan()` method, generic over operation type. Static `run(meta: ImportMeta)` handling entrypoint detection, arg parsing, serialization, and file output. +- [ ] Entrypoint detection: check `import.meta.main` (Bun/Deno), fall back to `import.meta.filename` vs `resolve(process.argv[1])` (Node). 
+- [ ] `--dry-run` flag: print serialized operations to stdout without writing. +- [ ] `--help` flag: print usage information. +- [ ] Create Mongo-specific alias that fixes the type parameter to `MongoMigrationPlanOperation`. Re-export from `@prisma-next/target-mongo/migration` alongside the factory functions. + +**Tests:** + +- [ ] `Migration.run(import.meta)` is a no-op when the file is imported (not run directly) +- [ ] `ops.json` is written when the file is run as entrypoint +- [ ] `--dry-run` prints to stdout, does not write `ops.json` +- [ ] `--help` prints usage +- [ ] Default export class can be instantiated and `plan()` called directly (for CLI and test use) +- [ ] Error handling: non-array return from `plan()` + +**Acceptance criteria covered:** + +- Migration file type-checks and runs with `node migration.ts` +- File produces `ops.json` in its own directory +- `--dry-run` prints operations without writing +- `--help` prints usage +- `Migration.run(import.meta)` is no-op when imported +- Default export can be instantiated and `plan()` called directly + +## Milestone 3: Composition and end-to-end validation + +Implement a compound strategy function to demonstrate that strategies are plain function composition. Run full end-to-end tests against a real MongoDB instance to validate the entire pipeline: author → run → serialize → deserialize → runner. + +**Tasks:** + +- [ ] Implement `validatedCollection(name, schema, indexes)` — composes `createCollection` with validator + `createIndex` for each index. Returns a flat operation list. +- [ ] Export the strategy from `@prisma-next/target-mongo/migration`. +- [ ] End-to-end test with factory functions: write a migration.ts file, run it with `node`, verify `ops.json`, deserialize with `deserializeMongoOps`, execute against MongoDB memory server. +- [ ] End-to-end test with strategy function: same pipeline, using `validatedCollection` in the migration file. 
+ +**Acceptance criteria covered:** + +- Strategy composes multiple factories and returns a flat operation list +- Strategy is a plain exported function +- Round-trip through runner execution (E2E) + +**Close-out:** + +- [ ] Verify all acceptance criteria from the spec are met +- [ ] Migrate any long-lived documentation into `docs/` +- [ ] Strip repo-wide references to `projects/mongo-migration-authoring/` +- [ ] Delete `projects/mongo-migration-authoring/` + +--- + +## Collaborators + +| Role | Person/Team | Context | +|---|---|---| +| Maker | TBD | Drives execution | +| Reviewer | TBD | Architectural review — particularly the `Migration` base class design | + +## Open items + +- **Where in the framework should `Migration` live?** It's target-agnostic. Candidates: `@prisma-next/framework-components` (where `MigrationPlanOperation` already lives) or a new package. Decide during M2. +- **Entrypoint detection portability.** Node lacks `import.meta.main`. The fallback needs testing for edge cases (symlinks, path normalization). diff --git a/projects/mongo-migration-authoring/plans/data-migrations-plan.md b/projects/mongo-migration-authoring/plans/data-migrations-plan.md new file mode 100644 index 0000000000..8e1d71afde --- /dev/null +++ b/projects/mongo-migration-authoring/plans/data-migrations-plan.md @@ -0,0 +1,116 @@ +# Mongo Data Migrations — Plan + +## Summary + +Add data transform support to MongoDB migrations. Users author transforms using the existing `mongoRaw` and `mongoPipeline` query builders, which produce `MongoQueryPlan` ASTs from a scaffolded contract. The plans serialize to `ops.json` as JSON (same pattern as DDL commands) and execute at apply time via `MongoAdapter` → `MongoDriver`. No TypeScript runs at apply time. 
+ +**Spec:** `projects/mongo-migration-authoring/specs/data-migrations.spec.md` + +## Collaborators + +| Role | Person/Team | Context | +|---|---|---| +| Maker | TBD | Drives execution | +| Reviewer | TBD | Architectural review — serialization model, runner extension | + +## Milestones + +### Milestone 1: DML command serialization + +Extend the serialization layer to handle DML commands (`RawMongoCommand` kinds and typed `AggregateCommand`). After this milestone, a `MongoQueryPlan` containing any supported command kind round-trips through `JSON.stringify` → deserialize → rehydrated AST. + +**Tasks:** + +- [ ] Define arktype schemas for each `RawMongoCommand` kind: `rawUpdateMany`, `rawUpdateOne`, `rawInsertOne`, `rawInsertMany`, `rawDeleteMany`, `rawDeleteOne`, `rawAggregate`, `rawFindOneAndUpdate`, `rawFindOneAndDelete` +- [ ] Define arktype schema for typed `AggregateCommand` (and the pipeline stage subset needed for `check` queries: `$match`, `$limit`, `$sort`, `$project`) +- [ ] Implement `deserializeDmlCommand(json)` — switch on `kind`, validate with arktype, reconstruct the command class instance +- [ ] Implement `deserializeMongoQueryPlan(json)` — deserializes the full `MongoQueryPlan` envelope (collection, command, meta) +- [ ] Tests: round-trip every supported command kind through serialize → deserialize; verify rehydrated AST matches original +- [ ] Tests: error cases — unknown `kind`, missing required fields, invalid field types + +### Milestone 2: `dataTransform` factory and operation type + +Implement the `dataTransform` factory function and define the data transform operation shape in `ops.json`. After this milestone, a migration file can include `dataTransform(...)` calls that produce serializable operations. 
+ +**Tasks:** + +- [ ] Define the data transform operation type — extends `MigrationPlanOperation` with `operationClass: 'data'`, `check` (serialized `MongoQueryPlan` | boolean), `run` (serialized `MongoQueryPlan[]`) +- [ ] Implement `dataTransform(name, { check, run })` factory function in `@prisma-next/target-mongo/migration`. Accepts closures returning `Buildable` or `MongoQueryPlan`. Calls `.build()` on `Buildable` returns. +- [ ] Implement `TODO` sentinel support — a `dataTransform` with `TODO` placeholders prevents attestation +- [ ] Support `check: false` (always run) and `check: true` (always skip) +- [ ] Export `dataTransform` from `@prisma-next/target-mongo/migration` +- [ ] Tests: `dataTransform` produces correct operation shape +- [ ] Tests: `.build()` is called on `Buildable` returns +- [ ] Tests: `TODO` sentinel prevents attestation +- [ ] Tests: `check: false` and `check: true` produce correct serialized output +- [ ] Tests: data transform operations serialize to JSON and deserialize correctly + +### Milestone 3: Runner DML execution + +Extend the migration runner to execute data transform operations via the `MongoAdapter` → `MongoDriver` path. After this milestone, `migration apply` can execute a migration containing data transforms against a real MongoDB instance. 
+ +**Tasks:** + +- [ ] Extend the runner's operation dispatch to recognize `operationClass: 'data'` operations +- [ ] Implement the check → (skip or run) → check again → (fail or proceed) execution sequence +- [ ] Wire DML execution through `MongoAdapter.lower()` → `MongoDriver.execute()` (distinct from the DDL `MongoCommandExecutor` path) +- [ ] Handle `check: false` (always run) and `check: true` (always skip) in the runner +- [ ] Add logging for data transform start/completion/failure with the migration name +- [ ] Tests: runner executes data transform operations in sequence with DDL operations +- [ ] Tests: check → skip when check returns empty result (already applied) +- [ ] Tests: check → run → check again → fail when violations remain +- [ ] Tests: retry safety — re-running a completed data transform skips via check + +### Milestone 4: Contract scaffolding and end-to-end + +Wire contract scaffolding into the migration directory and validate the full pipeline end-to-end against a real MongoDB instance. 
+ +**Tasks:** + +- [ ] Extend migration scaffolding to dump `contract.json` and `contract.d.ts` into the migration directory +- [ ] E2E test: author a migration with DDL + `dataTransform`, verify (serialize), apply (deserialize + execute) against MongoDB +- [ ] E2E test: migration with an intermediate contract — two query builder contexts in the same file +- [ ] E2E test: retry safety — apply a migration that was partially applied, verify check skips completed transforms +- [ ] E2E test: check failure — data transform whose `run` doesn't fix all violations, verify runner fails with diagnostic +- [ ] Verify all acceptance criteria from the spec are met + +**Close-out:** + +- [ ] Verify all acceptance criteria from the spec are met +- [ ] Update project documentation if needed + +## Test Coverage + +| Acceptance Criterion | Test Type | Milestone | Notes | +|---|---|---|---| +| Migration file with `dataTransform` type-checks and verifies | Unit | M2 | Factory produces correct operation shape | +| Closures use module-scoped query builders (no injected params) | Unit | M2 | Verified by API — no `db` parameter | +| Resolver calls `.build()` on `Buildable` returns | Unit | M2 | | +| `TODO` sentinel prevents attestation | Unit | M2 | | +| `check: false` and `check: true` supported | Unit | M2 | | +| `MongoQueryPlan` round-trips through serialize → deserialize | Unit | M1 | All command kinds | +| All `RawMongoCommand` kinds handled | Unit | M1 | 9 command kinds | +| Typed `aggregate` command handled | Unit | M1 | Pipeline stage subset | +| Deserialization validates with arktype | Unit | M1 | | +| Data transform ops appear in `ops.json` | Unit | M2 | Serialization of data transform envelope | +| Runner: check → skip or run → check → fail or proceed | Unit | M3 | | +| DML via `MongoAdapter.lower()` → `MongoDriver.execute()` | Integration | M3 | | +| Retry: check determines whether to skip | Unit | M3 | | +| Check violations after run → migration fails | Unit | M3 | | +| 
Contract scaffolded into migration directory | Integration | M4 | | +| Intermediate contracts for complex migrations | E2E | M4 | | +| Full round-trip: author → verify → apply | E2E | M4 | Against real MongoDB | +| Mixed DDL + data transform in sequence | E2E | M4 | | +| Intermediate contract with mid-chain queries | E2E | M4 | | + +## Open Items + +1. **Operation type shape in ops.json**: The spec proposes `operationClass: 'data'` as discriminant with `check`/`run` fields instead of `precheck`/`execute`/`postcheck`. This needs to be validated against the framework's `MigrationPlanOperation` base type — it may need to be extended or the data transform may need its own type. Resolve during M2. + +2. **Aggregation pipeline stage deserialization scope**: The pipeline builder produces ~25 stage kinds. For v1, implementing deserialization for the subset needed by `check` queries (`$match`, `$limit`, `$sort`, `$project`) plus common data transform patterns (`$addFields`, `$merge`, `$lookup`) is likely sufficient. Extend as users hit gaps. + +3. **Where the serializer lives**: The existing DDL serializer is in `mongo-ops-serializer.ts` in the adapter package. If the migration-subsystem-refactor spec is implemented first (moving the serializer to `target-mongo`), the DML serializer goes there too. Otherwise, it goes in the adapter alongside the existing serializer for now and moves later. + +4. **Runner architecture**: The runner currently only handles DDL via `MongoCommandExecutor` (visitor pattern). Data transforms need a different execution path (`MongoAdapter` + `MongoDriver`). The runner needs access to both. If the migration-subsystem-refactor spec is done first (runner accepts injected executors), the adapter/driver can be injected alongside the DDL executors. Otherwise, the runner needs to be extended to accept the adapter/driver as additional dependencies. + +5. 
**Filter expression serialization for typed commands**: The `mongoPipeline` builder produces typed `MongoPipelineStage` and `MongoFilterExpr` objects. The existing `mongo-ops-serializer` already handles `MongoFilterExpr` deserialization (for DDL prechecks/postchecks). Pipeline stages need new deserialization logic, but the pattern is identical. diff --git a/projects/mongo-migration-authoring/spec.md b/projects/mongo-migration-authoring/spec.md new file mode 100644 index 0000000000..f97fb203a7 --- /dev/null +++ b/projects/mongo-migration-authoring/spec.md @@ -0,0 +1,177 @@ +# Summary + +Users can author Mongo migrations by hand in TypeScript. A migration file exports a class, runs as a standalone script, and produces `ops.json` that the existing runner consumes unchanged. + +# Description + +## What a migration file looks like + +```typescript +import { Migration, createIndex, createCollection } + from "@prisma-next/target-mongo/migration" + +export default class extends Migration { + override describe() { + return { + from: 'sha256:abc123...', + to: 'sha256:def456...', + labels: ['add-user-email-index'], + } + } + + override plan() { + return [ + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + validationLevel: "strict", + }), + createIndex("users", [{ field: "email", direction: 1 }], { unique: true }), + ] + } +} + +Migration.run(import.meta.url) +``` + +`node migration.ts` produces `ops.json` and (if `describe()` is implemented) `migration.json`. The existing `MongoMigrationRunner` consumes `ops.json` unchanged. That's the entire authoring workflow. + +## How it works + +The file has two parts: + +1. **The class** — exports a `Migration` subclass with a `plan()` method that returns an array of `MongoMigrationPlanOperation` objects built by factory functions. Optionally overrides `describe()` to provide migration metadata (origin/destination hashes, labels). +2. 
**The run line** — `Migration.run(import.meta.url)` makes the file self-executing. When run directly (`node migration.ts`), it calls `plan()`, serializes the result, and writes `ops.json`. If `describe()` returns metadata, it also writes `migration.json`. When imported by the CLI or a test, it's a no-op. + +## Operation factories + +Each factory function produces a single `MongoMigrationPlanOperation` — a plain object containing the operation's identity, its DDL command, and its pre/postchecks: + +- `createIndex(collection, keys, options?)` — adds an index with a precheck that it doesn't already exist +- `dropIndex(collection, keys)` — removes an index with a precheck that it exists +- `createCollection(collection, options?)` — creates a collection with optional validator, collation, capped settings, etc. +- `dropCollection(collection)` — drops a collection +- `collMod(collection, options, overrides?)` — modifies collection options (validator, changeStreamPreAndPostImages, etc.). Accepts an optional `overrides` parameter with `operationClass` to classify the operation as `widening` or `destructive` (defaults to `destructive`). + +The factories produce the same output as the existing `MongoMigrationPlanner`. The runner cannot distinguish between planner-generated and hand-authored operations. 
+ +## Composing operations into strategies + +A strategy is a plain function that composes the atomic factories: + +```typescript +function validatedCollection( +  name: string, +  schema: Record<string, unknown>, +  indexes: Array<{ keys: MongoIndexKey[]; unique?: boolean }>, +) { +  return [ +    createCollection(name, { +      validator: { $jsonSchema: schema }, +      validationLevel: "strict", +      validationAction: "error", +    }), +    ...indexes.map(idx => createIndex(name, idx.keys, { unique: idx.unique })), +  ] +} +``` + +Used in a migration: + +```typescript +export default class extends Migration { +  plan() { +    return validatedCollection("users", +      { required: ["email", "name"] }, +      [{ keys: [{ field: "email", direction: 1 }], unique: true }], +    ) +  } +} +``` + +Strategies are regular functions. Users write their own the same way — compose factories, return operations. The planner could call the same functions to produce its output (though refactoring the planner is out of scope here). + +## Serialization + +The Mongo command classes (`CreateIndexCommand`, `DropIndexCommand`, etc.) store all data as public readonly properties. `JSON.stringify()` serializes them directly — the existing `serializeMongoOps` is literally `JSON.stringify(ops, null, 2)`. The factory functions produce the same command class instances the planner does, so serialization works identically. + +## Why Mongo + +Mongo is a good starting point for this pattern because: + +- The operation set is small and well-defined (5 DDL commands) +- The command classes already serialize via `JSON.stringify` +- The planner already produces `MongoMigrationPlanOperation[]` directly — the architecture is already aligned +- It's a self-contained family, so this work doesn't touch other targets + +The pattern is designed to generalize to SQL migrations, where factory functions like `addColumn`, `setNotNull`, etc. would produce `SqlMigrationPlanOperation` objects the same way.
See [the design proposal](assets/migration-authoring-design.md) for the full cross-target vision. + +# Requirements + +## Functional Requirements + +- Factory functions for each Mongo DDL operation (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod`) that produce `MongoMigrationPlanOperation` objects with correct prechecks, commands, and postchecks. Factory functions and planner are co-located in `packages/3-mongo-target/1-mongo-target`, exported from `@prisma-next/target-mongo/migration`. +- A `Migration` base class with: + - An abstract `plan()` method returning `MongoMigrationPlanOperation[]` + - An optional `describe()` method returning `MigrationMeta` (origin/destination hashes, kind, labels) for `migration.json` generation + - A static `Migration.run(import.meta.url)` method that handles self-execution (entrypoint detection, serialization, file writing) + - `--dry-run` flag support (print operations without writing) + - `--help` flag support +- At least one compound strategy function demonstrating composition of multiple factories +- Factory output that serializes identically to planner output — the runner consumes both without distinction + +## Non-Functional Requirements + +- The `Migration` base class interface is target-agnostic (so the SQL target can provide its own version later). **Assumption:** a generic `Migration` base in the framework, with a Mongo-specific alias that fixes the type parameter. 
+- No changes to the existing `MongoMigrationPlanner` or `MongoMigrationRunner` + +## Non-goals + +- Rewriting the Mongo planner to use factory functions internally — the planner works; refactoring it is separate +- Data transform support for Mongo migrations +- CLI integration (`prisma migration new/plan/verify` for Mongo) — future work +- Scaffolding tooling (auto-generating `migration.ts` from planner output) — future work +- Transaction support for Mongo migrations + +# Acceptance Criteria + +## Authoring a migration + +- [ ] A migration file with `export default class extends Migration` and factory function calls in `plan()` type-checks and runs with `node migration.ts` +- [ ] The file produces `ops.json` in its own directory +- [ ] Running with `--dry-run` prints operations to stdout without writing `ops.json` +- [ ] Running with `--help` prints usage information + +## Importing a migration + +- [ ] When imported (not run directly), `Migration.run(import.meta.url)` is a no-op +- [ ] The default export class can be instantiated and `plan()` called directly (for CLI and test use) + +## Operation correctness + +- [ ] Each factory (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod`) produces a `MongoMigrationPlanOperation` with the correct prechecks, commands, and postchecks +- [ ] Factory output serializes identically to planner output for the same operation (verified by test comparing JSON output) +- [ ] Round-trip works: factory → `JSON.stringify` → `deserializeMongoOps` → runner execution + +## Composition + +- [ ] At least one compound strategy function composes multiple factories and returns a flat operation list +- [ ] The strategy is a plain exported function — users compose operations the same way + +# References + +- [Migration Authoring Design Proposal](assets/migration-authoring-design.md) +- Existing Mongo migration system: + - `packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts` — planner + - 
`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts` — serializer/deserializer + - `packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts` — runner +- Mongo query AST (command classes, filter expressions): + - `packages/2-mongo-family/4-query/query-ast/src/ddl-commands.ts` + - `packages/2-mongo-family/4-query/query-ast/src/inspection-commands.ts` + - `packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts` + - `packages/2-mongo-family/4-query/query-ast/src/migration-operation-types.ts` + +# Decisions + +1. **Factory functions and planner co-located in `packages/3-mongo-target/1-mongo-target`.** Exported from `@prisma-next/target-mongo/migration`. Users import everything from one place: `import { Migration, createIndex } from "@prisma-next/target-mongo/migration"`. + +2. **Factory signatures are an implementation detail.** The only constraint is that the planner can depend on the factory functions it uses. Since both live in the same package, the signatures can evolve freely. diff --git a/projects/mongo-migration-authoring/specs/data-migrations.spec.md b/projects/mongo-migration-authoring/specs/data-migrations.spec.md new file mode 100644 index 0000000000..2547c56082 --- /dev/null +++ b/projects/mongo-migration-authoring/specs/data-migrations.spec.md @@ -0,0 +1,206 @@ +# Summary + +Users can express data transformations in MongoDB migrations — backfilling fields, reshaping documents, fixing constraint violations — alongside structural operations in the same migration file. + +# Description + +## The problem + +MongoDB migrations today can only express structural DDL: create/drop collections, create/drop indexes, set validation rules. But schema evolution often requires changing data too. Adding a required `status` field to a `users` collection means you need to backfill `"active"` into every existing document before you can enforce the validator. Today, there's no way to express that backfill as part of the migration. 
+ +## What it looks like + +A data transform is an operation in the migration's operation chain, alongside structural operations: + +```typescript +// migrations/0002_backfill-status/migration.ts +import type { Contract } from './contract.d' +import contractJson from './contract.json' with { type: 'json' } +import { Migration, createCollection, setValidation, dataTransform } + from '@prisma-next/target-mongo/migration' +import { mongoRaw } from '@prisma-next/mongo-orm' +import { mongoPipeline } from '@prisma-next/mongo-pipeline-builder' + +const raw = mongoRaw({ contract: contractJson as Contract }) +const agg = mongoPipeline({ contract: contractJson as Contract }) + +export default class BackfillStatus extends Migration { + plan() { + return [ + createCollection("users", { + validator: { $jsonSchema: { required: ["email"] } }, + }), + + dataTransform("backfill-status", { + check: () => agg.from('users') + .match((f) => f.status.exists(false)) + .limit(1), + run: () => raw.collection('users') + .updateMany({ status: { $exists: false } }, { $set: { status: "active" } }), + }), + + setValidation("users", { + $jsonSchema: { required: ["email", "status"] }, + }), + ] + } +} + +Migration.run(import.meta.url, BackfillStatus) +``` + +The ordering matters: create the collection, backfill the data, *then* tighten the validator. The `dataTransform` sits between structural operations exactly where the data needs to be in the right shape. + +## How authoring works + +The query builders (`mongoRaw`, `mongoPipeline`) are the existing tools for building MongoDB queries. They take a contract and produce `MongoQueryPlan` objects — static command descriptions, no database connection required. + +The user constructs these builders at the top of the migration file from the scaffolded contract. The `check` and `run` closures use them to describe what the migration should do: + +- **`check`** describes a query for "violation" documents — rows that still need the transform.
If the result is empty, the transform has already been applied. This gives retry safety: if a migration fails partway through and is re-run, completed transforms are skipped. +- **`run`** describes the actual data modification — an `updateMany`, `insertMany`, `deleteMany`, or aggregation pipeline. + +`check` also runs *after* `run` to verify the transform worked. If violations remain, the migration fails with a diagnostic *before* the subsequent `setValidation` would produce a cryptic database error. + +`check` also accepts `false` (always run — for idempotent-by-construction cases) or `true` (always skip). + +## How serialization works + +This is the key constraint: **no TypeScript runs at apply time**. The migration file is evaluated once during `migration verify`, and the resulting command descriptions are written to `ops.json` as JSON. At `migration apply`, only the JSON is loaded and executed. + +This works naturally for MongoDB because MongoDB commands *are* JSON. The query builders produce AST objects (`UpdateManyCommand`, `AggregateCommand`, etc.) that are `MongoAstNode` subclasses. These serialize directly via `JSON.stringify` — every node has a `kind` discriminant and public readonly properties. Deserialization reconstructs the class instances from the `kind` field, validated by arktype schemas. This is exactly the same mechanism already used for DDL commands (`CreateIndexCommand`, `CollModCommand`, etc.) in the existing migration serializer. + +The lifecycle: + +1. **Author**: User writes `migration.ts` with structural ops and data transforms. +2. **Verify**: `migration verify` evaluates the TypeScript, calls `.build()` on the query chain objects to produce `MongoQueryPlan` ASTs, and writes them to `ops.json`. +3. **Apply**: `migration apply` reads `ops.json`, deserializes the command ASTs, and executes them. DDL commands go through `MongoCommandExecutor` (existing path). 
DML commands go through `MongoAdapter.lower()` → `MongoDriver.execute()` (the existing runtime query execution path). + +## Contract in the migration folder + +When a migration is scaffolded, `contract.json` and `contract.d.ts` are copied into the migration directory. This gives the query builders their type information and makes the migration self-contained — it doesn't break if the source schema evolves after the migration is written. + +For complex migrations that need queries typed against an intermediate schema state (e.g., after adding a nullable field but before tightening to NOT NULL), the user copies their schema authoring surface into the migration folder, modifies it, and runs `contract emit` to produce a second contract: + +``` +migrations/0003_split-name/ +├── migration.ts +├── contract.json # destination contract (scaffolded) +├── contract.d.ts +├── intermediate.prisma # intermediate schema (user-authored) +├── intermediate.json # emitted from intermediate.prisma +└── intermediate.d.ts +``` + +The user creates a second set of query builders from the intermediate contract and uses them for the data transform that operates against that schema state. Multiple intermediate contracts are supported. + +# Decisions + +1. **Use existing query builders, not a new abstraction.** `mongoRaw` and `mongoPipeline` already produce `MongoQueryPlan` objects from a contract. The `dataTransform` factory consumes these — no migration-specific query API is needed. A future strongly typed query builder (validating field names and operators against the contract) will slot in transparently because it produces the same `MongoQueryPlan` output. + +2. **Module-scoped builders, not injected callbacks.** The Postgres `dataTransform` injects a `Db` client into callbacks because the SQL query builder needs a runtime execution context. The Mongo query builders are fully static — they need only a contract. 
So the user constructs them at module scope and the closures capture them via closure. Simpler, and no resolver infrastructure needed. + +3. **Same serialization pattern as DDL.** DML commands (`updateMany`, `aggregate`, etc.) serialize and deserialize using the same `kind`-based rehydration mechanism as DDL commands (`createIndex`, `collMod`, etc.). The existing `mongo-ops-serializer` is extended with DML command kinds. No separate serialization mechanism. + +4. **DML execution via `MongoAdapter` + `MongoDriver`.** Data transform commands execute through the existing runtime query path, not through `MongoCommandExecutor` (which handles DDL only). This reuses proven infrastructure. + +5. **Contract co-located with the migration.** The contract is scaffolded into the migration directory rather than referenced by path. Makes migrations self-contained and resilient to schema evolution after authoring. + +# Requirements + +## Functional Requirements + +- A `dataTransform(name, { check, run })` factory that produces a data transform migration operation. `check` and `run` are closures returning `Buildable` or `MongoQueryPlan` objects. The resolver calls `.build()` on `Buildable` returns. +- DML command serialization: all `RawMongoCommand` kinds and typed `AggregateCommand` serialize via `JSON.stringify` and deserialize via `kind`-based rehydration with arktype validation, following the existing DDL pattern. +- The migration runner executes data transform operations with the check → (skip or run) → check → (fail or proceed) sequence. +- `check` supports three modes: a closure returning a query (empty result = done), `false` (always run), `true` (always skip). +- A `TODO` sentinel in `dataTransform` prevents attestation. +- Migration scaffolding copies `contract.json` and `contract.d.ts` into the migration directory. + +## Non-Functional Requirements + +- No TypeScript is executed at apply time. 
+- DML serialization is consistent with the existing DDL pattern — same module, same dispatch mechanism. +- No changes to existing DDL factories, DDL runner path, or `MongoCommandExecutor`. + +## Non-goals + +- **Strongly typed Mongo query builder.** `mongoRaw` has untyped filter/update documents; `mongoPipeline` has richer typing for aggregations. A fully typed builder validating field names against the contract is future work and will plug in transparently. +- **Planner integration.** Auto-detecting data migration needs from contract diffs and scaffolding `dataTransform` with TODO placeholders. For v1, data transforms are manually authored. +- **Transaction/session support.** MongoDB multi-document transactions are orthogonal and can be layered on. +- **Graph integration.** Invariant tracking, invariant-aware routing, and ledger recording of data migration names are deferred (same scope as the Postgres graph integration work). + +# Acceptance Criteria + +## Authoring + +- [ ] A migration file with `dataTransform` using `mongoRaw` for `run` and `mongoPipeline` for `check` type-checks and can be verified +- [ ] The resolver calls `.build()` on `Buildable` returns from `check`/`run` closures +- [ ] A `TODO` sentinel in `dataTransform` prevents attestation +- [ ] `check: false` (always run) and `check: true` (always skip) are supported + +## Serialization + +- [ ] `MongoQueryPlan` command ASTs round-trip through `JSON.stringify` → `kind`-based deserialization +- [ ] All `RawMongoCommand` kinds are handled: `rawUpdateMany`, `rawUpdateOne`, `rawInsertOne`, `rawInsertMany`, `rawDeleteMany`, `rawDeleteOne`, `rawAggregate`, `rawFindOneAndUpdate`, `rawFindOneAndDelete` +- [ ] Typed `aggregate` command (from `mongoPipeline`) is handled +- [ ] Deserialization validates each command shape with arktype schemas + +## Execution + +- [ ] The runner executes data transform operations: check → (skip or run) → check again → (fail or proceed) +- [ ] DML commands execute via 
`MongoAdapter.lower()` → `MongoDriver.execute()` +- [ ] On retry, `check` determines whether to skip the data transform's `run` +- [ ] If `check` returns violations after `run`, the migration fails with a diagnostic + +## End-to-end + +- [ ] A data transform migration round-trips: author → verify → apply against a real MongoDB instance +- [ ] A migration with both DDL operations and a data transform executes correctly in sequence +- [ ] Migration scaffolding produces `contract.json` and `contract.d.ts` in the migration directory + +# Other Considerations + +## Security + +No change from existing model. No TypeScript is executed at apply time. Data migration commands run with the same database permissions as the migration runner. + +## Observability + +The runner logs data transform start/completion/failure with the migration name. + +# Alternatives considered + +## Callback injection (Postgres pattern) + +The Postgres `dataTransform` injects a typed `Db` client into `check`/`run` callbacks. This is necessary for SQL because the query builder needs a runtime execution context (contract + query operation types + adapter) to construct queries. For MongoDB, the query builders are fully static — they need only a contract — so injection adds complexity without benefit. + +## Migration-specific query builders (`createMongoBuilders`) + +A `createMongoBuilders()` helper (analogous to Postgres's `createBuilders()`) that returns data-transform-specific builder functions. Rejected because it restricts what operations the user can express and duplicates the existing query builder API surface. Using the general-purpose query builders directly is simpler and more flexible. + +## Direct `MongoQueryPlan` construction (no closures) + +Since the query builders are static, `check`/`run` could accept `MongoQueryPlan` objects directly instead of closures. 
Closures are marginally better because they defer `.build()` to the resolver (consistent with the Postgres pattern) and allow the resolver to call `.build()` automatically rather than requiring the user to write it. + +# References + +- Parent project spec: [`projects/mongo-migration-authoring/spec.md`](../spec.md) +- Cross-target data migrations spec: [`projects/graph-based-migrations/specs/data-migrations-spec.md`](../../graph-based-migrations/specs/data-migrations-spec.md) +- Existing Mongo DDL factories: [`packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts`](../../../packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts) +- DDL serializer: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts`](../../../packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts) +- DML command AST: [`packages/2-mongo-family/4-query/query-ast/src/commands.ts`](../../../packages/2-mongo-family/4-query/query-ast/src/commands.ts) +- Raw command AST: [`packages/2-mongo-family/4-query/query-ast/src/raw-commands.ts`](../../../packages/2-mongo-family/4-query/query-ast/src/raw-commands.ts) +- `mongoRaw`: [`packages/2-mongo-family/5-query-builders/orm/src/mongo-raw.ts`](../../../packages/2-mongo-family/5-query-builders/orm/src/mongo-raw.ts) +- `mongoPipeline`: [`packages/2-mongo-family/5-query-builders/pipeline-builder/src/pipeline.ts`](../../../packages/2-mongo-family/5-query-builders/pipeline-builder/src/pipeline.ts) +- Postgres data transform: [`packages/3-targets/3-targets/postgres/src/core/migrations/operation-descriptors.ts`](../../../packages/3-targets/3-targets/postgres/src/core/migrations/operation-descriptors.ts) +- ADR 188 — MongoDB migration operation model +- ADR 191 — Generic three-phase migration operation envelope + +# Open Questions + +1. 
**Operation type shape in ops.json.** Data transform operations don't fit the existing `MongoMigrationPlanOperation` shape (which has `precheck`/`execute`/`postcheck` containing DDL commands). **Default assumption:** use `operationClass: 'data'` as discriminant, with `check`/`run` fields instead of `precheck`/`execute`/`postcheck`. + +2. **Where does `dataTransform` live?** **Default assumption:** in `@prisma-next/target-mongo/migration` alongside the DDL factories, since it produces an operation consumed by the same runner and serialized to the same `ops.json`. + +3. **Aggregation pipeline stage deserialization scope.** The typed `MongoPipelineStage` classes have ~25 `kind` values. **Default assumption:** implement the subset needed for `check` queries (`$match`, `$limit`, `$sort`, `$project`) and common transform patterns (`$addFields`, `$lookup`, `$merge`); extend as needed. diff --git a/projects/mongo-migration-authoring/specs/migration-subsystem-refactor.spec.md b/projects/mongo-migration-authoring/specs/migration-subsystem-refactor.spec.md new file mode 100644 index 0000000000..bae6fdc3e3 --- /dev/null +++ b/projects/mongo-migration-authoring/specs/migration-subsystem-refactor.spec.md @@ -0,0 +1,138 @@ +# Summary + +Move the Mongo migration subsystem (planner, runner, serializer, and supporting modules) from `@prisma-next/adapter-mongo` to `@prisma-next/target-mongo`, and refactor the runner to accept abstract visitor interfaces instead of depending directly on the `mongodb` driver. This corrects the layering: the planner, runner, and serializer are migration-plane concerns that belong in the target package, not the adapter. + +# Description + +The Mongo migration subsystem currently lives in `packages/3-mongo-target/2-mongo-adapter/src/core/`. Most of these modules have no dependency on the `mongodb` driver — they operate purely on AST types from `@prisma-next/mongo-query-ast` and schema IR from `@prisma-next/mongo-schema-ir`. 
They were placed in the adapter historically, but they belong in the target layer. + +The one module that does touch `mongodb` is the runner (`mongo-runner.ts`), but only through two concrete executor classes (`MongoCommandExecutor`, `MongoInspectionExecutor`). These implement visitor interfaces (`MongoDdlCommandVisitor`, `MongoInspectionCommandVisitor`) already defined in the family layer (`@prisma-next/mongo-query-ast`). The runner itself only calls `command.accept(executor)` — it never uses `Db` directly for DDL execution. By accepting the visitor interfaces as injected dependencies, the runner can move to the target package while the concrete executor implementations stay in the adapter. + +This follows the same adapter/driver pattern used for query execution: the orchestrator operates on abstract interfaces, and the adapter provides the concrete backing. + +## Modules to move + +From `packages/3-mongo-target/2-mongo-adapter/src/core/` to `packages/3-mongo-target/1-mongo-target/src/core/`: + +| Module | Rationale | +|---|---| +| `mongo-planner.ts` | Pure diffing logic; depends on query-ast and schema-ir, not `mongodb` | +| `mongo-ops-serializer.ts` | Serialization/deserialization of AST; depends on query-ast and arktype, not `mongodb` | +| `contract-to-schema.ts` | Contract-to-schema-IR conversion; depends on mongo-contract and schema-ir, not `mongodb` | +| `ddl-formatter.ts` | Human-readable DDL formatting via visitor pattern; no `mongodb` dependency | +| `filter-evaluator.ts` | Pure filter evaluation logic; depends on query-ast filter types, not `mongodb` | + +## Modules to refactor + +| Module | Change | +|---|---| +| `mongo-runner.ts` | Refactor to accept `MongoDdlCommandVisitor` and `MongoInspectionCommandVisitor` as injected dependencies instead of constructing them from `Db`. Moves to target.
| + +## Modules that stay in adapter + +| Module | Rationale | +|---|---| +| `command-executor.ts` | Concrete `MongoDdlCommandVisitor` and `MongoInspectionCommandVisitor` implementations that use `mongodb` driver's `Db` type | +| `mongo-control-driver.ts` | Creates/manages `mongodb` connection, exposes `Db` | +| `introspect-schema.ts` | Directly queries `mongodb` to build schema IR from live database | + +## Wiring changes + +The `mongoTargetDescriptor` in `packages/2-mongo-family/9-family/src/core/mongo-target-descriptor.ts` currently imports `MongoMigrationPlanner`, `MongoMigrationRunner`, and `contractToMongoSchemaIR` from `@prisma-next/adapter-mongo/control`. After the move, these imports come from `@prisma-next/target-mongo/control`. + +The `createRunner` factory on the target descriptor will need to wire the concrete executor implementations from the adapter into the runner. This can be achieved by either: +- Having the family instance (passed to `createRunner`) provide the executors, or +- Passing the adapter's executor factory to the target descriptor at composition time + +**Assumption:** The simplest approach is to have `createRunner` accept the family instance (which already has access to the driver) and construct the concrete executors there. The runner's `execute` method signature changes to accept executor instances rather than extracting `Db` internally. + +# Requirements + +## Functional Requirements + +- All six modules listed above move from `adapter-mongo` to `target-mongo`, with their corresponding test files +- The runner's `execute` method accepts abstract visitor interfaces (`MongoDdlCommandVisitor` and `MongoInspectionCommandVisitor`) rather than constructing them internally from a `Db` handle +- `@prisma-next/adapter-mongo/control` re-exports the moved symbols for backward compatibility during the transition (the adapter already re-exports `target-mongo/control` symbols like `initMarker`, `readMarker`, etc.)
+ +- `@prisma-next/target-mongo/control` exports the planner, runner, serializer, contract-to-schema converter, DDL formatter, and filter evaluator +- The `mongoTargetDescriptor` in `9-family` imports planner, runner, and `contractToMongoSchemaIR` from `@prisma-next/target-mongo/control` instead of `@prisma-next/adapter-mongo/control` +- All existing tests pass without behavioral changes +- New dependencies added to `@prisma-next/target-mongo`: `@prisma-next/mongo-contract`, `@prisma-next/mongo-schema-ir`, `@prisma-next/utils`, `arktype` + +## Non-Functional Requirements + +- No behavioral changes — this is a pure structural refactoring +- Package layering validation (`pnpm lint:deps`) passes after the move +- The adapter's `mongodb` dependency does not leak into the target package + +## Non-goals + +- Refactoring the planner's internal logic (that's spec 2) +- Changing the `MigrationRunner` framework interface +- Moving `introspect-schema.ts` to the target (it genuinely needs the driver) +- Removing the backward-compat re-exports from `adapter-mongo/control` in this change + +# Acceptance Criteria + +## Module relocation + +- [ ] `mongo-planner.ts`, `mongo-ops-serializer.ts`, `contract-to-schema.ts`, `ddl-formatter.ts`, `filter-evaluator.ts` live in `packages/3-mongo-target/1-mongo-target/src/core/` +- [ ] `mongo-runner.ts` lives in `packages/3-mongo-target/1-mongo-target/src/core/` +- [ ] Their test files move to `packages/3-mongo-target/1-mongo-target/test/` +- [ ] `@prisma-next/target-mongo/control` exports all moved symbols + +## Runner abstraction + +- [ ] `MongoMigrationRunner.execute()` accepts `MongoDdlCommandVisitor` and `MongoInspectionCommandVisitor` as parameters (or via constructor injection) +- [ ] The runner has no `import ...
from 'mongodb'` statement +- [ ] `MongoCommandExecutor` and `MongoInspectionExecutor` remain in `adapter-mongo` and are wired into the runner at composition time + +## Backward compatibility + +- [ ] `@prisma-next/adapter-mongo/control` re-exports all moved symbols so existing consumers are not broken +- [ ] `mongoTargetDescriptor` in `9-family` imports from `@prisma-next/target-mongo/control` + +## Validation + +- [ ] All existing tests pass (`pnpm test:packages`) +- [ ] Package layering passes (`pnpm lint:deps`) +- [ ] `@prisma-next/target-mongo` does not depend on `mongodb` for the moved modules (the existing marker-ledger dependency is acceptable) +- [ ] E2E and integration tests pass (`pnpm test:e2e`, `pnpm test:integration`) + +# Other Considerations + +## Security + +Not applicable — pure internal refactoring, no new public API surface. + +## Cost + +No runtime cost impact. Build/CI times unchanged. + +## Observability + +Not applicable. + +## Data Protection + +Not applicable. + +## Analytics + +Not applicable. 
+ +# References + +- Current planner: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts) +- Current runner: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/mongo-runner.ts) +- Current serializer: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/mongo-ops-serializer.ts) +- Visitor interfaces (family layer): [`packages/2-mongo-family/4-query/query-ast/src/ddl-visitors.ts`](packages/2-mongo-family/4-query/query-ast/src/ddl-visitors.ts) +- Concrete executors (stay in adapter): [`packages/3-mongo-target/2-mongo-adapter/src/core/command-executor.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/command-executor.ts) +- Target descriptor (consumer): [`packages/2-mongo-family/9-family/src/core/mongo-target-descriptor.ts`](packages/2-mongo-family/9-family/src/core/mongo-target-descriptor.ts) +- Framework `MigrationRunner` interface: [`packages/1-framework/1-core/framework-components/src/control-migration-types.ts`](packages/1-framework/1-core/framework-components/src/control-migration-types.ts) +- Parent project spec: [`projects/mongo-migration-authoring/spec.md`](../spec.md) + +# Open Questions + +1. **Runner dependency injection style**: Should the runner accept executors via constructor injection (set once, reused across `execute` calls) or as parameters to each `execute` call? Constructor injection is simpler if the runner is created once per session; parameter injection is more flexible. **Default assumption:** Constructor injection, since `createRunner` already creates a fresh instance per session. + +2. **`target-mongo` already depends on `mongodb`** (for marker-ledger operations which use `Db`). Should the marker-ledger also be refactored to accept an abstract interface, or is that a separate concern? 
**Default assumption:** Out of scope; the marker operations are small and isolated, and refactoring them can happen independently. diff --git a/projects/mongo-migration-authoring/specs/planner-dual-output.spec.md b/projects/mongo-migration-authoring/specs/planner-dual-output.spec.md new file mode 100644 index 0000000000..f2cd4715cd --- /dev/null +++ b/projects/mongo-migration-authoring/specs/planner-dual-output.spec.md @@ -0,0 +1,146 @@ +# Summary + +Refactor the `MongoMigrationPlanner` to produce an intermediate `OpFactoryCall[]` representation instead of constructing `MongoMigrationPlanOperation[]` directly. Add two renderers: one that materializes `OpFactoryCall[]` into `MongoMigrationPlanOperation[]` (preserving current behavior), and one that renders `OpFactoryCall[]` into TypeScript migration files that call the existing factory functions. + +# Description + +Today the `MongoMigrationPlanner` has inline `planCreateIndex`, `planDropIndex`, `planCreateCollection`, `planDropCollection`, `planValidatorDiff`, and `planMutableOptionsDiff` helper functions that directly construct `MongoMigrationPlanOperation` objects with AST command classes, filter expressions, and check structures. The same logic is duplicated in the hand-authored migration factory functions (`createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod`). + +The goal is to make the planner produce a higher-level representation — an array of `OpFactoryCall` objects — that describes *which factory to call with which arguments*, rather than the fully-expanded operation. Two renderers then interpret this representation: + +1. **Operation renderer** (`renderOps`): Calls the factory functions to produce `MongoMigrationPlanOperation[]`. This is the existing behavior, preserved for `db init`, `db update`, and any other path that needs raw operations. + +2. 
**TypeScript renderer** (`renderTypeScript`): Generates a complete TypeScript migration file (`migration.ts`) that imports the factory functions and calls them in `plan()`. This enables `prisma migration plan` to produce editable migration files that users can modify before applying. + +## `OpFactoryCall` type + +A discriminated union where each variant corresponds to one factory function: + +```typescript +type OpFactoryCall = + | { readonly factory: 'createIndex'; readonly collection: string; readonly keys: ReadonlyArray<{ field: string; direction: 1 | -1 }>; readonly options?: CreateIndexOptions } + | { readonly factory: 'dropIndex'; readonly collection: string; readonly keys: ReadonlyArray<{ field: string; direction: 1 | -1 }> } + | { readonly factory: 'createCollection'; readonly collection: string; readonly options?: CreateCollectionOptions } + | { readonly factory: 'dropCollection'; readonly collection: string } + | { readonly factory: 'collMod'; readonly collection: string; readonly options: CollModOptions }; +``` + +Each variant captures exactly the arguments of the corresponding factory function. The planner produces these instead of fully-expanded operations. + +## Factory alignment + +The existing planner helper functions and the hand-authored factory functions have slightly different signatures and behaviors in some cases (e.g., the planner's `planCreateCollection` maps `MongoSchemaCollectionOptions` to `CreateCollectionOptions`, while the factory takes `CreateCollectionOptions` directly). As part of this work, the factory signatures are aligned so the planner can produce `OpFactoryCall` values that map 1:1 to factory calls. Since the factories were just created for the migration authoring project, their signatures can be adjusted freely. + +The planner's `planValidatorDiff` function currently produces `collMod` operations directly. 
After alignment, this maps to `OpFactoryCall` with `factory: 'collMod'`, and the operation-class classification logic (`classifyValidatorUpdate`) moves to a helper that the operation renderer calls (since `OpFactoryCall` doesn't carry `operationClass`). + +**Assumption:** `operationClass` is derived by the operation renderer, not stored in `OpFactoryCall`. The renderer has enough context (factory type + arguments) to determine the class. + +## Conflict detection stays in the planner + +The planner's conflict detection logic (immutable option changes, policy violations) operates on the schema diff, not on the generated operations. This stays in the planner and runs before `OpFactoryCall[]` generation. + +# Requirements + +## Functional Requirements + +- `OpFactoryCall` discriminated union type defined in `@prisma-next/target-mongo/control`, covering all five factory functions +- `MongoMigrationPlanner.plan()` internally produces `OpFactoryCall[]` and passes it through the operation renderer to return `MigrationPlannerResult` (preserving the existing interface) +- A new `MongoMigrationPlanner.planCalls()` method (or similar) returns the raw `OpFactoryCall[]` for consumers that need the intermediate representation +- An operation renderer function (`renderOps(calls: OpFactoryCall[]): MongoMigrationPlanOperation[]`) that calls the factory functions to produce operations +- A TypeScript renderer function (`renderTypeScript(calls: OpFactoryCall[], meta?: MigrationMeta): string`) that produces a complete, runnable migration file +- Factory function signatures in `migration-factories.ts` are aligned with `OpFactoryCall` argument shapes so the mapping is 1:1 +- The operation renderer assigns `operationClass` based on factory type and arguments (same classification logic the planner uses today) +- The TypeScript renderer generates valid TypeScript that imports from `@prisma-next/target-mongo/migration` and calls the factory functions + +## Non-Functional Requirements + +- 
The `plan()` method's external behavior is unchanged — consumers (CLI, target descriptor, tests) see the same `MigrationPlannerResult` +- The TypeScript renderer produces readable, idiomatic code (proper formatting, minimal boilerplate) +- The `OpFactoryCall` type is serializable (no class instances, only plain data) + +## Non-goals + +- CLI integration for `prisma migration plan --emit-ts` (future work — the renderer is the building block) +- Automatic migration file scaffolding from the CLI +- Supporting SQL targets with this specific `OpFactoryCall` type (each target will have its own factory call type) +- Data transform operations in the generated TypeScript + +# Acceptance Criteria + +## OpFactoryCall type + +- [ ] `OpFactoryCall` is a discriminated union with variants for `createIndex`, `dropIndex`, `createCollection`, `dropCollection`, `collMod` +- [ ] Each variant's fields match the aligned factory function's parameters exactly +- [ ] The type is exported from `@prisma-next/target-mongo/control` + +## Planner refactoring + +- [ ] `MongoMigrationPlanner` internally produces `OpFactoryCall[]` from the schema diff +- [ ] `plan()` returns the same `MigrationPlannerResult` as before (behavioral equivalence verified by existing tests) +- [ ] A method or function exposes the raw `OpFactoryCall[]` for downstream consumers +- [ ] Conflict detection (immutable options, policy violations) is preserved + +## Operation renderer + +- [ ] `renderOps(calls)` produces `MongoMigrationPlanOperation[]` identical to the current planner output for the same inputs +- [ ] Round-trip equivalence: for any schema diff, `renderOps(planner.planCalls(...))` produces the same operations as the current `planner.plan(...)` (verified by test comparing JSON output) +- [ ] `operationClass` is correctly derived for each factory call + +## TypeScript renderer + +- [ ] `renderTypeScript(calls)` produces a syntactically valid TypeScript file +- [ ] The generated file imports from 
`@prisma-next/target-mongo/migration` +- [ ] The generated file can be executed with `tsx` to produce `ops.json` +- [ ] The generated `ops.json` is identical to what `renderOps(calls)` produces when serialized (round-trip equivalence) +- [ ] When `meta` is provided, the generated file includes a `describe()` method returning the metadata + +## Factory alignment + +- [ ] Factory function signatures in `migration-factories.ts` align with `OpFactoryCall` argument shapes +- [ ] The planner's `planCreateCollection` mapping from `MongoSchemaCollectionOptions` to `CreateCollectionOptions` is extracted to a reusable helper + +# Other Considerations + +## Security + +Not applicable — no new external API surface; the TypeScript renderer produces source code that is written to disk by the CLI. + +## Cost + +No runtime cost impact. The intermediate representation adds negligible overhead (one extra array allocation per plan). + +## Observability + +Not applicable. + +## Data Protection + +Not applicable. + +## Analytics + +Not applicable. + +# References + +- Current planner: [`packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts`](packages/3-mongo-target/2-mongo-adapter/src/core/mongo-planner.ts) (will move to `target-mongo` per spec 1) +- Factory functions: [`packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts`](packages/3-mongo-target/1-mongo-target/src/core/migration-factories.ts) +- `Migration` base class: [`packages/1-framework/3-tooling/migration/src/migration-base.ts`](packages/1-framework/3-tooling/migration/src/migration-base.ts) +- Migration authoring spec (parent): [`projects/mongo-migration-authoring/spec.md`](../spec.md) +- Migration subsystem refactor spec (prerequisite): [`projects/mongo-migration-authoring/specs/migration-subsystem-refactor.spec.md`](migration-subsystem-refactor.spec.md) + +# Decisions + +1. 
**`OpFactoryCall` is plain data, not class instances.** This makes it serializable and testable without constructing AST nodes. The factory functions handle AST construction. + +2. **`operationClass` is not part of `OpFactoryCall`.** It's a derived property that the operation renderer computes. This keeps the intermediate representation simple and avoids duplicating classification logic. + +3. **Prerequisite: migration subsystem refactor.** This spec assumes the planner and factories are co-located in `@prisma-next/target-mongo` (per the migration-subsystem-refactor spec). The planner needs to import and call the factory functions, which requires them to be in the same package or a dependency. + +# Open Questions + +1. **How should `operationClass` be derived for `collMod` calls?** Today the planner classifies validator updates as `widening` or `destructive` based on comparing origin and destination validators. The operation renderer needs access to this context (origin validator state) to make the same determination. Should the `collMod` variant of `OpFactoryCall` carry an explicit `operationClass` override, or should the renderer receive the origin schema as context? **Default assumption:** The `collMod` variant carries an optional `operationClass` field that the planner sets when it has the context to determine it; if omitted, the renderer defaults to `destructive`. + +2. **Should `renderTypeScript` produce the `Migration.run(import.meta.url, MigrationClass)` line?** The hand-authored migration pattern includes this line at the bottom. The rendered file should include it so it's immediately runnable. **Default assumption:** Yes, include it. + +3. **Should `planCalls()` be a separate method on the planner, or should `plan()` return a richer result that includes both `OpFactoryCall[]` and the rendered operations?** **Default assumption:** A separate `planCalls()` method that returns `{ kind: 'success'; calls: OpFactoryCall[] } | { kind: 'failure'; conflicts: ... 
}`, sharing the same conflict-detection logic as `plan()`. diff --git a/test/integration/test/mongo/migration-authoring-e2e.test.ts b/test/integration/test/mongo/migration-authoring-e2e.test.ts new file mode 100644 index 0000000000..cdd65e7915 --- /dev/null +++ b/test/integration/test/mongo/migration-authoring-e2e.test.ts @@ -0,0 +1,355 @@ +import { + deserializeMongoOps, + MongoMigrationRunner, + serializeMongoOps, +} from '@prisma-next/adapter-mongo/control'; +import mongoControlDriver from '@prisma-next/driver-mongo/control'; +import type { MongoMigrationPlanOperation } from '@prisma-next/mongo-query-ast/control'; +import { + createCollection, + createIndex, + dropCollection, + dropIndex, + setValidation, + validatedCollection, +} from '@prisma-next/target-mongo/migration'; +import { timeouts } from '@prisma-next/test-utils'; +import { type Db, MongoClient } from 'mongodb'; +import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; + +const ALL_POLICY = { + allowedOperationClasses: ['additive', 'widening', 'destructive'] as const, +}; + +describe( + 'Migration authoring round-trip (factory → serialize → deserialize → runner → DB)', + { timeout: timeouts.spinUpMongoMemoryServer }, + () => { + let replSet: MongoMemoryReplSet; + let client: MongoClient; + let db: Db; + const dbName = 'authoring_e2e_test'; + + beforeAll(async () => { + replSet = await MongoMemoryReplSet.create({ + instanceOpts: [ + { launchTimeout: timeouts.spinUpMongoMemoryServer, storageEngine: 'wiredTiger' }, + ], + replSet: { count: 1, storageEngine: 'wiredTiger' }, + }); + client = new MongoClient(replSet.getUri()); + await client.connect(); + db = client.db(dbName); + }, timeouts.spinUpMongoMemoryServer); + + beforeEach(async () => { + await db.dropDatabase(); + }); + + afterAll(async () => { + await Promise.allSettled([ + client?.close() ?? Promise.resolve(), + replSet?.stop() ?? 
Promise.resolve(), + ]); + }, timeouts.spinUpMongoMemoryServer); + + async function runOps(ops: readonly MongoMigrationPlanOperation[]): Promise<{ + operationsPlanned: number; + operationsExecuted: number; + }> { + const serialized = JSON.parse(serializeMongoOps(ops)); + const controlDriver = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new MongoMigrationRunner(); + const result = await runner.execute({ + plan: { + targetId: 'mongo', + destination: { storageHash: 'authoring-test' }, + operations: serialized, + }, + driver: controlDriver, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + if (!result.ok) throw new Error(`Runner failed: ${result.failure.summary}`); + return result.value; + } finally { + await controlDriver.close(); + } + } + + describe('createCollection', () => { + it('creates the collection in MongoDB', async () => { + const ops = [createCollection('users')]; + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(1); + + const collections = await db.listCollections({ name: 'users' }).toArray(); + expect(collections).toHaveLength(1); + }); + + it('creates a collection with JSON schema validation', async () => { + const ops = [ + createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + validationAction: 'error', + }), + ]; + await runOps(ops); + + const info = await db.listCollections({ name: 'users' }).toArray(); + const options = (info[0] as Record)['options'] as Record; + expect(options['validator']).toEqual({ $jsonSchema: { required: ['email'] } }); + }); + }); + + describe('createIndex', () => { + it('creates an index on the collection', async () => { + await db.createCollection('users'); + const ops = [createIndex('users', [{ field: 'email', direction: 1 as const }])]; + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(1); + + const indexes = await 
db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeDefined(); + }); + + it('creates a unique index', async () => { + await db.createCollection('users'); + const ops = [ + createIndex('users', [{ field: 'email', direction: 1 as const }], { unique: true }), + ]; + await runOps(ops); + + const indexes = await db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeDefined(); + expect(emailIndex!['unique']).toBe(true); + }); + }); + + describe('dropIndex', () => { + it('drops an existing index', async () => { + await db.createCollection('users'); + await db.collection('users').createIndex({ email: 1 }, { name: 'email_1' }); + + const ops = [dropIndex('users', [{ field: 'email', direction: 1 as const }])]; + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(1); + + const indexes = await db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeUndefined(); + }); + }); + + describe('dropCollection', () => { + it('drops an existing collection', async () => { + await db.createCollection('users'); + const ops = [dropCollection('users')]; + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(1); + + const collections = await db.listCollections({ name: 'users' }).toArray(); + expect(collections).toHaveLength(0); + }); + }); + + describe('setValidation', () => { + it('modifies collection validation', async () => { + await db.createCollection('users'); + const ops = [ + setValidation('users', { required: ['email', 'name'] }, { validationLevel: 'strict' }), + ]; + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(1); + + const info = await 
db.listCollections({ name: 'users' }).toArray(); + const options = (info[0] as Record)['options'] as Record; + expect(options['validator']).toEqual({ + $jsonSchema: { required: ['email', 'name'] }, + }); + }); + }); + + describe('round-trip serialization', () => { + it('factory → JSON.stringify → deserializeMongoOps produces equivalent ops', () => { + const original = [ + createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + }), + createIndex('users', [{ field: 'email', direction: 1 as const }], { unique: true }), + dropIndex('users', [{ field: 'email', direction: 1 as const }]), + setValidation('users', { required: ['email', 'name'] }), + dropCollection('users'), + ]; + + const json = JSON.stringify(original); + const deserialized = deserializeMongoOps(JSON.parse(json)); + + expect(deserialized).toHaveLength(5); + for (let i = 0; i < original.length; i++) { + expect(deserialized[i]!.id).toBe(original[i]!.id); + expect(deserialized[i]!.label).toBe(original[i]!.label); + expect(deserialized[i]!.operationClass).toBe(original[i]!.operationClass); + } + }); + + it('deserialized ops execute successfully against the DB', async () => { + const original = [ + createCollection('users'), + createIndex('users', [{ field: 'email', direction: 1 as const }], { unique: true }), + ]; + + const json = JSON.stringify(original); + const deserialized = deserializeMongoOps(JSON.parse(json)); + + const result = await runOps(deserialized); + expect(result.operationsExecuted).toBe(2); + + const collections = await db.listCollections({ name: 'users' }).toArray(); + expect(collections).toHaveLength(1); + + const indexes = await db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeDefined(); + expect(emailIndex!['unique']).toBe(true); + }); + }); + + describe('validatedCollection', () => { + it('creates 
collection with schema validation and indexes', async () => { + const ops = validatedCollection('users', { required: ['email', 'name'] }, [ + { keys: [{ field: 'email', direction: 1 }], unique: true }, + { keys: [{ field: 'name', direction: 1 }] }, + ]); + const result = await runOps(ops); + expect(result.operationsExecuted).toBe(3); + + const info = await db.listCollections({ name: 'users' }).toArray(); + expect(info).toHaveLength(1); + const options = (info[0] as Record)['options'] as Record; + expect(options['validator']).toEqual({ + $jsonSchema: { required: ['email', 'name'] }, + }); + + const indexes = await db.collection('users').listIndexes().toArray(); + const emailIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['email'] === 1, + ); + expect(emailIndex).toBeDefined(); + expect(emailIndex!['unique']).toBe(true); + + const nameIndex = indexes.find( + (idx) => idx['key'] && (idx['key'] as Record)['name'] === 1, + ); + expect(nameIndex).toBeDefined(); + }); + + it('round-trips through serialization and runs against the DB', async () => { + const ops = validatedCollection('posts', { required: ['title'] }, [ + { keys: [{ field: 'title', direction: 1 }] }, + ]); + + const json = JSON.stringify(ops); + const deserialized = deserializeMongoOps(JSON.parse(json)); + const result = await runOps(deserialized); + expect(result.operationsExecuted).toBe(2); + + const collections = await db.listCollections({ name: 'posts' }).toArray(); + expect(collections).toHaveLength(1); + }); + }); + + describe('multi-step migration lifecycle', () => { + it('applies a full create → modify → drop lifecycle', async () => { + const step1 = [ + createCollection('users', { + validator: { $jsonSchema: { required: ['email'] } }, + validationLevel: 'strict', + }), + createIndex('users', [{ field: 'email', direction: 1 as const }], { unique: true }), + ]; + await runOps(step1); + + let collections = await db.listCollections({ name: 'users' }).toArray(); + 
expect(collections).toHaveLength(1); + const indexes = await db.collection('users').listIndexes().toArray(); + expect(indexes.some((idx) => (idx['key'] as Record)?.['email'] === 1)).toBe( + true, + ); + + const step2 = [setValidation('users', { required: ['email', 'name'] })]; + + const serialized2 = JSON.parse(serializeMongoOps(step2)); + const controlDriver2 = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new MongoMigrationRunner(); + const result2 = await runner.execute({ + plan: { + targetId: 'mongo', + origin: { storageHash: 'authoring-test' }, + destination: { storageHash: 'authoring-test-v2' }, + operations: serialized2, + }, + driver: controlDriver2, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + expect(result2.ok).toBe(true); + } finally { + await controlDriver2.close(); + } + + const info = await db.listCollections({ name: 'users' }).toArray(); + const options = (info[0] as Record)['options'] as Record; + expect(options['validator']).toEqual({ + $jsonSchema: { required: ['email', 'name'] }, + }); + + const step3 = [ + dropIndex('users', [{ field: 'email', direction: 1 as const }]), + dropCollection('users'), + ]; + + const serialized3 = JSON.parse(serializeMongoOps(step3)); + const controlDriver3 = await mongoControlDriver.create(replSet.getUri(dbName)); + try { + const runner = new MongoMigrationRunner(); + const result3 = await runner.execute({ + plan: { + targetId: 'mongo', + origin: { storageHash: 'authoring-test-v2' }, + destination: { storageHash: 'authoring-test-v3' }, + operations: serialized3, + }, + driver: controlDriver3, + destinationContract: {}, + policy: ALL_POLICY, + frameworkComponents: [], + }); + expect(result3.ok).toBe(true); + } finally { + await controlDriver3.close(); + } + + collections = await db.listCollections({ name: 'users' }).toArray(); + expect(collections).toHaveLength(0); + }); + }); + }, +);