diff --git a/docs/planning/mongo-target/next-steps.md b/docs/planning/mongo-target/next-steps.md index 5e3156ad6..2aef4cd11 100644 --- a/docs/planning/mongo-target/next-steps.md +++ b/docs/planning/mongo-target/next-steps.md @@ -65,7 +65,7 @@ Full details in [projects/mongo-example-apps/framework-limitations.md](../../../ | FL-03 | Timestamp codec type incompatible with `Date`/`string` | Type ergonomics | | FL-04 | No typed `$push`/`$pull`/`$inc` | ORM mutations | | FL-05 | Pipeline/raw results untyped | Query results | -| FL-06 | ObjectId filter requires manual `MongoParamRef` wrapping | ORM queries | +| FL-06 | `where()` does not encode filter values through codecs | ORM queries | | FL-07 | No `$vectorSearch` in pipeline builder | Extension (deferred) | | FL-08 | 1:N back-relation loading not available/tested | ORM queries | | FL-09 | Migration planner creates separate collections for variants | Migration bugs | @@ -110,10 +110,10 @@ Full details in [projects/mongo-example-apps/framework-limitations.md](../../../ **Scope**: - **FL-04**: Implement dot-path field accessor mutations — `$push`, `$pull`, `$inc`, `$set` on nested paths via `u("field.path")` (deferred Phase 1.5 M4). Maps to [ADR 180](../../architecture%20docs/adrs/ADR%20180%20-%20Dot-path%20field%20accessor.md). -- **FL-06**: ORM `where()` should auto-encode ObjectId-typed fields. When a contract field has `codecId: 'mongo/objectId@1'`, the ORM should wrap the value in `MongoParamRef` automatically instead of requiring the user to construct it manually. +- **FL-06**: ORM `where()` should encode filter values through codecs, the same way mutations already do. When a contract field has a `codecId`, the ORM should wrap the value in `MongoParamRef` with that codec automatically. Most visible with ObjectId (string → BSON ObjectId), but applies to any codec with a non-identity `encode`. - **FL-08**: Validate and test 1:N back-relation loading via `include()`. If it works, add test coverage. 
If it doesn't, implement it. -**Proof**: The retail store's `mongoRaw` calls for cart add/remove and order status update are replaced with ORM `update()` calls. ObjectId filter helpers (`objectIdEq()`) are removed. +**Proof**: The retail store's `mongoRaw` calls for cart add/remove and order status update are replaced with ORM `update()` calls. Manual filter helpers (`objectIdEq()`, `rawObjectIdFilter()`) are removed — `where({ userId })` encodes values through codecs automatically. **Depends on**: Area 1 (type fixes reduce noise, but not a hard blocker). diff --git a/examples/retail-store/src/data/carts.ts b/examples/retail-store/src/data/carts.ts index 120bac1cf..1e03953a0 100644 --- a/examples/retail-store/src/data/carts.ts +++ b/examples/retail-store/src/data/carts.ts @@ -1,64 +1,32 @@ +import type { CartItemInput } from '../contract'; import type { Db } from '../db'; -import { executeRaw } from './execute-raw'; -import { objectIdEq, rawObjectIdFilter } from './object-id-filter'; export function getCartByUserId(db: Db, userId: string) { - return db.orm.carts.where(objectIdEq('userId', userId)).first(); + return db.orm.carts.where({ userId }).first(); } export function getCartWithUser(db: Db, userId: string) { - return db.orm.carts.include('user').where(objectIdEq('userId', userId)).first(); + return db.orm.carts.include('user').where({ userId }).first(); } -export function upsertCart( - db: Db, - userId: string, - items: ReadonlyArray<{ - productId: string; - name: string; - brand: string; - amount: number; - price: { amount: number; currency: string }; - image: { url: string }; - }>, -) { - return db.orm.carts.where(objectIdEq('userId', userId)).upsert({ +export function upsertCart(db: Db, userId: string, items: ReadonlyArray<CartItemInput>) { + return db.orm.carts.where({ userId }).upsert({ create: { userId, items: [...items] }, update: { items: [...items] }, }); } export function clearCart(db: Db, userId: string) { - return db.orm.carts.where(objectIdEq('userId', 
userId)).update({ items: [] }); + return db.orm.carts.where({ userId }).update({ items: [] }); } -export async function addToCart( - db: Db, - userId: string, - item: { - productId: string; - name: string; - brand: string; - amount: number; - price: { amount: number; currency: string }; - image: { url: string }; - }, -) { - const plan = db.raw - .collection('carts') - .findOneAndUpdate( - rawObjectIdFilter('userId', userId), - { $push: { items: item }, $setOnInsert: rawObjectIdFilter('userId', userId) }, - { upsert: true }, - ) - .build(); - await executeRaw(db, plan); +export function addToCart(db: Db, userId: string, item: CartItemInput) { + return db.orm.carts.where({ userId }).upsert({ + create: { userId, items: [item] }, + update: (u) => [u.items.push(item)], + }); } -export async function removeFromCart(db: Db, userId: string, productId: string) { - const plan = db.raw - .collection('carts') - .updateOne(rawObjectIdFilter('userId', userId), { $pull: { items: { productId } } }) - .build(); - await executeRaw(db, plan); +export function removeFromCart(db: Db, userId: string, productId: string) { + return db.orm.carts.where({ userId }).update((u) => [u.items.pull({ productId })]); } diff --git a/examples/retail-store/src/data/events.ts b/examples/retail-store/src/data/events.ts index ea1d81908..333764ec3 100644 --- a/examples/retail-store/src/data/events.ts +++ b/examples/retail-store/src/data/events.ts @@ -1,19 +1,14 @@ import { acc } from '@prisma-next/mongo-pipeline-builder'; import { MongoFieldFilter } from '@prisma-next/mongo-query-ast/execution'; +import type { FieldInputTypes } from '../contract'; import type { Db } from '../db'; import { collectResults } from './execute-raw'; +type EventBase = Omit; + export function createViewProductEvent( db: Db, - event: { - userId: string; - sessionId: string; - timestamp: Date; - productId: string; - subCategory: string; - brand: string; - exitMethod?: string | null; - }, + event: EventBase & 
FieldInputTypes['ViewProductEvent'], ) { return db.orm.events.variant('ViewProductEvent').create({ userId: event.userId, @@ -22,19 +17,11 @@ export function createViewProductEvent( productId: event.productId, subCategory: event.subCategory, brand: event.brand, - exitMethod: event.exitMethod ?? null, + exitMethod: event.exitMethod, }); } -export function createSearchEvent( - db: Db, - event: { - userId: string; - sessionId: string; - timestamp: Date; - query: string; - }, -) { +export function createSearchEvent(db: Db, event: EventBase & FieldInputTypes['SearchEvent']) { return db.orm.events.variant('SearchEvent').create({ userId: event.userId, sessionId: event.sessionId, @@ -43,16 +30,7 @@ export function createSearchEvent( }); } -export function createAddToCartEvent( - db: Db, - event: { - userId: string; - sessionId: string; - timestamp: Date; - productId: string; - brand: string; - }, -) { +export function createAddToCartEvent(db: Db, event: EventBase & FieldInputTypes['AddToCartEvent']) { return db.orm.events.variant('AddToCartEvent').create({ userId: event.userId, sessionId: event.sessionId, diff --git a/examples/retail-store/src/data/invoices.ts b/examples/retail-store/src/data/invoices.ts index c6350a93a..8bf6a9a04 100644 --- a/examples/retail-store/src/data/invoices.ts +++ b/examples/retail-store/src/data/invoices.ts @@ -1,24 +1,19 @@ +import type { InvoiceLineItemInput } from '../contract'; import type { Db } from '../db'; -import { objectIdEq } from './object-id-filter'; export function findInvoiceById(db: Db, id: string) { - return db.orm.invoices.where(objectIdEq('_id', id)).first(); + return db.orm.invoices.where({ _id: id }).first(); } export function findInvoiceWithOrder(db: Db, id: string) { - return db.orm.invoices.include('order').where(objectIdEq('_id', id)).first(); + return db.orm.invoices.include('order').where({ _id: id }).first(); } export function createInvoice( db: Db, invoice: { orderId: string; - items: ReadonlyArray<{ - name: string; - 
amount: number; - unitPrice: number; - lineTotal: number; - }>; + items: ReadonlyArray<InvoiceLineItemInput>; subtotal: number; tax: number; total: number; diff --git a/examples/retail-store/src/data/object-id-filter.ts b/examples/retail-store/src/data/object-id-filter.ts deleted file mode 100644 index 887bc8469..000000000 --- a/examples/retail-store/src/data/object-id-filter.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { MongoFieldFilter } from '@prisma-next/mongo-query-ast/execution'; -import { MongoParamRef } from '@prisma-next/mongo-value'; -import { ObjectId } from 'mongodb'; - -function toObjectId(id: string | ObjectId): ObjectId { - if (id instanceof ObjectId) return id; - if (!ObjectId.isValid(id)) throw new Error(`Invalid ObjectId: ${id}`); - return new ObjectId(id); -} - -export function objectIdEq(field: string, id: string | ObjectId): MongoFieldFilter { - return MongoFieldFilter.eq(field, new MongoParamRef(toObjectId(id))); -} - -export function rawObjectIdFilter(field: string, id: string | ObjectId): Record<string, ObjectId> { - return { [field]: toObjectId(id) }; -} diff --git a/examples/retail-store/src/data/orders.ts b/examples/retail-store/src/data/orders.ts index a05d93d53..b294e11b2 100644 --- a/examples/retail-store/src/data/orders.ts +++ b/examples/retail-store/src/data/orders.ts @@ -1,34 +1,26 @@ +import type { OrderLineItemInput, StatusEntryInput } from '../contract'; import type { Db } from '../db'; -import { collectFirstResult } from './execute-raw'; -import { objectIdEq, rawObjectIdFilter } from './object-id-filter'; export function getUserOrders(db: Db, userId: string) { - return db.orm.orders.where(objectIdEq('userId', userId)).all(); + return db.orm.orders.where({ userId }).all(); } export function getOrderById(db: Db, id: string) { - return db.orm.orders.where(objectIdEq('_id', id)).first(); + return db.orm.orders.where({ _id: id }).first(); } export function getOrderWithUser(db: Db, id: string) { - return db.orm.orders.include('user').where(objectIdEq('_id', id)).first(); + 
return db.orm.orders.include('user').where({ _id: id }).first(); } export function createOrder( db: Db, order: { userId: string; - items: ReadonlyArray<{ - productId: string; - name: string; - brand: string; - amount: number; - price: { amount: number; currency: string }; - image: { url: string }; - }>; + items: ReadonlyArray<OrderLineItemInput>; shippingAddress: string; type: string; - statusHistory: ReadonlyArray<{ status: string; timestamp: Date }>; + statusHistory: ReadonlyArray<StatusEntryInput>; }, ) { return db.orm.orders.create({ @@ -41,17 +33,9 @@ } export function deleteOrder(db: Db, id: string) { - return db.orm.orders.where(objectIdEq('_id', id)).delete(); + return db.orm.orders.where({ _id: id }).delete(); } -export async function updateOrderStatus( - db: Db, - orderId: string, - entry: { status: string; timestamp: Date }, -) { - const plan = db.raw - .collection('orders') - .findOneAndUpdate(rawObjectIdFilter('_id', orderId), { $push: { statusHistory: entry } }) - .build(); - return collectFirstResult<Record<string, unknown>>(db, plan); +export function updateOrderStatus(db: Db, orderId: string, entry: StatusEntryInput) { + return db.orm.orders.where({ _id: orderId }).update((u) => [u.statusHistory.push(entry)]); } diff --git a/examples/retail-store/src/data/products.ts b/examples/retail-store/src/data/products.ts index 7105cc02d..5e2edb8a1 100644 --- a/examples/retail-store/src/data/products.ts +++ b/examples/retail-store/src/data/products.ts @@ -3,7 +3,6 @@ import { MongoParamRef } from '@prisma-next/mongo-value'; import type { FieldOutputTypes } from '../contract'; import type { Db } from '../db'; import { collectResults } from './execute-raw'; -import { objectIdEq } from './object-id-filter'; type Product = FieldOutputTypes['Product']; @@ -21,7 +20,7 @@ export async function findProductsPaginated( } export function findProductById(db: Db, id: string) { - return db.orm.products.where(objectIdEq('_id', id)).first(); + return db.orm.products.where({ _id: id }).first(); } function 
escapeRegex(str: string) { diff --git a/examples/retail-store/src/data/users.ts b/examples/retail-store/src/data/users.ts index 4e5ee44ae..58804beb9 100644 --- a/examples/retail-store/src/data/users.ts +++ b/examples/retail-store/src/data/users.ts @@ -1,14 +1,14 @@ +import type { FieldInputTypes } from '../contract'; import type { Db } from '../db'; -import { objectIdEq } from './object-id-filter'; export function findUsers(db: Db) { return db.orm.users.all(); } export function findUserById(db: Db, id: string) { - return db.orm.users.where(objectIdEq('_id', id)).first(); + return db.orm.users.where({ _id: id }).first(); } -export function createUser(db: Db, data: { name: string; email: string; address: null }) { +export function createUser(db: Db, data: Omit<FieldInputTypes['User'], '_id'>) { return db.orm.users.create(data); } diff --git a/examples/retail-store/src/seed.ts b/examples/retail-store/src/seed.ts index 5c3593d72..82fdfd2fd 100644 --- a/examples/retail-store/src/seed.ts +++ b/examples/retail-store/src/seed.ts @@ -366,6 +366,7 @@ export async function seed(db: Db) { productId: p0._id, subCategory: 'Topwear', brand: 'Heritage', + exitMethod: null, }); await createAddToCartEvent(db, { diff --git a/examples/retail-store/test/aggregation.test.ts b/examples/retail-store/test/aggregation.test.ts index 30c49ab57..975bde3ba 100644 --- a/examples/retail-store/test/aggregation.test.ts +++ b/examples/retail-store/test/aggregation.test.ts @@ -21,6 +21,7 @@ describe('aggregation pipelines', { timeout: timeouts.spinUpMongoMemoryServer }, productId: `prod-${i}`, subCategory: 'Topwear', brand: 'TestBrand', + exitMethod: null, }); } for (let i = 0; i < 2; i++) { diff --git a/examples/retail-store/test/crud-lifecycle.test.ts b/examples/retail-store/test/crud-lifecycle.test.ts index aa7e39445..0cd3bf0f0 100644 --- a/examples/retail-store/test/crud-lifecycle.test.ts +++ b/examples/retail-store/test/crud-lifecycle.test.ts @@ -243,6 +243,7 @@ describe('CRUD lifecycle', { timeout: 
timeouts.spinUpMongoMemoryServer }, () => productId: 'prod-1', subCategory: 'Topwear', brand: 'TestBrand', + exitMethod: null, }); const events = await findEventsByUser(ctx.db, 'user-1'); diff --git a/examples/retail-store/test/order-lifecycle.test.ts b/examples/retail-store/test/order-lifecycle.test.ts index 8eceb7312..56d68b2bf 100644 --- a/examples/retail-store/test/order-lifecycle.test.ts +++ b/examples/retail-store/test/order-lifecycle.test.ts @@ -80,7 +80,7 @@ describe('order lifecycle (integration)', { timeout: timeouts.spinUpMongoMemoryS expect(delivered).not.toBeNull(); expect(delivered!['statusHistory']).toHaveLength(3); - const statuses = (delivered!['statusHistory'] as Array<{ status: string }>).map( + const statuses = (delivered!['statusHistory'] as ReadonlyArray<{ status: string }>).map( (s) => s.status, ); expect(statuses).toEqual(['placed', 'shipped', 'delivered']); diff --git a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts index b1d3e3dce..bb2b64e34 100644 --- a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts +++ b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts @@ -34,7 +34,8 @@ type ContractBase = { readonly extensionPacks: { }; readonly meta: { }; readonly roots: { readonly tasks: 'Task'; readonly users: 'User' }; - readonly models: { readonly Task: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly title: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly type: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly assigneeId: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly 
nullable: false } }; readonly relations: { readonly assignee: { readonly to: 'User'; readonly cardinality: 'N:1'; readonly on: { readonly localFields: readonly ['assigneeId']; readonly targetFields: readonly ['_id'] } }; readonly comments: { readonly to: 'Comment'; readonly cardinality: '1:N' } }; readonly storage: { readonly collection: 'tasks'; readonly relations: { readonly comments: { readonly field: 'comments' } } }; readonly discriminator: { readonly field: 'type' }; readonly variants: { readonly Bug: { readonly value: 'bug' }; readonly Feature: { readonly value: 'feature' } } }; readonly Bug: { readonly fields: { readonly severity: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly Feature: { readonly fields: { readonly priority: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly targetRelease: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly User: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly name: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly email: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: { readonly addresses: { readonly to: 'Address'; readonly cardinality: '1:N' } }; readonly storage: { readonly collection: 'users'; readonly relations: { readonly addresses: { readonly field: 'addresses' } } } }; readonly Address: { readonly fields: { readonly street: { readonly type: { 
readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly city: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly zip: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'User' }; readonly Comment: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly text: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly createdAt: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/date@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'Task' } }; + readonly models: { readonly Task: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly title: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly type: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly assigneeId: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false } }; readonly relations: { readonly assignee: { readonly to: 'User'; readonly cardinality: 'N:1'; readonly on: { readonly localFields: readonly ['assigneeId']; readonly targetFields: readonly ['_id'] } }; readonly comments: { readonly to: 'Comment'; readonly cardinality: '1:N' } }; readonly storage: { readonly collection: 'tasks'; readonly relations: { readonly comments: { readonly field: 'comments' } } }; readonly discriminator: { readonly field: 'type' }; readonly variants: { readonly Bug: { readonly 
value: 'bug' }; readonly Feature: { readonly value: 'feature' } } }; readonly Bug: { readonly fields: { readonly severity: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly Feature: { readonly fields: { readonly priority: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly targetRelease: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly User: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly name: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly email: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly loginCount: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/int32@1' }; readonly nullable: false }; readonly tags: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false; readonly many: true }; readonly homeAddress: { readonly type: { readonly kind: 'valueObject'; readonly name: 'HomeAddress' }; readonly nullable: true } }; readonly relations: { readonly addresses: { readonly to: 'Address'; readonly cardinality: '1:N' }; readonly tasks: { readonly to: 'Task'; readonly cardinality: '1:N'; readonly on: { readonly localFields: readonly ['_id']; readonly targetFields: readonly ['assigneeId'] } } }; readonly storage: { readonly collection: 'users'; readonly relations: { readonly addresses: { readonly field: 'addresses' } } } }; readonly Address: { 
readonly fields: { readonly street: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly city: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly zip: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'User' }; readonly Comment: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly text: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly createdAt: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/date@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'Task' } }; + readonly valueObjects: { readonly HomeAddress: { readonly fields: { readonly city: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly country: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } } } }; readonly storage: { readonly collections: { readonly tasks: Record; readonly users: Record }; readonly storageHash: StorageHash }; }; diff --git a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json index a1022cc98..dcfabf6d0 100644 --- a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json +++ b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json @@ -77,12 +77,33 @@ "fields": { "_id": { "type": { "kind": "scalar", "codecId": "mongo/objectId@1" }, "nullable": false }, "name": { "type": { "kind": "scalar", 
"codecId": "mongo/string@1" }, "nullable": false }, - "email": { "type": { "kind": "scalar", "codecId": "mongo/string@1" }, "nullable": false } + "email": { "type": { "kind": "scalar", "codecId": "mongo/string@1" }, "nullable": false }, + "loginCount": { + "type": { "kind": "scalar", "codecId": "mongo/int32@1" }, + "nullable": false + }, + "tags": { + "type": { "kind": "scalar", "codecId": "mongo/string@1" }, + "nullable": false, + "many": true + }, + "homeAddress": { + "type": { "kind": "valueObject", "name": "HomeAddress" }, + "nullable": true + } }, "relations": { "addresses": { "to": "Address", "cardinality": "1:N" + }, + "tasks": { + "to": "Task", + "cardinality": "1:N", + "on": { + "localFields": ["_id"], + "targetFields": ["assigneeId"] + } } } }, @@ -106,5 +127,13 @@ "relations": {}, "owner": "Task" } + }, + "valueObjects": { + "HomeAddress": { + "fields": { + "city": { "type": { "kind": "scalar", "codecId": "mongo/string@1" }, "nullable": false }, + "country": { "type": { "kind": "scalar", "codecId": "mongo/string@1" }, "nullable": false } + } + } } } diff --git a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts index 6105d8277..60f6ad241 100644 --- a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts +++ b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts @@ -28,6 +28,7 @@ export { } from '../commands'; export type { MongoFilterExpr } from '../filter-expressions'; export { + isMongoFilterExpr, MongoAndExpr, MongoExistsExpr, MongoExprFilter, diff --git a/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts b/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts index 742f386bc..d8428b569 100644 --- a/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts +++ b/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts @@ -3,6 +3,12 @@ import type { MongoAggExpr } from 
'./aggregation-expressions'; import { MongoAstNode } from './ast-node'; import type { MongoFilterRewriter, MongoFilterVisitor } from './visitors'; +const FILTER_EXPR_BRAND = '__prismaNextMongoFilter__'; + +export function isMongoFilterExpr(value: unknown): value is MongoFilterExpr { + return typeof value === 'object' && value !== null && FILTER_EXPR_BRAND in value; +} + abstract class MongoFilterExpression extends MongoAstNode { abstract accept(visitor: MongoFilterVisitor): R; abstract rewrite(rewriter: MongoFilterRewriter): MongoFilterExpr; @@ -12,6 +18,13 @@ abstract class MongoFilterExpression extends MongoAstNode { } } +Object.defineProperty(MongoFilterExpression.prototype, FILTER_EXPR_BRAND, { + value: true, + writable: false, + enumerable: false, + configurable: false, +}); + export class MongoFieldFilter extends MongoFilterExpression { readonly kind = 'field' as const; readonly field: string; diff --git a/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts b/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts index 638588298..d4030fdd8 100644 --- a/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts +++ b/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts @@ -2,6 +2,7 @@ import { describe, expect, it } from 'vitest'; import { MongoAggFieldRef, MongoAggOperator } from '../src/aggregation-expressions'; import type { MongoFilterExpr } from '../src/filter-expressions'; import { + isMongoFilterExpr, MongoAndExpr, MongoExistsExpr, MongoExprFilter, @@ -264,6 +265,45 @@ describe('MongoFilterRewriter', () => { }); }); +describe('isMongoFilterExpr', () => { + it('returns true for all filter expression types', () => { + const field = MongoFieldFilter.eq('x', 1); + const and = MongoAndExpr.of([field]); + const or = MongoOrExpr.of([field]); + const not = new MongoNotExpr(field); + const exists = MongoExistsExpr.exists('x'); + const expr = MongoExprFilter.of(MongoAggFieldRef.of('x')); 
+ + for (const node of [field, and, or, not, exists, expr]) { + expect(isMongoFilterExpr(node)).toBe(true); + } + }); + + it('returns false for plain objects with a kind property', () => { + const impersonator = { kind: 'field', field: 'x', op: '$eq', value: 1 }; + expect(isMongoFilterExpr(impersonator)).toBe(false); + }); + + it('returns false for null and primitives', () => { + expect(isMongoFilterExpr(null)).toBe(false); + expect(isMongoFilterExpr(undefined)).toBe(false); + expect(isMongoFilterExpr(42)).toBe(false); + }); + + it('brand property is non-enumerable', () => { + const field = MongoFieldFilter.eq('x', 1); + const brandKey = '__prismaNextMongoFilter__'; + expect(Object.keys(field)).not.toContain(brandKey); + expect(JSON.parse(JSON.stringify(field))).not.toHaveProperty(brandKey); + }); + + it('survives dual-package scenario (string-based lookup)', () => { + const field = MongoFieldFilter.eq('x', 1); + const brandKey = '__prismaNextMongoFilter__'; + expect(brandKey in field).toBe(true); + }); +}); + describe('composite nesting', () => { it('supports $and containing $or and $not', () => { const expr: MongoFilterExpr = MongoAndExpr.of([ diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 171d9fe02..ba1e08a9e 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -21,6 +21,7 @@ import { FindOneAndUpdateCommand, InsertManyCommand, InsertOneCommand, + isMongoFilterExpr, MongoAndExpr, MongoFieldFilter, UpdateManyCommand, @@ -32,10 +33,17 @@ import type { MongoIncludeExpr } from './collection-state'; import { emptyCollectionState, type MongoCollectionState } from './collection-state'; import { compileMongoQuery } from './compile'; import type { MongoQueryExecutor } from './executor'; +import { + compileFieldOperations, + createFieldAccessor, + type FieldAccessor, + type 
FieldOperation, +} from './field-accessor'; import type { DefaultModelRow, IncludedRow, MongoIncludeSpec, + MongoWhereFilter, NoIncludes, ReferenceRelationKeys, ResolvedCreateInput, @@ -57,7 +65,11 @@ export interface MongoCollection< variant>( variantName: V, ): MongoCollection; - /** Appends a filter condition. Returns a new immutable collection. */ + /** Appends equality filters from a plain object. Values are encoded through codecs. */ + where( + filter: MongoWhereFilter, + ): MongoCollection; + /** Appends a filter condition from a raw filter expression. */ where(filter: MongoFilterExpr): MongoCollection; /** Restricts returned fields to the given subset. Returns a new immutable collection. */ select( @@ -95,12 +107,24 @@ export interface MongoCollection< update( data: Partial>, ): Promise | null>; + /** Updates one matching document using field operations from a callback. Requires `.where()`. */ + update( + callback: (u: FieldAccessor) => FieldOperation[], + ): Promise | null>; /** Non-atomic: captures matching `_id`s, updates, then re-reads by `_id`. Requires `.where()`. */ updateAll( data: Partial>, ): AsyncIterableResult>; + /** Updates all matching documents using field operations from a callback. Requires `.where()`. */ + updateAll( + callback: (u: FieldAccessor) => FieldOperation[], + ): AsyncIterableResult>; /** Updates all matching documents and returns the number modified. Requires `.where()`. */ updateCount(data: Partial>): Promise; + /** Updates all matching documents using field operations and returns the number modified. Requires `.where()`. */ + updateCount( + callback: (u: FieldAccessor) => FieldOperation[], + ): Promise; /** Deletes one matching document via `findOneAndDelete`. Returns the deleted document or `null`. Requires `.where()`. */ delete(): Promise | null>; /** Non-atomic: reads matching docs then deletes them. Concurrent writes may cause stale results. Requires `.where()`. 
*/ @@ -116,6 +140,11 @@ export interface MongoCollection< create: ResolvedCreateInput; update: Partial>; }): Promise>; + /** Upsert using field operations callback for the update part. Requires `.where()`. */ + upsert(input: { + create: ResolvedCreateInput; + update: (u: FieldAccessor) => FieldOperation[]; + }): Promise>; } function resolveCollectionName(model: MongoModelDefinition, modelName: string): string { @@ -171,10 +200,14 @@ class MongoCollectionImpl< ); } - where(filter: MongoFilterExpr): MongoCollection { - return this.#clone({ - filters: [...this.#state.filters, filter], - }); + where( + filter: MongoWhereFilter | MongoFilterExpr, + ): MongoCollection { + if (isMongoFilterExpr(filter)) { + return this.#clone({ filters: [...this.#state.filters, filter] }); + } + const compiled = this.#compileWhereObject(filter as Record); + return this.#clone({ filters: [...this.#state.filters, ...compiled] }); } select( @@ -316,20 +349,24 @@ class MongoCollectionImpl< } async update( - data: Partial>, + dataOrCallback: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]), ): Promise | null> { this.#requireFilters('update'); this.#rejectWindowing('update'); this.#rejectIncludes('update'); const filter = this.#mergeFilters(); - const updateDoc = this.#toUpdateDocument(data as Record); + const updateDoc = this.#resolveUpdateDoc(dataOrCallback); const command = new FindOneAndUpdateCommand(this.#collectionName, filter, updateDoc, false); const results = await this.#drainPlan(command); return (results[0] as IncludedRow) ?? 
null; } updateAll( - data: Partial>, + dataOrCallback: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]), ): AsyncIterableResult> { this.#requireFilters('updateAll'); this.#rejectWindowing('updateAll'); @@ -339,7 +376,7 @@ class MongoCollectionImpl< if (ids.length === 0) return; const filter = self.#mergeFilters(); - const updateDoc = self.#toUpdateDocument(data as Record); + const updateDoc = self.#resolveUpdateDoc(dataOrCallback); const command = new UpdateManyCommand(self.#collectionName, filter, updateDoc); await self.#drainPlan(command); @@ -352,12 +389,16 @@ class MongoCollectionImpl< return new AsyncIterableResult(gen()); } - async updateCount(data: Partial>): Promise { + async updateCount( + dataOrCallback: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]), + ): Promise { this.#requireFilters('updateCount'); this.#rejectWindowing('updateCount'); this.#rejectIncludes('updateCount'); const filter = this.#mergeFilters(); - const updateDoc = this.#toUpdateDocument(data as Record); + const updateDoc = this.#resolveUpdateDoc(dataOrCallback); const command = new UpdateManyCommand(this.#collectionName, filter, updateDoc); const results = await this.#drainPlan(command); return (results[0] as { modifiedCount: number }).modifiedCount; @@ -402,30 +443,62 @@ class MongoCollectionImpl< async upsert(input: { create: ResolvedCreateInput; - update: Partial>; + update: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]); }): Promise> { this.#requireFilters('upsert'); this.#rejectWindowing('upsert'); this.#rejectIncludes('upsert'); const filter = this.#mergeFilters(); - const setFields = this.#toSetFields(input.update as Record); + const allCreateFields = this.#toDocument( this.#injectDiscriminator(input.create as Record), ); - const setKeys = new Set(Object.keys(setFields)); + + let updateDoc: Record>; + if (typeof input.update === 'function') { + const accessor = createFieldAccessor(); + const ops = input.update(accessor); + const idOp = ops.find((op) 
=> op.field === '_id'); + if (idOp) { + throw new Error('Mutation payloads cannot modify `_id`'); + } + const dotPathOp = ops.find((op) => op.field.includes('.')); + if (dotPathOp) { + throw new Error( + `upsert() does not support dot-path field operations (found "${dotPathOp.field}"). ` + + 'Dot-path updates conflict with $setOnInsert on the insert path, producing incomplete documents. ' + + 'Use top-level field operations instead.', + ); + } + updateDoc = compileFieldOperations(ops, (field, value, operator) => + this.#wrapFieldOpValue(field, value, operator), + ); + } else { + const setFields = this.#toSetFields(input.update as Record); + updateDoc = {}; + if (Object.keys(setFields).length > 0) { + updateDoc['$set'] = setFields; + } + } + + const updatedFields = new Set(); + for (const operatorGroup of Object.values(updateDoc)) { + for (const fieldPath of Object.keys(operatorGroup)) { + updatedFields.add(fieldPath.split('.')[0] ?? fieldPath); + } + } const insertOnlyFields: Record = {}; for (const [key, value] of Object.entries(allCreateFields)) { - if (!setKeys.has(key)) { + if (!updatedFields.has(key)) { insertOnlyFields[key] = value; } } - const updateDoc: Record = {}; - if (Object.keys(setFields).length > 0) { - updateDoc['$set'] = setFields; - } if (Object.keys(insertOnlyFields).length > 0) { updateDoc['$setOnInsert'] = insertOnlyFields; } + const command = new FindOneAndUpdateCommand(this.#collectionName, filter, updateDoc, true); const results = await this.#drainPlan(command); return results[0] as IncludedRow; @@ -478,6 +551,17 @@ class MongoCollectionImpl< return model?.fields ?? 
{}; } + #compileWhereObject(data: Record): MongoFilterExpr[] { + const fields = this.#modelFields(); + const filters: MongoFilterExpr[] = []; + for (const [key, value] of Object.entries(data)) { + if (value === undefined) continue; + const wrapped = this.#wrapFieldValue(value, fields[key]); + filters.push(MongoFieldFilter.eq(key, wrapped)); + } + return filters; + } + #wrapFieldValue(value: unknown, field: ContractField | undefined): MongoValue { if (field === undefined) return new MongoParamRef(value); @@ -550,10 +634,97 @@ class MongoCollectionImpl< return result; } - #toUpdateDocument(data: Record): Record { + #toUpdateDocument(data: Record): Record> { return { $set: this.#toSetFields(data) }; } + #resolveUpdateDoc( + dataOrCallback: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]), + ): Record> { + if (typeof dataOrCallback === 'function') { + const accessor = createFieldAccessor(); + const ops = dataOrCallback(accessor); + const idOp = ops.find((op) => op.field === '_id'); + if (idOp) { + throw new Error('Mutation payloads cannot modify `_id`'); + } + if (ops.length === 0) { + return { $set: {} }; + } + return compileFieldOperations(ops, (field, value, operator) => + this.#wrapFieldOpValue(field, value, operator), + ); + } + return this.#toUpdateDocument(dataOrCallback as Record); + } + + #wrapFieldOpValue(field: string, value: MongoValue, operator?: string): MongoValue { + if (operator === '$unset') return value; + + const topLevelField = field.split('.')[0] ?? 
field; + const fields = this.#modelFields(); + const contractField = fields[topLevelField]; + if (!contractField) return value; + + if (field.includes('.')) { + return this.#wrapDotPathValue(field, value); + } + + if (value instanceof MongoParamRef && contractField.type.kind === 'scalar') { + return new MongoParamRef(value.value, { codecId: contractField.type.codecId }); + } + + if (contractField.type.kind === 'valueObject' && value instanceof MongoParamRef) { + const raw = value.value; + if (typeof raw === 'object' && raw !== null && !Array.isArray(raw)) { + const voName = contractField.type.name; + const voDef = (this.#contract as { valueObjects?: Record }) + .valueObjects?.[voName]; + if (voDef) { + return this.#wrapValueObject(raw as Record, voDef); + } + } + } + + return value; + } + + #wrapDotPathValue(dotPath: string, value: MongoValue): MongoValue { + const parts = dotPath.split('.'); + const fields = this.#modelFields(); + let currentField: ContractField | undefined = parts[0] ? fields[parts[0]] : undefined; + + for (let i = 1; i < parts.length; i++) { + if (!currentField || currentField.type.kind !== 'valueObject') return value; + const voName = currentField.type.name; + const voDef = (this.#contract as { valueObjects?: Record }) + .valueObjects?.[voName]; + if (!voDef) return value; + const partKey = parts[i]; + currentField = partKey ? 
voDef.fields[partKey] : undefined; + } + + if (currentField?.type.kind === 'scalar' && value instanceof MongoParamRef) { + return new MongoParamRef(value.value, { codecId: currentField.type.codecId }); + } + + if (currentField?.type.kind === 'valueObject' && value instanceof MongoParamRef) { + const raw = value.value; + if (typeof raw === 'object' && raw !== null && !Array.isArray(raw)) { + const voName = currentField.type.name; + const voDef = (this.#contract as { valueObjects?: Record }) + .valueObjects?.[voName]; + if (voDef) { + return this.#wrapValueObject(raw as Record, voDef); + } + } + } + + return value; + } + #mergeFilters(): MongoFilterExpr { const [single] = this.#state.filters; if (this.#state.filters.length === 1 && single) { diff --git a/packages/2-mongo-family/5-query-builders/orm/src/exports/index.ts b/packages/2-mongo-family/5-query-builders/orm/src/exports/index.ts index 31aebab46..e4d287db3 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/exports/index.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/exports/index.ts @@ -2,6 +2,15 @@ export type { MongoCollection } from '../collection'; export { createMongoCollection } from '../collection'; export { compileMongoQuery } from '../compile'; export type { MongoQueryExecutor } from '../executor'; +export type { + DotPath, + FieldAccessor, + FieldExpression, + FieldOperation, + ResolveDotPathType, + UpdateOperator, +} from '../field-accessor'; +export { createFieldAccessor } from '../field-accessor'; export type { MongoOrmClient, MongoOrmOptions } from '../mongo-orm'; export { mongoOrm } from '../mongo-orm'; export type { MongoRawClient } from '../mongo-raw'; diff --git a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts new file mode 100644 index 000000000..f7b9bd3c7 --- /dev/null +++ b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts @@ -0,0 +1,265 @@ +import type { 
ContractField, ContractValueObject } from '@prisma-next/contract/types'; +import type { + ExtractMongoCodecTypes, + ExtractMongoFieldOutputTypes, + InferModelRow, + MongoContract, + MongoContractWithTypeMaps, + MongoTypeMaps, +} from '@prisma-next/mongo-contract'; +import type { MongoValue } from '@prisma-next/mongo-value'; +import { MongoParamRef } from '@prisma-next/mongo-value'; + +// ── Runtime types ──────────────────────────────────────────────────────────── + +export type UpdateOperator = + | '$set' + | '$unset' + | '$inc' + | '$mul' + | '$push' + | '$pull' + | '$addToSet' + | '$pop'; + +export interface FieldOperation { + readonly operator: UpdateOperator; + readonly field: string; + readonly value: MongoValue; +} + +// ── Compile-time types ─────────────────────────────────────────────────────── + +type ScalarFieldKeys< + TContract extends MongoContract, + ModelName extends string & keyof TContract['models'], +> = { + [K in keyof TContract['models'][ModelName]['fields'] & + string]: TContract['models'][ModelName]['fields'][K] extends { + readonly type: { readonly kind: 'scalar' }; + } + ? K + : never; +}[keyof TContract['models'][ModelName]['fields'] & string]; + +type ValueObjectFieldKeys< + TContract extends MongoContract, + ModelName extends string & keyof TContract['models'], +> = { + [K in keyof TContract['models'][ModelName]['fields'] & + string]: TContract['models'][ModelName]['fields'][K] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: string }; + } + ? K + : never; +}[keyof TContract['models'][ModelName]['fields'] & string]; + +type ResolvedModelRow< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], +> = string extends keyof ExtractMongoFieldOutputTypes + ? InferModelRow + : ModelName extends keyof ExtractMongoFieldOutputTypes + ? 
{ + -readonly [K in keyof ExtractMongoFieldOutputTypes[ModelName]]: ExtractMongoFieldOutputTypes[ModelName][K]; + } + : InferModelRow; + +type ResolveFieldType< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], + K extends keyof TContract['models'][ModelName]['fields'] & string, +> = K extends keyof ResolvedModelRow + ? ResolvedModelRow[K] + : unknown; + +type NumericOps = { + inc(value: number): FieldOperation; + mul(value: number): FieldOperation; +}; + +export type FieldExpression = { + set(value: T): FieldOperation; + unset(): FieldOperation; + push(value: T extends readonly (infer E)[] ? E : unknown): FieldOperation; + pull(match: T extends readonly (infer E)[] ? E | Partial : unknown): FieldOperation; + addToSet(value: T extends readonly (infer E)[] ? E : unknown): FieldOperation; + pop(end: 1 | -1): FieldOperation; +} & (T extends number ? NumericOps : unknown); + +type HasValueObjects = { readonly valueObjects?: Record }; + +type VOFields = TContract extends { + readonly valueObjects: infer VOs extends Record; +} + ? VOName extends keyof VOs + ? VOs[VOName]['fields'] + : never + : never; + +type VOScalarFieldKeys> = { + [K in keyof Fields & string]: Fields[K] extends { readonly type: { readonly kind: 'scalar' } } + ? K + : never; +}[keyof Fields & string]; + +type VOValueObjectFieldKeys> = { + [K in keyof Fields & string]: Fields[K] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: string }; + } + ? K + : never; +}[keyof Fields & string]; + +type VODotPaths< + TContract extends HasValueObjects, + Fields extends Record, + Prefix extends string, +> = + | { [K in VOScalarFieldKeys]: `${Prefix}${K}` }[VOScalarFieldKeys] + | { + [K in VOValueObjectFieldKeys]: Fields[K] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: infer N extends string }; + } + ? 
VODotPaths, `${Prefix}${K}.`> + : never; + }[VOValueObjectFieldKeys]; + +export type DotPath< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], +> = { + [K in ValueObjectFieldKeys< + TContract, + ModelName + >]: TContract['models'][ModelName]['fields'][K] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: infer N extends string }; + } + ? VODotPaths, `${K}.`> + : never; +}[ValueObjectFieldKeys]; + +type ResolveDotPathInFields< + TContract extends HasValueObjects, + Fields extends Record, + Path extends string, + TCodecTypes extends Record, +> = Path extends `${infer Head}.${infer Rest}` + ? Head extends keyof Fields & string + ? Fields[Head] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: infer N extends string }; + } + ? ResolveDotPathInFields, Rest, TCodecTypes> + : never + : never + : Path extends keyof Fields & string + ? Fields[Path] extends { + readonly type: { + readonly kind: 'scalar'; + readonly codecId: infer CId extends string & keyof TCodecTypes; + }; + } + ? TCodecTypes[CId]['output'] + : unknown + : never; + +export type ResolveDotPathType< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], + Path extends string, + TCodecTypes extends Record = ExtractMongoCodecTypes, +> = Path extends `${infer Head}.${infer Rest}` + ? Head extends keyof TContract['models'][ModelName]['fields'] & string + ? TContract['models'][ModelName]['fields'][Head] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: infer N extends string }; + } + ? 
ResolveDotPathInFields, Rest, TCodecTypes> + : never + : never + : never; + +export type FieldAccessor< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], +> = { + readonly [K in ScalarFieldKeys]: FieldExpression< + ResolveFieldType + >; +} & { + readonly [K in ValueObjectFieldKeys]: FieldExpression< + ResolveFieldType + >; +} & (

>( + path: P, + ) => FieldExpression>); + +// ── Runtime implementation ─────────────────────────────────────────────────── + +// Runtime expression has all methods; type-level gating happens via FieldExpression +interface RuntimeFieldExpression extends NumericOps { + set(value: unknown): FieldOperation; + unset(): FieldOperation; + push(value: unknown): FieldOperation; + pull(match: unknown): FieldOperation; + addToSet(value: unknown): FieldOperation; + pop(end: 1 | -1): FieldOperation; +} + +function createFieldExpression(fieldPath: string): RuntimeFieldExpression { + return { + set(value: unknown): FieldOperation { + return { operator: '$set', field: fieldPath, value: new MongoParamRef(value) }; + }, + unset(): FieldOperation { + return { operator: '$unset', field: fieldPath, value: new MongoParamRef('') }; + }, + inc(value: number): FieldOperation { + return { operator: '$inc', field: fieldPath, value: new MongoParamRef(value) }; + }, + mul(value: number): FieldOperation { + return { operator: '$mul', field: fieldPath, value: new MongoParamRef(value) }; + }, + push(value: unknown): FieldOperation { + return { operator: '$push', field: fieldPath, value: new MongoParamRef(value) }; + }, + pull(match: unknown): FieldOperation { + return { operator: '$pull', field: fieldPath, value: new MongoParamRef(match) }; + }, + addToSet(value: unknown): FieldOperation { + return { operator: '$addToSet', field: fieldPath, value: new MongoParamRef(value) }; + }, + pop(end: 1 | -1): FieldOperation { + return { operator: '$pop', field: fieldPath, value: new MongoParamRef(end) }; + }, + }; +} + +export function createFieldAccessor< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], +>(): FieldAccessor { + return new Proxy((() => {}) as unknown as FieldAccessor, { + get(_target, prop: string): RuntimeFieldExpression { + return createFieldExpression(prop); + }, + apply(_target, _thisArg, args: [string]): RuntimeFieldExpression { + 
return createFieldExpression(args[0]); + }, + }); +} + +export function compileFieldOperations( + ops: readonly FieldOperation[], + wrapValue: (field: string, value: MongoValue, operator: UpdateOperator) => MongoValue, +): Record> { + const grouped: Record> = {}; + for (const op of ops) { + let group = grouped[op.operator]; + if (!group) { + group = {}; + grouped[op.operator] = group; + } + group[op.field] = wrapValue(op.field, op.value, op.operator); + } + return grouped; +} diff --git a/packages/2-mongo-family/5-query-builders/orm/src/types.ts b/packages/2-mongo-family/5-query-builders/orm/src/types.ts index b18cfdee7..95e360dde 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/types.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/types.ts @@ -183,7 +183,10 @@ export type MongoWhereFilter< TCodecTypes extends Record = ExtractMongoCodecTypes, > = { readonly [K in keyof TContract['models'][ModelName]['fields']]?: TContract['models'][ModelName]['fields'][K] extends { - readonly codecId: infer CId extends string & keyof TCodecTypes; + readonly type: { + readonly kind: 'scalar'; + readonly codecId: infer CId extends string & keyof TCodecTypes; + }; } ? 
TCodecTypes[CId]['output'] : unknown; diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index 1adfba007..a6351e7c0 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -9,6 +9,7 @@ import { type MongoSkipStage, type MongoSortStage, } from '@prisma-next/mongo-query-ast/execution'; +import type { MongoValue } from '@prisma-next/mongo-value'; import { MongoParamRef } from '@prisma-next/mongo-value'; import { AsyncIterableResult } from '@prisma-next/runtime-executor'; import { describe, expect, it } from 'vitest'; @@ -16,9 +17,23 @@ import type { Contract } from '../../../1-foundation/mongo-contract/test/fixture import ormContractJson from '../../../1-foundation/mongo-contract/test/fixtures/orm-contract.json'; import { createMongoCollection } from '../src/collection'; import type { MongoQueryExecutor } from '../src/executor'; +import { + compileFieldOperations, + createFieldAccessor, + type FieldAccessor, + type FieldOperation, +} from '../src/field-accessor'; const contract = ormContractJson as unknown as Contract; +const defaultUserData = { + name: 'Alice', + email: 'a@b.c', + loginCount: 0, + tags: [] as string[], + homeAddress: null, +}; + function createMockExecutor(...responses: unknown[][]): MongoQueryExecutor & { lastPlan: MongoQueryPlan | undefined; readonly lastCommand: MongoQueryPlan['command'] | undefined; @@ -144,6 +159,205 @@ describe('MongoCollection chaining', () => { }); }); +describe('MongoCollection object-based where()', () => { + it('produces eq filter with string codecId for string field', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor).where({ name: 'Alice' }); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('field'); + 
if (match.filter.kind === 'field') { + expect(match.filter.field).toBe('name'); + expect(match.filter.op).toBe('$eq'); + const ref = match.filter.value as MongoParamRef; + expect(ref).toBeInstanceOf(MongoParamRef); + expect(ref.codecId).toBe('mongo/string@1'); + expect(ref.value).toBe('Alice'); + } + }); + + it('produces eq filter with objectId codecId for ObjectId field', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'Task', executor).where({ + assigneeId: 'abc123', + }); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('field'); + if (match.filter.kind === 'field') { + expect(match.filter.field).toBe('assigneeId'); + expect(match.filter.op).toBe('$eq'); + const ref = match.filter.value as MongoParamRef; + expect(ref).toBeInstanceOf(MongoParamRef); + expect(ref.codecId).toBe('mongo/objectId@1'); + expect(ref.value).toBe('abc123'); + } + }); + + it('produces AND of multiple eq filters for multi-field object', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor).where({ + name: 'Alice', + email: 'a@b.c', + }); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('and'); + if (match.filter.kind === 'and') { + expect(match.filter.exprs).toHaveLength(2); + const first = match.filter.exprs[0]!; + const second = match.filter.exprs[1]!; + expect(first.kind).toBe('field'); + expect(second.kind).toBe('field'); + } + }); + + it('chains with MongoFilterExpr where()', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor) + .where({ name: 'Alice' }) + .where(MongoFieldFilter.gte('email', 'a')); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('and'); + }); + + it('chains MongoFilterExpr where() then object where()', () => { + const executor = 
createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor) + .where(MongoFieldFilter.eq('_id', 'id-1')) + .where({ name: 'Alice' }); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('and'); + }); +}); + +describe('createFieldAccessor()', () => { + it('property access returns FieldExpression for top-level field', () => { + const u = createFieldAccessor(); + const op = u.name.set('Bob'); + expect(op.operator).toBe('$set'); + expect(op.field).toBe('name'); + expect((op.value as MongoParamRef).value).toBe('Bob'); + }); + + it('set() produces $set operation', () => { + const u = createFieldAccessor(); + const op = u.name.set('Alice'); + expect(op.operator).toBe('$set'); + expect(op.field).toBe('name'); + }); + + it('unset() produces $unset operation', () => { + const u = createFieldAccessor(); + const op = u.name.unset(); + expect(op.operator).toBe('$unset'); + expect(op.field).toBe('name'); + }); + + it('inc() produces $inc operation', () => { + const u = createFieldAccessor(); + const op = u.loginCount.inc(1); + expect(op.operator).toBe('$inc'); + expect(op.field).toBe('loginCount'); + expect((op.value as MongoParamRef).value).toBe(1); + }); + + it('mul() produces $mul operation', () => { + const u = createFieldAccessor(); + const op = u.loginCount.mul(2); + expect(op.operator).toBe('$mul'); + expect(op.field).toBe('loginCount'); + expect((op.value as MongoParamRef).value).toBe(2); + }); + + it('push() produces $push operation', () => { + const u = createFieldAccessor(); + const op = u.tags.push('admin'); + expect(op.operator).toBe('$push'); + expect(op.field).toBe('tags'); + expect((op.value as MongoParamRef).value).toBe('admin'); + }); + + it('pull() produces $pull operation', () => { + const u = createFieldAccessor(); + const op = u.tags.pull('admin'); + expect(op.operator).toBe('$pull'); + expect(op.field).toBe('tags'); + expect((op.value as MongoParamRef).value).toBe('admin'); + 
}); + + it('addToSet() produces $addToSet operation', () => { + const u = createFieldAccessor(); + const op = u.tags.addToSet('admin'); + expect(op.operator).toBe('$addToSet'); + expect(op.field).toBe('tags'); + }); + + it('pop() produces $pop operation', () => { + const u = createFieldAccessor(); + const op = u.tags.pop(1); + expect(op.operator).toBe('$pop'); + expect(op.field).toBe('tags'); + expect((op.value as MongoParamRef).value).toBe(1); + }); + + it('call signature returns FieldExpression for dot-path', () => { + const u = createFieldAccessor(); + const op = u('homeAddress.city').set('NYC'); + expect(op.operator).toBe('$set'); + expect(op.field).toBe('homeAddress.city'); + expect((op.value as MongoParamRef).value).toBe('NYC'); + }); +}); + +describe('compileFieldOperations()', () => { + const identity = (_field: string, value: MongoValue) => value; + + it('groups operations by operator', () => { + const ops: FieldOperation[] = [ + { operator: '$set', field: 'name', value: new MongoParamRef('Alice') }, + { operator: '$inc', field: 'loginCount', value: new MongoParamRef(1) }, + { operator: '$set', field: 'email', value: new MongoParamRef('a@b.c') }, + ]; + const result = compileFieldOperations(ops, identity); + expect(result).toEqual({ + $set: { + name: new MongoParamRef('Alice'), + email: new MongoParamRef('a@b.c'), + }, + $inc: { + loginCount: new MongoParamRef(1), + }, + }); + }); + + it('applies wrapValue to each operation', () => { + const ops: FieldOperation[] = [ + { operator: '$set', field: 'name', value: new MongoParamRef('Alice') }, + ]; + const wrap = (_field: string, value: MongoValue) => + new MongoParamRef((value as MongoParamRef).value, { codecId: 'mongo/string@1' }); + const result = compileFieldOperations(ops, wrap); + expect(result['$set']!['name']!).toBeInstanceOf(MongoParamRef); + expect((result['$set']!['name']! 
as MongoParamRef).codecId).toBe('mongo/string@1'); + }); + + it('passes operator to wrapValue callback', () => { + const ops: FieldOperation[] = [ + { operator: '$set', field: 'name', value: new MongoParamRef('Alice') }, + { operator: '$unset', field: 'email', value: new MongoParamRef('') }, + ]; + const operators: string[] = []; + compileFieldOperations(ops, (_field, value, operator) => { + operators.push(operator); + return value; + }); + expect(operators).toEqual(['$set', '$unset']); + }); +}); + describe('MongoCollection variant()', () => { it('returns a new instance from variant()', () => { const executor = createMockExecutor(); @@ -253,6 +467,20 @@ describe('MongoCollection include()', () => { // @ts-expect-error 'comments' is an embed relation, not a reference relation expect(() => col.include('comments')).toThrow('embed relation'); }); + + it('produces $lookup without $unwind for 1:N reference relation', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor).include('tasks'); + col.all(); + const stages = executor.lastStages!; + const lookup = stages.find((s) => s.kind === 'lookup') as MongoLookupStage; + expect(lookup.from).toBe('tasks'); + expect(lookup.localField).toBe('_id'); + expect(lookup.foreignField).toBe('assigneeId'); + expect(lookup.as).toBe('tasks'); + const unwind = stages.find((s) => s.kind === 'unwind'); + expect(unwind).toBeUndefined(); + }); }); describe('MongoCollection terminal methods', () => { @@ -298,14 +526,14 @@ describe('MongoCollection write methods', () => { it('returns created row with _id from insertedId', async () => { const executor = createMockExecutor([{ insertedId: 'new-id-1' }]); const col = createMongoCollection(contract, 'User', executor); - const result = await col.create({ name: 'Alice', email: 'a@b.c' }); - expect(result).toEqual({ _id: 'new-id-1', name: 'Alice', email: 'a@b.c' }); + const result = await col.create(defaultUserData); + expect(result).toEqual({ 
_id: 'new-id-1', ...defaultUserData }); }); it('sends an InsertOneCommand', async () => { const executor = createMockExecutor([{ insertedId: 'id' }]); const col = createMongoCollection(contract, 'User', executor); - await col.create({ name: 'Bob', email: 'b@b.c' }); + await col.create({ ...defaultUserData, name: 'Bob', email: 'b@b.c' }); expect(executor.lastCommand).toBeDefined(); expect(executor.lastCommand!.kind).toBe('insertOne'); expect(executor.lastCommand!.collection).toBe('users'); @@ -314,7 +542,7 @@ describe('MongoCollection write methods', () => { it('attaches codecId from contract fields to MongoParamRef in document', async () => { const executor = createMockExecutor([{ insertedId: 'id' }]); const col = createMongoCollection(contract, 'User', executor); - await col.create({ name: 'Alice', email: 'a@b.c' }); + await col.create(defaultUserData); const command = executor.lastCommand!; expect(command.kind).toBe('insertOne'); if (command.kind === 'insertOne') { @@ -347,14 +575,14 @@ describe('MongoCollection write methods', () => { const col = createMongoCollection(contract, 'User', executor); const rows: unknown[] = []; for await (const row of col.createAll([ - { name: 'Alice', email: 'a@b.c' }, - { name: 'Bob', email: 'b@b.c' }, + defaultUserData, + { ...defaultUserData, name: 'Bob', email: 'b@b.c' }, ])) { rows.push(row); } expect(rows).toEqual([ - { _id: 'id-1', name: 'Alice', email: 'a@b.c' }, - { _id: 'id-2', name: 'Bob', email: 'b@b.c' }, + { _id: 'id-1', ...defaultUserData }, + { _id: 'id-2', ...defaultUserData, name: 'Bob', email: 'b@b.c' }, ]); }); }); @@ -364,8 +592,8 @@ describe('MongoCollection write methods', () => { const executor = createMockExecutor([{ insertedIds: ['a', 'b'], insertedCount: 2 }]); const col = createMongoCollection(contract, 'User', executor); const count = await col.createCount([ - { name: 'Alice', email: 'a@b.c' }, - { name: 'Bob', email: 'b@b.c' }, + defaultUserData, + { ...defaultUserData, name: 'Bob', email: 'b@b.c' }, 
]); expect(count).toBe(2); }); @@ -422,6 +650,163 @@ describe('MongoCollection write methods', () => { }); }); + describe('update() with callback', () => { + it('produces correct update doc from field operations', async () => { + const executor = createMockExecutor([{ _id: 'id-1', name: 'Updated' }]); + const col = createMongoCollection(contract, 'User', executor); + await col + .where(MongoFieldFilter.eq('_id', 'id-1')) + .update((u) => [u.name.set('Updated'), u.loginCount.inc(1)]); + const command = executor.lastCommand!; + expect(command.kind).toBe('findOneAndUpdate'); + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + expect(update['$set']!['name']).toBeInstanceOf(MongoParamRef); + expect(update['$inc']!['loginCount']).toBeInstanceOf(MongoParamRef); + } + }); + + it('applies codec to callback operations for scalar fields', async () => { + const executor = createMockExecutor([{ _id: 'id-1', name: 'Updated' }]); + const col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('_id', 'id-1')).update((u) => [u.name.set('Updated')]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + expect(update['$set']!['name']!.codecId).toBe('mongo/string@1'); + } + }); + + it('produces $push operations from callback', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('_id', 'id-1')).update((u) => [u.tags.push('admin')]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + expect(update['$push']).toBeDefined(); + expect(update['$push']!['tags']).toBeInstanceOf(MongoParamRef); + } + }); + + it('does not attach codecId to $unset sentinel value', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const 
col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('_id', 'id-1')).update((u) => [u.name.unset()]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + const unsetRef = update['$unset']!['name']!; + expect(unsetRef).toBeInstanceOf(MongoParamRef); + expect(unsetRef.codecId).toBeUndefined(); + expect(unsetRef.value).toBe(''); + } + }); + + it('produces dot-path operations from callback', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + await col + .where(MongoFieldFilter.eq('_id', 'id-1')) + .update((u) => [u('homeAddress.city').set('NYC')]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + expect(update['$set']!['homeAddress.city']).toBeInstanceOf(MongoParamRef); + expect(update['$set']!['homeAddress.city']!.codecId).toBe('mongo/string@1'); + } + }); + + it('normalizes empty callback to { $set: {} }', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('_id', 'id-1')).update(() => []); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + expect(command.update).toEqual({ $set: {} }); + } + }); + + it('wraps value-object payload through codec in set()', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + await col + .where(MongoFieldFilter.eq('_id', 'id-1')) + .update((u) => [u.homeAddress.set({ city: 'NYC', country: 'US' })]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + const setDoc = update['$set']!['homeAddress'] as Record; + 
expect(setDoc['city']).toBeInstanceOf(MongoParamRef); + expect(setDoc['city']!.codecId).toBe('mongo/string@1'); + expect(setDoc['country']).toBeInstanceOf(MongoParamRef); + expect(setDoc['country']!.codecId).toBe('mongo/string@1'); + } + }); + }); + + describe('updateAll() with callback', () => { + it('produces correct update doc from field operations', async () => { + const executor = createMockExecutor( + [{ _id: 'id-1' }, { _id: 'id-2' }], + [{ matchedCount: 2, modifiedCount: 2 }], + [ + { _id: 'id-1', name: 'Alice', loginCount: 1 }, + { _id: 'id-2', name: 'Bob', loginCount: 1 }, + ], + ); + const col = createMongoCollection(contract, 'User', executor); + const rows: unknown[] = []; + for await (const row of col + .where(MongoFieldFilter.eq('email', 'a@b.c')) + .updateAll((u) => [u.loginCount.inc(1)])) { + rows.push(row); + } + expect(rows).toHaveLength(2); + const updateCommand = executor.lastPlan; + expect(updateCommand).toBeDefined(); + }); + }); + + describe('updateCount() with callback', () => { + it('produces correct update doc from field operations', async () => { + const executor = createMockExecutor([{ modifiedCount: 1 }]); + const col = createMongoCollection(contract, 'User', executor); + const count = await col + .where(MongoFieldFilter.eq('email', 'a')) + .updateCount((u) => [u.name.set('X')]); + expect(count).toBe(1); + }); + }); + + describe('upsert() with callback', () => { + it('uses field operations for update part', async () => { + const executor = createMockExecutor([{ _id: 'new-id' }]); + const col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ + create: defaultUserData, + update: (u: FieldAccessor) => [u.loginCount.inc(1)], + }); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + expect(update['$inc']!['loginCount']).toBeInstanceOf(MongoParamRef); + 
expect(update['$setOnInsert']).toBeDefined(); + } + }); + + it('throws when callback produces dot-path operations', async () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor); + await expect( + col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ + create: { ...defaultUserData, homeAddress: { city: 'SF', country: 'US' } }, + update: (u) => [u('homeAddress.city').set('LA')], + }), + ).rejects.toThrow('dot-path'); + }); + }); + describe('updateCount()', () => { it('throws without .where()', async () => { const executor = createMockExecutor(); @@ -491,7 +876,7 @@ describe('MongoCollection write methods', () => { const executor = createMockExecutor([{ _id: 'new-id', name: 'Alice', email: 'a@b.c' }]); const col = createMongoCollection(contract, 'User', executor); const result = await col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ - create: { name: 'Alice', email: 'a@b.c' }, + create: defaultUserData, update: { name: 'Alice Updated' }, }); expect(result).toEqual({ _id: 'new-id', name: 'Alice', email: 'a@b.c' }); @@ -503,7 +888,7 @@ describe('MongoCollection write methods', () => { const col = createMongoCollection(contract, 'User', executor); await expect( col.upsert({ - create: { name: 'A', email: 'a@b.c' }, + create: { ...defaultUserData, name: 'A' }, update: { name: 'B' }, }), ).rejects.toThrow('requires a .where()'); @@ -560,7 +945,7 @@ describe('MongoCollection write methods', () => { await expect( withFilter(executor) .take(1) - .upsert({ create: { name: 'A', email: 'a@b.c' }, update: { name: 'B' } }), + .upsert({ create: { ...defaultUserData, name: 'A' }, update: { name: 'B' } }), ).rejects.toThrow('orderBy/skip/take'); }); }); @@ -703,18 +1088,50 @@ describe('MongoCollection write methods', () => { const col = createMongoCollection(contract, 'User', executor); await expect( col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ - create: { name: 'Alice', email: 'a@b.c' }, + create: 
defaultUserData, update: { _id: 'new-id', name: 'B' }, }), ).rejects.toThrow('_id'); }); + + it('update() with callback throws when _id is targeted', async () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor); + await expect( + col.where(MongoFieldFilter.eq('_id', 'id-1')).update((u) => [u._id.set('new-id')]), + ).rejects.toThrow('_id'); + }); + + it('updateAll() with callback throws when _id is targeted', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + const result = col + .where(MongoFieldFilter.eq('_id', 'id-1')) + .updateAll((u: FieldAccessor) => [u._id.set('new-id')]); + await expect(async () => { + for await (const _ of result) { + /* drain */ + } + }).rejects.toThrow('_id'); + }); + + it('upsert() with callback throws when _id is targeted', async () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor); + await expect( + col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ + create: defaultUserData, + update: (u: FieldAccessor) => [u._id.set('new-id')], + }), + ).rejects.toThrow('_id'); + }); }); describe('immutability', () => { it('write methods do not mutate collection state', async () => { const executor = createMockExecutor([{ insertedId: 'x' }]); const col = createMongoCollection(contract, 'User', executor); - await col.create({ name: 'Alice', email: 'a@b.c' }); + await col.create(defaultUserData); const filtered = col.where(MongoFieldFilter.eq('name', 'Alice')); expect(filtered).not.toBe(col); }); diff --git a/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts new file mode 100644 index 000000000..809221c06 --- /dev/null +++ b/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts @@ -0,0 +1,219 
@@ +import { createMongoAdapter } from '@prisma-next/adapter-mongo'; +import { createMongoDriver } from '@prisma-next/driver-mongo'; +import { createMongoRuntime, type MongoRuntime } from '@prisma-next/mongo-runtime'; +import { timeouts } from '@prisma-next/test-utils'; +import { MongoClient, ObjectId } from 'mongodb'; +import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import type { Contract } from '../../../../1-foundation/mongo-contract/test/fixtures/orm-contract'; +import ormContractJson from '../../../../1-foundation/mongo-contract/test/fixtures/orm-contract.json'; +import type { FieldAccessor } from '../../src/field-accessor'; +import { mongoOrm } from '../../src/mongo-orm'; + +const contract = ormContractJson as unknown as Contract; + +const defaultUserData = { + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, +}; + +function getUserId(user: Record): ObjectId { + return new ObjectId(user['_id'] as string); +} + +describe( + 'ORM ergonomics integration (FL-04, FL-06, FL-08)', + { timeout: timeouts.spinUpMongoMemoryServer }, + () => { + let replSet: MongoMemoryReplSet; + let client: MongoClient; + let runtime: MongoRuntime; + const dbName = 'orm_ergonomics_test'; + + beforeAll(async () => { + replSet = await MongoMemoryReplSet.create({ + replSet: { count: 1, storageEngine: 'wiredTiger' }, + }); + client = new MongoClient(replSet.getUri()); + await client.connect(); + + const adapter = createMongoAdapter(); + const driver = await createMongoDriver(replSet.getUri(), dbName); + runtime = createMongoRuntime({ adapter, driver }); + }, timeouts.spinUpMongoMemoryServer); + + beforeEach(async () => { + await client.db(dbName).dropDatabase(); + }); + + afterAll(async () => { + await Promise.allSettled([runtime?.close(), client?.close(), replSet?.stop()]); + }, timeouts.spinUpMongoMemoryServer); + + describe('FL-06: 
codec-aware where()', () => { + it('retrieves document by ObjectId field using object where', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + const found = await orm.users.where({ _id: user._id as string }).first(); + expect(found).not.toBeNull(); + expect(found!.name).toBe('Alice'); + }); + + it('retrieves document by string field using object where', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + await orm.users.create(defaultUserData); + const found = await orm.users.where({ name: 'Alice' }).first(); + expect(found).not.toBeNull(); + expect(found!.email).toBe('alice@test.com'); + }); + + it('filters by multiple fields using object where', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + await orm.users.create(defaultUserData); + await orm.users.create({ ...defaultUserData, name: 'Bob', email: 'bob@test.com' }); + const found = await orm.users.where({ name: 'Alice', email: 'alice@test.com' }).first(); + expect(found).not.toBeNull(); + expect(found!.name).toBe('Alice'); + }); + }); + + describe('FL-04: field accessor mutations', () => { + it('$push adds element to array field', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + const updated = await orm.users + .where({ _id: user._id as string }) + .update((u) => [u.tags.push('admin')]); + expect(updated).not.toBeNull(); + + const oid = getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['tags']).toEqual(['admin']); + }); + + it('$pull removes element from array field', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create({ ...defaultUserData, tags: ['admin', 'editor'] }); + await orm.users.where({ _id: user._id as string }).update((u) => [u.tags.pull('admin')]); + + const oid = 
getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['tags']).toEqual(['editor']); + }); + + it('$inc increments numeric field', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + await orm.users.where({ _id: user._id as string }).update((u) => [u.loginCount.inc(1)]); + + const oid = getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['loginCount']).toBe(1); + }); + + it('dot-path $set updates nested value object field', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create({ + ...defaultUserData, + homeAddress: { city: 'SF', country: 'US' }, + }); + await orm.users + .where({ _id: user._id as string }) + .update((u) => [u('homeAddress.city').set('NYC')]); + + const oid = getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['homeAddress']).toEqual({ city: 'NYC', country: 'US' }); + }); + + it('multiple operations in one callback are applied together', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + await orm.users + .where({ _id: user._id as string }) + .update((u) => [u.tags.push('admin'), u.loginCount.inc(5)]); + + const oid = getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['tags']).toEqual(['admin']); + expect(doc!['loginCount']).toBe(5); + }); + + it('updateAll with callback updates multiple documents', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + await orm.users.create(defaultUserData); + await orm.users.create({ ...defaultUserData, name: 'Bob', email: 'bob@test.com' }); + + const rows: unknown[] = []; + for await (const row of orm.users + 
.where({ loginCount: 0 }) + .updateAll((u: FieldAccessor) => [u.loginCount.inc(1)])) { + rows.push(row); + } + expect(rows).toHaveLength(2); + + const docs = await client.db(dbName).collection('users').find({}).toArray(); + for (const doc of docs) { + expect(doc['loginCount']).toBe(1); + } + }); + }); + + describe('upsert() dot-path guard', () => { + it('throws when callback uses a dot-path operation', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + await orm.users.create(defaultUserData); + await expect( + orm.users.where({ name: 'Alice' }).upsert({ + create: { ...defaultUserData, homeAddress: { city: 'SF', country: 'US' } }, + update: (u) => [u('homeAddress.city').set('LA')], + }), + ).rejects.toThrow('dot-path'); + }); + }); + + describe('FL-08: 1:N reference relation include', () => { + it('include() on 1:N relation returns array of related documents', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + + await orm.tasks.create({ + title: 'Task 1', + type: 'bug', + assigneeId: user._id as string, + } as never); + await orm.tasks.create({ + title: 'Task 2', + type: 'feature', + assigneeId: user._id as string, + } as never); + + const result = await orm.users + .include('tasks') + .where({ _id: user._id as string }) + .first(); + expect(result).not.toBeNull(); + const tasks = (result as Record)['tasks'] as Record[]; + expect(tasks).toHaveLength(2); + const titles = tasks.map((t) => t['title']).sort(); + expect(titles).toEqual(['Task 1', 'Task 2']); + }); + + it('include() on 1:N returns empty array when no related documents', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + + const result = await orm.users + .include('tasks') + .where({ _id: user._id as string }) + .first(); + expect(result).not.toBeNull(); + const tasks = (result as Record)['tasks'] as unknown[]; + 
expect(tasks).toEqual([]); + }); + }); + }, +); diff --git a/packages/2-mongo-family/5-query-builders/orm/test/integration/polymorphism.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/integration/polymorphism.test.ts index 3c4ff41f6..30e8b3a60 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/integration/polymorphism.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/integration/polymorphism.test.ts @@ -42,7 +42,13 @@ describe( it('base query returns rows with discriminator values', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.create({ title: 'Fix crash', @@ -63,7 +69,13 @@ describe( it('variant("Bug") filters to only Bug rows', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.create({ title: 'Fix crash', @@ -84,7 +96,13 @@ describe( it('variant("Feature") filters to only Feature rows', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.create({ title: 'Fix crash', @@ -105,7 +123,13 @@ describe( it('variant create injects discriminator and persists it', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await 
orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); const bug = await orm.tasks.variant('Bug').create({ title: 'Null pointer', @@ -123,7 +147,13 @@ describe( it('round-trip: create via variant, read back via base', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.variant('Bug').create({ title: 'Memory leak', @@ -150,8 +180,20 @@ describe( const orm = mongoOrm({ contract, executor: runtime }); await orm.users.createAll([ - { name: 'Alice', email: 'alice@test.com' }, - { name: 'Bob', email: 'bob@test.com' }, + { + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }, + { + name: 'Bob', + email: 'bob@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }, ]); const users = await orm.users.all(); @@ -162,7 +204,13 @@ describe( it('variant().first() returns narrowed result', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.variant('Bug').create({ title: 'Fix crash', @@ -178,7 +226,13 @@ describe( it('variant createAll injects discriminator into each document', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await 
orm.tasks.variant('Bug').createAll([ { title: 'Bug 1', severity: 'low', assigneeId: user._id as string }, diff --git a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts index 64adaba62..70f99cc27 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts @@ -2,6 +2,12 @@ import type { AsyncIterableResult } from '@prisma-next/runtime-executor'; import { expectTypeOf, test } from 'vitest'; import type { Contract } from '../../../1-foundation/mongo-contract/test/fixtures/orm-contract'; import type { MongoCollection } from '../src/collection'; +import type { + DotPath, + FieldAccessor, + FieldExpression, + ResolveDotPathType, +} from '../src/field-accessor'; import type { MongoOrmClient } from '../src/mongo-orm'; import type { CreateInput, @@ -69,6 +75,25 @@ test('where filter keys are constrained to model field names', () => { expectTypeOf().toHaveProperty('email'); }); +test('where filter rejects invalid field names', () => { + type UserFilter = MongoWhereFilter; + // @ts-expect-error 'nonexistent' is not a field on User + void ({ nonexistent: 'value' } satisfies UserFilter); +}); + +test('where filter enforces value types from codec', () => { + type UserFilter = MongoWhereFilter; + void ({ name: 'Alice' } satisfies UserFilter); + // @ts-expect-error number is not assignable to string field + void ({ name: 123 } satisfies UserFilter); +}); + +test('object-based where() accepts MongoWhereFilter', () => { + const col = {} as MongoCollection; + const filtered = col.where({ name: 'Alice' }); + expectTypeOf(filtered).toExtend>(); +}); + // --- Include constrained to reference relations only --- test('ReferenceRelationKeys picks only reference relations', () => { @@ -87,9 +112,9 @@ test('MongoIncludeSpec only allows reference relation keys', () => { 
expectTypeOf().not.toHaveProperty('comments'); }); -test('MongoIncludeSpec has no includable keys for models with only embed relations', () => { +test('ReferenceRelationKeys picks reference relations on User', () => { type UserRefKeys = ReferenceRelationKeys; - expectTypeOf().toBeNever(); + expectTypeOf().toEqualTypeOf<'tasks'>(); }); // --- Polymorphic root returns discriminated union --- @@ -327,3 +352,74 @@ test('variant() preserves TVariant through chaining', () => { expectTypeOf().toHaveProperty('severity'); expectTypeOf().not.toHaveProperty('type'); }); + +// --- 1:N reference relation include --- + +test('include() on 1:N reference relation returns array type', () => { + const col = {} as MongoCollection; + const result = col.include('tasks').first(); + expectTypeOf(result).toExtend< + Promise<(InferRootRow & { tasks: InferFullRow[] }) | null> + >(); +}); + +test('include() on 1:N reference relation all() returns array type', () => { + const col = {} as MongoCollection; + const result = col.include('tasks').all(); + expectTypeOf(result).toExtend< + AsyncIterableResult< + InferRootRow & { tasks: InferFullRow[] } + > + >(); +}); + +// --- Field accessor types --- + +test('FieldAccessor has FieldExpression for scalar fields', () => { + type Accessor = FieldAccessor; + expectTypeOf().toExtend>(); + expectTypeOf().toExtend>(); +}); + +test('FieldAccessor has FieldExpression for array fields', () => { + type Accessor = FieldAccessor; + expectTypeOf().toExtend>(); +}); + +test('FieldAccessor resolves value-object field to concrete type, not unknown', () => { + type Accessor = FieldAccessor; + type HomeAddressExpr = Accessor['homeAddress']; + + // @ts-expect-error set() rejects a number when field type is value-object + void ({} as HomeAddressExpr).set(42); +}); + +test('DotPath resolves value object dot-paths', () => { + type Paths = DotPath; + expectTypeOf<'homeAddress.city'>().toExtend(); + expectTypeOf<'homeAddress.country'>().toExtend(); +}); + +test('DotPath 
rejects invalid paths', () => { + type Paths = DotPath; + expectTypeOf<'homeAddress.nonexistent'>().not.toExtend(); + expectTypeOf<'nonexistent.field'>().not.toExtend(); +}); + +test('ResolveDotPathType resolves to scalar type', () => { + type CityType = ResolveDotPathType; + expectTypeOf().toEqualTypeOf(); +}); + +test('FieldExpression inc/mul restricted to numeric types', () => { + type StringExpr = FieldExpression; + type NumberExpr = FieldExpression; + + // @ts-expect-error inc is not available on string fields + void ({} as StringExpr).inc(1); + // @ts-expect-error mul is not available on string fields + void ({} as StringExpr).mul(2); + + void ({} as NumberExpr).inc(1); + void ({} as NumberExpr).mul(2); +}); diff --git a/packages/2-mongo-family/7-runtime/test/orm-contract-types.test-d.ts b/packages/2-mongo-family/7-runtime/test/orm-contract-types.test-d.ts index 6382c2f98..54de6e98c 100644 --- a/packages/2-mongo-family/7-runtime/test/orm-contract-types.test-d.ts +++ b/packages/2-mongo-family/7-runtime/test/orm-contract-types.test-d.ts @@ -18,6 +18,9 @@ test('InferModelRow resolves User fields', () => { _id: string; name: string; email: string; + loginCount: number; + tags: string[]; + homeAddress: { city: string; country: string } | null; }>(); }); diff --git a/projects/mongo-example-apps/framework-limitations.md b/projects/mongo-example-apps/framework-limitations.md new file mode 100644 index 000000000..4f0b01f90 --- /dev/null +++ b/projects/mongo-example-apps/framework-limitations.md @@ -0,0 +1,79 @@ +# Framework Limitations — Retail Store Example + +Framework gaps surfaced by the retail store example app. Each entry is a signal that the framework needs work — the example app is designed to expose these, not paper over them. + +**Source:** Consolidated from [reviews/code-review.md](reviews/code-review.md) (round 1/2 review) and [reviews/pr-327/code-review.md](reviews/pr-327/code-review.md) (PR #327 review). 
+ +--- + +## Type ergonomics + +These are the highest-impact issues. They force type casts at nearly every boundary between the ORM and application code. + +| ID | Issue | Impact | Workaround in app | Status | +|---|---|---|---|---| +| FL-01 | **Scalar codec output types are not assignable to `string`/`number`** | ORM results for string fields (name, brand, code, currency) are codec wrapper types, not `string`. Every ORM-to-UI boundary requires `as string`. | ~15 `as string` casts in UI components, ~15 `String()` calls in seed | Open | +| FL-02 | **`_id` codec output type is not assignable to `string`** | ORM returns `_id` as `CodecTypes['mongo/objectId@1']['output']`. Every ID handoff between data access functions requires `as string` or `String()`. | ~30 casts across tests, data layer, and seed | Open | +| FL-03 | **Timestamp codec output type incompatible with `Date` or `string`** | The `mongo/dateTime@1` codec output type is neither `Date` nor `string`, forcing `as unknown as string` double casts — the most aggressive form of type assertion. | 1 double cast in order detail page | Open | + +**Root cause:** The codec type map resolves scalar codec IDs (`mongo/string@1`, `mongo/objectId@1`, `mongo/dateTime@1`) to opaque wrapper types instead of their underlying TypeScript primitives. The runtime values *are* the expected primitives — the types just don't reflect that. + +**Framework action:** Codec output types for simple scalars should resolve to types assignable to their JS primitives (`string`, `number`, `boolean`, `Date`). + +--- + +## Query capabilities + +| ID | Issue | Impact | Workaround in app | Status | +|---|---|---|---|---| +| FL-04 | **ORM lacks typed `$push`/`$pull`/`$inc` array update operators** | ORM `update()` only supports `$set` semantics. Array mutations require dropping to `mongoRaw` with untyped commands and manual `MongoParamRef` construction. This is the most exercised workaround in the app. 
| 3 data access functions use `mongoRaw`: cart add (`$push`), cart remove (`$pull`), order status update (`$push`) | Open | +| FL-05 | **Pipeline and raw query results are untyped** | `runtime.execute()` yields `unknown`. Pipeline builder `build()` produces a plan with no result type. Every pipeline/raw call site casts `row as T` with no compile-time or runtime verification. | `collectResults()` helper centralizes the cast but provides no type safety | Open | +| FL-06 | **ORM `where()` does not encode filter values through codecs** | The ORM auto-encodes values through codecs on writes (create/update) but not on reads (where). Filter values are passed as raw `MongoFilterExpr` AST nodes and the user must manually construct `MongoParamRef` with the correct `codecId`. Most visible with ObjectId fields (string → BSON ObjectId), but affects any codec with a non-identity `encode`. | `objectIdEq()` helper in `object-id-filter.ts` wraps ObjectId filters; no generic workaround for other codecs | Open | +| FL-07 | **No `$vectorSearch` stage in pipeline builder** | The pipeline builder doesn't expose a `vectorSearch()` stage. Implementing vector search requires raw aggregate with fully untyped commands. Atlas-specific, so likely needs an extension pack. | `findSimilarProducts()` is a stub; would need raw aggregate | Open | +| FL-08 | **1:N back-relation loading not available or not tested** | `include()` only tested for N:1 relations (cart→user, order→user, invoice→order). Loading a user's carts or orders via `include()` from the user side has not been demonstrated. | N/A — only N:1 direction used | Open | + +--- + +## Schema & migration + +| ID | Issue | Impact | Workaround in app | Status | +|---|---|---|---|---| +| FL-09 | **Migration planner creates separate collections for polymorphic variants** | Variant models without `@@map` get their own collection creation operations (e.g., `collection.addToCartEvent.create`). 
Polymorphic variants share the base model's collection — these operations are incorrect and would create unnecessary empty collections if applied. | Migration artifacts committed as-is; incorrect variant collection ops would need manual removal before applying | Open | +| FL-10 | **Variant collection validators are incomplete** | The generated validators for variant collections include only variant-specific fields (e.g., `searchEvent` validator has only `query`) and miss all base model fields (`_id`, `userId`, `sessionId`, `timestamp`, `type`). Structurally wrong even if variant collections were intentional. | N/A — consequence of FL-09 | Open | +| FL-11 | **`$jsonSchema` validator drops `Float` fields** | The JSON schema derivation doesn't recognize the `Float` scalar type. Fields typed as `Float` are silently omitted from validators. E.g., `Price` validator has `required: ["currency"]` but no `amount`. `InvoiceLineItem` drops `unitPrice` and `lineTotal`. | Validators are weaker than intended — Float fields are not validated | Open | +| FL-12 | **Embedded models via `owner` not supported end-to-end** | The contract schema and emitter accept `owner`, and the TS contract builder supports it, but PSL has no `@@owner` attribute and the ORM has no embedded entity CRUD handling. Can't demonstrate embedded entities (as distinct from value objects) in PSL-authored apps. | N/A — feature not usable from PSL | Open | + +--- + +## Missing capabilities + +| ID | Issue | Impact | Status | +|---|---|---|---| +| FL-13 | **TypeScript DSL contract authoring not available for Mongo** | The spec requires authoring contracts in both PSL and TS DSL. Only PSL is available for Mongo contracts. Can't validate that both surfaces produce equivalent output. | Open | +| FL-14 | **Change stream support not available** | Can't demonstrate real-time order status updates or event processing via change streams. The spec lists this as a requirement (with the caveat that it requires a replica set). 
| Open | +| FL-15 | **Atlas Search (`$search`) requires extension pack not yet built** | Product search uses `$regex` as a fallback. Atlas Search would provide relevance-scored full-text search but requires an extension pack. | Open | + +--- + +## Addressed by this branch + +These were previously open limitations that have been resolved in the current branch. + +| ID | Issue | Resolution | +|---|---|---| +| ~~FL-A1~~ | **`@@index`/`@@textIndex`/`@unique` not supported in Mongo PSL** | Now supported — the retail store schema uses text indexes with weights, compound, hashed, TTL, sparse, and collation-aware indexes. All flow through to migration operations. | +| ~~FL-A2~~ | **Polymorphism not demonstrated** | Now demonstrated — `Event` model with `@@discriminator(type)` and 3 variants (`ViewProductEvent`, `SearchEvent`, `AddToCartEvent`). Tests cover variant creation, base queries, discriminator filtering. | +| ~~FL-A3~~ | **Migration planner only handles index create/drop** | Now generates collection creation operations with `$jsonSchema` validators and index creation with full options (unique, sparse, TTL, collation, weights, hashed). Partially addressed — variant collection handling is incorrect (FL-09). | +| ~~FL-A4~~ | **ORM mutations didn't encode values through codec registry** | Fixed — ORM now attaches `codecId` from contract fields to `MongoParamRef`; adapter encodes via codec registry. ObjectId fields are properly encoded to BSON ObjectIds. | +| ~~FL-A5~~ | **Nullable value object fields produced incorrect `$jsonSchema` validators** | Fixed — nullable VOs now produce `oneOf: [{ bsonType: "null" }, { bsonType: "object", ... }]`. | +| ~~FL-A6~~ | **Adapter crashed on optional `codec.encode`** | Fixed — guard added before invocation. 
| + +--- + +## App-level gaps (not framework) + +| ID | Issue | Note | +|---|---|---| +| AG-01 | **Polymorphic events not surfaced in UI** | The data layer and tests fully exercise polymorphism, but no user-facing page displays or creates typed events. Could add an analytics/event log page. | +| AG-02 | **Fabricated image URLs** | Product images use `/images/products/...` paths that don't exist. Products render with broken images. | +| AG-03 | **README domain model diagram out of date** | Says `Events ─── EventMetadata (embedded)` but schema now uses polymorphic variants. `EventMetadata` type no longer exists. | diff --git a/projects/mongo-example-apps/plans/orm-query-mutation-ergonomics-plan.md b/projects/mongo-example-apps/plans/orm-query-mutation-ergonomics-plan.md new file mode 100644 index 000000000..0d5bfdf38 --- /dev/null +++ b/projects/mongo-example-apps/plans/orm-query-mutation-ergonomics-plan.md @@ -0,0 +1,105 @@ +# ORM Query & Mutation Ergonomics Plan + +## Summary + +Implement codec-aware `where()` filtering, ADR 180 field accessor mutations, and 1:N back-relation test coverage in the Mongo ORM. Success means the retail store example app compiles without `mongoRaw` workarounds or manual `MongoParamRef`/`MongoFieldFilter` construction for common operations. + +**Spec:** [specs/orm-query-mutation-ergonomics.spec.md](../specs/orm-query-mutation-ergonomics.spec.md) + +## Collaborators + +| Role | Person/Team | Context | +|---|---|---| +| Maker | Will | Drives execution | + +## Milestones + +### Milestone 1: 1:N back-relation loading (FL-08) + +Verify that `include()` works for 1:N reference relations and add test coverage. Lowest risk — validates existing implementation. 
+ +**Tasks:** + +- [ ] Add a 1:N reference relation to the ORM test fixture (e.g., `User.tasks` → `Task` via `assigneeId`) +- [ ] Add unit tests: `include()` on 1:N reference relation produces `$lookup` without `$unwind` +- [ ] Add unit tests: return type for 1:N included relation is an array (type-level test) +- [ ] Verify with retail store contract: `User.carts`, `User.orders`, `Order.invoices` all work with `include()` + +### Milestone 2: Codec-aware `where()` (FL-06) + +Add a plain-object `where()` overload that encodes filter values through codecs automatically. Establishes the codec-encoding pattern that FL-04 also uses. + +**Tasks:** + +- [ ] Write unit tests for object-based `where()`: ObjectId field, string field, multi-field AND, chaining with `MongoFilterExpr` +- [ ] Write type-level tests: invalid field names error, wrong value types error +- [ ] Add `where()` overload accepting `MongoWhereFilter` to the `MongoCollection` interface +- [ ] Implement object-to-filter compilation in `MongoCollectionImpl`: iterate fields, look up `codecId` from contract, wrap with `#wrapFieldValue`, build `MongoFieldFilter.eq`, AND if multiple +- [ ] Verify all tests pass + +### Milestone 3: ADR 180 field accessor for mutations (FL-04) + +Implement the Proxy-based field accessor with property access for top-level fields and callable dot-path for nested value object traversal. Capability-gated mutation operators. 
+ +**Tasks:** + +- [ ] Define the `FieldOperation` type: `{ operator, field, value }` representing a single update operation +- [ ] Define the `FieldExpression` interface with mutation operators: `set()`, `unset()`, `inc()`, `mul()`, `push()`, `pull()`, `addToSet()`, `pop()` +- [ ] Define recursive template literal types for dot-path validation: `DotPath` and `ResolveDotPathType` +- [ ] Define the `FieldAccessor` type: top-level fields as properties returning `FieldExpression`, callable with dot-path string for nested value object fields +- [ ] Write unit tests for field operation compilation: individual operators (`$set`, `$push`, `$pull`, `$inc`, `$unset`, `$addToSet`, `$pop`, `$mul`) produce correct update documents +- [ ] Write unit tests for multi-operation merging: multiple operations in one callback are grouped by operator key +- [ ] Write unit tests for codec encoding in field operations: values carry correct `codecId` +- [ ] Write unit tests for dot-path operations: `u("address.city").set("NYC")` produces `{ $set: { "address.city": ... } }` +- [ ] Write type-level tests: invalid dot-paths error, operator/value type mismatch errors +- [ ] Implement `createFieldAccessor()`: Proxy-based factory that returns the accessor +- [ ] Implement field operation → update document compilation: group `FieldOperation[]` by operator, merge into `{ $set: {...}, $push: {...}, ... }`, encode values through `#wrapFieldValue` +- [ ] Add `update()` callback overload to `MongoCollection` interface and `MongoCollectionImpl` +- [ ] Extend `updateAll()`, `updateCount()` to accept the callback form +- [ ] Extend `upsert()` to accept the callback form for the `update` part +- [ ] Add value objects to the ORM test fixture contract to support dot-path tests +- [ ] Verify all tests pass + +### Milestone 4: Retail store cleanup + +Replace workarounds in the retail store with the new ORM features. This is the end-to-end proof. 
+ +**Tasks:** + +- [ ] Replace `objectIdEq()` / `rawObjectIdFilter()` calls with object-based `where()` in all data access functions +- [ ] Delete `src/data/object-id-filter.ts` +- [ ] Replace `mongoRaw` in `addToCart()` with `update(u => [u.items.push(item)])` +- [ ] Replace `mongoRaw` in `removeFromCart()` with `update(u => [u.items.pull({ productId })])` +- [ ] Replace `mongoRaw` in `updateOrderStatus()` with `update(u => [u.statusHistory.push(entry)])` +- [ ] Remove `execute-raw.ts` imports/functions that are no longer needed +- [ ] Verify retail store compiles and tests pass + +## Test Coverage + +| Acceptance Criterion | Test Type | Milestone | Notes | +|---|---|---|---| +| `where({ userId })` with ObjectId codec produces correct filter | Unit | M2 | | +| `where({ name })` with string codec produces correct filter | Unit | M2 | | +| Multi-field `where()` produces AND | Unit | M2 | | +| Object `where()` chainable with `MongoFilterExpr` `where()` | Unit | M2 | | +| Type error for invalid field names in where object | Type test | M2 | `@ts-expect-error` | +| Type error for wrong value types in where object | Type test | M2 | `@ts-expect-error` | +| `u.items.push(item)` → `{ $push: { "items": ... } }` | Unit | M3 | | +| `u.count.inc(1)` → `{ $inc: { "count": 1 } }` | Unit | M3 | | +| `u.name.set("Alice")` → `{ $set: { "name": ... } }` | Unit | M3 | | +| `u.name.unset()` → `{ $unset: { "name": "" } }` | Unit | M3 | | +| `u("address.city").set("NYC")` → `{ $set: { "address.city": ... 
} }` | Unit | M3 | | +| Multiple operations merged by operator key | Unit | M3 | | +| Values in operations encoded through codecs | Unit | M3 | | +| Callback works with `updateAll()`, `updateCount()`, `upsert()` | Unit | M3 | | +| Type error for invalid dot-path | Type test | M3 | `@ts-expect-error` | +| Type error for operator/value mismatch | Type test | M3 | `@ts-expect-error` | +| `include()` on 1:N reference relation: `$lookup` without `$unwind` | Unit | M1 | | +| 1:N included relation return type is array | Type test | M1 | | +| Retail store `mongoRaw` calls replaced | Integration | M4 | Existing retail store tests pass | +| `objectIdEq()` helpers removed | Integration | M4 | File deleted, no import errors | + +## Open Items + +- The ORM test fixture needs value objects added (M3) and a 1:N reference back-relation added (M1). Both are test fixture changes, not contract schema changes. +- `execute-raw.ts` may still be needed for pipeline/raw queries outside the scope of this ticket. Only remove functions that are no longer referenced after the cleanup. diff --git a/projects/mongo-example-apps/plans/retail-store-round-2-plan.md b/projects/mongo-example-apps/plans/retail-store-round-2-plan.md new file mode 100644 index 000000000..b79336bc4 --- /dev/null +++ b/projects/mongo-example-apps/plans/retail-store-round-2-plan.md @@ -0,0 +1,139 @@ +# Retail Store Round 2 — Interactive E-Commerce + +## Summary + +Make the retail-store example app interactive so a user can sign up, browse and search products, manage a cart, check out, and view orders — all backed by the PN data access layer. This builds on the contract, data access layer, and integration tests delivered in round 1. The result is a working e-commerce demo where every user action exercises a distinct PN Mongo capability (ORM CRUD, `$push`/`$pull`, `$regex` search, upsert, aggregation). 
+ +**Spec:** `projects/mongo-example-apps/specs/retail-store-round-2.spec.md` + +## Collaborators + +| Role | Person/Team | Context | +|---|---|---| +| Maker | Agent / Engineer | Drives execution | +| Reviewer | Will | Architectural review, UX feedback | + +--- + +## Milestones + +### Milestone 1: Foundation — UI library, auth stub, expanded seed data + +Set up shadcn/ui, implement the login stub with cookie-based auth, expand the seed data, and replace the existing inline-style pages with library components. After this milestone, the app has a working auth flow and a polished but still read-only product catalog. + +**Tasks:** + +- [ ] Install and configure shadcn/ui (Tailwind CSS, cn utility, base components: Button, Card, Input, Badge, Select, DropdownMenu, Dialog, Separator, Skeleton) +- [ ] Replace `app/globals.css` inline reset with Tailwind base styles +- [ ] Implement auth middleware: Next.js middleware that checks for a `userId` cookie; redirects unauthenticated requests to `/login` (except `/login` and `/api/auth/*` routes) +- [ ] Create `/login` page with "Sign Up" button — calls `POST /api/auth/signup` which creates a user via `orm.users.create()` with a generated name (e.g. "User-{shortId}"), sets a `userId` cookie, and redirects to `/` +- [ ] Create `POST /api/auth/signup` API route +- [ ] Create `POST /api/auth/logout` API route — clears the `userId` cookie, redirects to `/login` +- [ ] Create a shared `getAuthUser()` server helper that reads the `userId` cookie via `cookies()`, fetches the user via `findUserById()`, and returns null if not found (used by all server components) +- [ ] Rebuild navbar as a server component using shadcn: show user name from `getAuthUser()`, cart item count (placeholder 0 for now), nav links (Products, Cart, Orders), and a Log Out dropdown item +- [ ] Expand seed data to ~24 products across 4+ categories (Apparel/Topwear, Apparel/Bottomwear, Accessories/Bags, Footwear/Shoes) and 5+ brands. Add 3+ store locations. 
Keep 2 users for test purposes but the demo flow creates its own users via signup. +- [ ] Rebuild product catalog page (`app/page.tsx`) with shadcn Card components in a grid layout +- [ ] Rebuild product detail page (`app/products/[id]/page.tsx`) with shadcn components (no "Add to Cart" yet — that's M2) +- [ ] Add pagination to the product catalog: extend `findProducts()` with `skip`/`limit` parameters, add Previous/Next controls using shadcn Button +- [ ] Add product search: create `searchProducts(query)` data access function using a pipeline with `$match` + `$regex` (case-insensitive match on `name`, `brand`, `articleType`); add a search Input to the catalog page that submits as a query parameter; catalog page filters results via the search function when a query is present +- [ ] Write integration test for `searchProducts()`: seed products, search by partial name, verify filtered results +- [ ] Remove `DEMO_USER_ID` references from all API routes and page components — replace with `getAuthUser()` or cookie-based user resolution + +**Validates:** auth acceptance criteria (AC 1–5), product browsing (AC 6–8 except "Add to Cart" button), search test (AC 21) + +### Milestone 2: Cart — add to cart, manage items, navbar count + +Wire up the full cart experience: add items from product pages, view/manage the cart, and show a live item count in the navbar. 
+ +**Tasks:** + +- [ ] Add "Add to Cart" button to the product detail page (client component) — calls `POST /api/cart` with the product data; shows loading/success feedback via shadcn Button states and a toast or inline message +- [ ] Add "Add to Cart" button to product cards on the catalog page (small icon button on each card) +- [ ] Update `POST /api/cart` route to read `userId` from the auth cookie instead of `DEMO_USER_ID`; on POST, upsert the cart (create if none exists) then `$push` the item +- [ ] Rebuild cart page (`app/cart/page.tsx`) with shadcn components: list items with name, brand, quantity, price; "Remove" button per item; "Clear Cart" button; subtotal display; "Proceed to Checkout" link +- [ ] Wire "Remove" button to `DELETE /api/cart?productId=X` (calls `removeFromCart()`) +- [ ] Wire "Clear Cart" button to `DELETE /api/cart` with no productId (calls `clearCart()`) +- [ ] Update `DELETE /api/cart` and `GET /api/cart` routes to use the auth cookie for user ID +- [ ] Implement navbar cart count: create `GET /api/cart/count` route that returns `{ count: items.length }` for the authenticated user; navbar client component fetches this on mount and after mutations (via a simple polling interval or custom event) +- [ ] Add a `CartProvider` React context that tracks cart count and provides an `invalidateCart()` function for mutation components to call after add/remove/clear + +**Validates:** cart acceptance criteria (AC 9–13) + +### Milestone 3: Checkout and orders — place order, view history, update status + +Complete the checkout flow and rebuild the order pages with interactive status updates. 
+ +**Tasks:** + +- [ ] Create checkout page (`app/checkout/page.tsx`) with shadcn components: + - Order summary section (items from cart, subtotal, total) + - Shipping address field (pre-filled from user's address if available, otherwise text Input) + - Order type radio group (Home Delivery / BOPIS) using shadcn RadioGroup + - Conditional store location Select dropdown when BOPIS is chosen (populated from `GET /api/locations`) + - "Place Order" Button +- [ ] Create `POST /api/orders` handler update: read user from auth cookie; accept `{ items, shippingAddress, type }` from the checkout form; call `createOrder()` with initial status `{ status: 'placed', timestamp: now }`; call `clearCart()`; return the created order +- [ ] Wire "Place Order": on success, redirect to `/orders/{id}` for the new order +- [ ] Rebuild orders list page (`app/orders/page.tsx`) with shadcn: show each order as a Card with item count, total, latest status Badge, and link to detail +- [ ] Update orders routes to use auth cookie instead of `DEMO_USER_ID` +- [ ] Rebuild order detail page (`app/orders/[id]/page.tsx`) with shadcn: items list, shipping address, status history timeline (using a vertical list with Badges), total +- [ ] Add status progression buttons to the order detail page: show the next logical status as a Button (placed → shipped → delivered). Clicking calls `PATCH /api/orders/[id]` which runs `updateOrderStatus()` (`$push` to `statusHistory`). Disable when status is `delivered`. +- [ ] Verify the order detail page loads correctly using the order ID from the URL and the auth cookie (no hardcoded env var) + +**Validates:** checkout acceptance criteria (AC 14–18), order acceptance criteria (AC 19–22) + +### Milestone 4: Polish and close-out + +Final pass: verify all acceptance criteria, fix rough edges, update documentation. 
+ +**Tasks:** + +- [ ] Run the full test suite — all existing integration tests pass +- [ ] Run typecheck — no errors +- [ ] Manually walk through the full user journey: sign up → browse → search → add to cart → checkout (home + BOPIS) → view orders → update status → log out → log back in and see persisted orders +- [ ] Verify all acceptance criteria from the spec (checklist pass) +- [ ] Fix any remaining inline-style remnants — all UI uses shadcn/Tailwind +- [ ] Update `examples/retail-store/README.md` with updated Quick Start (no more `DEMO_USER_ID`), feature table, and screenshots or description of the interactive flows +- [ ] Update the seed script: remove `DEMO_USER_ID` auto-write logic (no longer needed); seed only products, locations, and sample data — users are created via signup +- [ ] Verify data access layer constraint: all mutations go through PN data access functions, no raw MongoDB driver calls in routes or components + +--- + +## Test Coverage + +| Acceptance Criterion | Test Type | Task/Milestone | Notes | +|---|---|---|---| +| Unauthenticated visitors redirected to login | Manual | M1 | Middleware redirect; verify in walkthrough | +| Sign Up creates user + sets cookie | Integration | M1 | Test `orm.users.create()` path (existing seed test covers ORM create) | +| Auth cookie persists across navigations | Manual | M1 | Verify in walkthrough | +| Navbar shows user name | Manual | M1 | Verify in walkthrough | +| Log out clears cookie + redirects | Manual | M1 | Verify in walkthrough | +| Product catalog paginates | Integration | M1 | Test `findProducts(skip, limit)` with >12 seeded products | +| Search filters products | Integration | M1 | New test for `searchProducts()` with `$regex` | +| Product detail shows "Add to Cart" | Manual | M2 | Verify in walkthrough | +| Add to Cart upserts cart + adds item | Integration | M2 | Existing `upsertCart` + `addToCart` tests cover this | +| Cart page shows items | Manual | M2 | Verify in walkthrough | +| Remove 
button removes item (`$pull`) | Integration | M2 | Existing `removeFromCart` test covers this | +| Clear Cart empties cart | Integration | M2 | Existing `clearCart` test covers this | +| Navbar cart count updates | Manual | M2 | Verify in walkthrough | +| Checkout shows order summary | Manual | M3 | Verify in walkthrough | +| Shipping address entry | Manual | M3 | Verify in walkthrough | +| Home delivery vs BOPIS selection | Manual | M3 | Verify in walkthrough | +| BOPIS store dropdown from DB | Integration | M3 | Existing `findLocations()` test covers data path | +| Place Order creates order + clears cart | Integration | M3 | Existing `createOrder` + `clearCart` tests cover this | +| Orders page lists user's orders | Manual | M3 | Verify in walkthrough | +| Order detail shows items/address/status/total | Manual | M3 | Verify in walkthrough | +| Status update button (`$push` statusHistory) | Integration | M3 | Existing `updateOrderStatus` test covers this | +| All order pages use auth user, not env var | Manual | M3/M4 | Verify no `DEMO_USER_ID` references remain | +| All mutations via PN data access layer | Code review | M4 | Verify in close-out | +| Existing integration tests pass | CI | M4 | Run full suite | +| New search test | Integration | M1 | `searchProducts()` test | + +## Open Items + +1. **Search implementation**: Assumed pipeline with `$match` + `$regex`. If the pipeline builder doesn't support `$regex` in `$match`, fall back to raw command or ORM `where` with string equality. Resolve during M1 implementation. + +2. **Seed data volume**: Expanding from 3 to ~24 products. The seed function's return type (`SeedResult`) may need updating if we stop seeding demo users for the app flow (users are now created via signup). Test seed still creates users for integration tests. + +3. **Cart count reactivity**: The spec requires navbar cart count to update after mutations. 
The simplest approach is a React context with manual invalidation (components call `invalidateCart()` after mutation). More sophisticated approaches (SSE, polling) are out of scope per the spec's non-goals. + +4. **Carry-forward from round 1 code review**: F01 (`objectIdEq` type) and F08 (`biome.jsonc` extends) are already fixed. F07 (no migration artifacts) is out of scope for round 2; tracked in round 1 plan. diff --git a/projects/mongo-example-apps/specs/orm-query-mutation-ergonomics.spec.md b/projects/mongo-example-apps/specs/orm-query-mutation-ergonomics.spec.md new file mode 100644 index 000000000..cf474dd71 --- /dev/null +++ b/projects/mongo-example-apps/specs/orm-query-mutation-ergonomics.spec.md @@ -0,0 +1,131 @@ +# Summary + +Close the ergonomics gap in the Mongo ORM by making `where()` codec-aware, implementing the ADR 180 field accessor for typed mutation operators, and verifying 1:N back-relation loading. This eliminates the need for `mongoRaw` workarounds and manual `MongoParamRef` construction in the retail store example app. + +# Description + +The retail store example app exposed three framework limitations (FL-04, FL-06, FL-08) in the Mongo ORM that force users into low-level workarounds: + +- **FL-06**: The ORM encodes values through codecs on writes (create/update) but not on reads (where). Users must manually construct `MongoParamRef` with the correct `codecId` for every filter. Most visible with ObjectId fields (string → BSON ObjectId), but affects any codec with a non-identity `encode`. +- **FL-04**: The ORM `update()` only supports `$set` semantics. Array mutations (`$push`, `$pull`) and numeric updates (`$inc`) require dropping to `mongoRaw` with untyped commands. ADR 180 specifies the field accessor pattern for these operations. +- **FL-08**: 1:N back-relation loading via `include()` (e.g., User → carts, User → orders) has not been tested. The implementation appears correct but lacks test coverage. 
+ +**Linear**: [TML-2246](https://linear.app/prisma-company/issue/TML-2246) + +# Requirements + +## Functional Requirements + +### FL-06: Codec-aware `where()` overload + +1. `where()` accepts a plain object `{ fieldName: value }` in addition to the existing `MongoFilterExpr`. +2. Each field-value pair is resolved to `$eq` semantics. +3. Values are encoded through codecs using the same `#wrapFieldValue` logic that mutations use — the field's `codecId` is looked up from the contract and attached to the `MongoParamRef`. +4. Multiple fields in a single object are AND'd together. +5. The plain-object `where()` is chainable with other `where()` calls (both object and `MongoFilterExpr` forms). +6. The type of the object maps each field to its codec input type (from the contract type maps). + +### FL-04: ADR 180 field accessor for mutations + +7. `update()` accepts a callback `(u) => FieldOperation[]` in addition to the existing plain-object (`$set`) form. +8. Top-level scalar fields are accessible as properties on the accessor proxy: `u.fieldName` returns an expression with mutation operators. +9. Nested value object fields are accessible via callable dot-path: `u("address.city")` returns an expression with mutation operators. +10. Dot-path strings are type-checked at compile time using recursive template literal types. Invalid paths produce type errors. +11. The expression returned by the accessor provides capability-gated mutation operators: + - All targets: `.set(value)`, `.unset()` + - Mongo: `.inc(value)`, `.mul(value)`, `.push(value)`, `.pull(match)`, `.addToSet(value)`, `.pop(end)` +12. Each operator produces a `FieldOperation` that is collected and compiled into a MongoDB update document (e.g., `{ $push: { "items": ... }, $inc: { "count": 1 } }`). +13. Values in field operations are encoded through codecs using `#wrapFieldValue`. +14. The callback form works with `update()`, `updateAll()`, `updateCount()`, and the `update` part of `upsert()`. 
+ +### FL-08: 1:N back-relation loading + +15. `include()` works correctly for 1:N reference relations (e.g., User → carts, Order → invoices). +16. The `$lookup` stage is generated without `$unwind` for 1:N cardinality. +17. The return type for 1:N included relations is an array. + +## Non-Functional Requirements + +18. No new runtime dependencies. +19. Type-level dot-path resolution must not cause noticeable IDE slowdown for schemas with ≤ 3 levels of value object nesting. + +## Non-goals + +- **Query-side dot-path accessor** (`u("address.city").eq("NYC")` for filter expressions): ADR 180 describes this but it's a separate concern from mutations. The object-based `where()` overload covers the immediate need. +- **Extended comparison operators in object-based `where()`** (e.g., `{ price: { $gte: 10 } }`): `$eq`-only is consistent with the SQL family. Complex filters use the existing `MongoFilterExpr` chain. +- **`$vectorSearch` pipeline stage** (FL-07): Requires Atlas extension pack — separate project. +- **Change streams** (FL-14): Requires streaming subscription support. 
+ +# Acceptance Criteria + +## FL-06: Codec-aware `where()` + +- [ ] `where({ userId: "abc123" })` on a model with `userId: mongo/objectId@1` produces a filter with `MongoParamRef` carrying `codecId: 'mongo/objectId@1'` +- [ ] `where({ name: "Alice" })` on a string field produces a filter with `MongoParamRef` carrying `codecId: 'mongo/string@1'` +- [ ] `where({ userId: "abc", name: "Alice" })` produces an AND of two equality filters +- [ ] Object-based `where()` is chainable with `MongoFilterExpr`-based `where()` +- [ ] Type errors for invalid field names in the where object +- [ ] Type errors for wrong value types (e.g., number for a string field) + +## FL-04: Field accessor mutations + +- [ ] `update(u => [u.items.push(newItem)])` produces `{ $push: { "items": <encoded item> } }` +- [ ] `update(u => [u.count.inc(1)])` produces `{ $inc: { "count": 1 } }` +- [ ] `update(u => [u.name.set("Alice")])` produces `{ $set: { "name": <encoded value> } }` +- [ ] `update(u => [u.name.unset()])` produces `{ $unset: { "name": "" } }` +- [ ] `update(u => [u("address.city").set("NYC")])` produces `{ $set: { "address.city": <encoded value> } }` +- [ ] Multiple operations in a single callback are merged into the update document by operator key +- [ ] Values in field operations are encoded through codecs (codecId attached to MongoParamRef) +- [ ] Callback form works with `updateAll()`, `updateCount()`, and `upsert()` +- [ ] Type error for invalid dot-paths (e.g., `u("address.nonexistent")`) +- [ ] Type error for operator/value mismatch (e.g., `.inc()` on a string field) + +## FL-08: 1:N back-relation loading + +- [ ] `include()` on a 1:N reference relation produces a `$lookup` without `$unwind` +- [ ] Return type for 1:N included relation is an array + +## Retail store cleanup + +- [ ] Retail store `mongoRaw` calls for cart add/remove and order status update are replaced with ORM `update()` calls using the field accessor +- [ ] `objectIdEq()` and `rawObjectIdFilter()` helpers are removed — replaced with object-based 
`where()` +- [ ] `object-id-filter.ts` is deleted + +# Other Considerations + +## Security + +N/A — internal query builder changes, no auth or data sensitivity impact. + +## Cost + +N/A — no infrastructure changes. + +## Observability + +N/A — no new runtime surfaces. + +## Data Protection + +N/A — no change to data handling. + +## Analytics + +N/A. + +# References + +- [ADR 180 — Dot-path field accessor](../../../docs/architecture%20docs/adrs/ADR%20180%20-%20Dot-path%20field%20accessor.md) — authoritative design for the field accessor pattern, mutation semantics, capability-gated operators, and backend translation +- [ADR 178 — Value objects in the contract](../../../docs/architecture%20docs/adrs/ADR%20178%20-%20Value%20objects%20in%20the%20contract.md) — value object definitions that the dot-path accessor navigates +- [Framework limitations](../framework-limitations.md) — FL-04, FL-06, FL-08 detailed descriptions and workarounds +- [Next steps](../../../docs/planning/mongo-target/next-steps.md) — Area 2 scope and sequencing +- [ORM collection implementation](../../../packages/2-mongo-family/5-query-builders/orm/src/collection.ts) — current `where()`, `update()`, `include()` implementation +- [Retail store workarounds](../../../examples/retail-store/src/data/object-id-filter.ts) — `objectIdEq()` helper that FL-06 eliminates + +# Open Questions + +None — all design decisions resolved during discussion: + +1. **FL-04 API shape**: ADR 180 callback pattern with Proxy-based field accessor (resolved by existing ADR). +2. **FL-06 operator scope**: `$eq`-only, consistent with SQL. Complex filters use existing `MongoFilterExpr` chain. +3. **Dot-path scope**: Full callable dot-path accessor, not just top-level fields. This ticket is the natural home; value objects are landed and nothing blocks it. 
diff --git a/projects/mongo-example-apps/specs/retail-store-round-2.spec.md b/projects/mongo-example-apps/specs/retail-store-round-2.spec.md new file mode 100644 index 000000000..a6b7e3e49 --- /dev/null +++ b/projects/mongo-example-apps/specs/retail-store-round-2.spec.md @@ -0,0 +1,159 @@ +# Summary + +Make the retail-store example app interactive — a user can browse products, search, add items to a cart, check out, view orders, and see order status updates — all backed by the PN data access layer. This is the second round of development, building on the data access layer, contract, and static UI delivered in round 1 (M1–M6). The result should be a working e-commerce demo that exercises PN Mongo capabilities through real user flows, not just static data viewing. + +# Description + +Round 1 delivered the contract (PSL with embedded value objects), the typed data access layer (ORM CRUD, relations, `$push`/`$pull`, aggregation), integration tests, and a static Next.js UI that renders pre-seeded data. The API routes support mutations but the UI never calls them — there are no interactive elements (no "Add to Cart" button, no checkout flow, no user picker). + +The original [retail-store-v2](https://github.com/mongodb-industry-solutions/retail-store-v2) is a full interactive demo with Redux state management, product search, cart management, checkout, order tracking with SSE, store locator for BOPIS, a chatbot, customer retention analytics, and guided demo "talk tracks." We don't need all of that — much of it validates external services (Dataworkz chatbot, Atlas Stream Processing, ML recommendations), not PN. + +What we need is **the core e-commerce loop made interactive**: browse → search → add to cart → manage cart → check out → view orders. Each step exercises a distinct PN Mongo capability. The UI should be functional and pleasant, not a pixel-perfect port. 
+ +**Reference material:** The original repo is cloned at `wip/retail-store-v2-reference/` for reference during development. + +# Requirements + +## Functional Requirements + +### User identity + +1. **Login stub**: Unauthenticated visitors are redirected to a login page with a "Sign Up" button. Clicking it creates a new user document (via `orm.users.create()` with a generated name and empty address) and sets an auth cookie with the new user's ID. The rest of the app treats this cookie as the authenticated session — no real auth provider needed, but the app behaves like one exists. +2. **User display**: Show the authenticated user's name in the navbar. Provide a "Log out" action that clears the cookie and redirects to the login page. + +### Product browsing + +3. **Product catalog with pagination**: The product catalog page loads products in pages (e.g. 12 per page) with next/previous navigation. Uses the existing `findProducts()` function, extended with skip/limit support. +4. **Product search**: A search bar on the catalog page filters products by name, brand, or category. This should use the ORM's `where` clause with text matching (or a pipeline with `$regex` if ORM text filters aren't available). Atlas Search (`$search`) is a stretch goal if an Atlas cluster is available. +5. **Product detail**: Clicking a product navigates to its detail page (already exists) and shows an "Add to Cart" button. + +### Cart management + +6. **Add to cart**: From the product detail page (or a product card), the user can add a product to their cart. This calls the existing `addToCart()` data access function (which uses `$push`). If no cart exists, it creates one via `upsertCart()`. +7. **Cart page with item management**: The cart page shows the current user's items with quantities and prices. Each item has a "Remove" button that calls `removeFromCart()` (`$pull`). A "Clear Cart" button calls `clearCart()`. +8. 
**Cart item count in navbar**: The navbar shows the number of items in the current user's cart, updating after add/remove operations. + +### Checkout + +9. **Checkout flow**: From the cart page, a "Checkout" button navigates to a checkout page that shows: + - Order summary (items, subtotal, total) + - Shipping address (pre-filled from the user's address, or a simple text input) + - Order type selector (home delivery vs. BOPIS) + - If BOPIS: a store location picker populated from `findLocations()` + - A "Place Order" button +10. **Place order**: Confirming the order calls `createOrder()` with the cart items, shipping info, and an initial `{ status: 'placed', timestamp: now }` status entry. Then clears the cart via `clearCart()`. Navigates to the order detail page. + +### Order management + +11. **Orders page**: Lists the current user's orders (existing `getUserOrders()`), showing item count, total, and latest status. Each order links to its detail page. +12. **Order detail page**: Shows order items, shipping address, status history timeline, and total. Already partially exists but needs to use the current user context instead of a hardcoded env var. +13. **Order status updates**: The order detail page has a button to simulate status progression (e.g. "Mark as Shipped", "Mark as Delivered") that calls `updateOrderStatus()` (`$push` to `statusHistory`). This exercises the `$push` update operator in an interactive context. + +### Store locator + +14. **Store locations for BOPIS**: During checkout, if BOPIS is selected, show a dropdown of store locations from `findLocations()`. The selected store's address becomes the shipping address. + +## Non-Functional Requirements + +1. **Client-side interactivity**: Interactive features use Next.js client components (`"use client"`) with `fetch` calls to the existing API routes. Server components remain for initial data loading where appropriate. +2. **UI component library**: Use an established component library (e.g. 
shadcn/ui, Radix, or similar) so the app looks polished out of the box without rolling custom components. Replace the existing inline-style UI from round 1 with library components. +3. **No external dependencies**: The interactive features work against `mongodb-memory-server` for tests and any MongoDB instance for the demo. No Atlas-specific features are required for the core interactive loop. +4. **Responsive layout**: The UI should look reasonable on desktop and tablet widths. Mobile is not a priority. +5. **Type safety**: All API request/response types should be derived from the contract types where possible. No `any` types. +6. **Test coverage**: Each new interactive flow should have at least one integration test proving the data access path works end-to-end (most already exist from round 1). + +## Non-goals + +- **Chatbot**: Validates Dataworkz, not PN. Out of scope. +- **Customer retention / CEP / Next Best Actions**: Complex event processing with Atlas Stream Processing and external microservices. Not a PN concern. +- **Personalized recommendations / ML pipeline**: External service populates `lastRecommendations` on user documents. Not a PN concern. +- **Real-time SSE / change streams**: The PN runtime doesn't yet support change streams. Deferred until the framework ships this capability. Order status updates will use polling or manual refresh instead. +- **Atlas Search (`$search`)**: Requires an extension pack not yet built. Product search will use `$regex` or ORM filters as a fallback. Atlas Search is a stretch goal. +- **Talk tracks / guided tours / demo mode**: Presentation tooling for sales demos. Not relevant to PN validation. +- **Digital receipt PDF generation / external invoice URLs**: External service concern. +- **Real authentication**: The login stub fabricates users and sets a cookie. No OAuth, JWT validation, password hashing, or session management beyond a simple cookie. 
+- **Redux or complex client state management**: Use simple React state or context. The original app's Redux store is overkill for what we need. + +# Acceptance Criteria + +## User identity + +- [ ] Unauthenticated visitors are redirected to a login page +- [ ] "Sign Up" creates a new user document and sets an auth cookie +- [ ] Auth cookie persists across page navigations; server components can read it +- [ ] Navbar displays the authenticated user's name +- [ ] "Log out" clears the cookie and redirects to the login page + +## Product browsing + +- [ ] Product catalog paginates (at least 2 pages when >12 products are seeded) +- [ ] Search bar filters products by text match (name, brand, or category) +- [ ] Product detail page shows an "Add to Cart" button + +## Cart + +- [ ] "Add to Cart" creates a cart (upsert) and adds the product +- [ ] Cart page shows current user's items with prices and quantities +- [ ] "Remove" button removes a specific item (`$pull`) +- [ ] "Clear Cart" button empties the cart (`$set items: []`) +- [ ] Navbar shows cart item count that updates after mutations + +## Checkout + +- [ ] Checkout page shows order summary with items and total +- [ ] User can enter/confirm shipping address +- [ ] User can select home delivery or BOPIS +- [ ] BOPIS selection shows a store location dropdown populated from DB +- [ ] "Place Order" creates an order, clears the cart, and navigates to order detail + +## Orders + +- [ ] Orders page lists the current user's orders sorted by most recent +- [ ] Order detail page shows items, address, status history, and total +- [ ] Status update button appends a new status entry (`$push` to `statusHistory`) +- [ ] All order pages use the current authenticated user, not a hardcoded env var + +## Data access + +- [ ] All interactive mutations go through the PN data access layer (no raw MongoDB driver calls in API routes or components) +- [ ] The existing integration tests continue to pass +- [ ] At least one new test covers the 
search/filter data access function + +# Other Considerations + +## Security + +The login stub creates user documents and sets a plain-text user ID cookie. No encryption, signing, or real session management. This is a local demo app — the auth surface simulates the UX of a real app without any of the security infrastructure. + +## Cost + +Zero. Runs against `mongodb-memory-server` locally or any MongoDB instance the developer provides. + +## Observability + +Not applicable beyond standard Next.js dev server output. + +## Data Protection + +Not applicable — all data is synthetic demo data. + +## Analytics + +Not applicable. + +# References + +- [Original retail-store-v2](https://github.com/mongodb-industry-solutions/retail-store-v2) — source repo (cloned to `wip/retail-store-v2-reference/`) +- [Project spec](../spec.md) — parent project spec +- [Round 1 plan](../plans/retail-store-plan.md) — milestones M1–M6 (delivered) +- [Round 1 code review](../reviews/code-review.md) — findings and acceptance criteria status + +# Open Questions + +1. **Search implementation**: Should product search use `$regex` matching via the ORM's `where` clause, or a pipeline with `$regex` in a `$match` stage? The ORM may not support text pattern matching natively. **Assumption:** Use a pipeline with `$match` + `$regex` for search, since the pipeline builder is already used for aggregation and this exercises another PN surface. Fall back to ORM `where` with exact string filters if `$regex` isn't feasible. + +2. **Seed data volume**: The current seed has 3 products, which is too few for pagination or meaningful search. Should we expand the seed to ~20–30 products? **Assumption:** Yes — expand the seed to at least 20 products across multiple categories and brands to make browsing, search, and pagination meaningful. + +3. **Cart item quantity**: When adding a product that's already in the cart, should it increment the quantity or add a duplicate entry? 
The original app uses `$push` which adds duplicates. **Assumption:** Same as original — `$push` adds a new entry. Simplifies the implementation and matches the existing data access function. + +4. **~~User persistence mechanism~~**: Resolved — cookie set by the login stub sign-up flow. Server components read it via `cookies()` for initial data loading.