From 64742388202a58463f14f7f423c6ddd511540b7e Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 06:38:45 +0300 Subject: [PATCH 01/30] docs(retail-store): add framework limitations doc and PR #327 review artifacts MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Extract consolidated framework limitations from code reviews into a standalone document at projects/mongo-example-apps/framework-limitations.md. Covers type ergonomics (FL-01–FL-03), query capabilities (FL-04–FL-08), schema/migration gaps (FL-09–FL-12), and missing capabilities (FL-13–FL-15). Tracks 6 items addressed by this branch (FL-A1–FL-A6). Also commits the PR #327 review artifacts (system-design-review, code-review, walkthrough), the round 2 spec, and the round 2 plan. --- .../framework-limitations.md | 79 +++++++++ .../plans/retail-store-round-2-plan.md | 139 +++++++++++++++ .../reviews/pr-327/code-review.md | 104 ++++++++++++ .../reviews/pr-327/system-design-review.md | 106 ++++++++++++ .../reviews/pr-327/walkthrough.md | 122 ++++++++++++++ .../specs/retail-store-round-2.spec.md | 159 ++++++++++++++++++ 6 files changed, 709 insertions(+) create mode 100644 projects/mongo-example-apps/framework-limitations.md create mode 100644 projects/mongo-example-apps/plans/retail-store-round-2-plan.md create mode 100644 projects/mongo-example-apps/reviews/pr-327/code-review.md create mode 100644 projects/mongo-example-apps/reviews/pr-327/system-design-review.md create mode 100644 projects/mongo-example-apps/reviews/pr-327/walkthrough.md create mode 100644 projects/mongo-example-apps/specs/retail-store-round-2.spec.md diff --git a/projects/mongo-example-apps/framework-limitations.md b/projects/mongo-example-apps/framework-limitations.md new file mode 100644 index 000000000..2b6a8882b --- /dev/null +++ b/projects/mongo-example-apps/framework-limitations.md @@ -0,0 +1,79 @@ +# Framework Limitations — Retail Store Example + +Framework gaps surfaced by the 
retail store example app. Each entry is a signal that the framework needs work — the example app is designed to expose these, not paper over them. + +**Source:** Consolidated from [reviews/code-review.md](reviews/code-review.md) (round 1/2 review) and [reviews/pr-327/code-review.md](reviews/pr-327/code-review.md) (PR #327 review). + +--- + +## Type ergonomics + +These are the highest-impact issues. They force type casts at nearly every boundary between the ORM and application code. + +| ID | Issue | Impact | Workaround in app | Status | +|---|---|---|---|---| +| FL-01 | **Scalar codec output types are not assignable to `string`/`number`** | ORM results for string fields (name, brand, code, currency) are codec wrapper types, not `string`. Every ORM-to-UI boundary requires `as string`. | ~15 `as string` casts in UI components, ~15 `String()` calls in seed | Open | +| FL-02 | **`_id` codec output type is not assignable to `string`** | ORM returns `_id` as `CodecTypes['mongo/objectId@1']['output']`. Every ID handoff between data access functions requires `as string` or `String()`. | ~30 casts across tests, data layer, and seed | Open | +| FL-03 | **Timestamp codec output type incompatible with `Date` or `string`** | The `mongo/dateTime@1` codec output type is neither `Date` nor `string`, forcing `as unknown as string` double casts — the most aggressive form of type assertion. | 1 double cast in order detail page | Open | + +**Root cause:** The codec type map resolves scalar codec IDs (`mongo/string@1`, `mongo/objectId@1`, `mongo/dateTime@1`) to opaque wrapper types instead of their underlying TypeScript primitives. The runtime values *are* the expected primitives — the types just don't reflect that. + +**Framework action:** Codec output types for simple scalars should resolve to types assignable to their JS primitives (`string`, `number`, `boolean`, `Date`). 
+ +--- + +## Query capabilities + +| ID | Issue | Impact | Workaround in app | Status | +|---|---|---|---|---| +| FL-04 | **ORM lacks typed `$push`/`$pull`/`$inc` array update operators** | ORM `update()` only supports `$set` semantics. Array mutations require dropping to `mongoRaw` with untyped commands and manual `MongoParamRef` construction. This is the most exercised workaround in the app. | 3 data access functions use `mongoRaw`: cart add (`$push`), cart remove (`$pull`), order status update (`$push`) | Open | +| FL-05 | **Pipeline and raw query results are untyped** | `runtime.execute()` yields `unknown`. Pipeline builder `build()` produces a plan with no result type. Every pipeline/raw call site casts `row as T` with no compile-time or runtime verification. | `collectResults()` helper centralizes the cast but provides no type safety | Open | +| FL-06 | **ObjectId filter requires manual `MongoParamRef` wrapping** | Filtering by ObjectId-typed fields requires `MongoFieldFilter.of('userId', '$eq', new MongoParamRef(userId, { codecId: 'mongo/objectId@1' }))` instead of a simpler `where({ userId })`. | `objectIdEq()` helper in `object-id-filter.ts` reduces boilerplate | Open | +| FL-07 | **No `$vectorSearch` stage in pipeline builder** | The pipeline builder doesn't expose a `vectorSearch()` stage. Implementing vector search requires raw aggregate with fully untyped commands. Atlas-specific, so likely needs an extension pack. | `findSimilarProducts()` is a stub; would need raw aggregate | Open | +| FL-08 | **1:N back-relation loading not available or not tested** | `include()` only tested for N:1 relations (cart→user, order→user, invoice→order). Loading a user's carts or orders via `include()` from the user side has not been demonstrated. 
| N/A — only N:1 direction used | Open | + +--- + +## Schema & migration + +| ID | Issue | Impact | Workaround in app | Status | +|---|---|---|---|---| +| FL-09 | **Migration planner creates separate collections for polymorphic variants** | Variant models without `@@map` get their own collection creation operations (e.g., `collection.addToCartEvent.create`). Polymorphic variants share the base model's collection — these operations are incorrect and would create unnecessary empty collections if applied. | Migration artifacts committed as-is; incorrect variant collection ops would need manual removal before applying | Open | +| FL-10 | **Variant collection validators are incomplete** | The generated validators for variant collections include only variant-specific fields (e.g., `searchEvent` validator has only `query`) and miss all base model fields (`_id`, `userId`, `sessionId`, `timestamp`, `type`). Structurally wrong even if variant collections were intentional. | N/A — consequence of FL-09 | Open | +| FL-11 | **`$jsonSchema` validator drops `Float` fields** | The JSON schema derivation doesn't recognize the `Float` scalar type. Fields typed as `Float` are silently omitted from validators. E.g., `Price` validator has `required: ["currency"]` but no `amount`. `InvoiceLineItem` drops `unitPrice` and `lineTotal`. | Validators are weaker than intended — Float fields are not validated | Open | +| FL-12 | **Embedded models via `owner` not supported end-to-end** | The contract schema and emitter accept `owner`, and the TS contract builder supports it, but PSL has no `@@owner` attribute and the ORM has no embedded entity CRUD handling. Can't demonstrate embedded entities (as distinct from value objects) in PSL-authored apps. 
| N/A — feature not usable from PSL | Open | + +--- + +## Missing capabilities + +| ID | Issue | Impact | Status | +|---|---|---|---| +| FL-13 | **TypeScript DSL contract authoring not available for Mongo** | The spec requires authoring contracts in both PSL and TS DSL. Only PSL is available for Mongo contracts. Can't validate that both surfaces produce equivalent output. | Open | +| FL-14 | **Change stream support not available** | Can't demonstrate real-time order status updates or event processing via change streams. The spec lists this as a requirement (with the caveat that it requires a replica set). | Open | +| FL-15 | **Atlas Search (`$search`) requires extension pack not yet built** | Product search uses `$regex` as a fallback. Atlas Search would provide relevance-scored full-text search but requires an extension pack. | Open | + +--- + +## Addressed by this branch + +These were previously open limitations that have been resolved in the current branch. + +| ID | Issue | Resolution | +|---|---|---| +| ~~FL-A1~~ | **`@@index`/`@@textIndex`/`@unique` not supported in Mongo PSL** | Now supported — the retail store schema uses text indexes with weights, compound, hashed, TTL, sparse, and collation-aware indexes. All flow through to migration operations. | +| ~~FL-A2~~ | **Polymorphism not demonstrated** | Now demonstrated — `Event` model with `@@discriminator(type)` and 3 variants (`ViewProductEvent`, `SearchEvent`, `AddToCartEvent`). Tests cover variant creation, base queries, discriminator filtering. | +| ~~FL-A3~~ | **Migration planner only handles index create/drop** | Now generates collection creation operations with `$jsonSchema` validators and index creation with full options (unique, sparse, TTL, collation, weights, hashed). Partially addressed — variant collection handling is incorrect (FL-09). 
| +| ~~FL-A4~~ | **ORM mutations didn't encode values through codec registry** | Fixed — ORM now attaches `codecId` from contract fields to `MongoParamRef`; adapter encodes via codec registry. ObjectId fields are properly encoded to BSON ObjectIds. | +| ~~FL-A5~~ | **Nullable value object fields produced incorrect `$jsonSchema` validators** | Fixed — nullable VOs now produce `oneOf: [{ bsonType: "null" }, { bsonType: "object", ... }]`. | +| ~~FL-A6~~ | **Adapter crashed on optional `codec.encode`** | Fixed — guard added before invocation. | + +--- + +## App-level gaps (not framework) + +| ID | Issue | Note | +|---|---|---| +| AG-01 | **Polymorphic events not surfaced in UI** | The data layer and tests fully exercise polymorphism, but no user-facing page displays or creates typed events. Could add an analytics/event log page. | +| AG-02 | **Fabricated image URLs** | Product images use `/images/products/...` paths that don't exist. Products render with broken images. | +| AG-03 | **README domain model diagram out of date** | Says `Events ─── EventMetadata (embedded)` but schema now uses polymorphic variants. `EventMetadata` type no longer exists. | diff --git a/projects/mongo-example-apps/plans/retail-store-round-2-plan.md b/projects/mongo-example-apps/plans/retail-store-round-2-plan.md new file mode 100644 index 000000000..b79336bc4 --- /dev/null +++ b/projects/mongo-example-apps/plans/retail-store-round-2-plan.md @@ -0,0 +1,139 @@ +# Retail Store Round 2 — Interactive E-Commerce + +## Summary + +Make the retail-store example app interactive so a user can sign up, browse and search products, manage a cart, check out, and view orders — all backed by the PN data access layer. This builds on the contract, data access layer, and integration tests delivered in round 1. The result is a working e-commerce demo where every user action exercises a distinct PN Mongo capability (ORM CRUD, `$push`/`$pull`, `$regex` search, upsert, aggregation). 
+ +**Spec:** `projects/mongo-example-apps/specs/retail-store-round-2.spec.md` + +## Collaborators + +| Role | Person/Team | Context | +|---|---|---| +| Maker | Agent / Engineer | Drives execution | +| Reviewer | Will | Architectural review, UX feedback | + +--- + +## Milestones + +### Milestone 1: Foundation — UI library, auth stub, expanded seed data + +Set up shadcn/ui, implement the login stub with cookie-based auth, expand the seed data, and replace the existing inline-style pages with library components. After this milestone, the app has a working auth flow and a polished but still read-only product catalog. + +**Tasks:** + +- [ ] Install and configure shadcn/ui (Tailwind CSS, cn utility, base components: Button, Card, Input, Badge, Select, DropdownMenu, Dialog, Separator, Skeleton) +- [ ] Replace `app/globals.css` inline reset with Tailwind base styles +- [ ] Implement auth middleware: Next.js middleware that checks for a `userId` cookie; redirects unauthenticated requests to `/login` (except `/login` and `/api/auth/*` routes) +- [ ] Create `/login` page with "Sign Up" button — calls `POST /api/auth/signup` which creates a user via `orm.users.create()` with a generated name (e.g. "User-{shortId}"), sets a `userId` cookie, and redirects to `/` +- [ ] Create `POST /api/auth/signup` API route +- [ ] Create `POST /api/auth/logout` API route — clears the `userId` cookie, redirects to `/login` +- [ ] Create a shared `getAuthUser()` server helper that reads the `userId` cookie via `cookies()`, fetches the user via `findUserById()`, and returns null if not found (used by all server components) +- [ ] Rebuild navbar as a server component using shadcn: show user name from `getAuthUser()`, cart item count (placeholder 0 for now), nav links (Products, Cart, Orders), and a Log Out dropdown item +- [ ] Expand seed data to ~24 products across 4+ categories (Apparel/Topwear, Apparel/Bottomwear, Accessories/Bags, Footwear/Shoes) and 5+ brands. Add 3+ store locations. 
Keep 2 users for test purposes but the demo flow creates its own users via signup. +- [ ] Rebuild product catalog page (`app/page.tsx`) with shadcn Card components in a grid layout +- [ ] Rebuild product detail page (`app/products/[id]/page.tsx`) with shadcn components (no "Add to Cart" yet — that's M2) +- [ ] Add pagination to the product catalog: extend `findProducts()` with `skip`/`limit` parameters, add Previous/Next controls using shadcn Button +- [ ] Add product search: create `searchProducts(query)` data access function using a pipeline with `$match` + `$regex` (case-insensitive match on `name`, `brand`, `articleType`); add a search Input to the catalog page that submits as a query parameter; catalog page filters results via the search function when a query is present +- [ ] Write integration test for `searchProducts()`: seed products, search by partial name, verify filtered results +- [ ] Remove `DEMO_USER_ID` references from all API routes and page components — replace with `getAuthUser()` or cookie-based user resolution + +**Validates:** auth acceptance criteria (AC 1–5), product browsing (AC 6–8 except "Add to Cart" button), search test (AC 21) + +### Milestone 2: Cart — add to cart, manage items, navbar count + +Wire up the full cart experience: add items from product pages, view/manage the cart, and show a live item count in the navbar. 
+ +**Tasks:** + +- [ ] Add "Add to Cart" button to the product detail page (client component) — calls `POST /api/cart` with the product data; shows loading/success feedback via shadcn Button states and a toast or inline message +- [ ] Add "Add to Cart" button to product cards on the catalog page (small icon button on each card) +- [ ] Update `POST /api/cart` route to read `userId` from the auth cookie instead of `DEMO_USER_ID`; on POST, upsert the cart (create if none exists) then `$push` the item +- [ ] Rebuild cart page (`app/cart/page.tsx`) with shadcn components: list items with name, brand, quantity, price; "Remove" button per item; "Clear Cart" button; subtotal display; "Proceed to Checkout" link +- [ ] Wire "Remove" button to `DELETE /api/cart?productId=X` (calls `removeFromCart()`) +- [ ] Wire "Clear Cart" button to `DELETE /api/cart` with no productId (calls `clearCart()`) +- [ ] Update `DELETE /api/cart` and `GET /api/cart` routes to use the auth cookie for user ID +- [ ] Implement navbar cart count: create `GET /api/cart/count` route that returns `{ count: items.length }` for the authenticated user; navbar client component fetches this on mount and after mutations (via a simple polling interval or custom event) +- [ ] Add a `CartProvider` React context that tracks cart count and provides an `invalidateCart()` function for mutation components to call after add/remove/clear + +**Validates:** cart acceptance criteria (AC 9–13) + +### Milestone 3: Checkout and orders — place order, view history, update status + +Complete the checkout flow and rebuild the order pages with interactive status updates. 
+ +**Tasks:** + +- [ ] Create checkout page (`app/checkout/page.tsx`) with shadcn components: + - Order summary section (items from cart, subtotal, total) + - Shipping address field (pre-filled from user's address if available, otherwise text Input) + - Order type radio group (Home Delivery / BOPIS) using shadcn RadioGroup + - Conditional store location Select dropdown when BOPIS is chosen (populated from `GET /api/locations`) + - "Place Order" Button +- [ ] Create `POST /api/orders` handler update: read user from auth cookie; accept `{ items, shippingAddress, type }` from the checkout form; call `createOrder()` with initial status `{ status: 'placed', timestamp: now }`; call `clearCart()`; return the created order +- [ ] Wire "Place Order": on success, redirect to `/orders/{id}` for the new order +- [ ] Rebuild orders list page (`app/orders/page.tsx`) with shadcn: show each order as a Card with item count, total, latest status Badge, and link to detail +- [ ] Update orders routes to use auth cookie instead of `DEMO_USER_ID` +- [ ] Rebuild order detail page (`app/orders/[id]/page.tsx`) with shadcn: items list, shipping address, status history timeline (using a vertical list with Badges), total +- [ ] Add status progression buttons to the order detail page: show the next logical status as a Button (placed → shipped → delivered). Clicking calls `PATCH /api/orders/[id]` which runs `updateOrderStatus()` (`$push` to `statusHistory`). Disable when status is `delivered`. +- [ ] Verify the order detail page loads correctly using the order ID from the URL and the auth cookie (no hardcoded env var) + +**Validates:** checkout acceptance criteria (AC 14–18), order acceptance criteria (AC 19–22) + +### Milestone 4: Polish and close-out + +Final pass: verify all acceptance criteria, fix rough edges, update documentation. 
+ +**Tasks:** + +- [ ] Run the full test suite — all existing integration tests pass +- [ ] Run typecheck — no errors +- [ ] Manually walk through the full user journey: sign up → browse → search → add to cart → checkout (home + BOPIS) → view orders → update status → log out → log back in and see persisted orders +- [ ] Verify all acceptance criteria from the spec (checklist pass) +- [ ] Fix any remaining inline-style remnants — all UI uses shadcn/Tailwind +- [ ] Update `examples/retail-store/README.md` with updated Quick Start (no more `DEMO_USER_ID`), feature table, and screenshots or description of the interactive flows +- [ ] Update the seed script: remove `DEMO_USER_ID` auto-write logic (no longer needed); seed only products, locations, and sample data — users are created via signup +- [ ] Verify data access layer constraint: all mutations go through PN data access functions, no raw MongoDB driver calls in routes or components + +--- + +## Test Coverage + +| Acceptance Criterion | Test Type | Task/Milestone | Notes | +|---|---|---|---| +| Unauthenticated visitors redirected to login | Manual | M1 | Middleware redirect; verify in walkthrough | +| Sign Up creates user + sets cookie | Integration | M1 | Test `orm.users.create()` path (existing seed test covers ORM create) | +| Auth cookie persists across navigations | Manual | M1 | Verify in walkthrough | +| Navbar shows user name | Manual | M1 | Verify in walkthrough | +| Log out clears cookie + redirects | Manual | M1 | Verify in walkthrough | +| Product catalog paginates | Integration | M1 | Test `findProducts(skip, limit)` with >12 seeded products | +| Search filters products | Integration | M1 | New test for `searchProducts()` with `$regex` | +| Product detail shows "Add to Cart" | Manual | M2 | Verify in walkthrough | +| Add to Cart upserts cart + adds item | Integration | M2 | Existing `upsertCart` + `addToCart` tests cover this | +| Cart page shows items | Manual | M2 | Verify in walkthrough | +| Remove 
button removes item (`$pull`) | Integration | M2 | Existing `removeFromCart` test covers this | +| Clear Cart empties cart | Integration | M2 | Existing `clearCart` test covers this | +| Navbar cart count updates | Manual | M2 | Verify in walkthrough | +| Checkout shows order summary | Manual | M3 | Verify in walkthrough | +| Shipping address entry | Manual | M3 | Verify in walkthrough | +| Home delivery vs BOPIS selection | Manual | M3 | Verify in walkthrough | +| BOPIS store dropdown from DB | Integration | M3 | Existing `findLocations()` test covers data path | +| Place Order creates order + clears cart | Integration | M3 | Existing `createOrder` + `clearCart` tests cover this | +| Orders page lists user's orders | Manual | M3 | Verify in walkthrough | +| Order detail shows items/address/status/total | Manual | M3 | Verify in walkthrough | +| Status update button (`$push` statusHistory) | Integration | M3 | Existing `updateOrderStatus` test covers this | +| All order pages use auth user, not env var | Manual | M3/M4 | Verify no `DEMO_USER_ID` references remain | +| All mutations via PN data access layer | Code review | M4 | Verify in close-out | +| Existing integration tests pass | CI | M4 | Run full suite | +| New search test | Integration | M1 | `searchProducts()` test | + +## Open Items + +1. **Search implementation**: Assumed pipeline with `$match` + `$regex`. If the pipeline builder doesn't support `$regex` in `$match`, fall back to raw command or ORM `where` with string equality. Resolve during M1 implementation. + +2. **Seed data volume**: Expanding from 3 to ~24 products. The seed function's return type (`SeedResult`) may need updating if we stop seeding demo users for the app flow (users are now created via signup). Test seed still creates users for integration tests. + +3. **Cart count reactivity**: The spec requires navbar cart count to update after mutations. 
The simplest approach is a React context with manual invalidation (components call `invalidateCart()` after mutation). More sophisticated approaches (SSE, polling) are out of scope per the spec's non-goals. + +4. **Carry-forward from round 1 code review**: F01 (`objectIdEq` type) and F08 (`biome.jsonc` extends) are already fixed. F07 (no migration artifacts) is out of scope for round 2; tracked in round 1 plan. diff --git a/projects/mongo-example-apps/reviews/pr-327/code-review.md b/projects/mongo-example-apps/reviews/pr-327/code-review.md new file mode 100644 index 000000000..493f9b913 --- /dev/null +++ b/projects/mongo-example-apps/reviews/pr-327/code-review.md @@ -0,0 +1,104 @@ +# Code Review + +**Branch:** `tml-2185-port-retail-store-v2-e-commerce-app-to-prisma-next-mongodb` +**Base:** `origin/main` +**PR:** [#327](https://github.com/prisma/prisma-next/pull/327) +**Specs:** [projects/mongo-example-apps/spec.md](../../spec.md), [projects/mongo-example-apps/specs/retail-store-round-2.spec.md](../../specs/retail-store-round-2.spec.md) + +--- + +## Acceptance criteria status + +### Project spec (spec.md) — Retail Store portion + +| Criterion | Status | Evidence | +|---|---|---| +| PN contract authored in PSL | DONE | [prisma/contract.prisma](../../../../examples/retail-store/prisma/contract.prisma) | +| Contract emits valid contract.json + contract.d.ts | DONE | [src/contract.json](../../../../examples/retail-store/src/contract.json), [src/contract.d.ts](../../../../examples/retail-store/src/contract.d.ts) | +| Schema migrations create collections, indexes, validators | DONE (with caveats — see F01) | [migrations/20260413T0314_migration/](../../../../examples/retail-store/migrations/20260413T0314_migration/) | +| All CRUD via PN ORM | DONE | [src/data/](../../../../examples/retail-store/src/data/) | +| Embedded documents inline in results | DONE | Value objects (Price, Image, Address, CartItem, etc.) 
returned inline | +| Referenced relations via $lookup include() | DONE | [test/relations.test.ts](../../../../examples/retail-store/test/relations.test.ts) | +| Query results fully typed | PARTIAL — `as string` casts needed for _id fields, `as T` for pipeline results | See F04, F05 | +| At least one mutation uses $push/$pull | DONE | Cart add/remove, order status update | +| Vector search works via extension pack | STUB | `findSimilarProducts` defined but requires Atlas; not tested | +| At least one data migration runs | NO | Migration planner generates schema-only operations | +| Change stream subscription | NO | Not supported in framework | +| Runs against mongodb-memory-server | DONE | [test/setup.ts](../../../../examples/retail-store/test/setup.ts) | +| Demonstrates ≥3 MongoDB idioms | DONE | Embedded docs, referenced relations, polymorphism, update operators, aggregation, search, indexes | + +### Round 2 spec (retail-store-round-2.spec.md) + +| Criterion | Status | Evidence | +|---|---|---| +| Login stub with signup/logout | DONE | [app/login/page.tsx](../../../../examples/retail-store/app/login/page.tsx), [app/api/auth/](../../../../examples/retail-store/app/api/auth/) | +| Navbar displays user name + logout | DONE | [src/components/navbar.tsx](../../../../examples/retail-store/src/components/navbar.tsx) | +| Product catalog with pagination | DONE | [app/page.tsx](../../../../examples/retail-store/app/page.tsx) with skip/take | +| Product search | DONE | [app/api/products/route.ts](../../../../examples/retail-store/app/api/products/route.ts) with $regex | +| Add to cart | DONE | [src/components/add-to-cart-button.tsx](../../../../examples/retail-store/src/components/add-to-cart-button.tsx) | +| Cart page with remove/clear | DONE | [app/cart/page.tsx](../../../../examples/retail-store/app/cart/page.tsx) | +| Navbar cart count | DONE | [src/components/cart-badge.tsx](../../../../examples/retail-store/src/components/cart-badge.tsx) | +| Checkout with home/BOPIS 
| DONE | [app/checkout/page.tsx](../../../../examples/retail-store/app/checkout/page.tsx) | +| Orders page + detail | DONE | [app/orders/page.tsx](../../../../examples/retail-store/app/orders/page.tsx), [app/orders/\[id\]/page.tsx](../../../../examples/retail-store/app/orders/[id]/page.tsx) | +| Order status progression | DONE | [app/orders/\[id\]/order-status-buttons.tsx](../../../../examples/retail-store/app/orders/[id]/order-status-buttons.tsx) | +| BOPIS location picker | DONE | Fetches from /api/locations in checkout | +| UI component library | DONE | shadcn-style components under [src/components/ui/](../../../../examples/retail-store/src/components/ui/) | +| Existing tests pass | DONE | All test files exercise mongodb-memory-server | + +--- + +## Findings + +### FRAMEWORK LIMITATIONS + +These are gaps in the Prisma Next framework surfaced by this example app. Each one is a signal that the framework needs work. + +| ID | Finding | Description | Location | +|---|---|---|---| +| F01 | **Migration planner creates separate collections for polymorphic variants** | The planner generates `collection.addToCartEvent.create`, `collection.searchEvent.create`, `collection.viewProductEvent.create` as separate collection operations. Variant models share the base model's collection — they should not get their own. This produces incorrect migration artifacts that would create unnecessary empty collections if applied. | [ops.json L1–47, L509–551, L626–675](../../../../examples/retail-store/migrations/20260413T0314_migration/ops.json) | +| F02 | **Variant collection validators are incomplete** | The validators for variant collections (e.g., `searchEvent`) include only variant-specific fields (e.g., `query`) and miss all base model fields (`_id`, `userId`, `sessionId`, `timestamp`, `type`). Even if variant collections were intentional, the validators would be structurally wrong. 
| [ops.json L529–541](../../../../examples/retail-store/migrations/20260413T0314_migration/ops.json) | +| F03 | **$jsonSchema validator drops `Float` fields** | The `Price` value object validator includes `currency` but not `amount`. The `InvoiceLineItem` validator includes `amount` and `name` but drops `unitPrice`, `lineTotal`. The JSON schema derivation doesn't recognize the `Float` scalar type, so fields typed as `Float` are silently omitted from validators. | [ops.json L105–115](../../../../examples/retail-store/migrations/20260413T0314_migration/ops.json) — `Price` validator has `required: ["currency"]` | +| F04 | **`_id` field returns opaque type requiring String() casts** | ORM query results return `_id` as an opaque codec output type, not `string`. Code must cast via `String(entity._id)` or `entity._id as string` throughout the data layer and seed script. This is pervasive across the entire app. | [src/seed.ts](../../../../examples/retail-store/src/seed.ts), [src/data/orders.ts](../../../../examples/retail-store/src/data/orders.ts), [test/api-flows.test.ts](../../../../examples/retail-store/test/api-flows.test.ts) | +| F05 | **ORM string fields return opaque codec output type** | Fields like `name`, `brand`, `code` return codec output types rather than `string`, forcing `String(p0.name)` casts in the seed. The contract types know these are `mongo/string@1` codec fields, but the ORM's output type resolution doesn't simplify codec types to their JS primitives. | [src/seed.ts L152–175](../../../../examples/retail-store/src/seed.ts) | +| F06 | **Pipeline and raw query results are untyped** | The pipeline builder and `runtime.execute()` return untyped async iterables. The `collectResults()` helper casts `row as T` at the boundary. The framework has no mechanism to propagate result types through aggregation stages or raw commands. 
| [src/data/execute-raw.ts L11–14](../../../../examples/retail-store/src/data/execute-raw.ts) |
| F07 | **ORM lacks typed $push/$pull array update operators** | The ORM's `update()` method only supports `$set` semantics. Array update operators (`$push`, `$pull`, `$inc`) require dropping to `mongoRaw` with untyped commands and manual `MongoParamRef` wrapping. This is the most exercised workaround in the app (cart add/remove, order status update). | [src/data/carts.ts L43–63](../../../../examples/retail-store/src/data/carts.ts), [src/data/orders.ts](../../../../examples/retail-store/src/data/orders.ts) |
| F08 | **ObjectId filter requires MongoParamRef wrapping** | Filtering by ObjectId-typed fields requires constructing `MongoFieldFilter.of('userId', '$eq', new MongoParamRef(userId, { codecId: 'mongo/objectId@1' }))` instead of a simpler `where({ userId })`. The helper `objectIdEq()` in `object-id-filter.ts` exists to reduce this boilerplate. | [src/data/object-id-filter.ts](../../../../examples/retail-store/src/data/object-id-filter.ts) |
| F09 | **No $vectorSearch stage in pipeline builder** | `findSimilarProducts` in `products.ts` is a stub that would need raw aggregate to implement `$vectorSearch`. The pipeline builder doesn't expose a `vectorSearch()` stage. Requires an extension pack not yet built. | [src/data/products.ts](../../../../examples/retail-store/src/data/products.ts) |
| F10 | **Embedded models via `owner` not supported in PSL** | The contract schema and emitter accept `owner`, but PSL has no `@@owner` attribute and the ORM has no embedded entity CRUD handling. Can't demonstrate embedded entities (as distinct from value objects) in PSL-authored apps. | — |
| F11 | **Polymorphism demonstrated in the data layer but not in the UI** | `@@discriminator`/`@@base` work correctly in the ORM (create with auto-injected discriminator, variant queries with discriminator filter). The data layer and tests fully exercise this. 
The UI doesn't surface event variants — events are only created/queried in tests and seed, not in user-facing pages. | [test/polymorphism.test.ts](../../../../examples/retail-store/test/polymorphism.test.ts), [src/data/events.ts](../../../../examples/retail-store/src/data/events.ts) | + +### CODE QUALITY + +| ID | Finding | Description | Location | +|---|---|---|---| +| C01 | **Fabricated image URLs** | Product images use URLs like `/images/products/her-oxf-001.jpg` but no such images exist in the project. The product cards and detail page render broken `` tags. Consider using placeholder services or removing the `` tag. | [src/seed.ts L145](../../../../examples/retail-store/src/seed.ts) | +| C02 | **Domain model README out of date** | The README says `Events ─── EventMetadata (embedded)` but the schema now uses polymorphic variants (ViewProductEvent, SearchEvent, AddToCartEvent). The `EventMetadata` type no longer exists. | [README.md L86](../../../../examples/retail-store/README.md) | +| C03 | **`String()` calls throughout seed for value-object-nested field access** | The seed script uses `String(p0.name)`, `String(p0.brand)`, `String(p0.code)` to convert codec output types to strings when constructing cart/order items. This is verbose and obscures intent. This is caused by F05 (framework limitation). | [src/seed.ts L147–175](../../../../examples/retail-store/src/seed.ts) | +| C04 | **`UNAUTHORIZED` response object may be shared across requests** | `const UNAUTHORIZED = NextResponse.json(...)` is defined at module scope in the order route. Next.js `Response` objects may not be safely reused across requests in some contexts (headers can be mutated). Consider creating fresh responses per request. 
| [app/api/orders/\[id\]/route.ts L11–12](../../../../examples/retail-store/app/api/orders/[id]/route.ts) | + +### POSITIVE OBSERVATIONS + +| ID | Observation | Detail | +|---|---|---| +| P01 | **Clean data access separation** | Each collection has its own module with typed functions. No raw MongoDB calls leak into routes or components. | +| P02 | **Framework fixes driven by example** | The branch includes 4 framework fixes discovered during development: ORM codec attachment, adapter codec encoding, nullable VO validators, optional codec.encode guard. Each fix has a corresponding test. | +| P03 | **Polymorphic events demonstrate real-world pattern** | The Event model with discriminator and 3 variants is the cleanest demonstration of PN's polymorphism support. Tests cover variant creation, base-collection queries, and discriminator-filtered queries. | +| P04 | **Index variety** | The PSL contract demonstrates text indexes with weights, compound indexes, hashed indexes, TTL indexes, sparse indexes, and collation-aware indexes — all in one schema. The migration test verifies each one. | +| P05 | **Test infrastructure** | The shared `setupTestDb()` helper creates isolated MongoMemoryReplSet instances per test suite, with proper cleanup. 12 test files cover the full data access surface. | +| P06 | **Interactive e-commerce loop** | Browse → search → add to cart → checkout → orders — each step exercises a distinct PN capability through the data access layer. | + +--- + +## Summary + +The branch delivers a substantial, working e-commerce example that validates PN's MongoDB support across embedded value objects, reference relations, polymorphism, array operators, pipelines, search, and schema indexes. The framework fixes (ORM codec attachment, adapter encoding, nullable validators) are well-scoped and tested. + +The most significant framework limitations surfaced are: + +1. 
**Migration planner bug with polymorphic variants** (F01–F02) — creates incorrect separate collections +2. **Float fields dropped from validators** (F03) — silent data loss in schema validation +3. **Opaque codec output types** (F04–F05) — forces String() casts everywhere +4. **No typed array update operators** (F07) — the most exercised workaround +5. **Untyped pipeline/raw results** (F06) — forces `as T` casts + +These are genuine framework signals. The app is correctly structured to highlight them rather than paper over them. diff --git a/projects/mongo-example-apps/reviews/pr-327/system-design-review.md b/projects/mongo-example-apps/reviews/pr-327/system-design-review.md new file mode 100644 index 000000000..0b32f918d --- /dev/null +++ b/projects/mongo-example-apps/reviews/pr-327/system-design-review.md @@ -0,0 +1,106 @@ +# System Design Review + +**Branch:** `tml-2185-port-retail-store-v2-e-commerce-app-to-prisma-next-mongodb` +**Base:** `origin/main` +**PR:** [#327](https://github.com/prisma/prisma-next/pull/327) +**Specs:** [projects/mongo-example-apps/spec.md](../../spec.md), [projects/mongo-example-apps/specs/retail-store-round-2.spec.md](../../specs/retail-store-round-2.spec.md) + +--- + +## Problem being solved + +Validate that Prisma Next's MongoDB implementation handles a real-world data model — an interactive e-commerce platform with embedded value objects, referenced relations, polymorphic types, array update operators, aggregation pipelines, search, schema indexes, and migration artifacts. The retail store exercises more PN Mongo features in one app than any existing example. + +## New guarantees / invariants + +1. **PSL contract with embedded value objects**: 8 `type` definitions (Price, Image, Address, CartItem, OrderLineItem, StatusEntry, InvoiceLineItem, EventMetadata) produce `valueObject`-kind fields in the contract. The ORM wraps value object data in correctly-typed `MongoParamRef` entries with `codecId` at mutation time. + +2. 
**Polymorphic event collection**: The `Event` base model with `@@discriminator(type)` and three variant models (`ViewProductEvent`, `SearchEvent`, `AddToCartEvent`) via `@@base` produce a single `events` collection. The ORM's `variant()` method injects discriminator filters on read and auto-injects discriminator values on create. + +3. **ORM codec encoding**: Mutations now attach `codecId` from the contract's field definition to `MongoParamRef` instances, and the adapter encodes values through the codec registry before sending to the wire. This ensures ObjectId fields written as strings are properly encoded to BSON ObjectIds. + +4. **Nullable value object validators**: The `$jsonSchema` validator derivation now handles nullable value object fields correctly, producing `oneOf: [{ bsonType: "null" }, { bsonType: "object", ... }]` instead of incorrectly requiring a non-null object. + +5. **Schema indexes via PSL**: `@@index`, `@@textIndex`, `@@unique`, `@unique` in the PSL contract produce index definitions in `contract.json`, which flow through to migration operations. + +## Subsystem fit + +### Contract (PSL → contract.json → contract.d.ts) + +The retail store uses a single `contract.prisma` file with 7 models, 3 polymorphic variants, and 8 value object types. The PSL interpreter produces the correct contract structure: + +- Value objects → `type: { kind: "valueObject", name: "..." }` on model fields +- Polymorphic models → `discriminator`, `variants`, `base` on the correct models +- Indexes → `storage.collections.*.indexes` with the correct keys, options (unique, sparse, TTL, collation, weights, hashed) +- Validators → `$jsonSchema` validators derived from model field definitions + +The emitted `contract.d.ts` carries fully-typed model definitions including value object nesting and polymorphic variant structure. 
+ +### ORM + +The ORM handles: + +- CRUD with value object fields (nested objects correctly wrapped) +- `include()` for N:1 reference relations (cart→user, order→user, invoice→order) +- `variant()` for polymorphic collection narrowing +- `skip()`/`take()` for pagination +- `where()` with `MongoFieldFilter` for filtering + +The ORM does **not** handle: + +- Typed `$push`/`$pull` array operators (uses `mongoRaw` instead) +- Embedded models via `owner` (value objects work; entities with identity don't) + +### Runtime / Adapter + +The adapter was extended to encode `MongoParamRef` values through the codec registry. This is a behavioral change: previously, all `MongoParamRef.value` was passed to the wire as-is. Now, if a `MongoParamRef` carries a `codecId`, the adapter looks up the codec and calls `encode()` before sending. This ensures that string-typed ObjectId fields are properly encoded as BSON ObjectIds. + +### Migration + +The planner generates collection creation operations with `$jsonSchema` validators and index creation operations from the contract. **However, there is a bug** — variant models without `@@map` get separate collection creation operations (e.g., `collection.addToCartEvent.create`, `collection.searchEvent.create`) instead of being recognized as part of the base model's collection. See F01 in code review. + +## Boundary correctness + +- All retail-store code lives under `examples/retail-store/` — no import into framework packages +- Framework fixes live in their proper packages: ORM changes in `packages/2-mongo-family/5-query-builders/orm/`, adapter changes in `packages/3-mongo-target/2-mongo-adapter/`, PSL validator derivation in `packages/2-mongo-family/2-authoring/contract-psl/` +- No new cross-layer or cross-domain imports introduced + +## Design review + +### Cookie-based auth (appropriate for demo scope) + +The auth system uses a plain-text `userId` cookie set on signup, checked by Next.js middleware. 
This is explicitly scoped as a demo stub — no encryption, signing, or real session management. The middleware matcher correctly excludes `/login`, `/api/auth/*`, and static assets. All order routes verify ownership by comparing `order.userId` to the authenticated user. + +### Cart management via raw commands + +The cart add/remove operations use `mongoRaw` with `$push`/`$pull` because the ORM doesn't expose typed array update operators. This is a documented framework limitation. The `addToCart` function uses `upsert: true` with `$setOnInsert` to handle first-cart creation atomically. + +### Data access layer separation + +Each collection has its own module under `src/data/` with typed functions that accept `Db` and return typed results. API routes and pages compose these functions — no raw MongoDB calls leak into the UI layer. The `executeRaw()` and `collectResults()` helpers in `execute-raw.ts` centralize the `for await` draining pattern for pipeline and raw results. + +## Test strategy adequacy + +The branch includes 12 test files: + +| Test file | Coverage | +|---|---| +| `crud-lifecycle.test.ts` | Create, read, update, delete for products, users, carts, orders | +| `relations.test.ts` | `$lookup` via `include()` for cart→user, order→user, invoice→order | +| `update-operators.test.ts` | `$push`/`$pull` for cart items, `$push` for order status | +| `aggregation.test.ts` | Event type aggregation pipeline, random product sampling | +| `polymorphism.test.ts` | Variant creation, base-collection queries, discriminator filtering | +| `search.test.ts` | Multi-field `$regex` search, pagination with skip/take | +| `cart-lifecycle.test.ts` | Add, remove, clear, upsert cart operations | +| `order-lifecycle.test.ts` | Create order, status updates, get/delete operations | +| `api-flows.test.ts` | Order ownership verification, checkout flow, status progression | +| `seed.test.ts` | Seed data correctness (counts, structure) | +| `migration.test.ts` | Contract index definitions, 
index creation on real MongoDB | +| `setup.ts` | Shared test infrastructure with MongoMemoryReplSet | + +All tests run against `mongodb-memory-server` — no external DB required. The coverage is strong for the data access layer. The gap is API route-level tests — the `api-flows.test.ts` tests the data access functions directly rather than making HTTP calls through the routes, so middleware/auth cookie behavior is not tested programmatically. + +## Risk assessment + +- **Migration planner produces incorrect operations for polymorphic models** — variant models get separate collection creation operations. This will fail or create unnecessary collections if applied. Low impact today (migration can be applied manually or the ops corrected), but the planner bug should be tracked. +- **No integration test verifying the migration operations apply successfully** — the `migration.test.ts` validates contract index definitions and manually creates indexes, but doesn't run the actual migration planner output against a database. diff --git a/projects/mongo-example-apps/reviews/pr-327/walkthrough.md b/projects/mongo-example-apps/reviews/pr-327/walkthrough.md new file mode 100644 index 000000000..37d0624c8 --- /dev/null +++ b/projects/mongo-example-apps/reviews/pr-327/walkthrough.md @@ -0,0 +1,122 @@ +# Walkthrough + +## Sources + +- PR: [#327](https://github.com/prisma/prisma-next/pull/327) +- Specs: [projects/mongo-example-apps/spec.md](../../spec.md), [projects/mongo-example-apps/specs/retail-store-round-2.spec.md](../../specs/retail-store-round-2.spec.md) +- Commit range: `origin/main...HEAD` (43 commits, 91 files, ~12.8k lines) + +## Intent + +Build a working interactive e-commerce application — the "retail store" — that validates Prisma Next's MongoDB support against a real-world data model. 
The app exercises embedded value objects, referenced relations, polymorphic types, array update operators, aggregation pipelines, multi-field search, pagination, schema indexes, and migration artifacts. Along the way, fix the framework bugs discovered during development. + +## The story + +1. **Define the domain in PSL** — A contract with 7 models, 3 polymorphic variants, and 8 embedded value object types establishes the retail domain: products, users, carts, orders, invoices, locations, and events. The PSL contract uses `@@discriminator`/`@@base` for polymorphic events, `@@textIndex` with weights, compound/hashed/TTL/sparse/collation-aware indexes, and `@unique` for email. + +2. **Build a typed data access layer** — One module per collection under `src/data/` wraps all database operations in typed functions that accept a `Db` handle and return typed results. ORM CRUD for standard operations, `mongoRaw` for array update operators (`$push`/`$pull`), pipeline builder for aggregation, and `$regex` for search. No raw MongoDB calls leak outside this layer. + +3. **Make it interactive** — Cookie-based auth (signup creates a user, sets a `userId` cookie), product catalog with pagination and search, add-to-cart with live badge updates, checkout with home delivery vs. BOPIS (store location picker), order management with status progression. Built with Next.js App Router, Tailwind CSS v4, and shadcn-style UI components. + +4. **Fix the framework to make it work** — Four framework bugs discovered during development were fixed in their proper packages: ORM codec attachment on mutations, adapter codec encoding via registry, nullable value object validator derivation, and optional codec.encode guard. + +5. **Validate with tests** — 12 test files covering CRUD lifecycle, relations, polymorphism, aggregation, search, cart/order lifecycle, API-level flows, migration/indexes, and seeding. All tests run against `mongodb-memory-server`. 
+ +## Behavior changes & evidence + +### Adds a complete e-commerce example app + +Adds a Next.js retail store application under `examples/retail-store/` that demonstrates the full range of PN's MongoDB capabilities through an interactive storefront. + +- **Why**: The existing examples (mongo-demo) cover basic CRUD. The project spec requires validation against a real-world data model with real complexity. +- **Implementation**: + - [examples/retail-store/prisma/contract.prisma](examples/retail-store/prisma/contract.prisma) — PSL contract with 7 models, 3 variants, 8 value objects, 11 indexes + - [examples/retail-store/src/data/](examples/retail-store/src/data/) — data access layer (carts, events, invoices, locations, orders, products, users) + - [examples/retail-store/src/db.ts](examples/retail-store/src/db.ts) — db factory + - [examples/retail-store/src/seed.ts](examples/retail-store/src/seed.ts) — seed data (24 products, 4 locations, users, orders, events) + - [examples/retail-store/app/](examples/retail-store/app/) — Next.js pages and API routes + - [examples/retail-store/src/components/](examples/retail-store/src/components/) — navbar, cart provider, add-to-cart, UI primitives + - [examples/retail-store/middleware.ts](examples/retail-store/middleware.ts) — auth middleware +- **Tests**: + - [examples/retail-store/test/crud-lifecycle.test.ts](examples/retail-store/test/crud-lifecycle.test.ts) — CRUD operations per collection + - [examples/retail-store/test/relations.test.ts](examples/retail-store/test/relations.test.ts) — $lookup via include() + - [examples/retail-store/test/update-operators.test.ts](examples/retail-store/test/update-operators.test.ts) — $push/$pull for carts and orders + - [examples/retail-store/test/aggregation.test.ts](examples/retail-store/test/aggregation.test.ts) — event aggregation, random product sampling + - [examples/retail-store/test/polymorphism.test.ts](examples/retail-store/test/polymorphism.test.ts) — variant creation, 
discriminator filtering, base queries + - [examples/retail-store/test/search.test.ts](examples/retail-store/test/search.test.ts) — multi-field $regex search, pagination + - [examples/retail-store/test/cart-lifecycle.test.ts](examples/retail-store/test/cart-lifecycle.test.ts) — cart add/remove/clear/upsert + - [examples/retail-store/test/order-lifecycle.test.ts](examples/retail-store/test/order-lifecycle.test.ts) — order create/status/delete + - [examples/retail-store/test/api-flows.test.ts](examples/retail-store/test/api-flows.test.ts) — order ownership, checkout flow, status progression + - [examples/retail-store/test/seed.test.ts](examples/retail-store/test/seed.test.ts) — seed data integrity + - [examples/retail-store/test/migration.test.ts](examples/retail-store/test/migration.test.ts) — contract index definitions, index creation on real MongoDB + +### ORM mutations now encode values through the codec registry + +**Before**: `MongoParamRef` instances created by the ORM carried no codec information. The adapter passed `MongoParamRef.value` to the wire as-is. String-typed ObjectId fields were sent as plain strings, causing type mismatches when MongoDB expected BSON ObjectIds. + +**After**: The ORM's `#toDocument()` and `#toSetFields()` methods look up each field's `codecId` from the contract and attach it to the `MongoParamRef`. The adapter's `resolveValue()` checks for `codecId` and calls `codec.encode()` before sending to the wire. This ensures ObjectId fields are properly encoded to BSON ObjectIds without manual wrapping. + +- **Why**: Without codec encoding, ORM mutations that write to `ObjectId`-typed foreign key fields (e.g., `cart.userId`, `order.userId`) would write plain strings instead of BSON ObjectIds, breaking `$lookup` joins and index usage. 
+- **Implementation**: + - [packages/2-mongo-family/5-query-builders/orm/src/collection.ts](packages/2-mongo-family/5-query-builders/orm/src/collection.ts) — `#wrapFieldValue()`, `#wrapValueObject()`, `#modelFields()` methods; updated `#toDocument()` and `#toSetFields()` + - [packages/3-mongo-target/2-mongo-adapter/src/resolve-value.ts](packages/3-mongo-target/2-mongo-adapter/src/resolve-value.ts) — codec lookup and encode in `resolveValue()` + - [packages/3-mongo-target/2-mongo-adapter/src/mongo-adapter.ts](packages/3-mongo-target/2-mongo-adapter/src/mongo-adapter.ts) — adapter accepts codec registry, passes to resolveDocument/resolveValue +- **Tests**: + - [packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts](packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts) — "attaches codecId from contract fields to MongoParamRef", "attaches objectId codecId" + - [packages/3-mongo-target/2-mongo-adapter/test/resolve-value.test.ts](packages/3-mongo-target/2-mongo-adapter/test/resolve-value.test.ts) — codec encode with/without registry, nested objects/arrays + - [packages/3-mongo-target/2-mongo-adapter/test/mongo-adapter.test.ts](packages/3-mongo-target/2-mongo-adapter/test/mongo-adapter.test.ts) — "MongoAdapter with codec registry" suite + +### Nullable value object fields produce correct $jsonSchema validators + +**Before**: A nullable value object field like `address Address?` produced a required-object validator, rejecting `null` values. Documents with `address: null` would fail schema validation. + +**After**: Nullable value object fields produce `oneOf: [{ bsonType: "null" }, { bsonType: "object", ... }]` and are excluded from the `required` array. + +- **Why**: The retail store's `User` model has `address Address?` (users can sign up without an address). Without this fix, inserting a user with `address: null` would fail the validator. 
+- **Implementation**: + - [packages/2-mongo-family/2-authoring/contract-psl/src/derive-json-schema.ts](packages/2-mongo-family/2-authoring/contract-psl/src/derive-json-schema.ts) — nullable check in `fieldToBsonSchema()` +- **Tests**: + - [packages/2-mongo-family/2-authoring/contract-psl/test/interpreter.test.ts](packages/2-mongo-family/2-authoring/contract-psl/test/interpreter.test.ts) — "handles nullable value object fields with oneOf null or object" + +### Schema indexes are authored in PSL and flow through to migration operations + +Adds `@@index`, `@@textIndex`, and `@unique` support to the retail store's PSL contract. The contract produces index definitions that the migration planner converts to `createIndex` operations with the correct options (unique, sparse, TTL expiry, collation, text weights, hashed type, compound sort directions). + +- **Why**: Real MongoDB applications need indexes for query performance. The retail store's indexes are representative of production usage: text search with relevance weights, TTL expiry for analytics events, case-insensitive location lookup, and foreign key indexes for $lookup joins. +- **Implementation**: + - [examples/retail-store/prisma/contract.prisma](examples/retail-store/prisma/contract.prisma) — 11 index definitions across 7 collections + - [examples/retail-store/migrations/20260413T0314_migration/ops.json](examples/retail-store/migrations/20260413T0314_migration/ops.json) — generated index operations +- **Tests**: + - [examples/retail-store/test/migration.test.ts](examples/retail-store/test/migration.test.ts) — validates contract index structure and creates indexes on real MongoDB + +### Polymorphic events via @@discriminator/@@base + +Adds a polymorphic `Event` collection with three variant models (`ViewProductEvent`, `SearchEvent`, `AddToCartEvent`) using `@@discriminator(type)` and `@@base(Event, "...")`. 
The ORM's `variant()` method auto-injects discriminator values on create and discriminator filters on query. + +- **Why**: Polymorphism is a common MongoDB pattern. The Event model is a natural fit: all events share base fields (userId, sessionId, timestamp) but each type has different metadata fields. +- **Implementation**: + - [examples/retail-store/prisma/contract.prisma](examples/retail-store/prisma/contract.prisma) — lines 120–151 + - [examples/retail-store/src/data/events.ts](examples/retail-store/src/data/events.ts) — typed create/query functions per variant +- **Tests**: + - [examples/retail-store/test/polymorphism.test.ts](examples/retail-store/test/polymorphism.test.ts) — variant creation, base queries, discriminator filtering, variant field access + +## Compatibility / migration / risk + +- **Framework package changes are backward-compatible**: The ORM and adapter changes add optional behavior (codec encoding) that only activates when `MongoParamRef` carries a `codecId`. Existing code that constructs bare `MongoParamRef(value)` is unaffected. +- **`createMongoAdapter()` signature change**: Now accepts an optional `MongoCodecRegistry` parameter. Existing callers that pass no arguments get a default registry with all built-in codecs. This is non-breaking. +- **Migration planner bug (F01)**: The generated `ops.json` includes incorrect collection creation operations for polymorphic variant models. If applied as-is, it would create 3 empty collections (`addToCartEvent`, `searchEvent`, `viewProductEvent`) that serve no purpose. The base `events` collection and its indexes are correct. + +## Follow-ups / open questions + +- **Migration planner variant handling (F01–F02)**: The planner needs to recognize `@@base` models and suppress collection creation for them. Their fields should be validated as part of the base model's validator, not independently. +- **Float field in $jsonSchema (F03)**: The validator derivation silently drops Float-typed fields. 
Should map to `bsonType: "double"`. +- **Codec output type ergonomics (F04–F05)**: The most pervasive friction in the app — every `_id` access and many string field accesses require `String()` casts. The type system knows these are `mongo/string@1` and `mongo/objectId@1` — it should resolve to `string` in the ORM output type. +- **Typed array update operators (F07)**: The most frequently exercised workaround. ORM `update()` only supports `$set`; `$push`/`$pull`/`$inc` require raw commands. +- **Pipeline output types (F06)**: Pipeline builder results are untyped. No mechanism to propagate types through aggregation stages. + +## Non-goals / intentionally out of scope + +- **Predictive maintenance app**: The second app in the project spec. Tracked separately. +- **Atlas-specific features**: Vector search, Atlas Search, change streams. Stubs exist for vector search but require Atlas credentials. +- **Real authentication**: The login stub fabricates users and sets a plain-text cookie. No OAuth, JWT, or session management. +- **Full UI port**: The UI is functional but simplified. No Redux, no chatbot, no guided tours, no real-time SSE. diff --git a/projects/mongo-example-apps/specs/retail-store-round-2.spec.md b/projects/mongo-example-apps/specs/retail-store-round-2.spec.md new file mode 100644 index 000000000..a6b7e3e49 --- /dev/null +++ b/projects/mongo-example-apps/specs/retail-store-round-2.spec.md @@ -0,0 +1,159 @@ +# Summary + +Make the retail-store example app interactive — a user can browse products, search, add items to a cart, check out, view orders, and see order status updates — all backed by the PN data access layer. This is the second round of development, building on the data access layer, contract, and static UI delivered in round 1 (M1–M6). The result should be a working e-commerce demo that exercises PN Mongo capabilities through real user flows, not just static data viewing. 
+ +# Description + +Round 1 delivered the contract (PSL with embedded value objects), the typed data access layer (ORM CRUD, relations, `$push`/`$pull`, aggregation), integration tests, and a static Next.js UI that renders pre-seeded data. The API routes support mutations but the UI never calls them — there are no interactive elements (no "Add to Cart" button, no checkout flow, no user picker). + +The original [retail-store-v2](https://github.com/mongodb-industry-solutions/retail-store-v2) is a full interactive demo with Redux state management, product search, cart management, checkout, order tracking with SSE, store locator for BOPIS, a chatbot, customer retention analytics, and guided demo "talk tracks." We don't need all of that — much of it validates external services (Dataworkz chatbot, Atlas Stream Processing, ML recommendations), not PN. + +What we need is **the core e-commerce loop made interactive**: browse → search → add to cart → manage cart → check out → view orders. Each step exercises a distinct PN Mongo capability. The UI should be functional and pleasant, not a pixel-perfect port. + +**Reference material:** The original repo is cloned at `wip/retail-store-v2-reference/` for reference during development. + +# Requirements + +## Functional Requirements + +### User identity + +1. **Login stub**: Unauthenticated visitors are redirected to a login page with a "Sign Up" button. Clicking it creates a new user document (via `orm.users.create()` with a generated name and empty address) and sets an auth cookie with the new user's ID. The rest of the app treats this cookie as the authenticated session — no real auth provider needed, but the app behaves like one exists. +2. **User display**: Show the authenticated user's name in the navbar. Provide a "Log out" action that clears the cookie and redirects to the login page. + +### Product browsing + +3. **Product catalog with pagination**: The product catalog page loads products in pages (e.g. 
12 per page) with next/previous navigation. Uses the existing `findProducts()` function, extended with skip/limit support. +4. **Product search**: A search bar on the catalog page filters products by name, brand, or category. This should use the ORM's `where` clause with text matching (or a pipeline with `$regex` if ORM text filters aren't available). Atlas Search (`$search`) is a stretch goal if an Atlas cluster is available. +5. **Product detail**: Clicking a product navigates to its detail page (already exists) and shows an "Add to Cart" button. + +### Cart management + +6. **Add to cart**: From the product detail page (or a product card), the user can add a product to their cart. This calls the existing `addToCart()` data access function (which uses `$push`). If no cart exists, it creates one via `upsertCart()`. +7. **Cart page with item management**: The cart page shows the current user's items with quantities and prices. Each item has a "Remove" button that calls `removeFromCart()` (`$pull`). A "Clear Cart" button calls `clearCart()`. +8. **Cart item count in navbar**: The navbar shows the number of items in the current user's cart, updating after add/remove operations. + +### Checkout + +9. **Checkout flow**: From the cart page, a "Checkout" button navigates to a checkout page that shows: + - Order summary (items, subtotal, total) + - Shipping address (pre-filled from the user's address, or a simple text input) + - Order type selector (home delivery vs. BOPIS) + - If BOPIS: a store location picker populated from `findLocations()` + - A "Place Order" button +10. **Place order**: Confirming the order calls `createOrder()` with the cart items, shipping info, and an initial `{ status: 'placed', timestamp: now }` status entry. Then clears the cart via `clearCart()`. Navigates to the order detail page. + +### Order management + +11. **Orders page**: Lists the current user's orders (existing `getUserOrders()`), showing item count, total, and latest status. 
Each order links to its detail page.
+12. **Order detail page**: Shows order items, shipping address, status history timeline, and total. Already partially exists but needs to use the current user context instead of a hardcoded env var.
+13. **Order status updates**: The order detail page has a button to simulate status progression (e.g. "Mark as Shipped", "Mark as Delivered") that calls `updateOrderStatus()` (`$push` to `statusHistory`). This exercises the `$push` update operator in an interactive context.
+
+### Store locator
+
+14. **Store locations for BOPIS**: During checkout, if BOPIS is selected, show a dropdown of store locations from `findLocations()`. The selected store's address becomes the shipping address.
+
+## Non-Functional Requirements
+
+1. **Client-side interactivity**: Interactive features use Next.js client components (`"use client"`) with `fetch` calls to the existing API routes. Server components remain for initial data loading where appropriate.
+2. **UI component library**: Use an established component library (e.g. shadcn/ui, Radix, or similar) so the app looks polished out of the box without rolling custom components. Replace the existing inline-style UI from round 1 with library components.
+3. **No external dependencies**: The interactive features work against `mongodb-memory-server` for tests and any MongoDB instance for the demo. No Atlas-specific features are required for the core interactive loop.
+4. **Responsive layout**: The UI should look reasonable on desktop and tablet widths. Mobile is not a priority.
+5. **Type safety**: All API request/response types should be derived from the contract types where possible. No `any` types.
+6. **Test coverage**: Each new interactive flow should have at least one integration test proving the data access path works end-to-end (most already exist from round 1).
+
+## Non-goals
+
+- **Chatbot**: Validates Dataworkz, not PN. Out of scope.
+- **Customer retention / CEP / Next Best Actions**: Complex event processing with Atlas Stream Processing and external microservices. Not a PN concern. +- **Personalized recommendations / ML pipeline**: External service populates `lastRecommendations` on user documents. Not a PN concern. +- **Real-time SSE / change streams**: The PN runtime doesn't yet support change streams. Deferred until the framework ships this capability. Order status updates will use polling or manual refresh instead. +- **Atlas Search (`$search`)**: Requires an extension pack not yet built. Product search will use `$regex` or ORM filters as a fallback. Atlas Search is a stretch goal. +- **Talk tracks / guided tours / demo mode**: Presentation tooling for sales demos. Not relevant to PN validation. +- **Digital receipt PDF generation / external invoice URLs**: External service concern. +- **Real authentication**: The login stub fabricates users and sets a cookie. No OAuth, JWT validation, password hashing, or session management beyond a simple cookie. +- **Redux or complex client state management**: Use simple React state or context. The original app's Redux store is overkill for what we need. 
+ +# Acceptance Criteria + +## User identity + +- [ ] Unauthenticated visitors are redirected to a login page +- [ ] "Sign Up" creates a new user document and sets an auth cookie +- [ ] Auth cookie persists across page navigations; server components can read it +- [ ] Navbar displays the authenticated user's name +- [ ] "Log out" clears the cookie and redirects to the login page + +## Product browsing + +- [ ] Product catalog paginates (at least 2 pages when >12 products are seeded) +- [ ] Search bar filters products by text match (name, brand, or category) +- [ ] Product detail page shows an "Add to Cart" button + +## Cart + +- [ ] "Add to Cart" creates a cart (upsert) and adds the product +- [ ] Cart page shows current user's items with prices and quantities +- [ ] "Remove" button removes a specific item (`$pull`) +- [ ] "Clear Cart" button empties the cart (`$set items: []`) +- [ ] Navbar shows cart item count that updates after mutations + +## Checkout + +- [ ] Checkout page shows order summary with items and total +- [ ] User can enter/confirm shipping address +- [ ] User can select home delivery or BOPIS +- [ ] BOPIS selection shows a store location dropdown populated from DB +- [ ] "Place Order" creates an order, clears the cart, and navigates to order detail + +## Orders + +- [ ] Orders page lists the current user's orders sorted by most recent +- [ ] Order detail page shows items, address, status history, and total +- [ ] Status update button appends a new status entry (`$push` to `statusHistory`) +- [ ] All order pages use the selected demo user, not a hardcoded env var + +## Data access + +- [ ] All interactive mutations go through the PN data access layer (no raw MongoDB driver calls in API routes or components) +- [ ] The existing integration tests continue to pass +- [ ] At least one new test covers the search/filter data access function + +# Other Considerations + +## Security + +The login stub creates user documents and sets a plain-text user ID 
cookie. No encryption, signing, or real session management. This is a local demo app — the auth surface simulates the UX of a real app without any of the security infrastructure. + +## Cost + +Zero. Runs against `mongodb-memory-server` locally or any MongoDB instance the developer provides. + +## Observability + +Not applicable beyond standard Next.js dev server output. + +## Data Protection + +Not applicable — all data is synthetic demo data. + +## Analytics + +Not applicable. + +# References + +- [Original retail-store-v2](https://github.com/mongodb-industry-solutions/retail-store-v2) — source repo (cloned to `wip/retail-store-v2-reference/`) +- [Project spec](../spec.md) — parent project spec +- [Round 1 plan](../plans/retail-store-plan.md) — milestones M1–M6 (delivered) +- [Round 1 code review](../reviews/code-review.md) — findings and acceptance criteria status + +# Open Questions + +1. **Search implementation**: Should product search use `$regex` matching via the ORM's `where` clause, or a pipeline with `$regex` in a `$match` stage? The ORM may not support text pattern matching natively. **Assumption:** Use a pipeline with `$match` + `$regex` for search, since the pipeline builder is already used for aggregation and this exercises another PN surface. Fall back to ORM `where` with exact string filters if `$regex` isn't feasible. + +2. **Seed data volume**: The current seed has 3 products, which is too few for pagination or meaningful search. Should we expand the seed to ~20–30 products? **Assumption:** Yes — expand the seed to at least 20 products across multiple categories and brands to make browsing, search, and pagination meaningful. + +3. **Cart item quantity**: When adding a product that's already in the cart, should it increment the quantity or add a duplicate entry? The original app uses `$push` which adds duplicates. **Assumption:** Same as original — `$push` adds a new entry. Simplifies the implementation and matches the existing data access function. 
+ +4. **~~User persistence mechanism~~**: Resolved — cookie set by the login stub sign-up flow. Server components read it via `cookies()` for initial data loading. From cf3db5a5c9ffa26d626e6376696ea753ba2bcf0d Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 08:03:05 +0300 Subject: [PATCH 02/30] Remove code review artifacts from branch, keep locally Review artifacts under projects/**/reviews/ are useful during development but should not ship in PRs. Added a gitignore rule to prevent re-addition. --- .../reviews/pr-327/code-review.md | 104 --------------- .../reviews/pr-327/system-design-review.md | 106 --------------- .../reviews/pr-327/walkthrough.md | 122 ------------------ 3 files changed, 332 deletions(-) delete mode 100644 projects/mongo-example-apps/reviews/pr-327/code-review.md delete mode 100644 projects/mongo-example-apps/reviews/pr-327/system-design-review.md delete mode 100644 projects/mongo-example-apps/reviews/pr-327/walkthrough.md diff --git a/projects/mongo-example-apps/reviews/pr-327/code-review.md b/projects/mongo-example-apps/reviews/pr-327/code-review.md deleted file mode 100644 index 493f9b913..000000000 --- a/projects/mongo-example-apps/reviews/pr-327/code-review.md +++ /dev/null @@ -1,104 +0,0 @@ -# Code Review - -**Branch:** `tml-2185-port-retail-store-v2-e-commerce-app-to-prisma-next-mongodb` -**Base:** `origin/main` -**PR:** [#327](https://github.com/prisma/prisma-next/pull/327) -**Specs:** [projects/mongo-example-apps/spec.md](../../spec.md), [projects/mongo-example-apps/specs/retail-store-round-2.spec.md](../../specs/retail-store-round-2.spec.md) - ---- - -## Acceptance criteria status - -### Project spec (spec.md) — Retail Store portion - -| Criterion | Status | Evidence | -|---|---|---| -| PN contract authored in PSL | DONE | [prisma/contract.prisma](../../../../examples/retail-store/prisma/contract.prisma) | -| Contract emits valid contract.json + contract.d.ts | DONE | 
[src/contract.json](../../../../examples/retail-store/src/contract.json), [src/contract.d.ts](../../../../examples/retail-store/src/contract.d.ts) | -| Schema migrations create collections, indexes, validators | DONE (with caveats — see F01) | [migrations/20260413T0314_migration/](../../../../examples/retail-store/migrations/20260413T0314_migration/) | -| All CRUD via PN ORM | DONE | [src/data/](../../../../examples/retail-store/src/data/) | -| Embedded documents inline in results | DONE | Value objects (Price, Image, Address, CartItem, etc.) returned inline | -| Referenced relations via $lookup include() | DONE | [test/relations.test.ts](../../../../examples/retail-store/test/relations.test.ts) | -| Query results fully typed | PARTIAL — `as string` casts needed for _id fields, `as T` for pipeline results | See F04, F05 | -| At least one mutation uses $push/$pull | DONE | Cart add/remove, order status update | -| Vector search works via extension pack | STUB | `findSimilarProducts` defined but requires Atlas; not tested | -| At least one data migration runs | NO | Migration planner generates schema-only operations | -| Change stream subscription | NO | Not supported in framework | -| Runs against mongodb-memory-server | DONE | [test/setup.ts](../../../../examples/retail-store/test/setup.ts) | -| Demonstrates ≥3 MongoDB idioms | DONE | Embedded docs, referenced relations, polymorphism, update operators, aggregation, search, indexes | - -### Round 2 spec (retail-store-round-2.spec.md) - -| Criterion | Status | Evidence | -|---|---|---| -| Login stub with signup/logout | DONE | [app/login/page.tsx](../../../../examples/retail-store/app/login/page.tsx), [app/api/auth/](../../../../examples/retail-store/app/api/auth/) | -| Navbar displays user name + logout | DONE | [src/components/navbar.tsx](../../../../examples/retail-store/src/components/navbar.tsx) | -| Product catalog with pagination | DONE | [app/page.tsx](../../../../examples/retail-store/app/page.tsx) with 
skip/take | -| Product search | DONE | [app/api/products/route.ts](../../../../examples/retail-store/app/api/products/route.ts) with $regex | -| Add to cart | DONE | [src/components/add-to-cart-button.tsx](../../../../examples/retail-store/src/components/add-to-cart-button.tsx) | -| Cart page with remove/clear | DONE | [app/cart/page.tsx](../../../../examples/retail-store/app/cart/page.tsx) | -| Navbar cart count | DONE | [src/components/cart-badge.tsx](../../../../examples/retail-store/src/components/cart-badge.tsx) | -| Checkout with home/BOPIS | DONE | [app/checkout/page.tsx](../../../../examples/retail-store/app/checkout/page.tsx) | -| Orders page + detail | DONE | [app/orders/page.tsx](../../../../examples/retail-store/app/orders/page.tsx), [app/orders/\[id\]/page.tsx](../../../../examples/retail-store/app/orders/[id]/page.tsx) | -| Order status progression | DONE | [app/orders/\[id\]/order-status-buttons.tsx](../../../../examples/retail-store/app/orders/[id]/order-status-buttons.tsx) | -| BOPIS location picker | DONE | Fetches from /api/locations in checkout | -| UI component library | DONE | shadcn-style components under [src/components/ui/](../../../../examples/retail-store/src/components/ui/) | -| Existing tests pass | DONE | All test files exercise mongodb-memory-server | - ---- - -## Findings - -### FRAMEWORK LIMITATIONS - -These are gaps in the Prisma Next framework surfaced by this example app. Each one is a signal that the framework needs work. - -| ID | Finding | Description | Location | -|---|---|---|---| -| F01 | **Migration planner creates separate collections for polymorphic variants** | The planner generates `collection.addToCartEvent.create`, `collection.searchEvent.create`, `collection.viewProductEvent.create` as separate collection operations. Variant models share the base model's collection — they should not get their own. This produces incorrect migration artifacts that would create unnecessary empty collections if applied. 
| [ops.json L1–47, L509–551, L626–675](../../../../examples/retail-store/migrations/20260413T0314_migration/ops.json) | -| F02 | **Variant collection validators are incomplete** | The validators for variant collections (e.g., `searchEvent`) include only variant-specific fields (e.g., `query`) and miss all base model fields (`_id`, `userId`, `sessionId`, `timestamp`, `type`). Even if variant collections were intentional, the validators would be structurally wrong. | [ops.json L529–541](../../../../examples/retail-store/migrations/20260413T0314_migration/ops.json) | -| F03 | **$jsonSchema validator drops `Float` fields** | The `Price` value object validator includes `currency` but not `amount`. The `InvoiceLineItem` validator includes `amount` and `name` but drops `unitPrice`, `lineTotal`. The JSON schema derivation doesn't recognize the `Float` scalar type, so fields typed as `Float` are silently omitted from validators. | [ops.json L105–115](../../../../examples/retail-store/migrations/20260413T0314_migration/ops.json) — `Price` validator has `required: ["currency"]` | -| F04 | **`_id` field returns opaque type requiring String() casts** | ORM query results return `_id` as an opaque codec output type, not `string`. Code must cast via `String(entity._id)` or `entity._id as string` throughout the data layer and seed script. This is pervasive across the entire app. | [src/seed.ts](../../../../examples/retail-store/src/seed.ts), [src/data/orders.ts](../../../../examples/retail-store/src/data/orders.ts), [test/api-flows.test.ts](../../../../examples/retail-store/test/api-flows.test.ts) | -| F05 | **ORM string fields return opaque codec output type** | Fields like `name`, `brand`, `code` return codec output types rather than `string`, forcing `String(p0.name)` casts in the seed. The contract types know these are `mongo/string@1` codec fields, but the ORM's output type resolution doesn't simplify codec types to their JS primitives. 
| [src/seed.ts L152–175](../../../../examples/retail-store/src/seed.ts) | -| F06 | **Pipeline and raw query results are untyped** | The pipeline builder and `runtime.execute()` return untyped async iterables. The `collectResults()` helper casts `row as T` at the boundary. The framework has no mechanism to propagate result types through aggregation stages or raw commands. | [src/data/execute-raw.ts L11–14](../../../../examples/retail-store/src/data/execute-raw.ts) | -| F07 | **ORM lacks typed $push/$pull array update operators** | The ORM's `update()` method only supports `$set` semantics. Array update operators (`$push`, `$pull`, `$inc`) require dropping to `mongoRaw` with untyped commands and manual `MongoParamRef` wrapping. This is the most exercised workaround in the app (cart add/remove, order status update). | [src/data/carts.ts L43–63](../../../../examples/retail-store/src/data/carts.ts), [src/data/orders.ts](../../../../examples/retail-store/src/data/orders.ts) | -| F08 | **ObjectId filter requires MongoParamRef wrapping** | Filtering by ObjectId-typed fields requires constructing `MongoFieldFilter.of('userId', '$eq', new MongoParamRef(userId, { codecId: 'mongo/objectId@1' }))` instead of a simpler `where({ userId })`. The helper `objectIdEq()` in `object-id-filter.ts` exists to reduce this boilerplate. | [src/data/object-id-filter.ts](../../../../examples/retail-store/src/data/object-id-filter.ts) | -| F09 | **No $vectorSearch stage in pipeline builder** | `findSimilarProducts` in `products.ts` is a stub that would need raw aggregate to implement `$vectorSearch`. The pipeline builder doesn't expose a `vectorSearch()` stage. Requires an extension pack not yet built. | [src/data/products.ts](../../../../examples/retail-store/src/data/products.ts) | -| F10 | **Embedded models via `owner` not supported in PSL** | The contract schema and emitter accept `owner`, but PSL has no `@@owner` attribute and the ORM has no embedded entity CRUD handling. 
Can't demonstrate embedded entities (as distinct from value objects) in PSL-authored apps. | — | -| F11 | **Polymorphism demonstrated but not demonstrated in the UI** | `@@discriminator`/`@@base` work correctly in the ORM (create with auto-injected discriminator, variant queries with discriminator filter). The data layer and tests fully exercise this. The UI doesn't surface event variants — events are only created/queried in tests and seed, not in user-facing pages. | [test/polymorphism.test.ts](../../../../examples/retail-store/test/polymorphism.test.ts), [src/data/events.ts](../../../../examples/retail-store/src/data/events.ts) | - -### CODE QUALITY - -| ID | Finding | Description | Location | -|---|---|---|---| -| C01 | **Fabricated image URLs** | Product images use URLs like `/images/products/her-oxf-001.jpg` but no such images exist in the project. The product cards and detail page render broken `` tags. Consider using placeholder services or removing the `` tag. | [src/seed.ts L145](../../../../examples/retail-store/src/seed.ts) | -| C02 | **Domain model README out of date** | The README says `Events ─── EventMetadata (embedded)` but the schema now uses polymorphic variants (ViewProductEvent, SearchEvent, AddToCartEvent). The `EventMetadata` type no longer exists. | [README.md L86](../../../../examples/retail-store/README.md) | -| C03 | **`String()` calls throughout seed for value-object-nested field access** | The seed script uses `String(p0.name)`, `String(p0.brand)`, `String(p0.code)` to convert codec output types to strings when constructing cart/order items. This is verbose and obscures intent. This is caused by F05 (framework limitation). | [src/seed.ts L147–175](../../../../examples/retail-store/src/seed.ts) | -| C04 | **`UNAUTHORIZED` response object may be shared across requests** | `const UNAUTHORIZED = NextResponse.json(...)` is defined at module scope in the order route. 
Next.js `Response` objects may not be safely reused across requests in some contexts (headers can be mutated). Consider creating fresh responses per request. | [app/api/orders/\[id\]/route.ts L11–12](../../../../examples/retail-store/app/api/orders/[id]/route.ts) | - -### POSITIVE OBSERVATIONS - -| ID | Observation | Detail | -|---|---|---| -| P01 | **Clean data access separation** | Each collection has its own module with typed functions. No raw MongoDB calls leak into routes or components. | -| P02 | **Framework fixes driven by example** | The branch includes 4 framework fixes discovered during development: ORM codec attachment, adapter codec encoding, nullable VO validators, optional codec.encode guard. Each fix has a corresponding test. | -| P03 | **Polymorphic events demonstrate real-world pattern** | The Event model with discriminator and 3 variants is the cleanest demonstration of PN's polymorphism support. Tests cover variant creation, base-collection queries, and discriminator-filtered queries. | -| P04 | **Index variety** | The PSL contract demonstrates text indexes with weights, compound indexes, hashed indexes, TTL indexes, sparse indexes, and collation-aware indexes — all in one schema. The migration test verifies each one. | -| P05 | **Test infrastructure** | The shared `setupTestDb()` helper creates isolated MongoMemoryReplSet instances per test suite, with proper cleanup. 12 test files cover the full data access surface. | -| P06 | **Interactive e-commerce loop** | Browse → search → add to cart → checkout → orders — each step exercises a distinct PN capability through the data access layer. | - ---- - -## Summary - -The branch delivers a substantial, working e-commerce example that validates PN's MongoDB support across embedded value objects, reference relations, polymorphism, array operators, pipelines, search, and schema indexes. The framework fixes (ORM codec attachment, adapter encoding, nullable validators) are well-scoped and tested. 
- -The most significant framework limitations surfaced are: - -1. **Migration planner bug with polymorphic variants** (F01–F02) — creates incorrect separate collections -2. **Float fields dropped from validators** (F03) — silent data loss in schema validation -3. **Opaque codec output types** (F04–F05) — forces String() casts everywhere -4. **No typed array update operators** (F07) — the most exercised workaround -5. **Untyped pipeline/raw results** (F06) — forces `as T` casts - -These are genuine framework signals. The app is correctly structured to highlight them rather than paper over them. diff --git a/projects/mongo-example-apps/reviews/pr-327/system-design-review.md b/projects/mongo-example-apps/reviews/pr-327/system-design-review.md deleted file mode 100644 index 0b32f918d..000000000 --- a/projects/mongo-example-apps/reviews/pr-327/system-design-review.md +++ /dev/null @@ -1,106 +0,0 @@ -# System Design Review - -**Branch:** `tml-2185-port-retail-store-v2-e-commerce-app-to-prisma-next-mongodb` -**Base:** `origin/main` -**PR:** [#327](https://github.com/prisma/prisma-next/pull/327) -**Specs:** [projects/mongo-example-apps/spec.md](../../spec.md), [projects/mongo-example-apps/specs/retail-store-round-2.spec.md](../../specs/retail-store-round-2.spec.md) - ---- - -## Problem being solved - -Validate that Prisma Next's MongoDB implementation handles a real-world data model — an interactive e-commerce platform with embedded value objects, referenced relations, polymorphic types, array update operators, aggregation pipelines, search, schema indexes, and migration artifacts. The retail store exercises more PN Mongo features in one app than any existing example. - -## New guarantees / invariants - -1. **PSL contract with embedded value objects**: 8 `type` definitions (Price, Image, Address, CartItem, OrderLineItem, StatusEntry, InvoiceLineItem, EventMetadata) produce `valueObject`-kind fields in the contract. 
The ORM wraps value object data in correctly-typed `MongoParamRef` entries with `codecId` at mutation time. - -2. **Polymorphic event collection**: The `Event` base model with `@@discriminator(type)` and three variant models (`ViewProductEvent`, `SearchEvent`, `AddToCartEvent`) via `@@base` produce a single `events` collection. The ORM's `variant()` method injects discriminator filters on read and auto-injects discriminator values on create. - -3. **ORM codec encoding**: Mutations now attach `codecId` from the contract's field definition to `MongoParamRef` instances, and the adapter encodes values through the codec registry before sending to the wire. This ensures ObjectId fields written as strings are properly encoded to BSON ObjectIds. - -4. **Nullable value object validators**: The `$jsonSchema` validator derivation now handles nullable value object fields correctly, producing `oneOf: [{ bsonType: "null" }, { bsonType: "object", ... }]` instead of incorrectly requiring a non-null object. - -5. **Schema indexes via PSL**: `@@index`, `@@textIndex`, `@@unique`, `@unique` in the PSL contract produce index definitions in `contract.json`, which flow through to migration operations. - -## Subsystem fit - -### Contract (PSL → contract.json → contract.d.ts) - -The retail store uses a single `contract.prisma` file with 7 models, 3 polymorphic variants, and 8 value object types. The PSL interpreter produces the correct contract structure: - -- Value objects → `type: { kind: "valueObject", name: "..." }` on model fields -- Polymorphic models → `discriminator`, `variants`, `base` on the correct models -- Indexes → `storage.collections.*.indexes` with the correct keys, options (unique, sparse, TTL, collation, weights, hashed) -- Validators → `$jsonSchema` validators derived from model field definitions - -The emitted `contract.d.ts` carries fully-typed model definitions including value object nesting and polymorphic variant structure. 
- -### ORM - -The ORM handles: - -- CRUD with value object fields (nested objects correctly wrapped) -- `include()` for N:1 reference relations (cart→user, order→user, invoice→order) -- `variant()` for polymorphic collection narrowing -- `skip()`/`take()` for pagination -- `where()` with `MongoFieldFilter` for filtering - -The ORM does **not** handle: - -- Typed `$push`/`$pull` array operators (uses `mongoRaw` instead) -- Embedded models via `owner` (value objects work; entities with identity don't) - -### Runtime / Adapter - -The adapter was extended to encode `MongoParamRef` values through the codec registry. This is a behavioral change: previously, all `MongoParamRef.value` was passed to the wire as-is. Now, if a `MongoParamRef` carries a `codecId`, the adapter looks up the codec and calls `encode()` before sending. This ensures that string-typed ObjectId fields are properly encoded as BSON ObjectIds. - -### Migration - -The planner generates collection creation operations with `$jsonSchema` validators and index creation operations from the contract. **However, there is a bug** — variant models without `@@map` get separate collection creation operations (e.g., `collection.addToCartEvent.create`, `collection.searchEvent.create`) instead of being recognized as part of the base model's collection. See F01 in code review. - -## Boundary correctness - -- All retail-store code lives under `examples/retail-store/` — no import into framework packages -- Framework fixes live in their proper packages: ORM changes in `packages/2-mongo-family/5-query-builders/orm/`, adapter changes in `packages/3-mongo-target/2-mongo-adapter/`, PSL validator derivation in `packages/2-mongo-family/2-authoring/contract-psl/` -- No new cross-layer or cross-domain imports introduced - -## Design review - -### Cookie-based auth (appropriate for demo scope) - -The auth system uses a plain-text `userId` cookie set on signup, checked by Next.js middleware. 
This is explicitly scoped as a demo stub — no encryption, signing, or real session management. The middleware matcher correctly excludes `/login`, `/api/auth/*`, and static assets. All order routes verify ownership by comparing `order.userId` to the authenticated user. - -### Cart management via raw commands - -The cart add/remove operations use `mongoRaw` with `$push`/`$pull` because the ORM doesn't expose typed array update operators. This is a documented framework limitation. The `addToCart` function uses `upsert: true` with `$setOnInsert` to handle first-cart creation atomically. - -### Data access layer separation - -Each collection has its own module under `src/data/` with typed functions that accept `Db` and return typed results. API routes and pages compose these functions — no raw MongoDB calls leak into the UI layer. The `executeRaw()` and `collectResults()` helpers in `execute-raw.ts` centralize the `for await` draining pattern for pipeline and raw results. - -## Test strategy adequacy - -The branch includes 12 test files: - -| Test file | Coverage | -|---|---| -| `crud-lifecycle.test.ts` | Create, read, update, delete for products, users, carts, orders | -| `relations.test.ts` | `$lookup` via `include()` for cart→user, order→user, invoice→order | -| `update-operators.test.ts` | `$push`/`$pull` for cart items, `$push` for order status | -| `aggregation.test.ts` | Event type aggregation pipeline, random product sampling | -| `polymorphism.test.ts` | Variant creation, base-collection queries, discriminator filtering | -| `search.test.ts` | Multi-field `$regex` search, pagination with skip/take | -| `cart-lifecycle.test.ts` | Add, remove, clear, upsert cart operations | -| `order-lifecycle.test.ts` | Create order, status updates, get/delete operations | -| `api-flows.test.ts` | Order ownership verification, checkout flow, status progression | -| `seed.test.ts` | Seed data correctness (counts, structure) | -| `migration.test.ts` | Contract index definitions, 
index creation on real MongoDB | -| `setup.ts` | Shared test infrastructure with MongoMemoryReplSet | - -All tests run against `mongodb-memory-server` — no external DB required. The coverage is strong for the data access layer. The gap is API route-level tests — the `api-flows.test.ts` tests the data access functions directly rather than making HTTP calls through the routes, so middleware/auth cookie behavior is not tested programmatically. - -## Risk assessment - -- **Migration planner produces incorrect operations for polymorphic models** — variant models get separate collection creation operations. This will fail or create unnecessary collections if applied. Low impact today (migration can be applied manually or the ops corrected), but the planner bug should be tracked. -- **No integration test verifying the migration operations apply successfully** — the `migration.test.ts` validates contract index definitions and manually creates indexes, but doesn't run the actual migration planner output against a database. diff --git a/projects/mongo-example-apps/reviews/pr-327/walkthrough.md b/projects/mongo-example-apps/reviews/pr-327/walkthrough.md deleted file mode 100644 index 37d0624c8..000000000 --- a/projects/mongo-example-apps/reviews/pr-327/walkthrough.md +++ /dev/null @@ -1,122 +0,0 @@ -# Walkthrough - -## Sources - -- PR: [#327](https://github.com/prisma/prisma-next/pull/327) -- Specs: [projects/mongo-example-apps/spec.md](../../spec.md), [projects/mongo-example-apps/specs/retail-store-round-2.spec.md](../../specs/retail-store-round-2.spec.md) -- Commit range: `origin/main...HEAD` (43 commits, 91 files, ~12.8k lines) - -## Intent - -Build a working interactive e-commerce application — the "retail store" — that validates Prisma Next's MongoDB support against a real-world data model. 
The app exercises embedded value objects, referenced relations, polymorphic types, array update operators, aggregation pipelines, multi-field search, pagination, schema indexes, and migration artifacts. Along the way, fix the framework bugs discovered during development. - -## The story - -1. **Define the domain in PSL** — A contract with 7 models, 3 polymorphic variants, and 8 embedded value object types establishes the retail domain: products, users, carts, orders, invoices, locations, and events. The PSL contract uses `@@discriminator`/`@@base` for polymorphic events, `@@textIndex` with weights, compound/hashed/TTL/sparse/collation-aware indexes, and `@unique` for email. - -2. **Build a typed data access layer** — One module per collection under `src/data/` wraps all database operations in typed functions that accept a `Db` handle and return typed results. ORM CRUD for standard operations, `mongoRaw` for array update operators (`$push`/`$pull`), pipeline builder for aggregation, and `$regex` for search. No raw MongoDB calls leak outside this layer. - -3. **Make it interactive** — Cookie-based auth (signup creates a user, sets a `userId` cookie), product catalog with pagination and search, add-to-cart with live badge updates, checkout with home delivery vs. BOPIS (store location picker), order management with status progression. Built with Next.js App Router, Tailwind CSS v4, and shadcn-style UI components. - -4. **Fix the framework to make it work** — Four framework bugs discovered during development were fixed in their proper packages: ORM codec attachment on mutations, adapter codec encoding via registry, nullable value object validator derivation, and optional codec.encode guard. - -5. **Validate with tests** — 12 test files covering CRUD lifecycle, relations, polymorphism, aggregation, search, cart/order lifecycle, API-level flows, migration/indexes, and seeding. All tests run against `mongodb-memory-server`. 
- -## Behavior changes & evidence - -### Adds a complete e-commerce example app - -Adds a Next.js retail store application under `examples/retail-store/` that demonstrates the full range of PN's MongoDB capabilities through an interactive storefront. - -- **Why**: The existing examples (mongo-demo) cover basic CRUD. The project spec requires validation against a real-world data model with real complexity. -- **Implementation**: - - [examples/retail-store/prisma/contract.prisma](examples/retail-store/prisma/contract.prisma) — PSL contract with 7 models, 3 variants, 8 value objects, 11 indexes - - [examples/retail-store/src/data/](examples/retail-store/src/data/) — data access layer (carts, events, invoices, locations, orders, products, users) - - [examples/retail-store/src/db.ts](examples/retail-store/src/db.ts) — db factory - - [examples/retail-store/src/seed.ts](examples/retail-store/src/seed.ts) — seed data (24 products, 4 locations, users, orders, events) - - [examples/retail-store/app/](examples/retail-store/app/) — Next.js pages and API routes - - [examples/retail-store/src/components/](examples/retail-store/src/components/) — navbar, cart provider, add-to-cart, UI primitives - - [examples/retail-store/middleware.ts](examples/retail-store/middleware.ts) — auth middleware -- **Tests**: - - [examples/retail-store/test/crud-lifecycle.test.ts](examples/retail-store/test/crud-lifecycle.test.ts) — CRUD operations per collection - - [examples/retail-store/test/relations.test.ts](examples/retail-store/test/relations.test.ts) — $lookup via include() - - [examples/retail-store/test/update-operators.test.ts](examples/retail-store/test/update-operators.test.ts) — $push/$pull for carts and orders - - [examples/retail-store/test/aggregation.test.ts](examples/retail-store/test/aggregation.test.ts) — event aggregation, random product sampling - - [examples/retail-store/test/polymorphism.test.ts](examples/retail-store/test/polymorphism.test.ts) — variant creation, 
discriminator filtering, base queries - - [examples/retail-store/test/search.test.ts](examples/retail-store/test/search.test.ts) — multi-field $regex search, pagination - - [examples/retail-store/test/cart-lifecycle.test.ts](examples/retail-store/test/cart-lifecycle.test.ts) — cart add/remove/clear/upsert - - [examples/retail-store/test/order-lifecycle.test.ts](examples/retail-store/test/order-lifecycle.test.ts) — order create/status/delete - - [examples/retail-store/test/api-flows.test.ts](examples/retail-store/test/api-flows.test.ts) — order ownership, checkout flow, status progression - - [examples/retail-store/test/seed.test.ts](examples/retail-store/test/seed.test.ts) — seed data integrity - - [examples/retail-store/test/migration.test.ts](examples/retail-store/test/migration.test.ts) — contract index definitions, index creation on real MongoDB - -### ORM mutations now encode values through the codec registry - -**Before**: `MongoParamRef` instances created by the ORM carried no codec information. The adapter passed `MongoParamRef.value` to the wire as-is. String-typed ObjectId fields were sent as plain strings, causing type mismatches when MongoDB expected BSON ObjectIds. - -**After**: The ORM's `#toDocument()` and `#toSetFields()` methods look up each field's `codecId` from the contract and attach it to the `MongoParamRef`. The adapter's `resolveValue()` checks for `codecId` and calls `codec.encode()` before sending to the wire. This ensures ObjectId fields are properly encoded to BSON ObjectIds without manual wrapping. - -- **Why**: Without codec encoding, ORM mutations that write to `ObjectId`-typed foreign key fields (e.g., `cart.userId`, `order.userId`) would write plain strings instead of BSON ObjectIds, breaking `$lookup` joins and index usage. 
-- **Implementation**: - - [packages/2-mongo-family/5-query-builders/orm/src/collection.ts](packages/2-mongo-family/5-query-builders/orm/src/collection.ts) — `#wrapFieldValue()`, `#wrapValueObject()`, `#modelFields()` methods; updated `#toDocument()` and `#toSetFields()` - - [packages/3-mongo-target/2-mongo-adapter/src/resolve-value.ts](packages/3-mongo-target/2-mongo-adapter/src/resolve-value.ts) — codec lookup and encode in `resolveValue()` - - [packages/3-mongo-target/2-mongo-adapter/src/mongo-adapter.ts](packages/3-mongo-target/2-mongo-adapter/src/mongo-adapter.ts) — adapter accepts codec registry, passes to resolveDocument/resolveValue -- **Tests**: - - [packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts](packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts) — "attaches codecId from contract fields to MongoParamRef", "attaches objectId codecId" - - [packages/3-mongo-target/2-mongo-adapter/test/resolve-value.test.ts](packages/3-mongo-target/2-mongo-adapter/test/resolve-value.test.ts) — codec encode with/without registry, nested objects/arrays - - [packages/3-mongo-target/2-mongo-adapter/test/mongo-adapter.test.ts](packages/3-mongo-target/2-mongo-adapter/test/mongo-adapter.test.ts) — "MongoAdapter with codec registry" suite - -### Nullable value object fields produce correct $jsonSchema validators - -**Before**: A nullable value object field like `address Address?` produced a required-object validator, rejecting `null` values. Documents with `address: null` would fail schema validation. - -**After**: Nullable value object fields produce `oneOf: [{ bsonType: "null" }, { bsonType: "object", ... }]` and are excluded from the `required` array. - -- **Why**: The retail store's `User` model has `address Address?` (users can sign up without an address). Without this fix, inserting a user with `address: null` would fail the validator. 
-- **Implementation**: - - [packages/2-mongo-family/2-authoring/contract-psl/src/derive-json-schema.ts](packages/2-mongo-family/2-authoring/contract-psl/src/derive-json-schema.ts) — nullable check in `fieldToBsonSchema()` -- **Tests**: - - [packages/2-mongo-family/2-authoring/contract-psl/test/interpreter.test.ts](packages/2-mongo-family/2-authoring/contract-psl/test/interpreter.test.ts) — "handles nullable value object fields with oneOf null or object" - -### Schema indexes are authored in PSL and flow through to migration operations - -Adds `@@index`, `@@textIndex`, and `@unique` support to the retail store's PSL contract. The contract produces index definitions that the migration planner converts to `createIndex` operations with the correct options (unique, sparse, TTL expiry, collation, text weights, hashed type, compound sort directions). - -- **Why**: Real MongoDB applications need indexes for query performance. The retail store's indexes are representative of production usage: text search with relevance weights, TTL expiry for analytics events, case-insensitive location lookup, and foreign key indexes for $lookup joins. -- **Implementation**: - - [examples/retail-store/prisma/contract.prisma](examples/retail-store/prisma/contract.prisma) — 11 index definitions across 7 collections - - [examples/retail-store/migrations/20260413T0314_migration/ops.json](examples/retail-store/migrations/20260413T0314_migration/ops.json) — generated index operations -- **Tests**: - - [examples/retail-store/test/migration.test.ts](examples/retail-store/test/migration.test.ts) — validates contract index structure and creates indexes on real MongoDB - -### Polymorphic events via @@discriminator/@@base - -Adds a polymorphic `Event` collection with three variant models (`ViewProductEvent`, `SearchEvent`, `AddToCartEvent`) using `@@discriminator(type)` and `@@base(Event, "...")`. 
The ORM's `variant()` method auto-injects discriminator values on create and discriminator filters on query. - -- **Why**: Polymorphism is a common MongoDB pattern. The Event model is a natural fit: all events share base fields (userId, sessionId, timestamp) but each type has different metadata fields. -- **Implementation**: - - [examples/retail-store/prisma/contract.prisma](examples/retail-store/prisma/contract.prisma) — lines 120–151 - - [examples/retail-store/src/data/events.ts](examples/retail-store/src/data/events.ts) — typed create/query functions per variant -- **Tests**: - - [examples/retail-store/test/polymorphism.test.ts](examples/retail-store/test/polymorphism.test.ts) — variant creation, base queries, discriminator filtering, variant field access - -## Compatibility / migration / risk - -- **Framework package changes are backward-compatible**: The ORM and adapter changes add optional behavior (codec encoding) that only activates when `MongoParamRef` carries a `codecId`. Existing code that constructs bare `MongoParamRef(value)` is unaffected. -- **`createMongoAdapter()` signature change**: Now accepts an optional `MongoCodecRegistry` parameter. Existing callers that pass no arguments get a default registry with all built-in codecs. This is non-breaking. -- **Migration planner bug (F01)**: The generated `ops.json` includes incorrect collection creation operations for polymorphic variant models. If applied as-is, it would create 3 empty collections (`addToCartEvent`, `searchEvent`, `viewProductEvent`) that serve no purpose. The base `events` collection and its indexes are correct. - -## Follow-ups / open questions - -- **Migration planner variant handling (F01–F02)**: The planner needs to recognize `@@base` models and suppress collection creation for them. Their fields should be validated as part of the base model's validator, not independently. -- **Float field in $jsonSchema (F03)**: The validator derivation silently drops Float-typed fields. 
Should map to `bsonType: "double"`. -- **Codec output type ergonomics (F04–F05)**: The most pervasive friction in the app — every `_id` access and many string field accesses require `String()` casts. The type system knows these are `mongo/string@1` and `mongo/objectId@1` — it should resolve to `string` in the ORM output type. -- **Typed array update operators (F07)**: The most frequently exercised workaround. ORM `update()` only supports `$set`; `$push`/`$pull`/`$inc` require raw commands. -- **Pipeline output types (F06)**: Pipeline builder results are untyped. No mechanism to propagate types through aggregation stages. - -## Non-goals / intentionally out of scope - -- **Predictive maintenance app**: The second app in the project spec. Tracked separately. -- **Atlas-specific features**: Vector search, Atlas Search, change streams. Stubs exist for vector search but require Atlas credentials. -- **Real authentication**: The login stub fabricates users and sets a plain-text cookie. No OAuth, JWT, or session management. -- **Full UI port**: The UI is functional but simplified. No Redux, no chatbot, no guided tours, no real-time SSE. From 7b48e49c1db13810d40f100409c232a93ee73db6 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 09:07:22 +0300 Subject: [PATCH 03/30] Reframe FL-06 as codec-generic, not ObjectId-specific The where() codec gap applies to any field whose codec has a non-identity encode, not just ObjectId. Updated framework-limitations.md and next-steps.md to reflect the corrected root cause and scope. 
--- docs/planning/mongo-target/next-steps.md | 6 +++--- projects/mongo-example-apps/framework-limitations.md | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/planning/mongo-target/next-steps.md b/docs/planning/mongo-target/next-steps.md index 5e3156ad6..2aef4cd11 100644 --- a/docs/planning/mongo-target/next-steps.md +++ b/docs/planning/mongo-target/next-steps.md @@ -65,7 +65,7 @@ Full details in [projects/mongo-example-apps/framework-limitations.md](../../../ | FL-03 | Timestamp codec type incompatible with `Date`/`string` | Type ergonomics | | FL-04 | No typed `$push`/`$pull`/`$inc` | ORM mutations | | FL-05 | Pipeline/raw results untyped | Query results | -| FL-06 | ObjectId filter requires manual `MongoParamRef` wrapping | ORM queries | +| FL-06 | `where()` does not encode filter values through codecs | ORM queries | | FL-07 | No `$vectorSearch` in pipeline builder | Extension (deferred) | | FL-08 | 1:N back-relation loading not available/tested | ORM queries | | FL-09 | Migration planner creates separate collections for variants | Migration bugs | @@ -110,10 +110,10 @@ Full details in [projects/mongo-example-apps/framework-limitations.md](../../../ **Scope**: - **FL-04**: Implement dot-path field accessor mutations — `$push`, `$pull`, `$inc`, `$set` on nested paths via `u("field.path")` (deferred Phase 1.5 M4). Maps to [ADR 180](../../architecture%20docs/adrs/ADR%20180%20-%20Dot-path%20field%20accessor.md). -- **FL-06**: ORM `where()` should auto-encode ObjectId-typed fields. When a contract field has `codecId: 'mongo/objectId@1'`, the ORM should wrap the value in `MongoParamRef` automatically instead of requiring the user to construct it manually. +- **FL-06**: ORM `where()` should encode filter values through codecs, the same way mutations already do. When a contract field has a `codecId`, the ORM should wrap the value in `MongoParamRef` with that codec automatically. 
Most visible with ObjectId (string → BSON ObjectId), but applies to any codec with a non-identity `encode`. - **FL-08**: Validate and test 1:N back-relation loading via `include()`. If it works, add test coverage. If it doesn't, implement it. -**Proof**: The retail store's `mongoRaw` calls for cart add/remove and order status update are replaced with ORM `update()` calls. ObjectId filter helpers (`objectIdEq()`) are removed. +**Proof**: The retail store's `mongoRaw` calls for cart add/remove and order status update are replaced with ORM `update()` calls. Manual filter helpers (`objectIdEq()`, `rawObjectIdFilter()`) are removed — `where({ userId })` encodes values through codecs automatically. **Depends on**: Area 1 (type fixes reduce noise, but not a hard blocker). diff --git a/projects/mongo-example-apps/framework-limitations.md b/projects/mongo-example-apps/framework-limitations.md index 2b6a8882b..4f0b01f90 100644 --- a/projects/mongo-example-apps/framework-limitations.md +++ b/projects/mongo-example-apps/framework-limitations.md @@ -28,7 +28,7 @@ These are the highest-impact issues. They force type casts at nearly every bound |---|---|---|---|---| | FL-04 | **ORM lacks typed `$push`/`$pull`/`$inc` array update operators** | ORM `update()` only supports `$set` semantics. Array mutations require dropping to `mongoRaw` with untyped commands and manual `MongoParamRef` construction. This is the most exercised workaround in the app. | 3 data access functions use `mongoRaw`: cart add (`$push`), cart remove (`$pull`), order status update (`$push`) | Open | | FL-05 | **Pipeline and raw query results are untyped** | `runtime.execute()` yields `unknown`. Pipeline builder `build()` produces a plan with no result type. Every pipeline/raw call site casts `row as T` with no compile-time or runtime verification. 
| `collectResults()` helper centralizes the cast but provides no type safety | Open | -| FL-06 | **ObjectId filter requires manual `MongoParamRef` wrapping** | Filtering by ObjectId-typed fields requires `MongoFieldFilter.of('userId', '$eq', new MongoParamRef(userId, { codecId: 'mongo/objectId@1' }))` instead of a simpler `where({ userId })`. | `objectIdEq()` helper in `object-id-filter.ts` reduces boilerplate | Open | +| FL-06 | **ORM `where()` does not encode filter values through codecs** | The ORM auto-encodes values through codecs on writes (create/update) but not on reads (where). Filter values are passed as raw `MongoFilterExpr` AST nodes and the user must manually construct `MongoParamRef` with the correct `codecId`. Most visible with ObjectId fields (string → BSON ObjectId), but affects any codec with a non-identity `encode`. | `objectIdEq()` helper in `object-id-filter.ts` wraps ObjectId filters; no generic workaround for other codecs | Open | | FL-07 | **No `$vectorSearch` stage in pipeline builder** | The pipeline builder doesn't expose a `vectorSearch()` stage. Implementing vector search requires raw aggregate with fully untyped commands. Atlas-specific, so likely needs an extension pack. | `findSimilarProducts()` is a stub; would need raw aggregate | Open | | FL-08 | **1:N back-relation loading not available or not tested** | `include()` only tested for N:1 relations (cart→user, order→user, invoice→order). Loading a user's carts or orders via `include()` from the user side has not been demonstrated. | N/A — only N:1 direction used | Open | From 76ca857c102f001b50c7c927a40e715d60067522 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 09:26:36 +0300 Subject: [PATCH 04/30] Add spec for ORM query and mutation ergonomics (TML-2246) Captures design decisions from discussion: codec-generic where() encoding, full ADR 180 field accessor with dot-path traversal, 1:N back-relation test coverage, and retail store cleanup scope. 
--- .../orm-query-mutation-ergonomics.spec.md | 131 ++++++++++++++++++ 1 file changed, 131 insertions(+) create mode 100644 projects/mongo-example-apps/specs/orm-query-mutation-ergonomics.spec.md diff --git a/projects/mongo-example-apps/specs/orm-query-mutation-ergonomics.spec.md b/projects/mongo-example-apps/specs/orm-query-mutation-ergonomics.spec.md new file mode 100644 index 000000000..cf474dd71 --- /dev/null +++ b/projects/mongo-example-apps/specs/orm-query-mutation-ergonomics.spec.md @@ -0,0 +1,131 @@ +# Summary + +Close the ergonomics gap in the Mongo ORM by making `where()` codec-aware, implementing the ADR 180 field accessor for typed mutation operators, and verifying 1:N back-relation loading. This eliminates the need for `mongoRaw` workarounds and manual `MongoParamRef` construction in the retail store example app. + +# Description + +The retail store example app exposed three framework limitations (FL-04, FL-06, FL-08) in the Mongo ORM that force users into low-level workarounds: + +- **FL-06**: The ORM encodes values through codecs on writes (create/update) but not on reads (where). Users must manually construct `MongoParamRef` with the correct `codecId` for every filter. Most visible with ObjectId fields (string → BSON ObjectId), but affects any codec with a non-identity `encode`. +- **FL-04**: The ORM `update()` only supports `$set` semantics. Array mutations (`$push`, `$pull`) and numeric updates (`$inc`) require dropping to `mongoRaw` with untyped commands. ADR 180 specifies the field accessor pattern for these operations. +- **FL-08**: 1:N back-relation loading via `include()` (e.g., User → carts, User → orders) has not been tested. The implementation appears correct but lacks test coverage. + +**Linear**: [TML-2246](https://linear.app/prisma-company/issue/TML-2246) + +# Requirements + +## Functional Requirements + +### FL-06: Codec-aware `where()` overload + +1. 
`where()` accepts a plain object `{ fieldName: value }` in addition to the existing `MongoFilterExpr`. +2. Each field-value pair is resolved to `$eq` semantics. +3. Values are encoded through codecs using the same `#wrapFieldValue` logic that mutations use — the field's `codecId` is looked up from the contract and attached to the `MongoParamRef`. +4. Multiple fields in a single object are AND'd together. +5. The plain-object `where()` is chainable with other `where()` calls (both object and `MongoFilterExpr` forms). +6. The type of the object maps each field to its codec input type (from the contract type maps). + +### FL-04: ADR 180 field accessor for mutations + +7. `update()` accepts a callback `(u) => FieldOperation[]` in addition to the existing plain-object (`$set`) form. +8. Top-level scalar fields are accessible as properties on the accessor proxy: `u.fieldName` returns an expression with mutation operators. +9. Nested value object fields are accessible via callable dot-path: `u("address.city")` returns an expression with mutation operators. +10. Dot-path strings are type-checked at compile time using recursive template literal types. Invalid paths produce type errors. +11. The expression returned by the accessor provides capability-gated mutation operators: + - All targets: `.set(value)`, `.unset()` + - Mongo: `.inc(value)`, `.mul(value)`, `.push(value)`, `.pull(match)`, `.addToSet(value)`, `.pop(end)` +12. Each operator produces a `FieldOperation` that is collected and compiled into a MongoDB update document (e.g., `{ $push: { "items": ... }, $inc: { "count": 1 } }`). +13. Values in field operations are encoded through codecs using `#wrapFieldValue`. +14. The callback form works with `update()`, `updateAll()`, `updateCount()`, and the `update` part of `upsert()`. + +### FL-08: 1:N back-relation loading + +15. `include()` works correctly for 1:N reference relations (e.g., User → carts, Order → invoices). +16. 
The `$lookup` stage is generated without `$unwind` for 1:N cardinality.
+17. The return type for 1:N included relations is an array.
+
+## Non-Functional Requirements
+
+18. No new runtime dependencies.
+19. Type-level dot-path resolution must not cause noticeable IDE slowdown for schemas with ≤ 3 levels of value object nesting.
+
+## Non-goals
+
+- **Query-side dot-path accessor** (`u("address.city").eq("NYC")` for filter expressions): ADR 180 describes this but it's a separate concern from mutations. The object-based `where()` overload covers the immediate need.
+- **Extended comparison operators in object-based `where()`** (e.g., `{ price: { $gte: 10 } }`): `$eq`-only is consistent with the SQL family. Complex filters use the existing `MongoFilterExpr` chain.
+- **`$vectorSearch` pipeline stage** (FL-07): Requires Atlas extension pack — separate project.
+- **Change streams** (FL-14): Requires streaming subscription support.
+
+# Acceptance Criteria
+
+## FL-06: Codec-aware `where()`
+
+- [ ] `where({ userId: "abc123" })` on a model with `userId: mongo/objectId@1` produces a filter with `MongoParamRef` carrying `codecId: 'mongo/objectId@1'`
+- [ ] `where({ name: "Alice" })` on a string field produces a filter with `MongoParamRef` carrying `codecId: 'mongo/string@1'`
+- [ ] `where({ userId: "abc", name: "Alice" })` produces an AND of two equality filters
+- [ ] Object-based `where()` is chainable with `MongoFilterExpr`-based `where()`
+- [ ] Type errors for invalid field names in the where object
+- [ ] Type errors for wrong value types (e.g., number for a string field)
+
+## FL-04: Field accessor mutations
+
+- [ ] `update(u => [u.items.push(newItem)])` produces `{ $push: { "items": ... } }`
+- [ ] `update(u => [u.count.inc(1)])` produces `{ $inc: { "count": 1 } }`
+- [ ] `update(u => [u.name.set("Alice")])` produces `{ $set: { "name": ... } }`
+- [ ] `update(u => [u.name.unset()])` produces `{ $unset: { "name": "" } }`
+- [ ] `update(u => 
[u("address.city").set("NYC")])` produces `{ $set: { "address.city": } }` +- [ ] Multiple operations in a single callback are merged into the update document by operator key +- [ ] Values in field operations are encoded through codecs (codecId attached to MongoParamRef) +- [ ] Callback form works with `updateAll()`, `updateCount()`, and `upsert()` +- [ ] Type error for invalid dot-paths (e.g., `u("address.nonexistent")`) +- [ ] Type error for operator/value type mismatch (e.g., `.inc()` on a string field) + +## FL-08: 1:N back-relation loading + +- [ ] `include()` on a 1:N reference relation produces a `$lookup` without `$unwind` +- [ ] Return type for 1:N included relation is an array + +## Retail store cleanup + +- [ ] Retail store `mongoRaw` calls for cart add/remove and order status update are replaced with ORM `update()` calls using the field accessor +- [ ] `objectIdEq()` and `rawObjectIdFilter()` helpers are removed — replaced with object-based `where()` +- [ ] `object-id-filter.ts` is deleted + +# Other Considerations + +## Security + +N/A — internal query builder changes, no auth or data sensitivity impact. + +## Cost + +N/A — no infrastructure changes. + +## Observability + +N/A — no new runtime surfaces. + +## Data Protection + +N/A — no change to data handling. + +## Analytics + +N/A. 
+ +# References + +- [ADR 180 — Dot-path field accessor](../../../docs/architecture%20docs/adrs/ADR%20180%20-%20Dot-path%20field%20accessor.md) — authoritative design for the field accessor pattern, mutation semantics, capability-gated operators, and backend translation +- [ADR 178 — Value objects in the contract](../../../docs/architecture%20docs/adrs/ADR%20178%20-%20Value%20objects%20in%20the%20contract.md) — value object definitions that the dot-path accessor navigates +- [Framework limitations](../framework-limitations.md) — FL-04, FL-06, FL-08 detailed descriptions and workarounds +- [Next steps](../../../docs/planning/mongo-target/next-steps.md) — Area 2 scope and sequencing +- [ORM collection implementation](../../../packages/2-mongo-family/5-query-builders/orm/src/collection.ts) — current `where()`, `update()`, `include()` implementation +- [Retail store workarounds](../../../examples/retail-store/src/data/object-id-filter.ts) — `objectIdEq()` helper that FL-06 eliminates + +# Open Questions + +None — all design decisions resolved during discussion: + +1. **FL-04 API shape**: ADR 180 callback pattern with Proxy-based field accessor (resolved by existing ADR). +2. **FL-06 operator scope**: `$eq`-only, consistent with SQL. Complex filters use existing `MongoFilterExpr` chain. +3. **Dot-path scope**: Full callable dot-path accessor, not just top-level fields. This ticket is the natural home; value objects are landed and nothing blocks it. From 701f3c864090e9e193cf0b142435687ae415650e Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 09:27:37 +0300 Subject: [PATCH 05/30] Add execution plan for ORM query and mutation ergonomics 4 milestones: 1:N back-relation tests, codec-aware where(), ADR 180 field accessor with dot-path traversal, retail store cleanup. All acceptance criteria mapped to unit or type tests. 
--- .../orm-query-mutation-ergonomics-plan.md | 105 ++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 projects/mongo-example-apps/plans/orm-query-mutation-ergonomics-plan.md diff --git a/projects/mongo-example-apps/plans/orm-query-mutation-ergonomics-plan.md b/projects/mongo-example-apps/plans/orm-query-mutation-ergonomics-plan.md new file mode 100644 index 000000000..0d5bfdf38 --- /dev/null +++ b/projects/mongo-example-apps/plans/orm-query-mutation-ergonomics-plan.md @@ -0,0 +1,105 @@ +# ORM Query & Mutation Ergonomics Plan + +## Summary + +Implement codec-aware `where()` filtering, ADR 180 field accessor mutations, and 1:N back-relation test coverage in the Mongo ORM. Success means the retail store example app compiles without `mongoRaw` workarounds or manual `MongoParamRef`/`MongoFieldFilter` construction for common operations. + +**Spec:** [specs/orm-query-mutation-ergonomics.spec.md](../specs/orm-query-mutation-ergonomics.spec.md) + +## Collaborators + +| Role | Person/Team | Context | +|---|---|---| +| Maker | Will | Drives execution | + +## Milestones + +### Milestone 1: 1:N back-relation loading (FL-08) + +Verify that `include()` works for 1:N reference relations and add test coverage. Lowest risk — validates existing implementation. + +**Tasks:** + +- [ ] Add a 1:N reference relation to the ORM test fixture (e.g., `User.tasks` → `Task` via `assigneeId`) +- [ ] Add unit tests: `include()` on 1:N reference relation produces `$lookup` without `$unwind` +- [ ] Add unit tests: return type for 1:N included relation is an array (type-level test) +- [ ] Verify with retail store contract: `User.carts`, `User.orders`, `Order.invoices` all work with `include()` + +### Milestone 2: Codec-aware `where()` (FL-06) + +Add a plain-object `where()` overload that encodes filter values through codecs automatically. Establishes the codec-encoding pattern that FL-04 also uses. 
+ +**Tasks:** + +- [ ] Write unit tests for object-based `where()`: ObjectId field, string field, multi-field AND, chaining with `MongoFilterExpr` +- [ ] Write type-level tests: invalid field names error, wrong value types error +- [ ] Add `where()` overload accepting `MongoWhereFilter` to the `MongoCollection` interface +- [ ] Implement object-to-filter compilation in `MongoCollectionImpl`: iterate fields, look up `codecId` from contract, wrap with `#wrapFieldValue`, build `MongoFieldFilter.eq`, AND if multiple +- [ ] Verify all tests pass + +### Milestone 3: ADR 180 field accessor for mutations (FL-04) + +Implement the Proxy-based field accessor with property access for top-level fields and callable dot-path for nested value object traversal. Capability-gated mutation operators. + +**Tasks:** + +- [ ] Define the `FieldOperation` type: `{ operator, field, value }` representing a single update operation +- [ ] Define the `FieldExpression` interface with mutation operators: `set()`, `unset()`, `inc()`, `mul()`, `push()`, `pull()`, `addToSet()`, `pop()` +- [ ] Define recursive template literal types for dot-path validation: `DotPath` and `ResolveDotPathType` +- [ ] Define the `FieldAccessor` type: top-level fields as properties returning `FieldExpression`, callable with dot-path string for nested value object fields +- [ ] Write unit tests for field operation compilation: individual operators (`$set`, `$push`, `$pull`, `$inc`, `$unset`, `$addToSet`, `$pop`, `$mul`) produce correct update documents +- [ ] Write unit tests for multi-operation merging: multiple operations in one callback are grouped by operator key +- [ ] Write unit tests for codec encoding in field operations: values carry correct `codecId` +- [ ] Write unit tests for dot-path operations: `u("address.city").set("NYC")` produces `{ $set: { "address.city": ... 
} }` +- [ ] Write type-level tests: invalid dot-paths error, operator/value type mismatch errors +- [ ] Implement `createFieldAccessor()`: Proxy-based factory that returns the accessor +- [ ] Implement field operation → update document compilation: group `FieldOperation[]` by operator, merge into `{ $set: {...}, $push: {...}, ... }`, encode values through `#wrapFieldValue` +- [ ] Add `update()` callback overload to `MongoCollection` interface and `MongoCollectionImpl` +- [ ] Extend `updateAll()`, `updateCount()` to accept the callback form +- [ ] Extend `upsert()` to accept the callback form for the `update` part +- [ ] Add value objects to the ORM test fixture contract to support dot-path tests +- [ ] Verify all tests pass + +### Milestone 4: Retail store cleanup + +Replace workarounds in the retail store with the new ORM features. This is the end-to-end proof. + +**Tasks:** + +- [ ] Replace `objectIdEq()` / `rawObjectIdFilter()` calls with object-based `where()` in all data access functions +- [ ] Delete `src/data/object-id-filter.ts` +- [ ] Replace `mongoRaw` in `addToCart()` with `update(u => [u.items.push(item)])` +- [ ] Replace `mongoRaw` in `removeFromCart()` with `update(u => [u.items.pull({ productId })])` +- [ ] Replace `mongoRaw` in `updateOrderStatus()` with `update(u => [u.statusHistory.push(entry)])` +- [ ] Remove `execute-raw.ts` imports/functions that are no longer needed +- [ ] Verify retail store compiles and tests pass + +## Test Coverage + +| Acceptance Criterion | Test Type | Milestone | Notes | +|---|---|---|---| +| `where({ userId })` with ObjectId codec produces correct filter | Unit | M2 | | +| `where({ name })` with string codec produces correct filter | Unit | M2 | | +| Multi-field `where()` produces AND | Unit | M2 | | +| Object `where()` chainable with `MongoFilterExpr` `where()` | Unit | M2 | | +| Type error for invalid field names in where object | Type test | M2 | `@ts-expect-error` | +| Type error for wrong value types in where 
object | Type test | M2 | `@ts-expect-error` | +| `u.items.push(item)` → `{ $push: { "items": ... } }` | Unit | M3 | | +| `u.count.inc(1)` → `{ $inc: { "count": 1 } }` | Unit | M3 | | +| `u.name.set("Alice")` → `{ $set: { "name": ... } }` | Unit | M3 | | +| `u.name.unset()` → `{ $unset: { "name": "" } }` | Unit | M3 | | +| `u("address.city").set("NYC")` → `{ $set: { "address.city": ... } }` | Unit | M3 | | +| Multiple operations merged by operator key | Unit | M3 | | +| Values in operations encoded through codecs | Unit | M3 | | +| Callback works with `updateAll()`, `updateCount()`, `upsert()` | Unit | M3 | | +| Type error for invalid dot-path | Type test | M3 | `@ts-expect-error` | +| Type error for operator/value mismatch | Type test | M3 | `@ts-expect-error` | +| `include()` on 1:N reference relation: `$lookup` without `$unwind` | Unit | M1 | | +| 1:N included relation return type is array | Type test | M1 | | +| Retail store `mongoRaw` calls replaced | Integration | M4 | Existing retail store tests pass | +| `objectIdEq()` helpers removed | Integration | M4 | File deleted, no import errors | + +## Open Items + +- The ORM test fixture needs value objects added (M3) and a 1:N reference back-relation added (M1). Both are test fixture changes, not contract schema changes. +- `execute-raw.ts` may still be needed for pipeline/raw queries outside the scope of this ticket. Only remove functions that are no longer referenced after the cleanup. From 8d054b394e0bd3dc8bed78090d8351be64755aaf Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 09:40:43 +0300 Subject: [PATCH 06/30] feat(mongo-orm): add 1:N reference relation test coverage (FL-08) Add User.tasks 1:N reference relation to the ORM test fixture and verify that include() produces $lookup without $unwind. Type tests confirm the return type for 1:N included relations is an array. 
--- .../test/fixtures/orm-contract.d.ts | 2 +- .../test/fixtures/orm-contract.json | 8 +++++++ .../orm/test/collection.test.ts | 14 +++++++++++ .../orm/test/orm-types.test-d.ts | 24 +++++++++++++++++-- 4 files changed, 45 insertions(+), 3 deletions(-) diff --git a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts index b1d3e3dce..5e4270210 100644 --- a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts +++ b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts @@ -34,7 +34,7 @@ type ContractBase = { readonly extensionPacks: { }; readonly meta: { }; readonly roots: { readonly tasks: 'Task'; readonly users: 'User' }; - readonly models: { readonly Task: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly title: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly type: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly assigneeId: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false } }; readonly relations: { readonly assignee: { readonly to: 'User'; readonly cardinality: 'N:1'; readonly on: { readonly localFields: readonly ['assigneeId']; readonly targetFields: readonly ['_id'] } }; readonly comments: { readonly to: 'Comment'; readonly cardinality: '1:N' } }; readonly storage: { readonly collection: 'tasks'; readonly relations: { readonly comments: { readonly field: 'comments' } } }; readonly discriminator: { readonly field: 'type' }; readonly variants: { readonly Bug: { readonly value: 'bug' }; readonly Feature: { readonly value: 'feature' } } }; readonly Bug: { readonly fields: { readonly 
severity: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly Feature: { readonly fields: { readonly priority: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly targetRelease: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly User: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly name: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly email: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: { readonly addresses: { readonly to: 'Address'; readonly cardinality: '1:N' } }; readonly storage: { readonly collection: 'users'; readonly relations: { readonly addresses: { readonly field: 'addresses' } } } }; readonly Address: { readonly fields: { readonly street: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly city: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly zip: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'User' }; readonly Comment: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly text: { readonly type: { readonly kind: 
'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly createdAt: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/date@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'Task' } }; + readonly models: { readonly Task: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly title: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly type: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly assigneeId: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false } }; readonly relations: { readonly assignee: { readonly to: 'User'; readonly cardinality: 'N:1'; readonly on: { readonly localFields: readonly ['assigneeId']; readonly targetFields: readonly ['_id'] } }; readonly comments: { readonly to: 'Comment'; readonly cardinality: '1:N' } }; readonly storage: { readonly collection: 'tasks'; readonly relations: { readonly comments: { readonly field: 'comments' } } }; readonly discriminator: { readonly field: 'type' }; readonly variants: { readonly Bug: { readonly value: 'bug' }; readonly Feature: { readonly value: 'feature' } } }; readonly Bug: { readonly fields: { readonly severity: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly Feature: { readonly fields: { readonly priority: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly targetRelease: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false 
} }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly User: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly name: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly email: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: { readonly addresses: { readonly to: 'Address'; readonly cardinality: '1:N' }; readonly tasks: { readonly to: 'Task'; readonly cardinality: '1:N'; readonly on: { readonly localFields: readonly ['_id']; readonly targetFields: readonly ['assigneeId'] } } }; readonly storage: { readonly collection: 'users'; readonly relations: { readonly addresses: { readonly field: 'addresses' } } } }; readonly Address: { readonly fields: { readonly street: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly city: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly zip: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'User' }; readonly Comment: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly text: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly createdAt: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/date@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'Task' } }; readonly storage: { readonly collections: { readonly tasks: Record; 
readonly users: Record }; readonly storageHash: StorageHash }; }; diff --git a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json index a1022cc98..941b28ab1 100644 --- a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json +++ b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json @@ -83,6 +83,14 @@ "addresses": { "to": "Address", "cardinality": "1:N" + }, + "tasks": { + "to": "Task", + "cardinality": "1:N", + "on": { + "localFields": ["_id"], + "targetFields": ["assigneeId"] + } } } }, diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index 1adfba007..3091c8fad 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -253,6 +253,20 @@ describe('MongoCollection include()', () => { // @ts-expect-error 'comments' is an embed relation, not a reference relation expect(() => col.include('comments')).toThrow('embed relation'); }); + + it('produces $lookup without $unwind for 1:N reference relation', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor).include('tasks'); + col.all(); + const stages = executor.lastStages!; + const lookup = stages.find((s) => s.kind === 'lookup') as MongoLookupStage; + expect(lookup.from).toBe('tasks'); + expect(lookup.localField).toBe('_id'); + expect(lookup.foreignField).toBe('assigneeId'); + expect(lookup.as).toBe('tasks'); + const unwind = stages.find((s) => s.kind === 'unwind'); + expect(unwind).toBeUndefined(); + }); }); describe('MongoCollection terminal methods', () => { diff --git a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts 
b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts index 64adaba62..b5d2a39f4 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts @@ -87,9 +87,9 @@ test('MongoIncludeSpec only allows reference relation keys', () => { expectTypeOf().not.toHaveProperty('comments'); }); -test('MongoIncludeSpec has no includable keys for models with only embed relations', () => { +test('ReferenceRelationKeys picks reference relations on User', () => { type UserRefKeys = ReferenceRelationKeys; - expectTypeOf().toBeNever(); + expectTypeOf().toEqualTypeOf<'tasks'>(); }); // --- Polymorphic root returns discriminated union --- @@ -327,3 +327,23 @@ test('variant() preserves TVariant through chaining', () => { expectTypeOf().toHaveProperty('severity'); expectTypeOf().not.toHaveProperty('type'); }); + +// --- 1:N reference relation include --- + +test('include() on 1:N reference relation returns array type', () => { + const col = {} as MongoCollection; + const result = col.include('tasks').first(); + expectTypeOf(result).toExtend< + Promise<(InferRootRow & { tasks: InferFullRow[] }) | null> + >(); +}); + +test('include() on 1:N reference relation all() returns array type', () => { + const col = {} as MongoCollection; + const result = col.include('tasks').all(); + expectTypeOf(result).toExtend< + AsyncIterableResult< + InferRootRow & { tasks: InferFullRow[] } + > + >(); +}); From a42a352fbad67734f861da9ee363cdbeee3cbc7f Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 09:45:32 +0300 Subject: [PATCH 07/30] feat(mongo-orm): add codec-aware object-based where() overload (FL-06) where() now accepts a plain object { fieldName: value } in addition to MongoFilterExpr. Values are encoded through codecs automatically via the field codecId from the contract. Multiple fields produce an AND of equality filters. Chainable with MongoFilterExpr where() calls. 
Also fixes MongoWhereFilter type to correctly navigate the contract field type structure (field.type.codecId instead of field.codecId). --- .../5-query-builders/orm/src/collection.ts | 34 +++++++-- .../5-query-builders/orm/src/types.ts | 5 +- .../orm/test/collection.test.ts | 74 +++++++++++++++++++ .../orm/test/orm-types.test-d.ts | 19 +++++ 4 files changed, 126 insertions(+), 6 deletions(-) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 171d9fe02..84273b0a5 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -36,6 +36,7 @@ import type { DefaultModelRow, IncludedRow, MongoIncludeSpec, + MongoWhereFilter, NoIncludes, ReferenceRelationKeys, ResolvedCreateInput, @@ -57,7 +58,11 @@ export interface MongoCollection< variant>( variantName: V, ): MongoCollection; - /** Appends a filter condition. Returns a new immutable collection. */ + /** Appends equality filters from a plain object. Values are encoded through codecs. */ + where( + filter: MongoWhereFilter, + ): MongoCollection; + /** Appends a filter condition from a raw filter expression. */ where(filter: MongoFilterExpr): MongoCollection; /** Restricts returned fields to the given subset. Returns a new immutable collection. */ select( @@ -171,10 +176,14 @@ class MongoCollectionImpl< ); } - where(filter: MongoFilterExpr): MongoCollection { - return this.#clone({ - filters: [...this.#state.filters, filter], - }); + where( + filter: MongoWhereFilter | MongoFilterExpr, + ): MongoCollection { + if (this.#isFilterExpr(filter)) { + return this.#clone({ filters: [...this.#state.filters, filter] }); + } + const compiled = this.#compileWhereObject(filter as Record); + return this.#clone({ filters: [...this.#state.filters, ...compiled] }); } select( @@ -478,6 +487,21 @@ class MongoCollectionImpl< return model?.fields ?? 
{}; } + #isFilterExpr(filter: unknown): filter is MongoFilterExpr { + return typeof filter === 'object' && filter !== null && 'kind' in filter; + } + + #compileWhereObject(data: Record): MongoFilterExpr[] { + const fields = this.#modelFields(); + const filters: MongoFilterExpr[] = []; + for (const [key, value] of Object.entries(data)) { + if (value === undefined) continue; + const wrapped = this.#wrapFieldValue(value, fields[key]); + filters.push(MongoFieldFilter.eq(key, wrapped)); + } + return filters; + } + #wrapFieldValue(value: unknown, field: ContractField | undefined): MongoValue { if (field === undefined) return new MongoParamRef(value); diff --git a/packages/2-mongo-family/5-query-builders/orm/src/types.ts b/packages/2-mongo-family/5-query-builders/orm/src/types.ts index b18cfdee7..95e360dde 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/types.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/types.ts @@ -183,7 +183,10 @@ export type MongoWhereFilter< TCodecTypes extends Record = ExtractMongoCodecTypes, > = { readonly [K in keyof TContract['models'][ModelName]['fields']]?: TContract['models'][ModelName]['fields'][K] extends { - readonly codecId: infer CId extends string & keyof TCodecTypes; + readonly type: { + readonly kind: 'scalar'; + readonly codecId: infer CId extends string & keyof TCodecTypes; + }; } ? 
TCodecTypes[CId]['output'] : unknown; diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index 3091c8fad..f2397fd9b 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -144,6 +144,80 @@ describe('MongoCollection chaining', () => { }); }); +describe('MongoCollection object-based where()', () => { + it('produces eq filter with string codecId for string field', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor).where({ name: 'Alice' }); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('field'); + if (match.filter.kind === 'field') { + expect(match.filter.field).toBe('name'); + expect(match.filter.op).toBe('$eq'); + const ref = match.filter.value as MongoParamRef; + expect(ref).toBeInstanceOf(MongoParamRef); + expect(ref.codecId).toBe('mongo/string@1'); + expect(ref.value).toBe('Alice'); + } + }); + + it('produces eq filter with objectId codecId for ObjectId field', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'Task', executor).where({ + assigneeId: 'abc123', + }); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('field'); + if (match.filter.kind === 'field') { + expect(match.filter.field).toBe('assigneeId'); + expect(match.filter.op).toBe('$eq'); + const ref = match.filter.value as MongoParamRef; + expect(ref).toBeInstanceOf(MongoParamRef); + expect(ref.codecId).toBe('mongo/objectId@1'); + expect(ref.value).toBe('abc123'); + } + }); + + it('produces AND of multiple eq filters for multi-field object', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor).where({ + name: 'Alice', + 
email: 'a@b.c', + }); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('and'); + if (match.filter.kind === 'and') { + expect(match.filter.exprs).toHaveLength(2); + const first = match.filter.exprs[0]!; + const second = match.filter.exprs[1]!; + expect(first.kind).toBe('field'); + expect(second.kind).toBe('field'); + } + }); + + it('chains with MongoFilterExpr where()', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor) + .where({ name: 'Alice' }) + .where(MongoFieldFilter.gte('email', 'a')); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('and'); + }); + + it('chains MongoFilterExpr where() then object where()', () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor) + .where(MongoFieldFilter.eq('_id', 'id-1')) + .where({ name: 'Alice' }); + col.all(); + const match = executor.lastStages![0] as MongoMatchStage; + expect(match.filter.kind).toBe('and'); + }); +}); + describe('MongoCollection variant()', () => { it('returns a new instance from variant()', () => { const executor = createMockExecutor(); diff --git a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts index b5d2a39f4..81bbd4e29 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts @@ -69,6 +69,25 @@ test('where filter keys are constrained to model field names', () => { expectTypeOf().toHaveProperty('email'); }); +test('where filter rejects invalid field names', () => { + type UserFilter = MongoWhereFilter; + // @ts-expect-error 'nonexistent' is not a field on User + void ({ nonexistent: 'value' } satisfies UserFilter); +}); + +test('where filter enforces value types from codec', () 
=> { + type UserFilter = MongoWhereFilter; + void ({ name: 'Alice' } satisfies UserFilter); + // @ts-expect-error number is not assignable to string field + void ({ name: 123 } satisfies UserFilter); +}); + +test('object-based where() accepts MongoWhereFilter', () => { + const col = {} as MongoCollection; + const filtered = col.where({ name: 'Alice' }); + expectTypeOf(filtered).toExtend>(); +}); + // --- Include constrained to reference relations only --- test('ReferenceRelationKeys picks only reference relations', () => { From ebb37d99bd772456a1a1ec57ebc8562a107ef570 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 09:53:09 +0300 Subject: [PATCH 08/30] feat(mongo-orm): add field accessor types and fixture for FL-04 Add value objects (HomeAddress), numeric field (loginCount), and array field (tags) to the ORM test fixture for testing mutation operators and dot-path access. Implement createFieldAccessor() with Proxy-based field expressions supporting $set, $unset, $inc, $mul, $push, $pull, $addToSet, $pop. Add compile-time DotPath and ResolveDotPathType types for type-safe dot-path access into value objects (ADR 180). 
--- .../test/fixtures/orm-contract.d.ts | 3 +- .../test/fixtures/orm-contract.json | 23 +- .../orm/src/field-accessor.ts | 251 ++++++++++++++++++ .../orm/test/collection.test.ts | 38 +-- .../orm/test/integration/polymorphism.test.ts | 72 ++++- 5 files changed, 361 insertions(+), 26 deletions(-) create mode 100644 packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts diff --git a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts index 5e4270210..bb2b64e34 100644 --- a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts +++ b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.d.ts @@ -34,7 +34,8 @@ type ContractBase = { readonly extensionPacks: { }; readonly meta: { }; readonly roots: { readonly tasks: 'Task'; readonly users: 'User' }; - readonly models: { readonly Task: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly title: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly type: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly assigneeId: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false } }; readonly relations: { readonly assignee: { readonly to: 'User'; readonly cardinality: 'N:1'; readonly on: { readonly localFields: readonly ['assigneeId']; readonly targetFields: readonly ['_id'] } }; readonly comments: { readonly to: 'Comment'; readonly cardinality: '1:N' } }; readonly storage: { readonly collection: 'tasks'; readonly relations: { readonly comments: { readonly field: 'comments' } } }; readonly discriminator: { readonly field: 'type' }; readonly variants: { readonly Bug: { 
readonly value: 'bug' }; readonly Feature: { readonly value: 'feature' } } }; readonly Bug: { readonly fields: { readonly severity: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly Feature: { readonly fields: { readonly priority: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly targetRelease: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly User: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly name: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly email: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: { readonly addresses: { readonly to: 'Address'; readonly cardinality: '1:N' }; readonly tasks: { readonly to: 'Task'; readonly cardinality: '1:N'; readonly on: { readonly localFields: readonly ['_id']; readonly targetFields: readonly ['assigneeId'] } } }; readonly storage: { readonly collection: 'users'; readonly relations: { readonly addresses: { readonly field: 'addresses' } } } }; readonly Address: { readonly fields: { readonly street: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly city: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly zip: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; 
readonly relations: Record; readonly storage: Record; readonly owner: 'User' }; readonly Comment: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly text: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly createdAt: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/date@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'Task' } }; + readonly models: { readonly Task: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly title: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly type: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly assigneeId: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false } }; readonly relations: { readonly assignee: { readonly to: 'User'; readonly cardinality: 'N:1'; readonly on: { readonly localFields: readonly ['assigneeId']; readonly targetFields: readonly ['_id'] } }; readonly comments: { readonly to: 'Comment'; readonly cardinality: '1:N' } }; readonly storage: { readonly collection: 'tasks'; readonly relations: { readonly comments: { readonly field: 'comments' } } }; readonly discriminator: { readonly field: 'type' }; readonly variants: { readonly Bug: { readonly value: 'bug' }; readonly Feature: { readonly value: 'feature' } } }; readonly Bug: { readonly fields: { readonly severity: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; 
readonly Feature: { readonly fields: { readonly priority: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly targetRelease: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: { readonly collection: 'tasks' }; readonly base: 'Task' }; readonly User: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly name: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly email: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly loginCount: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/int32@1' }; readonly nullable: false }; readonly tags: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false; readonly many: true }; readonly homeAddress: { readonly type: { readonly kind: 'valueObject'; readonly name: 'HomeAddress' }; readonly nullable: true } }; readonly relations: { readonly addresses: { readonly to: 'Address'; readonly cardinality: '1:N' }; readonly tasks: { readonly to: 'Task'; readonly cardinality: '1:N'; readonly on: { readonly localFields: readonly ['_id']; readonly targetFields: readonly ['assigneeId'] } } }; readonly storage: { readonly collection: 'users'; readonly relations: { readonly addresses: { readonly field: 'addresses' } } } }; readonly Address: { readonly fields: { readonly street: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly city: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly zip: { readonly type: { readonly kind: 'scalar'; readonly 
codecId: 'mongo/string@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'User' }; readonly Comment: { readonly fields: { readonly _id: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/objectId@1' }; readonly nullable: false }; readonly text: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly createdAt: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/date@1' }; readonly nullable: false } }; readonly relations: Record; readonly storage: Record; readonly owner: 'Task' } }; + readonly valueObjects: { readonly HomeAddress: { readonly fields: { readonly city: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false }; readonly country: { readonly type: { readonly kind: 'scalar'; readonly codecId: 'mongo/string@1' }; readonly nullable: false } } } }; readonly storage: { readonly collections: { readonly tasks: Record; readonly users: Record }; readonly storageHash: StorageHash }; }; diff --git a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json index 941b28ab1..dcfabf6d0 100644 --- a/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json +++ b/packages/2-mongo-family/1-foundation/mongo-contract/test/fixtures/orm-contract.json @@ -77,7 +77,20 @@ "fields": { "_id": { "type": { "kind": "scalar", "codecId": "mongo/objectId@1" }, "nullable": false }, "name": { "type": { "kind": "scalar", "codecId": "mongo/string@1" }, "nullable": false }, - "email": { "type": { "kind": "scalar", "codecId": "mongo/string@1" }, "nullable": false } + "email": { "type": { "kind": "scalar", "codecId": "mongo/string@1" }, "nullable": false }, + "loginCount": { + "type": { "kind": "scalar", "codecId": "mongo/int32@1" }, + "nullable": false + 
}, + "tags": { + "type": { "kind": "scalar", "codecId": "mongo/string@1" }, + "nullable": false, + "many": true + }, + "homeAddress": { + "type": { "kind": "valueObject", "name": "HomeAddress" }, + "nullable": true + } }, "relations": { "addresses": { @@ -114,5 +127,13 @@ "relations": {}, "owner": "Task" } + }, + "valueObjects": { + "HomeAddress": { + "fields": { + "city": { "type": { "kind": "scalar", "codecId": "mongo/string@1" }, "nullable": false }, + "country": { "type": { "kind": "scalar", "codecId": "mongo/string@1" }, "nullable": false } + } + } } } diff --git a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts new file mode 100644 index 000000000..691cb6c07 --- /dev/null +++ b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts @@ -0,0 +1,251 @@ +import type { ContractField, ContractValueObject } from '@prisma-next/contract/types'; +import type { + ExtractMongoCodecTypes, + MongoContract, + MongoContractWithTypeMaps, + MongoTypeMaps, +} from '@prisma-next/mongo-contract'; +import type { MongoValue } from '@prisma-next/mongo-value'; +import { MongoParamRef } from '@prisma-next/mongo-value'; + +// ── Runtime types ──────────────────────────────────────────────────────────── + +export type UpdateOperator = + | '$set' + | '$unset' + | '$inc' + | '$mul' + | '$push' + | '$pull' + | '$addToSet' + | '$pop'; + +export interface FieldOperation { + readonly operator: UpdateOperator; + readonly field: string; + readonly value: MongoValue; +} + +// ── Compile-time types ─────────────────────────────────────────────────────── + +type ScalarFieldKeys< + TContract extends MongoContract, + ModelName extends string & keyof TContract['models'], +> = { + [K in keyof TContract['models'][ModelName]['fields'] & + string]: TContract['models'][ModelName]['fields'][K] extends { + readonly type: { readonly kind: 'scalar' }; + } + ? 
K + : never; +}[keyof TContract['models'][ModelName]['fields'] & string]; + +type ValueObjectFieldKeys< + TContract extends MongoContract, + ModelName extends string & keyof TContract['models'], +> = { + [K in keyof TContract['models'][ModelName]['fields'] & + string]: TContract['models'][ModelName]['fields'][K] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: string }; + } + ? K + : never; +}[keyof TContract['models'][ModelName]['fields'] & string]; + +type ResolveFieldType< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], + K extends keyof TContract['models'][ModelName]['fields'] & string, + TCodecTypes extends Record = ExtractMongoCodecTypes, +> = TContract['models'][ModelName]['fields'][K] extends { + readonly type: { + readonly kind: 'scalar'; + readonly codecId: infer CId extends string & keyof TCodecTypes; + }; + readonly many: true; +} + ? TCodecTypes[CId]['output'][] + : TContract['models'][ModelName]['fields'][K] extends { + readonly type: { + readonly kind: 'scalar'; + readonly codecId: infer CId extends string & keyof TCodecTypes; + }; + } + ? TCodecTypes[CId]['output'] + : unknown; + +export interface FieldExpression { + set(value: T): FieldOperation; + unset(): FieldOperation; + inc(value: number): FieldOperation; + mul(value: number): FieldOperation; + push(value: T extends readonly (infer E)[] ? E : unknown): FieldOperation; + pull(match: T extends readonly (infer E)[] ? E | Partial : unknown): FieldOperation; + addToSet(value: T extends readonly (infer E)[] ? E : unknown): FieldOperation; + pop(end: 1 | -1): FieldOperation; +} + +type HasValueObjects = { readonly valueObjects?: Record }; + +type VOFields = TContract extends { + readonly valueObjects: infer VOs extends Record; +} + ? VOName extends keyof VOs + ? 
VOs[VOName]['fields'] + : never + : never; + +type VOScalarFieldKeys> = { + [K in keyof Fields & string]: Fields[K] extends { readonly type: { readonly kind: 'scalar' } } + ? K + : never; +}[keyof Fields & string]; + +type VOValueObjectFieldKeys> = { + [K in keyof Fields & string]: Fields[K] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: string }; + } + ? K + : never; +}[keyof Fields & string]; + +type VODotPaths< + TContract extends HasValueObjects, + Fields extends Record, + Prefix extends string, +> = + | { [K in VOScalarFieldKeys]: `${Prefix}${K}` }[VOScalarFieldKeys] + | { + [K in VOValueObjectFieldKeys]: Fields[K] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: infer N extends string }; + } + ? VODotPaths, `${Prefix}${K}.`> + : never; + }[VOValueObjectFieldKeys]; + +export type DotPath< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], +> = { + [K in ValueObjectFieldKeys< + TContract, + ModelName + >]: TContract['models'][ModelName]['fields'][K] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: infer N extends string }; + } + ? VODotPaths, `${K}.`> + : never; +}[ValueObjectFieldKeys]; + +type ResolveDotPathInFields< + TContract extends HasValueObjects, + Fields extends Record, + Path extends string, + TCodecTypes extends Record, +> = Path extends `${infer Head}.${infer Rest}` + ? Head extends keyof Fields & string + ? Fields[Head] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: infer N extends string }; + } + ? ResolveDotPathInFields, Rest, TCodecTypes> + : never + : never + : Path extends keyof Fields & string + ? Fields[Path] extends { + readonly type: { + readonly kind: 'scalar'; + readonly codecId: infer CId extends string & keyof TCodecTypes; + }; + } + ? 
TCodecTypes[CId]['output'] + : unknown + : never; + +export type ResolveDotPathType< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], + Path extends string, + TCodecTypes extends Record = ExtractMongoCodecTypes, +> = Path extends `${infer Head}.${infer Rest}` + ? Head extends keyof TContract['models'][ModelName]['fields'] & string + ? TContract['models'][ModelName]['fields'][Head] extends { + readonly type: { readonly kind: 'valueObject'; readonly name: infer N extends string }; + } + ? ResolveDotPathInFields, Rest, TCodecTypes> + : never + : never + : never; + +export type FieldAccessor< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], +> = { + readonly [K in ScalarFieldKeys]: FieldExpression< + ResolveFieldType + >; +} & { + readonly [K in ValueObjectFieldKeys]: FieldExpression< + ResolveFieldType + >; +} & (
<P extends DotPath<TContract, ModelName>,
>( + path: P, + ) => FieldExpression>); + +// ── Runtime implementation ─────────────────────────────────────────────────── + +function createFieldExpression(fieldPath: string): FieldExpression { + return { + set(value: unknown): FieldOperation { + return { operator: '$set', field: fieldPath, value: new MongoParamRef(value) }; + }, + unset(): FieldOperation { + return { operator: '$unset', field: fieldPath, value: new MongoParamRef('') }; + }, + inc(value: number): FieldOperation { + return { operator: '$inc', field: fieldPath, value: new MongoParamRef(value) }; + }, + mul(value: number): FieldOperation { + return { operator: '$mul', field: fieldPath, value: new MongoParamRef(value) }; + }, + push(value: unknown): FieldOperation { + return { operator: '$push', field: fieldPath, value: new MongoParamRef(value) }; + }, + pull(match: unknown): FieldOperation { + return { operator: '$pull', field: fieldPath, value: new MongoParamRef(match) }; + }, + addToSet(value: unknown): FieldOperation { + return { operator: '$addToSet', field: fieldPath, value: new MongoParamRef(value) }; + }, + pop(end: 1 | -1): FieldOperation { + return { operator: '$pop', field: fieldPath, value: new MongoParamRef(end) }; + }, + }; +} + +export function createFieldAccessor< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], +>(): FieldAccessor { + return new Proxy((() => {}) as unknown as FieldAccessor, { + get(_target, prop: string): FieldExpression { + return createFieldExpression(prop); + }, + apply(_target, _thisArg, args: [string]): FieldExpression { + return createFieldExpression(args[0]); + }, + }); +} + +export function compileFieldOperations( + ops: readonly FieldOperation[], + wrapValue: (field: string, value: MongoValue) => MongoValue, +): Record { + const grouped: Record> = {}; + for (const op of ops) { + if (!grouped[op.operator]) { + grouped[op.operator] = {}; + } + grouped[op.operator]![op.field] = wrapValue(op.field, op.value); 
+ } + return grouped as Record; +} diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index f2397fd9b..f2715cf0e 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -19,6 +19,14 @@ import type { MongoQueryExecutor } from '../src/executor'; const contract = ormContractJson as unknown as Contract; +const defaultUserData = { + name: 'Alice', + email: 'a@b.c', + loginCount: 0, + tags: [] as string[], + homeAddress: null, +}; + function createMockExecutor(...responses: unknown[][]): MongoQueryExecutor & { lastPlan: MongoQueryPlan | undefined; readonly lastCommand: MongoQueryPlan['command'] | undefined; @@ -386,14 +394,14 @@ describe('MongoCollection write methods', () => { it('returns created row with _id from insertedId', async () => { const executor = createMockExecutor([{ insertedId: 'new-id-1' }]); const col = createMongoCollection(contract, 'User', executor); - const result = await col.create({ name: 'Alice', email: 'a@b.c' }); - expect(result).toEqual({ _id: 'new-id-1', name: 'Alice', email: 'a@b.c' }); + const result = await col.create(defaultUserData); + expect(result).toEqual({ _id: 'new-id-1', ...defaultUserData }); }); it('sends an InsertOneCommand', async () => { const executor = createMockExecutor([{ insertedId: 'id' }]); const col = createMongoCollection(contract, 'User', executor); - await col.create({ name: 'Bob', email: 'b@b.c' }); + await col.create({ ...defaultUserData, name: 'Bob', email: 'b@b.c' }); expect(executor.lastCommand).toBeDefined(); expect(executor.lastCommand!.kind).toBe('insertOne'); expect(executor.lastCommand!.collection).toBe('users'); @@ -402,7 +410,7 @@ describe('MongoCollection write methods', () => { it('attaches codecId from contract fields to MongoParamRef in document', async () => { const executor = createMockExecutor([{ 
insertedId: 'id' }]); const col = createMongoCollection(contract, 'User', executor); - await col.create({ name: 'Alice', email: 'a@b.c' }); + await col.create(defaultUserData); const command = executor.lastCommand!; expect(command.kind).toBe('insertOne'); if (command.kind === 'insertOne') { @@ -435,14 +443,14 @@ describe('MongoCollection write methods', () => { const col = createMongoCollection(contract, 'User', executor); const rows: unknown[] = []; for await (const row of col.createAll([ - { name: 'Alice', email: 'a@b.c' }, - { name: 'Bob', email: 'b@b.c' }, + defaultUserData, + { ...defaultUserData, name: 'Bob', email: 'b@b.c' }, ])) { rows.push(row); } expect(rows).toEqual([ - { _id: 'id-1', name: 'Alice', email: 'a@b.c' }, - { _id: 'id-2', name: 'Bob', email: 'b@b.c' }, + { _id: 'id-1', ...defaultUserData }, + { _id: 'id-2', ...defaultUserData, name: 'Bob', email: 'b@b.c' }, ]); }); }); @@ -452,8 +460,8 @@ describe('MongoCollection write methods', () => { const executor = createMockExecutor([{ insertedIds: ['a', 'b'], insertedCount: 2 }]); const col = createMongoCollection(contract, 'User', executor); const count = await col.createCount([ - { name: 'Alice', email: 'a@b.c' }, - { name: 'Bob', email: 'b@b.c' }, + defaultUserData, + { ...defaultUserData, name: 'Bob', email: 'b@b.c' }, ]); expect(count).toBe(2); }); @@ -579,7 +587,7 @@ describe('MongoCollection write methods', () => { const executor = createMockExecutor([{ _id: 'new-id', name: 'Alice', email: 'a@b.c' }]); const col = createMongoCollection(contract, 'User', executor); const result = await col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ - create: { name: 'Alice', email: 'a@b.c' }, + create: defaultUserData, update: { name: 'Alice Updated' }, }); expect(result).toEqual({ _id: 'new-id', name: 'Alice', email: 'a@b.c' }); @@ -591,7 +599,7 @@ describe('MongoCollection write methods', () => { const col = createMongoCollection(contract, 'User', executor); await expect( col.upsert({ - create: { 
name: 'A', email: 'a@b.c' }, + create: { ...defaultUserData, name: 'A' }, update: { name: 'B' }, }), ).rejects.toThrow('requires a .where()'); @@ -648,7 +656,7 @@ describe('MongoCollection write methods', () => { await expect( withFilter(executor) .take(1) - .upsert({ create: { name: 'A', email: 'a@b.c' }, update: { name: 'B' } }), + .upsert({ create: { ...defaultUserData, name: 'A' }, update: { name: 'B' } }), ).rejects.toThrow('orderBy/skip/take'); }); }); @@ -791,7 +799,7 @@ describe('MongoCollection write methods', () => { const col = createMongoCollection(contract, 'User', executor); await expect( col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ - create: { name: 'Alice', email: 'a@b.c' }, + create: defaultUserData, update: { _id: 'new-id', name: 'B' }, }), ).rejects.toThrow('_id'); @@ -802,7 +810,7 @@ describe('MongoCollection write methods', () => { it('write methods do not mutate collection state', async () => { const executor = createMockExecutor([{ insertedId: 'x' }]); const col = createMongoCollection(contract, 'User', executor); - await col.create({ name: 'Alice', email: 'a@b.c' }); + await col.create(defaultUserData); const filtered = col.where(MongoFieldFilter.eq('name', 'Alice')); expect(filtered).not.toBe(col); }); diff --git a/packages/2-mongo-family/5-query-builders/orm/test/integration/polymorphism.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/integration/polymorphism.test.ts index 3c4ff41f6..30e8b3a60 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/integration/polymorphism.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/integration/polymorphism.test.ts @@ -42,7 +42,13 @@ describe( it('base query returns rows with discriminator values', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: 
[] as string[], + homeAddress: null, + }); await orm.tasks.create({ title: 'Fix crash', @@ -63,7 +69,13 @@ describe( it('variant("Bug") filters to only Bug rows', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.create({ title: 'Fix crash', @@ -84,7 +96,13 @@ describe( it('variant("Feature") filters to only Feature rows', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.create({ title: 'Fix crash', @@ -105,7 +123,13 @@ describe( it('variant create injects discriminator and persists it', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); const bug = await orm.tasks.variant('Bug').create({ title: 'Null pointer', @@ -123,7 +147,13 @@ describe( it('round-trip: create via variant, read back via base', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.variant('Bug').create({ title: 'Memory leak', @@ -150,8 +180,20 @@ describe( const orm = mongoOrm({ contract, executor: runtime }); await orm.users.createAll([ - { name: 'Alice', 
email: 'alice@test.com' }, - { name: 'Bob', email: 'bob@test.com' }, + { + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }, + { + name: 'Bob', + email: 'bob@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }, ]); const users = await orm.users.all(); @@ -162,7 +204,13 @@ describe( it('variant().first() returns narrowed result', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.variant('Bug').create({ title: 'Fix crash', @@ -178,7 +226,13 @@ describe( it('variant createAll injects discriminator into each document', async () => { const orm = mongoOrm({ contract, executor: runtime }); - const user = await orm.users.create({ name: 'Alice', email: 'alice@test.com' }); + const user = await orm.users.create({ + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, + }); await orm.tasks.variant('Bug').createAll([ { title: 'Bug 1', severity: 'low', assigneeId: user._id as string }, From 3f28f696f5b68a77d075374bae02ab28cf71c570 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 09:57:42 +0300 Subject: [PATCH 09/30] feat(mongo-orm): add callback-based update overloads (FL-04) update(), updateAll(), updateCount(), and upsert() now accept a callback (u => [...ops]) that uses the Proxy-based field accessor to produce typed mutation operators ($set, $inc, $push, etc.). Field operations are compiled into a grouped update document and codec-encoded using the contract field definitions. Dot-path access resolves codecs through value object field chains. 
--- .../5-query-builders/orm/src/collection.ts | 121 +++++++++-- .../orm/test/collection.test.ts | 203 ++++++++++++++++++ .../orm/test/orm-types.test-d.ts | 36 ++++ 3 files changed, 347 insertions(+), 13 deletions(-) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 84273b0a5..4d05f8186 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -32,6 +32,12 @@ import type { MongoIncludeExpr } from './collection-state'; import { emptyCollectionState, type MongoCollectionState } from './collection-state'; import { compileMongoQuery } from './compile'; import type { MongoQueryExecutor } from './executor'; +import { + compileFieldOperations, + createFieldAccessor, + type FieldAccessor, + type FieldOperation, +} from './field-accessor'; import type { DefaultModelRow, IncludedRow, @@ -100,12 +106,24 @@ export interface MongoCollection< update( data: Partial>, ): Promise | null>; + /** Updates one matching document using field operations from a callback. Requires `.where()`. */ + update( + callback: (u: FieldAccessor) => FieldOperation[], + ): Promise | null>; /** Non-atomic: captures matching `_id`s, updates, then re-reads by `_id`. Requires `.where()`. */ updateAll( data: Partial>, ): AsyncIterableResult>; + /** Updates all matching documents using field operations from a callback. Requires `.where()`. */ + updateAll( + callback: (u: FieldAccessor) => FieldOperation[], + ): AsyncIterableResult>; /** Updates all matching documents and returns the number modified. Requires `.where()`. */ updateCount(data: Partial>): Promise; + /** Updates all matching documents using field operations and returns the number modified. Requires `.where()`. */ + updateCount( + callback: (u: FieldAccessor) => FieldOperation[], + ): Promise; /** Deletes one matching document via `findOneAndDelete`. 
Returns the deleted document or `null`. Requires `.where()`. */ delete(): Promise | null>; /** Non-atomic: reads matching docs then deletes them. Concurrent writes may cause stale results. Requires `.where()`. */ @@ -121,6 +139,11 @@ export interface MongoCollection< create: ResolvedCreateInput; update: Partial>; }): Promise>; + /** Upsert using field operations callback for the update part. Requires `.where()`. */ + upsert(input: { + create: ResolvedCreateInput; + update: (u: FieldAccessor) => FieldOperation[]; + }): Promise>; } function resolveCollectionName(model: MongoModelDefinition, modelName: string): string { @@ -325,20 +348,24 @@ class MongoCollectionImpl< } async update( - data: Partial>, + dataOrCallback: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]), ): Promise | null> { this.#requireFilters('update'); this.#rejectWindowing('update'); this.#rejectIncludes('update'); const filter = this.#mergeFilters(); - const updateDoc = this.#toUpdateDocument(data as Record); + const updateDoc = this.#resolveUpdateDoc(dataOrCallback); const command = new FindOneAndUpdateCommand(this.#collectionName, filter, updateDoc, false); const results = await this.#drainPlan(command); return (results[0] as IncludedRow) ?? 
null; } updateAll( - data: Partial>, + dataOrCallback: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]), ): AsyncIterableResult> { this.#requireFilters('updateAll'); this.#rejectWindowing('updateAll'); @@ -348,7 +375,7 @@ class MongoCollectionImpl< if (ids.length === 0) return; const filter = self.#mergeFilters(); - const updateDoc = self.#toUpdateDocument(data as Record); + const updateDoc = self.#resolveUpdateDoc(dataOrCallback); const command = new UpdateManyCommand(self.#collectionName, filter, updateDoc); await self.#drainPlan(command); @@ -361,12 +388,16 @@ class MongoCollectionImpl< return new AsyncIterableResult(gen()); } - async updateCount(data: Partial>): Promise { + async updateCount( + dataOrCallback: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]), + ): Promise { this.#requireFilters('updateCount'); this.#rejectWindowing('updateCount'); this.#rejectIncludes('updateCount'); const filter = this.#mergeFilters(); - const updateDoc = this.#toUpdateDocument(data as Record); + const updateDoc = this.#resolveUpdateDoc(dataOrCallback); const command = new UpdateManyCommand(this.#collectionName, filter, updateDoc); const results = await this.#drainPlan(command); return (results[0] as { modifiedCount: number }).modifiedCount; @@ -411,30 +442,45 @@ class MongoCollectionImpl< async upsert(input: { create: ResolvedCreateInput; - update: Partial>; + update: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]); }): Promise> { this.#requireFilters('upsert'); this.#rejectWindowing('upsert'); this.#rejectIncludes('upsert'); const filter = this.#mergeFilters(); - const setFields = this.#toSetFields(input.update as Record); + const allCreateFields = this.#toDocument( this.#injectDiscriminator(input.create as Record), ); - const setKeys = new Set(Object.keys(setFields)); + + let updateDoc: Record; + if (typeof input.update === 'function') { + const accessor = createFieldAccessor(); + const ops = input.update(accessor); + updateDoc = 
compileFieldOperations(ops, (field, value) => + this.#wrapFieldOpValue(field, value), + ); + } else { + const setFields = this.#toSetFields(input.update as Record); + updateDoc = {}; + if (Object.keys(setFields).length > 0) { + updateDoc['$set'] = setFields; + } + } + + const setKeys = new Set(Object.keys((updateDoc['$set'] as Record) ?? {})); const insertOnlyFields: Record = {}; for (const [key, value] of Object.entries(allCreateFields)) { if (!setKeys.has(key)) { insertOnlyFields[key] = value; } } - const updateDoc: Record = {}; - if (Object.keys(setFields).length > 0) { - updateDoc['$set'] = setFields; - } if (Object.keys(insertOnlyFields).length > 0) { updateDoc['$setOnInsert'] = insertOnlyFields; } + const command = new FindOneAndUpdateCommand(this.#collectionName, filter, updateDoc, true); const results = await this.#drainPlan(command); return results[0] as IncludedRow; @@ -578,6 +624,55 @@ class MongoCollectionImpl< return { $set: this.#toSetFields(data) }; } + #resolveUpdateDoc( + dataOrCallback: + | Partial> + | ((u: FieldAccessor) => FieldOperation[]), + ): Record { + if (typeof dataOrCallback === 'function') { + const accessor = createFieldAccessor(); + const ops = dataOrCallback(accessor); + return compileFieldOperations(ops, (field, value) => this.#wrapFieldOpValue(field, value)); + } + return this.#toUpdateDocument(dataOrCallback as Record); + } + + #wrapFieldOpValue(field: string, value: MongoValue): MongoValue { + const topLevelField = field.split('.')[0]!; + const fields = this.#modelFields(); + const contractField = fields[topLevelField]; + if (!contractField) return value; + + if (field.includes('.')) { + return this.#wrapDotPathValue(field, value); + } + + if (value instanceof MongoParamRef && contractField.type.kind === 'scalar') { + return new MongoParamRef(value.value, { codecId: contractField.type.codecId }); + } + return value; + } + + #wrapDotPathValue(dotPath: string, value: MongoValue): MongoValue { + const parts = dotPath.split('.'); + 
const fields = this.#modelFields(); + let currentField: ContractField | undefined = fields[parts[0]!]; + + for (let i = 1; i < parts.length; i++) { + if (!currentField || currentField.type.kind !== 'valueObject') return value; + const voName = currentField.type.name; + const voDef = (this.#contract as { valueObjects?: Record }) + .valueObjects?.[voName]; + if (!voDef) return value; + currentField = voDef.fields[parts[i]!]; + } + + if (currentField?.type.kind === 'scalar' && value instanceof MongoParamRef) { + return new MongoParamRef(value.value, { codecId: currentField.type.codecId }); + } + return value; + } + #mergeFilters(): MongoFilterExpr { const [single] = this.#state.filters; if (this.#state.filters.length === 1 && single) { diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index f2715cf0e..4c00ddacc 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -9,6 +9,7 @@ import { type MongoSkipStage, type MongoSortStage, } from '@prisma-next/mongo-query-ast/execution'; +import type { MongoValue } from '@prisma-next/mongo-value'; import { MongoParamRef } from '@prisma-next/mongo-value'; import { AsyncIterableResult } from '@prisma-next/runtime-executor'; import { describe, expect, it } from 'vitest'; @@ -16,6 +17,12 @@ import type { Contract } from '../../../1-foundation/mongo-contract/test/fixture import ormContractJson from '../../../1-foundation/mongo-contract/test/fixtures/orm-contract.json'; import { createMongoCollection } from '../src/collection'; import type { MongoQueryExecutor } from '../src/executor'; +import { + compileFieldOperations, + createFieldAccessor, + type FieldAccessor, + type FieldOperation, +} from '../src/field-accessor'; const contract = ormContractJson as unknown as Contract; @@ -226,6 +233,120 @@ describe('MongoCollection 
object-based where()', () => { }); }); +describe('createFieldAccessor()', () => { + it('property access returns FieldExpression for top-level field', () => { + const u = createFieldAccessor(); + const op = u.name.set('Bob'); + expect(op.operator).toBe('$set'); + expect(op.field).toBe('name'); + expect((op.value as MongoParamRef).value).toBe('Bob'); + }); + + it('set() produces $set operation', () => { + const u = createFieldAccessor(); + const op = u.name.set('Alice'); + expect(op.operator).toBe('$set'); + expect(op.field).toBe('name'); + }); + + it('unset() produces $unset operation', () => { + const u = createFieldAccessor(); + const op = u.name.unset(); + expect(op.operator).toBe('$unset'); + expect(op.field).toBe('name'); + }); + + it('inc() produces $inc operation', () => { + const u = createFieldAccessor(); + const op = u.loginCount.inc(1); + expect(op.operator).toBe('$inc'); + expect(op.field).toBe('loginCount'); + expect((op.value as MongoParamRef).value).toBe(1); + }); + + it('mul() produces $mul operation', () => { + const u = createFieldAccessor(); + const op = u.loginCount.mul(2); + expect(op.operator).toBe('$mul'); + expect(op.field).toBe('loginCount'); + expect((op.value as MongoParamRef).value).toBe(2); + }); + + it('push() produces $push operation', () => { + const u = createFieldAccessor(); + const op = u.tags.push('admin'); + expect(op.operator).toBe('$push'); + expect(op.field).toBe('tags'); + expect((op.value as MongoParamRef).value).toBe('admin'); + }); + + it('pull() produces $pull operation', () => { + const u = createFieldAccessor(); + const op = u.tags.pull('admin'); + expect(op.operator).toBe('$pull'); + expect(op.field).toBe('tags'); + expect((op.value as MongoParamRef).value).toBe('admin'); + }); + + it('addToSet() produces $addToSet operation', () => { + const u = createFieldAccessor(); + const op = u.tags.addToSet('admin'); + expect(op.operator).toBe('$addToSet'); + expect(op.field).toBe('tags'); + }); + + it('pop() produces $pop 
operation', () => { + const u = createFieldAccessor(); + const op = u.tags.pop(1); + expect(op.operator).toBe('$pop'); + expect(op.field).toBe('tags'); + expect((op.value as MongoParamRef).value).toBe(1); + }); + + it('call signature returns FieldExpression for dot-path', () => { + const u = createFieldAccessor(); + const op = u('homeAddress.city').set('NYC'); + expect(op.operator).toBe('$set'); + expect(op.field).toBe('homeAddress.city'); + expect((op.value as MongoParamRef).value).toBe('NYC'); + }); +}); + +describe('compileFieldOperations()', () => { + const identity = (_field: string, value: MongoValue) => value; + + it('groups operations by operator', () => { + const ops: FieldOperation[] = [ + { operator: '$set', field: 'name', value: new MongoParamRef('Alice') }, + { operator: '$inc', field: 'loginCount', value: new MongoParamRef(1) }, + { operator: '$set', field: 'email', value: new MongoParamRef('a@b.c') }, + ]; + const result = compileFieldOperations(ops, identity); + expect(result).toEqual({ + $set: { + name: new MongoParamRef('Alice'), + email: new MongoParamRef('a@b.c'), + }, + $inc: { + loginCount: new MongoParamRef(1), + }, + }); + }); + + it('applies wrapValue to each operation', () => { + const ops: FieldOperation[] = [ + { operator: '$set', field: 'name', value: new MongoParamRef('Alice') }, + ]; + const wrap = (_field: string, value: MongoValue) => + new MongoParamRef((value as MongoParamRef).value, { codecId: 'mongo/string@1' }); + const result = compileFieldOperations(ops, wrap) as Record< + string, + Record + >; + expect(result['$set']!['name']!.codecId).toBe('mongo/string@1'); + }); +}); + describe('MongoCollection variant()', () => { it('returns a new instance from variant()', () => { const executor = createMockExecutor(); @@ -518,6 +639,88 @@ describe('MongoCollection write methods', () => { }); }); + describe('update() with callback', () => { + it('produces correct update doc from field operations', async () => { + const executor = 
createMockExecutor([{ _id: 'id-1', name: 'Updated' }]); + const col = createMongoCollection(contract, 'User', executor); + await col + .where(MongoFieldFilter.eq('_id', 'id-1')) + .update((u) => [u.name.set('Updated'), u.loginCount.inc(1)]); + const command = executor.lastCommand!; + expect(command.kind).toBe('findOneAndUpdate'); + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + expect(update['$set']!['name']).toBeInstanceOf(MongoParamRef); + expect(update['$inc']!['loginCount']).toBeInstanceOf(MongoParamRef); + } + }); + + it('applies codec to callback operations for scalar fields', async () => { + const executor = createMockExecutor([{ _id: 'id-1', name: 'Updated' }]); + const col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('_id', 'id-1')).update((u) => [u.name.set('Updated')]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + expect(update['$set']!['name']!.codecId).toBe('mongo/string@1'); + } + }); + + it('produces $push operations from callback', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('_id', 'id-1')).update((u) => [u.tags.push('admin')]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + expect(update['$push']).toBeDefined(); + expect(update['$push']!['tags']).toBeInstanceOf(MongoParamRef); + } + }); + + it('produces dot-path operations from callback', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + await col + .where(MongoFieldFilter.eq('_id', 'id-1')) + .update((u) => [u('homeAddress.city').set('NYC')]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') 
{ + const update = command.update as Record>; + expect(update['$set']!['homeAddress.city']).toBeInstanceOf(MongoParamRef); + expect(update['$set']!['homeAddress.city']!.codecId).toBe('mongo/string@1'); + } + }); + }); + + describe('updateCount() with callback', () => { + it('produces correct update doc from field operations', async () => { + const executor = createMockExecutor([{ modifiedCount: 1 }]); + const col = createMongoCollection(contract, 'User', executor); + const count = await col + .where(MongoFieldFilter.eq('email', 'a')) + .updateCount((u) => [u.name.set('X')]); + expect(count).toBe(1); + }); + }); + + describe('upsert() with callback', () => { + it('uses field operations for update part', async () => { + const executor = createMockExecutor([{ _id: 'new-id' }]); + const col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ + create: defaultUserData, + update: (u: FieldAccessor) => [u.loginCount.inc(1)], + }); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + expect(update['$inc']!['loginCount']).toBeInstanceOf(MongoParamRef); + expect(update['$setOnInsert']).toBeDefined(); + } + }); + }); + describe('updateCount()', () => { it('throws without .where()', async () => { const executor = createMockExecutor(); diff --git a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts index 81bbd4e29..a5b9df536 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts @@ -2,6 +2,12 @@ import type { AsyncIterableResult } from '@prisma-next/runtime-executor'; import { expectTypeOf, test } from 'vitest'; import type { Contract } from '../../../1-foundation/mongo-contract/test/fixtures/orm-contract'; import type { MongoCollection } from 
'../src/collection'; +import type { + DotPath, + FieldAccessor, + FieldExpression, + ResolveDotPathType, +} from '../src/field-accessor'; import type { MongoOrmClient } from '../src/mongo-orm'; import type { CreateInput, @@ -366,3 +372,33 @@ test('include() on 1:N reference relation all() returns array type', () => { > >(); }); + +// --- Field accessor types --- + +test('FieldAccessor has FieldExpression for scalar fields', () => { + type Accessor = FieldAccessor; + expectTypeOf().toExtend>(); + expectTypeOf().toExtend>(); +}); + +test('FieldAccessor has FieldExpression for array fields', () => { + type Accessor = FieldAccessor; + expectTypeOf().toExtend>(); +}); + +test('DotPath resolves value object dot-paths', () => { + type Paths = DotPath; + expectTypeOf<'homeAddress.city'>().toExtend(); + expectTypeOf<'homeAddress.country'>().toExtend(); +}); + +test('DotPath rejects invalid paths', () => { + type Paths = DotPath; + expectTypeOf<'homeAddress.nonexistent'>().not.toExtend(); + expectTypeOf<'nonexistent.field'>().not.toExtend(); +}); + +test('ResolveDotPathType resolves to scalar type', () => { + type CityType = ResolveDotPathType; + expectTypeOf().toEqualTypeOf(); +}); From a3212c18dcaef07e3ea66f175d723f8dfeabb86a Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 09:58:23 +0300 Subject: [PATCH 10/30] feat(mongo-orm): export field accessor types and createFieldAccessor Export FieldAccessor, FieldExpression, FieldOperation, DotPath, ResolveDotPathType, UpdateOperator types and createFieldAccessor from the ORM package public API. 
--- .../5-query-builders/orm/src/exports/index.ts | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/exports/index.ts b/packages/2-mongo-family/5-query-builders/orm/src/exports/index.ts index 31aebab46..e4d287db3 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/exports/index.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/exports/index.ts @@ -2,6 +2,15 @@ export type { MongoCollection } from '../collection'; export { createMongoCollection } from '../collection'; export { compileMongoQuery } from '../compile'; export type { MongoQueryExecutor } from '../executor'; +export type { + DotPath, + FieldAccessor, + FieldExpression, + FieldOperation, + ResolveDotPathType, + UpdateOperator, +} from '../field-accessor'; +export { createFieldAccessor } from '../field-accessor'; export type { MongoOrmClient, MongoOrmOptions } from '../mongo-orm'; export { mongoOrm } from '../mongo-orm'; export type { MongoRawClient } from '../mongo-raw'; From 5c7587264bc2e6271d9a216da33b935da53349da Mon Sep 17 00:00:00 2001 From: Will Madden Date: Mon, 13 Apr 2026 10:10:56 +0300 Subject: [PATCH 11/30] refactor(retail-store): replace objectIdEq and db.raw with ORM ergonomics Replace all objectIdEq() calls with object-based where({ field: value }) in carts, orders, products, users, and invoices data files. Replace db.raw addToCart/removeFromCart/updateOrderStatus with ORM callback-based update() and upsert() using field accessor operators ($push, $pull). Delete object-id-filter.ts. Fix upsert $setOnInsert to exclude fields referenced by any update operator (not just $set), preventing MongoDB path conflicts. 
--- examples/retail-store/src/data/carts.ts | 33 +++++++------------ examples/retail-store/src/data/invoices.ts | 5 ++- .../retail-store/src/data/object-id-filter.ts | 17 ---------- examples/retail-store/src/data/orders.ts | 18 ++++------ examples/retail-store/src/data/products.ts | 3 +- examples/retail-store/src/data/users.ts | 3 +- .../5-query-builders/orm/src/collection.ts | 11 +++++-- 7 files changed, 30 insertions(+), 60 deletions(-) delete mode 100644 examples/retail-store/src/data/object-id-filter.ts diff --git a/examples/retail-store/src/data/carts.ts b/examples/retail-store/src/data/carts.ts index 120bac1cf..4cdc5ac92 100644 --- a/examples/retail-store/src/data/carts.ts +++ b/examples/retail-store/src/data/carts.ts @@ -1,13 +1,11 @@ import type { Db } from '../db'; -import { executeRaw } from './execute-raw'; -import { objectIdEq, rawObjectIdFilter } from './object-id-filter'; export function getCartByUserId(db: Db, userId: string) { - return db.orm.carts.where(objectIdEq('userId', userId)).first(); + return db.orm.carts.where({ userId }).first(); } export function getCartWithUser(db: Db, userId: string) { - return db.orm.carts.include('user').where(objectIdEq('userId', userId)).first(); + return db.orm.carts.include('user').where({ userId }).first(); } export function upsertCart( @@ -22,17 +20,17 @@ export function upsertCart( image: { url: string }; }>, ) { - return db.orm.carts.where(objectIdEq('userId', userId)).upsert({ + return db.orm.carts.where({ userId }).upsert({ create: { userId, items: [...items] }, update: { items: [...items] }, }); } export function clearCart(db: Db, userId: string) { - return db.orm.carts.where(objectIdEq('userId', userId)).update({ items: [] }); + return db.orm.carts.where({ userId }).update({ items: [] }); } -export async function addToCart( +export function addToCart( db: Db, userId: string, item: { @@ -44,21 +42,12 @@ export async function addToCart( image: { url: string }; }, ) { - const plan = db.raw - 
.collection('carts') - .findOneAndUpdate( - rawObjectIdFilter('userId', userId), - { $push: { items: item }, $setOnInsert: rawObjectIdFilter('userId', userId) }, - { upsert: true }, - ) - .build(); - await executeRaw(db, plan); + return db.orm.carts.where({ userId }).upsert({ + create: { userId, items: [item] }, + update: (u) => [u.items.push(item)], + }); } -export async function removeFromCart(db: Db, userId: string, productId: string) { - const plan = db.raw - .collection('carts') - .updateOne(rawObjectIdFilter('userId', userId), { $pull: { items: { productId } } }) - .build(); - await executeRaw(db, plan); +export function removeFromCart(db: Db, userId: string, productId: string) { + return db.orm.carts.where({ userId }).update((u) => [u.items.pull({ productId })]); } diff --git a/examples/retail-store/src/data/invoices.ts b/examples/retail-store/src/data/invoices.ts index c6350a93a..64e0533c4 100644 --- a/examples/retail-store/src/data/invoices.ts +++ b/examples/retail-store/src/data/invoices.ts @@ -1,12 +1,11 @@ import type { Db } from '../db'; -import { objectIdEq } from './object-id-filter'; export function findInvoiceById(db: Db, id: string) { - return db.orm.invoices.where(objectIdEq('_id', id)).first(); + return db.orm.invoices.where({ _id: id }).first(); } export function findInvoiceWithOrder(db: Db, id: string) { - return db.orm.invoices.include('order').where(objectIdEq('_id', id)).first(); + return db.orm.invoices.include('order').where({ _id: id }).first(); } export function createInvoice( diff --git a/examples/retail-store/src/data/object-id-filter.ts b/examples/retail-store/src/data/object-id-filter.ts deleted file mode 100644 index 887bc8469..000000000 --- a/examples/retail-store/src/data/object-id-filter.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { MongoFieldFilter } from '@prisma-next/mongo-query-ast/execution'; -import { MongoParamRef } from '@prisma-next/mongo-value'; -import { ObjectId } from 'mongodb'; - -function toObjectId(id: string | 
ObjectId): ObjectId { - if (id instanceof ObjectId) return id; - if (!ObjectId.isValid(id)) throw new Error(`Invalid ObjectId: ${id}`); - return new ObjectId(id); -} - -export function objectIdEq(field: string, id: string | ObjectId): MongoFieldFilter { - return MongoFieldFilter.eq(field, new MongoParamRef(toObjectId(id))); -} - -export function rawObjectIdFilter(field: string, id: string | ObjectId): Record { - return { [field]: toObjectId(id) }; -} diff --git a/examples/retail-store/src/data/orders.ts b/examples/retail-store/src/data/orders.ts index a05d93d53..f0b00bb7a 100644 --- a/examples/retail-store/src/data/orders.ts +++ b/examples/retail-store/src/data/orders.ts @@ -1,17 +1,15 @@ import type { Db } from '../db'; -import { collectFirstResult } from './execute-raw'; -import { objectIdEq, rawObjectIdFilter } from './object-id-filter'; export function getUserOrders(db: Db, userId: string) { - return db.orm.orders.where(objectIdEq('userId', userId)).all(); + return db.orm.orders.where({ userId }).all(); } export function getOrderById(db: Db, id: string) { - return db.orm.orders.where(objectIdEq('_id', id)).first(); + return db.orm.orders.where({ _id: id }).first(); } export function getOrderWithUser(db: Db, id: string) { - return db.orm.orders.include('user').where(objectIdEq('_id', id)).first(); + return db.orm.orders.include('user').where({ _id: id }).first(); } export function createOrder( @@ -41,17 +39,13 @@ export function createOrder( } export function deleteOrder(db: Db, id: string) { - return db.orm.orders.where(objectIdEq('_id', id)).delete(); + return db.orm.orders.where({ _id: id }).delete(); } -export async function updateOrderStatus( +export function updateOrderStatus( db: Db, orderId: string, entry: { status: string; timestamp: Date }, ) { - const plan = db.raw - .collection('orders') - .findOneAndUpdate(rawObjectIdFilter('_id', orderId), { $push: { statusHistory: entry } }) - .build(); - return collectFirstResult>(db, plan); + return 
db.orm.orders.where({ _id: orderId }).update((u) => [u.statusHistory.push(entry)]); } diff --git a/examples/retail-store/src/data/products.ts b/examples/retail-store/src/data/products.ts index 7105cc02d..5e2edb8a1 100644 --- a/examples/retail-store/src/data/products.ts +++ b/examples/retail-store/src/data/products.ts @@ -3,7 +3,6 @@ import { MongoParamRef } from '@prisma-next/mongo-value'; import type { FieldOutputTypes } from '../contract'; import type { Db } from '../db'; import { collectResults } from './execute-raw'; -import { objectIdEq } from './object-id-filter'; type Product = FieldOutputTypes['Product']; @@ -21,7 +20,7 @@ export async function findProductsPaginated( } export function findProductById(db: Db, id: string) { - return db.orm.products.where(objectIdEq('_id', id)).first(); + return db.orm.products.where({ _id: id }).first(); } function escapeRegex(str: string) { diff --git a/examples/retail-store/src/data/users.ts b/examples/retail-store/src/data/users.ts index 4e5ee44ae..2639a2fcf 100644 --- a/examples/retail-store/src/data/users.ts +++ b/examples/retail-store/src/data/users.ts @@ -1,12 +1,11 @@ import type { Db } from '../db'; -import { objectIdEq } from './object-id-filter'; export function findUsers(db: Db) { return db.orm.users.all(); } export function findUserById(db: Db, id: string) { - return db.orm.users.where(objectIdEq('_id', id)).first(); + return db.orm.users.where({ _id: id }).first(); } export function createUser(db: Db, data: { name: string; email: string; address: null }) { diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 4d05f8186..28c72fead 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -470,10 +470,17 @@ class MongoCollectionImpl< } } - const setKeys = new Set(Object.keys((updateDoc['$set'] as Record) ?? 
{})); + const updatedFields = new Set(); + for (const operatorGroup of Object.values(updateDoc)) { + if (typeof operatorGroup === 'object' && operatorGroup !== null) { + for (const fieldPath of Object.keys(operatorGroup as Record)) { + updatedFields.add(fieldPath.split('.')[0]!); + } + } + } const insertOnlyFields: Record = {}; for (const [key, value] of Object.entries(allCreateFields)) { - if (!setKeys.has(key)) { + if (!updatedFields.has(key)) { insertOnlyFields[key] = value; } } From c2701916f413c690d339143f5117a28f779f7381 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Tue, 14 Apr 2026 07:54:55 +0300 Subject: [PATCH 12/30] fix: update InferModelRow type test for new User fields after rebase --- .../2-mongo-family/7-runtime/test/orm-contract-types.test-d.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/2-mongo-family/7-runtime/test/orm-contract-types.test-d.ts b/packages/2-mongo-family/7-runtime/test/orm-contract-types.test-d.ts index 6382c2f98..54de6e98c 100644 --- a/packages/2-mongo-family/7-runtime/test/orm-contract-types.test-d.ts +++ b/packages/2-mongo-family/7-runtime/test/orm-contract-types.test-d.ts @@ -18,6 +18,9 @@ test('InferModelRow resolves User fields', () => { _id: string; name: string; email: string; + loginCount: number; + tags: string[]; + homeAddress: { city: string; country: string } | null; }>(); }); From b1d9a21bf29f2c311c72b17712c084a117458c84 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Tue, 14 Apr 2026 08:34:01 +0300 Subject: [PATCH 13/30] fix(retail-store): use ReadonlyArray for statusHistory cast after codec output types change --- examples/retail-store/test/order-lifecycle.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/retail-store/test/order-lifecycle.test.ts b/examples/retail-store/test/order-lifecycle.test.ts index 8eceb7312..56d68b2bf 100644 --- a/examples/retail-store/test/order-lifecycle.test.ts +++ b/examples/retail-store/test/order-lifecycle.test.ts @@ -80,7 +80,7 @@ 
describe('order lifecycle (integration)', { timeout: timeouts.spinUpMongoMemoryS expect(delivered).not.toBeNull(); expect(delivered!['statusHistory']).toHaveLength(3); - const statuses = (delivered!['statusHistory'] as Array<{ status: string }>).map( + const statuses = (delivered!['statusHistory'] as ReadonlyArray<{ status: string }>).map( (s) => s.status, ); expect(statuses).toEqual(['placed', 'shipped', 'delivered']); From 6c30e80c98784bb264112aec2d412d8cf1d8df39 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Tue, 14 Apr 2026 08:59:53 +0300 Subject: [PATCH 14/30] fix(mongo-orm): skip codec on $unset, fix compileFieldOperations return type, add updateAll callback test Address code review findings F02, F03, F05: - F02: skip codec wrapping for $unset sentinel values - F03: add missing updateAll() callback form test - F05: return Record> from compileFieldOperations, eliminating unsound cast --- .../5-query-builders/orm/src/collection.ts | 24 ++++---- .../orm/src/field-accessor.ts | 8 +-- .../orm/test/collection.test.ts | 58 +++++++++++++++++-- 3 files changed, 70 insertions(+), 20 deletions(-) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 28c72fead..9d1d34f72 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -455,12 +455,12 @@ class MongoCollectionImpl< this.#injectDiscriminator(input.create as Record), ); - let updateDoc: Record; + let updateDoc: Record>; if (typeof input.update === 'function') { const accessor = createFieldAccessor(); const ops = input.update(accessor); - updateDoc = compileFieldOperations(ops, (field, value) => - this.#wrapFieldOpValue(field, value), + updateDoc = compileFieldOperations(ops, (field, value, operator) => + this.#wrapFieldOpValue(field, value, operator), ); } else { const setFields = this.#toSetFields(input.update as Record); @@ -472,10 
+472,8 @@ class MongoCollectionImpl< const updatedFields = new Set(); for (const operatorGroup of Object.values(updateDoc)) { - if (typeof operatorGroup === 'object' && operatorGroup !== null) { - for (const fieldPath of Object.keys(operatorGroup as Record)) { - updatedFields.add(fieldPath.split('.')[0]!); - } + for (const fieldPath of Object.keys(operatorGroup)) { + updatedFields.add(fieldPath.split('.')[0]!); } } const insertOnlyFields: Record = {}; @@ -627,7 +625,7 @@ class MongoCollectionImpl< return result; } - #toUpdateDocument(data: Record): Record { + #toUpdateDocument(data: Record): Record> { return { $set: this.#toSetFields(data) }; } @@ -635,16 +633,20 @@ class MongoCollectionImpl< dataOrCallback: | Partial> | ((u: FieldAccessor) => FieldOperation[]), - ): Record { + ): Record> { if (typeof dataOrCallback === 'function') { const accessor = createFieldAccessor(); const ops = dataOrCallback(accessor); - return compileFieldOperations(ops, (field, value) => this.#wrapFieldOpValue(field, value)); + return compileFieldOperations(ops, (field, value, operator) => + this.#wrapFieldOpValue(field, value, operator), + ); } return this.#toUpdateDocument(dataOrCallback as Record); } - #wrapFieldOpValue(field: string, value: MongoValue): MongoValue { + #wrapFieldOpValue(field: string, value: MongoValue, operator?: string): MongoValue { + if (operator === '$unset') return value; + const topLevelField = field.split('.')[0]!; const fields = this.#modelFields(); const contractField = fields[topLevelField]; diff --git a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts index 691cb6c07..a9adfb7bc 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts @@ -238,14 +238,14 @@ export function createFieldAccessor< export function compileFieldOperations( ops: readonly FieldOperation[], - wrapValue: 
(field: string, value: MongoValue) => MongoValue, -): Record { + wrapValue: (field: string, value: MongoValue, operator: UpdateOperator) => MongoValue, +): Record> { const grouped: Record> = {}; for (const op of ops) { if (!grouped[op.operator]) { grouped[op.operator] = {}; } - grouped[op.operator]![op.field] = wrapValue(op.field, op.value); + grouped[op.operator]![op.field] = wrapValue(op.field, op.value, op.operator); } - return grouped as Record; + return grouped; } diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index 4c00ddacc..8678293c3 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -339,11 +339,22 @@ describe('compileFieldOperations()', () => { ]; const wrap = (_field: string, value: MongoValue) => new MongoParamRef((value as MongoParamRef).value, { codecId: 'mongo/string@1' }); - const result = compileFieldOperations(ops, wrap) as Record< - string, - Record - >; - expect(result['$set']!['name']!.codecId).toBe('mongo/string@1'); + const result = compileFieldOperations(ops, wrap); + expect(result['$set']!['name']!).toBeInstanceOf(MongoParamRef); + expect((result['$set']!['name']! 
as MongoParamRef).codecId).toBe('mongo/string@1'); + }); + + it('passes operator to wrapValue callback', () => { + const ops: FieldOperation[] = [ + { operator: '$set', field: 'name', value: new MongoParamRef('Alice') }, + { operator: '$unset', field: 'email', value: new MongoParamRef('') }, + ]; + const operators: string[] = []; + compileFieldOperations(ops, (_field, value, operator) => { + operators.push(operator); + return value; + }); + expect(operators).toEqual(['$set', '$unset']); }); }); @@ -678,6 +689,20 @@ describe('MongoCollection write methods', () => { } }); + it('does not attach codecId to $unset sentinel value', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('_id', 'id-1')).update((u) => [u.name.unset()]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + const unsetRef = update['$unset']!['name']!; + expect(unsetRef).toBeInstanceOf(MongoParamRef); + expect(unsetRef.codecId).toBeUndefined(); + expect(unsetRef.value).toBe(''); + } + }); + it('produces dot-path operations from callback', async () => { const executor = createMockExecutor([{ _id: 'id-1' }]); const col = createMongoCollection(contract, 'User', executor); @@ -693,6 +718,29 @@ describe('MongoCollection write methods', () => { }); }); + describe('updateAll() with callback', () => { + it('produces correct update doc from field operations', async () => { + const executor = createMockExecutor( + [{ _id: 'id-1' }, { _id: 'id-2' }], + [{ matchedCount: 2, modifiedCount: 2 }], + [ + { _id: 'id-1', name: 'Alice', loginCount: 1 }, + { _id: 'id-2', name: 'Bob', loginCount: 1 }, + ], + ); + const col = createMongoCollection(contract, 'User', executor); + const rows: unknown[] = []; + for await (const row of col + .where(MongoFieldFilter.eq('email', 'a@b.c')) + .updateAll((u) => 
[u.loginCount.inc(1)])) { + rows.push(row); + } + expect(rows).toHaveLength(2); + const updateCommand = executor.lastPlan; + expect(updateCommand).toBeDefined(); + }); + }); + describe('updateCount() with callback', () => { it('produces correct update doc from field operations', async () => { const executor = createMockExecutor([{ modifiedCount: 1 }]); From 4b3f2a50b04c32c644bbc993804c9cbbb3a532b3 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Tue, 14 Apr 2026 09:02:11 +0300 Subject: [PATCH 15/30] test(mongo-orm): add e2e integration tests for FL-04, FL-06, FL-08 Addresses code review finding F06: all three features now have integration tests against a real MongoDB instance (MongoMemoryReplSet) proving codec encoding, BSON round-trip, and pipeline correctness. --- .../test/integration/orm-ergonomics.test.ts | 206 ++++++++++++++++++ 1 file changed, 206 insertions(+) create mode 100644 packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts diff --git a/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts new file mode 100644 index 000000000..8eec5c2f4 --- /dev/null +++ b/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts @@ -0,0 +1,206 @@ +import { createMongoAdapter } from '@prisma-next/adapter-mongo'; +import { createMongoDriver } from '@prisma-next/driver-mongo'; +import { createMongoRuntime, type MongoRuntime } from '@prisma-next/mongo-runtime'; +import { timeouts } from '@prisma-next/test-utils'; +import { MongoClient, ObjectId } from 'mongodb'; +import { MongoMemoryReplSet } from 'mongodb-memory-server'; +import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest'; +import type { Contract } from '../../../../1-foundation/mongo-contract/test/fixtures/orm-contract'; +import ormContractJson from 
'../../../../1-foundation/mongo-contract/test/fixtures/orm-contract.json'; +import type { FieldAccessor } from '../../src/field-accessor'; +import { mongoOrm } from '../../src/mongo-orm'; + +const contract = ormContractJson as unknown as Contract; + +const defaultUserData = { + name: 'Alice', + email: 'alice@test.com', + loginCount: 0, + tags: [] as string[], + homeAddress: null, +}; + +function getUserId(user: Record): ObjectId { + return new ObjectId(user['_id'] as string); +} + +describe( + 'ORM ergonomics integration (FL-04, FL-06, FL-08)', + { timeout: timeouts.spinUpMongoMemoryServer }, + () => { + let replSet: MongoMemoryReplSet; + let client: MongoClient; + let runtime: MongoRuntime; + const dbName = 'orm_ergonomics_test'; + + beforeAll(async () => { + replSet = await MongoMemoryReplSet.create({ + replSet: { count: 1, storageEngine: 'wiredTiger' }, + }); + client = new MongoClient(replSet.getUri()); + await client.connect(); + + const adapter = createMongoAdapter(); + const driver = await createMongoDriver(replSet.getUri(), dbName); + runtime = createMongoRuntime({ adapter, driver }); + }, timeouts.spinUpMongoMemoryServer); + + beforeEach(async () => { + await client.db(dbName).dropDatabase(); + }); + + afterAll(async () => { + await Promise.allSettled([runtime?.close(), client?.close(), replSet?.stop()]); + }, timeouts.spinUpMongoMemoryServer); + + describe('FL-06: codec-aware where()', () => { + it('retrieves document by ObjectId field using object where', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + const found = await orm.users.where({ _id: user._id as string }).first(); + expect(found).not.toBeNull(); + expect(found!.name).toBe('Alice'); + }); + + it('retrieves document by string field using object where', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + await orm.users.create(defaultUserData); + const found = await orm.users.where({ name: 
'Alice' }).first(); + expect(found).not.toBeNull(); + expect(found!.email).toBe('alice@test.com'); + }); + + it('filters by multiple fields using object where', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + await orm.users.create(defaultUserData); + await orm.users.create({ ...defaultUserData, name: 'Bob', email: 'bob@test.com' }); + const found = await orm.users.where({ name: 'Alice', email: 'alice@test.com' }).first(); + expect(found).not.toBeNull(); + expect(found!.name).toBe('Alice'); + }); + }); + + describe('FL-04: field accessor mutations', () => { + it('$push adds element to array field', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + const updated = await orm.users + .where({ _id: user._id as string }) + .update((u) => [u.tags.push('admin')]); + expect(updated).not.toBeNull(); + + const oid = getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['tags']).toEqual(['admin']); + }); + + it('$pull removes element from array field', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create({ ...defaultUserData, tags: ['admin', 'editor'] }); + await orm.users.where({ _id: user._id as string }).update((u) => [u.tags.pull('admin')]); + + const oid = getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['tags']).toEqual(['editor']); + }); + + it('$inc increments numeric field', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + await orm.users.where({ _id: user._id as string }).update((u) => [u.loginCount.inc(1)]); + + const oid = getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['loginCount']).toBe(1); + }); + + it('dot-path 
$set updates nested value object field', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create({ + ...defaultUserData, + homeAddress: { city: 'SF', country: 'US' }, + }); + await orm.users + .where({ _id: user._id as string }) + .update((u) => [u('homeAddress.city').set('NYC')]); + + const oid = getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['homeAddress']).toEqual({ city: 'NYC', country: 'US' }); + }); + + it('multiple operations in one callback are applied together', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + await orm.users + .where({ _id: user._id as string }) + .update((u) => [u.tags.push('admin'), u.loginCount.inc(5)]); + + const oid = getUserId(user as Record); + const doc = await client.db(dbName).collection('users').findOne({ _id: oid }); + expect(doc!['tags']).toEqual(['admin']); + expect(doc!['loginCount']).toBe(5); + }); + + it('updateAll with callback updates multiple documents', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + await orm.users.create(defaultUserData); + await orm.users.create({ ...defaultUserData, name: 'Bob', email: 'bob@test.com' }); + + const rows: unknown[] = []; + for await (const row of orm.users + .where({ loginCount: 0 }) + .updateAll((u: FieldAccessor) => [u.loginCount.inc(1)])) { + rows.push(row); + } + expect(rows).toHaveLength(2); + + const docs = await client.db(dbName).collection('users').find({}).toArray(); + for (const doc of docs) { + expect(doc['loginCount']).toBe(1); + } + }); + }); + + describe('FL-08: 1:N reference relation include', () => { + it('include() on 1:N relation returns array of related documents', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + + await orm.tasks.create({ + title: 'Task 
1', + type: 'bug', + assigneeId: user._id as string, + } as never); + await orm.tasks.create({ + title: 'Task 2', + type: 'feature', + assigneeId: user._id as string, + } as never); + + const result = await orm.users + .include('tasks') + .where({ _id: user._id as string }) + .first(); + expect(result).not.toBeNull(); + const tasks = (result as Record)['tasks'] as Record[]; + expect(tasks).toHaveLength(2); + const titles = tasks.map((t) => t['title']).sort(); + expect(titles).toEqual(['Task 1', 'Task 2']); + }); + + it('include() on 1:N returns empty array when no related documents', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + const user = await orm.users.create(defaultUserData); + + const result = await orm.users + .include('tasks') + .where({ _id: user._id as string }) + .first(); + expect(result).not.toBeNull(); + const tasks = (result as Record)['tasks'] as unknown[]; + expect(tasks).toEqual([]); + }); + }); + }, +); From cb0b871d2aa364f808b1856764b9d0f21871d1f9 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Tue, 14 Apr 2026 09:17:32 +0300 Subject: [PATCH 16/30] brand MongoFilterExpr with symbol to prevent misrouting plain objects with a kind field #isFilterExpr used a structural check ("kind" in filter) which would misroute user data objects that legitimately have a "kind" field. Replace with a symbol brand on the MongoFilterExpression base class that plain objects can never carry. 
--- .../query-ast/src/exports/execution.ts | 1 + .../query-ast/src/filter-expressions.ts | 3 +++ .../query-ast/test/filter-expressions.test.ts | 21 +++++++++++++++++++ .../5-query-builders/orm/src/collection.ts | 3 ++- 4 files changed, 27 insertions(+), 1 deletion(-) diff --git a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts index 6105d8277..18291c289 100644 --- a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts +++ b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts @@ -34,6 +34,7 @@ export { MongoFieldFilter, MongoNotExpr, MongoOrExpr, + mongoFilterBrand, } from '../filter-expressions'; export type { MongoQueryPlan } from '../query-plan'; export type { RawMongoCommand } from '../raw-commands'; diff --git a/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts b/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts index 742f386bc..d37833d74 100644 --- a/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts +++ b/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts @@ -3,7 +3,10 @@ import type { MongoAggExpr } from './aggregation-expressions'; import { MongoAstNode } from './ast-node'; import type { MongoFilterRewriter, MongoFilterVisitor } from './visitors'; +export const mongoFilterBrand: unique symbol = Symbol('MongoFilterExpr'); + abstract class MongoFilterExpression extends MongoAstNode { + readonly [mongoFilterBrand] = true as const; abstract accept(visitor: MongoFilterVisitor): R; abstract rewrite(rewriter: MongoFilterRewriter): MongoFilterExpr; diff --git a/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts b/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts index 638588298..8e6363983 100644 --- a/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts +++ 
b/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts @@ -8,6 +8,7 @@ import { MongoFieldFilter, MongoNotExpr, MongoOrExpr, + mongoFilterBrand, } from '../src/filter-expressions'; import type { MongoFilterRewriter, MongoFilterVisitor } from '../src/visitors'; @@ -264,6 +265,26 @@ describe('MongoFilterRewriter', () => { }); }); +describe('mongoFilterBrand', () => { + it('is present on all filter expression types', () => { + const field = MongoFieldFilter.eq('x', 1); + const and = MongoAndExpr.of([field]); + const or = MongoOrExpr.of([field]); + const not = new MongoNotExpr(field); + const exists = MongoExistsExpr.exists('x'); + const expr = MongoExprFilter.of(MongoAggFieldRef.of('x')); + + for (const node of [field, and, or, not, exists, expr]) { + expect(mongoFilterBrand in node).toBe(true); + } + }); + + it('is absent on plain objects with a kind property', () => { + const impersonator = { kind: 'field', field: 'x', op: '$eq', value: 1 }; + expect(mongoFilterBrand in impersonator).toBe(false); + }); +}); + describe('composite nesting', () => { it('supports $and containing $or and $not', () => { const expr: MongoFilterExpr = MongoAndExpr.of([ diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 9d1d34f72..720b4f0e1 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -23,6 +23,7 @@ import { InsertOneCommand, MongoAndExpr, MongoFieldFilter, + mongoFilterBrand, UpdateManyCommand, } from '@prisma-next/mongo-query-ast/execution'; import type { MongoValue } from '@prisma-next/mongo-value'; @@ -539,7 +540,7 @@ class MongoCollectionImpl< } #isFilterExpr(filter: unknown): filter is MongoFilterExpr { - return typeof filter === 'object' && filter !== null && 'kind' in filter; + return typeof filter === 'object' && filter !== null && mongoFilterBrand in filter; } 
#compileWhereObject(data: Record): MongoFilterExpr[] { From 14bb643cdfd3dd68565a20e6225723bd63f3dcab Mon Sep 17 00:00:00 2001 From: Will Madden Date: Tue, 14 Apr 2026 09:19:29 +0300 Subject: [PATCH 17/30] constrain inc()/mul() to numeric types in FieldExpression FieldExpression now intersects with NumericOps only when T extends number, so calling inc()/mul() on a string field is a type error. The runtime implementation still carries all methods via RuntimeFieldExpression; gating is purely at the type level. --- .../orm/src/field-accessor.ts | 27 ++++++++++++++----- .../orm/test/orm-types.test-d.ts | 13 +++++++++ 2 files changed, 33 insertions(+), 7 deletions(-) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts index a9adfb7bc..8c7c0af07 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts @@ -74,16 +74,19 @@ type ResolveFieldType< ? TCodecTypes[CId]['output'] : unknown; -export interface FieldExpression { - set(value: T): FieldOperation; - unset(): FieldOperation; +type NumericOps = { inc(value: number): FieldOperation; mul(value: number): FieldOperation; +}; + +export type FieldExpression = { + set(value: T): FieldOperation; + unset(): FieldOperation; push(value: T extends readonly (infer E)[] ? E : unknown): FieldOperation; pull(match: T extends readonly (infer E)[] ? E | Partial : unknown): FieldOperation; addToSet(value: T extends readonly (infer E)[] ? E : unknown): FieldOperation; pop(end: 1 | -1): FieldOperation; -} +} & (T extends number ? 
NumericOps : unknown); type HasValueObjects = { readonly valueObjects?: Record }; @@ -193,7 +196,17 @@ export type FieldAccessor< // ── Runtime implementation ─────────────────────────────────────────────────── -function createFieldExpression(fieldPath: string): FieldExpression { +// Runtime expression has all methods; type-level gating happens via FieldExpression +interface RuntimeFieldExpression extends NumericOps { + set(value: unknown): FieldOperation; + unset(): FieldOperation; + push(value: unknown): FieldOperation; + pull(match: unknown): FieldOperation; + addToSet(value: unknown): FieldOperation; + pop(end: 1 | -1): FieldOperation; +} + +function createFieldExpression(fieldPath: string): RuntimeFieldExpression { return { set(value: unknown): FieldOperation { return { operator: '$set', field: fieldPath, value: new MongoParamRef(value) }; @@ -227,10 +240,10 @@ export function createFieldAccessor< ModelName extends string & keyof TContract['models'], >(): FieldAccessor { return new Proxy((() => {}) as unknown as FieldAccessor, { - get(_target, prop: string): FieldExpression { + get(_target, prop: string): RuntimeFieldExpression { return createFieldExpression(prop); }, - apply(_target, _thisArg, args: [string]): FieldExpression { + apply(_target, _thisArg, args: [string]): RuntimeFieldExpression { return createFieldExpression(args[0]); }, }); diff --git a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts index a5b9df536..031200e1e 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts @@ -402,3 +402,16 @@ test('ResolveDotPathType resolves to scalar type', () => { type CityType = ResolveDotPathType; expectTypeOf().toEqualTypeOf(); }); + +test('FieldExpression inc/mul restricted to numeric types', () => { + type StringExpr = FieldExpression; + type NumberExpr = 
FieldExpression; + + // @ts-expect-error inc is not available on string fields + void ({} as StringExpr).inc(1); + // @ts-expect-error mul is not available on string fields + void ({} as StringExpr).mul(2); + + void ({} as NumberExpr).inc(1); + void ({} as NumberExpr).mul(2); +}); From 7385b548c9638b568b1e000872e0c1e3a8363881 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Tue, 14 Apr 2026 09:21:42 +0300 Subject: [PATCH 18/30] reject dot-path field operations in upsert() callbacks Dot-path operations like u("homeAddress.city").set("LA") conflict with $setOnInsert on the insert path: MongoDB rejects overlapping paths, and excluding the parent field from $setOnInsert creates an incomplete document. Add a runtime guard that throws with a clear error message when dot-path operations are detected in upsert callbacks. --- .../5-query-builders/orm/src/collection.ts | 8 ++++++++ .../5-query-builders/orm/test/collection.test.ts | 11 +++++++++++ .../orm/test/integration/orm-ergonomics.test.ts | 13 +++++++++++++ 3 files changed, 32 insertions(+) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 720b4f0e1..bd06faf31 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -460,6 +460,14 @@ class MongoCollectionImpl< if (typeof input.update === 'function') { const accessor = createFieldAccessor(); const ops = input.update(accessor); + const dotPathOp = ops.find((op) => op.field.includes('.')); + if (dotPathOp) { + throw new Error( + `upsert() does not support dot-path field operations (found "${dotPathOp.field}"). ` + + 'Dot-path updates conflict with $setOnInsert on the insert path, producing incomplete documents. 
' + + 'Use top-level field operations instead.', + ); + } updateDoc = compileFieldOperations(ops, (field, value, operator) => this.#wrapFieldOpValue(field, value, operator), ); diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index 8678293c3..a966675e4 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -767,6 +767,17 @@ describe('MongoCollection write methods', () => { expect(update['$setOnInsert']).toBeDefined(); } }); + + it('throws when callback produces dot-path operations', async () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor); + await expect( + col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ + create: { ...defaultUserData, homeAddress: { city: 'SF', country: 'US' } }, + update: (u) => [u('homeAddress.city').set('LA')], + }), + ).rejects.toThrow('dot-path'); + }); }); describe('updateCount()', () => { diff --git a/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts index 8eec5c2f4..809221c06 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/integration/orm-ergonomics.test.ts @@ -162,6 +162,19 @@ describe( }); }); + describe('upsert() dot-path guard', () => { + it('throws when callback uses a dot-path operation', async () => { + const orm = mongoOrm({ contract, executor: runtime }); + await orm.users.create(defaultUserData); + await expect( + orm.users.where({ name: 'Alice' }).upsert({ + create: { ...defaultUserData, homeAddress: { city: 'SF', country: 'US' } }, + update: (u) => [u('homeAddress.city').set('LA')], + }), + ).rejects.toThrow('dot-path'); + }); + 
}); + describe('FL-08: 1:N reference relation include', () => { it('include() on 1:N relation returns array of related documents', async () => { const orm = mongoOrm({ contract, executor: runtime }); From e1f26b3837726ad1f4faa80eab0dd2fa176ad464 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Tue, 14 Apr 2026 14:31:05 +0300 Subject: [PATCH 19/30] fix biome noNonNullAssertion warnings in ORM collection and field-accessor Replace non-null assertions with nullish-coalescing fallbacks and explicit local variables to satisfy biome strict mode. --- .../5-query-builders/orm/src/collection.ts | 9 +++++---- .../5-query-builders/orm/src/field-accessor.ts | 8 +++++--- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index bd06faf31..4393e4395 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -482,7 +482,7 @@ class MongoCollectionImpl< const updatedFields = new Set(); for (const operatorGroup of Object.values(updateDoc)) { for (const fieldPath of Object.keys(operatorGroup)) { - updatedFields.add(fieldPath.split('.')[0]!); + updatedFields.add(fieldPath.split('.')[0] ?? fieldPath); } } const insertOnlyFields: Record = {}; @@ -656,7 +656,7 @@ class MongoCollectionImpl< #wrapFieldOpValue(field: string, value: MongoValue, operator?: string): MongoValue { if (operator === '$unset') return value; - const topLevelField = field.split('.')[0]!; + const topLevelField = field.split('.')[0] ?? 
field; const fields = this.#modelFields(); const contractField = fields[topLevelField]; if (!contractField) return value; @@ -674,7 +674,7 @@ class MongoCollectionImpl< #wrapDotPathValue(dotPath: string, value: MongoValue): MongoValue { const parts = dotPath.split('.'); const fields = this.#modelFields(); - let currentField: ContractField | undefined = fields[parts[0]!]; + let currentField: ContractField | undefined = parts[0] ? fields[parts[0]] : undefined; for (let i = 1; i < parts.length; i++) { if (!currentField || currentField.type.kind !== 'valueObject') return value; @@ -682,7 +682,8 @@ class MongoCollectionImpl< const voDef = (this.#contract as { valueObjects?: Record }) .valueObjects?.[voName]; if (!voDef) return value; - currentField = voDef.fields[parts[i]!]; + const partKey = parts[i]; + currentField = partKey ? voDef.fields[partKey] : undefined; } if (currentField?.type.kind === 'scalar' && value instanceof MongoParamRef) { diff --git a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts index 8c7c0af07..235943c17 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts @@ -255,10 +255,12 @@ export function compileFieldOperations( ): Record> { const grouped: Record> = {}; for (const op of ops) { - if (!grouped[op.operator]) { - grouped[op.operator] = {}; + let group = grouped[op.operator]; + if (!group) { + group = {}; + grouped[op.operator] = group; } - grouped[op.operator]![op.field] = wrapValue(op.field, op.value, op.operator); + group[op.field] = wrapValue(op.field, op.value, op.operator); } return grouped; } From 5c72d3bbdb70ca1d6759aeb959312ce9704229ba Mon Sep 17 00:00:00 2001 From: Will Madden Date: Tue, 14 Apr 2026 14:50:36 +0300 Subject: [PATCH 20/30] reject _id mutations in callback-based updates Object-based updates reject _id via #toSetFields, but callback-based 
updates (update, updateAll, updateCount, upsert) bypassed that check. Add _id validation in #resolveUpdateDoc and the upsert callback path so u._id.set() throws the same error as object mutations. --- .../5-query-builders/orm/src/collection.ts | 8 +++++ .../orm/test/collection.test.ts | 32 +++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 4393e4395..5b204a0f0 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -460,6 +460,10 @@ class MongoCollectionImpl< if (typeof input.update === 'function') { const accessor = createFieldAccessor(); const ops = input.update(accessor); + const idOp = ops.find((op) => op.field === '_id'); + if (idOp) { + throw new Error('Mutation payloads cannot modify `_id`'); + } const dotPathOp = ops.find((op) => op.field.includes('.')); if (dotPathOp) { throw new Error( @@ -646,6 +650,10 @@ class MongoCollectionImpl< if (typeof dataOrCallback === 'function') { const accessor = createFieldAccessor(); const ops = dataOrCallback(accessor); + const idOp = ops.find((op) => op.field === '_id'); + if (idOp) { + throw new Error('Mutation payloads cannot modify `_id`'); + } return compileFieldOperations(ops, (field, value, operator) => this.#wrapFieldOpValue(field, value, operator), ); diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index a966675e4..ee71360dc 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -1066,6 +1066,38 @@ describe('MongoCollection write methods', () => { }), ).rejects.toThrow('_id'); }); + + it('update() with callback throws when _id is targeted', async () => { + const executor 
= createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor); + await expect( + col.where(MongoFieldFilter.eq('_id', 'id-1')).update((u) => [u._id.set('new-id')]), + ).rejects.toThrow('_id'); + }); + + it('updateAll() with callback throws when _id is targeted', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + const result = col + .where(MongoFieldFilter.eq('_id', 'id-1')) + .updateAll((u: FieldAccessor) => [u._id.set('new-id')]); + await expect(async () => { + for await (const _ of result) { + /* drain */ + } + }).rejects.toThrow('_id'); + }); + + it('upsert() with callback throws when _id is targeted', async () => { + const executor = createMockExecutor(); + const col = createMongoCollection(contract, 'User', executor); + await expect( + col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ + create: defaultUserData, + update: (u) => [u._id.set('new-id')], + }), + ).rejects.toThrow('_id'); + }); }); describe('immutability', () => { From 7f096823e13f19143266383a5d2f6e096b895507 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 06:45:59 +0300 Subject: [PATCH 21/30] fix: add explicit type annotation for upsert callback parameter --- .../2-mongo-family/5-query-builders/orm/test/collection.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index ee71360dc..4da4bbb85 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -1094,7 +1094,7 @@ describe('MongoCollection write methods', () => { await expect( col.where(MongoFieldFilter.eq('email', 'a@b.c')).upsert({ create: defaultUserData, - update: (u) => [u._id.set('new-id')], + update: (u: FieldAccessor) => 
[u._id.set('new-id')], }), ).rejects.toThrow('_id'); }); From 00fdc93d1da1f1c84665c557ce040d16cc390ecf Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 06:49:42 +0300 Subject: [PATCH 22/30] normalize empty callback updates and wrap value-object payloads in field operations Empty callback updates now produce { $set: {} } matching object-based update behavior. Value-object payloads in callback mutations (set/push/pull) are now recursively wrapped through their codecs, matching the wrapping applied to object-based updates and create paths. --- .../5-query-builders/orm/src/collection.ts | 29 +++++++++++++++++++ .../orm/test/collection.test.ts | 27 +++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 5b204a0f0..c988bebb5 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -654,6 +654,9 @@ class MongoCollectionImpl< if (idOp) { throw new Error('Mutation payloads cannot modify `_id`'); } + if (ops.length === 0) { + return { $set: {} }; + } return compileFieldOperations(ops, (field, value, operator) => this.#wrapFieldOpValue(field, value, operator), ); @@ -676,6 +679,19 @@ class MongoCollectionImpl< if (value instanceof MongoParamRef && contractField.type.kind === 'scalar') { return new MongoParamRef(value.value, { codecId: contractField.type.codecId }); } + + if (contractField.type.kind === 'valueObject' && value instanceof MongoParamRef) { + const raw = value.value; + if (typeof raw === 'object' && raw !== null && !Array.isArray(raw)) { + const voName = contractField.type.name; + const voDef = (this.#contract as { valueObjects?: Record }) + .valueObjects?.[voName]; + if (voDef) { + return this.#wrapValueObject(raw as Record, voDef); + } + } + } + return value; } @@ -697,6 +713,19 @@ class MongoCollectionImpl< if 
(currentField?.type.kind === 'scalar' && value instanceof MongoParamRef) { return new MongoParamRef(value.value, { codecId: currentField.type.codecId }); } + + if (currentField?.type.kind === 'valueObject' && value instanceof MongoParamRef) { + const raw = value.value; + if (typeof raw === 'object' && raw !== null && !Array.isArray(raw)) { + const voName = currentField.type.name; + const voDef = (this.#contract as { valueObjects?: Record }) + .valueObjects?.[voName]; + if (voDef) { + return this.#wrapValueObject(raw as Record, voDef); + } + } + } + return value; } diff --git a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts index 4da4bbb85..a6351e7c0 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/collection.test.ts @@ -716,6 +716,33 @@ describe('MongoCollection write methods', () => { expect(update['$set']!['homeAddress.city']!.codecId).toBe('mongo/string@1'); } }); + + it('normalizes empty callback to { $set: {} }', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + await col.where(MongoFieldFilter.eq('_id', 'id-1')).update(() => []); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + expect(command.update).toEqual({ $set: {} }); + } + }); + + it('wraps value-object payload through codec in set()', async () => { + const executor = createMockExecutor([{ _id: 'id-1' }]); + const col = createMongoCollection(contract, 'User', executor); + await col + .where(MongoFieldFilter.eq('_id', 'id-1')) + .update((u) => [u.homeAddress.set({ city: 'NYC', country: 'US' })]); + const command = executor.lastCommand!; + if (command.kind === 'findOneAndUpdate') { + const update = command.update as Record>; + const setDoc = update['$set']!['homeAddress'] as Record; + 
expect(setDoc['city']).toBeInstanceOf(MongoParamRef); + expect(setDoc['city']!.codecId).toBe('mongo/string@1'); + expect(setDoc['country']).toBeInstanceOf(MongoParamRef); + expect(setDoc['country']!.codecId).toBe('mongo/string@1'); + } + }); }); describe('updateAll() with callback', () => { From 37ce2e8bbe0cf3f2bdd4fb362ab38b015de8644b Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 07:37:26 +0300 Subject: [PATCH 23/30] replace mongoFilterBrand symbol with non-enumerable string property Symbol-based brands break under dual-package installs because each copy of the module creates its own unique symbol. A string-keyed, non-enumerable prototype property survives this scenario since string in-checks compare by value, not reference identity. --- .../4-query/query-ast/src/filter-expressions.ts | 11 +++++++++-- .../query-ast/test/filter-expressions.test.ts | 12 ++++++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts b/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts index d37833d74..37d6368f1 100644 --- a/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts +++ b/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts @@ -3,10 +3,10 @@ import type { MongoAggExpr } from './aggregation-expressions'; import { MongoAstNode } from './ast-node'; import type { MongoFilterRewriter, MongoFilterVisitor } from './visitors'; -export const mongoFilterBrand: unique symbol = Symbol('MongoFilterExpr'); +const FILTER_EXPR_BRAND = '__prismaNextMongoFilter__'; +export { FILTER_EXPR_BRAND as mongoFilterBrand }; abstract class MongoFilterExpression extends MongoAstNode { - readonly [mongoFilterBrand] = true as const; abstract accept(visitor: MongoFilterVisitor): R; abstract rewrite(rewriter: MongoFilterRewriter): MongoFilterExpr; @@ -15,6 +15,13 @@ abstract class MongoFilterExpression extends MongoAstNode { } } 
+Object.defineProperty(MongoFilterExpression.prototype, FILTER_EXPR_BRAND, { + value: true, + writable: false, + enumerable: false, + configurable: false, +}); + export class MongoFieldFilter extends MongoFilterExpression { readonly kind = 'field' as const; readonly field: string; diff --git a/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts b/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts index 8e6363983..22a9fe278 100644 --- a/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts +++ b/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts @@ -283,6 +283,18 @@ describe('mongoFilterBrand', () => { const impersonator = { kind: 'field', field: 'x', op: '$eq', value: 1 }; expect(mongoFilterBrand in impersonator).toBe(false); }); + + it('brand property is non-enumerable', () => { + const field = MongoFieldFilter.eq('x', 1); + expect(Object.keys(field)).not.toContain(String(mongoFilterBrand)); + expect(JSON.parse(JSON.stringify(field))).not.toHaveProperty(String(mongoFilterBrand)); + }); + + it('survives dual-package scenario (string-based lookup)', () => { + const field = MongoFieldFilter.eq('x', 1); + const brandKey = '__prismaNextMongoFilter__'; + expect(brandKey in field).toBe(true); + }); }); describe('composite nesting', () => { From 8d66ccacc4d43779d785208152b66be18b605ecc Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 07:38:58 +0300 Subject: [PATCH 24/30] Use named schema input type --- examples/retail-store/src/data/carts.ts | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/examples/retail-store/src/data/carts.ts b/examples/retail-store/src/data/carts.ts index 4cdc5ac92..289345a59 100644 --- a/examples/retail-store/src/data/carts.ts +++ b/examples/retail-store/src/data/carts.ts @@ -1,3 +1,4 @@ +import type { CartItemInput } from '../contract'; import type { Db } from '../db'; export function getCartByUserId(db: Db, 
userId: string) { @@ -30,18 +31,7 @@ export function clearCart(db: Db, userId: string) { return db.orm.carts.where({ userId }).update({ items: [] }); } -export function addToCart( - db: Db, - userId: string, - item: { - productId: string; - name: string; - brand: string; - amount: number; - price: { amount: number; currency: string }; - image: { url: string }; - }, -) { +export function addToCart(db: Db, userId: string, item: CartItemInput) { return db.orm.carts.where({ userId }).upsert({ create: { userId, items: [item] }, update: (u) => [u.items.push(item)], From 638ea912bf57234dce4f2f8fc5bed02446725767 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 07:45:36 +0300 Subject: [PATCH 25/30] fix(mongo-orm): resolve value-object field types in FieldAccessor via type maps ResolveFieldType only handled scalar fields, causing value-object fields (e.g. homeAddress) to fall through to unknown. This silently accepted any value in set()/push()/pull() for those fields. Now uses ResolvedModelRow (ExtractMongoFieldOutputTypes with InferModelRow fallback), matching the same pattern as ResolvedOutputRow in types.ts. Value-object fields resolve to their concrete emitted types, restoring type safety. 
--- .../orm/src/field-accessor.ts | 33 +++++++++---------- .../orm/test/orm-types.test-d.ts | 8 +++++ 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts index 235943c17..f7b9bd3c7 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/field-accessor.ts @@ -1,6 +1,8 @@ import type { ContractField, ContractValueObject } from '@prisma-next/contract/types'; import type { ExtractMongoCodecTypes, + ExtractMongoFieldOutputTypes, + InferModelRow, MongoContract, MongoContractWithTypeMaps, MongoTypeMaps, @@ -52,27 +54,24 @@ type ValueObjectFieldKeys< : never; }[keyof TContract['models'][ModelName]['fields'] & string]; +type ResolvedModelRow< + TContract extends MongoContractWithTypeMaps, + ModelName extends string & keyof TContract['models'], +> = string extends keyof ExtractMongoFieldOutputTypes + ? InferModelRow + : ModelName extends keyof ExtractMongoFieldOutputTypes + ? { + -readonly [K in keyof ExtractMongoFieldOutputTypes[ModelName]]: ExtractMongoFieldOutputTypes[ModelName][K]; + } + : InferModelRow; + type ResolveFieldType< TContract extends MongoContractWithTypeMaps, ModelName extends string & keyof TContract['models'], K extends keyof TContract['models'][ModelName]['fields'] & string, - TCodecTypes extends Record = ExtractMongoCodecTypes, -> = TContract['models'][ModelName]['fields'][K] extends { - readonly type: { - readonly kind: 'scalar'; - readonly codecId: infer CId extends string & keyof TCodecTypes; - }; - readonly many: true; -} - ? TCodecTypes[CId]['output'][] - : TContract['models'][ModelName]['fields'][K] extends { - readonly type: { - readonly kind: 'scalar'; - readonly codecId: infer CId extends string & keyof TCodecTypes; - }; - } - ? TCodecTypes[CId]['output'] - : unknown; +> = K extends keyof ResolvedModelRow + ? 
ResolvedModelRow[K] + : unknown; type NumericOps = { inc(value: number): FieldOperation; diff --git a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts index 031200e1e..70f99cc27 100644 --- a/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts +++ b/packages/2-mongo-family/5-query-builders/orm/test/orm-types.test-d.ts @@ -386,6 +386,14 @@ test('FieldAccessor has FieldExpression for array fields', () => { expectTypeOf().toExtend>(); }); +test('FieldAccessor resolves value-object field to concrete type, not unknown', () => { + type Accessor = FieldAccessor; + type HomeAddressExpr = Accessor['homeAddress']; + + // @ts-expect-error set() rejects a number when field type is value-object + void ({} as HomeAddressExpr).set(42); +}); + test('DotPath resolves value object dot-paths', () => { type Paths = DotPath; expectTypeOf<'homeAddress.city'>().toExtend(); From 74d13e9c2c9c3594794b9dbf967a52e9583e1978 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 07:49:06 +0300 Subject: [PATCH 26/30] refactor(retail-store): replace hand-written types with emitted contract types Use CartItemInput, OrderLineItemInput, StatusEntryInput, InvoiceLineItemInput, FieldInputTypes from contract.d.ts instead of inline object type literals. This ensures the data layer stays in sync with schema changes automatically and demonstrates the value of contract-emitted types. 
--- examples/retail-store/src/data/carts.ts | 13 +------ examples/retail-store/src/data/events.ts | 36 ++++--------------- examples/retail-store/src/data/invoices.ts | 8 ++--- examples/retail-store/src/data/orders.ts | 18 +++------- examples/retail-store/src/data/users.ts | 3 +- examples/retail-store/src/seed.ts | 1 + .../retail-store/test/aggregation.test.ts | 1 + .../retail-store/test/crud-lifecycle.test.ts | 1 + 8 files changed, 19 insertions(+), 62 deletions(-) diff --git a/examples/retail-store/src/data/carts.ts b/examples/retail-store/src/data/carts.ts index 289345a59..1e03953a0 100644 --- a/examples/retail-store/src/data/carts.ts +++ b/examples/retail-store/src/data/carts.ts @@ -9,18 +9,7 @@ export function getCartWithUser(db: Db, userId: string) { return db.orm.carts.include('user').where({ userId }).first(); } -export function upsertCart( - db: Db, - userId: string, - items: ReadonlyArray<{ - productId: string; - name: string; - brand: string; - amount: number; - price: { amount: number; currency: string }; - image: { url: string }; - }>, -) { +export function upsertCart(db: Db, userId: string, items: ReadonlyArray) { return db.orm.carts.where({ userId }).upsert({ create: { userId, items: [...items] }, update: { items: [...items] }, diff --git a/examples/retail-store/src/data/events.ts b/examples/retail-store/src/data/events.ts index ea1d81908..333764ec3 100644 --- a/examples/retail-store/src/data/events.ts +++ b/examples/retail-store/src/data/events.ts @@ -1,19 +1,14 @@ import { acc } from '@prisma-next/mongo-pipeline-builder'; import { MongoFieldFilter } from '@prisma-next/mongo-query-ast/execution'; +import type { FieldInputTypes } from '../contract'; import type { Db } from '../db'; import { collectResults } from './execute-raw'; +type EventBase = Omit; + export function createViewProductEvent( db: Db, - event: { - userId: string; - sessionId: string; - timestamp: Date; - productId: string; - subCategory: string; - brand: string; - exitMethod?: string 
| null; - }, + event: EventBase & FieldInputTypes['ViewProductEvent'], ) { return db.orm.events.variant('ViewProductEvent').create({ userId: event.userId, @@ -22,19 +17,11 @@ export function createViewProductEvent( productId: event.productId, subCategory: event.subCategory, brand: event.brand, - exitMethod: event.exitMethod ?? null, + exitMethod: event.exitMethod, }); } -export function createSearchEvent( - db: Db, - event: { - userId: string; - sessionId: string; - timestamp: Date; - query: string; - }, -) { +export function createSearchEvent(db: Db, event: EventBase & FieldInputTypes['SearchEvent']) { return db.orm.events.variant('SearchEvent').create({ userId: event.userId, sessionId: event.sessionId, @@ -43,16 +30,7 @@ export function createSearchEvent( }); } -export function createAddToCartEvent( - db: Db, - event: { - userId: string; - sessionId: string; - timestamp: Date; - productId: string; - brand: string; - }, -) { +export function createAddToCartEvent(db: Db, event: EventBase & FieldInputTypes['AddToCartEvent']) { return db.orm.events.variant('AddToCartEvent').create({ userId: event.userId, sessionId: event.sessionId, diff --git a/examples/retail-store/src/data/invoices.ts b/examples/retail-store/src/data/invoices.ts index 64e0533c4..8bf6a9a04 100644 --- a/examples/retail-store/src/data/invoices.ts +++ b/examples/retail-store/src/data/invoices.ts @@ -1,3 +1,4 @@ +import type { InvoiceLineItemInput } from '../contract'; import type { Db } from '../db'; export function findInvoiceById(db: Db, id: string) { @@ -12,12 +13,7 @@ export function createInvoice( db: Db, invoice: { orderId: string; - items: ReadonlyArray<{ - name: string; - amount: number; - unitPrice: number; - lineTotal: number; - }>; + items: ReadonlyArray; subtotal: number; tax: number; total: number; diff --git a/examples/retail-store/src/data/orders.ts b/examples/retail-store/src/data/orders.ts index f0b00bb7a..b294e11b2 100644 --- a/examples/retail-store/src/data/orders.ts +++ 
b/examples/retail-store/src/data/orders.ts @@ -1,3 +1,4 @@ +import type { OrderLineItemInput, StatusEntryInput } from '../contract'; import type { Db } from '../db'; export function getUserOrders(db: Db, userId: string) { @@ -16,17 +17,10 @@ export function createOrder( db: Db, order: { userId: string; - items: ReadonlyArray<{ - productId: string; - name: string; - brand: string; - amount: number; - price: { amount: number; currency: string }; - image: { url: string }; - }>; + items: ReadonlyArray; shippingAddress: string; type: string; - statusHistory: ReadonlyArray<{ status: string; timestamp: Date }>; + statusHistory: ReadonlyArray; }, ) { return db.orm.orders.create({ @@ -42,10 +36,6 @@ export function deleteOrder(db: Db, id: string) { return db.orm.orders.where({ _id: id }).delete(); } -export function updateOrderStatus( - db: Db, - orderId: string, - entry: { status: string; timestamp: Date }, -) { +export function updateOrderStatus(db: Db, orderId: string, entry: StatusEntryInput) { return db.orm.orders.where({ _id: orderId }).update((u) => [u.statusHistory.push(entry)]); } diff --git a/examples/retail-store/src/data/users.ts b/examples/retail-store/src/data/users.ts index 2639a2fcf..58804beb9 100644 --- a/examples/retail-store/src/data/users.ts +++ b/examples/retail-store/src/data/users.ts @@ -1,3 +1,4 @@ +import type { FieldInputTypes } from '../contract'; import type { Db } from '../db'; export function findUsers(db: Db) { @@ -8,6 +9,6 @@ export function findUserById(db: Db, id: string) { return db.orm.users.where({ _id: id }).first(); } -export function createUser(db: Db, data: { name: string; email: string; address: null }) { +export function createUser(db: Db, data: Omit) { return db.orm.users.create(data); } diff --git a/examples/retail-store/src/seed.ts b/examples/retail-store/src/seed.ts index 5c3593d72..82fdfd2fd 100644 --- a/examples/retail-store/src/seed.ts +++ b/examples/retail-store/src/seed.ts @@ -366,6 +366,7 @@ export async function seed(db: 
Db) { productId: p0._id, subCategory: 'Topwear', brand: 'Heritage', + exitMethod: null, }); await createAddToCartEvent(db, { diff --git a/examples/retail-store/test/aggregation.test.ts b/examples/retail-store/test/aggregation.test.ts index 30c49ab57..975bde3ba 100644 --- a/examples/retail-store/test/aggregation.test.ts +++ b/examples/retail-store/test/aggregation.test.ts @@ -21,6 +21,7 @@ describe('aggregation pipelines', { timeout: timeouts.spinUpMongoMemoryServer }, productId: `prod-${i}`, subCategory: 'Topwear', brand: 'TestBrand', + exitMethod: null, }); } for (let i = 0; i < 2; i++) { diff --git a/examples/retail-store/test/crud-lifecycle.test.ts b/examples/retail-store/test/crud-lifecycle.test.ts index aa7e39445..0cd3bf0f0 100644 --- a/examples/retail-store/test/crud-lifecycle.test.ts +++ b/examples/retail-store/test/crud-lifecycle.test.ts @@ -243,6 +243,7 @@ describe('CRUD lifecycle', { timeout: timeouts.spinUpMongoMemoryServer }, () => productId: 'prod-1', subCategory: 'Topwear', brand: 'TestBrand', + exitMethod: null, }); const events = await findEventsByUser(ctx.db, 'user-1'); From 1f1e66f0bd822392be0476e25556a462f70b743d Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 08:09:15 +0300 Subject: [PATCH 27/30] refactor(mongo-query-ast): co-locate filter brand predicate with brand definition Replace exported mongoFilterBrand constant with isMongoFilterExpr() predicate defined alongside the brand. The brand is now an implementation detail; consumers use the predicate instead of performing raw in-checks against an exported constant. 
--- .../query-ast/src/exports/execution.ts | 2 +- .../query-ast/src/filter-expressions.ts | 5 +++- .../query-ast/test/filter-expressions.test.ts | 23 ++++++++++++------- .../5-query-builders/orm/src/collection.ts | 4 ++-- 4 files changed, 22 insertions(+), 12 deletions(-) diff --git a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts index 18291c289..60f6ad241 100644 --- a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts +++ b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts @@ -28,13 +28,13 @@ export { } from '../commands'; export type { MongoFilterExpr } from '../filter-expressions'; export { + isMongoFilterExpr, MongoAndExpr, MongoExistsExpr, MongoExprFilter, MongoFieldFilter, MongoNotExpr, MongoOrExpr, - mongoFilterBrand, } from '../filter-expressions'; export type { MongoQueryPlan } from '../query-plan'; export type { RawMongoCommand } from '../raw-commands'; diff --git a/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts b/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts index 37d6368f1..d8428b569 100644 --- a/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts +++ b/packages/2-mongo-family/4-query/query-ast/src/filter-expressions.ts @@ -4,7 +4,10 @@ import { MongoAstNode } from './ast-node'; import type { MongoFilterRewriter, MongoFilterVisitor } from './visitors'; const FILTER_EXPR_BRAND = '__prismaNextMongoFilter__'; -export { FILTER_EXPR_BRAND as mongoFilterBrand }; + +export function isMongoFilterExpr(value: unknown): value is MongoFilterExpr { + return typeof value === 'object' && value !== null && FILTER_EXPR_BRAND in value; +} abstract class MongoFilterExpression extends MongoAstNode { abstract accept(visitor: MongoFilterVisitor): R; diff --git a/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts 
b/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts index 22a9fe278..d4030fdd8 100644 --- a/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts +++ b/packages/2-mongo-family/4-query/query-ast/test/filter-expressions.test.ts @@ -2,13 +2,13 @@ import { describe, expect, it } from 'vitest'; import { MongoAggFieldRef, MongoAggOperator } from '../src/aggregation-expressions'; import type { MongoFilterExpr } from '../src/filter-expressions'; import { + isMongoFilterExpr, MongoAndExpr, MongoExistsExpr, MongoExprFilter, MongoFieldFilter, MongoNotExpr, MongoOrExpr, - mongoFilterBrand, } from '../src/filter-expressions'; import type { MongoFilterRewriter, MongoFilterVisitor } from '../src/visitors'; @@ -265,8 +265,8 @@ describe('MongoFilterRewriter', () => { }); }); -describe('mongoFilterBrand', () => { - it('is present on all filter expression types', () => { +describe('isMongoFilterExpr', () => { + it('returns true for all filter expression types', () => { const field = MongoFieldFilter.eq('x', 1); const and = MongoAndExpr.of([field]); const or = MongoOrExpr.of([field]); @@ -275,19 +275,26 @@ describe('mongoFilterBrand', () => { const expr = MongoExprFilter.of(MongoAggFieldRef.of('x')); for (const node of [field, and, or, not, exists, expr]) { - expect(mongoFilterBrand in node).toBe(true); + expect(isMongoFilterExpr(node)).toBe(true); } }); - it('is absent on plain objects with a kind property', () => { + it('returns false for plain objects with a kind property', () => { const impersonator = { kind: 'field', field: 'x', op: '$eq', value: 1 }; - expect(mongoFilterBrand in impersonator).toBe(false); + expect(isMongoFilterExpr(impersonator)).toBe(false); + }); + + it('returns false for null and primitives', () => { + expect(isMongoFilterExpr(null)).toBe(false); + expect(isMongoFilterExpr(undefined)).toBe(false); + expect(isMongoFilterExpr(42)).toBe(false); }); it('brand property is non-enumerable', () => { const field = 
MongoFieldFilter.eq('x', 1); - expect(Object.keys(field)).not.toContain(String(mongoFilterBrand)); - expect(JSON.parse(JSON.stringify(field))).not.toHaveProperty(String(mongoFilterBrand)); + const brandKey = '__prismaNextMongoFilter__'; + expect(Object.keys(field)).not.toContain(brandKey); + expect(JSON.parse(JSON.stringify(field))).not.toHaveProperty(brandKey); }); it('survives dual-package scenario (string-based lookup)', () => { diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index c988bebb5..a170e5869 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -21,9 +21,9 @@ import { FindOneAndUpdateCommand, InsertManyCommand, InsertOneCommand, + isMongoFilterExpr, MongoAndExpr, MongoFieldFilter, - mongoFilterBrand, UpdateManyCommand, } from '@prisma-next/mongo-query-ast/execution'; import type { MongoValue } from '@prisma-next/mongo-value'; @@ -552,7 +552,7 @@ class MongoCollectionImpl< } #isFilterExpr(filter: unknown): filter is MongoFilterExpr { - return typeof filter === 'object' && filter !== null && mongoFilterBrand in filter; + return isMongoFilterExpr(filter); } #compileWhereObject(data: Record): MongoFilterExpr[] { From daca03c0e002d433439c06a21f4552fac5b61411 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 08:11:14 +0300 Subject: [PATCH 28/30] refactor(mongo-query-ast): move isMongoFilterExpr to internal entry point The filter brand predicate is an implementation detail, not a public API. Move it from the execution barrel to a dedicated ./internal entry point so only internal consumers (mongo-orm) can access it. 
--- packages/2-mongo-family/4-query/query-ast/package.json | 4 ++++ .../2-mongo-family/4-query/query-ast/src/exports/execution.ts | 1 - .../2-mongo-family/4-query/query-ast/src/exports/internal.ts | 1 + packages/2-mongo-family/4-query/query-ast/tsdown.config.ts | 1 + .../2-mongo-family/5-query-builders/orm/src/collection.ts | 2 +- 5 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 packages/2-mongo-family/4-query/query-ast/src/exports/internal.ts diff --git a/packages/2-mongo-family/4-query/query-ast/package.json b/packages/2-mongo-family/4-query/query-ast/package.json index b95f83581..db70e2605 100644 --- a/packages/2-mongo-family/4-query/query-ast/package.json +++ b/packages/2-mongo-family/4-query/query-ast/package.json @@ -41,6 +41,10 @@ "types": "./dist/exports/control.d.mts", "import": "./dist/exports/control.mjs" }, + "./internal": { + "types": "./dist/exports/internal.d.mts", + "import": "./dist/exports/internal.mjs" + }, "./package.json": "./package.json" }, "repository": { diff --git a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts index 60f6ad241..6105d8277 100644 --- a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts +++ b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts @@ -28,7 +28,6 @@ export { } from '../commands'; export type { MongoFilterExpr } from '../filter-expressions'; export { - isMongoFilterExpr, MongoAndExpr, MongoExistsExpr, MongoExprFilter, diff --git a/packages/2-mongo-family/4-query/query-ast/src/exports/internal.ts b/packages/2-mongo-family/4-query/query-ast/src/exports/internal.ts new file mode 100644 index 000000000..1d9192a72 --- /dev/null +++ b/packages/2-mongo-family/4-query/query-ast/src/exports/internal.ts @@ -0,0 +1 @@ +export { isMongoFilterExpr } from '../filter-expressions'; diff --git a/packages/2-mongo-family/4-query/query-ast/tsdown.config.ts 
b/packages/2-mongo-family/4-query/query-ast/tsdown.config.ts index 1e5f7990c..32a425a03 100644 --- a/packages/2-mongo-family/4-query/query-ast/tsdown.config.ts +++ b/packages/2-mongo-family/4-query/query-ast/tsdown.config.ts @@ -4,6 +4,7 @@ export default defineConfig({ entry: { 'exports/execution': 'src/exports/execution.ts', 'exports/control': 'src/exports/control.ts', + 'exports/internal': 'src/exports/internal.ts', }, exports: { enabled: false }, }); diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index a170e5869..03fefb95e 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -21,11 +21,11 @@ import { FindOneAndUpdateCommand, InsertManyCommand, InsertOneCommand, - isMongoFilterExpr, MongoAndExpr, MongoFieldFilter, UpdateManyCommand, } from '@prisma-next/mongo-query-ast/execution'; +import { isMongoFilterExpr } from '@prisma-next/mongo-query-ast/internal'; import type { MongoValue } from '@prisma-next/mongo-value'; import { MongoParamRef } from '@prisma-next/mongo-value'; import { AsyncIterableResult } from '@prisma-next/runtime-executor'; From 69c910024ed724679e60d3efbff6838e32b0eb34 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 08:13:55 +0300 Subject: [PATCH 29/30] fix(mongo-query-ast): export isMongoFilterExpr from execution barrel MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The predicate belongs alongside the types it discriminates. Remove the bogus ./internal entry point — it was not a real access control boundary and had no precedent in this repo. 
--- packages/2-mongo-family/4-query/query-ast/package.json | 4 ---- .../2-mongo-family/4-query/query-ast/src/exports/execution.ts | 1 + .../2-mongo-family/4-query/query-ast/src/exports/internal.ts | 1 - packages/2-mongo-family/4-query/query-ast/tsdown.config.ts | 1 - .../2-mongo-family/5-query-builders/orm/src/collection.ts | 2 +- 5 files changed, 2 insertions(+), 7 deletions(-) delete mode 100644 packages/2-mongo-family/4-query/query-ast/src/exports/internal.ts diff --git a/packages/2-mongo-family/4-query/query-ast/package.json b/packages/2-mongo-family/4-query/query-ast/package.json index db70e2605..b95f83581 100644 --- a/packages/2-mongo-family/4-query/query-ast/package.json +++ b/packages/2-mongo-family/4-query/query-ast/package.json @@ -41,10 +41,6 @@ "types": "./dist/exports/control.d.mts", "import": "./dist/exports/control.mjs" }, - "./internal": { - "types": "./dist/exports/internal.d.mts", - "import": "./dist/exports/internal.mjs" - }, "./package.json": "./package.json" }, "repository": { diff --git a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts index 6105d8277..60f6ad241 100644 --- a/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts +++ b/packages/2-mongo-family/4-query/query-ast/src/exports/execution.ts @@ -28,6 +28,7 @@ export { } from '../commands'; export type { MongoFilterExpr } from '../filter-expressions'; export { + isMongoFilterExpr, MongoAndExpr, MongoExistsExpr, MongoExprFilter, diff --git a/packages/2-mongo-family/4-query/query-ast/src/exports/internal.ts b/packages/2-mongo-family/4-query/query-ast/src/exports/internal.ts deleted file mode 100644 index 1d9192a72..000000000 --- a/packages/2-mongo-family/4-query/query-ast/src/exports/internal.ts +++ /dev/null @@ -1 +0,0 @@ -export { isMongoFilterExpr } from '../filter-expressions'; diff --git a/packages/2-mongo-family/4-query/query-ast/tsdown.config.ts 
b/packages/2-mongo-family/4-query/query-ast/tsdown.config.ts index 32a425a03..1e5f7990c 100644 --- a/packages/2-mongo-family/4-query/query-ast/tsdown.config.ts +++ b/packages/2-mongo-family/4-query/query-ast/tsdown.config.ts @@ -4,7 +4,6 @@ export default defineConfig({ entry: { 'exports/execution': 'src/exports/execution.ts', 'exports/control': 'src/exports/control.ts', - 'exports/internal': 'src/exports/internal.ts', }, exports: { enabled: false }, }); diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index 03fefb95e..a170e5869 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -21,11 +21,11 @@ import { FindOneAndUpdateCommand, InsertManyCommand, InsertOneCommand, + isMongoFilterExpr, MongoAndExpr, MongoFieldFilter, UpdateManyCommand, } from '@prisma-next/mongo-query-ast/execution'; -import { isMongoFilterExpr } from '@prisma-next/mongo-query-ast/internal'; import type { MongoValue } from '@prisma-next/mongo-value'; import { MongoParamRef } from '@prisma-next/mongo-value'; import { AsyncIterableResult } from '@prisma-next/runtime-executor'; From 8190ed2bd20862d4c52b3a22708c8a7da3b39c03 Mon Sep 17 00:00:00 2001 From: Will Madden Date: Wed, 15 Apr 2026 08:15:56 +0300 Subject: [PATCH 30/30] refactor(mongo-orm): inline isMongoFilterExpr, remove redundant wrapper --- .../2-mongo-family/5-query-builders/orm/src/collection.ts | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts index a170e5869..ba1e08a9e 100644 --- a/packages/2-mongo-family/5-query-builders/orm/src/collection.ts +++ b/packages/2-mongo-family/5-query-builders/orm/src/collection.ts @@ -203,7 +203,7 @@ class MongoCollectionImpl< where( filter: MongoWhereFilter | MongoFilterExpr, 
): MongoCollection { - if (this.#isFilterExpr(filter)) { + if (isMongoFilterExpr(filter)) { return this.#clone({ filters: [...this.#state.filters, filter] }); } const compiled = this.#compileWhereObject(filter as Record); @@ -551,10 +551,6 @@ class MongoCollectionImpl< return model?.fields ?? {}; } - #isFilterExpr(filter: unknown): filter is MongoFilterExpr { - return isMongoFilterExpr(filter); - } - #compileWhereObject(data: Record): MongoFilterExpr[] { const fields = this.#modelFields(); const filters: MongoFilterExpr[] = [];