diff --git a/.changeset/lovely-socks-argue.md b/.changeset/lovely-socks-argue.md new file mode 100644 index 00000000..12e650fe --- /dev/null +++ b/.changeset/lovely-socks-argue.md @@ -0,0 +1,5 @@ +--- +'@cleverbrush/schema': minor +--- + +fix(schema): make ~standard.validate return Promise to support async validation per Standard Schema v1 spec diff --git a/.changeset/quiet-pans-create.md b/.changeset/quiet-pans-create.md new file mode 100644 index 00000000..b5ae938d --- /dev/null +++ b/.changeset/quiet-pans-create.md @@ -0,0 +1,29 @@ +--- +'@cleverbrush/client': minor +'@cleverbrush/server': minor +--- + +feat(client): optimistic update + offline support + tag-based cache invalidation + +- Add `optimisticUpdate()` middleware — tags mutations with IDs and tracks network failures +- Add `offlineQueue()` middleware — queues mutations when offline, replays on reconnect +- Add `useOptimisticMutation()` React hook — automatic TanStack Query cache snapshot/rollback +- Add `OfflineError` class extending `NetworkError` +- Extend `PerCallOverrides` with `optimisticUpdate` and `offlineQueue` keys + +feat(server, client): tag-based cache invalidation via `.clearsCacheTag()` endpoint annotations + +- Add `.clearsCacheTag(name[, selector])` to `EndpointBuilder` — declare cache tags with optional property selectors +- Add `CacheTagDefinition`, `CacheTagPropertyAccessor`, `createCacheTagTree`, `serializeTag`, `computeCacheKey` to `@cleverbrush/server` +- Add `cacheTags()` middleware to `@cleverbrush/client/cache` — tag-keyed HTTP caching with automatic invalidation on mutations +- Add `CacheTagMiddlewareOptions` with `ttlByTag`, `defaultTtl`, `condition` +- Add `cacheTags` and `headers` fields to `EndpointMeta` for middleware introspection +- Add implicit TanStack Query invalidation in `useMutation` when endpoint declares cache tags +- Add `CacheTagSelector` type for IDE autocomplete in `.clearsCacheTag()` selector callbacks + +feat(server, client): request idempotency middleware 
+ +- Add `idempotency()` server middleware — stores responses keyed by `X-Idempotency-Key` header, replays stored response on duplicate keys +- Add `idempotency()` client middleware — auto-generates UUID v4 as `X-Idempotency-Key` header for mutating requests, preserves key across retries +- Export `IdempotencyOptions` (client) and `ServerIdempotencyOptions` (server) +- Add `cacheResponse()` server middleware — tag-based server-side response caching with handler-level invalidation diff --git a/.changeset/separate-knex-schema-operations.md b/.changeset/separate-knex-schema-operations.md new file mode 100644 index 00000000..27119841 --- /dev/null +++ b/.changeset/separate-knex-schema-operations.md @@ -0,0 +1,5 @@ +--- +"@cleverbrush/knex-schema": patch +--- + +Extract query operations into modular files (helpers, select, where, insert, update, delete, join, pagination, state) from monolithic SchemaQueryBuilder. No public API changes. diff --git a/.changeset/tiny-dragons-shout.md b/.changeset/tiny-dragons-shout.md new file mode 100644 index 00000000..c09e7e3c --- /dev/null +++ b/.changeset/tiny-dragons-shout.md @@ -0,0 +1,11 @@ +--- +'@cleverbrush/schema': minor +'@cleverbrush/schema-json': minor +--- + +Add `intersection()` schema builder for combining two schemas (both must pass) + +- New `IntersectionSchemaBuilder` class with `intersection(left, right)` factory +- Validates both schemas against the input and merges outputs +- Maps to `allOf` in JSON Schema (to/from bidirectional) +- Supports all standard modifiers: `.optional()`, `.nullable()`, `.default()`, `.catch()`, `.brand()`, `.readonly()`, `.addValidator()`, `.addPreprocessor()`, etc. 
diff --git a/.changeset/tiny-socks-march.md b/.changeset/tiny-socks-march.md new file mode 100644 index 00000000..189370cd --- /dev/null +++ b/.changeset/tiny-socks-march.md @@ -0,0 +1,24 @@ +--- +"@cleverbrush/server": minor +"@cleverbrush/server-openapi": minor +--- + +feat(server): add file upload support via `.upload()` and `FilePart` type + +Adds `multipart/form-data` parsing with `@fastify/busboy`, a new `.upload()` +method on `EndpointBuilder`, and the `FilePart` type for handling uploaded +files in endpoint handlers. The OpenAPI generator emits `multipart/form-data` +request bodies for upload-enabled endpoints. + +```ts +const ep = endpoint + .post("/api/avatar") + .upload({ maxFileSize: 2 * 1024 * 1024 }) + .body(object({ description: string().optional() })); + +server.handle(ep, ({ files }) => { + const avatar = files["avatar"]; + // { filename, mimeType, buffer, size } +}); +``` + diff --git a/demos/todo-backend/Dockerfile b/demos/todo-backend/Dockerfile index 949cba78..3f11b1bd 100644 --- a/demos/todo-backend/Dockerfile +++ b/demos/todo-backend/Dockerfile @@ -5,7 +5,7 @@ FROM node:22-alpine AS builder WORKDIR /app # Copy root workspace manifest for npm workspaces resolution -COPY package.json package-lock.json* turbo.json tsconfig.build.json ./ +COPY package.json package-lock.json* turbo.json tsconfig.json tsconfig.build.json ./ # Copy all workspace package.json files for dependency resolution COPY libs/async/package.json ./libs/async/ @@ -26,6 +26,8 @@ COPY libs/server-openapi/package.json ./libs/server-openapi/ COPY libs/otel/package.json ./libs/otel/ COPY libs/client/package.json ./libs/client/ COPY libs/benchmarks/package.json ./libs/benchmarks/ +COPY libs/orm/package.json ./libs/orm/ +COPY libs/orm-cli/package.json ./libs/orm-cli/ COPY demos/todo-backend/package.json ./demos/todo-backend/ # Install all workspace dependencies diff --git a/demos/todo-backend/migrations/20260502000001_add_todo_attachment.ts 
b/demos/todo-backend/migrations/20260502000001_add_todo_attachment.ts new file mode 100644 index 00000000..82e36bf3 --- /dev/null +++ b/demos/todo-backend/migrations/20260502000001_add_todo_attachment.ts @@ -0,0 +1,17 @@ +import type { Knex } from 'knex'; + +export async function up(knex: Knex): Promise { + await knex.schema.table('todos', (table) => { + table.binary('attachment_data').nullable(); + table.string('attachment_name', 1024).nullable(); + table.string('attachment_mime_type', 255).nullable(); + }); +} + +export async function down(knex: Knex): Promise { + await knex.schema.table('todos', (table) => { + table.dropColumn('attachment_data'); + table.dropColumn('attachment_name'); + table.dropColumn('attachment_mime_type'); + }); +} diff --git a/demos/todo-backend/src/api/contract.ts b/demos/todo-backend/src/api/contract.ts index bd5cc6f8..10fb5e9f 100644 --- a/demos/todo-backend/src/api/contract.ts +++ b/demos/todo-backend/src/api/contract.ts @@ -82,9 +82,13 @@ export const api = defineApi({ list: todosResource .get() .query(TodoListQuerySchema) + .cacheTag('todo-list', p => ({ + page: p.query.page, + limit: p.query.limit + })) .responses({ 200: array(TodoResponseSchema) }), - get: todosResource.get(ById).responses({ + get: todosResource.get(ById).cacheTag('todo', p => ({ id: p.params.id })).responses({ 200: TodoResponseSchema, 403: ErrorResponseSchema, 404: ErrorResponseSchema @@ -96,6 +100,7 @@ export const api = defineApi({ id: number().coerce() })`/${t => t.id}/with-author` ) + .cacheTag('todo-author', p => ({ id: p.params.id })) .responses({ 200: TodoWithAuthorResponseSchema, 403: ErrorResponseSchema, @@ -105,15 +110,20 @@ export const api = defineApi({ create: todosResource .post() .body(CreateTodoBodySchema) + .clearsCacheTag('todo-list') .responses({ 201: TodoResponseSchema }), - update: todosResource.patch(ById).body(UpdateTodoBodySchema).responses({ + update: todosResource.patch(ById).body(UpdateTodoBodySchema) + .clearsCacheTag('todo-list') + 
.clearsCacheTag('todo', p => ({ id: p.params.id })).responses({ 200: TodoResponseSchema, 403: ErrorResponseSchema, 404: ErrorResponseSchema }), - delete: todosResource.delete(ById).responses({ + delete: todosResource.delete(ById) + .clearsCacheTag('todo-list') + .clearsCacheTag('todo', p => ({ id: p.params.id })).responses({ 204: null, 403: ErrorResponseSchema, 404: ErrorResponseSchema @@ -153,6 +163,8 @@ export const api = defineApi({ complete: todosResource .post(route({ id: number().coerce() })`/${t => t.id}/complete`) .headers(CompletionRequestHeadersSchema) + .clearsCacheTag('todo-list') + .clearsCacheTag('todo', p => ({ id: p.params.id })) .responses({ 200: TodoResponseSchema, 409: ErrorResponseSchema, @@ -164,10 +176,25 @@ export const api = defineApi({ route({ id: number().coerce() })`/${t => t.id}/attachment` ), + uploadAttachment: todosResource + .post( + route({ id: number().coerce() })`/${t => t.id}/attachment` + ) + .upload({ + maxFileSize: 10 * 1024 * 1024, + allowedMimeTypes: [ + 'image/*', + 'application/pdf', + 'text/plain' + ] + }) + .body(object({ description: string().optional() })), + listActivity: todosResource .get( route({ id: number().coerce() })`/${t => t.id}/activity` ) + .cacheTag('todo-activity', p => ({ id: p.params.id })) .responses({ 200: array(TodoActivityResponseSchema), 403: ErrorResponseSchema, @@ -179,15 +206,24 @@ export const api = defineApi({ list: usersResource .get() .query(PaginationQuerySchema) + .cacheTag('user-list', p => ({ + page: p.query.page, + limit: p.query.limit + })) .responses({ 200: array(UserResponseSchema) }), - delete: usersResource.delete(ById).responses({ + delete: usersResource.delete(ById) + .clearsCacheTag('user-list') + .clearsCacheTag('user-profile') + .responses({ 204: null, 400: ErrorResponseSchema, 404: ErrorResponseSchema }), - me: usersResource.get(route({})`/me`).returns(UserResponseSchema) + me: usersResource.get(route({})`/me`) + .cacheTag('user-profile') + .returns(UserResponseSchema) }, 
webhooks: { @@ -204,9 +240,14 @@ export const api = defineApi({ listAll: activityResource .get() .query(object({ limit: number().coerce().optional() })) + .cacheTag('activity-list', p => ({ + limit: p.query.limit + })) .responses({ 200: array(TodoActivityResponseSchema) }), - delete: activityResource.delete(ById).responses({ + delete: activityResource.delete(ById) + .clearsCacheTag('activity-list') + .responses({ 204: null, 404: ErrorResponseSchema }) diff --git a/demos/todo-backend/src/api/endpoints.ts b/demos/todo-backend/src/api/endpoints.ts index 550733f4..59be1988 100644 --- a/demos/todo-backend/src/api/endpoints.ts +++ b/demos/todo-backend/src/api/endpoints.ts @@ -1,11 +1,17 @@ -import { defineWebhook } from '@cleverbrush/server'; // eslint-disable-next-line @typescript-eslint/no-unused-vars import { POLYMORPHIC_TYPE_BRAND } from '@cleverbrush/orm'; -import { DbToken, KnexToken, LoggerToken, TrackedDbToken } from '../di/tokens.js'; +import { TodoResponseSchema } from './schemas.js'; +import { defineWebhook } from '@cleverbrush/server'; +import { + DbToken, + KnexToken, + LoggerToken, + TrackedDbToken +} from '../di/tokens.js'; import { api } from './contract.js'; import { type ImportTodosBody, - ImportTodosBodySchema, + type ImportTodosBodySchema, PrincipalSchema, TodoNotificationPayloadSchema, WebhookAckSchema @@ -179,16 +185,32 @@ export const ExportTodosEndpoint = api.todos.exportCsv export const DownloadAttachmentEndpoint = api.todos.downloadAttachment .authorize(PrincipalSchema) - .inject({ db: DbToken }) - .producesFile('text/plain', 'A plain-text summary of the todo.') + .inject({ knex: KnexToken }) .summary('Download todo attachment') .description( - 'Downloads a plain-text summary of the todo as a file attachment. ' + - 'Demonstrates `.producesFile()` and `ActionResult.file()`.' + 'Downloads the uploaded file attachment for a todo. ' + + 'Returns the original file with its original content type.' 
) .tags('todos') .operationId('downloadTodoAttachment'); +// ── Upload attachment ───────────────────────────────────────────────────────── +// Features: .upload(), multipart/form-data, FilePart, file persistence in DB + +export const UploadAttachmentEndpoint = api.todos.uploadAttachment + .authorize(PrincipalSchema) + .inject({ db: DbToken, knex: KnexToken }) + .responses({ 201: TodoResponseSchema }) + .summary('Upload todo attachment') + .description( + 'Uploads a file attachment for a todo. ' + + 'Supports images, PDFs, and plain text files up to 10 MB. ' + + 'The file is stored in the database and can be downloaded via ' + + 'the download attachment endpoint.' + ) + .tags('todos') + .operationId('uploadTodoAttachment'); + // ── Import todos ────────────────────────────────────────────────────────────── // Features: .example(), .examples(), .headers(), ActionResult.json(), ActionResult.accepted() @@ -391,6 +413,7 @@ export const endpoints = { sendEvent: SendTodoEventEndpoint, exportCsv: ExportTodosEndpoint, downloadAttachment: DownloadAttachmentEndpoint, + uploadAttachment: UploadAttachmentEndpoint, importBulk: ImportTodosEndpoint, legacyReplace: LegacyReplaceTodoEndpoint, complete: CompleteTodoEndpoint, diff --git a/demos/todo-backend/src/api/handlers/index.ts b/demos/todo-backend/src/api/handlers/index.ts index d35c3cb2..3b157463 100644 --- a/demos/todo-backend/src/api/handlers/index.ts +++ b/demos/todo-backend/src/api/handlers/index.ts @@ -26,7 +26,8 @@ import { listTodoActivityHandler, listTodosHandler, sendTodoEventHandler, - updateTodoHandler + updateTodoHandler, + uploadAttachmentHandler } from './todos.js'; import { deleteUserHandler, @@ -51,6 +52,7 @@ export const handlers: HandlerMap = { sendEvent: sendTodoEventHandler, exportCsv: exportTodosHandler, downloadAttachment: downloadAttachmentHandler, + uploadAttachment: uploadAttachmentHandler, importBulk: importTodosHandler, legacyReplace: legacyReplaceTodoHandler, complete: completeTodoHandler, diff 
--git a/demos/todo-backend/src/api/handlers/todos.ts b/demos/todo-backend/src/api/handlers/todos.ts index ffaa13b6..0c60aea1 100644 --- a/demos/todo-backend/src/api/handlers/todos.ts +++ b/demos/todo-backend/src/api/handlers/todos.ts @@ -1,4 +1,5 @@ -import { ActionResult, type Handler } from '@cleverbrush/server'; +import { ActionResult, BadRequestError, ForbiddenError, type Handler, NotFoundError } from '@cleverbrush/server'; +import type { Knex } from 'knex'; import { withSpan } from '@cleverbrush/otel'; import { TodoCompleted, @@ -23,6 +24,7 @@ import type { ListAllActivityEndpoint, ListTodoActivityEndpoint, ListTodosEndpoint, + UploadAttachmentEndpoint, SendTodoEventEndpoint, UpdateTodoEndpoint } from '../endpoints.js'; @@ -367,38 +369,98 @@ export const exportTodosHandler: Handler = async ( export const downloadAttachmentHandler: Handler< typeof DownloadAttachmentEndpoint -> = async ({ params, principal }, { db }) => { +> = async ({ params, principal }, { knex }) => { + const row = await (knex as Knex)('todos') + .select( + 'attachment_data', + 'attachment_name', + 'attachment_mime_type', + 'user_id' + ) + .where('id', params.id) + .first(); + + if (!row) { + throw new NotFoundError(`Todo ${params.id} not found.`); + } + + if ( + principal.role !== 'admin' && + (row as Record).user_id !== principal.userId + ) { + throw new ForbiddenError('You do not have access to this todo.'); + } + + if (!(row as Record).attachment_data) { + throw new NotFoundError('No attachment for this todo.'); + } + + const r = row as { + attachment_data: Buffer; + attachment_name: string; + attachment_mime_type: string; + }; + + return ActionResult.file( + r.attachment_data, + r.attachment_name, + r.attachment_mime_type + ); +}; + +// ── Upload todo attachment ──────────────────────────────────────────────────── + +export const uploadAttachmentHandler: Handler< + typeof UploadAttachmentEndpoint +> = async ({ params, principal, files, rejectedFiles }, { db, knex }) => { const todo = 
await db.todos.find(params.id); if (!todo) { - return ActionResult.notFound({ - message: `Todo ${params.id} not found.` - }); + throw new NotFoundError(`Todo ${params.id} not found.`); } if (principal.role !== 'admin' && todo.userId !== principal.userId) { - return ActionResult.forbidden({ - message: 'You do not have access to this todo.' - }); + throw new ForbiddenError('You do not have access to this todo.'); } - const mapped = await mapTodo(todo); - const text = [ - `Todo #${mapped.id}`, - `Title: ${mapped.title}`, - mapped.description ? `Description: ${mapped.description}` : null, - `Completed: ${mapped.completed ? 'Yes' : 'No'}`, - `Created: ${mapped.createdAt.toISOString()}`, - `Updated: ${mapped.updatedAt.toISOString()}` - ] - .filter(Boolean) - .join('\n'); + const file = files['attachment']; + if (!file) { + let detail = 'No file uploaded. Use field name "attachment".'; + if (rejectedFiles && rejectedFiles.length > 0) { + const reasons = rejectedFiles + .map(r => `${r.filename}: ${r.reason}`) + .join('; '); + detail += ` Rejected: ${reasons}`; + } + throw new BadRequestError(detail); + } - return ActionResult.file( - Buffer.from(text, 'utf-8'), - `todo-${params.id}.txt`, - 'text/plain' - ); + // Persist file in DB — raw knex for bytea column + await (knex as Knex)('todos') + .where('id', params.id) + .update({ + attachment_data: file.buffer, + attachment_name: file.filename, + attachment_mime_type: file.mimeType, + updated_at: new Date() + }); + + // Re-fetch the updated todo for the response + const updated = await db.todos.find(params.id); + if (!updated) throw new NotFoundError(`Todo ${params.id} not found.`); + + return ActionResult.created({ + id: updated.id, + title: updated.title, + description: updated.description, + completed: updated.completed, + userId: updated.userId, + createdAt: updated.createdAt, + updatedAt: updated.updatedAt, + attachmentName: updated.attachmentName, + attachmentMimeType: updated.attachmentMimeType, + attachmentSize: 
file.size + }); }; // ── Bulk import todos ───────────────────────────────────────────────────────── diff --git a/demos/todo-backend/src/api/mappers.ts b/demos/todo-backend/src/api/mappers.ts index f4258e6c..eaba2470 100644 --- a/demos/todo-backend/src/api/mappers.ts +++ b/demos/todo-backend/src/api/mappers.ts @@ -20,7 +20,9 @@ const TodoRowSchema = object({ completed: boolean(), userId: number(), createdAt: date(), - updatedAt: date() + updatedAt: date(), + attachmentName: string().optional(), + attachmentMimeType: string().optional() }); export const mappingRegistry = mapper() @@ -28,7 +30,11 @@ export const mappingRegistry = mapper() m.for(t => t.authProvider).compute(f => f.authProvider) ) .configure(TodoRowSchema, TodoResponseSchema, m => - m.for(t => t.description).compute(f => f.description ?? undefined) + m + .for(t => t.description) + .compute(f => f.description ?? undefined) + .for(t => t.attachmentSize) + .ignore() ); const _mapUserFn = mappingRegistry.getMapper(UserRowSchema, UserResponseSchema); diff --git a/demos/todo-backend/src/api/schemas.ts b/demos/todo-backend/src/api/schemas.ts index e71643db..d5dd9480 100644 --- a/demos/todo-backend/src/api/schemas.ts +++ b/demos/todo-backend/src/api/schemas.ts @@ -104,7 +104,16 @@ export const TodoResponseSchema = object({ .describe('ISO 8601 timestamp of when the todo was created.'), updatedAt: date() .coerce() - .describe('ISO 8601 timestamp of the last update.') + .describe('ISO 8601 timestamp of the last update.'), + attachmentName: string() + .optional() + .describe('Original filename of the uploaded attachment.'), + attachmentMimeType: string() + .optional() + .describe('MIME type of the uploaded attachment.'), + attachmentSize: number() + .optional() + .describe('Size of the uploaded attachment in bytes.') }).schemaName('TodoResponse'); export type TodoResponse = InferType; diff --git a/demos/todo-backend/src/db/schemas.ts b/demos/todo-backend/src/db/schemas.ts index 50b00c57..277907e2 100644 --- 
a/demos/todo-backend/src/db/schemas.ts +++ b/demos/todo-backend/src/db/schemas.ts @@ -118,6 +118,12 @@ const TodoSchema = object({ .index('idx_todos_user_id'), createdAt: date().hasColumnName('created_at'), updatedAt: date().hasColumnName('updated_at'), + attachmentName: string() + .hasColumnName('attachment_name') + .optional(), + attachmentMimeType: string() + .hasColumnName('attachment_mime_type') + .optional(), // navigation properties consumed by `defineEntity()` author: UserDbSchema.optional(), activity: array(TodoActivityDbSchema).optional() @@ -133,7 +139,9 @@ const TodoSchema = object({ 'completed', 'userId', 'createdAt', - 'updatedAt' + 'updatedAt', + 'attachmentName', + 'attachmentMimeType' ) .projection('ownership', 'id', 'userId') .scope( @@ -205,4 +213,6 @@ export type TodoDb = { userId: number; createdAt: Date; updatedAt: Date; + attachmentName?: string; + attachmentMimeType?: string; }; diff --git a/demos/todo-backend/src/server.ts b/demos/todo-backend/src/server.ts index ea895cb7..f238900a 100644 --- a/demos/todo-backend/src/server.ts +++ b/demos/todo-backend/src/server.ts @@ -83,7 +83,9 @@ export function buildServer(config: Config, logger: Logger) { correlationResponseHeader: false }); - const server = createServer() + const server = createServer({ + maxBodySize: 15 * 1024 * 1024 // Matches nginx client_max_body_size + }) .use(tracingMiddleware({ excludePaths: ['/health'] })) .use(corsMiddleware) .use(correlationMiddleware) diff --git a/demos/todo-backend/tsup.config.ts b/demos/todo-backend/tsup.config.ts index f553d459..7bec1ccf 100644 --- a/demos/todo-backend/tsup.config.ts +++ b/demos/todo-backend/tsup.config.ts @@ -12,5 +12,7 @@ export default defineConfig({ // @opentelemetry/* packages are CJS and use require('async_hooks') + other // Node built-ins internally. Bundling them into ESM via tsup's shimmed // require breaks at runtime. Keep them external so Node loads them natively. 
- external: ['ws', /^@opentelemetry\//], + // @fastify/busboy is CJS and uses require('node:stream') internally. + // Bundling it into ESM via tsup's shimmed require breaks at runtime. + external: ['ws', /^@opentelemetry\//, '@fastify/busboy'], }); diff --git a/demos/todo-frontend/Dockerfile b/demos/todo-frontend/Dockerfile index 1ceb83b6..9fd23c3c 100644 --- a/demos/todo-frontend/Dockerfile +++ b/demos/todo-frontend/Dockerfile @@ -24,6 +24,10 @@ COPY libs/server/package.json ./libs/server/ COPY libs/server-openapi/package.json ./libs/server-openapi/ COPY libs/client/package.json ./libs/client/ COPY libs/benchmarks/package.json ./libs/benchmarks/ +COPY libs/log/package.json ./libs/log/ +COPY libs/otel/package.json ./libs/otel/ +COPY libs/orm/package.json ./libs/orm/ +COPY libs/orm-cli/package.json ./libs/orm-cli/ COPY demos/todo-backend/package.json ./demos/todo-backend/ COPY demos/todo-frontend/package.json ./demos/todo-frontend/ diff --git a/demos/todo-frontend/nginx.conf b/demos/todo-frontend/nginx.conf index 792d622a..3f93c96f 100644 --- a/demos/todo-frontend/nginx.conf +++ b/demos/todo-frontend/nginx.conf @@ -2,6 +2,9 @@ server { listen 80; server_name _; + # Allow file uploads up to 15MB + client_max_body_size 15m; + root /usr/share/nginx/html; index index.html; diff --git a/demos/todo-frontend/src/App.tsx b/demos/todo-frontend/src/App.tsx index f2c0ee45..2f8efe3d 100644 --- a/demos/todo-frontend/src/App.tsx +++ b/demos/todo-frontend/src/App.tsx @@ -23,6 +23,8 @@ const BatchingPage = lazy(() => import('./features/batching/BatchingPage')); const ReactQueryPage = lazy(() => import('./features/react-query/ReactQueryPage')); const LivePage = lazy(() => import('./features/live/LivePage')); const ActivityFeedPage = lazy(() => import('./features/activity/ActivityPage')); +const CachePage = lazy(() => import('./features/cache/CachePage')); +const IdempotencyPage = lazy(() => import('./features/idempotency/IdempotencyPage')); const PageFallback = () => ( @@ -51,6 
+53,8 @@ const router = createBrowserRouter([ { path: '/react-query', element: }> }, { path: '/live', element: }> }, { path: '/activity', element: }> }, + { path: '/cache', element: }> }, + { path: '/idempotency', element: }> }, { element: , children: [ diff --git a/demos/todo-frontend/src/api/client.ts b/demos/todo-frontend/src/api/client.ts index 59188500..a1a61889 100644 --- a/demos/todo-frontend/src/api/client.ts +++ b/demos/todo-frontend/src/api/client.ts @@ -21,8 +21,11 @@ import { createClient } from '@cleverbrush/client/react'; import { retry } from '@cleverbrush/client/retry'; import { timeout } from '@cleverbrush/client/timeout'; import { dedupe } from '@cleverbrush/client/dedupe'; -import { throttlingCache } from '@cleverbrush/client/cache'; +import { idempotency } from '@cleverbrush/client/idempotency'; +import { cacheTags } from '@cleverbrush/client/cache'; import { batching } from '@cleverbrush/client/batching'; +import { optimisticUpdate } from '@cleverbrush/client/optimistic-update'; +import { offlineQueue } from '@cleverbrush/client/offline-queue'; import { api } from '@cleverbrush/todo-backend/contract'; import { loadToken, setToken } from '../lib/http-client'; @@ -34,12 +37,18 @@ const BASE_URL = import.meta.env.VITE_API_URL ?? ''; * Groups: `auth`, `todos`, `users`, `webhooks`, `admin`, `demo`. * * Resilience middlewares are applied in order: - * 1. **retry** — retries failed requests up to 2 times with exponential backoff - * 2. **timeout** — aborts requests exceeding 10 seconds - * 3. **dedupe** — coalesces identical in-flight GET requests - * 4. **cache** — serves cached GET responses within a 2-second TTL - * 5. **batching** — coalesces concurrent requests into a single `POST /__batch` + * 1. **offlineQueue** — queues mutations when offline, replays on reconnect (outermost) + * 2. **idempotency** — adds X-Idempotency-Key to mutations for server deduplication + * 3. 
**retry** — retries failed requests (preserves idempotency key across retries) + * 4. **timeout** — aborts requests exceeding 10 seconds + * 5. **dedupe** — coalesces identical in-flight GET requests + * 6. **cacheTags** — tag-based caching and auto-invalidation + * 7. **batching** — coalesces concurrent requests into a single `POST /__batch` + * 8. **optimisticUpdate** — tags mutations and tracks network failures (innermost) */ + +export const offlineQueueStore = { queue: [], isOnline: true, isReplaying: false }; + export const client = createClient(api, { baseUrl: BASE_URL, getToken: () => loadToken(), @@ -51,10 +60,15 @@ export const client = createClient(api, { } }, middlewares: [ + offlineQueue({ store: offlineQueueStore }), + idempotency(), retry({ limit: 2, retryOnTimeout: true }), timeout({ timeout: 10_000 }), dedupe(), - throttlingCache({ throttle: 2000 }), - batching({ maxSize: 10, windowMs: 10 }) - ] + cacheTags({ + defaultTtl: 5000 + }), + batching({ maxSize: 10, windowMs: 10 }), + optimisticUpdate() + ] as any }); diff --git a/demos/todo-frontend/src/components/Layout.tsx b/demos/todo-frontend/src/components/Layout.tsx index 5714dd9a..edaeca93 100644 --- a/demos/todo-frontend/src/components/Layout.tsx +++ b/demos/todo-frontend/src/components/Layout.tsx @@ -25,6 +25,8 @@ const navItems: NavItem[] = [ { to: '/resilience', label: 'Resilience', emoji: '🛡️' }, { to: '/batching', label: 'Batching', emoji: '📦' }, { to: '/react-query', label: 'React Query', emoji: '⚡' }, + { to: '/cache', label: 'Cache', emoji: '🗄️' }, + { to: '/idempotency', label: 'Idempotency', emoji: '🔑' }, { to: '/live', label: 'Live', emoji: '📡' }, { to: '/activity', label: 'Activity Feed', emoji: '🔴' }, { to: '/webhooks', label: 'Webhooks', emoji: '🔔' }, diff --git a/demos/todo-frontend/src/features/cache/CachePage.tsx b/demos/todo-frontend/src/features/cache/CachePage.tsx new file mode 100644 index 00000000..e2024f17 --- /dev/null +++ 
b/demos/todo-frontend/src/features/cache/CachePage.tsx @@ -0,0 +1,352 @@ +import { useState, useCallback } from 'react'; +import { + Badge, + Box, + Button, + Callout, + Card, + Code, + Flex, + Heading, + Separator, + Text, + TextField +} from '@radix-ui/themes'; +import { client } from '../../api/client'; + +// ── Types ──────────────────────────────────────────────────────────────── + +type LogEntry = { + step: number; + label: string; + expected: string; + result: string; + resultColor: 'blue' | 'green' | 'amber' | 'red'; +}; + +// ── Page ───────────────────────────────────────────────────────────────── + +export default function CachePage() { + const [todoId, setTodoId] = useState(1); + const [running, setRunning] = useState(false); + const [log, setLog] = useState([]); + + const run = useCallback(async () => { + setRunning(true); + setLog([]); + const entries: LogEntry[] = []; + + function add( + step: number, + label: string, + expected: string, + result: string, + resultColor: LogEntry['resultColor'] = 'blue' + ) { + entries.push({ step, label, expected, result, resultColor }); + setLog([...entries]); + } + + const id = todoId; + let start: number; + + // Step 1 — warm the cache + start = performance.now(); + try { + await client.todos.get({ params: { id } }); + const ms = Math.round(performance.now() - start); + add( + 1, + `Fetch GET /api/todos/${id}`, + 'Network request in DevTools', + `Completed in ${ms}ms — network fetch`, + 'blue' + ); + } catch (err: any) { + add( + 1, + `Fetch GET /api/todos/${id}`, + 'Network request', + `Error: ${err.message}`, + 'red' + ); + setRunning(false); + return; + } + + await new Promise((r) => setTimeout(r, 100)); + + // Step 2 — cache hit + start = performance.now(); + await client.todos.get({ params: { id } }); + const ms2 = Math.round(performance.now() - start); + add( + 2, + `Fetch GET /api/todos/${id} again`, + 'NO request in DevTools — served from cacheTags middleware cache', + `Completed in ${ms2}ms`, + 'green' + 
); + + await new Promise((r) => setTimeout(r, 100)); + + // Step 3 — list cache hit (after being warmed by previous fetch?) + start = performance.now(); + await client.todos.list({ query: {} }); + const ms3 = Math.round(performance.now() - start); + add( + 3, + 'Fetch GET /api/todos (list)', + 'Network request in DevTools (first list fetch)', + `Completed in ${ms3}ms`, + 'blue' + ); + + await new Promise((r) => setTimeout(r, 100)); + + // Step 4 — list cache hit (warmed) + start = performance.now(); + await client.todos.list({ query: {} }); + const ms4 = Math.round(performance.now() - start); + add( + 4, + 'Fetch GET /api/todos (list) again', + 'NO request — cache hit for "todo-list" tag', + `Completed in ${ms4}ms`, + 'green' + ); + + await new Promise((r) => setTimeout(r, 100)); + + // Step 5 — mutate + start = performance.now(); + try { + await client.todos.update({ + params: { id }, + body: { title: `Cache-test ${Date.now()}` } + }); + const ms5 = Math.round(performance.now() - start); + add( + 5, + `Mutate PATCH /api/todos/${id}`, + 'Network request in DevTools. 
Invalidates "todo" and "todo-list" cache entries.', + `Completed in ${ms5}ms`, + 'amber' + ); + } catch (err: any) { + add( + 5, + `Mutate PATCH /api/todos/${id}`, + 'Network request, invalidates cache', + `Error: ${err.message}`, + 'red' + ); + } + + await new Promise((r) => setTimeout(r, 100)); + + // Step 6 — fetch after invalidate (should be network) + start = performance.now(); + await client.todos.get({ params: { id } }); + const ms6 = Math.round(performance.now() - start); + add( + 6, + `Fetch GET /api/todos/${id} after mutation`, + 'Network request in DevTools — "todo" cache was invalidated by step 5', + `Completed in ${ms6}ms`, + 'green' + ); + + await new Promise((r) => setTimeout(r, 100)); + + // Step 7 — list after mutation (should also be invalidated) + start = performance.now(); + await client.todos.list({ query: {} }); + const ms7 = Math.round(performance.now() - start); + add( + 7, + 'Fetch GET /api/todos (list) after mutation', + 'Network request in DevTools — "todo-list" cache was invalidated by step 5', + `Completed in ${ms7}ms`, + 'green' + ); + + setRunning(false); + }, [todoId]); + + const colorMap: Record = { + blue: 'var(--blue-9)', + green: 'var(--green-9)', + amber: 'var(--amber-9)', + red: 'var(--red-9)' + }; + + return ( + + + 🗄️ Cache Tags + + + Demonstrates tag-based HTTP caching with the{' '} + cacheTags middleware. The demo runs a sequence of + fetch + mutate operations and shows what to expect in your + browser's DevTools Network tab. + + + + + Open DevTools → Network tab before running. Cache hits + show no HTTP request at all — the response + comes from the middleware's in-memory cache. 
+ + + + + + + + + Todo ID + + + setTodoId(Number(e.target.value) || 1) + } + /> + + + + + + {log.length > 0 && ( + + + Run Log + + + + {log.map((entry) => ( + + + + Step {entry.step} + + + {entry.label} + + + + + + + Expected + + + {entry.expected} + + + + + Result + + + {entry.result} + + + + + ))} + + + )} + + + + What's Happening + + + + + Steps 1–2: The first{' '} + GET /api/todos/:id fetches from the + network and populates the{' '} + todo cache entry. The second call + within the 5s TTL hits the in-memory cache — no + network request is made. + + + Steps 3–4: Same pattern for{' '} + GET /api/todos with the{' '} + todo-list tag. + + + Step 5: A{' '} + PATCH /api/todos/:id mutation triggers + cache invalidation for both the{' '} + todo tag (matching the entity) and the{' '} + todo-list tag (the collection). The + middleware deletes the matching cache entries. + + + Steps 6–7: After invalidation, the + next fetch for both entity and list hits the + network again — the cache was cleared. 
+ + + + + + ); +} diff --git a/demos/todo-frontend/src/features/idempotency/IdempotencyPage.tsx b/demos/todo-frontend/src/features/idempotency/IdempotencyPage.tsx new file mode 100644 index 00000000..bd5f9b6b --- /dev/null +++ b/demos/todo-frontend/src/features/idempotency/IdempotencyPage.tsx @@ -0,0 +1,232 @@ +import { useState, useCallback } from 'react'; +import { + Badge, + Box, + Button, + Card, + Code, + Flex, + Heading, + Separator, + Text, + TextField +} from '@radix-ui/themes'; +import { client } from '../../api/client'; + +// ── Types ──────────────────────────────────────────────────────────────── + +type LogEntry = { + id: number; + step: number; + label: string; + result: string; + kind: 'info' | 'success' | 'error'; +}; + +// ── Page ───────────────────────────────────────────────────────────────── + +export default function IdempotencyPage() { + const [title, setTitle] = useState('Test todo'); + const [running, setRunning] = useState(false); + const [log, setLog] = useState([]); + + const run = useCallback(async () => { + setRunning(true); + setLog([]); + const entries: LogEntry[] = []; + + function add( + step: number, + label: string, + result: string, + kind: LogEntry['kind'] = 'info' + ) { + entries.push({ + id: entries.length + 1, + step, + label, + result, + kind + }); + setLog([...entries]); + } + + // Step 1: Create a todo — client adds X-Idempotency-Key automatically + let createdId: number | undefined; + try { + const res = await client.todos.create({ + body: { title: title.trim() } + }); + createdId = res.id; + add( + 1, + 'POST /api/todos (create)', + `Created todo #${res.id} with title "${res.title}" — idempotency key auto-generated`, + 'success' + ); + } catch (err: any) { + add( + 1, + 'POST /api/todos (create)', + `Error: ${err.message}`, + 'error' + ); + setRunning(false); + return; + } + + // Step 2: Create the same todo again (different key, should succeed) + try { + const res2 = await client.todos.create({ + body: { title: 
title.trim() } + }); + add( + 2, + 'POST /api/todos (create) again', + `Created todo #${res2.id} — different idempotency key, different request (expected: two todos exist)`, + 'success' + ); + } catch (err: any) { + add( + 2, + 'POST /api/todos (create) again', + `Error: ${err.message}`, + 'error' + ); + } + + // Step 3: Show what's in the Network tab + add( + 3, + 'Check DevTools Network tab', + 'Both POST requests have X-Idempotency-Key header. Server stores the response — if the same key is replayed, the handler does not execute again.', + 'info' + ); + + setRunning(false); + }, [title]); + + return ( + + + 🔑 Idempotency + + + Demonstrates the idempotency client middleware — + every mutating request automatically receives an{' '} + X-Idempotency-Key header so the server can + deduplicate replays. + + + + + + + + Todo title + + setTitle(e.target.value)} + /> + + + + + + {log.length > 0 && ( + + + Run Log + + + + {log.map((entry) => ( + + + Step {entry.step} + + + + {entry.label} + + + {entry.result} + + + + ))} + + + )} + + + + How It Works + + + + + Client: The{' '} + idempotency() middleware adds a{' '} + X-Idempotency-Key header (UUID v4) to + every mutating request. When a request is retried, + the same key is reused — so retries are + deduplicated server-side. + + + Server: The{' '} + idempotency() middleware stores + responses keyed by the header value. Replayed + requests return the stored response instead of + re-executing the handler. + + + TTL: Stored responses expire + after 24 hours by default. Periodic cleanup runs + every 60 seconds. 
+ + + + + + ); +} diff --git a/demos/todo-frontend/src/features/react-query/ReactQueryPage.tsx b/demos/todo-frontend/src/features/react-query/ReactQueryPage.tsx index 2b894555..d0cdd4e2 100644 --- a/demos/todo-frontend/src/features/react-query/ReactQueryPage.tsx +++ b/demos/todo-frontend/src/features/react-query/ReactQueryPage.tsx @@ -14,7 +14,8 @@ import { } from '@radix-ui/themes'; import { useQueryClient } from '@tanstack/react-query'; import { isApiError, isWebError } from '@cleverbrush/client'; -import { client } from '../../api/client'; +import { useOptimisticMutation } from '@cleverbrush/client/react'; +import { client, offlineQueueStore } from '../../api/client'; // ── Shared Helpers ────────────────────────────────────────────────────── @@ -154,24 +155,20 @@ function ParameterQueryDemo() { ); } -// ── Demo 3: useMutation + Cache Invalidation ────────────────────────── +// ── Demo 3: useMutation + Implicit Cache Invalidation ────────────────── function MutationDemo() { - const queryClient = useQueryClient(); const [title, setTitle] = useState(''); const mutation = client.todos.create.useMutation({ onSuccess: () => { - queryClient.invalidateQueries({ - queryKey: client.todos.queryKey() - }); setTitle(''); } }); return ( )} - {'client.todos.create.useMutation({ onSuccess: () => invalidate })'} + cache invalidation is implicit via cacheTags middleware ); } -// ── Demo 4: Optimistic Toggle ───────────────────────────────────────── +// ── Demo 4: Optimistic Toggle (useOptimisticMutation) ───────────────── function OptimisticToggleDemo() { - const queryClient = useQueryClient(); const { data } = client.todos.list.useQuery(); - const toggleMutation = client.todos.update.useMutation({ - onMutate: async (variables: any) => { - await queryClient.cancelQueries({ - queryKey: client.todos.queryKey() - }); - const key = client.todos.list.queryKey(); - const previous = queryClient.getQueryData(key); - queryClient.setQueryData(key, (old: any[]) => - old?.map((t: any) => - 
t.id === variables.params.id - ? { ...t, completed: variables.body.completed } - : t - ) - ); - return { previous }; - }, - onError: (_err: unknown, _vars: unknown, context: any) => { - if (context?.previous) { - queryClient.setQueryData( - client.todos.list.queryKey(), - context.previous - ); - } - }, - onSettled: () => { - queryClient.invalidateQueries({ - queryKey: client.todos.queryKey() - }); - } + const toggleMutation = useOptimisticMutation(client.todos.update, { + queryKey: client.todos.list.queryKey(), + optimisticUpdate: (oldTodos: any, variables: any) => + (oldTodos ?? []).map((t: any) => + t.id === variables.params.id + ? { ...t, completed: variables.body.completed } + : t + ) }); const todos = (data ?? []).slice(0, 5); return ( {todos.length === 0 ? ( @@ -289,7 +265,7 @@ function OptimisticToggleDemo() { )} - onMutate → cancel + setQueryData → onError → rollback + {'useOptimisticMutation(client.todos.update, { queryKey, optimisticUpdate })'} ); @@ -517,6 +493,203 @@ function ErrorHandlingDemo() { ); } +// ── Demo 9: Cache Tag Invalidation ────────────────────────────────────_ + +function CacheTagDemo() { + const [todoId, setTodoId] = useState(1); + const [result, setResult] = useState(null); + const [fetchCount, setFetchCount] = useState(0); + const [cachedCount, setCachedCount] = useState(0); + const [updating, setUpdating] = useState(false); + + const handleFetch = useCallback(async () => { + const before = Date.now(); + await client.todos.get({ params: { id: todoId } }); + const elapsed = Date.now() - before; + // < 100ms likely came from middleware cache; network takes longer + if (elapsed < 100) { + setCachedCount((c) => c + 1); + setResult(`Cache hit! 
(${elapsed}ms)`); + } else { + setFetchCount((c) => c + 1); + setResult(`Network fetch (${elapsed}ms)`); + } + }, [todoId]); + + const handleMutate = useCallback(async () => { + setUpdating(true); + try { + await client.todos.update({ + params: { id: todoId }, + body: { title: `Updated ${Date.now()}` } + }); + setResult('Mutation done — cache invalidated'); + } catch { + setResult('Mutation failed (expected if todo does not exist)'); + } + setUpdating(false); + }, [todoId]); + + return ( + + + Todo ID: + setTodoId(Number(e.target.value) || 1)} + style={{ width: '80px' }} + /> + + + + + + + Network: {fetchCount} + + + Cache hits: {cachedCount} + + + + {result && ( + + {result} + + )} + + + {'endpoint.cacheTag("todo", p => ({ id: p.params.id }))'} + + + ); +} + +// ── Demo 10: Offline Queue ───────────────────────────────────────────── + +function OfflineQueueDemo() { + const [title, setTitle] = useState(''); + const [isDemoOffline, setIsDemoOffline] = useState(false); + + const queueCount = offlineQueueStore.queue.length; + const isReplaying = offlineQueueStore.isReplaying; + + const toggleOffline = () => { + if (isDemoOffline) { + setIsDemoOffline(false); + offlineQueueStore.isOnline = true; + window.dispatchEvent(new Event('online')); + } else { + setIsDemoOffline(true); + offlineQueueStore.isOnline = false; + window.dispatchEvent(new Event('offline')); + } + }; + + const createMutation = client.todos.create.useMutation({ + onSuccess: () => { + setTitle(''); + } + }); + + return ( + + + + {queueCount > 0 && ( + + {queueCount} queued + + )} + {isReplaying && ( + + Replaying… + + )} + {isDemoOffline && ( + Offline + )} + + + + setTitle(e.target.value)} + style={{ flex: 1 }} + /> + + + + {isDemoOffline && ( + + Offline mode — mutations are queued and will replay when + you go back online. 
+ + )} + {queueCount > 0 && ( + + Queued: {queueCount} mutation(s) + + )} + + offlineQueue() middleware — automatic queue and replay + + + ); +} + // ── Page ──────────────────────────────────────────────────────────────── export default function ReactQueryPage() { @@ -538,6 +711,8 @@ export default function ReactQueryPage() { + + ); diff --git a/demos/todo-frontend/src/features/todos/TodoDetailPage.tsx b/demos/todo-frontend/src/features/todos/TodoDetailPage.tsx index f22ddabb..e095545c 100644 --- a/demos/todo-frontend/src/features/todos/TodoDetailPage.tsx +++ b/demos/todo-frontend/src/features/todos/TodoDetailPage.tsx @@ -1,4 +1,4 @@ -import { useCallback, useEffect, useState } from 'react'; +import { useCallback, useEffect, useRef, useState } from 'react'; import { useNavigate, useParams } from 'react-router'; import { Badge, @@ -20,7 +20,6 @@ import { ApiError, isTimeoutError, isNetworkError } from '@cleverbrush/client'; import { client } from '../../api/client'; type TodoEvent = Parameters[0]['body']; -import { loadToken } from '../../lib/http-client'; import { ConfirmDialog } from '../../components/ConfirmDialog'; type TodoWithAuthor = Awaited>; @@ -46,6 +45,11 @@ export function TodoDetailPage() { const [eventLoading, setEventLoading] = useState(false); const [eventResult, setEventResult] = useState(null); + // Attachment upload + const fileInputRef = useRef(null); + const [selectedFile, setSelectedFile] = useState(null); + const [uploadLoading, setUploadLoading] = useState(false); + // User list for the "assigned" picker const [users, setUsers] = useState>([]); @@ -117,18 +121,46 @@ export function TodoDetailPage() { } }; + const handleFileSelect = (e: React.ChangeEvent) => { + const file = e.target.files?.[0] ?? 
null; + setSelectedFile(file); + }; + + const handleUpload = async () => { + if (!selectedFile || !id) return; + setUploadLoading(true); + setError(null); + try { + await client.todos.uploadAttachment({ + params: { id: Number(id) }, + body: {}, + files: { attachment: selectedFile } + }); + setSelectedFile(null); + if (fileInputRef.current) fileInputRef.current.value = ''; + await load(); + } catch (e) { + setError(e instanceof ApiError ? e.message : 'Upload failed.'); + } finally { + setUploadLoading(false); + } + }; + const handleDownload = async () => { try { - const token = loadToken(); - const resp = await fetch(`/api/todos/${id}/attachment`, { - headers: token ? { Authorization: `Bearer ${token}` } : {} + const blob = await client.todos.downloadAttachment.file({ + params: { id: Number(id) } }); - if (!resp.ok) throw new Error('Download failed'); - const blob = await resp.blob(); + + // Extract filename from Content-Disposition header + // (not directly available from Blob, use todo attachment name) + const filename = + data?.todo.attachmentName ?? `todo-${id}-download`; + const url = URL.createObjectURL(blob); const a = document.createElement('a'); a.href = url; - a.download = `todo-${id}.txt`; + a.download = filename; a.click(); URL.revokeObjectURL(url); } catch { @@ -250,8 +282,65 @@ export function TodoDetailPage() { + {/* Attachment upload */} + + Attachment + {todo?.attachmentName ? ( + + + {todo.attachmentName} + + + {todo.attachmentMimeType} + {todo.attachmentSize + ? ` — ${(todo.attachmentSize / 1024).toFixed(1)} KB` + : ''} + + + ) : ( + + No attachment uploaded. 
+ + )} + + + + + {selectedFile && ( + + {selectedFile.name} ({(selectedFile.size / 1024).toFixed(1)} KB) + + )} + {selectedFile && ( + + )} + {todo?.attachmentName && ( + + )} + + + - diff --git a/demos/todo-frontend/vite.config.ts b/demos/todo-frontend/vite.config.ts index 1c2c55a6..4931fa12 100644 --- a/demos/todo-frontend/vite.config.ts +++ b/demos/todo-frontend/vite.config.ts @@ -2,12 +2,28 @@ import { defineConfig } from 'vite'; import react from '@vitejs/plugin-react'; import { resolve } from 'path'; +const clientSrc = resolve(__dirname, '../../libs/client/src'); + export default defineConfig({ plugins: [react()], resolve: { alias: { // Resolve backend contract from source during development - '@cleverbrush/todo-backend/contract': resolve(__dirname, '../todo-backend/src/contract.ts') + '@cleverbrush/todo-backend/contract': resolve(__dirname, '../todo-backend/src/contract.ts'), + // Resolve @cleverbrush/client sub-paths from TypeScript source so that + // dev-server changes take effect immediately (Vite HMR) without + // requiring a dist rebuild + server restart. + // More-specific sub-paths must come before the base package entry. 
+ '@cleverbrush/client/react': `${clientSrc}/react.ts`, + '@cleverbrush/client/retry': `${clientSrc}/retry.ts`, + '@cleverbrush/client/timeout': `${clientSrc}/timeout.ts`, + '@cleverbrush/client/dedupe': `${clientSrc}/dedupe.ts`, + '@cleverbrush/client/idempotency': `${clientSrc}/idempotency.ts`, + '@cleverbrush/client/cache': `${clientSrc}/cache.ts`, + '@cleverbrush/client/batching': `${clientSrc}/batching.ts`, + '@cleverbrush/client/optimistic-update': `${clientSrc}/optimisticUpdate.ts`, + '@cleverbrush/client/offline-queue': `${clientSrc}/offlineQueue.ts`, + '@cleverbrush/client': `${clientSrc}/index.ts` }, // Force a single instance of these packages so instanceof checks work // across @cleverbrush/react-form (which bundles its own copy) and app code diff --git a/libs/client/README.md b/libs/client/README.md index 8eef9a08..d7364325 100644 --- a/libs/client/README.md +++ b/libs/client/README.md @@ -243,6 +243,43 @@ throttlingCache({ Caches successful GET responses for a configurable TTL. Subsequent requests within the TTL receive a cloned cached response without hitting the network. +### Cache Tags — `@cleverbrush/client/cache` + +Tag-based HTTP caching with automatic invalidation driven by server-side endpoint +annotations (`.cacheTag()` / `.clearsCacheTag()`). Replaces manual invalidation callbacks — mutations +automatically clear cache entries matching the endpoint's declared tag names. + +```ts +import { cacheTags } from '@cleverbrush/client/cache'; + +const client = createClient(api, { + middlewares: [cacheTags({ defaultTtl: 5000 })], +}); + +// Populates cache entries for 'todo-list' and 'todo:id=1' tags. +await client.todos.list({ query: { page: 1 } }); +await client.todos.get({ params: { id: 1 } }); + +// Mutation — automatically invalidates both tags. +await client.todos.update({ params: { id: 1 }, body: { title: 'Updated' } }); + +// Triggers network fetch — cache was cleared. 
+await client.todos.list({ query: { page: 1 } }); +``` + +| Option | Type | Default | Description | +|--------|------|---------|-------------| +| `defaultTtl` | `number` | `0` | TTL in ms for tags without explicit TTL. `0` = invalidation-only. | +| `ttlByTag` | `Record` | `{}` | Per-tag TTL overrides. | +| `condition` | `(Response) => boolean` | `response.ok` | Predicate controlling which responses are cached. | + +When used with `@cleverbrush/client/react`, TanStack Query's `useMutation` hooks +automatically invalidate the query cache for the affected group — no manual +`queryClient.invalidateQueries()` needed. + +See the [server-side cache tags](/server#cache-tags) section for how to declare +tags on your endpoints. + ## Per-Call Overrides Override middleware options for individual calls: @@ -683,6 +720,47 @@ function InfiniteTodos() { } ``` +### Optimistic Mutations + +The `useOptimisticMutation` hook wraps TanStack Query's `useMutation` with automatic cache snapshot, optimistic update, and rollback on error. This replaces the manual `onMutate`/`onError`/`onSettled` pattern. + +```tsx +import { useOptimisticMutation } from '@cleverbrush/client/react'; + +function TodoItem({ todo }: { todo: Todo }) { + const toggleMutation = useOptimisticMutation(client.todos.update, { + queryKey: client.todos.list.queryKey(), + optimisticUpdate: (oldTodos, variables) => + (oldTodos ?? []).map(t => + t.id === variables.params.id + ? { ...t, completed: variables.body.completed } + : t + ), + onSettled: () => { + queryClient.invalidateQueries({ + queryKey: client.todos.queryKey() + }); + } + }); + + return ( + + ); +} +``` + +The hook handles: +1. **Cancel** — cancels in-flight queries for the given `queryKey` +2. **Snapshot** — captures the current cache state +3. **Optimistic update** — applies your `optimisticUpdate` function +4. **Rollback** — restores the snapshot if the mutation fails +5. 
**Invalidate** — invalidates the cache when the mutation settles
+
 ### Error Handling in Hooks
 
 ```tsx
@@ -693,7 +771,7 @@ const { error } = client.todos.list.useQuery();
 if (isApiError(error)) {
   console.log(error.status, error.body);
 } else if (isTimeoutError(error)) {
-  console.log('Timed out after', error.timeout, 'ms');
+  console.log('Timed out after', error.timeout, 'ms');
 }
 ```
 
@@ -731,10 +809,134 @@ if (isApiError(error)) {
 | `@cleverbrush/client/retry` | Retry middleware with exponential backoff |
 | `@cleverbrush/client/timeout` | AbortController-based timeout middleware |
 | `@cleverbrush/client/dedupe` | Request deduplication middleware |
-| `@cleverbrush/client/cache` | Throttling cache middleware |
+| `@cleverbrush/client/idempotency` | Idempotency key middleware (deduplicates mutations) |
+| `@cleverbrush/client/cache` | Throttling cache + tag-based cache invalidation middleware |
 | `@cleverbrush/client/batching` | Request batching middleware |
+| `@cleverbrush/client/optimistic-update` | Optimistic update middleware (mutation tracking) |
+| `@cleverbrush/client/offline-queue` | Offline queue middleware (queue + replay) |
 | `@cleverbrush/client/react` | TanStack Query hooks + unified client |
 
+## Optimistic Update Middleware
+
+```ts
+import { optimisticUpdate } from '@cleverbrush/client/optimistic-update';
+```
+
+Tags mutation requests (POST/PUT/PATCH/DELETE) with a unique ID and tracks network failures in an inspectable store. Designed to work with the `useOptimisticMutation` React hook.
+ +### Basic usage + +```ts +import { createClient } from '@cleverbrush/client'; +import { optimisticUpdate } from '@cleverbrush/client/optimistic-update'; + +const client = createClient(api, { + middlewares: [optimisticUpdate()] +}); +``` + +### Store inspection + +Pass a shared store to inspect failed mutations: + +```ts +import { optimisticUpdate, type OptimisticUpdateStore } from '@cleverbrush/client/optimistic-update'; + +const store: OptimisticUpdateStore = { failures: [] }; + +const client = createClient(api, { + middlewares: [optimisticUpdate({ store })] +}); + +// After a network error: +console.log(store.failures); +// → [{ id, url, init, error, timestamp }] +``` + +### Options + +| Option | Type | Default | Description | +|--------|------|---------|-------------| +| `store` | `OptimisticUpdateStore` | `{ failures: [] }` | Shared mutable store | +| `skip` | `(url, init) => boolean` | — | Skip tagging for specific requests | + +### Per-call override + +```ts +await client.todos.create({ + body: { title: 'test' }, + optimisticUpdate: { skip: true } // skip tagging for this call +}); +``` + +## Offline Queue Middleware + +```ts +import { offlineQueue } from '@cleverbrush/client/offline-queue'; +``` + +Queues mutation requests (POST/PUT/PATCH/DELETE) when the browser reports offline (`navigator.onLine`). Automatically replays queued mutations when connectivity is restored. + +### Important: middleware placement + +`offlineQueue()` must be the **outermost** middleware (first in the array) so that retry/timeout/etc. 
middlewares re-apply when queued mutations are replayed: + +```ts +middlewares: [ + offlineQueue(), // outermost + retry({ limit: 3 }), // re-applies on replay + timeout({ timeout: 10000 }), + batching(), +] +``` + +### Basic usage + +```ts +import { createClient } from '@cleverbrush/client'; +import { offlineQueue } from '@cleverbrush/client/offline-queue'; + +const client = createClient(api, { + middlewares: [offlineQueue()] +}); +``` + +### Store inspection + +Pass a shared store to observe queue state: + +```ts +import { offlineQueue, type OfflineQueueStore } from '@cleverbrush/client/offline-queue'; + +const store: OfflineQueueStore = { queue: [], isOnline: true, isReplaying: false }; + +const client = createClient(api, { + middlewares: [offlineQueue({ store })] +}); + +// Check queue status: +console.log(store.isOnline); // boolean +console.log(store.queue.length); // queued mutations +console.log(store.isReplaying); // currently replaying +``` + +### Options + +| Option | Type | Default | Description | +|--------|------|---------|-------------| +| `store` | `OfflineQueueStore` | `{ queue: [], isOnline: true, isReplaying: false }` | Shared mutable store | +| `skip` | `(url, init) => boolean` | — | Skip queue for specific requests | +| `maxRetries` | `number` | `3` | Max flush retries per queued item | + +### Per-call override + +```ts +await client.todos.create({ + body: { title: 'test' }, + offlineQueue: { skip: true } // bypass queue for this call +}); +``` + ## License BSD 3-Clause diff --git a/libs/client/package.json b/libs/client/package.json index 4d8ba57a..4268d2bb 100644 --- a/libs/client/package.json +++ b/libs/client/package.json @@ -63,6 +63,10 @@ "types": "./dist/dedupe.d.ts", "import": "./dist/dedupe.js" }, + "./idempotency": { + "types": "./dist/idempotency.d.ts", + "import": "./dist/idempotency.js" + }, "./cache": { "types": "./dist/cache.d.ts", "import": "./dist/cache.js" @@ -71,6 +75,14 @@ "types": "./dist/batching.d.ts", "import": 
"./dist/batching.js" }, + "./optimistic-update": { + "types": "./dist/optimisticUpdate.d.ts", + "import": "./dist/optimisticUpdate.js" + }, + "./offline-queue": { + "types": "./dist/offlineQueue.d.ts", + "import": "./dist/offlineQueue.js" + }, "./react": { "types": "./dist/react.d.ts", "import": "./dist/react.js" diff --git a/libs/client/src/cache.ts b/libs/client/src/cache.ts index 3e72f450..605e98ed 100644 --- a/libs/client/src/cache.ts +++ b/libs/client/src/cache.ts @@ -1,2 +1,4 @@ export type { CacheOptions } from './middlewares/cache.js'; export { throttlingCache } from './middlewares/cache.js'; +export type { CacheTagMiddlewareOptions } from './middlewares/cacheTags.js'; +export { cacheTags } from './middlewares/cacheTags.js'; diff --git a/libs/client/src/client.ts b/libs/client/src/client.ts index 091b6a8a..f85b012d 100644 --- a/libs/client/src/client.ts +++ b/libs/client/src/client.ts @@ -13,7 +13,7 @@ * @module */ -import type { ApiContract } from '@cleverbrush/server/contract'; +import type { ApiContract, FilePart } from '@cleverbrush/server/contract'; import { ApiError, NetworkError, WebError } from './errors.js'; import { composeMiddleware, PER_CALL_OPTIONS } from './middleware.js'; import { buildPath } from './path.js'; @@ -179,7 +179,7 @@ export function createClient( url: string; method: string; headers: Record; - body: string | undefined; + body: string | FormData | undefined; } { const meta = getMeta(ep); const method = meta.method.toUpperCase(); @@ -202,17 +202,58 @@ export function createClient( } // -- Body -- - let body: string | undefined; + let body: string | FormData | undefined; if (args?.body !== undefined && hasBody(method)) { - reqHeaders['Content-Type'] = JSON_CONTENT_TYPE; - body = JSON.stringify(args.body); + if (meta.fileUpload) { + // Build FormData for multipart uploads + const fd = new FormData(); + if ( + args.body && + typeof args.body === 'object' && + !(args.body instanceof Blob) + ) { + for (const [key, val] of 
Object.entries(args.body)) { + fd.append(key, String(val)); + } + } + // Append file fields from args.files + if (args.files) { + for (const [key, value] of Object.entries( + args.files as Record + )) { + if (value instanceof Blob) { + fd.append(key, value); + } else { + const fp = value as FilePart; + fd.append( + key, + new Blob([fp.buffer], { + type: fp.mimeType + }), + fp.filename + ); + } + } + } + body = fd; + // Let the browser set Content-Type with boundary + delete reqHeaders['Content-Type']; + } else { + reqHeaders['Content-Type'] = JSON_CONTENT_TYPE; + body = JSON.stringify(args.body); + } } return { url, method, headers: reqHeaders, body }; } // The actual fetch logic, shared by every endpoint proxy method. - async function execute(ep: any, args: any): Promise { + async function execute( + ep: any, + args: any, + groupName?: string, + endpointName?: string + ): Promise { const { url, method, @@ -230,10 +271,65 @@ export function createClient( const perCallOptions: Record = {}; if (args?.retry !== undefined) perCallOptions.retry = args.retry; if (args?.timeout !== undefined) perCallOptions.timeout = args.timeout; + if (args?.optimisticUpdate !== undefined) + perCallOptions.optimisticUpdate = args.optimisticUpdate; + if (args?.offlineQueue !== undefined) + perCallOptions.offlineQueue = args.offlineQueue; if (Object.keys(perCallOptions).length > 0) { (init as any)[PER_CALL_OPTIONS] = perCallOptions; } + // Attach endpoint metadata for middleware introspection + // (e.g. throttlingCache cache invalidation callbacks). + if (groupName && endpointName) { + const meta = getMeta(ep); + const tpl = meta.pathTemplate; + + let suffix = ''; + if (typeof tpl === 'string') { + suffix = tpl; + } else if (tpl && typeof (tpl as any).introspect === 'function') { + suffix = + (tpl as any).introspect().templateDefinition.literals[0] ?? 
+ ''; + } + const collectionPath = meta.basePath + suffix || '/'; + + let pathParamNames: string[] = []; + if ( + tpl && + typeof tpl !== 'string' && + typeof (tpl as any).introspect === 'function' + ) { + pathParamNames = (tpl as any) + .introspect() + .templateDefinition.segments.map((s: any) => s.path); + } + + const epMeta: Record = { + group: groupName, + endpoint: endpointName, + method: meta.method, + path: ep.path as string, + basePath: meta.basePath, + baseUrl, + collectionPath, + fullCollectionUrl: baseUrl.replace(/\/$/, '') + collectionPath, + pathParamNames, + params: args?.params ?? ({} as Record), + body: args?.body, + query: args?.query ?? ({} as Record), + headers: args?.headers ?? ({} as Record), + operationId: meta.operationId ?? null, + tags: meta.tags ?? [], + cacheTags: meta.cacheTags ?? [] + }; + + if (!(init as any).__endpointMeta) { + (init as any).__endpointMeta = epMeta; + } + } + // -- beforeRequest hooks -- await runBeforeRequest(hooks, url, init); @@ -382,9 +478,33 @@ export function createClient( ); } - // Regular HTTP endpoints return a callable with .stream() - const call = (args?: any) => execute(ep, args); + // Regular HTTP endpoints return a callable with .stream() and .file() + const call = (args?: any) => + execute(ep, args, groupName, endpointName); call.stream = (args?: any) => streamLines(ep, args); + call.file = async (args?: any): Promise => { + const { + url, + method, + headers: reqHeaders, + body + } = buildRequest(ep, args); + const init: RequestInit = { + method, + headers: reqHeaders, + body + }; + await runBeforeRequest(hooks, url, init); + const response = await composedFetch(url, init); + if (!response.ok) { + if (response.status === 401) onUnauthorized?.(); + throw new ApiError( + response.status, + response.statusText || `HTTP ${response.status}` + ); + } + return response.blob(); + }; return call; } }); diff --git a/libs/client/src/errors.ts b/libs/client/src/errors.ts index 7fc21b34..9f618578 100644 --- 
a/libs/client/src/errors.ts
+++ b/libs/client/src/errors.ts
@@ -153,6 +153,46 @@ export function isTimeoutError(error: unknown): error is TimeoutError {
  * }
  * ```
  */
 export function isNetworkError(error: unknown): error is NetworkError {
     return error instanceof NetworkError;
 }
+
+/**
+ * Error thrown when a request is attempted while the client is offline.
+ *
+ * Extends {@link NetworkError} so offline errors are caught by
+ * `isNetworkError()` checks.
+ *
+ * @example
+ * ```ts
+ * try {
+ *     await client.todos.list();
+ * } catch (err) {
+ *     if (isOfflineError(err)) {
+ *         console.log('Cannot make requests while offline');
+ *     }
+ * }
+ * ```
+ */
+export class OfflineError extends NetworkError {
+    constructor() {
+        super('Client is offline');
+        this.name = 'OfflineError';
+    }
+}
+
+/**
+ * Type guard for {@link OfflineError}.
+ *
+ * @example
+ * ```ts
+ * catch (err) {
+ *     if (isOfflineError(err)) {
+ *         console.log('Device is offline');
+ *     }
+ * }
+ * ```
+ */
+export function isOfflineError(error: unknown): error is OfflineError {
+    return error instanceof OfflineError;
+}
diff --git a/libs/client/src/idempotency.ts b/libs/client/src/idempotency.ts
new file mode 100644
index 00000000..81337ab6
--- /dev/null
+++ b/libs/client/src/idempotency.ts
@@ -0,0 +1,2 @@
+export type { IdempotencyOptions } from './middlewares/idempotency.js';
+export { idempotency } from './middlewares/idempotency.js';
diff --git a/libs/client/src/index.ts b/libs/client/src/index.ts
index 1bdda650..faf2c6fa 100644
--- a/libs/client/src/index.ts
+++ b/libs/client/src/index.ts
@@ -24,13 +24,15 @@ export {
     ApiError,
     isApiError,
     isNetworkError,
+    isOfflineError,
     isTimeoutError,
     isWebError,
     NetworkError,
+    OfflineError,
     TimeoutError,
     WebError
 } from './errors.js';
-export type { FetchLike, Middleware } from './middleware.js';
+export type { EndpointMeta, FetchLike, Middleware } from './middleware.js';
 export {
     composeMiddleware,
     getPerCallOptions,
@@ -43,6 +45,7 @@ export type {
     EndpointCall,
EndpointCallArgs, EndpointResponse, + FilePart, PerCallOverrides, Subscription, SubscriptionCall, diff --git a/libs/client/src/middleware.ts b/libs/client/src/middleware.ts index b36b14a0..2fbae2f9 100644 --- a/libs/client/src/middleware.ts +++ b/libs/client/src/middleware.ts @@ -115,3 +115,73 @@ export function getPerCallOptions( ): T | undefined { return (init as any)[PER_CALL_OPTIONS]?.[key] as T | undefined; } + +// --------------------------------------------------------------------------- +// Endpoint metadata +// --------------------------------------------------------------------------- + +/** + * Endpoint metadata carried through {@link PER_CALL_OPTIONS} on every request. + * + * Computed by the Proxy-based client at call time, this gives middleware + * access to the endpoint's structural info plus the actual call arguments + * without any URL parsing or regex. + * + * Used by {@link throttlingCache} for cache-invalidation callbacks. + */ +export interface EndpointMeta { + /** Contract group name, e.g. `"todos"`. */ + group: string; + /** Endpoint name within the group, e.g. `"update"`. */ + endpoint: string; + /** HTTP method in uppercase, e.g. `"PATCH"`. */ + method: string; + /** Path template with colon placeholders, e.g. `/api/todos/:id`. */ + path: string; + /** Resource base path, e.g. `/api/todos`. */ + basePath: string; + /** Resource collection path (basePath without param placeholders). */ + collectionPath: string; + /** Client base URL (e.g. `"http://localhost:3000"` or `""`). */ + baseUrl: string; + /** + * Full collection URL matching the HTTP cache key format. + * Computed as `baseUrl (stripped of trailing slash) + collectionPath`. + */ + fullCollectionUrl: string; + /** Names of path parameters, e.g. `["id"]`. */ + pathParamNames: string[]; + /** Actual route parameter values from the call, e.g. `{ id: 42 }`. */ + params: Readonly>; + /** Request body. */ + body: unknown; + /** Query parameters, e.g. `{ page: 1 }`. 
*/ + query: Readonly<Record<string, unknown>>; + /** OpenAPI operationId, or `null`. */ + operationId: string | null; + /** OpenAPI tags, or `[]`. */ + tags: readonly string[]; + /** + * Cache tag definitions from the endpoint's `.cacheTag()` calls. + * Each tag has a `name` and a map of `properties` (key → accessor). + * Used by the `cacheTags` middleware. + */ + cacheTags: ReadonlyArray<{ + name: string; + properties: Readonly< + Record< + string, + { + getValue(root: { + params: Record<string, unknown>; + body: unknown; + query: Record<string, unknown>; + headers: Record<string, unknown>; + }): { value?: unknown; success: boolean }; + } + > + >; + }>; + /** Request headers from the call, e.g. `{ 'x-request-id': 'abc' }`. */ + headers: Readonly<Record<string, unknown>>; +} diff --git a/libs/client/src/middlewares/cache.ts b/libs/client/src/middlewares/cache.ts index c188ee6b..5ab26bc3 100644 --- a/libs/client/src/middlewares/cache.ts +++ b/libs/client/src/middlewares/cache.ts @@ -18,7 +18,7 @@ * @module */ -import type { Middleware } from '../middleware.js'; +import type { EndpointMeta, Middleware } from '../middleware.js'; // --------------------------------------------------------------------------- // Types // --------------------------------------------------------------------------- @@ -61,8 +61,15 @@ export interface CacheOptions { * Return `null` to skip invalidation. * * By default, mutating requests do not invalidate the cache. + * + * The `meta` parameter carries endpoint metadata (group, endpoint, + * method, path, params, body, query, etc.) provided by the client proxy.
*/ - invalidate?: (url: string, init: RequestInit) => string | null; + invalidate?: ( + url: string, + init: RequestInit, + meta?: EndpointMeta + ) => string | null; } // --------------------------------------------------------------------------- @@ -106,8 +113,9 @@ const DEFAULT_CONDITION = (response: Response) => response.ok; * const client = createClient(api, { * middlewares: [throttlingCache({ * throttle: 2000, - * invalidate: (url, init) => { - * if (init.method !== 'GET') return `GET@${url}`; + * invalidate: (_url, _init, meta) => { + * if (meta && meta.method !== 'GET') + * return `GET@${meta.collectionPath}`; * return null; * }, * })], @@ -128,7 +136,10 @@ export function throttlingCache(options: CacheOptions = {}): Middleware { return next => (url, init) => { // Handle cache invalidation for mutating requests. if (invalidate) { - const invalidateKey = invalidate(url, init); + const meta = (init as any).__endpointMeta as + | EndpointMeta + | undefined; + const invalidateKey = invalidate(url, init, meta); if (invalidateKey !== null && invalidateKey !== undefined) { cache.delete(invalidateKey); } diff --git a/libs/client/src/middlewares/cacheTags.test.ts b/libs/client/src/middlewares/cacheTags.test.ts new file mode 100644 index 00000000..14545185 --- /dev/null +++ b/libs/client/src/middlewares/cacheTags.test.ts @@ -0,0 +1,497 @@ +import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'; +import type { EndpointMeta, FetchLike } from '../middleware.js'; +import { cacheTags } from './cacheTags.js'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function makeTagMeta( + tags: Array<{ + name: string; + properties: Record< + string, + { + getValue(root: any): { + value?: unknown; + success: boolean; + }; + } + >; + }>, + overrides: Partial<EndpointMeta> = {} +): EndpointMeta { + return { + group: 'test', + endpoint: 'test', + method:
'GET', + path: '/api/test', + basePath: '/api/test', + collectionPath: '/api/test', + baseUrl: '', + fullCollectionUrl: '/api/test', + pathParamNames: [], + params: {}, + body: undefined, + query: {}, + headers: {}, + operationId: null, + tags: [], + cacheTags: tags, + ...overrides + } as EndpointMeta; +} + +function makeInit(meta: EndpointMeta, method = 'GET'): RequestInit { + return { + method, + headers: {}, + __endpointMeta: meta + } as any; +} + +function makeConstAccessor(value: unknown) { + return { + getValue: () => ({ success: true, value }) + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('cacheTags middleware', () => { + beforeEach(() => { + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + // -- GET caching -------------------------------------------------------- + + test('caches GET response within TTL', async () => { + const fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + const meta = makeTagMeta([ + { + name: 'test-tag', + properties: { id: makeConstAccessor(42) } + } + ]); + const init = makeInit(meta); + + // First call — hits network + const r1 = await mw('/api/test', init); + expect(r1.status).toBe(200); + expect(fetch).toHaveBeenCalledTimes(1); + + // Second call within TTL — should be cached + const r2 = await mw('/api/test', init); + expect(r2.status).toBe(200); + expect(fetch).toHaveBeenCalledTimes(1); + }); + + test('fetches again after TTL expiry', async () => { + const fetch = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ v: 1 }), { + headers: { 'Content-Type': 'application/json' } + }) + ) + .mockResolvedValueOnce( + new Response(JSON.stringify({ v: 2 }), { + headers: { 'Content-Type': 
'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 1000 })(fetch); + + const meta = makeTagMeta([ + { + name: 'test-tag', + properties: { id: makeConstAccessor(1) } + } + ]); + const init = makeInit(meta); + + // First call + await mw('/api/test', init); + expect(fetch).toHaveBeenCalledTimes(1); + + // Advance past TTL + vi.advanceTimersByTime(1001); + + // Second call — should fetch again + await mw('/api/test', init); + expect(fetch).toHaveBeenCalledTimes(2); + }); + + test('does not cache when TTL is 0 (invalidates only)', async () => { + const fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 0 })(fetch); + + const meta = makeTagMeta([ + { + name: 'test-tag', + properties: { id: makeConstAccessor(42) } + } + ]); + const init = makeInit(meta); + + await mw('/api/test', init); + await mw('/api/test', init); + + // Both calls should hit the network + expect(fetch).toHaveBeenCalledTimes(2); + }); + + test('caches per unique key', async () => { + const fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + const meta1 = makeTagMeta([ + { + name: 'test-tag', + properties: { id: makeConstAccessor(1) } + } + ]); + const meta2 = makeTagMeta([ + { + name: 'test-tag', + properties: { id: makeConstAccessor(2) } + } + ]); + + await mw('/api/test', makeInit(meta1)); + await mw('/api/test', makeInit(meta2)); + + // Two different keys → two network calls + expect(fetch).toHaveBeenCalledTimes(2); + }); + + test('does not cache error responses', async () => { + const fetch = vi + .fn() + .mockResolvedValue(new Response('Not Found', { status: 404 })); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + const meta = makeTagMeta([ + { + name: 'test-tag', + properties: { id: 
makeConstAccessor(42) } + } + ]); + const init = makeInit(meta); + + await mw('/api/test', init); + await mw('/api/test', init); + + // Error responses not cached + expect(fetch).toHaveBeenCalledTimes(2); + }); + + test('uses per-tag TTL when configured', async () => { + const fetch = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ v: 1 }), { + headers: { 'Content-Type': 'application/json' } + }) + ) + .mockResolvedValueOnce( + new Response(JSON.stringify({ v: 2 }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ + defaultTtl: 0, + ttlByTag: { 'cached-tag': 2000 } + })(fetch); + + const meta = makeTagMeta([ + { + name: 'cached-tag', + properties: { id: makeConstAccessor(42) } + } + ]); + const init = makeInit(meta); + + // First call + await mw('/api/test', init); + expect(fetch).toHaveBeenCalledTimes(1); + + // Within 2000ms TTL + vi.advanceTimersByTime(500); + await mw('/api/test', init); + expect(fetch).toHaveBeenCalledTimes(1); + + // Past TTL + vi.advanceTimersByTime(1501); + await mw('/api/test', init); + expect(fetch).toHaveBeenCalledTimes(2); + }); + + // -- Invalidation ------------------------------------------------------- + + test('invalidates cache on mutating requests by tag name prefix', async () => { + const fetch = vi + .fn() + .mockResolvedValueOnce( + new Response(JSON.stringify({ v: 1 }), { + headers: { 'Content-Type': 'application/json' } + }) + ) + .mockResolvedValueOnce( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ) + .mockResolvedValueOnce( + new Response(JSON.stringify({ v: 2 }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + const getMeta = makeTagMeta([ + { + name: 'test-tag', + properties: { id: makeConstAccessor(42) } + } + ]); + + // GET — cache populated + await mw('/api/test', makeInit(getMeta, 'GET')); + 
expect(fetch).toHaveBeenCalledTimes(1); + + // POST — should invalidate 'test-tag' keys + await mw('/api/test', makeInit(getMeta, 'POST')); + expect(fetch).toHaveBeenCalledTimes(2); + + // GET again — should re-fetch after invalidation + await mw('/api/test', makeInit(getMeta, 'GET')); + expect(fetch).toHaveBeenCalledTimes(3); + }); + + test('invalidates exact key and all prefixed variants', async () => { + const fetch = vi.fn().mockImplementation(() => + Promise.resolve( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ) + ); + + const mw = cacheTags({ defaultTtl: 10000 })(fetch); + + // Cache entries with different ids under same tag + const meta1 = makeTagMeta([ + { name: 'todo', properties: { id: makeConstAccessor(1) } } + ]); + const meta2 = makeTagMeta([ + { name: 'todo', properties: { id: makeConstAccessor(2) } } + ]); + + await mw('/api/todo/1', makeInit(meta1, 'GET')); + await mw('/api/todo/2', makeInit(meta2, 'GET')); + expect(fetch).toHaveBeenCalledTimes(2); + + // Mutate todo:2 — should invalidate 'todo' prefix = all todo keys + await mw('/api/todo/2', makeInit(meta2, 'POST')); + expect(fetch).toHaveBeenCalledTimes(3); + + // Both GETs should re-fetch + await mw('/api/todo/1', makeInit(meta1, 'GET')); + await mw('/api/todo/2', makeInit(meta2, 'GET')); + expect(fetch).toHaveBeenCalledTimes(5); + }); + + test('does not invalidate when no cache tags', async () => { + const fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + const meta = makeTagMeta([]); + const getInit = makeInit(meta, 'GET'); + + // GET — passes through + await mw('/api/test', getInit); + expect(fetch).toHaveBeenCalledTimes(1); + + // POST — passes through + await mw('/api/test', makeInit(meta, 'POST')); + expect(fetch).toHaveBeenCalledTimes(2); + + // GET — still goes through (no caching 
without tags) + await mw('/api/test', getInit); + expect(fetch).toHaveBeenCalledTimes(3); + }); + + // -- Simple tags -------------------------------------------------------- + + test('simple tags (no properties) work as cache keys', async () => { + const fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + const meta = makeTagMeta([{ name: 'global', properties: {} }]); + + await mw('/api/test', makeInit(meta, 'GET')); + await mw('/api/test', makeInit(meta, 'GET')); + expect(fetch).toHaveBeenCalledTimes(1); + }); + + // -- Multiple tags ------------------------------------------------------ + + test('multiple tags: first matching cache hit is used', async () => { + const fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + const meta = makeTagMeta([ + { name: 'tag-a', properties: { id: makeConstAccessor(1) } }, + { name: 'tag-b', properties: { id: makeConstAccessor(2) } } + ]); + + // First request populates both tags + await mw('/api/test', makeInit(meta, 'GET')); + expect(fetch).toHaveBeenCalledTimes(1); + + // Second request — tag-a key still valid + await mw('/api/test', makeInit(meta, 'GET')); + expect(fetch).toHaveBeenCalledTimes(1); + }); + + // -- Headers-based tag -------------------------------------------------- + + test('uses headers in tag key computation', async () => { + const fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + // Two requests with different tenant headers + const meta1 = makeTagMeta( + [ + { + name: 'tenant', + properties: { + tenant: { + getValue: (root: any) => ({ + success: true, + value: 
root.headers?.['x-tenant'] + }) + } + } + } + ], + { headers: { 'x-tenant': 'acme' } } + ); + + const meta2 = makeTagMeta( + [ + { + name: 'tenant', + properties: { + tenant: { + getValue: (root: any) => ({ + success: true, + value: root.headers?.['x-tenant'] + }) + } + } + } + ], + { headers: { 'x-tenant': 'beta' } } + ); + + await mw('/api/test', makeInit(meta1, 'GET')); + await mw('/api/test', makeInit(meta2, 'GET')); + expect(fetch).toHaveBeenCalledTimes(2); + }); + + // -- Pass-through for non-tagged requests ------------------------------- + + test('passes through requests without cache tags', async () => { + const fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ ok: true }), { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + const init: RequestInit = { + method: 'GET', + headers: {} + }; + + await mw('/api/test', init); + await mw('/api/test', init); + expect(fetch).toHaveBeenCalledTimes(2); + }); + + // -- Cloned responses are independent ----------------------------------- + + test('cached responses are clones', async () => { + const responseBody = JSON.stringify({ data: 'test' }); + const fetch = vi.fn().mockResolvedValue( + new Response(responseBody, { + headers: { 'Content-Type': 'application/json' } + }) + ); + + const mw = cacheTags({ defaultTtl: 5000 })(fetch); + + const meta = makeTagMeta([ + { name: 'test', properties: { id: makeConstAccessor(1) } } + ]); + + const r1 = await mw('/api/test', makeInit(meta, 'GET')); + const body1 = await r1.text(); + + const r2 = await mw('/api/test', makeInit(meta, 'GET')); + const body2 = await r2.text(); + + expect(body1).toBe(responseBody); + expect(body2).toBe(responseBody); + expect(r1).not.toBe(r2); + }); +}); diff --git a/libs/client/src/middlewares/cacheTags.ts b/libs/client/src/middlewares/cacheTags.ts new file mode 100644 index 00000000..39910a49 --- /dev/null +++ b/libs/client/src/middlewares/cacheTags.ts @@ -0,0 
+1,226 @@ +/** + * Tag-based cache middleware for the `@cleverbrush/client`. + * + * Caches successful GET responses keyed by endpoint-defined cache tags. + * Mutating requests (POST, PUT, DELETE, PATCH) invalidate all cache + * entries whose key starts with each of the endpoint's tag names. + * + * @example + * ```ts + * import { createClient } from '@cleverbrush/client'; + * import { cacheTags } from '@cleverbrush/client/cache'; + * + * const client = createClient(api, { + * middlewares: [cacheTags({ defaultTtl: 0, ttlByTag: { 'todo-list': 5000 } })], + * }); + * ``` + * + * @module + */ + +import type { EndpointMeta, Middleware } from '../middleware.js'; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +/** + * Configuration for the {@link cacheTags} middleware. + */ +export interface CacheTagMiddlewareOptions { + /** + * Per-tag TTL map: `{ [tagName]: ttlMs }`. + * Tags not listed here fall back to `defaultTtl`. + */ + ttlByTag?: Record<string, number>; + + /** + * Default TTL in milliseconds for tags without an explicit TTL. + * Defaults to `0` (no caching — invalidation-only mode). + */ + defaultTtl?: number; + + /** + * Predicate that decides whether a request should be cached. + * Defaults to caching only successful responses (`response.ok`). + */ + condition?: (response: Response) => boolean; +} + +/** + * The root object passed to each `CacheTagPropertyAccessor.getValue()` call. + */ +interface TagRoot { + params: Record<string, unknown>; + body: unknown; + query: Record<string, unknown>; + headers: Record<string, unknown>; +} + +/** Shape of a serialised cache tag from endpoint metadata.
*/ +interface SerializedCacheTag { + name: string; + properties: Readonly< + Record< + string, + { + getValue(root: TagRoot): { + value?: unknown; + success: boolean; + }; + } + > + >; +} + +// --------------------------------------------------------------------------- +// Internals +// --------------------------------------------------------------------------- + +interface CacheEntry { + response: Response; + expiresAt: number; +} + +function isMutating(method: string): boolean { + return ['POST', 'PUT', 'DELETE', 'PATCH'].includes(method.toUpperCase()); +} + +/** + * Computes a deterministic cache key from a tag and request data. + * + * - Tags with no properties produce just the tag name. + * - Tags with properties produce `name:key1=val1,key2=val2` where + * keys are sorted alphabetically for determinism. + */ +function computeKey(tag: SerializedCacheTag, root: TagRoot): string { + const entries = Object.entries(tag.properties); + + if (entries.length === 0) { + return tag.name; + } + + const parts: string[] = []; + for (const [key, accessor] of entries.sort(([a], [b]) => + a.localeCompare(b) + )) { + const result = accessor.getValue(root); + if (result.success && result.value !== undefined) { + parts.push(`${key}=${String(result.value)}`); + } + } + + if (parts.length === 0) { + return tag.name; + } + + return `${tag.name}:${parts.join(',')}`; +} + +// --------------------------------------------------------------------------- +// Middleware +// --------------------------------------------------------------------------- + +/** + * Creates a tag-based cache middleware. + * + * On GET requests, the middleware inspects `__endpointMeta.cacheTags` to + * compute cache keys. If a valid (non-expired) cache entry exists, the + * cached response is returned immediately (cloned). + * + * On mutating requests (POST, PUT, DELETE, PATCH), all cache entries whose + * key starts with any of the endpoint's tag names are invalidated. 
+ * + * @param options - Cache configuration. + * @returns A {@link Middleware} that caches and invalidates by tag. + */ +export function cacheTags(options: CacheTagMiddlewareOptions = {}): Middleware { + const { + ttlByTag = {}, + defaultTtl = 0, + condition = (response: Response) => response.ok + } = options; + + const cache = new Map<string, CacheEntry>(); + + return next => (url, init) => { + const meta = (init as any).__endpointMeta as EndpointMeta | undefined; + const tags: readonly SerializedCacheTag[] | undefined = meta?.cacheTags; + const method = (init.method ?? 'GET').toUpperCase(); + + // -- Invalidation on mutating requests -- + if (isMutating(method) && tags && tags.length > 0) { + const root: TagRoot = { + params: (meta?.params as Record<string, unknown>) ?? {}, + body: meta?.body, + query: (meta?.query as Record<string, unknown>) ?? {}, + headers: (meta?.headers as Record<string, unknown>) ?? {} + }; + + for (const tag of tags) { + const tagKey = computeKey(tag, root); + // Invalidate the exact key and any prefixed variants + // (tag name prefix match handles dynamic property variants + // when the mutation didn't provide the same properties). + for (const [cachedKey] of cache) { + if ( + cachedKey === tagKey || + cachedKey.startsWith(tag.name) + ) { + cache.delete(cachedKey); + } + } + } + } + + // -- Cache lookup for GET requests -- + if (method === 'GET' && tags && tags.length > 0) { + const root: TagRoot = { + params: (meta?.params as Record<string, unknown>) ?? {}, + body: meta?.body, + query: (meta?.query as Record<string, unknown>) ?? {}, + headers: (meta?.headers as Record<string, unknown>) ??
{} + }; + + let foundEntry: CacheEntry | undefined; + + for (const tag of tags) { + const cacheKey = computeKey(tag, root); + const entry = cache.get(cacheKey); + if (entry && entry.expiresAt > Date.now()) { + foundEntry = entry; + break; + } + if (entry) { + cache.delete(cacheKey); + } + } + + if (foundEntry) { + return Promise.resolve(foundEntry.response.clone()); + } + + return next(url, init).then(response => { + if (condition(response)) { + for (const tag of tags) { + const cacheKey = computeKey(tag, root); + const ttl = + ttlByTag[tag.name] !== undefined + ? ttlByTag[tag.name] + : defaultTtl; + if (ttl > 0) { + cache.set(cacheKey, { + response: response.clone(), + expiresAt: Date.now() + ttl + }); + } + } + } + return response; + }); + } + + // -- Pass-through for non-cache-tagged or non-GET requests -- + return next(url, init); + }; +} diff --git a/libs/client/src/middlewares/idempotency.test.ts b/libs/client/src/middlewares/idempotency.test.ts new file mode 100644 index 00000000..672af9c6 --- /dev/null +++ b/libs/client/src/middlewares/idempotency.test.ts @@ -0,0 +1,135 @@ +import { describe, expect, test, vi } from 'vitest'; +import type { FetchLike } from '../middleware.js'; +import { idempotency } from './idempotency.js'; + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('idempotency middleware', () => { + test('adds X-Idempotency-Key header to POST requests', async () => { + const fetch = vi + .fn() + .mockResolvedValue(new Response('ok', { status: 201 })); + + const mw = idempotency()(fetch); + await mw('/api/todos', { method: 'POST', body: '{}' }); + + const call = fetch.mock.calls[0]; + const headers = call[1].headers as Headers; + const key = headers.get('X-Idempotency-Key'); + expect(key).toBeTruthy(); + // UUID v4 format: 8-4-4-4-12 hex chars + expect(key).toMatch( + 
/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/ + ); + }); + + test('adds key to PUT requests', async () => { + const fetch = vi.fn().mockResolvedValue(new Response('ok')); + + const mw = idempotency()(fetch); + await mw('/api/todos/1', { method: 'PUT', body: '{}' }); + + const headers = fetch.mock.calls[0][1].headers as Headers; + expect(headers.get('X-Idempotency-Key')).toBeTruthy(); + }); + + test('adds key to PATCH requests', async () => { + const fetch = vi.fn().mockResolvedValue(new Response('ok')); + + const mw = idempotency()(fetch); + await mw('/api/todos/1', { method: 'PATCH', body: '{}' }); + + expect( + (fetch.mock.calls[0][1].headers as Headers).get('X-Idempotency-Key') + ).toBeTruthy(); + }); + + test('adds key to DELETE requests', async () => { + const fetch = vi.fn().mockResolvedValue(new Response('ok')); + + const mw = idempotency()(fetch); + await mw('/api/todos/1', { method: 'DELETE' }); + + expect( + (fetch.mock.calls[0][1].headers as Headers).get('X-Idempotency-Key') + ).toBeTruthy(); + }); + + test('does not add key to GET requests', async () => { + const fetch = vi.fn().mockResolvedValue(new Response('ok')); + + const mw = idempotency()(fetch); + await mw('/api/todos', { method: 'GET' }); + + const headers = fetch.mock.calls[0][1].headers as Headers | undefined; + expect(headers?.get('X-Idempotency-Key') ?? 
null).toBeNull(); + }); + + test('reuses existing key if already present', async () => { + const fetch = vi.fn().mockResolvedValue(new Response('ok')); + + const existingKey = 'my-custom-key-123'; + const mw = idempotency()(fetch); + await mw('/api/todos', { + method: 'POST', + headers: new Headers({ 'X-Idempotency-Key': existingKey }) + }); + + const headers = fetch.mock.calls[0][1].headers as Headers; + expect(headers.get('X-Idempotency-Key')).toBe(existingKey); + }); + + test('preserves existing headers alongside idempotency key', async () => { + const fetch = vi.fn().mockResolvedValue(new Response('ok')); + + const mw = idempotency()(fetch); + await mw('/api/todos', { + method: 'POST', + headers: new Headers({ + Authorization: 'Bearer token', + 'Content-Type': 'application/json' + }) + }); + + const headers = fetch.mock.calls[0][1].headers as Headers; + expect(headers.get('Authorization')).toBe('Bearer token'); + expect(headers.get('Content-Type')).toBe('application/json'); + expect(headers.get('X-Idempotency-Key')).toBeTruthy(); + }); + + test('uses custom header name', async () => { + const fetch = vi.fn().mockResolvedValue(new Response('ok')); + + const mw = idempotency({ headerName: 'Idempotency-Key' })(fetch); + await mw('/api/todos', { method: 'POST', body: '{}' }); + + const headers = fetch.mock.calls[0][1].headers as Headers; + expect(headers.get('Idempotency-Key')).toBeTruthy(); + }); + + test('uses custom key generator', async () => { + const fetch = vi.fn().mockResolvedValue(new Response('ok')); + + let callCount = 0; + const mw = idempotency({ + keyGenerator: () => `custom-${++callCount}` + })(fetch); + + await mw('/api/todos', { method: 'POST' }); + + const headers = fetch.mock.calls[0][1].headers as Headers; + expect(headers.get('X-Idempotency-Key')).toBe('custom-1'); + }); + + test('custom condition can include GET', async () => { + const fetch = vi.fn().mockResolvedValue(new Response('ok')); + + const mw = idempotency({ condition: () => true 
})(fetch); + await mw('/api/todos', { method: 'GET' }); + + const headers = fetch.mock.calls[0][1].headers as Headers; + expect(headers.get('X-Idempotency-Key')).toBeTruthy(); + }); +}); diff --git a/libs/client/src/middlewares/idempotency.ts b/libs/client/src/middlewares/idempotency.ts new file mode 100644 index 00000000..4978c542 --- /dev/null +++ b/libs/client/src/middlewares/idempotency.ts @@ -0,0 +1,104 @@ +/** + * Idempotency middleware for the `@cleverbrush/client`. + * + * Automatically adds an `X-Idempotency-Key` header to mutating requests + * so the server can deduplicate replays. The same key is preserved across + * retries, ensuring retried requests are treated as the same operation. + * + * @module + */ + +import type { Middleware } from '../middleware.js'; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +/** + * Configuration for {@link idempotency}. + */ +export interface IdempotencyOptions { + /** + * Header name to use for the idempotency key. + * Defaults to `"X-Idempotency-Key"`. + */ + headerName?: string; + + /** + * Custom key generator. Receives `(url, init)` and returns a string. + * Defaults to generating a UUID v4. + * + * The key must be stable across retries of the same logical request. + * When not provided, a UUID is generated once and reused on retry. + */ + keyGenerator?: (url: string, init: RequestInit) => string; + + /** + * Predicate that decides whether a request should receive a key. + * Defaults to `true` for POST, PUT, PATCH, DELETE. + */ + condition?: (url: string, init: RequestInit) => boolean; +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/** + * Generates a random UUID v4 without external dependencies. 
+ */ +function uuid4(): string { + return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, c => { + const r = (Math.random() * 16) | 0; + const v = c === 'x' ? r : (r & 0x3) | 0x8; + return v.toString(16); + }); +} + +function isMutating(method: string): boolean { + return ['POST', 'PUT', 'DELETE', 'PATCH'].includes(method.toUpperCase()); +} + +// --------------------------------------------------------------------------- +// Middleware +// --------------------------------------------------------------------------- + +/** + * Creates an idempotency middleware for the client. + * + * Mutating requests automatically receive an `X-Idempotency-Key` header + * with a UUID v4 value. The key is generated once per request and reused + * across retries — so the server sees the same key for the same logical + * operation even when the client retries. + * + * @param options - Configuration. + * @returns A {@link Middleware} that adds idempotency keys. + * + * @example + * ```ts + * const client = createClient(api, { + * middlewares: [idempotency(), retry({ limit: 3 })], + * }); + * ``` + */ +export function idempotency(options: IdempotencyOptions = {}): Middleware { + const { + headerName = 'X-Idempotency-Key', + keyGenerator = uuid4, + condition = (_url, init) => + isMutating((init.method ?? 
'GET').toUpperCase()) + } = options; + + return next => (url, init) => { + if (!condition(url, init)) { + return next(url, init); + } + + const headers = new Headers(init.headers); + if (!headers.has(headerName.toLowerCase())) { + headers.set(headerName, keyGenerator(url, init)); + } + + return next(url, { ...init, headers }); + }; +} diff --git a/libs/client/src/middlewares/offlineQueue.test.ts b/libs/client/src/middlewares/offlineQueue.test.ts new file mode 100644 index 00000000..b9157a08 --- /dev/null +++ b/libs/client/src/middlewares/offlineQueue.test.ts @@ -0,0 +1,219 @@ +import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'; +import { offlineQueue } from './offlineQueue.js'; + +describe('offlineQueue middleware', () => { + function jsonResponse(data: unknown, status = 200): Response { + return new Response(JSON.stringify(data), { + status, + headers: { 'Content-Type': 'application/json' } + }); + } + + beforeEach(() => { + if (typeof navigator !== 'undefined') { + Object.defineProperty(navigator, 'onLine', { + configurable: true, + value: true + }); + } + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + test('passes GET requests through immediately when online', async () => { + const mw = offlineQueue(); + const next = vi.fn().mockResolvedValue(jsonResponse({ ok: true })); + + const response = await mw(next)('/api/todos', { method: 'GET' }); + + expect(response).toBeDefined(); + expect(next).toHaveBeenCalledTimes(1); + }); + + test('passes POST requests through when online', async () => { + const mw = offlineQueue(); + const next = vi.fn().mockResolvedValue(jsonResponse({ id: 1 })); + + const response = await mw(next)('/api/todos', { + method: 'POST', + body: '{"title":"test"}' + }); + + expect(await response.json()).toEqual({ id: 1 }); + expect(next).toHaveBeenCalledTimes(1); + }); + + test('queues POST requests when offline', async () => { + Object.defineProperty(navigator, 'onLine', { value: false }); + + const mw = 
offlineQueue(); + const next = vi.fn().mockResolvedValue(jsonResponse({ id: 1 })); + + const promise = mw(next)('/api/todos', { + method: 'POST', + body: '{"title":"test"}' + }); + + // Should NOT call next while offline + expect(next).not.toHaveBeenCalled(); + // Should return a pending promise + expect(promise).toBeInstanceOf(Promise); + }); + + test('store reflects queued requests', async () => { + Object.defineProperty(navigator, 'onLine', { value: false }); + + const store = { queue: [], isOnline: false, isReplaying: false }; + const mw = offlineQueue({ store }); + + mw(vi.fn())('/api/todos', { method: 'POST', body: '{}' }); + mw(vi.fn())('/api/todos/1', { method: 'DELETE' }); + + expect(store.queue).toHaveLength(2); + expect(store.queue[0].url).toBe('/api/todos'); + expect(store.queue[0].init.method).toBe('POST'); + expect(store.queue[1].url).toBe('/api/todos/1'); + expect(typeof store.queue[0].id).toBe('string'); + expect(typeof store.queue[0].timestamp).toBe('number'); + }); + + test('flushes queue when going back online via flush', async () => { + Object.defineProperty(navigator, 'onLine', { value: false }); + + const mw = offlineQueue(); + const next = vi.fn().mockResolvedValue(jsonResponse({ id: 1 })); + + const promise = mw(next)('/api/todos', { + method: 'POST', + body: '{"title":"test"}' + }); + + expect(next).not.toHaveBeenCalled(); + + // Simulate going online + Object.defineProperty(navigator, 'onLine', { value: true }); + window.dispatchEvent(new Event('online')); + + // Wait a tick for the flush to process + await vi.waitFor(() => { + expect(next).toHaveBeenCalled(); + }); + + // The promise should resolve + const response = await promise; + expect(response).toBeDefined(); + }); + + test('online event triggers automatic flush', async () => { + Object.defineProperty(navigator, 'onLine', { value: false }); + + const store = { queue: [], isOnline: false, isReplaying: false }; + const mw = offlineQueue({ store }); + const next = 
vi.fn().mockResolvedValue(jsonResponse({ ok: true })); + + mw(next)('/api/todos', { method: 'POST', body: '{}' }); + + Object.defineProperty(navigator, 'onLine', { value: true }); + window.dispatchEvent(new Event('online')); + + await vi.waitFor(() => { + expect(store.isOnline).toBe(true); + expect(next).toHaveBeenCalled(); + }); + }); + + test('offline event updates store.isOnline', () => { + const store = { queue: [], isOnline: true, isReplaying: false }; + offlineQueue({ store }); + + window.dispatchEvent(new Event('offline')); + + expect(store.isOnline).toBe(false); + }); + + test('skip predicate bypasses queue for specific requests', async () => { + Object.defineProperty(navigator, 'onLine', { value: false }); + + const mw = offlineQueue({ + skip: url => url.includes('skip-me') + }); + const next = vi.fn().mockResolvedValue(jsonResponse({ ok: true })); + + await mw(next)('/api/skip-me', { method: 'POST' }); + + expect(next).toHaveBeenCalledTimes(1); + }); + + test('works in non-browser environments (defaults online)', async () => { + const originalWindow = globalThis.window; + const originalNavigator = globalThis.navigator; + (globalThis as any).window = undefined; + (globalThis as any).navigator = undefined; + + try { + const mw = offlineQueue(); + const next = vi.fn().mockResolvedValue(jsonResponse({ ok: true })); + + const response = await mw(next)('/api/todos', { method: 'POST' }); + + expect(response).toBeDefined(); + expect(next).toHaveBeenCalledTimes(1); + } finally { + (globalThis as any).window = originalWindow; + (globalThis as any).navigator = originalNavigator; + } + }); + + test('store.isReplaying is true during flush', async () => { + Object.defineProperty(navigator, 'onLine', { value: false }); + + const store = { queue: [], isOnline: false, isReplaying: false }; + const mw = offlineQueue({ store }); + const next = vi + .fn() + .mockImplementation( + () => + new Promise(resolve => + setTimeout( + () => resolve(jsonResponse({ ok: true })), + 50 + 
) + ) + ); + + mw(next)('/api/todos', { method: 'POST', body: '{}' }); + + Object.defineProperty(navigator, 'onLine', { value: true }); + window.dispatchEvent(new Event('online')); + + expect(store.isReplaying).toBe(true); + + await vi.waitFor(() => { + expect(store.isReplaying).toBe(false); + }); + }); + + test('multiple queued requests are flushed in order', async () => { + Object.defineProperty(navigator, 'onLine', { value: false }); + + const mw = offlineQueue(); + const next = vi + .fn() + .mockImplementation(() => + Promise.resolve(jsonResponse({ ok: true })) + ); + + const p1 = mw(next)('/api/todos', { method: 'POST', body: '{"n":1}' }); + const p2 = mw(next)('/api/todos', { method: 'POST', body: '{"n":2}' }); + const p3 = mw(next)('/api/todos', { method: 'POST', body: '{"n":3}' }); + + Object.defineProperty(navigator, 'onLine', { value: true }); + window.dispatchEvent(new Event('online')); + + const responses = await Promise.all([p1, p2, p3]); + expect(responses).toHaveLength(3); + expect(next).toHaveBeenCalledTimes(3); + }); +}); diff --git a/libs/client/src/middlewares/offlineQueue.ts b/libs/client/src/middlewares/offlineQueue.ts new file mode 100644 index 00000000..088b45f9 --- /dev/null +++ b/libs/client/src/middlewares/offlineQueue.ts @@ -0,0 +1,126 @@ +import type { FetchLike, Middleware } from '../middleware.js'; + +export interface QueuedRequest { + id: string; + url: string; + init: RequestInit; + timestamp: number; + retryCount: number; +} + +export interface OfflineQueueStore { + queue: QueuedRequest[]; + isOnline: boolean; + isReplaying: boolean; +} + +export interface OfflineQueueOptions { + store?: OfflineQueueStore; + skip?: (url: string, init: RequestInit) => boolean; + maxRetries?: number; +} + +const MAX_FLUSH_RETRIES = 3; + +interface Deferred { + resolve: (value: Response | PromiseLike) => void; + reject: (reason: unknown) => void; +} + +function generateId(): string { + return 
`${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 10)}`; +} + +function isBrowser(): boolean { + return typeof window !== 'undefined' && typeof navigator !== 'undefined'; +} + +export function offlineQueue(options: OfflineQueueOptions = {}): Middleware { + const { + skip, + store = { queue: [], isOnline: true, isReplaying: false }, + maxRetries = MAX_FLUSH_RETRIES + } = options; + + const deferredMap = new Map(); + + let nextRef: FetchLike | null = null; + + function flushQueue(): void { + const next = nextRef; + if (!next || store.queue.length === 0) return; + + store.isReplaying = true; + const batch = store.queue.splice(0); + + Promise.all( + batch.map(async item => { + const deferred = deferredMap.get(item.id); + deferredMap.delete(item.id); + if (!deferred) return; + + try { + const init = { ...item.init }; + delete (init as any).signal; + const response = await next(item.url, init); + deferred.resolve(response); + } catch (err) { + if (item.retryCount < maxRetries) { + item.retryCount++; + store.queue.push(item); + deferredMap.set(item.id, deferred); + } else { + deferred.reject(err); + } + } + }) + ).finally(() => { + store.isReplaying = false; + }); + } + + store.isOnline = isBrowser() ? navigator.onLine : true; + + if (isBrowser()) { + window.addEventListener('online', () => { + store.isOnline = true; + flushQueue(); + }); + window.addEventListener('offline', () => { + store.isOnline = false; + }); + } + + return next => { + nextRef = next; + + return async (url, init) => { + if (skip?.(url, init)) { + return next(url, init); + } + + const method = (init.method ?? 
'GET').toUpperCase(); + if (method === 'GET') { + return next(url, init); + } + + if (!store.isOnline) { + const id = generateId(); + const queued: QueuedRequest = { + id, + url, + init: { ...init }, + timestamp: Date.now(), + retryCount: 0 + }; + store.queue.push(queued); + + return new Promise((resolve, reject) => { + deferredMap.set(id, { resolve, reject }); + }); + } + + return next(url, init); + }; + }; +} diff --git a/libs/client/src/middlewares/optimisticUpdate.test.ts b/libs/client/src/middlewares/optimisticUpdate.test.ts new file mode 100644 index 00000000..90b8b60d --- /dev/null +++ b/libs/client/src/middlewares/optimisticUpdate.test.ts @@ -0,0 +1,133 @@ +import { describe, expect, test, vi } from 'vitest'; +import { NetworkError } from '../errors.js'; +import { + OPTIMISTIC_MUTATION_ID, + optimisticUpdate +} from './optimisticUpdate.js'; + +describe('optimisticUpdate middleware', () => { + function jsonResponse(data: unknown, status = 200): Response { + return new Response(JSON.stringify(data), { + status, + headers: { 'Content-Type': 'application/json' } + }); + } + + test('passes GET requests through without tagging', async () => { + const mw = optimisticUpdate(); + const next = vi.fn().mockResolvedValue(jsonResponse({ ok: true })); + + const response = await mw(next)('/api/todos', { method: 'GET' }); + + expect(response).toBeDefined(); + expect(next).toHaveBeenCalledTimes(1); + const init = next.mock.calls[0][1] as any; + expect(init[OPTIMISTIC_MUTATION_ID]).toBeUndefined(); + }); + + test('tags POST requests with a mutation ID', async () => { + const mw = optimisticUpdate(); + const next = vi.fn().mockResolvedValue(jsonResponse({ id: 1 })); + + await mw(next)('/api/todos', { + method: 'POST', + body: '{"title":"test"}' + }); + + const init = next.mock.calls[0][1] as any; + expect(init[OPTIMISTIC_MUTATION_ID]).toBeDefined(); + expect(typeof init[OPTIMISTIC_MUTATION_ID]).toBe('string'); + }); + + test.each(['PUT', 'PATCH', 'DELETE'])('tags %s 
requests', async method => { + const mw = optimisticUpdate(); + const next = vi.fn().mockResolvedValue(jsonResponse({})); + + await mw(next)('/api/todos/1', { method }); + + const init = next.mock.calls[0][1] as any; + expect(init[OPTIMISTIC_MUTATION_ID]).toBeDefined(); + }); + + test('skip predicate bypasses tagging', async () => { + const mw = optimisticUpdate({ + skip: url => url.includes('skip-me') + }); + const next = vi.fn().mockResolvedValue(jsonResponse({})); + + await mw(next)('/api/skip-me', { method: 'POST' }); + + const init = next.mock.calls[0][1] as any; + expect(init[OPTIMISTIC_MUTATION_ID]).toBeUndefined(); + }); + + test('captures NetworkError in the store', async () => { + const store = { failures: [] }; + const mw = optimisticUpdate({ store }); + const next = vi.fn().mockRejectedValue(new NetworkError('offline')); + + await expect( + mw(next)('/api/todos', { method: 'POST' }) + ).rejects.toThrow(NetworkError); + + expect(store.failures).toHaveLength(1); + expect(store.failures[0].url).toBe('/api/todos'); + expect(store.failures[0].error).toBeInstanceOf(NetworkError); + expect(store.failures[0].id).toBeDefined(); + expect(store.failures[0].timestamp).toBeDefined(); + }); + + test('does NOT capture ApiError (HTTP errors) in the store', async () => { + const store = { failures: [] }; + const mw = optimisticUpdate({ store }); + const next = vi.fn().mockResolvedValue( + new Response('Not Found', { + status: 404, + statusText: 'Not Found' + }) + ); + + const response = await mw(next)('/api/todos/999', { method: 'DELETE' }); + + expect(response.status).toBe(404); + expect(store.failures).toHaveLength(0); + }); + + test('captures non-NetworkError rejections but does NOT add to store', async () => { + const store = { failures: [] }; + const mw = optimisticUpdate({ store }); + const next = vi.fn().mockRejectedValue(new Error('Something broke')); + + await expect( + mw(next)('/api/todos', { method: 'POST' }) + ).rejects.toThrow('Something broke'); + + 
expect(store.failures).toHaveLength(0); + }); + + test('uses default internal store when none provided', async () => { + const mw = optimisticUpdate(); + const next = vi.fn().mockRejectedValue(new NetworkError('offline')); + + await expect( + mw(next)('/api/todos', { method: 'POST' }) + ).rejects.toThrow(NetworkError); + + // Middleware still works without a custom store + }); + + test('multiple failures accumulate in the store', async () => { + const store = { failures: [] }; + const mw = optimisticUpdate({ store }); + const next = vi.fn().mockRejectedValue(new NetworkError('offline')); + + await expect( + mw(next)('/api/todos/1', { method: 'DELETE' }) + ).rejects.toThrow(NetworkError); + await expect( + mw(next)('/api/todos', { method: 'POST' }) + ).rejects.toThrow(NetworkError); + + expect(store.failures).toHaveLength(2); + }); +}); diff --git a/libs/client/src/middlewares/optimisticUpdate.ts b/libs/client/src/middlewares/optimisticUpdate.ts new file mode 100644 index 00000000..53951f0c --- /dev/null +++ b/libs/client/src/middlewares/optimisticUpdate.ts @@ -0,0 +1,59 @@ +import { isNetworkError } from '../errors.js'; +import type { Middleware } from '../middleware.js'; + +export const OPTIMISTIC_MUTATION_ID = Symbol('optimistic-mutation-id'); + +export interface OptimisticFailure { + id: string; + url: string; + init: RequestInit; + error: Error; + timestamp: number; +} + +export interface OptimisticUpdateStore { + failures: OptimisticFailure[]; +} + +export interface OptimisticUpdateOptions { + store?: OptimisticUpdateStore; + skip?: (url: string, init: RequestInit) => boolean; +} + +function generateId(): string { + return `${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 10)}`; +} + +export function optimisticUpdate( + options: OptimisticUpdateOptions = {} +): Middleware { + const { skip, store = { failures: [] } } = options; + + return next => async (url, init) => { + const method = (init.method ?? 
'GET').toUpperCase(); + if (method === 'GET') { + return next(url, init); + } + if (skip?.(url, init)) { + return next(url, init); + } + + const mutationId = generateId(); + (init as any)[OPTIMISTIC_MUTATION_ID] = mutationId; + + try { + return await next(url, init); + } catch (err) { + if (isNetworkError(err)) { + store.failures.push({ + id: mutationId, + url, + init: { ...init }, + error: err instanceof Error ? err : new Error(String(err)), + timestamp: Date.now() + }); + } + throw err; + } + }; +} diff --git a/libs/client/src/offlineQueue.ts b/libs/client/src/offlineQueue.ts new file mode 100644 index 00000000..620c59f2 --- /dev/null +++ b/libs/client/src/offlineQueue.ts @@ -0,0 +1,6 @@ +export type { + OfflineQueueOptions, + OfflineQueueStore, + QueuedRequest +} from './middlewares/offlineQueue.js'; +export { offlineQueue } from './middlewares/offlineQueue.js'; diff --git a/libs/client/src/optimisticUpdate.ts b/libs/client/src/optimisticUpdate.ts new file mode 100644 index 00000000..6f59d0af --- /dev/null +++ b/libs/client/src/optimisticUpdate.ts @@ -0,0 +1,9 @@ +export type { + OptimisticFailure, + OptimisticUpdateOptions, + OptimisticUpdateStore +} from './middlewares/optimisticUpdate.js'; +export { + OPTIMISTIC_MUTATION_ID, + optimisticUpdate +} from './middlewares/optimisticUpdate.js'; diff --git a/libs/client/src/react/createClient.ts b/libs/client/src/react/createClient.ts index ac9b6448..e4926d09 100644 --- a/libs/client/src/react/createClient.ts +++ b/libs/client/src/react/createClient.ts @@ -45,6 +45,9 @@ import type { UnifiedClient } from './types.js'; * Each group also exposes a `queryKey()` method for group-level * cache invalidation. * + * When the `cacheTags` middleware is active, `useMutation` hooks + * automatically invalidate TanStack Query entries for the affected group. + * * @param contract - An API contract created with `defineApi()`. * @param options - Client options passed to `@cleverbrush/web`'s `createClient()`. 
* @returns A fully typed unified client proxy. @@ -99,6 +102,7 @@ export function createClient( // Attach streaming from the web client call.stream = (...args: any[]) => webEndpoint.stream(...args); + call.file = (...args: any[]) => webEndpoint.file(...args); // Attach TanStack Query hooks call.useQuery = createUseQuery(webClient, group, endpoint); @@ -112,7 +116,12 @@ export function createClient( group, endpoint ); - call.useMutation = createUseMutation(webClient, group, endpoint); + call.useMutation = createUseMutation( + webClient, + group, + endpoint, + extractCacheTagNames(contract, group, endpoint) + ); call.queryKey = createQueryKey(group, endpoint); call.prefetch = createPrefetch(webClient, group, endpoint); @@ -146,3 +155,20 @@ export function createClient( } }); } + +function extractCacheTagNames( + contract: any, + group: string, + endpoint: string +): string[] | undefined { + try { + const ep = contract[group]?.[endpoint]; + if (!ep || typeof ep.introspect !== 'function') return undefined; + const meta = ep.introspect(); + const tags: Array<{ name: string }> = meta.cacheTags ?? []; + if (tags.length === 0) return undefined; + return tags.map(t => t.name); + } catch { + return undefined; + } +} diff --git a/libs/client/src/react/hooks.test.ts b/libs/client/src/react/hooks.test.ts index 1ee820b6..8ced4b8e 100644 --- a/libs/client/src/react/hooks.test.ts +++ b/libs/client/src/react/hooks.test.ts @@ -14,12 +14,14 @@ function mockEndpoint(meta: { pathTemplate?: | string | { serialize: (p: Record) => string }; + cacheTags?: ReadonlyArray<{ name: string }>; }) { return { introspect: () => ({ method: meta.method, basePath: meta.basePath, - pathTemplate: meta.pathTemplate ?? '' + pathTemplate: meta.pathTemplate ?? '', + cacheTags: meta.cacheTags ?? 
[] }) }; } @@ -35,7 +37,11 @@ function createMockContract() { serialize: (p: Record) => `/${p.id}` } }), - create: mockEndpoint({ method: 'POST', basePath: '/api/todos' }) + create: mockEndpoint({ + method: 'POST', + basePath: '/api/todos', + cacheTags: [{ name: 'todo-list' }] + }) }, users: { me: mockEndpoint({ method: 'GET', basePath: '/api/users/me' }) @@ -210,6 +216,74 @@ describe('hooks integration', () => { await waitFor(() => expect(onError).toHaveBeenCalledOnce()); }); + + test('invalidates query cache on success when endpoint has cacheTags', async () => { + const invalidateSpy = vi.spyOn(queryClient, 'invalidateQueries'); + mockFetch.mockResolvedValueOnce(jsonResponse({ id: 1 }, 201)); + + const { result } = renderHook( + () => queryApi.todos.create.useMutation(), + { wrapper: createWrapper(queryClient) } + ); + + await act(async () => { + result.current.mutate({ body: { title: 'Test' } } as any); + }); + + await waitFor(() => + expect(invalidateSpy).toHaveBeenCalledWith({ + queryKey: ['@cleverbrush', 'todos'] + }) + ); + }); + + test('does not invalidate when endpoint has no cacheTags', async () => { + const invalidateSpy = vi.spyOn(queryClient, 'invalidateQueries'); + mockFetch.mockResolvedValueOnce(jsonResponse({ id: 1 }, 201)); + + const { result } = renderHook( + () => queryApi.todos.list.useMutation(), + { wrapper: createWrapper(queryClient) } + ); + + await act(async () => { + result.current.mutate({} as any); + }); + + // Wait a tick to ensure no calls + await vi.waitFor( + () => + expect(invalidateSpy).not.toHaveBeenCalledWith({ + queryKey: ['@cleverbrush', 'todos'] + }), + { timeout: 1000 } + ); + }); + + test('still calls onSuccess alongside auto-invalidation', async () => { + const onSuccess = vi.fn(); + const invalidateSpy = vi.spyOn(queryClient, 'invalidateQueries'); + mockFetch.mockResolvedValueOnce(jsonResponse({ id: 1 }, 201)); + + const { result } = renderHook( + () => + queryApi.todos.create.useMutation({ + onSuccess + }), + { wrapper: 
createWrapper(queryClient) } + ); + + await act(async () => { + result.current.mutate({ body: { title: 'Test' } } as any); + }); + + await waitFor(() => { + expect(onSuccess).toHaveBeenCalledOnce(); + expect(invalidateSpy).toHaveBeenCalledWith({ + queryKey: ['@cleverbrush', 'todos'] + }); + }); + }); }); // -- prefetch ----------------------------------------------------------- diff --git a/libs/client/src/react/hooks.ts b/libs/client/src/react/hooks.ts index 99c60a63..54c9aec2 100644 --- a/libs/client/src/react/hooks.ts +++ b/libs/client/src/react/hooks.ts @@ -13,6 +13,7 @@ import { useInfiniteQuery, useMutation, useQuery, + useQueryClient, useSuspenseQuery } from '@tanstack/react-query'; import type { WebError } from '../index.js'; @@ -153,18 +154,42 @@ export function createUseInfiniteQuery( /** * Creates a `useMutation` hook for the given endpoint. * + * When `cacheTagNames` is provided, the hook automatically invalidates + * TanStack Query entries for the endpoint's group on mutation success + * — no manual `queryClient.invalidateQueries()` needed. + * + * @param webClient - The underlying typed web client. + * @param group - API contract group name (e.g. `"todos"`). + * @param endpoint - Endpoint name within the group (e.g. `"create"`). + * @param cacheTagNames - Optional cache tag names declared on the + * endpoint via `.clearsCacheTag()`. When non-empty, triggers automatic + * `queryClient.invalidateQueries()` on mutation success. 
* @internal */ export function createUseMutation( webClient: AnyClient, group: string, - endpoint: string + endpoint: string, + cacheTagNames?: readonly string[] ) { return function hookUseMutation(options?: any): any { + const queryClient = useQueryClient(); + return useMutation({ ...options, mutationFn: (args: any) => - callEndpoint(webClient, group, endpoint, args) + callEndpoint(webClient, group, endpoint, args), + onSuccess: (...args: any[]) => { + options?.onSuccess?.(...args); + + // Auto-invalidate TanStack Query cache when endpoint + // declares cache tags and cacheTags middleware is active. + if (cacheTagNames && cacheTagNames.length > 0) { + queryClient.invalidateQueries({ + queryKey: ['@cleverbrush', group] + }); + } + } }); }; } diff --git a/libs/client/src/react/index.ts b/libs/client/src/react/index.ts index 492778bd..b29cc600 100644 --- a/libs/client/src/react/index.ts +++ b/libs/client/src/react/index.ts @@ -27,6 +27,8 @@ */ export { createClient } from './createClient.js'; +export type { OptimisticMutationConfig } from './optimisticMutation.js'; +export { useOptimisticMutation } from './optimisticMutation.js'; export { buildGroupQueryKey, buildQueryKey, diff --git a/libs/client/src/react/optimisticMutation.test.ts b/libs/client/src/react/optimisticMutation.test.ts new file mode 100644 index 00000000..0af4ac98 --- /dev/null +++ b/libs/client/src/react/optimisticMutation.test.ts @@ -0,0 +1,250 @@ +import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; +import { act, renderHook, waitFor } from '@testing-library/react'; +import { createElement, type ReactNode } from 'react'; +import { beforeEach, describe, expect, test, vi } from 'vitest'; +import { useOptimisticMutation } from './optimisticMutation.js'; + +describe('useOptimisticMutation', () => { + let queryClient: QueryClient; + + function createWrapper() { + return function Wrapper({ children }: { children: ReactNode }) { + return createElement( + QueryClientProvider, + { 
client: queryClient }, + children + ); + }; + } + + beforeEach(() => { + queryClient = new QueryClient({ + defaultOptions: { + queries: { retry: false }, + mutations: { retry: false } + } + }); + + // Seed some initial data + const initialData = [ + { id: 1, title: 'Buy milk', completed: false }, + { id: 2, title: 'Walk dog', completed: true } + ]; + queryClient.setQueryData(['todos', 'list'], initialData); + }); + + test('applies optimistic update before mutation resolves', async () => { + const mutationFn = vi + .fn() + .mockResolvedValue({ id: 1, title: 'Buy milk', completed: true }); + + const { result } = renderHook( + () => + useOptimisticMutation(mutationFn, { + queryKey: ['todos', 'list'], + optimisticUpdate: (oldData: any, args: any) => + (oldData ?? []).map((t: any) => + t.id === args.params.id + ? { ...t, completed: args.body.completed } + : t + ) + }), + { wrapper: createWrapper() } + ); + + await act(async () => { + result.current.mutate({ + params: { id: 1 }, + body: { completed: true } + }); + }); + + // The optimistic update should have been applied immediately + const cached = queryClient.getQueryData(['todos', 'list']) as any[]; + expect(cached[0].completed).toBe(true); + }); + + test('rolls back on error', async () => { + const mutationFn = vi.fn().mockRejectedValue(new Error('Server error')); + + const { result } = renderHook( + () => + useOptimisticMutation(mutationFn, { + queryKey: ['todos', 'list'], + optimisticUpdate: (oldData: any, args: any) => + (oldData ?? []).map((t: any) => + t.id === args.params.id + ? 
{ ...t, completed: args.body.completed } + : t + ) + }), + { wrapper: createWrapper() } + ); + + await act(async () => { + result.current.mutate({ + params: { id: 1 }, + body: { completed: true } + }); + }); + + await waitFor(() => expect(result.current.isError).toBe(true)); + + // Should have rolled back to original state + const cached = queryClient.getQueryData(['todos', 'list']) as any[]; + expect(cached[0].completed).toBe(false); + }); + + test('cancels in-flight queries before optimistic update', async () => { + const cancelSpy = vi.spyOn(queryClient, 'cancelQueries'); + const mutationFn = vi.fn().mockResolvedValue({ ok: true }); + + const { result } = renderHook( + () => + useOptimisticMutation(mutationFn, { + queryKey: ['todos', 'list'], + optimisticUpdate: (oldData: any) => oldData + }), + { wrapper: createWrapper() } + ); + + await act(async () => { + result.current.mutate({ body: { title: 'Test' } } as any); + }); + + expect(cancelSpy).toHaveBeenCalledWith({ queryKey: ['todos', 'list'] }); + }); + + test('calls onSuccess callback when mutation succeeds', async () => { + const onSuccess = vi.fn(); + const mutationFn = vi.fn().mockResolvedValue({ id: 3, title: 'New' }); + + const { result } = renderHook( + () => + useOptimisticMutation(mutationFn, { + queryKey: ['todos', 'list'], + optimisticUpdate: (oldData: any) => oldData, + onSuccess + }), + { wrapper: createWrapper() } + ); + + await act(async () => { + result.current.mutate({ body: { title: 'New' } } as any); + }); + + await waitFor(() => expect(onSuccess).toHaveBeenCalledOnce()); + }); + + test('calls onError callback when mutation fails', async () => { + const onError = vi.fn(); + const mutationFn = vi.fn().mockRejectedValue(new Error('Failed')); + + const { result } = renderHook( + () => + useOptimisticMutation(mutationFn, { + queryKey: ['todos', 'list'], + optimisticUpdate: (oldData: any) => oldData, + onError + }), + { wrapper: createWrapper() } + ); + + await act(async () => { + 
result.current.mutate({ body: { title: 'Fail' } } as any); + }); + + await waitFor(() => expect(onError).toHaveBeenCalledOnce()); + }); + + test('calls onSettled callback after mutation completes', async () => { + const onSettled = vi.fn(); + const mutationFn = vi.fn().mockResolvedValue({ ok: true }); + + const { result } = renderHook( + () => + useOptimisticMutation(mutationFn, { + queryKey: ['todos', 'list'], + optimisticUpdate: (oldData: any) => oldData, + onSettled + }), + { wrapper: createWrapper() } + ); + + await act(async () => { + result.current.mutate({ body: { title: 'Test' } } as any); + }); + + await waitFor(() => expect(onSettled).toHaveBeenCalledOnce()); + }); + + test('invalidates query cache on settled', async () => { + const invalidateSpy = vi.spyOn(queryClient, 'invalidateQueries'); + const mutationFn = vi.fn().mockResolvedValue({ ok: true }); + + const { result } = renderHook( + () => + useOptimisticMutation(mutationFn, { + queryKey: ['todos', 'list'], + optimisticUpdate: (oldData: any) => oldData + }), + { wrapper: createWrapper() } + ); + + await act(async () => { + result.current.mutate({ body: { title: 'Test' } } as any); + }); + + await waitFor(() => { + expect(invalidateSpy).toHaveBeenCalledWith({ + queryKey: ['todos', 'list'] + }); + }); + }); + + test('handles undefined cache data gracefully', async () => { + queryClient.removeQueries({ queryKey: ['todos', 'list'] }); + const mutationFn = vi.fn().mockResolvedValue({ ok: true }); + + const { result } = renderHook( + () => + useOptimisticMutation(mutationFn, { + queryKey: ['todos', 'list'], + optimisticUpdate: (_oldData: any, args: any) => { + return [args.body]; + } + }), + { wrapper: createWrapper() } + ); + + await act(async () => { + result.current.mutate({ body: { title: 'First' } } as any); + }); + + await waitFor(() => expect(result.current.isSuccess).toBe(true)); + + const cached = queryClient.getQueryData(['todos', 'list']) as any[]; + expect(cached).toEqual([{ title: 'First' }]); 
+ }); + + test('returns standard UseMutationResult shape', () => { + const mutationFn = vi.fn(); + + const { result } = renderHook( + () => + useOptimisticMutation(mutationFn, { + queryKey: ['todos', 'list'], + optimisticUpdate: (oldData: any) => oldData + }), + { wrapper: createWrapper() } + ); + + expect(result.current).toHaveProperty('mutate'); + expect(result.current).toHaveProperty('mutateAsync'); + expect(result.current).toHaveProperty('isPending'); + expect(result.current).toHaveProperty('isSuccess'); + expect(result.current).toHaveProperty('isError'); + expect(result.current).toHaveProperty('data'); + expect(result.current).toHaveProperty('error'); + }); +}); diff --git a/libs/client/src/react/optimisticMutation.ts b/libs/client/src/react/optimisticMutation.ts new file mode 100644 index 00000000..a53bbb2f --- /dev/null +++ b/libs/client/src/react/optimisticMutation.ts @@ -0,0 +1,68 @@ +import { + type QueryKey, + useMutation, + useQueryClient +} from '@tanstack/react-query'; +import { useRef } from 'react'; +import type { WebError } from '../errors.js'; + +export interface OptimisticMutationConfig { + queryKey: QueryKey; + optimisticUpdate: (oldData: TData | undefined, args: TArgs) => TData; + onSuccess?: (data: TData, args: TArgs) => void; + onError?: (error: WebError, args: TArgs) => void; + onSettled?: ( + data: TData | undefined, + error: WebError | null, + args: TArgs + ) => void; +} + +export function useOptimisticMutation( + mutationFn: (args: TArgs) => Promise, + config: OptimisticMutationConfig +) { + const queryClient = useQueryClient(); + const { queryKey, optimisticUpdate, onSuccess, onError, onSettled } = + config; + const pendingCount = useRef(0); + + return useMutation({ + mutationFn, + onMutate: async (args: TArgs) => { + pendingCount.current++; + await queryClient.cancelQueries({ queryKey }); + const previous = queryClient.getQueryData(queryKey); + queryClient.setQueryData(queryKey, (old: unknown) => + optimisticUpdate(old as TData | 
undefined, args) + ); + return { previous }; + }, + onError: (error: WebError, args: TArgs, context: unknown) => { + const ctx = context as { previous?: TData } | undefined; + if (ctx?.previous !== undefined) { + queryClient.setQueryData(queryKey, ctx.previous); + } + onError?.(error, args); + }, + onSuccess: (data: TData, args: TArgs) => { + onSuccess?.(data, args); + }, + onSettled: ( + data: TData | undefined, + error: WebError | null, + args: TArgs + ) => { + pendingCount.current--; + if (pendingCount.current === 0) { + // Cancel any background GET that may have snuck in (e.g. from a + // window-focus refetch) and potentially cached an intermediate + // server state, then trigger the definitive invalidation. + queryClient + .cancelQueries({ queryKey }) + .then(() => queryClient.invalidateQueries({ queryKey })); + onSettled?.(data, error, args); + } + } + }); +} diff --git a/libs/client/src/types.ts b/libs/client/src/types.ts index 9cf9b112..22624f7f 100644 --- a/libs/client/src/types.ts +++ b/libs/client/src/types.ts @@ -10,9 +10,15 @@ import type { InferType, SchemaBuilder } from '@cleverbrush/schema'; import type { EndpointBuilder, + FilePart, ApiContract as ServerApiContract, SubscriptionBuilder } from '@cleverbrush/server/contract'; + +// Re-export types shared between server and client +/** @see {@link FilePart} from `@cleverbrush/server` */ +export type { FilePart }; + import type { WebError } from './errors.js'; import type { Middleware } from './middleware.js'; @@ -50,11 +56,17 @@ type InferSchema = * Assembles the parts of the request argument object conditionally. * Only keys that carry data are included. */ -type CallArgsParts = - (HasKeys extends true ? { params: TParams } : {}) & - (TBody extends undefined ? {} : { body: InferSchema }) & - (HasKeys extends true ? { query: TQuery } : {}) & - (HasKeys extends true ? 
{ headers: THeaders } : {}); +type CallArgsParts< + TParams, + TBody, + TQuery, + THeaders, + TUpload extends boolean +> = (HasKeys extends true ? { params: TParams } : {}) & + (TBody extends undefined ? {} : { body: InferSchema }) & + (HasKeys extends true ? { query: TQuery } : {}) & + (HasKeys extends true ? { headers: THeaders } : {}) & + (TUpload extends true ? { files: Record } : {}); /** * Extracts the typed request argument shape from an `EndpointBuilder`. @@ -95,12 +107,13 @@ export type EndpointCallArgs = any, // TPrincipal any, // TRoles any, // TResponse - any // TResponses + any, // TResponses + infer TUpload // TUpload > ? HasKeys< - Simplify> + Simplify> > extends true - ? Simplify> + ? Simplify> : undefined : never; @@ -192,6 +205,16 @@ export interface PerCallOverrides { * Override the timeout (in milliseconds) for this call only. */ timeout?: number; + /** + * Override optimistic update middleware options for this call. + * Pass `{ skip: true }` to skip tagging for this mutation. + */ + optimisticUpdate?: { skip?: boolean }; + /** + * Override offline queue middleware options for this call. + * Pass `{ skip: true }` to bypass the queue for this mutation. + */ + offlineQueue?: { skip?: boolean }; } /** @@ -205,20 +228,20 @@ export interface PerCallOverrides { * `AsyncIterable` yielding newline-delimited chunks (e.g. NDJSON). * An optional `AbortSignal` can be passed to cancel an in-flight stream. */ -export type EndpointCall = - EndpointCallArgs extends undefined - ? ((args?: PerCallOverrides) => Promise>) & { - stream: (options?: { - signal?: AbortSignal; - }) => AsyncIterable; - } - : (( - args: EndpointCallArgs & PerCallOverrides - ) => Promise>) & { - stream: ( - args: EndpointCallArgs & { signal?: AbortSignal } - ) => AsyncIterable; - }; +export type EndpointCall = (EndpointCallArgs extends undefined + ? 
(args?: PerCallOverrides) => Promise> + : ( + args: EndpointCallArgs & PerCallOverrides + ) => Promise>) & { + stream: EndpointCallArgs extends undefined + ? (options?: { signal?: AbortSignal }) => AsyncIterable + : ( + args: EndpointCallArgs & { signal?: AbortSignal } + ) => AsyncIterable; + file: EndpointCallArgs extends undefined + ? (args?: PerCallOverrides) => Promise + : (args: EndpointCallArgs & PerCallOverrides) => Promise; +}; // --------------------------------------------------------------------------- // Subscription types diff --git a/libs/client/tsup.config.ts b/libs/client/tsup.config.ts index 51abe83c..2e9edd8d 100644 --- a/libs/client/tsup.config.ts +++ b/libs/client/tsup.config.ts @@ -8,6 +8,8 @@ export default defineConfig({ 'src/dedupe.ts', 'src/cache.ts', 'src/batching.ts', + 'src/optimisticUpdate.ts', + 'src/offlineQueue.ts', 'src/react.ts' ], format: ['esm'], diff --git a/libs/knex-schema/src/SchemaQueryBuilder.ts b/libs/knex-schema/src/SchemaQueryBuilder.ts index 9a4e239d..127d635c 100644 --- a/libs/knex-schema/src/SchemaQueryBuilder.ts +++ b/libs/knex-schema/src/SchemaQueryBuilder.ts @@ -4,23 +4,26 @@ import type { InferType } from '@cleverbrush/schema'; import { EXTRA_TYPE_BRAND, METHOD_LITERAL_BRAND, - ObjectSchemaBuilder, - SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR + type ObjectSchemaBuilder } from '@cleverbrush/schema'; import type { Knex } from 'knex'; +import { buildColumnMap } from './columns.js'; +import { getTableName, POLYMORPHIC_TYPE_BRAND } from './extension.js'; +// Operations import { - buildColumnMap, - getPrimaryKeyColumns, - resolveColumnRef, - resolvePropertyKey -} from './columns.js'; -import { - getProjections, - getTableName, - getVariants, - POLYMORPHIC_TYPE_BRAND -} from './extension.js'; -import { clearRow } from './mappers.js'; + avgImpl, + countDistinctImpl, + countImpl, + distinctImpl, + maxImpl, + minImpl, + projectedImpl, + scopedImpl, + selectImpl, + selectRawImpl, + sumImpl, + unscopedImpl +} from 
'./operations/select.js'; import type { ColumnRef, CursorPaginationResult, @@ -28,48 +31,91 @@ import type { JoinManySpec, JoinOneSpec, PaginationResult, - RelationSpec, - ResolvedVariantConfig, - ResolvedVariantRelationSpec, SelectProjection, - SelectSelector, - ValidatedSpec, - VariantWhereFilter, - WithJoinedMany, - WithJoinedOne + SelectSelector } from './types.js'; + +export { OnConflictBuilder } from './operations/insert.js'; + +import { + deleteImpl, + hardDeleteImpl, + onlyDeletedImpl, + restoreImpl, + withDeletedImpl +} from './operations/delete.js'; +import { + ALLOWED_OPS, + buildQuery, + cleanAndMapRow, + getQuery, + getVariantConfig, + invalidateCache, + registerSchemaQueryBuilder, + resolveColumn +} from './operations/helpers.js'; +import { + bulkInsertImpl, + bulkUpsertImpl, + insertImpl, + insertManyImpl, + onConflictImpl, + upsertImpl +} from './operations/insert.js'; +import { + includeImpl, + includeVariantImpl, + joinManyImpl, + joinOneImpl +} from './operations/join.js'; +import { + executeImpl, + limitImpl, + offsetImpl, + paginateAfterImpl, + paginateImpl +} from './operations/pagination.js'; +import { getState, setState } from './operations/state.js'; +import { bulkUpdateImpl, updateImpl } from './operations/update.js'; import { - validateJoinMany, - validateJoinOne, - validateUniqueFieldNames -} from './validate.js'; + andWhereImpl, + groupByImpl, + groupByRawImpl, + havingImpl, + havingRawImpl, + orderByImpl, + orderByRawImpl, + orWhereImpl, + orWhereInImpl, + orWhereNotInImpl, + orWhereNotNullImpl, + orWhereNullImpl, + whereBetweenImpl, + whereExistsImpl, + whereILikeImpl, + whereImpl, + whereInImpl, + whereJsonPathImpl, + whereLikeImpl, + whereNotBetweenImpl, + whereNotExistsImpl, + whereNotImpl, + whereNotInImpl, + whereNotNullImpl, + whereNullImpl, + whereRawImpl +} from './operations/where.js'; // --------------------------------------------------------------------------- -// Helpers +// Type-level helpers // 
--------------------------------------------------------------------------- -/** - * Extracts the scope names registered on a schema via `.scope(name, fn)`. - * Returns `never` when no scopes are defined (making `.scoped()` uncallable). - * Falls back to `string` for `any`-typed schemas to preserve loose behaviour. - * - * @internal - */ type ScopesOf = S extends { readonly [METHOD_LITERAL_BRAND]?: infer N; } ? Extract : never; -/** - * Extracts the named projection map from a schema type. - * Returns a `Record` type where each key is a - * registered projection name and the value is the tuple of property keys. - * Returns `Record` (no projections) when the schema has none, - * making `.projected()` uncallable on undecorated schemas. - * - * @internal - */ type ProjectionsOf = S extends { readonly [EXTRA_TYPE_BRAND]?: infer P; } @@ -78,13 +124,6 @@ type ProjectionsOf = S extends { : Record : Record; -/** - * Extracts the string-key union for a specific projection name from a schema - * type. This indirection is needed because TypeScript cannot directly index - * `ProjectionsOf[K]` with `number` inside a generic function signature. - * - * @internal - */ type ProjectionKeysOf< S, K extends keyof ProjectionsOf & string @@ -92,1012 +131,121 @@ type ProjectionKeysOf< ? T : string; -// --------------------------------------------------------------------------- -// Polymorphic result type — driven by POLYMORPHIC_TYPE_BRAND phantom type -// --------------------------------------------------------------------------- - -/** - * When a schema carries the `POLYMORPHIC_TYPE_BRAND` phantom type (set by - * `.withVariants()`), extract the discriminated-union result type from it. - * Otherwise fall back to `InferType`. - * - * @internal - */ type QueryResultType = TLocalSchema extends { readonly [POLYMORPHIC_TYPE_BRAND]?: infer U; } ? 
NonNullable : InferType; -// --------------------------------------------------------------------------- -// OnConflictBuilder -// --------------------------------------------------------------------------- - -/** - * Intermediate builder returned by {@link SchemaQueryBuilder.onConflict}. - * Call `.merge()` or `.ignore()` to complete the upsert/insert-ignore operation. - * - * @internal - */ -export class OnConflictBuilder< - TLocalSchema extends ObjectSchemaBuilder, - TResult -> { - readonly #knex: Knex; - readonly #localSchema: TLocalSchema; - readonly #conflictColumns: string[]; - - /** @internal */ - constructor( - knex: Knex, - localSchema: TLocalSchema, - _parent: SchemaQueryBuilder, - conflictColumns: string[] - ) { - this.#knex = knex; - this.#localSchema = localSchema; - this.#conflictColumns = conflictColumns; - } - - /** - * Insert and merge (update) conflicting rows. - * - * @param data - Row to insert. - * @param updateData - Optional partial object of columns to update on - * conflict. If omitted, all inserted columns are updated. - * @returns The resulting row. - */ - async merge( - data: InsertType, - updateData?: Partial> - ): Promise { - return this.#execute(data, 'merge', updateData) as Promise; - } - - /** - * Insert and silently ignore conflicts. - * - * @param data - Row to insert. - * @returns The row if inserted, or `undefined` if the conflict was ignored. - */ - async ignore(data: InsertType): Promise { - return this.#execute(data, 'ignore'); - } - - async #execute( - data: InsertType, - mode: 'merge' | 'ignore', - updateData?: Partial> - ): Promise { - const tableName = getTableName(this.#localSchema); - const timestamps: { createdAt: string; updatedAt: string } | null = - (this.#localSchema as any).getExtension?.('timestamps') ?? null; - - const beforeHooks: Function[] = - (this.#localSchema as any).getExtension?.('beforeInsert') ?? 
[]; - - let processed = { ...(data as Record) }; - for (const hook of beforeHooks) { - processed = (await hook(processed)) ?? processed; - } - - const { propToCol } = buildColumnMap(this.#localSchema as any); - const mapped: Record = {}; - for (const [key, val] of Object.entries(processed)) { - mapped[propToCol.get(key) ?? key] = val; - } - if (timestamps) { - mapped[timestamps.createdAt] = this.#knex.fn.now(); - mapped[timestamps.updatedAt] = this.#knex.fn.now(); - } - - let qb = this.#knex(tableName) - .insert(mapped) - .onConflict(this.#conflictColumns); - - if (mode === 'ignore') { - qb = (qb as any).ignore(); - } else { - let mergeObj: Record; - if (updateData) { - mergeObj = {}; - for (const [key, val] of Object.entries( - updateData as Record - )) { - mergeObj[propToCol.get(key) ?? key] = val; - } - } else { - mergeObj = { ...mapped }; - if (timestamps) { - delete mergeObj[timestamps.createdAt]; - mergeObj[timestamps.updatedAt] = this.#knex.fn.now(); - } - } - qb = (qb as any).merge(mergeObj); - } - - const rows = await (qb as any).returning('*'); - if (!rows || rows.length === 0) return undefined; - - const { colToProp } = buildColumnMap(this.#localSchema as any); - const result: Record = {}; - for (const [col, val] of Object.entries(rows[0])) { - result[colToProp.get(col) ?? col] = val; - } - return result as TResult; - } -} - // --------------------------------------------------------------------------- // SchemaQueryBuilder // --------------------------------------------------------------------------- -/** - * Type-safe, schema-driven query builder for Knex. - * - * `SchemaQueryBuilder` wraps a Knex.QueryBuilder and adds: - * - **Type-safe column references** — pass a property accessor (`t => t.name`) - * or a string property name; both are resolved to the correct SQL column - * through the schema's `hasColumnName()` metadata automatically. 
- * - **Eager loading without N+1** — {@link joinOne} and {@link joinMany} use - * PostgreSQL CTEs and `jsonb_agg` to load related rows in a single query. - * - **Bidirectional result mapping** — rows returned from Postgres (column - * names) are converted back to schema property names before being returned. - * - **Thenable protocol** — the builder itself is `await`-able so you can - * write `await query(db, Schema)` without calling {@link execute} explicitly. - * - * Create instances via the {@link query} factory function rather than - * calling the constructor directly. - * - * @typeParam TLocalSchema - The `ObjectSchemaBuilder` describing the main table. - * @typeParam TResult - The inferred row type, widened automatically as joins - * are registered via {@link joinOne} / {@link joinMany}. - * - * @example - * ```ts - * import knex from 'knex'; - * import { query, object, string, number } from '@cleverbrush/knex-schema'; - * - * const UserSchema = object({ - * id: number(), - * name: string(), - * age: number().optional(), - * }).hasTableName('users'); - * - * const db = knex({ client: 'pg', connection: process.env.DB_URL }); - * - * // Fetch all users older than 18, ordered by name - * const adults = await query(db, UserSchema) - * .where(t => t.age, '>', 18) - * .orderBy(t => t.name); - * // adults: Array<{ id: number; name: string; age?: number }> - * ``` - */ export class SchemaQueryBuilder< TLocalSchema extends ObjectSchemaBuilder, TResult > { - readonly #knex: Knex; - readonly #baseQuery: Knex.QueryBuilder; - readonly #localSchema: TLocalSchema; - readonly #specs: ValidatedSpec[] = []; - readonly #tableName: string; - /** - * Tracks the SQL column names that were explicitly passed to `.select()`. - * `null` means no explicit select was made (SELECT *). - */ - #explicitSelects: string[] | null = null; - - /** - * Tracks which column-selection mode is active on this builder. - * - `null` — no explicit SELECT issued yet (SELECT *). 
- * - `'select'` — `.select()` / `.distinct()` was called. - * - `'aggregate'` — `.count()` / `.countDistinct()` / `.min()` / etc. - * - `'projection'` — `.projected()` was called. - * - * Only one mode is allowed per query. Calling a method that would switch - * to a different mode throws an error. - */ - #selectionMode: 'select' | 'aggregate' | 'projection' | null = null; - /** Name of the projection currently applied, for use in error messages. */ - #appliedProjection: string | null = null; - - /** When true, soft-delete filter is not applied to SELECT queries. */ - #includeDeleted = false; - /** When true, only soft-deleted rows are returned. */ - #onlyDeleted = false; - /** When true, default scope is not applied. */ - #skipDefaultScope = false; - - // ----------------------------------------------------------------------- - // Polymorphic variant state - // ----------------------------------------------------------------------- - - /** - * Resolved variant config, lazily populated from the schema's `'variants'` - * extension. `undefined` = not yet read; `null` = schema is not polymorphic. - */ - #variantConfig: ResolvedVariantConfig | null | undefined = undefined; - - /** - * When set, only these discriminator values are returned (added to WHERE). - * `null` means all variants are included. - */ - #enabledVariants: Set | null = null; - - /** Pending per-variant WHERE filters registered via `.whereVariant()`. */ - #variantWhereFilters: VariantWhereFilter[] = []; - - /** - * Variant-relation eager-load requests registered via `.includeVariant()`. - * Each entry names the variant key and the relation name to load. - * Processed inside `#applyVariantJoins()`. - */ - #variantRelationIncludes: Array<{ - variantKey: string; - relationName: string; - customize?: (q: SchemaQueryBuilder) => void; - }> = []; - - /** - * Memoized result of `#buildQuery()`. Invalidated by any mutating method - * (where, orderBy, limit, etc.) and re-built lazily on the next read. 
- */ - #cachedBuiltQuery: Knex.QueryBuilder | null = null; - - /** Invalidate the compiled-query cache. Called by every mutating method. */ - #invalidateCache(): void { - this.#cachedBuiltQuery = null; - } - - /** Return the built query, building it once and caching the result. */ - #getQuery(): Knex.QueryBuilder { - if (!this.#cachedBuiltQuery) { - this.#cachedBuiltQuery = this.#buildQuery(); - } - return this.#cachedBuiltQuery; - } - - /** - * @param knex - A configured Knex instance. - * @param localSchema - The `ObjectSchemaBuilder` for the primary table. - * Must have a table name set via `.hasTableName()`. - * @param baseQuery - Optional pre-configured `Knex.QueryBuilder` to use as - * the base query instead of the default `knex(tableName)`. Useful when you - * need custom joins, CTEs, or other Knex features not exposed by this API. - */ constructor( knex: Knex, localSchema: TLocalSchema, baseQuery?: Knex.QueryBuilder ) { - this.#knex = knex; - this.#localSchema = localSchema; - this.#tableName = getTableName(localSchema); - this.#baseQuery = baseQuery ?? knex(this.#tableName); + const tableName = getTableName(localSchema); + setState(this, { + knex, + baseQuery: baseQuery ?? 
knex(tableName), + localSchema, + specs: [], + tableName, + explicitSelects: null, + selectionMode: null, + appliedProjection: null, + includeDeleted: false, + onlyDeleted: false, + skipDefaultScope: false, + variantConfig: undefined, + enabledVariants: null, + variantWhereFilters: [], + variantRelationIncludes: [], + cachedBuiltQuery: null + }); } // ======================================================================= - // Private helpers + // SELECT / DISTINCT / AGGREGATES // ======================================================================= - #resolveColumn( - ref: any, - label = 'column' - ): string | import('knex').Knex.Raw { - return resolveColumnRef( - ref as ColumnRef, - this.#localSchema, - label, - this.#knex - ); + select(...columns: (ColumnRef | Knex.Raw)[]): this; + select>( + selector: TSel + ): SchemaQueryBuilder>>; + select(...args: unknown[]): any { + return selectImpl(this as any, ...args); } - /** @internal Read soft-delete extension from schema. */ - #getSoftDelete(): { column: string } | null { - const ext = (this.#localSchema as any).getExtension?.('softDelete'); - return ext ?? null; + distinct(...columns: (ColumnRef | Knex.Raw)[]): this { + return (distinctImpl as any)(this, ...columns); } - /** @internal Read default scope function from schema. */ - #getDefaultScope(): Function | null { - const fn = (this.#localSchema as any).getExtension?.('defaultScope'); - return typeof fn === 'function' ? fn : null; + count(column?: ColumnRef | Knex.Raw): this { + return (countImpl as any)(this, column); } - /** @internal Read timestamps config from schema. */ - #getTimestamps(): { createdAt: string; updatedAt: string } | null { - const ts = (this.#localSchema as any).getExtension?.('timestamps'); - return ts ?? 
null; + countDistinct(column?: ColumnRef | Knex.Raw): this { + return (countDistinctImpl as any)(this, column); } - // ----------------------------------------------------------------------- - // Polymorphic helpers - // ----------------------------------------------------------------------- - - /** - * Read and cache the variant config from the schema. Returns `null` when - * the schema is not polymorphic. - * @internal - */ - #getVariantConfig(): ResolvedVariantConfig | null { - if (this.#variantConfig !== undefined) return this.#variantConfig; - - const raw = getVariants(this.#localSchema); - if (!raw) { - this.#variantConfig = null; - return null; - } - - // Resolve discriminator property key → SQL column name - const { propToCol } = buildColumnMap(this.#localSchema); - const discCol = - propToCol.get(raw.discriminatorKey) ?? raw.discriminatorKey; - - this.#variantConfig = { - ...raw, - discriminatorColumn: discCol - }; - return this.#variantConfig; - } - - /** - * Validate allowed SQL operators for `whereVariant`. - * Guards against SQL injection via the `op` parameter. - * @internal - */ - static #ALLOWED_OPS = new Set([ - '=', - '!=', - '<>', - '<', - '>', - '<=', - '>=', - 'like', - 'not like', - 'ilike', - 'not ilike', - 'in', - 'not in', - 'is', - 'is not' - ]); - - /** - * Apply variant LEFT JOINs and aliased column selects to a base query, - * then add any `whereVariant` / `selectVariants` filters. - * - * For CTI variants: - * - `LEFT JOIN variantTable AS __v_ ON __v_. = base. AND base. = ''` - * - `SELECT __v_. AS __v___` for every column (incl. FK for orphan detection) - * - * For STI variants: - * - No extra JOIN needed; columns are already in the base row. 
- * - * @internal - */ - #applyVariantJoins( - base: Knex.QueryBuilder, - variantConfig: ResolvedVariantConfig - ): Knex.QueryBuilder { - const knex = this.#knex; - const baseTable = this.#tableName; - const basePkCol = this.#findPrimaryKeyColumn(this.#localSchema); - const discCol = variantConfig.discriminatorColumn; - - // Clone base and ensure we SELECT base.* so variant aliases don't - // collide with the base column list when the caller used SELECT *. - const qb = base.clone().select(`${baseTable}.*`); - - for (const [key, spec] of Object.entries(variantConfig.variants)) { - // Skip disabled variants when selectVariants() was called - if ( - this.#enabledVariants !== null && - !this.#enabledVariants.has(key) - ) - continue; - - if (spec.storage === 'cti') { - const variantAlias = `__v_${key}`; - const variantTable = spec.tableName!; - const fkCol = spec.foreignKey!; - - // LEFT JOIN with discriminator gate so we only pick up rows - // for the matching variant. - qb.leftJoin( - `${variantTable} as ${variantAlias}`, - knex.raw(`?? = ?? AND ?? = ?`, [ - `${variantAlias}.${fkCol}`, - `${baseTable}.${basePkCol}`, - `${baseTable}.${discCol}`, - key - ]) - ); - - // SELECT each variant column with a namespaced alias - const { propToCol } = buildColumnMap(spec.schema); - const variantIntrospect = spec.schema.introspect() as any; - const variantProps: Record = - variantIntrospect.properties ?? {}; - - for (const propKey of Object.keys(variantProps)) { - const colName = propToCol.get(propKey) ?? propKey; - // Alias: __v_image.width AS __v_image__width - qb.select( - knex.raw('?? 
as ??', [ - `${variantAlias}.${colName}`, - `${variantAlias}__${colName}` - ]) - ); - } - } - // STI: no extra JOIN — variant columns are already in the base row - } - - // Apply variant-relation eager-load JOINs (registered via .includeVariant()) - for (const vrInc of this.#variantRelationIncludes) { - const variantSpec = variantConfig.variants[vrInc.variantKey]; - if (!variantSpec) continue; - - const relSpec = variantSpec.relations.find( - (r: ResolvedVariantRelationSpec) => - r.name === vrInc.relationName - ); - if (!relSpec) continue; - - const foreignSchema = this.#resolveSchema(relSpec.schema); - const foreignTableName = getTableName(foreignSchema); - const relAlias = `__v_${vrInc.variantKey}__rel_${vrInc.relationName}`; - - if (relSpec.type === 'belongsTo' || relSpec.type === 'hasOne') { - // Determine join condition columns - let localCol: string; - let foreignCol: string; - - if (relSpec.type === 'belongsTo') { - // FK is on the variant table (or base table for STI) - localCol = - relSpec.foreignKey ?? - ((): string => { - throw new Error( - `includeVariant: relation "${vrInc.relationName}" on variant "${vrInc.variantKey}" requires foreignKey` - ); - })(); - foreignCol = this.#findPrimaryKeyColumn(foreignSchema); - } else { - // hasOne: FK is on the foreign table - localCol = this.#findPrimaryKeyColumn(this.#localSchema); - foreignCol = - relSpec.foreignKey ?? 
- ((): string => { - throw new Error( - `includeVariant: relation "${vrInc.relationName}" on variant "${vrInc.variantKey}" requires foreignKey` - ); - })(); - } - - // Build the join ON condition, gated by the discriminator - let onExpr: string; - const variantAlias = `__v_${vrInc.variantKey}`; - - if (variantSpec.storage === 'cti') { - if (relSpec.type === 'belongsTo') { - // FK lives on the CTI variant alias table - onExpr = `${relAlias}.${foreignCol} = ${variantAlias}.${localCol} AND ${baseTable}.${discCol} = '${vrInc.variantKey}'`; - } else { - // hasOne: FK on foreign table, local col is base PK - onExpr = `${relAlias}.${foreignCol} = ${baseTable}.${localCol} AND ${baseTable}.${discCol} = '${vrInc.variantKey}'`; - } - } else { - // STI: all columns are in the base table - if (relSpec.type === 'belongsTo') { - onExpr = `${relAlias}.${foreignCol} = ${baseTable}.${localCol} AND ${baseTable}.${discCol} = '${vrInc.variantKey}'`; - } else { - onExpr = `${relAlias}.${foreignCol} = ${baseTable}.${localCol} AND ${baseTable}.${discCol} = '${vrInc.variantKey}'`; - } - } - - // Build the foreign query (applying customize if provided) - const foreignKnex: Knex.QueryBuilder = - this.#knex(foreignTableName); - - // Determine which columns to select from the foreign table. - // Run customize on a probe proxy to capture projection/explicit-select state. - const selectionSql = this.#buildVariantRelationSelect( - foreignSchema, - relAlias, - foreignTableName, - vrInc.customize - ); - - qb.leftJoin( - knex.raw(`?? as ??`, [foreignTableName, relAlias]), - knex.raw(onExpr) - ); - void foreignKnex; // probe was used only for column determination - - for (const sel of selectionSql) { - qb.select(sel); - } - } - // NOTE: hasMany / belongsToMany on variants are not yet supported - // via inline JOINs (they would multiply rows). Future: secondary query. 
- } - - // Apply per-variant WHERE filters (added via .whereVariant()) - for (const filter of this.#variantWhereFilters) { - const discColFull = `${baseTable}.${discCol}`; - // (base.disc = 'key' AND variant_col op value) OR base.disc != 'key' - // → restricts matching rows, passes through non-matching variants - qb.where(function (this: Knex.QueryBuilder) { - this.where(discColFull, filter.key) - .andWhere(filter.qualifiedColumn, filter.op, filter.value) - .orWhere(discColFull, '!=', filter.key); - }); - } - - // Apply selectVariants restriction - if (this.#enabledVariants !== null) { - qb.whereIn(`${baseTable}.${discCol}`, [...this.#enabledVariants]); - } - - return qb; - } - - /** - * Build the list of `knex.raw("?? as ??", ...)` select expressions that - * project a variant-relation alias table's columns into the namespaced - * prefix `__v___rel___`. - * - * When `customize` applies a projection, only the projected columns are - * selected. Otherwise every column in the foreign schema is selected. - * - * @internal - */ - #buildVariantRelationSelect( - foreignSchema: ObjectSchemaBuilder, - relAlias: string, - foreignTableName: string, - customize?: (q: SchemaQueryBuilder) => void - ): Knex.Raw[] { - const knex = this.#knex; - const { propToCol } = buildColumnMap(foreignSchema); - const foreignIntrospect = foreignSchema.introspect() as any; - const foreignProps: Record = - foreignIntrospect.properties ?? {}; - - // Determine which property keys to include. - // Run customize on a probe proxy to capture #explicitSelects state. - let columnsToSelect: string[]; - - if (customize) { - const probe = new SchemaQueryBuilder( - this.#knex, - foreignSchema, - this.#knex(foreignTableName) - ); - customize(probe); - const explicit = probe.#explicitSelects; - if (explicit && explicit.length > 0) { - columnsToSelect = explicit; - } else { - columnsToSelect = Object.keys(foreignProps).map( - p => propToCol.get(p) ?? 
p - ); - } - } else { - columnsToSelect = Object.keys(foreignProps).map( - p => propToCol.get(p) ?? p - ); - } - - return columnsToSelect.map(colName => - knex.raw('?? as ??', [ - `${relAlias}.${colName}`, - `${relAlias}__${colName}` - ]) - ); + min(column: ColumnRef | Knex.Raw): this { + return (minImpl as any)(this, column); } - /** - * Map a raw SQL row from a polymorphic query to a schema-property-named - * object for the active variant. - * - * - Base columns are mapped via the base schema's `colToProp` map. - * - CTI variant columns (aliased as `__v___`) are mapped via - * the variant schema's `colToProp` map for the matching discriminator value. - * - STI variant columns are already in the base row; they are mapped via - * the variant schema's `colToProp` map. - * - The CTI FK column alias (`__v___`) is used only for orphan - * detection and is NOT included in the result. - * - * @internal - */ - #mapPolymorphicRow( - row: Record, - variantConfig: ResolvedVariantConfig - ): Record { - const { colToProp: baseColToProp } = buildColumnMap(this.#localSchema); - const result: Record = {}; - - // Pass 1: map base columns (skip __v_* aliases) - for (const [colName, value] of Object.entries(row)) { - if (colName.startsWith('__v_')) continue; - const propName = baseColToProp.get(colName); - if (propName) { - result[propName] = value; - } else { - // Unknown column (raw expression, joined field) — pass through - result[colName] = value; - } - } - - // Pass 2: map variant columns for the active discriminator value - const discPropKey = variantConfig.discriminatorKey; - const discValue: string | undefined = result[discPropKey]; - - if (discValue != null) { - const variantSpec = variantConfig.variants[discValue]; - if (variantSpec) { - if (variantSpec.storage === 'cti') { - const { colToProp: varColToProp } = buildColumnMap( - variantSpec.schema - ); - const variantAlias = `__v_${discValue}`; - const prefix = `${variantAlias}__`; - const fkCol = variantSpec.foreignKey; - 
- // Check for orphaned discriminator (FK alias is NULL) - if (fkCol) { - const fkAlias = `${prefix}${fkCol}`; - if (!variantSpec.allowOrphan && row[fkAlias] == null) { - throw new Error( - `Polymorphic orphan: "${discPropKey}" = "${discValue}" ` + - `but no matching row found in variant table ` + - `"${variantSpec.tableName}". ` + - `Set allowOrphan: true on this variant to suppress.` - ); - } - } - - for (const [colName, value] of Object.entries(row)) { - if (!colName.startsWith(prefix)) continue; - const origCol = colName.slice(prefix.length); - // Skip FK column — it duplicates the base PK - if (origCol === fkCol) continue; - const propName = varColToProp.get(origCol) ?? origCol; - result[propName] = value; - } - } else { - // STI: variant columns are in the base row (no prefix) - const { colToProp: varColToProp } = buildColumnMap( - variantSpec.schema - ); - for (const [colName, value] of Object.entries(row)) { - if (colName.startsWith('__v_')) continue; - if (baseColToProp.has(colName)) continue; // already mapped - const propName = varColToProp.get(colName); - if (propName) { - result[propName] = value; - } - } - } - } - } - - // Pass 3: extract variant-relation nested objects - if (this.#variantRelationIncludes.length > 0 && discValue != null) { - const variantSpec3 = variantConfig.variants[discValue]; - if (variantSpec3) { - for (const vrInc of this.#variantRelationIncludes) { - if (vrInc.variantKey !== discValue) continue; - - const relSpec = variantSpec3.relations.find( - (r: ResolvedVariantRelationSpec) => - r.name === vrInc.relationName - ); - if (!relSpec) continue; - - if ( - relSpec.type === 'belongsTo' || - relSpec.type === 'hasOne' - ) { - const relAlias = `__v_${discValue}__rel_${vrInc.relationName}`; - const prefix = `${relAlias}__`; - const foreignSchema = this.#resolveSchema( - relSpec.schema - ); - const { colToProp: relColToProp } = - buildColumnMap(foreignSchema); - const nested: Record = {}; - let anyNonNull = false; - - for (const 
[colName, value] of Object.entries(row)) { - if (!colName.startsWith(prefix)) continue; - const origCol = colName.slice(prefix.length); - const propName = - relColToProp.get(origCol) ?? origCol; - nested[propName] = value; - if (value !== null && value !== undefined) { - anyNonNull = true; - } - } - result[vrInc.relationName] = anyNonNull ? nested : null; - } - } - } - } - - return result; - } - - /** - * Build the effective base query with soft-delete and default-scope - * filters applied lazily. Does NOT include CTE wrapping. - */ - #getEffectiveBaseQuery(): Knex.QueryBuilder { - let effectiveBase = this.#baseQuery; - let cloned = false; - - // Apply soft delete filter - const softDelete = this.#getSoftDelete(); - if (softDelete && this.#onlyDeleted) { - if (!cloned) { - effectiveBase = effectiveBase.clone(); - cloned = true; - } - effectiveBase.whereNotNull(softDelete.column); - } else if (softDelete && !this.#includeDeleted) { - if (!cloned) { - effectiveBase = effectiveBase.clone(); - cloned = true; - } - effectiveBase.whereNull(softDelete.column); - } - - // Apply default scope - if (!this.#skipDefaultScope) { - const defaultScopeFn = this.#getDefaultScope(); - if (defaultScopeFn) { - if (!cloned) { - effectiveBase = effectiveBase.clone(); - cloned = true; - } - const proxy = new SchemaQueryBuilder( - this.#knex, - this.#localSchema, - effectiveBase - ); - proxy.#skipDefaultScope = true; - defaultScopeFn(proxy); - } - } - - return effectiveBase; + max(column: ColumnRef | Knex.Raw): this { + return (maxImpl as any)(this, column); } - /** @internal Resolve a lazy schema reference `schema | () => schema`. */ - #resolveSchema( - schema: any - ): ObjectSchemaBuilder { - return typeof schema === 'function' ? schema() : schema; + sum(column: ColumnRef | Knex.Raw): this { + return (sumImpl as any)(this, column); } - /** @internal Find the primary key column name from schema extensions. 
*/ - #findPrimaryKeyColumn( - schema: ObjectSchemaBuilder - ): string { - const pk = getPrimaryKeyColumns(schema); - if (pk.columnNames.length > 0) return pk.columnNames[0]; - return 'id'; + avg(column: ColumnRef | Knex.Raw): this { + return (avgImpl as any)(this, column); } - // ======================================================================= - // Relation methods — eager loading (absorbed from knex-eager) - // ======================================================================= + selectRaw(sql: string, bindings?: any[]): this { + return selectRawImpl(this as any, sql, bindings); + } - /** - * Eager-load a single related row (one-to-one / many-to-one relationship). - * - * The related rows are fetched using a single CTE + `jsonb_agg` — no N+1 - * queries. The related object is attached to each result row under the - * field name specified by `spec.as`. - * - * @param spec - Join specification. Key fields: - * - `foreignSchema` — the `ObjectSchemaBuilder` of the related table. - * - `localColumn` — the local column that holds the foreign-table reference. - * - `foreignColumn` — the primary/unique key on the foreign table. - * - `as` — the property name to attach the related object under. - * - `required` — if `true` (default), rows without a matching related - * record are excluded (inner join); if `false`, they are included with - * `null` (left join). - * - `foreignQuery` — optional pre-filtered `Knex.QueryBuilder` for the - * foreign table (e.g. to apply scopes). - * - * @returns `this` (with an updated `TResult` type that includes the new field) - * for chaining. 
- * - * @example - * ```ts - * const PostSchema = object({ - * id: number(), - * title: string(), - * authorId: number(), - * }).hasTableName('posts'); - * - * const AuthorSchema = object({ - * id: number(), - * name: string(), - * }).hasTableName('authors'); - * - * const posts = await query(db, PostSchema) - * .joinOne({ - * foreignSchema: AuthorSchema, - * localColumn: t => t.authorId, - * foreignColumn: t => t.id, - * as: 'author', - * }); - * // posts[0].author.name — typed as string ✓ - * ``` - */ - joinOne< - TForeignSchema extends ObjectSchemaBuilder< - any, - any, - any, - any, - any, - any, - any - >, - TFieldName extends string, - TRequired extends boolean = true - >( - spec: JoinOneSpec - ): SchemaQueryBuilder< - TLocalSchema, - WithJoinedOne - > { - const validated = validateJoinOne(spec, this.#localSchema, this.#knex); - this.#specs.push({ type: 'one' as const, ...validated }); - validateUniqueFieldNames(this.#specs); - this.#invalidateCache(); - return this as any; - } - - /** - * Eager-load a collection of related rows (one-to-many relationship). - * - * Related rows are fetched via a single CTE + `jsonb_agg` query. The - * collection is attached to each result row under the field name specified - * by `spec.as`. Supports `limit`, `offset`, and `orderBy` per-parent - * using a `row_number()` window function to avoid fetching the full - * relation before slicing. - * - * @param spec - Join specification. Key fields: - * - `foreignSchema` — the `ObjectSchemaBuilder` of the related table. - * - `localColumn` — the primary/unique key on the local table. - * - `foreignColumn` — the column on the foreign table that references `localColumn`. - * - `as` — the property name to attach the array under. - * - `limit` / `offset` — optional pagination per parent row. - * - `orderBy` — optional `{ column, direction }` for the sub-collection. - * - `foreignQuery` — optional pre-filtered `Knex.QueryBuilder`. 
- * - * @returns `this` (with an updated `TResult` type that includes the new field) - * for chaining. - * - * @example - * ```ts - * const UserSchema = object({ - * id: number(), - * name: string(), - * }).hasTableName('users'); - * - * const PostSchema = object({ - * id: number(), - * title: string(), - * authorId: number(), - * }).hasTableName('posts'); - * - * const users = await query(db, UserSchema) - * .joinMany({ - * foreignSchema: PostSchema, - * localColumn: t => t.id, - * foreignColumn: t => t.authorId, - * as: 'posts', - * limit: 5, - * orderBy: { column: t => t.id, direction: 'desc' }, - * }); - * // users[0].posts — typed as Array<{ id: number; title: string; authorId: number }> - * ``` - */ - joinMany< - TForeignSchema extends ObjectSchemaBuilder< - any, - any, - any, - any, - any, - any, - any - >, - TFieldName extends string - >( - spec: JoinManySpec + projected & string>( + name: K ): SchemaQueryBuilder< TLocalSchema, - WithJoinedMany + Pick & keyof TResult> > { - const validated = validateJoinMany(spec, this.#localSchema, this.#knex); - this.#specs.push({ type: 'many' as const, ...validated }); - validateUniqueFieldNames(this.#specs); - this.#invalidateCache(); - return this as any; + return projectedImpl(this as any, name); + } + + scoped>(name: K): this { + return scopedImpl(this as any, name as string); + } + + unscoped(): this { + return unscopedImpl(this as any); } // ======================================================================= - // WHERE methods + // WHERE // ======================================================================= - /** - * Add a `WHERE` clause to the query. - * - * Accepts a column reference, an optional operator, and a value: - * - `where(t => t.age, '>', 18)` — property accessor + operator + value. - * - `where('age', 18)` — string key + value (defaults to `=`). - * - `where({ name: 'Alice' })` — record object; property keys are mapped - * to column names automatically. - * - `where(builder => { ... 
})` — Knex sub-builder callback for grouped - * conditions. - * - `where(knex.raw('...'))` — raw SQL expression. - * - * Multiple `.where()` calls are combined with `AND`. - * - * @returns `this` for chaining. - */ where(column: ColumnRef, operator: string, value: any): this; where(column: ColumnRef, value: any): this; where(raw: Knex.Raw, operator: string, value: any): this; where(callback: (builder: Knex.QueryBuilder) => void): this; where(record: Record): this; where(raw: Knex.Raw): this; - where( - columnOrRaw: - | ColumnRef - | Knex.Raw - | Record - | ((builder: Knex.QueryBuilder) => void), - ...args: any[] - ): this { - this.#invalidateCache(); - if ( - typeof columnOrRaw === 'function' && - !this.#isColumnAccessor(columnOrRaw) - ) { - (this.#baseQuery.where as any)(columnOrRaw, ...args); - } else if ( - typeof columnOrRaw === 'object' && - columnOrRaw !== null && - !('toSQL' in columnOrRaw) - ) { - // Record — map property keys to column names - const mapped = this.#mapRecordToColumns( - columnOrRaw as Record - ); - (this.#baseQuery.where as any)(mapped, ...args); - } else { - const col = this.#resolveColumnArg(columnOrRaw); - (this.#baseQuery.where as any)(col, ...args); - } - return this; + where(columnOrRaw: any, ...args: any[]): this { + return whereImpl(this as any, columnOrRaw, ...args); } - /** - * Alias for {@link where} — explicitly adds an `AND WHERE` clause. - * Identical to calling `.where()` when no logical-OR grouping is needed. - * @returns `this` for chaining. 
- */ andWhere( column: ColumnRef, operator: string, @@ -1107,40 +255,10 @@ export class SchemaQueryBuilder< andWhere(record: Record): this; andWhere(callback: (builder: Knex.QueryBuilder) => void): this; andWhere(raw: Knex.Raw): this; - andWhere( - columnOrRaw: - | ColumnRef - | Knex.Raw - | Record - | ((builder: Knex.QueryBuilder) => void), - ...args: any[] - ): this { - this.#invalidateCache(); - if ( - typeof columnOrRaw === 'function' && - !this.#isColumnAccessor(columnOrRaw) - ) { - (this.#baseQuery.andWhere as any)(columnOrRaw, ...args); - } else if ( - typeof columnOrRaw === 'object' && - columnOrRaw !== null && - !('toSQL' in columnOrRaw) - ) { - const mapped = this.#mapRecordToColumns( - columnOrRaw as Record - ); - (this.#baseQuery.andWhere as any)(mapped, ...args); - } else { - const col = this.#resolveColumnArg(columnOrRaw); - (this.#baseQuery.andWhere as any)(col, ...args); - } - return this; + andWhere(columnOrRaw: any, ...args: any[]): this { + return andWhereImpl(this as any, columnOrRaw, ...args); } - /** - * Add an `OR WHERE` clause. Use this to create alternative filter branches. - * @returns `this` for chaining. 
- */ orWhere( column: ColumnRef, operator: string, @@ -1150,40 +268,10 @@ export class SchemaQueryBuilder< orWhere(record: Record): this; orWhere(callback: (builder: Knex.QueryBuilder) => void): this; orWhere(raw: Knex.Raw): this; - orWhere( - columnOrRaw: - | ColumnRef - | Knex.Raw - | Record - | ((builder: Knex.QueryBuilder) => void), - ...args: any[] - ): this { - this.#invalidateCache(); - if ( - typeof columnOrRaw === 'function' && - !this.#isColumnAccessor(columnOrRaw) - ) { - (this.#baseQuery.orWhere as any)(columnOrRaw, ...args); - } else if ( - typeof columnOrRaw === 'object' && - columnOrRaw !== null && - !('toSQL' in columnOrRaw) - ) { - const mapped = this.#mapRecordToColumns( - columnOrRaw as Record - ); - (this.#baseQuery.orWhere as any)(mapped, ...args); - } else { - const col = this.#resolveColumnArg(columnOrRaw); - (this.#baseQuery.orWhere as any)(col, ...args); - } - return this; + orWhere(columnOrRaw: any, ...args: any[]): this { + return orWhereImpl(this as any, columnOrRaw, ...args); } - /** - * Add a `WHERE NOT` clause — negates the condition. - * @returns `this` for chaining. 
- */ whereNot( column: ColumnRef, operator: string, @@ -1193,1722 +281,343 @@ export class SchemaQueryBuilder< whereNot(record: Record): this; whereNot(callback: (builder: Knex.QueryBuilder) => void): this; whereNot(raw: Knex.Raw): this; - whereNot( - columnOrRaw: - | ColumnRef - | Knex.Raw - | Record - | ((builder: Knex.QueryBuilder) => void), - ...args: any[] - ): this { - this.#invalidateCache(); - if ( - typeof columnOrRaw === 'function' && - !this.#isColumnAccessor(columnOrRaw) - ) { - (this.#baseQuery.whereNot as any)(columnOrRaw, ...args); - } else if ( - typeof columnOrRaw === 'object' && - columnOrRaw !== null && - !('toSQL' in columnOrRaw) - ) { - const mapped = this.#mapRecordToColumns( - columnOrRaw as Record - ); - (this.#baseQuery.whereNot as any)(mapped, ...args); - } else { - const col = this.#resolveColumnArg(columnOrRaw); - (this.#baseQuery.whereNot as any)(col, ...args); - } - return this; + whereNot(columnOrRaw: any, ...args: any[]): this { + return whereNotImpl(this as any, columnOrRaw, ...args); } - /** - * Add a `WHERE column IN (values)` clause. - * @param column - Column reference (property accessor or string key). - * @param values - Array of values or a sub-query. - * @returns `this` for chaining. - */ whereIn( column: ColumnRef, values: readonly any[] | Knex.QueryBuilder ): this { - this.#invalidateCache(); - this.#baseQuery.whereIn( - this.#resolveColumn(column, 'whereIn') as any, - values as any - ); - return this; + return (whereInImpl as any)(this, column, values); } - /** - * Add a `WHERE column NOT IN (values)` clause. - * @param column - Column reference. - * @param values - Array of values or a sub-query. - * @returns `this` for chaining. 
- */ whereNotIn( column: ColumnRef, values: readonly any[] | Knex.QueryBuilder ): this { - this.#invalidateCache(); - this.#baseQuery.whereNotIn( - this.#resolveColumn(column, 'whereNotIn') as any, - values as any - ); - return this; + return (whereNotInImpl as any)(this, column, values); } - /** - * Add an `OR WHERE column IN (values)` clause. - * @returns `this` for chaining. - */ orWhereIn( column: ColumnRef, values: readonly any[] | Knex.QueryBuilder ): this { - this.#invalidateCache(); - (this.#baseQuery as any).orWhereIn( - this.#resolveColumn(column, 'orWhereIn'), - values as any - ); - return this; + return (orWhereInImpl as any)(this, column, values); } - /** - * Add an `OR WHERE column NOT IN (values)` clause. - * @returns `this` for chaining. - */ orWhereNotIn( column: ColumnRef, values: readonly any[] | Knex.QueryBuilder ): this { - this.#invalidateCache(); - (this.#baseQuery as any).orWhereNotIn( - this.#resolveColumn(column, 'orWhereNotIn'), - values as any - ); - return this; + return (orWhereNotInImpl as any)(this, column, values); } - /** - * Add a `WHERE column IS NULL` clause. - * @returns `this` for chaining. - */ whereNull(column: ColumnRef): this { - this.#invalidateCache(); - this.#baseQuery.whereNull( - this.#resolveColumn(column, 'whereNull') as any - ); - return this; + return (whereNullImpl as any)(this, column); } - /** - * Add a `WHERE column IS NOT NULL` clause. - * @returns `this` for chaining. - */ whereNotNull(column: ColumnRef): this { - this.#invalidateCache(); - this.#baseQuery.whereNotNull( - this.#resolveColumn(column, 'whereNotNull') as any - ); - return this; + return (whereNotNullImpl as any)(this, column); } - /** - * Add an `OR WHERE column IS NULL` clause. - * @returns `this` for chaining. 
- */ orWhereNull(column: ColumnRef): this { - this.#invalidateCache(); - (this.#baseQuery as any).orWhereNull( - this.#resolveColumn(column, 'orWhereNull') - ); - return this; + return (orWhereNullImpl as any)(this, column); } - /** - * Add an `OR WHERE column IS NOT NULL` clause. - * @returns `this` for chaining. - */ orWhereNotNull(column: ColumnRef): this { - this.#invalidateCache(); - (this.#baseQuery as any).orWhereNotNull( - this.#resolveColumn(column, 'orWhereNotNull') - ); - return this; + return (orWhereNotNullImpl as any)(this, column); } - /** - * Add a `WHERE column BETWEEN low AND high` clause. - * @param range - A two-element tuple `[low, high]`. - * @returns `this` for chaining. - */ whereBetween( column: ColumnRef, range: readonly [any, any] ): this { - this.#invalidateCache(); - this.#baseQuery.whereBetween( - this.#resolveColumn(column, 'whereBetween') as any, - range as [any, any] - ); - return this; + return (whereBetweenImpl as any)(this, column, range); } - /** - * Add a `WHERE column NOT BETWEEN low AND high` clause. - * @param range - A two-element tuple `[low, high]`. - * @returns `this` for chaining. - */ whereNotBetween( column: ColumnRef, range: readonly [any, any] ): this { - this.#invalidateCache(); - this.#baseQuery.whereNotBetween( - this.#resolveColumn(column, 'whereNotBetween') as any, - range as [any, any] - ); - return this; + return (whereNotBetweenImpl as any)(this, column, range); } - /** - * Add a case-sensitive `WHERE column LIKE value` clause. - * @param value - A SQL LIKE pattern (e.g. `'Alice%'`). - * @returns `this` for chaining. - */ whereLike(column: ColumnRef, value: string): this { - this.#invalidateCache(); - (this.#baseQuery as any).whereLike( - this.#resolveColumn(column, 'whereLike'), - value - ); - return this; + return (whereLikeImpl as any)(this, column, value); } - /** - * Add a case-insensitive `WHERE column ILIKE value` clause (PostgreSQL). - * @param value - A SQL LIKE pattern (e.g. `'alice%'`). 
- * @returns `this` for chaining. - */ whereILike(column: ColumnRef, value: string): this { - this.#invalidateCache(); - (this.#baseQuery as any).whereILike( - this.#resolveColumn(column, 'whereILike'), - value - ); - return this; + return (whereILikeImpl as any)(this, column, value); } - /** - * Add a raw `WHERE` clause. Useful for database-specific expressions. - * @param sql - Raw SQL string with optional `:binding:` or `?` placeholders. - * @param bindings - Values for the placeholders. - * @returns `this` for chaining. - */ whereRaw(sql: string, ...bindings: any[]): this { - this.#invalidateCache(); - this.#baseQuery.whereRaw(sql, ...bindings); - return this; + return (whereRawImpl as any)(this, sql, ...bindings); } - /** - * Add a `WHERE EXISTS (subquery)` clause. - * @param callback - A Knex query callback or sub-query builder. - * @returns `this` for chaining. - */ whereExists(callback: Knex.QueryCallback | Knex.QueryBuilder): this { - this.#invalidateCache(); - this.#baseQuery.whereExists(callback as any); - return this; + return (whereExistsImpl as any)(this, callback); } - /** - * Add a `WHERE NOT EXISTS (subquery)` clause. - * @param callback - A Knex query callback or sub-query builder. - * @returns `this` for chaining. - */ whereNotExists(callback: Knex.QueryCallback | Knex.QueryBuilder): this { - this.#invalidateCache(); - (this.#baseQuery as any).whereNotExists(callback as any); - return this; + return (whereNotExistsImpl as any)(this, callback); } - /** - * Add a PostgreSQL JSON path filter using `@?` / `@@` operators or - * a path-based equality test via `jsonb_path_query_first`. - * - * Only supported on `pg` clients — throws at runtime on others. - * - * @param column - Column reference for the `jsonb` column. - * @param path - Dot-separated property path (e.g. `'a.b.c'`) or a - * JSONPath expression string (e.g. `'$.a.b ? (@ == 1)'`). - * @param operator - Comparison operator (`=`, `!=`, `<`, `<=`, `>`, `>=`, - * `@?`, `@@`). 
Use `@?` / `@@` for JSONPath existence / predicate tests. - * @param value - The right-hand side value. Ignored for `@?` and `@@`. - * @returns `this` for chaining. - * - * @example - * ```ts - * // Filter rows where data->>'status' = 'active' - * query(db, Schema).whereJsonPath(t => t.data, 'status', '=', 'active'); - * - * // JSONPath existence - * query(db, Schema).whereJsonPath(t => t.data, '$.tags[*] ? (@ == "sale")', '@?'); - * ``` - */ whereJsonPath( column: ColumnRef, path: string, operator?: string, value?: any ): this { - this.#invalidateCache(); - const client = (this.#knex as any).client?.config?.client as - | string - | undefined; - if ( - client !== 'pg' && - client !== 'postgresql' && - client !== 'postgres' - ) { - throw new Error( - `whereJsonPath() is only supported on PostgreSQL (got client: "${client ?? 'unknown'}")` - ); - } - - const col = this.#resolveColumn(column, 'whereJsonPath'); - const op = operator ?? '='; - - if (op === '@?' || op === '@@') { - // JSONPath existence / predicate. - // Note: `@?` contains a `?` which Knex would treat as a binding - // placeholder inside whereRaw. Escape it with `\?` → `\\?` in JS. - const escapedOp = op === '@?' ? '@\\?' : '@@'; - this.#baseQuery.whereRaw(`?? ${escapedOp} ?`, [col, path]); - } else { - // Path-based value test: jsonb_path_query_first(col, path) op value - const jsonPath = path.startsWith('$') - ? path - : `$.${path.replace(/\./g, '.')}`; - this.#baseQuery.whereRaw( - `jsonb_path_query_first(??, ?) ${op} ?::jsonb`, - [col, jsonPath, JSON.stringify(value)] - ); - } - return this; + return (whereJsonPathImpl as any)(this, column, path, operator, value); } // ======================================================================= // ORDER BY // ======================================================================= - /** - * Order the results by a column. - * @param column - Column reference or raw expression. - * @param direction - `'asc'` (default) or `'desc'`. 
- * @returns `this` for chaining. - * - * @example - * ```ts - * query(db, UserSchema).orderBy(t => t.name).orderBy(t => t.createdAt, 'desc'); - * ``` - */ orderBy( column: ColumnRef | Knex.Raw, direction?: 'asc' | 'desc' ): this { - this.#invalidateCache(); - const col = this.#resolveColumnArg(column); - this.#baseQuery.orderBy(col as string, direction); - return this; + return (orderByImpl as any)(this, column, direction); } - /** - * Order the results by a raw SQL expression. - * @param sql - Raw SQL (e.g. `'LOWER(name) ASC'`). - * @returns `this` for chaining. - */ orderByRaw(sql: string, ...bindings: any[]): this { - this.#invalidateCache(); - this.#baseQuery.orderByRaw(sql, ...bindings); - return this; + return (orderByRawImpl as any)(this, sql, ...bindings); } // ======================================================================= // GROUP BY / HAVING // ======================================================================= - /** - * Add a `GROUP BY` clause. - * @param columns - One or more column references or raw expressions. - * @returns `this` for chaining. - */ groupBy(...columns: (ColumnRef | Knex.Raw)[]): this { - this.#invalidateCache(); - const resolved = columns.map(c => this.#resolveColumnArg(c)); - this.#baseQuery.groupBy(...(resolved as string[])); - return this; + return (groupByImpl as any)(this, ...columns); } - /** - * Add a raw `GROUP BY` expression. - * @returns `this` for chaining. - */ groupByRaw(sql: string, ...bindings: any[]): this { - this.#invalidateCache(); - this.#baseQuery.groupByRaw(sql, ...bindings); - return this; + return (groupByRawImpl as any)(this, sql, ...bindings); } - /** - * Add a `HAVING column operator value` clause (used with `GROUP BY`). - * @returns `this` for chaining. 
- */ having( column: ColumnRef | Knex.Raw, operator: string, value: any ): this { - this.#invalidateCache(); - const col = this.#resolveColumnArg(column); - this.#baseQuery.having(col as string, operator, value); - return this; + return (havingImpl as any)(this, column, operator, value); } - /** - * Add a raw `HAVING` expression. - * @returns `this` for chaining. - */ havingRaw(sql: string, ...bindings: any[]): this { - this.#invalidateCache(); - this.#baseQuery.havingRaw(sql, ...bindings); - return this; + return havingRawImpl(this as any, sql, ...bindings); } // ======================================================================= // PAGINATION // ======================================================================= - /** - * Limit the number of rows returned. - * @param n - Maximum number of rows. - * @returns `this` for chaining. - */ limit(n: number): this { - this.#invalidateCache(); - this.#baseQuery.limit(n); - return this; + return limitImpl(this as any, n); } - /** - * Skip the first `n` rows in the result set (for cursor/offset pagination). - * @param n - Number of rows to skip. - * @returns `this` for chaining. - */ offset(n: number): this { - this.#invalidateCache(); - this.#baseQuery.offset(n); - return this; + return offsetImpl(this as any, n); + } + + async paginate(opts: { + page: number; + pageSize: number; + }): Promise> { + return paginateImpl(this as any, opts) as Promise< + PaginationResult + >; + } + + async paginateAfter(opts: { + cursor?: any; + limit: number; + column?: ColumnRef; + direction?: 'asc' | 'desc'; + }): Promise> { + return (paginateAfterImpl as any)(this, opts) as Promise< + CursorPaginationResult + >; } // ======================================================================= - // SELECT / DISTINCT + // WRITE OPERATIONS // ======================================================================= - /** - * Select specific columns instead of `*`. 
Each column reference is - * resolved to its SQL column name through the schema. - * @param columns - One or more column references or raw expressions. - * @returns `this` for chaining. - */ - select(...columns: (ColumnRef | Knex.Raw)[]): this; - /** - * DTO projection: select multiple aliased columns at once via a - * descriptor record. Returns a query whose result rows match the - * shape of the selector's return value (each value typed as the - * inferred schema-property type). - * - * @example - * ```ts - * const dtos = await query(db, UserSchema) - * .where(t => t.id, '>', 0) - * .select(t => ({ id: t.id, n: t.name })) - * .execute(); - * // dtos: { id: number; n: string }[] - * ``` - */ - select>( - selector: TSel - ): SchemaQueryBuilder>>; - select(...args: unknown[]): SchemaQueryBuilder { - // DTO projection overload: a single function whose return value - // is a Record (NOT a single descriptor — those - // are handled by the column-list path via #resolveColumnArg). - if (args.length === 1 && typeof args[0] === 'function') { - const fn = args[0] as (t: any) => unknown; - const tree = ObjectSchemaBuilder.getPropertiesFor( - this.#localSchema as any - ); - const result = fn(tree); - if ( - result && - typeof result === 'object' && - !(SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR in (result as object)) - ) { - this.#invalidateCache(); - this.#assertNotProjection('select'); - this.#selectionMode = 'projection'; - this.#appliedProjection = ''; - - const aliasMap: Record = {}; - this.#explicitSelects ??= []; - for (const [alias, descriptor] of Object.entries( - result as Record - )) { - if ( - !descriptor || - typeof descriptor !== 'object' || - !( - SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR in - (descriptor as object) - ) - ) { - throw new Error( - `select(selector): value for alias "${alias}" must be a property descriptor (e.g. 
\`t.someProp\`).` - ); - } - const col = this.#resolveColumn( - (() => descriptor) as ColumnRef, - `select(selector).${alias}` - ); - aliasMap[alias] = col as string; - this.#explicitSelects.push(col as string); - } - this.#baseQuery.select(aliasMap); - return this as unknown as SchemaQueryBuilder; - } - // Fall through: single descriptor → column-list path. - } + async insert(data: InsertType): Promise { + return insertImpl(this as any, data) as Promise; + } - // Column-list path (existing behaviour). - this.#invalidateCache(); - this.#assertNotProjection('select'); - this.#selectionMode = 'select'; - const resolved = (args as (ColumnRef | Knex.Raw)[]).map( - c => this.#resolveColumnArg(c) - ); - this.#baseQuery.select(...(resolved as string[])); - // Track the string-resolved columns (not Knex.Raw) for CTE column management - this.#explicitSelects ??= []; - for (const r of resolved) { - if (typeof r === 'string') { - this.#explicitSelects.push(r); - } - } - return this as unknown as SchemaQueryBuilder; + async insertMany(data: InsertType[]): Promise { + return insertManyImpl(this as any, data) as Promise; } - /** @internal Throw if a projection has already been applied. */ - #assertNotProjection(method: string): void { - if (this.#selectionMode === 'projection') { - throw new Error( - `Cannot call .${method}() after .projected('${ - this.#appliedProjection - }'). Choose one column-selection mode per query.` - ); - } + onConflict( + ...conflictColumns: ColumnRef[] + ): import('./operations/insert.js').OnConflictBuilder< + TLocalSchema, + TResult + > { + return (onConflictImpl as any)(this, ...conflictColumns); } - /** @internal Throw if .select() or .projected() has already been applied. */ - #assertNotExplicitSelect(method: string): void { - if (this.#selectionMode === 'select') { - throw new Error( - `Cannot call .${method}() after .select(). 
Choose one column-selection mode per query.` - ); - } - if (this.#selectionMode === 'aggregate') { - throw new Error( - `Cannot call .${method}() after an aggregate method. Choose one column-selection mode per query.` - ); + async upsert( + data: InsertType, + opts: { + conflictColumns: ColumnRef[]; + updateColumns?: ColumnRef[]; } - if (this.#selectionMode === 'projection') { - throw new Error( - `Cannot call .${method}() after .projected('${ - this.#appliedProjection - }'). Choose one column-selection mode per query.` - ); + ): Promise { + return (upsertImpl as any)(this, data, opts); + } + + async bulkInsert( + rows: InsertType[], + opts?: { + chunkSize?: number; + onConflict?: 'ignore' | 'merge'; + conflictColumns?: ColumnRef[]; } + ): Promise { + return (bulkInsertImpl as any)(this, rows, opts); } - /** - * Add `DISTINCT` to the select clause. Duplicate rows are eliminated. - * @param columns - One or more column references or raw expressions. - * @returns `this` for chaining. - */ - distinct(...columns: (ColumnRef | Knex.Raw)[]): this { - this.#invalidateCache(); - const resolved = columns.map(c => this.#resolveColumnArg(c)); - this.#baseQuery.distinct(...(resolved as string[])); - return this; - } - - // ======================================================================= - // AGGREGATES - // ======================================================================= - - /** - * Add a `COUNT(*)` or `COUNT(column)` aggregate to the select list. - * @param column - Optional column to count (defaults to `*`). - * @returns `this` for chaining. - */ - count(column?: ColumnRef | Knex.Raw): this { - this.#invalidateCache(); - this.#assertNotProjection('count'); - this.#selectionMode = 'aggregate'; - if (column) { - this.#baseQuery.count(this.#resolveColumnArg(column) as string); - } else { - this.#baseQuery.count(); - } - return this; - } - - /** - * Add a `COUNT(DISTINCT column)` aggregate to the select list. - * @param column - Optional column (defaults to `*`). 
- * @returns `this` for chaining. - */ - countDistinct(column?: ColumnRef | Knex.Raw): this { - this.#invalidateCache(); - this.#assertNotProjection('countDistinct'); - this.#selectionMode = 'aggregate'; - if (column) { - this.#baseQuery.countDistinct( - this.#resolveColumnArg(column) as string - ); - } else { - this.#baseQuery.countDistinct(); - } - return this; - } - - /** - * Add a `MIN(column)` aggregate. - * @returns `this` for chaining. - */ - min(column: ColumnRef | Knex.Raw): this { - this.#invalidateCache(); - this.#assertNotProjection('min'); - this.#selectionMode = 'aggregate'; - this.#baseQuery.min(this.#resolveColumnArg(column) as string); - return this; - } - - /** - * Add a `MAX(column)` aggregate. - * @returns `this` for chaining. - */ - max(column: ColumnRef | Knex.Raw): this { - this.#invalidateCache(); - this.#assertNotProjection('max'); - this.#selectionMode = 'aggregate'; - this.#baseQuery.max(this.#resolveColumnArg(column) as string); - return this; - } - - /** - * Add a `SUM(column)` aggregate. - * @returns `this` for chaining. - */ - sum(column: ColumnRef | Knex.Raw): this { - this.#invalidateCache(); - this.#assertNotProjection('sum'); - this.#selectionMode = 'aggregate'; - this.#baseQuery.sum(this.#resolveColumnArg(column) as string); - return this; - } - - /** - * Add an `AVG(column)` aggregate. - * @returns `this` for chaining. - */ - avg(column: ColumnRef | Knex.Raw): this { - this.#invalidateCache(); - this.#assertNotProjection('avg'); - this.#selectionMode = 'aggregate'; - this.#baseQuery.avg(this.#resolveColumnArg(column) as string); - return this; - } - - // ======================================================================= - // WRITE OPERATIONS - // ======================================================================= - - /** - * Insert a single row into the table and return the inserted record. 
- * - * Property keys are mapped to SQL column names via the schema's - * `hasColumnName()` metadata before the `INSERT` is executed. The - * returned row is mapped back to property names. - * - * @param data - The object to insert. Keys must be valid schema property names. - * @returns The full inserted row (including database-generated fields). - * - * @example - * ```ts - * const user = await query(db, UserSchema).insert({ name: 'Alice', age: 30 }); - * // user.id is populated by the database DEFAULT / SERIAL - * ``` - */ - async insert(data: InsertType): Promise { - let processedData = { ...(data as Record) }; - - // Run beforeInsert hooks - const beforeHooks = - ((this.#localSchema as any).getExtension?.('beforeInsert') as - | Function[] - | undefined) ?? []; - for (const hook of beforeHooks) { - processedData = (await hook(processedData)) ?? processedData; - } - - const mapped = this.#mapObjectToColumns(processedData); - - // Apply timestamps - const timestamps = this.#getTimestamps(); - if (timestamps) { - mapped[timestamps.createdAt] = this.#knex.fn.now(); - mapped[timestamps.updatedAt] = this.#knex.fn.now(); - } - - const [row] = await this.#knex(this.#tableName) - .insert(mapped) - .returning('*'); - const result = this.#mapRow(row) as TResult; - - // Run afterInsert hooks - const afterHooks = - ((this.#localSchema as any).getExtension?.('afterInsert') as - | Function[] - | undefined) ?? []; - for (const hook of afterHooks) { - await hook(result); - } - - return result; - } - - /** - * Insert multiple rows in a single `INSERT` statement and return all - * inserted records. - * - * @param data - Array of objects to insert. - * @returns The full inserted rows in insertion order. - */ - async insertMany(data: InsertType[]): Promise { - const timestamps = this.#getTimestamps(); - const beforeHooks = - ((this.#localSchema as any).getExtension?.('beforeInsert') as - | Function[] - | undefined) ?? 
[]; - - const mapped = []; - for (const d of data) { - let processedData = { ...(d as Record) }; - for (const hook of beforeHooks) { - processedData = (await hook(processedData)) ?? processedData; - } - const m = this.#mapObjectToColumns(processedData); - if (timestamps) { - m[timestamps.createdAt] = this.#knex.fn.now(); - m[timestamps.updatedAt] = this.#knex.fn.now(); - } - mapped.push(m); - } - - const rows = await this.#knex(this.#tableName) - .insert(mapped) - .returning('*'); - const results = rows.map((row: any) => this.#mapRow(row) as TResult); - - // Run afterInsert hooks - const afterHooks = - ((this.#localSchema as any).getExtension?.('afterInsert') as - | Function[] - | undefined) ?? []; - for (const result of results) { - for (const hook of afterHooks) { - await hook(result); - } - } - - return results; - } - - /** - * Insert a row (or rows) with an `ON CONFLICT` clause. - * - * Returns a chainable object with `.merge()` and `.ignore()` methods. - * - * - `.merge(updateData?)` — updates the conflicting row with the provided - * fields (or all insert fields if omitted). - * - `.ignore()` — skips the insert when a conflict occurs (INSERT IGNORE). - * - * @param conflictColumns - Column references that define the conflict target. - * @returns A chainable conflict builder. - * - * @example - * ```ts - * // Upsert: insert or update on conflict - * await query(db, UserSchema) - * .onConflict(t => t.email) - * .merge({ name: 'Bob' }); - * - * // Insert and ignore on conflict - * await query(db, UserSchema) - * .onConflict(t => t.email) - * .ignore(); - * ``` - */ - onConflict( - ...conflictColumns: ColumnRef[] - ): OnConflictBuilder { - const cols = conflictColumns.map( - c => this.#resolveColumn(c, 'onConflict') as string - ); - return new OnConflictBuilder(this.#knex, this.#localSchema, this, cols); - } - - /** - * Insert or update a row based on a conflict target (upsert shorthand). 
- * - * Equivalent to calling `.onConflict(conflictColumns).merge(updateData)`. - * - * @param data - The row data to insert. - * @param opts - `{ conflictColumns, updateColumns? }`. - * @returns The resulting row (inserted or updated). - * - * @example - * ```ts - * const user = await query(db, UserSchema).upsert( - * { email: 'alice@example.com', name: 'Alice' }, - * { conflictColumns: [t => t.email], updateColumns: [t => t.name] } - * ); - * ``` - */ - async upsert( - data: InsertType, + async bulkUpsert( + rows: InsertType[], opts: { conflictColumns: ColumnRef[]; - updateColumns?: ColumnRef[]; - } - ): Promise { - const timestamps = this.#getTimestamps(); - const beforeHooks = - ((this.#localSchema as any).getExtension?.('beforeInsert') as - | Function[] - | undefined) ?? []; - - let processedData = { ...(data as Record) }; - for (const hook of beforeHooks) { - processedData = (await hook(processedData)) ?? processedData; - } - - const mapped = this.#mapObjectToColumns(processedData); - if (timestamps) { - mapped[timestamps.createdAt] = this.#knex.fn.now(); - mapped[timestamps.updatedAt] = this.#knex.fn.now(); - } - - const conflictCols = opts.conflictColumns.map( - c => this.#resolveColumn(c, 'upsert conflict') as string - ); - - let qb = this.#knex(this.#tableName) - .insert(mapped) - .onConflict(conflictCols); - - if (opts.updateColumns && opts.updateColumns.length > 0) { - const updateCols = opts.updateColumns.map( - c => this.#resolveColumn(c, 'upsert update') as string - ); - const updateData: Record = {}; - for (const col of updateCols) { - if (col in mapped) { - updateData[col] = mapped[col]; - } - } - if (timestamps) { - updateData[timestamps.updatedAt] = this.#knex.fn.now(); - } - qb = (qb as any).merge(updateData); - } else { - const updateData = { ...mapped }; - // Don't overwrite createdAt on upsert - if (timestamps) { - delete updateData[timestamps.createdAt]; - updateData[timestamps.updatedAt] = this.#knex.fn.now(); - } - qb = (qb as 
any).merge(updateData); - } - - const [row] = await (qb as any).returning('*'); - return this.#mapRow(row) as TResult; - } - - /** - * Update all rows that match the current `WHERE` clause and return the - * updated records. - * - * Only the keys present in `data` are updated (partial update). Property - * keys are resolved to column names automatically. - * - * @param data - Partial schema object with fields to update. - * @returns All rows that were updated. - * - * @example - * ```ts - * const updated = await query(db, UserSchema) - * .where(t => t.id, userId) - * .update({ name: 'Bob' }); - * ``` - */ - async update(data: Partial>): Promise { - let processedData = { ...(data as Record) }; - - // Run beforeUpdate hooks - const beforeHooks = - ((this.#localSchema as any).getExtension?.('beforeUpdate') as - | Function[] - | undefined) ?? []; - for (const hook of beforeHooks) { - processedData = (await hook(processedData)) ?? processedData; - } - - const mapped = this.#mapObjectToColumns(processedData); - - // Apply timestamps - const timestamps = this.#getTimestamps(); - if (timestamps) { - mapped[timestamps.updatedAt] = this.#knex.fn.now(); - } - - const rows = await this.#baseQuery.update(mapped).returning('*'); - return rows.map((row: any) => this.#mapRow(row) as TResult); - } - - /** - * Delete all rows that match the current `WHERE` clause. - * - * If the schema has soft-delete enabled via `.softDelete()`, this performs - * an `UPDATE SET deleted_at = NOW()` instead of a real `DELETE`. - * Use {@link hardDelete} for permanent deletion. - * - * @returns The number of rows deleted (or soft-deleted). - * - * @example - * ```ts - * const count = await query(db, UserSchema).where(t => t.id, id).delete(); - * ``` - */ - async delete(): Promise { - // Run beforeDelete hooks - const hooks = - ((this.#localSchema as any).getExtension?.('beforeDelete') as - | Function[] - | undefined) ?? 
[]; - for (const hook of hooks) { - await hook(this); - } - - const softDelete = this.#getSoftDelete(); - if (softDelete) { - return this.#baseQuery.update({ - [softDelete.column]: this.#knex.fn.now() - }); + chunkSize?: number; } - return this.#baseQuery.delete(); + ): Promise { + return (bulkUpsertImpl as any)(this, rows, opts); } // ======================================================================= - // BULK WRITE OPERATIONS + // UPDATE // ======================================================================= - /** - * Bulk-insert many rows in chunks. The default chunk size of `500` keeps - * comfortably below Postgres' parameter-count limit of 65535. The chunk - * size also auto-shrinks when the number of bindings per row would - * exceed that ceiling. - * - * When `opts.onConflict` is supplied each chunk is wrapped in an - * `INSERT ... ON CONFLICT` clause: - * - * - `'ignore'` — `ON CONFLICT (...) DO NOTHING` - * - `'merge'` — `ON CONFLICT (...) DO UPDATE SET ...` (uses `conflictColumns` as - * the conflict target and updates every inserted column). - * - * `beforeInsert` / `afterInsert` hooks fire per row, identically to - * {@link insertMany}. - * - * @returns The inserted rows (excluding rows skipped by `'ignore'`). - */ - async bulkInsert( - rows: InsertType[], - opts?: { - chunkSize?: number; - onConflict?: 'ignore' | 'merge'; - conflictColumns?: ColumnRef[]; - } - ): Promise { - if (rows.length === 0) return []; - - const requestedChunkSize = opts?.chunkSize ?? 500; - const bindingsPerRow = Object.keys(rows[0] as object).length || 1; - const safeChunkCap = Math.max( - 1, - Math.floor(60000 / Math.max(1, bindingsPerRow)) - ); - const chunkSize = Math.max( - 1, - Math.min(requestedChunkSize, safeChunkCap) - ); - - const timestamps = this.#getTimestamps(); - const beforeHooks = - ((this.#localSchema as any).getExtension?.('beforeInsert') as - | Function[] - | undefined) ?? 
[]; - const afterHooks = - ((this.#localSchema as any).getExtension?.('afterInsert') as - | Function[] - | undefined) ?? []; - - const conflictCols = - opts?.onConflict && opts.conflictColumns - ? opts.conflictColumns.map( - c => - this.#resolveColumn( - c, - 'bulkInsert.onConflict' - ) as string - ) - : null; - if (opts?.onConflict && (!conflictCols || conflictCols.length === 0)) { - throw new Error( - 'bulkInsert: `conflictColumns` is required when `onConflict` is set.' - ); - } - - const results: TResult[] = []; - - for (let i = 0; i < rows.length; i += chunkSize) { - const chunk = rows.slice(i, i + chunkSize); - const mapped: Record[] = []; - for (const row of chunk) { - let processed = { ...(row as Record) }; - for (const hook of beforeHooks) { - processed = (await hook(processed)) ?? processed; - } - const m = this.#mapObjectToColumns(processed); - if (timestamps) { - m[timestamps.createdAt] = this.#knex.fn.now(); - m[timestamps.updatedAt] = this.#knex.fn.now(); - } - mapped.push(m); - } - - let qb: any = this.#knex(this.#tableName).insert(mapped); - - if (conflictCols) { - qb = qb.onConflict(conflictCols); - if (opts!.onConflict === 'ignore') { - qb = qb.ignore(); - } else { - // Merge — update every non-conflict-target column. - const updateCols = new Set(); - for (const m of mapped) { - for (const k of Object.keys(m)) updateCols.add(k); - } - for (const c of conflictCols) updateCols.delete(c); - if (timestamps) { - updateCols.delete(timestamps.createdAt); - updateCols.add(timestamps.updatedAt); - } - qb = qb.merge(Array.from(updateCols)); - } - } - - const inserted: any[] = await qb.returning('*'); - for (const row of inserted) { - const mappedRow = this.#mapRow(row) as TResult; - for (const hook of afterHooks) { - await hook(mappedRow); - } - results.push(mappedRow); - } - } - - return results; - } - - /** - * Bulk-upsert many rows in chunks. Equivalent to - * `.bulkInsert(rows, { onConflict: 'merge', conflictColumns })`. 
- */ - async bulkUpsert( - rows: InsertType[], - opts: { - conflictColumns: ColumnRef[]; - chunkSize?: number; - } - ): Promise { - return this.bulkInsert(rows, { - chunkSize: opts.chunkSize, - onConflict: 'merge', - conflictColumns: opts.conflictColumns - }); + async update(data: Partial>): Promise { + return updateImpl(this as any, data) as Promise; } - /** - * Bulk-update many rows in a single SQL statement using a CASE - * expression keyed on the entity's primary key. - * - * Each entry's `where` clause must fully match the entity's primary key - * columns (single or composite). Updates that touch different columns - * are coalesced into one statement; rows whose PK appears in `updates` - * but whose `set` does not contain a given column retain their existing - * value. - * - * @returns The number of rows affected. - */ async bulkUpdate( updates: ReadonlyArray<{ where: Partial>; set: Partial>; }> ): Promise { - if (updates.length === 0) return 0; - - const pk = this.#resolvePkColumns(); - const { propToCol } = buildColumnMap(this.#localSchema as any); - const beforeHooks = - ((this.#localSchema as any).getExtension?.('beforeUpdate') as - | Function[] - | undefined) ?? []; - const timestamps = this.#getTimestamps(); - - // Pre-process: run hooks, map keys, validate PK presence in `where`. - const processed: Array<{ - pkValues: unknown[]; - set: Record; - }> = []; - for (const entry of updates) { - let setData = { ...(entry.set as Record) }; - for (const hook of beforeHooks) { - setData = (await hook(setData)) ?? setData; - } - const setMapped = this.#mapObjectToColumns(setData); - if (timestamps) { - setMapped[timestamps.updatedAt] = this.#knex.fn.now(); - } - - const whereRec = entry.where as Record; - const pkValues: unknown[] = []; - for (const propKey of pk.propertyKeys) { - const value = - propKey in whereRec - ? whereRec[propKey] - : (whereRec[propToCol.get(propKey) ?? 
propKey] as - | unknown - | undefined); - if (value === undefined) { - throw new Error( - `bulkUpdate: each \`where\` clause must include the entity's primary key (missing "${propKey}").` - ); - } - pkValues.push(value); - } - processed.push({ pkValues, set: setMapped }); - } + return bulkUpdateImpl(this as any, updates as any); + } - // Collect every column referenced in any `set`. - const allSetCols = new Set(); - for (const p of processed) { - for (const k of Object.keys(p.set)) allSetCols.add(k); - } - if (allSetCols.size === 0) return 0; - - // Build the SET clause: one CASE per column, keyed by PK. - const knex = this.#knex; - const updateExpr: Record = {}; - for (const col of allSetCols) { - const fragments: string[] = []; - const bindings: unknown[] = []; - for (const p of processed) { - if (!(col in p.set)) continue; - if (pk.columnNames.length === 1) { - fragments.push('WHEN ?? = ? THEN ?'); - bindings.push(pk.columnNames[0], p.pkValues[0], p.set[col]); - } else { - const conditions = pk.columnNames - .map(() => '?? = ?') - .join(' AND '); - fragments.push(`WHEN ${conditions} THEN ?`); - for (let i = 0; i < pk.columnNames.length; i++) { - bindings.push(pk.columnNames[i], p.pkValues[i]); - } - bindings.push(p.set[col]); - } - } - if (fragments.length === 0) continue; - // ELSE keeps the existing column value untouched. - updateExpr[col] = knex.raw( - `CASE ${fragments.join(' ')} ELSE ?? END`, - [...bindings, col] as any - ); - } + // ======================================================================= + // DELETE / SOFT DELETE + // ======================================================================= - // WHERE: restrict to the PK tuples covered by the batch. 
- let qb: any = knex(this.#tableName).update(updateExpr); - if (pk.columnNames.length === 1) { - qb = qb.whereIn( - pk.columnNames[0], - processed.map(p => p.pkValues[0]) - ); - } else { - qb = qb.where(function (this: Knex.QueryBuilder) { - for (const p of processed) { - this.orWhere(function (this: Knex.QueryBuilder) { - for (let i = 0; i < pk.columnNames.length; i++) { - this.andWhere( - pk.columnNames[i], - p.pkValues[i] as any - ); - } - }); - } - }); - } + async delete(): Promise { + return deleteImpl(this as any); + } - return await qb; + withDeleted(): this { + return withDeletedImpl(this as any); } - /** - * @internal Resolve the entity's primary-key columns, throwing a clear - * error when none is declared. Used by bulk-update / find helpers. - */ - #resolvePkColumns(): { - propertyKeys: readonly string[]; - columnNames: readonly string[]; - } { - const pk = getPrimaryKeyColumns(this.#localSchema as any); - if (pk.columnNames.length === 0) { - throw new Error( - 'No primary key declared on this schema. Use `.primaryKey()` on a column or `.hasPrimaryKey([...])` on the schema.' - ); - } - return pk; + onlyDeleted(): this { + return onlyDeletedImpl(this as any); } - // ======================================================================= - // ESCAPE HATCH - // ======================================================================= + async hardDelete(): Promise { + return hardDeleteImpl(this as any); + } - /** - * Escape hatch: apply any Knex method to the underlying base query. - * - * Use this when you need a Knex feature not exposed by this API (e.g. - * `forUpdate()`, CTEs, `join()`, `union()`). - * - * @param fn - A callback that receives the raw `Knex.QueryBuilder` and - * may mutate it in place. - * @returns `this` for chaining. 
- * - * @example - * ```ts - * query(db, UserSchema).apply(qb => qb.forUpdate().noWait()); - * ``` - */ - apply(fn: (builder: Knex.QueryBuilder) => void): this { - this.#invalidateCache(); - fn(this.#baseQuery); - return this; + async restore(): Promise { + return restoreImpl(this as any) as Promise; } // ======================================================================= - // Transaction support + // EAGER LOADING (JOIN) // ======================================================================= - /** - * Bind this query builder to a Knex transaction. - * - * Returns a **new** builder that runs all operations — SELECT, INSERT, - * UPDATE, DELETE, and eager-loaded sub-queries — within the given - * transaction. The original builder is left unchanged. - * - * Use this when you already have a transaction obtained from - * `knex.transaction()` and want all operations performed by the returned - * builder to participate in that transaction. - * - * @param trx - The Knex transaction obtained from `knex.transaction()`. - * @returns A new {@link SchemaQueryBuilder} bound to the transaction. - * - * @example - * ```ts - * async function createUser( - * data: InsertType, - * trx: Knex.Transaction - * ) { - * return query(db, UserSchema).transacting(trx).insert(data); - * } - * - * await db.transaction(async trx => { - * const user = await createUser({ name: 'Alice' }, trx); - * await query(db, PostSchema).transacting(trx).insert({ authorId: user.id, title: 'Hello' }); - * }); - * ``` - */ - transacting( - trx: Knex.Transaction - ): SchemaQueryBuilder { - const builder = new SchemaQueryBuilder( - trx as unknown as Knex, - this.#localSchema, - this.#baseQuery.clone().transacting(trx) - ); - for (const spec of this.#specs) { - builder.#specs.push({ - ...spec, - foreignQuery: spec.foreignQuery.clone().transacting(trx) - }); - } - builder.#explicitSelects = this.#explicitSelects - ? 
[...this.#explicitSelects] - : null; - builder.#selectionMode = this.#selectionMode; - builder.#appliedProjection = this.#appliedProjection; - builder.#includeDeleted = this.#includeDeleted; - builder.#onlyDeleted = this.#onlyDeleted; - builder.#skipDefaultScope = this.#skipDefaultScope; - // Copy polymorphic variant state - builder.#variantConfig = this.#variantConfig; - builder.#enabledVariants = - this.#enabledVariants !== null - ? new Set(this.#enabledVariants) - : null; - builder.#variantWhereFilters = [...this.#variantWhereFilters]; - builder.#variantRelationIncludes = [...this.#variantRelationIncludes]; - return builder; + joinOne< + TForeignSchema extends ObjectSchemaBuilder< + any, + any, + any, + any, + any, + any, + any + >, + TFieldName extends string, + TRequired extends boolean = true + >( + spec: JoinOneSpec + ): SchemaQueryBuilder< + TLocalSchema, + import('./types.js').WithJoinedOne< + TResult, + TFieldName, + TForeignSchema, + TRequired + > + > { + return joinOneImpl(this as any, spec); } - // ======================================================================= - // Include (relation-based eager loading) - // ======================================================================= + joinMany< + TForeignSchema extends ObjectSchemaBuilder< + any, + any, + any, + any, + any, + any, + any + >, + TFieldName extends string + >( + spec: JoinManySpec + ): SchemaQueryBuilder< + TLocalSchema, + import('./types.js').WithJoinedMany + > { + return joinManyImpl(this as any, spec); + } - /** - * Eager-load a named relation defined via `.hasMany()`, `.belongsTo()`, - * `.hasOne()`, or `.belongsToMany()` on the schema. - * - * @param relationName - The relation name passed to the schema's relation method. - * @param customize - Optional callback to customise the foreign query - * (e.g. add ordering, limits). - * @returns `this` for chaining. 
- * - * @example - * ```ts - * const posts = await query(db, PostWithRelations) - * .include('author') - * .include('tags'); - * ``` - */ include( relationName: string, customize?: (q: SchemaQueryBuilder) => void ): this { - this.#invalidateCache(); - const relations: RelationSpec[] = - (this.#localSchema as any).getExtension?.('relations') ?? []; - const relation = relations.find( - (r: RelationSpec) => r.name === relationName - ); - if (!relation) { - // Try variant relations as fallback (auto-routing) - const variantConfig = this.#getVariantConfig(); - if (variantConfig) { - const matches: Array<{ variantKey: string }> = []; - for (const [vKey, vSpec] of Object.entries( - variantConfig.variants - )) { - if ( - vSpec.relations.some( - (r: ResolvedVariantRelationSpec) => - r.name === relationName - ) - ) { - matches.push({ variantKey: vKey }); - } - } - if (matches.length === 1) { - return this.includeVariant( - matches[0].variantKey, - relationName, - customize - ); - } - if (matches.length > 1) { - throw new Error( - `Ambiguous relation "${relationName}" — found on variants: ${matches.map(m => m.variantKey).join(', ')}. 
Use .includeVariant(key, name) to be explicit.` - ); - } - } - throw new Error( - `Unknown relation "${relationName}" on schema for table "${this.#tableName}"` - ); - } - - const foreignSchema = this.#resolveSchema(relation.schema); - const foreignTableName = getTableName(foreignSchema); - - switch (relation.type) { - case 'belongsTo': { - const localColumn = resolveColumnRef( - relation.foreignKey, - this.#localSchema, - 'foreignKey' - ); - const foreignColumn = this.#findPrimaryKeyColumn(foreignSchema); - - const foreignQuery1: Knex.QueryBuilder = - this.#knex(foreignTableName); - if (customize) { - const proxy = new SchemaQueryBuilder( - this.#knex, - foreignSchema, - foreignQuery1 - ); - customize(proxy); - } - - this.joinOne({ - foreignSchema, - localColumn, - foreignColumn, - as: relationName, - foreignQuery: foreignQuery1 - } as any); - break; - } - case 'hasOne': { - const localColumn = this.#findPrimaryKeyColumn( - this.#localSchema - ); - const foreignColumn = resolveColumnRef( - relation.foreignKey, - foreignSchema, - 'foreignKey' - ); - - const foreignQuery2: Knex.QueryBuilder = - this.#knex(foreignTableName); - if (customize) { - const proxy = new SchemaQueryBuilder( - this.#knex, - foreignSchema, - foreignQuery2 - ); - customize(proxy); - } - - this.joinOne({ - foreignSchema, - localColumn, - foreignColumn, - as: relationName, - required: false, - foreignQuery: foreignQuery2 - } as any); - break; - } - case 'hasMany': { - const localColumn = this.#findPrimaryKeyColumn( - this.#localSchema - ); - const foreignColumn = resolveColumnRef( - relation.foreignKey, - foreignSchema, - 'foreignKey' - ); - - const foreignQuery3: Knex.QueryBuilder = - this.#knex(foreignTableName); - if (customize) { - const proxy = new SchemaQueryBuilder( - this.#knex, - foreignSchema, - foreignQuery3 - ); - customize(proxy); - } - - this.joinMany({ - foreignSchema, - localColumn, - foreignColumn, - as: relationName, - foreignQuery: foreignQuery3 - } as any); - break; - } - case 
'belongsToMany': { - const through = relation.through!; - const localColumn = this.#findPrimaryKeyColumn( - this.#localSchema - ); - - const foreignQuery = this.#knex(foreignTableName) - .join( - through.table, - `${through.table}.${through.foreignKey}`, - `${foreignTableName}.${this.#findPrimaryKeyColumn(foreignSchema)}` - ) - .select( - `${foreignTableName}.*`, - `${through.table}.${through.localKey}` - ); - - if (customize) { - const proxy = new SchemaQueryBuilder( - this.#knex, - foreignSchema, - foreignQuery - ); - customize(proxy); - } - - this.joinMany({ - foreignSchema, - localColumn, - foreignColumn: through.localKey, - as: relationName, - foreignQuery - } as any); - break; - } - } - - return this; + return includeImpl(this as any, relationName, customize); } - /** - * Eager-load a named relation declared inside a `withVariants` variant spec. - * - * The relation is loaded via a LEFT JOIN on the variant's alias table and - * only populated on rows whose discriminator matches `variantKey`. - * - * @param variantKey - The variant key (e.g. `'assigned'`). - * @param relationName - The relation name declared in `variants[key].relations`. - * @param customize - Optional callback to restrict which columns are - * selected from the foreign table (scope / projection). 
- * - * @example - * ```ts - * await query(db, TodoActivity) - * .includeVariant('assigned', 'assignee', q => q.projected('summary')); - * ``` - */ includeVariant( variantKey: string, relationName: string, customize?: (q: SchemaQueryBuilder) => void ): this { - this.#invalidateCache(); - const variantConfig = this.#getVariantConfig(); - if (!variantConfig) { - throw new Error( - `includeVariant: schema for table "${this.#tableName}" is not polymorphic (no .withVariants() config found)` - ); - } - const variantSpec = variantConfig.variants[variantKey]; - if (!variantSpec) { - throw new Error( - `includeVariant: unknown variant key "${variantKey}" on schema for table "${this.#tableName}"` - ); - } - const relSpec = variantSpec.relations.find( - (r: ResolvedVariantRelationSpec) => r.name === relationName - ); - if (!relSpec) { - throw new Error( - `includeVariant: unknown relation "${relationName}" on variant "${variantKey}" of table "${this.#tableName}"` - ); - } - this.#variantRelationIncludes.push({ + return includeVariantImpl( + this as any, variantKey, relationName, customize - }); - return this; - } - - // ======================================================================= - // Scopes - // ======================================================================= - - /** - * Apply a named scope defined on the schema via `.scope(name, fn)`. - * - * The `name` parameter is constrained to the literal scope names registered - * on the schema, so IDEs show only valid completions and typos are caught - * at compile time. - * - * @param name - The scope name. - * @returns `this` for chaining. - * - * @example - * ```ts - * await query(db, Post).scoped('published').scoped('recent'); - * ``` - */ - /** - * Apply a **named projection** defined on the schema via - * `.projection(name, columns)`. - * - * Calling `.projected()` on the query builder does two things: - * 1. 
Restricts the SQL `SELECT` clause to the columns registered under - * `name` (SQL column names are resolved via `.hasColumnName()`). - * 2. Narrows the TypeScript result row type to `Pick` so - * accessing columns outside the projection is a compile-time error. - * - * The `name` parameter is constrained to the literal projection names - * registered on the schema — TypeScript will report an error for any - * unregistered name. - * - * Calling `.projected()` after `.select()`, any aggregate method - * (`.count()`, `.min()`, etc.), or a second `.projected()` call throws - * at runtime with a clear error message. - * - * @param name - The projection name. - * @returns A new builder whose result type is `Pick`. - * - * @example - * ```ts - * const PostSchema = object({ id: number(), title: string(), body: string() }) - * .hasTableName('posts') - * .projection('summary', 'id', 'title'); - * - * const rows = await query(db, PostSchema) - * .scoped('published') - * .projected('summary'); - * // rows: Array> - * // rows[0].body // ← TS error: not in projection - * ``` - * - * @see {@link ddlExtension} `.projection()` for schema-side definition. - */ - projected & string>( - name: K - ): SchemaQueryBuilder< - TLocalSchema, - Pick & keyof TResult> - > { - this.#assertNotExplicitSelect('projected'); - if (this.#selectionMode === 'projection') { - throw new Error( - `Cannot call .projected('${name}') — .projected('${ - this.#appliedProjection - }') was already applied. Only one projection per query.` - ); - } - const projections = getProjections(this.#localSchema as any); - const projection = projections[name]; - if (!projection) { - throw new Error( - `Unknown projection "${name}" on schema for table "${ - this.#tableName - }"` - ); - } - // Translate property keys → SQL column names - const { propToCol } = buildColumnMap(this.#localSchema as any); - const sqlCols = projection.keys.map(key => propToCol.get(key) ?? 
key); - this.#baseQuery.select(...sqlCols); - this.#explicitSelects ??= []; - for (const col of sqlCols) { - this.#explicitSelects.push(col); - } - this.#selectionMode = 'projection'; - this.#appliedProjection = name; - this.#invalidateCache(); - return this as any; - } - - scoped>(name: K): this { - this.#invalidateCache(); - const scopes = (this.#localSchema as any).getExtension?.('scopes') as - | Record - | undefined; - const scopeFn = scopes?.[name]; - if (!scopeFn) { - throw new Error( - `Unknown scope "${name}" on schema for table "${this.#tableName}"` - ); - } - scopeFn(this); - return this; - } - - /** - * Bypass the default scope (and soft-delete scope) for this query. - * - * @returns `this` for chaining. - * - * @example - * ```ts - * await query(db, Post).unscoped().where(t => t.id, 1); - * ``` - */ - unscoped(): this { - this.#invalidateCache(); - this.#skipDefaultScope = true; - this.#includeDeleted = true; - return this; + ); } // ======================================================================= - // Polymorphic variant methods + // POLYMORPHIC VARIANTS // ======================================================================= - /** - * Add a WHERE condition that applies **only to rows matching a specific - * variant** of a polymorphic schema. Rows for other variants pass through - * unaffected (the condition is ORed away for non-matching discriminator values). - * - * The column name is resolved against the **variant's** schema properties. - * - * Only valid on a polymorphic schema (created via `.withVariants()`). - * - * @param key - The discriminator value identifying the variant (e.g. `'image'`). - * @param column - Property key on the variant's schema (e.g. `'width'`). - * @param operator - SQL comparison operator (`'='`, `'>'`, `'<'`, `'like'`, etc.). - * @param value - The value to compare against. - * @returns `this` for chaining. 
- * - * @example - * ```ts - * // Return all documents and only images wider than 1024 px - * const files = await query(db, FileSchema) - * .whereVariant('image', 'width', '>', 1024); - * ``` - */ whereVariant( key: string, column: string, operator: string, value: any ): this { - const variantConfig = this.#getVariantConfig(); + const state = getState(this); + const variantConfig = getVariantConfig(this); if (!variantConfig) { throw new Error( 'whereVariant() can only be used on a polymorphic schema (created with .withVariants())' @@ -2924,14 +633,13 @@ export class SchemaQueryBuilder< } const op = operator.toLowerCase(); - if (!SchemaQueryBuilder.#ALLOWED_OPS.has(op)) { + if (!ALLOWED_OPS.has(op)) { throw new Error( `whereVariant: operator "${operator}" is not allowed. ` + - `Allowed operators: ${[...SchemaQueryBuilder.#ALLOWED_OPS].join(', ')}` + `Allowed operators: ${[...ALLOWED_OPS].join(', ')}` ); } - // Resolve property key → SQL column name via variant schema column map const { propToCol } = buildColumnMap(spec.schema); const colName = propToCol.get(column) ?? column; @@ -2939,768 +647,117 @@ export class SchemaQueryBuilder< if (spec.storage === 'cti') { qualifiedColumn = `__v_${key}.${colName}`; } else { - // STI: column is on the base table - qualifiedColumn = `${this.#tableName}.${colName}`; + qualifiedColumn = `${state.tableName}.${colName}`; } - this.#variantWhereFilters.push({ key, qualifiedColumn, op, value }); - this.#invalidateCache(); + state.variantWhereFilters.push({ key, qualifiedColumn, op, value }); + invalidateCache(this); return this; } - /** - * Restrict the query to only return rows for the specified variant keys. - * - * Adds `WHERE IN (...)` to the query and skips the LEFT - * JOINs for excluded variants. This is more efficient than filtering after - * loading all variants. - * - * Only valid on a polymorphic schema (created via `.withVariants()`). - * - * @param keys - Discriminator values to include (e.g. `['image', 'document']`). 
- * @returns `this` for chaining. - * - * @example - * ```ts - * const images = await query(db, FileSchema).selectVariants(['image']); - * // images: Array<{ id; name; type: 'image'; width; height; format }> - * ``` - */ selectVariants(keys: string[]): this { - if (!this.#getVariantConfig()) { + const state = getState(this); + if (!getVariantConfig(this)) { throw new Error( 'selectVariants() can only be used on a polymorphic schema (created with .withVariants())' ); } - this.#enabledVariants = new Set(keys); - this.#invalidateCache(); - return this; - } - - // ======================================================================= - // Soft delete methods - // ======================================================================= - - /** - * Include soft-deleted rows in the results. - * - * By default, schemas with `.softDelete()` automatically filter out - * rows where `deleted_at IS NOT NULL`. Call `.withDeleted()` to - * include them. - * - * @returns `this` for chaining. - */ - withDeleted(): this { - this.#invalidateCache(); - this.#includeDeleted = true; + state.enabledVariants = new Set(keys); + invalidateCache(this); return this; } - /** - * Return only soft-deleted rows (`WHERE deleted_at IS NOT NULL`). - * - * @returns `this` for chaining. - */ - onlyDeleted(): this { - this.#invalidateCache(); - this.#onlyDeleted = true; - this.#includeDeleted = true; - return this; - } - - /** - * Permanently delete rows matching the current WHERE clause, bypassing - * the soft-delete mechanism. - * - * @returns The number of rows deleted. - */ - async hardDelete(): Promise { - // Run beforeDelete hooks - const hooks = - ((this.#localSchema as any).getExtension?.('beforeDelete') as - | Function[] - | undefined) ?? []; - for (const hook of hooks) { - await hook(this); - } - return this.#baseQuery.delete(); - } - - /** - * Restore soft-deleted rows by setting `deleted_at = NULL`. - * - * @returns The restored rows. 
- */ - async restore(): Promise { - const softDelete = this.#getSoftDelete(); - if (!softDelete) { - throw new Error( - 'Schema does not have soft delete enabled. Use .softDelete() on the schema.' - ); - } - const rows = await this.#baseQuery - .update({ [softDelete.column]: null }) - .returning('*'); - return rows.map((row: any) => this.#mapRow(row) as TResult); - } - - // ======================================================================= - // Pagination // ======================================================================= - - /** - * Execute an offset-based paginated query. - * - * Runs a count query and a data query in parallel. Returns the page data - * along with pagination metadata. - * - * @param opts - `{ page, pageSize }` — 1-based page number and page size. - * @returns A {@link PaginationResult} with data, total count, and page info. - * - * @example - * ```ts - * const page = await query(db, Post) - * .where(t => t.status, 'published') - * .paginate({ page: 2, pageSize: 20 }); - * // page.data, page.total, page.totalPages, page.hasNextPage, ... - * ``` - */ - async paginate(opts: { - page: number; - pageSize: number; - }): Promise> { - const { page, pageSize } = opts; - - const effectiveBase = this.#getEffectiveBaseQuery(); - - // Count query (no CTE, no ordering, no eager loading) - const countResult = await effectiveBase - .clone() - .clearSelect() - .clearOrder() - .count('* as count') - .first(); - const total = Number((countResult as any)?.count ?? 0); - - // Data query (with CTE for eager loading) - this.limit(pageSize).offset((page - 1) * pageSize); - const data = await this.execute(); - - const totalPages = Math.ceil(total / pageSize); - - return { - data, - total, - page, - pageSize, - totalPages, - hasNextPage: page < totalPages, - hasPreviousPage: page > 1 - }; - } - - /** - * Execute a cursor-based (keyset) paginated query. - * - * More efficient than offset pagination for large datasets. 
Fetches one - * extra row to determine whether more data exists. - * - * @param opts - `{ cursor, limit, column?, direction? }`. - * @returns A {@link CursorPaginationResult} with data, next cursor, and - * `hasMore` flag. - * - * @example - * ```ts - * const page = await query(db, Post) - * .orderBy(t => t.createdAt, 'desc') - * .paginateAfter({ cursor: lastCreatedAt, limit: 20 }); - * ``` - */ - async paginateAfter(opts: { - cursor?: any; - limit: number; - column?: ColumnRef; - direction?: 'asc' | 'desc'; - }): Promise> { - const direction = opts.direction ?? 'desc'; - const column = opts.column ?? ('id' as any); - - if (opts.cursor != null) { - const op = direction === 'desc' ? '<' : '>'; - this.where(column, op, opts.cursor); - } - - this.orderBy(column, direction).limit(opts.limit + 1); - const rows = await this.execute(); - - const hasMore = rows.length > opts.limit; - const data = hasMore ? rows.slice(0, opts.limit) : rows; - - const propKey = - typeof column === 'string' - ? column - : resolvePropertyKey( - column as any, - this.#localSchema, - 'cursor' - ); - - const nextCursor = - hasMore && data.length > 0 - ? String((data[data.length - 1] as any)[propKey]) - : null; - - return { data, nextCursor, hasMore }; - } - - // ======================================================================= - // Select Raw + // ESCAPE HATCH // ======================================================================= - /** - * Add a raw SQL expression to the SELECT clause. - * - * @param sql - Raw SQL (e.g. `'*, ts_rank(vector, query) AS rank'`). - * @param bindings - Optional parameter bindings. - * @returns `this` for chaining. 
- */ - selectRaw(sql: string, bindings?: any[]): this { - this.#invalidateCache(); - if (bindings) { - this.#baseQuery.select(this.#knex.raw(sql, bindings)); - } else { - this.#baseQuery.select(this.#knex.raw(sql)); - } + apply(fn: (builder: Knex.QueryBuilder) => void): this { + const state = getState(this); + invalidateCache(this); + fn(state.baseQuery); return this; } // ======================================================================= - // CTE-based eager loading query building (from knex-eager) + // TRANSACTION // ======================================================================= - #buildQuery(): Knex.QueryBuilder { - const effectiveBase = this.#getEffectiveBaseQuery(); - - // Apply polymorphic variant joins (CTI LEFT JOINs + selectVariants / - // whereVariant filters) before any CTE wrapping so the CTE also - // contains variant columns. - const variantConfig = this.#getVariantConfig(); - const queryBase = variantConfig - ? this.#applyVariantJoins(effectiveBase, variantConfig) - : effectiveBase; - - if (this.#specs.length === 0) { - return queryBase; - } - - const knex = this.#knex; - const specs = this.#specs; - - // Collect all localColumns needed for CTE joins - const requiredLocalColumns = [ - ...new Set(specs.map(s => s.localColumn)) - ]; - - // If the caller used .select(...), some localColumns may have been - // omitted. Clone the base query and ensure those columns are always - // included in the CTE so the join conditions work at runtime. - // Track which columns we added so they can be excluded from the - // final SELECT (preserving the original column set the caller asked for). 
- let cteQuery = queryBase; - let extraColumns: string[] = []; - - if (this.#explicitSelects !== null) { - const selectedSet = new Set(this.#explicitSelects); - extraColumns = requiredLocalColumns.filter( - col => !selectedSet.has(col) - ); - if (extraColumns.length > 0) { - cteQuery = queryBase.clone(); - for (const col of extraColumns) { - cteQuery.column(col); - } - } - } - - // Build the outer query that wraps the CTE. - // When we added extra columns, select only the original columns + joined - // aliases (instead of originalQuery.*) so the caller's column set is - // preserved in the final result. - const resultQuery = knex.queryBuilder().with('originalQuery', cteQuery); - - if (extraColumns.length > 0 && this.#explicitSelects !== null) { - // Explicit column list: original user selections only - for (const col of this.#explicitSelects) { - resultQuery.select( - knex.raw(':originalQuery:.:col: as :col:', { - originalQuery: 'originalQuery', - col - }) - ); - } - } else { - resultQuery.select('originalQuery.*'); - } - - resultQuery.from( - knex.raw(':originalQuery:', { - originalQuery: 'originalQuery' - }) - ); - - for (let i = 0; i < specs.length; i++) { - const spec = specs[i]; - const relationAlias = `eagerRelation${i}`; - - if (spec.type === 'one') { - this.#buildJoinOne(resultQuery, spec, relationAlias); - } else { - this.#buildJoinMany(resultQuery, spec, relationAlias, i); - } - } - - return resultQuery; - } - - #buildJoinOne( - resultQuery: Knex.QueryBuilder, - spec: ValidatedSpec & { type: 'one' }, - relationAlias: string - ): void { - const knex = this.#knex; - const foreignTable = spec.foreignQuery; - const foreignTableName = (foreignTable as any)._single?.table; - - if (!foreignTableName) { - throw new Error( - `Could not determine table name from foreignQuery for "${spec.as}". ` + - 'Make sure foreignQuery is created via knex("tableName").' 
- ); - } - - resultQuery.select( - knex.raw(':relationAlias:.:as:->0 as :as:', { - relationAlias, - as: spec.as - }) + transacting( + trx: Knex.Transaction + ): SchemaQueryBuilder { + const state = getState(this); + const builder = new SchemaQueryBuilder( + trx as unknown as Knex, + state.localSchema as TLocalSchema, + state.baseQuery.clone().transacting(trx) ); - - const subquery = knex - .from(foreignTable.as(foreignTableName)) - .select( - knex.raw(':foreignTable:.:foreignColumn:', { - foreignTable: foreignTableName, - foreignColumn: spec.foreignColumn - }) - ) - .select( - knex.raw('jsonb_agg(:foreignTable:) as :as:', { - foreignTable: foreignTableName, - as: spec.as - }) - ) - .groupByRaw(':foreignTable:.:foreignColumn:', { - foreignTable: foreignTableName, - foreignColumn: spec.foreignColumn - }) - .as(relationAlias); - - const joinMethod = spec.required ? 'join' : 'leftJoin'; - resultQuery[joinMethod](subquery, function () { - this.on( - knex.raw( - ':relationAlias:.:foreignColumn: = :originalQuery:.:localColumn:', - { - originalQuery: 'originalQuery', - relationAlias, - foreignColumn: spec.foreignColumn, - localColumn: spec.localColumn - } - ) - ); - }); - } - - #buildJoinMany( - resultQuery: Knex.QueryBuilder, - spec: ValidatedSpec & { type: 'many' }, - relationAlias: string, - i: number - ): void { - const knex = this.#knex; - const filterName = `withFilter${i}`; - - const hasLimitOffset = - (spec.limit !== null && spec.limit > 0) || - (spec.offset !== null && spec.offset > 0); - - const orderByColumn = spec.orderBy - ? spec.orderBy.column - : spec.foreignColumn; - const orderByDirection = spec.orderBy ? 
spec.orderBy.direction : 'asc'; - - if (hasLimitOffset) { - resultQuery.with( - filterName, - knex - .from( - spec.foreignQuery - .clone() - .whereIn( - spec.foreignColumn, - knex - .from( - knex.raw(':originalQuery:', { - originalQuery: 'originalQuery' - }) - ) - .distinct(spec.localColumn) - ) - .as(`__wf_inner_${i}`) - ) - .select(`__wf_inner_${i}.*`) - .select( - knex.raw( - `row_number() over(partition by :foreignColumn: order by :orderByColumn: ${orderByDirection}) as "__rn__"`, - { - foreignColumn: spec.foreignColumn, - orderByColumn - } - ) - ) - ); - } else { - resultQuery.with( - filterName, - spec.foreignQuery.clone().whereIn( - spec.foreignColumn, - knex - .from( - knex.raw(':originalQuery:', { - originalQuery: 'originalQuery' - }) - ) - .distinct(spec.localColumn) - ) - ); - } - - const aggSubquery = knex.from(filterName); - - if (hasLimitOffset) { - const hasLimit = spec.limit !== null && spec.limit > 0; - const hasOffset = spec.offset !== null && spec.offset > 0; - const effectiveOffset = spec.offset ?? 0; - const effectiveLimit = effectiveOffset + (spec.limit ?? 0); - - const condition = - hasLimit && hasOffset - ? '"__rn__" > :offset and "__rn__" <= :limit' - : hasLimit - ? '"__rn__" <= :limit' - : '"__rn__" > :offset'; - - aggSubquery.whereRaw(condition, { - limit: effectiveLimit, - offset: effectiveOffset + const builderState = getState(builder); + for (const spec of state.specs) { + builderState.specs.push({ + ...spec, + foreignQuery: spec.foreignQuery.clone().transacting(trx) }); - - aggSubquery.select( - knex.raw(':foreignColumn:', { - foreignColumn: spec.foreignColumn - }) - ); - aggSubquery.select( - knex.raw( - "coalesce(jsonb_agg(to_jsonb(:filterName:) - '__rn__' order by \"__rn__\"), '[]'::jsonb) as :as:", - { filterName, as: spec.as } - ) - ); - } else { - aggSubquery.select( - knex.raw(':foreignColumn:', { - foreignColumn: spec.foreignColumn - }) - ); - - const orderClause = spec.orderBy - ? 
`jsonb_agg(:filterName: order by :filterName:.:orderByColumn: ${orderByDirection})` - : 'jsonb_agg(:filterName:)'; - - aggSubquery.select( - knex.raw( - `coalesce(${orderClause}, '[]'::jsonb) as :as:`, - spec.orderBy - ? { filterName, orderByColumn, as: spec.as } - : { filterName, as: spec.as } - ) - ); } - - aggSubquery.groupByRaw(':foreignColumn:', { - foreignColumn: spec.foreignColumn - }); - - const subquery = aggSubquery.as(relationAlias); - - resultQuery.select( - knex.raw("coalesce(:relationAlias:.:as:, '[]'::jsonb) as :as:", { - relationAlias, - as: spec.as - }) - ); - - resultQuery.leftJoin(subquery, function () { - this.on( - knex.raw( - ':relationAlias:.:foreignColumn: = :originalQuery:.:localColumn:', - { - relationAlias, - foreignColumn: spec.foreignColumn, - originalQuery: 'originalQuery', - localColumn: spec.localColumn - } - ) - ); - }); - } - - // ======================================================================= - // Result mapping - // ======================================================================= - - /** - * Map a SQL result row (column names) back to schema property names. - * Also handles joined fields (which are already named by `as`). - * Delegates to `#mapPolymorphicRow` for polymorphic schemas. - */ - #mapRow(row: Record): Record { - if (!row) return row; - - const variantConfig = this.#getVariantConfig(); - if (variantConfig) { - return this.#mapPolymorphicRow(row, variantConfig); - } - - const { colToProp } = buildColumnMap(this.#localSchema); - const result: Record = {}; - - // Map known columns back to property names - for (const [colName, value] of Object.entries(row)) { - const propName = colToProp.get(colName); - if (propName) { - result[propName] = value; - } else { - // Unknown column (e.g., joined field, raw expression) — pass through - result[colName] = value; - } - } - - return result; - } - - /** - * Clean a row that has eager-loaded relations (apply mappers, then map columns). 
- */ - #cleanAndMapRow(row: Record): Record { - const oneSpecs = this.#specs.filter( - (s): s is ValidatedSpec & { type: 'one' } => s.type === 'one' - ); - const manySpecs = this.#specs.filter( - (s): s is ValidatedSpec & { type: 'many' } => s.type === 'many' - ); - const cleaned = clearRow(row, oneSpecs, manySpecs); - return this.#mapRow(cleaned); - } - - // ======================================================================= - // Column mapping helpers - // ======================================================================= - - /** - * Map a schema-shaped object (property keys) to a SQL object (column names). - * Used for INSERT / UPDATE. - */ - #mapObjectToColumns(obj: Record): Record { - const { propToCol } = buildColumnMap(this.#localSchema); - const result: Record = {}; - - for (const [key, value] of Object.entries(obj)) { - const colName = propToCol.get(key); - if (colName) { - result[colName] = value; - } else { - // Unknown key — pass through (could be a raw column) - result[key] = value; - } - } - - return result; - } - - /** - * Map a Record to Record. - * Used for `.where({ name: 'John' })` style calls. - */ - #mapRecordToColumns(record: Record): Record { - return this.#mapObjectToColumns(record); - } - - /** - * Resolve a column argument that could be a ColumnRef, Knex.Raw, or callback. - * Returns the resolved string or passes through Knex.Raw. - */ - #resolveColumnArg(col: any): string | Knex.Raw { - if (typeof col === 'string') { - return this.#resolveColumn(col, 'column'); - } - if (typeof col === 'function') { - // Property descriptor accessor - return this.#resolveColumn(col, 'column'); - } - // Knex.Raw — pass through - return col; - } - - /** - * Detect if a function is a property descriptor accessor (takes tree, returns descriptor) - * vs a knex sub-builder callback (takes builder, returns void). - * - * Heuristic: property descriptor accessors are arrow functions that access - * tree properties. 
We cannot distinguish at runtime, so we try the accessor - * and fallback to callback if it fails. - * - * For safety, we check if the function parameter count can help: - * - Knex callbacks typically have 1 parameter named `builder` or `qb` - * - Property accessors typically have 1 parameter named `t` or similar - * - * Since both are `(arg) => result`, we use a try/catch approach: - * attempt to resolve as column accessor first. - */ - #isColumnAccessor(fn: Function): boolean { - // Try to invoke the accessor with the property descriptor tree - try { - const tree = ObjectSchemaBuilder.getPropertiesFor( - this.#localSchema as any - ); - const result = fn(tree); - // If it returns a valid property descriptor, it's an accessor - if ( - result && - typeof result === 'object' && - SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR in result - ) { - return true; - } - } catch { - // Not an accessor - } - return false; + builderState.explicitSelects = state.explicitSelects + ? [...state.explicitSelects] + : null; + builderState.selectionMode = state.selectionMode; + builderState.appliedProjection = state.appliedProjection; + builderState.includeDeleted = state.includeDeleted; + builderState.onlyDeleted = state.onlyDeleted; + builderState.skipDefaultScope = state.skipDefaultScope; + builderState.variantConfig = state.variantConfig; + builderState.enabledVariants = + state.enabledVariants !== null + ? new Set(state.enabledVariants) + : null; + builderState.variantWhereFilters = [...state.variantWhereFilters]; + builderState.variantRelationIncludes = [ + ...state.variantRelationIncludes + ]; + return builder; } // ======================================================================= - // Execution + // EXECUTION // ======================================================================= - /** - * Return the raw SQL string that would be executed, for debugging. - * Does not execute the query against the database. 
- */ toQuery(): string { - return this.#getQuery().toQuery(); + return getQuery(this).toQuery(); } - /** - * Returns the underlying Knex query builder. Useful when passing this - * query as a `foreignQuery` in `.joinOne()` / `.joinMany()`, or any context - * that expects a raw `Knex.QueryBuilder`. - */ toKnexQuery(): Knex.QueryBuilder { - return this.#getQuery(); + return getQuery(this); } - /** - * Alias for {@link toQuery} — returns the raw SQL string. - */ toString(): string { - return this.#getQuery().toString(); - } - - /** - * Execute the query and return all matching rows, mapped back to schema - * property names. - * - * @returns A promise that resolves to an array of result objects typed as - * `TResult[]`. - * - * @example - * ```ts - * const users = await query(db, UserSchema).execute(); - * ``` - */ + return getQuery(this).toString(); + } + async execute(): Promise { - const query = this.#getQuery(); - const rows = await query; - - if (!rows) return []; - if (!Array.isArray(rows)) - return [this.#cleanAndMapRow(rows)] as TResult[]; - - return rows.map((row: any) => this.#cleanAndMapRow(row)) as TResult[]; - } - - /** - * Execute the query and return only the first row, or `undefined` if no - * rows match. - * - * @example - * ```ts - * const user = await query(db, UserSchema).where(t => t.id, id).first(); - * if (user) { /* ... *\/ } - * ``` - */ + return executeImpl(this) as Promise; + } + async first(): Promise { - const query = this.#getQuery().first(); + const query = getQuery(this).first(); const row = await query; if (!row) return undefined; - return this.#cleanAndMapRow(row) as TResult; - } - - /** - * Execute the query and return an array of values for a single column. - * - * @param column - Column reference (property accessor or string key) for - * the column whose values should be returned. - * @returns A promise resolving to an array of values for that column. 
- * - * @example - * ```ts - * const names = await query(db, UserSchema).pluck(t => t.name); - * // names: string[] - * ``` - */ + return cleanAndMapRow(this, row) as TResult; + } + async pluck( column: ColumnRef ): Promise { - const col = this.#resolveColumn(column, 'pluck') as string; - const rows = await this.#buildQuery().select(col); + const _state = getState(this); + const col = resolveColumn(this, column, 'pluck') as string; + const rows = await buildQuery(this).select(col); return rows.map( (row: any) => row[col] ?? row[column as string] ) as TResult[K][]; } - /** - * Thenable implementation — allows the builder to be awaited directly - * without calling {@link execute} explicitly. - * - * @example - * ```ts - * const users = await query(db, UserSchema).where(t => t.name, 'Alice'); - * // Equivalent to: await query(db, UserSchema).where(...).execute() - * ``` - */ - // biome-ignore lint/suspicious/noThenProperty: intentional thenable for `await builder` support + // biome-ignore lint/suspicious/noThenProperty: intentional thenable then( onfulfilled?: | ((value: TResult[]) => TReturn1 | PromiseLike) @@ -3711,33 +768,13 @@ export class SchemaQueryBuilder< } } +// Register the constructor for circular-dependency-safe access +registerSchemaQueryBuilder(SchemaQueryBuilder); + // --------------------------------------------------------------------------- // query() — main entry point // --------------------------------------------------------------------------- -/** - * Create a typed {@link SchemaQueryBuilder} for the table described by `schema`. - * - * The schema must have a table name configured via `.hasTableName()`. - * Column name mappings set via `.hasColumnName()` are applied automatically - * to all query methods. The returned builder is thenable — you can `await` it - * directly to execute the query and get `TResult[]`. - * - * @param knex - A configured Knex instance. - * @param schema - The `ObjectSchemaBuilder` describing the table. 
- * @returns A new {@link SchemaQueryBuilder} ready for chaining. - * - * @example - * ```ts - * import knex from 'knex'; - * import { query, object, string, number } from '@cleverbrush/knex-schema'; - * - * const UserSchema = object({ id: number(), name: string() }).hasTableName('users'); - * const db = knex({ client: 'pg', connection: process.env.DB_URL }); - * - * const users = await query(db, UserSchema).where(t => t.name, 'like', 'A%'); - * ``` - */ export function query< TLocalSchema extends ObjectSchemaBuilder >( @@ -3745,24 +782,6 @@ export function query< schema: TLocalSchema ): SchemaQueryBuilder>; -/** - * Create a typed {@link SchemaQueryBuilder} from an existing Knex query builder. - * - * Use this overload when you need to supply a pre-configured base query — - * for example one that already has a sub-query, CTE, or a schema scope applied. - * - * @param knex - A configured Knex instance. - * @param schema - The `ObjectSchemaBuilder` describing the table. - * @param baseQuery - An existing `Knex.QueryBuilder` to use as the base. - * @returns A new {@link SchemaQueryBuilder} wrapping `baseQuery`. - * - * @example - * ```ts - * // Use a scoped base query (e.g. soft-delete filter applied globally) - * const base = db('users').where('deleted_at', null); - * const activeUsers = await query(db, UserSchema, base).where(t => t.age, '>', 18); - * ``` - */ export function query< TLocalSchema extends ObjectSchemaBuilder >( @@ -3789,7 +808,6 @@ export function query< // createQuery() — knex-bound factory // --------------------------------------------------------------------------- -/** Bound query function returned by {@link createQuery}. */ export interface BoundQuery { < TLocalSchema extends ObjectSchemaBuilder< @@ -3818,74 +836,10 @@ export interface BoundQuery { schema: TLocalSchema, baseQuery: Knex.QueryBuilder ): SchemaQueryBuilder>; - /** - * Return a version of this bound factory whose queries all run within the - * given Knex transaction. 
Equivalent to calling `.transacting(trx)` on - * each individual builder, but more convenient when every query in a block - * must share the same transaction. - * - * @example - * ```ts - * const db = createQuery(knex); - * - * await knex.transaction(async trx => { - * const dbTrx = db.withTransaction(trx); - * const user = await dbTrx(UserSchema).insert({ name: 'Alice' }); - * await dbTrx(PostSchema).insert({ authorId: user.id, title: 'Hello' }); - * }); - * ``` - */ withTransaction(trx: Knex.Transaction): BoundQuery; - /** - * Start a Knex transaction and run `callback` inside it, passing a - * transaction-bound `BoundQuery` factory as the argument. The transaction - * is committed when the callback resolves and rolled back if it rejects. - * - * This is the callback-style counterpart to {@link withTransaction} — you - * don't need to obtain a `Knex.Transaction` object yourself. - * - * @param callback - An async function that receives a transaction-bound - * `BoundQuery` and returns a value. The returned value is forwarded as - * the resolved value of the outer `Promise`. - * @returns A `Promise` that resolves with the value returned by `callback`. - * - * @example - * ```ts - * const db = createQuery(knex); - * - * const user = await db.transaction(async dbTrx => { - * const newUser = await dbTrx(UserSchema).insert({ name: 'Alice' }); - * await dbTrx(PostSchema).insert({ authorId: newUser.id, title: 'Hello' }); - * return newUser; - * }); - * ``` - */ transaction(callback: (db: BoundQuery) => Promise): Promise; } -/** - * Bind a Knex instance once and get back a `query(schema)` function that - * doesn't require repeating the knex argument on every call. - * - * @param knex - A configured Knex instance. - * @returns A bound query factory: `(schema, baseQuery?) => SchemaQueryBuilder`. 
- * - * @example - * ```ts - * import Knex from 'knex'; - * import { createQuery } from '@cleverbrush/knex-schema'; - * - * const knex = Knex({ client: 'pg', connection: process.env.DB_URL }); - * const query = createQuery(knex); - * - * // No knex argument needed from here on - * const users = await query(UserSchema).where(t => t.role, '=', 'admin'); - * const post = await query(PostSchema).where(t => t.id, '=', 42).first(); - * - * // Optional base query (e.g. soft-delete scope applied globally) - * const active = query(UserSchema, knex('users').where('deleted_at', null)); - * ``` - */ export function createQuery(knexInstance: Knex): BoundQuery { function boundQuery< TLocalSchema extends ObjectSchemaBuilder< diff --git a/libs/knex-schema/src/operations/delete.ts b/libs/knex-schema/src/operations/delete.ts new file mode 100644 index 00000000..5f8a7962 --- /dev/null +++ b/libs/knex-schema/src/operations/delete.ts @@ -0,0 +1,72 @@ +// @cleverbrush/knex-schema — DELETE / soft-delete / restore operations + +import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js'; +import { getSoftDelete, invalidateCache, mapRow } from './helpers.js'; +import { getState } from './state.js'; + +export async function deleteImpl( + builder: SchemaQueryBuilder +): Promise { + const state = getState(builder); + + const hooks = + ((state.localSchema as any).getExtension?.('beforeDelete') as + | Function[] + | undefined) ?? 
[]; + for (const hook of hooks) { + await hook(builder); + } + + const softDelete = getSoftDelete(builder); + if (softDelete) { + return state.baseQuery.update({ + [softDelete.column]: state.knex.fn.now() + }); + } + return state.baseQuery.delete(); +} + +export function withDeletedImpl(builder: SchemaQueryBuilder): any { + const state = getState(builder); + invalidateCache(builder); + state.includeDeleted = true; + return builder; +} + +export function onlyDeletedImpl(builder: SchemaQueryBuilder): any { + const state = getState(builder); + invalidateCache(builder); + state.onlyDeleted = true; + state.includeDeleted = true; + return builder; +} + +export async function hardDeleteImpl( + builder: SchemaQueryBuilder +): Promise { + const state = getState(builder); + const hooks = + ((state.localSchema as any).getExtension?.('beforeDelete') as + | Function[] + | undefined) ?? []; + for (const hook of hooks) { + await hook(builder); + } + return state.baseQuery.delete(); +} + +export async function restoreImpl( + builder: SchemaQueryBuilder +): Promise { + const state = getState(builder); + const softDelete = getSoftDelete(builder); + if (!softDelete) { + throw new Error( + 'Schema does not have soft delete enabled. Use .softDelete() on the schema.' 
+ ); + } + const rows = await state.baseQuery + .update({ [softDelete.column]: null }) + .returning('*'); + return rows.map((row: any) => mapRow(builder, row)); +} diff --git a/libs/knex-schema/src/operations/helpers.ts b/libs/knex-schema/src/operations/helpers.ts new file mode 100644 index 00000000..cbaf6b9e --- /dev/null +++ b/libs/knex-schema/src/operations/helpers.ts @@ -0,0 +1,960 @@ +// @cleverbrush/knex-schema — Extracted helper functions from SchemaQueryBuilder + +import type { InferType } from '@cleverbrush/schema'; +import { + EXTRA_TYPE_BRAND, + METHOD_LITERAL_BRAND, + ObjectSchemaBuilder, + SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR +} from '@cleverbrush/schema'; +import type { Knex } from 'knex'; +import { + buildColumnMap, + getPrimaryKeyColumns, + resolveColumnRef +} from '../columns.js'; +import { + getTableName, + getVariants, + POLYMORPHIC_TYPE_BRAND +} from '../extension.js'; +import { clearRow } from '../mappers.js'; +import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js'; +import type { + ColumnRef, + ResolvedVariantConfig, + ResolvedVariantRelationSpec, + ValidatedSpec +} from '../types.js'; +import { getState } from './state.js'; + +// --------------------------------------------------------------------------- +// Type-level helpers +// --------------------------------------------------------------------------- + +export type ScopesOf = S extends { + readonly [METHOD_LITERAL_BRAND]?: infer N; +} + ? Extract + : never; + +export type ProjectionsOf = S extends { + readonly [EXTRA_TYPE_BRAND]?: infer P; +} + ? P extends Record + ? P + : Record + : Record; + +export type ProjectionKeysOf< + S, + K extends keyof ProjectionsOf & string +> = ProjectionsOf[K] extends readonly (infer T extends string)[] + ? T + : string; + +export type QueryResultType = TLocalSchema extends { + readonly [POLYMORPHIC_TYPE_BRAND]?: infer U; +} + ? 
NonNullable + : InferType; + +// --------------------------------------------------------------------------- +// State-based helpers +// --------------------------------------------------------------------------- + +export function resolveColumn( + builder: SchemaQueryBuilder, + ref: any, + label = 'column' +): string | Knex.Raw { + const state = getState(builder); + return resolveColumnRef( + ref as ColumnRef, + state.localSchema, + label, + state.knex + ); +} + +export function invalidateCache(builder: SchemaQueryBuilder): void { + getState(builder).cachedBuiltQuery = null; +} + +export function getSoftDelete( + builder: SchemaQueryBuilder +): { column: string } | null { + const state = getState(builder); + const ext = (state.localSchema as any).getExtension?.('softDelete'); + return ext ?? null; +} + +export function getDefaultScope( + builder: SchemaQueryBuilder +): Function | null { + const state = getState(builder); + const fn = (state.localSchema as any).getExtension?.('defaultScope'); + return typeof fn === 'function' ? fn : null; +} + +export function getTimestamps( + builder: SchemaQueryBuilder +): { createdAt: string; updatedAt: string } | null { + const state = getState(builder); + const ts = (state.localSchema as any).getExtension?.('timestamps'); + return ts ?? null; +} + +const ALLOWED_OPS = new Set([ + '=', + '!=', + '<>', + '<', + '>', + '<=', + '>=', + 'like', + 'not like', + 'ilike', + 'not ilike', + 'in', + 'not in', + 'is', + 'is not' +]); + +export { ALLOWED_OPS }; + +export function getVariantConfig( + builder: SchemaQueryBuilder +): ResolvedVariantConfig | null { + const state = getState(builder); + if (state.variantConfig !== undefined) return state.variantConfig; + + const raw = getVariants(state.localSchema); + if (!raw) { + state.variantConfig = null; + return null; + } + + const { propToCol } = buildColumnMap(state.localSchema); + const discCol = propToCol.get(raw.discriminatorKey) ?? 
raw.discriminatorKey; + + state.variantConfig = { + ...raw, + discriminatorColumn: discCol + }; + return state.variantConfig; +} + +export function applyVariantJoins( + builder: SchemaQueryBuilder, + base: Knex.QueryBuilder, + variantConfig: ResolvedVariantConfig +): Knex.QueryBuilder { + const state = getState(builder); + const knex = state.knex; + const baseTable = state.tableName; + const basePkCol = findPrimaryKeyColumn(builder, state.localSchema); + const discCol = variantConfig.discriminatorColumn; + + const qb = base.clone().select(`${baseTable}.*`); + + for (const [key, spec] of Object.entries(variantConfig.variants)) { + if (state.enabledVariants !== null && !state.enabledVariants.has(key)) + continue; + + if (spec.storage === 'cti') { + const variantAlias = `__v_${key}`; + const variantTable = spec.tableName!; + const fkCol = spec.foreignKey!; + + qb.leftJoin( + `${variantTable} as ${variantAlias}`, + knex.raw(`?? = ?? AND ?? = ?`, [ + `${variantAlias}.${fkCol}`, + `${baseTable}.${basePkCol}`, + `${baseTable}.${discCol}`, + key + ]) + ); + + const { propToCol } = buildColumnMap(spec.schema); + const variantIntrospect = spec.schema.introspect() as any; + const variantProps: Record = + variantIntrospect.properties ?? {}; + + for (const propKey of Object.keys(variantProps)) { + const colName = propToCol.get(propKey) ?? propKey; + qb.select( + knex.raw('?? 
as ??', [ + `${variantAlias}.${colName}`, + `${variantAlias}__${colName}` + ]) + ); + } + } + } + + for (const vrInc of state.variantRelationIncludes) { + const variantSpec = variantConfig.variants[vrInc.variantKey]; + if (!variantSpec) continue; + + const relSpec = variantSpec.relations.find( + (r: ResolvedVariantRelationSpec) => r.name === vrInc.relationName + ); + if (!relSpec) continue; + + const foreignSchema = resolveSchema(builder, relSpec.schema); + const foreignTableName = getTableName(foreignSchema); + const relAlias = `__v_${vrInc.variantKey}__rel_${vrInc.relationName}`; + + if (relSpec.type === 'belongsTo' || relSpec.type === 'hasOne') { + let localCol: string; + let foreignCol: string; + + if (relSpec.type === 'belongsTo') { + localCol = + relSpec.foreignKey ?? + ((): string => { + throw new Error( + `includeVariant: relation "${vrInc.relationName}" on variant "${vrInc.variantKey}" requires foreignKey` + ); + })(); + foreignCol = findPrimaryKeyColumn(builder, foreignSchema); + } else { + localCol = findPrimaryKeyColumn(builder, state.localSchema); + foreignCol = + relSpec.foreignKey ?? 
+ ((): string => { + throw new Error( + `includeVariant: relation "${vrInc.relationName}" on variant "${vrInc.variantKey}" requires foreignKey` + ); + })(); + } + + let onExpr: string; + const variantAlias = `__v_${vrInc.variantKey}`; + + if (variantSpec.storage === 'cti') { + if (relSpec.type === 'belongsTo') { + onExpr = `${relAlias}.${foreignCol} = ${variantAlias}.${localCol} AND ${baseTable}.${discCol} = '${vrInc.variantKey}'`; + } else { + onExpr = `${relAlias}.${foreignCol} = ${baseTable}.${localCol} AND ${baseTable}.${discCol} = '${vrInc.variantKey}'`; + } + } else { + if (relSpec.type === 'belongsTo') { + onExpr = `${relAlias}.${foreignCol} = ${baseTable}.${localCol} AND ${baseTable}.${discCol} = '${vrInc.variantKey}'`; + } else { + onExpr = `${relAlias}.${foreignCol} = ${baseTable}.${localCol} AND ${baseTable}.${discCol} = '${vrInc.variantKey}'`; + } + } + + const foreignKnex: Knex.QueryBuilder = state.knex(foreignTableName); + + const selectionSql = buildVariantRelationSelect( + builder, + foreignSchema, + relAlias, + foreignTableName, + vrInc.customize + ); + + qb.leftJoin( + knex.raw(`?? as ??`, [foreignTableName, relAlias]), + knex.raw(onExpr) + ); + void foreignKnex; + + for (const sel of selectionSql) { + qb.select(sel); + } + } + } + + for (const filter of state.variantWhereFilters) { + const discColFull = `${baseTable}.${discCol}`; + qb.where(function (this: Knex.QueryBuilder) { + this.where(discColFull, filter.key) + .andWhere(filter.qualifiedColumn, filter.op, filter.value) + .orWhere(discColFull, '!=', filter.key); + }); + } + + if (state.enabledVariants !== null) { + qb.whereIn(`${baseTable}.${discCol}`, [...state.enabledVariants]); + } + + return qb; +} + +// Circular-dependency-safe SchemaQueryBuilder constructor reference +// Set by SchemaQueryBuilder.ts after the class is defined. 
// ---------------------------------------------------------------------------
// Lazily-registered SchemaQueryBuilder constructor.
// SchemaQueryBuilder.ts calls registerSchemaQueryBuilder() after defining the
// class; helpers here obtain the ctor at call time instead of importing it,
// which would create a circular import.
// ---------------------------------------------------------------------------
let SchemaQueryBuilderCtor: new (...args: any[]) => any = null!;

/** Registers the SchemaQueryBuilder constructor (called once at module load). */
export function registerSchemaQueryBuilder(
    ctor: new (...args: any[]) => any
): void {
    SchemaQueryBuilderCtor = ctor;
}

/** Returns the constructor previously passed to registerSchemaQueryBuilder(). */
export function getSchemaQueryBuilderCtor(): new (...args: any[]) => any {
    return SchemaQueryBuilderCtor;
}

/**
 * Builds the aliased SELECT list for a variant-relation LEFT JOIN.
 *
 * Each column is emitted as `"<relAlias>.<col>" as "<relAlias>__<col>"`;
 * mapPolymorphicRow later recognises the `<relAlias>__` prefix and strips it
 * when hydrating the nested relation object.
 *
 * When `customize` is given it is replayed against a throwaway "probe"
 * builder purely to discover an explicit column selection; if the callback
 * selects nothing explicitly, all schema properties are selected (same as
 * when no callback is given).
 */
export function buildVariantRelationSelect(
    builder: SchemaQueryBuilder,
    foreignSchema: ObjectSchemaBuilder,
    relAlias: string,
    foreignTableName: string,
    customize?: (q: SchemaQueryBuilder) => void
): Knex.Raw[] {
    const state = getState(builder);
    const knex = state.knex;
    const { propToCol } = buildColumnMap(foreignSchema);
    const foreignIntrospect = foreignSchema.introspect() as any;
    const foreignProps: Record<string, unknown> =
        foreignIntrospect.properties ?? {};

    // Default selection: every schema property, mapped to its column name.
    // (Previously duplicated in two branches.)
    const defaultColumns = (): string[] =>
        Object.keys(foreignProps).map(p => propToCol.get(p) ?? p);

    let columnsToSelect: string[];
    if (customize) {
        const probe = new (SchemaQueryBuilderCtor as any)(
            state.knex,
            foreignSchema,
            state.knex(foreignTableName)
        );
        customize(probe);
        const explicit = getState(probe).explicitSelects;
        columnsToSelect =
            explicit && explicit.length > 0 ? explicit : defaultColumns();
    } else {
        columnsToSelect = defaultColumns();
    }

    return columnsToSelect.map(colName =>
        knex.raw('?? as ??', [
            `${relAlias}.${colName}`,
            `${relAlias}__${colName}`
        ])
    );
}

export function mapPolymorphicRow(
    builder: SchemaQueryBuilder,
    row: Record<string, any>,
    variantConfig: ResolvedVariantConfig
): Record<string, any> {
    const state = getState(builder);
    const { colToProp: baseColToProp } = buildColumnMap(state.localSchema);
    const result: Record<string, any> = {};

    for (const [colName, value] of Object.entries(row)) {
        if (colName.startsWith('__v_')) continue;
        const propName = baseColToProp.get(colName);
        if (propName) {
            result[propName] = value;
        } else {
            result[colName] = value;
        }
    }

    const discPropKey = variantConfig.discriminatorKey;
    const discValue: string | undefined = result[discPropKey];

    if (discValue != null) {
        const variantSpec = variantConfig.variants[discValue];
        if (variantSpec) {
            if (variantSpec.storage === 'cti') {
                const { colToProp: varColToProp } = buildColumnMap(
                    variantSpec.schema
                );
                const variantAlias = `__v_${discValue}`;
                const prefix = `${variantAlias}__`;
                const fkCol = variantSpec.foreignKey;

                if (fkCol) {
                    const fkAlias = `${prefix}${fkCol}`;
                    if (!variantSpec.allowOrphan && row[fkAlias] == null) {
                        throw new Error(
                            `Polymorphic orphan: "${discPropKey}" = "${discValue}" ` +
                                `but no matching row found in variant table ` +
                                `"${variantSpec.tableName}". ` +
                                `Set allowOrphan: true on this variant to suppress.`
                        );
                    }
                }

                for (const [colName, value] of Object.entries(row)) {
                    if (!colName.startsWith(prefix)) continue;
                    const origCol = colName.slice(prefix.length);
                    if (origCol === fkCol) continue;
                    const propName = varColToProp.get(origCol) ??
origCol; + result[propName] = value; + } + } else { + const { colToProp: varColToProp } = buildColumnMap( + variantSpec.schema + ); + for (const [colName, value] of Object.entries(row)) { + if (colName.startsWith('__v_')) continue; + if (baseColToProp.has(colName)) continue; + const propName = varColToProp.get(colName); + if (propName) { + result[propName] = value; + } + } + } + } + } + + if (state.variantRelationIncludes.length > 0 && discValue != null) { + const variantSpec3 = variantConfig.variants[discValue]; + if (variantSpec3) { + for (const vrInc of state.variantRelationIncludes) { + if (vrInc.variantKey !== discValue) continue; + + const relSpec = variantSpec3.relations.find( + (r: ResolvedVariantRelationSpec) => + r.name === vrInc.relationName + ); + if (!relSpec) continue; + + if (relSpec.type === 'belongsTo' || relSpec.type === 'hasOne') { + const relAlias = `__v_${discValue}__rel_${vrInc.relationName}`; + const prefix = `${relAlias}__`; + const foreignSchema = resolveSchema( + builder, + relSpec.schema + ); + const { colToProp: relColToProp } = + buildColumnMap(foreignSchema); + const nested: Record = {}; + let anyNonNull = false; + + for (const [colName, value] of Object.entries(row)) { + if (!colName.startsWith(prefix)) continue; + const origCol = colName.slice(prefix.length); + const propName = relColToProp.get(origCol) ?? origCol; + nested[propName] = value; + if (value !== null && value !== undefined) { + anyNonNull = true; + } + } + result[vrInc.relationName] = anyNonNull ? nested : null; + } + } + } + } + + return result; +} + +export function resolveSchema( + _builder: SchemaQueryBuilder, + schema: any +): ObjectSchemaBuilder { + return typeof schema === 'function' ? 
schema() : schema; +} + +export function findPrimaryKeyColumn( + _builder: SchemaQueryBuilder, + schema: ObjectSchemaBuilder +): string { + const pk = getPrimaryKeyColumns(schema); + if (pk.columnNames.length > 0) return pk.columnNames[0]; + return 'id'; +} + +export function resolvePkColumns(builder: SchemaQueryBuilder): { + propertyKeys: readonly string[]; + columnNames: readonly string[]; +} { + const state = getState(builder); + const pk = getPrimaryKeyColumns(state.localSchema as any); + if (pk.columnNames.length === 0) { + throw new Error( + 'No primary key declared on this schema. Use `.primaryKey()` on a column or `.hasPrimaryKey([...])` on the schema.' + ); + } + return pk; +} + +export function getEffectiveBaseQuery( + builder: SchemaQueryBuilder +): Knex.QueryBuilder { + const state = getState(builder); + let effectiveBase = state.baseQuery; + let cloned = false; + + const softDelete = getSoftDelete(builder); + if (softDelete && state.onlyDeleted) { + if (!cloned) { + effectiveBase = effectiveBase.clone(); + cloned = true; + } + effectiveBase.whereNotNull(softDelete.column); + } else if (softDelete && !state.includeDeleted) { + if (!cloned) { + effectiveBase = effectiveBase.clone(); + cloned = true; + } + effectiveBase.whereNull(softDelete.column); + } + + if (!state.skipDefaultScope) { + const defaultScopeFn = getDefaultScope(builder); + if (defaultScopeFn) { + if (!cloned) { + effectiveBase = effectiveBase.clone(); + cloned = true; + } + const proxy = new (SchemaQueryBuilderCtor as any)( + state.knex, + state.localSchema, + effectiveBase + ); + const proxyState = getState(proxy); + proxyState.skipDefaultScope = true; + defaultScopeFn(proxy); + } + } + + return effectiveBase; +} + +export function buildJoinOne( + builder: SchemaQueryBuilder, + resultQuery: Knex.QueryBuilder, + spec: ValidatedSpec & { type: 'one' }, + relationAlias: string +): void { + const state = getState(builder); + const knex = state.knex; + const foreignTable = spec.foreignQuery; + 
const foreignTableName = (foreignTable as any)._single?.table; + + if (!foreignTableName) { + throw new Error( + `Could not determine table name from foreignQuery for "${spec.as}". ` + + 'Make sure foreignQuery is created via knex("tableName").' + ); + } + + resultQuery.select( + knex.raw(':relationAlias:.:as:->0 as :as:', { + relationAlias, + as: spec.as + }) + ); + + const subquery = knex + .from(foreignTable.as(foreignTableName)) + .select( + knex.raw(':foreignTable:.:foreignColumn:', { + foreignTable: foreignTableName, + foreignColumn: spec.foreignColumn + }) + ) + .select( + knex.raw('jsonb_agg(:foreignTable:) as :as:', { + foreignTable: foreignTableName, + as: spec.as + }) + ) + .groupByRaw(':foreignTable:.:foreignColumn:', { + foreignTable: foreignTableName, + foreignColumn: spec.foreignColumn + }) + .as(relationAlias); + + const joinMethod = spec.required ? 'join' : 'leftJoin'; + resultQuery[joinMethod](subquery, function () { + this.on( + knex.raw( + ':relationAlias:.:foreignColumn: = :originalQuery:.:localColumn:', + { + originalQuery: 'originalQuery', + relationAlias, + foreignColumn: spec.foreignColumn, + localColumn: spec.localColumn + } + ) + ); + }); +} + +export function buildJoinMany( + builder: SchemaQueryBuilder, + resultQuery: Knex.QueryBuilder, + spec: ValidatedSpec & { type: 'many' }, + relationAlias: string, + i: number +): void { + const state = getState(builder); + const knex = state.knex; + const filterName = `withFilter${i}`; + + const hasLimitOffset = + (spec.limit !== null && spec.limit > 0) || + (spec.offset !== null && spec.offset > 0); + + const orderByColumn = spec.orderBy + ? spec.orderBy.column + : spec.foreignColumn; + const orderByDirection = spec.orderBy ? 
spec.orderBy.direction : 'asc'; + + if (hasLimitOffset) { + resultQuery.with( + filterName, + knex + .from( + spec.foreignQuery + .clone() + .whereIn( + spec.foreignColumn, + knex + .from( + knex.raw(':originalQuery:', { + originalQuery: 'originalQuery' + }) + ) + .distinct(spec.localColumn) + .as(`__wf_inner_${i}`) + ) + .as(`__wf_inner_${i}`) + ) + .select(`__wf_inner_${i}.*`) + .select( + knex.raw( + `row_number() over(partition by :foreignColumn: order by :orderByColumn: ${orderByDirection}) as "__rn__"`, + { + foreignColumn: spec.foreignColumn, + orderByColumn + } + ) + ) + ); + } else { + resultQuery.with( + filterName, + spec.foreignQuery.clone().whereIn( + spec.foreignColumn, + knex + .from( + knex.raw(':originalQuery:', { + originalQuery: 'originalQuery' + }) + ) + .distinct(spec.localColumn) + ) + ); + } + + const aggSubquery = knex.from(filterName); + + if (hasLimitOffset) { + const hasLimit = spec.limit !== null && spec.limit > 0; + const hasOffset = spec.offset !== null && spec.offset > 0; + const effectiveOffset = spec.offset ?? 0; + const effectiveLimit = effectiveOffset + (spec.limit ?? 0); + + const condition = + hasLimit && hasOffset + ? '"__rn__" > :offset and "__rn__" <= :limit' + : hasLimit + ? '"__rn__" <= :limit' + : '"__rn__" > :offset'; + + aggSubquery.whereRaw(condition, { + limit: effectiveLimit, + offset: effectiveOffset + }); + + aggSubquery.select( + knex.raw(':foreignColumn:', { + foreignColumn: spec.foreignColumn + }) + ); + aggSubquery.select( + knex.raw( + "coalesce(jsonb_agg(to_jsonb(:filterName:) - '__rn__' order by \"__rn__\"), '[]'::jsonb) as :as:", + { filterName, as: spec.as } + ) + ); + } else { + aggSubquery.select( + knex.raw(':foreignColumn:', { + foreignColumn: spec.foreignColumn + }) + ); + + const orderClause = spec.orderBy + ? 
`jsonb_agg(:filterName: order by :filterName:.:orderByColumn: ${orderByDirection})` + : 'jsonb_agg(:filterName:)'; + + aggSubquery.select( + knex.raw( + `coalesce(${orderClause}, '[]'::jsonb) as :as:`, + spec.orderBy + ? { filterName, orderByColumn, as: spec.as } + : { filterName, as: spec.as } + ) + ); + } + + aggSubquery.groupByRaw(':foreignColumn:', { + foreignColumn: spec.foreignColumn + }); + + const subquery = aggSubquery.as(relationAlias); + + resultQuery.select( + knex.raw("coalesce(:relationAlias:.:as:, '[]'::jsonb) as :as:", { + relationAlias, + as: spec.as + }) + ); + + resultQuery.leftJoin(subquery, function () { + this.on( + knex.raw( + ':relationAlias:.:foreignColumn: = :originalQuery:.:localColumn:', + { + relationAlias, + foreignColumn: spec.foreignColumn, + originalQuery: 'originalQuery', + localColumn: spec.localColumn + } + ) + ); + }); +} + +export function buildQuery( + builder: SchemaQueryBuilder +): Knex.QueryBuilder { + const state = getState(builder); + const effectiveBase = getEffectiveBaseQuery(builder); + + const variantConfig = getVariantConfig(builder); + const queryBase = variantConfig + ? 
applyVariantJoins(builder, effectiveBase, variantConfig) + : effectiveBase; + + if (state.specs.length === 0) { + return queryBase; + } + + const knex = state.knex; + const specs = state.specs; + + const requiredLocalColumns = [...new Set(specs.map(s => s.localColumn))]; + + let cteQuery = queryBase; + let extraColumns: string[] = []; + + if (state.explicitSelects !== null) { + const selectedSet = new Set(state.explicitSelects); + extraColumns = requiredLocalColumns.filter( + col => !selectedSet.has(col) + ); + if (extraColumns.length > 0) { + cteQuery = queryBase.clone(); + for (const col of extraColumns) { + cteQuery.column(col); + } + } + } + + const resultQuery = knex.queryBuilder().with('originalQuery', cteQuery); + + if (extraColumns.length > 0 && state.explicitSelects !== null) { + for (const col of state.explicitSelects) { + resultQuery.select( + knex.raw(':originalQuery:.:col: as :col:', { + originalQuery: 'originalQuery', + col + }) + ); + } + } else { + resultQuery.select('originalQuery.*'); + } + + resultQuery.from( + knex.raw(':originalQuery:', { + originalQuery: 'originalQuery' + }) + ); + + for (let i = 0; i < specs.length; i++) { + const spec = specs[i]; + const relationAlias = `eagerRelation${i}`; + + if (spec.type === 'one') { + buildJoinOne(builder, resultQuery, spec, relationAlias); + } else { + buildJoinMany(builder, resultQuery, spec, relationAlias, i); + } + } + + return resultQuery; +} + +export function getQuery( + builder: SchemaQueryBuilder +): Knex.QueryBuilder { + const state = getState(builder); + if (!state.cachedBuiltQuery) { + state.cachedBuiltQuery = buildQuery(builder); + } + return state.cachedBuiltQuery; +} + +export function mapRow( + builder: SchemaQueryBuilder, + row: Record +): Record { + if (!row) return row; + + const variantConfig = getVariantConfig(builder); + if (variantConfig) { + return mapPolymorphicRow(builder, row, variantConfig); + } + + const state = getState(builder); + const { colToProp } = 
buildColumnMap(state.localSchema); + const result: Record = {}; + + for (const [colName, value] of Object.entries(row)) { + const propName = colToProp.get(colName); + if (propName) { + result[propName] = value; + } else { + result[colName] = value; + } + } + + return result; +} + +export function cleanAndMapRow( + builder: SchemaQueryBuilder, + row: Record +): Record { + const state = getState(builder); + const oneSpecs = state.specs.filter( + (s): s is ValidatedSpec & { type: 'one' } => s.type === 'one' + ); + const manySpecs = state.specs.filter( + (s): s is ValidatedSpec & { type: 'many' } => s.type === 'many' + ); + const cleaned = clearRow(row, oneSpecs, manySpecs); + return mapRow(builder, cleaned); +} + +export function mapObjectToColumns( + builder: SchemaQueryBuilder, + obj: Record +): Record { + const state = getState(builder); + const { propToCol } = buildColumnMap(state.localSchema); + const result: Record = {}; + + for (const [key, value] of Object.entries(obj)) { + const colName = propToCol.get(key); + if (colName) { + result[colName] = value; + } else { + result[key] = value; + } + } + + return result; +} + +export function mapRecordToColumns( + builder: SchemaQueryBuilder, + record: Record +): Record { + return mapObjectToColumns(builder, record); +} + +export function resolveColumnArg( + builder: SchemaQueryBuilder, + col: any +): string | Knex.Raw { + if (typeof col === 'string') { + return resolveColumn(builder, col, 'column'); + } + if (typeof col === 'function') { + return resolveColumn(builder, col, 'column'); + } + return col; +} + +export function isColumnAccessor( + builder: SchemaQueryBuilder, + fn: Function +): boolean { + const state = getState(builder); + try { + const tree = ObjectSchemaBuilder.getPropertiesFor( + state.localSchema as any + ); + const result = fn(tree); + if ( + result && + typeof result === 'object' && + SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR in result + ) { + return true; + } + } catch {} + return false; +} + +export 
function assertNotProjection( + builder: SchemaQueryBuilder, + method: string +): void { + const state = getState(builder); + if (state.selectionMode === 'projection') { + throw new Error( + `Cannot call .${method}() after .projected('${ + state.appliedProjection + }'). Choose one column-selection mode per query.` + ); + } +} + +export function assertNotExplicitSelect( + builder: SchemaQueryBuilder, + method: string +): void { + const state = getState(builder); + if (state.selectionMode === 'select') { + throw new Error( + `Cannot call .${method}() after .select(). Choose one column-selection mode per query.` + ); + } + if (state.selectionMode === 'aggregate') { + throw new Error( + `Cannot call .${method}() after an aggregate method. Choose one column-selection mode per query.` + ); + } + if (state.selectionMode === 'projection') { + throw new Error( + `Cannot call .${method}() after .projected('${ + state.appliedProjection + }'). Choose one column-selection mode per query.` + ); + } +} diff --git a/libs/knex-schema/src/operations/insert.ts b/libs/knex-schema/src/operations/insert.ts new file mode 100644 index 00000000..82b09609 --- /dev/null +++ b/libs/knex-schema/src/operations/insert.ts @@ -0,0 +1,366 @@ +// @cleverbrush/knex-schema — INSERT / upsert / bulk operations + +import type { InferType } from '@cleverbrush/schema'; +import type { Knex } from 'knex'; +import { buildColumnMap } from '../columns.js'; +import { getTableName } from '../extension.js'; +import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js'; +import type { ColumnRef, InsertType } from '../types.js'; +import { + getTimestamps, + mapObjectToColumns, + mapRow, + resolveColumn +} from './helpers.js'; +import { getState } from './state.js'; + +// --------------------------------------------------------------------------- +// OnConflictBuilder +// --------------------------------------------------------------------------- + +export class OnConflictBuilder< + TLocalSchema extends 
import('@cleverbrush/schema').ObjectSchemaBuilder< + any, + any, + any, + any, + any, + any, + any + >, + TResult +> { + readonly #knex: Knex; + readonly #localSchema: TLocalSchema; + readonly #conflictColumns: string[]; + + constructor( + knex: Knex, + localSchema: TLocalSchema, + _parent: SchemaQueryBuilder, + conflictColumns: string[] + ) { + this.#knex = knex; + this.#localSchema = localSchema; + this.#conflictColumns = conflictColumns; + } + + async merge( + data: InsertType, + updateData?: Partial> + ): Promise { + return this.#execute(data, 'merge', updateData) as Promise; + } + + async ignore(data: InsertType): Promise { + return this.#execute(data, 'ignore'); + } + + async #execute( + data: InsertType, + mode: 'merge' | 'ignore', + updateData?: Partial> + ): Promise { + const tableName = getTableName(this.#localSchema); + const timestamps: { createdAt: string; updatedAt: string } | null = + (this.#localSchema as any).getExtension?.('timestamps') ?? null; + + const beforeHooks: Function[] = + (this.#localSchema as any).getExtension?.('beforeInsert') ?? []; + + let processed = { ...(data as Record) }; + for (const hook of beforeHooks) { + processed = (await hook(processed)) ?? processed; + } + + const { propToCol } = buildColumnMap(this.#localSchema as any); + const mapped: Record = {}; + for (const [key, val] of Object.entries(processed)) { + mapped[propToCol.get(key) ?? key] = val; + } + if (timestamps) { + mapped[timestamps.createdAt] = this.#knex.fn.now(); + mapped[timestamps.updatedAt] = this.#knex.fn.now(); + } + + let qb = this.#knex(tableName) + .insert(mapped) + .onConflict(this.#conflictColumns); + + if (mode === 'ignore') { + qb = (qb as any).ignore(); + } else { + let mergeObj: Record; + if (updateData) { + mergeObj = {}; + for (const [key, val] of Object.entries( + updateData as Record + )) { + mergeObj[propToCol.get(key) ?? 
key] = val; + } + } else { + mergeObj = { ...mapped }; + if (timestamps) { + delete mergeObj[timestamps.createdAt]; + mergeObj[timestamps.updatedAt] = this.#knex.fn.now(); + } + } + qb = (qb as any).merge(mergeObj); + } + + const rows = await (qb as any).returning('*'); + if (!rows || rows.length === 0) return undefined; + + const { colToProp } = buildColumnMap(this.#localSchema as any); + const result: Record = {}; + for (const [col, val] of Object.entries(rows[0])) { + result[colToProp.get(col) ?? col] = val; + } + return result as TResult; + } +} + +// --------------------------------------------------------------------------- +// Insert operation functions +// --------------------------------------------------------------------------- + +export async function insertImpl( + builder: SchemaQueryBuilder, + data: InsertType +): Promise { + const state = getState(builder); + + let processedData = { ...(data as Record) }; + + const beforeHooks = + ((state.localSchema as any).getExtension?.('beforeInsert') as + | Function[] + | undefined) ?? []; + for (const hook of beforeHooks) { + processedData = (await hook(processedData)) ?? processedData; + } + + const mapped = mapObjectToColumns(builder, processedData); + + const timestamps = getTimestamps(builder); + if (timestamps) { + mapped[timestamps.createdAt] = state.knex.fn.now(); + mapped[timestamps.updatedAt] = state.knex.fn.now(); + } + + const [row] = await state + .knex(state.tableName) + .insert(mapped) + .returning('*'); + const result = mapRow(builder, row); + + const afterHooks = + ((state.localSchema as any).getExtension?.('afterInsert') as + | Function[] + | undefined) ?? 
[]; + for (const hook of afterHooks) { + await hook(result); + } + + return result; +} + +export async function insertManyImpl( + builder: SchemaQueryBuilder, + data: InsertType[] +): Promise { + const state = getState(builder); + const timestamps = getTimestamps(builder); + const beforeHooks = + ((state.localSchema as any).getExtension?.('beforeInsert') as + | Function[] + | undefined) ?? []; + + const mapped = []; + for (const d of data) { + let processedData = { ...(d as Record) }; + for (const hook of beforeHooks) { + processedData = (await hook(processedData)) ?? processedData; + } + const m = mapObjectToColumns(builder, processedData); + if (timestamps) { + m[timestamps.createdAt] = state.knex.fn.now(); + m[timestamps.updatedAt] = state.knex.fn.now(); + } + mapped.push(m); + } + + const rows = await state + .knex(state.tableName) + .insert(mapped) + .returning('*'); + const results = rows.map((row: any) => mapRow(builder, row)); + + const afterHooks = + ((state.localSchema as any).getExtension?.('afterInsert') as + | Function[] + | undefined) ?? 
[]; + for (const result of results) { + for (const hook of afterHooks) { + await hook(result); + } + } + + return results; +} + +export function onConflictImpl( + builder: SchemaQueryBuilder, + ...conflictColumns: ColumnRef[] +): OnConflictBuilder { + const state = getState(builder); + const cols = conflictColumns.map( + c => resolveColumn(builder, c, 'onConflict') as string + ); + return new OnConflictBuilder(state.knex, state.localSchema, builder, cols); +} + +export async function upsertImpl( + builder: SchemaQueryBuilder, + data: InsertType, + opts: { + conflictColumns: ColumnRef[]; + updateColumns?: ColumnRef[]; + } +): Promise { + const state = getState(builder); + const cols = opts.conflictColumns.map( + c => resolveColumn(builder, c, 'upsert') as string + ); + + const qb = state + .knex(state.tableName) + .insert(mapObjectToColumns(builder, data as Record)) + .onConflict(cols); + + if (opts.updateColumns && opts.updateColumns.length > 0) { + const updateCols = opts.updateColumns.map( + c => resolveColumn(builder, c, 'upsert') as string + ); + (qb as any).merge(updateCols); + } else { + (qb as any).merge(); + } + + const [row] = await (qb as any).returning('*'); + return mapRow(builder, row); +} + +export async function bulkInsertImpl( + builder: SchemaQueryBuilder, + rows: InsertType[], + opts?: { + chunkSize?: number; + onConflict?: 'ignore' | 'merge'; + conflictColumns?: ColumnRef[]; + } +): Promise { + if (rows.length === 0) return []; + + const state = getState(builder); + + const requestedChunkSize = opts?.chunkSize ?? 500; + const bindingsPerRow = Object.keys(rows[0] as object).length || 1; + const safeChunkCap = Math.max( + 1, + Math.floor(60000 / Math.max(1, bindingsPerRow)) + ); + const chunkSize = Math.max(1, Math.min(requestedChunkSize, safeChunkCap)); + + const timestamps = getTimestamps(builder); + const beforeHooks = + ((state.localSchema as any).getExtension?.('beforeInsert') as + | Function[] + | undefined) ?? 
[]; + const afterHooks = + ((state.localSchema as any).getExtension?.('afterInsert') as + | Function[] + | undefined) ?? []; + + const conflictCols = + opts?.onConflict && opts.conflictColumns + ? opts.conflictColumns.map( + c => + resolveColumn( + builder, + c, + 'bulkInsert.onConflict' + ) as string + ) + : null; + if (opts?.onConflict && (!conflictCols || conflictCols.length === 0)) { + throw new Error( + 'bulkInsert: `conflictColumns` is required when `onConflict` is set.' + ); + } + + const results: any[] = []; + + for (let i = 0; i < rows.length; i += chunkSize) { + const chunk = rows.slice(i, i + chunkSize); + const mapped: Record[] = []; + for (const row of chunk) { + let processed = { ...(row as Record) }; + for (const hook of beforeHooks) { + processed = (await hook(processed)) ?? processed; + } + const m = mapObjectToColumns(builder, processed); + if (timestamps) { + m[timestamps.createdAt] = state.knex.fn.now(); + m[timestamps.updatedAt] = state.knex.fn.now(); + } + mapped.push(m); + } + + let qb: any = state.knex(state.tableName).insert(mapped); + + if (conflictCols) { + qb = qb.onConflict(conflictCols); + if (opts!.onConflict === 'ignore') { + qb = qb.ignore(); + } else { + const updateCols = new Set(); + for (const m of mapped) { + for (const k of Object.keys(m)) updateCols.add(k); + } + for (const c of conflictCols) updateCols.delete(c); + if (timestamps) { + updateCols.delete(timestamps.createdAt); + updateCols.add(timestamps.updatedAt); + } + qb = qb.merge(Array.from(updateCols)); + } + } + + const inserted: any[] = await qb.returning('*'); + for (const row of inserted) { + const mappedRow = mapRow(builder, row); + for (const hook of afterHooks) { + await hook(mappedRow); + } + results.push(mappedRow); + } + } + + return results; +} + +export async function bulkUpsertImpl( + builder: SchemaQueryBuilder, + rows: InsertType[], + opts: { + conflictColumns: ColumnRef[]; + chunkSize?: number; + } +): Promise { + return bulkInsertImpl(builder, rows, { + 
chunkSize: opts.chunkSize, + onConflict: 'merge', + conflictColumns: opts.conflictColumns + }); +} diff --git a/libs/knex-schema/src/operations/join.ts b/libs/knex-schema/src/operations/join.ts new file mode 100644 index 00000000..b1390aae --- /dev/null +++ b/libs/knex-schema/src/operations/join.ts @@ -0,0 +1,257 @@ +// @cleverbrush/knex-schema — Eager-loading JOIN operations (joinOne, joinMany, include, includeVariant) + +import type { Knex } from 'knex'; +import { resolveColumnRef } from '../columns.js'; +import { getTableName } from '../extension.js'; +import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js'; +import type { + JoinManySpec, + JoinOneSpec, + RelationSpec, + ResolvedVariantRelationSpec +} from '../types.js'; +import { + validateJoinMany, + validateJoinOne, + validateUniqueFieldNames +} from '../validate.js'; +import { + findPrimaryKeyColumn, + getSchemaQueryBuilderCtor, + getVariantConfig, + invalidateCache, + resolveSchema +} from './helpers.js'; +import { getState } from './state.js'; + +export function joinOneImpl( + builder: SchemaQueryBuilder, + spec: JoinOneSpec +): any { + const state = getState(builder); + const validated = validateJoinOne(spec, state.localSchema, state.knex); + state.specs.push({ type: 'one' as const, ...validated }); + validateUniqueFieldNames(state.specs); + invalidateCache(builder); + return builder; +} + +export function joinManyImpl( + builder: SchemaQueryBuilder, + spec: JoinManySpec +): any { + const state = getState(builder); + const validated = validateJoinMany(spec, state.localSchema, state.knex); + state.specs.push({ type: 'many' as const, ...validated }); + validateUniqueFieldNames(state.specs); + invalidateCache(builder); + return builder; +} + +export function includeImpl( + builder: SchemaQueryBuilder, + relationName: string, + customize?: (q: SchemaQueryBuilder) => void +): any { + const state = getState(builder); + invalidateCache(builder); + const relations: RelationSpec[] = + (state.localSchema 
as any).getExtension?.('relations') ?? []; + const relation = relations.find( + (r: RelationSpec) => r.name === relationName + ); + if (!relation) { + const variantConfig = getVariantConfig(builder); + if (variantConfig) { + const matches: Array<{ variantKey: string }> = []; + for (const [vKey, vSpec] of Object.entries( + variantConfig.variants + )) { + if ( + vSpec.relations.some( + (r: ResolvedVariantRelationSpec) => + r.name === relationName + ) + ) { + matches.push({ variantKey: vKey }); + } + } + if (matches.length === 1) { + return includeVariantImpl( + builder, + matches[0].variantKey, + relationName, + customize + ); + } + if (matches.length > 1) { + throw new Error( + `Ambiguous relation "${relationName}" — found on variants: ${matches.map(m => m.variantKey).join(', ')}. Use .includeVariant(key, name) to be explicit.` + ); + } + } + throw new Error( + `Unknown relation "${relationName}" on schema for table "${state.tableName}"` + ); + } + + const foreignSchema = resolveSchema(builder, relation.schema); + const foreignTableName = getTableName(foreignSchema); + + switch (relation.type) { + case 'belongsTo': { + const localColumn = resolveColumnRef( + relation.foreignKey, + state.localSchema, + 'foreignKey' + ); + const foreignColumn = findPrimaryKeyColumn(builder, foreignSchema); + + const foreignQuery1: Knex.QueryBuilder = + state.knex(foreignTableName); + if (customize) { + const SQB = getSchemaQueryBuilderCtor(); + const proxy = new SQB(state.knex, foreignSchema, foreignQuery1); + customize(proxy); + } + + joinOneImpl(builder, { + foreignSchema, + localColumn, + foreignColumn, + as: relationName, + foreignQuery: foreignQuery1 + } as any); + break; + } + case 'hasOne': { + const localColumn = findPrimaryKeyColumn( + builder, + state.localSchema + ); + const foreignColumn = resolveColumnRef( + relation.foreignKey, + foreignSchema, + 'foreignKey' + ); + + const foreignQuery2: Knex.QueryBuilder = + state.knex(foreignTableName); + if (customize) { + const SQB 
= getSchemaQueryBuilderCtor(); + const proxy = new SQB(state.knex, foreignSchema, foreignQuery2); + customize(proxy); + } + + joinOneImpl(builder, { + foreignSchema, + localColumn, + foreignColumn, + as: relationName, + required: false, + foreignQuery: foreignQuery2 + } as any); + break; + } + case 'hasMany': { + const localColumn = findPrimaryKeyColumn( + builder, + state.localSchema + ); + const foreignColumn = resolveColumnRef( + relation.foreignKey, + foreignSchema, + 'foreignKey' + ); + + const foreignQuery3: Knex.QueryBuilder = + state.knex(foreignTableName); + if (customize) { + const SQB = getSchemaQueryBuilderCtor(); + const proxy = new SQB(state.knex, foreignSchema, foreignQuery3); + customize(proxy); + } + + joinManyImpl(builder, { + foreignSchema, + localColumn, + foreignColumn, + as: relationName, + foreignQuery: foreignQuery3 + } as any); + break; + } + case 'belongsToMany': { + const through = relation.through!; + const localColumn = findPrimaryKeyColumn( + builder, + state.localSchema + ); + + const foreignQuery = state + .knex(foreignTableName) + .join( + through.table, + `${through.table}.${through.foreignKey}`, + `${foreignTableName}.${findPrimaryKeyColumn(builder, foreignSchema)}` + ) + .select( + `${foreignTableName}.*`, + `${through.table}.${through.localKey}` + ); + + if (customize) { + const SQB = getSchemaQueryBuilderCtor(); + const proxy = new SQB(state.knex, foreignSchema, foreignQuery); + customize(proxy); + } + + joinManyImpl(builder, { + foreignSchema, + localColumn, + foreignColumn: through.localKey, + as: relationName, + foreignQuery + } as any); + break; + } + } + + return builder; +} + +export function includeVariantImpl( + builder: SchemaQueryBuilder, + variantKey: string, + relationName: string, + customize?: (q: SchemaQueryBuilder) => void +): any { + const state = getState(builder); + invalidateCache(builder); + const variantConfig = getVariantConfig(builder); + if (!variantConfig) { + throw new Error( + `includeVariant: schema 
for table "${state.tableName}" is not polymorphic (no .withVariants() config found)`
        );
    }
    const variantSpec = variantConfig.variants[variantKey];
    if (!variantSpec) {
        throw new Error(
            `includeVariant: unknown variant key "${variantKey}" on schema for table "${state.tableName}"`
        );
    }
    const relSpec = variantSpec.relations.find(
        (r: ResolvedVariantRelationSpec) => r.name === relationName
    );
    if (!relSpec) {
        throw new Error(
            `includeVariant: unknown relation "${relationName}" on variant "${variantKey}" of table "${state.tableName}"`
        );
    }
    state.variantRelationIncludes.push({
        variantKey,
        relationName,
        customize
    });
    return builder;
}
diff --git a/libs/knex-schema/src/operations/pagination.ts b/libs/knex-schema/src/operations/pagination.ts
new file mode 100644
index 00000000..5968b16b
--- /dev/null
+++ b/libs/knex-schema/src/operations/pagination.ts
@@ -0,0 +1,124 @@
// @cleverbrush/knex-schema — Pagination (offset & cursor-based)

import { resolvePropertyKey } from '../columns.js';
import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js';
import type {
    ColumnRef,
    CursorPaginationResult,
    PaginationResult
} from '../types.js';
import {
    cleanAndMapRow,
    getEffectiveBaseQuery,
    getQuery,
    invalidateCache
} from './helpers.js';
import { getState } from './state.js';

/**
 * Applies LIMIT n directly to the underlying base query (mutating it) and
 * drops any cached built query.
 */
export function limitImpl(
    builder: SchemaQueryBuilder,
    n: number
): any {
    const state = getState(builder);
    invalidateCache(builder);
    state.baseQuery.limit(n);
    return builder;
}

/**
 * Applies OFFSET n directly to the underlying base query (mutating it) and
 * drops any cached built query.
 */
export function offsetImpl(
    builder: SchemaQueryBuilder,
    n: number
): any {
    const state = getState(builder);
    invalidateCache(builder);
    state.baseQuery.offset(n);
    return builder;
}

/**
 * Offset pagination: COUNTs the effective base query, then fetches the
 * requested page via limit/offset.
 *
 * Note: mutates the builder's base query (limit/offset persist), so a builder
 * instance should not be reused for another page afterwards.
 *
 * @throws when `pageSize` is not a positive number (previously a pageSize of
 *         0 produced Infinity/NaN page counts).
 */
export async function paginateImpl<T = any>(
    builder: SchemaQueryBuilder,
    opts: {
        page: number;
        pageSize: number;
    }
): Promise<PaginationResult<T>> {
    const { page, pageSize } = opts;
    if (!Number.isFinite(pageSize) || pageSize <= 0) {
        throw new Error(
            `paginate: pageSize must be a positive number, got ${pageSize}`
        );
    }

    const effectiveBase = getEffectiveBaseQuery(builder);

    // COUNT against the un-paginated query (soft-delete / default scope are
    // already applied by getEffectiveBaseQuery); clear select & order so the
    // aggregate is valid.
    const countResult = await effectiveBase
        .clone()
        .clearSelect()
        .clearOrder()
        .count('* as count')
        .first();
    const total = Number((countResult as any)?.count ?? 0);

    limitImpl(builder, pageSize);
    offsetImpl(builder, (page - 1) * pageSize);
    const data = await executeImpl(builder);

    const totalPages = Math.ceil(total / pageSize);

    return {
        data,
        total,
        page,
        pageSize,
        totalPages,
        hasNextPage: page < totalPages,
        hasPreviousPage: page > 1
    } as PaginationResult<T>;
}

export async function paginateAfterImpl(
    builder: SchemaQueryBuilder,
    opts: {
        cursor?: any;
        limit: number;
        column?: ColumnRef;
        direction?: 'asc' | 'desc';
    }
): Promise<CursorPaginationResult<any>> {
    const direction = opts.direction ?? 'desc';
    const column = opts.column ?? ('id' as any);
    const state = getState(builder);

    if (opts.cursor != null) {
        const op = direction === 'desc' ? '<' : '>';
        whereImpl(builder, column, op, opts.cursor);
    }

    orderByImpl(builder, column, direction);
    // Fetch one extra row to detect whether another page exists.
    limitImpl(builder, opts.limit + 1);
    const rows = await executeImpl(builder);

    const hasMore = rows.length > opts.limit;
    const data = hasMore ? rows.slice(0, opts.limit) : rows;

    const propKey =
        typeof column === 'string'
            ? column
            : resolvePropertyKey(column, state.localSchema, 'cursor');

    // NOTE(review): the cursor is stringified below; for numeric cursor
    // columns the next call compares a string against a number column —
    // confirm the driver coerces this as intended.
    const nextCursor =
        hasMore && data.length > 0
            ?
String((data[data.length - 1] as any)[propKey]) + : null; + + return { data, nextCursor, hasMore } as CursorPaginationResult; +} + +export async function executeImpl( + builder: SchemaQueryBuilder +): Promise { + const query = getQuery(builder); + const rows = await query; + + if (!rows) return []; + if (!Array.isArray(rows)) return [cleanAndMapRow(builder, rows)]; + + return rows.map((row: any) => cleanAndMapRow(builder, row)); +} diff --git a/libs/knex-schema/src/operations/select.ts b/libs/knex-schema/src/operations/select.ts new file mode 100644 index 00000000..8896b2d9 --- /dev/null +++ b/libs/knex-schema/src/operations/select.ts @@ -0,0 +1,256 @@ +// @cleverbrush/knex-schema — SELECT / DISTINCT / aggregates / projections / scopes + +import { + ObjectSchemaBuilder, + SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR +} from '@cleverbrush/schema'; +import type { Knex } from 'knex'; +import { buildColumnMap } from '../columns.js'; +import { getProjections } from '../extension.js'; +import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js'; +import type { ColumnRef } from '../types.js'; +import { + assertNotExplicitSelect, + assertNotProjection, + invalidateCache, + resolveColumn, + resolveColumnArg +} from './helpers.js'; +import { getState } from './state.js'; + +export function selectImpl( + builder: SchemaQueryBuilder, + ...args: unknown[] +): any { + const state = getState(builder); + + if (args.length === 1 && typeof args[0] === 'function') { + const fn = args[0] as (t: any) => unknown; + const tree = ObjectSchemaBuilder.getPropertiesFor( + state.localSchema as any + ); + const result = fn(tree); + if ( + result && + typeof result === 'object' && + !(SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR in (result as object)) + ) { + invalidateCache(builder); + assertNotProjection(builder, 'select'); + state.selectionMode = 'projection'; + state.appliedProjection = ''; + + const aliasMap: Record = {}; + state.explicitSelects ??= []; + for (const [alias, descriptor] of Object.entries( 
// @cleverbrush/knex-schema — SELECT / DISTINCT / aggregates / projections / scopes

import {
    ObjectSchemaBuilder,
    SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR
} from '@cleverbrush/schema';
import type { Knex } from 'knex';
import { buildColumnMap } from '../columns.js';
import { getProjections } from '../extension.js';
import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js';
import type { ColumnRef } from '../types.js';
import {
    assertNotExplicitSelect,
    assertNotProjection,
    invalidateCache,
    resolveColumn,
    resolveColumnArg
} from './helpers.js';
import { getState } from './state.js';

/**
 * .select() — two call shapes:
 *  1. select(t => ({ alias: t.prop, … })) — a selector callback returning an
 *     alias → property-descriptor map; emits aliased columns and switches the
 *     builder into 'projection' mode (appliedProjection stays '' so error
 *     messages can tell it apart from a named projection);
 *  2. select(colRef | raw, …) — plain column refs / raw fragments; switches
 *     into 'select' mode.
 * In both shapes the resolved SQL column names are recorded in
 * state.explicitSelects for later introspection.
 * @throws Error when a selector-map value is not a property descriptor, or a
 *         projection was already applied (via assertNotProjection).
 */
export function selectImpl(
    builder: SchemaQueryBuilder,
    ...args: unknown[]
): any {
    const state = getState(builder);

    if (args.length === 1 && typeof args[0] === 'function') {
        const fn = args[0] as (t: any) => unknown;
        const tree = ObjectSchemaBuilder.getPropertiesFor(
            state.localSchema as any
        );
        const result = fn(tree);
        // Selector shape: callback returned a plain object (NOT itself a
        // descriptor) — treat it as an alias map.
        if (
            result &&
            typeof result === 'object' &&
            !(SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR in (result as object))
        ) {
            invalidateCache(builder);
            assertNotProjection(builder, 'select');
            state.selectionMode = 'projection';
            state.appliedProjection = '';

            const aliasMap: Record<string, string> = {};
            state.explicitSelects ??= [];
            for (const [alias, descriptor] of Object.entries(
                result as Record<string, unknown>
            )) {
                if (
                    !descriptor ||
                    typeof descriptor !== 'object' ||
                    !(
                        SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR in
                        (descriptor as object)
                    )
                ) {
                    throw new Error(
                        `select(selector): value for alias "${alias}" must be a property descriptor (e.g. \`t.someProp\`).`
                    );
                }
                // resolveColumn expects an accessor fn; wrap the descriptor.
                const col = resolveColumn(
                    builder,
                    (() => descriptor) as ColumnRef,
                    `select(selector).${alias}`
                );
                aliasMap[alias] = col as string;
                state.explicitSelects.push(col as string);
            }
            state.baseQuery.select(aliasMap);
            return builder;
        }
    }

    // Plain column-list shape.
    invalidateCache(builder);
    assertNotProjection(builder, 'select');
    state.selectionMode = 'select';
    const resolved = (args as (ColumnRef | Knex.Raw)[]).map(c =>
        resolveColumnArg(builder, c)
    );
    state.baseQuery.select(...(resolved as string[]));
    state.explicitSelects ??= [];
    for (const r of resolved) {
        // Raw fragments resolve to non-strings; only record real columns.
        if (typeof r === 'string') {
            state.explicitSelects.push(r);
        }
    }
    return builder;
}

/** SELECT DISTINCT over the given columns / raw fragments. */
export function distinctImpl(
    builder: SchemaQueryBuilder,
    ...columns: (ColumnRef | Knex.Raw)[]
): any {
    invalidateCache(builder);
    const resolved = columns.map(c => resolveColumnArg(builder, c));
    getState(builder).baseQuery.distinct(...(resolved as string[]));
    return builder;
}

/** COUNT(column) or COUNT(*) when no column given; enters 'aggregate' mode. */
export function countImpl(
    builder: SchemaQueryBuilder,
    column?: ColumnRef | Knex.Raw
): any {
    const state = getState(builder);
    invalidateCache(builder);
    assertNotProjection(builder, 'count');
    state.selectionMode = 'aggregate';
    if (column) {
        state.baseQuery.count(resolveColumnArg(builder, column) as string);
    } else {
        state.baseQuery.count();
    }
    return builder;
}

/** COUNT(DISTINCT column) (or bare countDistinct); enters 'aggregate' mode. */
export function countDistinctImpl(
    builder: SchemaQueryBuilder,
    column?: ColumnRef | Knex.Raw
): any {
    const state = getState(builder);
    invalidateCache(builder);
    assertNotProjection(builder, 'countDistinct');
    state.selectionMode = 'aggregate';
    if (column) {
        state.baseQuery.countDistinct(
            resolveColumnArg(builder, column) as string
        );
    } else {
        state.baseQuery.countDistinct();
    }
    return builder;
}

/** MIN(column); enters 'aggregate' mode. */
export function minImpl(
    builder: SchemaQueryBuilder,
    column: ColumnRef | Knex.Raw
): any {
    const state = getState(builder);
    invalidateCache(builder);
    assertNotProjection(builder, 'min');
    state.selectionMode = 'aggregate';
    state.baseQuery.min(resolveColumnArg(builder, column) as string);
    return builder;
}

/** MAX(column); enters 'aggregate' mode. */
export function maxImpl(
    builder: SchemaQueryBuilder,
    column: ColumnRef | Knex.Raw
): any {
    const state = getState(builder);
    invalidateCache(builder);
    assertNotProjection(builder, 'max');
    state.selectionMode = 'aggregate';
    state.baseQuery.max(resolveColumnArg(builder, column) as string);
    return builder;
}

/** SUM(column); enters 'aggregate' mode. */
export function sumImpl(
    builder: SchemaQueryBuilder,
    column: ColumnRef | Knex.Raw
): any {
    const state = getState(builder);
    invalidateCache(builder);
    assertNotProjection(builder, 'sum');
    state.selectionMode = 'aggregate';
    state.baseQuery.sum(resolveColumnArg(builder, column) as string);
    return builder;
}

/** AVG(column); enters 'aggregate' mode. */
export function avgImpl(
    builder: SchemaQueryBuilder,
    column: ColumnRef | Knex.Raw
): any {
    const state = getState(builder);
    invalidateCache(builder);
    assertNotProjection(builder, 'avg');
    state.selectionMode = 'aggregate';
    state.baseQuery.avg(resolveColumnArg(builder, column) as string);
    return builder;
}

/** Raw SELECT expression with optional bindings; no column-name mapping. */
export function selectRawImpl(
    builder: SchemaQueryBuilder,
    sql: string,
    bindings?: any[]
): any {
    const state = getState(builder);
    invalidateCache(builder);
    if (bindings) {
        state.baseQuery.select(state.knex.raw(sql, bindings));
    } else {
        state.baseQuery.select(state.knex.raw(sql));
    }
    return builder;
}

/**
 * Applies a named projection registered on the schema: selects exactly the
 * projection's columns (property keys mapped to SQL columns). Only one
 * projection per query; mutually exclusive with explicit .select().
 * @throws Error on a second projection, or an unknown projection name.
 */
export function projectedImpl(
    builder: SchemaQueryBuilder,
    name: string
): any {
    const state = getState(builder);
    assertNotExplicitSelect(builder, 'projected');
    if (state.selectionMode === 'projection') {
        throw new Error(
            `Cannot call .projected('${name}') — .projected('${
                state.appliedProjection
            }') was already applied. Only one projection per query.`
        );
    }
    const projections = getProjections(state.localSchema as any);
    const projection = projections[name];
    if (!projection) {
        throw new Error(
            `Unknown projection "${name}" on schema for table "${
                state.tableName
            }"`
        );
    }
    const { propToCol } = buildColumnMap(state.localSchema as any);
    // Fall back to the property key when no column mapping exists.
    const sqlCols = projection.keys.map(key => propToCol.get(key) ?? key);
    state.baseQuery.select(...sqlCols);
    state.explicitSelects ??= [];
    for (const col of sqlCols) {
        state.explicitSelects.push(col);
    }
    state.selectionMode = 'projection';
    state.appliedProjection = name;
    invalidateCache(builder);
    return builder;
}

/**
 * Applies a named scope (a query-mutating callback) registered on the schema
 * via the 'scopes' extension.
 * @throws Error when the scope name is unknown.
 */
export function scopedImpl(
    builder: SchemaQueryBuilder,
    name: string
): any {
    const state = getState(builder);
    invalidateCache(builder);
    // NOTE(review): scope value type reconstructed — original type args were
    // lost in extraction; assumed (qb) => void. Confirm against the schema
    // extension API.
    const scopes = (state.localSchema as any).getExtension?.('scopes') as
        | Record<string, (qb: any) => void>
        | undefined;
    const scopeFn = scopes?.[name];
    if (!scopeFn) {
        throw new Error(
            `Unknown scope "${name}" on schema for table "${state.tableName}"`
        );
    }
    scopeFn(builder);
    return builder;
}

/** Disables both the default scope and the soft-delete filter for this query. */
export function unscopedImpl(builder: SchemaQueryBuilder): any {
    const state = getState(builder);
    invalidateCache(builder);
    state.skipDefaultScope = true;
    state.includeDeleted = true;
    return builder;
}

// ---- src/operations/state.ts (file header + imports; definitions follow) ----

// @cleverbrush/knex-schema — Shared mutable state store for SchemaQueryBuilder

import type { ObjectSchemaBuilder } from '@cleverbrush/schema';
import type { Knex } from 'knex';
import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js';
import type {
    ResolvedVariantConfig,
    ValidatedSpec,
    VariantWhereFilter
} from '../types.js';
+export interface QueryBuilderState { + knex: Knex; + baseQuery: Knex.QueryBuilder; + localSchema: ObjectSchemaBuilder; + specs: ValidatedSpec[]; + tableName: string; + + /** SQL column names explicitly passed to `.select()`. null = SELECT *. */ + explicitSelects: string[] | null; + + /** Column-selection mode: null, 'select', 'aggregate', or 'projection'. */ + selectionMode: 'select' | 'aggregate' | 'projection' | null; + + /** Name of the projection currently applied, for error messages. */ + appliedProjection: string | null; + + /** When true, soft-delete filter is NOT applied. */ + includeDeleted: boolean; + + /** When true, only soft-deleted rows are returned. */ + onlyDeleted: boolean; + + /** When true, default scope is not applied. */ + skipDefaultScope: boolean; + + /** Resolved variant config, lazily populated. undefined = not yet read; null = not polymorphic. */ + variantConfig: ResolvedVariantConfig | null | undefined; + + /** When set, only these discriminator values are returned. null = all variants. */ + enabledVariants: Set | null; + + /** Pending per-variant WHERE filters registered via .whereVariant(). */ + variantWhereFilters: VariantWhereFilter[]; + + /** Variant-relation eager-load requests registered via .includeVariant(). */ + variantRelationIncludes: Array<{ + variantKey: string; + relationName: string; + customize?: (q: SchemaQueryBuilder) => void; + }>; + + /** Memoized result of buildQuery(). null = needs rebuild. 
*/ + cachedBuiltQuery: Knex.QueryBuilder | null; +} + +const STATE = new WeakMap, QueryBuilderState>(); + +export function getState( + builder: SchemaQueryBuilder +): QueryBuilderState { + const s = STATE.get(builder); + if (!s) { + throw new Error( + 'SchemaQueryBuilder state not found — builder was not properly initialized' + ); + } + return s; +} + +export function setState( + builder: SchemaQueryBuilder, + state: QueryBuilderState +): void { + STATE.set(builder, state); +} diff --git a/libs/knex-schema/src/operations/update.ts b/libs/knex-schema/src/operations/update.ts new file mode 100644 index 00000000..ba89037a --- /dev/null +++ b/libs/knex-schema/src/operations/update.ts @@ -0,0 +1,146 @@ +// @cleverbrush/knex-schema — UPDATE / bulk-update operations + +import type { InferType } from '@cleverbrush/schema'; +import type { Knex } from 'knex'; +import { buildColumnMap } from '../columns.js'; +import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js'; +import { + getTimestamps, + mapObjectToColumns, + mapRow, + resolvePkColumns +} from './helpers.js'; +import { getState } from './state.js'; + +export async function updateImpl( + builder: SchemaQueryBuilder, + data: Partial> +): Promise { + const state = getState(builder); + + let processedData = { ...(data as Record) }; + + const beforeHooks = + ((state.localSchema as any).getExtension?.('beforeUpdate') as + | Function[] + | undefined) ?? []; + for (const hook of beforeHooks) { + processedData = (await hook(processedData)) ?? 
// @cleverbrush/knex-schema — UPDATE / bulk-update operations

import type { InferType } from '@cleverbrush/schema';
import type { Knex } from 'knex';
import { buildColumnMap } from '../columns.js';
import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js';
import {
    getTimestamps,
    mapObjectToColumns,
    mapRow,
    resolvePkColumns
} from './helpers.js';
import { getState } from './state.js';

/**
 * UPDATE … RETURNING * over the builder's current WHERE filters.
 * Pipeline: run schema 'beforeUpdate' hooks (a hook may return a replacement
 * payload) → map property keys to SQL columns → stamp updatedAt when the
 * schema declares timestamps → update and map returned rows back to
 * property names.
 */
export async function updateImpl(
    builder: SchemaQueryBuilder,
    data: Partial<InferType<any>>
): Promise<any[]> {
    const state = getState(builder);

    let processedData = { ...(data as Record<string, unknown>) };

    const beforeHooks =
        ((state.localSchema as any).getExtension?.('beforeUpdate') as
            | Function[]
            | undefined) ?? [];
    for (const hook of beforeHooks) {
        // A hook returning undefined/null keeps the previous payload.
        processedData = (await hook(processedData)) ?? processedData;
    }

    const mapped = mapObjectToColumns(builder, processedData);

    const timestamps = getTimestamps(builder);
    if (timestamps) {
        mapped[timestamps.updatedAt] = state.knex.fn.now();
    }

    const rows = await state.baseQuery.update(mapped).returning('*');
    return rows.map((row: any) => mapRow(builder, row));
}

/**
 * Updates many rows with per-row values in a SINGLE statement using
 * `CASE WHEN pk = ? THEN ? … ELSE col END` expressions per column, scoped by
 * a whereIn (single-column PK) or OR'd AND-groups (composite PK).
 * Each entry's `where` must contain the full primary key (property or column
 * key); 'beforeUpdate' hooks and the updatedAt timestamp are applied per row.
 * @returns the affected-row count reported by the driver.
 * @throws Error when a `where` clause is missing a primary-key part.
 */
export async function bulkUpdateImpl(
    builder: SchemaQueryBuilder,
    updates: ReadonlyArray<{
        where: Partial<InferType<any>>;
        set: Partial<InferType<any>>;
    }>
): Promise<number> {
    if (updates.length === 0) return 0;

    const state = getState(builder);
    const pk = resolvePkColumns(builder);
    const { propToCol } = buildColumnMap(state.localSchema as any);
    const beforeHooks =
        ((state.localSchema as any).getExtension?.('beforeUpdate') as
            | Function[]
            | undefined) ?? [];
    const timestamps = getTimestamps(builder);

    // Phase 1: run hooks, map columns, and extract PK values per entry.
    const processed: Array<{
        pkValues: unknown[];
        set: Record<string, unknown>;
    }> = [];
    for (const entry of updates) {
        let setData = { ...(entry.set as Record<string, unknown>) };
        for (const hook of beforeHooks) {
            setData = (await hook(setData)) ?? setData;
        }
        const setMapped = mapObjectToColumns(builder, setData);
        if (timestamps) {
            setMapped[timestamps.updatedAt] = state.knex.fn.now();
        }

        const whereRec = entry.where as Record<string, unknown>;
        const pkValues: unknown[] = [];
        for (const propKey of pk.propertyKeys) {
            // Accept the PK under either its property key or its SQL column.
            const value =
                propKey in whereRec
                    ? whereRec[propKey]
                    : (whereRec[propToCol.get(propKey) ?? propKey] as
                          | unknown
                          | undefined);
            if (value === undefined) {
                throw new Error(
                    `bulkUpdate: each \`where\` clause must include the entity's primary key (missing "${propKey}").`
                );
            }
            pkValues.push(value);
        }
        processed.push({ pkValues, set: setMapped });
    }

    // Phase 2: collect the union of all updated columns.
    const allSetCols = new Set<string>();
    for (const p of processed) {
        for (const k of Object.keys(p.set)) allSetCols.add(k);
    }
    if (allSetCols.size === 0) return 0;

    // Phase 3: build one CASE expression per column. Binding order must match
    // fragment order exactly: (??-identifier, ?-value) pairs per WHEN, then
    // the THEN value; the trailing ?? is the ELSE column self-reference.
    const knex = state.knex;
    const updateExpr: Record<string, unknown> = {};
    for (const col of allSetCols) {
        const fragments: string[] = [];
        const bindings: unknown[] = [];
        for (const p of processed) {
            // Rows that don't touch this column fall through to ELSE.
            if (!(col in p.set)) continue;
            if (pk.columnNames.length === 1) {
                fragments.push('WHEN ?? = ? THEN ?');
                bindings.push(pk.columnNames[0], p.pkValues[0], p.set[col]);
            } else {
                const conditions = pk.columnNames
                    .map(() => '?? = ?')
                    .join(' AND ');
                fragments.push(`WHEN ${conditions} THEN ?`);
                for (let i = 0; i < pk.columnNames.length; i++) {
                    bindings.push(pk.columnNames[i], p.pkValues[i]);
                }
                bindings.push(p.set[col]);
            }
        }
        if (fragments.length === 0) continue;
        updateExpr[col] = knex.raw(`CASE ${fragments.join(' ')} ELSE ?? END`, [
            ...bindings,
            col
        ] as any);
    }

    // Phase 4: scope the UPDATE to exactly the targeted rows.
    let qb: any = knex(state.tableName).update(updateExpr);
    if (pk.columnNames.length === 1) {
        qb = qb.whereIn(
            pk.columnNames[0],
            processed.map(p => p.pkValues[0])
        );
    } else {
        qb = qb.where(function (this: Knex.QueryBuilder) {
            for (const p of processed) {
                this.orWhere(function (this: Knex.QueryBuilder) {
                    for (let i = 0; i < pk.columnNames.length; i++) {
                        this.andWhere(pk.columnNames[i], p.pkValues[i] as any);
                    }
                });
            }
        });
    }

    return await qb;
}

// ---- src/operations/where.ts (file header + imports; definitions follow) ----

// @cleverbrush/knex-schema — WHERE / ORDER BY / GROUP BY / HAVING operations

import type { Knex } from 'knex';
import type { SchemaQueryBuilder } from '../SchemaQueryBuilder.js';
import type { ColumnRef } from '../types.js';
import {
    invalidateCache,
    isColumnAccessor,
    mapRecordToColumns,
    resolveColumn,
    resolveColumnArg
} from './helpers.js';
import { getState } from './state.js';

/**
 * .where() — three call shapes, mirroring knex:
 *  1. where(callback)            — grouped sub-clause (a function that is NOT
 *     a column accessor);
 *  2. where({ prop: value, … })  — record whose property keys are mapped to
 *     SQL columns (objects with toSQL, i.e. Raw, skip this branch);
 *  3. where(colRef, [op,] value) — column ref / raw fragment plus args.
 */
export function whereImpl(
    builder: SchemaQueryBuilder,
    columnOrRaw: any,
    ...args: any[]
): any {
    const state = getState(builder);
    invalidateCache(builder);
    if (
        typeof columnOrRaw === 'function' &&
        !isColumnAccessor(builder, columnOrRaw)
    ) {
        (state.baseQuery.where as any)(columnOrRaw, ...args);
    } else if (
        typeof columnOrRaw === 'object' &&
        columnOrRaw !== null &&
        !('toSQL' in columnOrRaw)
    ) {
        const mapped = mapRecordToColumns(
            builder,
            columnOrRaw as Record<string, unknown>
        );
        (state.baseQuery.where as any)(mapped, ...args);
    } else {
        const col = resolveColumnArg(builder, columnOrRaw);
        (state.baseQuery.where as any)(col, ...args);
    }
    return builder;
}

/** .andWhere() — same three call shapes as whereImpl(). */
export function andWhereImpl(
    builder: SchemaQueryBuilder,
    columnOrRaw: any,
    ...args: any[]
): any {
    const state = getState(builder);
    invalidateCache(builder);
    if (
        typeof columnOrRaw === 'function' &&
        !isColumnAccessor(builder, columnOrRaw)
    ) {
        (state.baseQuery.andWhere as any)(columnOrRaw, ...args);
    } else if (
        typeof columnOrRaw === 'object' &&
        columnOrRaw !== null &&
        !('toSQL' in columnOrRaw)
    ) {
        const mapped = mapRecordToColumns(
            builder,
            columnOrRaw as Record<string, unknown>
        );
        (state.baseQuery.andWhere as any)(mapped, ...args);
    } else {
        const col = resolveColumnArg(builder, columnOrRaw);
        (state.baseQuery.andWhere as any)(col, ...args);
    }
    return builder;
}
invalidateCache(builder); + if ( + typeof columnOrRaw === 'function' && + !isColumnAccessor(builder, columnOrRaw) + ) { + (state.baseQuery.andWhere as any)(columnOrRaw, ...args); + } else if ( + typeof columnOrRaw === 'object' && + columnOrRaw !== null && + !('toSQL' in columnOrRaw) + ) { + const mapped = mapRecordToColumns( + builder, + columnOrRaw as Record + ); + (state.baseQuery.andWhere as any)(mapped, ...args); + } else { + const col = resolveColumnArg(builder, columnOrRaw); + (state.baseQuery.andWhere as any)(col, ...args); + } + return builder; +} + +export function orWhereImpl( + builder: SchemaQueryBuilder, + columnOrRaw: any, + ...args: any[] +): any { + const state = getState(builder); + invalidateCache(builder); + if ( + typeof columnOrRaw === 'function' && + !isColumnAccessor(builder, columnOrRaw) + ) { + (state.baseQuery.orWhere as any)(columnOrRaw, ...args); + } else if ( + typeof columnOrRaw === 'object' && + columnOrRaw !== null && + !('toSQL' in columnOrRaw) + ) { + const mapped = mapRecordToColumns( + builder, + columnOrRaw as Record + ); + (state.baseQuery.orWhere as any)(mapped, ...args); + } else { + const col = resolveColumnArg(builder, columnOrRaw); + (state.baseQuery.orWhere as any)(col, ...args); + } + return builder; +} + +export function whereNotImpl( + builder: SchemaQueryBuilder, + columnOrRaw: any, + ...args: any[] +): any { + const state = getState(builder); + invalidateCache(builder); + if ( + typeof columnOrRaw === 'function' && + !isColumnAccessor(builder, columnOrRaw) + ) { + (state.baseQuery.whereNot as any)(columnOrRaw, ...args); + } else if ( + typeof columnOrRaw === 'object' && + columnOrRaw !== null && + !('toSQL' in columnOrRaw) + ) { + const mapped = mapRecordToColumns( + builder, + columnOrRaw as Record + ); + (state.baseQuery.whereNot as any)(mapped, ...args); + } else { + const col = resolveColumnArg(builder, columnOrRaw); + (state.baseQuery.whereNot as any)(col, ...args); + } + return builder; +} + +export function 
whereInImpl( + builder: SchemaQueryBuilder, + column: ColumnRef, + values: readonly any[] | Knex.QueryBuilder +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.whereIn( + resolveColumn(builder, column, 'whereIn') as any, + values as any + ); + return builder; +} + +export function whereNotInImpl( + builder: SchemaQueryBuilder, + column: ColumnRef, + values: readonly any[] | Knex.QueryBuilder +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.whereNotIn( + resolveColumn(builder, column, 'whereNotIn') as any, + values as any + ); + return builder; +} + +export function orWhereInImpl( + builder: SchemaQueryBuilder, + column: ColumnRef, + values: readonly any[] | Knex.QueryBuilder +): any { + const state = getState(builder); + invalidateCache(builder); + (state.baseQuery as any).orWhereIn( + resolveColumn(builder, column, 'orWhereIn'), + values as any + ); + return builder; +} + +export function orWhereNotInImpl( + builder: SchemaQueryBuilder, + column: ColumnRef, + values: readonly any[] | Knex.QueryBuilder +): any { + const state = getState(builder); + invalidateCache(builder); + (state.baseQuery as any).orWhereNotIn( + resolveColumn(builder, column, 'orWhereNotIn'), + values as any + ); + return builder; +} + +export function whereNullImpl( + builder: SchemaQueryBuilder, + column: ColumnRef +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.whereNull( + resolveColumn(builder, column, 'whereNull') as any + ); + return builder; +} + +export function whereNotNullImpl( + builder: SchemaQueryBuilder, + column: ColumnRef +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.whereNotNull( + resolveColumn(builder, column, 'whereNotNull') as any + ); + return builder; +} + +export function orWhereNullImpl( + builder: SchemaQueryBuilder, + column: ColumnRef +): any { + const state = getState(builder); + 
invalidateCache(builder); + (state.baseQuery as any).orWhereNull( + resolveColumn(builder, column, 'orWhereNull') + ); + return builder; +} + +export function orWhereNotNullImpl( + builder: SchemaQueryBuilder, + column: ColumnRef +): any { + const state = getState(builder); + invalidateCache(builder); + (state.baseQuery as any).orWhereNotNull( + resolveColumn(builder, column, 'orWhereNotNull') + ); + return builder; +} + +export function whereBetweenImpl( + builder: SchemaQueryBuilder, + column: ColumnRef, + range: readonly [any, any] +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.whereBetween( + resolveColumn(builder, column, 'whereBetween') as any, + range as [any, any] + ); + return builder; +} + +export function whereNotBetweenImpl( + builder: SchemaQueryBuilder, + column: ColumnRef, + range: readonly [any, any] +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.whereNotBetween( + resolveColumn(builder, column, 'whereNotBetween') as any, + range as [any, any] + ); + return builder; +} + +export function whereLikeImpl( + builder: SchemaQueryBuilder, + column: ColumnRef, + value: string +): any { + const state = getState(builder); + invalidateCache(builder); + (state.baseQuery as any).whereLike( + resolveColumn(builder, column, 'whereLike'), + value + ); + return builder; +} + +export function whereILikeImpl( + builder: SchemaQueryBuilder, + column: ColumnRef, + value: string +): any { + const state = getState(builder); + invalidateCache(builder); + (state.baseQuery as any).whereILike( + resolveColumn(builder, column, 'whereILike'), + value + ); + return builder; +} + +export function whereRawImpl( + builder: SchemaQueryBuilder, + sql: string, + ...bindings: any[] +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.whereRaw(sql, ...bindings); + return builder; +} + +export function whereExistsImpl( + builder: SchemaQueryBuilder, + callback: 
Knex.QueryCallback | Knex.QueryBuilder +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.whereExists(callback as any); + return builder; +} + +export function whereNotExistsImpl( + builder: SchemaQueryBuilder, + callback: Knex.QueryCallback | Knex.QueryBuilder +): any { + const state = getState(builder); + invalidateCache(builder); + (state.baseQuery as any).whereNotExists(callback as any); + return builder; +} + +export function whereJsonPathImpl( + builder: SchemaQueryBuilder, + column: ColumnRef, + path: string, + operator?: string, + value?: any +): any { + const state = getState(builder); + invalidateCache(builder); + const client = (state.knex as any).client?.config?.client as + | string + | undefined; + if (client !== 'pg' && client !== 'postgresql' && client !== 'postgres') { + throw new Error( + `whereJsonPath() is only supported on PostgreSQL (got client: "${client ?? 'unknown'}")` + ); + } + + const col = resolveColumn(builder, column, 'whereJsonPath'); + const op = operator ?? '='; + + if (op === '@?' || op === '@@') { + const escapedOp = op === '@?' ? '@\\?' : '@@'; + state.baseQuery.whereRaw(`?? ${escapedOp} ?`, [col, path]); + } else { + const jsonPath = path.startsWith('$') + ? path + : `$.${path.replace(/\./g, '.')}`; + state.baseQuery.whereRaw( + `jsonb_path_query_first(??, ?) 
${op} ?::jsonb`, + [col, jsonPath, JSON.stringify(value)] + ); + } + return builder; +} + +export function orderByImpl( + builder: SchemaQueryBuilder, + column: ColumnRef | Knex.Raw, + direction?: 'asc' | 'desc' +): any { + const state = getState(builder); + invalidateCache(builder); + const col = resolveColumnArg(builder, column); + state.baseQuery.orderBy(col as string, direction); + return builder; +} + +export function orderByRawImpl( + builder: SchemaQueryBuilder, + sql: string, + ...bindings: any[] +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.orderByRaw(sql, ...bindings); + return builder; +} + +export function groupByImpl( + builder: SchemaQueryBuilder, + ...columns: (ColumnRef | Knex.Raw)[] +): any { + const state = getState(builder); + invalidateCache(builder); + const resolved = columns.map(c => resolveColumnArg(builder, c)); + state.baseQuery.groupBy(...(resolved as string[])); + return builder; +} + +export function groupByRawImpl( + builder: SchemaQueryBuilder, + sql: string, + ...bindings: any[] +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.groupByRaw(sql, ...bindings); + return builder; +} + +export function havingImpl( + builder: SchemaQueryBuilder, + column: ColumnRef | Knex.Raw, + operator: string, + value: any +): any { + const state = getState(builder); + invalidateCache(builder); + const col = resolveColumnArg(builder, column); + state.baseQuery.having(col as string, operator, value); + return builder; +} + +export function havingRawImpl( + builder: SchemaQueryBuilder, + sql: string, + ...bindings: any[] +): any { + const state = getState(builder); + invalidateCache(builder); + state.baseQuery.havingRaw(sql, ...bindings); + return builder; +} diff --git a/libs/schema-json/README.md b/libs/schema-json/README.md index 9c5e5888..6e81b819 100644 --- a/libs/schema-json/README.md +++ b/libs/schema-json/README.md @@ -116,7 +116,7 @@ const schema = 
fromJsonSchema(S); // ObjectSchemaBuilder<{ x: NumberSchemaBuilde | `enum` | `union(…)` of const builders | | `anyOf` | `union(…)` of sub-builders | | `anyOf` + `discriminator` | auto-emitted for discriminated `union()` branches (see below) | -| `allOf` | not supported — falls back to `any()` | +| `allOf` | `intersection(...)` — chains sub-schemas via the `intersection()` builder | | `minLength` / `maxLength` | `.minLength()` / `.maxLength()` | | `pattern` | `.matches(regex)` (invalid patterns silently ignored) | | `minimum` / `maximum` | `.min()` / `.max()` | @@ -288,7 +288,6 @@ type B = JsonSchemaNodeToBuilder; | `$ref` / `$defs` | Not supported in `fromJsonSchema` | | `if` / `then` / `else` | Not supported | | `not` | Not supported | -| `allOf` in `fromJsonSchema` | Falls back to `SchemaBuilder` (no deep merge) | | Dual IP format (`ip()` with both v4 + v6) | `format` is omitted in `toJsonSchema` output (no standard keyword covers both) | | JSDoc comments on properties | Not preserved in `toJsonSchema` output | | `nameResolver` + `$ref` / `$defs` round-trip | `nameResolver` emits `$ref` pointers based on external registry; `fromJsonSchema` does not resolve `$ref` references — they fall back to `any()` | diff --git a/libs/schema-json/src/fromJsonSchema.test.ts b/libs/schema-json/src/fromJsonSchema.test.ts index f51f671f..5c67a764 100644 --- a/libs/schema-json/src/fromJsonSchema.test.ts +++ b/libs/schema-json/src/fromJsonSchema.test.ts @@ -1,4 +1,4 @@ -import type { InferType } from '@cleverbrush/schema'; +import type { InferType, IntersectionSchemaBuilder } from '@cleverbrush/schema'; import { expect, expectTypeOf, test } from 'vitest'; import { fromJsonSchema } from './fromJsonSchema.js'; @@ -319,15 +319,98 @@ test('fromJsonSchema - 28: anyOf accepts either type', () => { expect(valid(schema, true)).toBe(false); }); -test('fromJsonSchema - 28b: allOf falls back to any() (not supported)', () => { +test('fromJsonSchema - 28b: allOf maps to intersection', () => { 
const schema = fromJsonSchema({ - allOf: [{ type: 'string' }, { minLength: 1 }] + allOf: [ + { type: 'string', minLength: 1 }, + { type: 'string', maxLength: 10 } + ] } as const); - // allOf is not supported; falls back to any() which accepts anything - expectTypeOf>().toMatchTypeOf(); + + // Type-level: InferType should be string (intersection of string & string) + expectTypeOf>().toMatchTypeOf(); + + // Type-level: should NOT be unknown + expectTypeOf>().not.toMatchTypeOf(); + + // Type-level: schema should be an IntersectionSchemaBuilder + expectTypeOf().toMatchTypeOf< + IntersectionSchemaBuilder + >(); + expect(valid(schema, 'hello')).toBe(true); - expect(valid(schema, 42)).toBe(true); - expect(valid(schema, null)).toBe(false); + expect(valid(schema, '')).toBe(false); + expect(valid(schema, 'a'.repeat(11))).toBe(false); +}); + +test('fromJsonSchema - 28c: allOf type is not unknown', () => { + const schema = fromJsonSchema({ + allOf: [ + { + type: 'object', + properties: { name: { type: 'string' } }, + required: ['name'] + }, + { + type: 'object', + properties: { age: { type: 'number' } }, + required: ['age'] + } + ] + } as const); + + // InferType should be { name: string } & { age: number }, not unknown + expectTypeOf>().toMatchTypeOf<{ + name: string; + age: number; + }>(); + expectTypeOf>().not.toMatchTypeOf(); +}); + +test('fromJsonSchema - 28d: allOf three elements produces intersection type', () => { + const schema = fromJsonSchema({ + allOf: [ + { + type: 'object', + properties: { name: { type: 'string' } }, + required: ['name'] + }, + { + type: 'object', + properties: { age: { type: 'number' } }, + required: ['age'] + }, + { + type: 'object', + properties: { email: { type: 'string' } }, + required: ['email'] + } + ] + } as const); + + expectTypeOf>().toMatchTypeOf<{ + name: string; + age: number; + email: string; + }>(); + expectTypeOf>().not.toMatchTypeOf(); +}); + +test('fromJsonSchema - 28e: allOf single element returns direct builder type', () => { 
+ const schema = fromJsonSchema({ + allOf: [{ type: 'string', minLength: 1 }] + } as const); + + expectTypeOf>().toMatchTypeOf(); + expectTypeOf>().not.toMatchTypeOf(); +}); + +test('fromJsonSchema - 28f: allOf empty returns unknown builder', () => { + const schema = fromJsonSchema({ + allOf: [] + } as const); + + expectTypeOf>().toMatchTypeOf(); }); // --------------------------------------------------------------------------- diff --git a/libs/schema-json/src/fromJsonSchema.ts b/libs/schema-json/src/fromJsonSchema.ts index b67836ae..4ccbc3e8 100644 --- a/libs/schema-json/src/fromJsonSchema.ts +++ b/libs/schema-json/src/fromJsonSchema.ts @@ -3,6 +3,7 @@ import { any, array, boolean, + intersection, nul, number, object, @@ -35,8 +36,8 @@ function buildNode(s: unknown): SchemaBuilder { else if ('const' in node) b = buildConst(node['const']); else if ('anyOf' in node && Array.isArray(node['anyOf'])) b = buildAnyOf(node['anyOf']); - // allOf is not supported (no intersection builder); fall back to any() - else if ('allOf' in node && Array.isArray(node['allOf'])) b = any(); + else if ('allOf' in node && Array.isArray(node['allOf'])) + b = buildAllOf(node['allOf']); else if (!('type' in node)) b = any(); else { switch (node['type']) { @@ -210,6 +211,15 @@ function buildAnyOf(options: unknown[]): SchemaBuilder { return b; } +function buildAllOf(options: unknown[]): SchemaBuilder { + if (options.length === 0) return any(); + let b: any = buildNode(options[0]); + for (let i = 1; i < options.length; i++) { + b = intersection(b, buildNode(options[i])); + } + return b; +} + /** * Converts a JSON Schema object into a `@cleverbrush/schema` builder. 
* @@ -248,9 +258,8 @@ function buildAnyOf(options: unknown[]): SchemaBuilder { * | `format: 'ipv6'` | `.ip({ version: 'v6' })` | * | `format: 'date-time'` | `.matches(iso8601 regex)` | * - * Keywords **not** supported: `allOf` (falls back to `any()`), `$ref`, - * `$defs`, `if/then/else`, `not`, `contains`, `unevaluatedProperties`, - * `contentEncoding`. + * Keywords **not** supported: `$ref`, `$defs`, `if/then/else`, `not`, + * `contains`, `unevaluatedProperties`, `contentEncoding`. * * @param schema - A JSON Schema literal. Pass with `as const` for precise * TypeScript type inference on the returned builder. diff --git a/libs/schema-json/src/standardJsonSchema.test.ts b/libs/schema-json/src/standardJsonSchema.test.ts index fc19a526..7c9f13f3 100644 --- a/libs/schema-json/src/standardJsonSchema.test.ts +++ b/libs/schema-json/src/standardJsonSchema.test.ts @@ -113,12 +113,12 @@ describe('withStandardJsonSchema', () => { // Preserves original schema behaviour // ----------------------------------------------------------------------- - test('wrapped schema still validates via ~standard.validate', () => { + test('wrapped schema still validates via ~standard.validate', async () => { const wrapped = withStandardJsonSchema(string()); - const pass = wrapped['~standard'].validate('hello'); + const pass = await wrapped['~standard'].validate('hello'); expect(pass).toEqual({ value: 'hello' }); - const fail = wrapped['~standard'].validate(123); + const fail = await wrapped['~standard'].validate(123); expect(fail).toHaveProperty('issues'); }); diff --git a/libs/schema-json/src/toJsonSchema.test.ts b/libs/schema-json/src/toJsonSchema.test.ts index 8632a496..e0414d8e 100644 --- a/libs/schema-json/src/toJsonSchema.test.ts +++ b/libs/schema-json/src/toJsonSchema.test.ts @@ -3,6 +3,7 @@ import { array, boolean, date, + intersection, lazy, nul, number, @@ -306,6 +307,60 @@ test('toJsonSchema - 28d: discriminated union with nameResolver → mapping', () expect(anyOf[1]).toEqual({ $ref: 
'#/components/schemas/Dog' }); }); +test('toJsonSchema - intersection of objects → allOf', () => { + const result = toJsonSchema( + intersection(object({ name: string() }), object({ age: number() })), + { $schema: false } + ); + expect(result).toEqual({ + allOf: [ + { + type: 'object', + additionalProperties: false, + properties: { name: { type: 'string' } }, + required: ['name'] + }, + { + type: 'object', + additionalProperties: false, + properties: { age: { type: 'integer' } }, + required: ['age'] + } + ] + }); +}); + +test('toJsonSchema - intersection nullable → oneOf with null', () => { + const result = toJsonSchema( + intersection( + object({ name: string() }), + object({ age: number() }) + ).nullable(), + { $schema: false } + ); + expect(result).toEqual({ + oneOf: [ + { + allOf: [ + { + type: 'object', + additionalProperties: false, + properties: { name: { type: 'string' } }, + required: ['name'] + }, + { + type: 'object', + additionalProperties: false, + properties: { age: { type: 'integer' } }, + required: ['age'] + } + ] + }, + { type: 'null' } + ] + }); +}); + // --------------------------------------------------------------------------- // any // --------------------------------------------------------------------------- diff --git a/libs/schema-json/src/toJsonSchema.ts b/libs/schema-json/src/toJsonSchema.ts index 606ff87d..a322e59b 100644 --- a/libs/schema-json/src/toJsonSchema.ts +++ b/libs/schema-json/src/toJsonSchema.ts @@ -211,6 +211,18 @@ function convertNodeInner( return out; } + case 'intersection': { + const left = info.left as SchemaBuilder; + const right = info.right as SchemaBuilder; + return { + ...readOnly, + allOf: [ + convertNode(left, resolver), + convertNode(right, resolver) + ] + }; + } + case 'lazy': { // Resolve the lazy schema once and delegate conversion. 
// If the resolved schema has a name registered in the nameResolver, @@ -264,6 +276,10 @@ function convertNode( const anyOf = out['anyOf'] as Out[]; const hasNull = anyOf.some(o => o['type'] === 'null'); if (!hasNull) anyOf.push({ type: 'null' }); + } else if (out['allOf'] !== undefined && out['type'] === undefined) { + // Intersection type without a top-level type — wrap in oneOf with null + out['oneOf'] = [{ allOf: out['allOf'] as Out[] }, { type: 'null' }]; + delete out['allOf']; } else if (out['enum'] !== undefined) { // Enum — add null to enum values if not already present const enumValues = out['enum'] as unknown[]; diff --git a/libs/schema-json/src/types.ts b/libs/schema-json/src/types.ts index ad79f78e..089b9748 100644 --- a/libs/schema-json/src/types.ts +++ b/libs/schema-json/src/types.ts @@ -8,6 +8,7 @@ import type { ExtendedArray, ExtendedNumber, ExtendedString, + IntersectionSchemaBuilder, ObjectSchemaBuilder, SchemaBuilder, UnionSchemaBuilder @@ -120,9 +121,18 @@ export type InferFromJsonSchema = S extends { readonly const: infer V } : S extends { readonly anyOf: readonly (infer U)[] } ? InferFromJsonSchema : S extends { - readonly allOf: readonly JsonSchemaNode[]; + readonly allOf: readonly [ + infer First extends JsonSchemaNode, + ...infer Rest extends + readonly JsonSchemaNode[] + ]; } - ? unknown + ? Rest extends readonly [] + ? InferFromJsonSchema + : InferFromJsonSchema & + InferFromJsonSchema<{ + readonly allOf: Rest; + }> : unknown; /** Options accepted by {@link toJsonSchema}. */ @@ -274,6 +284,53 @@ type ObjectPropertiesToBuilders< >; }; +/** + * When folding an allOf tuple, some accumulator elements may already be + * resolved builders from previous fold steps. This helper returns the + * input unchanged when it is already a schema builder, and otherwise + * maps it through {@link JsonSchemaNodeToBuilder}. + * @internal + */ +type AsBuilderIfNeeded = + X extends SchemaBuilder + ? 
X + : JsonSchemaNodeToBuilder; + +/** + * Recursively folds an `allOf` tuple left-to-right into a nested + * {@link IntersectionSchemaBuilder} chain, matching the runtime behavior + * of {@link fromJsonSchema}. + * + * @internal + */ +type AllOfNodesToBuilder< + Acc extends readonly unknown[], + TRequired extends boolean = true +> = Acc extends readonly [ + infer First, + infer Second, + ...infer Rest extends readonly unknown[] +] + ? Rest extends readonly [] + ? IntersectionSchemaBuilder< + AsBuilderIfNeeded, + AsBuilderIfNeeded, + TRequired + > + : AllOfNodesToBuilder< + [ + IntersectionSchemaBuilder< + AsBuilderIfNeeded, + AsBuilderIfNeeded + >, + ...Rest + ], + TRequired + > + : Acc extends readonly [infer Only] + ? AsBuilderIfNeeded + : SchemaBuilder; + /** * Recursively maps a statically-known JSON Schema node (passed with * `as const`) to the exact `@cleverbrush/schema` builder type, including: @@ -328,11 +385,11 @@ export type JsonSchemaNodeToBuilder = readonly anyOf: infer Opts extends readonly unknown[]; } ? UnionSchemaBuilder, TRequired> - : // allOf (not supported; falls back to any() at runtime) + : // allOf — left-fold into IntersectionSchemaBuilder chain S extends { - readonly allOf: infer _Opts extends readonly unknown[]; + readonly allOf: infer Opts extends readonly unknown[]; } - ? SchemaBuilder + ? AllOfNodesToBuilder : // string S extends { readonly type: 'string' } ? ExtendedStringBuilder diff --git a/libs/schema/README.md b/libs/schema/README.md index c66af55f..8de68f59 100644 --- a/libs/schema/README.md +++ b/libs/schema/README.md @@ -137,6 +137,7 @@ The following builder functions are available: | `tuple([...schemas])` | Fixed-length array with per-position types. Each index validated against its own schema — mirrors TypeScript tuple types. | `.rest(schema)`, `.optional()`, `.nullable()`, `.notNullable()`, `.default(value)` | | `record(keySchema, valSchema)` | Object with dynamic string keys. 
Every key must satisfy `keySchema` (a string schema) and every value must satisfy `valSchema` — mirrors TypeScript's `Record`. | `.optional()`, `.nullable()`, `.notNullable()`, `.default(value)`, `.addValidator(fn)` | | `union(schema)` | Union of schemas — e.g. `string \| number`. | `.or(schema)`, `.validate(data)`, `.optional()`, `.nullable()`, `.notNullable()`, `.default(value)` | +| `intersection(left, right)` | Intersection of two schemas — both must pass. Merges validated outputs. Use `.acceptUnknownProps()` on object schemas. | `.optional()`, `.nullable()`, `.notNullable()`, `.default(value)`, `.addValidator(fn)` | | `enumOf(...values)` | String enum — sugar for `string().oneOf(...)`. | `.optional()`, `.nullable()`, `.notNullable()`, `.default(value)` | | `lazy(getter)` | Recursive/self-referential schema. The getter is called once and its result is cached. Enables tree structures, linked lists, and other recursive types. | `.resolve()`, `.optional()`, `.addValidator(fn)`, `.default(value)` | | `generic(fn)` | Parameterized schema template. Call `.apply(...schemas)` with concrete schemas to obtain a fully typed concrete schema builder. TypeScript infers the result type from the template function's own generic signature. Optionally pass a `defaults` array as the first argument to enable direct validation without calling `.apply()`. 
| `.apply(...schemas)`, `.optional()`, `.nullable()`, `.default(value)` | @@ -199,6 +200,16 @@ const TeamSchema = object({ const IdOrEmail = union(string().minLength(1)).or( string().matches(/^[^@]+@[^@]+$/) ); + +// Intersection types — combine two schemas into one (both must pass) +import { intersection } from '@cleverbrush/schema'; + +const NameAndAge = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() +); +const person = NameAndAge.parse({ name: 'Alice', age: 30 }); +// typeof person === { name: string } & { age: number } ``` ## Generic Schemas @@ -1766,6 +1777,16 @@ const UserSchema = object({ const standardSchema = UserSchema['~standard']; ``` +The `['~standard'].validate()` method returns a `Promise`, supporting both sync +and async preprocessors, validators, and error message providers: + +```ts +const result = await UserSchema['~standard'].validate(input); +if ('issues' in result) { + console.error(result.issues[0].message); +} +``` + Confirmed integrations: **tRPC**, **TanStack Form**, **React Hook Form**, **T3 Env**, **Hono**, **Elysia**, **next-safe-action**, and 50+ others listed on [standardschema.dev](https://standardschema.dev/). 
## Code Quality diff --git a/libs/schema/src/builders/ExternSchemaBuilder.test.ts b/libs/schema/src/builders/ExternSchemaBuilder.test.ts index 59350f54..83f6cfd3 100644 --- a/libs/schema/src/builders/ExternSchemaBuilder.test.ts +++ b/libs/schema/src/builders/ExternSchemaBuilder.test.ts @@ -469,15 +469,15 @@ test('safeParse() works like validate()', () => { // ~standard on extern builder itself // --------------------------------------------------------------------------- -test('extern builder ~standard.validate works', () => { +test('extern builder ~standard.validate works', async () => { const schema = extern(mockStringSchema); - const result = schema['~standard'].validate('hello'); + const result = await schema['~standard'].validate('hello'); expect(result).toEqual({ value: 'hello' }); }); -test('extern builder ~standard.validate returns issues on failure', () => { +test('extern builder ~standard.validate returns issues on failure', async () => { const schema = extern(mockStringSchema); - const result = schema['~standard'].validate(123); + const result = await schema['~standard'].validate(123); expect(result).toHaveProperty('issues'); const issues = (result as StandardSchemaV1.FailureResult).issues; expect(issues.length).toBeGreaterThan(0); diff --git a/libs/schema/src/builders/IntersectionSchemaBuilder.test.ts b/libs/schema/src/builders/IntersectionSchemaBuilder.test.ts new file mode 100644 index 00000000..a256d103 --- /dev/null +++ b/libs/schema/src/builders/IntersectionSchemaBuilder.test.ts @@ -0,0 +1,497 @@ +import { expect, expectTypeOf, test } from 'vitest'; +import { intersection } from './IntersectionSchemaBuilder.js'; +import { number } from './NumberSchemaBuilder.js'; +import { object } from './ObjectSchemaBuilder.js'; +import type { BRAND, InferType } from './SchemaBuilder.js'; +import { string } from './StringSchemaBuilder.js'; + +test('Intersection of two objects', async () => { + const schema = intersection( + object({ name: string() 
}).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ); + + type T = InferType; + expectTypeOf().toEqualTypeOf<{ name: string } & { age: number }>(); + + const { valid, object: result } = await schema.validate({ + name: 'Alice', + age: 30 + }); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Alice', age: 30 }); +}); + +test('Intersection fails when left schema fails', async () => { + const schema = intersection( + object({ name: string().minLength(1) }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ); + + const { valid, errors } = await schema.validate({ + name: '', + age: 30 + } as any); + expect(valid).toEqual(false); + expect(errors?.length).toBeGreaterThan(0); +}); + +test('Intersection fails when right schema fails', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number().min(0) }).acceptUnknownProps() + ); + + const { valid, errors } = await schema.validate({ + name: 'Alice', + age: -1 + } as any); + expect(valid).toEqual(false); + expect(errors?.length).toBeGreaterThan(0); +}); + +test('Intersection of primitives', async () => { + const schema = intersection(string().minLength(3), string().maxLength(10)); + + type T = InferType; + expectTypeOf().toEqualTypeOf(); + + { + const { valid, object: result } = await schema.validate('hello'); + expect(valid).toEqual(true); + expect(result).toEqual('hello'); + } + + { + const { valid } = await schema.validate('ab'); + expect(valid).toEqual(false); + } + + { + const { valid } = await schema.validate('a'.repeat(11)); + expect(valid).toEqual(false); + } +}); + +test('Intersection with optional', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number().optional() }).acceptUnknownProps() + ).optional(); + + type T = InferType; + expectTypeOf().toEqualTypeOf< + ({ name: string } & { age?: number }) | undefined + >(); + + { + const 
{ valid, object: result } = schema.validate(undefined as any); + expect(valid).toEqual(true); + expect(result).toBeUndefined(); + } + + { + const { valid, object: result } = await schema.validate({ + name: 'Alice' + }); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Alice' }); + } +}); + +test('Intersection with nullable', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ).nullable(); + + type T = InferType; + expectTypeOf().toEqualTypeOf< + ({ name: string } & { age: number }) | null + >(); + + { + const { valid, object: result } = schema.validate(null as any); + expect(valid).toEqual(true); + expect(result).toBeNull(); + } +}); + +test('Intersection with default value', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ).default({ name: 'Default', age: 0 } as any); + + { + const { valid, object: result } = schema.validate(undefined as any); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Default', age: 0 }); + } +}); + +test('Intersection returns merged object', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ); + + const { valid, object: result } = await schema.validate({ + name: 'Bob', + age: 25 + }); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Bob', age: 25 }); +}); + +test('Intersection with nullable', async () => { + const schema = intersection( + object({ name: string() }), + object({ age: number() }) + ).nullable(); + + type T = InferType; + expectTypeOf().toEqualTypeOf< + ({ name: string } & { age: number }) | null + >(); + + { + const { valid, object: result } = schema.validate(null as any); + expect(valid).toEqual(true); + expect(result).toBeNull(); + } +}); + +test('Intersection with default value', async () 
=> { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ).default({ name: 'Default', age: 0 } as any); + + { + const { valid, object: result } = schema.validate(undefined as any); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Default', age: 0 }); + } +}); + +test('Intersection with catch value', async () => { + const schema = intersection( + object({ name: string() }), + object({ age: number() }) + ).catch({ name: 'Fallback', age: 99 } as any); + + { + const { valid, object: result } = await schema.validate(null as any); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Fallback', age: 99 }); + } +}); + +test('Intersection returns merged object', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ); + + const { valid, object: result } = await schema.validate({ + name: 'Bob', + age: 25 + }); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Bob', age: 25 }); +}); + +test('Intersection handles overlapping properties', async () => { + const schema = intersection( + object({ value: string().minLength(1) }), + object({ value: string().maxLength(10) }) + ); + + const { valid, object: result } = await schema.validate({ + value: 'test' + }); + expect(valid).toEqual(true); + expect(result).toEqual({ value: 'test' }); +}); + +test('Intersection with readonly', async () => { + const schema = intersection( + object({ name: string() }), + object({ age: number() }) + ).readonly(); + + type T = InferType; + expectTypeOf().toEqualTypeOf< + Readonly<{ name: string } & { age: number }> + >(); +}); + +test('Intersection with brand', async () => { + const schema = intersection( + object({ name: string() }), + object({ age: number() }) + ).brand<'Person'>(); + + type T = InferType; + expectTypeOf().toExtend<{ name: string } & { age: number }>(); + 
expectTypeOf().toExtend<{ readonly [K in BRAND]: 'Person' }>(); +}); + +test('Intersection with describe', async () => { + const schema = intersection( + object({ name: string() }), + object({ age: number() }) + ).describe('A person'); + + expect(schema.introspect().description).toEqual('A person'); +}); + +test('Intersection with schemaName', async () => { + const schema = intersection( + object({ name: string() }), + object({ age: number() }) + ).schemaName('Person'); + + expect(schema.introspect().schemaName).toEqual('Person'); +}); + +test('Intersection with example', async () => { + const example = { name: 'Alice', age: 30 }; + const schema = intersection( + object({ name: string() }), + object({ age: number() }) + ).example(example); + + expect(schema.introspect().example).toEqual(example); +}); + +test('Intersection with addPreprocessor', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ).addPreprocessor((obj: any) => ({ + ...obj, + name: (obj.name || '').toString().trim() + })); + + const { valid, object: result } = await schema.validate({ + name: ' Alice ', + age: 30 + } as any); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Alice', age: 30 }); +}); + +test('Intersection with addValidator', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number().min(0) }).acceptUnknownProps() + ).addValidator((obj: any) => { + if (obj.name === 'Admin' && obj.age < 18) { + return { + valid: false, + errors: [{ message: 'Admin must be at least 18' }] + }; + } + return { valid: true, errors: [] }; + }); + + { + const { valid } = await schema.validate({ + name: 'Admin', + age: 15 + } as any); + expect(valid).toEqual(false); + } + + { + const { valid } = await schema.validate({ + name: 'Admin', + age: 25 + } as any); + expect(valid).toEqual(true); + } +}); + +test('Intersection is immutable', () => 
{ + const schema1 = intersection( + object({ name: string() }), + object({ age: number() }) + ); + const schema2 = intersection( + object({ name: string() }), + object({ age: number() }) + ).optional(); + + expect(schema1).not.toBe(schema2); + expect((schema1.introspect() as any).isRequired).toEqual(true); + expect((schema2.introspect() as any).isRequired).toEqual(false); +}); + +test('Intersection introspection', () => { + const leftSchema = object({ name: string() }); + const rightSchema = object({ age: number() }); + const schema = intersection(leftSchema, rightSchema); + + const info = schema.introspect(); + expect(info.type).toEqual('intersection'); + expect(info.left).toBe(leftSchema); + expect(info.right).toBe(rightSchema); +}); + +test('Intersection with hasType', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ).hasType<{ name: string; age: number }>(); + + type T = InferType; + expectTypeOf().toEqualTypeOf<{ name: string; age: number }>(); + + const { valid, object: result } = await schema.validate({ + name: 'Alice', + age: 30 + }); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Alice', age: 30 }); +}); + +test('Intersection validateAsync', async () => { + const schema = intersection( + object({ name: string().minLength(1) }).acceptUnknownProps(), + object({ age: number().min(0) }).acceptUnknownProps() + ); + + { + const { valid, object: result } = await schema.validateAsync({ + name: 'Alice', + age: 30 + }); + expect(valid).toEqual(true); + expect(result).toEqual({ name: 'Alice', age: 30 }); + } + + { + const { valid } = await schema.validateAsync({ + name: '', + age: 30 + } as any); + expect(valid).toEqual(false); + } + + { + const { valid } = await schema.validateAsync({ + name: 'Alice', + age: -1 + } as any); + expect(valid).toEqual(false); + } +}); + +test('Intersection parse throws on invalid', () => { + const schema = intersection( + 
object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ); + + expect(() => + schema.parse({ name: 'Alice', age: 'not-a-number' } as any) + ).toThrow(); +}); + +test('Intersection parse returns value on valid', () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ); + + const result = schema.parse({ name: 'Alice', age: 30 }); + expect(result).toEqual({ name: 'Alice', age: 30 }); +}); + +test('Intersection safeParse', () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ); + + { + const result = schema.safeParse({ name: 'Alice', age: 30 }); + expect(result.valid).toEqual(true); + expect(result.object).toEqual({ name: 'Alice', age: 30 }); + } + + { + const result = schema.safeParse({ name: 'Alice' } as any); + expect(result.valid).toEqual(false); + } +}); + +test('Intersection safeParseAsync', async () => { + const schema = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ); + + const result = await schema.safeParseAsync({ name: 'Alice', age: 30 }); + expect(result.valid).toEqual(true); + expect(result.object).toEqual({ name: 'Alice', age: 30 }); +}); + +test('Intersection of intersection', async () => { + const base = intersection( + object({ name: string() }).acceptUnknownProps(), + object({ age: number() }).acceptUnknownProps() + ); + const extended = intersection( + base, + object({ email: string() }).acceptUnknownProps() + ); + + type T = InferType; + expectTypeOf().toEqualTypeOf< + ({ name: string } & { age: number }) & { email: string } + >(); + + const { valid, object: result } = await extended.validate({ + name: 'Alice', + age: 30, + email: 'alice@example.com' + }); + expect(valid).toEqual(true); + expect(result).toEqual({ + name: 'Alice', + age: 30, + email: 
'alice@example.com' + }); +}); + +test('Intersection required method', async () => { + const schema = intersection( + object({ name: string() }), + object({ age: number() }) + ) + .optional() + .required(); + + type T = InferType; + expectTypeOf().toEqualTypeOf<{ name: string } & { age: number }>(); + + { + const { valid } = await schema.validate(undefined as any); + expect(valid).toEqual(false); + } +}); + +test('Intersection clearDefault', async () => { + const withDefault = intersection( + object({ name: string() }), + object({ age: number() }) + ).default({ name: 'X', age: 0 } as any); + + expect((withDefault.introspect() as any).defaultValue).toBeDefined(); + + const withoutDefault = withDefault.clearDefault(); + expect((withoutDefault.introspect() as any).defaultValue).toBeUndefined(); +}); diff --git a/libs/schema/src/builders/IntersectionSchemaBuilder.ts b/libs/schema/src/builders/IntersectionSchemaBuilder.ts new file mode 100644 index 00000000..6d746c6a --- /dev/null +++ b/libs/schema/src/builders/IntersectionSchemaBuilder.ts @@ -0,0 +1,589 @@ +import { + type BRAND, + type InferType, + SchemaBuilder, + type ValidationContext, + type ValidationResult +} from './SchemaBuilder.js'; + +type IntersectionSchemaBuilderCreateProps< + TLeft extends SchemaBuilder, + TRight extends SchemaBuilder, + R extends boolean = true +> = Partial< + ReturnType['introspect']> +>; + +type SchemaIntersection< + TLeft extends SchemaBuilder, + TRight extends SchemaBuilder +> = InferType & InferType; + +export type IntersectionSchemaValidationResult = ValidationResult; + +export class IntersectionSchemaBuilder< + TLeft extends SchemaBuilder, + TRight extends SchemaBuilder, + TRequired extends boolean = true, + TNullable extends boolean = false, + TExplicitType = undefined, + THasDefault extends boolean = false, + TExtensions = {} +> extends SchemaBuilder< + TExplicitType extends undefined + ? 
SchemaIntersection + : TExplicitType, + TRequired, + TNullable, + THasDefault, + TExtensions +> { + #left!: TLeft; + #right!: TRight; + + /** + * @hidden + */ + public static create( + props: IntersectionSchemaBuilderCreateProps + ) { + return new IntersectionSchemaBuilder({ + type: 'intersection', + ...props + }); + } + + protected constructor( + props: IntersectionSchemaBuilderCreateProps + ) { + super(props as any); + + if (props.left instanceof SchemaBuilder) { + this.#left = props.left; + } + if (props.right instanceof SchemaBuilder) { + this.#right = props.right; + } + } + + public introspect() { + return { + ...super.introspect(), + left: this.#left, + right: this.#right + }; + } + + /** + * @override + */ + protected override get isNullRequiredViolation(): boolean { + return false; + } + + /** + * @inheritdoc + */ + public hasType( + _notUsed?: T + ): IntersectionSchemaBuilder< + TLeft, + TRight, + true, + TNullable, + T, + THasDefault, + TExtensions + > & + TExtensions { + return this.createFromProps({ + ...this.introspect() + } as any) as any; + } + + /** + * @inheritdoc + */ + public clearHasType(): IntersectionSchemaBuilder< + TLeft, + TRight, + TRequired, + TNullable, + undefined, + THasDefault, + TExtensions + > & + TExtensions { + return this.createFromProps({ + ...this.introspect() + } as any) as any; + } + + /** + * @inheritdoc + */ + public validate( + object: TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType, + context?: ValidationContext + ): IntersectionSchemaValidationResult< + TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType + > { + return super.validate(object, context) as any; + } + + /** + * @inheritdoc + */ + public async validateAsync( + object: TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType, + context?: ValidationContext + ): Promise< + IntersectionSchemaValidationResult< + TExplicitType extends undefined + ? 
SchemaIntersection + : TExplicitType + > + > { + return super.validateAsync(object, context) as any; + } + + #mergeObjects(leftObj: any, rightObj: any): any { + if ( + typeof leftObj === 'object' && + leftObj !== null && + typeof rightObj === 'object' && + rightObj !== null && + !Array.isArray(leftObj) && + !Array.isArray(rightObj) + ) { + return { ...leftObj, ...rightObj }; + } + return rightObj; + } + + #createValidationSetup( + superResult: ReturnType< + IntersectionSchemaBuilder['preValidateSync'] + > + ) { + const { + valid, + transaction: preValidationTransaction, + context: prevalidationContext, + errors + } = superResult; + + if (!valid) { + return { + needsValidation: false as const, + result: { valid, errors } as any + }; + } + + const { + object: { validatedObject: objToValidate } + } = preValidationTransaction!; + + if ( + (typeof objToValidate === 'undefined' && !this.isRequired) || + (objToValidate === null && (!this.isRequired || this.isNullable)) + ) { + return { + needsValidation: false as const, + result: { + valid: true, + object: objToValidate + } as any + }; + } + + return { + needsValidation: true as const, + objToValidate, + prevalidationContext + }; + } + + /** + * Performs synchronous validation. + * Validates left schema first, then right schema. + * Both must pass for the intersection to be valid. + */ + protected _validate( + object: TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType, + context?: ValidationContext + ): IntersectionSchemaValidationResult< + TExplicitType extends undefined + ? 
SchemaIntersection + : TExplicitType + > { + if ( + this.canSkipPreValidation && + !context?.doNotStopOnFirstError && + !context?.rootPropertyDescriptor + ) { + if (typeof object === 'undefined') { + if (this.hasDefault) { + object = this.resolveDefaultValue(); + } else if (!this.isRequired) { + return { valid: true, object } as any; + } else { + return { + valid: false, + errors: [ + { + message: this.getValidationErrorMessageSync( + this.requiredErrorMessage, + object as any + ) + } + ] + } as any; + } + } else if ( + object === null && + (!this.isRequired || this.isNullable) + ) { + return { valid: true, object } as any; + } + + const leftResult = this.#left.validate(object as any); + if (!leftResult.valid) { + return { + valid: false, + errors: leftResult.errors + } as any; + } + + const rightResult = this.#right.validate(object as any); + if (!rightResult.valid) { + return { + valid: false, + errors: rightResult.errors + } as any; + } + + const mergedObject = this.#mergeObjects( + leftResult.object, + rightResult.object + ); + return { valid: true, object: mergedObject } as any; + } + + return this.#validateFull(object, context); + } + + #validateFull( + object: any, + context?: ValidationContext + ): IntersectionSchemaValidationResult { + const setup = this.#createValidationSetup( + this.preValidateSync(object, context) + ); + + if (!setup.needsValidation) return setup.result; + + const { objToValidate, prevalidationContext } = setup; + + const leftResult = this.#left.validate(objToValidate, { + ...prevalidationContext, + currentPropertyDescriptor: undefined, + rootPropertyDescriptor: undefined + } as any); + + if (!leftResult.valid) { + return { + valid: false, + errors: leftResult.errors + }; + } + + const rightResult = this.#right.validate(objToValidate, { + ...prevalidationContext, + currentPropertyDescriptor: undefined, + rootPropertyDescriptor: undefined + } as any); + + if (!rightResult.valid) { + return { + valid: false, + errors: rightResult.errors + 
}; + } + + const mergedObject = this.#mergeObjects( + leftResult.object, + rightResult.object + ); + + return { + valid: true, + object: mergedObject + }; + } + + /** + * Performs async validation. + */ + protected async _validateAsync( + object: TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType, + context?: ValidationContext + ): Promise< + IntersectionSchemaValidationResult< + TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType + > + > { + const setup = this.#createValidationSetup( + await super.preValidateAsync(object, context) + ); + + if (!setup.needsValidation) return setup.result; + + const { objToValidate, prevalidationContext } = setup; + + const leftResult = await this.#left.validateAsync(objToValidate, { + ...prevalidationContext, + currentPropertyDescriptor: undefined, + rootPropertyDescriptor: undefined + } as any); + + if (!leftResult.valid) { + return { + valid: false, + errors: leftResult.errors + }; + } + + const rightResult = await this.#right.validateAsync(objToValidate, { + ...prevalidationContext, + currentPropertyDescriptor: undefined, + rootPropertyDescriptor: undefined + } as any); + + if (!rightResult.valid) { + return { + valid: false, + errors: rightResult.errors + }; + } + + const mergedObject = this.#mergeObjects( + leftResult.object, + rightResult.object + ); + + return { + valid: true, + object: mergedObject + }; + } + + protected createFromProps< + TL extends SchemaBuilder, + TR extends SchemaBuilder, + TReq extends boolean + >(props: IntersectionSchemaBuilderCreateProps): this { + return IntersectionSchemaBuilder.create(props as any) as any; + } + + /** + * @hidden + */ + public required( + errorMessage?: any + ): IntersectionSchemaBuilder< + TLeft, + TRight, + true, + TNullable, + TExplicitType, + THasDefault, + TExtensions + > & + TExtensions { + return super.required(errorMessage); + } + + /** + * @hidden + */ + public optional(): IntersectionSchemaBuilder< + TLeft, + TRight, + false, + 
TNullable, + TExplicitType, + THasDefault, + TExtensions + > & + TExtensions { + return super.optional(); + } + + /** + * @hidden + */ + public default( + value: + | (TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType) + | (() => TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType) + ): IntersectionSchemaBuilder< + TLeft, + TRight, + true, + TNullable, + TExplicitType, + true, + TExtensions + > & + TExtensions { + return super.default(value as any) as any; + } + + /** + * @hidden + */ + public clearDefault(): IntersectionSchemaBuilder< + TLeft, + TRight, + TRequired, + TNullable, + TExplicitType, + false, + TExtensions + > & + TExtensions { + return super.clearDefault() as any; + } + + /** + * @hidden + */ + public brand( + _name?: TBrand + ): IntersectionSchemaBuilder< + TLeft, + TRight, + TRequired, + TNullable, + (TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType) & { readonly [K in BRAND]: TBrand }, + THasDefault, + TExtensions + > & + TExtensions { + return super.brand(_name); + } + + /** + * @hidden + */ + public readonly(): IntersectionSchemaBuilder< + TLeft, + TRight, + TRequired, + TNullable, + Readonly< + TExplicitType extends undefined + ? SchemaIntersection + : TExplicitType + >, + THasDefault, + TExtensions + > & + TExtensions { + return super.readonly(); + } + + /** + * @hidden + */ + public nullable(): IntersectionSchemaBuilder< + TLeft, + TRight, + TRequired, + true, + TExplicitType, + THasDefault, + TExtensions + > & + TExtensions { + return super.nullable() as any; + } + + /** + * @hidden + */ + public notNullable(): IntersectionSchemaBuilder< + TLeft, + TRight, + TRequired, + false, + TExplicitType, + THasDefault, + TExtensions + > & + TExtensions { + return super.notNullable() as any; + } + + /** + * Gets the left side of this intersection. + */ + public get leftSchema(): TLeft { + return this.#left; + } + + /** + * Gets the right side of this intersection. 
+ */ + public get rightSchema(): TRight { + return this.#right; + } +} + +/** + * Creates an intersection schema. + * The resulting schema validates that the input satisfies both `left` and `right` schemas. + * + * @example + * ```ts + * const schema = intersection( + * object({ name: string() }), + * object({ age: number() }) + * ); + * // InferType === { name: string } & { age: number } + * ``` + * + * @param left - first schema + * @param right - second schema + */ +export const intersection = < + TLeft extends SchemaBuilder, + TRight extends SchemaBuilder +>( + left: TLeft, + right: TRight +) => + IntersectionSchemaBuilder.create({ + isRequired: true, + left, + right + }) as IntersectionSchemaBuilder; diff --git a/libs/schema/src/builders/SchemaBuilder.ts b/libs/schema/src/builders/SchemaBuilder.ts index 1dc13e34..6b119bca 100644 --- a/libs/schema/src/builders/SchemaBuilder.ts +++ b/libs/schema/src/builders/SchemaBuilder.ts @@ -858,16 +858,18 @@ export abstract class SchemaBuilder< this.#standardProps = { version: 1 as const, vendor: '@cleverbrush/schema', - validate( + async validate( value: unknown - ): StandardSchemaV1.Result< - ResolvedSchemaType + ): Promise< + StandardSchemaV1.Result< + ResolvedSchemaType + > > { // Standard Schema validate accepts `unknown`, while the - // schema's own validate() has a typed parameter. The cast - // is safe because validate() performs full runtime + // schema's own validateAsync() has a typed parameter. The cast + // is safe because validateAsync() performs full runtime // validation regardless of the compile-time input type. 
- const result = self.validate(value as any); + const result = await self.validateAsync(value as any); if (result.valid) { return { value: result.object as ResolvedSchemaType< diff --git a/libs/schema/src/builders/standard-schema.test.ts b/libs/schema/src/builders/standard-schema.test.ts index c2e94091..c891924a 100644 --- a/libs/schema/src/builders/standard-schema.test.ts +++ b/libs/schema/src/builders/standard-schema.test.ts @@ -80,26 +80,29 @@ test('~standard is cached (same reference)', () => { // Successful validation // --------------------------------------------------------------------------- -test('validate returns success result for valid string', () => { - const result = string()['~standard'].validate('hello'); +test('validate returns success result for valid string', async () => { + const result = await string()['~standard'].validate('hello'); expect(result).toEqual({ value: 'hello' }); expect(result).not.toHaveProperty('issues'); }); -test('validate returns success result for valid number', () => { - const result = number()['~standard'].validate(42); +test('validate returns success result for valid number', async () => { + const result = await number()['~standard'].validate(42); expect(result).toEqual({ value: 42 }); }); -test('validate returns success result for valid object', () => { +test('validate returns success result for valid object', async () => { const schema = object({ name: string(), age: number() }); - const result = schema['~standard'].validate({ name: 'Alice', age: 30 }); + const result = await schema['~standard'].validate({ + name: 'Alice', + age: 30 + }); expect(result).toEqual({ value: { name: 'Alice', age: 30 } }); }); -test('validate returns success for optional schema with undefined', () => { +test('validate returns success for optional schema with undefined', async () => { const schema = string().optional(); - const result = schema['~standard'].validate(undefined); + const result = await schema['~standard'].validate(undefined); 
expect(result).toEqual({ value: undefined }); }); @@ -107,31 +110,31 @@ test('validate returns success for optional schema with undefined', () => { // Failed validation // --------------------------------------------------------------------------- -test('validate returns failure result for invalid string', () => { - const result = string()['~standard'].validate(123); +test('validate returns failure result for invalid string', async () => { + const result = await string()['~standard'].validate(123); expect(result).toHaveProperty('issues'); const issues = (result as StandardSchemaV1.FailureResult).issues; expect(issues.length).toBeGreaterThan(0); expect(typeof issues[0].message).toBe('string'); }); -test('validate returns failure result for required schema with null', () => { - const result = string()['~standard'].validate(null); +test('validate returns failure result for required schema with null', async () => { + const result = await string()['~standard'].validate(null); expect(result).toHaveProperty('issues'); const issues = (result as StandardSchemaV1.FailureResult).issues; expect(issues.length).toBeGreaterThan(0); }); -test('validate returns failure result for required schema with undefined', () => { - const result = string()['~standard'].validate(undefined); +test('validate returns failure result for required schema with undefined', async () => { + const result = await string()['~standard'].validate(undefined); expect(result).toHaveProperty('issues'); const issues = (result as StandardSchemaV1.FailureResult).issues; expect(issues.length).toBeGreaterThan(0); }); -test('validate returns failure with multiple issues for invalid object', () => { +test('validate returns failure with multiple issues for invalid object', async () => { const schema = object({ name: string(), age: number() }); - const result = schema['~standard'].validate({ + const result = await schema['~standard'].validate({ name: 123, age: 'not a number' }); @@ -144,12 +147,12 @@ test('validate 
returns failure with multiple issues for invalid object', () => { // Preprocessors work through ~standard.validate // --------------------------------------------------------------------------- -test('preprocessors are applied through ~standard.validate', () => { +test('preprocessors are applied through ~standard.validate', async () => { const schema = string().addPreprocessor((v: any) => { if (typeof v === 'number') return String(v); return v; }); - const result = schema['~standard'].validate(42); + const result = await schema['~standard'].validate(42); expect(result).toEqual({ value: '42' }); }); @@ -157,15 +160,50 @@ test('preprocessors are applied through ~standard.validate', () => { // Validators with constraints // --------------------------------------------------------------------------- -test('string minLength constraint is enforced via ~standard.validate', () => { +test('string minLength constraint is enforced via ~standard.validate', async () => { const schema = string().minLength(3); - const fail = schema['~standard'].validate('ab'); + const fail = await schema['~standard'].validate('ab'); expect(fail).toHaveProperty('issues'); - const pass = schema['~standard'].validate('abc'); + const pass = await schema['~standard'].validate('abc'); expect(pass).toEqual({ value: 'abc' }); }); +// --------------------------------------------------------------------------- +// Async validation through ~standard.validate +// --------------------------------------------------------------------------- + +test('async preprocessor works through ~standard.validate', async () => { + const schema = string().addPreprocessor(async (v: any) => { + if (typeof v === 'number') return String(v); + return v; + }); + const result = await schema['~standard'].validate(42); + expect(result).toEqual({ value: '42' }); +}); + +test('async validator works through ~standard.validate', async () => { + const schema = string().addValidator(async (v: any) => { + if (v === 'hello') { + return { 
valid: true, errors: [] }; + } + return { valid: false, errors: [{ message: 'not hello' }] }; + }); + const pass = await schema['~standard'].validate('hello'); + expect(pass).toEqual({ value: 'hello' }); + + const fail = await schema['~standard'].validate('world'); + expect(fail).toHaveProperty('issues'); + const issues = (fail as StandardSchemaV1.FailureResult).issues; + expect(issues[0].message).toMatch(/not hello/); +}); + +test('validate returns Promise from ~standard bridge', () => { + const schema = string(); + const result = schema['~standard'].validate('hello'); + expect(result).toBeInstanceOf(Promise); +}); + // --------------------------------------------------------------------------- // Type conformance — assignable to StandardSchemaV1 // --------------------------------------------------------------------------- @@ -219,64 +257,64 @@ test('StandardSchemaV1.InferOutput works with optional schema', () => { // Error shape — issue messages and mutual exclusivity of value / issues // --------------------------------------------------------------------------- -test('failure result has no value property', () => { - const result = string()['~standard'].validate(123); +test('failure result has no value property', async () => { + const result = await string()['~standard'].validate(123); expect(result).not.toHaveProperty('value'); expect(result).toHaveProperty('issues'); }); -test('success result has no issues property', () => { - const result = string()['~standard'].validate('hello'); +test('success result has no issues property', async () => { + const result = await string()['~standard'].validate('hello'); expect(result).toHaveProperty('value'); expect(result).not.toHaveProperty('issues'); }); -test('wrong type produces a descriptive message', () => { - const result = string()['~standard'].validate( +test('wrong type produces a descriptive message', async () => { + const result = (await string()['~standard'].validate( 123 - ) as StandardSchemaV1.FailureResult; 
+ )) as StandardSchemaV1.FailureResult; expect(result.issues[0].message).toMatch(/string/i); }); -test('required field missing produces a descriptive message', () => { - const result = string()['~standard'].validate( +test('required field missing produces a descriptive message', async () => { + const result = (await string()['~standard'].validate( undefined - ) as StandardSchemaV1.FailureResult; + )) as StandardSchemaV1.FailureResult; expect(result.issues[0].message).toMatch(/required/i); }); -test('null value produces a descriptive message', () => { - const result = string()['~standard'].validate( +test('null value produces a descriptive message', async () => { + const result = (await string()['~standard'].validate( null - ) as StandardSchemaV1.FailureResult; + )) as StandardSchemaV1.FailureResult; expect(result.issues[0].message).toBeTruthy(); }); -test('minLength violation produces a descriptive message', () => { - const result = string() +test('minLength violation produces a descriptive message', async () => { + const result = (await string() .minLength(5) - ['~standard'].validate('ab') as StandardSchemaV1.FailureResult; + ['~standard'].validate('ab')) as StandardSchemaV1.FailureResult; expect(result.issues[0].message).toMatch(/5/); }); -test('number min violation produces a descriptive message', () => { - const result = number() +test('number min violation produces a descriptive message', async () => { + const result = (await number() .min(10) - ['~standard'].validate(3) as StandardSchemaV1.FailureResult; + ['~standard'].validate(3)) as StandardSchemaV1.FailureResult; expect(result.issues[0].message).toMatch(/10/); }); -test('custom error message string is propagated through issues', () => { - const result = string() +test('custom error message string is propagated through issues', async () => { + const result = (await string() .minLength(3, 'name too short') - ['~standard'].validate('a') as StandardSchemaV1.FailureResult; + ['~standard'].validate('a')) as 
StandardSchemaV1.FailureResult; expect(result.issues[0].message).toBe('name too short'); }); -test('each issue has a non-empty string message', () => { - const result = string()['~standard'].validate( +test('each issue has a non-empty string message', async () => { + const result = (await string()['~standard'].validate( false - ) as StandardSchemaV1.FailureResult; + )) as StandardSchemaV1.FailureResult; for (const issue of result.issues) { expect(typeof issue.message).toBe('string'); expect(issue.message.length).toBeGreaterThan(0); diff --git a/libs/schema/src/core.ts b/libs/schema/src/core.ts index 9a08d7f7..023f3b3b 100644 --- a/libs/schema/src/core.ts +++ b/libs/schema/src/core.ts @@ -25,6 +25,11 @@ export { GenericSchemaBuilder, generic } from './builders/GenericSchemaBuilder.js'; +export type { IntersectionSchemaValidationResult } from './builders/IntersectionSchemaBuilder.js'; +export { + IntersectionSchemaBuilder, + intersection +} from './builders/IntersectionSchemaBuilder.js'; export { LazySchemaBuilder, lazy } from './builders/LazySchemaBuilder.js'; export { NullSchemaBuilder, nul } from './builders/NullSchemaBuilder.js'; export { NumberSchemaBuilder, number } from './builders/NumberSchemaBuilder.js'; diff --git a/libs/server-openapi/src/generateOpenApiSpec.ts b/libs/server-openapi/src/generateOpenApiSpec.ts index 231431da..f566eb3e 100644 --- a/libs/server-openapi/src/generateOpenApiSpec.ts +++ b/libs/server-openapi/src/generateOpenApiSpec.ts @@ -7,6 +7,7 @@ import type { AuthenticationConfig, EndpointMetadata, EndpointRegistration, + UploadOptions, WebhookDefinition } from '@cleverbrush/server'; import { resolvePath } from './pathUtils.js'; @@ -167,22 +168,34 @@ function buildRequestBody( examples?: Record< string, { summary?: string; description?: string; value: unknown } - > | null + > | null, + fileUpload?: UploadOptions | null ): Record { - const jsonSchema = convertSchema(bodySchema, registry); const bodyInfo = bodySchema.introspect() as any; - 
const mediaType: Record = { schema: jsonSchema }; - if (example != null) { - mediaType['example'] = example; - } else if (examples != null) { - mediaType['examples'] = examples; - } const body: Record = { - required: bodyInfo.isRequired !== false, - content: { - 'application/json': mediaType - } + required: bodyInfo.isRequired !== false }; + + // When file uploads are enabled, emit multipart/form-data + if (fileUpload) { + const jsonSchema = convertSchema(bodySchema, registry); + const mediaType: Record = { schema: jsonSchema }; + body['content'] = { + 'multipart/form-data': mediaType + }; + } else { + const jsonSchema = convertSchema(bodySchema, registry); + const mediaType: Record = { schema: jsonSchema }; + if (example != null) { + mediaType['example'] = example; + } else if (examples != null) { + mediaType['examples'] = examples; + } + body['content'] = { + 'application/json': mediaType + }; + } + if (typeof bodyInfo.description === 'string' && bodyInfo.description !== '') body['description'] = bodyInfo.description; return body; @@ -521,7 +534,8 @@ function buildOperation( meta.bodySchema, registry, meta.example, - meta.examples + meta.examples, + meta.fileUpload ); } diff --git a/libs/server/README.md b/libs/server/README.md index 360e3b67..6351892f 100644 --- a/libs/server/README.md +++ b/libs/server/README.md @@ -124,6 +124,36 @@ const CreateUser = endpoint .operationId('createUser'); ``` +### Cache Tags + +Tag-based cache invalidation. Tags declared on endpoints flow to the +[`cacheTags` middleware](/client/cache-tags) for automatic HTTP caching and +invalidation on mutating requests. 
+ +```ts +const ListTodos = endpoint + .get('/api/todos') + .query(TodoListQuerySchema) + .cacheTag('todo-list', p => ({ page: p.query.page, limit: p.query.limit })) + .returns(array(TodoSchema)); + +const UpdateTodo = endpoint + .patch('/api/todos/:id') + .body(UpdateTodoBody) + .clearsCacheTag('todo-list') // clears the collection cache + .clearsCacheTag('todo', p => ({ id: p.params.id })) // clears specific entity + .returns(TodoSchema); +``` + +- **`.cacheTag(name)`** — declares the endpoint's data belongs to a cache + group. Use on GET endpoints. +- **`.clearsCacheTag(name)`** — declares that this mutation clears matching + cache entries on success. Use on POST / PUT / PATCH / DELETE. +- **`.cacheTag(name, p => ({ ... }))`** — property-based tag; each selected + property becomes part of the cache key (different pages → different entries). +- **Immutability** — both methods return a new builder; the original is + unchanged. + ## Registering and Handling Endpoints ```ts @@ -155,6 +185,48 @@ await server.listen(3000); | `ActionResult.stream(readable, contentType)` | 200 | Pipes a `Readable` | | `ActionResult.status(status)` | any | Bare status, no body | +## File Upload + +Accept file uploads via `multipart/form-data` by chaining `.upload()` on an endpoint: + +```ts +import { endpoint } from '@cleverbrush/server'; +import { object, string } from '@cleverbrush/schema'; + +const UploadAvatar = endpoint + .post('/api/avatar') + .upload({ maxFileSize: 2 * 1024 * 1024, allowedMimeTypes: ['image/*'] }) + .body(object({ description: string().optional() })) + .authorize(UserPrincipal); + +const handler: Handler = async ({ body, files }) => { + const avatar = files['avatar']; + // avatar: FilePart { filename, mimeType, buffer, size } + return ActionResult.created({ name: avatar.filename }); +}; +``` + +The `files` object on the handler context contains one `FilePart` entry per uploaded file field. 
Non-file form fields are validated against the body schema and available via `body`. + +### Options + +| Option | Type | Default | Description | +|--------|------|---------|-------------| +| `maxFileSize` | `number` | 10 MB | Maximum file size per file in bytes | +| `allowedMimeTypes` | `string[]` | all | MIME type allowlist (supports `image/*` glob) | +| `maxFileCount` | `number` | 10 | Maximum number of files per request | + +### FilePart type + +```ts +interface FilePart { + readonly filename: string; + readonly mimeType: string; + readonly buffer: Buffer; + readonly size: number; +} +``` + ## Middleware ```ts diff --git a/libs/server/package.json b/libs/server/package.json index 1f008da1..35b43b5f 100644 --- a/libs/server/package.json +++ b/libs/server/package.json @@ -8,9 +8,11 @@ "@cleverbrush/auth": "^4.0.0", "@cleverbrush/di": "^4.0.0", "@cleverbrush/schema": "^4.0.0", + "@fastify/busboy": "^3.2.0", "ws": "^8.20.0" }, "devDependencies": { + "@types/busboy": "1.5.4", "@types/ws": "^8.18.1" }, "description": "Schema-first HTTP server framework — schema-driven controllers, DI, auto-validation, RFC 9457 errors", diff --git a/libs/server/src/ActionResult.ts b/libs/server/src/ActionResult.ts index a3c00c25..7c29f00e 100644 --- a/libs/server/src/ActionResult.ts +++ b/libs/server/src/ActionResult.ts @@ -248,9 +248,15 @@ export class FileResult extends ActionResult { res: http.ServerResponse, _contentNegotiator: ContentNegotiator ): Promise { + const filename = this.fileName; + const sanitized = filename.replace(/[^\x20-\x7E]/g, '_'); + const disposition = + sanitized === filename + ? 
`attachment; filename="${filename}"`
                : `attachment; filename="${sanitized}"; filename*=UTF-8''${encodeURIComponent(filename)}`;
         res.writeHead(200, {
             'content-type': this.contentType,
-            'content-disposition': `attachment; filename="${this.fileName}"`,
+            'content-disposition': disposition,
             'content-length': String(this.content.byteLength)
         });
         res.end(this.content);
diff --git a/libs/server/src/CacheTag.ts b/libs/server/src/CacheTag.ts
new file mode 100644
index 00000000..f6d44542
--- /dev/null
+++ b/libs/server/src/CacheTag.ts
@@ -0,0 +1,214 @@
+import {
+    type ObjectSchemaBuilder,
+    object,
+    type PropertyDescriptor,
+    type SchemaBuilder,
+    SYMBOL_HAS_PROPERTIES,
+    SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR
+} from '@cleverbrush/schema';
+
+// ---------------------------------------------------------------------------
+// Types
+// ---------------------------------------------------------------------------
+
+/**
+ * An accessor that can extract a property value from a structured
+ * request root. Wraps a {@link PropertyDescriptor}'s `getValue`
+ * closure so the middleware layer does not need to know about schemas.
+ */
+export interface CacheTagPropertyAccessor {
+    getValue(root: {
+        params: Record<string, unknown>;
+        body: unknown;
+        query: Record<string, unknown>;
+        headers: Record<string, unknown>;
+    }): { value?: unknown; success: boolean };
+}
+
+/**
+ * A serialisable cache-tag definition stored on endpoint metadata.
+ *
+ * `properties` maps human-readable key names (used as label segments
+ * in the final cache key) to accessors that resolve the actual value
+ * from call-time request data.
+ */ +export interface CacheTagDefinition { + readonly name: string; + readonly properties: Readonly>; +} + +// --------------------------------------------------------------------------- +// Synthetic schema construction +// --------------------------------------------------------------------------- + +/** + * Builds a synthetic `object({ params, body, query, headers })` schema + * from the endpoint's schema definitions and returns its + * `PropertyDescriptorTree` so callers can write type-safe selectors like: + * + * ```ts + * endpoint.cacheTag('todo', p => ({ + * id: p.query.id, + * fromBodyId: p.body.id + * })) + * ``` + * + * Only non-null schemas are included in the synthetic schema. + */ +export function createCacheTagTree(schemas: { + paramsSchema?: SchemaBuilder | null; + bodySchema?: SchemaBuilder | null; + querySchema?: ObjectSchemaBuilder | null; + headerSchema?: ObjectSchemaBuilder< + any, + any, + any, + any, + any, + any, + any + > | null; +}): any { + const props: Record> = {}; + + if (schemas.paramsSchema) { + const ps = schemas.paramsSchema; + // ParseStringSchemaBuilder wraps an ObjectSchemaBuilder; + // extract it so PropertyDescriptorTree can recurse into params. 
+ if ( + typeof (ps as any).introspect === 'function' && + (ps as any).introspect().objectSchema + ) { + props.params = (ps as any).introspect().objectSchema; + } else if ((ps as any)[SYMBOL_HAS_PROPERTIES] === true) { + props.params = ps; + } + } + + if (schemas.bodySchema) { + const bs = schemas.bodySchema; + if ( + (bs as any)[SYMBOL_HAS_PROPERTIES] === true || + typeof (bs as any).introspect === 'function' + ) { + props.body = bs; + } + } + + if (schemas.querySchema) { + props.query = schemas.querySchema; + } + + if (schemas.headerSchema) { + props.headers = schemas.headerSchema; + } + + const syn = object(props as any); + return (object as any).getPropertiesFor(syn); +} + +// --------------------------------------------------------------------------- +// Serialization +// --------------------------------------------------------------------------- + +/** + * Validates that a value returned from a cache-tag selector is a valid + * property descriptor. + */ +function isPropertyDescriptor(value: unknown): boolean { + if (value === null || value === undefined) return false; + if (typeof value !== 'object') return false; + return ( + typeof (value as any)[SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR] === 'object' && + (value as any)[SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR] !== null + ); +} + +/** + * Serialises the result of a cache-tag selector callback into a + * {@link CacheTagDefinition} that can be stored on endpoint metadata + * and forwarded to the client middleware. + * + * Each value in `descriptors` must be a {@link PropertyDescriptor} — + * its `getValue` closure is wrapped in a {@link CacheTagPropertyAccessor}. + * + * @throws If any value is not a valid property descriptor. 
+ */ +export function serializeTag( + name: string, + descriptors: Record +): CacheTagDefinition { + const properties: Record = {}; + + for (const [key, value] of Object.entries(descriptors)) { + if (!isPropertyDescriptor(value)) { + throw new Error( + `Cache tag "${name}": property "${key}" is not a valid ` + + `PropertyDescriptor. Make sure you select a leaf property ` + + `from the tree (e.g. p.query.id, not p.query).` + ); + } + + const inner = (value as PropertyDescriptor)[ + SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR + ]; + + properties[key] = { + getValue: (root: { + params: Record; + body: unknown; + query: Record; + headers: Record; + }) => inner.getValue(root as any) + }; + } + + return { name, properties }; +} + +// --------------------------------------------------------------------------- +// Key computation (client-side) +// --------------------------------------------------------------------------- + +/** + * Computes a deterministic cache key from a tag definition and live + * request data. + * + * - Simple tags (no properties) produce just the tag name. + * - Tags with properties produce `name:key1=val1,key2=val2` where + * keys are sorted alphabetically for determinism. + * + * Properties whose `getValue` returns `success: false` are skipped + * (their value is not included in the key). 
+ */ +export function computeCacheKey( + tag: CacheTagDefinition, + root: { + params: Record; + body: unknown; + query: Record; + headers: Record; + } +): string { + const entries = Object.entries(tag.properties); + + if (entries.length === 0) { + return tag.name; + } + + const parts: string[] = []; + for (const [key, accessor] of entries.sort(([a], [b]) => + a.localeCompare(b) + )) { + const result = accessor.getValue(root); + if (result.success && result.value !== undefined) { + parts.push(`${key}=${String(result.value)}`); + } + } + + if (parts.length === 0) { + return tag.name; + } + + return `${tag.name}:${parts.join(',')}`; +} diff --git a/libs/server/src/Endpoint.ts b/libs/server/src/Endpoint.ts index 7710e460..578adc3e 100644 --- a/libs/server/src/Endpoint.ts +++ b/libs/server/src/Endpoint.ts @@ -1,3 +1,4 @@ +// biome-ignore-all lint/suspicious/useAdjacentOverloadSignatures: each method in ScopedEndpointFactoryMethods and EndpointFactory has a single signature; they are separate methods, not overloads import type { InferType, ObjectSchemaBuilder, @@ -5,6 +6,7 @@ import type { PropertyDescriptorTree, SchemaBuilder } from '@cleverbrush/schema'; +import { SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR } from '@cleverbrush/schema'; import type { ActionResult, ContentResult, @@ -15,6 +17,8 @@ import type { StatusCodeResult, StreamResult } from './ActionResult.js'; +import type { CacheTagDefinition } from './CacheTag.js'; +import { createCacheTagTree, serializeTag } from './CacheTag.js'; import type { RequestContext } from './RequestContext.js'; import { createSubscription, @@ -22,7 +26,12 @@ import { type SubscriptionBuilder, type SubscriptionHandlerEntry } from './Subscription.js'; -import type { Middleware } from './types.js'; +import type { + FilePart, + Middleware, + RejectedFile, + UploadOptions +} from './types.js'; // --------------------------------------------------------------------------- // Simplify — flattens intersection types for clean IDE tooltips @@ -36,7 
+45,14 @@ type Simplify = { [K in keyof T]: T[K] } & {}; type HasKeys = keyof T extends never ? false : true; -type ActionContextParts = { +type ActionContextParts< + TParams, + TBody, + TQuery, + THeaders, + TPrincipal, + TUpload extends boolean +> = { context: RequestContext; } & (HasKeys extends true ? { params: TParams } : {}) & (TBody extends undefined @@ -48,7 +64,10 @@ type ActionContextParts = { }) & (HasKeys extends true ? { query: TQuery } : {}) & (HasKeys extends true ? { headers: THeaders } : {}) & - (TPrincipal extends undefined ? {} : { principal: TPrincipal }); + (TPrincipal extends undefined ? {} : { principal: TPrincipal }) & + (TUpload extends true + ? { files: Record; rejectedFiles?: RejectedFile[] } + : {}); /** * The fully-typed argument object passed to endpoint handlers. @@ -66,10 +85,18 @@ export type ActionContext = infer TPrincipal, any, any, - any + any, + infer TUpload > ? Simplify< - ActionContextParts + ActionContextParts< + TParams, + TBody, + TQuery, + THeaders, + TPrincipal, + TUpload + > > : never; @@ -97,6 +124,7 @@ export type ServiceSchemas = any, any, any, + any, any > ? TServices @@ -112,6 +140,7 @@ type ResponseType = any, any, infer TResponse, + any, any > ? TResponse extends SchemaBuilder @@ -133,7 +162,8 @@ export type ResponsesOf = any, any, any, - infer TResponses + infer TResponses, + any > ? TResponses : never; @@ -198,7 +228,18 @@ export type Handler = // Handler mapping — compile-time complete endpoint → handler binding // --------------------------------------------------------------------------- -type AnyEndpoint = EndpointBuilder; +type AnyEndpoint = EndpointBuilder< + any, + any, + any, + any, + any, + any, + any, + any, + any, + any +>; type AnySubscriptionBuilder = SubscriptionBuilder< any, any, @@ -534,6 +575,17 @@ export interface EndpointMetadata { * OpenAPI Operation Object. */ readonly callbacks: Record | null; + /** + * When set, the endpoint accepts `multipart/form-data` uploads. 
+ * The configuration controls max file size, allowed MIME types, etc. + * @see `EndpointBuilder.upload()` + */ + readonly fileUpload: UploadOptions | null; + /** + * Cache tags declared via `.clearsCacheTag()`, providing tag-based cache + * key computation for the client middleware. + */ + readonly cacheTags: readonly CacheTagDefinition[]; } /** @@ -566,6 +618,61 @@ type InferResponsesMap< : null; }; +// --------------------------------------------------------------------------- +// Cache-tag selector type — gives the consumer IDE hints when selecting +// properties from the tree passed to the `.clearsCacheTag()` callback. +// --------------------------------------------------------------------------- + +/** + * A leaf node in a cache-tag property tree — mirrors the shape of the + * actual runtime {@link PropertyDescriptor} so the compiler accepts + * values selected by the consumer. + */ +interface CacheTagPropertyLeaf { + readonly [SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR]: { + readonly getValue: (obj: Record) => { + readonly value?: unknown; + readonly success: boolean; + }; + }; +} + +/** Recursively builds a typed property tree from an inferred object shape. */ +type CacheTagPropertyTree = CacheTagPropertyLeaf & + (T extends Record + ? { readonly [K in keyof T]-?: CacheTagPropertyTree } + : unknown); + +/** + * The typed tree passed to the `.clearsCacheTag(name, selector)` callback. + * + * `p.params`, `p.query`, and `p.headers` provide IDE completion for + * each schema's property names, while `p.body` resolves through the + * body schema's `InferType`. + */ +type CacheTagSelector = { + readonly params: [keyof TParams] extends [never] + ? Record + : TParams extends Record + ? CacheTagPropertyTree + : Record; + readonly body: TBody extends undefined + ? undefined + : TBody extends SchemaBuilder + ? CacheTagPropertyTree> + : Record; + readonly query: [keyof TQuery] extends [never] + ? Record + : TQuery extends Record + ? 
CacheTagPropertyTree + : Record; + readonly headers: [keyof THeaders] extends [never] + ? Record + : THeaders extends Record + ? CacheTagPropertyTree + : Record; +}; + export class EndpointBuilder< TParams = {}, TBody = undefined, @@ -575,7 +682,8 @@ export class EndpointBuilder< TPrincipal = undefined, TRoles extends string = string, TResponse = any, - TResponses extends Record = {} + TResponses extends Record = {}, + TUpload extends boolean = false > { readonly #method: string; readonly #basePath: string; @@ -639,6 +747,8 @@ export class EndpointBuilder< readonly #externalDocs: { url: string; description?: string } | null; readonly #links: Record | null; readonly #callbacks: Record | null; + readonly #fileUpload: UploadOptions | null; + readonly #cacheTags: readonly CacheTagDefinition[]; constructor( method: string, @@ -702,7 +812,9 @@ export class EndpointBuilder< > | null = null, externalDocs: { url: string; description?: string } | null = null, links: Record | null = null, - callbacks: Record | null = null + callbacks: Record | null = null, + fileUpload: UploadOptions | null = null, + cacheTags: readonly CacheTagDefinition[] = [] ) { this.#method = method; this.#basePath = basePath; @@ -727,6 +839,8 @@ export class EndpointBuilder< this.#externalDocs = externalDocs; this.#links = links; this.#callbacks = callbacks; + this.#fileUpload = fileUpload; + this.#cacheTags = cacheTags; } /** Define the request body schema. Validation failures return 422 Problem Details. 
*/ @@ -741,7 +855,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -766,7 +881,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -784,7 +901,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -809,7 +927,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -827,7 +947,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -852,7 +973,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -870,7 +993,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -895,7 +1019,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -920,7 +1046,8 @@ export class EndpointBuilder< InferType, TRoles, TResponse, - TResponses + TResponses, + TUpload >; authorize( ...roles: TRoles[] @@ -933,7 +1060,8 @@ export class EndpointBuilder< unknown, TRoles, TResponse, - TResponses + TResponses, + TUpload >; authorize( ...args: unknown[] @@ -946,7 +1074,8 @@ export class EndpointBuilder< any, TRoles, TResponse, - TResponses + TResponses, + TUpload > { let roles: string[]; if ( @@ -987,7 +1116,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + 
this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1007,7 +1138,8 @@ export class EndpointBuilder< TPrincipal, TRoles, T, - TResponses + TResponses, + TUpload >; returns>( schema: TSchema @@ -1020,11 +1152,12 @@ export class EndpointBuilder< TPrincipal, TRoles, TSchema, - TResponses + TResponses, + TUpload >; returns( _schema?: unknown - ): EndpointBuilder { + ): EndpointBuilder { const schema = _schema != null && typeof _schema === 'object' && @@ -1054,7 +1187,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1096,7 +1231,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - InferResponsesMap + InferResponsesMap, + TUpload > { return new EndpointBuilder( this.#method, @@ -1121,7 +1257,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1137,7 +1275,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1162,7 +1301,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1178,7 +1319,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1203,7 +1345,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1219,7 +1363,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1244,7 +1389,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, 
this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1260,7 +1407,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1285,7 +1433,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1299,7 +1449,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1324,7 +1475,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1347,7 +1500,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1372,7 +1526,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1398,7 +1554,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1423,7 +1580,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1448,7 +1607,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1473,7 +1633,81 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags + ); + } + + /** + * Mark this endpoint as accepting `multipart/form-data` file uploads. 
+ * + * When set, the server parses the request body with a streaming multipart + * parser instead of the default JSON deserializer. File fields are made + * available to the handler via `arg.files` (a `Record`), + * while non-file form fields are validated against the body schema and + * available via `arg.body`. + * + * @param options - Upload configuration (max file size, allowed MIME types, etc.). + * + * @example + * ```ts + * const UploadAvatar = endpoint + * .post('/api/avatar') + * .upload({ maxFileSize: 2 * 1024 * 1024, allowedMimeTypes: ['image/*'] }) + * .authorize(PrincipalSchema) + * .responses({ 200: AvatarSchema }); + * + * const handler: Handler = async ({ files }) => { + * const avatar = files['avatar']; + * // avatar: { filename, mimeType, buffer, size } + * }; + * ``` + */ + upload( + options?: UploadOptions + ): EndpointBuilder< + TParams, + TBody, + TQuery, + THeaders, + TServices, + TPrincipal, + TRoles, + TResponse, + TResponses, + true + > { + return new EndpointBuilder( + this.#method, + this.#basePath, + this.#pathTemplate, + this.#bodySchema, + this.#querySchema, + this.#headerSchema, + this.#serviceSchemas, + this.#authRoles, + this.#summary, + this.#description, + this.#tags, + this.#operationId, + this.#deprecated, + this.#responseSchema, + this.#responsesSchemas, + this.#example, + this.#examples, + this.#producesFile, + this.#produces, + this.#responseHeaderSchema, + this.#externalDocs, + this.#links, + this.#callbacks, + { + maxFileSize: options?.maxFileSize ?? 10 * 1024 * 1024, + allowedMimeTypes: options?.allowedMimeTypes, + maxFileCount: options?.maxFileCount ?? 
10 + }, + this.#cacheTags ); } @@ -1528,7 +1762,9 @@ export class EndpointBuilder< responseHeaderSchema: this.#responseHeaderSchema, externalDocs: this.#externalDocs, links: this.#links, - callbacks: this.#callbacks + callbacks: this.#callbacks, + fileUpload: this.#fileUpload, + cacheTags: this.#cacheTags }; } @@ -1567,7 +1803,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1592,7 +1829,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1627,7 +1866,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1652,7 +1892,9 @@ export class EndpointBuilder< schema, this.#externalDocs, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1676,7 +1918,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1701,7 +1944,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, { url, description }, this.#links, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1736,7 +1981,8 @@ export class EndpointBuilder< TPrincipal, TRoles, TResponse, - TResponses + TResponses, + TUpload > { return new EndpointBuilder( this.#method, @@ -1761,7 +2007,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, defs as Record, - this.#callbacks + this.#callbacks, + this.#fileUpload, + this.#cacheTags ); } @@ -1789,6 +2037,165 @@ export class EndpointBuilder< */ callbacks( defs: Record> + ): EndpointBuilder< + TParams, + TBody, + TQuery, + THeaders, + TServices, + TPrincipal, + TRoles, + TResponse, + TResponses, + TUpload + > { + return new EndpointBuilder( + 
this.#method, + this.#basePath, + this.#pathTemplate, + this.#bodySchema, + this.#querySchema, + this.#headerSchema, + this.#serviceSchemas, + this.#authRoles, + this.#summary, + this.#description, + this.#tags, + this.#operationId, + this.#deprecated, + this.#responseSchema, + this.#responsesSchemas, + this.#example, + this.#examples, + this.#producesFile, + this.#produces, + this.#responseHeaderSchema, + this.#externalDocs, + this.#links, + defs as Record, + this.#fileUpload, + this.#cacheTags + ); + } + + /** + * Declare a cache group for this endpoint. + * + * Use on GET / query endpoints to group responses into a named cache. + * The client-side {@code cacheTags} middleware caches responses keyed + * by this tag and flushes matching entries when a mutation calls + * {@link clearsCacheTag}. + * + * @overload Simple tag (no properties — single cache entry). + * @overload Tag with property descriptors for fine-grained keys. + * + * @example + * ```ts + * // GET — responses cached under "todo" group, keyed by id + * endpoint.get('/api/todos/:id') + * .cacheTag('todo', p => ({ + * id: p.params.id + * })) + * ``` + */ + cacheTag( + name: string + ): EndpointBuilder< + TParams, + TBody, + TQuery, + THeaders, + TServices, + TPrincipal, + TRoles, + TResponse, + TResponses + >; + cacheTag( + name: string, + selector: ( + tree: CacheTagSelector + ) => Record + ): EndpointBuilder< + TParams, + TBody, + TQuery, + THeaders, + TServices, + TPrincipal, + TRoles, + TResponse, + TResponses + >; + cacheTag( + name: string, + selector?: (tree: any) => Record + ): EndpointBuilder< + TParams, + TBody, + TQuery, + THeaders, + TServices, + TPrincipal, + TRoles, + TResponse, + TResponses + > { + return this.clearsCacheTag(name, selector!); + } + + /** + * Declare which cache groups are cleared when this mutation succeeds. + * + * Use on POST / PUT / PATCH / DELETE endpoints. 
When the mutation + * completes, the {@code cacheTags} client middleware invalidates all + * cache entries matching the declared tag names (prefix match). + * + * @overload Simple tag (clears all entries prefixed with the name). + * @overload Tag with property descriptors for targeted invalidation. + * + * @example + * ```ts + * // PATCH — clears "todo-list" and "todo:id=42" on success + * endpoint.patch('/api/todos/:id') + * .clearsCacheTag('todo-list') + * .clearsCacheTag('todo', p => ({ + * id: p.params.id + * })) + * ``` + */ + clearsCacheTag( + name: string + ): EndpointBuilder< + TParams, + TBody, + TQuery, + THeaders, + TServices, + TPrincipal, + TRoles, + TResponse, + TResponses + >; + clearsCacheTag( + name: string, + selector: ( + tree: CacheTagSelector + ) => Record + ): EndpointBuilder< + TParams, + TBody, + TQuery, + THeaders, + TServices, + TPrincipal, + TRoles, + TResponse, + TResponses + >; + clearsCacheTag( + name: string, + selector?: (tree: any) => Record ): EndpointBuilder< TParams, TBody, @@ -1800,6 +2207,56 @@ export class EndpointBuilder< TResponse, TResponses > { + if (!selector) { + return new EndpointBuilder( + this.#method, + this.#basePath, + this.#pathTemplate, + this.#bodySchema, + this.#querySchema, + this.#headerSchema, + this.#serviceSchemas, + this.#authRoles, + this.#summary, + this.#description, + this.#tags, + this.#operationId, + this.#deprecated, + this.#responseSchema, + this.#responsesSchemas, + this.#example, + this.#examples, + this.#producesFile, + this.#produces, + this.#responseHeaderSchema, + this.#externalDocs, + this.#links, + this.#callbacks, + this.#fileUpload, + [...this.#cacheTags, { name, properties: {} }] + ); + } + + const paramsSchema = extractParamsObjectSchema(this.#pathTemplate); + + const tree = createCacheTagTree({ + paramsSchema, + bodySchema: this.#bodySchema, + querySchema: this.#querySchema, + headerSchema: this.#headerSchema + }); + + const descriptors = selector(tree); + + if (typeof descriptors 
!== 'object' || descriptors === null) { + throw new Error( + `Cache tag "${name}": selector must return an object ` + + `with property descriptors (e.g. { id: p.query.id }).` + ); + } + + const definition = serializeTag(name, descriptors); + return new EndpointBuilder( this.#method, this.#basePath, @@ -1823,7 +2280,9 @@ export class EndpointBuilder< this.#responseHeaderSchema, this.#externalDocs, this.#links, - defs as Record + this.#callbacks, + this.#fileUpload, + [...this.#cacheTags, definition] ); } } @@ -1849,7 +2308,7 @@ function createEndpoint( pathTemplate?: ParseStringSchemaBuilder, authRoles?: readonly string[] | null, meta?: EndpointMetadataDescriptors -): EndpointBuilder; +): EndpointBuilder; function createEndpoint( method: string, @@ -1981,6 +2440,84 @@ type ScopedEndpointFactoryMethods< any, {} >; + post( + pathTemplate?: ParseStringSchemaBuilder + ): EndpointBuilder< + TParams extends undefined ? {} : TParams, + undefined, + {}, + {}, + {}, + TPrincipal, + TRoles, + any, + {} + >; + put( + pathTemplate?: ParseStringSchemaBuilder + ): EndpointBuilder< + TParams extends undefined ? {} : TParams, + undefined, + {}, + {}, + {}, + TPrincipal, + TRoles, + any, + {} + >; + patch( + pathTemplate?: ParseStringSchemaBuilder + ): EndpointBuilder< + TParams extends undefined ? {} : TParams, + undefined, + {}, + {}, + {}, + TPrincipal, + TRoles, + any, + {} + >; + delete( + pathTemplate?: ParseStringSchemaBuilder + ): EndpointBuilder< + TParams extends undefined ? {} : TParams, + undefined, + {}, + {}, + {}, + TPrincipal, + TRoles, + any, + {} + >; + head( + pathTemplate?: ParseStringSchemaBuilder + ): EndpointBuilder< + TParams extends undefined ? {} : TParams, + undefined, + {}, + {}, + {}, + TPrincipal, + TRoles, + any, + {} + >; + options( + pathTemplate?: ParseStringSchemaBuilder + ): EndpointBuilder< + TParams extends undefined ? 
{} : TParams, + undefined, + {}, + {}, + {}, + TPrincipal, + TRoles, + any, + {} + >; }; export type ScopedEndpointFactory = @@ -2044,108 +2581,58 @@ function createScopedFactory(basePath: string): ScopedEndpointFactory { } // --------------------------------------------------------------------------- -// EndpointFactory — top-level endpoint creation +// endpoint factory — creates EndpointBuilder instances // --------------------------------------------------------------------------- +/** + * Extracts an ObjectSchemaBuilder from a ParseStringSchemaBuilder path template. + * Used for constructing the synthetic cache tag tree. + */ +function extractParamsObjectSchema( + pathTemplate: RoutePath +): ObjectSchemaBuilder | null { + if ( + pathTemplate && + typeof pathTemplate !== 'string' && + typeof (pathTemplate as any).introspect === 'function' + ) { + const info = (pathTemplate as any).introspect(); + if (info.objectSchema) { + return info.objectSchema; + } + } + return null; +} + type EndpointFactory = { get( basePath: string, pathTemplate?: ParseStringSchemaBuilder - ): EndpointBuilder< - TParams, - undefined, - {}, - {}, - {}, - undefined, - TRoles, - any, - {} - >; + ): EndpointBuilder; post( basePath: string, pathTemplate?: ParseStringSchemaBuilder - ): EndpointBuilder< - TParams, - undefined, - {}, - {}, - {}, - undefined, - TRoles, - any, - {} - >; + ): EndpointBuilder; put( basePath: string, pathTemplate?: ParseStringSchemaBuilder - ): EndpointBuilder< - TParams, - undefined, - {}, - {}, - {}, - undefined, - TRoles, - any, - {} - >; + ): EndpointBuilder; patch( basePath: string, pathTemplate?: ParseStringSchemaBuilder - ): EndpointBuilder< - TParams, - undefined, - {}, - {}, - {}, - undefined, - TRoles, - any, - {} - >; + ): EndpointBuilder; delete( basePath: string, pathTemplate?: ParseStringSchemaBuilder - ): EndpointBuilder< - TParams, - undefined, - {}, - {}, - {}, - undefined, - TRoles, - any, - {} - >; + ): EndpointBuilder; head( basePath: string, 
pathTemplate?: ParseStringSchemaBuilder - ): EndpointBuilder< - TParams, - undefined, - {}, - {}, - {}, - undefined, - TRoles, - any, - {} - >; + ): EndpointBuilder; options( basePath: string, pathTemplate?: ParseStringSchemaBuilder - ): EndpointBuilder< - TParams, - undefined, - {}, - {}, - {}, - undefined, - TRoles, - any, - {} - >; + ): EndpointBuilder; resource(basePath: string): ScopedEndpointFactory; subscription( basePath: string, diff --git a/libs/server/src/Server.ts b/libs/server/src/Server.ts index 7423655f..7a10da42 100644 --- a/libs/server/src/Server.ts +++ b/libs/server/src/Server.ts @@ -14,6 +14,7 @@ import { requireRole } from '@cleverbrush/auth'; import { ServiceCollection, type ServiceProvider } from '@cleverbrush/di'; +import { Busboy } from '@fastify/busboy'; import { type WebSocket, WebSocketServer } from 'ws'; import { ActionResult, JsonResult } from './ActionResult.js'; import { ContentNegotiator } from './ContentNegotiator.js'; @@ -34,10 +35,13 @@ import { checkJsonDepth, safeJsonParse } from './safeJson.js'; import type { ContentTypeHandler, EndpointRegistration, + FilePart, Middleware, + RejectedFile, ServerBatchingOptions, ServerOptions, - SubscriptionRegistration + SubscriptionRegistration, + UploadOptions } from './types.js'; import { VirtualIncomingMessage, @@ -81,6 +85,107 @@ export interface AuthorizationConfig { policies?: Record void>; } +// --------------------------------------------------------------------------- +// Multipart / file-upload helpers +// --------------------------------------------------------------------------- + +async function parseMultipart( + req: http.IncomingMessage, + options: UploadOptions +): Promise<{ + fields: Record; + files: Record; + rejectedFiles: RejectedFile[]; +}> { + const maxFileCount = options.maxFileCount ?? 10; + const maxFileSize = options.maxFileSize ?? 
10 * 1024 * 1024; + const allowedMimeTypes = options.allowedMimeTypes; + + return new Promise((resolve, reject) => { + const fields: Record = {}; + const files: Record = {}; + const rejectedFiles: RejectedFile[] = []; + let fileCount = 0; + + const busboy = Busboy({ + headers: req.headers as { + 'content-type': string; + } & http.IncomingHttpHeaders, + limits: { + fileSize: maxFileSize, + files: maxFileCount + } + }); + + busboy.on('field', (fieldname: string, value: string) => { + fields[fieldname] = value; + }); + + busboy.on( + 'file', + ( + fieldname: string, + stream: import('@fastify/busboy').BusboyFileStream, + filename: string, + _transferEncoding: string, + mimeType: string + ) => { + if (fileCount >= maxFileCount) { + rejectedFiles.push({ + filename, + mimeType, + reason: `Exceeded max file count (${maxFileCount})` + }); + stream.resume(); + return; + } + + if (allowedMimeTypes) { + const allowed = allowedMimeTypes.some(pattern => { + if (pattern.endsWith('/*')) { + return mimeType.startsWith(pattern.slice(0, -1)); + } + return mimeType === pattern; + }); + if (!allowed) { + rejectedFiles.push({ + filename, + mimeType, + reason: `MIME type "${mimeType}" not allowed (allowed: ${allowedMimeTypes.join(', ')})` + }); + stream.resume(); + return; + } + } + + fileCount++; + const chunks: Buffer[] = []; + + stream.on('data', (chunk: Buffer) => { + chunks.push(chunk); + }); + + stream.on('end', () => { + const buffer = Buffer.concat(chunks); + files[fieldname] = { + filename, + mimeType, + buffer, + size: buffer.length + }; + }); + + stream.on('error', reject); + } + ); + + busboy.on('error', reject); + busboy.on('finish', () => resolve({ fields, files, rejectedFiles })); + + req.pipe(busboy); + }); +} + /** * Fluent builder for constructing and starting an HTTP server. 
* @@ -104,12 +209,16 @@ export class ServerBuilder { readonly #webhooks: WebhookDefinition[] = []; readonly #globalMiddlewares: Middleware[] = []; readonly #contentNegotiator = new ContentNegotiator(); - #options: ServerOptions = {}; + #options: ServerOptions; #authConfig: AuthenticationConfig | null = null; #authzConfig: AuthorizationConfig | null = null; #healthcheck = false; #batchConfig: ServerBatchingOptions | null = null; + constructor(options: ServerOptions = {}) { + this.#options = options; + } + /** * Configure the DI service collection. * @@ -600,39 +709,82 @@ export class Server { // Parse body if needed let parsedBody: unknown; + let uploadedFiles: Record | undefined; + let rejectedFiles: RejectedFile[] | undefined; if (needsBody(meta)) { - const contentType = req.headers['content-type']; - const ctHandler = - this.#contentNegotiator.selectRequestHandler( - contentType - ); - if (ctHandler) { - const rawBody = await ctx.body(); - const bodyText = rawBody.toString('utf-8'); - if (bodyText.length > 0) { - try { - parsedBody = ctHandler.deserialize(bodyText); - } catch { - const pd = createProblemDetails( - 400, - 'Malformed request body' - ); - res.writeHead(400, { - 'content-type': PROBLEM_JSON_CONTENT_TYPE - }); - res.end(serializeProblemDetails(pd)); - ctx.responded = true; - return; - } + const contentType = req.headers['content-type'] ?? 
''; + + // Multipart / file-upload path + if ( + meta.fileUpload && + contentType.startsWith('multipart/form-data') + ) { + try { + const result = await parseMultipart( + req, + meta.fileUpload + ); + parsedBody = result.fields; + uploadedFiles = result.files; + rejectedFiles = result.rejectedFiles; + } catch { + const pd = createProblemDetails( + 400, + 'Malformed multipart request' + ); + res.writeHead(400, { + 'content-type': PROBLEM_JSON_CONTENT_TYPE + }); + res.end(serializeProblemDetails(pd)); + ctx.responded = true; + return; } - } else if (contentType) { - const pd = createProblemDetails(415); - res.writeHead(415, { + } else if (meta.fileUpload) { + const pd = createProblemDetails( + 400, + 'File upload endpoint requires multipart/form-data content type' + ); + res.writeHead(400, { 'content-type': PROBLEM_JSON_CONTENT_TYPE }); res.end(serializeProblemDetails(pd)); ctx.responded = true; return; + } else { + const ctHandler = + this.#contentNegotiator.selectRequestHandler( + contentType + ); + if (ctHandler) { + const rawBody = await ctx.body(); + const bodyText = rawBody.toString('utf-8'); + if (bodyText.length > 0) { + try { + parsedBody = + ctHandler.deserialize(bodyText); + } catch { + const pd = createProblemDetails( + 400, + 'Malformed request body' + ); + res.writeHead(400, { + 'content-type': + PROBLEM_JSON_CONTENT_TYPE + }); + res.end(serializeProblemDetails(pd)); + ctx.responded = true; + return; + } + } + } else if (contentType) { + const pd = createProblemDetails(415); + res.writeHead(415, { + 'content-type': PROBLEM_JSON_CONTENT_TYPE + }); + res.end(serializeProblemDetails(pd)); + ctx.responded = true; + return; + } } } @@ -654,6 +806,18 @@ export class Server { return; } + // Inject uploaded files into the action context + if (uploadedFiles && resolveResult.args.length > 0) { + const ctx = resolveResult.args[0] as Record< + string, + unknown + >; + ctx.files = uploadedFiles; + if (rejectedFiles && rejectedFiles.length > 0) { + ctx.rejectedFiles 
= rejectedFiles; + } + } + // Call handler let result = registration.handler(...resolveResult.args); if (result instanceof Promise) { @@ -1111,11 +1275,7 @@ export class Server { } export function createServer(options?: ServerOptions): ServerBuilder { - const builder = new ServerBuilder(); - if (options) { - (builder as any).__options = options; - } - return builder; + return new ServerBuilder(options); } // --------------------------------------------------------------------------- diff --git a/libs/server/src/contract.ts b/libs/server/src/contract.ts index 181bf826..ac35f300 100644 --- a/libs/server/src/contract.ts +++ b/libs/server/src/contract.ts @@ -27,6 +27,10 @@ * @module */ +export type { + CacheTagDefinition, + CacheTagPropertyAccessor +} from './CacheTag.js'; export { type ActionContext, type AllowedResponseReturn, @@ -52,6 +56,7 @@ export { type TrackedEvent, tracked } from './Subscription.js'; +export type { FilePart, UploadOptions } from './types.js'; import type { EndpointBuilder as _EB } from './Endpoint.js'; import type { SubscriptionBuilder as _SB } from './Subscription.js'; diff --git a/libs/server/src/index.ts b/libs/server/src/index.ts index d813777b..4124fcb8 100644 --- a/libs/server/src/index.ts +++ b/libs/server/src/index.ts @@ -8,6 +8,13 @@ export { StatusCodeResult, StreamResult } from './ActionResult.js'; +export { + type CacheTagDefinition, + type CacheTagPropertyAccessor, + computeCacheKey, + createCacheTagTree, + serializeTag +} from './CacheTag.js'; export { type ApiContract, type ApiGroup, @@ -44,6 +51,14 @@ export { NotFoundError, UnauthorizedError } from './HttpError.js'; +export { + idempotency, + type ServerIdempotencyOptions +} from './middlewares/Idempotency.js'; +export { + cacheResponse, + type ServerCacheOptions +} from './middlewares/ResponseCache.js'; export { createProblemDetails, createValidationProblemDetails, @@ -75,9 +90,12 @@ export { export type { ContentTypeHandler, EndpointRegistration, + FilePart, Middleware, + 
RejectedFile, ServerBatchingOptions, ServerOptions, - SubscriptionRegistration + SubscriptionRegistration, + UploadOptions } from './types.js'; export { defineWebhook, type WebhookDefinition } from './Webhook.js'; diff --git a/libs/server/src/middlewares/Idempotency.ts b/libs/server/src/middlewares/Idempotency.ts new file mode 100644 index 00000000..b30bb8b2 --- /dev/null +++ b/libs/server/src/middlewares/Idempotency.ts @@ -0,0 +1,185 @@ +/** + * Server-side idempotency middleware. + * + * Ensures mutating requests with the same idempotency key produce the + * same result exactly once — subsequent replays return the stored + * response without re-executing the handler. + * + * @module + */ + +import type { ServerResponse } from 'node:http'; +import type { RequestContext } from '../RequestContext.js'; +import type { Middleware } from '../types.js'; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +/** + * Configuration for {@link idempotency}. + */ +export interface ServerIdempotencyOptions { + /** + * TTL in milliseconds for stored responses. + * Defaults to `86_400_000` (24 hours). + */ + ttl?: number; + + /** + * Header name to read the idempotency key from. + * Defaults to `"x-idempotency-key"`. + */ + headerName?: string; + + /** + * Predicate that decides whether a request should be skipped. + * Defaults to skipping non-mutating requests (GET, HEAD, OPTIONS). 
+ */ + skip?: (ctx: RequestContext) => boolean; +} + +// --------------------------------------------------------------------------- +// Internals +// --------------------------------------------------------------------------- + +interface StoredResponse { + status: number; + headers: Record; + body: Buffer; + expiresAt: number; +} + +function isMutating(method: string): boolean { + return ['POST', 'PUT', 'DELETE', 'PATCH'].includes(method.toUpperCase()); +} + +const CLEANUP_INTERVAL = 60_000; + +// --------------------------------------------------------------------------- +// Middleware +// --------------------------------------------------------------------------- + +/** + * Server-side idempotency middleware. + * + * Reads the `x-idempotency-key` header from mutating requests. If a + * response has already been stored for that key, it is returned + * immediately — the handler is never called. Otherwise the handler + * executes and its response is stored for future replays. + * + * GET, HEAD, and OPTIONS requests pass through without checking. + * + * @param options - Configuration. + * @returns A server-side {@link Middleware}. + * + * @example + * ```ts + * server.handle(CreateTodo, createHandler, { + * middlewares: [idempotency({ ttl: 86_400_000 })] + * }); + * ``` + */ +export function idempotency( + options: ServerIdempotencyOptions = {} +): Middleware { + const { + ttl = 86_400_000, + headerName = 'x-idempotency-key', + skip = (ctx: RequestContext) => !isMutating(ctx.method) + } = options; + + const store = new Map(); + + // Periodic cleanup of expired entries + const cleanupTimer = setInterval(() => { + const now = Date.now(); + for (const [key, entry] of store) { + if (entry.expiresAt <= now) { + store.delete(key); + } + } + }, CLEANUP_INTERVAL); + + if (cleanupTimer.unref) { + cleanupTimer.unref(); + } + + return async (ctx: RequestContext, next: () => Promise) => { + if (skip(ctx)) { + return next(); + } + + const key = + ctx.headers[headerName] ?? 
ctx.headers[headerName.toLowerCase()]; + if (!key || typeof key !== 'string' || key.length === 0) { + return next(); + } + + // Check if we already have a stored response for this key + const stored = store.get(key); + if (stored) { + if (stored.expiresAt <= Date.now()) { + store.delete(key); + // Expired — fall through to handler + } else { + // Replay stored response + const res = ctx.response as ServerResponse; + res.writeHead(stored.status, stored.headers); + res.end(stored.body); + ctx.responded = true; + return; + } + } + + // Capture the handler's response for future replays + const originalWriteHead = ( + ctx.response as ServerResponse + ).writeHead.bind(ctx.response); + const originalEnd = (ctx.response as ServerResponse).end.bind( + ctx.response + ); + + let capturedStatus = 200; + let capturedHeaders: Record = {}; + const chunks: Buffer[] = []; + + (ctx.response as ServerResponse).writeHead = function ( + this: ServerResponse, + statusCode: number, + ...args: any[] + ) { + capturedStatus = statusCode; + if (args.length > 0) { + capturedHeaders = args[0]; + } + return originalWriteHead(statusCode, ...args) as ServerResponse; + } as any; + + (ctx.response as ServerResponse).end = function ( + this: ServerResponse, + chunk?: any, + ...args: any[] + ) { + if (chunk) { + chunks.push( + Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(chunk) + ); + } + return originalEnd(chunk, ...args) as ServerResponse; + } as any; + + await next(); + + // Store the response for future replays + if (capturedStatus >= 200 && capturedStatus < 500) { + const body = Buffer.concat(chunks); + store.set(key, { + status: capturedStatus, + headers: capturedHeaders, + body, + expiresAt: Date.now() + ttl + }); + } + }; +} diff --git a/libs/server/src/middlewares/ResponseCache.ts b/libs/server/src/middlewares/ResponseCache.ts new file mode 100644 index 00000000..5286b25c --- /dev/null +++ b/libs/server/src/middlewares/ResponseCache.ts @@ -0,0 +1,264 @@ +/** + * Server-side cache response middleware. + * + * Caches successful handler responses keyed by endpoint-defined cache tags. + * On cache hit, the response is served directly — the handler never runs. + * Mutating requests invalidate matching cache entries after the handler + * completes successfully. + * + * @module + */ + +import type { ServerResponse } from 'node:http'; +import type { RequestContext } from '../RequestContext.js'; +import type { Middleware } from '../types.js'; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +/** + * Configuration for {@link cacheResponse}. + */ +export interface ServerCacheOptions { + /** + * Default TTL in milliseconds for tags without an explicit TTL. + * Defaults to `60000` (60 seconds). + */ + defaultTtl?: number; + + /** + * Per-tag TTL overrides: `{ [tagName]: ttlMs }`. 
+ */ + ttlByTag?: Record; +} + +// --------------------------------------------------------------------------- +// Internals +// --------------------------------------------------------------------------- + +interface CacheEntry { + status: number; + headers: Record; + body: Buffer; + expiresAt: number; +} + +function isMutating(method: string): boolean { + return ['POST', 'PUT', 'DELETE', 'PATCH'].includes(method.toUpperCase()); +} + +function computeKey( + tags: ReadonlyArray<{ + name: string; + properties: Readonly< + Record< + string, + { + getValue(root: any): { + value?: unknown; + success: boolean; + }; + } + > + >; + }>, + root: any +): string[] { + return tags.map(tag => { + const parts: string[] = []; + for (const [key, accessor] of Object.entries(tag.properties).sort( + ([a], [b]) => a.localeCompare(b) + )) { + const result = accessor.getValue(root); + if (result.success && result.value !== undefined) { + parts.push(`${key}=${String(result.value)}`); + } + } + return parts.length > 0 ? `${tag.name}:${parts.join(',')}` : tag.name; + }); +} + +// --------------------------------------------------------------------------- +// Middleware +// --------------------------------------------------------------------------- + +/** + * Server-side cache response middleware. + * + * Uses cache-tag definitions from the matched endpoint (already available + * on `ctx.items.__endpoint_meta.cacheTags`) to compute deterministic cache + * keys from request data (params, query, body, headers). + * + * - **GET**: Computes cache key → serves cached response if valid → + * handler never executes. On cache miss, runs the handler and caches + * the response. + * - **Mutation (POST/PUT/PATCH/DELETE)**: Lets the handler run, then + * invalidates all cache entries whose key starts with any of the + * endpoint's cache tag names. + * + * @param options - Cache configuration. + * @returns A server-side {@link Middleware}. 
+ * + * @example + * ```ts + * server.handle(ListTodos, listHandler, { + * middlewares: [cacheResponse({ defaultTtl: 30_000 })] + * }); + * ``` + */ +export function cacheResponse(options: ServerCacheOptions = {}): Middleware { + const { ttlByTag = {}, defaultTtl = 60_000 } = options; + + const cache = new Map(); + + return async (ctx: RequestContext, next: () => Promise) => { + const meta = ctx.items.get('__endpoint_meta') as any; + const tags: ReadonlyArray<{ + name: string; + properties: Record< + string, + { getValue(root: any): { value?: unknown; success: boolean } } + >; + }> = meta?.cacheTags ?? []; + + if (tags.length === 0) { + return next(); + } + + if (isMutating(ctx.method)) { + // Run handler first (so cache is invalidated only on success) + await next(); + + if ( + (ctx.response as ServerResponse).statusCode >= 200 && + (ctx.response as ServerResponse).statusCode < 300 + ) { + // Build root for key computation + const rawBody = ctx.items.get('__raw_body') as + | unknown + | undefined; + const root = { + params: ctx.pathParams ?? {}, + body: rawBody, + query: ctx.queryParams ?? {}, + headers: ctx.headers ?? {} + }; + + const keys = computeKey(tags, root); + for (const tag of tags) { + for (const [cachedKey] of cache) { + if ( + keys.includes(cachedKey) || + keys.some(_k => cachedKey.startsWith(tag.name)) + ) { + cache.delete(cachedKey); + } + } + // Also delete by pure tag name prefix + for (const [cachedKey] of cache) { + if (cachedKey.startsWith(tag.name)) { + cache.delete(cachedKey); + } + } + } + } + return; + } + + if (ctx.method === 'GET') { + // Build root for key computation + const root = { + params: ctx.pathParams ?? {}, + body: undefined, + query: ctx.queryParams ?? {}, + headers: ctx.headers ?? 
{} + }; + + const keys = computeKey(tags, root); + + // Check all keys — first valid cache hit wins + for (const key of keys) { + const entry = cache.get(key); + if (entry && entry.expiresAt > Date.now()) { + // Serve from cache + const res = ctx.response as ServerResponse; + res.writeHead(entry.status, entry.headers); + res.end(entry.body); + ctx.responded = true; + return; + } + if (entry) { + cache.delete(key); + } + } + + // Cache miss — run handler, then capture the response + const originalWriteHead = ( + ctx.response as ServerResponse + ).writeHead.bind(ctx.response); + const originalEnd = (ctx.response as ServerResponse).end.bind( + ctx.response + ); + + let capturedStatus = 200; + let capturedHeaders: Record = + {}; + const chunks: Buffer[] = []; + + (ctx.response as ServerResponse).writeHead = function ( + this: ServerResponse, + statusCode: number, + ...args: any[] + ) { + capturedStatus = statusCode; + if (args.length > 0) { + capturedHeaders = args[0]; + } + return originalWriteHead(statusCode, ...args) as ServerResponse; + } as any; + + (ctx.response as ServerResponse).end = function ( + this: ServerResponse, + chunk?: any, + ...args: any[] + ) { + if (chunk) { + chunks.push( + Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) + ); + } + return originalEnd(chunk, ...args) as ServerResponse; + } as any; + + await next(); + + // Store in cache on success + if (capturedStatus >= 200 && capturedStatus < 300) { + const body = Buffer.concat(chunks); + const ttl = tags.reduce((max, tag) => { + const t = + ttlByTag[tag.name] !== undefined + ? ttlByTag[tag.name] + : defaultTtl; + return t > max ? 
t : max; + }, 0); + + if (ttl > 0) { + for (const key of keys) { + cache.set(key, { + status: capturedStatus, + headers: capturedHeaders, + body, + expiresAt: Date.now() + ttl + }); + } + } + } + return; + } + + // Non-GET, non-mutation — pass through + return next(); + }; +} diff --git a/libs/server/src/types.ts b/libs/server/src/types.ts index 98df5730..e97acf13 100644 --- a/libs/server/src/types.ts +++ b/libs/server/src/types.ts @@ -119,7 +119,59 @@ export interface ServerOptions { } // --------------------------------------------------------------------------- -// Batching Options +// File Upload +// --------------------------------------------------------------------------- + +/** + * Represents a single uploaded file from a `multipart/form-data` request. + */ +export interface FilePart { + /** Original filename as provided by the client. */ + readonly filename: string; + /** MIME type of the file (e.g. `'image/jpeg'`). */ + readonly mimeType: string; + /** Full file contents as a Buffer. */ + readonly buffer: Buffer; + /** File size in bytes. */ + readonly size: number; +} + +/** + * Describes a file that was rejected during multipart parsing. + */ +export interface RejectedFile { + /** Original filename as provided by the client. */ + readonly filename: string; + /** MIME type of the file (e.g. `'application/xlsx'`). */ + readonly mimeType: string; + /** Human-readable reason the file was rejected. */ + readonly reason: string; +} + +/** + * Configuration for file upload endpoints declared via + * `EndpointBuilder.upload()`. + */ +export interface UploadOptions { + /** + * Maximum allowed file size per uploaded file in bytes. + * @default 10_485_760 (10 MB) + */ + maxFileSize?: number; + /** + * Allowed MIME types or patterns (e.g. `'image/*'`, `'application/pdf'`). + * When not set, all MIME types are accepted. + */ + allowedMimeTypes?: string[]; + /** + * Maximum number of files allowed in a single request. 
+ * @default 10 + */ + maxFileCount?: number; +} + +// --------------------------------------------------------------------------- +// Server Batching Options // --------------------------------------------------------------------------- /** diff --git a/libs/server/tests/CacheTag.test.ts b/libs/server/tests/CacheTag.test.ts new file mode 100644 index 00000000..edf3fc1f --- /dev/null +++ b/libs/server/tests/CacheTag.test.ts @@ -0,0 +1,331 @@ +import { + number, + object, + parseString, + SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR, + string +} from '@cleverbrush/schema'; +import { describe, expect, it } from 'vitest'; +import type { CacheTagPropertyAccessor } from '../src/CacheTag.js'; +import { + computeCacheKey, + createCacheTagTree, + serializeTag +} from '../src/CacheTag.js'; + +// --------------------------------------------------------------------------- +// createCacheTagTree +// --------------------------------------------------------------------------- + +describe('createCacheTagTree', () => { + it('returns a tree with params from a ParseStringSchemaBuilder', () => { + const ps = parseString( + object({ id: number() }), + $t => $t`/todos/${t => t.id}` + ); + const tree = createCacheTagTree({ paramsSchema: ps }); + + const desc = tree.params?.id; + expect(desc).toBeDefined(); + const inner = (desc as any)[SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR]; + const result = inner.getValue({ params: { id: 42 } }); + expect(result.success).toBe(true); + expect(result.value).toBe(42); + }); + + it('returns a tree with body', () => { + const bodySchema = object({ title: string() }); + const tree = createCacheTagTree({ bodySchema }); + + const desc = tree.body?.title; + expect(desc).toBeDefined(); + const inner = (desc as any)[SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR]; + const result = inner.getValue({ body: { title: 'hello' } }); + expect(result.success).toBe(true); + expect(result.value).toBe('hello'); + }); + + it('returns a tree with query', () => { + const querySchema = object({ page: 
number() }); + const tree = createCacheTagTree({ querySchema }); + + const desc = tree.query?.page; + expect(desc).toBeDefined(); + const inner = (desc as any)[SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR]; + const result = inner.getValue({ query: { page: 3 } }); + expect(result.success).toBe(true); + expect(result.value).toBe(3); + }); + + it('returns a tree with headers', () => { + const headerSchema = object({ 'x-tenant': string() }); + const tree = createCacheTagTree({ headerSchema }); + + const desc = tree.headers?.['x-tenant']; + expect(desc).toBeDefined(); + const inner = (desc as any)[SYMBOL_SCHEMA_PROPERTY_DESCRIPTOR]; + const result = inner.getValue({ + headers: { 'x-tenant': 'acme' } + }); + expect(result.success).toBe(true); + expect(result.value).toBe('acme'); + }); + + it('omits null schemas', () => { + const querySchema = object({ search: string() }); + const tree = createCacheTagTree({ querySchema }); + + expect(tree.params).toBeUndefined(); + expect(tree.body).toBeUndefined(); + expect(tree.headers).toBeUndefined(); + expect(tree.query?.search).toBeDefined(); + }); + + it('works with all schemas populated', () => { + const ps = parseString( + object({ id: string() }), + $t => $t`/todos/${t => t.id}` + ); + const bodySchema = object({ title: string() }); + const querySchema = object({ filter: string() }); + const headerSchema = object({ 'x-api-key': string() }); + + const tree = createCacheTagTree({ + paramsSchema: ps, + bodySchema, + querySchema, + headerSchema + }); + + expect(tree.params?.id).toBeDefined(); + expect(tree.body?.title).toBeDefined(); + expect(tree.query?.filter).toBeDefined(); + expect(tree.headers?.['x-api-key']).toBeDefined(); + }); +}); + +// --------------------------------------------------------------------------- +// serializeTag +// --------------------------------------------------------------------------- + +describe('serializeTag', () => { + it('serializes a tag with named property accessors', () => { + const bodySchema = object({ 
+ id: number(), + name: string() + }); + const tree = createCacheTagTree({ bodySchema }); + + const definition = serializeTag('todo', { + id: tree.body?.id, + fromName: tree.body?.name + }); + + expect(definition.name).toBe('todo'); + expect(Object.keys(definition.properties).sort()).toEqual([ + 'fromName', + 'id' + ]); + + // Accessor for id should extract body.id + const idResult = definition.properties['id'].getValue({ + params: {}, + body: { id: 123, name: 'test' }, + query: {}, + headers: {} + }); + expect(idResult.success).toBe(true); + expect(idResult.value).toBe(123); + + // Accessor for fromName should extract body.name + const nameResult = definition.properties['fromName'].getValue({ + params: {}, + body: { id: 123, name: 'test' }, + query: {}, + headers: {} + }); + expect(nameResult.success).toBe(true); + expect(nameResult.value).toBe('test'); + }); + + it('returns a tag with empty properties when no descriptors given', () => { + const definition = serializeTag('simple', {}); + expect(definition.name).toBe('simple'); + expect(definition.properties).toEqual({}); + }); + + it('throws when a value is not a valid property descriptor', () => { + expect(() => + serializeTag('bad', { + notADescriptor: 'hello' + }) + ).toThrow(/not a valid.*PropertyDescriptor/); + }); + + it('throws when a value is null', () => { + expect(() => + serializeTag('bad', { + missing: null + }) + ).toThrow(/not a valid.*PropertyDescriptor/); + }); + + it('throws when a value is an object without the descriptor symbol', () => { + expect(() => + serializeTag('bad', { + plain: { foo: 'bar' } + }) + ).toThrow(/not a valid.*PropertyDescriptor/); + }); +}); + +// --------------------------------------------------------------------------- +// computeCacheKey +// --------------------------------------------------------------------------- + +describe('computeCacheKey', () => { + function makeTag( + name: string, + accessors: Record + ) { + return { name, properties: accessors }; + } + + 
function makeConstAccessor(value: unknown): CacheTagPropertyAccessor { + return { + getValue: () => ({ success: true, value }) + }; + } + + function makeFailingAccessor(): CacheTagPropertyAccessor { + return { + getValue: () => ({ success: false }) + }; + } + + const emptyRoot = { + params: {}, + body: undefined, + query: {}, + headers: {} + }; + + it('returns tag name for simple tags with no properties', () => { + const tag = makeTag('invalidate-all', {}); + expect(computeCacheKey(tag, emptyRoot)).toBe('invalidate-all'); + }); + + it('builds key with single property', () => { + const tag = makeTag('todo', { + id: makeConstAccessor(42) + }); + expect(computeCacheKey(tag, emptyRoot)).toBe('todo:id=42'); + }); + + it('builds key with multiple properties sorted alphabetically', () => { + const tag = makeTag('todo', { + z: makeConstAccessor('last'), + a: makeConstAccessor('first'), + m: makeConstAccessor('middle') + }); + expect(computeCacheKey(tag, emptyRoot)).toBe( + 'todo:a=first,m=middle,z=last' + ); + }); + + it('skips properties where getValue returns success: false', () => { + const tag = makeTag('todo', { + id: makeConstAccessor(42), + optional: makeFailingAccessor() + }); + expect(computeCacheKey(tag, emptyRoot)).toBe('todo:id=42'); + }); + + it('returns tag name when all properties fail', () => { + const tag = makeTag('todo', { + a: makeFailingAccessor(), + b: makeFailingAccessor() + }); + expect(computeCacheKey(tag, emptyRoot)).toBe('todo'); + }); + + it('skips properties with undefined value', () => { + const tag = makeTag('todo', { + id: makeConstAccessor(undefined) + }); + expect(computeCacheKey(tag, emptyRoot)).toBe('todo'); + }); + + it('produces stable output for the same inputs', () => { + const tag = makeTag('todo', { + id: makeConstAccessor(42), + name: makeConstAccessor('test') + }); + const k1 = computeCacheKey(tag, emptyRoot); + const k2 = computeCacheKey(tag, emptyRoot); + expect(k1).toBe(k2); + }); +}); + +// 
--------------------------------------------------------------------------- +// Integration: createCacheTagTree + serializeTag + computeCacheKey +// --------------------------------------------------------------------------- + +describe('integration', () => { + it('end-to-end: descriptor tree → serialize → compute key', () => { + const bodySchema = object({ + orgId: number(), + userId: string() + }); + const querySchema = object({ filter: string() }); + + const tree = createCacheTagTree({ bodySchema, querySchema }); + + const definition = serializeTag('resource', { + orgId: tree.body?.orgId, + userId: tree.body?.userId, + filter: tree.query?.filter + }); + + const root = { + params: {}, + body: { orgId: 10, userId: 'u1' }, + query: { filter: 'active' }, + headers: {} + }; + + const key = computeCacheKey(definition, root); + // Sorted: filter, orgId, userId + expect(key).toBe('resource:filter=active,orgId=10,userId=u1'); + }); + + it('end-to-end with params and headers', () => { + const ps = parseString( + object({ orgId: number(), projectId: string() }), + $t => $t`/orgs/${t => t.orgId}/projects/${t => t.projectId}` + ); + const headerSchema = object({ 'x-tenant': string() }); + + const tree = createCacheTagTree({ + paramsSchema: ps, + headerSchema + }); + + const definition = serializeTag('project', { + orgId: tree.params?.orgId, + projectId: tree.params?.projectId, + tenant: tree.headers?.['x-tenant'] + }); + + const root = { + params: { orgId: 42, projectId: 'p1' }, + body: undefined, + query: {}, + headers: { 'x-tenant': 'acme' } + }; + + const key = computeCacheKey(definition, root); + // Sorted: orgId, projectId, tenant + expect(key).toBe('project:orgId=42,projectId=p1,tenant=acme'); + }); +}); diff --git a/libs/server/tests/Endpoint.test.ts b/libs/server/tests/Endpoint.test.ts index cd4ecbbe..1c988b05 100644 --- a/libs/server/tests/Endpoint.test.ts +++ b/libs/server/tests/Endpoint.test.ts @@ -612,3 +612,165 @@ describe('endpoint HTTP method factories', () => 
{ expect(ep.introspect().method).toBe('OPTIONS'); }); }); + +// --------------------------------------------------------------------------- +// .clearsCacheTag() +// --------------------------------------------------------------------------- + +describe('EndpointBuilder clearsCacheTag / cacheTag', () => { + it('simple tag stores name with empty properties in introspect', () => { + const ep = endpoint.get('/api/items').clearsCacheTag('tag-a'); + + const tags = ep.introspect().cacheTags; + expect(tags).toHaveLength(1); + expect(tags[0].name).toBe('tag-a'); + expect(tags[0].properties).toEqual({}); + }); + + it('property tag stores name and property accessors', () => { + const querySchema = object({ filter: string(), page: number() }); + const ep = endpoint + .get('/api/items') + .query(querySchema) + .clearsCacheTag('tag-b', p => ({ + filter: p.query.filter, + page: p.query.page + })); + + const tags = ep.introspect().cacheTags; + expect(tags).toHaveLength(1); + expect(tags[0].name).toBe('tag-b'); + expect(Object.keys(tags[0].properties).sort()).toEqual([ + 'filter', + 'page' + ]); + }); + + it('multiple tags accumulate in order', () => { + const ep = endpoint + .get('/api/items') + .clearsCacheTag('first') + .clearsCacheTag('second'); + + const tags = ep.introspect().cacheTags; + expect(tags).toHaveLength(2); + expect(tags[0].name).toBe('first'); + expect(tags[1].name).toBe('second'); + }); + + it('cacheTag returns a new builder (immutable)', () => { + const a = endpoint.get('/api/items'); + const b = a.clearsCacheTag('test'); + + expect(a).not.toBe(b); + expect(a.introspect().cacheTags).toEqual([]); + expect(b.introspect().cacheTags).toHaveLength(1); + expect(b.introspect().cacheTags[0].name).toBe('test'); + }); + + it('selector with invalid value throws', () => { + const ep = endpoint.get('/api/items'); + + expect(() => + ep.clearsCacheTag('bad', () => ({ + notADescriptor: 'hello' as any + })) + ).toThrow(/not a valid.*PropertyDescriptor/); + }); + + it('selector 
returning null throws', () => { + const ep = endpoint.get('/api/items'); + + expect(() => + ep.clearsCacheTag('bad', () => ({ + missing: null as any + })) + ).toThrow(/not a valid.*PropertyDescriptor/); + }); + + it('combines simple and property tags', () => { + const querySchema = object({ search: string() }); + const ep = endpoint + .get('/api/items') + .query(querySchema) + .clearsCacheTag('list') + .clearsCacheTag('item', p => ({ search: p.query.search })); + + const tags = ep.introspect().cacheTags; + expect(tags).toHaveLength(2); + expect(tags[0].name).toBe('list'); + expect(tags[0].properties).toEqual({}); + expect(tags[1].name).toBe('item'); + expect(Object.keys(tags[1].properties)).toEqual(['search']); + }); + + it('property tag with body schema', () => { + const bodySchema = object({ title: string() }); + const ep = endpoint + .post('/api/items') + .body(bodySchema) + .clearsCacheTag('item', p => ({ title: p.body.title })); + + const tags = ep.introspect().cacheTags; + expect(tags).toHaveLength(1); + expect(tags[0].name).toBe('item'); + expect(Object.keys(tags[0].properties)).toEqual(['title']); + }); + + it('property tag with params from ParseStringSchemaBuilder', () => { + const ps = parseString( + object({ id: number() }), + $t => $t`/items/${t => t.id}` + ); + const ep = endpoint + .get('/api/items', ps as any) + .clearsCacheTag('item', p => ({ id: p.params.id })); + + const tags = ep.introspect().cacheTags; + expect(tags).toHaveLength(1); + expect(tags[0].name).toBe('item'); + expect(Object.keys(tags[0].properties)).toEqual(['id']); + }); + + it('property accessors actually resolve values', () => { + const querySchema = object({ filter: string() }); + const ep = endpoint + .get('/api/items') + .query(querySchema) + .clearsCacheTag('item', p => ({ filter: p.query.filter })); + + const tags = ep.introspect().cacheTags; + const accessor = tags[0].properties['filter']; + const result = accessor.getValue({ + params: {}, + body: undefined, + query: { 
filter: 'active' }, + headers: {} + }); + expect(result.success).toBe(true); + expect(result.value).toBe('active'); + }); + + it('cacheTag and clearsCacheTag produce identical results', () => { + const querySchema = object({ x: string() }); + const ep1 = endpoint + .get('/api/items') + .query(querySchema) + .cacheTag('group', p => ({ x: p.query.x })); + + const ep2 = endpoint + .get('/api/items') + .query(querySchema) + .clearsCacheTag('group', p => ({ x: p.query.x })); + + const tags1 = ep1.introspect().cacheTags; + const tags2 = ep2.introspect().cacheTags; + + expect(tags1).toHaveLength(1); + expect(tags2).toHaveLength(1); + expect(tags1[0].name).toBe(tags2[0].name); + expect(Object.keys(tags1[0].properties)).toEqual( + Object.keys(tags2[0].properties) + ); + }); +}); diff --git a/libs/server/tests/Idempotency.test.ts b/libs/server/tests/Idempotency.test.ts new file mode 100644 index 00000000..184dc1de --- /dev/null +++ b/libs/server/tests/Idempotency.test.ts @@ -0,0 +1,191 @@ +import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'; +import { idempotency } from '../src/middlewares/Idempotency.js'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function makeContext( + headers: Record = {}, + method = 'POST' +): any { + const response: any = { + statusCode: 200, + _headers: {} as Record, + _body: null as any, + writeHead(statusCode: number, hdrs?: any) { + this.statusCode = statusCode; + if (hdrs) this._headers = hdrs; + return this; + }, + end(chunk?: any) { + this._body = chunk; + return this; + } + }; + + return { + method, + headers, + response, + responded: false, + pathParams: {}, + queryParams: {}, + items: new Map() + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + 
+describe('idempotency middleware', () => { + beforeEach(() => { + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + test('passes through GET requests', async () => { + const mw = idempotency({ ttl: 5000 }); + const ctx = makeContext({ 'x-idempotency-key': 'key-1' }, 'GET'); + const next = vi.fn().mockResolvedValue(undefined); + await mw(ctx, next); + expect(next).toHaveBeenCalledOnce(); + }); + + test('passes through mutations without idempotency key', async () => { + const mw = idempotency({ ttl: 5000 }); + const ctx = makeContext({}, 'POST'); + const next = vi.fn().mockResolvedValue(undefined); + await mw(ctx, next); + expect(next).toHaveBeenCalledOnce(); + }); + + test('passes through on first request with key', async () => { + const mw = idempotency({ ttl: 5000 }); + const ctx = makeContext({ 'x-idempotency-key': 'key-1' }, 'POST'); + let handlerCalled = false; + await mw(ctx, async () => { + handlerCalled = true; + ctx.response.writeHead(201); + ctx.response.end('created'); + }); + expect(handlerCalled).toBe(true); + }); + + test('returns stored response on duplicate key', async () => { + const mw = idempotency({ ttl: 10_000 }); + + // First request + const ctx1 = makeContext({ 'x-idempotency-key': 'key-dup' }, 'POST'); + await mw(ctx1, async () => { + ctx1.response.writeHead(201, { + 'content-type': 'application/json' + }); + ctx1.response.end(JSON.stringify({ id: 1 })); + }); + + // Duplicate with same key + const ctx2 = makeContext({ 'x-idempotency-key': 'key-dup' }, 'POST'); + let handlerCalled = false; + await mw(ctx2, async () => { + handlerCalled = true; + }); + + expect(handlerCalled).toBe(false); + expect(ctx2.responded).toBe(true); + }); + + test('different keys store independently', async () => { + const mw = idempotency({ ttl: 10_000 }); + + const ctx1 = makeContext({ 'x-idempotency-key': 'key-a' }, 'POST'); + await mw(ctx1, async () => { + ctx1.response.writeHead(200); + ctx1.response.end('result-a'); + }); + + const 
ctx2 = makeContext({ 'x-idempotency-key': 'key-b' }, 'POST'); + let handlerCalled = false; + await mw(ctx2, async () => { + handlerCalled = true; + }); + + expect(handlerCalled).toBe(true); + }); + + test('expired key calls handler again', async () => { + const mw = idempotency({ ttl: 1000 }); + + const ctx1 = makeContext({ 'x-idempotency-key': 'key-exp' }, 'POST'); + await mw(ctx1, async () => { + ctx1.response.writeHead(200); + ctx1.response.end('first'); + }); + + // Advance past TTL + vi.advanceTimersByTime(1001); + + const ctx2 = makeContext({ 'x-idempotency-key': 'key-exp' }, 'POST'); + let handlerCalled = false; + await mw(ctx2, async () => { + handlerCalled = true; + }); + + expect(handlerCalled).toBe(true); + }); + + test('handles case-insensitive header name', async () => { + const mw = idempotency({ + ttl: 10_000, + headerName: 'X-Idempotency-Key' + }); + + const ctx1 = makeContext({ 'x-idempotency-key': 'key-ci' }, 'POST'); + await mw(ctx1, async () => { + ctx1.response.writeHead(200); + ctx1.response.end('ok'); + }); + + const ctx2 = makeContext({ 'x-idempotency-key': 'key-ci' }, 'POST'); + let handlerCalled = false; + await mw(ctx2, async () => { + handlerCalled = true; + }); + + expect(handlerCalled).toBe(false); + }); + + test('stores error responses too (non-2xx under 500)', async () => { + const mw = idempotency({ ttl: 10_000 }); + + const ctx1 = makeContext({ 'x-idempotency-key': 'key-err' }, 'POST'); + await mw(ctx1, async () => { + ctx1.response.writeHead(422); + ctx1.response.end('validation error'); + }); + + const ctx2 = makeContext({ 'x-idempotency-key': 'key-err' }, 'POST'); + let handlerCalled = false; + await mw(ctx2, async () => { + handlerCalled = true; + }); + + expect(handlerCalled).toBe(false); + }); + + test('custom skip predicate', async () => { + const mw = idempotency({ + ttl: 10_000, + skip: ctx => ctx.method === 'DELETE' + }); + + const ctx = makeContext({ 'x-idempotency-key': 'key-skip' }, 'DELETE'); + const next = 
vi.fn().mockResolvedValue(undefined); + await mw(ctx, next); + expect(next).toHaveBeenCalledOnce(); + }); +}); diff --git a/libs/server/tests/ResponseCache.test.ts b/libs/server/tests/ResponseCache.test.ts new file mode 100644 index 00000000..ed9dd812 --- /dev/null +++ b/libs/server/tests/ResponseCache.test.ts @@ -0,0 +1,278 @@ +import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'; +import { cacheResponse } from '../src/middlewares/ResponseCache.js'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function makeConstAccessor(value: unknown) { + return { + getValue: () => ({ success: true, value }) + }; +} + +function makeContext( + meta: { cacheTags?: any } = {}, + overrides: Partial<{ + method: string; + }> = {} +): any { + const items = new Map(); + items.set('__endpoint_meta', meta); + + const response: any = { + statusCode: 200, + _headers: {} as Record, + _body: null as any, + writeHead(statusCode: number, headers?: any) { + this.statusCode = statusCode; + if (headers) this._headers = headers; + return this; + }, + end(chunk?: any) { + this._body = chunk; + return this; + } + }; + + return { + method: overrides.method ?? 
'GET', + pathParams: {}, + queryParams: {}, + headers: {}, + items, + response, + responded: false + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('cacheResponse middleware', () => { + beforeEach(() => { + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + test('passes through when endpoint has no cache tags', async () => { + const mw = cacheResponse({ defaultTtl: 5000 }); + const ctx = makeContext({ cacheTags: [] }); + const next = vi.fn().mockResolvedValue(undefined); + await mw(ctx, next); + expect(next).toHaveBeenCalledOnce(); + }); + + test('serves cached response on second GET', async () => { + const mw = cacheResponse({ defaultTtl: 5000 }); + const meta = { + cacheTags: [ + { name: 'todo', properties: { id: makeConstAccessor(42) } } + ] + }; + + const ctx1 = makeContext(meta); + let handlerCalled1 = false; + await mw(ctx1, async () => { + handlerCalled1 = true; + ctx1.response.writeHead(200, { + 'content-type': 'application/json' + }); + ctx1.response.end(JSON.stringify({ name: 'test' })); + }); + expect(handlerCalled1).toBe(true); + + const ctx2 = makeContext(meta); + let handlerCalled2 = false; + await mw(ctx2, async () => { + handlerCalled2 = true; + }); + expect(handlerCalled2).toBe(false); + }); + + test('does not serve cached response after TTL expiry', async () => { + const mw = cacheResponse({ defaultTtl: 5000 }); + const meta = { + cacheTags: [ + { name: 'todo', properties: { id: makeConstAccessor(42) } } + ] + }; + + const ctx1 = makeContext(meta); + await mw(ctx1, async () => { + ctx1.response.writeHead(200); + ctx1.response.end('ok'); + }); + + vi.advanceTimersByTime(5001); + + const ctx2 = makeContext(meta); + let handlerCalled2 = false; + await mw(ctx2, async () => { + handlerCalled2 = true; + }); + expect(handlerCalled2).toBe(true); + }); + + test('different property values 
produce different cache keys', async () => { + const mw = cacheResponse({ defaultTtl: 5000 }); + + const ctx1 = makeContext({ + cacheTags: [ + { name: 'todo', properties: { id: makeConstAccessor(1) } } + ] + }); + let handlerCalled1 = false; + await mw(ctx1, async () => { + handlerCalled1 = true; + ctx1.response.writeHead(200); + ctx1.response.end('value-1'); + }); + expect(handlerCalled1).toBe(true); + + const ctx2 = makeContext({ + cacheTags: [ + { name: 'todo', properties: { id: makeConstAccessor(2) } } + ] + }); + let handlerCalled2 = false; + await mw(ctx2, async () => { + handlerCalled2 = true; + }); + expect(handlerCalled2).toBe(true); + }); + + test('simple tags (no properties) cache key is tag name', async () => { + const mw = cacheResponse({ defaultTtl: 5000 }); + const meta = { + cacheTags: [{ name: 'global', properties: {} }] + }; + + const ctx1 = makeContext(meta); + let handlerCalled1 = false; + await mw(ctx1, async () => { + handlerCalled1 = true; + ctx1.response.writeHead(200); + ctx1.response.end('ok'); + }); + expect(handlerCalled1).toBe(true); + + const ctx2 = makeContext(meta); + let handlerCalled2 = false; + await mw(ctx2, async () => { + handlerCalled2 = true; + }); + expect(handlerCalled2).toBe(false); + }); + + test('mutation invalidates cache entries by tag name prefix', async () => { + const mw = cacheResponse({ defaultTtl: 10_000 }); + const getMeta = { + cacheTags: [{ name: 'todo-list', properties: {} }] + }; + const mutMeta = { + cacheTags: [{ name: 'todo-list', properties: {} }] + }; + + // Populate cache with GET + const ctxGet = makeContext(getMeta); + await mw(ctxGet, async () => { + ctxGet.response.writeHead(200); + ctxGet.response.end('list'); + }); + + // Mutation + const ctxMut = makeContext(mutMeta, { method: 'POST' }); + await mw(ctxMut, async () => { + ctxMut.response.writeHead(201); + ctxMut.response.end('created'); + }); + + // GET after mutation — should miss cache + const ctxGetAfter = makeContext(getMeta); + let 
handlerCalled = false; + await mw(ctxGetAfter, async () => { + handlerCalled = true; + }); + expect(handlerCalled).toBe(true); + }); + + test('mutation does not invalidate on non-2xx status', async () => { + const mw = cacheResponse({ defaultTtl: 10_000 }); + const meta = { + cacheTags: [{ name: 'todo-list', properties: {} }] + }; + + // Populate cache with GET + const ctxGet = makeContext(meta); + await mw(ctxGet, async () => { + ctxGet.response.writeHead(200); + ctxGet.response.end('list'); + }); + + // Failed mutation (404) + const ctxMut = makeContext(meta, { method: 'DELETE' }); + ctxMut.response.statusCode = 404; + await mw(ctxMut, async () => { + // Handler sets 404 — no writeHead needed + }); + + // GET after failed mutation — should hit cache + const ctxGetAfter = makeContext(meta); + let handlerCalled = false; + await mw(ctxGetAfter, async () => { + handlerCalled = true; + }); + expect(handlerCalled).toBe(false); + }); + + test('uses per-tag TTL when configured', async () => { + const mw = cacheResponse({ + defaultTtl: 5000, + ttlByTag: { 'fast-tag': 1000 } + }); + const meta = { + cacheTags: [{ name: 'fast-tag', properties: {} }] + }; + + // Populate cache + const ctx1 = makeContext(meta); + await mw(ctx1, async () => { + ctx1.response.writeHead(200); + ctx1.response.end('ok'); + }); + + // Within per-tag TTL — should hit cache + vi.advanceTimersByTime(500); + const ctx2 = makeContext(meta); + let handlerCalled2 = false; + await mw(ctx2, async () => { + handlerCalled2 = true; + }); + expect(handlerCalled2).toBe(false); + + // Past per-tag TTL but within default — should miss cache + vi.advanceTimersByTime(501); + const ctx3 = makeContext(meta); + let handlerCalled3 = false; + await mw(ctx3, async () => { + handlerCalled3 = true; + }); + expect(handlerCalled3).toBe(true); + }); + + test('non-GET non-mutation passes through', async () => { + const mw = cacheResponse({ defaultTtl: 5000 }); + const meta = { + cacheTags: [{ name: 'tag', properties: {} }] + }; 
+ const ctx = makeContext(meta, { method: 'OPTIONS' }); + const next = vi.fn().mockResolvedValue(undefined); + await mw(ctx, next); + expect(next).toHaveBeenCalledOnce(); + }); +}); diff --git a/libs/server/tsup.config.ts b/libs/server/tsup.config.ts index 70e80e3d..bbdea003 100644 --- a/libs/server/tsup.config.ts +++ b/libs/server/tsup.config.ts @@ -9,5 +9,7 @@ export default defineConfig({ sourcemap: true, clean: true, target: 'es2022', - external: ['ws'] + // @fastify/busboy is CJS and uses require('node:stream') internally. + // Bundling it into ESM via tsup's shimmed require breaks at runtime. + external: ['ws', '@fastify/busboy'] }); diff --git a/opencode.json b/opencode.json new file mode 100644 index 00000000..cc37eda1 --- /dev/null +++ b/opencode.json @@ -0,0 +1,23 @@ +{ + "$schema": "https://opencode.ai/config.json", + "permission": { + "edit": "allow", + "bash": "ask", + "webfetch": "allow", + "doom_loop": "allow", + "external_directory": "ask", + "glob": "allow", + "grep": "allow", + "list": "allow", + "read": "allow", + "lsp": "allow", + "question": "allow", + "skill": "allow", + "task": "allow", + "write": "allow", + "todowrite": "allow", + "websearch": "allow" + }, + "instructions": ["MIGRATION_PLAN.md"], + "plugin": [ "openrtk" ] +} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 6f8b441c..5918a68a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -149,7 +149,7 @@ }, "libs/async": { "name": "@cleverbrush/async", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "devDependencies": { "@types/node": "^25.4.0" @@ -167,17 +167,17 @@ }, "libs/auth": { "name": "@cleverbrush/auth", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/schema": "^3.1.0" + "@cleverbrush/schema": "^4.0.0" } }, "libs/benchmarks": { "name": "@cleverbrush/benchmarks", - "version": "2.0.1", + "version": "2.0.2", "dependencies": { - "@cleverbrush/schema": "^3.0.0", + 
"@cleverbrush/schema": "^4.0.0", "joi": "^17.13.3", "yup": "^1.6.1", "zod": "^3.24.4" @@ -185,11 +185,11 @@ }, "libs/client": { "name": "@cleverbrush/client", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/schema": "^3.1.0", - "@cleverbrush/server": "^3.1.0" + "@cleverbrush/schema": "^4.0.0", + "@cleverbrush/server": "^4.0.0" }, "devDependencies": { "@tanstack/react-query": "^5.75.0", @@ -213,29 +213,29 @@ }, "libs/deep": { "name": "@cleverbrush/deep", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause" }, "libs/di": { "name": "@cleverbrush/di", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/schema": "^3.1.0" + "@cleverbrush/schema": "^4.0.0" } }, "libs/env": { "name": "@cleverbrush/env", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/deep": "^3.1.0" + "@cleverbrush/deep": "^4.0.0" }, "devDependencies": { "@types/node": "^25.4.0" }, "peerDependencies": { - "@cleverbrush/schema": "^3.0.1" + "@cleverbrush/schema": "^4.0.0" } }, "libs/env/node_modules/@types/node": { @@ -250,11 +250,11 @@ }, "libs/knex-clickhouse": { "name": "@cleverbrush/knex-clickhouse", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/async": "^3.1.0", - "@cleverbrush/deep": "^3.1.0", + "@cleverbrush/async": "^4.0.0", + "@cleverbrush/deep": "^4.0.0", "@clickhouse/client": "^1.18.2" }, "peerDependencies": { @@ -263,10 +263,10 @@ }, "libs/knex-schema": { "name": "@cleverbrush/knex-schema", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/schema": "^3.1.0" + "@cleverbrush/schema": "^4.0.0" }, "peerDependencies": { "knex": ">=3.1.0" @@ -274,19 +274,19 @@ }, "libs/log": { "name": "@cleverbrush/log", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - 
"@cleverbrush/async": "^3.1.0", - "@cleverbrush/schema": "^3.1.0" + "@cleverbrush/async": "^4.0.0", + "@cleverbrush/schema": "^4.0.0" }, "devDependencies": { "@types/node": "^25.4.0" }, "peerDependencies": { - "@cleverbrush/di": "^3.1.0", - "@cleverbrush/knex-clickhouse": "^3.1.0", - "@cleverbrush/server": "^3.1.0" + "@cleverbrush/di": "^4.0.0", + "@cleverbrush/knex-clickhouse": "^4.0.0", + "@cleverbrush/server": "^4.0.0" }, "peerDependenciesMeta": { "@cleverbrush/di": { @@ -312,19 +312,19 @@ }, "libs/mapper": { "name": "@cleverbrush/mapper", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/schema": "^3.1.0" + "@cleverbrush/schema": "^4.0.0" } }, "libs/orm": { "name": "@cleverbrush/orm", - "version": "0.1.0", + "version": "1.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/knex-schema": "^3.1.0", - "@cleverbrush/schema": "^3.1.0" + "@cleverbrush/knex-schema": "^4.0.0", + "@cleverbrush/schema": "^4.0.0" }, "peerDependencies": { "knex": ">=3.1.0" @@ -332,7 +332,7 @@ }, "libs/orm-cli": { "name": "@cleverbrush/orm-cli", - "version": "0.1.0", + "version": "1.0.0", "license": "BSD 3-Clause", "dependencies": { "@cleverbrush/knex-schema": "*", @@ -347,7 +347,7 @@ }, "libs/otel": { "name": "@cleverbrush/otel", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { "@opentelemetry/api": "^1.9.0", @@ -375,9 +375,9 @@ "knex": "^3.1.0" }, "peerDependencies": { - "@cleverbrush/di": "^3.1.0", - "@cleverbrush/log": "^3.1.0", - "@cleverbrush/server": "^3.1.0", + "@cleverbrush/di": "^4.0.0", + "@cleverbrush/log": "^4.0.0", + "@cleverbrush/server": "^4.0.0", "@opentelemetry/instrumentation-http": "^0.215.0", "@opentelemetry/instrumentation-runtime-node": "^0.28.0", "@opentelemetry/instrumentation-undici": "^0.25.0", @@ -419,10 +419,10 @@ }, "libs/react-form": { "name": "@cleverbrush/react-form", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", 
"dependencies": { - "@cleverbrush/schema": "^3.1.0" + "@cleverbrush/schema": "^4.0.0" }, "devDependencies": { "@types/react": "^19.0.0", @@ -434,10 +434,10 @@ }, "libs/scheduler": { "name": "@cleverbrush/scheduler", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/schema": "^3.1.0" + "@cleverbrush/schema": "^4.0.0" }, "devDependencies": { "@types/node": "^25.4.0" @@ -455,10 +455,10 @@ }, "libs/schema": { "name": "@cleverbrush/schema", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "devDependencies": { - "@cleverbrush/deep": "^3.1.0" + "@cleverbrush/deep": "^4.0.0" }, "peerDependencies": { "@standard-schema/spec": "^1.1.0" @@ -466,46 +466,48 @@ }, "libs/schema-json": { "name": "@cleverbrush/schema-json", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "peerDependencies": { - "@cleverbrush/schema": "^3.0.1", + "@cleverbrush/schema": "^4.0.0", "@standard-schema/spec": "^1.1.0" } }, "libs/server": { "name": "@cleverbrush/server", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 3-Clause", "dependencies": { - "@cleverbrush/auth": "^3.1.0", - "@cleverbrush/di": "^3.1.0", - "@cleverbrush/schema": "^3.1.0", + "@cleverbrush/auth": "^4.0.0", + "@cleverbrush/di": "^4.0.0", + "@cleverbrush/schema": "^4.0.0", + "@fastify/busboy": "^3.2.0", "ws": "^8.20.0" }, "devDependencies": { + "@types/busboy": "1.5.4", "@types/ws": "^8.18.1" } }, "libs/server-integration-tests": { "name": "@cleverbrush/server-integration-tests", - "version": "2.0.1", + "version": "2.0.2", "dependencies": { - "@cleverbrush/auth": "^3.0.0", - "@cleverbrush/di": "^3.0.0", - "@cleverbrush/schema": "^3.0.0", - "@cleverbrush/server": "^3.0.0" + "@cleverbrush/auth": "^4.0.0", + "@cleverbrush/di": "^4.0.0", + "@cleverbrush/schema": "^4.0.0", + "@cleverbrush/server": "^4.0.0" } }, "libs/server-openapi": { "name": "@cleverbrush/server-openapi", - "version": "3.1.0", + "version": "4.0.0", "license": "BSD 
3-Clause", "peerDependencies": { - "@cleverbrush/auth": "^3.0.1", - "@cleverbrush/schema": "^3.0.1", - "@cleverbrush/schema-json": "^3.0.1", - "@cleverbrush/server": "^3.0.1" + "@cleverbrush/auth": "^4.0.0", + "@cleverbrush/schema": "^4.0.0", + "@cleverbrush/schema-json": "^4.0.0", + "@cleverbrush/server": "^4.0.0" } }, "node_modules/@asamuzakjp/css-color": { @@ -2011,6 +2013,12 @@ } } }, + "node_modules/@fastify/busboy": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.2.0.tgz", + "integrity": "sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==", + "license": "MIT" + }, "node_modules/@floating-ui/core": { "version": "1.7.5", "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.5.tgz", @@ -5945,6 +5953,16 @@ "@babel/types": "^7.28.2" } }, + "node_modules/@types/busboy": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@types/busboy/-/busboy-1.5.4.tgz", + "integrity": "sha512-kG7WrUuAKK0NoyxfQHsVE6j1m01s6kMma64E+OZenQABMQyTJop1DumUWcLwAQ2JzpefU7PDYoRDKl8uZosFjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/chai": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", diff --git a/websites/docs/app/auth/page.tsx b/websites/docs/app/auth/page.tsx index 80df9dfb..00fed664 100644 --- a/websites/docs/app/auth/page.tsx +++ b/websites/docs/app/auth/page.tsx @@ -173,7 +173,7 @@ const scheme = cookieScheme({ const principal: Principal<{ sub: string; role: string }> = result.principal; principal.isAuthenticated; // true -principal.claims.sub; // 'user-1' +principal.value?.sub; // 'user-1' principal.hasRole('admin'); // true principal.hasClaim('role', 'admin'); // true @@ -272,7 +272,7 @@ server .useAuthorization() .handle( endpoint.get('/api/admin').authorize(UserPrincipal, 'admin'), - ({ principal }) => ({ greeting: \`Hello \${principal.claims.sub}\` }) + 
({ principal }) => ({ greeting: \`Hello \${principal.value?.sub}\` }) ); await server.listen(3000);`) diff --git a/websites/docs/app/client/[[...slug]]/page.tsx b/websites/docs/app/client/[[...slug]]/page.tsx index 77c72ebf..9e3e48ad 100644 --- a/websites/docs/app/client/[[...slug]]/page.tsx +++ b/websites/docs/app/client/[[...slug]]/page.tsx @@ -1,10 +1,12 @@ import { redirect } from 'next/navigation'; import BatchingSection from '../sections/batching'; import CacheSection from '../sections/cache'; +import CacheTagsSection from '../sections/cacheTags'; import DedupeSection from '../sections/dedupe'; import ErrorHandlingSection from '../sections/error-handling'; import GettingStartedSection from '../sections/getting-started'; import HooksSection from '../sections/hooks'; +import IdempotencySection from '../sections/idempotency'; import { CLIENT_SECTIONS } from '../sections/index'; import MiddlewareSection from '../sections/middleware'; import PerCallOverridesSection from '../sections/per-call-overrides'; @@ -25,7 +27,9 @@ const SECTION_COMPONENTS: Record = { retry: RetrySection, timeout: TimeoutSection, dedupe: DedupeSection, + idempotency: IdempotencySection, cache: CacheSection, + 'cache-tags': CacheTagsSection, batching: BatchingSection, 'error-handling': ErrorHandlingSection, 'per-call-overrides': PerCallOverridesSection, diff --git a/websites/docs/app/client/sections/cacheTags.tsx b/websites/docs/app/client/sections/cacheTags.tsx new file mode 100644 index 00000000..846e81a6 --- /dev/null +++ b/websites/docs/app/client/sections/cacheTags.tsx @@ -0,0 +1,157 @@ +/** biome-ignore-all lint/security/noDangerouslySetInnerHtml: it is intentional */ +import { highlightTS } from '@cleverbrush/website-shared/lib/highlight'; + +export default function CacheTagsSection() { + return ( + <> +
+

Cache-Tag Middleware

+

+ Tag-based HTTP caching with automatic invalidation driven by + endpoint annotations +

+
+ +
+

Basic Usage

+
+                    
+                
+
+ +
+

Server Integration

+

+ Cache tags are declared on the server-side endpoint + definition via .clearsCacheTag(). Tags flow + through the contract metadata to the client automatically. +

+
+                     ({
+        page: p.query.page,
+        limit: p.query.limit
+    }))
+    .returns(array(TodoSchema));
+
+const UpdateTodo = todosResource
+    .patch(ById)
+    .body(UpdateTodoBodySchema)
+    .clearsCacheTag('todo-list')            // invalidates list
+    .clearsCacheTag('todo', p => ({        // invalidates entity
+        id: p.params.id
+    }))
+    .returns(TodoSchema);
+`)
+                        }}
+                    />
+                
+
+ +
+

How It Works

+
    +
  • + On GET: Computes cache key from the + endpoint's cache tag names and property selectors. + Serves cached response if within TTL. +
  • +
  • + On mutation (POST/PUT/PATCH/DELETE):{' '} + Invalidates all entries whose key starts with any of the + endpoint's tag names — no manual callbacks needed. +
  • +
  • + Property-based keys: Tags with + properties differentiate cache entries by request + params, query, body, or headers (e.g. different pages + get different cache keys). +
  • +
  • + TanStack Query bridge: When used with{' '} + @cleverbrush/client/react,{' '} + useMutation hooks automatically invalidate + TanStack Query cache for the affected group. +
  • +
+
+ +
+

Options

+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
OptionTypeDefault
+ defaultTtl + + number + + 0 +
+ ttlByTag + + + {'{'} [tagName: string]: number {'}'} + + + {'{ }'} +
+ condition + + (response) => boolean + + response.ok +
+
+

+ Set defaultTtl: 0 for invalidation-only mode: + GET responses are not cached, but mutations still invalidate + entries created by other endpoints. +

+
+ + ); +} diff --git a/websites/docs/app/client/sections/idempotency.tsx b/websites/docs/app/client/sections/idempotency.tsx new file mode 100644 index 00000000..b99987f4 --- /dev/null +++ b/websites/docs/app/client/sections/idempotency.tsx @@ -0,0 +1,181 @@ +/** biome-ignore-all lint/security/noDangerouslySetInnerHtml: it is intentional */ +import { highlightTS } from '@cleverbrush/website-shared/lib/highlight'; + +export default function IdempotencySection() { + return ( + <> +
+

Idempotency Middleware

+

+ Deduplicate replays of mutating requests via idempotency + keys +

+
+ +
+

Basic Usage

+
+                    
+                
+
+ +
+

Server Integration

+

+ The server-side idempotency() middleware reads + the header, stores the response, and replays it for + duplicate keys. +

+
+                    
+                
+
+ +
+

How It Works

+
    +
  • + On mutation: Client auto-generates a + UUID v4 as X-Idempotency-Key header. +
  • +
  • + On server: First request with a key + runs the handler and stores the response. Replays return + the stored response immediately. +
  • +
  • + On retry: The key is preserved — + retried requests are treated as replays, not new + operations. +
  • +
+
+ +
+

Options (Client)

+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
OptionTypeDefault
+ headerName + + string + + "X-Idempotency-Key" +
+ keyGenerator + + (url, init) => string + + uuid v4 +
+ condition + + (url, init) => boolean + + mutations only +
+
+
+ +
+

Options (Server)

+
+ + + + + + + + + + + + + + + + + + + + + + + + + +
OptionTypeDefault
+ ttl + + number + + 86400000 (24h) +
+ headerName + + string + + "x-idempotency-key" +
+ skip + + (ctx) => boolean + + non-mutating requests +
+
+
+ + ); +} diff --git a/websites/docs/app/client/sections/index.ts b/websites/docs/app/client/sections/index.ts index c1023adc..68a8d423 100644 --- a/websites/docs/app/client/sections/index.ts +++ b/websites/docs/app/client/sections/index.ts @@ -15,7 +15,15 @@ export const SECTION_GROUPS = [ }, { label: 'Resilience', - slugs: ['retry', 'timeout', 'dedupe', 'cache', 'batching'] + slugs: [ + 'retry', + 'timeout', + 'dedupe', + 'idempotency', + 'cache', + 'cache-tags', + 'batching' + ] }, { label: 'Advanced', @@ -34,7 +42,17 @@ export const CLIENT_SECTIONS: ClientSection[] = [ { slug: 'retry', title: 'Retry', group: 'Resilience' }, { slug: 'timeout', title: 'Timeout', group: 'Resilience' }, { slug: 'dedupe', title: 'Deduplication', group: 'Resilience' }, + { + slug: 'idempotency', + title: 'Idempotency', + group: 'Resilience' + }, { slug: 'cache', title: 'Cache', group: 'Resilience' }, + { + slug: 'cache-tags', + title: 'Cache Tags', + group: 'Resilience' + }, { slug: 'batching', title: 'Batching', group: 'Resilience' }, { slug: 'error-handling', diff --git a/websites/docs/app/getting-started/page.tsx b/websites/docs/app/getting-started/page.tsx index b1d17ca7..325c3f51 100644 --- a/websites/docs/app/getting-started/page.tsx +++ b/websites/docs/app/getting-started/page.tsx @@ -59,7 +59,7 @@ export default function GettingStartedPage() {
                         
                     
                     
                      $t\`Todo #\${t => t.TodoId} "\${t => t.Title}" created by \${t => t.UserId}\`
+);
 
 const logger = createLogger({ sinks: [consoleSink()] });
 
@@ -176,12 +179,12 @@ logger.info(TodoCreated, { TodoId: 1, Title: 'Buy milk', UserId: 'u-42' });
                 
                     
                 
diff --git a/websites/docs/app/orm/page.tsx b/websites/docs/app/orm/page.tsx index 65168482..4f833f0d 100644 --- a/websites/docs/app/orm/page.tsx +++ b/websites/docs/app/orm/page.tsx @@ -149,10 +149,10 @@ const updated = await db.users.save({ id: 1, email: 'alice@example.com', name: ' }).hasTableName('todos'); const TodoEntity = defineEntity(TodoSchema) - .belongsTo(t => t.author, 'userId'); + .belongsTo(t => t.author, l => l.userId, r => r.id); const UserEntity = defineEntity(UserSchema) - .hasMany(t => t.todos, TodoEntity, 'userId'); + .hasMany(t => t.todos, l => l.id, r => r.userId); const db = createDb(knex, { users: UserEntity, todos: TodoEntity }); diff --git a/websites/docs/app/scheduler/page.tsx b/websites/docs/app/scheduler/page.tsx index 8a93a028..2132fe3c 100644 --- a/websites/docs/app/scheduler/page.tsx +++ b/websites/docs/app/scheduler/page.tsx @@ -248,7 +248,7 @@ scheduler.on('job:message', ({ jobId, value }) => { { - '{ every: "month", interval: 1, dayOfMonth: 1, hour: 0, minute: 0 }' + '{ every: "month", interval: 1, day: 1, hour: 0, minute: 0 }' } @@ -261,7 +261,7 @@ scheduler.on('job:message', ({ jobId, value }) => { { - '{ every: "year", interval: 1, month: 1, dayOfMonth: 1, hour: 0, minute: 0 }' + '{ every: "year", interval: 1, month: 1, day: 1, hour: 0, minute: 0 }' } diff --git a/websites/docs/app/server-openapi/page.tsx b/websites/docs/app/server-openapi/page.tsx index d668b914..4c1acd7b 100644 --- a/websites/docs/app/server-openapi/page.tsx +++ b/websites/docs/app/server-openapi/page.tsx @@ -216,7 +216,7 @@ await writeOpenApiSpec({ dangerouslySetInnerHTML={{ __html: highlightTS( `import { object, string, number, array } from '@cleverbrush/schema'; -import { endpoint, route } from '@cleverbrush/server'; +import { createServer, endpoint, route } from '@cleverbrush/server'; import { generateOpenApiSpec } from '@cleverbrush/server-openapi'; // Export as a constant — reuse the same reference everywhere @@ -230,8 +230,12 @@ const GetUser = 
endpoint .returns(UserSchema); const ListUsers = endpoint.get('/api/users').returns(array(UserSchema)); +const server = createServer() + .handle(GetUser, ({ params }) => ({ id: params.id, name: 'Alice' })) + .handle(ListUsers, () => []); + const spec = generateOpenApiSpec({ - registrations: [GetUser.registration, ListUsers.registration], + server, info: { title: 'My API', version: '1.0.0' }, }); diff --git a/websites/docs/app/server/page.tsx b/websites/docs/app/server/page.tsx index 717c12fc..23c5b1b4 100644 --- a/websites/docs/app/server/page.tsx +++ b/websites/docs/app/server/page.tsx @@ -212,6 +212,107 @@ return ActionResult.status(202);`)
+ {/* ── File Upload ────────────────────────────────── */} +
+

File Upload

+

+ Accept file uploads via multipart/form-data{' '} + by chaining .upload() on an endpoint. File + fields are received as FilePart objects on + the handler context's files property; + non-file form fields are validated against the body + schema and available via body. +

+
+                         {
+    const avatar: FilePart = files['avatar'];
+    // avatar.filename, avatar.mimeType, avatar.buffer, avatar.size
+    return ActionResult.created({ name: avatar.filename });
+});`)
+                            }}
+                        />
+                    
+ +

Options

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
OptionTypeDefaultDescription
+ maxFileSize + + number + 10 MBMaximum file size per file in bytes
+ allowedMimeTypes + + string[] + all + MIME type allowlist (supports{' '} + image/* glob) +
+ maxFileCount + + number + 10Maximum number of files per request
+
+ +

FilePart type

+
+                        
+                    
+
+ {/* ── HTTP Errors ──────────────────────────────────── */}

HTTP Errors

diff --git a/websites/docs/public/api-docs/index.html b/websites/docs/public/api-docs/index.html index ef1ea111..25987478 100644 --- a/websites/docs/public/api-docs/index.html +++ b/websites/docs/public/api-docs/index.html @@ -40,6 +40,8 @@

Previous Versions

+ + @@ -49,6 +51,7 @@

Previous Versions