From 57a0fb455eed1d94df693d6dbd8ef606a9892cf4 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 12 Apr 2026 05:24:28 +0000 Subject: [PATCH 1/2] feat: add queryTimeline endpoint for paginated date-range memory browsing Adds a dedicated timeline query that discovers which dates have content and returns connected nodes grouped by day, with pagination and optional nodeType filtering. This avoids the queryGraph 200-node cap and supports infinite scroll in Petals' timeline view. https://claude.ai/code/session_01QnhM7Q1mZf97d5Uj28kcwy --- src/lib/query/timeline.ts | 185 +++++++++++++++++++++++++ src/lib/schemas/query-timeline.test.ts | 112 +++++++++++++++ src/lib/schemas/query-timeline.ts | 45 ++++++ src/routes/query/timeline.ts | 11 ++ src/sdk/memory-client.ts | 16 +++ 5 files changed, 369 insertions(+) create mode 100644 src/lib/query/timeline.ts create mode 100644 src/lib/schemas/query-timeline.test.ts create mode 100644 src/lib/schemas/query-timeline.ts create mode 100644 src/routes/query/timeline.ts diff --git a/src/lib/query/timeline.ts b/src/lib/query/timeline.ts new file mode 100644 index 0000000..77d5cd6 --- /dev/null +++ b/src/lib/query/timeline.ts @@ -0,0 +1,185 @@ +import { and, eq, or, gte, lte, desc, inArray, sql } from "drizzle-orm"; +import { edges, nodeMetadata, nodes } from "~/db/schema"; +import { NodeTypeEnum } from "~/types/graph"; +import type { TypeId } from "~/types/typeid"; +import { useDatabase } from "~/utils/db"; +import { + QueryTimelineRequest, + QueryTimelineResponse, +} from "../schemas/query-timeline"; +import { format, subDays } from "date-fns"; + +/** + * Query a timeline of memories grouped by date. + * + * Finds all Temporal (day) nodes within a date range for a user, + * then fetches connected nodes for each day. Supports pagination + * via limit/offset on days, and optional nodeType filtering on + * the connected nodes. 
+ */ +export async function queryTimeline( + params: QueryTimelineRequest, +): Promise { + const { + userId, + limit = 30, + offset = 0, + nodeTypes, + } = params; + + const today = format(new Date(), "yyyy-MM-dd"); + const ninetyDaysAgo = format(subDays(new Date(), 90), "yyyy-MM-dd"); + const startDate = params.startDate ?? today; + const endDate = params.endDate ?? ninetyDaysAgo; + + const db = await useDatabase(); + + // Step 1: Find all Temporal day nodes in the date range. + // Temporal nodes have their label set to "YYYY-MM-DD". + // We sort descending (most recent first) so pagination goes backward in time. + const allDayNodes = await db + .select({ + id: nodes.id, + label: nodeMetadata.label, + }) + .from(nodes) + .innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id)) + .where( + and( + eq(nodes.userId, userId), + eq(nodes.nodeType, NodeTypeEnum.enum.Temporal), + // label is the date string: compare lexicographically + lte(nodeMetadata.label, startDate), + gte(nodeMetadata.label, endDate), + ), + ) + .orderBy(desc(nodeMetadata.label)); + + const totalDays = allDayNodes.length; + + // Step 2: Apply pagination to the day nodes. + const paginatedDayNodes = allDayNodes.slice(offset, offset + limit); + + if (paginatedDayNodes.length === 0) { + return { + days: [], + totalDays, + hasMore: offset + limit < totalDays, + }; + } + + const dayNodeIds = paginatedDayNodes.map((d) => d.id); + + // Step 3: Batch-fetch all connected nodes for the paginated day nodes. + // This avoids N+1 queries — one query gets everything. 
+ const connectedRows = await db + .select({ + dayNodeId: sql>` + CASE + WHEN ${inArray(edges.sourceNodeId, dayNodeIds)} + THEN ${edges.sourceNodeId} + ELSE ${edges.targetNodeId} + END + `.as("day_node_id"), + id: nodes.id, + nodeType: nodes.nodeType, + label: nodeMetadata.label, + description: nodeMetadata.description, + edgeType: edges.edgeType, + createdAt: nodes.createdAt, + }) + .from(edges) + .innerJoin( + nodes, + eq( + nodes.id, + sql`CASE + WHEN ${inArray(edges.sourceNodeId, dayNodeIds)} + THEN ${edges.targetNodeId} + ELSE ${edges.sourceNodeId} + END`, + ), + ) + .innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id)) + .where( + and( + eq(edges.userId, userId), + eq(nodes.userId, userId), + or( + inArray(edges.sourceNodeId, dayNodeIds), + inArray(edges.targetNodeId, dayNodeIds), + ), + // Exclude the day nodes themselves from results + sql`CASE + WHEN ${inArray(edges.sourceNodeId, dayNodeIds)} + THEN ${edges.targetNodeId} + ELSE ${edges.sourceNodeId} + END NOT IN (${sql.join(dayNodeIds.map((id) => sql`${id}`), sql`, `)})`, + ), + ); + + // Step 4: Group connected nodes by day node and build response. + const nodesByDay = new Map< + TypeId<"node">, + typeof connectedRows + >(); + for (const row of connectedRows) { + const dayId = row.dayNodeId; + const existing = nodesByDay.get(dayId); + if (existing) { + existing.push(row); + } else { + nodesByDay.set(dayId, [row]); + } + } + + const days = paginatedDayNodes.map((dayNode) => { + const dayId = dayNode.id; + const allConnected = nodesByDay.get(dayId) ?? 
[]; + + // Deduplicate by node id (a node can be connected via multiple edges) + const uniqueMap = new Map(); + for (const row of allConnected) { + if (!uniqueMap.has(row.id)) { + uniqueMap.set(row.id, row); + } + } + const totalNodeCount = uniqueMap.size; + + // Apply nodeTypes filter if requested (after counting total) + let filteredNodes = allConnected; + if (nodeTypes && nodeTypes.length > 0) { + filteredNodes = allConnected.filter((r) => + nodeTypes.includes(r.nodeType as typeof nodeTypes[number]), + ); + } + + // Deduplicate the filtered set + const filteredUniqueMap = new Map(); + for (const row of filteredNodes) { + if (!filteredUniqueMap.has(row.id)) { + filteredUniqueMap.set(row.id, row); + } + } + + return { + date: dayNode.label!, + temporalNodeId: dayId, + nodeCount: totalNodeCount, + nodes: Array.from(filteredUniqueMap.values()).map((r) => ({ + id: r.id, + label: r.label, + description: r.description, + nodeType: r.nodeType, + edgeType: r.edgeType, + createdAt: r.createdAt, + })), + }; + }); + + return { + days, + totalDays, + hasMore: offset + limit < totalDays, + }; +} diff --git a/src/lib/schemas/query-timeline.test.ts b/src/lib/schemas/query-timeline.test.ts new file mode 100644 index 0000000..d718b53 --- /dev/null +++ b/src/lib/schemas/query-timeline.test.ts @@ -0,0 +1,112 @@ +import { describe, it, expect } from "vitest"; +import { + queryTimelineRequestSchema, + queryTimelineResponseSchema, +} from "./query-timeline"; + +describe("queryTimelineRequestSchema", () => { + it("accepts minimal request with only userId", () => { + const parsed = queryTimelineRequestSchema.parse({ userId: "user_123" }); + expect(parsed.userId).toBe("user_123"); + expect(parsed.limit).toBe(30); + expect(parsed.offset).toBe(0); + expect(parsed.startDate).toBeUndefined(); + expect(parsed.endDate).toBeUndefined(); + expect(parsed.nodeTypes).toBeUndefined(); + }); + + it("accepts full request with all fields", () => { + const parsed = queryTimelineRequestSchema.parse({ + 
userId: "user_123", + startDate: "2025-01-15", + endDate: "2024-10-15", + limit: 10, + offset: 5, + nodeTypes: ["Person", "Event"], + }); + expect(parsed.startDate).toBe("2025-01-15"); + expect(parsed.endDate).toBe("2024-10-15"); + expect(parsed.limit).toBe(10); + expect(parsed.offset).toBe(5); + expect(parsed.nodeTypes).toEqual(["Person", "Event"]); + }); + + it("rejects invalid date format", () => { + expect(() => + queryTimelineRequestSchema.parse({ + userId: "user_123", + startDate: "01-15-2025", + }), + ).toThrow(); + }); + + it("rejects invalid nodeType", () => { + expect(() => + queryTimelineRequestSchema.parse({ + userId: "user_123", + nodeTypes: ["InvalidType"], + }), + ).toThrow(); + }); + + it("rejects limit over 100", () => { + expect(() => + queryTimelineRequestSchema.parse({ + userId: "user_123", + limit: 101, + }), + ).toThrow(); + }); + + it("rejects negative offset", () => { + expect(() => + queryTimelineRequestSchema.parse({ + userId: "user_123", + offset: -1, + }), + ).toThrow(); + }); +}); + +describe("queryTimelineResponseSchema", () => { + it("validates a complete response", () => { + const response = { + days: [ + { + date: "2025-01-15", + temporalNodeId: "node_01234567890123456789abcdef", + nodeCount: 5, + nodes: [ + { + id: "node_abcdefghijklmnopqrstuvwxyz", + label: "Meeting with team", + description: "Weekly standup meeting", + nodeType: "Event", + edgeType: "OCCURRED_ON", + createdAt: "2025-01-15T10:00:00.000Z", + }, + ], + }, + ], + totalDays: 10, + hasMore: true, + }; + const parsed = queryTimelineResponseSchema.parse(response); + expect(parsed.days).toHaveLength(1); + expect(parsed.days[0]!.nodeCount).toBe(5); + expect(parsed.days[0]!.nodes[0]!.createdAt).toBeInstanceOf(Date); + expect(parsed.totalDays).toBe(10); + expect(parsed.hasMore).toBe(true); + }); + + it("validates empty response", () => { + const parsed = queryTimelineResponseSchema.parse({ + days: [], + totalDays: 0, + hasMore: false, + }); + expect(parsed.days).toEqual([]); 
+    expect(parsed.totalDays).toBe(0);
+    expect(parsed.hasMore).toBe(false);
+  });
+});
diff --git a/src/lib/schemas/query-timeline.ts b/src/lib/schemas/query-timeline.ts
new file mode 100644
index 0000000..0d354c4
--- /dev/null
+++ b/src/lib/schemas/query-timeline.ts
@@ -0,0 +1,45 @@
+import { EdgeTypeEnum, NodeTypeEnum } from "../../types/graph.js";
+import { typeIdSchema } from "../../types/typeid.js";
+import { z } from "zod";
+
+const dateRegex = /^\d{4}-\d{2}-\d{2}$/;
+
+export const queryTimelineRequestSchema = z.object({
+  userId: z.string(),
+  startDate: z
+    .string()
+    .regex(dateRegex, "startDate must be in YYYY-MM-DD format")
+    .optional(),
+  endDate: z
+    .string()
+    .regex(dateRegex, "endDate must be in YYYY-MM-DD format")
+    .optional(),
+  limit: z.number().int().min(1).max(100).default(30),
+  offset: z.number().int().min(0).default(0),
+  nodeTypes: z.array(NodeTypeEnum).optional(),
+});
+
+export const queryTimelineNodeSchema = z.object({
+  id: typeIdSchema("node"),
+  label: z.string().nullable(),
+  description: z.string().nullable(),
+  nodeType: NodeTypeEnum,
+  edgeType: EdgeTypeEnum,
+  createdAt: z.coerce.date(),
+});
+
+export const queryTimelineDaySchema = z.object({
+  date: z.string(),
+  temporalNodeId: typeIdSchema("node"),
+  nodeCount: z.number(),
+  nodes: z.array(queryTimelineNodeSchema),
+});
+
+export const queryTimelineResponseSchema = z.object({
+  days: z.array(queryTimelineDaySchema),
+  totalDays: z.number(),
+  hasMore: z.boolean(),
+});
+
+export type QueryTimelineRequest = z.infer<typeof queryTimelineRequestSchema>;
+export type QueryTimelineResponse = z.infer<typeof queryTimelineResponseSchema>;
diff --git a/src/routes/query/timeline.ts b/src/routes/query/timeline.ts
new file mode 100644
index 0000000..e349209
--- /dev/null
+++ b/src/routes/query/timeline.ts
@@ -0,0 +1,11 @@
+import { defineEventHandler, readBody } from "h3";
+import { queryTimeline } from "~/lib/query/timeline";
+import {
+  queryTimelineRequestSchema,
+  queryTimelineResponseSchema,
+} from "~/lib/schemas/query-timeline";
+
+export default
defineEventHandler(async (event) => { + const params = queryTimelineRequestSchema.parse(await readBody(event)); + return queryTimelineResponseSchema.parse(await queryTimeline(params)); +}); diff --git a/src/sdk/memory-client.ts b/src/sdk/memory-client.ts index bce60fa..ce03d66 100644 --- a/src/sdk/memory-client.ts +++ b/src/sdk/memory-client.ts @@ -100,6 +100,11 @@ import { scratchpadResponseSchema, scratchpadEditResponseSchema, } from "../lib/schemas/scratchpad.js"; +import { + QueryTimelineRequest, + QueryTimelineResponse, + queryTimelineResponseSchema, +} from "../lib/schemas/query-timeline.js"; import { SummarizeRequest, SummarizeResponse, @@ -227,6 +232,17 @@ export class MemoryClient { ); } + async queryTimeline( + payload: QueryTimelineRequest, + ): Promise { + return this._fetch( + "POST", + "/query/timeline", + queryTimelineResponseSchema, + payload, + ); + } + async summarize(payload: SummarizeRequest): Promise { return this._fetch("POST", "/summarize", summarizeResponseSchema, payload); } From 06d4193764aed9c3109410d5bfb5cdc2b36732cc Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 12 Apr 2026 05:59:49 +0000 Subject: [PATCH 2/2] fix: address CI formatting and review feedback - Fix prettier formatting issues that caused CI failure - Move day-node pagination to DB level (LIMIT/OFFSET + COUNT query) instead of fetching all rows and slicing in memory - Normalize date range so query works correctly regardless of which param is chronologically first https://claude.ai/code/session_01QnhM7Q1mZf97d5Uj28kcwy --- src/lib/query/timeline.ts | 86 +++++++++++++++----------- src/lib/schemas/query-timeline.test.ts | 2 +- src/sdk/memory-client.ts | 10 +-- 3 files changed, 55 insertions(+), 43 deletions(-) diff --git a/src/lib/query/timeline.ts b/src/lib/query/timeline.ts index 77d5cd6..45c318e 100644 --- a/src/lib/query/timeline.ts +++ b/src/lib/query/timeline.ts @@ -1,13 +1,13 @@ -import { and, eq, or, gte, lte, desc, inArray, sql } from "drizzle-orm"; -import { edges, 
nodeMetadata, nodes } from "~/db/schema"; -import { NodeTypeEnum } from "~/types/graph"; -import type { TypeId } from "~/types/typeid"; -import { useDatabase } from "~/utils/db"; import { QueryTimelineRequest, QueryTimelineResponse, } from "../schemas/query-timeline"; import { format, subDays } from "date-fns"; +import { and, eq, or, gte, lte, desc, inArray, sql, count } from "drizzle-orm"; +import { edges, nodeMetadata, nodes } from "~/db/schema"; +import { NodeTypeEnum } from "~/types/graph"; +import type { TypeId } from "~/types/typeid"; +import { useDatabase } from "~/utils/db"; /** * Query a timeline of memories grouped by date. @@ -20,51 +20,63 @@ import { format, subDays } from "date-fns"; export async function queryTimeline( params: QueryTimelineRequest, ): Promise { - const { - userId, - limit = 30, - offset = 0, - nodeTypes, - } = params; + const { userId, limit = 30, offset = 0, nodeTypes } = params; const today = format(new Date(), "yyyy-MM-dd"); const ninetyDaysAgo = format(subDays(new Date(), 90), "yyyy-MM-dd"); const startDate = params.startDate ?? today; const endDate = params.endDate ?? ninetyDaysAgo; + // Normalize so rangeMin <= rangeMax regardless of param ordering + const rangeMin = startDate < endDate ? startDate : endDate; + const rangeMax = startDate < endDate ? endDate : startDate; + const db = await useDatabase(); - // Step 1: Find all Temporal day nodes in the date range. - // Temporal nodes have their label set to "YYYY-MM-DD". - // We sort descending (most recent first) so pagination goes backward in time. - const allDayNodes = await db + // Shared WHERE clause for day-node lookups + const dayNodeWhere = and( + eq(nodes.userId, userId), + eq(nodes.nodeType, NodeTypeEnum.enum.Temporal), + gte(nodeMetadata.label, rangeMin), + lte(nodeMetadata.label, rangeMax), + ); + + // Step 1: Count total days with data in the range (DB-level). 
+ const [countResult] = await db + .select({ total: count() }) + .from(nodes) + .innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id)) + .where(dayNodeWhere); + + const totalDays = countResult?.total ?? 0; + + if (totalDays === 0 || offset >= totalDays) { + return { + days: [], + totalDays, + hasMore: false, + }; + } + + // Step 2: Fetch the paginated day nodes (DB-level limit/offset). + // Most recent first so pagination scrolls backward in time. + const paginatedDayNodes = await db .select({ id: nodes.id, label: nodeMetadata.label, }) .from(nodes) .innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id)) - .where( - and( - eq(nodes.userId, userId), - eq(nodes.nodeType, NodeTypeEnum.enum.Temporal), - // label is the date string: compare lexicographically - lte(nodeMetadata.label, startDate), - gte(nodeMetadata.label, endDate), - ), - ) - .orderBy(desc(nodeMetadata.label)); - - const totalDays = allDayNodes.length; - - // Step 2: Apply pagination to the day nodes. - const paginatedDayNodes = allDayNodes.slice(offset, offset + limit); + .where(dayNodeWhere) + .orderBy(desc(nodeMetadata.label)) + .limit(limit) + .offset(offset); if (paginatedDayNodes.length === 0) { return { days: [], totalDays, - hasMore: offset + limit < totalDays, + hasMore: false, }; } @@ -114,15 +126,15 @@ export async function queryTimeline( WHEN ${inArray(edges.sourceNodeId, dayNodeIds)} THEN ${edges.targetNodeId} ELSE ${edges.sourceNodeId} - END NOT IN (${sql.join(dayNodeIds.map((id) => sql`${id}`), sql`, `)})`, + END NOT IN (${sql.join( + dayNodeIds.map((id) => sql`${id}`), + sql`, `, + )})`, ), ); // Step 4: Group connected nodes by day node and build response. 
- const nodesByDay = new Map< - TypeId<"node">, - typeof connectedRows - >(); + const nodesByDay = new Map, typeof connectedRows>(); for (const row of connectedRows) { const dayId = row.dayNodeId; const existing = nodesByDay.get(dayId); @@ -150,7 +162,7 @@ export async function queryTimeline( let filteredNodes = allConnected; if (nodeTypes && nodeTypes.length > 0) { filteredNodes = allConnected.filter((r) => - nodeTypes.includes(r.nodeType as typeof nodeTypes[number]), + nodeTypes.includes(r.nodeType as (typeof nodeTypes)[number]), ); } diff --git a/src/lib/schemas/query-timeline.test.ts b/src/lib/schemas/query-timeline.test.ts index d718b53..3d88b9a 100644 --- a/src/lib/schemas/query-timeline.test.ts +++ b/src/lib/schemas/query-timeline.test.ts @@ -1,8 +1,8 @@ -import { describe, it, expect } from "vitest"; import { queryTimelineRequestSchema, queryTimelineResponseSchema, } from "./query-timeline"; +import { describe, it, expect } from "vitest"; describe("queryTimelineRequestSchema", () => { it("accepts minimal request with only userId", () => { diff --git a/src/sdk/memory-client.ts b/src/sdk/memory-client.ts index ce03d66..bfbce07 100644 --- a/src/sdk/memory-client.ts +++ b/src/sdk/memory-client.ts @@ -91,6 +91,11 @@ import { QuerySearchResponse, querySearchResponseSchema, } from "../lib/schemas/query-search.js"; +import { + QueryTimelineRequest, + QueryTimelineResponse, + queryTimelineResponseSchema, +} from "../lib/schemas/query-timeline.js"; import { ScratchpadReadRequest, ScratchpadWriteRequest, @@ -100,11 +105,6 @@ import { scratchpadResponseSchema, scratchpadEditResponseSchema, } from "../lib/schemas/scratchpad.js"; -import { - QueryTimelineRequest, - QueryTimelineResponse, - queryTimelineResponseSchema, -} from "../lib/schemas/query-timeline.js"; import { SummarizeRequest, SummarizeResponse,