-
Notifications
You must be signed in to change notification settings - Fork 1
Add timeline query feature for memory retrieval by date #30
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,197 @@ | ||
| import { | ||
| QueryTimelineRequest, | ||
| QueryTimelineResponse, | ||
| } from "../schemas/query-timeline"; | ||
| import { format, subDays } from "date-fns"; | ||
| import { and, eq, or, gte, lte, desc, inArray, sql, count } from "drizzle-orm"; | ||
| import { edges, nodeMetadata, nodes } from "~/db/schema"; | ||
| import { NodeTypeEnum } from "~/types/graph"; | ||
| import type { TypeId } from "~/types/typeid"; | ||
| import { useDatabase } from "~/utils/db"; | ||
|
|
||
| /** | ||
| * Query a timeline of memories grouped by date. | ||
| * | ||
| * Finds all Temporal (day) nodes within a date range for a user, | ||
| * then fetches connected nodes for each day. Supports pagination | ||
| * via limit/offset on days, and optional nodeType filtering on | ||
| * the connected nodes. | ||
| */ | ||
| export async function queryTimeline( | ||
| params: QueryTimelineRequest, | ||
| ): Promise<QueryTimelineResponse> { | ||
| const { userId, limit = 30, offset = 0, nodeTypes } = params; | ||
|
|
||
| const today = format(new Date(), "yyyy-MM-dd"); | ||
| const ninetyDaysAgo = format(subDays(new Date(), 90), "yyyy-MM-dd"); | ||
| const startDate = params.startDate ?? today; | ||
| const endDate = params.endDate ?? ninetyDaysAgo; | ||
|
|
||
| // Normalize so rangeMin <= rangeMax regardless of param ordering | ||
| const rangeMin = startDate < endDate ? startDate : endDate; | ||
| const rangeMax = startDate < endDate ? endDate : startDate; | ||
|
|
||
| const db = await useDatabase(); | ||
|
|
||
| // Shared WHERE clause for day-node lookups | ||
| const dayNodeWhere = and( | ||
| eq(nodes.userId, userId), | ||
| eq(nodes.nodeType, NodeTypeEnum.enum.Temporal), | ||
| gte(nodeMetadata.label, rangeMin), | ||
| lte(nodeMetadata.label, rangeMax), | ||
| ); | ||
|
|
||
| // Step 1: Count total days with data in the range (DB-level). | ||
| const [countResult] = await db | ||
| .select({ total: count() }) | ||
| .from(nodes) | ||
| .innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id)) | ||
| .where(dayNodeWhere); | ||
|
|
||
| const totalDays = countResult?.total ?? 0; | ||
|
|
||
| if (totalDays === 0 || offset >= totalDays) { | ||
| return { | ||
| days: [], | ||
| totalDays, | ||
| hasMore: false, | ||
| }; | ||
| } | ||
|
|
||
| // Step 2: Fetch the paginated day nodes (DB-level limit/offset). | ||
| // Most recent first so pagination scrolls backward in time. | ||
| const paginatedDayNodes = await db | ||
| .select({ | ||
| id: nodes.id, | ||
| label: nodeMetadata.label, | ||
| }) | ||
| .from(nodes) | ||
| .innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id)) | ||
| .where(dayNodeWhere) | ||
| .orderBy(desc(nodeMetadata.label)) | ||
| .limit(limit) | ||
| .offset(offset); | ||
|
|
||
| if (paginatedDayNodes.length === 0) { | ||
| return { | ||
| days: [], | ||
| totalDays, | ||
| hasMore: false, | ||
| }; | ||
| } | ||
|
|
||
| const dayNodeIds = paginatedDayNodes.map((d) => d.id); | ||
|
|
||
| // Step 3: Batch-fetch all connected nodes for the paginated day nodes. | ||
| // This avoids N+1 queries — one query gets everything. | ||
| const connectedRows = await db | ||
| .select({ | ||
| dayNodeId: sql<TypeId<"node">>` | ||
| CASE | ||
| WHEN ${inArray(edges.sourceNodeId, dayNodeIds)} | ||
| THEN ${edges.sourceNodeId} | ||
| ELSE ${edges.targetNodeId} | ||
| END | ||
| `.as("day_node_id"), | ||
| id: nodes.id, | ||
| nodeType: nodes.nodeType, | ||
| label: nodeMetadata.label, | ||
| description: nodeMetadata.description, | ||
| edgeType: edges.edgeType, | ||
| createdAt: nodes.createdAt, | ||
| }) | ||
| .from(edges) | ||
| .innerJoin( | ||
| nodes, | ||
| eq( | ||
| nodes.id, | ||
| sql`CASE | ||
| WHEN ${inArray(edges.sourceNodeId, dayNodeIds)} | ||
| THEN ${edges.targetNodeId} | ||
| ELSE ${edges.sourceNodeId} | ||
| END`, | ||
| ), | ||
| ) | ||
| .innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id)) | ||
| .where( | ||
| and( | ||
| eq(edges.userId, userId), | ||
| eq(nodes.userId, userId), | ||
| or( | ||
| inArray(edges.sourceNodeId, dayNodeIds), | ||
| inArray(edges.targetNodeId, dayNodeIds), | ||
| ), | ||
| // Exclude the day nodes themselves from results | ||
| sql`CASE | ||
| WHEN ${inArray(edges.sourceNodeId, dayNodeIds)} | ||
| THEN ${edges.targetNodeId} | ||
| ELSE ${edges.sourceNodeId} | ||
| END NOT IN (${sql.join( | ||
| dayNodeIds.map((id) => sql`${id}`), | ||
| sql`, `, | ||
| )})`, | ||
| ), | ||
| ); | ||
|
|
||
| // Step 4: Group connected nodes by day node and build response. | ||
| const nodesByDay = new Map<TypeId<"node">, typeof connectedRows>(); | ||
| for (const row of connectedRows) { | ||
| const dayId = row.dayNodeId; | ||
| const existing = nodesByDay.get(dayId); | ||
| if (existing) { | ||
| existing.push(row); | ||
| } else { | ||
| nodesByDay.set(dayId, [row]); | ||
| } | ||
| } | ||
|
|
||
| const days = paginatedDayNodes.map((dayNode) => { | ||
| const dayId = dayNode.id; | ||
| const allConnected = nodesByDay.get(dayId) ?? []; | ||
|
|
||
| // Deduplicate by node id (a node can be connected via multiple edges) | ||
| const uniqueMap = new Map<string, (typeof allConnected)[number]>(); | ||
| for (const row of allConnected) { | ||
| if (!uniqueMap.has(row.id)) { | ||
| uniqueMap.set(row.id, row); | ||
| } | ||
| } | ||
| const totalNodeCount = uniqueMap.size; | ||
|
|
||
| // Apply nodeTypes filter if requested (after counting total) | ||
| let filteredNodes = allConnected; | ||
| if (nodeTypes && nodeTypes.length > 0) { | ||
| filteredNodes = allConnected.filter((r) => | ||
| nodeTypes.includes(r.nodeType as (typeof nodeTypes)[number]), | ||
| ); | ||
| } | ||
|
Comment on lines
+162
to
+167
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Filtering by
Owner
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The Generated by Claude Code |
||
|
|
||
| // Deduplicate the filtered set | ||
| const filteredUniqueMap = new Map<string, (typeof filteredNodes)[number]>(); | ||
| for (const row of filteredNodes) { | ||
| if (!filteredUniqueMap.has(row.id)) { | ||
| filteredUniqueMap.set(row.id, row); | ||
| } | ||
| } | ||
|
|
||
| return { | ||
| date: dayNode.label!, | ||
| temporalNodeId: dayId, | ||
| nodeCount: totalNodeCount, | ||
| nodes: Array.from(filteredUniqueMap.values()).map((r) => ({ | ||
| id: r.id, | ||
| label: r.label, | ||
| description: r.description, | ||
| nodeType: r.nodeType, | ||
| edgeType: r.edgeType, | ||
| createdAt: r.createdAt, | ||
| })), | ||
| }; | ||
| }); | ||
|
|
||
| return { | ||
| days, | ||
| totalDays, | ||
| hasMore: offset + limit < totalDays, | ||
| }; | ||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,112 @@ | ||
| import { | ||
| queryTimelineRequestSchema, | ||
| queryTimelineResponseSchema, | ||
| } from "./query-timeline"; | ||
| import { describe, it, expect } from "vitest"; | ||
|
|
||
| describe("queryTimelineRequestSchema", () => { | ||
| it("accepts minimal request with only userId", () => { | ||
| const parsed = queryTimelineRequestSchema.parse({ userId: "user_123" }); | ||
| expect(parsed.userId).toBe("user_123"); | ||
| expect(parsed.limit).toBe(30); | ||
| expect(parsed.offset).toBe(0); | ||
| expect(parsed.startDate).toBeUndefined(); | ||
| expect(parsed.endDate).toBeUndefined(); | ||
| expect(parsed.nodeTypes).toBeUndefined(); | ||
| }); | ||
|
|
||
| it("accepts full request with all fields", () => { | ||
| const parsed = queryTimelineRequestSchema.parse({ | ||
| userId: "user_123", | ||
| startDate: "2025-01-15", | ||
| endDate: "2024-10-15", | ||
| limit: 10, | ||
| offset: 5, | ||
| nodeTypes: ["Person", "Event"], | ||
| }); | ||
| expect(parsed.startDate).toBe("2025-01-15"); | ||
| expect(parsed.endDate).toBe("2024-10-15"); | ||
| expect(parsed.limit).toBe(10); | ||
| expect(parsed.offset).toBe(5); | ||
| expect(parsed.nodeTypes).toEqual(["Person", "Event"]); | ||
| }); | ||
|
|
||
| it("rejects invalid date format", () => { | ||
| expect(() => | ||
| queryTimelineRequestSchema.parse({ | ||
| userId: "user_123", | ||
| startDate: "01-15-2025", | ||
| }), | ||
| ).toThrow(); | ||
| }); | ||
|
|
||
| it("rejects invalid nodeType", () => { | ||
| expect(() => | ||
| queryTimelineRequestSchema.parse({ | ||
| userId: "user_123", | ||
| nodeTypes: ["InvalidType"], | ||
| }), | ||
| ).toThrow(); | ||
| }); | ||
|
|
||
| it("rejects limit over 100", () => { | ||
| expect(() => | ||
| queryTimelineRequestSchema.parse({ | ||
| userId: "user_123", | ||
| limit: 101, | ||
| }), | ||
| ).toThrow(); | ||
| }); | ||
|
|
||
| it("rejects negative offset", () => { | ||
| expect(() => | ||
| queryTimelineRequestSchema.parse({ | ||
| userId: "user_123", | ||
| offset: -1, | ||
| }), | ||
| ).toThrow(); | ||
| }); | ||
| }); | ||
|
|
||
| describe("queryTimelineResponseSchema", () => { | ||
| it("validates a complete response", () => { | ||
| const response = { | ||
| days: [ | ||
| { | ||
| date: "2025-01-15", | ||
| temporalNodeId: "node_01234567890123456789abcdef", | ||
| nodeCount: 5, | ||
| nodes: [ | ||
| { | ||
| id: "node_abcdefghijklmnopqrstuvwxyz", | ||
| label: "Meeting with team", | ||
| description: "Weekly standup meeting", | ||
| nodeType: "Event", | ||
| edgeType: "OCCURRED_ON", | ||
| createdAt: "2025-01-15T10:00:00.000Z", | ||
| }, | ||
| ], | ||
| }, | ||
| ], | ||
| totalDays: 10, | ||
| hasMore: true, | ||
| }; | ||
| const parsed = queryTimelineResponseSchema.parse(response); | ||
| expect(parsed.days).toHaveLength(1); | ||
| expect(parsed.days[0]!.nodeCount).toBe(5); | ||
| expect(parsed.days[0]!.nodes[0]!.createdAt).toBeInstanceOf(Date); | ||
| expect(parsed.totalDays).toBe(10); | ||
| expect(parsed.hasMore).toBe(true); | ||
| }); | ||
|
|
||
| it("validates empty response", () => { | ||
| const parsed = queryTimelineResponseSchema.parse({ | ||
| days: [], | ||
| totalDays: 0, | ||
| hasMore: false, | ||
| }); | ||
| expect(parsed.days).toEqual([]); | ||
| expect(parsed.totalDays).toBe(0); | ||
| expect(parsed.hasMore).toBe(false); | ||
| }); | ||
| }); |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,45 @@ | ||
| import { EdgeTypeEnum, NodeTypeEnum } from "../../types/graph.js"; | ||
| import { typeIdSchema } from "../../types/typeid.js"; | ||
| import { z } from "zod"; | ||
|
|
||
| const dateRegex = /^\d{4}-\d{2}-\d{2}$/; | ||
|
|
||
| export const queryTimelineRequestSchema = z.object({ | ||
| userId: z.string(), | ||
| startDate: z | ||
| .string() | ||
| .regex(dateRegex, "startDate must be in YYYY-MM-DD format") | ||
| .optional(), | ||
| endDate: z | ||
| .string() | ||
| .regex(dateRegex, "endDate must be in YYYY-MM-DD format") | ||
| .optional(), | ||
| limit: z.number().int().min(1).max(100).default(30), | ||
| offset: z.number().int().min(0).default(0), | ||
| nodeTypes: z.array(NodeTypeEnum).optional(), | ||
| }); | ||
|
|
||
| export const queryTimelineNodeSchema = z.object({ | ||
| id: typeIdSchema("node"), | ||
| label: z.string().nullable(), | ||
| description: z.string().nullable(), | ||
| nodeType: NodeTypeEnum, | ||
| edgeType: EdgeTypeEnum, | ||
| createdAt: z.coerce.date(), | ||
| }); | ||
|
|
||
| export const queryTimelineDaySchema = z.object({ | ||
| date: z.string(), | ||
| temporalNodeId: typeIdSchema("node"), | ||
| nodeCount: z.number(), | ||
| nodes: z.array(queryTimelineNodeSchema), | ||
| }); | ||
|
|
||
| export const queryTimelineResponseSchema = z.object({ | ||
| days: z.array(queryTimelineDaySchema), | ||
| totalDays: z.number(), | ||
| hasMore: z.boolean(), | ||
| }); | ||
|
|
||
| export type QueryTimelineRequest = z.infer<typeof queryTimelineRequestSchema>; | ||
| export type QueryTimelineResponse = z.infer<typeof queryTimelineResponseSchema>; |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,11 @@ | ||
| import { defineEventHandler } from "h3"; | ||
| import { queryTimeline } from "~/lib/query/timeline"; | ||
| import { | ||
| queryTimelineRequestSchema, | ||
| queryTimelineResponseSchema, | ||
| } from "~/lib/schemas/query-timeline"; | ||
|
|
||
| export default defineEventHandler(async (event) => { | ||
| const params = queryTimelineRequestSchema.parse(await readBody(event)); | ||
| return queryTimelineResponseSchema.parse(await queryTimeline(params)); | ||
| }); |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The date range parameters and their defaults are logically inverted. Conventionally, `startDate` should represent the beginning of the period (the older date) and `endDate` the end (the more recent date). Currently, `startDate` defaults to today and `endDate` to ninety days ago, which is counter-intuitive and contradicts the naming. This also affects the query logic in subsequent steps.There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The `startDate` = today / `endDate` = 90 days ago convention is intentional — it was designed by the consuming app (Petals), where "start" means "where the timeline view starts" (today) and "end" means "how far back to look". That said, the naming can be confusing, so I've added date normalization in 06d4193 that sorts the two dates internally. The query now works correctly regardless of which param is chronologically larger, so callers can pass them in either order.Generated by Claude Code