Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
197 changes: 197 additions & 0 deletions src/lib/query/timeline.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,197 @@
import {
QueryTimelineRequest,
QueryTimelineResponse,
} from "../schemas/query-timeline";
import { format, subDays } from "date-fns";
import { and, eq, or, gte, lte, desc, inArray, sql, count } from "drizzle-orm";
import { edges, nodeMetadata, nodes } from "~/db/schema";
import { NodeTypeEnum } from "~/types/graph";
import type { TypeId } from "~/types/typeid";
import { useDatabase } from "~/utils/db";

/**
* Query a timeline of memories grouped by date.
*
* Finds all Temporal (day) nodes within a date range for a user,
* then fetches connected nodes for each day. Supports pagination
* via limit/offset on days, and optional nodeType filtering on
* the connected nodes.
*/
export async function queryTimeline(
params: QueryTimelineRequest,
): Promise<QueryTimelineResponse> {
const { userId, limit = 30, offset = 0, nodeTypes } = params;

const today = format(new Date(), "yyyy-MM-dd");
const ninetyDaysAgo = format(subDays(new Date(), 90), "yyyy-MM-dd");
const startDate = params.startDate ?? today;
const endDate = params.endDate ?? ninetyDaysAgo;
Comment on lines +27 to +28
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

high

The date range parameters and their defaults are logically inverted. Conventionally, startDate should represent the beginning of the period (the older date) and endDate the end (the more recent date). Currently, startDate defaults to today and endDate to ninetyDaysAgo, which is counter-intuitive and contradicts the naming. This also affects the query logic in subsequent steps.

Suggested change
const startDate = params.startDate ?? today;
const endDate = params.endDate ?? ninetyDaysAgo;
const startDate = params.startDate ?? ninetyDaysAgo;
const endDate = params.endDate ?? today;

Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The startDate = today / endDate = 90 days ago convention is intentional — it was designed by the consuming app (Petals) where "start" means "where the timeline view starts" (today) and "end" means "how far back to look". That said, the naming can be confusing, so I've added date normalization in 06d4193 that sorts the two dates internally. The query now works correctly regardless of which param is chronologically larger, so callers can pass them in either order.


Generated by Claude Code


// Normalize so rangeMin <= rangeMax regardless of param ordering
const rangeMin = startDate < endDate ? startDate : endDate;
const rangeMax = startDate < endDate ? endDate : startDate;

const db = await useDatabase();

// Shared WHERE clause for day-node lookups
const dayNodeWhere = and(
eq(nodes.userId, userId),
eq(nodes.nodeType, NodeTypeEnum.enum.Temporal),
gte(nodeMetadata.label, rangeMin),
lte(nodeMetadata.label, rangeMax),
);

// Step 1: Count total days with data in the range (DB-level).
const [countResult] = await db
.select({ total: count() })
.from(nodes)
.innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id))
.where(dayNodeWhere);

const totalDays = countResult?.total ?? 0;

if (totalDays === 0 || offset >= totalDays) {
return {
days: [],
totalDays,
hasMore: false,
};
}

// Step 2: Fetch the paginated day nodes (DB-level limit/offset).
// Most recent first so pagination scrolls backward in time.
const paginatedDayNodes = await db
.select({
id: nodes.id,
label: nodeMetadata.label,
})
.from(nodes)
.innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id))
.where(dayNodeWhere)
.orderBy(desc(nodeMetadata.label))
.limit(limit)
.offset(offset);

if (paginatedDayNodes.length === 0) {
return {
days: [],
totalDays,
hasMore: false,
};
}

const dayNodeIds = paginatedDayNodes.map((d) => d.id);

// Step 3: Batch-fetch all connected nodes for the paginated day nodes.
// This avoids N+1 queries — one query gets everything.
const connectedRows = await db
.select({
dayNodeId: sql<TypeId<"node">>`
CASE
WHEN ${inArray(edges.sourceNodeId, dayNodeIds)}
THEN ${edges.sourceNodeId}
ELSE ${edges.targetNodeId}
END
`.as("day_node_id"),
id: nodes.id,
nodeType: nodes.nodeType,
label: nodeMetadata.label,
description: nodeMetadata.description,
edgeType: edges.edgeType,
createdAt: nodes.createdAt,
})
.from(edges)
.innerJoin(
nodes,
eq(
nodes.id,
sql`CASE
WHEN ${inArray(edges.sourceNodeId, dayNodeIds)}
THEN ${edges.targetNodeId}
ELSE ${edges.sourceNodeId}
END`,
),
)
.innerJoin(nodeMetadata, eq(nodeMetadata.nodeId, nodes.id))
.where(
and(
eq(edges.userId, userId),
eq(nodes.userId, userId),
or(
inArray(edges.sourceNodeId, dayNodeIds),
inArray(edges.targetNodeId, dayNodeIds),
),
// Exclude the day nodes themselves from results
sql`CASE
WHEN ${inArray(edges.sourceNodeId, dayNodeIds)}
THEN ${edges.targetNodeId}
ELSE ${edges.sourceNodeId}
END NOT IN (${sql.join(
dayNodeIds.map((id) => sql`${id}`),
sql`, `,
)})`,
),
);

// Step 4: Group connected nodes by day node and build response.
const nodesByDay = new Map<TypeId<"node">, typeof connectedRows>();
for (const row of connectedRows) {
const dayId = row.dayNodeId;
const existing = nodesByDay.get(dayId);
if (existing) {
existing.push(row);
} else {
nodesByDay.set(dayId, [row]);
}
}

const days = paginatedDayNodes.map((dayNode) => {
const dayId = dayNode.id;
const allConnected = nodesByDay.get(dayId) ?? [];

// Deduplicate by node id (a node can be connected via multiple edges)
const uniqueMap = new Map<string, (typeof allConnected)[number]>();
for (const row of allConnected) {
if (!uniqueMap.has(row.id)) {
uniqueMap.set(row.id, row);
}
}
const totalNodeCount = uniqueMap.size;

// Apply nodeTypes filter if requested (after counting total)
let filteredNodes = allConnected;
if (nodeTypes && nodeTypes.length > 0) {
filteredNodes = allConnected.filter((r) =>
nodeTypes.includes(r.nodeType as (typeof nodeTypes)[number]),
);
}
Comment on lines +162 to +167
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Filtering by nodeTypes in memory after fetching all connected nodes is inefficient and doesn't scale. This filter should be applied within the SQL query in Step 3 to reduce data transfer. Furthermore, the nodeCount returned in the response is the total count before filtering, which is inconsistent with the nodes array length and may be confusing for API consumers. Consider renaming this field to totalMemoriesCount or updating it to reflect the filtered results.

Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The nodeCount being pre-filter is intentional per the spec — it lets the Petals UI show context like "5 memories (showing 2 Events)" when filtering. The in-memory filter is acceptable here since per-day connected node counts are small (typically tens, not thousands). Pushing the filter to SQL would require either two separate batch queries or a conditional count expression, adding complexity without meaningful perf gain at this scale.


Generated by Claude Code


// Deduplicate the filtered set
const filteredUniqueMap = new Map<string, (typeof filteredNodes)[number]>();
for (const row of filteredNodes) {
if (!filteredUniqueMap.has(row.id)) {
filteredUniqueMap.set(row.id, row);
}
}

return {
date: dayNode.label!,
temporalNodeId: dayId,
nodeCount: totalNodeCount,
nodes: Array.from(filteredUniqueMap.values()).map((r) => ({
id: r.id,
label: r.label,
description: r.description,
nodeType: r.nodeType,
edgeType: r.edgeType,
createdAt: r.createdAt,
})),
};
});

return {
days,
totalDays,
hasMore: offset + limit < totalDays,
};
}
112 changes: 112 additions & 0 deletions src/lib/schemas/query-timeline.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import {
queryTimelineRequestSchema,
queryTimelineResponseSchema,
} from "./query-timeline";
import { describe, it, expect } from "vitest";

describe("queryTimelineRequestSchema", () => {
  // Wraps a parse call in a thunk so it can be handed to expect(...).toThrow().
  const parseAttempt = (input: unknown) => () =>
    queryTimelineRequestSchema.parse(input);

  it("accepts minimal request with only userId", () => {
    const result = queryTimelineRequestSchema.parse({ userId: "user_123" });
    expect(result.userId).toBe("user_123");
    expect(result.limit).toBe(30);
    expect(result.offset).toBe(0);
    expect(result.startDate).toBeUndefined();
    expect(result.endDate).toBeUndefined();
    expect(result.nodeTypes).toBeUndefined();
  });

  it("accepts full request with all fields", () => {
    const input = {
      userId: "user_123",
      startDate: "2025-01-15",
      endDate: "2024-10-15",
      limit: 10,
      offset: 5,
      nodeTypes: ["Person", "Event"],
    };
    const result = queryTimelineRequestSchema.parse(input);
    expect(result.startDate).toBe("2025-01-15");
    expect(result.endDate).toBe("2024-10-15");
    expect(result.limit).toBe(10);
    expect(result.offset).toBe(5);
    expect(result.nodeTypes).toEqual(["Person", "Event"]);
  });

  it("rejects invalid date format", () => {
    expect(
      parseAttempt({ userId: "user_123", startDate: "01-15-2025" }),
    ).toThrow();
  });

  it("rejects invalid nodeType", () => {
    expect(
      parseAttempt({ userId: "user_123", nodeTypes: ["InvalidType"] }),
    ).toThrow();
  });

  it("rejects limit over 100", () => {
    expect(parseAttempt({ userId: "user_123", limit: 101 })).toThrow();
  });

  it("rejects negative offset", () => {
    expect(parseAttempt({ userId: "user_123", offset: -1 })).toThrow();
  });
});

describe("queryTimelineResponseSchema", () => {
  it("validates a complete response", () => {
    const connectedNode = {
      id: "node_abcdefghijklmnopqrstuvwxyz",
      label: "Meeting with team",
      description: "Weekly standup meeting",
      nodeType: "Event",
      edgeType: "OCCURRED_ON",
      createdAt: "2025-01-15T10:00:00.000Z",
    };
    const day = {
      date: "2025-01-15",
      temporalNodeId: "node_01234567890123456789abcdef",
      nodeCount: 5,
      nodes: [connectedNode],
    };
    const parsed = queryTimelineResponseSchema.parse({
      days: [day],
      totalDays: 10,
      hasMore: true,
    });
    expect(parsed.days).toHaveLength(1);
    expect(parsed.days[0]!.nodeCount).toBe(5);
    // z.coerce.date should turn the ISO string into a Date instance.
    expect(parsed.days[0]!.nodes[0]!.createdAt).toBeInstanceOf(Date);
    expect(parsed.totalDays).toBe(10);
    expect(parsed.hasMore).toBe(true);
  });

  it("validates empty response", () => {
    const parsed = queryTimelineResponseSchema.parse({
      days: [],
      totalDays: 0,
      hasMore: false,
    });
    expect(parsed.days).toEqual([]);
    expect(parsed.totalDays).toBe(0);
    expect(parsed.hasMore).toBe(false);
  });
});
45 changes: 45 additions & 0 deletions src/lib/schemas/query-timeline.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import { EdgeTypeEnum, NodeTypeEnum } from "../../types/graph.js";
import { typeIdSchema } from "../../types/typeid.js";
import { z } from "zod";

const dateRegex = /^\d{4}-\d{2}-\d{2}$/;

// Builds an optional YYYY-MM-DD string field whose error message names
// the field. Error text matches what callers previously received.
const isoDateField = (fieldName: string) =>
  z
    .string()
    .regex(dateRegex, `${fieldName} must be in YYYY-MM-DD format`)
    .optional();

/** Request body for POST /query/timeline. */
export const queryTimelineRequestSchema = z.object({
  userId: z.string(),
  startDate: isoDateField("startDate"),
  endDate: isoDateField("endDate"),
  limit: z.number().int().min(1).max(100).default(30),
  offset: z.number().int().min(0).default(0),
  nodeTypes: z.array(NodeTypeEnum).optional(),
});

/** A single node connected to a day node in the timeline. */
export const queryTimelineNodeSchema = z.object({
  id: typeIdSchema("node"),
  label: z.string().nullable(),
  description: z.string().nullable(),
  nodeType: NodeTypeEnum,
  edgeType: EdgeTypeEnum,
  // Coerces ISO strings (and other Date-like inputs) into Date instances.
  createdAt: z.coerce.date(),
});

/** One day's worth of timeline data. */
export const queryTimelineDaySchema = z.object({
  date: z.string(),
  temporalNodeId: typeIdSchema("node"),
  nodeCount: z.number(),
  nodes: z.array(queryTimelineNodeSchema),
});

/** Full response body for POST /query/timeline. */
export const queryTimelineResponseSchema = z.object({
  days: z.array(queryTimelineDaySchema),
  totalDays: z.number(),
  hasMore: z.boolean(),
});

export type QueryTimelineRequest = z.infer<typeof queryTimelineRequestSchema>;
export type QueryTimelineResponse = z.infer<typeof queryTimelineResponseSchema>;
11 changes: 11 additions & 0 deletions src/routes/query/timeline.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
// Fix: readBody is called below but was not imported — only
// defineEventHandler was. Since this file imports h3 utilities
// explicitly (rather than relying on auto-imports), readBody must be
// imported the same way.
import { defineEventHandler, readBody } from "h3";
import { queryTimeline } from "~/lib/query/timeline";
import {
  queryTimelineRequestSchema,
  queryTimelineResponseSchema,
} from "~/lib/schemas/query-timeline";

/**
 * POST /query/timeline
 *
 * Validates the request body, runs the timeline query, and validates
 * the outgoing response against the shared schema so the route can
 * never emit a shape the SDK does not expect.
 */
export default defineEventHandler(async (event) => {
  const params = queryTimelineRequestSchema.parse(await readBody(event));
  return queryTimelineResponseSchema.parse(await queryTimeline(params));
});
16 changes: 16 additions & 0 deletions src/sdk/memory-client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,11 @@ import {
QuerySearchResponse,
querySearchResponseSchema,
} from "../lib/schemas/query-search.js";
import {
QueryTimelineRequest,
QueryTimelineResponse,
queryTimelineResponseSchema,
} from "../lib/schemas/query-timeline.js";
import {
ScratchpadReadRequest,
ScratchpadWriteRequest,
Expand Down Expand Up @@ -227,6 +232,17 @@ export class MemoryClient {
);
}

/**
 * Query a timeline of memories grouped by date.
 *
 * Issues POST /query/timeline; the response schema is handed to
 * _fetch, which presumably parses the payload against it (matching
 * the other query methods on this client).
 */
async queryTimeline(
  payload: QueryTimelineRequest,
): Promise<QueryTimelineResponse> {
  const response = await this._fetch(
    "POST",
    "/query/timeline",
    queryTimelineResponseSchema,
    payload,
  );
  return response;
}

/**
 * Issues POST /summarize; the response schema is handed to _fetch,
 * which presumably parses the payload against it.
 */
async summarize(payload: SummarizeRequest): Promise<SummarizeResponse> {
  const result = await this._fetch(
    "POST",
    "/summarize",
    summarizeResponseSchema,
    payload,
  );
  return result;
}
Expand Down
Loading