diff --git a/.env.example b/.env.example index 940c6bac..da57c210 100644 --- a/.env.example +++ b/.env.example @@ -1,65 +1,79 @@ - # Since the ".env" file is gitignored, you can use the ".env.example" file to -# build a new ".env" file when you clone the repo. Keep this file up-to-date -# when you add new variables to `.env`. +# Server environment variables +DATABASE_URL="postgresql://user:password@localhost:5432/pdr_ai" + +# Clerk Authentication +NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY="pk_test_..." +CLERK_SECRET_KEY="sk_test_..." + +# OpenAI API +# OPENAI_API_KEY is set in the "OpenAI API" section further below (do not define it twice) + +# NextJS client environment variables +NEXT_PUBLIC_UPLOADTHING_ENABLED="true" + +# Trend / web search (optional — leave keys unset if you do not use trend search) +# SEARCH_PROVIDER default when unset is tavily-only (same as tavily). +# tavily → requires TAVILY_API_KEY +# serper → requires SERPER_API_KEY +# fallback → Serper first, then Tavily if Serper returns no results; set both keys for full behavior +# parallel → merge Serper + Tavily; set both keys for full behavior (Serper-only works with SERPER_API_KEY only) +# If a required key is missing, the pipeline no-ops that provider; providerUsed may be "none" when no key backs the active path. +TAVILY_API_KEY="tvly-..." +SERPER_API_KEY="..." +SEARCH_PROVIDER="tavily" # "tavily" | "serper" | "fallback" | "parallel" + +# Platform API Keys for Marketing Pipeline +REDDIT_CLIENT_ID="your_reddit_client_id" +REDDIT_CLIENT_SECRET="your_reddit_client_secret" +REDDIT_USER_AGENT="your_reddit_user_agent" +TWITTER_BEARER_TOKEN="your_twitter_bearer_token" +LINKEDIN_ACCESS_TOKEN="your_linkedin_access_token" +BLUESKY_HANDLE="your_bluesky_handle" +BLUESKY_APP_PASSWORD="your_bluesky_app_password" + +# Document Processing (Optional) +AZURE_DOC_INTELLIGENCE_ENDPOINT="https://..." +AZURE_DOC_INTELLIGENCE_KEY="..." +LANDING_AI_API_KEY="..." -# This file will be committed to version control, so make sure not to have any -# secrets in it. 
If you are cloning this repo, create a copy of this file named -# ".env" and populate it with your secrets. - -# When adding additional environment variables, the schema in "/src/env.js" -# should be updated accordingly. - -# Database -DATABASE_URL="" - -# Docker Compose: password for PostgreSQL (used by db service) -# POSTGRES_PASSWORD=password - -# Clerk Authentication (get from https://clerk.com/) - -NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY= -CLERK_SECRET_KEY= +# Anthropic API (optional — enables Claude models, get from https://console.anthropic.com/) +ANTHROPIC_API_KEY= +ANTHROPIC_MODEL= # OpenAI API (get from https://platform.openai.com/) OPENAI_API_KEY= OPENAI_MODEL="gpt-5-mini" -# Anthropic API (optional — enables Claude models, get from https://console.anthropic.com/) -ANTHROPIC_API_KEY= - # Google AI API (optional — enables Gemini models, get from https://aistudio.google.com/apikey) GOOGLE_AI_API_KEY= +GOOGLE_MODEL= # Ollama (optional — enables local models via Ollama, see https://ollama.com/) OLLAMA_BASE_URL="http://localhost:11434" OLLAMA_MODEL="llama3.1:8b" -# UploadThing (get from https://uploadthing.com/) +# LangSmith Tracing (Optional) +LANGCHAIN_TRACING_V2="false" +LANGCHAIN_API_KEY="..." +LANGCHAIN_PROJECT="pdr_ai_v2" + +# File Uploading (Optional) UPLOADTHING_SECRET="your_uploadthing_secret" UPLOADTHING_APP_ID="your_uploadthing_app_id" +UPLOADTHING_TOKEN="..." 
-# Datalab OCR API (optional - get from https://www.datalab.to/) -# Required only if you want to enable OCR processing for scanned documents -DATALAB_API_KEY="your_datalab_api_key" - -# Landing.AI OCR API (optional - get from https://www.landing.ai/) -LANDING_AI_API_KEY="your_landing_ai_api_key" - -# Tavily API (optional - get from https://www.tavily.com/) -TAVILY_API_KEY="your_tavily_api_key" - -# Azure Document Intelligence OCR API (optional - get from https://learn.microsoft.com/en-us/azure/applied-ai-services/document-intelligence/quickstarts/get-started-with-rest-api?pivots=programming-language-rest-api) -AZURE_DOC_INTELLIGENCE_ENDPOINT="your_azure_doc_intelligence_endpoint" -AZURE_DOC_INTELLIGENCE_KEY="your_azure_doc_intelligence_key" +# Data APIs (Optional) +DATALAB_API_KEY="..." -# Inngest (required for background document processing - https://inngest.com/) -INNGEST_EVENT_KEY="dev_placeholder" +# Background Jobs +JOB_RUNNER="inngest" +INNGEST_EVENT_KEY="local" INNGEST_SIGNING_KEY="signkey-dev-xxxxx" -# Sidecar (optional - get from https://github.com/pdr-ai/sidecar) -SIDECAR_URL="your_sidecar_url" +# Sidecar ML Compute (Optional) +SIDECAR_URL="http://localhost:8080" -# Neo4j (optional - get from https://neo4j.com/) +# Neo4j (optional) NEO4J_URI="your_neo4j_uri" NEO4J_USERNAME="your_neo4j_username" NEO4J_PASSWORD="your_neo4j_password" diff --git a/__tests__/api/trendSearch/search-provider.pbt.test.ts b/__tests__/api/trendSearch/search-provider.pbt.test.ts new file mode 100644 index 00000000..e4fa0abb --- /dev/null +++ b/__tests__/api/trendSearch/search-provider.pbt.test.ts @@ -0,0 +1,340 @@ +/** + * Property-based tests for search provider normalization and strategy behavior. + * Feature: Serper dual-channel search — Task 4.3 + * Property 13: Serper adapter output conforms to RawSearchResult. + * Property 14: Fallback strategy invokes secondary when primary returns empty. + * Property 15: Parallel merge deduplicates by URL (Serper first, then Tavily). 
+ * Property 16: Default (no env) matches Tavily-only behavior. + * Property 17: Serper-dependent strategies downgrade when key missing. + */ + +const TAVILY_URL = "https://api.tavily.com/search"; +const SERPER_URL = "https://google.serper.dev/news"; + +jest.mock("~/env", () => { + const server = { + TAVILY_API_KEY: "test-tavily-key", + SERPER_API_KEY: "test-serper-key", + SEARCH_PROVIDER: undefined as "tavily" | "serper" | "fallback" | "parallel" | undefined, + }; + return { env: { server } }; +}); + +import * as fc from "fast-check"; +import { env } from "~/env"; +import { callSerper } from "~/lib/tools/trend-search/providers/serper"; +import { executeSearch } from "~/lib/tools/trend-search/web-search"; +import type { PlannedQuery, RawSearchResult } from "~/lib/tools/trend-search/types"; +import type { ProviderStrategy } from "~/lib/tools/trend-search/providers/types"; + +beforeEach(() => { + env.server.TAVILY_API_KEY = "test-tavily-key"; + env.server.SERPER_API_KEY = "test-serper-key"; + env.server.SEARCH_PROVIDER = undefined; +}); + +// ─── Arbitraries ───────────────────────────────────────────────────────────── + +const validCategories = ["fashion", "finance", "business", "tech"] as const; + +const categoryArb = fc.constantFrom(...validCategories); + +const plannedQueryArb: fc.Arbitrary = fc.record({ + searchQuery: fc.string({ minLength: 1, maxLength: 300 }), + category: categoryArb, + rationale: fc.string({ minLength: 1, maxLength: 200 }), +}); + +const subQueriesArb = fc.array(plannedQueryArb, { minLength: 1, maxLength: 5 }); + +/** Serper news item shape (subset we use). 
*/ +const serperNewsItemArb = fc.record({ + link: fc.webUrl({ validSchemes: ["https"] }), + title: fc.option(fc.string({ maxLength: 200 }), { nil: undefined }), + snippet: fc.option(fc.string({ maxLength: 500 }), { nil: undefined }), + date: fc.option(fc.string(), { nil: undefined }), + position: fc.option(fc.nat({ max: 20 }), { nil: undefined }), +}); + +const serperNewsArrayArb = fc.array(serperNewsItemArb, { minLength: 0, maxLength: 15 }); + +/** RawSearchResult arbitrary for merge tests. */ +const rawSearchResultArb = fc.record({ + url: fc.webUrl({ validSchemes: ["https"] }), + title: fc.string({ minLength: 1, maxLength: 200 }), + content: fc.string({ maxLength: 500 }), + score: fc.double({ min: 0, max: 1 }), + publishedDate: fc.option(fc.string(), { nil: undefined }), +}); + +function normalizeUrl(url: string): string { + try { + return new URL(url).href; + } catch { + return url.trim(); + } +} + +function conformsToRawSearchResult(r: unknown): r is RawSearchResult { + if (r === null || typeof r !== "object") return false; + const o = r as Record; + return ( + typeof o.url === "string" && + o.url.length > 0 && + typeof o.title === "string" && + typeof o.content === "string" && + typeof o.score === "number" && + (!("publishedDate" in o) || typeof o.publishedDate === "string" || o.publishedDate === undefined) + ); +} + +// ─── Property 13: Serper output conforms to RawSearchResult ─────────────────── + +describe("Property 13: Serper-shaped responses normalize to RawSearchResult", () => { + it("for any random Serper news array, every output item conforms to RawSearchResult", async () => { + await fc.assert( + fc.asyncProperty(serperNewsArrayArb, async (news) => { + const fetchSpy = jest.spyOn(globalThis, "fetch").mockResolvedValue({ + ok: true, + text: async () => "", + json: async () => ({ news }), + } as Response); + + const results = await callSerper("test query"); + + fetchSpy.mockRestore(); + + for (const item of results) { + 
expect(conformsToRawSearchResult(item)).toBe(true); + expect(item.url).toBeDefined(); + expect(typeof item.title).toBe("string"); + expect(typeof item.content).toBe("string"); + expect(typeof item.score).toBe("number"); + } + }), + { numRuns: 80 } + ); + }); +}); + +// ─── Property 14: Fallback invokes secondary when primary returns empty ───────── + +describe("Property 14: Fallback strategy invokes secondary when primary returns empty", () => { + it("for random sub-query lists, when primary (Serper) returns empty, Tavily is invoked once per sub-query", async () => { + await fc.assert( + fc.asyncProperty(subQueriesArb, async (subQueries) => { + let serperCalls = 0; + let tavilyCalls = 0; + const fetchSpy = jest.spyOn(globalThis, "fetch").mockImplementation((input: RequestInfo | URL) => { + const url = typeof input === "string" ? input : input instanceof URL ? input.href : (input as Request).url; + if (url === SERPER_URL) { + serperCalls++; + return Promise.resolve({ + ok: true, + text: async () => "", + json: async () => ({ news: [] }), + } as Response); + } + if (url === TAVILY_URL) { + tavilyCalls++; + return Promise.resolve({ + ok: true, + text: async () => "", + json: async () => ({ + results: [{ url: "https://tavily.com/1", title: "T", content: "C", score: 0.9 }], + }), + } as Response); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + + env.server.SEARCH_PROVIDER = "fallback"; + env.server.SERPER_API_KEY = "test-serper-key"; + + await executeSearch(subQueries); + + fetchSpy.mockRestore(); + + expect(serperCalls).toBe(subQueries.length); + expect(tavilyCalls).toBe(subQueries.length); + }), + { numRuns: 50 } + ); + }); +}); + +// ─── Property 15: Parallel dedup — Serper rows first, then Tavily (no cross-provider score compare) ────────── + +describe("Property 15: Parallel merge deduplicates by URL (Serper first)", () => { + it("for two random result sets with overlapping URLs, merged result has no duplicate URLs and Serper wins on URL 
tie", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(rawSearchResultArb, { minLength: 0, maxLength: 5 }), + fc.array(rawSearchResultArb, { minLength: 0, maxLength: 5 }), + fc.string({ minLength: 1, maxLength: 100 }), + async (setA, setB, _query) => { + // Tavily returns setA with original scores; Serper adapter recomputes score as 1 - position/totalResults + const serperScores = setB.length > 0 + ? setB.map((_, i) => 1 - (i + 1) / setB.length) + : []; + const setBWithSerperScores = setB.map((r, i) => ({ ...r, score: serperScores[i] ?? 0 })); + // Replicate executeSearch parallel merge: Serper first, then Tavily; first URL wins + const byUrl = new Map(); + for (const r of setBWithSerperScores) { + const key = normalizeUrl(r.url); + if (!key) continue; + if (!byUrl.has(key)) byUrl.set(key, r); + } + for (const r of setA) { + const key = normalizeUrl(r.url); + if (!key) continue; + if (!byUrl.has(key)) byUrl.set(key, r); + } + const pairKey = (r: RawSearchResult) => `${normalizeUrl(r.url)}::${Number(r.score).toFixed(10)}`; + const expectedPairs = new Set([...byUrl.values()].map(pairKey)); + + const fetchSpy = jest.spyOn(globalThis, "fetch").mockImplementation((input: RequestInfo | URL) => { + const url = typeof input === "string" ? input : input instanceof URL ? 
input.href : (input as Request).url; + if (url === TAVILY_URL) { + return Promise.resolve({ + ok: true, + text: async () => "", + json: async () => ({ results: setA }), + } as Response); + } + if (url === SERPER_URL) { + const serperNews = setB.map((r, i) => ({ + link: r.url, + title: r.title, + snippet: r.content, + position: i + 1, + })); + return Promise.resolve({ + ok: true, + text: async () => "", + json: async () => ({ news: serperNews }), + } as Response); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + + env.server.SEARCH_PROVIDER = "parallel"; + env.server.SERPER_API_KEY = "test-serper-key"; + + const { results } = await executeSearch( + [{ searchQuery: "q", category: "tech", rationale: "r" }], + "parallel" + ); + + fetchSpy.mockRestore(); + + const resultUrls = results.map((r) => r.url); + const uniqueUrls = new Set(resultUrls); + expect(resultUrls.length).toBe(uniqueUrls.size); + + const actualPairs = new Set(results.map(pairKey)); + expect(actualPairs.size).toBe(expectedPairs.size); + for (const p of actualPairs) { + expect(expectedPairs.has(p)).toBe(true); + } + } + ), + { numRuns: 60 } + ); + }); +}); + +// ─── Property 16: Default (no env) matches Tavily-only ────────────────────────── + +describe("Property 16: Default strategy matches Tavily-only behavior", () => { + it("when SEARCH_PROVIDER is unset, providerUsed is tavily and only Tavily is called", async () => { + await fc.assert( + fc.asyncProperty(subQueriesArb, async (subQueries) => { + let tavilyCalls = 0; + let serperCalls = 0; + const fetchSpy = jest.spyOn(globalThis, "fetch").mockImplementation((input: RequestInfo | URL) => { + const url = typeof input === "string" ? input : input instanceof URL ? 
input.href : (input as Request).url; + if (url === TAVILY_URL) { + tavilyCalls++; + return Promise.resolve({ + ok: true, + text: async () => "", + json: async () => ({ + results: [{ url: "https://tavily.com/1", title: "T", content: "C", score: 0.9 }], + }), + } as Response); + } + if (url === SERPER_URL) { + serperCalls++; + return Promise.resolve({ + ok: true, + text: async () => "", + json: async () => ({ news: [] }), + } as Response); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + + env.server.SEARCH_PROVIDER = undefined; + env.server.SERPER_API_KEY = "test-serper-key"; + + const { providerUsed } = await executeSearch(subQueries); + + fetchSpy.mockRestore(); + + expect(providerUsed).toBe("tavily"); + expect(tavilyCalls).toBe(subQueries.length); + expect(serperCalls).toBe(0); + }), + { numRuns: 50 } + ); + }); +}); + +// ─── Property 17: Serper-dependent strategies downgrade when key missing ─────── + +describe("Property 17: Missing Serper key downgrades Serper-dependent strategies to tavily", () => { + const serperDependentStrategies: ProviderStrategy[] = ["serper", "fallback", "parallel"]; + + it("for each Serper-dependent strategy, when SERPER_API_KEY is unset, providerUsed is tavily and no throw", async () => { + await fc.assert( + fc.asyncProperty( + fc.constantFrom(...serperDependentStrategies), + subQueriesArb, + async (strategy, subQueries) => { + const fetchSpy = jest.spyOn(globalThis, "fetch").mockImplementation((input: RequestInfo | URL) => { + const url = typeof input === "string" ? input : input instanceof URL ? 
input.href : (input as Request).url; + if (url === TAVILY_URL) { + return Promise.resolve({ + ok: true, + text: async () => "", + json: async () => ({ + results: [{ url: "https://tavily.com/1", title: "T", content: "C", score: 0.9 }], + }), + } as Response); + } + if (url === SERPER_URL) { + return Promise.resolve({ + ok: true, + text: async () => "", + json: async () => ({ news: [] }), + } as Response); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + + env.server.SERPER_API_KEY = undefined as unknown as string; + const warnSpy = jest.spyOn(console, "warn").mockImplementation(); + + const { providerUsed } = await executeSearch(subQueries, strategy); + + warnSpy.mockRestore(); + fetchSpy.mockRestore(); + + expect(providerUsed).toBe("tavily"); + } + ), + { numRuns: 30 } + ); + }); +}); diff --git a/__tests__/api/trendSearch/search-strategy.test.ts b/__tests__/api/trendSearch/search-strategy.test.ts new file mode 100644 index 00000000..f3fdcae1 --- /dev/null +++ b/__tests__/api/trendSearch/search-strategy.test.ts @@ -0,0 +1,289 @@ +/** + * Unit tests for executeSearch strategy logic. 
+ * Feature: Serper dual-channel search — Task 4.2 + * Requirements: 3.1, 3.2, 3.3, 3.4, 3.5, 4.3, 6.1, 6.2 + */ + +const TAVILY_URL = "https://api.tavily.com/search"; +const SERPER_URL = "https://google.serper.dev/news"; + +jest.mock("~/env", () => { + const server = { + TAVILY_API_KEY: "test-tavily-key", + SERPER_API_KEY: "test-serper-key", + SEARCH_PROVIDER: undefined as "tavily" | "serper" | "fallback" | "parallel" | undefined, + }; + return { env: { server } }; +}); + +import { env } from "~/env"; +import { executeSearch } from "~/lib/tools/trend-search/web-search"; +import type { PlannedQuery } from "~/lib/tools/trend-search/types"; + +const subQueries: PlannedQuery[] = [ + { searchQuery: "test query", category: "tech", rationale: "test" }, +]; + +function tavilyResponse(results: { url: string; title?: string; content?: string; score?: number }[]) { + return { + ok: true, + text: async () => "", + json: async () => ({ results }), + } as Response; +} + +function serperResponse(items: { link: string; title?: string; snippet?: string; score?: number }[]) { + return { + ok: true, + text: async () => "", + json: async () => ({ + news: items.map((item) => ({ + link: item.link, + title: item.title ?? "Untitled", + snippet: item.snippet ?? 
"", + position: 1, + })), + }), + } as Response; +} + +function getFetchCallsByUrl(fetchSpy: jest.SpyInstance): { tavily: number; serper: number } { + const calls = fetchSpy.mock.calls as [string, unknown][]; + let tavily = 0; + let serper = 0; + for (const [url] of calls) { + if (url === TAVILY_URL) tavily++; + if (url === SERPER_URL) serper++; + } + return { tavily, serper }; +} + +describe("executeSearch strategy logic", () => { + let fetchSpy: jest.SpyInstance; + + beforeEach(() => { + env.server.TAVILY_API_KEY = "test-tavily-key"; + env.server.SERPER_API_KEY = "test-serper-key"; + env.server.SEARCH_PROVIDER = undefined; + fetchSpy = jest.spyOn(globalThis, "fetch").mockImplementation((input: RequestInfo | URL) => { + const url = typeof input === "string" ? input : input instanceof URL ? input.href : (input as Request).url; + if (url === TAVILY_URL) { + return Promise.resolve(tavilyResponse([{ url: "https://tavily.com/1", title: "T", content: "C", score: 0.9 }])); + } + if (url === SERPER_URL) { + return Promise.resolve(serperResponse([{ link: "https://serper.com/1", title: "S", snippet: "Snippet" }])); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + }); + + afterEach(() => { + fetchSpy.mockRestore(); + }); + + describe("default strategy (no env) uses Tavily only", () => { + it("when SEARCH_PROVIDER is unset, only Tavily is called and providerUsed is tavily", async () => { + env.server.SEARCH_PROVIDER = undefined; + env.server.SERPER_API_KEY = "test-serper-key"; + + const { results, providerUsed } = await executeSearch(subQueries); + + expect(providerUsed).toBe("tavily"); + const { tavily, serper } = getFetchCallsByUrl(fetchSpy); + expect(tavily).toBe(1); + expect(serper).toBe(0); + expect(results).toHaveLength(1); + expect(results[0]!.url).toBe("https://tavily.com/1"); + }); + }); + + describe('"serper" strategy uses Serper only', () => { + it("when strategy is serper, only Serper is called and providerUsed is serper", async () => { + 
env.server.SEARCH_PROVIDER = "serper"; + + const { results, providerUsed } = await executeSearch(subQueries); + + expect(providerUsed).toBe("serper"); + const { tavily, serper } = getFetchCallsByUrl(fetchSpy); + expect(tavily).toBe(0); + expect(serper).toBe(1); + expect(results).toHaveLength(1); + expect(results[0]!.url).toBe("https://serper.com/1"); + }); + }); + + describe('"fallback" strategy tries Serper first, falls back to Tavily on total failure', () => { + it("when Serper returns no results for all sub-queries, Tavily is called and providerUsed is tavily", async () => { + env.server.SEARCH_PROVIDER = "fallback"; + let callCount = 0; + fetchSpy.mockImplementation((input: RequestInfo | URL) => { + callCount++; + const url = typeof input === "string" ? input : input instanceof URL ? input.href : (input as Request).url; + if (url === SERPER_URL) { + return Promise.resolve(serperResponse([])); // empty + } + if (url === TAVILY_URL) { + return Promise.resolve(tavilyResponse([{ url: "https://tavily.com/fallback", title: "T", content: "C", score: 0.8 }])); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + + const { results, providerUsed } = await executeSearch(subQueries); + + expect(providerUsed).toBe("tavily"); + const { tavily, serper } = getFetchCallsByUrl(fetchSpy); + expect(serper).toBe(1); + expect(tavily).toBe(1); + expect(results).toHaveLength(1); + expect(results[0]!.url).toBe("https://tavily.com/fallback"); + }); + }); + + describe('"fallback" strategy does NOT fall back when Serper returns results', () => { + it("when Serper returns results, Tavily is not called and providerUsed is serper", async () => { + env.server.SEARCH_PROVIDER = "fallback"; + fetchSpy.mockImplementation((input: RequestInfo | URL) => { + const url = typeof input === "string" ? input : input instanceof URL ? 
input.href : (input as Request).url; + if (url === SERPER_URL) { + return Promise.resolve(serperResponse([{ link: "https://serper.com/ok", title: "S", snippet: "S" }])); + } + if (url === TAVILY_URL) { + return Promise.resolve(tavilyResponse([{ url: "https://tavily.com/1", title: "T", content: "C", score: 0.9 }])); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + + const { results, providerUsed } = await executeSearch(subQueries); + + expect(providerUsed).toBe("serper"); + const { tavily, serper } = getFetchCallsByUrl(fetchSpy); + expect(serper).toBe(1); + expect(tavily).toBe(0); + expect(results).toHaveLength(1); + expect(results[0]!.url).toBe("https://serper.com/ok"); + }); + }); + + describe('"parallel" strategy calls both providers and merges results', () => { + it("when strategy is parallel, both Serper and Tavily are called and providerUsed is tavily+serper", async () => { + env.server.SEARCH_PROVIDER = "parallel"; + fetchSpy.mockImplementation((input: RequestInfo | URL) => { + const url = typeof input === "string" ? input : input instanceof URL ? 
input.href : (input as Request).url; + if (url === SERPER_URL) { + return Promise.resolve(serperResponse([{ link: "https://serper.com/1", title: "S", snippet: "S" }])); + } + if (url === TAVILY_URL) { + return Promise.resolve(tavilyResponse([{ url: "https://tavily.com/1", title: "T", content: "C", score: 0.9 }])); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + + const { results, providerUsed } = await executeSearch(subQueries); + + expect(providerUsed).toBe("tavily+serper"); + const { tavily, serper } = getFetchCallsByUrl(fetchSpy); + expect(serper).toBe(1); + expect(tavily).toBe(1); + expect(results).toHaveLength(2); + const urls = results.map((r) => r.url).sort(); + expect(urls).toEqual(["https://serper.com/1", "https://tavily.com/1"]); + }); + }); + + describe('"parallel" strategy deduplicates by URL (first provider wins)', () => { + it("when both providers return the same URL, Serper row is kept — scores are not comparable across providers", async () => { + env.server.SEARCH_PROVIDER = "parallel"; + const sameUrl = "https://example.com/same"; + fetchSpy.mockImplementation((input: RequestInfo | URL) => { + const url = typeof input === "string" ? input : input instanceof URL ? input.href : (input as Request).url; + if (url === SERPER_URL) { + return Promise.resolve( + serperResponse([{ link: sameUrl, title: "From Serper", snippet: "S" }]) + ); + } + if (url === TAVILY_URL) { + return Promise.resolve( + tavilyResponse([{ url: sameUrl, title: "From Tavily", content: "C", score: 0.95 }]) + ); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + + const { results, providerUsed } = await executeSearch(subQueries); + + expect(providerUsed).toBe("tavily+serper"); + expect(results).toHaveLength(1); + expect(results[0]!.url).toBe(sameUrl); + // One Serper hit: rank 1 of 1 → score 0 (see serper adapter); Tavily is ignored for this URL. 
+ expect(results[0]!.score).toBe(0); + expect(results[0]!.title).toBe("From Serper"); + }); + }); + + describe('missing Serper key with "serper" strategy downgrades to "tavily"', () => { + it("when SEARCH_PROVIDER is serper but SERPER_API_KEY is unset, only Tavily is called and providerUsed is tavily", async () => { + env.server.SEARCH_PROVIDER = "serper"; + env.server.SERPER_API_KEY = undefined as unknown as string; + const warnSpy = jest.spyOn(console, "warn").mockImplementation(); + + const { results, providerUsed } = await executeSearch(subQueries); + + expect(providerUsed).toBe("tavily"); + const { tavily, serper } = getFetchCallsByUrl(fetchSpy); + expect(tavily).toBe(1); + expect(serper).toBe(0); + expect(results).toHaveLength(1); + expect(results[0]!.url).toBe("https://tavily.com/1"); + expect(warnSpy).toHaveBeenCalledWith( + "[web-search] SERPER_API_KEY not set; downgrading strategy to tavily." + ); + warnSpy.mockRestore(); + }); + }); + + describe("missing TAVILY_API_KEY on tavily path", () => { + it("returns empty results, providerUsed none, and does not call Tavily API", async () => { + env.server.SEARCH_PROVIDER = undefined; + env.server.TAVILY_API_KEY = undefined as unknown as string; + env.server.SERPER_API_KEY = "test-serper-key"; + const warnSpy = jest.spyOn(console, "warn").mockImplementation(); + + const { results, providerUsed } = await executeSearch(subQueries); + + expect(providerUsed).toBe("none"); + expect(results).toHaveLength(0); + const { tavily, serper } = getFetchCallsByUrl(fetchSpy); + expect(tavily).toBe(0); + expect(serper).toBe(0); + + warnSpy.mockRestore(); + }); + }); + + describe('"parallel" with Serper key but no Tavily key', () => { + it("uses providerUsed serper and only Serper fetch is made", async () => { + env.server.SEARCH_PROVIDER = "parallel"; + env.server.SERPER_API_KEY = "test-serper-key"; + env.server.TAVILY_API_KEY = undefined as unknown as string; + fetchSpy.mockImplementation((input: RequestInfo | URL) => { + const 
url = typeof input === "string" ? input : input instanceof URL ? input.href : (input as Request).url; + if (url === SERPER_URL) { + return Promise.resolve(serperResponse([{ link: "https://serper.com/1", title: "S", snippet: "S" }])); + } + if (url === TAVILY_URL) { + return Promise.resolve(tavilyResponse([{ url: "https://tavily.com/1", title: "T", content: "C", score: 0.9 }])); + } + return Promise.reject(new Error(`Unexpected URL: ${url}`)); + }); + + const { results, providerUsed } = await executeSearch(subQueries); + + expect(providerUsed).toBe("serper"); + expect(results).toHaveLength(1); + expect(results[0]!.url).toBe("https://serper.com/1"); + const { tavily, serper } = getFetchCallsByUrl(fetchSpy); + expect(serper).toBe(1); + expect(tavily).toBe(0); + }); + }); +}); diff --git a/__tests__/api/trendSearch/serper-adapter.test.ts b/__tests__/api/trendSearch/serper-adapter.test.ts new file mode 100644 index 00000000..c532ef10 --- /dev/null +++ b/__tests__/api/trendSearch/serper-adapter.test.ts @@ -0,0 +1,216 @@ +/** + * Unit tests for Serper adapter (callSerper). 
+ * Feature: Serper dual-channel search — Task 4.1 + */ + +const mockFetch = jest.fn(); + +jest.mock("~/env", () => ({ + env: { + server: { + SERPER_API_KEY: "test-serper-key", + }, + }, +})); + +import { env } from "~/env"; +import { callSerper } from "~/lib/tools/trend-search/providers/serper"; + +const mockServer = env.server as { SERPER_API_KEY?: string | undefined }; + +beforeEach(() => { + mockServer.SERPER_API_KEY = "test-serper-key"; + mockFetch.mockReset(); + jest.spyOn(globalThis, "fetch").mockImplementation(mockFetch); +}); + +afterEach(() => { + jest.restoreAllMocks(); +}); + +const SERPER_NEWS_URL = "https://google.serper.dev/news"; + +function makeOkResponse(body: { news?: unknown[] }) { + return { + ok: true, + text: async () => "", + json: async () => body, + } as Response; +} + +function makeErrorResponse(status: number, statusText: string, body = "Error") { + return { + ok: false, + status, + statusText, + text: async () => body, + json: async () => { + throw new Error("not json"); + }, + } as unknown as Response; +} + +describe("callSerper", () => { + describe("valid response normalizes correctly to RawSearchResult[]", () => { + it("maps link→url, title, snippet→content, score, publishedDate", async () => { + mockFetch.mockResolvedValue( + makeOkResponse({ + news: [ + { + title: "AI Trends 2026", + link: "https://example.com/1", + snippet: "Summary here", + date: "2 hours ago", + position: 1, + }, + ], + }), + ); + + const results = await callSerper("AI trends"); + + expect(mockFetch).toHaveBeenCalledWith( + SERPER_NEWS_URL, + expect.objectContaining({ + method: "POST", + headers: { + "X-API-KEY": "test-serper-key", + "Content-Type": "application/json", + }, + body: JSON.stringify({ q: "AI trends", num: 10, gl: "us", hl: "en" }), + }), + ); + expect(results).toHaveLength(1); + const r = results[0]!; + expect(r.url).toBe("https://example.com/1"); + expect(r.title).toBe("AI Trends 2026"); + expect(r.content).toBe("Summary here"); + 
expect(r.score).toBeDefined(); + expect(typeof r.score).toBe("number"); + expect(r.publishedDate).toBe("2 hours ago"); + }); + + it("uses Untitled and empty string when title/snippet missing", async () => { + mockFetch.mockResolvedValue( + makeOkResponse({ + news: [{ link: "https://example.com/2" }], + }), + ); + + const results = await callSerper("query"); + + expect(results).toHaveLength(1); + expect(results[0]!.title).toBe("Untitled"); + expect(results[0]!.content).toBe(""); + }); + }); + + describe("missing SERPER_API_KEY returns empty array", () => { + it("returns [] and does not call fetch when key is undefined", async () => { + const warnSpy = jest.spyOn(console, "warn").mockImplementation(); + mockServer.SERPER_API_KEY = undefined; + + const results = await callSerper("query"); + + expect(results).toEqual([]); + expect(mockFetch).not.toHaveBeenCalled(); + warnSpy.mockRestore(); + }); + }); + + describe("non-2xx response throws error", () => { + it("throws with status and body text on 500", async () => { + mockFetch.mockResolvedValue( + makeErrorResponse(500, "Internal Server Error", "Server down"), + ); + + await expect(callSerper("query")).rejects.toThrow( + /Serper API error: 500 Internal Server Error.*Server down/, + ); + }); + + it("throws on 401", async () => { + mockFetch.mockResolvedValue( + makeErrorResponse(401, "Unauthorized", "Invalid key"), + ); + + await expect(callSerper("query")).rejects.toThrow(/Serper API error: 401/); + }); + }); + + describe("empty news array returns empty results", () => { + it("returns [] when news is empty array", async () => { + mockFetch.mockResolvedValue(makeOkResponse({ news: [] })); + + const results = await callSerper("query"); + + expect(results).toEqual([]); + }); + + it("returns [] when news is missing", async () => { + mockFetch.mockResolvedValue(makeOkResponse({})); + + const results = await callSerper("query"); + + expect(results).toEqual([]); + }); + }); + + describe("positional score calculation is 
correct", () => { + it("first item has highest score, last has lowest (score = 1 - position/total)", async () => { + mockFetch.mockResolvedValue( + makeOkResponse({ + news: [ + { link: "https://a.com", position: 1 }, + { link: "https://b.com", position: 2 }, + { link: "https://c.com", position: 3 }, + ], + }), + ); + + const results = await callSerper("query"); + + expect(results).toHaveLength(3); + const total = 3; + expect(results[0]!.score).toBeCloseTo(1 - 1 / total); + expect(results[1]!.score).toBeCloseTo(1 - 2 / total); + expect(results[2]!.score).toBeCloseTo(1 - 3 / total); + expect(results[0]!.score).toBeGreaterThan(results[1]!.score); + expect(results[1]!.score).toBeGreaterThan(results[2]!.score); + }); + + it("uses rank within items that have links only", async () => { + mockFetch.mockResolvedValue( + makeOkResponse({ + news: [ + { link: "https://a.com" }, + { title: "no link — skipped" }, + { link: "https://b.com" }, + ], + }), + ); + + const results = await callSerper("query"); + + expect(results).toHaveLength(2); + expect(results[0]!.url).toBe("https://a.com"); + expect(results[1]!.url).toBe("https://b.com"); + expect(results[0]!.score).toBeCloseTo(1 - 1 / 2); + expect(results[1]!.score).toBeCloseTo(1 - 2 / 2); + }); + + it("uses index+1 when position is missing", async () => { + mockFetch.mockResolvedValue( + makeOkResponse({ + news: [{ link: "https://a.com" }, { link: "https://b.com" }], + }), + ); + + const results = await callSerper("query"); + + expect(results).toHaveLength(2); + expect(results[0]!.score).toBeCloseTo(1 - 1 / 2); + expect(results[1]!.score).toBeCloseTo(1 - 2 / 2); + }); + }); +}); diff --git a/__tests__/api/trendSearch/synthesizer.pbt.test.ts b/__tests__/api/trendSearch/synthesizer.pbt.test.ts index 7ede35e7..185b7a78 100644 --- a/__tests__/api/trendSearch/synthesizer.pbt.test.ts +++ b/__tests__/api/trendSearch/synthesizer.pbt.test.ts @@ -30,11 +30,11 @@ const categoryArb = fc.constantFrom(...validCategories); const validQueryArb = 
fc .string({ minLength: 1, maxLength: 1000 }) - .filter((s) => s.trim().length > 0); + .filter((s: string) => s.trim().length > 0); const validCompanyContextArb = fc .string({ minLength: 1, maxLength: 2000 }) - .filter((s) => s.trim().length > 0); + .filter((s: string) => s.trim().length > 0); /** Single raw result (URL must be unique for traceability). */ const rawResultArb = fc.record({ @@ -72,7 +72,12 @@ describe("Property 7: Synthesizer output structure", () => { validQueryArb, validCompanyContextArb, fc.array(categoryArb, { minLength: 0, maxLength: 4 }), - async (rawResults, query, companyContext, categories) => { + async ( + rawResults: RawSearchResult[], + query: string, + companyContext: string, + categories: SearchCategory[] + ) => { const mockResults = buildMockResults(rawResults, 5); mockInvoke.mockResolvedValue({ results: mockResults }); @@ -115,7 +120,12 @@ describe("Property 8: Source URL traceability", () => { validQueryArb, validCompanyContextArb, fc.array(categoryArb, { minLength: 0, maxLength: 4 }), - async (rawResults, query, companyContext, categories) => { + async ( + rawResults: RawSearchResult[], + query: string, + companyContext: string, + categories: SearchCategory[] + ) => { const urlSet = new Set(rawResults.map((r) => r.url)); const mockResults = buildMockResults(rawResults, 5); mockInvoke.mockResolvedValue({ results: mockResults }); diff --git a/__tests__/api/trendSearch/web-search.pbt.test.ts b/__tests__/api/trendSearch/web-search.pbt.test.ts index 6347a2f3..41871d6a 100644 --- a/__tests__/api/trendSearch/web-search.pbt.test.ts +++ b/__tests__/api/trendSearch/web-search.pbt.test.ts @@ -112,11 +112,11 @@ describe("Unit: one sub-query returns 0 results, pipeline continues", () => { ); }); - const result = await executeSearch(subQueries); + const { results } = await executeSearch(subQueries); expect(fetchSpy).toHaveBeenCalledTimes(3); - expect(result.results).toHaveLength(2); - expect(result.results.map((r) => 
r.url)).toEqual(["https://b.com", "https://c.com"]); + expect(results).toHaveLength(2); + expect(results.map((r) => r.url)).toEqual(["https://b.com", "https://c.com"]); }); }); @@ -151,14 +151,14 @@ describe("Unit: Tavily fails, retries 2 times then marks sub-query failed", () = const consoleErrorSpy = jest.spyOn(console, "error").mockImplementation(() => {}); const consoleWarnSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); - const result = await executeSearch(subQueries); + const { results } = await executeSearch(subQueries); consoleErrorSpy.mockRestore(); consoleWarnSpy.mockRestore(); // 1 + 2 retries for first sub-query, then 1 for second expect(fetchSpy).toHaveBeenCalledTimes(4); - expect(result.results).toHaveLength(1); - expect(result.results[0].url).toBe("https://ok.com"); + expect(results).toHaveLength(1); + expect(results[0]!.url).toBe("https://ok.com"); }); }); diff --git a/docker-compose.yml b/docker-compose.yml index 7b41ca40..aa831c4e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -77,7 +77,9 @@ services: LANDING_AI_API_KEY: ${LANDING_AI_API_KEY:-} # Anthropic / Google (optional — enables Claude and Gemini models) ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-} + ANTHROPIC_MODEL: ${ANTHROPIC_MODEL:-} GOOGLE_AI_API_KEY: ${GOOGLE_AI_API_KEY:-} + GOOGLE_MODEL: ${GOOGLE_MODEL:-} # Ollama (optional — set OLLAMA_BASE_URL to an Ollama instance) OLLAMA_BASE_URL: ${OLLAMA_BASE_URL:-} OLLAMA_MODEL: ${OLLAMA_MODEL:-} diff --git a/docs/deployment.md b/docs/deployment.md index 92427abc..55b4a16f 100644 --- a/docs/deployment.md +++ b/docs/deployment.md @@ -65,6 +65,10 @@ Optional integrations: - LangSmith for tracing - Sidecar (deploy separately and set `SIDECAR_URL`) +### Trend search (optional) + +Trend search calls external search APIs. Configure `TAVILY_API_KEY` and/or `SERPER_API_KEY` and set `SEARCH_PROVIDER` as documented in [`.env.example`](../.env.example) (`tavily`, `serper`, `fallback`, or `parallel`). 
If no API key backs the chosen path, the pipeline returns empty results and `providerUsed` may be `none`—this is expected when keys are omitted for local or OSS setups. + ### Verifying Blob uploads on Vercel 1. After deploy, sign in to the Employer portal and open `/employer/upload`. @@ -98,7 +102,9 @@ Optional: Run the sidecar separately and point `SIDECAR_URL` to it. | `BLOB_READ_WRITE_TOKEN` | Yes (Vercel) | Required for Vercel Blob uploads | | `UPLOADTHING_TOKEN` | Optional | UploadThing legacy uploader | | `SIDECAR_URL` | Optional | Sidecar URL for reranking and Graph RAG | -| `TAVILY_API_KEY` | Optional | Web search for analysis | +| `TAVILY_API_KEY` | Optional | Tavily (trend search); required for `tavily` / `fallback` / `parallel` when using Tavily | +| `SERPER_API_KEY` | Optional | Serper Google News (trend search); required for `serper` / `fallback` / `parallel` when using Serper | +| `SEARCH_PROVIDER` | Optional | `tavily` (default), `serper`, `fallback`, or `parallel` — see `.env.example` | | `AZURE_DOC_INTELLIGENCE_*` | Optional | OCR for scanned PDFs | | `DATALAB_API_KEY` | Optional | Alternative OCR | | `LANDING_AI_API_KEY` | Optional | Fallback OCR | diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 61d83649..a0827d95 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14442,7 +14442,7 @@ snapshots: isstream: 0.1.2 jsonwebtoken: 9.0.3 mime-types: 2.1.35 - retry-axios: 2.6.0(axios@1.7.4) + retry-axios: 2.6.0(axios@1.7.4(debug@4.4.3)) tough-cookie: 4.1.4 transitivePeerDependencies: - supports-color @@ -16762,7 +16762,7 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 - retry-axios@2.6.0(axios@1.7.4): + retry-axios@2.6.0(axios@1.7.4(debug@4.4.3)): dependencies: axios: 1.7.4(debug@4.4.3) diff --git a/scripts/test-trend-search.ts b/scripts/test-trend-search.ts index 791cd048..d72910b8 100644 --- a/scripts/test-trend-search.ts +++ b/scripts/test-trend-search.ts @@ -3,11 +3,17 @@ * Calls the real OpenAI + Tavily APIs — no DB, no 
auth, no Inngest. * * Usage: + * + * 1. Tavily * npx tsx scripts/test-trend-search.ts + * + * 2. Serper + * SEARCH_PROVIDER="serper" npx tsx scripts/test-trend-search.ts + * * Required env vars (reads from .env automatically via dotenv): * OPENAI_API_KEY - * TAVILY_API_KEY + * TAVILY_API_KEY (if using Tavily) + * SERPER_API_KEY (if using Serper) * * * ┌─────────────────────────────────────────────────────────────────────────────┐ @@ -15,12 +21,13 @@ * │ │ * │ Required env vars: │ * │ - OPENAI_API_KEY │ - * │ - TAVILY_API_KEY │ + * │ - TAVILY_API_KEY (if using Tavily) │ + * │ - SERPER_API_KEY (if using Serper) │ * │ │ * │ Sample output: │ * └─────────────────────────────────────────────────────────────────────────────┘ * -─── Input ─── +─── Input ─── (tavily) { "query": "latest AI trends in retail marketing", "companyContext": "We are a mid-size fashion retailer focused on Gen Z customers in the US market.", @@ -73,6 +80,61 @@ Running pipeline (plan → search → synthesize)… "createdAt": "2026-02-23T08:41:22.609Z" } } + +─── Input ─── (serper) +{ + "query": "latest AI trends in retail marketing", + "companyContext": "We are a mid-size fashion retailer focused on Gen Z customers in the US market.", + "categories": [ + "fashion", + "tech" + ] +} + +Running pipeline (plan → search → synthesize)… + + ⏳ stage: searching +[trend-search] Search provider used: serper + ⏳ stage: synthesizing +─── Output ─── +{ + "results": [ + { + "sourceUrl": "https://www.businessoffashion.com/articles/marketing-pr/fashions-new-ai-marketing-toolkit/", + "summary": "Fashion brands are leveraging AI tools to enhance marketing campaigns.", + "description": "This article discusses how various fashion brands are utilizing AI to create faster, cheaper, and more personalized marketing campaigns. 
Given your focus on Gen Z customers, understanding these AI marketing strategies can help your mid-size fashion retail company effectively engage with this demographic, which values personalization and innovative marketing approaches." + }, + { + "sourceUrl": "https://www.mckinsey.com/industries/retail/our-insights/merchants-unleashed-how-agentic-ai-transforms-retail-merchandising", + "summary": "AI is transforming retail merchandising, allowing for more efficient operations.", + "description": "This McKinsey article highlights how AI is reshaping retail merchandising, enabling retailers to optimize their operations and better meet consumer demands. As a fashion retailer targeting Gen Z, adopting AI-driven merchandising strategies can enhance your inventory management and customer engagement, aligning with the preferences of this tech-savvy generation." + }, + { + "sourceUrl": "https://www.retailtouchpoints.com/executive-viewpoints/how-ai-can-handle-the-3-biggest-gen-z-retail-trends-that-shaped-the-2025-holiday-season/156558/", + "summary": "AI can address key retail trends influencing Gen Z shopping behaviors.", + "description": "This piece explores how AI can help retailers adapt to the evolving trends that shape Gen Z's shopping habits. It provides insights into the values and preferences of Gen Z consumers, which is crucial for your company as you aim to connect with this audience through effective marketing and product offerings." + }, + { + "sourceUrl": "https://www.mckinsey.com/industries/consumer-packaged-goods/our-insights/the-state-of-the-us-consumer", + "summary": "AI is influencing US consumer spending trends, particularly among younger demographics.", + "description": "This report from McKinsey discusses the impact of AI on consumer spending, particularly highlighting how younger consumers, including Gen Z, are embracing AI-supported shopping experiences. 
Understanding these trends can help your fashion retail business tailor its marketing strategies to better resonate with Gen Z customers." + }, + { + "sourceUrl": "https://www.deloitte.com/us/en/Industries/consumer/articles/future-of-fashion-omnichannel-strategies.html", + "summary": "Digital transformation and omnichannel strategies are crucial for future fashion retail.", + "description": "This article outlines how digital transformation and omnichannel strategies are shaping the future of fashion retail. For a mid-size fashion retailer focused on Gen Z, leveraging these strategies can enhance customer experiences and improve engagement across various platforms, aligning with the expectations of today's consumers." + } + ], + "metadata": { + "query": "latest AI trends in retail marketing", + "companyContext": "We are a mid-size fashion retailer focused on Gen Z customers in the US market.", + "categories": [ + "fashion", + "tech" + ], + "createdAt": "2026-03-08T21:31:39.644Z" + } +} */ import dotenv from "dotenv"; diff --git a/src/app/api/agents/documentQ&A/services/models.ts b/src/app/api/agents/documentQ&A/services/models.ts index 40d46316..ffe50b37 100644 --- a/src/app/api/agents/documentQ&A/services/models.ts +++ b/src/app/api/agents/documentQ&A/services/models.ts @@ -106,4 +106,3 @@ export function getEmbeddings(): OpenAIEmbeddings { openAIApiKey: process.env.OPENAI_API_KEY, }); } - diff --git a/src/app/api/company/metadata/extract/route.ts b/src/app/api/company/metadata/extract/route.ts index f0823e99..887949f6 100644 --- a/src/app/api/company/metadata/extract/route.ts +++ b/src/app/api/company/metadata/extract/route.ts @@ -1,18 +1,16 @@ /** - * Demo endpoint — Company Metadata Extraction + * POST /api/company/metadata/extract * - * Processes ALL documents for the logged-in user's company, extracts - * metadata from each, and merges them into a single canonical JSON. - * No DB writes — returns the result directly. 
+ * Extracts metadata from the logged-in user's company documents and merges + * them into a single canonical JSON. + * + * By default, only processes documents uploaded AFTER the last extraction + * (incremental mode). Use `{ "force": true }` to re-process all documents. * * Usage: * POST /api/company/metadata/extract - * (no body required — uses the authenticated user's company) - * + * POST /api/company/metadata/extract body: { "force": true } * POST /api/company/metadata/extract body: { "debug": true } - * (returns per-document diagnostics instead of running extraction) - * - * Returns the full CompanyMetadataJSON + aggregated diff. */ import { NextResponse } from "next/server"; @@ -21,7 +19,7 @@ import { eq, sql } from "drizzle-orm"; import { db } from "~/server/db"; import { users, document as documentTable, documentContextChunks } from "~/server/db/schema"; -import { companyMetadata } from "~/server/db/schema/company-metadata"; +import { companyMetadata, companyMetadataHistory } from "~/server/db/schema/company-metadata"; import { extractCompanyFacts } from "~/lib/tools/company-metadata/extractor"; import { mergeCompanyMetadata } from "~/lib/tools/company-metadata/merger"; import { createEmptyMetadata } from "~/lib/tools/company-metadata/types"; @@ -29,7 +27,6 @@ import type { CompanyMetadataJSON, MetadataDiff } from "~/lib/tools/company-meta export async function POST(request: Request) { try { - // Auth const { userId } = await auth(); if (!userId) { return NextResponse.json( @@ -52,118 +49,181 @@ export async function POST(request: Request) { const companyId = String(userInfo.companyId); - // Check for debug mode + // Parse optional body flags let debug = false; + let force = false; try { - const body = (await request.json()) as { debug?: boolean }; + const body = (await request.json()) as { debug?: boolean; force?: boolean }; debug = body.debug === true; + force = body.force === true; } catch { - // No body or invalid JSON — that's fine + // No body or 
invalid JSON — that's fine, defaults apply } - // Find all documents for this company - const docs = await db - .select({ id: documentTable.id, title: documentTable.title }) - .from(documentTable) - .where(eq(documentTable.companyId, userInfo.companyId)); + // Load existing metadata row (for incremental extraction) + const [existingRow] = await db + .select({ + metadata: companyMetadata.metadata, + lastExtractionDocumentId: companyMetadata.lastExtractionDocumentId, + }) + .from(companyMetadata) + .where(eq(companyMetadata.companyId, userInfo.companyId)); + + // Build document query — incremental by default, full if force=true or no prior extraction + const lastDocId = existingRow?.lastExtractionDocumentId; + const isIncremental = !force && lastDocId != null; + + const docs = isIncremental + ? await db + .select({ id: documentTable.id, title: documentTable.title }) + .from(documentTable) + .where( + sql`${documentTable.companyId} = ${userInfo.companyId} AND ${documentTable.id} > ${lastDocId}`, + ) + : await db + .select({ id: documentTable.id, title: documentTable.title }) + .from(documentTable) + .where(eq(documentTable.companyId, userInfo.companyId)); if (docs.length === 0) { return NextResponse.json({ - message: "No documents found for this company", - metadata: null, + message: isIncremental + ? "No new documents since last extraction" + : "No documents found for this company", + metadata: isIncremental ? existingRow?.metadata ?? null : null, documentsProcessed: 0, + incremental: isIncremental, }); } - // Debug mode: return per-document chunk counts without running extraction - if (debug) { - const diagnostics = []; - for (const doc of docs) { - const [row] = await db - .select({ count: sql`count(*)` }) - .from(documentContextChunks) - .where(eq(documentContextChunks.documentId, BigInt(doc.id))); - diagnostics.push({ - documentId: doc.id, - title: doc.title, - chunkCount: Number(row?.count ?? 
0), - }); - } - return NextResponse.json({ - companyId, - totalDocuments: docs.length, - documents: diagnostics, - documentsWithChunks: diagnostics.filter((d) => d.chunkCount > 0).length, - }); - } + // For incremental: merge into existing. For full: start fresh (force) or merge into existing. + const baseMetadata = force ? null : existingRow?.metadata ?? null; - // Process each document sequentially, merging into canonical metadata - let metadata: CompanyMetadataJSON = createEmptyMetadata(companyId); - const allDiffs: MetadataDiff = { added: [], updated: [], deprecated: [] }; - let documentsWithFacts = 0; + return processDocuments(docs, companyId, userInfo.companyId, baseMetadata, debug, isIncremental, userId); + } catch (error) { + console.error("[company-metadata] POST /extract error:", error); + return NextResponse.json( + { error: "Internal server error" }, + { status: 500 }, + ); + } +} +async function processDocuments( + docs: Array<{ id: number; title: string }>, + companyId: string, + companyIdBigint: bigint, + existingMetadata: CompanyMetadataJSON | null, + debug: boolean, + incremental: boolean, + userId: string, +) { + // Debug mode: return per-document chunk counts without running extraction + if (debug) { + const diagnostics = []; for (const doc of docs) { - const extracted = await extractCompanyFacts({ + const [row] = await db + .select({ count: sql`count(*)` }) + .from(documentContextChunks) + .where(eq(documentContextChunks.documentId, BigInt(doc.id))); + diagnostics.push({ documentId: doc.id, - companyId, + title: doc.title, + chunkCount: Number(row?.count ?? 0), }); + } + return NextResponse.json({ + companyId, + incremental, + totalDocuments: docs.length, + documents: diagnostics, + documentsWithChunks: diagnostics.filter((d) => d.chunkCount > 0).length, + }); + } - if (!extracted) continue; + // Start from existing metadata (incremental) or empty (full re-extract) + let metadata: CompanyMetadataJSON = existingMetadata ?? 
createEmptyMetadata(companyId); + const allDiffs: MetadataDiff = { added: [], updated: [], deprecated: [] }; + let documentsWithFacts = 0; + let lastDocId = 0; - const { updatedMetadata, diff } = mergeCompanyMetadata( - metadata, - extracted, - ); + for (const doc of docs) { + const extracted = await extractCompanyFacts({ + documentId: doc.id, + companyId, + }); - metadata = updatedMetadata; - allDiffs.added.push(...diff.added); - allDiffs.updated.push(...diff.updated); - allDiffs.deprecated.push(...diff.deprecated); - documentsWithFacts++; - } + if (!extracted) continue; - if (documentsWithFacts === 0) { - return NextResponse.json({ - message: "No extractable company facts found in any document", - metadata: null, - documentsProcessed: docs.length, - }); - } + const { updatedMetadata, diff } = mergeCompanyMetadata( + metadata, + extracted, + ); - // Save to database - await db - .insert(companyMetadata) - .values({ - companyId: userInfo.companyId, - metadata: metadata, - }) - .onConflictDoUpdate({ - target: companyMetadata.companyId, - set: { - metadata: metadata, - }, - }); + metadata = updatedMetadata; + allDiffs.added.push(...diff.added); + allDiffs.updated.push(...diff.updated); + allDiffs.deprecated.push(...diff.deprecated); + documentsWithFacts++; + lastDocId = Math.max(lastDocId, doc.id); + } + if (documentsWithFacts === 0 && !existingMetadata) { return NextResponse.json({ - metadata, + message: "No extractable company facts found in any document", + metadata: null, documentsProcessed: docs.length, - documentsWithFacts, - diff: { - added: allDiffs.added, - updated: allDiffs.updated, - deprecated: allDiffs.deprecated, - summary: { - added: allDiffs.added.length, - updated: allDiffs.updated.length, - deprecated: allDiffs.deprecated.length, - }, + incremental, + }); + } + + // Update provenance + metadata.provenance.total_documents_processed = + (existingMetadata?.provenance.total_documents_processed ?? 0) + + (incremental ? 
documentsWithFacts : documentsWithFacts); + + // Save to database with lastExtractionDocumentId tracking + await db + .insert(companyMetadata) + .values({ + companyId: companyIdBigint, + metadata: metadata, + ...(lastDocId > 0 && { lastExtractionDocumentId: BigInt(lastDocId) }), + }) + .onConflictDoUpdate({ + target: companyMetadata.companyId, + set: { + metadata: metadata, + ...(lastDocId > 0 && { lastExtractionDocumentId: BigInt(lastDocId) }), }, }); - } catch (error) { - console.error("[company-metadata] POST /extract error:", error); - return NextResponse.json( - { error: "Internal server error" }, - { status: 500 }, - ); + + // Write audit history entry for this extraction + const hasChanges = allDiffs.added.length > 0 || allDiffs.updated.length > 0 || allDiffs.deprecated.length > 0; + if (hasChanges) { + await db.insert(companyMetadataHistory).values({ + companyId: companyIdBigint, + documentId: lastDocId > 0 ? BigInt(lastDocId) : null, + changeType: "extraction", + diff: allDiffs, + changedBy: userId, + }); } + + return NextResponse.json({ + metadata, + documentsProcessed: docs.length, + documentsWithFacts, + incremental, + diff: { + added: allDiffs.added, + updated: allDiffs.updated, + deprecated: allDiffs.deprecated, + summary: { + added: allDiffs.added.length, + updated: allDiffs.updated.length, + deprecated: allDiffs.deprecated.length, + }, + }, + }); } diff --git a/src/app/api/company/metadata/history/route.ts b/src/app/api/company/metadata/history/route.ts new file mode 100644 index 00000000..df108c88 --- /dev/null +++ b/src/app/api/company/metadata/history/route.ts @@ -0,0 +1,56 @@ +/** + * GET /api/company/metadata/history + * + * Returns the audit history for the logged-in user's company metadata. + * Sorted newest-first, limited to 100 entries. 
+ */ + +import { NextResponse } from "next/server"; +import { auth } from "@clerk/nextjs/server"; +import { eq, desc } from "drizzle-orm"; + +import { db } from "~/server/db"; +import { users } from "~/server/db/schema"; +import { companyMetadataHistory } from "~/server/db/schema/company-metadata"; + +export async function GET() { + try { + const { userId } = await auth(); + if (!userId) { + return NextResponse.json({ error: "Unauthorized" }, { status: 401 }); + } + + const [userInfo] = await db + .select({ companyId: users.companyId }) + .from(users) + .where(eq(users.userId, userId)); + + if (!userInfo) { + return NextResponse.json({ error: "User not found" }, { status: 400 }); + } + + const history = await db + .select({ + id: companyMetadataHistory.id, + changeType: companyMetadataHistory.changeType, + diff: companyMetadataHistory.diff, + changedBy: companyMetadataHistory.changedBy, + documentId: companyMetadataHistory.documentId, + createdAt: companyMetadataHistory.createdAt, + }) + .from(companyMetadataHistory) + .where(eq(companyMetadataHistory.companyId, userInfo.companyId)) + .orderBy(desc(companyMetadataHistory.createdAt)) + .limit(100); + + const serializable = history.map((h) => ({ + ...h, + documentId: h.documentId != null ? 
String(h.documentId) : null, + })); + + return NextResponse.json({ history: serializable }); + } catch (error) { + console.error("[company-metadata/history] GET error:", error); + return NextResponse.json({ error: "Internal server error" }, { status: 500 }); + } +} diff --git a/src/app/api/company/metadata/route.ts b/src/app/api/company/metadata/route.ts index e27fb685..4ad228ab 100644 --- a/src/app/api/company/metadata/route.ts +++ b/src/app/api/company/metadata/route.ts @@ -7,10 +7,11 @@ import { NextResponse } from "next/server"; import { auth } from "@clerk/nextjs/server"; import { eq } from "drizzle-orm"; +import { z } from "zod"; import { db } from "~/server/db"; import { users } from "~/server/db/schema"; -import { companyMetadata } from "~/server/db/schema/company-metadata"; +import { companyMetadata, companyMetadataHistory } from "~/server/db/schema/company-metadata"; export async function GET() { try { @@ -65,3 +66,133 @@ export async function GET() { ); } } + +const PatchSchema = z.object({ + path: z.string().min(1), + value: z.string(), +}); + +function buildManualFact(value: string | number, existing?: { visibility?: string; usage?: string }) { + const now = new Date().toISOString(); + return { + value, + visibility: (existing?.visibility as "public" | "internal" | "confidential") ?? ("public" as const), + usage: (existing?.usage as "outreach_ok" | "internal_only" | "do_not_use") ?? 
("outreach_ok" as const), + confidence: 1.0, + priority: "manual_override" as const, + status: "active" as const, + last_updated: now, + sources: [{ doc_id: 0, doc_name: "Manual edit", extracted_at: now }], + }; +} + +export async function PATCH(request: Request) { + try { + const { userId } = await auth(); + if (!userId) { + return NextResponse.json({ error: "Unauthorized" }, { status: 401 }); + } + + const body = await request.json() as unknown; + const parsed = PatchSchema.safeParse(body); + if (!parsed.success) { + return NextResponse.json({ error: "Invalid request body" }, { status: 400 }); + } + const { path, value } = parsed.data; + + const [userInfo] = await db + .select({ companyId: users.companyId }) + .from(users) + .where(eq(users.userId, userId)); + + if (!userInfo) { + return NextResponse.json({ error: "User not found" }, { status: 400 }); + } + + const [existing] = await db + .select({ metadata: companyMetadata.metadata }) + .from(companyMetadata) + .where(eq(companyMetadata.companyId, userInfo.companyId)); + + if (!existing) { + return NextResponse.json( + { error: "No metadata found. Run extraction first." }, + { status: 404 }, + ); + } + + const updatedMetadata = structuredClone(existing.metadata); + const now = new Date().toISOString(); + const segments = path.split("."); + let oldFact: unknown = undefined; + let updatedFact: unknown = undefined; + + if (segments[0] === "company" && segments[1]) { + const field = segments[1]; + const existingFact = updatedMetadata.company[field]; + oldFact = existingFact; + updatedFact = buildManualFact( + field === "founded_year" ? 
Number(value) : value, + existingFact, + ); + updatedMetadata.company[field] = updatedFact; + } else if (segments[0] === "people" && segments[1] && segments[2]) { + const idx = Number(segments[1]); + const field = segments[2]; + if (isNaN(idx) || idx < 0 || idx >= updatedMetadata.people.length) { + return NextResponse.json({ error: "Invalid people index" }, { status: 400 }); + } + const person = updatedMetadata.people[idx]!; + oldFact = person[field]; + updatedFact = buildManualFact(value, person[field]); + person[field] = updatedFact; + } else if (segments[0] === "services" && segments[1] && segments[2]) { + const idx = Number(segments[1]); + const field = segments[2]; + if (isNaN(idx) || idx < 0 || idx >= updatedMetadata.services.length) { + return NextResponse.json({ error: "Invalid services index" }, { status: 400 }); + } + const service = updatedMetadata.services[idx]!; + oldFact = service[field]; + updatedFact = buildManualFact(value, service[field]); + service[field] = updatedFact; + } else if (segments[0] === "markets" && segments[1] && segments[2] != null) { + const subfield = segments[1] as "primary" | "verticals" | "geographies"; + const idx = Number(segments[2]); + const arr = updatedMetadata.markets[subfield]; + if (!arr || isNaN(idx) || idx < 0 || idx >= arr.length) { + return NextResponse.json({ error: "Invalid markets index" }, { status: 400 }); + } + oldFact = arr[idx]; + updatedFact = buildManualFact(value, arr[idx]); + arr[idx] = updatedFact; + } else { + return NextResponse.json({ error: `Unsupported path: ${path}` }, { status: 400 }); + } + + updatedMetadata.updated_at = now; + + const diff = { + added: oldFact ? [] : [{ path, new: updatedFact }], + updated: oldFact ? 
[{ path, old: oldFact, new: updatedFact }] : [], + deprecated: [], + }; + + await db + .update(companyMetadata) + .set({ metadata: updatedMetadata }) + .where(eq(companyMetadata.companyId, userInfo.companyId)); + + await db.insert(companyMetadataHistory).values({ + companyId: userInfo.companyId, + changeType: "manual_override", + diff, + changedBy: userId, + }); + + return NextResponse.json({ success: true, path, fact: updatedFact }); + } catch (error) { + console.error("[company-metadata] PATCH error:", error); + return NextResponse.json({ error: "Internal server error" }, { status: 500 }); + } +} diff --git a/src/app/api/marketing-pipeline/route.ts b/src/app/api/marketing-pipeline/route.ts index 94a1cc0f..4f1e2dab 100644 --- a/src/app/api/marketing-pipeline/route.ts +++ b/src/app/api/marketing-pipeline/route.ts @@ -52,9 +52,13 @@ export async function POST(request: Request) { ); } + const url = new URL(request.url); + const debug = url.searchParams.get("debug") === "true"; + const result = await runMarketingPipeline({ companyId, input: validation.data, + debug, }); return NextResponse.json( diff --git a/src/app/employer/documents/components/CompanyMetadataPanel.tsx b/src/app/employer/documents/components/CompanyMetadataPanel.tsx index 2fd6d91a..52a704a6 100644 --- a/src/app/employer/documents/components/CompanyMetadataPanel.tsx +++ b/src/app/employer/documents/components/CompanyMetadataPanel.tsx @@ -1,6 +1,6 @@ "use client"; -import React, { useEffect, useState, useCallback } from "react"; +import React, { useEffect, useState, useCallback, useRef } from "react"; import Link from "next/link"; import { Building2, @@ -8,9 +8,12 @@ import { Briefcase, RefreshCw, AlertCircle, + AlertTriangle, FileText, + Scale, Sparkles, Pencil, + Download, } from "lucide-react"; import { Button } from "~/app/employer/documents/components/ui/button"; import { Card, CardHeader, CardTitle, CardContent } from "~/app/employer/documents/components/ui/card"; @@ -20,7 +23,9 @@ import { 
PeopleSection } from "~/app/employer/metadata/components/PeopleSection" import { ServicesSection } from "~/app/employer/metadata/components/ServicesSection"; import { MarketsSection } from "~/app/employer/metadata/components/MarketsSection"; import { ProvenanceCard } from "~/app/employer/metadata/components/ProvenanceCard"; -import type { CompanyMetadataJSON } from "~/lib/tools/company-metadata/types"; +import { MetadataHistorySection } from "~/app/employer/metadata/components/MetadataHistorySection"; +import { LegalSection } from "~/app/employer/metadata/components/LegalSection"; +import type { CompanyMetadataJSON, CompanyInfo, PersonEntry } from "~/lib/tools/company-metadata/types"; interface CompanyProfile { name: string; @@ -42,7 +47,7 @@ interface StatsCardProps { title: string; value: number | string; icon: React.ComponentType<{ className?: string }>; - color: "purple" | "blue" | "green" | "amber"; + color: "purple" | "blue" | "green" | "amber" | "rose"; } const colorMap = { @@ -50,6 +55,7 @@ const colorMap = { blue: { border: "border-l-blue-500", text: "text-blue-500" }, green: { border: "border-l-green-500", text: "text-green-500" }, amber: { border: "border-l-amber-500", text: "text-amber-500" }, + rose: { border: "border-l-rose-500", text: "text-rose-500" }, }; function MetadataStatsCard({ title, value, icon: Icon, color }: StatsCardProps) { @@ -78,6 +84,8 @@ export function CompanyMetadataPanel() { const [loading, setLoading] = useState(true); const [error, setError] = useState(null); const [extracting, setExtracting] = useState(false); + const [isEditMode, setIsEditMode] = useState(false); + const previousDataRef = useRef(null); const fetchMetadata = useCallback(async () => { setLoading(true); @@ -102,12 +110,95 @@ export function CompanyMetadataPanel() { } }, []); - const runExtraction = useCallback(async () => { + const handleFieldSave = useCallback(async (path: string, value: string) => { + previousDataRef.current = data; + const now = new 
Date().toISOString(); + const manualSource = { doc_id: 0, doc_name: "Manual edit", extracted_at: now }; + + // Optimistic update + setData((prev) => { + if (!prev?.metadata) return prev; + const m = structuredClone(prev.metadata); + const segments = path.split("."); + + const buildFact = (val: string | number, existing?: { visibility?: string; usage?: string }) => ({ + value: val, + visibility: existing?.visibility ?? "public", + usage: existing?.usage ?? "outreach_ok", + confidence: 1.0, + priority: "manual_override" as const, + status: "active" as const, + last_updated: now, + sources: [manualSource], + }); + + if (segments[0] === "company" && segments[1]) { + const field = segments[1]; + const existing = m.company[field]; + (m.company as Record)[field] = buildFact(field === "founded_year" ? Number(value) : value, existing); + } else if (segments[0] === "people" && segments[1] && segments[2]) { + const idx = Number(segments[1]); + const field = segments[2]; + const person = m.people[idx]; + if (person) { + (person as Record)[field] = buildFact(value, person[field]); + } + } else if (segments[0] === "services" && segments[1] && segments[2]) { + const idx = Number(segments[1]); + const field = segments[2]; + const service = m.services[idx]; + if (service) { + (service as Record)[field] = buildFact(value, service[field]); + } + } else if (segments[0] === "markets" && segments[1] && segments[2] != null) { + const sub = segments[1] as "primary" | "verticals" | "geographies"; + const idx = Number(segments[2]); + const arr = m.markets[sub]; + if (arr?.[idx]) { + arr[idx] = buildFact(value, arr[idx]) as typeof arr[number]; + } + } + m.updated_at = now; + return { ...prev, metadata: m }; + }); + try { + const res = await fetch("/api/company/metadata", { + method: "PATCH", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ path, value }), + }); + if (!res.ok) throw new Error("Save failed"); + await fetchMetadata(); + } catch (err) { + if 
(previousDataRef.current !== null) { + setData(previousDataRef.current); + } + throw err; + } + }, [data, fetchMetadata]); + + const handleExportJson = useCallback(() => { + if (!data?.metadata) return; + const json = JSON.stringify(data.metadata, null, 2); + const blob = new Blob([json], { type: "application/json" }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = `company-metadata-${new Date().toISOString().split("T")[0]}.json`; + a.click(); + URL.revokeObjectURL(url); + }, [data]); + + const runExtraction = useCallback(async (force = false) => { setExtracting(true); setError(null); try { - const response = await fetch("/api/company/metadata/extract", { method: "POST" }); - const result = (await response.json()) as { error?: string }; + const response = await fetch("/api/company/metadata/extract", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ force }), + }); + const result = (await response.json()) as { error?: string; message?: string }; if (result.error) throw new Error(result.error); await fetchMetadata(); } catch (err) { @@ -183,13 +274,44 @@ export function CompanyMetadataPanel() {
+ + +
); } + +/* Missing Metadata Alert */ +const EXPECTED_FIELDS: Array<{ key: keyof CompanyInfo; label: string }> = [ + { key: "name", label: "Company Name" }, + { key: "industry", label: "Industry" }, + { key: "headquarters", label: "Headquarters" }, + { key: "founded_year", label: "Founded Year" }, + { key: "description", label: "Description" }, + { key: "website", label: "Website" }, + { key: "size", label: "Company Size" }, +]; + +function MissingMetadataAlert({ company, people }: { company: CompanyInfo; people: PersonEntry[] }) { + const missingFields = EXPECTED_FIELDS.filter((f) => { + const field = company[f.key]; + if (!field) return true; + const val = (field as { value?: unknown }).value; + return val === undefined || val === null || val === ""; + }); + const peopleWithoutRoles = people.filter((p) => !p.role); + + if (missingFields.length === 0 && peopleWithoutRoles.length === 0) return null; + + return ( + +
+
+ +
+
+

+ Incomplete Metadata +

+ {missingFields.length > 0 && ( +

+ Missing company fields:{" "} + + {missingFields.map((f) => f.label).join(", ")} + +

+ )} + {peopleWithoutRoles.length > 0 && ( +

+ {peopleWithoutRoles.length} {peopleWithoutRoles.length === 1 ? "person" : "people"} missing role information +

+ )} +

+ Upload more documents or manually edit fields to fill in missing information. +

+
+
+
+ ); +} diff --git a/src/app/employer/metadata/components/CompanyInfoCard.tsx b/src/app/employer/metadata/components/CompanyInfoCard.tsx index 1ba61bec..bf5bdcb6 100644 --- a/src/app/employer/metadata/components/CompanyInfoCard.tsx +++ b/src/app/employer/metadata/components/CompanyInfoCard.tsx @@ -1,6 +1,6 @@ "use client"; -import React from "react"; +import React, { useState, useEffect } from "react"; import { Building2, Globe, @@ -13,12 +13,15 @@ import { import { Card, CardHeader, CardTitle, CardContent } from "~/app/employer/documents/components/ui/card"; import { ConfidenceBadge } from "./ConfidenceBadge"; import { VisibilityBadge } from "./VisibilityBadge"; +import { PriorityBadge } from "./PriorityBadge"; import type { CompanyInfo, MetadataFact } from "~/lib/tools/company-metadata/types"; type AnyMetadataFact = MetadataFact | MetadataFact | MetadataFact; interface CompanyInfoCardProps { company: CompanyInfo; + isEditMode?: boolean; + onFieldSave?: (field: string, value: string) => Promise; } interface FieldDisplayProps { @@ -26,12 +29,97 @@ interface FieldDisplayProps { fact: AnyMetadataFact | undefined; icon: React.ComponentType<{ className?: string }>; isLink?: boolean; + fieldKey: string; + isEditMode?: boolean; + onFieldSave?: (field: string, value: string) => Promise; } -function FieldDisplay({ label, fact, icon: Icon, isLink }: FieldDisplayProps) { - if (!fact) return null; +/** Inline editor strip: input + Save/Reset buttons. Used inside both FieldDisplay and the description section. 
*/ +function InlineEditor({ + fieldKey, + initialValue, + multiline, + onFieldSave, + onReset, +}: { + fieldKey: string; + initialValue: string; + multiline?: boolean; + onFieldSave: (field: string, value: string) => Promise; + onReset: () => void; +}) { + const [value, setValue] = useState(initialValue); + const [saving, setSaving] = useState(false); + const [localError, setLocalError] = useState(null); - const value = String(fact.value); + // Keep value in sync if parent resets (isEditMode toggled off → on) + useEffect(() => { + setValue(initialValue); + setLocalError(null); + }, [initialValue]); + + const handleSave = async () => { + if (value.trim() === initialValue) return; + setSaving(true); + setLocalError(null); + try { + await onFieldSave(fieldKey, value.trim()); + } catch { + setLocalError("Failed to save. Try again."); + } finally { + setSaving(false); + } + }; + + return ( +
+ {multiline ? ( +