diff --git a/.vscode/settings.json b/.vscode/settings.json
index 3e386d5..3e4a59b 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -31,5 +31,8 @@
   "editor.codeActionsOnSave": {
     "source.fixAll.biome": "explicit",
     "source.organizeImports.biome": "explicit"
+  },
+  "files.readonlyInclude": {
+    "**/*": true
   }
 }
diff --git a/app/actions/search.ts b/app/actions/search.ts
index fc7d478..ff6e0e7 100644
--- a/app/actions/search.ts
+++ b/app/actions/search.ts
@@ -1,11 +1,35 @@
-/** biome-ignore-all lint/suspicious/noConsole: "Handy for debugging" */
-
 "use server";
+import { generateObject } from "ai";
 import { Search } from "@upstash/search";
 import type { PutBlobResult } from "@vercel/blob";
+import { headers } from "next/headers";
+import { z } from "zod";
+
+// Simple in-memory sliding window rate limiter for server actions.
+// Limits each IP to a max number of searches per time window.
+const SEARCH_RATE_LIMIT = 10; // max searches
+const SEARCH_WINDOW_MS = 60 * 1000; // per 60 seconds
+const searchRateMap = new Map<string, number[]>();
-const upstash = Search.fromEnv();
+function isSearchRateLimited(ip: string): boolean {
+  const now = Date.now();
+  const timestamps = searchRateMap.get(ip) || [];
+  // Remove expired timestamps
+  const recent = timestamps.filter((t) => now - t < SEARCH_WINDOW_MS);
+  if (recent.length >= SEARCH_RATE_LIMIT) {
+    searchRateMap.set(ip, recent);
+    return true;
+  }
+  recent.push(now);
+  searchRateMap.set(ip, recent);
+  return false;
+}
+
+const upstash = new Search({
+  url: process.env.UPSTASH_SEARCH_REST_URL!,
+  token: process.env.UPSTASH_SEARCH_REST_TOKEN!,
+});
 const index = upstash.index("images");
 type SearchResponse =
@@ -16,10 +40,91 @@ type SearchResponse =
       error: string;
     };
+/**
+ * AI-powered re-ranking: takes the top candidate results from Upstash and
+ * asks xAI to verify which ones actually match the query. This solves the
+ * problem where generic terms like "red shirt" loosely match many images.
+ */
+async function rerankWithAI(
+  query: string,
+  candidates: { blob: PutBlobResult; score: number }[]
+): Promise<PutBlobResult[]> {
+  if (candidates.length === 0) return [];
+
+  // Build a list of candidates with their URLs for the AI to evaluate
+  const candidateList = candidates.map((c, i) => ({
+    index: i,
+    url: c.blob.downloadUrl,
+  }));
+
+  try {
+    const { object } = await generateObject({
+      model: "xai/grok-2-vision",
+      schema: z.object({
+        matches: z.array(
+          z.object({
+            index: z.number().describe("The index of the matching image"),
+            relevant: z
+              .boolean()
+              .describe("Whether this image truly matches the search query"),
+          })
+        ),
+      }),
+      messages: [
+        {
+          role: "user",
+          content: [
+            {
+              type: "text",
+              text: `You are an image search judge. The user searched for: "${query}"
+
+Below are ${candidateList.length} candidate images. For EACH one, decide if it truly matches the search query "${query}".
+
+Be STRICT: only mark an image as relevant if it clearly and obviously matches.
+For example:
+- "red shirt" → only images where someone is actually wearing a red shirt
+- "dog" → only images that actually contain a dog
+- "blue car" → only images showing a blue car
+
+Evaluate each image:`,
+            },
+            ...candidateList.map(
+              (c) =>
+                ({
+                  type: "image" as const,
+                  image: c.url,
+                })
+            ),
+          ],
+        },
+      ],
+    });
+
+    // Filter to only relevant results, maintaining original order
+    const relevantIndices = new Set(
+      object.matches.filter((m) => m.relevant).map((m) => m.index)
+    );
+
+    return candidates
+      .filter((_, i) => relevantIndices.has(i))
+      .map((c) => c.blob);
+  } catch {
+    // If AI re-ranking fails, fall back to returning all candidates
+    return candidates.map((c) => c.blob);
+  }
+}
+
 export const search = async (
   _prevState: SearchResponse | undefined,
   formData: FormData
 ): Promise<SearchResponse> => {
+  const headersList = await headers();
+  const ip =
+    headersList.get("x-forwarded-for")?.split(",")[0]?.trim() || "unknown";
+
+  if (isSearchRateLimited(ip)) {
+    return { error: "Too many searches. Please wait a moment and try again." };
+  }
+
   const query = formData.get("search");
   if (!query || typeof query !== "string") {
@@ -27,16 +132,30 @@ export const search = async (
   }
   try {
-    console.log("Searching index for query:", query);
-    const results = await index.search({ query });
+    const results = await index.search({
+      query,
+      limit: 20,
+    });
+
+    const sorted = results.sort((a, b) => b.score - a.score);
+
+    // First pass: use a low threshold to get a broad set of candidates.
+    // The AI re-ranker will do the precise filtering.
+    const MIN_THRESHOLD = 0.4;
+    const candidates = sorted
+      .filter((result) => result.score >= MIN_THRESHOLD)
+      .map((result) => ({
+        blob: result.metadata as unknown as PutBlobResult,
+        score: result.score,
+      }))
+      .filter((c) => c.blob);
+
+    // Take top 10 candidates max to limit AI vision API calls
+    const topCandidates = candidates.slice(0, 10);
-    console.log("Results:", results);
-    const data = results
-      .sort((a, b) => b.score - a.score)
-      .map((result) => result.metadata)
-      .filter(Boolean) as unknown as PutBlobResult[];
+    // Second pass: AI verifies which images truly match the query
+    const data = await rerankWithAI(query, topCandidates);
-    console.log("Images found:", data);
     return { data };
   } catch (error) {
     const message = error instanceof Error ? error.message : "Unknown error";
diff --git a/app/api/upload/generate-description.ts b/app/api/upload/generate-description.ts
index 630346b..4a48835 100644
--- a/app/api/upload/generate-description.ts
+++ b/app/api/upload/generate-description.ts
@@ -23,7 +23,27 @@ export const generateDescription = async (blob: PutBlobResult) => {
   const { text } = await generateText({
     model: "xai/grok-2-vision",
-    system: "Describe the image in detail.",
+    system: `You are an image description expert. Describe the image in detail for a searchable image database.
+
+Your response MUST have two sections:
+
+DESCRIPTION:
+Write a detailed description including:
+- People: gender, approximate age, clothing (colors, types), accessories, hair, expressions
+- Animals: species, breed if identifiable, color, size, what the animal is doing
+- Objects: what they are, colors, sizes, brands if visible
+- Setting: indoor/outdoor, location type, time of day
+- Actions: what is happening, poses, activities
+- Colors: mention prominent colors explicitly
+- Text: any visible text or signs
+
+Be specific and use common search terms. For example, say "man in blue t-shirt" not just "person wearing clothes".
+Say "golden retriever dog" not just "pet".
+
+TAGS:
+List 5-15 single-word or short-phrase keyword tags that someone might search for to find this image. Focus on the PRIMARY subjects. For example:
+- A photo of a dog: dog, pet, pekingese, animal, fluffy, indoor
+- A selfie: selfie, woman, portrait, mirror, phone
+- A landscape: sunset, beach, ocean, sky, nature`,
   messages: [
     {
       role: "user",
diff --git a/app/api/upload/index-image.ts b/app/api/upload/index-image.ts
index 2e18531..6357e00 100644
--- a/app/api/upload/index-image.ts
+++ b/app/api/upload/index-image.ts
@@ -4,7 +4,10 @@
 import { Search } from "@upstash/search";
 import type { PutBlobResult } from "@vercel/blob";
 import { FatalError, getStepMetadata, RetryableError } from "workflow";
-const upstash = Search.fromEnv();
+const upstash = new Search({
+  url: process.env.UPSTASH_SEARCH_REST_URL!,
+  token: process.env.UPSTASH_SEARCH_REST_TOKEN!,
+});
 export const indexImage = async (blob: PutBlobResult, text: string) => {
   "use step";
diff --git a/app/api/upload/route.ts b/app/api/upload/route.ts
index cfc1b19..9560014 100644
--- a/app/api/upload/route.ts
+++ b/app/api/upload/route.ts
@@ -1,3 +1,4 @@
+import { checkRateLimit } from "@vercel/firewall";
 import { NextResponse } from "next/server";
 import { FatalError } from "workflow";
 import { start } from "workflow/api";
@@ -5,6 +6,15 @@
 import { processImage } from "./process-image";
 export const POST = async (request: Request): Promise<NextResponse> => {
   try {
+    // Rate limit uploads: configured in Vercel Firewall dashboard
+    const { rateLimited } = await checkRateLimit("upload-image", { request });
+    if (rateLimited) {
+      return NextResponse.json(
+        { error: "Too many uploads. Please wait a moment and try again." },
+        { status: 429 }
+      );
+    }
+
     const formData = await request.formData();
     const file = formData.get("file") as File | null;
diff --git a/app/layout.tsx b/app/layout.tsx
index 6f05284..351f5fd 100644
--- a/app/layout.tsx
+++ b/app/layout.tsx
@@ -21,8 +21,8 @@ const mono = Geist_Mono({
 });
 export const metadata: Metadata = {
-  title: "Create Next App",
-  description: "Generated by create next app",
+  title: "Picsearch",
+  description: "Search your photos using natural language",
 };
 type RootLayoutProps = {
diff --git a/app/page.tsx b/app/page.tsx
index ba80e4b..d308be9 100644
--- a/app/page.tsx
+++ b/app/page.tsx
@@ -5,8 +5,8 @@
 import { Results } from "@/components/results";
 import { UploadedImagesProvider } from "@/components/uploaded-images-provider";
 export const metadata: Metadata = {
-  title: "vectr",
-  description: "vectr",
+  title: "Picsearch",
+  description: "Search your photos using natural language",
 };
 const ImagesSkeleton = () => (
@@ -29,14 +29,14 @@ const ImagesSkeleton = () => (
 const Home = () => (
-
+
}> -
+
 );
diff --git a/components/deploy.tsx b/components/deploy.tsx
deleted file mode 100644
index 5e946c7..0000000
--- a/components/deploy.tsx
+++ /dev/null
@@ -1,49 +0,0 @@
-import Image from "next/image";
-
-export const DeployButton = () => {
-  const url = new URL("https://vercel.com/new/clone");
-
-  // Demo
-  url.searchParams.set(
-    "demo-description",
-    "A free, open-source template for building natural language image search on the AI Cloud."
-  );
-  url.searchParams.set("demo-image", "https://vectr.store/opengraph-image.png");
-  url.searchParams.set("demo-title", "vectr.store");
-  url.searchParams.set("demo-url", "https://vectr.store/");
-
-  // Marketplace
-  url.searchParams.set("from", "templates");
-  url.searchParams.set("project-name", "Vectr");
-
-  // Repository
-  url.searchParams.set("repository-name", "vectr");
-  url.searchParams.set("repository-url", "https://github.com/vercel/vectr");
-
-  // Integrations
-  url.searchParams.set(
-    "products",
-    JSON.stringify([
-      {
-        type: "integration",
-        protocol: "storage",
-        productSlug: "upstash-search",
-        integrationSlug: "upstash",
-      },
-      { type: "blob" },
-    ])
-  );
-  url.searchParams.set("skippable-integrations", "0");
-
-  return (
-
-      Deploy with Vercel
-
-  );
-};
diff --git a/components/header.tsx b/components/header.tsx
index d13256e..0566a2c 100644
--- a/components/header.tsx
+++ b/components/header.tsx
@@ -1,20 +1,17 @@
 import { CheckCircle2Icon, ImageUpIcon } from "lucide-react";
-import { DeployButton } from "./deploy";
-import { Button } from "./ui/button";
 export const Header = () => (
-

vectr.store

+

Picsearch

-      A free, open-source template for building natural language image search
-      on the AI Cloud.
+      Search your photos using natural language. Just describe what you're looking for.

-      Try searching for "water" or "desert".
+      Try searching for "dog" or "blue shirt".

@@ -85,17 +82,5 @@ export const Header = () => (

-
 );
diff --git a/components/preview.tsx b/components/preview.tsx
index 09ea3c0..eb9e813 100644
--- a/components/preview.tsx
+++ b/components/preview.tsx
@@ -5,16 +5,29 @@ type PreviewProps = {
   priority?: boolean;
 };
-export const Preview = ({ url, priority }: PreviewProps) => (
+export const Preview = ({ url, priority = false }: PreviewProps) => (
- {url} + {priority ? ( + {url} + ) : ( + {url} + )}
 );
diff --git a/components/results.client.tsx b/components/results.client.tsx
index ea1c4ec..4bcf09a 100644
--- a/components/results.client.tsx
+++ b/components/results.client.tsx
@@ -7,6 +7,7 @@ import {
   ImageIcon,
   ImageUpIcon,
   Loader2Icon,
+  SearchIcon,
   UploadIcon,
 } from "lucide-react";
 import { useActionState, useEffect } from "react";
@@ -27,10 +28,10 @@ const PRIORITY_COUNT = 12;
 export const ResultsClient = ({ defaultData }: ResultsClientProps) => {
   const { images } = useUploadedImages();
-  const [state, formAction, isPending] = useActionState(search, { data: [] });
+  const [state, formAction, isPending] = useActionState(search, undefined);
   useEffect(() => {
-    if ("error" in state) {
+    if (state && "error" in state) {
       toast.error(state.error);
     }
   }, [state]);
@@ -39,14 +40,45 @@ export const ResultsClient = ({ defaultData }: ResultsClientProps) => {
     window.location.reload();
   };
-  const hasImages =
-    images.length ||
-    defaultData.length ||
-    ("data" in state && state.data?.length);
+  const searchPerformed = state !== undefined && "data" in state;
+  const searchHasResults = searchPerformed && state.data.length > 0;
+  const searchEmpty = searchPerformed && state.data.length === 0;
+  const searchErrored = state !== undefined && "error" in state;
+
+  const hasImages = images.length > 0 || defaultData.length > 0 || searchHasResults;
   return (
     <>
-      {hasImages ? (
+      {searchErrored ? (
+
+
+
+ +
+
+                Search error
+
+                {state && "error" in state ? state.error : "An unknown error occurred."}
+
+
+      ) : searchEmpty ? (
+
+
+
+ +
+
+                No matching images
+
+                No images matched your search. Try a different description or
+                broader terms.
+
+
+ ) : hasImages ? (
{images.map((image, index) => ( { url={image.url} /> ))} - {"data" in state && state.data?.length + {searchHasResults ? state.data.map((blob, index) => ( { action={formAction} className="-translate-x-1/2 fixed bottom-8 left-1/2 flex w-full max-w-sm items-center gap-1 rounded-full bg-background p-1 shadow-xl sm:max-w-lg lg:ml-[182px]" > - {"data" in state && state.data.length > 0 && ( + {(searchPerformed || searchErrored) && ( )} { required /> {isPending ? ( - ) : ( - + <> + + + )} diff --git a/components/ui/sonner.tsx b/components/ui/sonner.tsx index 957524e..93665cf 100644 --- a/components/ui/sonner.tsx +++ b/components/ui/sonner.tsx @@ -1,14 +1,11 @@ -"use client" +"use client"; -import { useTheme } from "next-themes" -import { Toaster as Sonner, ToasterProps } from "sonner" - -const Toaster = ({ ...props }: ToasterProps) => { - const { theme = "system" } = useTheme() +import { Toaster as Sonner, type ToasterProps } from "sonner"; +function Toaster(props: ToasterProps) { return ( { } {...props} /> - ) + ); } -export { Toaster } +export { Toaster }; diff --git a/components/upload-button.tsx b/components/upload-button.tsx index b5c7c19..7ab008e 100644 --- a/components/upload-button.tsx +++ b/components/upload-button.tsx @@ -12,9 +12,6 @@ export const UploadButton = () => { const inputRef = useRef(null); const abortControllerRef = useRef(null); const [isUploading, setIsUploading] = useState(false); - const isDemo = - typeof window !== "undefined" && - window.location.hostname.includes("vectr.store"); const cancelUpload = () => { if (abortControllerRef.current) { @@ -31,11 +28,6 @@ export const UploadButton = () => { return; } - if (isDemo) { - toast.error("Uploads are disabled in demo mode"); - return; - } - // Check file sizes const maxSize = 4.5 * 1024 * 1024; // 4.5MB const oversizedFiles = files.filter((file) => file.size > maxSize); @@ -239,13 +231,14 @@ export const UploadButton = () => { /> ); diff --git a/package.json b/package.json index 36ef0b1..e5ce34a 100644 --- a/package.json +++ b/package.json @@ -11,8 +11,10 @@ "dependencies": { "@hookform/resolvers": "^5.2.2", "@upstash/search": "^0.1.5", + "@upstash/vector": "^1.2.2", "@vercel/analytics": "^1.5.0", "@vercel/blob": "^2.0.0", + "@vercel/firewall": "^1.1.2", "ai": "^5.0.77", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9478fb0..f7f97b4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14,12 +14,18 @@ importers: '@upstash/search': specifier: ^0.1.5 version: 0.1.5 + '@upstash/vector': + specifier: ^1.2.2 + version: 1.2.2 '@vercel/analytics': specifier: ^1.5.0 version: 1.5.0(next@16.0.10(@opentelemetry/api@1.9.0)(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(react@19.2.0) '@vercel/blob': specifier: ^2.0.0 version: 2.0.0 + '@vercel/firewall': + specifier: ^1.1.2 + version: 1.1.2 ai: specifier: ^5.0.77 version: 5.0.77(zod@4.1.12) @@ -1912,6 +1918,10 @@ packages: resolution: {integrity: sha512-oAj7Pdy83YKSwIaMFoM7zFeLYWRc+qUpW3PiDSblxQMnGFb43qs4bmfq7dr/+JIfwhs6PTwe1o2YBwKhyjWxXw==} engines: {node: '>=20.0.0'} + '@vercel/firewall@1.1.2': + resolution: {integrity: sha512-h0sdBVrloWx8TitvWla/rGj3AnJ5JEYfL5LaGHNNOWkyMuzNqfCcGTvJgnjL2A5eSpAAzoN7Xt609YQ0L7xZdw==} + engines: {node: '>= 20'} + '@vercel/functions@3.1.4': resolution: {integrity: sha512-1dEfZkb7qxsA+ilo+1uBUCEgr7e90vHcimpDYkUB84DM051wQ5amJDk9x+cnaI29paZb5XukXwGl8yk3Udb/DQ==} engines: {node: '>= 20'} @@ -5125,6 +5135,8 @@ snapshots: throttleit: 2.1.0 undici: 5.29.0 + '@vercel/firewall@1.1.2': {} + 
   '@vercel/functions@3.1.4(@aws-sdk/credential-provider-web-identity@3.609.0(@aws-sdk/client-sts@3.914.0))':
     dependencies:
       '@vercel/oidc': 3.0.3