From 2b96fbdd930cfc461187b0350e79850f29d39605 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Bramer=20Schmidt?= Date: Wed, 15 Apr 2026 14:11:54 +0300 Subject: [PATCH 1/4] Fix Compute demo deployment on public main --- Architecture/demo-compute-bundling.md | 39 +++-- demo/ppg-dev/build-compute.test.ts | 233 +++++++++++++++++--------- demo/ppg-dev/build-compute.ts | 83 ++++++--- demo/ppg-dev/server.ts | 9 + package.json | 2 +- pnpm-lock.yaml | 34 ++-- 6 files changed, 264 insertions(+), 136 deletions(-) diff --git a/Architecture/demo-compute-bundling.md b/Architecture/demo-compute-bundling.md index 0eb6e2b7..6f1843c5 100644 --- a/Architecture/demo-compute-bundling.md +++ b/Architecture/demo-compute-bundling.md @@ -21,34 +21,43 @@ It is responsible for: 1. building the browser JS from `demo/ppg-dev/client.tsx` 2. processing `ui/index.css` through the repo PostCSS pipeline 3. injecting those prebuilt assets into the bundled server through `virtual:prebuilt-assets` -4. writing a self-contained output directory whose entrypoint is `bundle/server.bundle.js` - -It is not responsible for manually collecting Prisma Postgres dev runtime assets anymore, but it does need to carry Prisma Streams worker runtime files that Bun does not discover automatically. +4. copying Prisma Dev runtime assets into `bundle/` with their stable filenames +5. bundling Prisma Streams local's worker into `touch/processor_worker.js` +6. copying the worker's vendored `hash_vendor/` files into `touch/` +7. writing a self-contained output directory whose entrypoint is `bundle/server.bundle.js` ## Prisma Dev Runtime Assets -`@prisma/dev@0.23.1` exposes a Bun runtime-asset manifest for PGlite. +`@prisma/dev@0.24.6` exposes a Bun runtime-asset manifest for PGlite and also +exports `copyPrismaDevRuntimeAssets()`. 
That means when `build-compute.ts` bundles `demo/ppg-dev/server.ts` with Bun: - Bun sees `@prisma/dev`'s literal Bun manifest import -- Bun emits the required `pglite.wasm`, `pglite.data`, and extension `*.tar.gz` files automatically -- those files land next to the server bundle in `deploy/bundle/` - -Studio no longer scans `node_modules/@electric-sql/pglite/dist` or copies those files by hand. +- Bun emits hashed PGlite `.wasm`, `.data`, and extension archives next to the bundled server entrypoint +- `build-compute.ts` then copies the same runtime assets into `deploy/bundle/` with their canonical names like `pglite.wasm` and `pglite-seed.tar.gz` -## Prisma Streams Runtime Assets +That extra copy is a Studio-side workaround for the current Compute boot path: +the deployed `@prisma/dev` runtime still resolves stable filenames relative to +the server bundle in some startup paths, so the Compute artifact needs both the +hashed Bun-emitted assets and the canonical names. -Prisma Dev can also start a local Prisma Streams server. That runtime spawns a touch interpreter worker from `@prisma/streams-local`. +## Prisma Streams Worker Assets -`build-compute.ts` MUST therefore copy: +`@prisma/dev` also starts Prisma Streams local and spawns a worker from +`../touch/processor_worker.js` relative to the bundled server entrypoint. -- `@prisma/streams-local/dist/touch` -- the worker's bare runtime dependency package, `better-result` +For the Compute artifact that means: -into the output directory. 
+- the server entrypoint stays at `deploy/bundle/server.bundle.js` +- stable PGlite assets live in `deploy/bundle/` +- the Streams worker must live at `deploy/touch/processor_worker.js` +- the worker's vendored hashing modules must live at `deploy/touch/hash_vendor/` -This is an explicit exception to the "no manual runtime asset copying" rule above: Bun handles the main PGlite runtime assets automatically, but the spawned Streams worker is resolved at runtime from the packaged filesystem and must remain self-contained after deployment. +The worker cannot be copied verbatim from `node_modules` because it still +imports package dependencies such as `better-result` and `ajv`. `build-compute.ts` +therefore Bun-bundles that worker into a standalone file before copying the +vendored hash modules alongside it. ## Runtime Detection diff --git a/demo/ppg-dev/build-compute.test.ts b/demo/ppg-dev/build-compute.test.ts index a891c991..226cc512 100644 --- a/demo/ppg-dev/build-compute.test.ts +++ b/demo/ppg-dev/build-compute.test.ts @@ -1,5 +1,5 @@ import { spawn } from "node:child_process"; -import { mkdtemp, readdir, rm } from "node:fs/promises"; +import { mkdtemp, readFile, readdir, rm } from "node:fs/promises"; import { createServer } from "node:net"; import { tmpdir } from "node:os"; import { join } from "node:path"; @@ -42,16 +42,41 @@ function runProcess( }); } -async function hasBun(): Promise { +async function getBunVersion(): Promise { try { const result = await runProcess("bun", ["--version"]); - return result.code === 0; + return result.code === 0 ? 
result.stdout.trim() : null; } catch { - return false; + return null; } } +function supportsBundledPrismaDevBoot(bunVersion: string): boolean { + const [major = 0, minor = 0, patch = 0] = bunVersion + .split(".") + .map((segment) => Number.parseInt(segment, 10)); + + if (major !== 1) { + return major > 1; + } + + if (minor !== 3) { + return minor > 3; + } + + return patch >= 0; +} + +function normalizeBundledServerStderr(stderr: string): string { + return stderr + .split("\n") + .map((line) => line.trim()) + .filter((line) => line.length > 0) + .filter((line) => !line.startsWith("[gc] forced GC")) + .join("\n"); +} + async function getAvailablePort(): Promise { return await new Promise((resolve, reject) => { const server = createServer(); @@ -114,88 +139,140 @@ afterAll(async () => { }); describe("build-compute", () => { - it("keeps Bun-emitted Prisma dev runtime assets next to the bundled server entrypoint", async () => { - if (!(await hasBun())) { - return; - } + it( + "copies stable Prisma dev runtime assets next to the bundled server entrypoint", + async () => { + const bunVersion = await getBunVersion(); - const outputDir = await mkdtemp( - join(tmpdir(), "studio-build-compute-output-"), - ); - tempDirs.add(outputDir); + if (!bunVersion) { + return; + } - const build = await runProcess( - "bun", - ["demo/ppg-dev/build-compute.ts", outputDir], - { - cwd: process.cwd(), + const outputDir = await mkdtemp( + join(tmpdir(), "studio-build-compute-output-"), + ); + tempDirs.add(outputDir); + + const build = await runProcess( + "bun", + ["demo/ppg-dev/build-compute.ts", outputDir], + { + cwd: process.cwd(), + env: { + STUDIO_DEMO_AI_ENABLED: "false", + }, + }, + ); + + expect(build.code).toBe(0); + expect(build.stderr).toBe(""); + + const rootEntries = await readdir(outputDir); + const bundleEntries = await readdir(join(outputDir, "bundle")); + + expect(rootEntries).toContain("bundle"); + expect(rootEntries).toContain("touch"); + expect(rootEntries.some((entry) => 
entry.endsWith(".tar.gz"))).toBe(false); + expect(rootEntries.some((entry) => entry.endsWith(".wasm"))).toBe(false); + expect(rootEntries.some((entry) => entry.endsWith(".data"))).toBe(false); + + expect(bundleEntries).toContain("server.bundle.js"); + expect(bundleEntries).toContain("initdb.wasm"); + expect(bundleEntries).toContain("pglite.data"); + expect(bundleEntries).toContain("pglite.wasm"); + expect(bundleEntries).toContain("pglite-seed.tar.gz"); + expect( + bundleEntries.some( + (entry) => entry.includes(".tar-") && entry.endsWith(".gz"), + ), + ).toBe(true); + + const touchEntries = await readdir(join(outputDir, "touch")); + const hashVendorEntries = await readdir( + join(outputDir, "touch", "hash_vendor"), + ); + const workerBundle = await readFile( + join(outputDir, "touch", "processor_worker.js"), + "utf8", + ); + + expect(touchEntries).toContain("processor_worker.js"); + expect(touchEntries).toContain("hash_vendor"); + expect(hashVendorEntries).toContain("LICENSE.hash-wasm"); + expect(hashVendorEntries).toContain("NOTICE.md"); + expect(hashVendorEntries).toContain("xxhash3.umd.min.cjs"); + expect(hashVendorEntries).toContain("xxhash32.umd.min.cjs"); + expect(hashVendorEntries).toContain("xxhash64.umd.min.cjs"); + expect(workerBundle).not.toContain('from "better-result"'); + expect(workerBundle).not.toContain('from "ajv"'); + + const serverBundle = await readFile( + join(outputDir, "bundle", "server.bundle.js"), + "utf8", + ); + expect(serverBundle).not.toContain( + "sourceMappingURL=data:application/json;base64", + ); + + if (!supportsBundledPrismaDevBoot(bunVersion)) { + return; + } + + const port = await getAvailablePort(); + const serverProcess = spawn("bun", ["./bundle/server.bundle.js"], { + cwd: outputDir, env: { + ...process.env, STUDIO_DEMO_AI_ENABLED: "false", + STUDIO_DEMO_PORT: String(port), }, - }, - ); - - expect(build.code).toBe(0); - expect(build.stderr).toBe(""); - - const rootEntries = await readdir(outputDir); - const bundleEntries = 
await readdir(join(outputDir, "bundle")); - - expect(rootEntries).toContain("bundle"); - expect(rootEntries.some((entry) => entry.endsWith(".tar.gz"))).toBe(false); - expect(rootEntries.some((entry) => entry.endsWith(".wasm"))).toBe(false); - expect(rootEntries.some((entry) => entry.endsWith(".data"))).toBe(false); - - expect(bundleEntries).toContain("server.bundle.js"); - expect( - bundleEntries.some( - (entry) => entry.includes(".tar-") && entry.endsWith(".gz"), - ), - ).toBe(true); - expect(bundleEntries.some((entry) => entry.endsWith(".wasm"))).toBe(true); - expect(bundleEntries.some((entry) => entry.endsWith(".data"))).toBe(true); - - const port = await getAvailablePort(); - const serverProcess = spawn("bun", ["./bundle/server.bundle.js"], { - cwd: outputDir, - env: { - ...process.env, - STUDIO_DEMO_AI_ENABLED: "false", - STUDIO_DEMO_PORT: String(port), - }, - stdio: ["ignore", "pipe", "pipe"], - }); + stdio: ["ignore", "pipe", "pipe"], + }); - let stdout = ""; - let stderr = ""; + let stdout = ""; + let stderr = ""; - serverProcess.stdout.on("data", (chunk) => { - stdout += String(chunk); - }); - serverProcess.stderr.on("data", (chunk) => { - stderr += String(chunk); - }); + serverProcess.stdout.on("data", (chunk) => { + stdout += String(chunk); + }); + serverProcess.stderr.on("data", (chunk) => { + stderr += String(chunk); + }); - try { - const response = await waitForHttp(`http://127.0.0.1:${port}/api/config`); - const payload = (await response.json()) as { - bootId?: unknown; - streams?: { - url?: unknown; + try { + const response = await waitForHttp( + `http://127.0.0.1:${port}/api/config`, + ); + const payload = (await response.json()) as { + bootId?: unknown; + streams?: { + url?: unknown; + }; }; - }; - - expect(typeof payload.bootId).toBe("string"); - expect(typeof payload.streams?.url).toBe("string"); - expect(payload.streams?.url).toBe("/api/streams"); - } finally { - serverProcess.kill("SIGTERM"); - await new Promise((resolve) => { - 
serverProcess.once("close", () => resolve()); - }); - } - expect(stderr).toBe(""); - expect(stdout).toContain(`http://localhost:${port}`); - }, 120_000); + expect(typeof payload.bootId).toBe("string"); + expect(typeof payload.streams?.url).toBe("string"); + expect(payload.streams?.url).toBe("/api/streams"); + + const faviconResponse = await fetch( + `http://127.0.0.1:${port}/favicon.ico`, + ); + expect(faviconResponse.status).toBe(204); + } finally { + serverProcess.kill("SIGTERM"); + if ( + serverProcess.exitCode === null && + serverProcess.signalCode === null + ) { + await new Promise((resolve) => { + serverProcess.once("close", () => resolve()); + }); + } + } + + expect(normalizeBundledServerStderr(stderr)).toBe(""); + expect(stdout).toContain(`http://localhost:${port}`); + }, + 120_000, + ); }); diff --git a/demo/ppg-dev/build-compute.ts b/demo/ppg-dev/build-compute.ts index 045b639c..837591cf 100644 --- a/demo/ppg-dev/build-compute.ts +++ b/demo/ppg-dev/build-compute.ts @@ -4,9 +4,10 @@ * * Pre-builds browser assets (client JS via Bun.build, CSS via PostCSS), then * bundles server.ts with the pre-built assets injected through a virtual - * module. `@prisma/dev` 0.23.1+ now exposes a Bun runtime-asset manifest, so - * Bun emits the required PGlite `.wasm`, `.data`, and extension archives next - * to the bundled server entrypoint automatically. + * module. `@prisma/dev` emits hashed PGlite runtime assets during Bun + * bundling, but the current Compute boot path still expects stable filenames + * like `pglite.wasm`, so this build also copies the Prisma Dev runtime assets + * into the bundle directory with their canonical names. 
* * Usage (from the repo root): * @@ -14,9 +15,10 @@ * * Deploy: * - * prisma compute deploy --skip-build \ + * bunx @prisma/compute-cli deploy --skip-build \ * --path --entrypoint bundle/server.bundle.js \ - * --http-port 8080 --env STUDIO_DEMO_PORT=8080 + * --http-port 8080 --env STUDIO_DEMO_PORT=8080 \ + * --service */ import { existsSync } from "node:fs"; @@ -25,6 +27,7 @@ import { createRequire } from "node:module"; import { basename, dirname, extname, join, resolve } from "node:path"; import { pathToFileURL } from "node:url"; +import { copyPrismaDevRuntimeAssets } from "@prisma/dev"; import postcss, { type AcceptedPlugin } from "postcss"; const studioRoot = resolve(import.meta.dirname, "../.."); @@ -55,7 +58,7 @@ const clientBuild = await Bun.build({ entrypoints: [join(studioRoot, "demo/ppg-dev/client.tsx")], format: "esm", minify: true, - sourcemap: "inline", + sourcemap: "none", splitting: false, target: "browser", }); @@ -150,8 +153,6 @@ if (!serverBuild.success) { process.exit(1); } -await copyStreamsTouchRuntimeAssets(outDir); - // Rename the output to a deterministic name. 
const produced = serverBuild.outputs[0]; const producedPath = produced?.path; @@ -161,6 +162,14 @@ if (producedPath && producedPath !== finalPath) { await rename(producedPath, finalPath); } +const copiedRuntimeAssets = await copyPrismaDevRuntimeAssets(bundleDir); +console.log( + `[build] Copied Prisma Dev runtime assets: ${copiedRuntimeAssets.length}`, +); + +await bundlePrismaStreamsTouchAssets(outDir); +console.log("[build] Bundled Prisma Streams worker assets."); + const { size: bundleBytes } = await stat(finalPath); console.log( `[build] Server bundle: ${(bundleBytes / 1024 / 1024).toFixed(2)} MB`, @@ -217,7 +226,7 @@ function generateAssetsModule( ].join("\n"); } -async function copyStreamsTouchRuntimeAssets(outputDir: string): Promise { +async function bundlePrismaStreamsTouchAssets(outDir: string): Promise { const prismaDevPackagePath = require.resolve("@prisma/dev/package.json"); const prismaDevRequire = createRequire(prismaDevPackagePath); const streamsLocalPackagePath = prismaDevRequire.resolve( @@ -225,20 +234,44 @@ async function copyStreamsTouchRuntimeAssets(outputDir: string): Promise { ); const streamsLocalRoot = dirname(streamsLocalPackagePath); const sourceDir = join(streamsLocalRoot, "dist", "touch"); - const destinationDir = join(outputDir, "touch"); - const betterResultPackagePath = prismaDevRequire.resolve( - "better-result/package.json", - ); - const betterResultRoot = dirname(betterResultPackagePath); - const betterResultDestination = join( - outputDir, - "node_modules", - "better-result", - ); - - await cp(sourceDir, destinationDir, { recursive: true }); - await cp(betterResultRoot, betterResultDestination, { recursive: true }); - console.log( - "[build] Copied Prisma Streams touch runtime assets and worker dependencies.", - ); + const workerEntrypoint = join(sourceDir, "processor_worker.js"); + const hashVendorDir = join(sourceDir, "hash_vendor"); + const touchOutDir = join(outDir, "touch"); + + if (!existsSync(workerEntrypoint) || 
!existsSync(hashVendorDir)) { + throw new Error( + `Could not locate Prisma Streams worker assets at ${sourceDir}.`, + ); + } + + await mkdir(touchOutDir, { recursive: true }); + + const workerBuild = await Bun.build({ + entrypoints: [workerEntrypoint], + format: "esm", + minify: false, + outdir: touchOutDir, + sourcemap: "none", + target: "bun", + }); + + if (!workerBuild.success) { + throw new Error( + workerBuild.logs + .map((log) => log.message) + .join("\n"), + ); + } + + const builtWorker = workerBuild.outputs[0]?.path; + const finalWorkerPath = join(touchOutDir, "processor_worker.js"); + + if (builtWorker && builtWorker !== finalWorkerPath) { + await rename(builtWorker, finalWorkerPath); + } + + await cp(hashVendorDir, join(touchOutDir, "hash_vendor"), { + force: true, + recursive: true, + }); } diff --git a/demo/ppg-dev/server.ts b/demo/ppg-dev/server.ts index 1dca8242..251020f3 100644 --- a/demo/ppg-dev/server.ts +++ b/demo/ppg-dev/server.ts @@ -365,6 +365,15 @@ async function handleRequest(request: Request): Promise { return await handleStreamsProxyRequest(request, url); } + if (url.pathname === "/favicon.ico") { + return new Response(null, { + headers: { + "Cache-Control": CACHE_CONTROL_STATIC, + }, + status: 204, + }); + } + if (!isProduction && url.pathname === "/__reload") { return new Response(createReloadStream(), { headers: { diff --git a/package.json b/package.json index 2a1efc38..fdf9fdb2 100644 --- a/package.json +++ b/package.json @@ -156,7 +156,7 @@ "@electric-sql/pglite": "0.3.15", "@eslint/eslintrc": "2.1.4", "@eslint/js": "8.57.0", - "@prisma/dev": "0.24.4", + "@prisma/dev": "0.24.6", "@radix-ui/react-alert-dialog": "1.1.15", "@radix-ui/react-checkbox": "1.3.3", "@radix-ui/react-context-menu": "2.2.16", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0221a68f..cd273c41 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -68,8 +68,8 @@ devDependencies: specifier: 8.57.0 version: 8.57.0 '@prisma/dev': - specifier: 0.24.4 - version: 
0.24.4(typescript@5.9.3) + specifier: 0.24.6 + version: 0.24.6(typescript@5.9.3) '@radix-ui/react-alert-dialog': specifier: 1.1.15 version: 1.1.15(@types/react-dom@19.2.3)(@types/react@19.2.14)(react-dom@19.2.4)(react@19.2.4) @@ -687,29 +687,29 @@ packages: tslib: 2.8.1 dev: true - /@electric-sql/pglite-socket@0.1.1(@electric-sql/pglite@0.4.1): - resolution: {integrity: sha512-p2hoXw3Z3LQHwTeikdZNsFBOvXGqKY2hk51BBw+8NKND8eoH+8LFOtW9Z8CQKmTJ2qqGYu82ipqiyFZOTTXNfw==} + /@electric-sql/pglite-socket@0.1.3(@electric-sql/pglite@0.4.3): + resolution: {integrity: sha512-LAciWM0M1dCL8hlsxu2venbVZcdxema0BtDfpWYVqr+Y468UADw0pFWidhKw1M8sfJ8rdLT71tjMmnirf/IZRQ==} hasBin: true peerDependencies: - '@electric-sql/pglite': 0.4.1 + '@electric-sql/pglite': 0.4.3 dependencies: - '@electric-sql/pglite': 0.4.1 + '@electric-sql/pglite': 0.4.3 dev: true - /@electric-sql/pglite-tools@0.3.1(@electric-sql/pglite@0.4.1): - resolution: {integrity: sha512-C+T3oivmy9bpQvSxVqXA1UDY8cB9Eb9vZHL9zxWwEUfDixbXv4G3r2LjoTdR33LD8aomR3O9ZXEO3XEwr/cUCA==} + /@electric-sql/pglite-tools@0.3.3(@electric-sql/pglite@0.4.3): + resolution: {integrity: sha512-AlzLJTRJ8+UFgK8CmxIpyIpJ0+YaFw02IiOSdYrqxwPXdSyeIShz8aa9Tq+tYFXdPwcaMp/Fc80mQZ1dkOQ/wg==} peerDependencies: - '@electric-sql/pglite': 0.4.1 + '@electric-sql/pglite': 0.4.3 dependencies: - '@electric-sql/pglite': 0.4.1 + '@electric-sql/pglite': 0.4.3 dev: true /@electric-sql/pglite@0.3.15: resolution: {integrity: sha512-Cj++n1Mekf9ETfdc16TlDi+cDDQF0W7EcbyRHYOAeZdsAe8M/FJg18itDTSwyHfar2WIezawM9o0EKaRGVKygQ==} dev: true - /@electric-sql/pglite@0.4.1: - resolution: {integrity: sha512-mZ9NzzUSYPOCnxHH1oAHPRzoMFJHY472raDKwXl/+6oPbpdJ7g8LsCN4FSaIIfkiCKHhb3iF/Zqo3NYxaIhU7Q==} + /@electric-sql/pglite@0.4.3: + resolution: {integrity: sha512-ichuWTgtd4mOM1G4SpyGJa5trT03lWbMypDV0fUXUCXg5hiHqVAz/bZyV68NqmkLB7WcYmj1RMJVSp8HV/v/ZQ==} dev: true /@esbuild/aix-ppc64@0.27.3: @@ -1166,12 +1166,12 @@ packages: resolution: {integrity: 
sha512-YSGTiSlBAVJPzX4ONZmMotL+ozJwQjRmZweQNIq/ER0tQJKJynNkRB3kyvt37eOfsbMCXk3gnLF6J9OJ4QWftw==} dev: true - /@prisma/dev@0.24.4(typescript@5.9.3): - resolution: {integrity: sha512-u/GwsNsWxDq9Ccq6P1R3yzNJAELVIz4A+RXeR6E/JRjXTjVfwiqdIPCdlDOpO4H1+4Jvbv0I5mn0SEqz/cz8sQ==} + /@prisma/dev@0.24.6(typescript@5.9.3): + resolution: {integrity: sha512-GbPGfHBszyfeq82xaAVtMqrJX1lzdJbu3ESnuQ84Vd4+/fZQTj930yvI4/et3c63MgjfKpchFzvDHYkHJJFjXQ==} dependencies: - '@electric-sql/pglite': 0.4.1 - '@electric-sql/pglite-socket': 0.1.1(@electric-sql/pglite@0.4.1) - '@electric-sql/pglite-tools': 0.3.1(@electric-sql/pglite@0.4.1) + '@electric-sql/pglite': 0.4.3 + '@electric-sql/pglite-socket': 0.1.3(@electric-sql/pglite@0.4.3) + '@electric-sql/pglite-tools': 0.3.3(@electric-sql/pglite@0.4.3) '@hono/node-server': 1.19.11(hono@4.12.8) '@prisma/get-platform': 7.2.0 '@prisma/query-plan-executor': 7.2.0 From 66e34a1b95fb1eedbe469ee43f2437926dc918a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Bramer=20Schmidt?= Date: Wed, 15 Apr 2026 14:12:03 +0300 Subject: [PATCH 2/4] Add Compute preview automation for pull requests --- .github/workflows/compute-preview.yml | 84 ++++++++++ Architecture/compute-preview-deploy.md | 57 +++++++ CHANGELOG.md | 2 + FEATURES.md | 11 +- README.md | 42 ++++- .../compute-preview-comment.mjs | 89 +++++++++++ .../compute-preview-deploy.mjs | 149 ++++++++++++++++++ .../compute-preview-destroy.mjs | 85 ++++++++++ .../compute-preview/compute-preview-utils.mjs | 64 ++++++++ .../compute-preview-utils.test.ts | 90 +++++++++++ 10 files changed, 669 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/compute-preview.yml create mode 100644 Architecture/compute-preview-deploy.md create mode 100644 scripts/compute-preview/compute-preview-comment.mjs create mode 100644 scripts/compute-preview/compute-preview-deploy.mjs create mode 100644 scripts/compute-preview/compute-preview-destroy.mjs create mode 100644 scripts/compute-preview/compute-preview-utils.mjs 
create mode 100644 scripts/compute-preview/compute-preview-utils.test.ts diff --git a/.github/workflows/compute-preview.yml b/.github/workflows/compute-preview.yml new file mode 100644 index 00000000..7cc3bd2d --- /dev/null +++ b/.github/workflows/compute-preview.yml @@ -0,0 +1,84 @@ +name: compute preview + +on: + pull_request: + types: + - opened + - reopened + - synchronize + delete: + +permissions: + contents: read + issues: write + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.event.ref }} + cancel-in-progress: true + +jobs: + deploy-preview: + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Setup pnpm + uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0 + + - name: Setup Node + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 + with: + node-version-file: .node-version + cache: pnpm + + - name: Setup Bun + uses: oven-sh/setup-bun@0c5077e51419868618aeaa5fe8019c62421857d6 # v2 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Build Compute artifact + run: pnpm build:deploy + + - name: Deploy preview + id: deploy + env: + PRISMA_API_TOKEN: ${{ secrets.STUDIO_PREVIEW_COMPUTE_TOKEN }} + PREVIEW_BRANCH_NAME: ${{ github.event.pull_request.head.ref }} + run: node scripts/compute-preview/compute-preview-deploy.mjs + + - name: Comment preview URL on PR + env: + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ github.token }} + PREVIEW_BRANCH_NAME: ${{ github.event.pull_request.head.ref }} + PREVIEW_PR_NUMBER: ${{ github.event.pull_request.number }} + PREVIEW_SERVICE_NAME: ${{ steps.deploy.outputs.preview_service_name }} + PREVIEW_SERVICE_URL: ${{ steps.deploy.outputs.preview_service_url }} + 
PREVIEW_VERSION_URL: ${{ steps.deploy.outputs.preview_version_url }} + run: node scripts/compute-preview/compute-preview-comment.mjs + + destroy-preview: + if: github.event_name == 'delete' && github.event.ref_type == 'branch' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + ref: ${{ github.event.repository.default_branch }} + + - name: Setup Node + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 + with: + node-version-file: .node-version + + - name: Setup Bun + uses: oven-sh/setup-bun@0c5077e51419868618aeaa5fe8019c62421857d6 # v2 + + - name: Destroy preview service for deleted branch + env: + PRISMA_API_TOKEN: ${{ secrets.STUDIO_PREVIEW_COMPUTE_TOKEN }} + PREVIEW_BRANCH_NAME: ${{ github.event.ref }} + run: node scripts/compute-preview/compute-preview-destroy.mjs diff --git a/Architecture/compute-preview-deploy.md b/Architecture/compute-preview-deploy.md new file mode 100644 index 00000000..206bdf08 --- /dev/null +++ b/Architecture/compute-preview-deploy.md @@ -0,0 +1,57 @@ +# Compute Preview Deploys + +This document is normative for branch-scoped Compute preview deployments. + +## Purpose + +Pull requests need a live Studio preview without manually creating and cleaning up +Compute services for every branch. + +The preview deployment path uses the existing `pnpm build:deploy` artifact and +publishes it into the dedicated Compute project named `studio-preview`. + +## Triggering + +- A preview deploy MUST run when a pull request is opened, reopened, or updated + with new commits. +- Preview deploys MUST only run for branches inside this repository. Forked pull + requests MUST NOT receive the Compute token. +- A preview service MUST be destroyed when the corresponding Git branch is + deleted. 
+- Because the GitHub `delete` event is evaluated from the default branch + workflow set, this workflow MUST be merged to `main` before branch-deletion + cleanup becomes automatic for later branches. + +## Service Naming + +- Preview services MUST be keyed by the pull request branch name. +- Because Compute service names need a filesystem- and URL-safe shape, the raw + branch name MUST be normalized to a lowercase slug containing only + alphanumeric segments separated by `-`. +- If the normalized branch slug exceeds the Compute name budget, it MUST be + truncated and keep a stable hash suffix so repeated deploys resolve to the + same service. +- The same normalization MUST be used for deploy and destroy flows. + +## Deploy Flow + +- The workflow MUST build the preview artifact with `pnpm build:deploy`. +- The workflow MUST authenticate with Compute through the GitHub Actions secret + `STUDIO_PREVIEW_COMPUTE_TOKEN`, exposed to the CLI as `PRISMA_API_TOKEN`. +- The deploy helper MUST resolve the `studio-preview` Compute project by name at + runtime instead of hardcoding an opaque service id. +- If the branch preview service does not exist, the helper MUST create it in the + project's default region. +- If the service already exists, the helper MUST deploy a new version to that + same service. +- Deployments MUST use the published CLI entrypoint: + `bunx @prisma/compute-cli@latest deploy --skip-build --path deploy --entrypoint bundle/server.bundle.js --http-port 8080 --env STUDIO_DEMO_PORT=8080`. + +## PR Feedback + +- Successful preview deploys MUST post the live service URL back to the pull + request. +- The PR comment MUST be sticky: later deploys for the same PR update the + existing preview comment instead of creating duplicates. +- The comment MUST include the original branch name plus the resolved Compute + service name so any slug normalization stays visible. 
diff --git a/CHANGELOG.md b/CHANGELOG.md index aac01303..2006186b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,8 @@ - Add optional Prisma Streams setup support, wire the `ppg-dev` demo to Prisma Dev's Streams server, and show live stream names in a new sidebar `Streams` section. - Add a dedicated stream event view with infinite scrolling, expandable rows, and summary columns for time, key, indexed fields, preview text, and payload size. - Keep stream event counts live while a stream is open, and reveal newly arrived events in 50-row batches without snapping the current list. +- Work around the current `@prisma/dev` Compute asset-resolution gap by copying stable PGlite runtime filenames into the deploy bundle and bundling the Prisma Streams local worker, so the packaged demo can boot correctly on Compute with WAL syncing still enabled. +- Add automatic Compute preview deploys for pull requests, so branch builds land in the `studio-preview` project, comment their live URL on the PR, and clean themselves up when the branch is deleted. ## 0.27.3 diff --git a/FEATURES.md b/FEATURES.md index 4c719375..9fe1a3cb 100644 --- a/FEATURES.md +++ b/FEATURES.md @@ -13,8 +13,7 @@ This gives users an accurate live model of the database and keeps table navigati ## Deployable Prisma Postgres Demo The local `ppg-dev` demo can be packaged into a Compute-ready artifact instead of requiring the repo checkout at runtime. -The deploy builder precompiles the browser JS/CSS, injects those assets into the bundled server, and relies on `@prisma/dev`'s Bun runtime-asset manifest so PGlite's WASM, data, and extension archives are emitted automatically beside the server bundle. -When that bundled demo also starts its embedded local Prisma Streams runtime, Studio now relies on the published `@prisma/streams-local` package to carry its own runtime tuning defaults instead of layering a second demo-specific memory policy on top. 
+The deploy builder precompiles the browser JS/CSS, injects those assets into the bundled server, copies Prisma Dev's PGlite runtime assets into the bundle with stable filenames, and bundles the Prisma Streams worker into `touch/` so the Compute artifact can boot and keep WAL-to-stream syncing alive outside the repo checkout. The same demo entrypoint can also run against external development infrastructure through `pnpm demo:ppg -- --database-url --streams-server-url `, or in streams-only mode through `pnpm demo:ppg -- --streams-server-url `. In those modes, Studio keeps serving the local shell and `/api/streams` proxy, but skips local Prisma Dev startup, local Streams startup, WAL wiring, and local seeding so you can point the demo at an already-running backend stack. ## Streams-Only Studio Shell @@ -27,6 +26,14 @@ In that mode the shell hides schema selection, table navigation, and database-on Studio's local development workflow can temporarily replace the published npm `@prisma/dev` package with the sibling source package from `../team-expansion/dev/server`, while also swapping its `@prisma/streams-local` dependency over to a built local Streams checkout. That override stays opt-in, rebuilds from the sibling repos by default, and can be reverted without rewriting the tracked lockfile, so experimental Prisma Dev and Durable Streams work can stay local to one Studio checkout. +## Compute PR Preview Deploys + +Pull requests can publish the current branch into the dedicated `studio-preview` +Compute project without hand-creating services for each branch. +The preview workflow derives a stable Compute-safe service name from the branch, +reuses that service across later pushes, posts the live URL back to the PR, and +destroys the preview service when the branch is deleted. + ## Introspection Recovery and Retry Startup introspection failures show retryable diagnostics in both the sidebar and the main table panel instead of pretending the database has no tables. 
diff --git a/README.md b/README.md index 6fdc9adb..7c510e3c 100644 --- a/README.md +++ b/README.md @@ -400,8 +400,46 @@ Revert to the published npm packages with `pnpm streams:use-npm`. `@prisma/dev` now emits its own PGlite runtime assets during Bun bundling, so plain `bun build` no longer needs `--packages external` just to keep Prisma Postgres dev working. For a source-free Compute artifact, use `pnpm build:deploy`: -that path still prebuilds the browser JS/CSS and injects those assets into the -server bundle so the deployed demo does not need the repo checkout at runtime. +that path prebuilds the browser JS/CSS, injects those assets into the server +bundle, and copies Prisma Dev's runtime assets into `deploy/bundle/` with +stable filenames so the deployed demo does not need the repo checkout at +runtime. It also Bun-bundles the Prisma Streams local worker into `deploy/touch/` +so Compute can keep Prisma Dev's WAL-to-stream sidecar alive in the source-free artifact. + +Deploy that artifact with: + +```sh +bunx @prisma/compute-cli deploy --skip-build \ + --path deploy \ + --entrypoint bundle/server.bundle.js \ + --http-port 8080 \ + --env STUDIO_DEMO_PORT=8080 \ + --service +``` + +## Compute Preview Deploys + +This repo also maintains branch-scoped Compute previews for pull requests. + +- `.github/workflows/compute-preview.yml` deploys the current PR branch into the + dedicated `studio-preview` Compute project whenever a PR is opened, + reopened, or updated with new commits. +- The preview service name is derived from the branch name through a stable + Compute-safe slug, so later pushes reuse the same service instead of creating + duplicates. +- The workflow updates one sticky PR comment with the live preview URL after a + successful deploy. +- When a Git branch is deleted, the same workflow destroys the matching preview + service. 
+ +The workflow expects the GitHub Actions secret +`STUDIO_PREVIEW_COMPUTE_TOKEN`, which should contain a Compute API token for the +`studio-preview` project. + +For branch-deletion cleanup to happen automatically, the workflow must be +present on the default branch. In practice that means merging the preview +workflow to `main` once, after which later PR branches will get full automatic +create/update/delete behavior. ## Development Workflow diff --git a/scripts/compute-preview/compute-preview-comment.mjs b/scripts/compute-preview/compute-preview-comment.mjs new file mode 100644 index 00000000..3b9e9c42 --- /dev/null +++ b/scripts/compute-preview/compute-preview-comment.mjs @@ -0,0 +1,89 @@ +#!/usr/bin/env node + +import { + PREVIEW_COMMENT_MARKER, + buildPreviewCommentBody, +} from "./compute-preview-utils.mjs"; + +async function main() { + const githubToken = getRequiredEnv("GITHUB_TOKEN"); + const repository = getRequiredEnv("GITHUB_REPOSITORY"); + const prNumber = getRequiredEnv("PREVIEW_PR_NUMBER"); + const branchName = getRequiredEnv("PREVIEW_BRANCH_NAME"); + const serviceName = getRequiredEnv("PREVIEW_SERVICE_NAME"); + const serviceUrl = getRequiredEnv("PREVIEW_SERVICE_URL"); + const versionUrl = process.env.PREVIEW_VERSION_URL?.trim(); + const [owner, repo] = repository.split("/"); + + if (!owner || !repo) { + throw new Error(`Invalid GITHUB_REPOSITORY value "${repository}".`); + } + + const body = buildPreviewCommentBody({ + branchName, + serviceName, + serviceUrl, + versionUrl, + }); + const comments = await githubRequest({ + githubToken, + method: "GET", + path: `/repos/${owner}/${repo}/issues/${prNumber}/comments?per_page=100`, + }); + const existingComment = comments.find((comment) => + typeof comment.body === "string" && + comment.body.includes(PREVIEW_COMMENT_MARKER), + ); + + if (existingComment) { + await githubRequest({ + body: { body }, + githubToken, + method: "PATCH", + path: `/repos/${owner}/${repo}/issues/comments/${existingComment.id}`, + 
}); + return; + } + + await githubRequest({ + body: { body }, + githubToken, + method: "POST", + path: `/repos/${owner}/${repo}/issues/${prNumber}/comments`, + }); +} + +async function githubRequest(args) { + const { body, githubToken, method, path } = args; + const response = await fetch(`https://api.github.com${path}`, { + body: body ? JSON.stringify(body) : undefined, + headers: { + Accept: "application/vnd.github+json", + Authorization: `Bearer ${githubToken}`, + "Content-Type": "application/json", + "User-Agent": "studio-compute-preview", + "X-GitHub-Api-Version": "2022-11-28", + }, + method, + }); + + if (!response.ok) { + throw new Error( + `GitHub API request failed (${response.status} ${response.statusText}): ${await response.text()}`, + ); + } + + return method === "GET" ? await response.json() : null; +} + +function getRequiredEnv(name) { + const value = process.env[name]?.trim(); + + if (!value) { + throw new Error(`Missing required environment variable ${name}.`); + } + + return value; +} + +await main(); diff --git a/scripts/compute-preview/compute-preview-deploy.mjs b/scripts/compute-preview/compute-preview-deploy.mjs new file mode 100644 index 00000000..02ddf356 --- /dev/null +++ b/scripts/compute-preview/compute-preview-deploy.mjs @@ -0,0 +1,149 @@ +#!/usr/bin/env node + +import { execFile } from "node:child_process"; +import { appendFileSync } from "node:fs"; +import { promisify } from "node:util"; + +import { + PREVIEW_PROJECT_NAME, + findNamedProject, + findNamedService, + sanitizeComputeServiceName, +} from "./compute-preview-utils.mjs"; + +const execFileAsync = promisify(execFile); + +async function main() { + const branchName = getRequiredEnv("PREVIEW_BRANCH_NAME"); + const projectName = process.env.PREVIEW_PROJECT_NAME ?? PREVIEW_PROJECT_NAME; + const deployPath = process.env.PREVIEW_DEPLOY_PATH ?? "deploy"; + const entrypoint = + process.env.PREVIEW_ENTRYPOINT ?? "bundle/server.bundle.js"; + const httpPort = process.env.PREVIEW_HTTP_PORT ?? 
"8080"; + const serviceName = sanitizeComputeServiceName(branchName); + + const project = await resolveProject(projectName); + const service = await ensureService({ + projectId: project.id, + region: project.defaultRegion ?? "eu-west-3", + serviceName, + }); + const deployResult = await runComputeJson([ + "deploy", + "--skip-build", + "--path", + deployPath, + "--entrypoint", + entrypoint, + "--http-port", + httpPort, + "--env", + `STUDIO_DEMO_PORT=${httpPort}`, + "--service", + service.id, + ]); + + const result = { + branchName, + projectId: project.id, + region: project.defaultRegion ?? "eu-west-3", + serviceId: service.id, + serviceName, + serviceUrl: deployResult.serviceEndpointDomain, + versionId: deployResult.versionId, + versionUrl: deployResult.versionEndpointDomain, + }; + + writeOutputs(result); + process.stdout.write(`${JSON.stringify(result, null, 2)}\n`); +} + +async function resolveProject(projectName) { + const projects = await runComputeJson(["projects", "list"]); + const project = findNamedProject(projects, projectName); + + if (!project) { + throw new Error(`Compute project "${projectName}" was not found.`); + } + + return project; +} + +async function ensureService(args) { + const { projectId, region, serviceName } = args; + const services = await runComputeJson([ + "services", + "list", + "--project", + projectId, + ]); + const existingService = findNamedService(services, serviceName); + + if (existingService) { + return existingService; + } + + return await runComputeJson([ + "services", + "create", + "--project", + projectId, + "--name", + serviceName, + "--region", + region, + ]); +} + +async function runComputeJson(args) { + const { stderr, stdout } = await execFileAsync( + "bunx", + ["@prisma/compute-cli@latest", ...args, "--json"], + { + env: process.env, + maxBuffer: 10 * 1024 * 1024, + }, + ); + const payload = JSON.parse(stdout); + + if (payload.ok !== true) { + throw new Error( + `Compute CLI returned a non-ok 
payload.\nstdout:\n${stdout}\nstderr:\n${stderr}`, + ); + } + + return payload.data; +} + +function getRequiredEnv(name) { + const value = process.env[name]?.trim(); + + if (!value) { + throw new Error(`Missing required environment variable ${name}.`); + } + + return value; +} + +function writeOutputs(result) { + const outputPath = process.env.GITHUB_OUTPUT; + + if (!outputPath) { + return; + } + + const lines = Object.entries({ + preview_branch_name: result.branchName, + preview_project_id: result.projectId, + preview_region: result.region, + preview_service_id: result.serviceId, + preview_service_name: result.serviceName, + preview_service_url: result.serviceUrl, + preview_version_id: result.versionId, + preview_version_url: result.versionUrl, + }).map(([key, value]) => `${key}=${value}`); + + appendFileSync(outputPath, `${lines.join("\n")}\n`); +} + +await main(); diff --git a/scripts/compute-preview/compute-preview-destroy.mjs b/scripts/compute-preview/compute-preview-destroy.mjs new file mode 100644 index 00000000..b2fd4b7a --- /dev/null +++ b/scripts/compute-preview/compute-preview-destroy.mjs @@ -0,0 +1,85 @@ +#!/usr/bin/env node + +import { execFile } from "node:child_process"; +import { promisify } from "node:util"; + +import { + PREVIEW_PROJECT_NAME, + findNamedProject, + findNamedService, + sanitizeComputeServiceName, +} from "./compute-preview-utils.mjs"; + +const execFileAsync = promisify(execFile); + +async function main() { + const branchName = getRequiredEnv("PREVIEW_BRANCH_NAME"); + const projectName = process.env.PREVIEW_PROJECT_NAME ?? 
PREVIEW_PROJECT_NAME;
+  const serviceName = sanitizeComputeServiceName(branchName);
+  const projects = await runComputeJson(["projects", "list"]);
+  const project = findNamedProject(projects, projectName);
+
+  if (!project) {
+    throw new Error(`Compute project "${projectName}" was not found.`);
+  }
+
+  const services = await runComputeJson([
+    "services",
+    "list",
+    "--project",
+    project.id,
+  ]);
+  const service = findNamedService(services, serviceName);
+
+  if (!service) {
+    process.stdout.write(
+      `${JSON.stringify(
+        { branchName, projectId: project.id, serviceName, destroyed: false },
+        null,
+        2,
+      )}\n`,
+    );
+    return;
+  }
+
+  await runComputeJson(["services", "destroy", service.id]);
+  process.stdout.write(
+    `${JSON.stringify(
+      { branchName, projectId: project.id, serviceId: service.id, serviceName, destroyed: true },
+      null,
+      2,
+    )}\n`,
+  );
+}
+
+async function runComputeJson(args) {
+  const { stderr, stdout } = await execFileAsync(
+    "bunx",
+    ["@prisma/compute-cli@latest", ...args, "--json"],
+    {
+      env: process.env,
+      maxBuffer: 10 * 1024 * 1024,
+    },
+  );
+  const payload = JSON.parse(stdout);
+
+  if (payload.ok !== true) {
+    throw new Error(
+      `Compute CLI returned a non-ok payload.\nstdout:\n${stdout}\nstderr:\n${stderr}`,
+    );
+  }
+
+  return payload.data;
+}
+
+function getRequiredEnv(name) {
+  const value = process.env[name]?.trim();
+
+  if (!value) {
+    throw new Error(`Missing required environment variable ${name}.`);
+  }
+
+  return value;
+}
+
+await main();
diff --git a/scripts/compute-preview/compute-preview-utils.mjs b/scripts/compute-preview/compute-preview-utils.mjs
new file mode 100644
index 00000000..1d098521
--- /dev/null
+++ b/scripts/compute-preview/compute-preview-utils.mjs
@@ -0,0 +1,64 @@
+import { createHash } from "node:crypto";
+
+export const PREVIEW_PROJECT_NAME = "studio-preview";
+export const PREVIEW_COMMENT_MARKER = "<!-- studio-compute-preview -->";
+export const MAX_COMPUTE_SERVICE_NAME_LENGTH = 63;
+
+export function
sanitizeComputeServiceName(branchName) { + const normalized = branchName + .trim() + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/-+/g, "-") + .replace(/^-|-$/g, ""); + + const fallbackName = normalized.length > 0 ? normalized : "preview"; + + if (fallbackName.length <= MAX_COMPUTE_SERVICE_NAME_LENGTH) { + return fallbackName; + } + + const suffix = createHash("sha256") + .update(branchName) + .digest("hex") + .slice(0, 8); + const prefixLength = + MAX_COMPUTE_SERVICE_NAME_LENGTH - suffix.length - 1; + const truncatedPrefix = fallbackName + .slice(0, prefixLength) + .replace(/-+$/g, ""); + + return `${truncatedPrefix}-${suffix}`; +} + +export function findNamedProject(projects, projectName) { + return projects.find((project) => project.name === projectName); +} + +export function findNamedService(services, serviceName) { + return services.find((service) => service.name === serviceName); +} + +export function buildPreviewCommentBody(args) { + const { + branchName, + serviceName, + serviceUrl, + versionUrl, + } = args; + + const lines = [ + PREVIEW_COMMENT_MARKER, + "Compute preview deployed.", + "", + `Branch: \`${branchName}\``, + `Service: \`${serviceName}\``, + `Preview: ${serviceUrl}`, + ]; + + if (versionUrl) { + lines.push(`Version: ${versionUrl}`); + } + + return lines.join("\n"); +} diff --git a/scripts/compute-preview/compute-preview-utils.test.ts b/scripts/compute-preview/compute-preview-utils.test.ts new file mode 100644 index 00000000..c34d2f6b --- /dev/null +++ b/scripts/compute-preview/compute-preview-utils.test.ts @@ -0,0 +1,90 @@ +import { describe, expect, it } from "vitest"; + +import { + PREVIEW_COMMENT_MARKER, + buildPreviewCommentBody, + findNamedProject, + findNamedService, + sanitizeComputeServiceName, +} from "./compute-preview-utils.mjs"; + +describe("sanitizeComputeServiceName", () => { + it("normalizes slashes and punctuation into a compute-safe slug", () => { + 
expect(sanitizeComputeServiceName("codex/public-origin-main")).toBe( + "codex-public-origin-main", + ); + expect(sanitizeComputeServiceName(" Feature__Foo.Bar ")).toBe( + "feature-foo-bar", + ); + }); + + it("falls back to preview when no alphanumeric characters remain", () => { + expect(sanitizeComputeServiceName("///")).toBe("preview"); + }); + + it("truncates long names deterministically with a hash suffix", () => { + const branchName = "feature/" + "x".repeat(120); + const serviceName = sanitizeComputeServiceName(branchName); + + expect(serviceName.length).toBeLessThanOrEqual(63); + expect(serviceName).toMatch(/^feature-x+-[0-9a-f]{8}$/); + expect(sanitizeComputeServiceName(branchName)).toBe(serviceName); + }); +}); + +describe("findNamedProject", () => { + it("returns the matching project by name", () => { + expect( + findNamedProject( + [ + { id: "proj_1", name: "foo" }, + { id: "proj_2", name: "studio-preview" }, + ], + "studio-preview", + ), + ).toEqual({ + id: "proj_2", + name: "studio-preview", + }); + }); +}); + +describe("findNamedService", () => { + it("returns the matching service by name", () => { + expect( + findNamedService( + [ + { id: "svc_1", name: "main" }, + { id: "svc_2", name: "codex-public-origin-main" }, + ], + "codex-public-origin-main", + ), + ).toEqual({ + id: "svc_2", + name: "codex-public-origin-main", + }); + }); +}); + +describe("buildPreviewCommentBody", () => { + it("builds a sticky PR comment with the preview URL", () => { + expect( + buildPreviewCommentBody({ + branchName: "codex/public-origin-main", + serviceName: "codex-public-origin-main", + serviceUrl: "https://example.cdg.prisma.build", + versionUrl: "https://version.cdg.prisma.build", + }), + ).toBe( + [ + PREVIEW_COMMENT_MARKER, + "Compute preview deployed.", + "", + "Branch: `codex/public-origin-main`", + "Service: `codex-public-origin-main`", + "Preview: https://example.cdg.prisma.build", + "Version: https://version.cdg.prisma.build", + ].join("\n"), + ); + }); +}); 
From 5b7e96d5c81766a1b9b9b5f08baa71b5edbc3c70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Bramer=20Schmidt?= Date: Wed, 15 Apr 2026 14:14:52 +0300 Subject: [PATCH 3/4] Make preview workflow start on GitHub Actions --- .github/workflows/compute-preview.yml | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/.github/workflows/compute-preview.yml b/.github/workflows/compute-preview.yml index 7cc3bd2d..d55c46a4 100644 --- a/.github/workflows/compute-preview.yml +++ b/.github/workflows/compute-preview.yml @@ -34,8 +34,13 @@ jobs: node-version-file: .node-version cache: pnpm - - name: Setup Bun - uses: oven-sh/setup-bun@0c5077e51419868618aeaa5fe8019c62421857d6 # v2 + - name: Install Bun + env: + BUN_VERSION: 1.3.12 + run: | + curl -fsSL https://bun.sh/install | bash -s -- bun-v${BUN_VERSION} + echo "$HOME/.bun/bin" >> "$GITHUB_PATH" + "$HOME/.bun/bin/bun" --version - name: Install dependencies run: pnpm install --frozen-lockfile @@ -74,8 +79,13 @@ jobs: with: node-version-file: .node-version - - name: Setup Bun - uses: oven-sh/setup-bun@0c5077e51419868618aeaa5fe8019c62421857d6 # v2 + - name: Install Bun + env: + BUN_VERSION: 1.3.12 + run: | + curl -fsSL https://bun.sh/install | bash -s -- bun-v${BUN_VERSION} + echo "$HOME/.bun/bin" >> "$GITHUB_PATH" + "$HOME/.bun/bin/bun" --version - name: Destroy preview service for deleted branch env: From ffc215bbaa3b74e57ffe5954da79e74b316b7dbe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B8ren=20Bramer=20Schmidt?= Date: Wed, 15 Apr 2026 14:17:34 +0300 Subject: [PATCH 4/4] Grant PR write access to preview workflow --- .github/workflows/compute-preview.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/compute-preview.yml b/.github/workflows/compute-preview.yml index d55c46a4..203514b5 100644 --- a/.github/workflows/compute-preview.yml +++ b/.github/workflows/compute-preview.yml @@ -11,6 +11,7 @@ on: permissions: contents: read issues: write + pull-requests: write 
concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.event.ref }}