createAssembly payload
- void handleCopy()}
- >
- {copied ? "Copied" : "Copy"}
+ void handleCopy()}>
+ {copied ? 'Copied' : 'Copy'}
{payloadText}
@@ -823,18 +757,14 @@ const WeddingLayout = ({
Live gallery
- Curated highlights processed by Transloadit — resized images and
- encoded videos.
+ Curated highlights processed by Transloadit — resized images and encoded videos.
{children}
- Gallery shows the most recent uploads (retention {retentionLabel}).
- Files are persisted in R2 via Transloadit’s Cloudflare store robot.
- Built with{" "}
-
- @transloadit/convex
- {" "}
- and Transloadit .
+ Gallery shows the most recent uploads (retention {retentionLabel}). Files are persisted in
+ R2 via Transloadit’s Cloudflare store robot. Built with{' '}
+ @transloadit/convex and{' '}
+ Transloadit .
{toasts && toasts.length > 0 && (
@@ -850,20 +780,14 @@ const WeddingLayout = ({
);
};
-export default function WeddingUploadsClient({
- convexUrl,
-}: {
- convexUrl?: string | null;
-}) {
+export default function WeddingUploadsClient({ convexUrl }: { convexUrl?: string | null }) {
const [isHydrated, setIsHydrated] = useState(false);
- const [resolvedConvexUrl, setResolvedConvexUrl] = useState
(
- () => {
- if (convexUrl) return convexUrl;
- if (typeof window === "undefined") return null;
- const params = new URLSearchParams(window.location.search);
- return params.get("convexUrl");
- },
- );
+ const [resolvedConvexUrl, setResolvedConvexUrl] = useState(() => {
+ if (convexUrl) return convexUrl;
+ if (typeof window === 'undefined') return null;
+ const params = new URLSearchParams(window.location.search);
+ return params.get('convexUrl');
+ });
useEffect(() => {
setIsHydrated(true);
@@ -878,7 +802,7 @@ export default function WeddingUploadsClient({
return;
}
const params = new URLSearchParams(window.location.search);
- const fromQuery = params.get("convexUrl");
+ const fromQuery = params.get('convexUrl');
if (fromQuery) {
setResolvedConvexUrl(fromQuery);
}
@@ -892,7 +816,7 @@ export default function WeddingUploadsClient({
}
return (
-
+
);
diff --git a/example/app/api/assemblies/route.ts b/example/app/api/assemblies/route.ts
index 0291296..0351af9 100644
--- a/example/app/api/assemblies/route.ts
+++ b/example/app/api/assemblies/route.ts
@@ -1,5 +1,5 @@
-import { NextResponse } from "next/server";
-import { runAction, runQuery } from "../../../lib/convex";
+import { NextResponse } from 'next/server';
+import { runAction, runQuery } from '../../../lib/convex';
export async function POST(request: Request) {
const payload = (await request.json().catch(() => ({}))) as {
@@ -7,13 +7,11 @@ export async function POST(request: Request) {
guestName?: string;
uploadCode?: string;
};
- const fileCount = Number.isFinite(payload.fileCount)
- ? Math.max(1, payload.fileCount ?? 1)
- : 1;
+ const fileCount = Number.isFinite(payload.fileCount) ? Math.max(1, payload.fileCount ?? 1) : 1;
- const response = await runAction("createWeddingAssemblyOptions", {
+ const response = await runAction('createWeddingAssemblyOptions', {
fileCount,
- guestName: payload.guestName ?? "Guest",
+ guestName: payload.guestName ?? 'Guest',
uploadCode: payload.uploadCode,
});
@@ -22,23 +20,23 @@ export async function POST(request: Request) {
export async function GET(request: Request) {
const url = new URL(request.url);
- const assemblyId = url.searchParams.get("assemblyId");
+ const assemblyId = url.searchParams.get('assemblyId');
if (!assemblyId) {
return NextResponse.json({ status: null, results: [] });
}
- if (url.searchParams.get("refresh") === "1") {
+ if (url.searchParams.get('refresh') === '1') {
try {
- await runAction("refreshAssembly", { assemblyId });
+ await runAction('refreshAssembly', { assemblyId });
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
- console.warn("Refresh assembly failed", message);
+ console.warn('Refresh assembly failed', message);
}
}
const [status, results] = await Promise.all([
- runQuery("getAssemblyStatus", { assemblyId }),
- runQuery("listResults", { assemblyId }),
+ runQuery('getAssemblyStatus', { assemblyId }),
+ runQuery('listResults', { assemblyId }),
]);
return NextResponse.json({ status, results });
diff --git a/example/app/globals.css b/example/app/globals.css
index 78625c1..c75d766 100644
--- a/example/app/globals.css
+++ b/example/app/globals.css
@@ -1,7 +1,6 @@
:root {
color-scheme: light;
- font-family:
- "Newsreader", "Iowan Old Style", "Palatino", "Times New Roman", serif;
+ font-family: "Newsreader", "Iowan Old Style", "Palatino", "Times New Roman", serif;
line-height: 1.5;
background: #f3efe9;
color: #2e2622;
@@ -14,12 +13,7 @@
body {
margin: 0;
min-height: 100vh;
- background: radial-gradient(
- circle at top,
- #fff7ec 0%,
- #f0e6d8 55%,
- #e7dbc9 100%
- );
+ background: radial-gradient(circle at top, #fff7ec 0%, #f0e6d8 55%, #e7dbc9 100%);
}
.page {
diff --git a/example/app/layout.tsx b/example/app/layout.tsx
index d39331d..a713ac5 100644
--- a/example/app/layout.tsx
+++ b/example/app/layout.tsx
@@ -1,17 +1,13 @@
-import "@uppy/core/css/style.css";
-import "@uppy/dashboard/css/style.css";
-import "./globals.css";
+import '@uppy/core/css/style.css';
+import '@uppy/dashboard/css/style.css';
+import './globals.css';
export const metadata = {
- title: "Wedding Uploads · Transloadit + Convex",
- description: "Guest uploads with Transloadit, Convex, and Uppy",
+ title: 'Wedding Uploads · Transloadit + Convex',
+ description: 'Guest uploads with Transloadit, Convex, and Uppy',
};
-export default function RootLayout({
- children,
-}: {
- children: React.ReactNode;
-}) {
+export default function RootLayout({ children }: { children: React.ReactNode }) {
return (
{children}
diff --git a/example/app/page.tsx b/example/app/page.tsx
index 23c6d8f..450e00a 100644
--- a/example/app/page.tsx
+++ b/example/app/page.tsx
@@ -1,18 +1,18 @@
-import WeddingUploadsApp from "./WeddingUploadsApp";
+import WeddingUploadsApp from './WeddingUploadsApp';
-export const dynamic = "force-dynamic";
+export const dynamic = 'force-dynamic';
const slugifyBranch = (value: string) =>
value
.trim()
.toLowerCase()
- .replace(/[^a-z0-9-]/g, "-")
- .replace(/-+/g, "-")
- .replace(/^-|-$/g, "");
+ .replace(/[^a-z0-9-]/g, '-')
+ .replace(/-+/g, '-')
+ .replace(/^-|-$/g, '');
const resolvePreviewConvexUrl = () => {
- if (process.env.VERCEL_ENV !== "preview") return null;
- const branch = process.env.VERCEL_GIT_COMMIT_REF ?? "";
+ if (process.env.VERCEL_ENV !== 'preview') return null;
+ const branch = process.env.VERCEL_GIT_COMMIT_REF ?? '';
if (!branch) return null;
const slug = slugifyBranch(branch);
if (!slug) return null;
diff --git a/example/app/providers.tsx b/example/app/providers.tsx
index a498c7f..98671e1 100644
--- a/example/app/providers.tsx
+++ b/example/app/providers.tsx
@@ -1,8 +1,8 @@
-"use client";
+'use client';
-import { ConvexAuthProvider } from "@convex-dev/auth/react";
-import { ConvexReactClient } from "convex/react";
-import { useMemo } from "react";
+import { ConvexAuthProvider } from '@convex-dev/auth/react';
+import { ConvexReactClient } from 'convex/react';
+import { useMemo } from 'react';
export const Providers = ({
convexUrl,
diff --git a/example/app/transloadit/webhook/route.ts b/example/app/transloadit/webhook/route.ts
index c4ce77d..3b34b5d 100644
--- a/example/app/transloadit/webhook/route.ts
+++ b/example/app/transloadit/webhook/route.ts
@@ -1,9 +1,9 @@
-import { handleWebhookRequest } from "@transloadit/convex";
-import { runAction } from "../../../lib/convex";
+import { handleWebhookRequest } from '@transloadit/convex';
+import { runAction } from '../../../lib/convex';
export async function POST(request: Request) {
return handleWebhookRequest(request, {
- mode: "queue",
- runAction: (args) => runAction("queueWebhook", args),
+ mode: 'queue',
+ runAction: (args) => runAction('queueWebhook', args),
});
}
diff --git a/example/convex/auth.config.ts b/example/convex/auth.config.ts
index 8a9ad08..8db5210 100644
--- a/example/convex/auth.config.ts
+++ b/example/convex/auth.config.ts
@@ -1,18 +1,18 @@
const siteUrl =
process.env.CONVEX_SITE_URL ??
- (process.env.CONVEX_URL?.includes(".convex.cloud")
- ? process.env.CONVEX_URL.replace(".convex.cloud", ".convex.site")
+ (process.env.CONVEX_URL?.includes('.convex.cloud')
+ ? process.env.CONVEX_URL.replace('.convex.cloud', '.convex.site')
: process.env.CONVEX_URL);
if (!siteUrl) {
- throw new Error("Missing CONVEX_SITE_URL or CONVEX_URL for auth.config.ts");
+ throw new Error('Missing CONVEX_SITE_URL or CONVEX_URL for auth.config.ts');
}
export default {
providers: [
{
domain: siteUrl,
- applicationID: "convex",
+ applicationID: 'convex',
},
],
};
diff --git a/example/convex/auth.ts b/example/convex/auth.ts
index 15e1ae8..33597ae 100644
--- a/example/convex/auth.ts
+++ b/example/convex/auth.ts
@@ -1,15 +1,12 @@
-import { Anonymous } from "@convex-dev/auth/providers/Anonymous";
-import { convexAuth } from "@convex-dev/auth/server";
+import { Anonymous } from '@convex-dev/auth/providers/Anonymous';
+import { convexAuth } from '@convex-dev/auth/server';
const ensureConvexSiteUrl = () => {
if (process.env.CONVEX_SITE_URL) return;
const convexUrl = process.env.CONVEX_URL;
if (!convexUrl) return;
- if (!convexUrl.includes(".convex.cloud")) return;
- process.env.CONVEX_SITE_URL = convexUrl.replace(
- ".convex.cloud",
- ".convex.site",
- );
+ if (!convexUrl.includes('.convex.cloud')) return;
+ process.env.CONVEX_SITE_URL = convexUrl.replace('.convex.cloud', '.convex.site');
};
ensureConvexSiteUrl();
diff --git a/example/convex/convex.config.ts b/example/convex/convex.config.ts
index abfb485..c187557 100644
--- a/example/convex/convex.config.ts
+++ b/example/convex/convex.config.ts
@@ -1,5 +1,5 @@
-import transloadit from "@transloadit/convex/convex.config";
-import { defineApp } from "convex/server";
+import transloadit from '@transloadit/convex/convex.config';
+import { defineApp } from 'convex/server';
const app = defineApp();
app.use(transloadit);
diff --git a/example/convex/http.ts b/example/convex/http.ts
index 8a80d27..1c91ce9 100644
--- a/example/convex/http.ts
+++ b/example/convex/http.ts
@@ -1,8 +1,8 @@
-import { buildWebhookQueueArgs } from "@transloadit/convex";
-import { httpRouter } from "convex/server";
-import { api } from "./_generated/api";
-import { httpAction } from "./_generated/server";
-import { auth } from "./auth";
+import { buildWebhookQueueArgs } from '@transloadit/convex';
+import { httpRouter } from 'convex/server';
+import { api } from './_generated/api';
+import { httpAction } from './_generated/server';
+import { auth } from './auth';
const http = httpRouter();
auth.addHttpRoutes(http);
@@ -16,11 +16,11 @@ const requireEnv = (name: string) => {
};
http.route({
- path: "/transloadit/webhook",
- method: "POST",
+ path: '/transloadit/webhook',
+ method: 'POST',
handler: httpAction(async (ctx, request) => {
const args = await buildWebhookQueueArgs(request, {
- authSecret: requireEnv("TRANSLOADIT_SECRET"),
+ authSecret: requireEnv('TRANSLOADIT_SECRET'),
requireSignature: false,
});
diff --git a/example/convex/schema.ts b/example/convex/schema.ts
index 3808011..f856f57 100644
--- a/example/convex/schema.ts
+++ b/example/convex/schema.ts
@@ -1,6 +1,6 @@
-import { authTables } from "@convex-dev/auth/server";
-import { defineSchema, defineTable } from "convex/server";
-import { v } from "convex/values";
+import { authTables } from '@convex-dev/auth/server';
+import { defineSchema, defineTable } from 'convex/server';
+import { v } from 'convex/values';
export default defineSchema({
...authTables,
@@ -9,5 +9,5 @@ export default defineSchema({
windowStart: v.number(),
count: v.number(),
lastUploadAt: v.number(),
- }).index("by_user", ["userId"]),
+ }).index('by_user', ['userId']),
});
diff --git a/example/convex/transloadit.ts b/example/convex/transloadit.ts
index 0fa91be..208f213 100644
--- a/example/convex/transloadit.ts
+++ b/example/convex/transloadit.ts
@@ -1,5 +1,5 @@
-import { makeTransloaditAPI } from "@transloadit/convex";
-import { components } from "./_generated/api";
+import { makeTransloaditAPI } from '@transloadit/convex';
+import { components } from './_generated/api';
export const {
createAssembly,
diff --git a/example/convex/wedding.ts b/example/convex/wedding.ts
index 0343b36..1cefd17 100644
--- a/example/convex/wedding.ts
+++ b/example/convex/wedding.ts
@@ -1,8 +1,8 @@
-import { vAssemblyOptions } from "@transloadit/convex";
-import { v } from "convex/values";
-import { buildWeddingSteps } from "../lib/transloadit-steps";
-import { components, internal } from "./_generated/api";
-import { action, internalMutation } from "./_generated/server";
+import { vAssemblyOptions } from '@transloadit/convex';
+import { v } from 'convex/values';
+import { buildWeddingSteps } from '../lib/transloadit-steps';
+import { components, internal } from './_generated/api';
+import { action, internalMutation } from './_generated/server';
const MAX_UPLOADS_PER_HOUR = 6;
const WINDOW_MS = 60 * 60 * 1000;
@@ -21,11 +21,11 @@ export const checkUploadLimit = internalMutation({
handler: async (ctx, args) => {
const now = Date.now();
const existing = await ctx.db
- .query("uploadLimits")
- .withIndex("by_user", (q) => q.eq("userId", args.userId))
+ .query('uploadLimits')
+ .withIndex('by_user', (q) => q.eq('userId', args.userId))
.first();
if (!existing) {
- await ctx.db.insert("uploadLimits", {
+ await ctx.db.insert('uploadLimits', {
userId: args.userId,
windowStart: now,
count: 1,
@@ -42,7 +42,7 @@ export const checkUploadLimit = internalMutation({
return null;
}
if (existing.count >= MAX_UPLOADS_PER_HOUR) {
- throw new Error("Upload limit reached. Try again later.");
+ throw new Error('Upload limit reached. Try again later.');
}
await ctx.db.patch(existing._id, {
count: existing.count + 1,
@@ -65,7 +65,7 @@ export const createWeddingAssemblyOptions = action({
handler: async (ctx, args) => {
const identity = await ctx.auth.getUserIdentity();
if (!identity) {
- throw new Error("Authentication required.");
+ throw new Error('Authentication required.');
}
await ctx.runMutation(internal.wedding.checkUploadLimit, {
@@ -76,36 +76,33 @@ export const createWeddingAssemblyOptions = action({
if (requiredCode) {
const provided = args.uploadCode?.trim();
if (!provided || provided !== requiredCode) {
- throw new Error("Upload code required.");
+ throw new Error('Upload code required.');
}
}
const steps = buildWeddingSteps();
- const notifyUrl = requireEnv("TRANSLOADIT_NOTIFY_URL");
+ const notifyUrl = requireEnv('TRANSLOADIT_NOTIFY_URL');
const fileCount = Math.max(1, args.fileCount);
const assemblyArgs = {
steps,
notifyUrl,
numExpectedUploadFiles: fileCount,
fields: {
- guestName: args.guestName ?? "Guest",
- album: "wedding-gallery",
+ guestName: args.guestName ?? 'Guest',
+ album: 'wedding-gallery',
fileCount,
userId: identity.subject,
},
userId: identity.subject,
};
- const assemblyOptions = await ctx.runAction(
- components.transloadit.lib.createAssemblyOptions,
- {
- ...assemblyArgs,
- config: {
- authKey: requireEnv("TRANSLOADIT_KEY"),
- authSecret: requireEnv("TRANSLOADIT_SECRET"),
- },
+ const assemblyOptions = await ctx.runAction(components.transloadit.lib.createAssemblyOptions, {
+ ...assemblyArgs,
+ config: {
+ authKey: requireEnv('TRANSLOADIT_KEY'),
+ authSecret: requireEnv('TRANSLOADIT_SECRET'),
},
- );
+ });
const parsedParams = safeParseParams(assemblyOptions.params);
const params = redactSecrets(parsedParams ?? assemblyArgs);
@@ -121,32 +118,24 @@ const safeParseParams = (value: string) => {
try {
return JSON.parse(value) as Record;
} catch (error) {
- console.warn("Failed to parse Transloadit params", error);
+ console.warn('Failed to parse Transloadit params', error);
return null;
}
};
-const secretKeys = new Set([
- "secret",
- "key",
- "credentials",
- "authSecret",
- "authKey",
-]);
+const secretKeys = new Set(['secret', 'key', 'credentials', 'authSecret', 'authKey']);
const redactSecrets = (value: unknown): unknown => {
if (Array.isArray(value)) {
return value.map((item) => redactSecrets(item));
}
- if (value && typeof value === "object") {
- const entries = Object.entries(value as Record).map(
- ([key, val]) => {
- if (secretKeys.has(key)) {
- return [key, "***"];
- }
- return [key, redactSecrets(val)];
- },
- );
+ if (value && typeof value === 'object') {
+ const entries = Object.entries(value as Record).map(([key, val]) => {
+ if (secretKeys.has(key)) {
+ return [key, '***'];
+ }
+ return [key, redactSecrets(val)];
+ });
return Object.fromEntries(entries);
}
return value;
diff --git a/example/lib/convex.ts b/example/lib/convex.ts
index 5e90f6e..3968861 100644
--- a/example/lib/convex.ts
+++ b/example/lib/convex.ts
@@ -1,30 +1,29 @@
-import { ConvexHttpClient } from "convex/browser";
-import { convexTest } from "convex-test";
-import { api } from "../../src/component/_generated/api.ts";
-import schema from "../../src/component/schema.ts";
-import { modules } from "../../src/test/nodeModules.ts";
-import { buildWeddingSteps } from "./transloadit-steps";
+import { ConvexHttpClient } from 'convex/browser';
+import { convexTest } from 'convex-test';
+import { api } from '../../src/component/_generated/api.ts';
+import schema from '../../src/component/schema.ts';
+import { modules } from '../../src/test/nodeModules.ts';
+import { buildWeddingSteps } from './transloadit-steps';
-type Mode = "local" | "cloud";
+type Mode = 'local' | 'cloud';
-const authKey = process.env.TRANSLOADIT_KEY ?? "";
-const authSecret = process.env.TRANSLOADIT_SECRET ?? "";
-const remoteUrl = process.env.E2E_REMOTE_URL ?? process.env.CONVEX_URL ?? "";
-const remoteAdminKey =
- process.env.E2E_REMOTE_ADMIN_KEY ?? process.env.CONVEX_ADMIN_KEY ?? "";
+const authKey = process.env.TRANSLOADIT_KEY ?? '';
+const authSecret = process.env.TRANSLOADIT_SECRET ?? '';
+const remoteUrl = process.env.E2E_REMOTE_URL ?? process.env.CONVEX_URL ?? '';
+const remoteAdminKey = process.env.E2E_REMOTE_ADMIN_KEY ?? process.env.CONVEX_ADMIN_KEY ?? '';
const resolveMode = (): Mode => {
const explicit = process.env.E2E_MODE;
- if (explicit === "cloud") return "cloud";
- if (explicit === "local") return "local";
- if (remoteUrl && remoteAdminKey) return "cloud";
- return "local";
+ if (explicit === 'cloud') return 'cloud';
+ if (explicit === 'local') return 'local';
+ if (remoteUrl && remoteAdminKey) return 'cloud';
+ return 'local';
};
const mode = resolveMode();
-const testClient = mode === "local" ? convexTest(schema, modules) : null;
+const testClient = mode === 'local' ? convexTest(schema, modules) : null;
const remoteClient =
- mode === "cloud" && remoteUrl && remoteAdminKey
+ mode === 'cloud' && remoteUrl && remoteAdminKey
? new ConvexHttpClient(remoteUrl, { logger: false })
: null;
@@ -38,70 +37,58 @@ if (remoteClient) {
adminClient.setDebug(false);
}
-export const runAction = async (
- name: string,
- args: Record,
-) => {
+export const runAction = async (name: string, args: Record) => {
if (remoteClient) {
const remoteName =
- name === "createWeddingAssemblyOptions"
- ? "wedding:createWeddingAssemblyOptions"
+ name === 'createWeddingAssemblyOptions'
+ ? 'wedding:createWeddingAssemblyOptions'
: `transloadit:${name}`;
const remoteAction = remoteClient as ConvexHttpClient & {
- action: (
- actionName: string,
- args: Record,
- ) => Promise;
+ action: (actionName: string, args: Record) => Promise;
};
// Convex's client types only accept generated function references.
return remoteAction.action(remoteName, args);
}
- if (mode === "cloud") {
- throw new Error("Missing E2E_REMOTE_URL or E2E_REMOTE_ADMIN_KEY");
+ if (mode === 'cloud') {
+ throw new Error('Missing E2E_REMOTE_URL or E2E_REMOTE_ADMIN_KEY');
}
if (!testClient) {
- throw new Error("Missing Convex test harness");
+ throw new Error('Missing Convex test harness');
}
const config = authKey && authSecret ? { authKey, authSecret } : null;
if (!config) {
- throw new Error("Missing TRANSLOADIT_KEY or TRANSLOADIT_SECRET");
+ throw new Error('Missing TRANSLOADIT_KEY or TRANSLOADIT_SECRET');
}
- if (name === "createWeddingAssemblyOptions") {
+ if (name === 'createWeddingAssemblyOptions') {
const notifyUrl = process.env.TRANSLOADIT_NOTIFY_URL;
if (!notifyUrl) {
- throw new Error("Missing TRANSLOADIT_NOTIFY_URL");
+ throw new Error('Missing TRANSLOADIT_NOTIFY_URL');
}
- const fileCount =
- typeof args.fileCount === "number" ? Math.max(1, args.fileCount) : 1;
- const guestName =
- typeof args.guestName === "string" ? args.guestName : "Guest";
+ const fileCount = typeof args.fileCount === 'number' ? Math.max(1, args.fileCount) : 1;
+ const guestName = typeof args.guestName === 'string' ? args.guestName : 'Guest';
const requiredCode = process.env.WEDDING_UPLOAD_CODE;
if (requiredCode) {
- const provided =
- typeof args.uploadCode === "string" ? args.uploadCode.trim() : "";
+ const provided = typeof args.uploadCode === 'string' ? args.uploadCode.trim() : '';
if (!provided || provided !== requiredCode) {
- throw new Error("Upload code required.");
+ throw new Error('Upload code required.');
}
}
- const assemblyOptions = await testClient.action(
- api.lib.createAssemblyOptions,
- {
- steps: buildWeddingSteps(),
- notifyUrl,
- numExpectedUploadFiles: fileCount,
- fields: {
- guestName,
- album: "wedding-gallery",
- fileCount,
- },
- config,
+ const assemblyOptions = await testClient.action(api.lib.createAssemblyOptions, {
+ steps: buildWeddingSteps(),
+ notifyUrl,
+ numExpectedUploadFiles: fileCount,
+ fields: {
+ guestName,
+ album: 'wedding-gallery',
+ fileCount,
},
- );
+ config,
+ });
const params = safeParseParams(assemblyOptions.params);
return {
assemblyOptions,
@@ -109,14 +96,14 @@ export const runAction = async (
};
}
- if (name === "createAssembly") {
+ if (name === 'createAssembly') {
const assemblyArgs = args as Record;
return testClient.action(api.lib.createAssembly, {
...assemblyArgs,
config,
});
}
- if (name === "handleWebhook") {
+ if (name === 'handleWebhook') {
const webhookArgs = args as {
payload: unknown;
signature?: string;
@@ -127,7 +114,7 @@ export const runAction = async (
config: { authSecret: config.authSecret },
});
}
- if (name === "queueWebhook") {
+ if (name === 'queueWebhook') {
// Local harness does not run scheduled jobs, so process immediately.
const webhookArgs = args as {
payload: unknown;
@@ -139,7 +126,7 @@ export const runAction = async (
config: { authSecret: config.authSecret },
});
}
- if (name === "refreshAssembly") {
+ if (name === 'refreshAssembly') {
const refreshArgs = args as { assemblyId: string };
return testClient.action(api.lib.refreshAssembly, {
...refreshArgs,
@@ -153,28 +140,25 @@ export const runAction = async (
export const runQuery = async (name: string, args: Record) => {
if (remoteClient) {
const remoteQuery = remoteClient as ConvexHttpClient & {
- query: (
- queryName: string,
- args: Record,
- ) => Promise;
+ query: (queryName: string, args: Record) => Promise;
};
return remoteQuery.query(`transloadit:${name}`, args);
}
- if (mode === "cloud") {
- throw new Error("Missing E2E_REMOTE_URL or E2E_REMOTE_ADMIN_KEY");
+ if (mode === 'cloud') {
+ throw new Error('Missing E2E_REMOTE_URL or E2E_REMOTE_ADMIN_KEY');
}
if (!testClient) {
- throw new Error("Missing Convex test harness");
+ throw new Error('Missing Convex test harness');
}
- if (name === "getAssemblyStatus") {
+ if (name === 'getAssemblyStatus') {
return testClient.query(api.lib.getAssemblyStatus, {
assemblyId: args.assemblyId as string,
});
}
- if (name === "listResults") {
+ if (name === 'listResults') {
const listArgs = args as {
assemblyId: string;
limit?: number;
@@ -190,7 +174,7 @@ const safeParseParams = (value: string) => {
try {
return JSON.parse(value) as Record;
} catch (error) {
- console.warn("Failed to parse Transloadit params", error);
+ console.warn('Failed to parse Transloadit params', error);
return null;
}
};
diff --git a/example/lib/r2.ts b/example/lib/r2.ts
index bcb076a..09638f4 100644
--- a/example/lib/r2.ts
+++ b/example/lib/r2.ts
@@ -11,7 +11,7 @@ const clean = (value?: string) => value?.trim() || undefined;
const normalizeHost = (value?: string) => {
if (!value) return undefined;
- if (value.startsWith("http://") || value.startsWith("https://")) {
+ if (value.startsWith('http://') || value.startsWith('https://')) {
return value;
}
return `https://${value}`;
@@ -19,7 +19,7 @@ const normalizeHost = (value?: string) => {
const normalizeUrlPrefix = (value?: string) => {
if (!value) return undefined;
- return value.endsWith("/") ? value : `${value}/`;
+ return value.endsWith('/') ? value : `${value}/`;
};
export const readR2ConfigFromEnv = (env: NodeJS.ProcessEnv): R2Config => {
@@ -31,8 +31,7 @@ export const readR2ConfigFromEnv = (env: NodeJS.ProcessEnv): R2Config => {
const hostValue = clean(env.R2_HOST);
const publicUrl = clean(env.R2_PUBLIC_URL);
const host = normalizeHost(
- hostValue ??
- (accountId ? `${accountId}.r2.cloudflarestorage.com` : undefined),
+ hostValue ?? (accountId ? `${accountId}.r2.cloudflarestorage.com` : undefined),
);
if (credentials) {
@@ -48,11 +47,11 @@ export const readR2ConfigFromEnv = (env: NodeJS.ProcessEnv): R2Config => {
if (!bucket || !accessKeyId || !secretAccessKey) {
throw new Error(
- "Missing R2 credentials. Set TRANSLOADIT_R2_CREDENTIALS or provide R2_BUCKET, R2_ACCESS_KEY_ID, and R2_SECRET_ACCESS_KEY.",
+ 'Missing R2 credentials. Set TRANSLOADIT_R2_CREDENTIALS or provide R2_BUCKET, R2_ACCESS_KEY_ID, and R2_SECRET_ACCESS_KEY.',
);
}
if (!host) {
- throw new Error("Missing R2 host. Set R2_HOST or R2_ACCOUNT_ID.");
+ throw new Error('Missing R2 host. Set R2_HOST or R2_ACCOUNT_ID.');
}
return {
diff --git a/example/lib/transloadit-steps.ts b/example/lib/transloadit-steps.ts
index 281aa6f..60bd6ee 100644
--- a/example/lib/transloadit-steps.ts
+++ b/example/lib/transloadit-steps.ts
@@ -1,34 +1,27 @@
-import { robotCloudflareStoreInstructionsSchema } from "@transloadit/zod/v3/robots/cloudflare-store";
-import { robotFileFilterInstructionsSchema } from "@transloadit/zod/v3/robots/file-filter";
-import { robotImageResizeInstructionsSchema } from "@transloadit/zod/v3/robots/image-resize";
-import { robotUploadHandleInstructionsSchema } from "@transloadit/zod/v3/robots/upload-handle";
-import { robotVideoEncodeInstructionsSchema } from "@transloadit/zod/v3/robots/video-encode";
-import { robotVideoThumbsInstructionsSchema } from "@transloadit/zod/v3/robots/video-thumbs";
-import type { z } from "zod/v3";
-import { type R2Config, readR2ConfigFromEnv } from "./r2";
+import { robotCloudflareStoreInstructionsSchema } from '@transloadit/zod/v3/robots/cloudflare-store';
+import { robotFileFilterInstructionsSchema } from '@transloadit/zod/v3/robots/file-filter';
+import { robotImageResizeInstructionsSchema } from '@transloadit/zod/v3/robots/image-resize';
+import { robotUploadHandleInstructionsSchema } from '@transloadit/zod/v3/robots/upload-handle';
+import { robotVideoEncodeInstructionsSchema } from '@transloadit/zod/v3/robots/video-encode';
+import { robotVideoThumbsInstructionsSchema } from '@transloadit/zod/v3/robots/video-thumbs';
+import type { z } from 'zod/v3';
+import { type R2Config, readR2ConfigFromEnv } from './r2';
type TransloaditSteps = Record>;
// biome-ignore lint/style/useTemplate: Template literals emit invalid `${${...}}` in Next build output.
-const tpl = (value: string) => "$" + "{" + value + "}";
+const tpl = (value: string) => '$' + '{' + value + '}';
-type RobotCloudflareStoreInput = z.input<
- typeof robotCloudflareStoreInstructionsSchema
->;
+type RobotCloudflareStoreInput = z.input;
type RobotFileFilterInput = z.input;
type RobotImageResizeInput = z.input;
-type RobotUploadHandleInput = z.input<
- typeof robotUploadHandleInstructionsSchema
->;
+type RobotUploadHandleInput = z.input;
type RobotVideoEncodeInput = z.input;
type RobotVideoThumbsInput = z.input;
-const buildStoreStep = (
- use: string,
- r2: R2Config,
-): RobotCloudflareStoreInput => {
+const buildStoreStep = (use: string, r2: R2Config): RobotCloudflareStoreInput => {
const step: RobotCloudflareStoreInput = {
- robot: "/cloudflare/store",
+ robot: '/cloudflare/store',
use,
result: true,
credentials: r2.credentials,
@@ -37,9 +30,7 @@ const buildStoreStep = (
secret: r2.secretAccessKey,
host: r2.host,
url_prefix: r2.urlPrefix,
- path: `wedding/${tpl("fields.album")}/${tpl("unique_prefix")}/${tpl(
- "file.url_name",
- )}`,
+ path: `wedding/${tpl('fields.album')}/${tpl('unique_prefix')}/${tpl('file.url_name')}`,
};
robotCloudflareStoreInstructionsSchema.parse(step);
return step;
@@ -47,20 +38,17 @@ const buildStoreStep = (
const buildUploadStep = (): RobotUploadHandleInput => {
const step: RobotUploadHandleInput = {
- robot: "/upload/handle",
+ robot: '/upload/handle',
};
robotUploadHandleInstructionsSchema.parse(step);
return step;
};
-const buildFilterStep = (
- use: string,
- pattern: string,
-): RobotFileFilterInput => {
+const buildFilterStep = (use: string, pattern: string): RobotFileFilterInput => {
const step: RobotFileFilterInput = {
- robot: "/file/filter",
+ robot: '/file/filter',
use,
- accepts: [[tpl("file.mime"), "regex", pattern]],
+ accepts: [[tpl('file.mime'), 'regex', pattern]],
error_on_decline: false,
};
robotFileFilterInstructionsSchema.parse(step);
@@ -69,11 +57,11 @@ const buildFilterStep = (
const buildResizeStep = (use: string): RobotImageResizeInput => {
const step: RobotImageResizeInput = {
- robot: "/image/resize",
+ robot: '/image/resize',
use,
width: 1600,
height: 1600,
- resize_strategy: "fit",
+ resize_strategy: 'fit',
};
robotImageResizeInstructionsSchema.parse(step);
return step;
@@ -81,9 +69,9 @@ const buildResizeStep = (use: string): RobotImageResizeInput => {
const buildVideoStep = (use: string): RobotVideoEncodeInput => {
const step: RobotVideoEncodeInput = {
- robot: "/video/encode",
+ robot: '/video/encode',
use,
- preset: "ipad-high",
+ preset: 'ipad-high',
};
robotVideoEncodeInstructionsSchema.parse(step);
return step;
@@ -91,10 +79,10 @@ const buildVideoStep = (use: string): RobotVideoEncodeInput => {
const buildVideoThumbsStep = (use: string): RobotVideoThumbsInput => {
const step: RobotVideoThumbsInput = {
- robot: "/video/thumbs",
+ robot: '/video/thumbs',
use,
count: 1,
- format: "jpg",
+ format: 'jpg',
width: 640,
};
robotVideoThumbsInstructionsSchema.parse(step);
@@ -105,14 +93,14 @@ export const buildWeddingSteps = (): TransloaditSteps => {
const r2 = readR2ConfigFromEnv(process.env);
return {
- ":original": buildUploadStep(),
- images_filtered: buildFilterStep(":original", "^image"),
- videos_filtered: buildFilterStep(":original", "^video"),
- images_resized: buildResizeStep("images_filtered"),
- videos_thumbs: buildVideoThumbsStep("videos_filtered"),
- videos_encoded: buildVideoStep("videos_filtered"),
- images_output: buildStoreStep("images_resized", r2),
- videos_thumbs_output: buildStoreStep("videos_thumbs", r2),
- videos_output: buildStoreStep("videos_encoded", r2),
+ ':original': buildUploadStep(),
+ images_filtered: buildFilterStep(':original', '^image'),
+ videos_filtered: buildFilterStep(':original', '^video'),
+ images_resized: buildResizeStep('images_filtered'),
+ videos_thumbs: buildVideoThumbsStep('videos_filtered'),
+ videos_encoded: buildVideoStep('videos_filtered'),
+ images_output: buildStoreStep('images_resized', r2),
+ videos_thumbs_output: buildStoreStep('videos_thumbs', r2),
+ videos_output: buildStoreStep('videos_encoded', r2),
};
};
diff --git a/example/lib/transloadit.ts b/example/lib/transloadit.ts
index cc535ce..33a4f15 100644
--- a/example/lib/transloadit.ts
+++ b/example/lib/transloadit.ts
@@ -1,7 +1,7 @@
export const weddingStepNames = {
- image: "images_resized",
- video: "videos_encoded",
- videoThumbs: "videos_thumbs",
+ image: 'images_resized',
+ video: 'videos_encoded',
+ videoThumbs: 'videos_thumbs',
};
export type {
@@ -9,7 +9,7 @@ export type {
AssemblyResponse,
AssemblyResultResponse,
AssemblyStatus,
-} from "@transloadit/convex";
+} from '@transloadit/convex';
export {
ASSEMBLY_STATUS_COMPLETED,
@@ -26,4 +26,4 @@ export {
isAssemblyUploadingStatus,
parseAssemblyStatus,
pollAssembly,
-} from "@transloadit/convex";
+} from '@transloadit/convex';
diff --git a/example/next-env.d.ts b/example/next-env.d.ts
index 9edff1c..1511519 100644
--- a/example/next-env.d.ts
+++ b/example/next-env.d.ts
@@ -1,6 +1,6 @@
///
///
-import "./.next/types/routes.d.ts";
+import './.next/types/routes.d.ts';
// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
diff --git a/example/next.config.ts b/example/next.config.ts
index 759a2b2..3ef2868 100644
--- a/example/next.config.ts
+++ b/example/next.config.ts
@@ -1,4 +1,4 @@
-import type { NextConfig } from "next";
+import type { NextConfig } from 'next';
const nextConfig: NextConfig = {
reactStrictMode: true,
diff --git a/example/tsconfig.json b/example/tsconfig.json
index 7fd2f3b..062d17a 100644
--- a/example/tsconfig.json
+++ b/example/tsconfig.json
@@ -27,12 +27,6 @@
}
]
},
- "include": [
- "next-env.d.ts",
- "app",
- "lib",
- ".next/types/**/*.ts",
- ".next/dev/types/**/*.ts"
- ],
+ "include": ["next-env.d.ts", "app", "lib", ".next/types/**/*.ts", ".next/dev/types/**/*.ts"],
"exclude": ["node_modules"]
}
diff --git a/scripts/cleanup-demo.ts b/scripts/cleanup-demo.ts
index 8652b88..b47d7bf 100644
--- a/scripts/cleanup-demo.ts
+++ b/scripts/cleanup-demo.ts
@@ -1,23 +1,19 @@
-import {
- DeleteObjectsCommand,
- ListObjectsV2Command,
- S3Client,
-} from "@aws-sdk/client-s3";
-import { ConvexHttpClient } from "convex/browser";
-import { loadEnv } from "./env.ts";
+import { DeleteObjectsCommand, ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3';
+import { ConvexHttpClient } from 'convex/browser';
+import { loadEnv } from './env.ts';
loadEnv();
const argMap = new Map();
for (const arg of process.argv.slice(2)) {
- if (arg === "--dry-run") {
- argMap.set("dry-run", true);
+ if (arg === '--dry-run') {
+ argMap.set('dry-run', true);
continue;
}
- if (arg.startsWith("--")) {
- const [key, value] = arg.slice(2).split("=");
+ if (arg.startsWith('--')) {
+ const [key, value] = arg.slice(2).split('=');
if (key) {
- argMap.set(key, value ?? "");
+ argMap.set(key, value ?? '');
}
}
}
@@ -31,28 +27,25 @@ const requireEnv = (name: string) => {
};
const album =
- (argMap.get("album") as string | undefined) ||
- process.env.DEMO_ALBUM ||
- "wedding-gallery";
-const prefix =
- (argMap.get("prefix") as string | undefined) || `wedding/${album}/`;
-const dryRun = argMap.get("dry-run") === true;
-
-const convexUrl = requireEnv("CONVEX_URL");
-const convexAdminKey = requireEnv("CONVEX_ADMIN_KEY");
-
-const r2Bucket = requireEnv("R2_BUCKET");
-const r2AccessKeyId = requireEnv("R2_ACCESS_KEY_ID");
-const r2SecretAccessKey = requireEnv("R2_SECRET_ACCESS_KEY");
+ (argMap.get('album') as string | undefined) || process.env.DEMO_ALBUM || 'wedding-gallery';
+const prefix = (argMap.get('prefix') as string | undefined) || `wedding/${album}/`;
+const dryRun = argMap.get('dry-run') === true;
+
+const convexUrl = requireEnv('CONVEX_URL');
+const convexAdminKey = requireEnv('CONVEX_ADMIN_KEY');
+
+const r2Bucket = requireEnv('R2_BUCKET');
+const r2AccessKeyId = requireEnv('R2_ACCESS_KEY_ID');
+const r2SecretAccessKey = requireEnv('R2_SECRET_ACCESS_KEY');
const r2Host =
process.env.R2_HOST ||
(process.env.R2_ACCOUNT_ID
? `https://${process.env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`
- : "");
+ : '');
if (!r2Host) {
- throw new Error("Missing R2_HOST or R2_ACCOUNT_ID environment variable");
+ throw new Error('Missing R2_HOST or R2_ACCOUNT_ID environment variable');
}
-const r2Endpoint = r2Host.startsWith("http") ? r2Host : `https://${r2Host}`;
+const r2Endpoint = r2Host.startsWith('http') ? r2Host : `https://${r2Host}`;
const client = new ConvexHttpClient(convexUrl, {
logger: false,
@@ -63,7 +56,7 @@ const client = new ConvexHttpClient(convexUrl, {
client.setAdminAuth(convexAdminKey);
const deleteFromConvex = async () => {
- const result = (await client.mutation("transloadit:purgeAlbum", {
+ const result = (await client.mutation('transloadit:purgeAlbum', {
album,
deleteAssemblies: true,
})) as { deletedResults: number; deletedAssemblies: number };
@@ -72,7 +65,7 @@ const deleteFromConvex = async () => {
const deleteFromR2 = async () => {
const s3 = new S3Client({
- region: "auto",
+ region: 'auto',
endpoint: r2Endpoint,
credentials: {
accessKeyId: r2AccessKeyId,
@@ -98,9 +91,7 @@ const deleteFromR2 = async () => {
toDelete.push({ Key: entry.Key });
}
}
- continuationToken = response.IsTruncated
- ? response.NextContinuationToken
- : undefined;
+ continuationToken = response.IsTruncated ? response.NextContinuationToken : undefined;
} while (continuationToken);
if (dryRun) {
diff --git a/scripts/deploy-cloud.ts b/scripts/deploy-cloud.ts
index 824f93d..7dc0d07 100644
--- a/scripts/deploy-cloud.ts
+++ b/scripts/deploy-cloud.ts
@@ -1,15 +1,15 @@
-import { createPublicKey, generateKeyPairSync } from "node:crypto";
-import { mkdir, mkdtemp, rm } from "node:fs/promises";
-import { tmpdir } from "node:os";
-import { join, resolve } from "node:path";
-import { fileURLToPath } from "node:url";
-import { loadEnv } from "./env.ts";
-import { writeAppFiles } from "./qa/app-template.ts";
-import { parseDeployOutput, requireEnv, run } from "./qa/run.ts";
+import { createPublicKey, generateKeyPairSync } from 'node:crypto';
+import { mkdir, mkdtemp, rm } from 'node:fs/promises';
+import { tmpdir } from 'node:os';
+import { join, resolve } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { loadEnv } from './env.ts';
+import { writeAppFiles } from './qa/app-template.ts';
+import { parseDeployOutput, requireEnv, run } from './qa/run.ts';
loadEnv();
-const ciOutput = process.env.CI_OUTPUT === "1";
+const ciOutput = process.env.CI_OUTPUT === '1';
const log = (...args: Parameters) => {
if (ciOutput) {
console.error(...args);
@@ -17,53 +17,53 @@ const log = (...args: Parameters) => {
console.log(...args);
}
};
-const runStdio = ciOutput ? "pipe" : "inherit";
+const runStdio = ciOutput ? 'pipe' : 'inherit';
-const rootDir = resolve(fileURLToPath(new URL("..", import.meta.url)));
+const rootDir = resolve(fileURLToPath(new URL('..', import.meta.url)));
const sleep = (ms: number) =>
new Promise((resolve) => {
setTimeout(resolve, ms);
});
const deployCloud = async () => {
- requireEnv("TRANSLOADIT_KEY");
- requireEnv("TRANSLOADIT_SECRET");
- requireEnv("CONVEX_DEPLOY_KEY");
+ requireEnv('TRANSLOADIT_KEY');
+ requireEnv('TRANSLOADIT_SECRET');
+ requireEnv('CONVEX_DEPLOY_KEY');
let qaDir: string | null = null;
try {
- qaDir = await mkdtemp(join(tmpdir(), "transloadit-convex-deploy-"));
- const projectDir = join(qaDir, "app");
- const tgzPath = join(qaDir, "transloadit-convex.tgz");
+ qaDir = await mkdtemp(join(tmpdir(), 'transloadit-convex-deploy-'));
+ const projectDir = join(qaDir, 'app');
+ const tgzPath = join(qaDir, 'transloadit-convex.tgz');
await mkdir(projectDir, { recursive: true });
log(`Packing @transloadit/convex into ${tgzPath}...`);
- run("yarn", ["pack", "-o", tgzPath], { cwd: rootDir, stdio: runStdio });
+ run('yarn', ['pack', '-o', tgzPath], { cwd: rootDir, stdio: runStdio });
await writeAppFiles({ projectDir, tgzPath });
- log("Installing dependencies...");
- run("npm", ["install", "--no-fund", "--no-audit"], {
+ log('Installing dependencies...');
+ run('npm', ['install', '--no-fund', '--no-audit'], {
cwd: projectDir,
stdio: runStdio,
});
- log("Deploying Convex app...");
+ log('Deploying Convex app...');
const previewName = process.env.CONVEX_PREVIEW_NAME;
- const deployArgs = ["convex", "deploy", "--typecheck", "disable", "--yes"];
+ const deployArgs = ['convex', 'deploy', '--typecheck', 'disable', '--yes'];
if (previewName) {
- deployArgs.push("--preview-create", previewName);
+ deployArgs.push('--preview-create', previewName);
}
- const deployOutput = run("npx", deployArgs, {
+ const deployOutput = run('npx', deployArgs, {
cwd: projectDir,
env: {
...process.env,
- CONVEX_DEPLOY_KEY: requireEnv("CONVEX_DEPLOY_KEY"),
+ CONVEX_DEPLOY_KEY: requireEnv('CONVEX_DEPLOY_KEY'),
},
- stdio: "pipe",
+ stdio: 'pipe',
});
const { deploymentName, deploymentUrl } = parseDeployOutput(deployOutput);
@@ -72,73 +72,67 @@ const deployCloud = async () => {
log(`Deployment URL: ${deploymentUrl}`);
log(`Webhook URL: ${notifyUrl}`);
- log("Waiting for deployment to accept env updates...");
+ log('Waiting for deployment to accept env updates...');
await sleep(5000);
- log("Setting env vars on deployment...");
+ log('Setting env vars on deployment...');
const deployEnv = {
...process.env,
- CONVEX_DEPLOY_KEY: requireEnv("CONVEX_DEPLOY_KEY"),
+ CONVEX_DEPLOY_KEY: requireEnv('CONVEX_DEPLOY_KEY'),
};
const setEnv = async (name: string, value: string) => {
const attempts = 3;
for (let attempt = 1; attempt <= attempts; attempt += 1) {
try {
- run(
- "npx",
- ["convex", "env", "set", "--deployment-name", deploymentName, name],
- {
- cwd: projectDir,
- env: deployEnv,
- stdio: runStdio === "inherit" ? "pipe" : runStdio,
- input: value,
- },
- );
+ run('npx', ['convex', 'env', 'set', '--deployment-name', deploymentName, name], {
+ cwd: projectDir,
+ env: deployEnv,
+ stdio: runStdio === 'inherit' ? 'pipe' : runStdio,
+ input: value,
+ });
return;
} catch (error) {
if (attempt === attempts) {
throw error;
}
- log(
- `Failed to set ${name} (attempt ${attempt}/${attempts}). Retrying...`,
- );
+ log(`Failed to set ${name} (attempt ${attempt}/${attempts}). Retrying...`);
await sleep(5000 * attempt);
}
}
};
- await setEnv("TRANSLOADIT_KEY", requireEnv("TRANSLOADIT_KEY"));
- await setEnv("TRANSLOADIT_SECRET", requireEnv("TRANSLOADIT_SECRET"));
- await setEnv("TRANSLOADIT_NOTIFY_URL", notifyUrl);
+ await setEnv('TRANSLOADIT_KEY', requireEnv('TRANSLOADIT_KEY'));
+ await setEnv('TRANSLOADIT_SECRET', requireEnv('TRANSLOADIT_SECRET'));
+ await setEnv('TRANSLOADIT_NOTIFY_URL', notifyUrl);
let jwtPrivateKey = process.env.JWT_PRIVATE_KEY;
if (!jwtPrivateKey) {
- const { privateKey } = generateKeyPairSync("rsa", {
+ const { privateKey } = generateKeyPairSync('rsa', {
modulusLength: 2048,
- publicKeyEncoding: { type: "spki", format: "pem" },
- privateKeyEncoding: { type: "pkcs8", format: "pem" },
+ publicKeyEncoding: { type: 'spki', format: 'pem' },
+ privateKeyEncoding: { type: 'pkcs8', format: 'pem' },
});
jwtPrivateKey = privateKey;
}
- await setEnv("JWT_PRIVATE_KEY", jwtPrivateKey);
- const jwk = createPublicKey(jwtPrivateKey).export({ format: "jwk" });
+ await setEnv('JWT_PRIVATE_KEY', jwtPrivateKey);
+ const jwk = createPublicKey(jwtPrivateKey).export({ format: 'jwk' });
const jwks = JSON.stringify({
keys: [
{
- use: "sig",
+ use: 'sig',
...jwk,
},
],
});
- await setEnv("JWKS", jwks);
+ await setEnv('JWKS', jwks);
const optionalEnv = [
- "TRANSLOADIT_R2_CREDENTIALS",
- "R2_BUCKET",
- "R2_ACCESS_KEY_ID",
- "R2_SECRET_ACCESS_KEY",
- "R2_ACCOUNT_ID",
- "R2_HOST",
- "R2_PUBLIC_URL",
+ 'TRANSLOADIT_R2_CREDENTIALS',
+ 'R2_BUCKET',
+ 'R2_ACCESS_KEY_ID',
+ 'R2_SECRET_ACCESS_KEY',
+ 'R2_ACCOUNT_ID',
+ 'R2_HOST',
+ 'R2_PUBLIC_URL',
];
for (const name of optionalEnv) {
diff --git a/scripts/deploy-demo.ts b/scripts/deploy-demo.ts
index 3395724..b74a6b4 100644
--- a/scripts/deploy-demo.ts
+++ b/scripts/deploy-demo.ts
@@ -1,12 +1,12 @@
-import { createPublicKey, generateKeyPairSync } from "node:crypto";
-import { join, resolve } from "node:path";
-import { fileURLToPath } from "node:url";
-import { loadEnv } from "./env.ts";
-import { parseDeployOutput, requireEnv, run } from "./qa/run.ts";
+import { createPublicKey, generateKeyPairSync } from 'node:crypto';
+import { join, resolve } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { loadEnv } from './env.ts';
+import { parseDeployOutput, requireEnv, run } from './qa/run.ts';
loadEnv();
-const ciOutput = process.env.CI_OUTPUT === "1";
+const ciOutput = process.env.CI_OUTPUT === '1';
const log = (...args: Parameters) => {
if (ciOutput) {
console.error(...args);
@@ -14,11 +14,11 @@ const log = (...args: Parameters) => {
console.log(...args);
}
};
-const runStdio = ciOutput ? "pipe" : "inherit";
+const runStdio = ciOutput ? 'pipe' : 'inherit';
-const rootDir = resolve(fileURLToPath(new URL("..", import.meta.url)));
-const exampleDir = join(rootDir, "example");
-const binDir = join(rootDir, "node_modules", ".bin");
+const rootDir = resolve(fileURLToPath(new URL('..', import.meta.url)));
+const exampleDir = join(rootDir, 'example');
+const binDir = join(rootDir, 'node_modules', '.bin');
const resolveDeployment = () => {
if (process.env.CONVEX_DEPLOYMENT) {
@@ -36,31 +36,27 @@ const sleep = (ms: number) =>
});
const deployDemo = async () => {
- requireEnv("TRANSLOADIT_KEY");
- requireEnv("TRANSLOADIT_SECRET");
- requireEnv("CONVEX_DEPLOY_KEY");
+ requireEnv('TRANSLOADIT_KEY');
+ requireEnv('TRANSLOADIT_SECRET');
+ requireEnv('CONVEX_DEPLOY_KEY');
const targetDeployment = resolveDeployment();
const baseEnv: NodeJS.ProcessEnv = {
...process.env,
- PATH: `${binDir}:${process.env.PATH ?? ""}`,
- CONVEX_DEPLOY_KEY: requireEnv("CONVEX_DEPLOY_KEY"),
+ PATH: `${binDir}:${process.env.PATH ?? ''}`,
+ CONVEX_DEPLOY_KEY: requireEnv('CONVEX_DEPLOY_KEY'),
...(targetDeployment ? { CONVEX_DEPLOYMENT: targetDeployment } : {}),
};
- log("Building @transloadit/convex...");
- run("yarn", ["build"], { cwd: rootDir, stdio: runStdio });
+ log('Building @transloadit/convex...');
+ run('yarn', ['build'], { cwd: rootDir, stdio: runStdio });
- log("Deploying demo Convex app...");
- const deployOutput = run(
- "npx",
- ["convex", "deploy", "--typecheck", "disable", "--yes"],
- {
- cwd: exampleDir,
- env: baseEnv,
- stdio: "pipe",
- },
- );
+ log('Deploying demo Convex app...');
+ const deployOutput = run('npx', ['convex', 'deploy', '--typecheck', 'disable', '--yes'], {
+ cwd: exampleDir,
+ env: baseEnv,
+ stdio: 'pipe',
+ });
const { deploymentName, deploymentUrl } = parseDeployOutput(deployOutput);
const siteUrl = `https://${deploymentName}.convex.site`;
@@ -68,7 +64,7 @@ const deployDemo = async () => {
log(`Deployment URL: ${deploymentUrl}`);
log(`Webhook URL: ${notifyUrl}`);
- log("Waiting for deployment to accept env updates...");
+ log('Waiting for deployment to accept env updates...');
await sleep(5000);
const deployEnv: NodeJS.ProcessEnv = {
@@ -79,63 +75,57 @@ const deployDemo = async () => {
const attempts = 3;
for (let attempt = 1; attempt <= attempts; attempt += 1) {
try {
- run(
- "npx",
- ["convex", "env", "set", "--deployment-name", deploymentName, name],
- {
- cwd: exampleDir,
- env: deployEnv,
- stdio: runStdio === "inherit" ? "pipe" : runStdio,
- input: value,
- },
- );
+ run('npx', ['convex', 'env', 'set', '--deployment-name', deploymentName, name], {
+ cwd: exampleDir,
+ env: deployEnv,
+ stdio: runStdio === 'inherit' ? 'pipe' : runStdio,
+ input: value,
+ });
return;
} catch (error) {
if (attempt === attempts) {
throw error;
}
- log(
- `Failed to set ${name} (attempt ${attempt}/${attempts}). Retrying...`,
- );
+ log(`Failed to set ${name} (attempt ${attempt}/${attempts}). Retrying...`);
await sleep(5000 * attempt);
}
}
};
- await setEnv("TRANSLOADIT_KEY", requireEnv("TRANSLOADIT_KEY"));
- await setEnv("TRANSLOADIT_SECRET", requireEnv("TRANSLOADIT_SECRET"));
- await setEnv("TRANSLOADIT_NOTIFY_URL", notifyUrl);
+ await setEnv('TRANSLOADIT_KEY', requireEnv('TRANSLOADIT_KEY'));
+ await setEnv('TRANSLOADIT_SECRET', requireEnv('TRANSLOADIT_SECRET'));
+ await setEnv('TRANSLOADIT_NOTIFY_URL', notifyUrl);
let jwtPrivateKey = process.env.JWT_PRIVATE_KEY;
if (!jwtPrivateKey) {
- const { privateKey } = generateKeyPairSync("rsa", {
+ const { privateKey } = generateKeyPairSync('rsa', {
modulusLength: 2048,
- publicKeyEncoding: { type: "spki", format: "pem" },
- privateKeyEncoding: { type: "pkcs8", format: "pem" },
+ publicKeyEncoding: { type: 'spki', format: 'pem' },
+ privateKeyEncoding: { type: 'pkcs8', format: 'pem' },
});
jwtPrivateKey = privateKey;
}
- await setEnv("JWT_PRIVATE_KEY", jwtPrivateKey);
- const jwk = createPublicKey(jwtPrivateKey).export({ format: "jwk" });
+ await setEnv('JWT_PRIVATE_KEY', jwtPrivateKey);
+ const jwk = createPublicKey(jwtPrivateKey).export({ format: 'jwk' });
const jwks = JSON.stringify({
keys: [
{
- use: "sig",
+ use: 'sig',
...jwk,
},
],
});
- await setEnv("JWKS", jwks);
+ await setEnv('JWKS', jwks);
const optionalEnv = [
- "TRANSLOADIT_R2_CREDENTIALS",
- "R2_BUCKET",
- "R2_ACCESS_KEY_ID",
- "R2_SECRET_ACCESS_KEY",
- "R2_ACCOUNT_ID",
- "R2_HOST",
- "R2_PUBLIC_URL",
- "WEDDING_UPLOAD_CODE",
+ 'TRANSLOADIT_R2_CREDENTIALS',
+ 'R2_BUCKET',
+ 'R2_ACCESS_KEY_ID',
+ 'R2_SECRET_ACCESS_KEY',
+ 'R2_ACCOUNT_ID',
+ 'R2_HOST',
+ 'R2_PUBLIC_URL',
+ 'WEDDING_UPLOAD_CODE',
];
for (const name of optionalEnv) {
diff --git a/scripts/emit-package-json.ts b/scripts/emit-package-json.ts
index b7a2fc0..c76bd42 100644
--- a/scripts/emit-package-json.ts
+++ b/scripts/emit-package-json.ts
@@ -1,14 +1,11 @@
-import { mkdirSync, writeFileSync } from "node:fs";
-import { join } from "node:path";
+import { mkdirSync, writeFileSync } from 'node:fs';
+import { join } from 'node:path';
-const distDir = new URL("../dist/", import.meta.url);
+const distDir = new URL('../dist/', import.meta.url);
mkdirSync(distDir, { recursive: true });
const packageJson = {
- type: "module",
+ type: 'module',
};
-writeFileSync(
- join(distDir.pathname, "package.json"),
- `${JSON.stringify(packageJson, null, 2)}\n`,
-);
+writeFileSync(join(distDir.pathname, 'package.json'), `${JSON.stringify(packageJson, null, 2)}\n`);
diff --git a/scripts/ensure-r2-lifecycle.ts b/scripts/ensure-r2-lifecycle.ts
index b4499c7..6bb9736 100644
--- a/scripts/ensure-r2-lifecycle.ts
+++ b/scripts/ensure-r2-lifecycle.ts
@@ -3,8 +3,8 @@ import {
type LifecycleRule,
PutBucketLifecycleConfigurationCommand,
S3Client,
-} from "@aws-sdk/client-s3";
-import { loadEnv } from "./env.ts";
+} from '@aws-sdk/client-s3';
+import { loadEnv } from './env.ts';
loadEnv();
@@ -16,28 +16,26 @@ const requireEnv = (name: string) => {
return value;
};
-const retentionDays = Number(
- process.env.R2_RETENTION_DAYS ? process.env.R2_RETENTION_DAYS : "1",
-);
+const retentionDays = Number(process.env.R2_RETENTION_DAYS ? process.env.R2_RETENTION_DAYS : '1');
if (!Number.isFinite(retentionDays) || retentionDays <= 0) {
- throw new Error("R2_RETENTION_DAYS must be a positive number");
+ throw new Error('R2_RETENTION_DAYS must be a positive number');
}
-const r2Bucket = requireEnv("R2_BUCKET");
-const r2AccessKeyId = requireEnv("R2_ACCESS_KEY_ID");
-const r2SecretAccessKey = requireEnv("R2_SECRET_ACCESS_KEY");
+const r2Bucket = requireEnv('R2_BUCKET');
+const r2AccessKeyId = requireEnv('R2_ACCESS_KEY_ID');
+const r2SecretAccessKey = requireEnv('R2_SECRET_ACCESS_KEY');
const r2Host =
process.env.R2_HOST ||
(process.env.R2_ACCOUNT_ID
? `https://${process.env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`
- : "");
+ : '');
if (!r2Host) {
- throw new Error("Missing R2_HOST or R2_ACCOUNT_ID environment variable");
+ throw new Error('Missing R2_HOST or R2_ACCOUNT_ID environment variable');
}
-const r2Endpoint = r2Host.startsWith("http") ? r2Host : `https://${r2Host}`;
+const r2Endpoint = r2Host.startsWith('http') ? r2Host : `https://${r2Host}`;
const client = new S3Client({
- region: "auto",
+ region: 'auto',
endpoint: r2Endpoint,
credentials: {
accessKeyId: r2AccessKeyId,
@@ -45,13 +43,13 @@ const client = new S3Client({
},
});
-const desiredRuleId = "expire-demo-objects";
+const desiredRuleId = 'expire-demo-objects';
const buildRules = (): LifecycleRule[] => [
{
ID: desiredRuleId,
- Status: "Enabled",
- Filter: { Prefix: "" },
+ Status: 'Enabled',
+ Filter: { Prefix: '' },
Expiration: { Days: retentionDays },
},
];
@@ -66,8 +64,8 @@ const run = async () => {
);
currentRules = current.Rules ?? [];
} catch (error) {
- const message = error instanceof Error ? error.message : "";
- if (!message.includes("NoSuchLifecycleConfiguration")) {
+ const message = error instanceof Error ? error.message : '';
+ if (!message.includes('NoSuchLifecycleConfiguration')) {
throw error;
}
}
diff --git a/scripts/ensure-template.ts b/scripts/ensure-template.ts
index 0f9b8f7..d0d80cd 100644
--- a/scripts/ensure-template.ts
+++ b/scripts/ensure-template.ts
@@ -1,12 +1,12 @@
-import { spawnSync } from "node:child_process";
-import { existsSync } from "node:fs";
-import { resolve } from "node:path";
-import { loadEnv } from "./env.ts";
+import { spawnSync } from 'node:child_process';
+import { existsSync } from 'node:fs';
+import { resolve } from 'node:path';
+import { loadEnv } from './env.ts';
loadEnv();
-const templateName = process.env.TRANSLOADIT_TEMPLATE_NAME ?? "convex-demo";
-const templateFile = resolve("templates/convex-demo.json");
+const templateName = process.env.TRANSLOADIT_TEMPLATE_NAME ?? 'convex-demo';
+const templateFile = resolve('templates/convex-demo.json');
if (!existsSync(templateFile)) {
throw new Error(`Template file not found: ${templateFile}`);
@@ -15,17 +15,17 @@ if (!existsSync(templateFile)) {
const env = { ...process.env };
if (!env.TRANSLOADIT_KEY || !env.TRANSLOADIT_SECRET) {
- throw new Error("Missing TRANSLOADIT_KEY/TRANSLOADIT_SECRET");
+ throw new Error('Missing TRANSLOADIT_KEY/TRANSLOADIT_SECRET');
}
function run(args: string[]) {
- const result = spawnSync("npx", ["--yes", "transloadit", ...args], {
+ const result = spawnSync('npx', ['--yes', 'transloadit', ...args], {
env,
- encoding: "utf8",
+ encoding: 'utf8',
});
if (result.status !== 0) {
const message = result.stderr?.trim() || result.stdout?.trim();
- throw new Error(message || "Transloadit CLI failed");
+ throw new Error(message || 'Transloadit CLI failed');
}
return result.stdout.trim();
}
@@ -45,7 +45,7 @@ function parseJsonLines(output: string): TemplateRecord[] {
if (Array.isArray(parsed)) {
return parsed as TemplateRecord[];
}
- if (parsed && typeof parsed === "object") {
+ if (parsed && typeof parsed === 'object') {
return [parsed as TemplateRecord];
}
} catch {
@@ -53,51 +53,33 @@ function parseJsonLines(output: string): TemplateRecord[] {
}
return trimmed
- .split("\n")
+ .split('\n')
.map((line) => line.trim())
.filter(Boolean)
.map((line) => JSON.parse(line) as TemplateRecord);
}
-const listOutput = run(["templates", "list", "-j", "--fields", "id,name"]);
+const listOutput = run(['templates', 'list', '-j', '--fields', 'id,name']);
const templates = parseJsonLines(listOutput);
const existing = templates.find((template) => template.name === templateName);
-let templateId = "";
+let templateId = '';
if (existing?.id) {
- const modifyOutput = run([
- "templates",
- "modify",
- "-j",
- existing.id,
- templateFile,
- ]);
+ const modifyOutput = run(['templates', 'modify', '-j', existing.id, templateFile]);
const modifyPayloads = parseJsonLines(modifyOutput);
const modifyPayload =
- modifyPayloads.length > 0
- ? modifyPayloads[modifyPayloads.length - 1]
- : undefined;
+ modifyPayloads.length > 0 ? modifyPayloads[modifyPayloads.length - 1] : undefined;
templateId = modifyPayload?.id ?? existing.id;
} else {
- const createOutput = run([
- "templates",
- "create",
- "-j",
- templateName,
- templateFile,
- ]);
+ const createOutput = run(['templates', 'create', '-j', templateName, templateFile]);
const createPayloads = parseJsonLines(createOutput);
const createPayload =
- createPayloads.length > 0
- ? createPayloads[createPayloads.length - 1]
- : undefined;
- templateId = createPayload?.id ?? createPayload?.template_id ?? "";
+ createPayloads.length > 0 ? createPayloads[createPayloads.length - 1] : undefined;
+ templateId = createPayload?.id ?? createPayload?.template_id ?? '';
}
if (!templateId) {
- throw new Error(
- "Unable to determine template id from Transloadit CLI output",
- );
+ throw new Error('Unable to determine template id from Transloadit CLI output');
}
console.log(
diff --git a/scripts/env.ts b/scripts/env.ts
index e01eca2..b8bbddf 100644
--- a/scripts/env.ts
+++ b/scripts/env.ts
@@ -1,4 +1,4 @@
-import { config } from "dotenv";
+import { config } from 'dotenv';
export const loadEnv = () => {
config({ path: process.env.DOTENV_CONFIG_PATH, quiet: true });
diff --git a/scripts/generate-demo-media.ts b/scripts/generate-demo-media.ts
index 8a9921e..1a30c1a 100644
--- a/scripts/generate-demo-media.ts
+++ b/scripts/generate-demo-media.ts
@@ -1,21 +1,21 @@
-import fs from "node:fs/promises";
-import path from "node:path";
-import { parseArgs } from "node:util";
-import { fal, type QueueStatus } from "@fal-ai/client";
-import { loadEnv } from "./env.ts";
+import fs from 'node:fs/promises';
+import path from 'node:path';
+import { parseArgs } from 'node:util';
+import { fal, type QueueStatus } from '@fal-ai/client';
+import { loadEnv } from './env.ts';
loadEnv();
const { values } = parseArgs({
options: {
- out: { type: "string", default: "test/e2e/fixtures" },
- force: { type: "boolean", default: false },
+ out: { type: 'string', default: 'test/e2e/fixtures' },
+ force: { type: 'boolean', default: false },
},
strict: true,
allowPositionals: false,
});
-const outputDir = path.resolve(values.out ?? "test/e2e/fixtures");
+const outputDir = path.resolve(values.out ?? 'test/e2e/fixtures');
const force = Boolean(values.force);
const requireEnv = (name: string) => {
@@ -26,47 +26,47 @@ const requireEnv = (name: string) => {
return value;
};
-const imageModel = process.env.DEMO_IMAGE_MODEL ?? "gemini-3-pro-image-preview";
-const imageAspectRatio = process.env.DEMO_IMAGE_ASPECT ?? "4:3";
-const googleKey = requireEnv("GOOGLE_GENERATIVE_AI_API_KEY");
-const falKey = requireEnv("FAL_KEY");
+const imageModel = process.env.DEMO_IMAGE_MODEL ?? 'gemini-3-pro-image-preview';
+const imageAspectRatio = process.env.DEMO_IMAGE_ASPECT ?? '4:3';
+const googleKey = requireEnv('GOOGLE_GENERATIVE_AI_API_KEY');
+const falKey = requireEnv('FAL_KEY');
fal.config({ credentials: falKey });
const negativePrompt =
- "blurry, distorted, deformed, extra limbs, text, watermark, logo, low quality, harsh shadows";
+ 'blurry, distorted, deformed, extra limbs, text, watermark, logo, low quality, harsh shadows';
const imagePrompts = [
{
- file: "wedding-photo-01.png",
+ file: 'wedding-photo-01.png',
prompt:
- "A candid wedding photo of a joyful couple walking down the aisle, warm golden light, elegant floral arch, guests softly blurred in background, realistic, magazine quality, natural skin tones, clean composition, no text or logos.",
+ 'A candid wedding photo of a joyful couple walking down the aisle, warm golden light, elegant floral arch, guests softly blurred in background, realistic, magazine quality, natural skin tones, clean composition, no text or logos.',
},
{
- file: "wedding-photo-02.png",
+ file: 'wedding-photo-02.png',
prompt:
- "A lively wedding reception toast with friends clinking glasses, soft bokeh lights, modern venue decor, warm and inviting atmosphere, realistic photo, crisp focus, no text or logos.",
+ 'A lively wedding reception toast with friends clinking glasses, soft bokeh lights, modern venue decor, warm and inviting atmosphere, realistic photo, crisp focus, no text or logos.',
},
];
const videoSpec = {
- file: "wedding-video-01.mp4",
+ file: 'wedding-video-01.mp4',
prompt:
- "A couple slow dancing at their wedding reception, gentle swaying, warm string lights, cinematic but realistic, smooth motion, shallow depth of field.",
- duration: "5",
- sourceImage: "wedding-photo-01.png",
+ 'A couple slow dancing at their wedding reception, gentle swaying, warm string lights, cinematic but realistic, smooth motion, shallow depth of field.',
+ duration: '5',
+ sourceImage: 'wedding-photo-01.png',
};
const generateImage = async (prompt: string): Promise => {
const response = await fetch(
`https://generativelanguage.googleapis.com/v1beta/models/${imageModel}:generateContent?key=${googleKey}`,
{
- method: "POST",
- headers: { "content-type": "application/json" },
+ method: 'POST',
+ headers: { 'content-type': 'application/json' },
body: JSON.stringify({
- contents: [{ role: "user", parts: [{ text: prompt }] }],
+ contents: [{ role: 'user', parts: [{ text: prompt }] }],
generationConfig: {
- responseModalities: ["TEXT", "IMAGE"],
+ responseModalities: ['TEXT', 'IMAGE'],
imageConfig: { aspectRatio: imageAspectRatio },
},
}),
@@ -85,13 +85,11 @@ const generateImage = async (prompt: string): Promise => {
};
}>;
};
- const imagePart = data.candidates?.[0]?.content?.parts?.find(
- (part) => part.inlineData?.data,
- );
+ const imagePart = data.candidates?.[0]?.content?.parts?.find((part) => part.inlineData?.data);
if (!imagePart?.inlineData?.data) {
- throw new Error("No image data returned by model.");
+ throw new Error('No image data returned by model.');
}
- return Buffer.from(imagePart.inlineData.data, "base64");
+ return Buffer.from(imagePart.inlineData.data, 'base64');
};
const ensureDir = async (dir: string) => {
@@ -129,34 +127,31 @@ const generateVideo = async () => {
const framePath = path.join(outputDir, videoSpec.sourceImage);
const frameBuffer = await fs.readFile(framePath);
- const file = new File([frameBuffer], "frame.png", { type: "image/png" });
+ const file = new File([frameBuffer], 'frame.png', { type: 'image/png' });
- console.log("→ Uploading anchor frame for Kling");
+ console.log('→ Uploading anchor frame for Kling');
const imageUrl = await fal.storage.upload(file);
- console.log("→ Generating Kling 2.6 video");
- const result = await fal.subscribe(
- "fal-ai/kling-video/v2.6/pro/image-to-video",
- {
- input: {
- prompt: videoSpec.prompt,
- start_image_url: imageUrl,
- duration: videoSpec.duration,
- generate_audio: false,
- negative_prompt: negativePrompt,
- },
- logs: false,
- onQueueUpdate: (update: QueueStatus) => {
- if (update.status === "IN_PROGRESS") {
- process.stdout.write(" Status: processing...\r");
- }
- },
+ console.log('→ Generating Kling 2.6 video');
+ const result = await fal.subscribe('fal-ai/kling-video/v2.6/pro/image-to-video', {
+ input: {
+ prompt: videoSpec.prompt,
+ start_image_url: imageUrl,
+ duration: videoSpec.duration,
+ generate_audio: false,
+ negative_prompt: negativePrompt,
},
- );
+ logs: false,
+ onQueueUpdate: (update: QueueStatus) => {
+ if (update.status === 'IN_PROGRESS') {
+ process.stdout.write(' Status: processing...\r');
+ }
+ },
+ });
const videoUrl = result.data?.video?.url;
if (!videoUrl) {
- throw new Error("No video URL returned by Kling");
+ throw new Error('No video URL returned by Kling');
}
const response = await fetch(videoUrl);
@@ -173,7 +168,7 @@ const run = async () => {
await ensureImage(file, prompt);
}
await generateVideo();
- console.log("✓ Demo media generated");
+ console.log('✓ Demo media generated');
};
run().catch((error) => {
diff --git a/scripts/qa/app-template.ts b/scripts/qa/app-template.ts
index 0d3a693..9ba7940 100644
--- a/scripts/qa/app-template.ts
+++ b/scripts/qa/app-template.ts
@@ -1,516 +1,510 @@
-import { mkdir, readFile, writeFile } from "node:fs/promises";
-import { join, resolve } from "node:path";
-import { fileURLToPath } from "node:url";
+import { mkdir, readFile, writeFile } from 'node:fs/promises';
+import { join, resolve } from 'node:path';
+import { fileURLToPath } from 'node:url';
type WriteAppFilesOptions = {
projectDir: string;
tgzPath: string;
};
-export const writeAppFiles = async ({
- projectDir,
- tgzPath,
-}: WriteAppFilesOptions) => {
- const convexDir = join(projectDir, "convex");
- const libDir = join(projectDir, "lib");
- const repoRoot = resolve(fileURLToPath(new URL("../..", import.meta.url)));
+export const writeAppFiles = async ({ projectDir, tgzPath }: WriteAppFilesOptions) => {
+ const convexDir = join(projectDir, 'convex');
+ const libDir = join(projectDir, 'lib');
+ const repoRoot = resolve(fileURLToPath(new URL('../..', import.meta.url)));
await mkdir(convexDir, { recursive: true });
await mkdir(libDir, { recursive: true });
- const r2Source = await readFile(
- join(repoRoot, "example", "lib", "r2.ts"),
- "utf8",
- );
+ const r2Source = await readFile(join(repoRoot, 'example', 'lib', 'r2.ts'), 'utf8');
const stepsSource = await readFile(
- join(repoRoot, "example", "lib", "transloadit-steps.ts"),
- "utf8",
+ join(repoRoot, 'example', 'lib', 'transloadit-steps.ts'),
+ 'utf8',
);
- await writeFile(join(libDir, "r2.ts"), r2Source, "utf8");
- await writeFile(join(libDir, "transloadit-steps.ts"), stepsSource, "utf8");
+ await writeFile(join(libDir, 'r2.ts'), r2Source, 'utf8');
+ await writeFile(join(libDir, 'transloadit-steps.ts'), stepsSource, 'utf8');
await writeFile(
- join(projectDir, "package.json"),
+ join(projectDir, 'package.json'),
JSON.stringify(
{
- name: "transloadit-convex-qa",
+ name: 'transloadit-convex-qa',
private: true,
- type: "module",
+ type: 'module',
dependencies: {
- "@convex-dev/auth": "^0.0.90",
- "@transloadit/convex": `file:${tgzPath}`,
- "@transloadit/zod": "^4.1.9",
- convex: "^1.31.5",
- zod: "^4.3.5",
+ '@convex-dev/auth': '^0.0.90',
+ '@transloadit/convex': `file:${tgzPath}`,
+ '@transloadit/zod': '^4.1.9',
+ convex: '^1.31.5',
+ zod: '^4.3.5',
},
},
null,
2,
),
- "utf8",
+ 'utf8',
);
await writeFile(
- join(convexDir, "convex.config.ts"),
+ join(convexDir, 'convex.config.ts'),
[
'import transloadit from "@transloadit/convex/convex.config";',
'import { defineApp } from "convex/server";',
- "",
- "const app = defineApp();",
- "app.use(transloadit);",
- "",
- "export default app;",
- "",
- ].join("\n"),
- "utf8",
+ '',
+ 'const app = defineApp();',
+ 'app.use(transloadit);',
+ '',
+ 'export default app;',
+ '',
+ ].join('\n'),
+ 'utf8',
);
await writeFile(
- join(convexDir, "schema.ts"),
+ join(convexDir, 'schema.ts'),
[
'import { defineSchema, defineTable } from "convex/server";',
'import { authTables } from "@convex-dev/auth/server";',
'import { v } from "convex/values";',
- "",
- "export default defineSchema({",
- " ...authTables,",
- " uploadLimits: defineTable({",
- " userId: v.string(),",
- " windowStart: v.number(),",
- " count: v.number(),",
- " lastUploadAt: v.number(),",
+ '',
+ 'export default defineSchema({',
+ ' ...authTables,',
+ ' uploadLimits: defineTable({',
+ ' userId: v.string(),',
+ ' windowStart: v.number(),',
+ ' count: v.number(),',
+ ' lastUploadAt: v.number(),',
' }).index("by_user", ["userId"]),',
- "});",
- "",
- ].join("\n"),
- "utf8",
+ '});',
+ '',
+ ].join('\n'),
+ 'utf8',
);
await writeFile(
- join(convexDir, "auth.ts"),
+ join(convexDir, 'auth.ts'),
[
'import { convexAuth } from "@convex-dev/auth/server";',
'import { Anonymous } from "@convex-dev/auth/providers/Anonymous";',
- "",
- "const ensureConvexSiteUrl = () => {",
- " if (process.env.CONVEX_SITE_URL) return;",
- " const convexUrl = process.env.CONVEX_URL;",
- " if (!convexUrl) return;",
+ '',
+ 'const ensureConvexSiteUrl = () => {',
+ ' if (process.env.CONVEX_SITE_URL) return;',
+ ' const convexUrl = process.env.CONVEX_URL;',
+ ' if (!convexUrl) return;',
' if (!convexUrl.includes(".convex.cloud")) return;',
' process.env.CONVEX_SITE_URL = convexUrl.replace(".convex.cloud", ".convex.site");',
- "};",
- "",
- "ensureConvexSiteUrl();",
- "",
- "export const { auth, signIn, signOut, store, isAuthenticated } = convexAuth({",
- " providers: [Anonymous],",
- "});",
- "",
- ].join("\n"),
- "utf8",
+ '};',
+ '',
+ 'ensureConvexSiteUrl();',
+ '',
+ 'export const { auth, signIn, signOut, store, isAuthenticated } = convexAuth({',
+ ' providers: [Anonymous],',
+ '});',
+ '',
+ ].join('\n'),
+ 'utf8',
);
await writeFile(
- join(convexDir, "auth.config.ts"),
+ join(convexDir, 'auth.config.ts'),
[
- "const siteUrl =",
- " process.env.CONVEX_SITE_URL ??",
+ 'const siteUrl =',
+ ' process.env.CONVEX_SITE_URL ??',
' (process.env.CONVEX_URL?.includes(".convex.cloud")',
' ? process.env.CONVEX_URL.replace(".convex.cloud", ".convex.site")',
- " : process.env.CONVEX_URL);",
- "",
- "if (!siteUrl) {",
+ ' : process.env.CONVEX_URL);',
+ '',
+ 'if (!siteUrl) {',
' throw new Error("Missing CONVEX_SITE_URL or CONVEX_URL for auth.config.ts");',
- "}",
- "",
- "export default {",
- " providers: [",
- " {",
- " domain: siteUrl,",
+ '}',
+ '',
+ 'export default {',
+ ' providers: [',
+ ' {',
+ ' domain: siteUrl,',
' applicationID: "convex",',
- " },",
- " ],",
- "};",
- "",
- ].join("\n"),
- "utf8",
+ ' },',
+ ' ],',
+ '};',
+ '',
+ ].join('\n'),
+ 'utf8',
);
await writeFile(
- join(convexDir, "transloadit.ts"),
+ join(convexDir, 'transloadit.ts'),
[
- "import {",
- " vAssemblyResponse,",
- " vAssemblyResultResponse,",
- " vCreateAssemblyArgs,",
+ 'import {',
+ ' vAssemblyResponse,',
+ ' vAssemblyResultResponse,',
+ ' vCreateAssemblyArgs,',
'} from "@transloadit/convex";',
'import { actionGeneric, componentsGeneric, mutationGeneric, queryGeneric } from "convex/server";',
'import { v } from "convex/values";',
- "",
- "const components = componentsGeneric();",
- "",
- "const readConfig = () => {",
- " const authKey = process.env.TRANSLOADIT_KEY;",
- " if (!authKey) {",
+ '',
+ 'const components = componentsGeneric();',
+ '',
+ 'const readConfig = () => {',
+ ' const authKey = process.env.TRANSLOADIT_KEY;',
+ ' if (!authKey) {',
' throw new Error("Missing TRANSLOADIT_KEY environment variable");',
- " }",
- " const authSecret = process.env.TRANSLOADIT_SECRET;",
- " if (!authSecret) {",
+ ' }',
+ ' const authSecret = process.env.TRANSLOADIT_SECRET;',
+ ' if (!authSecret) {',
' throw new Error("Missing TRANSLOADIT_SECRET environment variable");',
- " }",
- " return { authKey, authSecret };",
- "};",
- "",
- "export const createAssembly = actionGeneric({",
- " args: vCreateAssemblyArgs,",
- " returns: v.object({",
- " assemblyId: v.string(),",
- " data: v.any(),",
- " }),",
- " handler: async (ctx, args) => {",
- " return ctx.runAction(components.transloadit.lib.createAssembly, {",
- " ...args,",
- " config: readConfig(),",
- " });",
- " },",
- "});",
- "",
- "export const handleWebhook = actionGeneric({",
- " args: {",
- " payload: v.any(),",
- " rawBody: v.optional(v.string()),",
- " signature: v.optional(v.string()),",
- " verifySignature: v.optional(v.boolean()),",
- " },",
- " returns: v.object({",
- " assemblyId: v.string(),",
- " resultCount: v.number(),",
- " }),",
- " handler: async (ctx, args) => {",
- " const { authSecret } = readConfig();",
- " return ctx.runAction(components.transloadit.lib.handleWebhook, {",
- " ...args,",
- " config: { authSecret },",
- " });",
- " },",
- "});",
- "",
- "export const queueWebhook = actionGeneric({",
- " args: {",
- " payload: v.any(),",
- " rawBody: v.optional(v.string()),",
- " signature: v.optional(v.string()),",
- " verifySignature: v.optional(v.boolean()),",
- " },",
- " returns: v.object({",
- " assemblyId: v.string(),",
- " queued: v.boolean(),",
- " }),",
- " handler: async (ctx, args) => {",
- " const { authSecret } = readConfig();",
- " return ctx.runAction(components.transloadit.lib.queueWebhook, {",
- " ...args,",
- " config: { authSecret },",
- " });",
- " },",
- "});",
- "",
- "export const refreshAssembly = actionGeneric({",
- " args: { assemblyId: v.string() },",
- " returns: v.object({",
- " assemblyId: v.string(),",
- " resultCount: v.number(),",
- " ok: v.optional(v.string()),",
- " status: v.optional(v.string()),",
- " }),",
- " handler: async (ctx, args) => {",
- " return ctx.runAction(components.transloadit.lib.refreshAssembly, {",
- " ...args,",
- " config: readConfig(),",
- " });",
- " },",
- "});",
- "",
- "export const getAssemblyStatus = queryGeneric({",
- " args: { assemblyId: v.string() },",
- " returns: v.union(vAssemblyResponse, v.null()),",
- " handler: async (ctx, args) => {",
- " return ctx.runQuery(components.transloadit.lib.getAssemblyStatus, args);",
- " },",
- "});",
- "",
- "export const listAssemblies = queryGeneric({",
- " args: {",
- " status: v.optional(v.string()),",
- " userId: v.optional(v.string()),",
- " limit: v.optional(v.number()),",
- " },",
- " returns: v.array(vAssemblyResponse),",
- " handler: async (ctx, args) => {",
- " return ctx.runQuery(components.transloadit.lib.listAssemblies, args);",
- " },",
- "});",
- "",
- "export const listResults = queryGeneric({",
- " args: {",
- " assemblyId: v.string(),",
- " stepName: v.optional(v.string()),",
- " limit: v.optional(v.number()),",
- " },",
- " returns: v.array(vAssemblyResultResponse),",
- " handler: async (ctx, args) => {",
- " return ctx.runQuery(components.transloadit.lib.listResults, args);",
- " },",
- "});",
- "",
- "export const listAlbumResults = queryGeneric({",
- " args: {",
- " album: v.string(),",
- " limit: v.optional(v.number()),",
- " },",
- " returns: v.array(vAssemblyResultResponse),",
- " handler: async (ctx, args) => {",
- " return ctx.runQuery(components.transloadit.lib.listAlbumResults, args);",
- " },",
- "});",
- "",
- "export const storeAssemblyMetadata = mutationGeneric({",
- " args: {",
- " assemblyId: v.string(),",
- " userId: v.optional(v.string()),",
- " fields: v.optional(v.record(v.string(), v.any())),",
- " },",
- " returns: v.union(vAssemblyResponse, v.null()),",
- " handler: async (ctx, args) => {",
- " return ctx.runMutation(components.transloadit.lib.storeAssemblyMetadata, args);",
- " },",
- "});",
- "",
- ].join("\n"),
- "utf8",
+ ' }',
+ ' return { authKey, authSecret };',
+ '};',
+ '',
+ 'export const createAssembly = actionGeneric({',
+ ' args: vCreateAssemblyArgs,',
+ ' returns: v.object({',
+ ' assemblyId: v.string(),',
+ ' data: v.any(),',
+ ' }),',
+ ' handler: async (ctx, args) => {',
+ ' return ctx.runAction(components.transloadit.lib.createAssembly, {',
+ ' ...args,',
+ ' config: readConfig(),',
+ ' });',
+ ' },',
+ '});',
+ '',
+ 'export const handleWebhook = actionGeneric({',
+ ' args: {',
+ ' payload: v.any(),',
+ ' rawBody: v.optional(v.string()),',
+ ' signature: v.optional(v.string()),',
+ ' verifySignature: v.optional(v.boolean()),',
+ ' },',
+ ' returns: v.object({',
+ ' assemblyId: v.string(),',
+ ' resultCount: v.number(),',
+ ' }),',
+ ' handler: async (ctx, args) => {',
+ ' const { authSecret } = readConfig();',
+ ' return ctx.runAction(components.transloadit.lib.handleWebhook, {',
+ ' ...args,',
+ ' config: { authSecret },',
+ ' });',
+ ' },',
+ '});',
+ '',
+ 'export const queueWebhook = actionGeneric({',
+ ' args: {',
+ ' payload: v.any(),',
+ ' rawBody: v.optional(v.string()),',
+ ' signature: v.optional(v.string()),',
+ ' verifySignature: v.optional(v.boolean()),',
+ ' },',
+ ' returns: v.object({',
+ ' assemblyId: v.string(),',
+ ' queued: v.boolean(),',
+ ' }),',
+ ' handler: async (ctx, args) => {',
+ ' const { authSecret } = readConfig();',
+ ' return ctx.runAction(components.transloadit.lib.queueWebhook, {',
+ ' ...args,',
+ ' config: { authSecret },',
+ ' });',
+ ' },',
+ '});',
+ '',
+ 'export const refreshAssembly = actionGeneric({',
+ ' args: { assemblyId: v.string() },',
+ ' returns: v.object({',
+ ' assemblyId: v.string(),',
+ ' resultCount: v.number(),',
+ ' ok: v.optional(v.string()),',
+ ' status: v.optional(v.string()),',
+ ' }),',
+ ' handler: async (ctx, args) => {',
+ ' return ctx.runAction(components.transloadit.lib.refreshAssembly, {',
+ ' ...args,',
+ ' config: readConfig(),',
+ ' });',
+ ' },',
+ '});',
+ '',
+ 'export const getAssemblyStatus = queryGeneric({',
+ ' args: { assemblyId: v.string() },',
+ ' returns: v.union(vAssemblyResponse, v.null()),',
+ ' handler: async (ctx, args) => {',
+ ' return ctx.runQuery(components.transloadit.lib.getAssemblyStatus, args);',
+ ' },',
+ '});',
+ '',
+ 'export const listAssemblies = queryGeneric({',
+ ' args: {',
+ ' status: v.optional(v.string()),',
+ ' userId: v.optional(v.string()),',
+ ' limit: v.optional(v.number()),',
+ ' },',
+ ' returns: v.array(vAssemblyResponse),',
+ ' handler: async (ctx, args) => {',
+ ' return ctx.runQuery(components.transloadit.lib.listAssemblies, args);',
+ ' },',
+ '});',
+ '',
+ 'export const listResults = queryGeneric({',
+ ' args: {',
+ ' assemblyId: v.string(),',
+ ' stepName: v.optional(v.string()),',
+ ' limit: v.optional(v.number()),',
+ ' },',
+ ' returns: v.array(vAssemblyResultResponse),',
+ ' handler: async (ctx, args) => {',
+ ' return ctx.runQuery(components.transloadit.lib.listResults, args);',
+ ' },',
+ '});',
+ '',
+ 'export const listAlbumResults = queryGeneric({',
+ ' args: {',
+ ' album: v.string(),',
+ ' limit: v.optional(v.number()),',
+ ' },',
+ ' returns: v.array(vAssemblyResultResponse),',
+ ' handler: async (ctx, args) => {',
+ ' return ctx.runQuery(components.transloadit.lib.listAlbumResults, args);',
+ ' },',
+ '});',
+ '',
+ 'export const storeAssemblyMetadata = mutationGeneric({',
+ ' args: {',
+ ' assemblyId: v.string(),',
+ ' userId: v.optional(v.string()),',
+ ' fields: v.optional(v.record(v.string(), v.any())),',
+ ' },',
+ ' returns: v.union(vAssemblyResponse, v.null()),',
+ ' handler: async (ctx, args) => {',
+ ' return ctx.runMutation(components.transloadit.lib.storeAssemblyMetadata, args);',
+ ' },',
+ '});',
+ '',
+ ].join('\n'),
+ 'utf8',
);
await writeFile(
- join(convexDir, "wedding.ts"),
+ join(convexDir, 'wedding.ts'),
[
'import { vAssemblyOptions } from "@transloadit/convex";',
'import { v } from "convex/values";',
'import { action, internalMutation } from "./_generated/server";',
'import { components, internal } from "./_generated/api";',
'import { buildWeddingSteps } from "../lib/transloadit-steps";',
- "",
- "const MAX_UPLOADS_PER_HOUR = 6;",
- "const WINDOW_MS = 60 * 60 * 1000;",
- "",
- "const requireEnv = (name: string) => {",
- " const value = process.env[name];",
- " if (!value) {",
+ '',
+ 'const MAX_UPLOADS_PER_HOUR = 6;',
+ 'const WINDOW_MS = 60 * 60 * 1000;',
+ '',
+ 'const requireEnv = (name: string) => {',
+ ' const value = process.env[name];',
+ ' if (!value) {',
' throw new Error("Missing " + name + " environment variable");',
- " }",
- " return value;",
- "};",
- "",
- "export const checkUploadLimit = internalMutation({",
- " args: { userId: v.string() },",
- " returns: v.null(),",
- " handler: async (ctx, args) => {",
- " const now = Date.now();",
- " const existing = await ctx.db",
+ ' }',
+ ' return value;',
+ '};',
+ '',
+ 'export const checkUploadLimit = internalMutation({',
+ ' args: { userId: v.string() },',
+ ' returns: v.null(),',
+ ' handler: async (ctx, args) => {',
+ ' const now = Date.now();',
+ ' const existing = await ctx.db',
' .query("uploadLimits")',
' .withIndex("by_user", (q) => q.eq("userId", args.userId))',
- " .first();",
- " if (!existing) {",
+ ' .first();',
+ ' if (!existing) {',
' await ctx.db.insert("uploadLimits", {',
- " userId: args.userId,",
- " windowStart: now,",
- " count: 1,",
- " lastUploadAt: now,",
- " });",
- " return null;",
- " }",
- " if (now - existing.windowStart > WINDOW_MS) {",
- " await ctx.db.patch(existing._id, {",
- " windowStart: now,",
- " count: 1,",
- " lastUploadAt: now,",
- " });",
- " return null;",
- " }",
- " if (existing.count >= MAX_UPLOADS_PER_HOUR) {",
+ ' userId: args.userId,',
+ ' windowStart: now,',
+ ' count: 1,',
+ ' lastUploadAt: now,',
+ ' });',
+ ' return null;',
+ ' }',
+ ' if (now - existing.windowStart > WINDOW_MS) {',
+ ' await ctx.db.patch(existing._id, {',
+ ' windowStart: now,',
+ ' count: 1,',
+ ' lastUploadAt: now,',
+ ' });',
+ ' return null;',
+ ' }',
+ ' if (existing.count >= MAX_UPLOADS_PER_HOUR) {',
' throw new Error("Upload limit reached. Try again later.");',
- " }",
- " await ctx.db.patch(existing._id, {",
- " count: existing.count + 1,",
- " lastUploadAt: now,",
- " });",
- " return null;",
- " },",
- "});",
- "",
- "export const createWeddingAssemblyOptions = action({",
- " args: {",
- " fileCount: v.number(),",
- " guestName: v.optional(v.string()),",
- " uploadCode: v.optional(v.string()),",
- " },",
- " returns: v.object({",
- " assemblyOptions: vAssemblyOptions,",
- " params: v.any(),",
- " }),",
- " handler: async (ctx, args) => {",
- " const identity = await ctx.auth.getUserIdentity();",
- " if (!identity) {",
+ ' }',
+ ' await ctx.db.patch(existing._id, {',
+ ' count: existing.count + 1,',
+ ' lastUploadAt: now,',
+ ' });',
+ ' return null;',
+ ' },',
+ '});',
+ '',
+ 'export const createWeddingAssemblyOptions = action({',
+ ' args: {',
+ ' fileCount: v.number(),',
+ ' guestName: v.optional(v.string()),',
+ ' uploadCode: v.optional(v.string()),',
+ ' },',
+ ' returns: v.object({',
+ ' assemblyOptions: vAssemblyOptions,',
+ ' params: v.any(),',
+ ' }),',
+ ' handler: async (ctx, args) => {',
+ ' const identity = await ctx.auth.getUserIdentity();',
+ ' if (!identity) {',
' throw new Error("Authentication required.");',
- " }",
- " await ctx.runMutation(internal.wedding.checkUploadLimit, { userId: identity.subject });",
- "",
- " const requiredCode = process.env.WEDDING_UPLOAD_CODE;",
- " if (requiredCode) {",
- " const provided = args.uploadCode?.trim();",
- " if (!provided || provided !== requiredCode) {",
+ ' }',
+ ' await ctx.runMutation(internal.wedding.checkUploadLimit, { userId: identity.subject });',
+ '',
+ ' const requiredCode = process.env.WEDDING_UPLOAD_CODE;',
+ ' if (requiredCode) {',
+ ' const provided = args.uploadCode?.trim();',
+ ' if (!provided || provided !== requiredCode) {',
' throw new Error("Upload code required.");',
- " }",
- " }",
- "",
- " const steps = buildWeddingSteps();",
+ ' }',
+ ' }',
+ '',
+ ' const steps = buildWeddingSteps();',
' const notifyUrl = requireEnv("TRANSLOADIT_NOTIFY_URL");',
- " const fileCount = Math.max(1, args.fileCount);",
- " const assemblyArgs = {",
- " steps,",
- " notifyUrl,",
- " numExpectedUploadFiles: fileCount,",
- " fields: {",
+ ' const fileCount = Math.max(1, args.fileCount);',
+ ' const assemblyArgs = {',
+ ' steps,',
+ ' notifyUrl,',
+ ' numExpectedUploadFiles: fileCount,',
+ ' fields: {',
' guestName: args.guestName ?? "Guest",',
' album: "wedding-gallery",',
- " fileCount,",
- " userId: identity.subject,",
- " },",
- " userId: identity.subject,",
- " };",
- " const assemblyOptions = await ctx.runAction(",
- " components.transloadit.lib.createAssemblyOptions,",
- " {",
- " ...assemblyArgs,",
- " config: {",
+ ' fileCount,',
+ ' userId: identity.subject,',
+ ' },',
+ ' userId: identity.subject,',
+ ' };',
+ ' const assemblyOptions = await ctx.runAction(',
+ ' components.transloadit.lib.createAssemblyOptions,',
+ ' {',
+ ' ...assemblyArgs,',
+ ' config: {',
' authKey: requireEnv("TRANSLOADIT_KEY"),',
' authSecret: requireEnv("TRANSLOADIT_SECRET"),',
- " },",
- " },",
- " );",
- " const parsedParams = safeParseParams(assemblyOptions.params);",
- " const params = redactSecrets(parsedParams ?? assemblyArgs);",
- " return { assemblyOptions, params };",
- " },",
- "});",
- "",
- "const safeParseParams = (value: string) => {",
- " try {",
- " return JSON.parse(value);",
- " } catch (error) {",
+ ' },',
+ ' },',
+ ' );',
+ ' const parsedParams = safeParseParams(assemblyOptions.params);',
+ ' const params = redactSecrets(parsedParams ?? assemblyArgs);',
+ ' return { assemblyOptions, params };',
+ ' },',
+ '});',
+ '',
+ 'const safeParseParams = (value: string) => {',
+ ' try {',
+ ' return JSON.parse(value);',
+ ' } catch (error) {',
' console.warn("Failed to parse Transloadit params", error);',
- " return null;",
- " }",
- "};",
- "",
- "const secretKeys = new Set([",
+ ' return null;',
+ ' }',
+ '};',
+ '',
+ 'const secretKeys = new Set([',
' "secret",',
' "key",',
' "credentials",',
' "authSecret",',
' "authKey",',
- "]);",
- "",
- "const redactSecrets = (value: unknown): unknown => {",
- " if (Array.isArray(value)) {",
- " return value.map((item) => redactSecrets(item));",
- " }",
+ ']);',
+ '',
+ 'const redactSecrets = (value: unknown): unknown => {',
+ ' if (Array.isArray(value)) {',
+ ' return value.map((item) => redactSecrets(item));',
+ ' }',
' if (value && typeof value === "object") {',
- " const entries = Object.entries(value).map(([key, val]) => {",
- " if (secretKeys.has(key)) {",
+ ' const entries = Object.entries(value).map(([key, val]) => {',
+ ' if (secretKeys.has(key)) {',
' return [key, "***"];',
- " }",
- " return [key, redactSecrets(val)];",
- " });",
- " return Object.fromEntries(entries);",
- " }",
- " return value;",
- "};",
- "",
- ].join("\n"),
- "utf8",
+ ' }',
+ ' return [key, redactSecrets(val)];',
+ ' });',
+ ' return Object.fromEntries(entries);',
+ ' }',
+ ' return value;',
+ '};',
+ '',
+ ].join('\n'),
+ 'utf8',
);
await writeFile(
- join(convexDir, "http.ts"),
+ join(convexDir, 'http.ts'),
[
'import { handleWebhookRequest } from "@transloadit/convex";',
'import { httpRouter, httpActionGeneric } from "convex/server";',
'import { api } from "./_generated/api";',
- "",
- "const http = httpRouter();",
- "const httpAction = httpActionGeneric;",
- "",
- "const requireEnv = (name: string) => {",
- " const value = process.env[name];",
- " if (!value) {",
+ '',
+ 'const http = httpRouter();',
+ 'const httpAction = httpActionGeneric;',
+ '',
+ 'const requireEnv = (name: string) => {',
+ ' const value = process.env[name];',
+ ' if (!value) {',
' throw new Error("Missing " + name);',
- " }",
- " return value;",
- "};",
- "",
- "http.route({",
+ ' }',
+ ' return value;',
+ '};',
+ '',
+ 'http.route({',
' path: "/.well-known/openid-configuration",',
' method: "GET",',
- " handler: httpAction(async () => {",
+ ' handler: httpAction(async () => {',
' const siteUrl = requireEnv("CONVEX_SITE_URL");',
' const jwksUrl = new URL(".well-known/jwks.json", siteUrl).toString();',
' const authorizeUrl = new URL("oauth/authorize", siteUrl).toString();',
- " return new Response(",
- " JSON.stringify({",
- " issuer: siteUrl,",
- " jwks_uri: jwksUrl,",
- " authorization_endpoint: authorizeUrl,",
- " }),",
- " {",
- " status: 200,",
- " headers: {",
+ ' return new Response(',
+ ' JSON.stringify({',
+ ' issuer: siteUrl,',
+ ' jwks_uri: jwksUrl,',
+ ' authorization_endpoint: authorizeUrl,',
+ ' }),',
+ ' {',
+ ' status: 200,',
+ ' headers: {',
' "Content-Type": "application/json",',
' "Cache-Control":',
' "public, max-age=15, stale-while-revalidate=15, stale-if-error=86400",',
- " },",
- " },",
- " );",
- " }),",
- "});",
- "",
- "http.route({",
+ ' },',
+ ' },',
+ ' );',
+ ' }),',
+ '});',
+ '',
+ 'http.route({',
' path: "/.well-known/jwks.json",',
' method: "GET",',
- " handler: httpAction(async () => {",
+ ' handler: httpAction(async () => {',
' return new Response(requireEnv("JWKS"), {',
- " status: 200,",
- " headers: {",
+ ' status: 200,',
+ ' headers: {',
' "Content-Type": "application/json",',
' "Cache-Control":',
' "public, max-age=15, stale-while-revalidate=15, stale-if-error=86400",',
- " },",
- " });",
- " }),",
- "});",
- "",
- "http.route({",
+ ' },',
+ ' });',
+ ' }),',
+ '});',
+ '',
+ 'http.route({',
' path: "/transloadit/webhook",',
' method: "POST",',
- " handler: httpAction(async (ctx, request) => {",
- " return handleWebhookRequest(request, {",
+ ' handler: httpAction(async (ctx, request) => {',
+ ' return handleWebhookRequest(request, {',
' mode: "queue",',
- " runAction: (args) => ctx.runAction(api.transloadit.queueWebhook, args),",
- " });",
- " }),",
- "});",
- "",
- "export default http;",
- "",
- ].join("\n"),
- "utf8",
+ ' runAction: (args) => ctx.runAction(api.transloadit.queueWebhook, args),',
+ ' });',
+ ' }),',
+ '});',
+ '',
+ 'export default http;',
+ '',
+ ].join('\n'),
+ 'utf8',
);
};
diff --git a/scripts/qa/run.ts b/scripts/qa/run.ts
index 47b7e52..2562e20 100644
--- a/scripts/qa/run.ts
+++ b/scripts/qa/run.ts
@@ -1,9 +1,9 @@
-import { spawnSync } from "node:child_process";
+import { spawnSync } from 'node:child_process';
export type RunOptions = {
cwd?: string;
env?: NodeJS.ProcessEnv;
- stdio?: "inherit" | "pipe";
+ stdio?: 'inherit' | 'pipe';
input?: string;
};
@@ -15,39 +15,34 @@ export const requireEnv = (name: string) => {
return value;
};
-export const run = (
- command: string,
- args: string[],
- options: RunOptions = {},
-) => {
- const stdio =
- options.input !== undefined ? "pipe" : (options.stdio ?? "inherit");
+export const run = (command: string, args: string[], options: RunOptions = {}) => {
+ const stdio = options.input !== undefined ? 'pipe' : (options.stdio ?? 'inherit');
const result = spawnSync(command, args, {
cwd: options.cwd,
env: options.env,
stdio,
input: options.input,
- encoding: "utf8",
+ encoding: 'utf8',
});
if (result.status !== 0) {
- const output = `${result.stdout ?? ""}${result.stderr ?? ""}`;
- throw new Error(`Command failed: ${command} ${args.join(" ")}\n${output}`);
+ const output = `${result.stdout ?? ''}${result.stderr ?? ''}`;
+ throw new Error(`Command failed: ${command} ${args.join(' ')}\n${output}`);
}
- return `${result.stdout ?? ""}${result.stderr ?? ""}`;
+ return `${result.stdout ?? ''}${result.stderr ?? ''}`;
};
export const parseJson = (output: string): T => {
const trimmed = output.trim();
if (!trimmed) {
- throw new Error("Template preflight returned empty output");
+ throw new Error('Template preflight returned empty output');
}
- if (trimmed.startsWith("{")) {
+ if (trimmed.startsWith('{')) {
return JSON.parse(trimmed) as T;
}
- const first = trimmed.indexOf("{");
- const last = trimmed.lastIndexOf("}");
+ const first = trimmed.indexOf('{');
+ const last = trimmed.lastIndexOf('}');
if (first === -1 || last === -1 || last <= first) {
throw new Error(`Unable to parse JSON from output: ${trimmed}`);
}
diff --git a/scripts/resolve-vercel-preview.ts b/scripts/resolve-vercel-preview.ts
index 017c8b4..e7237bb 100644
--- a/scripts/resolve-vercel-preview.ts
+++ b/scripts/resolve-vercel-preview.ts
@@ -1,36 +1,35 @@
-import { setTimeout as sleep } from "node:timers/promises";
-import { loadEnv } from "./env.ts";
+import { setTimeout as sleep } from 'node:timers/promises';
+import { loadEnv } from './env.ts';
loadEnv();
-const deployHook = process.env.VERCEL_PREVIEW_DEPLOY_HOOK ?? "";
-const vercelProject = process.env.VERCEL_PROJECT_SLUG ?? "";
-const vercelTeam = process.env.VERCEL_TEAM_SLUG ?? "";
-const vercelBypassToken = process.env.VERCEL_PROTECTION_BYPASS ?? "";
-const githubToken = process.env.GITHUB_TOKEN ?? "";
-const githubRepo = process.env.GITHUB_REPOSITORY ?? "";
-const githubSha = process.env.GITHUB_SHA ?? "";
-const githubEventPath = process.env.GITHUB_EVENT_PATH ?? "";
-const githubHeadRef =
- process.env.GITHUB_HEAD_REF ?? process.env.GITHUB_REF_NAME ?? "";
+const deployHook = process.env.VERCEL_PREVIEW_DEPLOY_HOOK ?? '';
+const vercelProject = process.env.VERCEL_PROJECT_SLUG ?? '';
+const vercelTeam = process.env.VERCEL_TEAM_SLUG ?? '';
+const vercelBypassToken = process.env.VERCEL_PROTECTION_BYPASS ?? '';
+const githubToken = process.env.GITHUB_TOKEN ?? '';
+const githubRepo = process.env.GITHUB_REPOSITORY ?? '';
+const githubSha = process.env.GITHUB_SHA ?? '';
+const githubEventPath = process.env.GITHUB_EVENT_PATH ?? '';
+const githubHeadRef = process.env.GITHUB_HEAD_REF ?? process.env.GITHUB_REF_NAME ?? '';
if (!githubToken) {
- throw new Error("Missing GITHUB_TOKEN");
+ throw new Error('Missing GITHUB_TOKEN');
}
if (!githubRepo || !githubSha) {
- throw new Error("Missing GITHUB_REPOSITORY or GITHUB_SHA");
+ throw new Error('Missing GITHUB_REPOSITORY or GITHUB_SHA');
}
-const apiBase = "https://api.github.com";
+const apiBase = 'https://api.github.com';
const headers = {
- accept: "application/vnd.github+json",
+ accept: 'application/vnd.github+json',
authorization: `Bearer ${githubToken}`,
- "user-agent": "convex-e2e-preview",
+ 'user-agent': 'convex-e2e-preview',
};
const triggerPreviewDeploy = async () => {
if (!deployHook) return;
- const response = await fetch(deployHook, { method: "POST" });
+ const response = await fetch(deployHook, { method: 'POST' });
if (!response.ok) {
throw new Error(`Vercel deploy hook failed: ${response.status}`);
}
@@ -42,9 +41,7 @@ const fetchDeploymentUrl = async (): Promise => {
{ headers },
);
if (!deploymentsResponse.ok) {
- throw new Error(
- `Failed to list deployments: ${deploymentsResponse.status}`,
- );
+ throw new Error(`Failed to list deployments: ${deploymentsResponse.status}`);
}
const deployments = (await deploymentsResponse.json()) as Array<{
environment?: string;
@@ -52,10 +49,7 @@ const fetchDeploymentUrl = async (): Promise => {
}>;
for (const deployment of deployments) {
- if (
- deployment.environment &&
- deployment.environment.toLowerCase() !== "preview"
- ) {
+ if (deployment.environment && deployment.environment.toLowerCase() !== 'preview') {
continue;
}
const statusesResponse = await fetch(deployment.statuses_url, { headers });
@@ -66,7 +60,7 @@ const fetchDeploymentUrl = async (): Promise => {
state?: string;
target_url?: string;
}>;
- const success = statuses.find((status) => status.state === "success");
+ const success = statuses.find((status) => status.state === 'success');
if (success?.target_url) return success.target_url;
}
@@ -74,10 +68,9 @@ const fetchDeploymentUrl = async (): Promise => {
};
const fetchCheckRunUrl = async (): Promise => {
- const response = await fetch(
- `${apiBase}/repos/${githubRepo}/commits/${githubSha}/check-runs`,
- { headers },
- );
+ const response = await fetch(`${apiBase}/repos/${githubRepo}/commits/${githubSha}/check-runs`, {
+ headers,
+ });
if (!response.ok) {
return null;
}
@@ -91,10 +84,7 @@ const fetchCheckRunUrl = async (): Promise => {
};
const checks = payload.check_runs ?? [];
const vercelCheck = checks.find(
- (check) =>
- check.app?.slug === "vercel" &&
- check.conclusion === "success" &&
- check.details_url,
+ (check) => check.app?.slug === 'vercel' && check.conclusion === 'success' && check.details_url,
);
return vercelCheck?.details_url ?? null;
};
@@ -102,38 +92,29 @@ const fetchCheckRunUrl = async (): Promise => {
const fetchPreviewUrlFromComments = async (): Promise => {
if (!githubEventPath) return null;
try {
- const eventRaw = await import("node:fs").then((fs) =>
- fs.readFileSync(githubEventPath, "utf8"),
- );
+ const eventRaw = await import('node:fs').then((fs) => fs.readFileSync(githubEventPath, 'utf8'));
const event = JSON.parse(eventRaw) as {
pull_request?: { number?: number };
};
const prNumber = event.pull_request?.number;
if (!prNumber) return null;
- const response = await fetch(
- `${apiBase}/repos/${githubRepo}/issues/${prNumber}/comments`,
- { headers },
- );
+ const response = await fetch(`${apiBase}/repos/${githubRepo}/issues/${prNumber}/comments`, {
+ headers,
+ });
if (!response.ok) return null;
const comments = (await response.json()) as Array<{
user?: { login?: string };
body?: string;
}>;
- const vercelComment = comments.find(
- (comment) => comment.user?.login === "vercel[bot]",
- );
+ const vercelComment = comments.find((comment) => comment.user?.login === 'vercel[bot]');
if (!vercelComment?.body) return null;
const match = vercelComment.body.match(/\[vc\]: #[^:]+:([A-Za-z0-9+/=]+)/);
if (!match?.[1]) return null;
- const payload = JSON.parse(
- Buffer.from(match[1], "base64").toString("utf8"),
- ) as {
+ const payload = JSON.parse(Buffer.from(match[1], 'base64').toString('utf8')) as {
projects?: Array<{ previewUrl?: string }>;
};
- const previewUrl = payload.projects?.find(
- (project) => project.previewUrl,
- )?.previewUrl;
+ const previewUrl = payload.projects?.find((project) => project.previewUrl)?.previewUrl;
return previewUrl ?? null;
} catch {
return null;
@@ -141,7 +122,7 @@ const fetchPreviewUrlFromComments = async (): Promise => {
};
const normalizeUrl = (value: string): string => {
- if (value.startsWith("http://") || value.startsWith("https://")) {
+ if (value.startsWith('http://') || value.startsWith('https://')) {
return value;
}
return `https://${value}`;
@@ -151,27 +132,23 @@ const slugifyBranch = (value: string): string =>
value
.trim()
.toLowerCase()
- .replace(/[^a-z0-9-]/g, "-")
- .replace(/-+/g, "-")
- .replace(/^-|-$/g, "");
+ .replace(/[^a-z0-9-]/g, '-')
+ .replace(/-+/g, '-')
+ .replace(/^-|-$/g, '');
const fallbackUrl =
vercelProject && vercelTeam && githubHeadRef
- ? `https://${vercelProject}-git-${slugifyBranch(
- githubHeadRef,
- )}-${vercelTeam}.vercel.app`
+ ? `https://${vercelProject}-git-${slugifyBranch(githubHeadRef)}-${vercelTeam}.vercel.app`
: null;
const isPreviewReady = async (url: string) => {
try {
const target = new URL(url);
if (vercelBypassToken) {
- target.searchParams.set("__vercel_protection_bypass", vercelBypassToken);
+ target.searchParams.set('__vercel_protection_bypass', vercelBypassToken);
}
const response = await fetch(target, {
- headers: vercelBypassToken
- ? { "x-vercel-protection-bypass": vercelBypassToken }
- : undefined,
+ headers: vercelBypassToken ? { 'x-vercel-protection-bypass': vercelBypassToken } : undefined,
});
if (response.status === 404) return false;
if (response.status >= 500) return false;
@@ -214,4 +191,4 @@ while (Date.now() < deadline) {
await sleep(5000);
}
-throw new Error("Timed out waiting for preview deployment URL");
+throw new Error('Timed out waiting for preview deployment URL');
diff --git a/scripts/slugify-branch.ts b/scripts/slugify-branch.ts
index d591c18..e6b6f83 100644
--- a/scripts/slugify-branch.ts
+++ b/scripts/slugify-branch.ts
@@ -2,17 +2,17 @@ const raw =
process.env.GITHUB_HEAD_REF ??
process.env.GITHUB_REF_NAME ??
process.env.VERCEL_GIT_COMMIT_REF ??
- "";
+ '';
const slug = raw
.trim()
.toLowerCase()
- .replace(/[^a-z0-9-]/g, "-")
- .replace(/-+/g, "-")
- .replace(/^-|-$/g, "");
+ .replace(/[^a-z0-9-]/g, '-')
+ .replace(/-+/g, '-')
+ .replace(/^-|-$/g, '');
if (!slug) {
- throw new Error("Missing branch name to slugify");
+ throw new Error('Missing branch name to slugify');
}
process.stdout.write(slug);
diff --git a/scripts/start-webhook-tunnel.ts b/scripts/start-webhook-tunnel.ts
index 19fe674..c47be97 100644
--- a/scripts/start-webhook-tunnel.ts
+++ b/scripts/start-webhook-tunnel.ts
@@ -1,12 +1,12 @@
-import { spawn } from "node:child_process";
-import { chmodSync, mkdirSync, statSync, writeFileSync } from "node:fs";
-import { join, resolve } from "node:path";
-import { loadEnv } from "./env.ts";
+import { spawn } from 'node:child_process';
+import { chmodSync, mkdirSync, statSync, writeFileSync } from 'node:fs';
+import { join, resolve } from 'node:path';
+import { loadEnv } from './env.ts';
loadEnv();
-const isWindows = process.platform === "win32";
-const binaryName = isWindows ? "cloudflared.exe" : "cloudflared";
+const isWindows = process.platform === 'win32';
+const binaryName = isWindows ? 'cloudflared.exe' : 'cloudflared';
function isExecutable(path: string) {
try {
@@ -20,9 +20,7 @@ function isExecutable(path: string) {
}
function findOnPath(name: string) {
- const pathEntries = (process.env.PATH || "").split(
- process.platform === "win32" ? ";" : ":",
- );
+ const pathEntries = (process.env.PATH || '').split(process.platform === 'win32' ? ';' : ':');
for (const entry of pathEntries) {
if (!entry) continue;
const candidate = join(entry, name);
@@ -37,22 +35,18 @@ function getDownloadName() {
const platform = process.platform;
const arch = process.arch;
- if (platform === "linux") {
- if (arch === "arm64") return "cloudflared-linux-arm64";
- if (arch === "arm") return "cloudflared-linux-arm";
- return "cloudflared-linux-amd64";
+ if (platform === 'linux') {
+ if (arch === 'arm64') return 'cloudflared-linux-arm64';
+ if (arch === 'arm') return 'cloudflared-linux-arm';
+ return 'cloudflared-linux-amd64';
}
- if (platform === "darwin") {
- return arch === "arm64"
- ? "cloudflared-darwin-arm64"
- : "cloudflared-darwin-amd64";
+ if (platform === 'darwin') {
+ return arch === 'arm64' ? 'cloudflared-darwin-arm64' : 'cloudflared-darwin-amd64';
}
- if (platform === "win32") {
- return arch === "arm64"
- ? "cloudflared-windows-arm64.exe"
- : "cloudflared-windows-amd64.exe";
+ if (platform === 'win32') {
+ return arch === 'arm64' ? 'cloudflared-windows-arm64.exe' : 'cloudflared-windows-amd64.exe';
}
throw new Error(`Unsupported platform: ${platform} ${arch}`);
@@ -62,7 +56,7 @@ async function ensureCloudflared() {
const existing = findOnPath(binaryName);
if (existing) return existing;
- const toolsDir = resolve(".tools");
+ const toolsDir = resolve('.tools');
mkdirSync(toolsDir, { recursive: true });
const target = join(toolsDir, binaryName);
@@ -74,9 +68,7 @@ async function ensureCloudflared() {
const url = `https://github.com/cloudflare/cloudflared/releases/latest/download/${assetName}`;
const response = await fetch(url);
if (!response.ok) {
- throw new Error(
- `Failed to download cloudflared: ${response.status} ${response.statusText}`,
- );
+ throw new Error(`Failed to download cloudflared: ${response.status} ${response.statusText}`);
}
const buffer = Buffer.from(await response.arrayBuffer());
writeFileSync(target, buffer);
@@ -87,33 +79,31 @@ async function ensureCloudflared() {
}
const args = process.argv.slice(2);
-const once = args.includes("--once");
-const compact = args.includes("--json");
-const portFlagIndex = args.indexOf("--port");
+const once = args.includes('--once');
+const compact = args.includes('--json');
+const portFlagIndex = args.indexOf('--port');
const port =
- portFlagIndex !== -1 && args[portFlagIndex + 1]
- ? Number(args[portFlagIndex + 1])
- : 3210;
+ portFlagIndex !== -1 && args[portFlagIndex + 1] ? Number(args[portFlagIndex + 1]) : 3210;
if (Number.isNaN(port)) {
- throw new Error("Invalid --port value");
+ throw new Error('Invalid --port value');
}
const cloudflared = await ensureCloudflared();
const tunnel = spawn(
cloudflared,
[
- "tunnel",
- "--url",
+ 'tunnel',
+ '--url',
`http://localhost:${port}`,
- "--protocol",
- "http2",
- "--no-autoupdate",
- "--retries",
- "20",
+ '--protocol',
+ 'http2',
+ '--no-autoupdate',
+ '--retries',
+ '20',
],
{
- stdio: ["ignore", "pipe", "pipe"],
+ stdio: ['ignore', 'pipe', 'pipe'],
},
);
@@ -139,18 +129,18 @@ function handleOutput(chunk: Buffer) {
}
if (tunnel.stdout) {
- tunnel.stdout.on("data", handleOutput);
+ tunnel.stdout.on('data', handleOutput);
}
if (tunnel.stderr) {
- tunnel.stderr.on("data", handleOutput);
+ tunnel.stderr.on('data', handleOutput);
}
-tunnel.on("error", (error) => {
+tunnel.on('error', (error) => {
console.error(error);
process.exit(1);
});
-tunnel.on("exit", (code) => {
+tunnel.on('exit', (code) => {
if (!printed && code && code !== 0) {
process.exit(code);
}
diff --git a/scripts/verify.ts b/scripts/verify.ts
index fb35f1d..e22ea86 100644
--- a/scripts/verify.ts
+++ b/scripts/verify.ts
@@ -1,17 +1,17 @@
-import { createDebugLogger } from "../src/debug/index.ts";
-import { loadEnv } from "./env.ts";
-import { run } from "./qa/run.ts";
+import { createDebugLogger } from '../src/debug/index.ts';
+import { loadEnv } from './env.ts';
+import { run } from './qa/run.ts';
loadEnv();
-type Mode = "local" | "cloud";
+type Mode = 'local' | 'cloud';
const parseArgs = (args: string[]) => {
let mode: string | undefined;
for (let index = 0; index < args.length; index += 1) {
const arg = args[index];
- if (arg === "--mode") {
+ if (arg === '--mode') {
mode = args[index + 1];
index += 1;
}
@@ -20,7 +20,7 @@ const parseArgs = (args: string[]) => {
return { mode };
};
-const logger = createDebugLogger({ namespace: "verify" });
+const logger = createDebugLogger({ namespace: 'verify' });
const runBrowser = async (options: {
mode: Mode;
@@ -29,15 +29,15 @@ const runBrowser = async (options: {
convexUrl?: string;
};
}) => {
- const skipInstall = process.env.PLAYWRIGHT_SKIP_INSTALL === "1";
+ const skipInstall = process.env.PLAYWRIGHT_SKIP_INSTALL === '1';
if (!skipInstall) {
- logger.event("playwright-install", { browser: "chromium" });
- run("yarn", ["exec", "playwright", "install", "chromium"]);
+ logger.event('playwright-install', { browser: 'chromium' });
+ run('yarn', ['exec', 'playwright', 'install', 'chromium']);
}
- logger.event("build");
- run("yarn", ["build"]);
+ logger.event('build');
+ run('yarn', ['build']);
const testEnv: NodeJS.ProcessEnv = {
...process.env,
@@ -51,24 +51,24 @@ const runBrowser = async (options: {
}
}
- run("yarn", ["exec", "vitest", "run", "--config", "vitest.e2e.config.ts"], {
+ run('yarn', ['exec', 'vitest', 'run', '--config', 'vitest.e2e.config.ts'], {
env: testEnv,
});
};
const resolveCloudConfig = () => {
- const appUrl = process.env.E2E_REMOTE_APP_URL ?? "";
+ const appUrl = process.env.E2E_REMOTE_APP_URL ?? '';
if (!appUrl) {
if (!process.env.CI) {
throw new Error(
- "E2E_REMOTE_APP_URL is required for local verify:cloud (CI resolves it automatically).",
+ 'E2E_REMOTE_APP_URL is required for local verify:cloud (CI resolves it automatically).',
);
}
throw new Error(
- "Missing E2E_REMOTE_APP_URL (CI should resolve it via resolve-vercel-preview).",
+ 'Missing E2E_REMOTE_APP_URL (CI should resolve it via resolve-vercel-preview).',
);
}
- const convexUrl = process.env.E2E_REMOTE_CONVEX_URL ?? "";
+ const convexUrl = process.env.E2E_REMOTE_CONVEX_URL ?? '';
return {
appUrl,
convexUrl,
@@ -76,31 +76,28 @@ const resolveCloudConfig = () => {
};
const args = parseArgs(process.argv.slice(2));
-const rawMode = args.mode ?? process.env.VERIFY_MODE ?? "local";
+const rawMode = args.mode ?? process.env.VERIFY_MODE ?? 'local';
const resolvedMode: Mode =
- rawMode === "cloud" ||
- rawMode === "preview" ||
- rawMode === "real" ||
- rawMode === "convex"
- ? "cloud"
- : "local";
+ rawMode === 'cloud' || rawMode === 'preview' || rawMode === 'real' || rawMode === 'convex'
+ ? 'cloud'
+ : 'local';
const runMain = async () => {
- logger.event("start", { mode: resolvedMode });
- if (resolvedMode === "cloud") {
+ logger.event('start', { mode: resolvedMode });
+ if (resolvedMode === 'cloud') {
const remote = resolveCloudConfig();
- logger.event("remote", {
+ logger.event('remote', {
appUrl: remote.appUrl,
convexUrl: remote.convexUrl,
});
await runBrowser({
- mode: "cloud",
+ mode: 'cloud',
remote,
});
return;
}
await runBrowser({
- mode: "local",
+ mode: 'local',
});
};
diff --git a/src/client/index.ts b/src/client/index.ts
index 5ad4712..ae33fd4 100644
--- a/src/client/index.ts
+++ b/src/client/index.ts
@@ -1,8 +1,8 @@
-import type { AssemblyStatus } from "@transloadit/zod/v3/assemblyStatus";
-import type { AssemblyInstructionsInput } from "@transloadit/zod/v3/template";
-import { actionGeneric, mutationGeneric, queryGeneric } from "convex/server";
-import { v } from "convex/values";
-import type { ComponentApi } from "../component/_generated/component.ts";
+import type { AssemblyStatus } from '@transloadit/zod/v3/assemblyStatus';
+import type { AssemblyInstructionsInput } from '@transloadit/zod/v3/template';
+import { actionGeneric, mutationGeneric, queryGeneric } from 'convex/server';
+import { v } from 'convex/values';
+import type { ComponentApi } from '../component/_generated/component.ts';
import {
type AssemblyOptions,
type AssemblyResponse,
@@ -23,8 +23,8 @@ import {
vStoreAssemblyMetadataArgs,
vWebhookActionArgs,
vWebhookResponse,
-} from "../shared/schemas.ts";
-import type { RunActionCtx, RunMutationCtx, RunQueryCtx } from "./types.ts";
+} from '../shared/schemas.ts';
+import type { RunActionCtx, RunMutationCtx, RunQueryCtx } from './types.ts';
export { vAssemblyResponse, vAssemblyResultResponse, vCreateAssemblyArgs };
@@ -42,17 +42,17 @@ export {
isAssemblyTerminalError,
isAssemblyTerminalOk,
isAssemblyTerminalOkStatus,
-} from "@transloadit/zod/v3/assemblyStatus";
+} from '@transloadit/zod/v3/assemblyStatus';
export {
buildWebhookQueueArgs,
handleWebhookRequest,
parseAndVerifyTransloaditWebhook,
parseTransloaditWebhook,
-} from "../component/apiUtils.ts";
+} from '../component/apiUtils.ts';
export type {
NormalizedAssemblyUrls,
TransloaditAssembly,
-} from "../shared/assemblyUrls.ts";
+} from '../shared/assemblyUrls.ts';
export {
ASSEMBLY_STATUS_COMPLETED,
ASSEMBLY_STATUS_UPLOADING,
@@ -64,8 +64,8 @@ export {
parseAssemblyResults,
parseAssemblyStatus,
parseAssemblyUrls,
-} from "../shared/assemblyUrls.ts";
-export { pollAssembly } from "../shared/pollAssembly.ts";
+} from '../shared/assemblyUrls.ts';
+export { pollAssembly } from '../shared/pollAssembly.ts';
export type {
ImageResizeResult,
ResultByRobot,
@@ -74,16 +74,16 @@ export type {
TransloaditResult,
VideoEncodeResult,
VideoThumbsResult,
-} from "../shared/resultTypes.ts";
+} from '../shared/resultTypes.ts';
export {
getResultOriginalKey,
getResultUrl,
-} from "../shared/resultUtils.ts";
+} from '../shared/resultUtils.ts';
export type {
ParsedWebhookRequest,
VerifiedWebhookRequest,
WebhookActionArgs,
-} from "../shared/schemas.ts";
+} from '../shared/schemas.ts';
export type { AssemblyStatus, AssemblyInstructionsInput };
export interface TransloaditConfig {
@@ -100,16 +100,11 @@ function requireEnv(names: string[]): string {
return value;
}
}
- throw new Error(`Missing ${names.join(" or ")} environment variable`);
+ throw new Error(`Missing ${names.join(' or ')} environment variable`);
}
export { vAssemblyOptions };
-export type {
- AssemblyOptions,
- AssemblyResponse,
- AssemblyResultResponse,
- CreateAssemblyArgs,
-};
+export type { AssemblyOptions, AssemblyResponse, AssemblyResultResponse, CreateAssemblyArgs };
/**
* @deprecated Prefer `makeTransloaditAPI` or `Transloadit` for new code.
@@ -118,14 +113,11 @@ export class TransloaditClient {
declare component: TransloaditComponent;
declare config: TransloaditConfig;
- constructor(
- component: TransloaditComponent,
- config?: Partial,
- ) {
+ constructor(component: TransloaditComponent, config?: Partial) {
this.component = component;
this.config = {
- authKey: config?.authKey ?? requireEnv(["TRANSLOADIT_KEY"]),
- authSecret: config?.authSecret ?? requireEnv(["TRANSLOADIT_SECRET"]),
+ authKey: config?.authKey ?? requireEnv(['TRANSLOADIT_KEY']),
+ authSecret: config?.authSecret ?? requireEnv(['TRANSLOADIT_SECRET']),
};
}
@@ -202,10 +194,7 @@ export class TransloaditClient {
return ctx.runQuery(this.component.lib.listResults, args);
}
- async listAlbumResults(
- ctx: RunQueryCtx,
- args: { album: string; limit?: number },
- ) {
+ async listAlbumResults(ctx: RunQueryCtx, args: { album: string; limit?: number }) {
return ctx.runQuery(this.component.lib.listAlbumResults, args);
}
@@ -238,8 +227,8 @@ export function makeTransloaditAPI(
config?: Partial,
) {
const resolveConfig = (): TransloaditConfig => ({
- authKey: config?.authKey ?? requireEnv(["TRANSLOADIT_KEY"]),
- authSecret: config?.authSecret ?? requireEnv(["TRANSLOADIT_SECRET"]),
+ authKey: config?.authKey ?? requireEnv(['TRANSLOADIT_KEY']),
+ authSecret: config?.authSecret ?? requireEnv(['TRANSLOADIT_SECRET']),
});
return {
diff --git a/src/client/types.ts b/src/client/types.ts
index e1d4206..b0540d1 100644
--- a/src/client/types.ts
+++ b/src/client/types.ts
@@ -5,23 +5,23 @@ import type {
FunctionReturnType,
StorageActionWriter,
StorageReader,
-} from "convex/server";
-import type { GenericId } from "convex/values";
+} from 'convex/server';
+import type { GenericId } from 'convex/values';
export type RunQueryCtx = {
- runQuery: >(
+ runQuery: >(
query: Query,
args: FunctionArgs,
) => Promise>;
};
export type RunMutationCtx = RunQueryCtx & {
- runMutation: >(
+ runMutation: >(
mutation: Mutation,
args: FunctionArgs,
) => Promise>;
};
export type RunActionCtx = RunMutationCtx & {
- runAction: >(
+ runAction: >(
action: Action,
args: FunctionArgs,
) => Promise>;
@@ -49,17 +49,11 @@ export type OpaqueIds =
export type UseApi = Expand<{
[mod in keyof API]: API[mod] extends FunctionReference<
infer FType,
- "public",
+ 'public',
infer FArgs,
infer FReturnType,
infer FComponentPath
>
- ? FunctionReference<
- FType,
- "internal",
- OpaqueIds,
- OpaqueIds,
- FComponentPath
- >
+ ? FunctionReference, OpaqueIds, FComponentPath>
: UseApi;
}>;
diff --git a/src/component/apiUtils.test.ts b/src/component/apiUtils.test.ts
index 7218885..a992211 100644
--- a/src/component/apiUtils.test.ts
+++ b/src/component/apiUtils.test.ts
@@ -1,5 +1,5 @@
-import { createHmac } from "node:crypto";
-import { describe, expect, test, vi } from "vitest";
+import { createHmac } from 'node:crypto';
+import { describe, expect, test, vi } from 'vitest';
import {
buildTransloaditParams,
buildWebhookQueueArgs,
@@ -8,38 +8,36 @@ import {
parseTransloaditWebhook,
signTransloaditParams,
verifyWebhookSignature,
-} from "./apiUtils.ts";
+} from './apiUtils.ts';
-describe("apiUtils", () => {
- test("buildTransloaditParams requires templateId or steps", () => {
+describe('apiUtils', () => {
+ test('buildTransloaditParams requires templateId or steps', () => {
expect(() =>
buildTransloaditParams({
- authKey: "key",
+ authKey: 'key',
}),
- ).toThrow("Provide either templateId or steps");
+ ).toThrow('Provide either templateId or steps');
});
- test("signTransloaditParams uses sha384", async () => {
+ test('signTransloaditParams uses sha384', async () => {
const { paramsString } = buildTransloaditParams({
- authKey: "key",
- templateId: "tmpl_123",
- notifyUrl: "https://example.com/webhook",
+ authKey: 'key',
+ templateId: 'tmpl_123',
+ notifyUrl: 'https://example.com/webhook',
});
- const signature = await signTransloaditParams(paramsString, "secret");
- expect(signature.startsWith("sha384:")).toBe(true);
+ const signature = await signTransloaditParams(paramsString, 'secret');
+ expect(signature.startsWith('sha384:')).toBe(true);
- const expected = createHmac("sha384", "secret")
- .update(paramsString)
- .digest("hex");
+ const expected = createHmac('sha384', 'secret').update(paramsString).digest('hex');
expect(signature).toBe(`sha384:${expected}`);
});
- test("verifyWebhookSignature supports sha1 fallback", async () => {
- const payload = { ok: "ASSEMBLY_COMPLETED", assembly_id: "asm_123" };
+ test('verifyWebhookSignature supports sha1 fallback', async () => {
+ const payload = { ok: 'ASSEMBLY_COMPLETED', assembly_id: 'asm_123' };
const rawBody = JSON.stringify(payload);
- const secret = "webhook-secret";
- const digest = createHmac("sha1", secret).update(rawBody).digest("hex");
+ const secret = 'webhook-secret';
+ const digest = createHmac('sha1', secret).update(rawBody).digest('hex');
const verified = await verifyWebhookSignature({
rawBody,
@@ -50,45 +48,43 @@ describe("apiUtils", () => {
expect(verified).toBe(true);
});
- test("parseTransloaditWebhook returns payload and signature", async () => {
- const payload = { ok: "ASSEMBLY_COMPLETED", assembly_id: "asm_123" };
+ test('parseTransloaditWebhook returns payload and signature', async () => {
+ const payload = { ok: 'ASSEMBLY_COMPLETED', assembly_id: 'asm_123' };
const formData = new FormData();
- formData.append("transloadit", JSON.stringify(payload));
- formData.append("signature", "sha384:abc");
+ formData.append('transloadit', JSON.stringify(payload));
+ formData.append('signature', 'sha384:abc');
- const request = new Request("http://localhost", {
- method: "POST",
+ const request = new Request('http://localhost', {
+ method: 'POST',
body: formData,
});
const result = await parseTransloaditWebhook(request);
expect(result.payload).toEqual(payload);
expect(result.rawBody).toBe(JSON.stringify(payload));
- expect(result.signature).toBe("sha384:abc");
+ expect(result.signature).toBe('sha384:abc');
});
- test("parseTransloaditWebhook throws on missing payload", async () => {
- const request = new Request("http://localhost", {
- method: "POST",
+ test('parseTransloaditWebhook throws on missing payload', async () => {
+ const request = new Request('http://localhost', {
+ method: 'POST',
body: new FormData(),
});
- await expect(parseTransloaditWebhook(request)).rejects.toThrow(
- "Missing transloadit payload",
- );
+ await expect(parseTransloaditWebhook(request)).rejects.toThrow('Missing transloadit payload');
});
- test("parseAndVerifyTransloaditWebhook verifies signature", async () => {
- const payload = { ok: "ASSEMBLY_COMPLETED", assembly_id: "asm_123" };
+ test('parseAndVerifyTransloaditWebhook verifies signature', async () => {
+ const payload = { ok: 'ASSEMBLY_COMPLETED', assembly_id: 'asm_123' };
const rawBody = JSON.stringify(payload);
- const secret = "webhook-secret";
- const digest = createHmac("sha384", secret).update(rawBody).digest("hex");
+ const secret = 'webhook-secret';
+ const digest = createHmac('sha384', secret).update(rawBody).digest('hex');
const formData = new FormData();
- formData.append("transloadit", rawBody);
- formData.append("signature", `sha384:${digest}`);
+ formData.append('transloadit', rawBody);
+ formData.append('signature', `sha384:${digest}`);
- const request = new Request("http://localhost", {
- method: "POST",
+ const request = new Request('http://localhost', {
+ method: 'POST',
body: formData,
});
@@ -100,35 +96,35 @@ describe("apiUtils", () => {
expect(parsed.verified).toBe(true);
});
- test("parseAndVerifyTransloaditWebhook rejects invalid signature", async () => {
- const payload = { ok: "ASSEMBLY_COMPLETED", assembly_id: "asm_123" };
+ test('parseAndVerifyTransloaditWebhook rejects invalid signature', async () => {
+ const payload = { ok: 'ASSEMBLY_COMPLETED', assembly_id: 'asm_123' };
const formData = new FormData();
- formData.append("transloadit", JSON.stringify(payload));
- formData.append("signature", "sha384:bad");
+ formData.append('transloadit', JSON.stringify(payload));
+ formData.append('signature', 'sha384:bad');
- const request = new Request("http://localhost", {
- method: "POST",
+ const request = new Request('http://localhost', {
+ method: 'POST',
body: formData,
});
await expect(
parseAndVerifyTransloaditWebhook(request, {
- authSecret: "secret",
+ authSecret: 'secret',
}),
- ).rejects.toThrow("Invalid Transloadit webhook signature");
+ ).rejects.toThrow('Invalid Transloadit webhook signature');
});
- test("buildWebhookQueueArgs returns webhook payload args", async () => {
- const payload = { ok: "ASSEMBLY_COMPLETED", assembly_id: "asm_123" };
+ test('buildWebhookQueueArgs returns webhook payload args', async () => {
+ const payload = { ok: 'ASSEMBLY_COMPLETED', assembly_id: 'asm_123' };
const rawBody = JSON.stringify(payload);
- const secret = "webhook-secret";
- const digest = createHmac("sha384", secret).update(rawBody).digest("hex");
+ const secret = 'webhook-secret';
+ const digest = createHmac('sha384', secret).update(rawBody).digest('hex');
const formData = new FormData();
- formData.append("transloadit", rawBody);
- formData.append("signature", `sha384:${digest}`);
+ formData.append('transloadit', rawBody);
+ formData.append('signature', `sha384:${digest}`);
- const request = new Request("http://localhost", {
- method: "POST",
+ const request = new Request('http://localhost', {
+ method: 'POST',
body: formData,
});
@@ -138,19 +134,19 @@ describe("apiUtils", () => {
expect(args.signature).toBe(`sha384:${digest}`);
});
- test("buildWebhookQueueArgs can skip verification", async () => {
- const payload = { ok: "ASSEMBLY_COMPLETED", assembly_id: "asm_123" };
+ test('buildWebhookQueueArgs can skip verification', async () => {
+ const payload = { ok: 'ASSEMBLY_COMPLETED', assembly_id: 'asm_123' };
const rawBody = JSON.stringify(payload);
const formData = new FormData();
- formData.append("transloadit", rawBody);
+ formData.append('transloadit', rawBody);
- const request = new Request("http://localhost", {
- method: "POST",
+ const request = new Request('http://localhost', {
+ method: 'POST',
body: formData,
});
const args = await buildWebhookQueueArgs(request, {
- authSecret: "secret",
+ authSecret: 'secret',
requireSignature: false,
});
expect(args.payload).toEqual(payload);
@@ -158,15 +154,15 @@ describe("apiUtils", () => {
expect(args.signature).toBeUndefined();
});
- test("handleWebhookRequest queues webhook by default", async () => {
- const payload = { ok: "ASSEMBLY_COMPLETED", assembly_id: "asm_123" };
+ test('handleWebhookRequest queues webhook by default', async () => {
+ const payload = { ok: 'ASSEMBLY_COMPLETED', assembly_id: 'asm_123' };
const rawBody = JSON.stringify(payload);
const formData = new FormData();
- formData.append("transloadit", rawBody);
- formData.append("signature", "sha384:abc");
+ formData.append('transloadit', rawBody);
+ formData.append('signature', 'sha384:abc');
- const request = new Request("http://localhost", {
- method: "POST",
+ const request = new Request('http://localhost', {
+ method: 'POST',
body: formData,
});
const runAction = vi.fn().mockResolvedValue(null);
@@ -178,28 +174,28 @@ describe("apiUtils", () => {
expect(runAction).toHaveBeenCalledWith({
payload,
rawBody,
- signature: "sha384:abc",
+ signature: 'sha384:abc',
});
expect(response.status).toBe(202);
});
- test("handleWebhookRequest supports sync mode with verification", async () => {
- const payload = { ok: "ASSEMBLY_COMPLETED", assembly_id: "asm_123" };
+ test('handleWebhookRequest supports sync mode with verification', async () => {
+ const payload = { ok: 'ASSEMBLY_COMPLETED', assembly_id: 'asm_123' };
const rawBody = JSON.stringify(payload);
- const secret = "webhook-secret";
- const digest = createHmac("sha384", secret).update(rawBody).digest("hex");
+ const secret = 'webhook-secret';
+ const digest = createHmac('sha384', secret).update(rawBody).digest('hex');
const formData = new FormData();
- formData.append("transloadit", rawBody);
- formData.append("signature", `sha384:${digest}`);
+ formData.append('transloadit', rawBody);
+ formData.append('signature', `sha384:${digest}`);
- const request = new Request("http://localhost", {
- method: "POST",
+ const request = new Request('http://localhost', {
+ method: 'POST',
body: formData,
});
const runAction = vi.fn().mockResolvedValue(null);
const response = await handleWebhookRequest(request, {
- mode: "sync",
+ mode: 'sync',
runAction,
requireSignature: true,
authSecret: secret,
@@ -213,15 +209,15 @@ describe("apiUtils", () => {
expect(response.status).toBe(204);
});
- test("handleWebhookRequest honors a custom response status", async () => {
- const payload = { ok: "ASSEMBLY_COMPLETED", assembly_id: "asm_123" };
+ test('handleWebhookRequest honors a custom response status', async () => {
+ const payload = { ok: 'ASSEMBLY_COMPLETED', assembly_id: 'asm_123' };
const rawBody = JSON.stringify(payload);
const formData = new FormData();
- formData.append("transloadit", rawBody);
- formData.append("signature", "sha384:abc");
+ formData.append('transloadit', rawBody);
+ formData.append('signature', 'sha384:abc');
- const request = new Request("http://localhost", {
- method: "POST",
+ const request = new Request('http://localhost', {
+ method: 'POST',
body: formData,
});
const runAction = vi.fn().mockResolvedValue(null);
@@ -234,7 +230,7 @@ describe("apiUtils", () => {
expect(runAction).toHaveBeenCalledWith({
payload,
rawBody,
- signature: "sha384:abc",
+ signature: 'sha384:abc',
});
expect(response.status).toBe(299);
});
diff --git a/src/component/apiUtils.ts b/src/component/apiUtils.ts
index 0e08f66..1d05512 100644
--- a/src/component/apiUtils.ts
+++ b/src/component/apiUtils.ts
@@ -1,21 +1,19 @@
-import { signParams, verifyWebhookSignature } from "@transloadit/utils";
-import type { AssemblyStatusResults } from "@transloadit/zod/v3/assemblyStatus";
-import { transloaditError } from "../shared/errors.ts";
+import { signParams, verifyWebhookSignature } from '@transloadit/utils';
+import type { AssemblyStatusResults } from '@transloadit/zod/v3/assemblyStatus';
+import { transloaditError } from '../shared/errors.ts';
import type {
BuildParamsOptions,
BuildParamsResult,
ParsedWebhookRequest,
VerifiedWebhookRequest,
WebhookActionArgs,
-} from "../shared/schemas.ts";
+} from '../shared/schemas.ts';
-export function buildTransloaditParams(
- options: BuildParamsOptions,
-): BuildParamsResult {
+export function buildTransloaditParams(options: BuildParamsOptions): BuildParamsResult {
if (!options.templateId && !options.steps) {
throw transloaditError(
- "createAssembly",
- "Provide either templateId or steps to create an Assembly",
+ 'createAssembly',
+ 'Provide either templateId or steps to create an Assembly',
);
}
@@ -23,8 +21,7 @@ export function buildTransloaditParams(
key: options.authKey,
};
- auth.expires =
- options.expires ?? new Date(Date.now() + 60 * 60 * 1000).toISOString();
+ auth.expires = options.expires ?? new Date(Date.now() + 60 * 60 * 1000).toISOString();
const params: Record = {
auth,
@@ -56,24 +53,22 @@ export async function signTransloaditParams(
paramsString: string,
authSecret: string,
): Promise {
- return signParams(paramsString, authSecret, "sha384");
+ return signParams(paramsString, authSecret, 'sha384');
}
-export async function parseTransloaditWebhook(
- request: Request,
-): Promise {
+export async function parseTransloaditWebhook(request: Request): Promise {
const formData = await request.formData();
- const rawPayload = formData.get("transloadit");
- const signature = formData.get("signature");
+ const rawPayload = formData.get('transloadit');
+ const signature = formData.get('signature');
- if (typeof rawPayload !== "string") {
- throw transloaditError("webhook", "Missing transloadit payload");
+ if (typeof rawPayload !== 'string') {
+ throw transloaditError('webhook', 'Missing transloadit payload');
}
return {
payload: JSON.parse(rawPayload),
rawBody: rawPayload,
- signature: typeof signature === "string" ? signature : undefined,
+ signature: typeof signature === 'string' ? signature : undefined,
};
}
@@ -87,10 +82,7 @@ export async function parseAndVerifyTransloaditWebhook(
const parsed = await parseTransloaditWebhook(request);
const authSecret = options.authSecret;
if (!authSecret) {
- throw transloaditError(
- "webhook",
- "Missing authSecret for webhook verification",
- );
+ throw transloaditError('webhook', 'Missing authSecret for webhook verification');
}
const verified = await verifyWebhookSignature({
rawBody: parsed.rawBody,
@@ -100,10 +92,7 @@ export async function parseAndVerifyTransloaditWebhook(
if (options.requireSignature ?? true) {
if (!verified) {
- throw transloaditError(
- "webhook",
- "Invalid Transloadit webhook signature",
- );
+ throw transloaditError('webhook', 'Invalid Transloadit webhook signature');
}
}
@@ -132,19 +121,19 @@ export async function buildWebhookQueueArgs(
export async function handleWebhookRequest(
request: Request,
options: {
- mode?: "queue" | "sync";
+ mode?: 'queue' | 'sync';
runAction: (args: WebhookActionArgs) => Promise;
requireSignature?: boolean;
authSecret?: string;
responseStatus?: number;
},
): Promise {
- const mode = options.mode ?? "queue";
+ const mode = options.mode ?? 'queue';
const requireSignature = options.requireSignature ?? false;
const parsed = requireSignature
? await parseAndVerifyTransloaditWebhook(request, {
- authSecret: options.authSecret ?? "",
+ authSecret: options.authSecret ?? '',
requireSignature: true,
})
: await parseTransloaditWebhook(request);
@@ -155,7 +144,7 @@ export async function handleWebhookRequest(
signature: parsed.signature,
});
- const status = options.responseStatus ?? (mode === "sync" ? 204 : 202);
+ const status = options.responseStatus ?? (mode === 'sync' ? 204 : 202);
return new Response(null, { status });
}
@@ -168,9 +157,7 @@ export type AssemblyResultRecord = {
result: AssemblyResult;
};
-export function flattenResults(
- results: AssemblyStatusResults | undefined,
-): AssemblyResultRecord[] {
+export function flattenResults(results: AssemblyStatusResults | undefined): AssemblyResultRecord[] {
if (!results) return [];
const output: AssemblyResultRecord[] = [];
for (const [stepName, entries] of Object.entries(results)) {
diff --git a/src/component/convex.config.ts b/src/component/convex.config.ts
index 4fa445d..526343d 100644
--- a/src/component/convex.config.ts
+++ b/src/component/convex.config.ts
@@ -1,3 +1,3 @@
-import { defineComponent } from "convex/server";
+import { defineComponent } from 'convex/server';
-export default defineComponent("transloadit");
+export default defineComponent('transloadit');
diff --git a/src/component/lib.test.ts b/src/component/lib.test.ts
index 0a5a3b3..cc1559d 100644
--- a/src/component/lib.test.ts
+++ b/src/component/lib.test.ts
@@ -1,40 +1,38 @@
///
-import { createHmac } from "node:crypto";
-import { convexTest } from "convex-test";
-import { describe, expect, test, vi } from "vitest";
-import { api } from "./_generated/api.ts";
-import schema from "./schema.ts";
-import { modules } from "./setup.test.ts";
-
-process.env.TRANSLOADIT_KEY = "test-key";
-process.env.TRANSLOADIT_SECRET = "test-secret";
-
-describe("Transloadit component lib", () => {
- test("handleWebhook stores assembly and results", async () => {
+import { createHmac } from 'node:crypto';
+import { convexTest } from 'convex-test';
+import { describe, expect, test, vi } from 'vitest';
+import { api } from './_generated/api.ts';
+import schema from './schema.ts';
+import { modules } from './setup.test.ts';
+
+process.env.TRANSLOADIT_KEY = 'test-key';
+process.env.TRANSLOADIT_SECRET = 'test-secret';
+
+describe('Transloadit component lib', () => {
+ test('handleWebhook stores assembly and results', async () => {
const t = convexTest(schema, modules);
const payload = {
- assembly_id: "asm_123",
- ok: "ASSEMBLY_COMPLETED",
- message: "Assembly complete",
+ assembly_id: 'asm_123',
+ ok: 'ASSEMBLY_COMPLETED',
+ message: 'Assembly complete',
results: {
resized: [
{
- id: "file_1",
- ssl_url: "https://example.com/file.jpg",
- name: "file.jpg",
+ id: 'file_1',
+ ssl_url: 'https://example.com/file.jpg',
+ name: 'file.jpg',
size: 12345,
- mime: "image/jpeg",
+ mime: 'image/jpeg',
},
],
},
};
const rawBody = JSON.stringify(payload);
- const signature = createHmac("sha1", "test-secret")
- .update(rawBody)
- .digest("hex");
+ const signature = createHmac('sha1', 'test-secret').update(rawBody).digest('hex');
const result = await t.action(api.lib.handleWebhook, {
payload,
@@ -42,51 +40,49 @@ describe("Transloadit component lib", () => {
signature: `sha1:${signature}`,
});
- expect(result.assemblyId).toBe("asm_123");
+ expect(result.assemblyId).toBe('asm_123');
expect(result.resultCount).toBe(1);
const assembly = await t.query(api.lib.getAssemblyStatus, {
- assemblyId: "asm_123",
+ assemblyId: 'asm_123',
});
- expect(assembly?.assemblyId).toBe("asm_123");
- expect(assembly?.ok).toBe("ASSEMBLY_COMPLETED");
+ expect(assembly?.assemblyId).toBe('asm_123');
+ expect(assembly?.ok).toBe('ASSEMBLY_COMPLETED');
const results = await t.query(api.lib.listResults, {
- assemblyId: "asm_123",
+ assemblyId: 'asm_123',
});
expect(results).toHaveLength(1);
- expect(results[0]?.stepName).toBe("resized");
+ expect(results[0]?.stepName).toBe('resized');
});
- test("listAlbumResults returns album-scoped results", async () => {
+ test('listAlbumResults returns album-scoped results', async () => {
const t = convexTest(schema, modules);
const payload = {
- assembly_id: "asm_album",
- ok: "ASSEMBLY_COMPLETED",
+ assembly_id: 'asm_album',
+ ok: 'ASSEMBLY_COMPLETED',
fields: {
- album: "wedding-gallery",
- userId: "user_123",
+ album: 'wedding-gallery',
+ userId: 'user_123',
},
results: {
resized: [
{
- id: "file_album",
- ssl_url: "https://example.com/album.jpg",
- name: "album.jpg",
+ id: 'file_album',
+ ssl_url: 'https://example.com/album.jpg',
+ name: 'album.jpg',
size: 100,
- mime: "image/jpeg",
+ mime: 'image/jpeg',
},
],
},
};
const rawBody = JSON.stringify(payload);
- const signature = createHmac("sha1", "test-secret")
- .update(rawBody)
- .digest("hex");
+ const signature = createHmac('sha1', 'test-secret').update(rawBody).digest('hex');
await t.action(api.lib.handleWebhook, {
payload,
@@ -95,37 +91,35 @@ describe("Transloadit component lib", () => {
});
const results = await t.query(api.lib.listAlbumResults, {
- album: "wedding-gallery",
+ album: 'wedding-gallery',
});
expect(results).toHaveLength(1);
- expect(results[0]?.album).toBe("wedding-gallery");
- expect(results[0]?.userId).toBe("user_123");
+ expect(results[0]?.album).toBe('wedding-gallery');
+ expect(results[0]?.userId).toBe('user_123');
});
- test("handleWebhook stores url when ssl_url missing", async () => {
+ test('handleWebhook stores url when ssl_url missing', async () => {
const t = convexTest(schema, modules);
const payload = {
- assembly_id: "asm_url",
- ok: "ASSEMBLY_COMPLETED",
+ assembly_id: 'asm_url',
+ ok: 'ASSEMBLY_COMPLETED',
results: {
stored: [
{
- id: "file_3",
- url: "https://example.com/file-3.jpg",
- name: "file-3.jpg",
+ id: 'file_3',
+ url: 'https://example.com/file-3.jpg',
+ name: 'file-3.jpg',
size: 42,
- mime: "image/jpeg",
+ mime: 'image/jpeg',
},
],
},
};
const rawBody = JSON.stringify(payload);
- const signature = createHmac("sha1", "test-secret")
- .update(rawBody)
- .digest("hex");
+ const signature = createHmac('sha1', 'test-secret').update(rawBody).digest('hex');
await t.action(api.lib.handleWebhook, {
payload,
@@ -134,55 +128,53 @@ describe("Transloadit component lib", () => {
});
const results = await t.query(api.lib.listResults, {
- assemblyId: "asm_url",
+ assemblyId: 'asm_url',
});
expect(results).toHaveLength(1);
- expect(results[0]?.sslUrl).toBe("https://example.com/file-3.jpg");
+ expect(results[0]?.sslUrl).toBe('https://example.com/file-3.jpg');
});
- test("listResults exposes expected fields for common robot outputs", async () => {
+ test('listResults exposes expected fields for common robot outputs', async () => {
const t = convexTest(schema, modules);
const payload = {
- assembly_id: "asm_schema",
- ok: "ASSEMBLY_COMPLETED",
+ assembly_id: 'asm_schema',
+ ok: 'ASSEMBLY_COMPLETED',
results: {
images_resized: [
{
- id: "img_1",
- ssl_url: "https://example.com/img.jpg",
- name: "img.jpg",
- mime: "image/jpeg",
+ id: 'img_1',
+ ssl_url: 'https://example.com/img.jpg',
+ name: 'img.jpg',
+ mime: 'image/jpeg',
width: 1600,
height: 1200,
},
],
videos_encoded: [
{
- id: "vid_1",
- ssl_url: "https://example.com/vid.mp4",
- name: "vid.mp4",
- mime: "video/mp4",
+ id: 'vid_1',
+ ssl_url: 'https://example.com/vid.mp4',
+ name: 'vid.mp4',
+ mime: 'video/mp4',
duration: 12.5,
},
],
videos_thumbs_output: [
{
- id: "thumb_1",
- ssl_url: "https://example.com/thumb.jpg",
- name: "thumb.jpg",
- mime: "image/jpeg",
- original_id: "vid_1",
+ id: 'thumb_1',
+ ssl_url: 'https://example.com/thumb.jpg',
+ name: 'thumb.jpg',
+ mime: 'image/jpeg',
+ original_id: 'vid_1',
},
],
},
};
const rawBody = JSON.stringify(payload);
- const signature = createHmac("sha1", "test-secret")
- .update(rawBody)
- .digest("hex");
+ const signature = createHmac('sha1', 'test-secret').update(rawBody).digest('hex');
await t.action(api.lib.handleWebhook, {
payload,
@@ -191,57 +183,57 @@ describe("Transloadit component lib", () => {
});
const results = await t.query(api.lib.listResults, {
- assemblyId: "asm_schema",
+ assemblyId: 'asm_schema',
});
expect(results).toHaveLength(3);
const byStep = new Map(results.map((result) => [result.stepName, result]));
- const image = byStep.get("images_resized");
- const video = byStep.get("videos_encoded");
- const thumb = byStep.get("videos_thumbs_output");
+ const image = byStep.get('images_resized');
+ const video = byStep.get('videos_encoded');
+ const thumb = byStep.get('videos_thumbs_output');
- expect(image?.sslUrl).toBe("https://example.com/img.jpg");
- expect(image?.mime).toBe("image/jpeg");
+ expect(image?.sslUrl).toBe('https://example.com/img.jpg');
+ expect(image?.mime).toBe('image/jpeg');
expect(image?.raw?.width).toBe(1600);
expect(image?.raw?.height).toBe(1200);
- expect(video?.sslUrl).toBe("https://example.com/vid.mp4");
- expect(video?.mime).toBe("video/mp4");
+ expect(video?.sslUrl).toBe('https://example.com/vid.mp4');
+ expect(video?.mime).toBe('video/mp4');
expect(video?.raw?.duration).toBe(12.5);
- expect(thumb?.sslUrl).toBe("https://example.com/thumb.jpg");
- expect(thumb?.raw?.original_id).toBe("vid_1");
+ expect(thumb?.sslUrl).toBe('https://example.com/thumb.jpg');
+ expect(thumb?.raw?.original_id).toBe('vid_1');
});
- test("handleWebhook requires rawBody when verifying signature", async () => {
+ test('handleWebhook requires rawBody when verifying signature', async () => {
const t = convexTest(schema, modules);
- const payload = { assembly_id: "asm_missing" };
- const signature = createHmac("sha1", "test-secret")
+ const payload = { assembly_id: 'asm_missing' };
+ const signature = createHmac('sha1', 'test-secret')
.update(JSON.stringify(payload))
- .digest("hex");
+ .digest('hex');
await expect(
t.action(api.lib.handleWebhook, {
payload,
signature: `sha1:${signature}`,
}),
- ).rejects.toThrow("Missing rawBody for webhook verification");
+ ).rejects.toThrow('Missing rawBody for webhook verification');
});
- test("handleWebhook can skip verification when configured", async () => {
+ test('handleWebhook can skip verification when configured', async () => {
const t = convexTest(schema, modules);
const payload = {
- assembly_id: "asm_skip",
- ok: "ASSEMBLY_COMPLETED",
+ assembly_id: 'asm_skip',
+ ok: 'ASSEMBLY_COMPLETED',
results: {
resized: [
{
- id: "file_skip",
- ssl_url: "https://example.com/skip.jpg",
- name: "skip.jpg",
+ id: 'file_skip',
+ ssl_url: 'https://example.com/skip.jpg',
+ name: 'skip.jpg',
size: 123,
- mime: "image/jpeg",
+ mime: 'image/jpeg',
},
],
},
@@ -252,58 +244,58 @@ describe("Transloadit component lib", () => {
verifySignature: false,
});
- expect(result.assemblyId).toBe("asm_skip");
+ expect(result.assemblyId).toBe('asm_skip');
expect(result.resultCount).toBe(1);
});
- test("createAssemblyOptions includes expected upload count when provided", async () => {
+ test('createAssemblyOptions includes expected upload count when provided', async () => {
const t = convexTest(schema, modules);
const result = await t.action(api.lib.createAssemblyOptions, {
steps: {
resize: {
- robot: "/image/resize",
+ robot: '/image/resize',
width: 120,
height: 120,
},
},
numExpectedUploadFiles: 3,
- config: { authKey: "test-key", authSecret: "test-secret" },
+ config: { authKey: 'test-key', authSecret: 'test-secret' },
});
const params = JSON.parse(result.params) as Record;
expect(params.num_expected_upload_files).toBe(3);
});
- test("queueWebhook rejects invalid signature", async () => {
+ test('queueWebhook rejects invalid signature', async () => {
const t = convexTest(schema, modules);
- const payload = { assembly_id: "asm_bad" };
+ const payload = { assembly_id: 'asm_bad' };
const rawBody = JSON.stringify(payload);
await expect(
t.action(api.lib.queueWebhook, {
payload,
rawBody,
- signature: "sha1:bad",
+ signature: 'sha1:bad',
}),
- ).rejects.toThrow("Invalid Transloadit webhook signature");
+ ).rejects.toThrow('Invalid Transloadit webhook signature');
});
- test("refreshAssembly fetches status and stores results", async () => {
+ test('refreshAssembly fetches status and stores results', async () => {
const t = convexTest(schema, modules);
const payload = {
- assembly_id: "asm_456",
- ok: "ASSEMBLY_COMPLETED",
- message: "Assembly complete",
+ assembly_id: 'asm_456',
+ ok: 'ASSEMBLY_COMPLETED',
+ message: 'Assembly complete',
results: {
resized: [
{
- id: "file_2",
- ssl_url: "https://example.com/file-2.jpg",
- name: "file-2.jpg",
+ id: 'file_2',
+ ssl_url: 'https://example.com/file-2.jpg',
+ name: 'file-2.jpg',
size: 54321,
- mime: "image/jpeg",
+ mime: 'image/jpeg',
},
],
},
@@ -312,48 +304,48 @@ describe("Transloadit component lib", () => {
const fetchMock = vi.fn(async () => {
return new Response(JSON.stringify(payload), {
status: 200,
- headers: { "content-type": "application/json" },
+ headers: { 'content-type': 'application/json' },
});
});
- vi.stubGlobal("fetch", fetchMock);
+ vi.stubGlobal('fetch', fetchMock);
try {
const result = await t.action(api.lib.refreshAssembly, {
- assemblyId: "asm_456",
- config: { authKey: "test-key", authSecret: "test-secret" },
+ assemblyId: 'asm_456',
+ config: { authKey: 'test-key', authSecret: 'test-secret' },
});
- expect(result.assemblyId).toBe("asm_456");
- expect(result.ok).toBe("ASSEMBLY_COMPLETED");
+ expect(result.assemblyId).toBe('asm_456');
+ expect(result.ok).toBe('ASSEMBLY_COMPLETED');
const requestInfo = fetchMock.mock.calls[0]?.[0];
const requestUrl =
- typeof requestInfo === "string"
+ typeof requestInfo === 'string'
? requestInfo
: requestInfo instanceof URL
? requestInfo.toString()
: requestInfo instanceof Request
? requestInfo.url
- : "";
+ : '';
if (!requestUrl) {
- throw new Error("Expected fetch to be called with a URL string");
+ throw new Error('Expected fetch to be called with a URL string');
}
const url = new URL(requestUrl);
- expect(url.origin).toBe("https://api2.transloadit.com");
- expect(url.searchParams.get("signature")).toBeTruthy();
- expect(url.searchParams.get("params")).toBeTruthy();
+ expect(url.origin).toBe('https://api2.transloadit.com');
+ expect(url.searchParams.get('signature')).toBeTruthy();
+ expect(url.searchParams.get('params')).toBeTruthy();
const assembly = await t.query(api.lib.getAssemblyStatus, {
- assemblyId: "asm_456",
+ assemblyId: 'asm_456',
});
- expect(assembly?.ok).toBe("ASSEMBLY_COMPLETED");
+ expect(assembly?.ok).toBe('ASSEMBLY_COMPLETED');
const results = await t.query(api.lib.listResults, {
- assemblyId: "asm_456",
+ assemblyId: 'asm_456',
});
expect(results).toHaveLength(1);
- expect(results[0]?.stepName).toBe("resized");
+ expect(results[0]?.stepName).toBe('resized');
} finally {
vi.unstubAllGlobals();
}
diff --git a/src/component/lib.ts b/src/component/lib.ts
index cb22775..1986b6e 100644
--- a/src/component/lib.ts
+++ b/src/component/lib.ts
@@ -1,10 +1,10 @@
-import type { AssemblyStatus } from "@transloadit/zod/v3/assemblyStatus";
-import type { AssemblyInstructionsInput } from "@transloadit/zod/v3/template";
-import { anyApi, type FunctionReference } from "convex/server";
-import { v } from "convex/values";
-import { parseAssemblyStatus } from "../shared/assemblyUrls.ts";
-import { transloaditError } from "../shared/errors.ts";
-import { getResultUrl } from "../shared/resultUtils.ts";
+import type { AssemblyStatus } from '@transloadit/zod/v3/assemblyStatus';
+import type { AssemblyInstructionsInput } from '@transloadit/zod/v3/template';
+import { anyApi, type FunctionReference } from 'convex/server';
+import { v } from 'convex/values';
+import { parseAssemblyStatus } from '../shared/assemblyUrls.ts';
+import { transloaditError } from '../shared/errors.ts';
+import { getResultUrl } from '../shared/resultUtils.ts';
import {
type ProcessWebhookResult,
vAssembly,
@@ -27,43 +27,32 @@ import {
vUpsertAssemblyArgs,
vWebhookArgs,
vWebhookResponse,
-} from "../shared/schemas.ts";
-import {
- action,
- internalAction,
- internalMutation,
- mutation,
- query,
-} from "./_generated/server.ts";
+} from '../shared/schemas.ts';
+import { action, internalAction, internalMutation, mutation, query } from './_generated/server.ts';
import {
buildTransloaditParams,
flattenResults,
signTransloaditParams,
verifyWebhookSignature,
-} from "./apiUtils.ts";
+} from './apiUtils.ts';
-const TRANSLOADIT_ASSEMBLY_URL = "https://api2.transloadit.com/assemblies";
+const TRANSLOADIT_ASSEMBLY_URL = 'https://api2.transloadit.com/assemblies';
export { vAssembly, vAssemblyResult, vTransloaditConfig };
-export type { Assembly, AssemblyResult } from "../shared/schemas.ts";
+export type { Assembly, AssemblyResult } from '../shared/schemas.ts';
type InternalApi = {
lib: {
- upsertAssembly: FunctionReference<
- "mutation",
- "internal",
- Record,
- unknown
- >;
+ upsertAssembly: FunctionReference<'mutation', 'internal', Record, unknown>;
replaceResultsForAssembly: FunctionReference<
- "mutation",
- "internal",
+ 'mutation',
+ 'internal',
Record,
unknown
>;
processWebhook: FunctionReference<
- "action",
- "internal",
+ 'action',
+ 'internal',
Record,
ProcessWebhookResult
>;
@@ -73,21 +62,21 @@ type InternalApi = {
const internal = anyApi as unknown as InternalApi;
const resolveAssemblyId = (payload: AssemblyStatus): string => {
- if (typeof payload.assembly_id === "string") return payload.assembly_id;
- if (typeof payload.assemblyId === "string") return payload.assemblyId;
- return "";
+ if (typeof payload.assembly_id === 'string') return payload.assembly_id;
+ if (typeof payload.assemblyId === 'string') return payload.assemblyId;
+ return '';
};
const getFieldString = (fields: unknown, key: string): string | undefined => {
- if (!fields || typeof fields !== "object") return undefined;
+ if (!fields || typeof fields !== 'object') return undefined;
const value = (fields as Record)[key];
- return typeof value === "string" ? value : undefined;
+ return typeof value === 'string' ? value : undefined;
};
const parseAssemblyPayload = (payload: unknown): AssemblyStatus => {
const parsed = parseAssemblyStatus(payload);
if (!parsed) {
- throw transloaditError("payload", "Invalid Transloadit payload");
+ throw transloaditError('payload', 'Invalid Transloadit payload');
}
return parsed;
};
@@ -97,7 +86,7 @@ const resolveWebhookRawBody = (args: {
rawBody?: string;
verifySignature?: boolean;
}) => {
- if (typeof args.rawBody === "string") return args.rawBody;
+ if (typeof args.rawBody === 'string') return args.rawBody;
if (args.verifySignature === false) {
return JSON.stringify(args.payload ?? {});
}
@@ -117,40 +106,38 @@ const buildSignedAssemblyUrl = async (
});
const signature = await signTransloaditParams(params, authSecret);
const url = new URL(`${TRANSLOADIT_ASSEMBLY_URL}/${assemblyId}`);
- url.searchParams.set("signature", signature);
- url.searchParams.set("params", params);
+ url.searchParams.set('signature', signature);
+ url.searchParams.set('params', params);
return url.toString();
};
const applyAssemblyStatus = async (
- ctx: Pick,
+ ctx: Pick,
payload: AssemblyStatus,
) => {
const assemblyId = resolveAssemblyId(payload);
if (!assemblyId) {
- throw transloaditError("webhook", "Webhook payload missing assembly_id");
+ throw transloaditError('webhook', 'Webhook payload missing assembly_id');
}
const results = flattenResults(payload.results ?? undefined);
await ctx.runMutation(internal.lib.upsertAssembly, {
assemblyId,
- status: typeof payload.ok === "string" ? payload.ok : undefined,
- ok: typeof payload.ok === "string" ? payload.ok : undefined,
- message: typeof payload.message === "string" ? payload.message : undefined,
- templateId:
- typeof payload.template_id === "string" ? payload.template_id : undefined,
- notifyUrl:
- typeof payload.notify_url === "string" ? payload.notify_url : undefined,
+ status: typeof payload.ok === 'string' ? payload.ok : undefined,
+ ok: typeof payload.ok === 'string' ? payload.ok : undefined,
+ message: typeof payload.message === 'string' ? payload.message : undefined,
+ templateId: typeof payload.template_id === 'string' ? payload.template_id : undefined,
+ notifyUrl: typeof payload.notify_url === 'string' ? payload.notify_url : undefined,
fields: payload.fields,
uploads: payload.uploads,
results: payload.results,
error: payload.error,
raw: payload,
userId:
- typeof payload.user_id === "string"
+ typeof payload.user_id === 'string'
? payload.user_id
- : getFieldString(payload.fields, "userId"),
+ : getFieldString(payload.fields, 'userId'),
});
await ctx.runMutation(internal.lib.replaceResultsForAssembly, {
@@ -161,26 +148,26 @@ const applyAssemblyStatus = async (
return {
assemblyId,
resultCount: results.length,
- ok: typeof payload.ok === "string" ? payload.ok : undefined,
- status: typeof payload.ok === "string" ? payload.ok : undefined,
+ ok: typeof payload.ok === 'string' ? payload.ok : undefined,
+ status: typeof payload.ok === 'string' ? payload.ok : undefined,
};
};
export const upsertAssembly = internalMutation({
args: vUpsertAssemblyArgs,
- returns: v.id("assemblies"),
+ returns: v.id('assemblies'),
handler: async (ctx, args) => {
// Note: we persist full `raw` + `results` for debugging/fidelity. Large
// assemblies can hit Convex document size limits; trim or externalize
// payloads if this becomes an issue for your workload.
const existing = await ctx.db
- .query("assemblies")
- .withIndex("by_assemblyId", (q) => q.eq("assemblyId", args.assemblyId))
+ .query('assemblies')
+ .withIndex('by_assemblyId', (q) => q.eq('assemblyId', args.assemblyId))
.unique();
const now = Date.now();
if (!existing) {
- return await ctx.db.insert("assemblies", {
+ return await ctx.db.insert('assemblies', {
assemblyId: args.assemblyId,
status: args.status,
ok: args.ok,
@@ -205,8 +192,7 @@ export const upsertAssembly = internalMutation({
message: args.message ?? existing.message,
templateId: args.templateId ?? existing.templateId,
notifyUrl: args.notifyUrl ?? existing.notifyUrl,
- numExpectedUploadFiles:
- args.numExpectedUploadFiles ?? existing.numExpectedUploadFiles,
+ numExpectedUploadFiles: args.numExpectedUploadFiles ?? existing.numExpectedUploadFiles,
fields: args.fields ?? existing.fields,
uploads: args.uploads ?? existing.uploads,
results: args.results ?? existing.results,
@@ -229,8 +215,8 @@ export const replaceResultsForAssembly = internalMutation({
// This mutation replaces all results in one transaction; extremely large
// result sets may need batching or external storage to avoid Convex limits.
const existingResults = await ctx.db
- .query("results")
- .withIndex("by_assemblyId", (q) => q.eq("assemblyId", args.assemblyId))
+ .query('results')
+ .withIndex('by_assemblyId', (q) => q.eq('assemblyId', args.assemblyId))
.collect();
for (const existing of existingResults) {
@@ -238,29 +224,29 @@ export const replaceResultsForAssembly = internalMutation({
}
const assembly = await ctx.db
- .query("assemblies")
- .withIndex("by_assemblyId", (q) => q.eq("assemblyId", args.assemblyId))
+ .query('assemblies')
+ .withIndex('by_assemblyId', (q) => q.eq('assemblyId', args.assemblyId))
.unique();
- const album = getFieldString(assembly?.fields, "album");
+ const album = getFieldString(assembly?.fields, 'album');
const userId =
- typeof assembly?.userId === "string"
+ typeof assembly?.userId === 'string'
? assembly.userId
- : getFieldString(assembly?.fields, "userId");
+ : getFieldString(assembly?.fields, 'userId');
const now = Date.now();
for (const entry of args.results) {
const raw = entry.result as Record;
const sslUrl = getResultUrl(entry.result);
- await ctx.db.insert("results", {
+ await ctx.db.insert('results', {
assemblyId: args.assemblyId,
album,
userId,
stepName: entry.stepName,
- resultId: typeof raw.id === "string" ? raw.id : undefined,
+ resultId: typeof raw.id === 'string' ? raw.id : undefined,
sslUrl,
- name: typeof raw.name === "string" ? raw.name : undefined,
- size: typeof raw.size === "number" ? raw.size : undefined,
- mime: typeof raw.mime === "string" ? raw.mime : undefined,
+ name: typeof raw.name === 'string' ? raw.name : undefined,
+ size: typeof raw.size === 'number' ? raw.size : undefined,
+ mime: typeof raw.mime === 'string' ? raw.mime : undefined,
raw,
createdAt: now,
});
@@ -280,63 +266,49 @@ export const createAssembly = action({
const { paramsString, params } = buildTransloaditParams({
authKey: args.config.authKey,
templateId: args.templateId,
- steps: args.steps as AssemblyInstructionsInput["steps"],
- fields: args.fields as AssemblyInstructionsInput["fields"],
+ steps: args.steps as AssemblyInstructionsInput['steps'],
+ fields: args.fields as AssemblyInstructionsInput['fields'],
notifyUrl: args.notifyUrl,
numExpectedUploadFiles: args.numExpectedUploadFiles,
expires: args.expires,
- additionalParams: args.additionalParams as
- | Record
- | undefined,
+ additionalParams: args.additionalParams as Record | undefined,
});
- const signature = await signTransloaditParams(
- paramsString,
- args.config.authSecret,
- );
+ const signature = await signTransloaditParams(paramsString, args.config.authSecret);
const formData = new FormData();
- formData.append("params", paramsString);
- formData.append("signature", signature);
- if (typeof args.numExpectedUploadFiles === "number") {
- formData.append(
- "tus_num_expected_upload_files",
- String(args.numExpectedUploadFiles),
- );
+ formData.append('params', paramsString);
+ formData.append('signature', signature);
+ if (typeof args.numExpectedUploadFiles === 'number') {
+ formData.append('tus_num_expected_upload_files', String(args.numExpectedUploadFiles));
}
const response = await fetch(TRANSLOADIT_ASSEMBLY_URL, {
- method: "POST",
+ method: 'POST',
body: formData,
});
const data = (await response.json()) as Record;
if (!response.ok) {
- throw transloaditError(
- "createAssembly",
- `HTTP ${response.status}: ${JSON.stringify(data)}`,
- );
+ throw transloaditError('createAssembly', `HTTP ${response.status}: ${JSON.stringify(data)}`);
}
const assemblyId =
- typeof data.assembly_id === "string"
+ typeof data.assembly_id === 'string'
? data.assembly_id
- : typeof data.assemblyId === "string"
+ : typeof data.assemblyId === 'string'
? data.assemblyId
- : "";
+ : '';
if (!assemblyId) {
- throw transloaditError(
- "createAssembly",
- "Transloadit response missing assembly_id",
- );
+ throw transloaditError('createAssembly', 'Transloadit response missing assembly_id');
}
await ctx.runMutation(internal.lib.upsertAssembly, {
assemblyId,
- status: typeof data.ok === "string" ? data.ok : undefined,
- ok: typeof data.ok === "string" ? data.ok : undefined,
- message: typeof data.message === "string" ? data.message : undefined,
+ status: typeof data.ok === 'string' ? data.ok : undefined,
+ ok: typeof data.ok === 'string' ? data.ok : undefined,
+ message: typeof data.message === 'string' ? data.message : undefined,
templateId: args.templateId,
notifyUrl: args.notifyUrl,
numExpectedUploadFiles: args.numExpectedUploadFiles,
@@ -362,23 +334,18 @@ export const createAssemblyOptions = action({
const { paramsString, params } = buildTransloaditParams({
authKey: args.config.authKey,
templateId: args.templateId,
- steps: args.steps as AssemblyInstructionsInput["steps"],
- fields: args.fields as AssemblyInstructionsInput["fields"],
+ steps: args.steps as AssemblyInstructionsInput['steps'],
+ fields: args.fields as AssemblyInstructionsInput['fields'],
notifyUrl: args.notifyUrl,
numExpectedUploadFiles: args.numExpectedUploadFiles,
expires: args.expires,
- additionalParams: args.additionalParams as
- | Record
- | undefined,
+ additionalParams: args.additionalParams as Record | undefined,
});
- const signature = await signTransloaditParams(
- paramsString,
- args.config.authSecret,
- );
+ const signature = await signTransloaditParams(paramsString, args.config.authSecret);
const fields =
- params && typeof params.fields === "object" && params.fields
+ params && typeof params.fields === 'object' && params.fields
? (params.fields as Record)
: undefined;
@@ -400,16 +367,10 @@ export const processWebhook = internalAction({
if (shouldVerify) {
if (!rawBody) {
- throw transloaditError(
- "webhook",
- "Missing rawBody for webhook verification",
- );
+ throw transloaditError('webhook', 'Missing rawBody for webhook verification');
}
if (!authSecret) {
- throw transloaditError(
- "webhook",
- "Missing TRANSLOADIT_SECRET for webhook validation",
- );
+ throw transloaditError('webhook', 'Missing TRANSLOADIT_SECRET for webhook validation');
}
const verified = await verifyWebhookSignature({
rawBody,
@@ -417,10 +378,7 @@ export const processWebhook = internalAction({
authSecret,
});
if (!verified) {
- throw transloaditError(
- "webhook",
- "Invalid Transloadit webhook signature",
- );
+ throw transloaditError('webhook', 'Invalid Transloadit webhook signature');
}
}
@@ -450,21 +408,14 @@ export const queueWebhook = action({
handler: async (ctx, args) => {
const rawBody = resolveWebhookRawBody(args);
const shouldVerify = args.verifySignature ?? true;
- const authSecret =
- args.config?.authSecret ?? process.env.TRANSLOADIT_SECRET;
+ const authSecret = args.config?.authSecret ?? process.env.TRANSLOADIT_SECRET;
if (shouldVerify) {
if (!rawBody) {
- throw transloaditError(
- "webhook",
- "Missing rawBody for webhook verification",
- );
+ throw transloaditError('webhook', 'Missing rawBody for webhook verification');
}
if (!authSecret) {
- throw transloaditError(
- "webhook",
- "Missing TRANSLOADIT_SECRET for webhook validation",
- );
+ throw transloaditError('webhook', 'Missing TRANSLOADIT_SECRET for webhook validation');
}
const verified = await verifyWebhookSignature({
rawBody,
@@ -472,17 +423,14 @@ export const queueWebhook = action({
authSecret,
});
if (!verified) {
- throw transloaditError(
- "webhook",
- "Invalid Transloadit webhook signature",
- );
+ throw transloaditError('webhook', 'Invalid Transloadit webhook signature');
}
}
const parsed = parseAssemblyPayload(args.payload);
const assemblyId = resolveAssemblyId(parsed);
if (!assemblyId) {
- throw transloaditError("webhook", "Webhook payload missing assembly_id");
+ throw transloaditError('webhook', 'Webhook payload missing assembly_id');
}
await ctx.scheduler.runAfter(0, internal.lib.processWebhook, {
@@ -503,8 +451,7 @@ export const refreshAssembly = action({
handler: async (ctx, args) => {
const { assemblyId } = args;
const authKey = args.config?.authKey ?? process.env.TRANSLOADIT_KEY;
- const authSecret =
- args.config?.authSecret ?? process.env.TRANSLOADIT_SECRET;
+ const authSecret = args.config?.authSecret ?? process.env.TRANSLOADIT_SECRET;
const url =
authKey && authSecret
? await buildSignedAssemblyUrl(assemblyId, authKey, authSecret)
@@ -513,10 +460,7 @@ export const refreshAssembly = action({
const response = await fetch(url);
const payload = parseAssemblyPayload(await response.json());
if (!response.ok) {
- throw transloaditError(
- "status",
- `HTTP ${response.status}: ${JSON.stringify(payload)}`,
- );
+ throw transloaditError('status', `HTTP ${response.status}: ${JSON.stringify(payload)}`);
}
return applyAssemblyStatus(ctx, payload);
@@ -528,8 +472,8 @@ export const getAssemblyStatus = query({
returns: v.union(vAssembly, v.null()),
handler: async (ctx, args) => {
return await ctx.db
- .query("assemblies")
- .withIndex("by_assemblyId", (q) => q.eq("assemblyId", args.assemblyId))
+ .query('assemblies')
+ .withIndex('by_assemblyId', (q) => q.eq('assemblyId', args.assemblyId))
.unique();
},
});
@@ -540,22 +484,22 @@ export const listAssemblies = query({
handler: async (ctx, args) => {
if (args.userId) {
return ctx.db
- .query("assemblies")
- .withIndex("by_userId", (q) => q.eq("userId", args.userId))
- .order("desc")
+ .query('assemblies')
+ .withIndex('by_userId', (q) => q.eq('userId', args.userId))
+ .order('desc')
.take(args.limit ?? 50);
}
if (args.status) {
return ctx.db
- .query("assemblies")
- .withIndex("by_status", (q) => q.eq("status", args.status))
- .order("desc")
+ .query('assemblies')
+ .withIndex('by_status', (q) => q.eq('status', args.status))
+ .order('desc')
.take(args.limit ?? 50);
}
return ctx.db
- .query("assemblies")
- .order("desc")
+ .query('assemblies')
+ .order('desc')
.take(args.limit ?? 50);
},
});
@@ -567,18 +511,18 @@ export const listResults = query({
if (args.stepName) {
const stepName = args.stepName;
return ctx.db
- .query("results")
- .withIndex("by_assemblyId_and_step", (q) =>
- q.eq("assemblyId", args.assemblyId).eq("stepName", stepName),
+ .query('results')
+ .withIndex('by_assemblyId_and_step', (q) =>
+ q.eq('assemblyId', args.assemblyId).eq('stepName', stepName),
)
- .order("desc")
+ .order('desc')
.take(args.limit ?? 200);
}
return ctx.db
- .query("results")
- .withIndex("by_assemblyId", (q) => q.eq("assemblyId", args.assemblyId))
- .order("desc")
+ .query('results')
+ .withIndex('by_assemblyId', (q) => q.eq('assemblyId', args.assemblyId))
+ .order('desc')
.take(args.limit ?? 200);
},
});
@@ -588,9 +532,9 @@ export const listAlbumResults = query({
returns: v.array(vAssemblyResult),
handler: async (ctx, args) => {
return ctx.db
- .query("results")
- .withIndex("by_album", (q) => q.eq("album", args.album))
- .order("desc")
+ .query('results')
+ .withIndex('by_album', (q) => q.eq('album', args.album))
+ .order('desc')
.take(args.limit ?? 200);
},
});
@@ -600,8 +544,8 @@ export const purgeAlbum = mutation({
returns: vPurgeAlbumResponse,
handler: async (ctx, args) => {
const results = await ctx.db
- .query("results")
- .withIndex("by_album", (q) => q.eq("album", args.album))
+ .query('results')
+ .withIndex('by_album', (q) => q.eq('album', args.album))
.collect();
const assemblyIds = new Set();
@@ -614,8 +558,8 @@ export const purgeAlbum = mutation({
if (args.deleteAssemblies ?? true) {
for (const assemblyId of assemblyIds) {
const assembly = await ctx.db
- .query("assemblies")
- .withIndex("by_assemblyId", (q) => q.eq("assemblyId", assemblyId))
+ .query('assemblies')
+ .withIndex('by_assemblyId', (q) => q.eq('assemblyId', assemblyId))
.unique();
if (assembly) {
await ctx.db.delete(assembly._id);
@@ -633,8 +577,8 @@ export const storeAssemblyMetadata = mutation({
returns: v.union(vAssembly, v.null()),
handler: async (ctx, args) => {
const existing = await ctx.db
- .query("assemblies")
- .withIndex("by_assemblyId", (q) => q.eq("assemblyId", args.assemblyId))
+ .query('assemblies')
+ .withIndex('by_assemblyId', (q) => q.eq('assemblyId', args.assemblyId))
.unique();
if (!existing) {
diff --git a/src/component/schema.ts b/src/component/schema.ts
index 11cf758..933ab0f 100644
--- a/src/component/schema.ts
+++ b/src/component/schema.ts
@@ -1,13 +1,13 @@
-import { defineSchema, defineTable } from "convex/server";
-import { vAssemblyFields, vAssemblyResultFields } from "../shared/schemas.ts";
+import { defineSchema, defineTable } from 'convex/server';
+import { vAssemblyFields, vAssemblyResultFields } from '../shared/schemas.ts';
export default defineSchema({
assemblies: defineTable(vAssemblyFields)
- .index("by_assemblyId", ["assemblyId"])
- .index("by_status", ["status"])
- .index("by_userId", ["userId"]),
+ .index('by_assemblyId', ['assemblyId'])
+ .index('by_status', ['status'])
+ .index('by_userId', ['userId']),
results: defineTable(vAssemblyResultFields)
- .index("by_assemblyId", ["assemblyId"])
- .index("by_assemblyId_and_step", ["assemblyId", "stepName"])
- .index("by_album", ["album"]),
+ .index('by_assemblyId', ['assemblyId'])
+ .index('by_assemblyId_and_step', ['assemblyId', 'stepName'])
+ .index('by_album', ['album']),
});
diff --git a/src/component/setup.test.ts b/src/component/setup.test.ts
index 78567cb..85327a4 100644
--- a/src/component/setup.test.ts
+++ b/src/component/setup.test.ts
@@ -1,6 +1,6 @@
///
-import { test } from "vitest";
+import { test } from 'vitest';
-export const modules = import.meta.glob("./**/*.*s");
+export const modules = import.meta.glob('./**/*.*s');
-test("setup", () => {});
+test('setup', () => {});
diff --git a/src/debug/index.ts b/src/debug/index.ts
index a48f85a..eaf31eb 100644
--- a/src/debug/index.ts
+++ b/src/debug/index.ts
@@ -1,4 +1,4 @@
-type ConsoleSink = Pick;
+type ConsoleSink = Pick;
export type DebugLogger = {
enabled: boolean;
@@ -18,7 +18,7 @@ export type DebugLoggerOptions = {
};
const resolveEnv = (): Record => {
- if (typeof process !== "undefined" && process.env) {
+ if (typeof process !== 'undefined' && process.env) {
return process.env;
}
return {};
@@ -27,7 +27,7 @@ const resolveEnv = (): Record => {
const parseEnabled = (value: string | undefined) => {
if (!value) return false;
const normalized = value.trim().toLowerCase();
- return normalized === "1" || normalized === "true" || normalized === "yes";
+ return normalized === '1' || normalized === 'true' || normalized === 'yes';
};
const formatLine = (
@@ -42,21 +42,18 @@ const formatLine = (
return `${timestamp} ${prefix} ${message} ${JSON.stringify(meta)}`;
};
-export const createDebugLogger = (
- options: DebugLoggerOptions = {},
-): DebugLogger => {
+export const createDebugLogger = (options: DebugLoggerOptions = {}): DebugLogger => {
const env = resolveEnv();
const enabled =
options.enabled ??
- (parseEnabled(env.TRANSLOADIT_DEBUG) ||
- parseEnabled(env.CONVEX_TRANSLOADIT_DEBUG));
- const namespace = options.namespace ?? "convex";
+ (parseEnabled(env.TRANSLOADIT_DEBUG) || parseEnabled(env.CONVEX_TRANSLOADIT_DEBUG));
+ const namespace = options.namespace ?? 'convex';
const prefix = `[transloadit:${namespace}]`;
const sink: ConsoleSink = options.sink ?? console;
const clock = options.clock ?? (() => new Date());
const emit = (
- level: "log" | "info" | "warn" | "error",
+ level: 'log' | 'info' | 'warn' | 'error',
message: string,
meta?: Record,
) => {
@@ -67,11 +64,11 @@ export const createDebugLogger = (
const logger: DebugLogger = {
enabled,
- log: (message, meta) => emit("log", message, meta),
- info: (message, meta) => emit("info", message, meta),
- warn: (message, meta) => emit("warn", message, meta),
- error: (message, meta) => emit("error", message, meta),
- event: (name, meta) => emit("info", `event:${name}`, meta),
+ log: (message, meta) => emit('log', message, meta),
+ info: (message, meta) => emit('info', message, meta),
+ warn: (message, meta) => emit('warn', message, meta),
+ error: (message, meta) => emit('error', message, meta),
+ event: (name, meta) => emit('info', `event:${name}`, meta),
child: (childNamespace) =>
createDebugLogger({
...options,
diff --git a/src/shared/assemblyUrls.test.ts b/src/shared/assemblyUrls.test.ts
index cd3be9e..ff21d9c 100644
--- a/src/shared/assemblyUrls.test.ts
+++ b/src/shared/assemblyUrls.test.ts
@@ -1,4 +1,4 @@
-import { describe, expect, it } from "vitest";
+import { describe, expect, it } from 'vitest';
import {
ASSEMBLY_STATUS_COMPLETED,
ASSEMBLY_STATUS_UPLOADING,
@@ -9,63 +9,59 @@ import {
parseAssemblyResults,
parseAssemblyStatus,
parseAssemblyUrls,
-} from "./assemblyUrls.ts";
+} from './assemblyUrls.ts';
-describe("assembly helpers", () => {
- it("parses tus and assembly URLs with fallbacks", () => {
+describe('assembly helpers', () => {
+ it('parses tus and assembly URLs with fallbacks', () => {
const parsed = parseAssemblyUrls({
- tus_url: "https://tus.transloadit.com",
- assembly_ssl_url: "https://ssl.transloadit.com/assembly",
- assembly_url: "https://transloadit.com/assembly",
+ tus_url: 'https://tus.transloadit.com',
+ assembly_ssl_url: 'https://ssl.transloadit.com/assembly',
+ assembly_url: 'https://transloadit.com/assembly',
});
expect(parsed).toEqual({
- tusUrl: "https://tus.transloadit.com",
- assemblyUrl: "https://ssl.transloadit.com/assembly",
+ tusUrl: 'https://tus.transloadit.com',
+ assemblyUrl: 'https://ssl.transloadit.com/assembly',
});
const fallback = parseAssemblyUrls({
- tusUrl: "https://tus.example.com",
- assemblyUrl: "https://assembly.example.com",
+ tusUrl: 'https://tus.example.com',
+ assemblyUrl: 'https://assembly.example.com',
});
expect(fallback).toEqual({
- tusUrl: "https://tus.example.com",
- assemblyUrl: "https://assembly.example.com",
+ tusUrl: 'https://tus.example.com',
+ assemblyUrl: 'https://assembly.example.com',
});
});
- it("parses assembly status, fields, and results safely", () => {
+ it('parses assembly status, fields, and results safely', () => {
const status = {
- ok: "ASSEMBLY_COMPLETED",
- fields: { album: "wedding-gallery" },
+ ok: 'ASSEMBLY_COMPLETED',
+ fields: { album: 'wedding-gallery' },
results: {
images_output: [
{
- id: "result-1",
- ssl_url: "https://cdn.example.com/image.jpg",
+ id: 'result-1',
+ ssl_url: 'https://cdn.example.com/image.jpg',
},
],
},
};
- expect(parseAssemblyStatus(status)?.ok).toBe("ASSEMBLY_COMPLETED");
- expect(parseAssemblyFields(status)).toEqual({ album: "wedding-gallery" });
- expect(Object.keys(parseAssemblyResults(status))).toEqual([
- "images_output",
- ]);
+ expect(parseAssemblyStatus(status)?.ok).toBe('ASSEMBLY_COMPLETED');
+ expect(parseAssemblyFields(status)).toEqual({ album: 'wedding-gallery' });
+ expect(Object.keys(parseAssemblyResults(status))).toEqual(['images_output']);
- expect(parseAssemblyStatus("nope")).toBeNull();
- expect(parseAssemblyFields("nope")).toEqual({});
- expect(parseAssemblyResults("nope")).toEqual({});
+ expect(parseAssemblyStatus('nope')).toBeNull();
+ expect(parseAssemblyFields('nope')).toEqual({});
+ expect(parseAssemblyResults('nope')).toEqual({});
});
- it("exposes canonical status helpers", () => {
+ it('exposes canonical status helpers', () => {
expect(isAssemblyCompletedStatus(ASSEMBLY_STATUS_COMPLETED)).toBe(true);
expect(isAssemblyCompletedStatus(ASSEMBLY_STATUS_UPLOADING)).toBe(false);
expect(isAssemblyUploadingStatus(ASSEMBLY_STATUS_UPLOADING)).toBe(true);
- expect(getAssemblyStage({ ok: ASSEMBLY_STATUS_COMPLETED })).toBe(
- "complete",
- );
+ expect(getAssemblyStage({ ok: ASSEMBLY_STATUS_COMPLETED })).toBe('complete');
});
});
diff --git a/src/shared/assemblyUrls.ts b/src/shared/assemblyUrls.ts
index 465638e..3415375 100644
--- a/src/shared/assemblyUrls.ts
+++ b/src/shared/assemblyUrls.ts
@@ -12,7 +12,7 @@ import {
type AssemblyStage as ZodAssemblyStage,
type AssemblyUrls as ZodAssemblyUrls,
type NormalizedAssemblyUrls as ZodNormalizedAssemblyUrls,
-} from "@transloadit/zod/v3";
+} from '@transloadit/zod/v3';
export type AssemblyUrls = ZodAssemblyUrls;
export type NormalizedAssemblyUrls = ZodNormalizedAssemblyUrls;
@@ -29,11 +29,9 @@ export {
};
const isRecord = (value: unknown): value is Record =>
- value !== null && typeof value === "object" && !Array.isArray(value);
+ value !== null && typeof value === 'object' && !Array.isArray(value);
-export const parseAssemblyStatus = (
- data: unknown,
-): TransloaditAssembly | null => {
+export const parseAssemblyStatus = (data: unknown): TransloaditAssembly | null => {
const parsed = assemblyStatusSchema.safeParse(data);
return parsed.success ? parsed.data : null;
};
diff --git a/src/shared/errors.ts b/src/shared/errors.ts
index 9ce4708..0c16c84 100644
--- a/src/shared/errors.ts
+++ b/src/shared/errors.ts
@@ -1,18 +1,18 @@
export type TransloaditErrorContext =
- | "createAssembly"
- | "upload"
- | "polling"
- | "status"
- | "webhook"
- | "payload"
- | "config";
+ | 'createAssembly'
+ | 'upload'
+ | 'polling'
+ | 'status'
+ | 'webhook'
+ | 'payload'
+ | 'config';
export class TransloaditError extends Error {
readonly context: TransloaditErrorContext;
constructor(context: TransloaditErrorContext, message: string) {
super(`Transloadit ${context}: ${message}`);
- this.name = "TransloaditError";
+ this.name = 'TransloaditError';
this.context = context;
}
}
diff --git a/src/shared/pollAssembly.ts b/src/shared/pollAssembly.ts
index e18c42d..38b9b5a 100644
--- a/src/shared/pollAssembly.ts
+++ b/src/shared/pollAssembly.ts
@@ -1,4 +1,4 @@
-import { transloaditError } from "./errors.ts";
+import { transloaditError } from './errors.ts';
export type PollAssemblyOptions = {
intervalMs: number;
@@ -12,9 +12,7 @@ export type PollAssemblyController = {
stop: () => void;
};
-export const pollAssembly = (
- options: PollAssemblyOptions,
-): PollAssemblyController => {
+export const pollAssembly = (options: PollAssemblyOptions): PollAssemblyController => {
const intervalMs = Math.max(0, options.intervalMs);
let cancelled = false;
let intervalId: ReturnType | null = null;
@@ -47,9 +45,7 @@ export const pollAssembly = (
await options.refresh();
} catch (error) {
const resolved =
- error instanceof Error
- ? error
- : transloaditError("polling", "Refresh failed");
+ error instanceof Error ? error : transloaditError('polling', 'Refresh failed');
options.onError?.(resolved);
} finally {
inFlight = false;
diff --git a/src/shared/resultTypes.ts b/src/shared/resultTypes.ts
index db369ff..1259b56 100644
--- a/src/shared/resultTypes.ts
+++ b/src/shared/resultTypes.ts
@@ -1,4 +1,4 @@
-import type { AssemblyStatusResult } from "@transloadit/zod/v3/assemblyStatus";
+import type { AssemblyStatusResult } from '@transloadit/zod/v3/assemblyStatus';
export type TransloaditResult = AssemblyStatusResult;
@@ -33,12 +33,11 @@ export type StoreResult = AssemblyStatusResult & {
};
export type ResultByRobot = {
- "/image/resize": ImageResizeResult;
- "/video/encode": VideoEncodeResult;
- "/video/thumbs": VideoThumbsResult;
- "/r2/store": StoreResult;
- "/s3/store": StoreResult;
+ '/image/resize': ImageResizeResult;
+ '/video/encode': VideoEncodeResult;
+ '/video/thumbs': VideoThumbsResult;
+ '/r2/store': StoreResult;
+ '/s3/store': StoreResult;
};
-export type ResultForRobot =
- ResultByRobot[Robot];
+export type ResultForRobot = ResultByRobot[Robot];
diff --git a/src/shared/resultUtils.test.ts b/src/shared/resultUtils.test.ts
index 465d2bc..d967e4f 100644
--- a/src/shared/resultUtils.test.ts
+++ b/src/shared/resultUtils.test.ts
@@ -1,29 +1,29 @@
-import { describe, expect, it } from "vitest";
-import { getResultOriginalKey, getResultUrl } from "./resultUtils.ts";
+import { describe, expect, it } from 'vitest';
+import { getResultOriginalKey, getResultUrl } from './resultUtils.ts';
-describe("result utils", () => {
- it("extracts result URLs with common fallbacks", () => {
- expect(getResultUrl({ ssl_url: "https://cdn.example.com/file.jpg" })).toBe(
- "https://cdn.example.com/file.jpg",
+describe('result utils', () => {
+ it('extracts result URLs with common fallbacks', () => {
+ expect(getResultUrl({ ssl_url: 'https://cdn.example.com/file.jpg' })).toBe(
+ 'https://cdn.example.com/file.jpg',
);
expect(
getResultUrl({
- meta: { url: "https://cdn.example.com/meta.jpg" },
+ meta: { url: 'https://cdn.example.com/meta.jpg' },
}),
- ).toBe("https://cdn.example.com/meta.jpg");
+ ).toBe('https://cdn.example.com/meta.jpg');
});
- it("derives original keys from raw metadata", () => {
+ it('derives original keys from raw metadata', () => {
expect(
getResultOriginalKey({
- raw: { original_id: "orig_1" },
+ raw: { original_id: 'orig_1' },
}),
- ).toBe("orig_1");
+ ).toBe('orig_1');
expect(
getResultOriginalKey({
- raw: { original_basename: "photo.jpg" },
+ raw: { original_basename: 'photo.jpg' },
}),
- ).toBe("photo.jpg");
- expect(getResultOriginalKey({ name: "fallback.jpg" })).toBe("fallback.jpg");
+ ).toBe('photo.jpg');
+ expect(getResultOriginalKey({ name: 'fallback.jpg' })).toBe('fallback.jpg');
});
});
diff --git a/src/shared/resultUtils.ts b/src/shared/resultUtils.ts
index 0d32da1..19d1734 100644
--- a/src/shared/resultUtils.ts
+++ b/src/shared/resultUtils.ts
@@ -1,4 +1,4 @@
-import type { TransloaditResult } from "./resultTypes.ts";
+import type { TransloaditResult } from './resultTypes.ts';
const extractUrlFromContainer = (container: Record) => {
const candidates = [
@@ -15,7 +15,7 @@ const extractUrlFromContainer = (container: Record) => {
container.signedUrl,
];
for (const candidate of candidates) {
- if (typeof candidate === "string" && candidate.length > 0) {
+ if (typeof candidate === 'string' && candidate.length > 0) {
return candidate;
}
}
@@ -23,7 +23,7 @@ const extractUrlFromContainer = (container: Record) => {
};
const extractNestedUrl = (value: unknown) => {
- if (!value || typeof value !== "object" || Array.isArray(value)) {
+ if (!value || typeof value !== 'object' || Array.isArray(value)) {
return undefined;
}
return extractUrlFromContainer(value as Record);
@@ -33,7 +33,7 @@ export const getResultUrl = (result: TransloaditResult) => {
const direct = extractUrlFromContainer(result as Record);
if (direct) return direct;
- const nestedKeys = ["meta", "metadata", "result", "results", "file", "data"];
+ const nestedKeys = ['meta', 'metadata', 'result', 'results', 'file', 'data'];
for (const key of nestedKeys) {
const nested = extractNestedUrl((result as Record)[key]);
if (nested) return nested;
@@ -52,14 +52,14 @@ export const getResultUrl = (result: TransloaditResult) => {
export const getResultOriginalKey = (result: TransloaditResult) => {
const raw = (result as TransloaditResult & { raw?: unknown }).raw;
- if (raw && typeof raw === "object" && !Array.isArray(raw)) {
+ if (raw && typeof raw === 'object' && !Array.isArray(raw)) {
const rawRecord = raw as Record;
const originalId = rawRecord.original_id;
- if (typeof originalId === "string" && originalId.length > 0) {
+ if (typeof originalId === 'string' && originalId.length > 0) {
return originalId;
}
const originalBase = rawRecord.original_basename;
- if (typeof originalBase === "string" && originalBase.length > 0) {
+ if (typeof originalBase === 'string' && originalBase.length > 0) {
return originalBase;
}
}
diff --git a/src/shared/schemas.ts b/src/shared/schemas.ts
index 148ea23..e13d40e 100644
--- a/src/shared/schemas.ts
+++ b/src/shared/schemas.ts
@@ -1,5 +1,5 @@
-import type { AssemblyInstructionsInput } from "@transloadit/zod/v3/template";
-import { type Infer, v } from "convex/values";
+import type { AssemblyInstructionsInput } from '@transloadit/zod/v3/template';
+import { type Infer, v } from 'convex/values';
export const vAssemblyFields = {
assemblyId: v.string(),
@@ -34,7 +34,7 @@ export const vAssemblyResultFields = {
};
export const vAssembly = v.object({
- _id: v.id("assemblies"),
+ _id: v.id('assemblies'),
_creationTime: v.number(),
...vAssemblyFields,
});
@@ -50,7 +50,7 @@ export const vAssemblyResponse = v.object({
export type AssemblyResponse = Infer;
export const vAssemblyResult = v.object({
- _id: v.id("results"),
+ _id: v.id('results'),
_creationTime: v.number(),
...vAssemblyResultFields,
});
@@ -85,12 +85,9 @@ export const vAssemblyBaseArgs = {
export const vCreateAssemblyArgs = v.object(vAssemblyBaseArgs);
-export type CreateAssemblyArgs = Omit<
- Infer,
- "steps" | "fields"
-> & {
- steps?: AssemblyInstructionsInput["steps"];
- fields?: AssemblyInstructionsInput["fields"];
+export type CreateAssemblyArgs = Omit, 'steps' | 'fields'> & {
+ steps?: AssemblyInstructionsInput['steps'];
+ fields?: AssemblyInstructionsInput['fields'];
};
export const vCreateAssemblyReturn = v.object({
@@ -246,12 +243,9 @@ export const vBuildParamsOptions = v.object({
additionalParams: v.optional(v.record(v.string(), v.any())),
});
-export type BuildParamsOptions = Omit<
- Infer,
- "steps" | "fields"
-> & {
- steps?: AssemblyInstructionsInput["steps"];
- fields?: AssemblyInstructionsInput["fields"];
+export type BuildParamsOptions = Omit, 'steps' | 'fields'> & {
+ steps?: AssemblyInstructionsInput['steps'];
+ fields?: AssemblyInstructionsInput['fields'];
};
export const vBuildParamsResult = v.object({
diff --git a/src/test/index.ts b/src/test/index.ts
index 5b62e7b..003598c 100644
--- a/src/test/index.ts
+++ b/src/test/index.ts
@@ -1,9 +1,9 @@
///
-import { convexTest } from "convex-test";
-import schema from "../component/schema.ts";
+import { convexTest } from 'convex-test';
+import schema from '../component/schema.ts';
-export const modules = import.meta.glob("../component/**/*.*s");
+export const modules = import.meta.glob('../component/**/*.*s');
export function createTransloaditTest() {
return convexTest(schema, modules);
diff --git a/src/test/nodeModules.ts b/src/test/nodeModules.ts
index 2f0775b..e4967bc 100644
--- a/src/test/nodeModules.ts
+++ b/src/test/nodeModules.ts
@@ -1,19 +1,19 @@
-import * as apiModule from "../component/_generated/api.ts";
-import * as componentModule from "../component/_generated/component.ts";
-import * as dataModelModule from "../component/_generated/dataModel.ts";
-import * as serverModule from "../component/_generated/server.ts";
-import * as apiUtilsModule from "../component/apiUtils.ts";
-import * as convexConfigModule from "../component/convex.config.ts";
-import * as libModule from "../component/lib.ts";
-import * as schemaModule from "../component/schema.ts";
+import * as apiModule from '../component/_generated/api.ts';
+import * as componentModule from '../component/_generated/component.ts';
+import * as dataModelModule from '../component/_generated/dataModel.ts';
+import * as serverModule from '../component/_generated/server.ts';
+import * as apiUtilsModule from '../component/apiUtils.ts';
+import * as convexConfigModule from '../component/convex.config.ts';
+import * as libModule from '../component/lib.ts';
+import * as schemaModule from '../component/schema.ts';
export const modules: Record Promise> = {
- "../component/apiUtils.ts": async () => apiUtilsModule,
- "../component/lib.ts": async () => libModule,
- "../component/convex.config.ts": async () => convexConfigModule,
- "../component/schema.ts": async () => schemaModule,
- "../component/_generated/api.ts": async () => apiModule,
- "../component/_generated/component.ts": async () => componentModule,
- "../component/_generated/dataModel.ts": async () => dataModelModule,
- "../component/_generated/server.ts": async () => serverModule,
+ '../component/apiUtils.ts': async () => apiUtilsModule,
+ '../component/lib.ts': async () => libModule,
+ '../component/convex.config.ts': async () => convexConfigModule,
+ '../component/schema.ts': async () => schemaModule,
+ '../component/_generated/api.ts': async () => apiModule,
+ '../component/_generated/component.ts': async () => componentModule,
+ '../component/_generated/dataModel.ts': async () => dataModelModule,
+ '../component/_generated/server.ts': async () => serverModule,
};
diff --git a/test/e2e/support/diagnostics.ts b/test/e2e/support/diagnostics.ts
index d2e2959..34ecdcf 100644
--- a/test/e2e/support/diagnostics.ts
+++ b/test/e2e/support/diagnostics.ts
@@ -1,4 +1,4 @@
-import type { Page } from "@playwright/test";
+import type { Page } from '@playwright/test';
type DiagnosticsOptions = {
shouldTrackRequest: (url: string) => boolean;
@@ -19,43 +19,41 @@ export const attachBrowserDiagnostics = (
const requestFailures: string[] = [];
const requestLog: string[] = [];
- page.on("console", (message) => {
+ page.on('console', (message) => {
consoleMessages.push(`[${message.type()}] ${message.text()}`);
});
- page.on("pageerror", (error) => {
+ page.on('pageerror', (error) => {
consoleMessages.push(`[pageerror] ${error.message}`);
});
- page.on("requestfailed", (request) => {
+ page.on('requestfailed', (request) => {
const url = request.url();
if (shouldTrackRequest(url)) {
- requestFailures.push(`${url} ${request.failure()?.errorText ?? ""}`);
+ requestFailures.push(`${url} ${request.failure()?.errorText ?? ''}`);
}
});
- page.on("request", (request) => {
+ page.on('request', (request) => {
const url = request.url();
if (shouldTrackRequest(url)) {
requestLog.push(`${new Date().toISOString()} ${request.method()} ${url}`);
}
});
- page.on("response", (response) => {
+ page.on('response', (response) => {
const url = response.url();
if (shouldTrackRequest(url)) {
- requestLog.push(
- `${new Date().toISOString()} ${response.status()} ${url}`,
- );
+ requestLog.push(`${new Date().toISOString()} ${response.status()} ${url}`);
}
});
const dump = () => {
if (consoleMessages.length) {
- console.log("Browser console logs:", consoleMessages);
+ console.log('Browser console logs:', consoleMessages);
}
if (requestFailures.length) {
- console.log("Browser request failures:", requestFailures);
+ console.log('Browser request failures:', requestFailures);
}
if (requestLog.length) {
const tail = requestLog.slice(-200);
- console.log("Browser request log (last 200):", tail);
+ console.log('Browser request log (last 200):', tail);
}
};
diff --git a/test/e2e/support/example-app.ts b/test/e2e/support/example-app.ts
index 53caee0..6d6540d 100644
--- a/test/e2e/support/example-app.ts
+++ b/test/e2e/support/example-app.ts
@@ -1,9 +1,9 @@
-import { spawn } from "node:child_process";
-import { once } from "node:events";
-import { createServer } from "node:http";
-import { resolve } from "node:path";
-import { sleep } from "./sleep.js";
-import { startTunnel } from "./tunnel.js";
+import { spawn } from 'node:child_process';
+import { once } from 'node:events';
+import { createServer } from 'node:http';
+import { resolve } from 'node:path';
+import { sleep } from './sleep.js';
+import { startTunnel } from './tunnel.js';
type ExampleAppOptions = {
env: NodeJS.ProcessEnv;
@@ -17,28 +17,24 @@ type ExampleApp = {
const findOpenPort = async () => {
const server = createServer();
- server.listen(0, "127.0.0.1");
- await once(server, "listening");
+ server.listen(0, '127.0.0.1');
+ await once(server, 'listening');
const address = server.address();
- if (!address || typeof address === "string") {
+ if (!address || typeof address === 'string') {
server.close();
- throw new Error("Unable to determine a free port");
+ throw new Error('Unable to determine a free port');
}
const port = address.port;
server.close();
return port;
};
-const waitForReady = async (
- url: string,
- child: ReturnType,
- logs: string[],
-) => {
+const waitForReady = async (url: string, child: ReturnType, logs: string[]) => {
const deadline = Date.now() + 240_000;
const onData = (chunk: Buffer) => {
const text = chunk.toString();
text
- .split("\n")
+ .split('\n')
.filter(Boolean)
.forEach((line) => {
logs.push(line);
@@ -46,14 +42,12 @@ const waitForReady = async (
});
};
- child.stdout?.on("data", onData);
- child.stderr?.on("data", onData);
+ child.stdout?.on('data', onData);
+ child.stderr?.on('data', onData);
while (Date.now() < deadline) {
if (child.exitCode !== null) {
- throw new Error(
- `Next example exited early (${child.exitCode}).\n${logs.join("\n")}`,
- );
+ throw new Error(`Next example exited early (${child.exitCode}).\n${logs.join('\n')}`);
}
try {
const response = await fetch(url);
@@ -64,7 +58,7 @@ const waitForReady = async (
await sleep(500);
}
- throw new Error(`Next example did not start in time.\n${logs.join("\n")}`);
+ throw new Error(`Next example did not start in time.\n${logs.join('\n')}`);
};
const runCommand = async (
@@ -75,72 +69,56 @@ const runCommand = async (
) => {
const child = spawn(command, args, {
env,
- stdio: ["ignore", "pipe", "pipe"],
+ stdio: ['ignore', 'pipe', 'pipe'],
});
const logs: string[] = [];
const onData = (chunk: Buffer) => {
const text = chunk.toString();
text
- .split("\n")
+ .split('\n')
.filter(Boolean)
.forEach((line) => {
logs.push(line);
if (logs.length > 200) logs.shift();
});
};
- child.stdout?.on("data", onData);
- child.stderr?.on("data", onData);
+ child.stdout?.on('data', onData);
+ child.stderr?.on('data', onData);
const exitCode: number = await new Promise((resolveExit) => {
- child.once("exit", (code) => resolveExit(code ?? 1));
+ child.once('exit', (code) => resolveExit(code ?? 1));
});
if (exitCode !== 0) {
- throw new Error(`${label} failed (${exitCode}).\n${logs.join("\n")}`);
+ throw new Error(`${label} failed (${exitCode}).\n${logs.join('\n')}`);
}
};
-export const startExampleApp = async ({
- env,
-}: ExampleAppOptions): Promise => {
+export const startExampleApp = async ({ env }: ExampleAppOptions): Promise => {
const port = await findOpenPort();
const tunnel = await startTunnel(port);
- const notifyUrl =
- tunnel.info.notifyUrl ?? `${tunnel.info.url}/transloadit/webhook`;
+ const notifyUrl = tunnel.info.notifyUrl ?? `${tunnel.info.url}/transloadit/webhook`;
const nextEnv = {
...process.env,
- NEXT_TELEMETRY_DISABLED: "1",
+ NEXT_TELEMETRY_DISABLED: '1',
TRANSLOADIT_NOTIFY_URL: notifyUrl,
...env,
};
- if (env.E2E_MODE === "local") {
- nextEnv.NEXT_PUBLIC_CONVEX_URL = "";
- nextEnv.CONVEX_URL = "";
+ if (env.E2E_MODE === 'local') {
+ nextEnv.NEXT_PUBLIC_CONVEX_URL = '';
+ nextEnv.CONVEX_URL = '';
}
- await runCommand("yarn", ["build"], nextEnv, "Package build");
+ await runCommand('yarn', ['build'], nextEnv, 'Package build');
- const nextCli = resolve("node_modules/next/dist/bin/next");
- await runCommand(
- "node",
- [nextCli, "build", "example", "--webpack"],
- nextEnv,
- "Next build",
- );
+ const nextCli = resolve('node_modules/next/dist/bin/next');
+ await runCommand('node', [nextCli, 'build', 'example', '--webpack'], nextEnv, 'Next build');
const child = spawn(
- "node",
- [
- nextCli,
- "start",
- "example",
- "--hostname",
- "127.0.0.1",
- "--port",
- `${port}`,
- ],
+ 'node',
+ [nextCli, 'start', 'example', '--hostname', '127.0.0.1', '--port', `${port}`],
{
env: nextEnv,
- stdio: ["ignore", "pipe", "pipe"],
+ stdio: ['ignore', 'pipe', 'pipe'],
},
);
@@ -153,10 +131,10 @@ export const startExampleApp = async ({
child.kill();
await new Promise((resolvePromise) => {
const fallback = setTimeout(() => {
- child.kill("SIGKILL");
+ child.kill('SIGKILL');
resolvePromise(null);
}, 3000);
- child.once("exit", () => {
+ child.once('exit', () => {
clearTimeout(fallback);
resolvePromise(null);
});
@@ -167,10 +145,10 @@ export const startExampleApp = async ({
tunnel.process.kill();
await new Promise((resolvePromise) => {
const fallback = setTimeout(() => {
- tunnel.process.kill("SIGKILL");
+ tunnel.process.kill('SIGKILL');
resolvePromise(null);
}, 3000);
- tunnel.process.once("exit", () => {
+ tunnel.process.once('exit', () => {
clearTimeout(fallback);
resolvePromise(null);
});
diff --git a/test/e2e/support/runtime.ts b/test/e2e/support/runtime.ts
index b0f1f52..cc36deb 100644
--- a/test/e2e/support/runtime.ts
+++ b/test/e2e/support/runtime.ts
@@ -1,4 +1,4 @@
-type Mode = "local" | "cloud";
+type Mode = 'local' | 'cloud';
export type RuntimeConfig = {
authKey: string;
@@ -10,13 +10,12 @@ export type RuntimeConfig = {
};
export const getRuntimeConfig = (): RuntimeConfig => {
- const authKey = process.env.TRANSLOADIT_KEY ?? "";
- const authSecret = process.env.TRANSLOADIT_SECRET ?? "";
- const modeEnv = process.env.E2E_MODE ?? "local";
- const mode: Mode = modeEnv === "cloud" ? "cloud" : "local";
- const useRemote = mode === "cloud";
- const remoteAppUrl =
- process.env.E2E_REMOTE_APP_URL ?? process.env.E2E_APP_URL ?? "";
+ const authKey = process.env.TRANSLOADIT_KEY ?? '';
+ const authSecret = process.env.TRANSLOADIT_SECRET ?? '';
+ const modeEnv = process.env.E2E_MODE ?? 'local';
+ const mode: Mode = modeEnv === 'cloud' ? 'cloud' : 'local';
+ const useRemote = mode === 'cloud';
+ const remoteAppUrl = process.env.E2E_REMOTE_APP_URL ?? process.env.E2E_APP_URL ?? '';
const shouldRun = useRemote || Boolean(authKey && authSecret);
return {
authKey,
diff --git a/test/e2e/support/tunnel.ts b/test/e2e/support/tunnel.ts
index 7010935..84c4128 100644
--- a/test/e2e/support/tunnel.ts
+++ b/test/e2e/support/tunnel.ts
@@ -1,5 +1,5 @@
-import { spawn } from "node:child_process";
-import { resolve } from "node:path";
+import { spawn } from 'node:child_process';
+import { resolve } from 'node:path';
export type TunnelInfo = {
url: string;
@@ -8,13 +8,13 @@ export type TunnelInfo = {
const startTunnelOnce = (port: number) => {
const process = spawn(
- "node",
- [resolve("scripts/start-webhook-tunnel.ts"), "--json", "--port", `${port}`],
- { stdio: ["ignore", "pipe", "pipe"] },
+ 'node',
+ [resolve('scripts/start-webhook-tunnel.ts'), '--json', '--port', `${port}`],
+ { stdio: ['ignore', 'pipe', 'pipe'] },
);
const info = new Promise((resolvePromise, reject) => {
- let buffer = "";
+ let buffer = '';
const logs: string[] = [];
let timeoutId: ReturnType | undefined;
@@ -24,11 +24,11 @@ const startTunnelOnce = (port: number) => {
const leftover = buffer.trim();
if (leftover) {
logs.push(leftover);
- buffer = "";
+ buffer = '';
}
}
if (error) {
- const details = logs.length ? `\n${logs.join("\n")}` : "";
+ const details = logs.length ? `\n${logs.join('\n')}` : '';
reject(new Error(`${error.message}${details}`));
return;
}
@@ -36,12 +36,12 @@ const startTunnelOnce = (port: number) => {
};
timeoutId = setTimeout(() => {
- finish(new Error("Timed out waiting for webhook tunnel URL"));
+ finish(new Error('Timed out waiting for webhook tunnel URL'));
}, 90_000);
const onData = (chunk: Buffer) => {
buffer += chunk.toString();
- let newlineIndex = buffer.indexOf("\n");
+ let newlineIndex = buffer.indexOf('\n');
while (newlineIndex !== -1) {
const line = buffer.slice(0, newlineIndex).trim();
buffer = buffer.slice(newlineIndex + 1);
@@ -53,14 +53,14 @@ const startTunnelOnce = (port: number) => {
logs.push(line);
}
}
- newlineIndex = buffer.indexOf("\n");
+ newlineIndex = buffer.indexOf('\n');
}
};
- process.stdout?.on("data", onData);
- process.stderr?.on("data", onData);
- process.on("error", (error) => finish(error));
- process.on("exit", (code) => {
+ process.stdout?.on('data', onData);
+ process.stderr?.on('data', onData);
+ process.on('error', (error) => finish(error));
+ process.on('exit', (code) => {
if (code && code !== 0) {
finish(new Error(`Webhook tunnel exited with code ${code}`));
}
@@ -82,5 +82,5 @@ export async function startTunnel(port: number) {
lastError = error instanceof Error ? error : new Error(String(error));
}
}
- throw lastError ?? new Error("Failed to start webhook tunnel");
+ throw lastError ?? new Error('Failed to start webhook tunnel');
}
diff --git a/test/e2e/upload.e2e.test.ts b/test/e2e/upload.e2e.test.ts
index def2af4..0f762c7 100644
--- a/test/e2e/upload.e2e.test.ts
+++ b/test/e2e/upload.e2e.test.ts
@@ -1,11 +1,11 @@
-import { existsSync } from "node:fs";
-import { join, resolve } from "node:path";
-import { chromium } from "@playwright/test";
-import { afterAll, beforeAll, describe, expect, test } from "vitest";
-import { attachBrowserDiagnostics } from "./support/diagnostics.js";
-import { startExampleApp } from "./support/example-app.js";
-import { runtime } from "./support/runtime.js";
-import { sleep } from "./support/sleep.js";
+import { existsSync } from 'node:fs';
+import { join, resolve } from 'node:path';
+import { chromium } from '@playwright/test';
+import { afterAll, beforeAll, describe, expect, test } from 'vitest';
+import { attachBrowserDiagnostics } from './support/diagnostics.js';
+import { startExampleApp } from './support/example-app.js';
+import { runtime } from './support/runtime.js';
+import { sleep } from './support/sleep.js';
const { authKey, authSecret, useRemote, remoteAppUrl, shouldRun } = runtime;
@@ -18,35 +18,33 @@ type DebugUppy = {
};
};
-const fixturesDir = resolve("test/e2e/fixtures");
+const fixturesDir = resolve('test/e2e/fixtures');
const describeE2e = shouldRun ? describe : describe.skip;
-describeE2e("e2e upload flow", () => {
+describeE2e('e2e upload flow', () => {
const timeouts = {
outcome: 180_000,
results: 180_000,
refresh: 240_000,
};
- const vercelBypassToken = process.env.VERCEL_PROTECTION_BYPASS ?? "";
- const remoteConvexUrl = process.env.E2E_REMOTE_CONVEX_URL ?? "";
- let serverUrl = "";
+ const vercelBypassToken = process.env.VERCEL_PROTECTION_BYPASS ?? '';
+ const remoteConvexUrl = process.env.E2E_REMOTE_CONVEX_URL ?? '';
+ let serverUrl = '';
 let app: Awaited<ReturnType<typeof startExampleApp>> | null = null;
beforeAll(async () => {
if (useRemote) {
if (!remoteAppUrl) {
- throw new Error("Missing E2E_REMOTE_APP_URL for cloud e2e run");
+ throw new Error('Missing E2E_REMOTE_APP_URL for cloud e2e run');
}
if (!vercelBypassToken) {
- throw new Error(
- "Missing VERCEL_PROTECTION_BYPASS for cloud preview access",
- );
+ throw new Error('Missing VERCEL_PROTECTION_BYPASS for cloud preview access');
}
- const parsed = new URL(remoteAppUrl.replace(/\/$/, ""));
- parsed.searchParams.set("__vercel_protection_bypass", vercelBypassToken);
+ const parsed = new URL(remoteAppUrl.replace(/\/$/, ''));
+ parsed.searchParams.set('__vercel_protection_bypass', vercelBypassToken);
if (remoteConvexUrl) {
- parsed.searchParams.set("convexUrl", remoteConvexUrl);
+ parsed.searchParams.set('convexUrl', remoteConvexUrl);
}
serverUrl = parsed.toString();
return;
@@ -54,7 +52,7 @@ describeE2e("e2e upload flow", () => {
app = await startExampleApp({
env: {
- E2E_MODE: "local",
+ E2E_MODE: 'local',
TRANSLOADIT_KEY: authKey,
TRANSLOADIT_SECRET: authSecret,
TRANSLOADIT_R2_CREDENTIALS: process.env.TRANSLOADIT_R2_CREDENTIALS,
@@ -76,21 +74,21 @@ describeE2e("e2e upload flow", () => {
}
});
- test("uploads wedding photos and videos", async () => {
+ test('uploads wedding photos and videos', async () => {
const browser = await chromium.launch();
const page = await browser.newPage();
const appOrigin = useRemote ? new URL(serverUrl).origin : serverUrl;
const shouldTrackRequest = (url: string) =>
- url.includes("transloadit") ||
- url.includes("resumable") ||
- url.includes("convex.cloud") ||
- url.includes("convex.site") ||
+ url.includes('transloadit') ||
+ url.includes('resumable') ||
+ url.includes('convex.cloud') ||
+ url.includes('convex.site') ||
(appOrigin ? url.startsWith(appOrigin) : false);
const diagnostics = attachBrowserDiagnostics(page, { shouldTrackRequest });
try {
if (useRemote && vercelBypassToken) {
- await page.route("**/*", async (route) => {
+ await page.route('**/*', async (route) => {
const url = route.request().url();
if (!url.startsWith(appOrigin)) {
await route.continue();
@@ -98,15 +96,15 @@ describeE2e("e2e upload flow", () => {
}
const headers = {
...route.request().headers(),
- "x-vercel-protection-bypass": vercelBypassToken,
- "x-vercel-set-bypass-cookie": "true",
+ 'x-vercel-protection-bypass': vercelBypassToken,
+ 'x-vercel-set-bypass-cookie': 'true',
};
await route.continue({ headers });
});
}
const navigation = await page.goto(serverUrl, {
- waitUntil: "domcontentloaded",
+ waitUntil: 'domcontentloaded',
});
if (useRemote) {
@@ -118,25 +116,25 @@ describeE2e("e2e upload flow", () => {
const title = await page.title().catch(() => null);
const status = navigation?.status() ?? null;
const authState = await page
- .getAttribute("main.page", "data-auth-state")
+ .getAttribute('main.page', 'data-auth-state')
.catch(() => null);
const headingText = await page
- .locator("h1, h2")
+ .locator('h1, h2')
.first()
.textContent()
.catch(() => null);
const headline = await page
- .locator(".headline")
+ .locator('.headline')
.first()
.textContent()
.catch(() => null);
const bodyHtml = await page
- .evaluate(() => document.body?.outerHTML ?? "")
- .catch(() => "");
+ .evaluate(() => document.body?.outerHTML ?? '')
+ .catch(() => '');
const bodyHtmlSnippet = bodyHtml.slice(0, 1000);
const bodyTextSnippet = await page
- .evaluate(() => document.body?.innerText?.slice(0, 500) ?? "")
- .catch(() => "");
+ .evaluate(() => document.body?.innerText?.slice(0, 500) ?? '')
+ .catch(() => '');
const authStorage = await page
.evaluate(() => {
const entries: Array<{ key: string; value: string | null }> = [];
@@ -144,17 +142,17 @@ describeE2e("e2e upload flow", () => {
for (let index = 0; index < localStorage.length; index += 1) {
const key = localStorage.key(index);
if (!key) continue;
- if (!key.includes("__convexAuth")) continue;
+ if (!key.includes('__convexAuth')) continue;
entries.push({ key, value: localStorage.getItem(key) });
}
} catch {
- return { error: "localStorage unavailable" };
+ return { error: 'localStorage unavailable' };
}
return entries;
})
- .catch(() => "localStorage read failed");
- const hasVercelProtection = bodyHtml.includes("Vercel");
- console.log("Cloud auth wait failed.", {
+ .catch(() => 'localStorage read failed');
+ const hasVercelProtection = bodyHtml.includes('Vercel');
+ console.log('Cloud auth wait failed.', {
title,
status,
authState,
@@ -170,27 +168,27 @@ describeE2e("e2e upload flow", () => {
}
}
- const imagePath = join(fixturesDir, "wedding-photo-01.png");
- const imagePathAlt = join(fixturesDir, "wedding-photo-02.png");
- const videoPath = join(fixturesDir, "wedding-video-01.mp4");
+ const imagePath = join(fixturesDir, 'wedding-photo-01.png');
+ const imagePathAlt = join(fixturesDir, 'wedding-photo-02.png');
+ const videoPath = join(fixturesDir, 'wedding-video-01.mp4');
if (!existsSync(imagePath) || !existsSync(imagePathAlt)) {
- throw new Error("Missing wedding photo fixtures for e2e run");
+ throw new Error('Missing wedding photo fixtures for e2e run');
}
if (!existsSync(videoPath)) {
- throw new Error("Missing wedding video fixture for e2e run");
+ throw new Error('Missing wedding video fixture for e2e run');
}
await page.waitForSelector('[data-testid="uppy-dashboard"]', {
- state: "attached",
+ state: 'attached',
});
const fileInput = page.locator(
'[data-testid="uppy-dashboard"] input.uppy-Dashboard-input[name="files[]"]:not([webkitdirectory])',
);
- await fileInput.waitFor({ state: "attached" });
+ await fileInput.waitFor({ state: 'attached' });
await fileInput.setInputFiles([imagePath, imagePathAlt, videoPath]);
await page.waitForFunction(
- () => document.querySelectorAll(".uppy-Dashboard-Item").length >= 2,
+ () => document.querySelectorAll('.uppy-Dashboard-Item').length >= 2,
undefined,
{ timeout: 20_000 },
);
@@ -208,12 +206,12 @@ describeE2e("e2e upload flow", () => {
while (Date.now() < deadline) {
const assemblyText = await readText('[data-testid="assembly-id"]');
if (assemblyText) {
- return { type: "assembly", text: assemblyText };
+ return { type: 'assembly', text: assemblyText };
}
const uploadError = await readText('[data-testid="upload-error"]');
if (uploadError) {
- return { type: "error", text: uploadError };
+ return { type: 'error', text: uploadError };
}
await page.waitForTimeout(1000);
@@ -224,35 +222,34 @@ describeE2e("e2e upload flow", () => {
const outcome = await waitForOutcome();
if (!outcome) {
- throw new Error("Timed out waiting for upload outcome");
+ throw new Error('Timed out waiting for upload outcome');
}
- if (outcome.type !== "assembly") {
+ if (outcome.type !== 'assembly') {
throw new Error(`Upload failed: ${outcome.text}`);
}
const assemblyText = outcome.text;
- const assemblyId = assemblyText?.replace("ID:", "").trim() ?? "";
- expect(assemblyId).not.toBe("");
+ const assemblyId = assemblyText?.replace('ID:', '').trim() ?? '';
+ expect(assemblyId).not.toBe('');
const readGalleryReady = async (targetAssemblyId: string) =>
page.evaluate((assemblyId) => {
const cards = Array.from(
- document.querySelectorAll("[data-assembly-id]"),
+ document.querySelectorAll('[data-assembly-id]'),
).filter((card) => card.dataset.assemblyId === assemblyId);
const imgs = cards.flatMap((card) =>
- Array.from(card.querySelectorAll("img")),
+ Array.from(card.querySelectorAll('img')),
);
const vids = cards.flatMap((card) =>
- Array.from(card.querySelectorAll("video")),
+ Array.from(card.querySelectorAll('video')),
);
- const imagesReady =
- imgs.length > 0 && imgs.every((img) => img.complete);
+ const imagesReady = imgs.length > 0 && imgs.every((img) => img.complete);
const videosReady =
vids.length > 0 &&
vids.every((video) => {
- const src = video.getAttribute("src");
+ const src = video.getAttribute('src');
if (src && src.length > 0) return true;
- const poster = video.getAttribute("poster");
+ const poster = video.getAttribute('poster');
return Boolean(poster && poster.length > 0);
});
return {
@@ -269,11 +266,11 @@ describeE2e("e2e upload flow", () => {
const text = await readText('[data-testid="assembly-status"]');
if (text) {
lastStatus = text;
- if (text.includes("ASSEMBLY_COMPLETED")) return;
+ if (text.includes('ASSEMBLY_COMPLETED')) return;
if (
- text.includes("ASSEMBLY_FAILED") ||
- text.includes("ASSEMBLY_CANCELED") ||
- text.includes("ASSEMBLY_ABORTED")
+ text.includes('ASSEMBLY_FAILED') ||
+ text.includes('ASSEMBLY_CANCELED') ||
+ text.includes('ASSEMBLY_ABORTED')
) {
throw new Error(`Assembly ended unsuccessfully: ${text}`);
}
@@ -285,7 +282,7 @@ describeE2e("e2e upload flow", () => {
await sleep(2000);
}
throw new Error(
- `Timed out waiting for assembly completion. Last status: ${lastStatus ?? "unknown"}`,
+ `Timed out waiting for assembly completion. Last status: ${lastStatus ?? 'unknown'}`,
);
};
@@ -303,7 +300,7 @@ describeE2e("e2e upload flow", () => {
if (ready.imagesReady && ready.videosReady) return;
await sleep(1000);
}
- throw new Error("Timed out waiting for gallery media to load");
+ throw new Error('Timed out waiting for gallery media to load');
};
await waitForAssemblyMedia(assemblyId);
@@ -316,21 +313,19 @@ describeE2e("e2e upload flow", () => {
const state = uppy.getState?.() ?? {};
return {
fileCount: uppy.getFiles?.().length ?? 0,
- hasTusPlugin: Boolean(uppy.getPlugin?.("Tus")),
- tusEndpoint: uppy.getPlugin?.("Tus")?.opts?.endpoint ?? null,
+ hasTusPlugin: Boolean(uppy.getPlugin?.('Tus')),
+ tusEndpoint: uppy.getPlugin?.('Tus')?.opts?.endpoint ?? null,
uploadState: state.uploads ?? null,
currentUploads: state.currentUploads ?? null,
files: uppy.getFiles?.().map((file) => ({
- id: (file as { id?: string }).id ?? "",
- tusEndpoint:
- (file as { tus?: { endpoint?: string | null } }).tus
- ?.endpoint ?? null,
+ id: (file as { id?: string }).id ?? '',
+ tusEndpoint: (file as { tus?: { endpoint?: string | null } }).tus?.endpoint ?? null,
})),
};
})
.catch(() => null);
if (uppyState) {
- console.log("Uppy state:", uppyState);
+ console.log('Uppy state:', uppyState);
}
throw error;
} finally {
diff --git a/vitest.config.ts b/vitest.config.ts
index 59cdf58..5c2c245 100644
--- a/vitest.config.ts
+++ b/vitest.config.ts
@@ -1,8 +1,8 @@
-import { defineConfig } from "vitest/config";
+import { defineConfig } from 'vitest/config';
export default defineConfig({
test: {
- environment: "edge-runtime",
- exclude: ["node_modules/**", "dist/**", "test/e2e/**"],
+ environment: 'edge-runtime',
+ exclude: ['node_modules/**', 'dist/**', 'test/e2e/**'],
},
});
diff --git a/vitest.e2e.config.ts b/vitest.e2e.config.ts
index 2d0eb69..a639160 100644
--- a/vitest.e2e.config.ts
+++ b/vitest.e2e.config.ts
@@ -1,9 +1,9 @@
-import { defineConfig } from "vitest/config";
+import { defineConfig } from 'vitest/config';
export default defineConfig({
test: {
- environment: "node",
- include: ["test/e2e/**/*.test.ts"],
+ environment: 'node',
+ include: ['test/e2e/**/*.test.ts'],
testTimeout: 600_000,
hookTimeout: 600_000,
},