Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/check-prisma-migrations.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -58,4 +58,4 @@ jobs:
run: pnpm run db:init

- name: Check for differences in Prisma schema and current DB
run: cd apps/backend && pnpm run prisma migrate diff --from-config-datasource --to-schema ./prisma/schema.prisma --exit-code
run: cd apps/backend && pnpm run prisma:dev migrate diff --from-config-datasource --to-schema ./prisma/schema.prisma --exit-code
36 changes: 19 additions & 17 deletions apps/backend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,38 +6,40 @@
"scripts": {
"clean": "rimraf src/generated && rimraf .next && rimraf node_modules",
"typecheck": "tsc --noEmit",
"with-env": "dotenv -c development --",
"with-env:prod": "dotenv -c --",
"with-env": "dotenv -c --",
Copy link

Copilot AI Dec 30, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The script commands have been updated from "with-env" to "with-env:dev" or "with-env:prod", but the base "with-env" command now uses "dotenv -c --" without specifying development or production. This changes the default behavior. Ensure this is intentional and that all scripts that relied on the previous "with-env" behavior are updated appropriately.

Suggested change
"with-env": "dotenv -c --",
"with-env": "dotenv -c development --",

Copilot uses AI. Check for mistakes.
"with-env:dev": "dotenv -c development --",
"with-env:prod": "dotenv -c production --",
"dev": "concurrently -n \"dev,codegen,prisma-studio,email-queue\" -k \"next dev --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}02\" \"pnpm run codegen:watch\" \"pnpm run prisma-studio\" \"pnpm run run-email-queue\"",
"build": "pnpm run codegen && next build",
"docker-build": "pnpm run codegen && next build --experimental-build-mode compile",
"build-self-host-migration-script": "tsup --config scripts/db-migrations.tsup.config.ts",
"analyze-bundle": "next experimental-analyze",
"start": "next start --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}02",
"codegen-prisma": "pnpm run prisma generate",
"codegen-prisma:watch": "pnpm run prisma generate --watch",
"codegen-prisma": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-placeholder-database-connection-string}\" pnpm run prisma generate",
"codegen-prisma:watch": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-placeholder-database-connection-string}\" pnpm run prisma generate --watch",
Comment on lines +18 to +19
Copy link

Copilot AI Dec 30, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The placeholder value "placeholder-database-connection-string" might cause confusion or issues if Prisma attempts to use it. Consider using a more explicit placeholder like "postgresql://placeholder" or documenting why this approach is used, especially since this value could appear in error messages.

Suggested change
"codegen-prisma": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-placeholder-database-connection-string}\" pnpm run prisma generate",
"codegen-prisma:watch": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-placeholder-database-connection-string}\" pnpm run prisma generate --watch",
"codegen-prisma": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-postgresql://placeholder}\" pnpm run prisma generate",
"codegen-prisma:watch": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-postgresql://placeholder}\" pnpm run prisma generate --watch",

Copilot uses AI. Check for mistakes.
"codegen-route-info": "pnpm run with-env tsx scripts/generate-route-info.ts",
"codegen-route-info:watch": "pnpm run with-env tsx watch --clear-screen=false scripts/generate-route-info.ts",
"codegen": "pnpm run with-env pnpm run generate-migration-imports && pnpm run with-env bash -c 'if [ \"$STACK_ACCELERATE_ENABLED\" = \"true\" ]; then pnpm run prisma generate --no-engine; else pnpm run codegen-prisma; fi' && pnpm run codegen-route-info",
"codegen:watch": "concurrently -n \"prisma,docs,route-info,migration-imports\" -k \"pnpm run codegen-prisma:watch\" \"pnpm run watch-docs\" \"pnpm run codegen-route-info:watch\" \"pnpm run generate-migration-imports:watch\"",
"codegen": "pnpm run with-env pnpm run generate-migration-imports && pnpm run with-env bash -c 'if [ \"$STACK_ACCELERATE_ENABLED\" = \"true\" ]; then pnpm run prisma generate --no-engine; else pnpm run codegen-prisma; fi' && pnpm run codegen-docs && pnpm run codegen-route-info",
"codegen:watch": "concurrently -n \"prisma,docs,route-info,migration-imports\" -k \"pnpm run codegen-prisma:watch\" \"pnpm run codegen-docs:watch\" \"pnpm run codegen-route-info:watch\" \"pnpm run generate-migration-imports:watch\"",
"psql-inner": "psql $(echo $STACK_DATABASE_CONNECTION_STRING | sed 's/\\?.*$//')",
"psql": "pnpm run with-env pnpm run psql-inner",
"prisma-studio": "pnpm run with-env prisma studio --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}06 --browser none",
"psql": "pnpm run with-env:dev pnpm run psql-inner",
"prisma-studio": "pnpm run with-env:dev prisma studio --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}06 --browser none",
"prisma:dev": "pnpm run with-env:dev prisma",
"prisma": "pnpm run with-env prisma",
"db:migration-gen": "pnpm run with-env tsx scripts/db-migrations.ts generate-migration-file",
"db:reset": "pnpm run with-env tsx scripts/db-migrations.ts reset",
"db:seed": "pnpm run with-env tsx scripts/db-migrations.ts seed",
"db:init": "pnpm run with-env tsx scripts/db-migrations.ts init",
"db:migrate": "pnpm run with-env tsx scripts/db-migrations.ts migrate",
"db:migration-gen": "pnpm run with-env:dev tsx scripts/db-migrations.ts generate-migration-file",
"db:reset": "pnpm run with-env:dev tsx scripts/db-migrations.ts reset",
"db:seed": "pnpm run with-env:dev tsx scripts/db-migrations.ts seed",
"db:init": "pnpm run with-env:dev tsx scripts/db-migrations.ts init",
"db:migrate": "pnpm run with-env:dev tsx scripts/db-migrations.ts migrate",
"generate-migration-imports": "pnpm run with-env tsx scripts/generate-migration-imports.ts",
"generate-migration-imports:watch": "chokidar 'prisma/migrations/**/*.sql' -c 'pnpm run generate-migration-imports'",
"lint": "eslint .",
"watch-docs": "pnpm run with-env tsx watch --exclude '**/node_modules/**' --clear-screen=false scripts/generate-openapi-fumadocs.ts",
"generate-openapi-fumadocs": "pnpm run with-env tsx scripts/generate-openapi-fumadocs.ts",
"codegen-docs": "pnpm run with-env tsx scripts/generate-openapi-fumadocs.ts",
"codegen-docs:watch": "pnpm run with-env tsx watch --exclude '**/node_modules/**' --clear-screen=false scripts/generate-openapi-fumadocs.ts",
"generate-keys": "pnpm run with-env tsx scripts/generate-keys.ts",
"db-seed-script": "pnpm run db:seed",
"verify-data-integrity": "pnpm run with-env tsx scripts/verify-data-integrity.ts",
"run-email-queue": "pnpm run with-env tsx scripts/run-email-queue.ts"
"verify-data-integrity": "pnpm run with-env:dev tsx scripts/verify-data-integrity.ts",
"run-email-queue": "pnpm run with-env:dev tsx scripts/run-email-queue.ts"
},
"prisma": {
"seed": "pnpm run db-seed-script"
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
-- CreateIndex
-- Partial index for emails currently being rendered (finishedRenderingAt is NULL)
-- Indexed by startedRenderingAt to efficiently query in-progress rendering jobs
-- Partial indexes keep these small: rows drop out once rendering finishes.
CREATE INDEX "EmailOutbox_rendering_in_progress_idx"
ON "EmailOutbox" ("startedRenderingAt")
WHERE "finishedRenderingAt" IS NULL;

-- CreateIndex
-- Partial index for emails currently being sent (finishedSendingAt is NULL)
-- Indexed by startedSendingAt to efficiently query in-progress sending jobs
CREATE INDEX "EmailOutbox_sending_in_progress_idx"
ON "EmailOutbox" ("startedSendingAt")
WHERE "finishedSendingAt" IS NULL;

-- CreateIndex
-- Index for looking up team members by user and selection status
-- NOTE(review): the Prisma schema also declares a UNIQUE constraint on
-- (tenancyId, projectUserId, isSelected); its backing unique index covers the
-- exact same column list, so this extra index may be redundant -- confirm it
-- is intentionally separate before shipping.
CREATE INDEX "TeamMember_projectUserId_isSelected_idx"
ON "TeamMember" ("tenancyId", "projectUserId", "isSelected");

-- CreateIndex
-- Index for looking up projects by owner team
CREATE INDEX "Project_ownerTeamId_idx"
ON "Project" ("ownerTeamId");

3 changes: 3 additions & 0 deletions apps/backend/prisma/schema.prisma
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@ model Project {
provisionedProject ProvisionedProject?
tenancies Tenancy[]
environmentConfigOverrides EnvironmentConfigOverride[]

@@index([ownerTeamId], map: "Project_ownerTeamId_idx")
}

model Tenancy {
Expand Down Expand Up @@ -127,6 +129,7 @@ model TeamMember {

@@id([tenancyId, projectUserId, teamId])
@@unique([tenancyId, projectUserId, isSelected])
@@index([tenancyId, projectUserId, isSelected], map: "TeamMember_projectUserId_isSelected_idx")
}

model ProjectUserDirectPermission {
Expand Down
4 changes: 3 additions & 1 deletion apps/backend/scripts/run-email-queue.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,14 @@ async function main() {

const baseUrl = `http://localhost:${getEnvVariable('NEXT_PUBLIC_STACK_PORT_PREFIX', '81')}02`;

// Wait a few seconds to make sure the server is fully started
await wait(5_000);
Comment on lines +11 to +12
Copy link

Copilot AI Dec 30, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The wait time of 5 seconds is hardcoded. This magic number should be extracted to a named constant with a descriptive name like "SERVER_STARTUP_DELAY_MS" to improve code readability and make it easier to adjust if needed.

Copilot uses AI. Check for mistakes.

const run = () => runAsynchronously(async () => {
// If the server is restarted, then the existing email queue step may be cancelled prematurely. That's why we
// have an extra loop here to detect and restart the email queue step if it completes too quickly.
const startTime = performance.now();
while (true) {

console.log("Running email queue step...");
const res = await fetch(`${baseUrl}/api/latest/internal/email-queue-step`, {
method: "GET",
Expand Down
13 changes: 0 additions & 13 deletions apps/backend/src/app/api/latest/internal/config/crud.tsx

This file was deleted.

46 changes: 0 additions & 46 deletions apps/backend/src/app/api/latest/internal/config/override/crud.tsx

This file was deleted.

57 changes: 55 additions & 2 deletions apps/backend/src/app/api/latest/internal/config/override/route.tsx
Original file line number Diff line number Diff line change
@@ -1,3 +1,56 @@
import { configOverridesCrudHandlers } from "./crud";
import { overrideEnvironmentConfigOverride } from "@/lib/config";
import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler";
import { environmentConfigSchema, getConfigOverrideErrors, migrateConfigOverride } from "@stackframe/stack-shared/dist/config/schema";
import { adaptSchema, adminAuthTypeSchema, yupNumber, yupObject, yupString } from "@stackframe/stack-shared/dist/schema-fields";
import { StatusError } from "@stackframe/stack-shared/dist/utils/errors";

export const PATCH = configOverridesCrudHandlers.updateHandler;
// Admin endpoint: replaces the environment config override for the current
// project/branch with the JSON payload supplied in `config_override_string`.
// Validates the JSON syntactically and against the environment config schema
// before persisting; replaces the former CRUD-handler implementation
// (presumably ./crud -- confirm against the PR's deleted files).
export const PATCH = createSmartRouteHandler({
  metadata: {
    summary: 'Update the config',
    description: 'Update the config for a project and branch with an override',
    tags: ['Config'],
  },
  request: yupObject({
    auth: yupObject({
      type: adminAuthTypeSchema,
      tenancy: adaptSchema,
    }).defined(),
    body: yupObject({
      // Optional: omitting it turns the request into a no-op that still returns 200.
      config_override_string: yupString().optional(),
    }).defined(),
  }),
  response: yupObject({
    statusCode: yupNumber().oneOf([200]).defined(),
    bodyType: yupString().oneOf(["success"]).defined(),
  }),
  handler: async (req) => {
    // NOTE(review): truthiness check means an empty string ("") is silently
    // treated as "no override supplied" rather than rejected as invalid JSON --
    // confirm this is intended.
    if (req.body.config_override_string) {
      let parsedConfig;
      try {
        parsedConfig = JSON.parse(req.body.config_override_string);
      } catch (e) {
        // Only malformed JSON becomes a 400; anything unexpected is rethrown.
        if (e instanceof SyntaxError) {
          throw new StatusError(StatusError.BadRequest, 'Invalid config JSON');
        }
        throw e;
      }

      // TODO instead of doing this check here, we should change overrideEnvironmentConfigOverride to return the errors from its ensureNoConfigOverrideErrors call
      const overrideError = await getConfigOverrideErrors(environmentConfigSchema, migrateConfigOverride("environment", parsedConfig));
      if (overrideError.status === "error") {
        throw new StatusError(StatusError.BadRequest, overrideError.error);
      }

      // Persist the (already validated) override for this project/branch.
      await overrideEnvironmentConfigOverride({
        projectId: req.auth.tenancy.project.id,
        branchId: req.auth.tenancy.branchId,
        environmentConfigOverrideOverride: parsedConfig,
      });
    }

    return {
      statusCode: 200,
      bodyType: "success",
    };
  },
});
33 changes: 31 additions & 2 deletions apps/backend/src/app/api/latest/internal/config/route.tsx
Original file line number Diff line number Diff line change
@@ -1,3 +1,32 @@
import { configCrudHandlers } from "./crud";
import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler";
import { adaptSchema, adminAuthTypeSchema, yupNumber, yupObject, yupString } from "@stackframe/stack-shared/dist/schema-fields";

export const GET = configCrudHandlers.readHandler;
// Admin endpoint: returns the fully-rendered config of the current
// project/branch as a JSON-encoded string.
export const GET = createSmartRouteHandler({
  metadata: {
    summary: 'Get the config',
    description: 'Get the config for a project and branch',
    tags: ['Config'],
  },
  request: yupObject({
    auth: yupObject({
      type: adminAuthTypeSchema,
      tenancy: adaptSchema,
    }).defined(),
  }),
  response: yupObject({
    statusCode: yupNumber().oneOf([200]).defined(),
    bodyType: yupString().oneOf(["json"]).defined(),
    body: yupObject({
      config_string: yupString().defined(),
    }).defined(),
  }),
  async handler(req) {
    // Serialize the tenancy's rendered config so the caller gets it verbatim
    // as a string (rather than a re-validated JSON object).
    const serializedConfig = JSON.stringify(req.auth.tenancy.config);
    return {
      statusCode: 200,
      bodyType: "json",
      body: { config_string: serializedConfig },
    };
  },
});
86 changes: 86 additions & 0 deletions apps/backend/src/app/dev-stats/api/route.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
import {
clearRequestStats,
getAggregateStats,
getMostCommonRequests,
getMostTimeConsumingRequests,
getSlowestRequests,
} from "@/lib/dev-request-stats";
import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler";
import { yupArray, yupNumber, yupObject, yupString } from "@stackframe/stack-shared/dist/schema-fields";
import { getNodeEnvironment } from "@stackframe/stack-shared/dist/utils/env";
import { StatusError } from "@stackframe/stack-shared/dist/utils/errors";

// Yup schema for one per-endpoint request statistic as produced by
// @/lib/dev-request-stats. Field names suggest times are in milliseconds and
// lastCalledAt is a numeric timestamp -- confirm against dev-request-stats.
const requestStatSchema = yupObject({
  method: yupString().defined(),
  path: yupString().defined(),
  count: yupNumber().defined(),
  totalTimeMs: yupNumber().defined(),
  minTimeMs: yupNumber().defined(),
  maxTimeMs: yupNumber().defined(),
  lastCalledAt: yupNumber().defined(),
});

// Yup schema for the aggregate summary across all recorded requests.
const aggregateStatsSchema = yupObject({
  totalRequests: yupNumber().defined(),
  totalTimeMs: yupNumber().defined(),
  uniqueEndpoints: yupNumber().defined(),
  averageTimeMs: yupNumber().defined(),
});

/**
 * Guard for dev-only endpoints: throws a 403 StatusError unless the current
 * node environment (per getNodeEnvironment) is "development".
 */
function assertDevelopmentMode() {
  const environment = getNodeEnvironment();
  if (environment === "development") {
    return;
  }
  throw new StatusError(403, "This endpoint is only available in development mode");
}

// Dev-only endpoint: reports request statistics collected by
// @/lib/dev-request-stats (aggregate totals plus three top-20 rankings).
export const GET = createSmartRouteHandler({
  metadata: {
    hidden: true,
  },
  request: yupObject({}),
  response: yupObject({
    statusCode: yupNumber().oneOf([200]).defined(),
    bodyType: yupString().oneOf(["json"]).defined(),
    body: yupObject({
      aggregate: aggregateStatsSchema.defined(),
      mostCommon: yupArray(requestStatSchema.defined()).defined(),
      mostTimeConsuming: yupArray(requestStatSchema.defined()).defined(),
      slowest: yupArray(requestStatSchema.defined()).defined(),
    }).defined(),
  }),
  async handler() {
    // 403 outside development mode.
    assertDevelopmentMode();

    // Number of entries returned per ranking list.
    const topN = 20;
    const body = {
      aggregate: getAggregateStats(),
      mostCommon: getMostCommonRequests(topN),
      mostTimeConsuming: getMostTimeConsumingRequests(topN),
      slowest: getSlowestRequests(topN),
    };

    return {
      statusCode: 200,
      bodyType: "json",
      body,
    };
  },
});

// Dev-only endpoint: resets all collected request statistics.
export const DELETE = createSmartRouteHandler({
  metadata: {
    hidden: true,
  },
  request: yupObject({}),
  response: yupObject({
    statusCode: yupNumber().oneOf([200]).defined(),
    bodyType: yupString().oneOf(["success"]).defined(),
  }),
  async handler() {
    // 403 outside development mode.
    assertDevelopmentMode();

    clearRequestStats();

    return {
      statusCode: 200,
      bodyType: "success",
    };
  },
});
Loading