-
Notifications
You must be signed in to change notification settings - Fork 0
chore: bump cookie to ^0.7.0 (security) #1
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
ba33dde
8f74949
a09188f
7f2a080
af2436e
8514363
48ac44e
e6998cc
630630b
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||||
|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -6,38 +6,40 @@ | |||||||||
| "scripts": { | ||||||||||
| "clean": "rimraf src/generated && rimraf .next && rimraf node_modules", | ||||||||||
| "typecheck": "tsc --noEmit", | ||||||||||
| "with-env": "dotenv -c development --", | ||||||||||
| "with-env:prod": "dotenv -c --", | ||||||||||
| "with-env": "dotenv -c --", | ||||||||||
| "with-env:dev": "dotenv -c development --", | ||||||||||
| "with-env:prod": "dotenv -c production --", | ||||||||||
| "dev": "concurrently -n \"dev,codegen,prisma-studio,email-queue\" -k \"next dev --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}02\" \"pnpm run codegen:watch\" \"pnpm run prisma-studio\" \"pnpm run run-email-queue\"", | ||||||||||
| "build": "pnpm run codegen && next build", | ||||||||||
| "docker-build": "pnpm run codegen && next build --experimental-build-mode compile", | ||||||||||
| "build-self-host-migration-script": "tsup --config scripts/db-migrations.tsup.config.ts", | ||||||||||
| "analyze-bundle": "next experimental-analyze", | ||||||||||
| "start": "next start --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}02", | ||||||||||
| "codegen-prisma": "pnpm run prisma generate", | ||||||||||
| "codegen-prisma:watch": "pnpm run prisma generate --watch", | ||||||||||
| "codegen-prisma": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-placeholder-database-connection-string}\" pnpm run prisma generate", | ||||||||||
| "codegen-prisma:watch": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-placeholder-database-connection-string}\" pnpm run prisma generate --watch", | ||||||||||
|
Comment on lines
+18
to
+19
|
||||||||||
| "codegen-prisma": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-placeholder-database-connection-string}\" pnpm run prisma generate", | |
| "codegen-prisma:watch": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-placeholder-database-connection-string}\" pnpm run prisma generate --watch", | |
| "codegen-prisma": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-postgresql://placeholder}\" pnpm run prisma generate", | |
| "codegen-prisma:watch": "STACK_DATABASE_CONNECTION_STRING=\"${STACK_DATABASE_CONNECTION_STRING:-postgresql://placeholder}\" pnpm run prisma generate --watch", |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,24 @@ | ||
| -- CreateIndex | ||
| -- Partial index for emails currently being rendered (finishedRenderingAt is NULL) | ||
| -- Indexed by startedRenderingAt to efficiently query in-progress rendering jobs | ||
| CREATE INDEX "EmailOutbox_rendering_in_progress_idx" | ||
| ON "EmailOutbox" ("startedRenderingAt") | ||
| WHERE "finishedRenderingAt" IS NULL; | ||
|
|
||
| -- CreateIndex | ||
| -- Partial index for emails currently being sent (finishedSendingAt is NULL) | ||
| -- Indexed by startedSendingAt to efficiently query in-progress sending jobs | ||
| CREATE INDEX "EmailOutbox_sending_in_progress_idx" | ||
| ON "EmailOutbox" ("startedSendingAt") | ||
| WHERE "finishedSendingAt" IS NULL; | ||
|
|
||
| -- CreateIndex | ||
| -- Index for looking up team members by user and selection status | ||
| CREATE INDEX "TeamMember_projectUserId_isSelected_idx" | ||
| ON "TeamMember" ("tenancyId", "projectUserId", "isSelected"); | ||
|
|
||
| -- CreateIndex | ||
| -- Index for looking up projects by owner team | ||
| CREATE INDEX "Project_ownerTeamId_idx" | ||
| ON "Project" ("ownerTeamId"); | ||
|
|
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -8,12 +8,14 @@ async function main() { | |
|
|
||
| const baseUrl = `http://localhost:${getEnvVariable('NEXT_PUBLIC_STACK_PORT_PREFIX', '81')}02`; | ||
|
|
||
| // Wait a few seconds to make sure the server is fully started | ||
| await wait(5_000); | ||
|
Comment on lines
+11
to
+12
|
||
|
|
||
| const run = () => runAsynchronously(async () => { | ||
| // If the server is restarted, then the existing email queue step may be cancelled prematurely. That's why we | ||
| // have an extra loop here to detect and restart the email queue step if it completes too quickly. | ||
| const startTime = performance.now(); | ||
| while (true) { | ||
|
|
||
| console.log("Running email queue step..."); | ||
| const res = await fetch(`${baseUrl}/api/latest/internal/email-queue-step`, { | ||
| method: "GET", | ||
|
|
||
This file was deleted.
This file was deleted.
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,3 +1,56 @@ | ||
| import { configOverridesCrudHandlers } from "./crud"; | ||
| import { overrideEnvironmentConfigOverride } from "@/lib/config"; | ||
| import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler"; | ||
| import { environmentConfigSchema, getConfigOverrideErrors, migrateConfigOverride } from "@stackframe/stack-shared/dist/config/schema"; | ||
| import { adaptSchema, adminAuthTypeSchema, yupNumber, yupObject, yupString } from "@stackframe/stack-shared/dist/schema-fields"; | ||
| import { StatusError } from "@stackframe/stack-shared/dist/utils/errors"; | ||
|
|
||
| export const PATCH = configOverridesCrudHandlers.updateHandler; | ||
| export const PATCH = createSmartRouteHandler({ | ||
| metadata: { | ||
| summary: 'Update the config', | ||
| description: 'Update the config for a project and branch with an override', | ||
| tags: ['Config'], | ||
| }, | ||
| request: yupObject({ | ||
| auth: yupObject({ | ||
| type: adminAuthTypeSchema, | ||
| tenancy: adaptSchema, | ||
| }).defined(), | ||
| body: yupObject({ | ||
| config_override_string: yupString().optional(), | ||
| }).defined(), | ||
| }), | ||
| response: yupObject({ | ||
| statusCode: yupNumber().oneOf([200]).defined(), | ||
| bodyType: yupString().oneOf(["success"]).defined(), | ||
| }), | ||
| handler: async (req) => { | ||
| if (req.body.config_override_string) { | ||
| let parsedConfig; | ||
| try { | ||
| parsedConfig = JSON.parse(req.body.config_override_string); | ||
| } catch (e) { | ||
| if (e instanceof SyntaxError) { | ||
| throw new StatusError(StatusError.BadRequest, 'Invalid config JSON'); | ||
| } | ||
| throw e; | ||
| } | ||
|
|
||
| // TODO instead of doing this check here, we should change overrideEnvironmentConfigOverride to return the errors from its ensureNoConfigOverrideErrors call | ||
| const overrideError = await getConfigOverrideErrors(environmentConfigSchema, migrateConfigOverride("environment", parsedConfig)); | ||
| if (overrideError.status === "error") { | ||
| throw new StatusError(StatusError.BadRequest, overrideError.error); | ||
| } | ||
|
|
||
| await overrideEnvironmentConfigOverride({ | ||
| projectId: req.auth.tenancy.project.id, | ||
| branchId: req.auth.tenancy.branchId, | ||
| environmentConfigOverrideOverride: parsedConfig, | ||
| }); | ||
| } | ||
|
|
||
| return { | ||
| statusCode: 200, | ||
| bodyType: "success", | ||
| }; | ||
| }, | ||
| }); |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,3 +1,32 @@ | ||
| import { configCrudHandlers } from "./crud"; | ||
| import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler"; | ||
| import { adaptSchema, adminAuthTypeSchema, yupNumber, yupObject, yupString } from "@stackframe/stack-shared/dist/schema-fields"; | ||
|
|
||
| export const GET = configCrudHandlers.readHandler; | ||
| export const GET = createSmartRouteHandler({ | ||
| metadata: { | ||
| summary: 'Get the config', | ||
| description: 'Get the config for a project and branch', | ||
| tags: ['Config'], | ||
| }, | ||
| request: yupObject({ | ||
| auth: yupObject({ | ||
| type: adminAuthTypeSchema, | ||
| tenancy: adaptSchema, | ||
| }).defined(), | ||
| }), | ||
| response: yupObject({ | ||
| statusCode: yupNumber().oneOf([200]).defined(), | ||
| bodyType: yupString().oneOf(["json"]).defined(), | ||
| body: yupObject({ | ||
| config_string: yupString().defined(), | ||
| }).defined(), | ||
| }), | ||
| handler: async (req) => { | ||
| return { | ||
| statusCode: 200, | ||
| bodyType: "json", | ||
| body: { | ||
| config_string: JSON.stringify(req.auth.tenancy.config), | ||
| }, | ||
| }; | ||
| }, | ||
| }); |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,86 @@ | ||
| import { | ||
| clearRequestStats, | ||
| getAggregateStats, | ||
| getMostCommonRequests, | ||
| getMostTimeConsumingRequests, | ||
| getSlowestRequests, | ||
| } from "@/lib/dev-request-stats"; | ||
| import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler"; | ||
| import { yupArray, yupNumber, yupObject, yupString } from "@stackframe/stack-shared/dist/schema-fields"; | ||
| import { getNodeEnvironment } from "@stackframe/stack-shared/dist/utils/env"; | ||
| import { StatusError } from "@stackframe/stack-shared/dist/utils/errors"; | ||
|
|
||
| const requestStatSchema = yupObject({ | ||
| method: yupString().defined(), | ||
| path: yupString().defined(), | ||
| count: yupNumber().defined(), | ||
| totalTimeMs: yupNumber().defined(), | ||
| minTimeMs: yupNumber().defined(), | ||
| maxTimeMs: yupNumber().defined(), | ||
| lastCalledAt: yupNumber().defined(), | ||
| }); | ||
|
|
||
| const aggregateStatsSchema = yupObject({ | ||
| totalRequests: yupNumber().defined(), | ||
| totalTimeMs: yupNumber().defined(), | ||
| uniqueEndpoints: yupNumber().defined(), | ||
| averageTimeMs: yupNumber().defined(), | ||
| }); | ||
|
|
||
| function assertDevelopmentMode() { | ||
| if (getNodeEnvironment() !== "development") { | ||
| throw new StatusError(403, "This endpoint is only available in development mode"); | ||
| } | ||
| } | ||
|
|
||
| export const GET = createSmartRouteHandler({ | ||
| metadata: { | ||
| hidden: true, | ||
| }, | ||
| request: yupObject({}), | ||
| response: yupObject({ | ||
| statusCode: yupNumber().oneOf([200]).defined(), | ||
| bodyType: yupString().oneOf(["json"]).defined(), | ||
| body: yupObject({ | ||
| aggregate: aggregateStatsSchema.defined(), | ||
| mostCommon: yupArray(requestStatSchema.defined()).defined(), | ||
| mostTimeConsuming: yupArray(requestStatSchema.defined()).defined(), | ||
| slowest: yupArray(requestStatSchema.defined()).defined(), | ||
| }).defined(), | ||
| }), | ||
| handler: async () => { | ||
| assertDevelopmentMode(); | ||
|
|
||
| return { | ||
| statusCode: 200, | ||
| bodyType: "json", | ||
| body: { | ||
| aggregate: getAggregateStats(), | ||
| mostCommon: getMostCommonRequests(20), | ||
| mostTimeConsuming: getMostTimeConsumingRequests(20), | ||
| slowest: getSlowestRequests(20), | ||
| }, | ||
| }; | ||
| }, | ||
| }); | ||
|
|
||
| export const DELETE = createSmartRouteHandler({ | ||
| metadata: { | ||
| hidden: true, | ||
| }, | ||
| request: yupObject({}), | ||
| response: yupObject({ | ||
| statusCode: yupNumber().oneOf([200]).defined(), | ||
| bodyType: yupString().oneOf(["success"]).defined(), | ||
| }), | ||
| handler: async () => { | ||
| assertDevelopmentMode(); | ||
|
|
||
| clearRequestStats(); | ||
|
|
||
| return { | ||
| statusCode: 200, | ||
| bodyType: "success", | ||
| }; | ||
| }, | ||
| }); |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The script commands have been updated from "with-env" to "with-env:dev" or "with-env:prod", but the base "with-env" command now uses "dotenv -c --" without specifying development or production. This changes the default behavior. Ensure this is intentional and that all scripts that relied on the previous "with-env" behavior are updated appropriately.