Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
184 changes: 182 additions & 2 deletions .github/workflows/db-migration-backwards-compatibility.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: DB migrations are backwards-compatible
name: DB migration compat

on:
push:
Expand Down Expand Up @@ -48,7 +48,7 @@ jobs:
fi

backwards-compatibility:
name: Test migrations with ${{ needs.check-migrations-changed.outputs.base_branch }} branch code
name: Back-compat — Current branch migrations with ${{ needs.check-migrations-changed.outputs.base_branch }} branch code
needs: check-migrations-changed
if: needs.check-migrations-changed.outputs.migrations_changed == 'true'
runs-on: ubicloud-standard-8
Expand Down Expand Up @@ -235,6 +235,186 @@ jobs:
if: always()
run: docker compose -f docker/dependencies/docker.compose.yaml logs

# Forward-compat job: runs the CURRENT branch's application code against the
# BASE branch's (old) database migrations, mirroring the backwards-compatibility
# job with the roles of code and migrations swapped.
# NOTE(review): the scrape has flattened YAML indentation in this view; the
# comments below annotate the steps as shown.
forward-compatibility:
name: Forward-compat — Current branch code with ${{ needs.check-migrations-changed.outputs.base_branch }} branch migrations
needs: [check-migrations-changed, backwards-compatibility]
# NOTE(review): `always()` combined with `== 'failure'` means this job runs
# ONLY when the backwards-compatibility job failed — presumably to help
# distinguish "migrations are broken" from "new code is broken". Confirm this
# is intentional and not meant to be `!= 'failure'` / unconditional.
if: always() && needs.backwards-compatibility.result == 'failure'
runs-on: ubicloud-standard-8
env:
NODE_ENV: test
# NOTE(review): unquoted `yes` is a YAML 1.1 boolean; GitHub Actions will
# expose the env var as the string "true", not "yes" — confirm the backend
# treats that as enabled.
STACK_ENABLE_HARDCODED_PASSKEY_CHALLENGE_FOR_TESTING: yes
# Test-only placeholder credentials for the dockerized Postgres below.
STACK_DATABASE_CONNECTION_STRING: "postgres://postgres:PASSWORD-PLACEHOLDER--uqfEC1hmmv@localhost:8128/stackframe"

steps:
# First, checkout the base branch to get its migrations
- name: Checkout base branch
uses: actions/checkout@v6
with:
ref: ${{ needs.check-migrations-changed.outputs.base_branch }}
path: base-branch

# Stash the old migrations outside both checkouts so they survive the
# workspace shuffle in the next steps.
- name: Save base branch migrations
run: |
mkdir -p saved-migrations
cp -r base-branch/apps/backend/prisma/migrations/* saved-migrations/

# Now checkout current branch (new code)
- name: Checkout current branch
uses: actions/checkout@v6
with:
path: current-branch

# Move current branch to the root for the rest of the workflow
# (dotglob so hidden files like .github and .env templates move too)
- name: Setup working directory
run: |
shopt -s dotglob
mv current-branch/* .
rm -rf current-branch base-branch

# Replace current branch's migrations with base branch's (old) migrations
- name: Replace migrations with base branch migrations
run: |
rm -rf apps/backend/prisma/migrations/*
cp -r saved-migrations/* apps/backend/prisma/migrations/
rm -rf saved-migrations

- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: 22.x

- name: Setup pnpm
uses: pnpm/action-setup@v4

# Start Docker Compose in the background
# NOTE(review): wait-on /dev/null resolves immediately (the file exists);
# the actual grace period comes from `wait-for: 3s`.
- name: Start Docker Compose in background
uses: JarvusInnovations/background-action@v1.0.7
with:
run: docker compose -f docker/dependencies/docker.compose.yaml up --pull always -d &
wait-on: /dev/null
tail: true
wait-for: 3s
log-output-if: true

- name: Install dependencies
run: pnpm install --frozen-lockfile

- name: Create .env.test.local file for apps/backend
run: cp apps/backend/.env.development apps/backend/.env.test.local

- name: Create .env.test.local file for apps/dashboard
run: cp apps/dashboard/.env.development apps/dashboard/.env.test.local

- name: Create .env.test.local file for apps/e2e
run: cp apps/e2e/.env.development apps/e2e/.env.test.local

- name: Create .env.test.local file for docs
run: cp docs/.env.development docs/.env.test.local

- name: Create .env.test.local file for examples/cjs-test
run: cp examples/cjs-test/.env.development examples/cjs-test/.env.test.local

- name: Create .env.test.local file for examples/demo
run: cp examples/demo/.env.development examples/demo/.env.test.local

- name: Create .env.test.local file for examples/docs-examples
run: cp examples/docs-examples/.env.development examples/docs-examples/.env.test.local

- name: Create .env.test.local file for examples/e-commerce
run: cp examples/e-commerce/.env.development examples/e-commerce/.env.test.local

- name: Create .env.test.local file for examples/middleware
run: cp examples/middleware/.env.development examples/middleware/.env.test.local

- name: Create .env.test.local file for examples/supabase
run: cp examples/supabase/.env.development examples/supabase/.env.test.local

- name: Create .env.test.local file for examples/convex
run: cp examples/convex/.env.development examples/convex/.env.test.local

- name: Build
run: pnpm build

- name: Wait on Postgres
run: pnpm run wait-until-postgres-is-ready:pg_isready

- name: Wait on Inbucket
run: pnpx wait-on tcp:localhost:8129

- name: Wait on Svix
run: pnpx wait-on tcp:localhost:8113

- name: Wait on ClickHouse
run: pnpx wait-on http://localhost:8136/ping

# Applies the (old) migrations that were copied in above.
- name: Initialize database
run: pnpm run db:init

- name: Start stack-backend in background
uses: JarvusInnovations/background-action@v1.0.7
with:
run: pnpm run start:backend --log-order=stream &
wait-on: |
http://localhost:8102
tail: true
wait-for: 30s
log-output-if: true

- name: Start stack-dashboard in background
uses: JarvusInnovations/background-action@v1.0.7
with:
run: pnpm run start:dashboard --log-order=stream &
wait-on: |
http://localhost:8101
tail: true
wait-for: 30s
log-output-if: true

# NOTE(review): this and the two steps below wait on 8102, which is the
# backend's port (same as stack-backend above) — confirm these services
# have no dedicated readiness endpoint and this is just a "backend is up"
# proxy rather than a copy-paste leftover.
- name: Start mock-oauth-server in background
uses: JarvusInnovations/background-action@v1.0.7
with:
run: pnpm run start:mock-oauth-server --log-order=stream &
wait-on: |
http://localhost:8102
tail: true
wait-for: 30s
log-output-if: true

- name: Start run-email-queue in background
uses: JarvusInnovations/background-action@v1.0.7
with:
run: pnpm -C apps/backend run run-email-queue --log-order=stream &
wait-on: |
http://localhost:8102
tail: true
wait-for: 30s
log-output-if: true

# Only start cron jobs when the script exists in this checkout
# (hashFiles returns '' when the file is absent).
- name: Start run-cron-jobs in background
uses: JarvusInnovations/background-action@v1.0.7
if: ${{ hashFiles('apps/backend/scripts/run-cron-jobs.ts') != '' }}
with:
run: pnpm -C apps/backend run with-env:dev tsx scripts/run-cron-jobs.ts --log-order=stream &
wait-on: |
http://localhost:8102
tail: true
wait-for: 30s
log-output-if: true

# Fixed settle time for all background services before testing.
- name: Wait 10 seconds
run: sleep 10

# Run tests: current branch code with base branch (old) migrations
- name: Run tests (current branch code with base branch migrations)
run: pnpm test

- name: Verify data integrity
run: pnpm run verify-data-integrity --no-bail

- name: Print Docker Compose logs
if: always()
run: docker compose -f docker/dependencies/docker.compose.yaml logs

# This job runs when migrations haven't changed, ensuring the workflow succeeds
skip-unchanged:
name: No migration changes (skipped)
Expand Down
2 changes: 1 addition & 1 deletion AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ To see all development ports, refer to the index.html of `apps/dev-launchpad/pub
- NEVER implement a hacky solution without EXPLICIT approval from the user. Always go the extra mile to make sure the solution is clean, maintainable, and robust.
- Fail early, fail loud. Fail fast with an error instead of silently continuing.
- Do NOT use `as`/`any`/type casts or anything else like that to bypass the type system unless you specifically asked the user about it. Most of the time a place where you would use type casts is not one where you actually need them. Avoid wherever possible.
- When writing database migration files, assume that we have >1,000,000 rows in every table (unless otherwise specified). This means you may have to use CONDITIONALLY_REPEAT_MIGRATION_SENTINEL to avoid running the migration and things like concurrent index builds; see the existing migrations for examples. One common pattern is to add a temporary extra boolean column
- When writing database migration files, assume that we have >1,000,000 rows in every table (unless otherwise specified). This means you may have to use CONDITIONALLY_REPEAT_MIGRATION_SENTINEL to avoid running the migration and things like concurrent index builds; see the existing migrations for examples. One common pattern is to add a temporary index or extra boolean column marking whether the row has already been migrated (then deleting the column at the end).
- Each migration file runs in its own transaction with a relatively short timeout. Split long-running operations into separate migration files to avoid timeouts. For example, when adding CHECK constraints, use `NOT VALID` in one migration, then `VALIDATE CONSTRAINT` in a separate migration file.
- Note that each database migration file is executed in a single transaction. Even with the run-outside-transaction sentinel, the transaction will still continue during the entire migration file. If you want to split things up into multiple transactions, put it into their own migration files.
- When writing database migration files, ALWAYS ALWAYS add tests for all the potential edge cases! See the folder structure of the other migrations to see how that works.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
-- Create a temporary PARTIAL index to speed up the batched migration below.
-- (A partial B-tree over the not-yet-migrated rows, not GIN on the whole
-- column: GIN jsonb_ops can serve `?` but not `NOT ... ?` and not `IS NULL`,
-- so a whole-column GIN index would never be used by the batch query. The
-- partial index's predicate matches the UPDATE's WHERE clause exactly, so
-- each batch finds the next unmigrated rows without a full table scan, and
-- the index shrinks as rows are migrated.)
-- SPLIT_STATEMENT_SENTINEL
-- SINGLE_STATEMENT_SENTINEL
-- RUN_OUTSIDE_TRANSACTION_SENTINEL
CREATE INDEX CONCURRENTLY IF NOT EXISTS "temp_project_require_publishable_client_key_idx"
ON /* SCHEMA_NAME_SENTINEL */."Project" ("id")
WHERE "projectConfigOverride" IS NULL
   OR NOT ("projectConfigOverride" ? 'project.requirePublishableClientKey');
-- SPLIT_STATEMENT_SENTINEL

-- Set requirePublishableClientKey to true for existing projects when missing.
-- Batched at 10,000 rows per run; CONDITIONALLY_REPEAT_MIGRATION_SENTINEL
-- re-runs this file until the UPDATE matches no rows (i.e. RETURNING yields
-- no should_repeat_migration row), per the >1M-rows migration policy.
-- SPLIT_STATEMENT_SENTINEL
-- SINGLE_STATEMENT_SENTINEL
-- CONDITIONALLY_REPEAT_MIGRATION_SENTINEL
WITH to_update AS (
  SELECT "id"
  FROM "Project"
  WHERE "projectConfigOverride" IS NULL
     OR NOT ("projectConfigOverride" ? 'project.requirePublishableClientKey')
  LIMIT 10000
)
UPDATE "Project" p
SET "projectConfigOverride" = jsonb_set(
  COALESCE(p."projectConfigOverride", '{}'::jsonb),
  -- Single top-level key that happens to contain dots (not a nested path).
  '{project.requirePublishableClientKey}',
  'true'::jsonb,
  true
)
FROM to_update tu
WHERE p."id" = tu."id"
RETURNING true AS should_repeat_migration;
-- SPLIT_STATEMENT_SENTINEL

-- Clean up the temporary index once every row has been migrated
DROP INDEX IF EXISTS "temp_project_require_publishable_client_key_idx";
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { checkApiKeySet } from "@/lib/internal-api-keys";
import { checkApiKeySet, throwCheckApiKeySetError } from "@/lib/internal-api-keys";
import { getSoleTenancyFromProjectBranch } from "@/lib/tenancies";
import { decodeAccessToken, oauthCookieSchema } from "@/lib/tokens";
import { getProjectBranchFromClientId, getProvider } from "@/oauth";
Expand Down Expand Up @@ -60,8 +60,9 @@ export const GET = createSmartRouteHandler({
throw new KnownErrors.InvalidOAuthClientIdOrSecret(query.client_id);
}

if (!(await checkApiKeySet(tenancy.project.id, { publishableClientKey: query.client_secret }))) {
throw new KnownErrors.InvalidPublishableClientKey(tenancy.project.id);
const keyCheck = await checkApiKeySet(tenancy.project.id, { publishableClientKey: query.client_secret });
if (keyCheck.status === "error") {
throwCheckApiKeySetError(keyCheck.error, tenancy.project.id, new KnownErrors.InvalidPublishableClientKey(tenancy.project.id));
}

const providerRaw = Object.entries(tenancy.config.auth.oauth.providers).find(([providerId, _]) => providerId === params.provider_id);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { usersCrudHandlers } from "@/app/api/latest/users/crud";
import { checkApiKeySet, throwCheckApiKeySetError } from "@/lib/internal-api-keys";
import { createOAuthUserAndAccount, findExistingOAuthAccount, handleOAuthEmailMergeStrategy, linkOAuthAccountToUser } from "@/lib/oauth";
import { isAcceptedNativeAppUrl, validateRedirectUrl } from "@/lib/redirect-urls";
import { Tenancy, getTenancy } from "@/lib/tenancies";
Expand Down Expand Up @@ -126,6 +127,11 @@ const handler = createSmartRouteHandler({

const provider = { id: providerRaw[0], ...providerRaw[1] };

const keyCheck = await checkApiKeySet(tenancy.project.id, { publishableClientKey: outerInfo.publishableClientKey });
if (keyCheck.status === "error") {
throwCheckApiKeySetError(keyCheck.error, tenancy.project.id, new KnownErrors.InvalidPublishableClientKey(tenancy.project.id));
}

const providerObj = await getProvider(provider as any);
let callbackResult: Awaited<ReturnType<typeof providerObj.getCallback>>;
try {
Expand Down
25 changes: 24 additions & 1 deletion apps/backend/src/app/api/latest/auth/oauth/token/route.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import { oauthServer } from "@/oauth";
import { checkApiKeySet, throwCheckApiKeySetError } from "@/lib/internal-api-keys";
import { getSoleTenancyFromProjectBranch } from "@/lib/tenancies";
import { getProjectBranchFromClientId, oauthServer } from "@/oauth";
import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler";
import { InvalidClientError, InvalidGrantError, InvalidRequestError, Request as OAuthRequest, Response as OAuthResponse, ServerError } from "@node-oauth/oauth2-server";
import { KnownErrors } from "@stackframe/stack-shared/dist/known-errors";
Expand All @@ -15,6 +17,8 @@ export const POST = createSmartRouteHandler({
request: yupObject({
body: yupObject({
grant_type: yupString().oneOf(["authorization_code", "refresh_token"]).defined(),
client_id: yupString().optional(),
client_secret: yupString().optional(),
}).unknown().defined(),
}).defined(),
response: yupObject({
Expand All @@ -24,6 +28,25 @@ export const POST = createSmartRouteHandler({
headers: yupMixed().defined(),
}),
async handler(req, fullReq) {
// Pre-validate the publishable client key to provide specific error messages
// before the OAuth library processes the request
const clientId = req.body.client_id;
const clientSecret = req.body.client_secret;

if (clientId) {
const tenancy = await getSoleTenancyFromProjectBranch(...getProjectBranchFromClientId(clientId), true);
if (tenancy) {
if (clientSecret) {
const keyCheck = await checkApiKeySet(tenancy.project.id, { publishableClientKey: clientSecret });
if (keyCheck.status === "error") {
throwCheckApiKeySetError(keyCheck.error, tenancy.project.id, new KnownErrors.InvalidOAuthClientIdOrSecret());
}
} else if (tenancy.config.project.requirePublishableClientKey) {
throw new KnownErrors.PublishableClientKeyRequiredForProject(tenancy.project.id);
}
}
}
Comment thread
N2D4 marked this conversation as resolved.

const oauthRequest = new OAuthRequest({
headers: {
...fullReq.headers,
Expand Down
Loading
Loading