diff --git a/.claude/CLAUDE-KNOWLEDGE.md b/.claude/CLAUDE-KNOWLEDGE.md index 17a34543b9..a0e6606150 100644 --- a/.claude/CLAUDE-KNOWLEDGE.md +++ b/.claude/CLAUDE-KNOWLEDGE.md @@ -199,6 +199,12 @@ await niceBackendFetch("/api/v1/internal/config/override/environment", { ### Q: Where does domain validation logic belong? A: Core validation functions (`isValidHostnameWithWildcards`, `matchHostnamePattern`) belong in the shared utils package (`packages/stack-shared/src/utils/urls.tsx`) so they can be used by both frontend and backend. +### Q: How should OIDC federation exchange failures be exposed to callers? +A: Keep detailed failure reasons in audit rows and system events, but return a generic `invalid_grant` to unauthenticated token-exchange callers. This avoids leaking IdP fetch/JWKS details or trust-policy claim condition details while preserving operator diagnostics. + +### Q: What should OIDC JWT verification do on a JWKS key miss? +A: Restrict `jwtVerify` to asymmetric OIDC algorithms and, on `ERR_JWKS_NO_MATCHING_KEY`, invalidate both the JWKS cache and the discovery cache before retrying. If an IdP moved `jwks_uri`, clearing only the JWKS row keeps retrying the stale URI. + ### Q: How do you simplify validation logic with wildcards? A: Replace wildcards with valid placeholders before validation: ```typescript @@ -361,3 +367,6 @@ A: Invalid `tools` entries are rejected by `requestBodySchema` in `apps/backend/ ## Q: Why did the internal metrics E2E snapshots need to change in April 2026? A: The `/api/v1/internal/metrics` response now intentionally includes `analytics_overview.daily_anonymous_visitors_fallback`, `analytics_overview.anonymous_visitors_fallback`, and `active_users_by_country`. Those additions are reflected in `packages/stack-shared/src/interface/admin-metrics.ts` and the backend route, so the E2E snapshots must include them instead of treating them as regressions. + +## Q: What body shape should E2E tests expect from backend `StatusError` responses? 
+A: `StatusError.getBody()` returns the error message as a plain text response body with `Content-Type: text/plain; charset=utf-8`. E2E tests using `niceBackendFetch` should assert against `response.body` directly, for example `expect(response.body).toBe("invalid_grant")`, not `response.body.error`. diff --git a/apps/backend/.env.development b/apps/backend/.env.development index 8ec5f41785..698e20dd77 100644 --- a/apps/backend/.env.development +++ b/apps/backend/.env.development @@ -20,6 +20,10 @@ STACK_SEED_INTERNAL_PROJECT_SUPER_SECRET_ADMIN_KEY=this-super-secret-admin-key-i STACK_OAUTH_MOCK_URL=http://localhost:${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}14 STACK_TURNSTILE_SITEVERIFY_URL=http://localhost:${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}14/turnstile/siteverify +# Local mock OIDC IdP for OIDC federation testing (apps/mock-oidc-idp). +# Read by the seed script to install a default trust policy on the dummy project. +STACK_MOCK_OIDC_ISSUER_URL=http://localhost:${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}15 + # Cloudflare Turnstile test keys — always-pass widgets, no real challenges # See https://developers.cloudflare.com/turnstile/troubleshooting/testing/ NEXT_PUBLIC_STACK_BOT_CHALLENGE_SITE_KEY=1x00000000000000000000AA diff --git a/apps/backend/package.json b/apps/backend/package.json index 6569d968bc..1df468ac29 100644 --- a/apps/backend/package.json +++ b/apps/backend/package.json @@ -115,6 +115,7 @@ "sharp": "^0.34.4", "stripe": "^18.3.0", "svix": "^1.89.0", + "undici": "^6.19.8", "vite": "^6.1.0", "yaml": "^2.4.5", "yup": "^1.7.1", diff --git a/apps/backend/prisma/migrations/20260420000000_add_oidc_federation_audit/migration.sql b/apps/backend/prisma/migrations/20260420000000_add_oidc_federation_audit/migration.sql new file mode 100644 index 0000000000..dea28c2cee --- /dev/null +++ b/apps/backend/prisma/migrations/20260420000000_add_oidc_federation_audit/migration.sql @@ -0,0 +1,21 @@ +-- CreateTable +CREATE TABLE "OidcFederationExchangeAudit" ( + "id" UUID NOT NULL, + 
"tenancyId" UUID NOT NULL, + "policyId" TEXT NOT NULL, + "issuer" TEXT NOT NULL, + "subject" TEXT NOT NULL, + "outcome" TEXT NOT NULL, + "reason" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + + CONSTRAINT "OidcFederationExchangeAudit_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE INDEX "OidcFederationExchangeAudit_tenancy_policy_createdAt_idx" ON "OidcFederationExchangeAudit"("tenancyId", "policyId", "createdAt" DESC); + +-- Constrain outcome to the current vocabulary. `NOT VALID` skips the backfill scan for existing +-- rows; a follow-up migration can VALIDATE once we're confident all historical rows comply. +ALTER TABLE "OidcFederationExchangeAudit" ADD CONSTRAINT "OidcFederationExchangeAudit_outcome_check" + CHECK ("outcome" IN ('success', 'failure')) NOT VALID; diff --git a/apps/backend/prisma/migrations/20260420000000_add_oidc_federation_audit/tests/shape-and-index.ts b/apps/backend/prisma/migrations/20260420000000_add_oidc_federation_audit/tests/shape-and-index.ts new file mode 100644 index 0000000000..299ed2b1e8 --- /dev/null +++ b/apps/backend/prisma/migrations/20260420000000_add_oidc_federation_audit/tests/shape-and-index.ts @@ -0,0 +1,97 @@ +import { randomUUID } from "crypto"; +import type { Sql } from "postgres"; +import { expect } from "vitest"; + +/** + * Migration-level test for `20260420000000_add_oidc_federation_audit`. + * + * Verifies that: + * - `OidcFederationExchangeAudit` exists with the expected columns + types, + * - `createdAt` defaults to now and is non-nullable, + * - the lookup index on (tenancyId, policyId, createdAt DESC) exists, + * - inserts + a per-tenancy-per-policy MAX(createdAt) aggregate work (this is the + * query shape the dashboard will use to show "last used at" per policy). + */ +export const postMigration = async (sql: Sql) => { + // 1. Column shape. 
+ const columnRows = await sql>` + SELECT column_name, is_nullable, data_type + FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'OidcFederationExchangeAudit' + ORDER BY ordinal_position + `; + // Columns are validated as a set — Prisma may reorder ordinals when fields are reshuffled, + // and the set is what the application actually depends on. + expect(columnRows.map(r => r.column_name).sort()).toEqual([ + "createdAt", + "id", + "issuer", + "outcome", + "policyId", + "reason", + "subject", + "tenancyId", + ]); + for (const row of columnRows) { + expect(row.is_nullable).toBe("NO"); + } + const byName = Object.fromEntries(columnRows.map(r => [r.column_name, r])); + expect(byName["id"].data_type).toBe("uuid"); + expect(byName["tenancyId"].data_type).toBe("uuid"); + expect(byName["createdAt"].data_type).toBe("timestamp without time zone"); + + // 2. Index exists with the expected column list + ordering. + const indexRows = await sql>` + SELECT indexdef + FROM pg_indexes + WHERE schemaname = 'public' + AND tablename = 'OidcFederationExchangeAudit' + AND indexname = 'OidcFederationExchangeAudit_tenancy_policy_createdAt_idx' + `; + expect(indexRows).toHaveLength(1); + expect(indexRows[0].indexdef).toContain('"tenancyId"'); + expect(indexRows[0].indexdef).toContain('"policyId"'); + expect(indexRows[0].indexdef).toContain('"createdAt" DESC'); + + // 3. Insert + aggregate — the dashboard "last used at" query shape. + // The audit table intentionally stores tenancyId as a scalar without an FK: audit writes + // should not add delete/update trigger overhead to the hot Tenancy table. 
+ const tenancyId = randomUUID(); + const otherTenancyId = randomUUID(); + + try { + await sql.unsafe(` + INSERT INTO "OidcFederationExchangeAudit" ("id", "tenancyId", "policyId", "issuer", "subject", "outcome", "reason", "createdAt") + VALUES + (gen_random_uuid(), '${tenancyId}', 'policy-a', 'https://idp', 'sub-1', 'success', '', '2026-01-01 00:00:00'), + (gen_random_uuid(), '${tenancyId}', 'policy-a', 'https://idp', 'sub-2', 'success', '', '2026-01-02 00:00:00'), + (gen_random_uuid(), '${tenancyId}', 'policy-b', '', '', 'failure', 'nope', '2026-01-03 00:00:00'), + (gen_random_uuid(), '${otherTenancyId}', 'policy-a', 'https://idp', 'sub-3', 'success', '', '2026-01-05 00:00:00'); + `); + + const defaultRows = await sql>` + INSERT INTO "OidcFederationExchangeAudit" ("id", "tenancyId", "policyId", "issuer", "subject", "outcome", "reason") + VALUES (gen_random_uuid(), ${otherTenancyId}::uuid, 'policy-default', 'https://idp', 'sub-default', 'success', '') + RETURNING "createdAt" + `; + expect(defaultRows).toHaveLength(1); + expect(defaultRows[0].createdAt).toBeInstanceOf(Date); + + const aggregate = await sql>` + SELECT "policyId", to_char(MAX("createdAt"), 'YYYY-MM-DD HH24:MI:SS') AS "lastAt", COUNT(*)::bigint AS total + FROM "OidcFederationExchangeAudit" + WHERE "tenancyId" = ${tenancyId} + GROUP BY "policyId" + ORDER BY "policyId" + `; + expect(aggregate).toHaveLength(2); + expect(aggregate[0].policyId).toBe("policy-a"); + expect(Number(aggregate[0].total)).toBe(2); + expect(aggregate[0].lastAt).toBe("2026-01-02 00:00:00"); + expect(aggregate[1].policyId).toBe("policy-b"); + expect(Number(aggregate[1].total)).toBe(1); + } finally { + await sql`DELETE FROM "OidcFederationExchangeAudit" WHERE "tenancyId" IN (${tenancyId}::uuid, ${otherTenancyId}::uuid)`; + } +}; diff --git a/apps/backend/prisma/schema.prisma b/apps/backend/prisma/schema.prisma index 1b20c77b17..a9fa67dd06 100644 --- a/apps/backend/prisma/schema.prisma +++ b/apps/backend/prisma/schema.prisma @@ -630,6 
+630,31 @@ model OAuthOuterInfo { updatedAt DateTime @updatedAt } +// Durable audit row for OIDC federation exchange attempts. Gives the dashboard a simple +// "last used at" + count aggregate per trust policy without having to scan the global +// Event log by JSON claim. Written best-effort — a failure here must not break the exchange. +model OidcFederationExchangeAudit { + id String @id @default(uuid()) @db.Uuid + + tenancyId String @db.Uuid + + // Matched trust-policy id on success; "" when the exchange failed before any policy matched. + policyId String + // OIDC issuer (post-discovery) on success; "" on failure. + issuer String + // OIDC `sub` claim on success; "" on failure. + subject String + // "success" | "failure". Constrained at the DB layer via a CHECK constraint in the audit + // migration; extending the vocabulary requires a follow-up ALTER CONSTRAINT. + outcome String + // Human-readable failure reason. Empty on success. + reason String + + createdAt DateTime @default(now()) + + @@index([tenancyId, policyId, createdAt(sort: Desc)], map: "OidcFederationExchangeAudit_tenancy_policy_createdAt_idx") +} + model ProjectUserRefreshToken { id String @default(uuid()) @db.Uuid tenancyId String @db.Uuid diff --git a/apps/backend/src/app/api/latest/auth/oidc-federation/exchange/route.tsx b/apps/backend/src/app/api/latest/auth/oidc-federation/exchange/route.tsx new file mode 100644 index 0000000000..47b3ae3c31 --- /dev/null +++ b/apps/backend/src/app/api/latest/auth/oidc-federation/exchange/route.tsx @@ -0,0 +1,192 @@ +import { Prisma } from "@/generated/prisma/client"; +import { SystemEventTypes, logEvent } from "@/lib/events"; +import { validateOidcJwt } from "@/lib/oidc-jwt"; +import { mintServerAccessToken } from "@/lib/server-access-token"; +import { DEFAULT_BRANCH_ID, getSoleTenancyFromProjectBranch } from "@/lib/tenancies"; +import { globalPrismaClient } from "@/prisma-client"; +import { createSmartRouteHandler } from 
"@/route-handlers/smart-route-handler"; +import { yupNumber, yupObject, yupString, yupTuple } from "@stackframe/stack-shared/dist/schema-fields"; +import { StatusError, captureError } from "@stackframe/stack-shared/dist/utils/errors"; +import { matchClaims } from "@stackframe/stack-shared/dist/utils/oidc-federation"; +import { runAsynchronously } from "@stackframe/stack-shared/dist/utils/promises"; + +type AuditRow = Omit & { + outcome: "success" | "failure", +}; + +async function writeAudit(row: AuditRow): Promise { + try { + await globalPrismaClient.oidcFederationExchangeAudit.create({ data: row }); + } catch (error) { + captureError("oidc-federation-audit-write-failed", error); + } +} + +const GRANT_TYPE = "urn:ietf:params:oauth:grant-type:token-exchange"; +const SUBJECT_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:jwt"; +const ISSUED_TOKEN_TYPE = "urn:ietf:params:oauth:token-type:access_token"; + +function flattenClaimConditions( + conds: Record | undefined> | undefined, +): Map { + const out = new Map(); + for (const [claimKey, valueRecord] of Object.entries(conds ?? {})) { + const values = Object.values(valueRecord ?? {}).filter((v): v is string => typeof v === "string"); + if (values.length > 0) out.set(claimKey, values); + } + return out; +} + +export const POST = createSmartRouteHandler({ + metadata: { + summary: "OIDC Federation token exchange", + description: + "Exchange an OIDC JWT issued by a project-trusted identity provider for a short-lived Stack server access token. 
" + + "Follows RFC 8693 (OAuth 2.0 Token Exchange).", + tags: ["Auth"], + }, + request: yupObject({ + method: yupString().oneOf(["POST"]).defined(), + headers: yupObject({ + "x-stack-project-id": yupTuple([yupString().defined()]).defined(), + "x-stack-branch-id": yupTuple([yupString().defined()]).optional(), + }).defined(), + body: yupObject({ + grant_type: yupString().oneOf([GRANT_TYPE]).defined(), + subject_token: yupString().defined(), + subject_token_type: yupString().oneOf([SUBJECT_TOKEN_TYPE]).defined(), + // RFC 8693 optional params. Only `requested_token_type` is accepted, and only with + // the access-token value we actually issue. Audience/resource/scope are not + // negotiable per-request — they're fixed by the trust policy — so reject them + // outright to avoid giving callers a false sense of configurability. + requested_token_type: yupString().oneOf([ISSUED_TOKEN_TYPE]).optional(), + }).defined(), + }), + response: yupObject({ + statusCode: yupNumber().oneOf([200]).defined(), + bodyType: yupString().oneOf(["json"]).defined(), + body: yupObject({ + access_token: yupString().defined(), + issued_token_type: yupString().oneOf([ISSUED_TOKEN_TYPE]).defined(), + token_type: yupString().oneOf(["Bearer"]).defined(), + expires_in: yupNumber().defined(), + }).defined(), + }), + handler: async (req) => { + const projectId = req.headers["x-stack-project-id"][0]; + const branchId = req.headers["x-stack-branch-id"]?.[0] ?? 
DEFAULT_BRANCH_ID; + + const tenancy = await getSoleTenancyFromProjectBranch(projectId, branchId, true); + if (!tenancy) { + throw new StatusError(400, "invalid_request: project or branch not found"); + } + + const recordFailure = (failureContext: { policyId: string, issuer: string, subject: string, reason: string }) => { + runAsynchronously(logEvent([SystemEventTypes.OidcFederationExchange], { + projectId: tenancy.project.id, + policyId: failureContext.policyId, + issuer: failureContext.issuer, + subject: failureContext.subject, + outcome: "failure", + reason: failureContext.reason, + })); + runAsynchronously(writeAudit({ + tenancyId: tenancy.id, + policyId: failureContext.policyId, + issuer: failureContext.issuer, + subject: failureContext.subject, + outcome: "failure", + reason: failureContext.reason, + })); + }; + + const trustPolicies = tenancy.config.oidcFederation.trustPolicies; + const policyEntries = Object.entries(trustPolicies).filter(([_, policy]) => policy.enabled); + if (policyEntries.length === 0) { + recordFailure({ + policyId: "", + issuer: "", + subject: "", + reason: "no enabled OIDC federation trust policies for this project", + }); + throw new StatusError(400, "invalid_grant"); + } + + const attemptReasons: Array<{ policyId: string, reason: string }> = []; + let bestAttempt: { policyId: string, issuer: string, subject: string } | null = null; + for (const [policyId, policy] of policyEntries) { + const issuerUrl = policy.issuerUrl; + const audiences = Object.values(policy.audiences ?? 
{}).filter((v): v is string => typeof v === "string"); + if (typeof issuerUrl !== "string" || audiences.length === 0) { + attemptReasons.push({ policyId, reason: "policy is missing issuerUrl or audiences" }); + continue; + } + + let validated: Awaited>; + try { + validated = await validateOidcJwt({ issuerUrl, audiences, token: req.body.subject_token, prisma: globalPrismaClient }); + } catch (error) { + attemptReasons.push({ policyId, reason: error instanceof Error ? error.message : String(error) }); + continue; + } + bestAttempt = { policyId, issuer: validated.issuer, subject: validated.subject }; + + const stringEquals = flattenClaimConditions(policy.claimConditions.stringEquals); + const stringLike = flattenClaimConditions(policy.claimConditions.stringLike); + const match = matchClaims({ stringEquals, stringLike }, validated.claims); + if (!match.matched) { + attemptReasons.push({ policyId, reason: match.reason }); + continue; + } + + const minted = await mintServerAccessToken({ + projectId: tenancy.project.id, + branchId: tenancy.branchId, + federation: { + policyId, + issuer: validated.issuer, + subject: validated.subject, + audience: validated.audience, + }, + ttlSeconds: policy.tokenTtlSeconds ?? 900, + }); + + runAsynchronously(logEvent([SystemEventTypes.OidcFederationExchange], { + projectId: tenancy.project.id, + policyId, + issuer: validated.issuer, + subject: validated.subject, + outcome: "success", + reason: "", + })); + runAsynchronously(writeAudit({ + tenancyId: tenancy.id, + policyId, + issuer: validated.issuer, + subject: validated.subject, + outcome: "success", + reason: "", + })); + + return { + statusCode: 200, + bodyType: "json" as const, + body: { + access_token: minted.accessToken, + issued_token_type: ISSUED_TOKEN_TYPE, + token_type: "Bearer" as const, + expires_in: minted.ttlSeconds, + }, + }; + } + + const reasonForPolicy = (policyId: string): string => + attemptReasons.find(a => a.policyId === policyId)?.reason ?? 
"no trust policy matched"; + const failureContext = bestAttempt + ? { policyId: bestAttempt.policyId, issuer: bestAttempt.issuer, subject: bestAttempt.subject, reason: reasonForPolicy(bestAttempt.policyId) } + : { policyId: attemptReasons[0]?.policyId ?? "", issuer: "", subject: "", reason: attemptReasons[0]?.reason ?? "no trust policy matched" }; + + recordFailure(failureContext); + throw new StatusError(400, "invalid_grant"); + }, +}); diff --git a/apps/backend/src/app/api/latest/internal/oidc-federation/probe-discovery/route.tsx b/apps/backend/src/app/api/latest/internal/oidc-federation/probe-discovery/route.tsx new file mode 100644 index 0000000000..407970abc8 --- /dev/null +++ b/apps/backend/src/app/api/latest/internal/oidc-federation/probe-discovery/route.tsx @@ -0,0 +1,47 @@ +import { fetchOidcDiscoveryDocument } from "@/lib/oidc-jwt"; +import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler"; +import { adaptSchema, adminAuthTypeSchema, yupNumber, yupObject, yupString } from "@stackframe/stack-shared/dist/schema-fields"; + +export const POST = createSmartRouteHandler({ + metadata: { + hidden: true, + }, + request: yupObject({ + auth: yupObject({ + type: adminAuthTypeSchema, + tenancy: adaptSchema.defined(), + }).defined(), + body: yupObject({ + issuer_url: yupString().defined(), + }).defined(), + method: yupString().oneOf(["POST"]).defined(), + }), + response: yupObject({ + statusCode: yupNumber().oneOf([200]).defined(), + bodyType: yupString().oneOf(["json"]).defined(), + body: yupObject({ + ok: yupObject({ + issuer: yupString().defined(), + jwks_uri: yupString().defined(), + }).optional(), + error: yupString().optional(), + }).defined(), + }), + handler: async ({ body }) => { + const trimmed = body.issuer_url.trim(); + if (!trimmed) { + return { statusCode: 200, bodyType: "json" as const, body: { error: "issuer URL is empty" } }; + } + try { + const doc = await fetchOidcDiscoveryDocument(trimmed); + return { + statusCode: 200, + 
bodyType: "json" as const, + body: { ok: { issuer: doc.issuer, jwks_uri: doc.jwks_uri } }, + }; + } catch (e) { + const message = e instanceof Error ? e.message : "discovery failed"; + return { statusCode: 200, bodyType: "json" as const, body: { error: message } }; + } + }, +}); diff --git a/apps/backend/src/lib/events.tsx b/apps/backend/src/lib/events.tsx index 77fb19606a..12f46d059c 100644 --- a/apps/backend/src/lib/events.tsx +++ b/apps/backend/src/lib/events.tsx @@ -166,6 +166,22 @@ const SignUpRuleTriggerEventType = { inherits: [], } as const satisfies SystemEventTypeBase; +const OidcFederationExchangeEventType = { + id: "$oidc-federation-exchange", + dataSchema: yupObject({ + // Always set. Matched policy on success; empty string on failure before any policy matched. + policyId: yupString().defined(), + // Advertised issuer from the OIDC discovery doc if the token made it that far, empty otherwise. + issuer: yupString().defined(), + // Token's `sub` claim when available, empty otherwise. + subject: yupString().defined(), + outcome: yupString().oneOf(['success', 'failure']).defined(), + // Human-readable reason; free-form. Useful for admin debugging. Empty on success. 
+ reason: yupString().defined(), + }), + inherits: [ProjectActivityEventType], +} as const satisfies SystemEventTypeBase; + export const SystemEventTypes = stripEventTypeSuffixFromKeys({ ProjectEventType, ProjectActivityEventType, @@ -175,6 +191,7 @@ export const SystemEventTypes = stripEventTypeSuffixFromKeys({ ApiRequestEventType, LegacyApiEventType, SignUpRuleTriggerEventType, + OidcFederationExchangeEventType, } as const); const systemEventTypesById = new Map(Object.values(SystemEventTypes).map(eventType => [eventType.id, eventType])); diff --git a/apps/backend/src/lib/oidc-jwt.test.tsx b/apps/backend/src/lib/oidc-jwt.test.tsx new file mode 100644 index 0000000000..7ca205e52c --- /dev/null +++ b/apps/backend/src/lib/oidc-jwt.test.tsx @@ -0,0 +1,483 @@ +import type { PrismaClientTransaction } from "@/prisma-client"; +import { randomBytes } from "node:crypto"; +import { SignJWT, exportJWK, generateKeyPair, type JWK } from "jose"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { OidcJwtValidationError, validateOidcJwt } from "./oidc-jwt"; + +// `validateSafeFetchUrl` does a real DNS lookup (and fails closed on errors) before +// any fetch; these tests use `test-idp.example.com`, which doesn't resolve. Mock +// dns.lookup to a public IP that passes the blocklist so we exercise the fetch +// path with our `fetchMock` instead of tripping on the DNS check. 
+vi.mock("node:dns/promises", () => ({ + lookup: vi.fn(async () => [{ address: "93.184.216.34", family: 4 }]), +})); + +vi.mock("undici", async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + fetch: (...args: Parameters) => globalThis.fetch(...args), + }; +}); + +type CacheRow = { namespace: string, cacheKey: string, payload: unknown, expiresAt: Date }; + +function createMockPrismaCache(): PrismaClientTransaction { + const store = new Map(); + const keyOf = (ns: string, k: string) => `${ns}\\0${k}`; + return { + cacheEntry: { + findUnique: async ({ where }: { where: { namespace_cacheKey: { namespace: string, cacheKey: string } } }) => { + return store.get(keyOf(where.namespace_cacheKey.namespace, where.namespace_cacheKey.cacheKey)) ?? null; + }, + upsert: async ({ where, create, update }: { + where: { namespace_cacheKey: { namespace: string, cacheKey: string } }, + create: CacheRow, + update: Partial, + }) => { + const k = keyOf(where.namespace_cacheKey.namespace, where.namespace_cacheKey.cacheKey); + const existing = store.get(k); + if (existing) store.set(k, { ...existing, ...update }); + else store.set(k, { ...create }); + return store.get(k); + }, + deleteMany: async ({ where }: { where: { namespace: string, cacheKey: string } }) => { + for (const [k, v] of store.entries()) { + if (v.namespace === where.namespace && v.cacheKey === where.cacheKey) store.delete(k); + } + return { count: 0 }; + }, + }, + } as unknown as PrismaClientTransaction; +} + +/** + * These tests generate a real RSA keypair per test, sign tokens with it, and mock `fetch` to + * serve an OIDC discovery document + JWKS backed by that key. This exercises the real `jose` + * verification path without requiring a live IdP. + */ + +async function setupMockIdp(options: { issuerUrl: string, kid?: string }) { + const { publicKey, privateKey } = await generateKeyPair("RS256"); + const jwk = { ...(await exportJWK(publicKey)), kid: options.kid ?? 
"test-key", alg: "RS256", use: "sig" }; + return { privateKey, jwk }; +} + +function installFetchMock(setup: { + issuerUrl: string, + /** The issuer declared in the discovery doc (defaults to `issuerUrl`). */ + advertisedIssuer?: string, + jwks: JWK[], +}) { + const discoveryUrl = `${setup.issuerUrl}/.well-known/openid-configuration`; + const jwksUrl = `${setup.issuerUrl}/jwks`; + const fetchMock = vi.fn(async (input: string | URL, _init?: RequestInit) => { + const url = typeof input === "string" ? input : input.toString(); + if (url === discoveryUrl) { + return new Response( + JSON.stringify({ issuer: setup.advertisedIssuer ?? setup.issuerUrl, jwks_uri: jwksUrl }), + { status: 200, headers: { "content-type": "application/json" } }, + ); + } + if (url === jwksUrl) { + return new Response(JSON.stringify({ keys: setup.jwks }), { status: 200, headers: { "content-type": "application/json" } }); + } + throw new Error(`unexpected fetch in test: ${url}`); + }); + vi.stubGlobal("fetch", fetchMock); + return fetchMock; +} + +async function mintTestToken(privateKey: CryptoKey, payload: Record, options: { + issuer: string, + audience: string | string[], + kid?: string, + expiresIn?: string, + notBefore?: number, +}) { + const jwt = new SignJWT(payload) + .setProtectedHeader({ alg: "RS256", kid: options.kid ?? 
"test-key" }) + .setIssuer(options.issuer) + .setAudience(options.audience) + .setIssuedAt(); + if (options.expiresIn !== undefined) jwt.setExpirationTime(options.expiresIn); + if (options.notBefore !== undefined) jwt.setNotBefore(options.notBefore); + return await jwt.sign(privateKey); +} + +const issuerUrl = "https://test-idp.example.com"; + +describe("validateOidcJwt", () => { + let prisma: PrismaClientTransaction; + beforeEach(() => { + prisma = createMockPrismaCache(); + }); + afterEach(() => { + vi.unstubAllGlobals(); + }); + + it("validates a well-formed token signed by the advertised JWKS", async () => { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + installFetchMock({ issuerUrl, jwks: [jwk] }); + const token = await mintTestToken(privateKey, { sub: "workload-1", environment: "production" }, { + issuer: issuerUrl, + audience: "stack-auth", + expiresIn: "5m", + }); + + const result = await validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma }); + expect(result.subject).toBe("workload-1"); + expect(result.issuer).toBe(issuerUrl); + expect(result.audience).toBe("stack-auth"); + expect(result.claims.environment).toBe("production"); + }); + + it("rejects a token with a mismatched audience", async () => { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + installFetchMock({ issuerUrl, jwks: [jwk] }); + const token = await mintTestToken(privateKey, { sub: "w" }, { + issuer: issuerUrl, + audience: "wrong-audience", + expiresIn: "5m", + }); + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + }); + + it("rejects an expired token", async () => { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + installFetchMock({ issuerUrl, jwks: [jwk] }); + const token = await mintTestToken(privateKey, { sub: "w" }, { + issuer: issuerUrl, + audience: "stack-auth", + expiresIn: "-10m", + }); + await expect(validateOidcJwt({ 
issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toMatchObject({ reason: "token expired" }); + }); + + it("rejects a token whose signature doesn't match the JWKS", async () => { + const { privateKey } = await setupMockIdp({ issuerUrl }); + // Advertise a DIFFERENT key than the one used to sign. + const { jwk: differentJwk } = await setupMockIdp({ issuerUrl, kid: "test-key" }); + installFetchMock({ issuerUrl, jwks: [differentJwk] }); + const token = await mintTestToken(privateKey, { sub: "w" }, { + issuer: issuerUrl, + audience: "stack-auth", + expiresIn: "5m", + }); + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + }); + + it("rejects symmetric JWT algorithms even if the JWKS advertises an oct key", async () => { + const secret = randomBytes(32); + const jwk: JWK = { kty: "oct", k: secret.toString("base64url"), kid: "symmetric-key", alg: "HS256" }; + installFetchMock({ issuerUrl, jwks: [jwk] }); + const token = await new SignJWT({ sub: "w" }) + .setProtectedHeader({ alg: "HS256", kid: "symmetric-key" }) + .setIssuer(issuerUrl) + .setAudience("stack-auth") + .setIssuedAt() + .setExpirationTime("5m") + .sign(secret); + + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + }); + + it("fails closed when no audiences are configured", async () => { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + installFetchMock({ issuerUrl, jwks: [jwk] }); + const token = await mintTestToken(privateKey, { sub: "w" }, { + issuer: issuerUrl, + audience: "stack-auth", + expiresIn: "5m", + }); + await expect(validateOidcJwt({ issuerUrl, audiences: [], token, prisma })).rejects.toMatchObject({ reason: "trust policy has no configured audiences" }); + }); + + it("rejects a structurally-invalid token before hitting the network", async () => { + const fetchMock = installFetchMock({ 
issuerUrl, jwks: [] }); + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token: "not.a.jwt", prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + expect(fetchMock).not.toHaveBeenCalled(); + }); + + it("respects nbf (not-before) with clock-skew tolerance", async () => { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + installFetchMock({ issuerUrl, jwks: [jwk] }); + // nbf 10 minutes in the future — well beyond our 60s skew. + const token = await mintTestToken(privateKey, { sub: "w" }, { + issuer: issuerUrl, + audience: "stack-auth", + expiresIn: "30m", + notBefore: Math.floor(Date.now() / 1000) + 600, + }); + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + }); + + it("rejects a discovery document whose advertised issuer mismatches the configured issuer URL", async () => { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + installFetchMock({ + issuerUrl, + advertisedIssuer: "https://issuer-from-discovery.example.com", + jwks: [jwk], + }); + const token = await mintTestToken(privateKey, { sub: "workload-1" }, { + issuer: "https://issuer-from-discovery.example.com", + audience: "stack-auth", + expiresIn: "5m", + }); + + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toMatchObject({ + reason: "issuer discovery failed", + }); + }); + + it("serves discovery + JWKS from cache on the second validation (no new fetches)", async () => { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + const fetchMock = installFetchMock({ issuerUrl, jwks: [jwk] }); + const mkToken = () => mintTestToken(privateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m" }); + + await validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token: await mkToken(), prisma }); + const fetchesAfterFirst = fetchMock.mock.calls.length; + 
expect(fetchesAfterFirst).toBe(2); // discovery + jwks + + await validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token: await mkToken(), prisma }); + expect(fetchMock.mock.calls.length).toBe(fetchesAfterFirst); // no extra fetches + }); + + it("caches discovery failures with short TTL to prevent IdP hammering", async () => { + const { privateKey } = await setupMockIdp({ issuerUrl }); + const token = await mintTestToken(privateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m" }); + + const fetchMock = vi.fn(async () => + new Response("boom", { status: 500, headers: { "content-type": "text/plain" } }), + ); + vi.stubGlobal("fetch", fetchMock); + + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + expect(fetchMock.mock.calls.length).toBe(1); // one discovery attempt + + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + expect(fetchMock.mock.calls.length).toBe(1); // negative-cache hit; no retry + }); + + it("caches transient discovery fetch errors instead of treating them as URL rejections", async () => { + const { privateKey } = await setupMockIdp({ issuerUrl }); + const token = await mintTestToken(privateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m" }); + + const fetchMock = vi.fn(async () => { + throw new Error("socket hang up"); + }); + vi.stubGlobal("fetch", fetchMock); + + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + expect(fetchMock.mock.calls.length).toBe(1); + + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + expect(fetchMock.mock.calls.length).toBe(1); + }); + + it("invalidates JWKS cache and refetches on ERR_JWKS_NO_MATCHING_KEY (key 
rotation)", async () => { + const { privateKey: oldPrivateKey, jwk: oldJwk } = await setupMockIdp({ issuerUrl, kid: "old-key" }); + const { privateKey: newPrivateKey, jwk: newJwk } = await setupMockIdp({ issuerUrl, kid: "new-key" }); + + let jwksPayload: JWK[] = [oldJwk]; + const discoveryUrl = `${issuerUrl}/.well-known/openid-configuration`; + const jwksUrl = `${issuerUrl}/jwks`; + const fetchMock = vi.fn(async (input: string | URL) => { + const url = typeof input === "string" ? input : input.toString(); + if (url === discoveryUrl) { + return new Response(JSON.stringify({ issuer: issuerUrl, jwks_uri: jwksUrl }), { status: 200, headers: { "content-type": "application/json" } }); + } + if (url === jwksUrl) { + return new Response(JSON.stringify({ keys: jwksPayload }), { status: 200, headers: { "content-type": "application/json" } }); + } + throw new Error(`unexpected fetch: ${url}`); + }); + vi.stubGlobal("fetch", fetchMock); + + // Phase 1: prime caches with the old key. + const oldToken = await mintTestToken(oldPrivateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m", kid: "old-key" }); + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token: oldToken, prisma })).resolves.toMatchObject({ subject: "w" }); + const jwksFetchesAfterPrime = fetchMock.mock.calls.filter(c => (typeof c[0] === "string" ? c[0] : c[0].toString()) === jwksUrl).length; + expect(jwksFetchesAfterPrime).toBe(1); + + // Phase 2: IdP rotates to a new key; a token signed with the new key must trigger + // invalidate-and-refetch because the cached JWKS has only the old kid. 
+ jwksPayload = [newJwk]; + const newToken = await mintTestToken(newPrivateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m", kid: "new-key" }); + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token: newToken, prisma })).resolves.toMatchObject({ subject: "w" }); + + const jwksFetchesAfterRotation = fetchMock.mock.calls.filter(c => (typeof c[0] === "string" ? c[0] : c[0].toString()) === jwksUrl).length; + expect(jwksFetchesAfterRotation).toBe(2); // original prime + refetch after kid-miss + }); + + it("invalidates discovery when a key miss may be caused by a moved jwks_uri", async () => { + const { privateKey: oldPrivateKey, jwk: oldJwk } = await setupMockIdp({ issuerUrl, kid: "old-key" }); + const { privateKey: newPrivateKey, jwk: newJwk } = await setupMockIdp({ issuerUrl, kid: "new-key" }); + + let currentJwksPath = "/jwks-old"; + const discoveryUrl = `${issuerUrl}/.well-known/openid-configuration`; + const fetchMock = vi.fn(async (input: string | URL) => { + const url = typeof input === "string" ? 
input : input.toString(); + if (url === discoveryUrl) { + return new Response(JSON.stringify({ issuer: issuerUrl, jwks_uri: `${issuerUrl}${currentJwksPath}` }), { status: 200, headers: { "content-type": "application/json" } }); + } + if (url === `${issuerUrl}/jwks-old`) { + return new Response(JSON.stringify({ keys: [oldJwk] }), { status: 200, headers: { "content-type": "application/json" } }); + } + if (url === `${issuerUrl}/jwks-new`) { + return new Response(JSON.stringify({ keys: [newJwk] }), { status: 200, headers: { "content-type": "application/json" } }); + } + throw new Error(`unexpected fetch: ${url}`); + }); + vi.stubGlobal("fetch", fetchMock); + + const oldToken = await mintTestToken(oldPrivateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m", kid: "old-key" }); + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token: oldToken, prisma })).resolves.toMatchObject({ subject: "w" }); + + currentJwksPath = "/jwks-new"; + const newToken = await mintTestToken(newPrivateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m", kid: "new-key" }); + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token: newToken, prisma })).resolves.toMatchObject({ subject: "w" }); + + const calls = fetchMock.mock.calls.map(c => typeof c[0] === "string" ? 
c[0] : c[0].toString()); + expect(calls.filter(url => url === discoveryUrl)).toHaveLength(2); + expect(calls).toContain(`${issuerUrl}/jwks-new`); + }); + + it("refetches after each cache TTL expires (JWKS 10m, discovery 1h)", async () => { + vi.useFakeTimers({ toFake: ["Date"] }); + try { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + const fetchMock = installFetchMock({ issuerUrl, jwks: [jwk] }); + const token = await mintTestToken(privateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "3h" }); + + await validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma }); + expect(fetchMock.mock.calls.length).toBe(2); // discovery + JWKS + + // Within both TTLs — no new fetches. + await validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma }); + expect(fetchMock.mock.calls.length).toBe(2); + + // Past JWKS TTL (10min) but within discovery TTL (1h) → 1 new JWKS fetch. + vi.setSystemTime(new Date(Date.now() + 11 * 60 * 1000)); + await validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma }); + expect(fetchMock.mock.calls.length).toBe(3); + + // Past discovery TTL (1h total) → both expire → 2 new fetches. + vi.setSystemTime(new Date(Date.now() + 60 * 60 * 1000)); + await validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma }); + expect(fetchMock.mock.calls.length).toBe(5); + } finally { + vi.useRealTimers(); + } + }); + + it("lets a recovered IdP succeed after negative-cache TTL expires", async () => { + vi.useFakeTimers({ toFake: ["Date"] }); + try { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + let idpAlive = false; + const discoveryUrl = `${issuerUrl}/.well-known/openid-configuration`; + const jwksUrl = `${issuerUrl}/jwks`; + const fetchMock = vi.fn(async (input: string | URL) => { + const url = typeof input === "string" ? 
input : input.toString(); + if (!idpAlive) return new Response("down", { status: 500 }); + if (url === discoveryUrl) { + return new Response(JSON.stringify({ issuer: issuerUrl, jwks_uri: jwksUrl }), { status: 200, headers: { "content-type": "application/json" } }); + } + if (url === jwksUrl) { + return new Response(JSON.stringify({ keys: [jwk] }), { status: 200, headers: { "content-type": "application/json" } }); + } + throw new Error(`unexpected: ${url}`); + }); + vi.stubGlobal("fetch", fetchMock); + + const token = await mintTestToken(privateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "3h" }); + + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + expect(fetchMock.mock.calls.length).toBe(1); + + // Even after the IdP recovers, we're still inside the 30s negative TTL. + idpAlive = true; + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toBeInstanceOf(OidcJwtValidationError); + expect(fetchMock.mock.calls.length).toBe(1); + + // Past negative TTL → retries → succeeds. + vi.setSystemTime(new Date(Date.now() + 31 * 1000)); + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).resolves.toMatchObject({ subject: "w" }); + expect(fetchMock.mock.calls.length).toBe(3); // +discovery +jwks + } finally { + vi.useRealTimers(); + } + }); + + it("surfaces 'no matching JWKS key' when refetched JWKS still lacks the token kid", async () => { + const { privateKey: knownPrivateKey, jwk: knownJwk } = await setupMockIdp({ issuerUrl, kid: "known" }); + const fetchMock = installFetchMock({ issuerUrl, jwks: [knownJwk] }); + + // Prime caches with a good token for kid "known". 
+ const primeToken = await mintTestToken(knownPrivateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m", kid: "known" }); + await validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token: primeToken, prisma }); + expect(fetchMock.mock.calls.filter(c => (typeof c[0] === "string" ? c[0] : c[0].toString()) === `${issuerUrl}/jwks`).length).toBe(1); + + // Token with an unknown kid that's in NEITHER the cached JWKS nor any subsequent fetch. + const { privateKey: unknownPrivateKey } = await setupMockIdp({ issuerUrl, kid: "mystery" }); + const mysteryToken = await mintTestToken(unknownPrivateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m", kid: "mystery" }); + + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token: mysteryToken, prisma })).rejects.toMatchObject({ + reason: "no matching JWKS key for token `kid`", + }); + // Retry path should have refetched once; so JWKS fetch count is now 2. + expect(fetchMock.mock.calls.filter(c => (typeof c[0] === "string" ? c[0] : c[0].toString()) === `${issuerUrl}/jwks`).length).toBe(2); + }); + + it("keys the cache by issuer — different issuers don't share entries", async () => { + const issuerA = "https://idp-a.example.com"; + const issuerB = "https://idp-b.example.com"; + const { privateKey: keyA, jwk: jwkA } = await setupMockIdp({ issuerUrl: issuerA }); + const { privateKey: keyB, jwk: jwkB } = await setupMockIdp({ issuerUrl: issuerB }); + + const fetchMock = vi.fn(async (input: string | URL) => { + const url = typeof input === "string" ? 
input : input.toString(); + if (url === `${issuerA}/.well-known/openid-configuration`) { + return new Response(JSON.stringify({ issuer: issuerA, jwks_uri: `${issuerA}/jwks` }), { status: 200, headers: { "content-type": "application/json" } }); + } + if (url === `${issuerA}/jwks`) { + return new Response(JSON.stringify({ keys: [jwkA] }), { status: 200, headers: { "content-type": "application/json" } }); + } + if (url === `${issuerB}/.well-known/openid-configuration`) { + return new Response(JSON.stringify({ issuer: issuerB, jwks_uri: `${issuerB}/jwks` }), { status: 200, headers: { "content-type": "application/json" } }); + } + if (url === `${issuerB}/jwks`) { + return new Response(JSON.stringify({ keys: [jwkB] }), { status: 200, headers: { "content-type": "application/json" } }); + } + throw new Error(`unexpected: ${url}`); + }); + vi.stubGlobal("fetch", fetchMock); + + const tokenA = await mintTestToken(keyA, { sub: "a" }, { issuer: issuerA, audience: "stack-auth", expiresIn: "5m" }); + const tokenB = await mintTestToken(keyB, { sub: "b" }, { issuer: issuerB, audience: "stack-auth", expiresIn: "5m" }); + + const resA = await validateOidcJwt({ issuerUrl: issuerA, audiences: ["stack-auth"], token: tokenA, prisma }); + const resB = await validateOidcJwt({ issuerUrl: issuerB, audiences: ["stack-auth"], token: tokenB, prisma }); + expect(resA.subject).toBe("a"); + expect(resB.subject).toBe("b"); + expect(fetchMock.mock.calls.length).toBe(4); // 2 per issuer, no cross-hit + }); + + it("caches discovery issuer-mismatch errors (not just HTTP failures)", async () => { + const { privateKey, jwk } = await setupMockIdp({ issuerUrl }); + const fetchMock = installFetchMock({ + issuerUrl, + advertisedIssuer: "https://lying-issuer.example.com", + jwks: [jwk], + }); + const token = await mintTestToken(privateKey, { sub: "w" }, { issuer: issuerUrl, audience: "stack-auth", expiresIn: "5m" }); + + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma 
})).rejects.toMatchObject({ reason: "issuer discovery failed" }); + expect(fetchMock.mock.calls.length).toBe(1); + + // Second call within negative TTL — cached error, no new fetch. + await expect(validateOidcJwt({ issuerUrl, audiences: ["stack-auth"], token, prisma })).rejects.toMatchObject({ reason: "issuer discovery failed" }); + expect(fetchMock.mock.calls.length).toBe(1); + }); +}); diff --git a/apps/backend/src/lib/oidc-jwt.tsx b/apps/backend/src/lib/oidc-jwt.tsx new file mode 100644 index 0000000000..fbb8f1238f --- /dev/null +++ b/apps/backend/src/lib/oidc-jwt.tsx @@ -0,0 +1,262 @@ +import { Prisma } from "@/generated/prisma/client"; +import type { PrismaClientTransaction } from "@/prisma-client"; +import { StatusError, captureError } from "@stackframe/stack-shared/dist/utils/errors"; +import { createLocalJWKSet, decodeProtectedHeader, jwtVerify, type JWK, type JWTPayload } from "jose"; +import { getOrSetCacheValue } from "./cache"; +import { safeFetchJson, validateSafeFetchUrl } from "./safe-fetch"; + +export type DiscoveryDoc = { issuer: string, jwks_uri: string }; +type JwksJson = { keys: JWK[] }; + +type DiscoveryPayload = + | { kind: "ok", doc: DiscoveryDoc } + | { kind: "err", message: string }; + +const DISCOVERY_OK_TTL_MS = 60 * 60 * 1000; +const DISCOVERY_ERR_TTL_MS = 30 * 1000; +const JWKS_TTL_MS = 10 * 60 * 1000; +const CLOCK_SKEW_SECONDS = 60; +const DISCOVERY_NAMESPACE = "oidc-discovery"; +const JWKS_NAMESPACE = "oidc-jwks"; +const FETCH_TIMEOUT_MS = 5000; +const OIDC_JWT_ALGORITHMS = ["RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512", "EdDSA"]; + +function stripTrailingSlash(s: string): string { + return s.endsWith("/") ? 
s.slice(0, -1) : s;
+}
+
+// Normalize the `aud` claim, which per RFC 7519 may be a string, an array, or absent.
+function toAudArray(aud: JWTPayload["aud"]): string[] {
+  if (Array.isArray(aud)) return aud;
+  if (aud == null) return [];
+  return [aud];
+}
+
+// Upsert a discovery-cache row (success or negative entry) with the given TTL.
+async function writeDiscoveryCache(
+  prisma: PrismaClientTransaction,
+  cacheKey: string,
+  payload: DiscoveryPayload,
+  ttlMs: number,
+): Promise<void> {
+  const expiresAt = new Date(Date.now() + ttlMs);
+  await prisma.cacheEntry.upsert({
+    where: { namespace_cacheKey: { namespace: DISCOVERY_NAMESPACE, cacheKey } },
+    create: { namespace: DISCOVERY_NAMESPACE, cacheKey, payload: payload as unknown as Prisma.InputJsonValue, expiresAt },
+    update: { payload: payload as unknown as Prisma.InputJsonValue, expiresAt },
+  });
+}
+
+// Record a short-TTL negative cache entry for a discovery failure, then rethrow it.
+async function cacheErrorAndRethrow(
+  prisma: PrismaClientTransaction,
+  cacheKey: string,
+  error: unknown,
+): Promise<never> {
+  const message = error instanceof Error ? error.message : String(error);
+  try {
+    await writeDiscoveryCache(prisma, cacheKey, { kind: "err", message }, DISCOVERY_ERR_TTL_MS);
+  } catch (cacheErr) {
+    // Don't let a DB hiccup clobber the real discovery error — surface both.
+    captureError("oidc-discovery-cache-write-failed", cacheErr);
+  }
+  throw error instanceof Error ? error : new Error(message);
+}
+
+// Fetch and validate the issuer's OIDC discovery document (no caching here;
+// caching is loadDiscovery's job). Throws plain Errors describing the failure.
+export async function fetchOidcDiscoveryDocument(issuerUrl: string): Promise<DiscoveryDoc> {
+  const cacheKey = stripTrailingSlash(issuerUrl);
+
+  const discovery = await safeFetchJson<Record<string, unknown>>(`${cacheKey}/.well-known/openid-configuration`, {
+    headers: { accept: "application/json" },
+    timeoutMs: FETCH_TIMEOUT_MS,
+  });
+  if (discovery.kind === "url-error") {
+    throw new Error(`OIDC discovery URL rejected for ${issuerUrl}: ${discovery.reason}`);
+  }
+  if (discovery.kind === "fetch-error") {
+    throw new Error(`OIDC discovery fetch failed for ${issuerUrl}: ${discovery.reason}`);
+  }
+  if (discovery.kind === "http-error") {
+    throw new Error(`OIDC discovery fetch failed for ${issuerUrl} (status ${discovery.status})`);
+  }
+
+  const body = discovery.body;
+  if (typeof body.issuer !== "string" || typeof body.jwks_uri !== "string") {
+    throw new Error(`OIDC discovery response for ${issuerUrl} is missing issuer or jwks_uri`);
+  }
+  // Per OIDC Discovery §4.3 the advertised issuer must match the configured one.
+  if (stripTrailingSlash(body.issuer) !== cacheKey) {
+    throw new Error(`OIDC discovery issuer mismatch for ${issuerUrl}: expected ${cacheKey}, got ${body.issuer}`);
+  }
+  const jwksSafe = await validateSafeFetchUrl(body.jwks_uri);
+  if (jwksSafe.kind !== "ok") {
+    throw new Error(`OIDC discovery jwks_uri rejected for ${issuerUrl}: ${jwksSafe.reason}`);
+  }
+  return { issuer: body.issuer, jwks_uri: body.jwks_uri };
+}
+
+// Load the discovery document through the DB-backed cache. Successful documents
+// are cached for DISCOVERY_OK_TTL_MS; failures are negative-cached for
+// DISCOVERY_ERR_TTL_MS so a flapping IdP isn't hammered on every token exchange.
+async function loadDiscovery(issuerUrl: string, prisma: PrismaClientTransaction): Promise<DiscoveryDoc> {
+  const cacheKey = stripTrailingSlash(issuerUrl);
+
+  const cached = await prisma.cacheEntry.findUnique({
+    where: { namespace_cacheKey: { namespace: DISCOVERY_NAMESPACE, cacheKey } },
+  });
+  if (cached && cached.expiresAt.getTime() > Date.now()) {
+    const payload = cached.payload as unknown as DiscoveryPayload;
+    if (payload.kind === "err") throw new Error(payload.message);
+    return payload.doc;
+  }
+
+  try {
+    const doc = await fetchOidcDiscoveryDocument(issuerUrl);
+    await writeDiscoveryCache(prisma, cacheKey, { kind: "ok", doc }, DISCOVERY_OK_TTL_MS);
+    return doc;
+  } catch (error) {
+    // URL-validation failures are deterministic per-config; don't poison the error
+    // cache with them, or fixing a mistyped issuer URL would still fail for DISCOVERY_ERR_TTL_MS.
+    // NOTE(review): this matches on the error message prefix; a dedicated error
+    // class would be sturdier if these messages ever get reworded.
+    if (error instanceof Error && error.message.startsWith("OIDC discovery URL rejected")) {
+      throw error;
+    }
+    return await cacheErrorAndRethrow(prisma, cacheKey, error);
+  }
+}
+
+// Fetch and shape-check the JWKS document at jwks_uri (already SSRF-screened by
+// discovery). Throws plain Errors on any failure.
+async function fetchJwks(jwksUrl: string): Promise<JwksJson> {
+  const jwks = await safeFetchJson<JwksJson>(jwksUrl, {
+    headers: { accept: "application/json" },
+    timeoutMs: FETCH_TIMEOUT_MS,
+  });
+  if (jwks.kind === "url-error") {
+    throw new Error(`OIDC JWKS URL rejected: ${jwks.reason}`);
+  }
+  if (jwks.kind === "fetch-error") {
+    throw new Error(`OIDC JWKS fetch failed for ${jwksUrl}: ${jwks.reason}`);
+  }
+  if (jwks.kind === "http-error") {
+    throw new Error(`OIDC JWKS fetch failed for ${jwksUrl} (status ${jwks.status})`);
+  }
+  const body = jwks.body;
+  if (!Array.isArray(body.keys)) {
+    throw new Error(`OIDC JWKS response for ${jwksUrl} is not a valid JWKS`);
+  }
+  return body;
+}
+
+// JWKS goes through the shared cache helper (10 min TTL), keyed by jwks_uri.
+async function loadJwks(jwksUrl: string, prisma: PrismaClientTransaction): Promise<JwksJson> {
+  return await getOrSetCacheValue({
+    namespace: JWKS_NAMESPACE,
+    cacheKey: jwksUrl,
+    ttlMs: JWKS_TTL_MS,
+    prisma,
+    loader: () => fetchJwks(jwksUrl),
+  });
+}
+
+async function invalidateJwks(prisma: PrismaClientTransaction, jwksUrl: string): Promise<void> {
+  await prisma.cacheEntry.deleteMany({
+    where: { namespace: JWKS_NAMESPACE, cacheKey: jwksUrl },
+  });
+}
+
+async function invalidateDiscovery(prisma: PrismaClientTransaction, issuerUrl: string): Promise<void> {
+  await prisma.cacheEntry.deleteMany({
+    where: { namespace: DISCOVERY_NAMESPACE, cacheKey: stripTrailingSlash(issuerUrl) },
+  });
+}
+
+// 401 StatusError carrying a short machine-comparable `reason` plus the original
+// cause for diagnostics.
+export class OidcJwtValidationError extends StatusError {
+  public override readonly cause?: unknown;
+  constructor(public readonly reason: string, options?: { cause?: unknown }) {
+    super(401, `OIDC
token validation failed: ${reason}`);
+    if (options?.cause !== undefined) this.cause = options.cause;
+  }
+}
+
+export type ValidateOidcJwtOptions = {
+  issuerUrl: string,
+  audiences: string[],
+  token: string,
+  prisma: PrismaClientTransaction,
+};
+
+export type ValidatedOidcJwt = {
+  claims: JWTPayload,
+  issuer: string,
+  subject: string,
+  audience: string,
+};
+
+// Map jose error codes to caller-safe reason strings; anything unrecognized
+// collapses to a generic "token verification failed".
+function translateVerifyError(error: unknown): OidcJwtValidationError {
+  if (error instanceof OidcJwtValidationError) return error;
+  const code = (error as { code?: unknown }).code;
+  const reason =
+    code === "ERR_JWT_EXPIRED" ? "token expired"
+    : code === "ERR_JWT_CLAIM_VALIDATION_FAILED" ? `claim validation failed: ${(error as { claim?: string }).claim ?? "unknown"}`
+    : code === "ERR_JWS_SIGNATURE_VERIFICATION_FAILED" ? "signature verification failed"
+    : code === "ERR_JWKS_NO_MATCHING_KEY" ? "no matching JWKS key for token `kid`"
+    : "token verification failed";
+  return new OidcJwtValidationError(reason, { cause: error });
+}
+
+// Validate a federated OIDC JWT against the issuer's discovered JWKS.
+// Throws OidcJwtValidationError (401) on every failure mode so callers can map
+// all of them to a generic invalid_grant without leaking IdP details.
+export async function validateOidcJwt(options: ValidateOidcJwtOptions): Promise<ValidatedOidcJwt> {
+  const { issuerUrl, audiences, token, prisma } = options;
+
+  if (audiences.length === 0) {
+    throw new OidcJwtValidationError("trust policy has no configured audiences");
+  }
+
+  // Cheap well-formedness check before any network I/O.
+  try {
+    decodeProtectedHeader(token);
+  } catch (error) {
+    throw new OidcJwtValidationError("token is not a well-formed JWT", { cause: error });
+  }
+
+  let doc: DiscoveryDoc;
+  try {
+    doc = await loadDiscovery(issuerUrl, prisma);
+  } catch (error) {
+    captureError("oidc-federation-discovery-failed", error);
+    throw new OidcJwtValidationError("issuer discovery failed", { cause: error });
+  }
+
+  const verifyOnce = async () => {
+    const jwks = await loadJwks(doc.jwks_uri, prisma);
+    const keystore = createLocalJWKSet(jwks);
+    return await jwtVerify(token, keystore, {
+      issuer: doc.issuer,
+      audience: audiences,
+      clockTolerance: CLOCK_SKEW_SECONDS,
+      algorithms: OIDC_JWT_ALGORITHMS,
+    });
+  };
+
+  let verifyResult: Awaited<ReturnType<typeof verifyOnce>>;
+  try {
+    verifyResult = await verifyOnce();
+  } catch (error) {
+    // Cached JWKS may be stale after key rotation — invalidate and retry once.
+    // We also invalidate discovery in case the IdP moved its jwks_uri.
+    if ((error as { code?: unknown }).code === "ERR_JWKS_NO_MATCHING_KEY") {
+      await invalidateJwks(prisma, doc.jwks_uri);
+      await invalidateDiscovery(prisma, issuerUrl);
+      // BUGFIX: wrap discovery failures on the retry path too; previously a raw
+      // Error escaped here (surfacing as a 500) while the primary path maps the
+      // same failure to a 401 OidcJwtValidationError.
+      try {
+        doc = await loadDiscovery(issuerUrl, prisma);
+      } catch (discoveryError) {
+        captureError("oidc-federation-discovery-failed", discoveryError);
+        throw new OidcJwtValidationError("issuer discovery failed", { cause: discoveryError });
+      }
+      try {
+        verifyResult = await verifyOnce();
+      } catch (retryError) {
+        throw translateVerifyError(retryError);
+      }
+    } else {
+      throw translateVerifyError(error);
+    }
+  }
+
+  const { payload } = verifyResult;
+  if (typeof payload.sub !== "string" || payload.sub.length === 0) {
+    throw new OidcJwtValidationError("token is missing `sub` claim");
+  }
+  // jwtVerify already enforced the audience; re-derive the matched value so the
+  // caller knows which configured audience actually hit.
+  const matchedAudience = toAudArray(payload.aud).find(a => audiences.includes(a));
+  if (matchedAudience === undefined) {
+    throw new OidcJwtValidationError("token audience does not match policy");
+  }
+  return {
+    claims: payload,
+    issuer: doc.issuer,
+    subject: payload.sub,
+    audience: matchedAudience,
+  };
+}
diff --git a/apps/backend/src/lib/safe-fetch.ts b/apps/backend/src/lib/safe-fetch.ts
new file mode 100644
index 0000000000..e84100b65a
--- /dev/null
+++ b/apps/backend/src/lib/safe-fetch.ts
@@ -0,0 +1,206 @@
+import { lookup } from "node:dns/promises";
+import { BlockList, isIPv4, isIPv6 } from "node:net";
+import { getNodeEnvironment } from "@stackframe/stack-shared/dist/utils/env";
+import { runAsynchronously } from "@stackframe/stack-shared/dist/utils/promises";
+import { Agent, fetch as undiciFetch, type Dispatcher, type RequestInit as UndiciRequestInit } from "undici";
+
+type LookupAddress = { address: string, family: number };
+type IpFamily = 4 | 6;
+type ResolvedSafeFetchUrl = { kind: "ok", url: URL, address: string, family: IpFamily };
+
+export type SafeFetchUrlResult =
+  | { kind: "ok", url: URL }
+  | { kind: "error", reason: string };
+
+export type SafeFetchJsonResult<T> =
+  | { kind: "ok", url: URL, status: number, body: T
} + | { kind: "http-error", url: URL, status: number, body: string } + | { kind: "url-error", reason: string } + | { kind: "fetch-error", url: URL, reason: string }; + +type LookupCallback = (err: NodeJS.ErrnoException | null, address: string, family: number) => void; + +// Precomputed blocklist of CIDR ranges the server must never dereference. Covers +// loopback, RFC1918, link-local + cloud metadata, CGNAT, multicast/reserved for +// IPv4, and loopback/unspecified/link-local/unique-local/site-local/multicast for +// IPv6. `net.BlockList` gives us numeric subnet matching so we don't rely on +// string-prefix heuristics that tend to grow subtle gaps. +const BLOCKED_RANGES: BlockList = (() => { + const list = new BlockList(); + // IPv4 + list.addSubnet("0.0.0.0", 8, "ipv4"); // "this network" / unspecified + list.addSubnet("10.0.0.0", 8, "ipv4"); // RFC1918 + list.addSubnet("127.0.0.0", 8, "ipv4"); // loopback + list.addSubnet("169.254.0.0", 16, "ipv4"); // link-local + cloud metadata (169.254.169.254) + list.addSubnet("172.16.0.0", 12, "ipv4"); // RFC1918 + list.addSubnet("192.168.0.0", 16, "ipv4"); // RFC1918 + list.addSubnet("100.64.0.0", 10, "ipv4"); // CGNAT + list.addSubnet("224.0.0.0", 3, "ipv4"); // multicast + reserved (224.0.0.0/3 covers 240.0.0.0/4 too) + // IPv6 + list.addAddress("::", "ipv6"); // unspecified + list.addAddress("::1", "ipv6"); // loopback + list.addSubnet("fe80::", 10, "ipv6"); // link-local + list.addSubnet("fc00::", 7, "ipv6"); // unique-local + list.addSubnet("fec0::", 10, "ipv6"); // site-local (deprecated but still routable) + list.addSubnet("ff00::", 8, "ipv6"); // multicast + return list; +})(); + +/** + * Screens a URL before the server dereferences it to an external network resource. + * Rejects URLs that would expose internal services via SSRF: non-http(s) schemes, + * plain http outside of dev-loopback, and hostnames that resolve to addresses in + * BLOCKED_RANGES. 
+ * + */ +export async function validateSafeFetchUrl(raw: string): Promise { + const safe = await resolveSafeFetchUrl(raw); + if (safe.kind !== "ok") return safe; + return { kind: "ok", url: safe.url }; +} + +export async function safeFetchJson(raw: string, options?: { + headers?: HeadersInit, + timeoutMs?: number, +}): Promise> { + const safe = await resolveSafeFetchUrl(raw); + if (safe.kind !== "ok") return { kind: "url-error", reason: safe.reason }; + + let dispatcher: Dispatcher | undefined; + try { + dispatcher = createPinnedDispatcher(safe.address, safe.family); + const response = await undiciFetch(safe.url.toString(), { + method: "GET", + headers: options?.headers, + signal: AbortSignal.timeout(options?.timeoutMs ?? 5000), + dispatcher, + } satisfies UndiciRequestInit); + + if (!response.ok) { + const body = await response.text(); + return { kind: "http-error", url: safe.url, status: response.status, body }; + } + const body = await response.json() as T; + return { kind: "ok", url: safe.url, status: response.status, body }; + } catch (err) { + const reason = err instanceof Error ? err.message : String(err); + return { kind: "fetch-error", url: safe.url, reason }; + } finally { + if (dispatcher) runAsynchronously(dispatcher.close()); + } +} + +async function resolveSafeFetchUrl(raw: string): Promise { + let url: URL; + try { + url = new URL(raw); + } catch { + return { kind: "error", reason: "invalid URL" }; + } + // URL.hostname preserves brackets around IPv6 literals (e.g. "[::1]"); strip them so + // the loopback check and DNS lookup see the literal address. + const hostname = url.hostname.startsWith("[") && url.hostname.endsWith("]") + ? 
url.hostname.slice(1, -1)
+    : url.hostname;
+  const isLoopbackHostname = hostname === "localhost" || hostname === "127.0.0.1" || hostname === "::1";
+  const isDevLoopback = getNodeEnvironment() !== "production" && isLoopbackHostname;
+  if (url.protocol !== "https:" && !(url.protocol === "http:" && isDevLoopback)) {
+    return { kind: "error", reason: "URL must use https (http is only allowed for localhost in non-production)" };
+  }
+  // Fail closed on DNS errors: on this auth path, we'd rather reject than take a
+  // chance that retry + rebinding lets a later resolution through the block.
+  let resolved: LookupAddress[];
+  try {
+    resolved = await lookup(hostname, { all: true });
+  } catch (err) {
+    const reason = err instanceof Error ? err.message : String(err);
+    return { kind: "error", reason: `DNS lookup failed for ${hostname}: ${reason}` };
+  }
+  // Reject empty answers up front so `resolved[0]` below is provably safe.
+  if (resolved.length === 0) {
+    return { kind: "error", reason: `DNS lookup returned no addresses for ${hostname}` };
+  }
+  // Every resolved address must be clean — a single blocked A/AAAA record rejects the URL.
+  for (const { address } of resolved) {
+    if (isBlockedAddress(address) && !isDevLoopback) {
+      return { kind: "error", reason: "hostname resolves to a disallowed IP range" };
+    }
+  }
+  const selected = resolved[0];
+  const family = toIpFamily(selected);
+  if (family === null) {
+    return { kind: "error", reason: `DNS lookup returned unsupported address family for ${hostname}` };
+  }
+  return { kind: "ok", url, address: selected.address, family };
+}
+
+// Undici Agent whose connector always "resolves" to the pre-screened address,
+// defeating DNS-rebinding between screening and connect.
+function createPinnedDispatcher(address: string, family: IpFamily): Agent {
+  return new Agent({
+    connect: {
+      lookup: (_hostname: string, _options: unknown, callback: LookupCallback) => {
+        callback(null, address, family);
+      },
+    },
+  });
+}
+
+function toIpFamily(resolved: LookupAddress): IpFamily | null {
+  if (resolved.family === 4 || isIPv4(resolved.address)) return 4;
+  if (resolved.family === 6 || isIPv6(resolved.address)) return 6;
+  return null;
+}
+
+function isBlockedAddress(address: string): boolean {
+  if (isIPv4(address)) return BLOCKED_RANGES.check(address, "ipv4");
+  if (isIPv6(address)) {
+    // Normalize IPv4-mapped (::ffff:a.b.c.d) and IPv4-compatible (::a.b.c.d)
+    // forms so they're tested against the IPv4 ruleset rather than matched
+    // opportunistically as IPv6.
+    const embedded = extractEmbeddedIPv4(address);
+    if (embedded !== null) return BLOCKED_RANGES.check(embedded, "ipv4");
+    return BLOCKED_RANGES.check(address, "ipv6");
+  }
+  // Not a valid IP literal — be conservative and block.
+  return true;
+}
+
+function extractEmbeddedIPv4(address: string): string | null {
+  const lower = address.toLowerCase();
+  // Dotted tail forms: `::ffff:a.b.c.d` (IPv4-mapped) and `::a.b.c.d` (deprecated compat).
+  for (const prefix of ["::ffff:", "::"]) {
+    if (lower.startsWith(prefix)) {
+      const tail = lower.slice(prefix.length);
+      if (isIPv4(tail)) return tail;
+    }
+  }
+  // Canonical hex form: `::ffff:7f00:1`. Expand the address to 8 fully-written groups,
+  // then take the last two groups as the IPv4 octets. We only treat it as embedded v4
+  // when the high 96 bits match ::ffff: (IPv4-mapped); IPv4-compatible (all zeros in
+  // the top 96 bits) collapses to addresses like `::` which are already handled.
+  const groups = expandIPv6Groups(lower);
+  if (!groups) return null;
+  const topAllZero = groups.slice(0, 5).every(g => g === 0);
+  if (topAllZero && groups[5] === 0xffff) {
+    const v4 = `${groups[6] >> 8}.${groups[6] & 0xff}.${groups[7] >> 8}.${groups[7] & 0xff}`;
+    if (isIPv4(v4)) return v4;
+  }
+  return null;
+}
+
+// Expand a canonical/compressed IPv6 literal (e.g. `::ffff:7f00:1`, `2001:db8::1`) to
+// its eight 16-bit groups. Returns null if the string isn't a valid IPv6 literal.
+function expandIPv6Groups(address: string): number[] | null {
+  if (!isIPv6(address)) return null;
+  const [head, tail] = address.split("::") as [string, string | undefined];
+  const headGroups = head === "" ? [] : head.split(":");
+  const tailGroups = tail === undefined ? [] : (tail === "" ?
[] : tail.split(":")); + const totalGroups = headGroups.length + tailGroups.length; + if (tail === undefined) { + if (totalGroups !== 8) return null; + } else if (totalGroups > 8) { + return null; + } + const zerosNeeded = tail === undefined ? 0 : 8 - totalGroups; + const fullGroups = [...headGroups, ...Array(zerosNeeded).fill("0"), ...tailGroups]; + const numeric = fullGroups.map(g => parseInt(g, 16)); + if (numeric.some(n => !Number.isFinite(n) || n < 0 || n > 0xffff)) return null; + return numeric; +} diff --git a/apps/backend/src/lib/seed-dummy-data.ts b/apps/backend/src/lib/seed-dummy-data.ts index a9ad484273..27372c4b6a 100644 --- a/apps/backend/src/lib/seed-dummy-data.ts +++ b/apps/backend/src/lib/seed-dummy-data.ts @@ -2059,6 +2059,28 @@ export async function seedDummyProject(options: SeedDummyProjectOptions): Promis .filter(([, app]) => !options.excludeAlphaApps || app.stage !== "alpha") .map(([key]) => [key, { enabled: true }])), }, + // Default trust policy pointing at the local mock OIDC IdP (apps/mock-oidc-idp). + // The demo at `examples/demo/oidc-federation-demo` mints tokens from this IdP and + // exchanges them here. Written with path notation so a seeded policy doesn't + // wipe sibling policies that might be set at a higher config override level. + // The id `mock-idp-demo` must not contain `.` (path separator); enforced by + // USER_SPECIFIED_ID_PATTERN on oidcTrustPolicyId at config validation time. + "oidcFederation.trustPolicies.mock-idp-demo": { + displayName: "Mock IdP (local dev)", + enabled: true, + issuerUrl: process.env.STACK_MOCK_OIDC_ISSUER_URL + ?? `http://localhost:${process.env.NEXT_PUBLIC_STACK_PORT_PREFIX ?? 
"81"}15`, + audiences: { + default: "stack-demo", + }, + claimConditions: { + stringLike: { + sub: { demo: "workload:*" }, + }, + stringEquals: {}, + }, + tokenTtlSeconds: 900, + }, }, }), overrideEnvironmentConfigOverride({ diff --git a/apps/backend/src/lib/server-access-token.test.tsx b/apps/backend/src/lib/server-access-token.test.tsx new file mode 100644 index 0000000000..2250720433 --- /dev/null +++ b/apps/backend/src/lib/server-access-token.test.tsx @@ -0,0 +1,77 @@ +import { KnownErrors } from "@stackframe/stack-shared"; +import { describe, expect, it } from "vitest"; +import { clampServerAccessTokenTtlSeconds, mintServerAccessToken, verifyServerAccessToken } from "./server-access-token"; + +describe("clampServerAccessTokenTtlSeconds", () => { + it("returns the default when undefined", () => { + expect(clampServerAccessTokenTtlSeconds(undefined)).toBe(900); + }); + it("clamps to min", () => { + expect(clampServerAccessTokenTtlSeconds(5)).toBe(30); + }); + it("clamps to max", () => { + expect(clampServerAccessTokenTtlSeconds(10_000)).toBe(3600); + }); + it("passes through in-range values", () => { + expect(clampServerAccessTokenTtlSeconds(1800)).toBe(1800); + }); + it("falls back to default for NaN", () => { + expect(clampServerAccessTokenTtlSeconds(Number.NaN)).toBe(900); + }); +}); + +describe("mintServerAccessToken + verifyServerAccessToken roundtrip", () => { + const federation = { + policyId: "policy-1", + issuer: "https://oidc.vercel.com/acme", + subject: "owner:acme:project:app:environment:production", + audience: "https://vercel.com/acme", + }; + + it("mints a token that verifies back out with the same project + federation metadata", async () => { + const minted = await mintServerAccessToken({ + projectId: "internal", + branchId: "main", + federation, + ttlSeconds: 60, + }); + expect(minted.ttlSeconds).toBe(60); + expect(typeof minted.accessToken).toBe("string"); + const verified = await verifyServerAccessToken(minted.accessToken, { projectId: 
"internal" }); + if (verified.status === "error") throw verified.error; + expect(verified.data.projectId).toBe("internal"); + expect(verified.data.branchId).toBe("main"); + expect(verified.data.federation).toEqual(federation); + }); + + it("rejects a token presented with the wrong projectId (cross-project replay guard)", async () => { + const minted = await mintServerAccessToken({ + projectId: "internal", + branchId: "main", + federation, + ttlSeconds: 60, + }); + const verified = await verifyServerAccessToken(minted.accessToken, { projectId: "other-project" }); + expect(verified.status).toBe("error"); + if (verified.status === "error") { + expect(verified.error).toBeInstanceOf(KnownErrors.UnparsableAccessToken); + } + }); + + it("rejects garbage", async () => { + const verified = await verifyServerAccessToken("not-a-real-token", { projectId: "internal" }); + expect(verified.status).toBe("error"); + }); + + it("returns the token's own branchId so the caller can compare against its asserted branch", async () => { + const minted = await mintServerAccessToken({ + projectId: "internal", + branchId: "main", + federation, + ttlSeconds: 60, + }); + const verified = await verifyServerAccessToken(minted.accessToken, { projectId: "internal" }); + if (verified.status === "error") throw verified.error; + expect(verified.data.branchId).toBe("main"); + }); +}); diff --git a/apps/backend/src/lib/server-access-token.tsx b/apps/backend/src/lib/server-access-token.tsx new file mode 100644 index 0000000000..ce71256462 --- /dev/null +++ b/apps/backend/src/lib/server-access-token.tsx @@ -0,0 +1,117 @@ +import { KnownErrors } from "@stackframe/stack-shared"; +import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env"; +import { signJWT, verifyJWT } from "@stackframe/stack-shared/dist/utils/jwt"; +import { Result } from "@stackframe/stack-shared/dist/utils/results"; +import { JOSEError, JWTExpired } from "jose/errors"; + +export const SERVER_ACCESS_TOKEN_SCOPE = "server" as 
const; + +const DEFAULT_TTL_SECONDS = 900; +const MIN_TTL_SECONDS = 30; +const MAX_TTL_SECONDS = 3600; + +function getIssuer(projectId: string): string { + const url = new URL(`/api/v1/projects/${encodeURIComponent(projectId)}/oidc-federation`, getEnvVariable("NEXT_PUBLIC_STACK_API_URL")); + return url.toString(); +} + +function getAudience(projectId: string): string { + return `${projectId}:server`; +} + +export function clampServerAccessTokenTtlSeconds(requested: number | undefined): number { + const value = requested ?? DEFAULT_TTL_SECONDS; + if (!Number.isFinite(value)) return DEFAULT_TTL_SECONDS; + return Math.min(MAX_TTL_SECONDS, Math.max(MIN_TTL_SECONDS, Math.floor(value))); +} + +export type ServerAccessTokenFederation = { + policyId: string, + issuer: string, + subject: string, + audience: string, +}; + +export type MintServerAccessTokenOptions = { + projectId: string, + branchId: string, + federation: ServerAccessTokenFederation, + /** Seconds. Clamped to [30, 3600]. */ + ttlSeconds: number, +}; + +export async function mintServerAccessToken(options: MintServerAccessTokenOptions): Promise<{ accessToken: string, expiresAtMs: number, ttlSeconds: number }> { + const ttl = clampServerAccessTokenTtlSeconds(options.ttlSeconds); + const accessToken = await signJWT({ + issuer: getIssuer(options.projectId), + audience: getAudience(options.projectId), + expirationTime: `${ttl}s`, + payload: { + sub: `${options.federation.issuer}|${options.federation.subject}`, + project_id: options.projectId, + branch_id: options.branchId, + scope: SERVER_ACCESS_TOKEN_SCOPE, + fed: { + policy_id: options.federation.policyId, + issuer: options.federation.issuer, + sub: options.federation.subject, + audience: options.federation.audience, + }, + }, + }); + return { accessToken, expiresAtMs: Date.now() + ttl * 1000, ttlSeconds: ttl }; +} + +export type VerifiedServerAccessToken = { + projectId: string, + branchId: string, + federation: ServerAccessTokenFederation, +}; + +export async 
function verifyServerAccessToken(token: string, options: { projectId: string }): Promise | InstanceType>> { + try { + const payload = await verifyJWT({ + allowedIssuers: [getIssuer(options.projectId)], + jwt: token, + }); + + if (payload.aud !== getAudience(options.projectId)) { + return Result.error(new KnownErrors.UnparsableAccessToken()); + } + if (payload.scope !== SERVER_ACCESS_TOKEN_SCOPE) { + return Result.error(new KnownErrors.UnparsableAccessToken()); + } + if (payload.project_id !== options.projectId) { + return Result.error(new KnownErrors.UnparsableAccessToken()); + } + const branchId = payload.branch_id; + const fed = payload.fed; + if (typeof branchId !== "string" + || typeof fed !== "object" || fed === null + || !("policy_id" in fed) || !("issuer" in fed) || !("sub" in fed) || !("audience" in fed)) { + return Result.error(new KnownErrors.UnparsableAccessToken()); + } + const { policy_id: policyId, issuer, sub: subject, audience } = fed; + if (typeof policyId !== "string" || typeof issuer !== "string" || typeof subject !== "string" || typeof audience !== "string") { + return Result.error(new KnownErrors.UnparsableAccessToken()); + } + return Result.ok({ + projectId: options.projectId, + branchId, + federation: { policyId, issuer, subject, audience }, + }); + } catch (error) { + if (error instanceof JWTExpired) { + return Result.error(new KnownErrors.AccessTokenExpired( + error.payload.exp ? 
new Date(error.payload.exp * 1000) : undefined, + options.projectId, + undefined, + undefined, + )); + } + if (error instanceof JOSEError) { + return Result.error(new KnownErrors.UnparsableAccessToken()); + } + throw error; + } +} diff --git a/apps/backend/src/route-handlers/smart-request.tsx b/apps/backend/src/route-handlers/smart-request.tsx index e25b71648f..19e073abdd 100644 --- a/apps/backend/src/route-handlers/smart-request.tsx +++ b/apps/backend/src/route-handlers/smart-request.tsx @@ -4,6 +4,7 @@ import { getUser, getUserIfOnGlobalPrismaClientQuery } from "@/app/api/latest/us import { checkApiKeySet, checkApiKeySetQuery } from "@/lib/internal-api-keys"; import { getProjectQuery, listManagedProjectIds } from "@/lib/projects"; import { DEFAULT_BRANCH_ID, Tenancy, getSoleTenancyFromProjectBranchQuery } from "@/lib/tenancies"; +import { verifyServerAccessToken } from "@/lib/server-access-token"; import { decodeAccessToken } from "@/lib/tokens"; import { globalPrismaClient, rawQueryAll } from "@/prisma-client"; import { KnownErrors } from "@stackframe/stack-shared"; @@ -160,10 +161,12 @@ async function parseBody(req: NextRequest, bodyBuffer: ArrayBuffer): Promise => { const projectId = req.headers.get("x-stack-project-id"); - const branchId = req.headers.get("x-stack-branch-id") ?? DEFAULT_BRANCH_ID; + const branchIdHeader = req.headers.get("x-stack-branch-id"); + const requestedBranchId = branchIdHeader ?? 
DEFAULT_BRANCH_ID; let requestType = req.headers.get("x-stack-access-type"); const publishableClientKey = req.headers.get("x-stack-publishable-client-key"); const secretServerKey = req.headers.get("x-stack-secret-server-key"); + const serverAccessToken = req.headers.get("x-stack-server-access-token"); const superSecretAdminKey = req.headers.get("x-stack-super-secret-admin-key"); const adminAccessToken = req.headers.get("x-stack-admin-access-token"); const accessToken = req.headers.get("x-stack-access-token"); @@ -172,7 +175,7 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque const allowRestrictedUser = allowAnonymousUser || req.headers.get("x-stack-allow-restricted-user") === "true"; // Ensure header combinations are valid - const eitherKeyOrToken = !!(publishableClientKey || secretServerKey || superSecretAdminKey || adminAccessToken); + const eitherKeyOrToken = !!(publishableClientKey || secretServerKey || superSecretAdminKey || adminAccessToken || serverAccessToken); if (!requestType && eitherKeyOrToken) { throw new KnownErrors.ProjectKeyWithoutAccessType(); } @@ -180,6 +183,10 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque if (!typedIncludes(["client", "server", "admin"] as const, requestType)) throw new KnownErrors.InvalidAccessType(requestType); if (!projectId) throw new KnownErrors.AccessTypeWithoutProjectId(requestType); + if (serverAccessToken && requestType !== "server") { + throw new StatusError(401, "x-stack-server-access-token is only valid with x-stack-access-type: server"); + } + const extractUserIdAndRefreshTokenIdFromAccessToken = async (options: { token: string, projectId: string, allowAnonymous: boolean, allowRestricted: boolean }) => { const result = await decodeAccessToken(options.token, { allowAnonymous: /* always true as we check for anonymous users later */ true, allowRestricted: /* always true as we check for restricted users later */ true }); if (result.status === 
"error") { @@ -238,6 +245,18 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque const { userId, refreshTokenId } = projectId && accessToken ? await extractUserIdAndRefreshTokenIdFromAccessToken({ token: accessToken, projectId, allowAnonymous: allowAnonymousUser, allowRestricted: allowRestrictedUser }) : { userId: null, refreshTokenId: null }; + const verifiedServerAccessToken = projectId && serverAccessToken && requestType === "server" + ? await verifyServerAccessToken(serverAccessToken, { projectId }) + : null; + if (verifiedServerAccessToken?.status === "ok" + && branchIdHeader != null + && verifiedServerAccessToken.data.branchId !== branchIdHeader) { + throw new KnownErrors.AccessTokenBranchMismatch(verifiedServerAccessToken.data.branchId, branchIdHeader); + } + const effectiveBranchId = verifiedServerAccessToken?.status === "ok" + ? verifiedServerAccessToken.data.branchId + : requestedBranchId; + // Prisma does a query for every function call by default, even if we batch them with transactions // Because smart route handlers are always called, we instead send over a single raw query that fetches all the // data at the same time, saving us a lot of requests @@ -248,12 +267,12 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque // the user from the global database and only fall back to the source-of-truth database if we later determine that // the user is not on the global database. const bundledQueries = { - userIfOnGlobalPrismaClient: userId ? getUserIfOnGlobalPrismaClientQuery(projectId, branchId, userId) : undefined, + userIfOnGlobalPrismaClient: userId ? getUserIfOnGlobalPrismaClientQuery(projectId, effectiveBranchId, userId) : undefined, isClientKeyValid: publishableClientKey && requestType === "client" ? checkApiKeySetQuery(projectId, { publishableClientKey }) : undefined, isServerKeyValid: secretServerKey && requestType === "server" ? 
checkApiKeySetQuery(projectId, { secretServerKey }) : undefined, isAdminKeyValid: superSecretAdminKey && requestType === "admin" ? checkApiKeySetQuery(projectId, { superSecretAdminKey }) : undefined, project: getProjectQuery(projectId), - tenancy: getSoleTenancyFromProjectBranchQuery(projectId, branchId, true), + tenancy: getSoleTenancyFromProjectBranchQuery(projectId, effectiveBranchId, true), }; const queriesResults = await rawQueryAll(globalPrismaClient, bundledQueries); const project = await queriesResults.project; @@ -291,6 +310,15 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque break; } case "server": { + if (serverAccessToken) { + if (!verifiedServerAccessToken) { + throw new KnownErrors.UnparsableAccessToken(); + } + if (verifiedServerAccessToken.status === "error") { + throw verifiedServerAccessToken.error; + } + break; + } if (!secretServerKey) throw new KnownErrors.ServerAuthenticationRequired(); if (isServerKeyValid.status === "error") throw new KnownErrors.InvalidSecretServerKey(projectId); break; @@ -308,7 +336,7 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque if (!tenancy) { // note that we only check branch existence here so you can't probe branches unless you have the project keys - throw new KnownErrors.BranchDoesNotExist(branchId); + throw new KnownErrors.BranchDoesNotExist(effectiveBranchId); } // As explained above, as a performance optimization we already fetch the user from the global database optimistically @@ -316,11 +344,11 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque // database instead. const user = tenancy.config.sourceOfTruth.type === "hosted" ? await queriesResults.userIfOnGlobalPrismaClient - : (userId ? await getUser({ userId, projectId, branchId }) : undefined); + : (userId ? 
await getUser({ userId, projectId, branchId: effectiveBranchId }) : undefined); return { project, - branchId, + branchId: effectiveBranchId, refreshTokenId: refreshTokenId ?? undefined, tenancy, user: user ?? undefined, diff --git a/apps/backend/src/route-handlers/smart-response.tsx b/apps/backend/src/route-handlers/smart-response.tsx index bf5bb5aa02..3ad3cdb986 100644 --- a/apps/backend/src/route-handlers/smart-response.tsx +++ b/apps/backend/src/route-handlers/smart-response.tsx @@ -141,6 +141,8 @@ export async function createResponse(req: NextRequest | // If the x-stack-override-error-status header is given, override 4xx statuses to 200. + // 5xx responses are deliberately not folded so that infrastructure (load balancers, + // retry/circuit-breaker logic, monitoring) can still observe real server errors. if (req?.headers.has("x-stack-override-error-status") && status >= 400 && status < 500) { status = 200; headers.set("x-stack-actual-status", [obj.statusCode.toString()]); diff --git a/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/project-keys/oidc-policy-dialog.tsx b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/project-keys/oidc-policy-dialog.tsx new file mode 100644 index 0000000000..f3ff374cbf --- /dev/null +++ b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/project-keys/oidc-policy-dialog.tsx @@ -0,0 +1,547 @@ +"use client"; + +import { DesignAlert, DesignButton, DesignInput, DesignPillToggle } from "@/components/design-components"; +import { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle, Label, Textarea, Typography } from "@/components/ui"; +import type { StackAdminApp } from "@stackframe/stack"; +import { ClockIcon, GlobeHemisphereWestIcon, LinkSimpleIcon, PlusIcon, ShieldCheckIcon, TrashIcon } from "@phosphor-icons/react"; +import { useEffect, useMemo, useRef, useState } from "react"; +import { + DiscoveryProbeResult, + DraftValidationIssue, + PolicyDraft, + draftToPolicy, + 
emptyDraft, + newAudienceRow, + parseClaimConditionsJson, + validateDraft, + type TrustPolicy, +} from "./oidc-policy-form"; +import { runAsynchronouslyWithAlert } from "@stackframe/stack-shared/dist/utils/promises"; + +type Preset = { + id: string, + label: string, + description: string, + seed: () => Pick, + exampleSnippet: (projectId: string) => string, +}; + +const stringifyClaims = (v: { stringEquals?: Record, stringLike?: Record }) => + JSON.stringify({ stringEquals: v.stringEquals ?? {}, stringLike: v.stringLike ?? {} }, null, 2); + +const PRESETS: Preset[] = [ + { + id: "vercel", + label: "Vercel", + description: "Team-scoped issuer like https://oidc.vercel.com/", + seed: () => ({ + displayName: "Vercel production", + issuerUrl: "https://oidc.vercel.com/YOUR_TEAM_SLUG", + audiences: [newAudienceRow("https://vercel.com/YOUR_TEAM_SLUG")], + claimConditionsJson: stringifyClaims({ + stringEquals: { environment: ["production"] }, + stringLike: { sub: ["owner:YOUR_TEAM_SLUG:project:*:environment:production"] }, + }), + }), + exampleSnippet: (projectId) => `import { StackServerApp, fromVercelOidc } from "@stackframe/stack"; + +export const stackServerApp = new StackServerApp({ + projectId: "${projectId}", + tokenStore: "nextjs-cookie", + auth: { oidcFederation: fromVercelOidc() }, +}); +`, + }, + { + id: "github-actions", + label: "GitHub Actions", + description: "Per-repo subject matching against token.actions.githubusercontent.com", + seed: () => ({ + displayName: "GitHub Actions", + issuerUrl: "https://token.actions.githubusercontent.com", + audiences: [newAudienceRow("https://github.com/YOUR_ORG")], + claimConditionsJson: stringifyClaims({ + stringLike: { sub: ["repo:YOUR_ORG/YOUR_REPO:*"] }, + }), + }), + exampleSnippet: (projectId) => `import { StackServerApp, fromGithubActionsOidc } from "@stackframe/stack"; + +export const stackServerApp = new StackServerApp({ + projectId: "${projectId}", + tokenStore: "memory", + auth: { oidcFederation: 
fromGithubActionsOidc({ audience: "https://github.com/YOUR_ORG" }) }, +}); +`, + }, + { + id: "gcp", + label: "GCP", + description: "Trust Google-signed identity tokens issued to a GCP workload", + seed: () => ({ + displayName: "GCP workload", + issuerUrl: "https://accounts.google.com", + audiences: [newAudienceRow("stack-auth")], + claimConditionsJson: stringifyClaims({ + stringLike: { email: ["*@YOUR_PROJECT.iam.gserviceaccount.com"] }, + }), + }), + exampleSnippet: (projectId) => `import { StackServerApp, fromGcpMetadata } from "@stackframe/stack"; + +export const stackServerApp = new StackServerApp({ + projectId: "${projectId}", + tokenStore: "memory", + auth: { oidcFederation: fromGcpMetadata({ audience: "stack-auth" }) }, +}); +`, + }, + { + id: "custom", + label: "Custom", + description: "Any OIDC-compliant issuer with a discovery URL", + seed: () => ({ + displayName: "Custom IdP", + issuerUrl: "https://issuer.example.com", + audiences: [newAudienceRow("")], + claimConditionsJson: stringifyClaims({}), + }), + exampleSnippet: (projectId) => `import { StackServerApp, fromOidcToken } from "@stackframe/stack"; + +export const stackServerApp = new StackServerApp({ + projectId: "${projectId}", + tokenStore: "memory", + auth: { oidcFederation: fromOidcToken(async () => process.env.MY_OIDC_TOKEN!) 
}, +}); +`, + }, +]; + +type StepId = "identity" | "audiences" | "conditions" | "token"; +const STEPS: Array<{ id: StepId, label: string }> = [ + { id: "identity", label: "Identity" }, + { id: "audiences", label: "Audiences" }, + { id: "conditions", label: "Conditions" }, + { id: "token", label: "Token" }, +]; + +function issueKindsForStep(step: StepId): DraftValidationIssue["kind"][] { + switch (step) { + case "identity": { + return ["missing-display-name", "missing-issuer"]; + } + case "audiences": { + return ["missing-audiences"]; + } + case "conditions": { + return ["invalid-claim-conditions-json"]; + } + case "token": { + return ["invalid-ttl"]; + } + } +} + +export function OidcPolicyDialog(props: { + open: boolean, + mode: "create" | "edit", + initial: PolicyDraft, + projectId: string, + onSave: (policy: TrustPolicy, draft: PolicyDraft) => Promise, + onClose: () => void, + adminApp: StackAdminApp, +}) { + const [draft, setDraft] = useState(props.initial); + const [preset, setPreset] = useState("custom"); + const [step, setStep] = useState("identity"); + const [discoveryState, setDiscoveryState] = useState({ kind: "idle" }); + const [saving, setSaving] = useState(false); + + // Reset transient state only on the false→true transition of `open`. Depending on + // `props.initial` directly would wipe in-progress edits every time the parent + // re-rendered with a fresh object reference. 
+ const wasOpenRef = useRef(false); + useEffect(() => { + if (props.open && !wasOpenRef.current) { + setDraft(props.initial); + setPreset("custom"); + setStep("identity"); + setDiscoveryState({ kind: "idle" }); + setSaving(false); + } + wasOpenRef.current = props.open; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [props.open]); + + const issues = validateDraft(draft); + const issueByKind = useMemo(() => new Map(issues.map(i => [i.kind, i])), [issues]); + const stepHasIssue = (s: StepId) => issueKindsForStep(s).some(k => issueByKind.has(k)); + + const applyPreset = (presetId: string) => { + setPreset(presetId); + const p = PRESETS.find(x => x.id === presetId); + if (!p) return; + setDraft(prev => ({ ...prev, ...p.seed() })); + setDiscoveryState({ kind: "idle" }); + }; + const selectedPreset = PRESETS.find(p => p.id === preset); + + const updateAudienceRow = (rowId: string, value: string) => { + setDraft(d => ({ ...d, audiences: d.audiences.map(a => a.rowId === rowId ? { ...a, value } : a) })); + }; + const addAudienceRow = () => setDraft(d => ({ ...d, audiences: [...d.audiences, newAudienceRow()] })); + const removeAudienceRow = (rowId: string) => { + setDraft(d => ({ ...d, audiences: d.audiences.length > 1 ? d.audiences.filter(a => a.rowId !== rowId) : d.audiences })); + }; + + const runDiscovery = async () => { + setDiscoveryState({ kind: "loading" }); + try { + const result = await props.adminApp.probeOidcDiscovery({ issuerUrl: draft.issuerUrl }); + setDiscoveryState( + result.status === "ok" + ? { kind: "ok", issuer: result.data.issuer, jwksUri: result.data.jwksUri } + : { kind: "error", reason: result.error.errorMessage }, + ); + } catch (err) { + setDiscoveryState({ kind: "error", reason: err instanceof Error ? 
err.message : "discovery probe failed" }); + } + }; + + const stepIndex = STEPS.findIndex(s => s.id === step); + const isLast = stepIndex === STEPS.length - 1; + const isFirst = stepIndex === 0; + const canAdvance = !stepHasIssue(step); + + const handleNext = () => { + if (!canAdvance) return; + setStep(STEPS[Math.min(stepIndex + 1, STEPS.length - 1)].id); + }; + const handleBack = () => { + setStep(STEPS[Math.max(stepIndex - 1, 0)].id); + }; + const handleSave = async () => { + if (issues.length > 0) { + const firstBad = STEPS.find(s => stepHasIssue(s.id)); + if (firstBad) setStep(firstBad.id); + return; + } + setSaving(true); + try { + await props.onSave(draftToPolicy(draft), draft); + } finally { + setSaving(false); + } + }; + + return ( + { if (!open) props.onClose(); }}> + + + + + {props.mode === "create" ? "Add OIDC trust policy" : "Edit OIDC trust policy"} + + + Let deployed workloads exchange a short-lived OIDC token for a Stack server access token. + + + +
+ {STEPS.map((s, i) => { + const active = s.id === step; + const hasIssue = stepHasIssue(s.id); + return ( + + ); + })} +
+ +
+ {step === "identity" && ( + runAsynchronouslyWithAlert(runDiscovery())} + /> + )} + {step === "audiences" && ( + + )} + {step === "conditions" && ( + + )} + {step === "token" && ( + + )} +
+ +
+ Cancel +
+ Back + {isLast ? ( + runAsynchronouslyWithAlert(handleSave())} disabled={saving || issues.length > 0}> + {saving ? "Saving…" : "Save"} + + ) : ( + Next + )} +
+
+
+
+ ); +} + +// ── Step components ─────────────────────────────────────────────────────── + +function Field({ id, label, children }: { id?: string, label: string, children: React.ReactNode }) { + return ( +
+ + {children} +
+ ); +} + +function IdentityStep(props: { + isCreate: boolean, + preset: string, + applyPreset: (id: string) => void, + selectedPreset: Preset | undefined, + draft: PolicyDraft, + setDraft: (fn: (d: PolicyDraft) => PolicyDraft) => void, + discoveryState: DiscoveryProbeResult | { kind: "idle" } | { kind: "loading" }, + setDiscoveryState: (s: DiscoveryProbeResult | { kind: "idle" } | { kind: "loading" }) => void, + runDiscovery: () => void, +}) { + return ( + <> + {props.isCreate && ( +
+ ({ id: p.id, label: p.label }))} + selected={props.preset} + onSelect={props.applyPreset} + size="sm" + gradient="default" + /> + {props.selectedPreset && ( + + {props.selectedPreset.description} + + )} +
+ )} +
+ + props.setDraft(d => ({ ...d, displayName: e.target.value }))} + placeholder="e.g. Vercel production" + /> + + +
+ } + value={props.draft.issuerUrl} + onChange={(e) => { + props.setDraft(d => ({ ...d, issuerUrl: e.target.value })); + props.setDiscoveryState({ kind: "idle" }); + }} + placeholder="https://oidc.example.com" + /> + + Discover + +
+ +
+
+ + ); +} + +function AudiencesStep(props: { + draft: PolicyDraft, + addAudienceRow: () => void, + removeAudienceRow: (rowId: string) => void, + updateAudienceRow: (rowId: string, value: string) => void, +}) { + return ( +
+
+ + + Add audience + +
+ + At least one required. The incoming token's aud claim must match any listed value. + +
+ {props.draft.audiences.map(a => ( +
+ } + value={a.value} + onChange={(e) => props.updateAudienceRow(a.rowId, e.target.value)} + placeholder="https://example.com/aud" + /> + props.removeAudienceRow(a.rowId)} + disabled={props.draft.audiences.length <= 1} + aria-label="Remove audience" + > + + +
+ ))} +
+
+ ); +} + +function ConditionsStep(props: { + draft: PolicyDraft, + setDraft: (fn: (d: PolicyDraft) => PolicyDraft) => void, +}) { + const parseResult = parseClaimConditionsJson(props.draft.claimConditionsJson); + return ( +
+ + JSON with stringEquals and/or stringLike. Each maps a claim to allowed values. Claims combine with AND; values within a claim combine with OR. stringLike supports * / ?. Empty = any validly-signed token with a matching audience passes. + +