From 61577b28cc1bc48b91a0bb653c40e65d62d33561 Mon Sep 17 00:00:00 2001 From: Claude Date: Fri, 8 May 2026 17:21:53 +0000 Subject: [PATCH 01/22] feat: prod/staging/local env setup with prod-to-target sync script Adds three Vite-mode-based environments so we can test changes against a staging Supabase project before touching prod, plus a one-way sync script that copies prod data into staging or local with PII scrubbed. - New scripts: dev:staging, dev:local, build:staging, db:sync:{staging,local} - scripts/sync-from-prod.sh: pg_dump public schema -> truncate target -> restore -> run anonymize.sql. Refuses to write back to prod, prompts before overwriting, never copies auth.users. - scripts/anonymize.sql: scrubs profiles.username, artist_notes.note_content, rotates group_invites.invite_token. - .env.{,staging,local}.example templates + scripts/.env.sync.example. - docs/ENVIRONMENTS.md walks through one-time setup and day-to-day usage. --- .env.example | 4 ++ .env.local.example | 5 ++ .env.staging.example | 4 ++ .gitignore | 5 +- docs/ENVIRONMENTS.md | 91 ++++++++++++++++++++++++++++++++ package.json | 5 ++ scripts/.env.sync.example | 11 ++++ scripts/anonymize.sql | 24 +++++++++ scripts/sync-from-prod.sh | 107 ++++++++++++++++++++++++++++++++++++++ 9 files changed, 255 insertions(+), 1 deletion(-) create mode 100644 .env.example create mode 100644 .env.local.example create mode 100644 .env.staging.example create mode 100644 docs/ENVIRONMENTS.md create mode 100644 scripts/.env.sync.example create mode 100644 scripts/anonymize.sql create mode 100755 scripts/sync-from-prod.sh diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..2d8e7b7b --- /dev/null +++ b/.env.example @@ -0,0 +1,4 @@ +# Production Supabase project (default — used by `pnpm run dev` and `pnpm run build`) +# Copy this file to `.env.local` and fill in the values. 
+VITE_SUPABASE_URL=https://your-prod-project.supabase.co +VITE_SUPABASE_PUBLISHABLE_KEY=your-prod-anon-key diff --git a/.env.local.example b/.env.local.example new file mode 100644 index 00000000..133b1612 --- /dev/null +++ b/.env.local.example @@ -0,0 +1,5 @@ +# Local Supabase (used by `pnpm run dev:local` against `supabase start`) +# Copy this file to `.env.local.local`. The default values below match the +# Supabase CLI's local stack — change them only if you customised your setup. +VITE_SUPABASE_URL=http://127.0.0.1:54321 +VITE_SUPABASE_PUBLISHABLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0 diff --git a/.env.staging.example b/.env.staging.example new file mode 100644 index 00000000..eb047aa8 --- /dev/null +++ b/.env.staging.example @@ -0,0 +1,4 @@ +# Staging Supabase project (used by `pnpm run dev:staging` and `pnpm run build:staging`) +# Copy this file to `.env.staging.local` and fill in the values. +VITE_SUPABASE_URL=https://your-staging-project.supabase.co +VITE_SUPABASE_PUBLISHABLE_KEY=your-staging-anon-key diff --git a/.gitignore b/.gitignore index 9cd36c1e..9a3df1b3 100644 --- a/.gitignore +++ b/.gitignore @@ -37,4 +37,7 @@ supabase/.branches .vercel dev-dist -dump.sql \ No newline at end of file +dump.sql + +# Sync script credentials (DB connection strings — never commit) +scripts/.env.sync \ No newline at end of file diff --git a/docs/ENVIRONMENTS.md b/docs/ENVIRONMENTS.md new file mode 100644 index 00000000..376a7103 --- /dev/null +++ b/docs/ENVIRONMENTS.md @@ -0,0 +1,91 @@ +# Environments + +UpLine runs against three Supabase environments. + +| Env | Project | Used by | Auto-pause? 
|
+| --- | --- | --- | --- |
+| **prod** | `qssmazlqrmxiudxckxvi` | `pnpm run dev`, `pnpm run build` | no |
+| **staging** | a second free Supabase project | `pnpm run dev:staging`, `pnpm run build:staging` | yes (after 7 days idle) |
+| **local** | Supabase CLI (`supabase start`) | `pnpm run dev:local`, e2e tests | n/a |
+
+The frontend reads `VITE_SUPABASE_URL` / `VITE_SUPABASE_PUBLISHABLE_KEY` from a Vite env file picked by `--mode`. Vite loads them in this order (later overrides earlier):
+
+```
+.env -> .env.[mode] -> .env.local -> .env.[mode].local
+```
+
+`.env.local` and `.env.[mode].local` are gitignored. `.env.example`, `.env.staging.example`, and `.env.local.example` are committed as templates.
+
+## One-time setup
+
+### 1. Create the staging Supabase project
+
+In the Supabase dashboard, create a second project (free tier is fine). Name it something like `upline-staging`.
+
+After it's created:
+
+- Apply the same migrations as prod:
+  ```bash
+  supabase link --project-ref <staging-project-ref>
+  supabase db push
+  ```
+- Copy the project URL and anon key from **Project Settings -> API**.
+
+### 2. Wire up env files
+
+```bash
+cp .env.example .env.local # prod creds
+cp .env.staging.example .env.staging.local # staging creds
+cp .env.local.example .env.local.local # local Supabase (optional)
+```
+
+Fill in the URLs and anon keys. Anon keys are safe in the browser bundle, but the convention here keeps them out of git so each developer can swap targets.
+
+### 3. Wire up the sync script
+
+```bash
+cp scripts/.env.sync.example scripts/.env.sync
+```
+
+Open `scripts/.env.sync` and paste the **direct Postgres connection strings** (Project Settings -> Database -> Connection string -> URI) for prod and staging. This file is gitignored.
+
+You'll need `pg_dump` and `psql` on your machine — both ship with the Postgres client tools (`brew install libpq` on macOS, `apt-get install postgresql-client` on Debian/Ubuntu).
## Day-to-day
+
+### Run the app against an env
+
+```bash
+pnpm run dev # prod (default — be careful, this is real data)
+pnpm run dev:staging # staging
+pnpm run dev:local # local supabase
+```
+
+### Sync prod data into staging or local
+
+```bash
+pnpm run db:sync:staging # overwrites staging public schema with prod data
+pnpm run db:sync:local # overwrites local public schema with prod data
+```
+
+Both prompt for confirmation before touching the target. The script:
+
+1. `pg_dump`s the `public` schema (data only) from prod.
+2. `TRUNCATE`s the target's `public` tables.
+3. Restores the dump.
+4. Runs `scripts/anonymize.sql` to scrub PII (`profiles.username`, `artist_notes.note_content`, `group_invites.invite_token`).
+
+`auth.users` is **never** copied — keep test accounts on staging/local separate from real users. A consequence is that `user_id` columns in synced data point to UUIDs that don't exist in the target's `auth.users`. That's fine for read-only browsing of data, but writes that join against `auth.users` (e.g. RLS checks) will only work for rows owned by your test user.
+
+### When PII shape changes
+
+If you add a column that holds free-form user input or a personal identifier, **edit `scripts/anonymize.sql`** to scrub it. The dump-and-anonymize approach is only safe as long as that file is kept current.
+
+### Promoting a migration to staging, then prod
+
+1. Develop locally (`supabase migration new …`, edit, `supabase db reset`).
+2. Push to staging: `supabase link --project-ref <staging-project-ref> && supabase db push`.
+3. Verify in the staging app (`pnpm run dev:staging`).
+4. Push to prod: `supabase link --project-ref qssmazlqrmxiudxckxvi && supabase db push`.
+
+Re-link to whichever project you intend to operate on — the Supabase CLI keeps a single linked project at a time.
diff --git a/package.json b/package.json index e7b03d77..bad112e3 100644 --- a/package.json +++ b/package.json @@ -6,8 +6,11 @@ "type": "module", "scripts": { "dev": "vite", + "dev:staging": "vite --mode staging", + "dev:local": "vite --mode local", "build": "vite build", "build:dev": "vite build --mode development", + "build:staging": "vite build --mode staging", "lint": "oxlint . --fix", "format": "prettier --write .", "format:check": "prettier --check .", @@ -24,6 +27,8 @@ "test:setup:full": "bash scripts/setup-local-supabase.sh", "types:generate": "supabase gen types typescript --project-id qssmazlqrmxiudxckxvi > src/integrations/supabase/types.ts", "types:generate:local": "supabase gen types typescript --local > src/integrations/supabase/types.ts", + "db:sync:staging": "bash scripts/sync-from-prod.sh staging", + "db:sync:local": "bash scripts/sync-from-prod.sh local", "typecheck": "tsc --noEmit --project tsconfig.app.json", "prepare": "husky" }, diff --git a/scripts/.env.sync.example b/scripts/.env.sync.example new file mode 100644 index 00000000..fc67e511 --- /dev/null +++ b/scripts/.env.sync.example @@ -0,0 +1,11 @@ +# Copy this file to scripts/.env.sync and fill in values. +# scripts/.env.sync is gitignored — never commit real connection strings. +# +# Find these in Supabase Dashboard -> Project Settings -> Database +# -> Connection string -> URI (use the "Direct connection" string, not the pooler). + +PROD_DB_URL="postgresql://postgres:PASSWORD@db.qssmazlqrmxiudxckxvi.supabase.co:5432/postgres" +STAGING_DB_URL="postgresql://postgres:PASSWORD@db.YOUR-STAGING-REF.supabase.co:5432/postgres" + +# Optional — defaults to the Supabase CLI's local DB. +# LOCAL_DB_URL="postgresql://postgres:postgres@127.0.0.1:54322/postgres" diff --git a/scripts/anonymize.sql b/scripts/anonymize.sql new file mode 100644 index 00000000..8fe8c643 --- /dev/null +++ b/scripts/anonymize.sql @@ -0,0 +1,24 @@ +-- Scrubs PII from a freshly-restored prod dump. 
+-- Run after restoring data into staging or local. Idempotent. +-- +-- Notes on what is and isn't synced: +-- * auth.users is NOT copied. Test accounts on the target are kept as-is. +-- This means user_id columns may reference UUIDs that don't exist in the +-- target's auth.users — which is fine for read-only testing of public data. +-- * Any new column that holds free-text user input should be added here. + +BEGIN; + +-- profiles.username may contain a real handle. Replace with a synthetic value. +UPDATE public.profiles + SET username = 'user_' || substring(id::text, 1, 8); + +-- artist_notes.note_content is free-form user text. Wipe it. +UPDATE public.artist_notes + SET note_content = '[redacted]'; + +-- group_invites.invite_token is a live secret — rotate so old links are dead. +UPDATE public.group_invites + SET invite_token = encode(gen_random_bytes(16), 'hex'); + +COMMIT; diff --git a/scripts/sync-from-prod.sh b/scripts/sync-from-prod.sh new file mode 100755 index 00000000..af47581b --- /dev/null +++ b/scripts/sync-from-prod.sh @@ -0,0 +1,107 @@ +#!/usr/bin/env bash +# +# Sync production data into staging or local for testing. +# +# Usage: +# pnpm run db:sync:staging +# pnpm run db:sync:local +# +# What it does: +# 1. pg_dump the `public` schema (data only) from PROD_DB_URL. +# 2. TRUNCATE the target's public tables and restore the dump. +# 3. Run scripts/anonymize.sql against the target to scrub PII. +# 4. Leaves auth.users untouched on the target — create test users separately. 
+#
+# Required env vars (put them in scripts/.env.sync, which is gitignored):
+#   PROD_DB_URL Postgres connection string for the prod project
+#               (Supabase Dashboard -> Project Settings -> Database -> Connection string -> URI)
+#   STAGING_DB_URL Same, for the staging project
+#   LOCAL_DB_URL Defaults to the Supabase CLI local DB if unset
+#
+set -euo pipefail
+
+TARGET="${1:-}"
+if [[ -z "$TARGET" ]]; then
+  echo "Usage: $0 <staging|local>" >&2
+  exit 1
+fi
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ENV_FILE="$SCRIPT_DIR/.env.sync"
+if [[ -f "$ENV_FILE" ]]; then
+  set -a
+  # shellcheck disable=SC1090
+  source "$ENV_FILE"
+  set +a
+fi
+
+: "${PROD_DB_URL:?PROD_DB_URL is required (see scripts/.env.sync.example)}"
+
+case "$TARGET" in
+  staging)
+    : "${STAGING_DB_URL:?STAGING_DB_URL is required for staging sync}"
+    TARGET_URL="$STAGING_DB_URL"
+    ;;
+  local)
+    TARGET_URL="${LOCAL_DB_URL:-postgresql://postgres:postgres@127.0.0.1:54322/postgres}"
+    ;;
+  *)
+    echo "Unknown target: $TARGET (expected: staging | local)" >&2
+    exit 1
+    ;;
+esac
+
+if [[ "$TARGET_URL" == "$PROD_DB_URL" ]]; then
+  echo "Refusing to run: target URL equals PROD_DB_URL." >&2
+  exit 1
+fi
+
+echo "About to OVERWRITE the public schema in:"
+echo " $TARGET_URL"
+echo "with data from prod."
+read -r -p "Type 'yes' to continue: " CONFIRM
+if [[ "$CONFIRM" != "yes" ]]; then
+  echo "Aborted."
+ exit 1 +fi + +DUMP_FILE="$(mktemp -t upline-prod-dump.XXXXXX.sql)" +trap 'rm -f "$DUMP_FILE"' EXIT + +echo "Dumping public schema data from prod…" +pg_dump \ + --no-owner \ + --no-privileges \ + --data-only \ + --schema=public \ + --disable-triggers \ + --column-inserts=false \ + --file="$DUMP_FILE" \ + "$PROD_DB_URL" + +echo "Truncating public tables in target…" +psql "$TARGET_URL" -v ON_ERROR_STOP=1 <<'SQL' +DO $$ +DECLARE + stmt TEXT; +BEGIN + SELECT 'TRUNCATE TABLE ' || string_agg(format('%I.%I', schemaname, tablename), ', ') + || ' RESTART IDENTITY CASCADE' + INTO stmt + FROM pg_tables + WHERE schemaname = 'public'; + IF stmt IS NOT NULL THEN + EXECUTE stmt; + END IF; +END $$; +SQL + +echo "Restoring dump into target…" +psql "$TARGET_URL" -v ON_ERROR_STOP=1 -f "$DUMP_FILE" + +echo "Running anonymizer…" +psql "$TARGET_URL" -v ON_ERROR_STOP=1 -f "$SCRIPT_DIR/anonymize.sql" + +echo "Done. Target has prod data with PII scrubbed." +echo "Note: auth.users on the target was not modified. Sign in there with" +echo " whatever test accounts you've already created." From f162cd2f10c2d3c78a42bf5152fc9f1332dc89cc Mon Sep 17 00:00:00 2001 From: Claude Date: Fri, 8 May 2026 17:28:11 +0000 Subject: [PATCH 02/22] feat(sync): sync auth.users with email rewriting Adds an auth.users sync step that runs before the public schema dump so synced FK references resolve and RLS-gated reads/writes work as a test user. - Pulls (id, email, email_confirmed_at, created_at, updated_at, aud, role) from prod via psql \copy into a temp table on the target. - Upserts into auth.users with ON CONFLICT (id) DO NOTHING so existing test accounts on staging/local are preserved. - Synced rows get email = user-@example.test, no password, no OAuth metadata, is_super_admin = false. They can't sign in. - Skip with SYNC_AUTH=0. Also fixes a bogus pg_dump flag (--column-inserts=false isn't valid). 
--- docs/ENVIRONMENTS.md | 21 +++++++--- scripts/sync-from-prod.sh | 81 +++++++++++++++++++++++++++++++++------ 2 files changed, 85 insertions(+), 17 deletions(-) diff --git a/docs/ENVIRONMENTS.md b/docs/ENVIRONMENTS.md index 376a7103..357de1e0 100644 --- a/docs/ENVIRONMENTS.md +++ b/docs/ENVIRONMENTS.md @@ -70,12 +70,23 @@ pnpm run db:sync:local # overwrites local public schema with prod data Both prompt for confirmation before touching the target. The script: -1. `pg_dump`s the `public` schema (data only) from prod. -2. `TRUNCATE`s the target's `public` tables. -3. Restores the dump. -4. Runs `scripts/anonymize.sql` to scrub PII (`profiles.username`, `artist_notes.note_content`, `group_invites.invite_token`). +1. **Syncs `auth.users`** from prod into a temp table on the target, then upserts with `ON CONFLICT (id) DO NOTHING`. New rows have: + - `email` rewritten to `user-@example.test` + - no `encrypted_password` (synced users can't sign in) + - `raw_user_meta_data` / `raw_app_meta_data` stripped of OAuth/profile info + - `is_super_admin = false` -`auth.users` is **never** copied — keep test accounts on staging/local separate from real users. A consequence is that `user_id` columns in synced data point to UUIDs that don't exist in the target's `auth.users`. That's fine for read-only browsing of data, but writes that join against `auth.users` (e.g. RLS checks) will only work for rows owned by your test user. + Your existing test accounts on the target are **preserved** because of the `ON CONFLICT` clause. They keep their original emails and passwords, so you can still log in as them. +2. `pg_dump`s the `public` schema (data only) from prod. +3. `TRUNCATE`s the target's `public` tables. +4. Restores the dump. FK references from `public.*` to `auth.users(id)` now resolve, so RLS policies that check `auth.uid()` work for any user — log in as a test account and you can read/write any synced row that user owns. +5. 
Runs `scripts/anonymize.sql` to scrub remaining PII (`profiles.username`, `artist_notes.note_content`, `group_invites.invite_token`). + +To skip auth syncing (public schema only): + +```bash +SYNC_AUTH=0 pnpm run db:sync:staging +``` ### When PII shape changes diff --git a/scripts/sync-from-prod.sh b/scripts/sync-from-prod.sh index af47581b..1924d746 100755 --- a/scripts/sync-from-prod.sh +++ b/scripts/sync-from-prod.sh @@ -7,10 +7,13 @@ # pnpm run db:sync:local # # What it does: -# 1. pg_dump the `public` schema (data only) from PROD_DB_URL. -# 2. TRUNCATE the target's public tables and restore the dump. -# 3. Run scripts/anonymize.sql against the target to scrub PII. -# 4. Leaves auth.users untouched on the target — create test users separately. +# 1. Sync auth.users from prod into target. Existing target users are preserved +# (ON CONFLICT DO NOTHING); newly inserted rows have their emails rewritten +# to user-@example.test, no password, no OAuth metadata. +# 2. pg_dump the `public` schema (data only) from PROD_DB_URL, TRUNCATE the +# target's public tables, and restore the dump. +# 3. Run scripts/anonymize.sql against the target to scrub remaining PII in +# the public schema. # # Required env vars (put them in scripts/.env.sync, which is gitignored): # PROD_DB_URL Postgres connection string for the prod project @@ -18,6 +21,8 @@ # STAGING_DB_URL Same, for the staging project # LOCAL_DB_URL Defaults to the Supabase CLI local DB if unset # +# Skip auth syncing with: SYNC_AUTH=0 pnpm run db:sync:staging +# set -euo pipefail TARGET="${1:-}" @@ -36,6 +41,7 @@ if [[ -f "$ENV_FILE" ]]; then fi : "${PROD_DB_URL:?PROD_DB_URL is required (see scripts/.env.sync.example)}" +SYNC_AUTH="${SYNC_AUTH:-1}" case "$TARGET" in staging) @@ -58,24 +64,72 @@ fi echo "About to OVERWRITE the public schema in:" echo " $TARGET_URL" -echo "with data from prod." +if [[ "$SYNC_AUTH" == "1" ]]; then + echo "and upsert anonymized auth.users from prod (existing target users kept)." 
+fi read -r -p "Type 'yes' to continue: " CONFIRM if [[ "$CONFIRM" != "yes" ]]; then echo "Aborted." exit 1 fi -DUMP_FILE="$(mktemp -t upline-prod-dump.XXXXXX.sql)" -trap 'rm -f "$DUMP_FILE"' EXIT +TMP_DIR="$(mktemp -d -t upline-sync.XXXXXX)" +trap 'rm -rf "$TMP_DIR"' EXIT + +if [[ "$SYNC_AUTH" == "1" ]]; then + echo "Syncing auth.users from prod (anonymized)…" + AUTH_CSV="$TMP_DIR/auth-users.csv" + + psql "$PROD_DB_URL" -v ON_ERROR_STOP=1 -At -c "\ + \\copy ( \ + SELECT id, email, email_confirmed_at, created_at, updated_at, aud, role \ + FROM auth.users \ + ) TO '$AUTH_CSV' WITH (FORMAT csv)" + + psql "$TARGET_URL" -v ON_ERROR_STOP=1 -v authcsv="$AUTH_CSV" <<'SQL' +CREATE TEMP TABLE _sync_auth_users ( + id uuid PRIMARY KEY, + email varchar, + email_confirmed_at timestamptz, + created_at timestamptz, + updated_at timestamptz, + aud varchar, + role varchar +); + +\copy _sync_auth_users FROM :'authcsv' WITH (FORMAT csv) + +INSERT INTO auth.users ( + instance_id, id, aud, role, email, + email_confirmed_at, created_at, updated_at, + raw_app_meta_data, raw_user_meta_data, + is_super_admin +) +SELECT + '00000000-0000-0000-0000-000000000000'::uuid, + s.id, + COALESCE(s.aud, 'authenticated'), + COALESCE(s.role, 'authenticated'), + 'user-' || substring(s.id::text, 1, 8) || '@example.test', + s.email_confirmed_at, + s.created_at, + s.updated_at, + '{"provider":"synced","providers":["synced"]}'::jsonb, + '{}'::jsonb, + false +FROM _sync_auth_users s +ON CONFLICT (id) DO NOTHING; +SQL +fi echo "Dumping public schema data from prod…" +DUMP_FILE="$TMP_DIR/public-dump.sql" pg_dump \ --no-owner \ --no-privileges \ --data-only \ --schema=public \ --disable-triggers \ - --column-inserts=false \ --file="$DUMP_FILE" \ "$PROD_DB_URL" @@ -99,9 +153,12 @@ SQL echo "Restoring dump into target…" psql "$TARGET_URL" -v ON_ERROR_STOP=1 -f "$DUMP_FILE" -echo "Running anonymizer…" +echo "Running anonymizer on public schema…" psql "$TARGET_URL" -v ON_ERROR_STOP=1 -f "$SCRIPT_DIR/anonymize.sql" 
-echo "Done. Target has prod data with PII scrubbed."
-echo "Note: auth.users on the target was not modified. Sign in there with"
-echo " whatever test accounts you've already created."
+echo "Done."
+if [[ "$SYNC_AUTH" == "1" ]]; then
+  echo "auth.users: synced rows have anonymized emails (user-<id>@example.test)"
+  echo " with no password — they exist for FK integrity, not for sign-in."
+  echo " Existing test accounts on the target were preserved."
+fi

From 56216bd281509d402fe2432986e7088f58dd6e9e Mon Sep 17 00:00:00 2001
From: Claude
Date: Fri, 8 May 2026 17:40:37 +0000
Subject: [PATCH 03/22] feat(ci): auto-push migrations to staging on develop,
 prod on main

Adds .github/workflows/db-migrate.yml that runs on push to develop or main when supabase/migrations/** changes. Resolves target from the branch (or workflow_dispatch input), then runs supabase link + supabase db push against the matching project.

Uses GitHub environments (staging, production) so prod migrations can later be gated behind required-reviewer approval via repo settings.

Required secrets: SUPABASE_ACCESS_TOKEN, {PROD,STAGING}_PROJECT_REF, {PROD,STAGING}_DB_PASSWORD.

Documented in docs/ENVIRONMENTS.md.
--- .github/workflows/db-migrate.yml | 51 ++++++++++++++++++++++++++++++++ docs/ENVIRONMENTS.md | 41 ++++++++++++++++++++++--- 2 files changed, 88 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/db-migrate.yml diff --git a/.github/workflows/db-migrate.yml b/.github/workflows/db-migrate.yml new file mode 100644 index 00000000..e532559e --- /dev/null +++ b/.github/workflows/db-migrate.yml @@ -0,0 +1,51 @@ +name: DB Migrate + +on: + push: + branches: [main, develop] + paths: + - "supabase/migrations/**" + - ".github/workflows/db-migrate.yml" + workflow_dispatch: + inputs: + target: + description: "Which environment to migrate" + required: true + type: choice + options: [staging, prod] + +jobs: + migrate: + name: Push migrations + runs-on: ubuntu-latest + timeout-minutes: 10 + environment: ${{ (github.ref == 'refs/heads/main' || github.event.inputs.target == 'prod') && 'production' || 'staging' }} + + steps: + - uses: actions/checkout@v4 + + - uses: supabase/setup-cli@v1 + with: + version: latest + + - name: Resolve target + id: target + run: | + if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then + TARGET="${{ github.event.inputs.target }}" + elif [[ "${{ github.ref }}" == "refs/heads/main" ]]; then + TARGET="prod" + else + TARGET="staging" + fi + echo "target=$TARGET" >> "$GITHUB_OUTPUT" + echo "Migrating: $TARGET" + + - name: Push migrations + env: + SUPABASE_ACCESS_TOKEN: ${{ secrets.SUPABASE_ACCESS_TOKEN }} + PROJECT_REF: ${{ steps.target.outputs.target == 'prod' && secrets.PROD_PROJECT_REF || secrets.STAGING_PROJECT_REF }} + DB_PASSWORD: ${{ steps.target.outputs.target == 'prod' && secrets.PROD_DB_PASSWORD || secrets.STAGING_DB_PASSWORD }} + run: | + supabase link --project-ref "$PROJECT_REF" --password "$DB_PASSWORD" + supabase db push --password "$DB_PASSWORD" diff --git a/docs/ENVIRONMENTS.md b/docs/ENVIRONMENTS.md index 357de1e0..16d1abee 100644 --- a/docs/ENVIRONMENTS.md +++ b/docs/ENVIRONMENTS.md @@ -94,9 +94,42 @@ If you add 
a column that holds free-form user input or a personal identifier, ** ### Promoting a migration to staging, then prod -1. Develop locally (`supabase migration new …`, edit, `supabase db reset`). -2. Push to staging: `supabase link --project-ref && supabase db push`. -3. Verify in the staging app (`pnpm run dev:staging`). -4. Push to prod: `supabase link --project-ref qssmazlqrmxiudxckxvi && supabase db push`. +Branching model: + +- `develop` — staging branch. Auto-deployed by Vercel; auto-migrated by `.github/workflows/db-migrate.yml`. +- `main` — prod branch. Auto-deployed by Vercel; auto-migrated by the same workflow. + +Day-to-day flow: + +1. Develop locally on a feature branch (`supabase migration new …`, edit, `supabase db reset` to verify). +2. PR into `develop`. On merge: + - Vercel deploys the staging URL with staging env vars. + - If your PR touched `supabase/migrations/**`, the **DB Migrate** workflow runs and pushes migrations to the staging Supabase project. +3. Verify in staging. +4. PR `develop` → `main`. On merge, the same workflow runs against prod. + +You can also trigger the workflow manually from the Actions tab (workflow_dispatch) if you need to re-run a migration push. + +### CI secrets required + +The DB-migrate workflow reads these from GitHub Actions secrets (Settings -> Secrets and variables -> Actions): + +| Secret | Where to find it | +| --- | --- | +| `SUPABASE_ACCESS_TOKEN` | https://supabase.com/dashboard/account/tokens — generate a personal access token | +| `PROD_PROJECT_REF` | `qssmazlqrmxiudxckxvi` (already public, but kept as a secret for symmetry with staging) | +| `STAGING_PROJECT_REF` | the staging project's ref (visible in its dashboard URL) | +| `PROD_DB_PASSWORD` | Project Settings -> Database -> Database password | +| `STAGING_DB_PASSWORD` | Same, for the staging project | + +The workflow also references `staging` and `production` GitHub **environments**. 
They don't need any settings to function, but you can add required-reviewer protection rules to the `production` environment (Settings -> Environments) to gate prod migrations behind manual approval.
+
+### Manual fallback
+
+If you need to push a migration without going through CI:
+
+```bash
+supabase link --project-ref <project-ref> && supabase db push
+```
 
 Re-link to whichever project you intend to operate on — the Supabase CLI keeps a single linked project at a time.

From e0d8ead4a7bbaa3227a2b345312658b84d52e868 Mon Sep 17 00:00:00 2001
From: Claude
Date: Sat, 9 May 2026 13:08:16 +0000
Subject: =?UTF-8?q?chore(ci):=20drop=20develop=20branch=20?=
 =?UTF-8?q?=E2=80=94=20main-only=20flow?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

PRs target main directly. Vercel preview deploys (any non-main branch) point at staging Supabase, so testing happens on the preview URL before merge.

- db-migrate.yml: only auto-runs on push to main (-> prod). Use workflow_dispatch with target=staging to push a PR's migrations to staging before merging.
- lint/unit-tests/e2e-tests: drop develop from triggers.
- docs/ENVIRONMENTS.md: rewrite the promotion flow + flag the staging drift caveat (abandoned PRs leave migrations on staging) and recommend required-reviewer protection on the production environment.
--- .github/workflows/db-migrate.yml | 6 ++---- .github/workflows/e2e-tests.yml | 4 ++-- .github/workflows/lint.yml | 4 ++-- .github/workflows/unit-tests.yml | 4 ++-- docs/ENVIRONMENTS.md | 20 +++++++++----------- 5 files changed, 17 insertions(+), 21 deletions(-) diff --git a/.github/workflows/db-migrate.yml b/.github/workflows/db-migrate.yml index e532559e..e2b62379 100644 --- a/.github/workflows/db-migrate.yml +++ b/.github/workflows/db-migrate.yml @@ -2,7 +2,7 @@ name: DB Migrate on: push: - branches: [main, develop] + branches: [main] paths: - "supabase/migrations/**" - ".github/workflows/db-migrate.yml" @@ -33,10 +33,8 @@ jobs: run: | if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then TARGET="${{ github.event.inputs.target }}" - elif [[ "${{ github.ref }}" == "refs/heads/main" ]]; then - TARGET="prod" else - TARGET="staging" + TARGET="prod" fi echo "target=$TARGET" >> "$GITHUB_OUTPUT" echo "Migrating: $TARGET" diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index c028f23f..d5941974 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -2,9 +2,9 @@ name: E2E Tests on: push: - branches: [main, develop] + branches: [main] pull_request: - branches: [main, develop] + branches: [main] jobs: test: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 035c62e4..5d953bdc 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -2,9 +2,9 @@ name: Lint on: push: - branches: [main, develop] + branches: [main] pull_request: - branches: [main, develop] + branches: [main] jobs: lint: diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 50c551d4..0c05fae2 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -2,9 +2,9 @@ name: Unit Tests on: push: - branches: [main, develop] + branches: [main] pull_request: - branches: [main, develop] + branches: [main] jobs: test: diff --git 
a/docs/ENVIRONMENTS.md b/docs/ENVIRONMENTS.md index 16d1abee..3297b567 100644 --- a/docs/ENVIRONMENTS.md +++ b/docs/ENVIRONMENTS.md @@ -94,21 +94,19 @@ If you add a column that holds free-form user input or a personal identifier, ** ### Promoting a migration to staging, then prod -Branching model: - -- `develop` — staging branch. Auto-deployed by Vercel; auto-migrated by `.github/workflows/db-migrate.yml`. -- `main` — prod branch. Auto-deployed by Vercel; auto-migrated by the same workflow. +Branching model: there is only `main`. Feature branches are PR'd directly to it. Vercel preview deploys (any non-`main` branch) point at the **staging** Supabase project, so you test the PR against staging before merging. Day-to-day flow: 1. Develop locally on a feature branch (`supabase migration new …`, edit, `supabase db reset` to verify). -2. PR into `develop`. On merge: - - Vercel deploys the staging URL with staging env vars. - - If your PR touched `supabase/migrations/**`, the **DB Migrate** workflow runs and pushes migrations to the staging Supabase project. -3. Verify in staging. -4. PR `develop` → `main`. On merge, the same workflow runs against prod. +2. Open a PR against `main`. Vercel auto-deploys a preview URL wired to staging Supabase. +3. **If the PR includes a migration**, push it to staging before testing the preview: + - GitHub → **Actions** → **DB Migrate** → **Run workflow** → target = `staging`. + - The workflow runs `supabase db push` on the *PR branch's* migration set, so staging gets the new migration. +4. Test the preview URL. +5. Merge to `main`. The **DB Migrate** workflow auto-runs against prod (only when `supabase/migrations/**` actually changed). -You can also trigger the workflow manually from the Actions tab (workflow_dispatch) if you need to re-run a migration push. +Caveat about staging drift: because staging migrations are pushed from PR branches, an abandoned PR can leave a stray migration applied to staging that no longer exists in `main`. 
Postgres can't roll it back automatically. If staging gets confused, the recovery is `supabase db reset --linked` (with staging linked) — destructive, but staging is meant to be disposable. ### CI secrets required @@ -122,7 +120,7 @@ The DB-migrate workflow reads these from GitHub Actions secrets (Settings -> Sec | `PROD_DB_PASSWORD` | Project Settings -> Database -> Database password | | `STAGING_DB_PASSWORD` | Same, for the staging project | -The workflow also references `staging` and `production` GitHub **environments**. They don't need any settings to function, but you can add required-reviewer protection rules to the `production` environment (Settings -> Environments) to gate prod migrations behind manual approval. +The workflow also references `staging` and `production` GitHub **environments**. They don't need any settings to function, but you should add a required-reviewer protection rule to the `production` environment (Settings -> Environments -> production -> Required reviewers). Without it, every merge to `main` that touches a migration applies it to prod immediately. ### Manual fallback From aa706c7005d143fff31b3b58e67b817b7718487e Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 13:11:39 +0000 Subject: [PATCH 05/22] feat(ci): auto-migrate staging on PRs that touch migrations Removes the manual workflow_dispatch step from the dev loop. The DB Migrate workflow now also runs on pull_request to main when supabase/migrations/** changes, and resolves target=staging for that event. supabase db push is idempotent so re-runs on each PR commit are safe. workflow_dispatch is kept for manual re-runs and for migrating staging when no migration file changed. 
--- .github/workflows/db-migrate.yml | 16 +++++++++++----- docs/ENVIRONMENTS.md | 17 +++++++++-------- 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/.github/workflows/db-migrate.yml b/.github/workflows/db-migrate.yml index e2b62379..80711308 100644 --- a/.github/workflows/db-migrate.yml +++ b/.github/workflows/db-migrate.yml @@ -6,6 +6,11 @@ on: paths: - "supabase/migrations/**" - ".github/workflows/db-migrate.yml" + pull_request: + branches: [main] + paths: + - "supabase/migrations/**" + - ".github/workflows/db-migrate.yml" workflow_dispatch: inputs: target: @@ -31,11 +36,12 @@ jobs: - name: Resolve target id: target run: | - if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then - TARGET="${{ github.event.inputs.target }}" - else - TARGET="prod" - fi + case "${{ github.event_name }}" in + workflow_dispatch) TARGET="${{ github.event.inputs.target }}" ;; + pull_request) TARGET="staging" ;; + push) TARGET="prod" ;; + *) echo "Unknown event"; exit 1 ;; + esac echo "target=$TARGET" >> "$GITHUB_OUTPUT" echo "Migrating: $TARGET" diff --git a/docs/ENVIRONMENTS.md b/docs/ENVIRONMENTS.md index 3297b567..6dc44362 100644 --- a/docs/ENVIRONMENTS.md +++ b/docs/ENVIRONMENTS.md @@ -99,14 +99,15 @@ Branching model: there is only `main`. Feature branches are PR'd directly to it. Day-to-day flow: 1. Develop locally on a feature branch (`supabase migration new …`, edit, `supabase db reset` to verify). -2. Open a PR against `main`. Vercel auto-deploys a preview URL wired to staging Supabase. -3. **If the PR includes a migration**, push it to staging before testing the preview: - - GitHub → **Actions** → **DB Migrate** → **Run workflow** → target = `staging`. - - The workflow runs `supabase db push` on the *PR branch's* migration set, so staging gets the new migration. -4. Test the preview URL. -5. Merge to `main`. The **DB Migrate** workflow auto-runs against prod (only when `supabase/migrations/**` actually changed). 
- -Caveat about staging drift: because staging migrations are pushed from PR branches, an abandoned PR can leave a stray migration applied to staging that no longer exists in `main`. Postgres can't roll it back automatically. If staging gets confused, the recovery is `supabase db reset --linked` (with staging linked) — destructive, but staging is meant to be disposable. +2. Open a PR against `main`. Two things happen automatically: + - Vercel deploys a preview URL wired to staging Supabase. + - If the PR touched `supabase/migrations/**`, the **DB Migrate** workflow pushes those migrations to staging. Subsequent commits to the PR re-run it (`supabase db push` is idempotent — already-applied migrations are skipped). +3. Test the preview URL against staging. +4. Merge to `main`. The same workflow runs against prod. + +You can also trigger the workflow manually from the Actions tab (workflow_dispatch) — useful for re-running a push or migrating staging when no migration file changed. + +Caveat about staging drift: an abandoned PR can leave a stray migration applied to staging that no longer exists in `main`. Postgres can't roll it back automatically. If staging gets confused, recovery is `supabase db reset --linked` (with staging linked) — destructive, but staging is meant to be disposable. Re-seed afterwards with `pnpm run db:sync:staging`. ### CI secrets required From 7655e83e75b660092a29b1da809204960ff7722d Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 13:13:30 +0000 Subject: [PATCH 06/22] feat(ci): comment on PR when staging migration fails Adds a step that runs only on failure during pull_request events. Posts a comment with a link to the failed run and a hint about common causes, so the failure is visible in the PR conversation rather than only in the Actions tab. Adds the required pull-requests: write permission at the workflow level. 
--- .github/workflows/db-migrate.yml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/.github/workflows/db-migrate.yml b/.github/workflows/db-migrate.yml index 80711308..19746af4 100644 --- a/.github/workflows/db-migrate.yml +++ b/.github/workflows/db-migrate.yml @@ -19,6 +19,10 @@ on: type: choice options: [staging, prod] +permissions: + contents: read + pull-requests: write + jobs: migrate: name: Push migrations @@ -53,3 +57,19 @@ jobs: run: | supabase link --project-ref "$PROJECT_REF" --password "$DB_PASSWORD" supabase db push --password "$DB_PASSWORD" + + - name: Comment on PR if migration failed + if: failure() && github.event_name == 'pull_request' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + TARGET: ${{ steps.target.outputs.target }} + run: | + gh pr comment "${{ github.event.pull_request.number }}" --body "$(cat < Date: Sat, 9 May 2026 13:21:17 +0000 Subject: [PATCH 07/22] fix(ci): read PROJECT_REF from vars, not secrets Project refs aren't sensitive (they're in dashboard URLs), so they belong as Actions variables. Using secrets.PROD_PROJECT_REF returned an empty string and supabase link failed. 
--- .github/workflows/db-migrate.yml | 2 +- docs/ENVIRONMENTS.md | 15 +++++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/.github/workflows/db-migrate.yml b/.github/workflows/db-migrate.yml index 19746af4..2b234e82 100644 --- a/.github/workflows/db-migrate.yml +++ b/.github/workflows/db-migrate.yml @@ -52,7 +52,7 @@ jobs: - name: Push migrations env: SUPABASE_ACCESS_TOKEN: ${{ secrets.SUPABASE_ACCESS_TOKEN }} - PROJECT_REF: ${{ steps.target.outputs.target == 'prod' && secrets.PROD_PROJECT_REF || secrets.STAGING_PROJECT_REF }} + PROJECT_REF: ${{ steps.target.outputs.target == 'prod' && vars.PROD_PROJECT_REF || vars.STAGING_PROJECT_REF }} DB_PASSWORD: ${{ steps.target.outputs.target == 'prod' && secrets.PROD_DB_PASSWORD || secrets.STAGING_DB_PASSWORD }} run: | supabase link --project-ref "$PROJECT_REF" --password "$DB_PASSWORD" diff --git a/docs/ENVIRONMENTS.md b/docs/ENVIRONMENTS.md index 6dc44362..38874cce 100644 --- a/docs/ENVIRONMENTS.md +++ b/docs/ENVIRONMENTS.md @@ -111,13 +111,20 @@ Caveat about staging drift: an abandoned PR can leave a stray migration applied ### CI secrets required -The DB-migrate workflow reads these from GitHub Actions secrets (Settings -> Secrets and variables -> Actions): +The DB-migrate workflow reads these from GitHub Actions (Settings -> Secrets and variables -> Actions). Project refs are **variables** (not sensitive); the access token and DB passwords are **secrets**. 
-| Secret | Where to find it | +**Variables** (Variables tab): + +| Name | Where to find it | | --- | --- | -| `SUPABASE_ACCESS_TOKEN` | https://supabase.com/dashboard/account/tokens — generate a personal access token | -| `PROD_PROJECT_REF` | `qssmazlqrmxiudxckxvi` (already public, but kept as a secret for symmetry with staging) | +| `PROD_PROJECT_REF` | `qssmazlqrmxiudxckxvi` | | `STAGING_PROJECT_REF` | the staging project's ref (visible in its dashboard URL) | + +**Secrets** (Secrets tab): + +| Name | Where to find it | +| --- | --- | +| `SUPABASE_ACCESS_TOKEN` | https://supabase.com/dashboard/account/tokens — generate a personal access token | | `PROD_DB_PASSWORD` | Project Settings -> Database -> Database password | | `STAGING_DB_PASSWORD` | Same, for the staging project | From 9588d37dff398b99fbddf729009ac467a98eb482 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 13:43:37 +0000 Subject: [PATCH 08/22] fix: address PR #30 review feedback MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - package.json: pnpm dev now defaults to local supabase; old behavior moves to pnpm dev:prod - db-migrate.yml: environment selection now event-driven (workflow_dispatch with target=staging from main no longer picks production); pin supabase-cli to 2.58.5 instead of latest; rewrite the failure-comment as a single-line --body so leading indentation doesn't render as a code block - anonymize.sql: header comment was stale — auth.users IS now synced separately by sync-from-prod.sh - docs/ENVIRONMENTS.md: spell out PII on first use; correct Vite env load order (.env -> .env.local -> .env.[mode] -> .env.[mode].local); reorder env table to put local first --- .github/workflows/db-migrate.yml | 14 ++++---------- docs/ENVIRONMENTS.md | 12 ++++++------ package.json | 4 ++-- scripts/anonymize.sql | 14 ++++++++------ 4 files changed, 20 insertions(+), 24 deletions(-) diff --git a/.github/workflows/db-migrate.yml 
b/.github/workflows/db-migrate.yml index 2b234e82..d08b8ac6 100644 --- a/.github/workflows/db-migrate.yml +++ b/.github/workflows/db-migrate.yml @@ -28,14 +28,14 @@ jobs: name: Push migrations runs-on: ubuntu-latest timeout-minutes: 10 - environment: ${{ (github.ref == 'refs/heads/main' || github.event.inputs.target == 'prod') && 'production' || 'staging' }} + environment: ${{ ((github.event_name == 'workflow_dispatch' && github.event.inputs.target == 'prod') || github.event_name == 'push') && 'production' || 'staging' }} steps: - uses: actions/checkout@v4 - uses: supabase/setup-cli@v1 with: - version: latest + version: 2.58.5 - name: Resolve target id: target @@ -65,11 +65,5 @@ jobs: RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} TARGET: ${{ steps.target.outputs.target }} run: | - gh pr comment "${{ github.event.pull_request.number }}" --body "$(cat < .env.[mode] -> .env.local -> .env.[mode].local +.env -> .env.local -> .env.[mode] -> .env.[mode].local ``` `.env.local` and `.env.[mode].local` are gitignored. `.env.example`, `.env.staging.example`, and `.env.local.example` are committed as templates. @@ -56,9 +56,9 @@ You'll need `pg_dump` and `psql` on your machine — both ship with the Postgres ### Run the app against an env ```bash -pnpm run dev # prod (default — be careful, this is real data) +pnpm run dev # local supabase (default — requires `supabase start` running) pnpm run dev:staging # staging -pnpm run dev:local # local supabase +pnpm run dev:prod # prod (be careful — real data) ``` ### Sync prod data into staging or local @@ -80,7 +80,7 @@ Both prompt for confirmation before touching the target. The script: 2. `pg_dump`s the `public` schema (data only) from prod. 3. `TRUNCATE`s the target's `public` tables. 4. Restores the dump. 
FK references from `public.*` to `auth.users(id)` now resolve, so RLS policies that check `auth.uid()` work for any user — log in as a test account and you can read/write any synced row that user owns. -5. Runs `scripts/anonymize.sql` to scrub remaining PII (`profiles.username`, `artist_notes.note_content`, `group_invites.invite_token`). +5. Runs `scripts/anonymize.sql` to scrub remaining PII — personally identifiable information — in the public schema (`profiles.username`, `artist_notes.note_content`, `group_invites.invite_token`). To skip auth syncing (public schema only): diff --git a/package.json b/package.json index bad112e3..2c390cf6 100644 --- a/package.json +++ b/package.json @@ -5,9 +5,9 @@ "version": "0.0.0", "type": "module", "scripts": { - "dev": "vite", + "dev": "vite --mode local", "dev:staging": "vite --mode staging", - "dev:local": "vite --mode local", + "dev:prod": "vite", "build": "vite build", "build:dev": "vite build --mode development", "build:staging": "vite build --mode staging", diff --git a/scripts/anonymize.sql b/scripts/anonymize.sql index 8fe8c643..e7c5ca56 100644 --- a/scripts/anonymize.sql +++ b/scripts/anonymize.sql @@ -1,11 +1,13 @@ --- Scrubs PII from a freshly-restored prod dump. +-- Scrubs PII from a freshly-restored prod dump (public schema only). -- Run after restoring data into staging or local. Idempotent. -- --- Notes on what is and isn't synced: --- * auth.users is NOT copied. Test accounts on the target are kept as-is. --- This means user_id columns may reference UUIDs that don't exist in the --- target's auth.users — which is fine for read-only testing of public data. --- * Any new column that holds free-text user input should be added here. +-- auth.users IS synced separately by sync-from-prod.sh, with emails rewritten +-- to user-<id>@example.test at load time and no password set. Existing +-- target accounts are preserved via ON CONFLICT (id) DO NOTHING. Skip the auth +-- sync entirely with SYNC_AUTH=0.
+-- +-- Any new public-schema column that holds free-text user input should be +-- added below. BEGIN; From 7b0751c640188587e0b276c34497e1f2734a5ad5 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 13:47:05 +0000 Subject: [PATCH 09/22] fix(supabase): throw on missing env vars instead of silently hitting prod The hardcoded fallback to prod URL/anon key meant that running pnpm dev without an env file would quietly send local-dev requests to production. Removes the fallback and throws at module load with a message pointing to .env.local.example. Wires VITE_SUPABASE_URL / VITE_SUPABASE_PUBLISHABLE_KEY into the e2e workflow at job level (pointing at the local supabase started in the Setup Supabase step) so the build and the Playwright webServer both have them. Lint and unit tests don't import the client and don't need them. --- .github/workflows/e2e-tests.yml | 3 +++ src/integrations/supabase/client.ts | 16 ++++++++++------ 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index d5941974..76c9f639 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -10,6 +10,9 @@ jobs: test: timeout-minutes: 60 runs-on: ubuntu-latest + env: + VITE_SUPABASE_URL: http://127.0.0.1:54321 + VITE_SUPABASE_PUBLISHABLE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0 strategy: fail-fast: false matrix: diff --git a/src/integrations/supabase/client.ts b/src/integrations/supabase/client.ts index 5709f2d7..3c667bd3 100644 --- a/src/integrations/supabase/client.ts +++ b/src/integrations/supabase/client.ts @@ -3,12 +3,16 @@ import { createClient } from "@supabase/supabase-js"; import type { Database } from "./types"; import { createSupabaseStorage } from "@/lib/crossDomainStorage"; -const SUPABASE_URL = - import.meta.env.VITE_SUPABASE_URL || - 
"https://qssmazlqrmxiudxckxvi.supabase.co"; -const SUPABASE_PUBLISHABLE_KEY = - import.meta.env.VITE_SUPABASE_PUBLISHABLE_KEY || - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InFzc21hemxxcm14aXVkeGNreHZpIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTAzOTk4NjUsImV4cCI6MjA2NTk3NTg2NX0.4bltEUMgtxiDIbZDB9NLLKmeEDARt3yLjAbnO02RD_M"; +const SUPABASE_URL = import.meta.env.VITE_SUPABASE_URL; +const SUPABASE_PUBLISHABLE_KEY = import.meta.env.VITE_SUPABASE_PUBLISHABLE_KEY; + +if (!SUPABASE_URL || !SUPABASE_PUBLISHABLE_KEY) { + throw new Error( + "Missing VITE_SUPABASE_URL or VITE_SUPABASE_PUBLISHABLE_KEY. " + + "For local dev: copy .env.local.example to .env.local and run `supabase start`. " + + "For deploys: set them in your hosting provider's environment variables.", + ); +} export const supabase = createClient( SUPABASE_URL, From 3b2a50b0dfcad04251118152373e7db54d58fecc Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 13:48:10 +0000 Subject: [PATCH 10/22] fix(sync): collapse auth.users dump command to a single line psql -c with multi-line continuations and leading whitespace was sending \copy to the server as SQL instead of treating it as a client meta-command. Single line works because psql sees the leading backslash immediately. 
--- scripts/sync-from-prod.sh | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/scripts/sync-from-prod.sh b/scripts/sync-from-prod.sh index 1924d746..e7692d2c 100755 --- a/scripts/sync-from-prod.sh +++ b/scripts/sync-from-prod.sh @@ -80,11 +80,7 @@ if [[ "$SYNC_AUTH" == "1" ]]; then echo "Syncing auth.users from prod (anonymized)…" AUTH_CSV="$TMP_DIR/auth-users.csv" - psql "$PROD_DB_URL" -v ON_ERROR_STOP=1 -At -c "\ - \\copy ( \ - SELECT id, email, email_confirmed_at, created_at, updated_at, aud, role \ - FROM auth.users \ - ) TO '$AUTH_CSV' WITH (FORMAT csv)" + psql "$PROD_DB_URL" -v ON_ERROR_STOP=1 -c "\copy (SELECT id, email, email_confirmed_at, created_at, updated_at, aud, role FROM auth.users) TO '$AUTH_CSV' WITH (FORMAT csv)" psql "$TARGET_URL" -v ON_ERROR_STOP=1 -v authcsv="$AUTH_CSV" <<'SQL' CREATE TEMP TABLE _sync_auth_users ( From 7733c4a6598219fb3f05d8c950af6d7e70b77bd9 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 13:55:10 +0000 Subject: [PATCH 11/22] fix(sync): use bash expansion for csv path instead of psql :var MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The :'authcsv' substitution wasn't being applied — psql was trying to open the literal string ":'authcsv'" as a file. Unquoted heredoc with '$AUTH_CSV' lets bash do the substitution before psql sees the SQL, which is more portable across psql versions. 
--- scripts/sync-from-prod.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/sync-from-prod.sh b/scripts/sync-from-prod.sh index e7692d2c..98e19ea3 100755 --- a/scripts/sync-from-prod.sh +++ b/scripts/sync-from-prod.sh @@ -82,7 +82,7 @@ if [[ "$SYNC_AUTH" == "1" ]]; then psql "$PROD_DB_URL" -v ON_ERROR_STOP=1 -c "\copy (SELECT id, email, email_confirmed_at, created_at, updated_at, aud, role FROM auth.users) TO '$AUTH_CSV' WITH (FORMAT csv)" - psql "$TARGET_URL" -v ON_ERROR_STOP=1 -v authcsv="$AUTH_CSV" <<'SQL' + psql "$TARGET_URL" -v ON_ERROR_STOP=1 < Date: Sat, 9 May 2026 14:06:23 +0000 Subject: [PATCH 12/22] fix(sync): use session_replication_role=replica instead of --disable-triggers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit pg_dump --disable-triggers emits ALTER TABLE ... DISABLE TRIGGER ALL, which Postgres treats as touching system triggers (RI_ConstraintTrigger_*) and rejects without superuser — Supabase's postgres role isn't super. Drop the flag and instead set session_replication_role=replica for the restore session, which silences FK/constraint triggers session-wide without per-trigger ALTERs. 
--- scripts/sync-from-prod.sh | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scripts/sync-from-prod.sh b/scripts/sync-from-prod.sh index 98e19ea3..31e632a5 100755 --- a/scripts/sync-from-prod.sh +++ b/scripts/sync-from-prod.sh @@ -125,7 +125,6 @@ pg_dump \ --no-privileges \ --data-only \ --schema=public \ - --disable-triggers \ --file="$DUMP_FILE" \ "$PROD_DB_URL" @@ -147,7 +146,11 @@ END $$; SQL echo "Restoring dump into target…" -psql "$TARGET_URL" -v ON_ERROR_STOP=1 -f "$DUMP_FILE" +psql "$TARGET_URL" -v ON_ERROR_STOP=1 < Date: Sat, 9 May 2026 14:15:19 +0000 Subject: [PATCH 13/22] fix(sync): anonymize profiles.email to avoid collision on new sign-in MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit profiles.email is UNIQUE. When a real user signs in to staging with their prod email, Supabase tries to create a new auth.users row, and the handle_new_user trigger inserts a profile with that email — colliding with the synced profile row which still held the real email. Sign-in fails with "Database error saving new user". Anonymize email to the same user-<id>@example.test pattern as auth.users so new sign-ins don't collide. --- scripts/anonymize.sql | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/anonymize.sql b/scripts/anonymize.sql index e7c5ca56..93c93435 100644 --- a/scripts/anonymize.sql +++ b/scripts/anonymize.sql @@ -11,9 +11,13 @@ BEGIN; --- profiles.username may contain a real handle. Replace with a synthetic value. +-- profiles.username may contain a real handle, and profiles.email is UNIQUE +-- and would collide if a real user signs in to staging (the new auth.users +-- row created by Supabase would conflict with the synced profile's email). +-- Both replaced with synthetic values matching the auth.users anonymization.
UPDATE public.profiles - SET username = 'user_' || substring(id::text, 1, 8); + SET username = 'user_' || substring(id::text, 1, 8), + email = 'user-' || substring(id::text, 1, 8) || '@example.test'; -- artist_notes.note_content is free-form user text. Wipe it. UPDATE public.artist_notes From 7f6ce7929182b4f37807887e03050697715a6526 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 14:22:13 +0000 Subject: [PATCH 14/22] feat(auth): commit magic-link email template MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds supabase/templates/magic_link.html as the single source of truth for the magic-link / OTP email body. Wires it into supabase/config.toml so local supabase uses it via supabase start. Prod and staging templates still have to be updated through the dashboard for now — documented in ENVIRONMENTS.md. Automation via Management API is a follow-up. --- docs/ENVIRONMENTS.md | 12 ++ supabase/config.toml | 6 +- supabase/templates/magic_link.html | 208 +++++++++++++++++++++++++++++ 3 files changed, 225 insertions(+), 1 deletion(-) create mode 100644 supabase/templates/magic_link.html diff --git a/docs/ENVIRONMENTS.md b/docs/ENVIRONMENTS.md index 27560c9a..06c8db1a 100644 --- a/docs/ENVIRONMENTS.md +++ b/docs/ENVIRONMENTS.md @@ -139,3 +139,15 @@ supabase link --project-ref && supabase db push ``` Re-link to whichever project you intend to operate on — the Supabase CLI keeps a single linked project at a time. + +### Auth email templates + +The magic-link / OTP email template lives at `supabase/templates/magic_link.html` and is wired into `supabase/config.toml`, so local Supabase (`supabase start`) picks it up automatically. + +**For prod and staging, the template currently has to be updated by hand:** + +1. Open the file in your editor and edit it. +2. Supabase Dashboard → the project → Authentication → Email Templates → Magic Link. +3. Paste the HTML and save. Repeat for the other project. 
+ +Both projects should be updated together so they don't drift. Automating this via the Supabase Management API (`PATCH /v1/projects/{ref}/config/auth`) is straightforward; we just haven't done it yet. diff --git a/supabase/config.toml b/supabase/config.toml index a5c3e455..69370b3f 100644 --- a/supabase/config.toml +++ b/supabase/config.toml @@ -1,4 +1,8 @@ project_id = "qssmazlqrmxiudxckxvi" [functions.sync-artist-data] -verify_jwt = false \ No newline at end of file +verify_jwt = false + +[auth.email.template.magic_link] +subject = "Welcome to the Festival!" +content_path = "./supabase/templates/magic_link.html" \ No newline at end of file diff --git a/supabase/templates/magic_link.html b/supabase/templates/magic_link.html new file mode 100644 index 00000000..3a98bbb6 --- /dev/null +++ b/supabase/templates/magic_link.html @@ -0,0 +1,208 @@ + + + + + + Welcome to the Festival! + + + + + + From ee50fb225f11b4e7ade2847052cbc67e79c5ce9f Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 14:25:48 +0000 Subject: [PATCH 15/22] docs(auth): track enable_confirmations and otp_length in config.toml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit These settings are binding for `supabase start` only — remote projects still need the same toggles flipped in the dashboard. Tracking them here so local supabase matches prod and the intended config is in source control as documentation. --- supabase/config.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/supabase/config.toml b/supabase/config.toml index 69370b3f..36efcda0 100644 --- a/supabase/config.toml +++ b/supabase/config.toml @@ -3,6 +3,14 @@ project_id = "qssmazlqrmxiudxckxvi" [functions.sync-artist-data] verify_jwt = false +[auth.email] +# Match prod: passwordless sign-in via magic link without a separate +# confirmation step. Toggled in the Supabase dashboard for remote projects +# (config.toml is only binding for `supabase start`). 
+enable_confirmations = false +# 6-digit OTP to match the email template copy. +otp_length = 6 + [auth.email.template.magic_link] subject = "Welcome to the Festival!" content_path = "./supabase/templates/magic_link.html" \ No newline at end of file From 41511ff32e0e9d74e58dca6258525862aa57850a Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 14:32:38 +0000 Subject: [PATCH 16/22] docs: rewrite ENVIRONMENTS.md as a concise setup checklist MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Folds in everything covered while bootstrapping staging — auth settings (confirm email off, otp length 6, redirect URLs, magic-link template), GitHub vars vs secrets, Vercel scoping, local prerequisites — and trims the long prose explanations of the sync script and migration flow into short bullets. No troubleshooting section; failures are uncommon enough that the commit history is a better record. --- docs/ENVIRONMENTS.md | 182 +++++++++++++++---------------------------- 1 file changed, 61 insertions(+), 121 deletions(-) diff --git a/docs/ENVIRONMENTS.md b/docs/ENVIRONMENTS.md index 06c8db1a..870b8c5b 100644 --- a/docs/ENVIRONMENTS.md +++ b/docs/ENVIRONMENTS.md @@ -1,153 +1,93 @@ # Environments -UpLine runs against three Supabase environments. +| Env | Project | Used by | +| --- | --- | --- | +| **local** | Supabase CLI (`supabase start`) | `pnpm run dev` (default), e2e tests | +| **staging** | a second Supabase project | `pnpm run dev:staging`, Vercel preview deploys | +| **prod** | `qssmazlqrmxiudxckxvi` | `pnpm run dev:prod`, Vercel production | -| Env | Project | Used by | Auto-pause? 
| -| --- | --- | --- | --- | -| **local** | Supabase CLI (`supabase start`) | `pnpm run dev` (default), e2e tests | n/a | -| **staging** | a second free Supabase project | `pnpm run dev:staging`, `pnpm run build:staging` | yes (after 7 days idle) | -| **prod** | `qssmazlqrmxiudxckxvi` | `pnpm run dev:prod`, `pnpm run build` | no | - -The frontend reads `VITE_SUPABASE_URL` / `VITE_SUPABASE_PUBLISHABLE_KEY` from a Vite env file picked by `--mode`. Vite loads them in this order (later overrides earlier): +The frontend reads `VITE_SUPABASE_URL` / `VITE_SUPABASE_PUBLISHABLE_KEY` from a Vite env file picked by `--mode`. Vite load order (later overrides earlier): ``` .env -> .env.local -> .env.[mode] -> .env.[mode].local ``` -`.env.local` and `.env.[mode].local` are gitignored. `.env.example`, `.env.staging.example`, and `.env.local.example` are committed as templates. - -## One-time setup - -### 1. Create the staging Supabase project - -In the Supabase dashboard, create a second project (free tier is fine). Name it something like `upline-staging`. - -After it's created: - -- Apply the same migrations as prod: - ```bash - supabase link --project-ref - supabase db push - ``` -- Copy the project URL and anon key from **Project Settings -> API**. +`*.local` files are gitignored; the `*.example` files are templates. -### 2. Wire up env files +## Setting up a new Supabase project (e.g. staging) -```bash -cp .env.example .env.local # prod creds -cp .env.staging.example .env.staging.local # staging creds -cp .env.local.example .env.local.local # local Supabase (optional) -``` +1. **Create the project** in the Supabase dashboard. +2. **Configure auth** (Authentication → Sign In / Providers → Email): + - "Confirm email" → **off** + - "Email OTP Length" → **6** + - Site URL and additional redirect URLs → match prod +3. **Paste the magic-link email template** (Authentication → Email Templates → Magic Link) from `supabase/templates/magic_link.html`. Re-paste on every change. +4. 
**Apply migrations**: GitHub → Actions → **DB Migrate** → Run workflow → target = `staging`. +5. **Save the database password** (Project Settings → Database). You'll need it for GitHub secrets and `scripts/.env.sync`. -Fill in the URLs and anon keys. Anon keys are safe in the browser bundle, but the convention here keeps them out of git so each developer can swap targets. +## GitHub Actions config -### 3. Wire up the sync script +Settings → Secrets and variables → Actions. -```bash -cp scripts/.env.sync.example scripts/.env.sync -``` - -Open `scripts/.env.sync` and paste the **direct Postgres connection strings** (Project Settings -> Database -> Connection string -> URI) for prod and staging. This file is gitignored. - -You'll need `pg_dump` and `psql` on your machine — both ship with the Postgres client tools (`brew install libpq` on macOS, `apt-get install postgresql-client` on Debian/Ubuntu). - -## Day-to-day - -### Run the app against an env - -```bash -pnpm run dev # local supabase (default — requires `supabase start` running) -pnpm run dev:staging # staging -pnpm run dev:prod # prod (be careful — real data) -``` - -### Sync prod data into staging or local - -```bash -pnpm run db:sync:staging # overwrites staging public schema with prod data -pnpm run db:sync:local # overwrites local public schema with prod data -``` +**Variables:** -Both prompt for confirmation before touching the target. The script: - -1. **Syncs `auth.users`** from prod into a temp table on the target, then upserts with `ON CONFLICT (id) DO NOTHING`. New rows have: - - `email` rewritten to `user-@example.test` - - no `encrypted_password` (synced users can't sign in) - - `raw_user_meta_data` / `raw_app_meta_data` stripped of OAuth/profile info - - `is_super_admin = false` - - Your existing test accounts on the target are **preserved** because of the `ON CONFLICT` clause. They keep their original emails and passwords, so you can still log in as them. -2. 
`pg_dump`s the `public` schema (data only) from prod. -3. `TRUNCATE`s the target's `public` tables. -4. Restores the dump. FK references from `public.*` to `auth.users(id)` now resolve, so RLS policies that check `auth.uid()` work for any user — log in as a test account and you can read/write any synced row that user owns. -5. Runs `scripts/anonymize.sql` to scrub remaining PII — personally identifiable information — in the public schema (`profiles.username`, `artist_notes.note_content`, `group_invites.invite_token`). - -To skip auth syncing (public schema only): - -```bash -SYNC_AUTH=0 pnpm run db:sync:staging -``` - -### When PII shape changes - -If you add a column that holds free-form user input or a personal identifier, **edit `scripts/anonymize.sql`** to scrub it. The dump-and-anonymize approach is only safe as long as that file is kept current. - -### Promoting a migration to staging, then prod - -Branching model: there is only `main`. Feature branches are PR'd directly to it. Vercel preview deploys (any non-`main` branch) point at the **staging** Supabase project, so you test the PR against staging before merging. - -Day-to-day flow: - -1. Develop locally on a feature branch (`supabase migration new …`, edit, `supabase db reset` to verify). -2. Open a PR against `main`. Two things happen automatically: - - Vercel deploys a preview URL wired to staging Supabase. - - If the PR touched `supabase/migrations/**`, the **DB Migrate** workflow pushes those migrations to staging. Subsequent commits to the PR re-run it (`supabase db push` is idempotent — already-applied migrations are skipped). -3. Test the preview URL against staging. -4. Merge to `main`. The same workflow runs against prod. 
+| Name | Value | +| --- | --- | +| `PROD_PROJECT_REF` | `qssmazlqrmxiudxckxvi` | +| `STAGING_PROJECT_REF` | the staging project's ref | -You can also trigger the workflow manually from the Actions tab (workflow_dispatch) — useful for re-running a push or migrating staging when no migration file changed. +**Secrets:** -Caveat about staging drift: an abandoned PR can leave a stray migration applied to staging that no longer exists in `main`. Postgres can't roll it back automatically. If staging gets confused, recovery is `supabase db reset --linked` (with staging linked) — destructive, but staging is meant to be disposable. Re-seed afterwards with `pnpm run db:sync:staging`. +| Name | Source | +| --- | --- | +| `SUPABASE_ACCESS_TOKEN` | https://supabase.com/dashboard/account/tokens | +| `PROD_DB_PASSWORD` | Project Settings → Database | +| `STAGING_DB_PASSWORD` | Same, on the staging project | -### CI secrets required +Add a **required-reviewer** rule to the `production` GitHub environment (Settings → Environments → production) so prod migrations pause for approval. -The DB-migrate workflow reads these from GitHub Actions (Settings -> Secrets and variables -> Actions). Project refs are **variables** (not sensitive); the access token and DB passwords are **secrets**. +## Vercel config -**Variables** (Variables tab): +Project Settings → Environment Variables. 
For each Supabase var, add it twice: -| Name | Where to find it | -| --- | --- | -| `PROD_PROJECT_REF` | `qssmazlqrmxiudxckxvi` | -| `STAGING_PROJECT_REF` | the staging project's ref (visible in its dashboard URL) | +| Variable | Production scope (main) | Preview scope (everything else) | +| --- | --- | --- | +| `VITE_SUPABASE_URL` | prod URL | staging URL | +| `VITE_SUPABASE_PUBLISHABLE_KEY` | prod anon key | staging anon key | +| `VITE_PUBLIC_POSTHOG_KEY` | PostHog key | same | +| `VITE_PUBLIC_POSTHOG_HOST` | PostHog host | same | -**Secrets** (Secrets tab): +## Local prerequisites -| Name | Where to find it | -| --- | --- | -| `SUPABASE_ACCESS_TOKEN` | https://supabase.com/dashboard/account/tokens — generate a personal access token | -| `PROD_DB_PASSWORD` | Project Settings -> Database -> Database password | -| `STAGING_DB_PASSWORD` | Same, for the staging project | - -The workflow also references `staging` and `production` GitHub **environments**. They don't need any settings to function, but you should add a required-reviewer protection rule to the `production` environment (Settings -> Environments -> production -> Required reviewers). Without it, every merge to `main` that touches a migration applies it to prod immediately. - -### Manual fallback +- **Supabase CLI** + **Docker** (for `supabase start`) +- **Postgres client tools** for the sync script: `brew install libpq` on macOS (and add `/opt/homebrew/opt/libpq/bin` to your PATH), `apt-get install postgresql-client` on Debian. 
+- Copy env templates: + ```bash + cp .env.local.example .env.local # local supabase + cp .env.staging.example .env.staging.local # staging + cp scripts/.env.sync.example scripts/.env.sync # prod + staging direct DB connection strings (for sync script) + ``` -If you need to push a migration without going through CI: +## Day-to-day commands ```bash -supabase link --project-ref <project-ref> && supabase db push +pnpm run dev # local supabase (requires `supabase start`) +pnpm run dev:staging # staging +pnpm run dev:prod # prod (real data — be careful) +pnpm run db:sync:staging # overwrite staging public schema with prod data, anonymized +pnpm run db:sync:local # same, into local supabase ``` -Re-link to whichever project you intend to operate on — the Supabase CLI keeps a single linked project at a time. +The sync script syncs `auth.users` (with anonymized emails, no passwords) and `public.*` (PII scrubbed via `scripts/anonymize.sql`). Existing target users on `auth.users` are preserved via `ON CONFLICT DO NOTHING`. Skip auth sync with `SYNC_AUTH=0`. -### Auth email templates +If you add a public-schema column that holds free-form user input or PII, update `scripts/anonymize.sql`. -The magic-link / OTP email template lives at `supabase/templates/magic_link.html` and is wired into `supabase/config.toml`, so local Supabase (`supabase start`) picks it up automatically. +## Migration flow -**For prod and staging, the template currently has to be updated by hand:** +- PR → `main`: Vercel preview points at staging. If the PR touches `supabase/migrations/**`, **DB Migrate** auto-pushes to staging. `supabase db push` is idempotent so re-runs on each commit are safe. +- Merge to `main`: **DB Migrate** auto-pushes to prod (gated by the `production` environment's reviewer rule, if configured). +- Manual: Actions → **DB Migrate** → Run workflow → pick target. -1. Open the file in your editor and edit it. -2. Supabase Dashboard → the project → Authentication → Email Templates → Magic Link. -3. 
Paste the HTML and save. Repeat for the other project. +## Auth email template -Both projects should be updated together so they don't drift. Automating this via the Supabase Management API (`PATCH /v1/projects/{ref}/config/auth`) is straightforward; we just haven't done it yet. +`supabase/templates/magic_link.html` is the source of truth. `supabase/config.toml` wires it into local supabase automatically. For staging and prod, paste it into the dashboard manually after each change (Authentication → Email Templates → Magic Link). Automating this via the Supabase Management API is a future improvement. From 5312f18aea14f1fdd7c5cceb529b20750adb593e Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 14:36:26 +0000 Subject: [PATCH 17/22] fix(ci): wire VITE_SUPABASE_* into unit-tests workflow MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Same fix as e2e-tests.yml — without the fallback, any test that transitively imports the supabase client now hits the throw at module load. Setting the local-supabase URL + demo anon key at job level keeps the import side-effect-free for tests. 
--- .github/workflows/unit-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 0c05fae2..a20aaaeb 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -11,6 +11,9 @@ jobs: name: Run Unit Tests runs-on: ubuntu-latest timeout-minutes: 10 + env: + VITE_SUPABASE_URL: http://127.0.0.1:54321 + VITE_SUPABASE_PUBLISHABLE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0 steps: - uses: actions/checkout@v4 From a5cbb266fbc5bc049e8031c4cbe5a97e50a0c34d Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 14:38:30 +0000 Subject: [PATCH 18/22] fix(ci): upsert single PR comment for migration status, delete on success MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Old behavior posted a fresh failure comment on every failed run and never cleaned up after a subsequent success — leaving stale comments on the PR. New behavior: - Always runs (success or failure) and finds prior comments via a hidden marker. - On failure: upserts (PATCH if exists, create if not) — at most one failure comment per PR. - On success: deletes any existing failure comment. Also trims the canned "common causes" text since it was misleading when the actual failure was a config issue (missing secret, etc.). 
--- .github/workflows/db-migrate.yml | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/.github/workflows/db-migrate.yml b/.github/workflows/db-migrate.yml index d08b8ac6..d428b0f0 100644 --- a/.github/workflows/db-migrate.yml +++ b/.github/workflows/db-migrate.yml @@ -58,12 +58,25 @@ jobs: supabase link --project-ref "$PROJECT_REF" --password "$DB_PASSWORD" supabase db push --password "$DB_PASSWORD" - - name: Comment on PR if migration failed - if: failure() && github.event_name == 'pull_request' + - name: Update PR comment with migration status + if: always() && github.event_name == 'pull_request' env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + OUTCOME: ${{ job.status }} + PR: ${{ github.event.pull_request.number }} + REPO: ${{ github.repository }} RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - TARGET: ${{ steps.target.outputs.target }} + TARGET: ${{ steps.target.outputs.target || 'staging' }} + MARKER: "<!-- db-migrate-status -->" run: | - gh pr comment "${{ github.event.pull_request.number }}" \ - --body "❌ **DB Migrate failed** — \`$TARGET\` was not updated. [View workflow run]($RUN_URL). Common causes: a migration that conflicts with the current schema, a dependency on objects that don't exist, or an out-of-order timestamp. Fix the migration and push again to re-run." + EXISTING=$(gh api "repos/$REPO/issues/$PR/comments" --jq ".[] | select(.body | contains(\"$MARKER\")) | .id" | head -n1) + if [[ "$OUTCOME" == "success" ]]; then + [[ -n "$EXISTING" ]] && gh api -X DELETE "repos/$REPO/issues/comments/$EXISTING" || true + else + BODY="$MARKER"$'\n'"❌ **DB Migrate failed** for \`$TARGET\` — see [workflow run]($RUN_URL)." 
+ if [[ -n "$EXISTING" ]]; then + gh api -X PATCH "repos/$REPO/issues/comments/$EXISTING" -f body="$BODY" + else + gh pr comment "$PR" --body "$BODY" + fi + fi From d08abafe55ba192ab5d45aa4e86033f23c58e965 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 14:41:10 +0000 Subject: [PATCH 19/22] fix(ci): post success comment instead of deleting on green migrate Always upserts a single status comment (success or failure) so the current state is always visible on the PR rather than disappearing on the happy path. --- .github/workflows/db-migrate.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/db-migrate.yml b/.github/workflows/db-migrate.yml index d428b0f0..e5f76ae3 100644 --- a/.github/workflows/db-migrate.yml +++ b/.github/workflows/db-migrate.yml @@ -69,14 +69,14 @@ jobs: TARGET: ${{ steps.target.outputs.target || 'staging' }} MARKER: "<!-- db-migrate-status -->" run: | - EXISTING=$(gh api "repos/$REPO/issues/$PR/comments" --jq ".[] | select(.body | contains(\"$MARKER\")) | .id" | head -n1) if [[ "$OUTCOME" == "success" ]]; then - [[ -n "$EXISTING" ]] && gh api -X DELETE "repos/$REPO/issues/comments/$EXISTING" || true + BODY="$MARKER"$'\n'"✅ **DB Migrate succeeded** for \`$TARGET\` — [workflow run]($RUN_URL)." + else + BODY="$MARKER"$'\n'"❌ **DB Migrate failed** for \`$TARGET\` — [workflow run]($RUN_URL)." + fi + EXISTING=$(gh api "repos/$REPO/issues/$PR/comments" --jq ".[] | select(.body | contains(\"$MARKER\")) | .id" | head -n1) + if [[ -n "$EXISTING" ]]; then + gh api -X PATCH "repos/$REPO/issues/comments/$EXISTING" -f body="$BODY" + else - EXISTING=$(gh api "repos/$REPO/issues/$PR/comments" --jq ".[] | select(.body | contains(\"$MARKER\")) | .id" | head -n1) if [[ "$OUTCOME" == "success" ]]; then - [[ -n "$EXISTING" ]] && gh api -X DELETE "repos/$REPO/issues/comments/$EXISTING" || true + BODY="$MARKER"$'\n'"✅ **DB Migrate succeeded** for \`$TARGET\` — [workflow run]($RUN_URL)." + else + BODY="$MARKER"$'\n'"❌ **DB Migrate failed** for \`$TARGET\` — [workflow run]($RUN_URL)." + fi + EXISTING=$(gh api "repos/$REPO/issues/$PR/comments" --jq ".[] | select(.body | contains(\"$MARKER\")) | .id" | head -n1) + if [[ -n "$EXISTING" ]]; then + gh api -X PATCH "repos/$REPO/issues/comments/$EXISTING" -f body="$BODY" + else - BODY="$MARKER"$'\n'"❌ **DB Migrate failed** for \`$TARGET\` — see [workflow run]($RUN_URL)." 
- if [[ -n "$EXISTING" ]]; then - gh api -X PATCH "repos/$REPO/issues/comments/$EXISTING" -f body="$BODY" - else - gh pr comment "$PR" --body "$BODY" - fi + gh pr comment "$PR" --body "$BODY" fi From d2cf6b7c1a8686d90cbeea619cacafb1d204ab0c Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 14:42:46 +0000 Subject: [PATCH 20/22] chore: pin node version via .nvmrc + engines Adds .nvmrc (22) so local devs using nvm pick up the right runtime, and mirrors it in package.json engines (node >=22 <23, pnpm >=10) so pnpm validates the environment on install. Workflows now read node-version-file: .nvmrc instead of hardcoding the version, giving us one place to bump. --- .github/workflows/e2e-tests.yml | 4 ++-- .github/workflows/lint.yml | 2 +- .github/workflows/unit-tests.yml | 2 +- .nvmrc | 1 + package.json | 6 +++++- 5 files changed, 10 insertions(+), 5 deletions(-) create mode 100644 .nvmrc diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index 76c9f639..ca67ee68 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -27,7 +27,7 @@ jobs: - uses: actions/setup-node@v4 with: - node-version: 22 + node-version-file: ".nvmrc" cache: "pnpm" - name: Install dependencies @@ -68,7 +68,7 @@ jobs: - uses: actions/setup-node@v4 with: - node-version: 22 + node-version-file: ".nvmrc" cache: "pnpm" - name: Install dependencies diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 5d953bdc..b775a568 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -19,7 +19,7 @@ jobs: - uses: actions/setup-node@v4 with: - node-version: 22 + node-version-file: ".nvmrc" cache: "pnpm" - name: Install dependencies diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index a20aaaeb..22f44ce1 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -22,7 +22,7 @@ jobs: - uses: actions/setup-node@v4 with: - node-version: 22 + 
node-version-file: ".nvmrc" cache: "pnpm" - name: Install dependencies diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 00000000..2bd5a0a9 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +22 diff --git a/package.json b/package.json index 2c390cf6..6e37c5cb 100644 --- a/package.json +++ b/package.json @@ -143,5 +143,9 @@ "prettier --write" ] }, - "packageManager": "pnpm@10.12.4" + "packageManager": "pnpm@10.12.4", + "engines": { + "node": ">=22 <23", + "pnpm": ">=10" + } } From e310329a1d00e3a34d9121745777d8829410cd05 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 14:46:07 +0000 Subject: [PATCH 21/22] =?UTF-8?q?fix:=20address=20PR=20review=20=E2=80=94?= =?UTF-8?q?=20drop=20--mode=20local=20and=20clean=20env=20file=20confusion?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - pnpm dev: 'vite --mode local' -> 'vite' (default mode=development). This makes .env.local the natural personal override (loaded for every mode but beaten by .env.[mode] / .env.[mode].local), so the awkward .env.local.local filename goes away. - pnpm dev:prod: explicit '--mode production'. - .env.example: deleted (was a duplicate "prod template" with stale comments). Devs only need .env.local for normal local dev. - .env.local.example: refreshed wording, copy target is .env.local. - client.ts: tiny tweak to error message. - db-migrate.yml push trigger: drop the workflow file from paths so editing the workflow on main doesn't fire a no-op prod migration. Kept in pull_request paths so workflow changes are still tested on staging before merge. 
--- .env.example | 4 ---- .env.local.example | 8 +++++--- .github/workflows/db-migrate.yml | 1 - package.json | 4 ++-- src/integrations/supabase/client.ts | 2 +- 5 files changed, 8 insertions(+), 11 deletions(-) delete mode 100644 .env.example diff --git a/.env.example b/.env.example deleted file mode 100644 index 2d8e7b7b..00000000 --- a/.env.example +++ /dev/null @@ -1,4 +0,0 @@ -# Production Supabase project (default — used by `pnpm run dev` and `pnpm run build`) -# Copy this file to `.env.local` and fill in the values. -VITE_SUPABASE_URL=https://your-prod-project.supabase.co -VITE_SUPABASE_PUBLISHABLE_KEY=your-prod-anon-key diff --git a/.env.local.example b/.env.local.example index 133b1612..e7ef699e 100644 --- a/.env.local.example +++ b/.env.local.example @@ -1,5 +1,7 @@ -# Local Supabase (used by `pnpm run dev:local` against `supabase start`) -# Copy this file to `.env.local.local`. The default values below match the -# Supabase CLI's local stack — change them only if you customised your setup. +# Personal env file for local development. Loaded by Vite for every mode, +# but mode-specific files (.env.staging, .env.staging.local, etc.) override it. +# +# Copy this file to `.env.local` and adjust as needed. The defaults below +# match the Supabase CLI's local stack (`supabase start`). 
VITE_SUPABASE_URL=http://127.0.0.1:54321 VITE_SUPABASE_PUBLISHABLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0 diff --git a/.github/workflows/db-migrate.yml b/.github/workflows/db-migrate.yml index e5f76ae3..bfb09f84 100644 --- a/.github/workflows/db-migrate.yml +++ b/.github/workflows/db-migrate.yml @@ -5,7 +5,6 @@ on: branches: [main] paths: - "supabase/migrations/**" - - ".github/workflows/db-migrate.yml" pull_request: branches: [main] paths: diff --git a/package.json b/package.json index 6e37c5cb..8e85dc51 100644 --- a/package.json +++ b/package.json @@ -5,9 +5,9 @@ "version": "0.0.0", "type": "module", "scripts": { - "dev": "vite --mode local", + "dev": "vite", "dev:staging": "vite --mode staging", - "dev:prod": "vite", + "dev:prod": "vite --mode production", "build": "vite build", "build:dev": "vite build --mode development", "build:staging": "vite build --mode staging", diff --git a/src/integrations/supabase/client.ts b/src/integrations/supabase/client.ts index 3c667bd3..9a346a19 100644 --- a/src/integrations/supabase/client.ts +++ b/src/integrations/supabase/client.ts @@ -10,7 +10,7 @@ if (!SUPABASE_URL || !SUPABASE_PUBLISHABLE_KEY) { throw new Error( "Missing VITE_SUPABASE_URL or VITE_SUPABASE_PUBLISHABLE_KEY. " + "For local dev: copy .env.local.example to .env.local and run `supabase start`. 
" + - "For deploys: set them in your hosting provider's environment variables.", + "For deploys: set them as environment variables in your hosting provider.", ); } From b5f5a83de739bb0dc4d0d8c0ed4013db8dcb4cf2 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 14:59:15 +0000 Subject: [PATCH 22/22] =?UTF-8?q?fix:=20address=20PR=20review=20=E2=80=94?= =?UTF-8?q?=20drop=20dev:prod,=20drop=20email=20from=20auth=20dump?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - scripts/sync-from-prod.sh: drop email column from the prod auth.users dump and the temp table on target. Email is recomputed from id during the upsert; no need to write real prod emails to disk in between. - package.json: remove the dev:prod script. There's no .env.production* template and devving against prod from a laptop is a footgun. - docs/ENVIRONMENTS.md: drop dev:prod from the day-to-day commands and from the env table. --- docs/ENVIRONMENTS.md | 3 +-- package.json | 1 - scripts/sync-from-prod.sh | 3 +-- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/docs/ENVIRONMENTS.md b/docs/ENVIRONMENTS.md index 870b8c5b..d3993cdb 100644 --- a/docs/ENVIRONMENTS.md +++ b/docs/ENVIRONMENTS.md @@ -4,7 +4,7 @@ | --- | --- | --- | | **local** | Supabase CLI (`supabase start`) | `pnpm run dev` (default), e2e tests | | **staging** | a second Supabase project | `pnpm run dev:staging`, Vercel preview deploys | -| **prod** | `qssmazlqrmxiudxckxvi` | `pnpm run dev:prod`, Vercel production | +| **prod** | `qssmazlqrmxiudxckxvi` | Vercel production only | The frontend reads `VITE_SUPABASE_URL` / `VITE_SUPABASE_PUBLISHABLE_KEY` from a Vite env file picked by `--mode`. Vite load order (later overrides earlier): @@ -73,7 +73,6 @@ Project Settings → Environment Variables. 
For each Supabase var, add it twice: ```bash pnpm run dev # local supabase (requires `supabase start`) pnpm run dev:staging # staging -pnpm run dev:prod # prod (real data — be careful) pnpm run db:sync:staging # overwrite staging public schema with prod data, anonymized pnpm run db:sync:local # same, into local supabase ``` diff --git a/package.json b/package.json index 8e85dc51..9b1d1c73 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,6 @@ "scripts": { "dev": "vite", "dev:staging": "vite --mode staging", - "dev:prod": "vite --mode production", "build": "vite build", "build:dev": "vite build --mode development", "build:staging": "vite build --mode staging", diff --git a/scripts/sync-from-prod.sh b/scripts/sync-from-prod.sh index 31e632a5..754ff0d8 100755 --- a/scripts/sync-from-prod.sh +++ b/scripts/sync-from-prod.sh @@ -80,12 +80,11 @@ if [[ "$SYNC_AUTH" == "1" ]]; then echo "Syncing auth.users from prod (anonymized)…" AUTH_CSV="$TMP_DIR/auth-users.csv" - psql "$PROD_DB_URL" -v ON_ERROR_STOP=1 -c "\copy (SELECT id, email, email_confirmed_at, created_at, updated_at, aud, role FROM auth.users) TO '$AUTH_CSV' WITH (FORMAT csv)" + psql "$PROD_DB_URL" -v ON_ERROR_STOP=1 -c "\copy (SELECT id, email_confirmed_at, created_at, updated_at, aud, role FROM auth.users) TO '$AUTH_CSV' WITH (FORMAT csv)" psql "$TARGET_URL" -v ON_ERROR_STOP=1 <