From 434667d685a7f703eeb2aeda9233d635db63e69a Mon Sep 17 00:00:00 2001 From: Chaim Lev-Ari Date: Sat, 9 May 2026 17:16:02 +0300 Subject: [PATCH 01/23] feat(edge-fn): add diff-schedule Edge Function with unit tests Implements the dry-run diff engine for the new schedule ingestion system. Compares a CSV payload against current DB state for a festival edition and returns clean operations + conflicts requiring user resolution (orphaned sets, stage name mismatches). Core business logic extracted into diff.ts with 22 unit tests covering slugging, timezone conversion, B2B matching, and midnight crossing. Co-Authored-By: Claude Sonnet 4.6 --- supabase/functions/_shared/auth.ts | 50 +++ supabase/functions/diff-schedule/diff.test.ts | 305 ++++++++++++++++++ supabase/functions/diff-schedule/diff.ts | 226 +++++++++++++ supabase/functions/diff-schedule/index.ts | 70 ++++ 4 files changed, 651 insertions(+) create mode 100644 supabase/functions/_shared/auth.ts create mode 100644 supabase/functions/diff-schedule/diff.test.ts create mode 100644 supabase/functions/diff-schedule/diff.ts create mode 100644 supabase/functions/diff-schedule/index.ts diff --git a/supabase/functions/_shared/auth.ts b/supabase/functions/_shared/auth.ts new file mode 100644 index 00000000..5bf260ff --- /dev/null +++ b/supabase/functions/_shared/auth.ts @@ -0,0 +1,50 @@ +import { createClient } from "https://esm.sh/@supabase/supabase-js@2"; + +export const corsHeaders = { + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Headers": + "authorization, x-client-info, apikey, content-type", +}; + +export function getAdminClient() { + return createClient( + Deno.env.get("SUPABASE_URL") ?? "", + Deno.env.get("SUPABASE_SERVICE_ROLE_KEY") ?? 
"", + ); +} + +type AuthResult = + | { userId: string; errorResponse: null } + | { userId: null; errorResponse: { status: number; body: string } }; + +export async function requireAdmin(req: Request): Promise { + const authHeader = req.headers.get("Authorization"); + if (!authHeader) { + return { userId: null, errorResponse: { status: 401, body: JSON.stringify({ error: "Unauthorized" }) } }; + } + + const userClient = createClient( + Deno.env.get("SUPABASE_URL") ?? "", + Deno.env.get("SUPABASE_ANON_KEY") ?? "", + { global: { headers: { Authorization: authHeader } } }, + ); + + const { data: { user }, error: userError } = await userClient.auth.getUser(); + if (userError || !user) { + return { userId: null, errorResponse: { status: 401, body: JSON.stringify({ error: "Unauthorized" }) } }; + } + + const adminClient = getAdminClient(); + const { data: adminRole } = await adminClient + .from("admin_roles") + .select("role") + .eq("user_id", user.id) + .in("role", ["admin", "super_admin"]) + .maybeSingle(); + + if (!adminRole) { + return { userId: null, errorResponse: { status: 403, body: JSON.stringify({ error: "Forbidden" }) } }; + } + + return { userId: user.id, errorResponse: null }; +} diff --git a/supabase/functions/diff-schedule/diff.test.ts b/supabase/functions/diff-schedule/diff.test.ts new file mode 100644 index 00000000..ad268a5f --- /dev/null +++ b/supabase/functions/diff-schedule/diff.test.ts @@ -0,0 +1,305 @@ +import { assertEquals } from "jsr:@std/assert@1"; +import { + advanceDateByOne, + artistKey, + computeDiff, + localToUtc, + toSlug, + type DbArtist, + type DbSet, + type DbStage, +} from "./diff.ts"; + +Deno.test("toSlug converts name to lowercase hyphenated slug", () => { + assertEquals(toSlug("Carl Cox"), "carl-cox"); + assertEquals(toSlug("DJ Tennis"), "dj-tennis"); + assertEquals(toSlug(" Peggy Gou "), "peggy-gou"); + assertEquals(toSlug("Aphex Twin"), "aphex-twin"); + assertEquals(toSlug("deadmau5"), "deadmau5"); + assertEquals(toSlug("Four 
Tet"), "four-tet"); +}); + +Deno.test("artistKey sorts slugs and joins with pipe", () => { + assertEquals(artistKey(["carl-cox"]), "carl-cox"); + assertEquals(artistKey(["carl-cox", "peggy-gou"]), "carl-cox|peggy-gou"); + assertEquals(artistKey(["peggy-gou", "carl-cox"]), "carl-cox|peggy-gou"); + assertEquals(artistKey(["c", "b", "a"]), "a|b|c"); +}); + +Deno.test("advanceDateByOne advances date by one day", () => { + assertEquals(advanceDateByOne("2026-07-11"), "2026-07-12"); + assertEquals(advanceDateByOne("2026-07-31"), "2026-08-01"); + assertEquals(advanceDateByOne("2026-12-31"), "2027-01-01"); +}); + +Deno.test("localToUtc converts Lisbon summer time (UTC+1) to UTC", () => { + const result = localToUtc("2026-07-11", "23:00", "Europe/Lisbon"); + assertEquals(result, "2026-07-11T22:00:00.000Z"); +}); + +Deno.test("localToUtc converts Lisbon winter time (UTC+0) to UTC", () => { + const result = localToUtc("2026-01-15", "22:00", "Europe/Lisbon"); + assertEquals(result, "2026-01-15T22:00:00.000Z"); +}); + +Deno.test("localToUtc converts midnight correctly", () => { + const result = localToUtc("2026-07-11", "00:00", "Europe/Lisbon"); + assertEquals(result, "2026-07-10T23:00:00.000Z"); +}); + +// --- computeDiff --- + +function makeArtist(name: string): DbArtist { + const slug = name.toLowerCase().replace(/\s+/g, "-"); + return { id: `id-${slug}`, name, slug }; +} + +function makeStage(id: string, name: string): DbStage { + return { id, name }; +} + +function makeSet( + id: string, + name: string, + artists: DbArtist[], + stageId: string | null = null, + timeStart: string | null = null, +): DbSet { + return { + id, + name, + description: null, + stage_id: stageId, + time_start: timeStart, + time_end: null, + set_artists: artists.map((a) => ({ artist_id: a.id, artists: a })), + }; +} + +Deno.test("computeDiff: new artist in CSV creates artist", () => { + const result = computeDiff( + [{ artists: ["New DJ"] }], + [], + [], + [], + "Europe/Lisbon", + ); + 
assertEquals(result.cleanOperations.artistsToCreate.length, 1); + assertEquals(result.cleanOperations.artistsToCreate[0].name, "New DJ"); + assertEquals(result.cleanOperations.artistsToCreate[0].slug, "new-dj"); + assertEquals(result.summary.newArtists, 1); +}); + +Deno.test("computeDiff: existing artist is not duplicated", () => { + const artist = makeArtist("Carl Cox"); + const result = computeDiff( + [{ artists: ["Carl Cox"] }], + [], + [], + [artist], + "Europe/Lisbon", + ); + assertEquals(result.cleanOperations.artistsToCreate.length, 0); + assertEquals(result.summary.newArtists, 0); +}); + +Deno.test("computeDiff: same new artist in multiple rows is created once", () => { + const result = computeDiff( + [{ artists: ["New DJ"] }, { artists: ["New DJ"] }], + [], + [], + [], + "Europe/Lisbon", + ); + assertEquals(result.cleanOperations.artistsToCreate.length, 1); +}); + +Deno.test("computeDiff: CSV row with no DB match creates new set", () => { + const result = computeDiff( + [{ artists: ["Carl Cox"] }], + [], + [], + [makeArtist("Carl Cox")], + "Europe/Lisbon", + ); + assertEquals(result.cleanOperations.setsToCreate.length, 1); + assertEquals(result.cleanOperations.setsToUpdate.length, 0); + assertEquals(result.summary.setsToCreate, 1); +}); + +Deno.test("computeDiff: CSV row matching existing set produces update", () => { + const artist = makeArtist("Carl Cox"); + const set = makeSet("set-1", "Carl Cox", [artist]); + const result = computeDiff( + [{ artists: ["Carl Cox"] }], + [], + [set], + [artist], + "Europe/Lisbon", + ); + assertEquals(result.cleanOperations.setsToUpdate.length, 1); + assertEquals(result.cleanOperations.setsToUpdate[0].id, "set-1"); + assertEquals(result.cleanOperations.setsToCreate.length, 0); + assertEquals(result.summary.setsMatched, 1); +}); + +Deno.test("computeDiff: set in DB but absent from CSV is orphaned", () => { + const artist = makeArtist("DJ Tennis"); + const set = makeSet("set-2", "DJ Tennis", [artist]); + const result = 
computeDiff( + [], + [], + [set], + [artist], + "Europe/Lisbon", + ); + assertEquals(result.conflicts.orphanedSets.length, 1); + assertEquals(result.conflicts.orphanedSets[0].id, "set-2"); + assertEquals(result.summary.setsOrphaned, 1); +}); + +Deno.test("computeDiff: B2B set matched by combined artist key", () => { + const cox = makeArtist("Carl Cox"); + const gou = makeArtist("Peggy Gou"); + const set = makeSet("set-b2b", "Carl Cox b2b Peggy Gou", [cox, gou]); + const result = computeDiff( + [{ artists: ["Carl Cox", "Peggy Gou"] }], + [], + [set], + [cox, gou], + "Europe/Lisbon", + ); + assertEquals(result.cleanOperations.setsToUpdate.length, 1); + assertEquals(result.cleanOperations.setsToUpdate[0].id, "set-b2b"); +}); + +Deno.test("computeDiff: B2B artist order in CSV does not affect match", () => { + const cox = makeArtist("Carl Cox"); + const gou = makeArtist("Peggy Gou"); + const set = makeSet("set-b2b", "Carl Cox b2b Peggy Gou", [cox, gou]); + const result = computeDiff( + [{ artists: ["Peggy Gou", "Carl Cox"] }], + [], + [set], + [cox, gou], + "Europe/Lisbon", + ); + assertEquals(result.cleanOperations.setsToUpdate.length, 1); +}); + +Deno.test("computeDiff: exact stage name match resolves stage_id", () => { + const artist = makeArtist("Carl Cox"); + const stage = makeStage("stage-1", "Main Stage"); + const result = computeDiff( + [{ artists: ["Carl Cox"], stage: "Main Stage" }], + [stage], + [], + [artist], + "Europe/Lisbon", + ); + assertEquals(result.cleanOperations.setsToCreate[0].stage_id, "stage-1"); +}); + +Deno.test("computeDiff: stage name mismatch surfaced as conflict", () => { + const artist = makeArtist("Carl Cox"); + const stage = makeStage("stage-1", "Main Stage"); + const result = computeDiff( + [{ artists: ["Carl Cox"], stage: "Mainstage" }], + [stage], + [], + [artist], + "Europe/Lisbon", + ); + assertEquals(result.conflicts.stageNameMismatches.length, 1); + assertEquals(result.conflicts.stageNameMismatches[0].csvValue, "Mainstage"); + 
assertEquals(result.conflicts.stageNameMismatches[0].closestDbValue, "Main Stage"); +}); + +Deno.test("computeDiff: unknown stage creates new stage", () => { + const artist = makeArtist("Carl Cox"); + const result = computeDiff( + [{ artists: ["Carl Cox"], stage: "Secret Forest" }], + [], + [], + [artist], + "Europe/Lisbon", + ); + assertEquals(result.cleanOperations.stagesToCreate.length, 1); + assertEquals(result.cleanOperations.stagesToCreate[0].name, "Secret Forest"); +}); + +Deno.test("computeDiff: end time before start time triggers midnight advance", () => { + const artist = makeArtist("Carl Cox"); + const result = computeDiff( + [{ artists: ["Carl Cox"], date: "2026-07-11", startTime: "23:00", endTime: "01:00" }], + [], + [], + [artist], + "UTC", + ); + const created = result.cleanOperations.setsToCreate[0]; + // start should be 2026-07-11T23:00:00Z, end should be 2026-07-12T01:00:00Z + assertEquals(created.time_start, "2026-07-11T23:00:00.000Z"); + assertEquals(created.time_end, "2026-07-12T01:00:00.000Z"); +}); + +Deno.test("computeDiff: set name falls back to b2b join when not provided", () => { + const artist1 = makeArtist("Carl Cox"); + const artist2 = makeArtist("Peggy Gou"); + const result = computeDiff( + [{ artists: ["Carl Cox", "Peggy Gou"] }], + [], + [], + [artist1, artist2], + "UTC", + ); + assertEquals(result.cleanOperations.setsToCreate[0].name, "Carl Cox b2b Peggy Gou"); +}); + +Deno.test("computeDiff: explicit set name takes precedence over b2b fallback", () => { + const artist = makeArtist("Carl Cox"); + const result = computeDiff( + [{ artists: ["Carl Cox"], setName: "Carl Cox Live" }], + [], + [], + [artist], + "UTC", + ); + assertEquals(result.cleanOperations.setsToCreate[0].name, "Carl Cox Live"); +}); + +Deno.test("computeDiff: same stage mismatch from multiple rows surfaced once", () => { + const artist1 = makeArtist("Artist A"); + const artist2 = makeArtist("Artist B"); + const stage = makeStage("stage-1", "Main Stage"); + const 
result = computeDiff( + [ + { artists: ["Artist A"], stage: "Mainstage" }, + { artists: ["Artist B"], stage: "Mainstage" }, + ], + [stage], + [], + [artist1, artist2], + "UTC", + ); + assertEquals(result.conflicts.stageNameMismatches.length, 1); +}); + +Deno.test("computeDiff: multiple candidates disambiguated by stage", () => { + const artist = makeArtist("Carl Cox"); + const stage1 = makeStage("s1", "Stage One"); + const stage2 = makeStage("s2", "Stage Two"); + const set1 = makeSet("set-a", "Carl Cox", [artist], "s1"); + const set2 = makeSet("set-b", "Carl Cox", [artist], "s2"); + const result = computeDiff( + [{ artists: ["Carl Cox"], stage: "Stage Two" }], + [stage1, stage2], + [set1, set2], + [artist], + "UTC", + ); + assertEquals(result.cleanOperations.setsToUpdate.length, 1); + assertEquals(result.cleanOperations.setsToUpdate[0].id, "set-b"); + assertEquals(result.conflicts.orphanedSets.length, 1); + assertEquals(result.conflicts.orphanedSets[0].id, "set-a"); +}); diff --git a/supabase/functions/diff-schedule/diff.ts b/supabase/functions/diff-schedule/diff.ts new file mode 100644 index 00000000..1f88902b --- /dev/null +++ b/supabase/functions/diff-schedule/diff.ts @@ -0,0 +1,226 @@ +export type CsvRow = { + artists: string[]; + setName?: string; + stage?: string; + date?: string; + startTime?: string; + endTime?: string; + description?: string; +}; + +export type DbStage = { id: string; name: string }; +export type DbArtist = { id: string; name: string; slug: string }; +export type DbSet = { + id: string; + name: string; + description: string | null; + stage_id: string | null; + time_start: string | null; + time_end: string | null; + set_artists: { artist_id: string; artists: DbArtist }[]; +}; + +export type SetPayload = { + name: string; + description: string | null; + stage_id: string | null; + time_start: string | null; + time_end: string | null; + artistSlugs: string[]; +}; + +export type DiffResult = { + summary: { + newArtists: number; + newStages: 
number; + setsMatched: number; + setsToCreate: number; + setsOrphaned: number; + }; + newArtistNames: string[]; + cleanOperations: { + artistsToCreate: { name: string; slug: string }[]; + stagesToCreate: { name: string }[]; + setsToCreate: SetPayload[]; + setsToUpdate: ({ id: string } & SetPayload)[]; + }; + conflicts: { + stageNameMismatches: { + csvValue: string; + closestDbValue: string; + dbStageId: string; + }[]; + orphanedSets: { + id: string; + name: string; + stage: string | null; + timeStart: string | null; + }[]; + }; +}; + +export function toSlug(name: string): string { + return name + .toLowerCase() + .trim() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-+|-+$/g, ""); +} + +export function artistKey(slugs: string[]): string { + return [...slugs].sort().join("|"); +} + +export function advanceDateByOne(dateStr: string): string { + const d = new Date(dateStr + "T00:00:00Z"); + d.setUTCDate(d.getUTCDate() + 1); + return d.toISOString().split("T")[0]; +} + +export function localToUtc(dateStr: string, timeStr: string, timezone: string): string { + const localIso = `${dateStr}T${timeStr}:00`; + const naiveUtc = new Date(localIso + "Z"); + // sv-SE locale gives "YYYY-MM-DD HH:MM:SS" — unambiguously parseable as UTC + const localInTz = new Date( + naiveUtc.toLocaleString("sv-SE", { timeZone: timezone }) + "Z", + ); + const offsetMs = naiveUtc.getTime() - localInTz.getTime(); + return new Date(naiveUtc.getTime() + offsetMs).toISOString(); +} + +export function computeDiff( + rows: CsvRow[], + dbStages: DbStage[], + dbSets: DbSet[], + dbArtists: DbArtist[], + timezone: string, +): DiffResult { + const stageByNameLower = new Map(dbStages.map((s) => [s.name.toLowerCase(), s])); + const existingArtistSlugs = new Set(dbArtists.map((a) => a.slug)); + + const setsByArtistKey = new Map(); + for (const set of dbSets) { + const slugs = set.set_artists.map((sa) => sa.artists.slug); + const key = artistKey(slugs); + const bucket = setsByArtistKey.get(key) ?? 
[]; + bucket.push(set); + setsByArtistKey.set(key, bucket); + } + + const matchedSetIds = new Set(); + const seenNewArtistSlugs = new Set(); + const seenNewStageNames = new Set(); + const seenMismatchedStages = new Set(); + + const artistsToCreate: { name: string; slug: string }[] = []; + const stagesToCreate: { name: string }[] = []; + const stageNameMismatches: DiffResult["conflicts"]["stageNameMismatches"] = []; + const setsToCreate: SetPayload[] = []; + const setsToUpdate: ({ id: string } & SetPayload)[] = []; + + for (const row of rows) { + const artistSlugs: string[] = []; + for (const name of row.artists) { + const slug = toSlug(name); + artistSlugs.push(slug); + if (!existingArtistSlugs.has(slug) && !seenNewArtistSlugs.has(slug)) { + artistsToCreate.push({ name, slug }); + seenNewArtistSlugs.add(slug); + } + } + + let resolvedStageId: string | null = null; + if (row.stage) { + const lower = row.stage.toLowerCase(); + const exactMatch = stageByNameLower.get(lower); + if (exactMatch) { + resolvedStageId = exactMatch.id; + } else { + const strip = (s: string) => s.toLowerCase().replace(/[^a-z0-9]/g, ""); + const closeMatch = dbStages.find((s) => { + const a = strip(s.name); + const b = strip(lower); + return a === b || a.includes(b) || b.includes(a); + }); + if (closeMatch && !seenMismatchedStages.has(row.stage)) { + stageNameMismatches.push({ + csvValue: row.stage, + closestDbValue: closeMatch.name, + dbStageId: closeMatch.id, + }); + seenMismatchedStages.add(row.stage); + } else if (!closeMatch && !seenNewStageNames.has(row.stage)) { + stagesToCreate.push({ name: row.stage }); + seenNewStageNames.add(row.stage); + } + } + } + + let timeStart: string | null = null; + let timeEnd: string | null = null; + if (row.date && row.startTime) { + timeStart = localToUtc(row.date, row.startTime, timezone); + } + if (row.date && row.endTime) { + const crossesMidnight = row.startTime != null && row.endTime < row.startTime; + const endDate = crossesMidnight ? 
advanceDateByOne(row.date) : row.date; + timeEnd = localToUtc(endDate, row.endTime, timezone); + } + + const setName = row.setName?.trim() || row.artists.join(" b2b "); + const key = artistKey(artistSlugs); + const candidates = setsByArtistKey.get(key) ?? []; + + let matched: DbSet | null = null; + if (candidates.length === 1) { + matched = candidates[0]; + } else if (candidates.length > 1) { + matched = + (resolvedStageId + ? candidates.find((s) => s.stage_id === resolvedStageId) ?? null + : null) ?? + (row.date + ? candidates.find((s) => s.time_start?.startsWith(row.date!)) ?? null + : null) ?? + candidates[0]; + } + + const payload: SetPayload = { + name: setName, + description: row.description ?? null, + stage_id: resolvedStageId, + time_start: timeStart, + time_end: timeEnd, + artistSlugs, + }; + + if (matched) { + matchedSetIds.add(matched.id); + setsToUpdate.push({ id: matched.id, ...payload }); + } else { + setsToCreate.push(payload); + } + } + + const orphanedSets = dbSets + .filter((s) => !matchedSetIds.has(s.id)) + .map((s) => ({ + id: s.id, + name: s.name, + stage: dbStages.find((st) => st.id === s.stage_id)?.name ?? 
null, + timeStart: s.time_start, + })); + + return { + summary: { + newArtists: artistsToCreate.length, + newStages: stagesToCreate.length, + setsMatched: matchedSetIds.size, + setsToCreate: setsToCreate.length, + setsOrphaned: orphanedSets.length, + }, + newArtistNames: artistsToCreate.map((a) => a.name), + cleanOperations: { artistsToCreate, stagesToCreate, setsToCreate, setsToUpdate }, + conflicts: { stageNameMismatches, orphanedSets }, + }; +} diff --git a/supabase/functions/diff-schedule/index.ts b/supabase/functions/diff-schedule/index.ts new file mode 100644 index 00000000..59a19a7d --- /dev/null +++ b/supabase/functions/diff-schedule/index.ts @@ -0,0 +1,70 @@ +import { serve } from "https://deno.land/std@0.168.0/http/server.ts"; +import { getAdminClient, requireAdmin, corsHeaders } from "../_shared/auth.ts"; +import { computeDiff, type DbArtist, type DbSet, type DbStage } from "./diff.ts"; + +serve(async (req) => { + if (req.method === "OPTIONS") { + return new Response("ok", { headers: corsHeaders }); + } + + const auth = await requireAdmin(req); + if (auth.errorResponse) { + return new Response(auth.errorResponse.body, { + status: auth.errorResponse.status, + headers: { ...corsHeaders, "Content-Type": "application/json" }, + }); + } + + try { + const body = await req.json(); + const { festivalEditionId, timezone, rows } = body; + + if (!festivalEditionId || !timezone || !Array.isArray(rows)) { + return new Response( + JSON.stringify({ error: "Missing required fields: festivalEditionId, timezone, rows" }), + { status: 400, headers: { ...corsHeaders, "Content-Type": "application/json" } }, + ); + } + + const db = getAdminClient(); + + const [stagesRes, setsRes, artistsRes] = await Promise.all([ + db + .from("stages") + .select("id, name") + .eq("festival_edition_id", festivalEditionId) + .eq("archived", false), + db + .from("sets") + .select("id, name, description, stage_id, time_start, time_end, set_artists(artist_id, artists(id, name, slug))") + 
.eq("festival_edition_id", festivalEditionId) + .eq("archived", false), + db + .from("artists") + .select("id, name, slug") + .eq("archived", false), + ]); + + if (stagesRes.error) throw stagesRes.error; + if (setsRes.error) throw setsRes.error; + if (artistsRes.error) throw artistsRes.error; + + const result = computeDiff( + rows, + (stagesRes.data ?? []) as DbStage[], + (setsRes.data ?? []) as DbSet[], + (artistsRes.data ?? []) as DbArtist[], + timezone, + ); + + return new Response(JSON.stringify(result), { + headers: { ...corsHeaders, "Content-Type": "application/json" }, + }); + } catch (error) { + console.error("diff-schedule error:", error); + return new Response(JSON.stringify({ error: error.message }), { + status: 500, + headers: { ...corsHeaders, "Content-Type": "application/json" }, + }); + } +}); From 7a95de2c81b5a803c69e857e0cd9905c9088079d Mon Sep 17 00:00:00 2001 From: Chaim Lev-Ari Date: Sat, 9 May 2026 17:32:31 +0300 Subject: [PATCH 02/23] feat(edge-fn): add commit-schedule Edge Function and RPC migration Adds the atomic write path for the schedule ingestion system: - Migration: adds UNIQUE constraints on artists.slug and stages(festival_edition_id, name), creates the commit_schedule PL/pgSQL RPC that wraps all writes (artist upserts, stage upserts, set inserts/updates, set_artists sync, orphan archiving) in a single transaction with full rollback on failure. - Edge Function: thin admin-gated HTTP handler that calls the RPC via service role key. - Integration tests for the RPC covering create, update, archive, and time storage. 
Co-Authored-By: Claude Sonnet 4.6 --- .../commit-schedule/commit-schedule.test.ts | 190 ++++++++++++++++++ supabase/functions/commit-schedule/index.ts | 83 ++++++++ .../20260509142022_commit_schedule_rpc.sql | 159 +++++++++++++++ 3 files changed, 432 insertions(+) create mode 100644 supabase/functions/commit-schedule/commit-schedule.test.ts create mode 100644 supabase/functions/commit-schedule/index.ts create mode 100644 supabase/migrations/20260509142022_commit_schedule_rpc.sql diff --git a/supabase/functions/commit-schedule/commit-schedule.test.ts b/supabase/functions/commit-schedule/commit-schedule.test.ts new file mode 100644 index 00000000..4a68ed06 --- /dev/null +++ b/supabase/functions/commit-schedule/commit-schedule.test.ts @@ -0,0 +1,190 @@ +// Integration tests for commit-schedule. +// Run against a local Supabase instance: deno test --allow-env --allow-net commit-schedule.test.ts +// +// These tests require SUPABASE_URL and SUPABASE_SERVICE_ROLE_KEY env vars. +// They test the commit_schedule RPC directly, which is the meaningful logic layer. +// The Edge Function itself is a thin auth + dispatch wrapper. + +import { assertEquals, assertExists } from "jsr:@std/assert@1"; +import { createClient } from "https://esm.sh/@supabase/supabase-js@2"; + +const SUPABASE_URL = Deno.env.get("SUPABASE_URL") ?? ""; +const SERVICE_ROLE_KEY = Deno.env.get("SUPABASE_SERVICE_ROLE_KEY") ?? 
""; + +function skipIfNoEnv() { + if (!SUPABASE_URL || !SERVICE_ROLE_KEY) { + console.warn("Skipping integration tests: SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY not set"); + return true; + } + return false; +} + +function adminClient() { + return createClient(SUPABASE_URL, SERVICE_ROLE_KEY); +} + +async function getTestEditionId(db: ReturnType): Promise { + const { data } = await db.from("festival_editions").select("id").limit(1).single(); + assertExists(data, "No festival edition found — run test:setup first"); + return data.id; +} + +async function getTestUserId(db: ReturnType): Promise { + const { data } = await db.from("admin_roles").select("user_id").limit(1).single(); + assertExists(data, "No admin user found — run test:setup first"); + return data.user_id; +} + +Deno.test("commit_schedule: creates new artist and set", async () => { + if (skipIfNoEnv()) return; + const db = adminClient(); + const editionId = await getTestEditionId(db); + const userId = await getTestUserId(db); + const slug = `test-artist-${Date.now()}`; + + const { data, error } = await db.rpc("commit_schedule", { + p_festival_edition_id: editionId, + p_user_id: userId, + p_artists_to_create: [{ name: "Test Artist", slug }], + p_stages_to_create: [], + p_sets_to_create: [{ + name: "Test Artist Set", + description: null, + stageName: null, + timeStart: null, + timeEnd: null, + artistSlugs: [slug], + }], + p_sets_to_update: [], + p_set_ids_to_archive: [], + }); + + assertEquals(error, null); + assertEquals(data.setsCreated, 1); + assertEquals(data.setsUpdated, 0); + + // Cleanup + await db.from("artists").delete().eq("slug", slug); +}); + +Deno.test("commit_schedule: updates existing set without creating duplicate", async () => { + if (skipIfNoEnv()) return; + const db = adminClient(); + const editionId = await getTestEditionId(db); + const userId = await getTestUserId(db); + const slug = `test-update-artist-${Date.now()}`; + + // Create artist and set + await db.from("artists").insert({ name: 
"Update Test", slug }); + const { data: artist } = await db.from("artists").select("id").eq("slug", slug).single(); + const { data: set } = await db + .from("sets") + .insert({ festival_edition_id: editionId, name: "Old Name", slug: "old-name", created_by: userId }) + .select("id") + .single(); + await db.from("set_artists").insert({ set_id: set!.id, artist_id: artist!.id }); + + const { data, error } = await db.rpc("commit_schedule", { + p_festival_edition_id: editionId, + p_user_id: userId, + p_artists_to_create: [], + p_stages_to_create: [], + p_sets_to_create: [], + p_sets_to_update: [{ + id: set!.id, + name: "New Name", + description: "Updated", + stageName: null, + timeStart: null, + timeEnd: null, + artistSlugs: [slug], + }], + p_set_ids_to_archive: [], + }); + + assertEquals(error, null); + assertEquals(data.setsUpdated, 1); + + const { data: updated } = await db.from("sets").select("name, description").eq("id", set!.id).single(); + assertEquals(updated!.name, "New Name"); + assertEquals(updated!.description, "Updated"); + + // Cleanup + await db.from("sets").delete().eq("id", set!.id); + await db.from("artists").delete().eq("slug", slug); +}); + +Deno.test("commit_schedule: archives orphaned sets", async () => { + if (skipIfNoEnv()) return; + const db = adminClient(); + const editionId = await getTestEditionId(db); + const userId = await getTestUserId(db); + + const { data: set } = await db + .from("sets") + .insert({ festival_edition_id: editionId, name: "Orphan Set", slug: "orphan-set", created_by: userId }) + .select("id") + .single(); + + const { data, error } = await db.rpc("commit_schedule", { + p_festival_edition_id: editionId, + p_user_id: userId, + p_artists_to_create: [], + p_stages_to_create: [], + p_sets_to_create: [], + p_sets_to_update: [], + p_set_ids_to_archive: [set!.id], + }); + + assertEquals(error, null); + assertEquals(data.setsArchived, 1); + + const { data: archived } = await db.from("sets").select("archived").eq("id", 
set!.id).single(); + assertEquals(archived!.archived, true); + + // Cleanup + await db.from("sets").delete().eq("id", set!.id); +}); + +Deno.test("commit_schedule: midnight-crossing times stored correctly", async () => { + if (skipIfNoEnv()) return; + const db = adminClient(); + const editionId = await getTestEditionId(db); + const userId = await getTestUserId(db); + const slug = `test-midnight-${Date.now()}`; + + await db.from("artists").insert({ name: "Late Night DJ", slug }); + + const { data, error } = await db.rpc("commit_schedule", { + p_festival_edition_id: editionId, + p_user_id: userId, + p_artists_to_create: [], + p_stages_to_create: [], + p_sets_to_create: [{ + name: "Late Night Set", + description: null, + stageName: null, + timeStart: "2026-07-11T23:00:00.000Z", + timeEnd: "2026-07-12T01:00:00.000Z", + artistSlugs: [slug], + }], + p_sets_to_update: [], + p_set_ids_to_archive: [], + }); + + assertEquals(error, null); + + const { data: sets } = await db + .from("sets") + .select("time_start, time_end, set_artists(artist_id, artists(slug))") + .eq("festival_edition_id", editionId) + .eq("name", "Late Night Set"); + + assertExists(sets?.[0]); + assertEquals(sets![0].time_start, "2026-07-11T23:00:00+00:00"); + assertEquals(sets![0].time_end, "2026-07-12T01:00:00+00:00"); + + // Cleanup + await db.from("sets").delete().eq("id", sets![0].id ?? 
""); + await db.from("artists").delete().eq("slug", slug); +}); diff --git a/supabase/functions/commit-schedule/index.ts b/supabase/functions/commit-schedule/index.ts new file mode 100644 index 00000000..9e409055 --- /dev/null +++ b/supabase/functions/commit-schedule/index.ts @@ -0,0 +1,83 @@ +import { serve } from "https://deno.land/std@0.168.0/http/server.ts"; +import { getAdminClient, requireAdmin, corsHeaders } from "../_shared/auth.ts"; + +type SetPayload = { + name: string; + description?: string; + stageName?: string; + timeStart?: string; + timeEnd?: string; + artistSlugs: string[]; +}; + +type CommitRequest = { + festivalEditionId: string; + artistsToCreate: { name: string; slug: string }[]; + stagesToCreate: { name: string }[]; + setsToCreate: SetPayload[]; + setsToUpdate: ({ id: string } & SetPayload)[]; + setIdsToArchive: string[]; +}; + +serve(async (req) => { + if (req.method === "OPTIONS") { + return new Response("ok", { headers: corsHeaders }); + } + + const auth = await requireAdmin(req); + if (auth.errorResponse) { + return new Response(auth.errorResponse.body, { + status: auth.errorResponse.status, + headers: { ...corsHeaders, "Content-Type": "application/json" }, + }); + } + + try { + const body: CommitRequest = await req.json(); + const { + festivalEditionId, + artistsToCreate, + stagesToCreate, + setsToCreate, + setsToUpdate, + setIdsToArchive, + } = body; + + if (!festivalEditionId) { + return new Response( + JSON.stringify({ error: "Missing required field: festivalEditionId" }), + { status: 400, headers: { ...corsHeaders, "Content-Type": "application/json" } }, + ); + } + + const db = getAdminClient(); + + const { data, error } = await db.rpc("commit_schedule", { + p_festival_edition_id: festivalEditionId, + p_user_id: auth.userId, + p_artists_to_create: artistsToCreate ?? [], + p_stages_to_create: stagesToCreate ?? [], + p_sets_to_create: setsToCreate ?? [], + p_sets_to_update: setsToUpdate ?? [], + p_set_ids_to_archive: setIdsToArchive ?? 
[], + }); + + if (error) { + console.error("commit_schedule RPC error:", error); + return new Response( + JSON.stringify({ error: error.message }), + { status: 400, headers: { ...corsHeaders, "Content-Type": "application/json" } }, + ); + } + + return new Response(JSON.stringify(data), { + headers: { ...corsHeaders, "Content-Type": "application/json" }, + }); + } catch (error) { + console.error("commit-schedule error:", error); + return new Response( + JSON.stringify({ error: error.message }), + { status: 500, headers: { ...corsHeaders, "Content-Type": "application/json" } }, + ); + } +}); diff --git a/supabase/migrations/20260509142022_commit_schedule_rpc.sql b/supabase/migrations/20260509142022_commit_schedule_rpc.sql new file mode 100644 index 00000000..ea87fca3 --- /dev/null +++ b/supabase/migrations/20260509142022_commit_schedule_rpc.sql @@ -0,0 +1,159 @@ +-- Add unique constraint on artists.slug (required for ON CONFLICT upsert in commit_schedule) +-- First deduplicate any existing conflicting slugs by appending the short ID +WITH duplicates AS ( + SELECT slug, MIN(id) AS keep_id + FROM public.artists + GROUP BY slug + HAVING COUNT(*) > 1 +) +UPDATE public.artists a +SET slug = a.slug || '-' || SUBSTRING(a.id::text, 1, 6) +WHERE EXISTS ( + SELECT 1 FROM duplicates d + WHERE d.slug = a.slug AND a.id != d.keep_id +); + +ALTER TABLE public.artists + ADD CONSTRAINT artists_slug_unique UNIQUE (slug); + +-- Add unique constraint on stages(festival_edition_id, name) for upsert +ALTER TABLE public.stages + ADD CONSTRAINT stages_edition_name_unique UNIQUE (festival_edition_id, name); + +-- RPC: commit_schedule +-- Executes a fully resolved schedule import inside a single transaction. +-- Called by the commit-schedule Edge Function using the service role key. 
-- RPC: commit_schedule
-- Applies a fully resolved schedule import inside a single transaction.
-- Called by the commit-schedule Edge Function using the service role key;
-- p_user_id is the already-authenticated admin performing the import.
CREATE OR REPLACE FUNCTION public.commit_schedule(
  p_festival_edition_id UUID,
  p_user_id UUID,
  p_artists_to_create JSONB,   -- [{ name, slug }]
  p_stages_to_create JSONB,    -- [{ name }]
  p_sets_to_create JSONB,      -- [{ name, description, stageName, timeStart, timeEnd, artistSlugs }]
  p_sets_to_update JSONB,      -- [{ id, name, description, stageName, timeStart, timeEnd, artistSlugs }]
  p_set_ids_to_archive UUID[]
)
RETURNS JSONB
LANGUAGE plpgsql
SET search_path = public
AS $$
DECLARE
  v_set_elem JSONB;
  v_new_set_id UUID;
  v_sets_created INT := 0;
  v_sets_updated INT := 0;
  v_sets_archived INT := 0;
BEGIN
  -- 1. Upsert new artists (matched on slug; relies on artists_slug_unique).
  INSERT INTO artists (name, slug)
  SELECT elem->>'name', elem->>'slug'
  FROM jsonb_array_elements(p_artists_to_create) AS elem
  ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name;

  -- 2. Upsert new stages (matched on edition + name).
  INSERT INTO stages (festival_edition_id, name)
  SELECT p_festival_edition_id, elem->>'name'
  FROM jsonb_array_elements(p_stages_to_create) AS elem
  ON CONFLICT (festival_edition_id, name) DO NOTHING;

  -- 3. Update existing sets.
  FOR v_set_elem IN SELECT value FROM jsonb_array_elements(p_sets_to_update) LOOP
    UPDATE sets
    SET
      name = v_set_elem->>'name',
      description = NULLIF(v_set_elem->>'description', ''),
      stage_id = (
        SELECT s.id FROM stages s
        WHERE s.festival_edition_id = p_festival_edition_id
          AND s.name = v_set_elem->>'stageName'
        LIMIT 1
      ),
      -- NULLIF covers both a missing key (->> yields NULL) and an empty
      -- string; ''::TIMESTAMPTZ would raise and abort the whole import.
      time_start = (NULLIF(v_set_elem->>'timeStart', ''))::TIMESTAMPTZ,
      time_end = (NULLIF(v_set_elem->>'timeEnd', ''))::TIMESTAMPTZ,
      updated_at = NOW()
    WHERE id = (v_set_elem->>'id')::UUID
      AND festival_edition_id = p_festival_edition_id;

    -- Only count — and only touch set_artists — when the set actually
    -- belongs to this edition. Previously the counter advanced (and links
    -- were rewritten) even when the UPDATE matched zero rows.
    IF FOUND THEN
      v_sets_updated := v_sets_updated + 1;

      -- Sync set_artists: drop existing links, re-insert from the CSV slugs.
      DELETE FROM set_artists WHERE set_id = (v_set_elem->>'id')::UUID;

      INSERT INTO set_artists (set_id, artist_id)
      SELECT DISTINCT (v_set_elem->>'id')::UUID, a.id
      FROM jsonb_array_elements_text(v_set_elem->'artistSlugs') AS slug_val
      JOIN artists a ON a.slug = slug_val
      ON CONFLICT (set_id, artist_id) DO NOTHING;
    END IF;
  END LOOP;

  -- 4. Insert new sets.
  FOR v_set_elem IN SELECT value FROM jsonb_array_elements(p_sets_to_create) LOOP
    INSERT INTO sets (
      festival_edition_id, name, slug, description, stage_id,
      time_start, time_end, created_by
    )
    VALUES (
      p_festival_edition_id,
      v_set_elem->>'name',
      -- Slugging mirrors the frontend: strip non-alphanumerics, hyphenate runs
      -- of whitespace, lowercase.
      LOWER(
        REGEXP_REPLACE(
          REGEXP_REPLACE(TRIM(v_set_elem->>'name'), '[^a-zA-Z0-9\s]', '', 'g'),
          '\s+', '-', 'g'
        )
      ),
      NULLIF(v_set_elem->>'description', ''),
      (
        SELECT s.id FROM stages s
        WHERE s.festival_edition_id = p_festival_edition_id
          AND s.name = v_set_elem->>'stageName'
        LIMIT 1
      ),
      (NULLIF(v_set_elem->>'timeStart', ''))::TIMESTAMPTZ,
      (NULLIF(v_set_elem->>'timeEnd', ''))::TIMESTAMPTZ,
      p_user_id
    )
    RETURNING id INTO v_new_set_id;

    v_sets_created := v_sets_created + 1;

    -- DISTINCT + ON CONFLICT match the update branch above: a slug repeated
    -- in artistSlugs must not abort the transaction with a PK violation.
    INSERT INTO set_artists (set_id, artist_id)
    SELECT DISTINCT v_new_set_id, a.id
    FROM jsonb_array_elements_text(v_set_elem->'artistSlugs') AS slug_val
    JOIN artists a ON a.slug = slug_val
    ON CONFLICT (set_id, artist_id) DO NOTHING;
  END LOOP;

  -- 5. Archive orphaned sets (hidden from users; votes preserved).
  IF p_set_ids_to_archive IS NOT NULL AND array_length(p_set_ids_to_archive, 1) > 0 THEN
    UPDATE sets
    SET archived = true, updated_at = NOW()
    WHERE id = ANY(p_set_ids_to_archive)
      AND festival_edition_id = p_festival_edition_id;

    GET DIAGNOSTICS v_sets_archived = ROW_COUNT;
  END IF;

  RETURN jsonb_build_object(
    'setsCreated', v_sets_created,
    'setsUpdated', v_sets_updated,
    'setsArchived', v_sets_archived
  );

EXCEPTION WHEN OTHERS THEN
  -- Re-raise with context; plpgsql rolls the whole transaction back.
  RAISE EXCEPTION 'commit_schedule failed: %', SQLERRM;
END;
$$;
Co-Authored-By: Claude Sonnet 4.6 --- .../Admin/ScheduleImport/CsvUploadStep.tsx | 140 ++++++++++++ .../ScheduleImport/DiffSummaryBanner.tsx | 39 ++++ .../ScheduleImport/OrphanedSetsPanel.tsx | 85 ++++++++ .../ScheduleImport/ScheduleImportWizard.tsx | 202 ++++++++++++++++++ .../ScheduleImport/StageMismatchResolver.tsx | 90 ++++++++ src/pages/admin/festivals/FestivalEdition.tsx | 17 +- .../festivals/FestivalScheduleImport.tsx | 36 ++++ .../$festivalId.$editionId.import.tsx | 14 -- .../editions/$editionSlug/import.tsx | 8 + src/services/scheduleImportService.ts | 175 +++++++++++++++ supabase/functions/diff-schedule/diff.test.ts | 8 +- supabase/functions/diff-schedule/diff.ts | 23 +- 12 files changed, 810 insertions(+), 27 deletions(-) create mode 100644 src/components/Admin/ScheduleImport/CsvUploadStep.tsx create mode 100644 src/components/Admin/ScheduleImport/DiffSummaryBanner.tsx create mode 100644 src/components/Admin/ScheduleImport/OrphanedSetsPanel.tsx create mode 100644 src/components/Admin/ScheduleImport/ScheduleImportWizard.tsx create mode 100644 src/components/Admin/ScheduleImport/StageMismatchResolver.tsx create mode 100644 src/pages/admin/festivals/FestivalScheduleImport.tsx delete mode 100644 src/routes/admin/festivals/$festivalId.$editionId.import.tsx create mode 100644 src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx create mode 100644 src/services/scheduleImportService.ts diff --git a/src/components/Admin/ScheduleImport/CsvUploadStep.tsx b/src/components/Admin/ScheduleImport/CsvUploadStep.tsx new file mode 100644 index 00000000..b195dac5 --- /dev/null +++ b/src/components/Admin/ScheduleImport/CsvUploadStep.tsx @@ -0,0 +1,140 @@ +import { useRef, useState } from "react"; +import { Upload, Loader2 } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { Label } from "@/components/ui/label"; +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; +import { 
parseScheduleCsv, callDiffSchedule, type CsvRow, type DiffResult } from "@/services/scheduleImportService"; + +const TIMEZONES = [ + { value: "Europe/Lisbon", label: "Lisbon (WET/WEST)" }, + { value: "Europe/London", label: "London (GMT/BST)" }, + { value: "Europe/Berlin", label: "Berlin (CET/CEST)" }, + { value: "America/New_York", label: "New York (EST/EDT)" }, + { value: "America/Los_Angeles", label: "Los Angeles (PST/PDT)" }, + { value: "UTC", label: "UTC" }, +]; + +type Props = { + festivalEditionId: string; + onDiffReady: (diff: DiffResult, rows: CsvRow[], timezone: string) => void; +}; + +export function CsvUploadStep({ festivalEditionId, onDiffReady }: Props) { + const fileRef = useRef(null); + const [timezone, setTimezone] = useState("Europe/Lisbon"); + const [fileName, setFileName] = useState(null); + const [rows, setRows] = useState([]); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + + function handleFileChange(e: React.ChangeEvent) { + const file = e.target.files?.[0]; + if (!file) return; + setFileName(file.name); + setError(null); + + const reader = new FileReader(); + reader.onload = (ev) => { + const content = ev.target?.result as string; + try { + const parsed = parseScheduleCsv(content); + if (parsed.length === 0) { + setError("No valid rows found. Make sure your CSV has an 'Artists' column."); + setRows([]); + } else { + setRows(parsed); + } + } catch { + setError("Failed to parse CSV. Check the file format."); + setRows([]); + } + }; + reader.readAsText(file); + } + + async function handleAnalyse() { + if (rows.length === 0) return; + setLoading(true); + setError(null); + try { + const diff = await callDiffSchedule(festivalEditionId, timezone, rows); + onDiffReady(diff, rows, timezone); + } catch (err) { + setError(err instanceof Error ? err.message : "Failed to analyse schedule."); + } finally { + setLoading(false); + } + } + + return ( +
+
+ + +

+ All times in the CSV are interpreted as local festival time. +

+
+ +
+ +
fileRef.current?.click()} + > + + {fileName ? ( +

{fileName}

+ ) : ( +

Click to upload CSV

+ )} + {rows.length > 0 && ( +

{rows.length} rows parsed

+ )} +
+ +

+ Required column: Artists (use | for B2B, e.g. Carl Cox | Peggy Gou). + Optional: Set Name, Stage, Date (YYYY-MM-DD),{" "} + Start Time (HH:MM), End Time (HH:MM), Description. +

+
+ + {error && ( +

{error}

+ )} + + +
+ ); +} diff --git a/src/components/Admin/ScheduleImport/DiffSummaryBanner.tsx b/src/components/Admin/ScheduleImport/DiffSummaryBanner.tsx new file mode 100644 index 00000000..40725651 --- /dev/null +++ b/src/components/Admin/ScheduleImport/DiffSummaryBanner.tsx @@ -0,0 +1,39 @@ +import { Badge } from "@/components/ui/badge"; +import { type DiffResult } from "@/services/scheduleImportService"; + +type Props = { diff: DiffResult }; + +export function DiffSummaryBanner({ diff }: Props) { + const { summary, newArtistNames } = diff; + + const items = [ + { label: "sets to create", value: summary.setsToCreate, variant: "default" as const }, + { label: "sets to update", value: summary.setsMatched, variant: "secondary" as const }, + { label: "new stages", value: summary.newStages, variant: "default" as const }, + { label: "conflicts", value: summary.setsOrphaned + diff.conflicts.stageNameMismatches.length, variant: "destructive" as const }, + ].filter((item) => item.value > 0); + + return ( +
+
+ {items.map((item) => ( + + {item.value} {item.label} + + ))} + {items.length === 0 && ( + No changes detected. + )} +
+ + {summary.newArtists > 0 && ( +

+ {summary.newArtists} new artist{summary.newArtists !== 1 ? "s" : ""} + {" "}will be created:{" "} + {newArtistNames.slice(0, 5).join(", ")} + {newArtistNames.length > 5 && ` and ${newArtistNames.length - 5} more`}. +

+ )} +
+ ); +} diff --git a/src/components/Admin/ScheduleImport/OrphanedSetsPanel.tsx b/src/components/Admin/ScheduleImport/OrphanedSetsPanel.tsx new file mode 100644 index 00000000..5845dc97 --- /dev/null +++ b/src/components/Admin/ScheduleImport/OrphanedSetsPanel.tsx @@ -0,0 +1,85 @@ +import { Button } from "@/components/ui/button"; +import { Switch } from "@/components/ui/switch"; +import { Label } from "@/components/ui/label"; +import { Archive } from "lucide-react"; +import { type DiffResult, type OrphanResolution } from "@/services/scheduleImportService"; + +type OrphanedSet = DiffResult["conflicts"]["orphanedSets"][number]; + +type Props = { + orphanedSets: OrphanedSet[]; + resolutions: Record; + onChange: (setId: string, resolution: OrphanResolution) => void; +}; + +export function OrphanedSetsPanel({ orphanedSets, resolutions, onChange }: Props) { + if (orphanedSets.length === 0) return null; + + function allArchived() { + return orphanedSets.every((s) => (resolutions[s.id] ?? "keep") === "archive"); + } + + function toggleAll() { + const target: OrphanResolution = allArchived() ? "keep" : "archive"; + orphanedSets.forEach((s) => onChange(s.id, target)); + } + + function formatTime(iso: string | null) { + if (!iso) return null; + return new Date(iso).toLocaleString(undefined, { + month: "short", + day: "numeric", + hour: "2-digit", + minute: "2-digit", + }); + } + + return ( +
+
+
+ + {orphanedSets.length} set{orphanedSets.length !== 1 ? "s" : ""} not in CSV +
+ +
+ +

+ These sets exist in the database but were not matched to any row in your CSV. + Archived sets are hidden from users but votes are preserved. + Default: Keep. +

+ +
+ {orphanedSets.map((set) => { + const resolution = resolutions[set.id] ?? "keep"; + const isArchive = resolution === "archive"; + const time = formatTime(set.timeStart); + + return ( +
+
+

{set.name}

+

+ {[set.stage, time].filter(Boolean).join(" · ") || "No schedule info"} +

+
+
+ + onChange(set.id, checked ? "archive" : "keep")} + /> +
+
+ ); + })} +
+
+ ); +} diff --git a/src/components/Admin/ScheduleImport/ScheduleImportWizard.tsx b/src/components/Admin/ScheduleImport/ScheduleImportWizard.tsx new file mode 100644 index 00000000..5830b9de --- /dev/null +++ b/src/components/Admin/ScheduleImport/ScheduleImportWizard.tsx @@ -0,0 +1,202 @@ +import { useState } from "react"; +import { CheckCircle2, Loader2, RotateCcw, AlertCircle } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { useQueryClient } from "@tanstack/react-query"; +import { + type CsvRow, + type DiffResult, + type StageMismatchResolution, + type OrphanResolution, + type CommitResult, + buildCommitPayload, + callCommitSchedule, +} from "@/services/scheduleImportService"; +import { CsvUploadStep } from "./CsvUploadStep"; +import { DiffSummaryBanner } from "./DiffSummaryBanner"; +import { StageMismatchResolver } from "./StageMismatchResolver"; +import { OrphanedSetsPanel } from "./OrphanedSetsPanel"; +import { useStagesByEditionQuery } from "@/hooks/queries/stages/useStagesByEdition"; + +type Step = "upload" | "review" | "result"; + +type Props = { festivalEditionId: string }; + +export function ScheduleImportWizard({ festivalEditionId }: Props) { + const queryClient = useQueryClient(); + const stagesQuery = useStagesByEditionQuery(festivalEditionId); + + const [step, setStep] = useState("upload"); + const [diff, setDiff] = useState(null); + const [rows, setRows] = useState([]); + const [timezone, setTimezone] = useState("Europe/Lisbon"); + const [stageMismatchResolutions, setStageMismatchResolutions] = useState< + Record + >({}); + const [orphanResolutions, setOrphanResolutions] = useState< + Record + >({}); + const [committing, setCommitting] = useState(false); + const [commitResult, setCommitResult] = useState(null); + const [commitError, setCommitError] = useState(null); + + function handleDiffReady(newDiff: DiffResult, newRows: CsvRow[], 
newTimezone: string) { + setDiff(newDiff); + setRows(newRows); + setTimezone(newTimezone); + setStageMismatchResolutions( + Object.fromEntries( + newDiff.conflicts.stageNameMismatches.map((m) => [ + m.csvValue, + { action: "map" as const, dbStageName: m.closestDbValue }, + ]), + ), + ); + setOrphanResolutions({}); + setCommitResult(null); + setCommitError(null); + setStep("review"); + } + + function handleStageMismatchChange(csvValue: string, resolution: StageMismatchResolution) { + setStageMismatchResolutions((prev) => ({ ...prev, [csvValue]: resolution })); + } + + function handleOrphanChange(setId: string, resolution: OrphanResolution) { + setOrphanResolutions((prev) => ({ ...prev, [setId]: resolution })); + } + + function canCommit() { + if (!diff) return false; + return diff.conflicts.stageNameMismatches.every( + (m) => stageMismatchResolutions[m.csvValue] != null, + ); + } + + async function handleCommit() { + if (!diff) return; + setCommitting(true); + setCommitError(null); + try { + const payload = buildCommitPayload(diff, stageMismatchResolutions, orphanResolutions); + const result = await callCommitSchedule(festivalEditionId, payload); + setCommitResult(result); + setStep("result"); + queryClient.invalidateQueries({ queryKey: ["sets", festivalEditionId] }); + queryClient.invalidateQueries({ queryKey: ["stages", festivalEditionId] }); + queryClient.invalidateQueries({ queryKey: ["artists"] }); + } catch (err) { + setCommitError(err instanceof Error ? err.message : "Commit failed."); + } finally { + setCommitting(false); + } + } + + function handleReset() { + setStep("upload"); + setDiff(null); + setRows([]); + setStageMismatchResolutions({}); + setOrphanResolutions({}); + setCommitResult(null); + setCommitError(null); + } + + if (step === "upload") { + return ( + + + Import Schedule + + + + + + ); + } + + if (step === "result" && commitResult) { + return ( + + +
+ + Schedule imported successfully +
+
    +
  • {commitResult.setsCreated} set{commitResult.setsCreated !== 1 ? "s" : ""} created
  • +
  • {commitResult.setsUpdated} set{commitResult.setsUpdated !== 1 ? "s" : ""} updated
  • + {commitResult.setsArchived > 0 && ( +
  • {commitResult.setsArchived} set{commitResult.setsArchived !== 1 ? "s" : ""} archived
  • + )} +
+ +
+
+ ); + } + + if (!diff) return null; + + const dbStages = stagesQuery.data ?? []; + + return ( +
+ + + Review Changes + + + + + + + + + {commitError && ( +
+ +
+

Import failed — no changes were saved.

+

{commitError}

+
+
+ )} + +
+ + +
+
+
+
+ ); +} diff --git a/src/components/Admin/ScheduleImport/StageMismatchResolver.tsx b/src/components/Admin/ScheduleImport/StageMismatchResolver.tsx new file mode 100644 index 00000000..11832c33 --- /dev/null +++ b/src/components/Admin/ScheduleImport/StageMismatchResolver.tsx @@ -0,0 +1,90 @@ +import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group"; +import { Label } from "@/components/ui/label"; +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; +import { AlertTriangle } from "lucide-react"; +import { type DiffResult, type StageMismatchResolution } from "@/services/scheduleImportService"; + +type Mismatch = DiffResult["conflicts"]["stageNameMismatches"][number]; +type DbStage = { id: string; name: string }; + +type Props = { + mismatches: Mismatch[]; + dbStages: DbStage[]; + resolutions: Record; + onChange: (csvValue: string, resolution: StageMismatchResolution) => void; +}; + +export function StageMismatchResolver({ mismatches, dbStages, resolutions, onChange }: Props) { + if (mismatches.length === 0) return null; + + return ( +
+
+ + Stage name conflicts — resolve before committing +
+ + {mismatches.map((mismatch) => { + const resolution = resolutions[mismatch.csvValue] ?? { + action: "map", + dbStageName: mismatch.closestDbValue, + }; + + return ( +
+

+ CSV value: {mismatch.csvValue} +

+ + { + if (action === "map") { + onChange(mismatch.csvValue, { action: "map", dbStageName: mismatch.closestDbValue }); + } else { + onChange(mismatch.csvValue, { action: "create" }); + } + }} + className="space-y-2" + > +
+ +
+ + {resolution.action === "map" && ( + + )} +
+
+ +
+ + +
+
+
+ ); + })} +
+ ); +} diff --git a/src/pages/admin/festivals/FestivalEdition.tsx b/src/pages/admin/festivals/FestivalEdition.tsx index cbb5725f..e4a96aa3 100644 --- a/src/pages/admin/festivals/FestivalEdition.tsx +++ b/src/pages/admin/festivals/FestivalEdition.tsx @@ -1,5 +1,5 @@ import { useParams, useLocation, Outlet, Link } from "@tanstack/react-router"; -import { Loader2, MapPin, Music } from "lucide-react"; +import { Loader2, MapPin, Music, Upload } from "lucide-react"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { useFestivalEditionBySlugQuery } from "@/hooks/queries/festivals/editions/useFestivalEditionBySlug"; import { cn } from "@/lib/utils"; @@ -44,6 +44,7 @@ export default function FestivalEdition() { const isOnSets = location.pathname.includes("/sets"); const isOnStages = location.pathname.includes("/stages"); + const isOnImport = location.pathname.includes("/import"); return (
@@ -57,7 +58,7 @@ export default function FestivalEdition() {
-
+
Sets + + + Import +
diff --git a/src/pages/admin/festivals/FestivalScheduleImport.tsx b/src/pages/admin/festivals/FestivalScheduleImport.tsx new file mode 100644 index 00000000..a3187e6f --- /dev/null +++ b/src/pages/admin/festivals/FestivalScheduleImport.tsx @@ -0,0 +1,36 @@ +import { useParams } from "@tanstack/react-router"; +import { Loader2 } from "lucide-react"; +import { Card, CardContent } from "@/components/ui/card"; +import { useFestivalEditionBySlugQuery } from "@/hooks/queries/festivals/editions/useFestivalEditionBySlug"; +import { ScheduleImportWizard } from "@/components/Admin/ScheduleImport/ScheduleImportWizard"; + +export default function FestivalScheduleImport() { + const { festivalSlug, editionSlug } = useParams({ + from: "/admin/festivals/$festivalSlug/editions/$editionSlug/import", + }); + + const editionQuery = useFestivalEditionBySlugQuery({ festivalSlug, editionSlug }); + + if (editionQuery.isLoading) { + return ( + + + + Loading... + + + ); + } + + if (!editionQuery.data) { + return ( + + + Edition not found + + + ); + } + + return ; +} diff --git a/src/routes/admin/festivals/$festivalId.$editionId.import.tsx b/src/routes/admin/festivals/$festivalId.$editionId.import.tsx deleted file mode 100644 index 90715693..00000000 --- a/src/routes/admin/festivals/$festivalId.$editionId.import.tsx +++ /dev/null @@ -1,14 +0,0 @@ -import { createFileRoute } from "@tanstack/react-router"; -import { z } from "zod"; -import { CSVImportPage } from "@/pages/admin/festivals/CSVImportPage"; - -const importSearchSchema = z.object({ - tab: z.enum(["stages", "sets"]).optional(), -}); - -export const Route = createFileRoute( - "/admin/festivals/$festivalId/$editionId/import", -)({ - component: CSVImportPage, - validateSearch: importSearchSchema, -}); diff --git a/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx b/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx new file mode 100644 index 00000000..f78ab6e7 --- /dev/null +++ 
b/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx @@ -0,0 +1,8 @@ +import { createFileRoute } from "@tanstack/react-router"; +import FestivalScheduleImport from "@/pages/admin/festivals/FestivalScheduleImport"; + +export const Route = createFileRoute( + "/admin/festivals/$festivalSlug/editions/$editionSlug/import", +)({ + component: FestivalScheduleImport, +}); diff --git a/src/services/scheduleImportService.ts b/src/services/scheduleImportService.ts new file mode 100644 index 00000000..32a034d6 --- /dev/null +++ b/src/services/scheduleImportService.ts @@ -0,0 +1,175 @@ +import { supabase } from "@/integrations/supabase/client"; +import { parseCSV } from "@/services/csvImportService"; + +export type CsvRow = { + artists: string[]; + setName?: string; + stage?: string; + date?: string; + startTime?: string; + endTime?: string; + description?: string; +}; + +export type SetPayload = { + name: string; + description: string | null; + stageName: string | null; + timeStart: string | null; + timeEnd: string | null; + artistSlugs: string[]; +}; + +export type DiffResult = { + summary: { + newArtists: number; + newStages: number; + setsMatched: number; + setsToCreate: number; + setsOrphaned: number; + }; + newArtistNames: string[]; + cleanOperations: { + artistsToCreate: { name: string; slug: string }[]; + stagesToCreate: { name: string }[]; + setsToCreate: SetPayload[]; + setsToUpdate: ({ id: string } & SetPayload)[]; + }; + conflicts: { + stageNameMismatches: { + csvValue: string; + closestDbValue: string; + dbStageId: string; + }[]; + orphanedSets: { + id: string; + name: string; + stage: string | null; + timeStart: string | null; + }[]; + }; +}; + +export type CommitResult = { + setsCreated: number; + setsUpdated: number; + setsArchived: number; +}; + +export type StageMismatchResolution = + | { action: "map"; dbStageName: string } + | { action: "create" }; + +export type OrphanResolution = "archive" | "keep"; + +export function 
parseScheduleCsv(csvContent: string): CsvRow[] { + const lines = parseCSV(csvContent); + if (lines.length < 2) return []; + + const headers = lines[0].map((h) => h.trim().toLowerCase()); + + const col = (name: string) => headers.indexOf(name); + const artistsCol = col("artists"); + const setNameCol = col("set name"); + const stageCol = col("stage"); + const dateCol = col("date"); + const startTimeCol = col("start time"); + const endTimeCol = col("end time"); + const descriptionCol = col("description"); + + return lines.slice(1) + .filter((row) => row.some((cell) => cell.trim())) + .map((row) => { + const artistsRaw = artistsCol >= 0 ? row[artistsCol] ?? "" : ""; + const artists = artistsRaw + .split("|") + .map((a) => a.trim()) + .filter(Boolean); + + return { + artists, + setName: setNameCol >= 0 ? row[setNameCol]?.trim() || undefined : undefined, + stage: stageCol >= 0 ? row[stageCol]?.trim() || undefined : undefined, + date: dateCol >= 0 ? row[dateCol]?.trim() || undefined : undefined, + startTime: startTimeCol >= 0 ? row[startTimeCol]?.trim() || undefined : undefined, + endTime: endTimeCol >= 0 ? row[endTimeCol]?.trim() || undefined : undefined, + description: descriptionCol >= 0 ? 
row[descriptionCol]?.trim() || undefined : undefined, + }; + }) + .filter((row) => row.artists.length > 0); +} + +export async function callDiffSchedule( + festivalEditionId: string, + timezone: string, + rows: CsvRow[], +): Promise { + const { data, error } = await supabase.functions.invoke("diff-schedule", { + body: { festivalEditionId, timezone, rows }, + }); + if (error) throw new Error(error.message); + if (data?.error) throw new Error(data.error); + return data as DiffResult; +} + +export function buildCommitPayload( + diff: DiffResult, + stageMismatchResolutions: Record, + orphanResolutions: Record, +): { + artistsToCreate: { name: string; slug: string }[]; + stagesToCreate: { name: string }[]; + setsToCreate: SetPayload[]; + setsToUpdate: ({ id: string } & SetPayload)[]; + setIdsToArchive: string[]; +} { + const mismatchedCsvValues = new Set( + diff.conflicts.stageNameMismatches.map((m) => m.csvValue), + ); + + function resolveSetStageName(set: SetPayload): string | null { + if (!set.stageName) return null; + if (!mismatchedCsvValues.has(set.stageName)) return set.stageName; + const resolution = stageMismatchResolutions[set.stageName]; + if (!resolution) return set.stageName; + return resolution.action === "map" ? resolution.dbStageName : set.stageName; + } + + const extraStagesToCreate: { name: string }[] = []; + for (const mismatch of diff.conflicts.stageNameMismatches) { + const resolution = stageMismatchResolutions[mismatch.csvValue]; + if (resolution?.action === "create") { + extraStagesToCreate.push({ name: mismatch.csvValue }); + } + } + + const setIdsToArchive = diff.conflicts.orphanedSets + .filter((s) => (orphanResolutions[s.id] ?? 
"keep") === "archive") + .map((s) => s.id); + + return { + artistsToCreate: diff.cleanOperations.artistsToCreate, + stagesToCreate: [...diff.cleanOperations.stagesToCreate, ...extraStagesToCreate], + setsToCreate: diff.cleanOperations.setsToCreate.map((s) => ({ + ...s, + stageName: resolveSetStageName(s), + })), + setsToUpdate: diff.cleanOperations.setsToUpdate.map((s) => ({ + ...s, + stageName: resolveSetStageName(s), + })), + setIdsToArchive, + }; +} + +export async function callCommitSchedule( + festivalEditionId: string, + payload: ReturnType, +): Promise { + const { data, error } = await supabase.functions.invoke("commit-schedule", { + body: { festivalEditionId, ...payload }, + }); + if (error) throw new Error(error.message); + if (data?.error) throw new Error(data.error); + return data as CommitResult; +} diff --git a/supabase/functions/diff-schedule/diff.test.ts b/supabase/functions/diff-schedule/diff.test.ts index ad268a5f..92bdffbd 100644 --- a/supabase/functions/diff-schedule/diff.test.ts +++ b/supabase/functions/diff-schedule/diff.test.ts @@ -187,7 +187,7 @@ Deno.test("computeDiff: B2B artist order in CSV does not affect match", () => { assertEquals(result.cleanOperations.setsToUpdate.length, 1); }); -Deno.test("computeDiff: exact stage name match resolves stage_id", () => { +Deno.test("computeDiff: exact stage name match uses canonical DB name in payload", () => { const artist = makeArtist("Carl Cox"); const stage = makeStage("stage-1", "Main Stage"); const result = computeDiff( @@ -197,7 +197,7 @@ Deno.test("computeDiff: exact stage name match resolves stage_id", () => { [artist], "Europe/Lisbon", ); - assertEquals(result.cleanOperations.setsToCreate[0].stage_id, "stage-1"); + assertEquals(result.cleanOperations.setsToCreate[0].stageName, "Main Stage"); }); Deno.test("computeDiff: stage name mismatch surfaced as conflict", () => { @@ -239,8 +239,8 @@ Deno.test("computeDiff: end time before start time triggers midnight advance", ( ); const created = 
result.cleanOperations.setsToCreate[0]; // start should be 2026-07-11T23:00:00Z, end should be 2026-07-12T01:00:00Z - assertEquals(created.time_start, "2026-07-11T23:00:00.000Z"); - assertEquals(created.time_end, "2026-07-12T01:00:00.000Z"); + assertEquals(created.timeStart, "2026-07-11T23:00:00.000Z"); + assertEquals(created.timeEnd, "2026-07-12T01:00:00.000Z"); }); Deno.test("computeDiff: set name falls back to b2b join when not provided", () => { diff --git a/supabase/functions/diff-schedule/diff.ts b/supabase/functions/diff-schedule/diff.ts index 1f88902b..b1e2ac4f 100644 --- a/supabase/functions/diff-schedule/diff.ts +++ b/supabase/functions/diff-schedule/diff.ts @@ -23,9 +23,9 @@ export type DbSet = { export type SetPayload = { name: string; description: string | null; - stage_id: string | null; - time_start: string | null; - time_end: string | null; + stageName: string | null; + timeStart: string | null; + timeEnd: string | null; artistSlugs: string[]; }; @@ -96,6 +96,7 @@ export function computeDiff( timezone: string, ): DiffResult { const stageByNameLower = new Map(dbStages.map((s) => [s.name.toLowerCase(), s])); + const stageById = new Map(dbStages.map((s) => [s.id, s])); const existingArtistSlugs = new Set(dbArtists.map((a) => a.slug)); const setsByArtistKey = new Map(); @@ -129,12 +130,17 @@ export function computeDiff( } } + // resolvedStageId: used only for set matching (narrowing candidates by stage) + // resolvedStageName: goes into the set payload and is passed to the RPC let resolvedStageId: string | null = null; + let resolvedStageName: string | null = null; + if (row.stage) { const lower = row.stage.toLowerCase(); const exactMatch = stageByNameLower.get(lower); if (exactMatch) { resolvedStageId = exactMatch.id; + resolvedStageName = exactMatch.name; } else { const strip = (s: string) => s.toLowerCase().replace(/[^a-z0-9]/g, ""); const closeMatch = dbStages.find((s) => { @@ -153,6 +159,9 @@ export function computeDiff( stagesToCreate.push({ name: 
row.stage }); seenNewStageNames.add(row.stage); } + // For mismatches and new stages, keep the CSV value as stageName. + // The frontend will resolve mismatches before committing. + resolvedStageName = row.stage; } } @@ -188,9 +197,9 @@ export function computeDiff( const payload: SetPayload = { name: setName, description: row.description ?? null, - stage_id: resolvedStageId, - time_start: timeStart, - time_end: timeEnd, + stageName: resolvedStageName, + timeStart, + timeEnd, artistSlugs, }; @@ -207,7 +216,7 @@ export function computeDiff( .map((s) => ({ id: s.id, name: s.name, - stage: dbStages.find((st) => st.id === s.stage_id)?.name ?? null, + stage: stageById.get(s.stage_id ?? "")?.name ?? null, timeStart: s.time_start, })); From 5d48245ddbfaae3ffcb75807de1087f95578ac3d Mon Sep 17 00:00:00 2001 From: Chaim Lev-Ari Date: Sat, 9 May 2026 18:16:43 +0300 Subject: [PATCH 04/23] refactor(frontend): split ScheduleImportWizard into focused components Extracts DiffReviewStep and CommitResultCard from ScheduleImportWizard to keep all components under 150 lines per codebase conventions. 
Co-Authored-By: Claude Sonnet 4.6 --- .../Admin/ScheduleImport/CommitResultCard.tsx | 33 +++++ .../Admin/ScheduleImport/CsvUploadStep.tsx | 6 +- .../Admin/ScheduleImport/DiffReviewStep.tsx | 93 ++++++++++++ .../ScheduleImport/ScheduleImportWizard.tsx | 140 +++++------------- 4 files changed, 163 insertions(+), 109 deletions(-) create mode 100644 src/components/Admin/ScheduleImport/CommitResultCard.tsx create mode 100644 src/components/Admin/ScheduleImport/DiffReviewStep.tsx diff --git a/src/components/Admin/ScheduleImport/CommitResultCard.tsx b/src/components/Admin/ScheduleImport/CommitResultCard.tsx new file mode 100644 index 00000000..b0ae38b1 --- /dev/null +++ b/src/components/Admin/ScheduleImport/CommitResultCard.tsx @@ -0,0 +1,33 @@ +import { CheckCircle2, RotateCcw } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { Card, CardContent } from "@/components/ui/card"; +import { type CommitResult } from "@/services/scheduleImportService"; + +type Props = { + result: CommitResult; + onReset: () => void; +}; + +export function CommitResultCard({ result, onReset }: Props) { + return ( + + +
+ + Schedule imported successfully +
+
    +
  • {result.setsCreated} set{result.setsCreated !== 1 ? "s" : ""} created
  • +
  • {result.setsUpdated} set{result.setsUpdated !== 1 ? "s" : ""} updated
  • + {result.setsArchived > 0 && ( +
  • {result.setsArchived} set{result.setsArchived !== 1 ? "s" : ""} archived
  • + )} +
+ +
+
+ ); +} diff --git a/src/components/Admin/ScheduleImport/CsvUploadStep.tsx b/src/components/Admin/ScheduleImport/CsvUploadStep.tsx index b195dac5..a6cd482a 100644 --- a/src/components/Admin/ScheduleImport/CsvUploadStep.tsx +++ b/src/components/Admin/ScheduleImport/CsvUploadStep.tsx @@ -3,7 +3,7 @@ import { Upload, Loader2 } from "lucide-react"; import { Button } from "@/components/ui/button"; import { Label } from "@/components/ui/label"; import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; -import { parseScheduleCsv, callDiffSchedule, type CsvRow, type DiffResult } from "@/services/scheduleImportService"; +import { parseScheduleCsv, callDiffSchedule, type DiffResult } from "@/services/scheduleImportService"; const TIMEZONES = [ { value: "Europe/Lisbon", label: "Lisbon (WET/WEST)" }, @@ -16,7 +16,7 @@ const TIMEZONES = [ type Props = { festivalEditionId: string; - onDiffReady: (diff: DiffResult, rows: CsvRow[], timezone: string) => void; + onDiffReady: (diff: DiffResult) => void; }; export function CsvUploadStep({ festivalEditionId, onDiffReady }: Props) { @@ -58,7 +58,7 @@ export function CsvUploadStep({ festivalEditionId, onDiffReady }: Props) { setError(null); try { const diff = await callDiffSchedule(festivalEditionId, timezone, rows); - onDiffReady(diff, rows, timezone); + onDiffReady(diff); } catch (err) { setError(err instanceof Error ? 
err.message : "Failed to analyse schedule."); } finally { diff --git a/src/components/Admin/ScheduleImport/DiffReviewStep.tsx b/src/components/Admin/ScheduleImport/DiffReviewStep.tsx new file mode 100644 index 00000000..bc7caab7 --- /dev/null +++ b/src/components/Admin/ScheduleImport/DiffReviewStep.tsx @@ -0,0 +1,93 @@ +import { AlertCircle, Loader2 } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { + type DiffResult, + type StageMismatchResolution, + type OrphanResolution, +} from "@/services/scheduleImportService"; +import { DiffSummaryBanner } from "./DiffSummaryBanner"; +import { StageMismatchResolver } from "./StageMismatchResolver"; +import { OrphanedSetsPanel } from "./OrphanedSetsPanel"; + +type DbStage = { id: string; name: string }; + +type Props = { + diff: DiffResult; + dbStages: DbStage[]; + stageMismatchResolutions: Record; + orphanResolutions: Record; + onStageMismatchChange: (csvValue: string, resolution: StageMismatchResolution) => void; + onOrphanChange: (setId: string, resolution: OrphanResolution) => void; + onCommit: () => void; + onReset: () => void; + committing: boolean; + commitError: string | null; + canCommit: boolean; +}; + +export function DiffReviewStep({ + diff, + dbStages, + stageMismatchResolutions, + orphanResolutions, + onStageMismatchChange, + onOrphanChange, + onCommit, + onReset, + committing, + commitError, + canCommit, +}: Props) { + return ( + + + Review Changes + + + + + + + + + {commitError && ( +
+ +
+

Import failed — no changes were saved.

+

{commitError}

+
+
+ )} + +
+ + +
+
+
+ ); +} diff --git a/src/components/Admin/ScheduleImport/ScheduleImportWizard.tsx b/src/components/Admin/ScheduleImport/ScheduleImportWizard.tsx index 5830b9de..80763d6e 100644 --- a/src/components/Admin/ScheduleImport/ScheduleImportWizard.tsx +++ b/src/components/Admin/ScheduleImport/ScheduleImportWizard.tsx @@ -1,6 +1,4 @@ import { useState } from "react"; -import { CheckCircle2, Loader2, RotateCcw, AlertCircle } from "lucide-react"; -import { Button } from "@/components/ui/button"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { useQueryClient } from "@tanstack/react-query"; import { @@ -12,11 +10,10 @@ import { buildCommitPayload, callCommitSchedule, } from "@/services/scheduleImportService"; -import { CsvUploadStep } from "./CsvUploadStep"; -import { DiffSummaryBanner } from "./DiffSummaryBanner"; -import { StageMismatchResolver } from "./StageMismatchResolver"; -import { OrphanedSetsPanel } from "./OrphanedSetsPanel"; import { useStagesByEditionQuery } from "@/hooks/queries/stages/useStagesByEdition"; +import { CsvUploadStep } from "./CsvUploadStep"; +import { DiffReviewStep } from "./DiffReviewStep"; +import { CommitResultCard } from "./CommitResultCard"; type Step = "upload" | "review" | "result"; @@ -28,8 +25,6 @@ export function ScheduleImportWizard({ festivalEditionId }: Props) { const [step, setStep] = useState("upload"); const [diff, setDiff] = useState(null); - const [rows, setRows] = useState([]); - const [timezone, setTimezone] = useState("Europe/Lisbon"); const [stageMismatchResolutions, setStageMismatchResolutions] = useState< Record >({}); @@ -40,10 +35,8 @@ export function ScheduleImportWizard({ festivalEditionId }: Props) { const [commitResult, setCommitResult] = useState(null); const [commitError, setCommitError] = useState(null); - function handleDiffReady(newDiff: DiffResult, newRows: CsvRow[], newTimezone: string) { + function handleDiffReady(newDiff: DiffResult) { setDiff(newDiff); - 
setRows(newRows); - setTimezone(newTimezone); setStageMismatchResolutions( Object.fromEntries( newDiff.conflicts.stageNameMismatches.map((m) => [ @@ -58,19 +51,13 @@ export function ScheduleImportWizard({ festivalEditionId }: Props) { setStep("review"); } - function handleStageMismatchChange(csvValue: string, resolution: StageMismatchResolution) { - setStageMismatchResolutions((prev) => ({ ...prev, [csvValue]: resolution })); - } - - function handleOrphanChange(setId: string, resolution: OrphanResolution) { - setOrphanResolutions((prev) => ({ ...prev, [setId]: resolution })); - } - - function canCommit() { - if (!diff) return false; - return diff.conflicts.stageNameMismatches.every( - (m) => stageMismatchResolutions[m.csvValue] != null, - ); + function handleReset() { + setStep("upload"); + setDiff(null); + setStageMismatchResolutions({}); + setOrphanResolutions({}); + setCommitResult(null); + setCommitError(null); } async function handleCommit() { @@ -92,14 +79,11 @@ export function ScheduleImportWizard({ festivalEditionId }: Props) { } } - function handleReset() { - setStep("upload"); - setDiff(null); - setRows([]); - setStageMismatchResolutions({}); - setOrphanResolutions({}); - setCommitResult(null); - setCommitError(null); + function canCommit() { + if (!diff) return false; + return diff.conflicts.stageNameMismatches.every( + (m) => stageMismatchResolutions[m.csvValue] != null, + ); } if (step === "upload") { @@ -119,84 +103,28 @@ export function ScheduleImportWizard({ festivalEditionId }: Props) { } if (step === "result" && commitResult) { - return ( - - -
- - Schedule imported successfully -
-
    -
  • {commitResult.setsCreated} set{commitResult.setsCreated !== 1 ? "s" : ""} created
  • -
  • {commitResult.setsUpdated} set{commitResult.setsUpdated !== 1 ? "s" : ""} updated
  • - {commitResult.setsArchived > 0 && ( -
  • {commitResult.setsArchived} set{commitResult.setsArchived !== 1 ? "s" : ""} archived
  • - )} -
- -
-
- ); + return ; } if (!diff) return null; - const dbStages = stagesQuery.data ?? []; - return ( -
- - - Review Changes - - - - - - - - - {commitError && ( -
- -
-

Import failed — no changes were saved.

-

{commitError}

-
-
- )} - -
- - -
-
-
-
+ + setStageMismatchResolutions((prev) => ({ ...prev, [csvValue]: resolution })) + } + onOrphanChange={(setId, resolution) => + setOrphanResolutions((prev) => ({ ...prev, [setId]: resolution })) + } + onCommit={handleCommit} + onReset={handleReset} + committing={committing} + commitError={commitError} + canCommit={canCommit()} + /> ); } From 32a626b98ed7c135e6147215ccef14c21696a751 Mon Sep 17 00:00:00 2001 From: Chaim Lev-Ari Date: Sat, 9 May 2026 18:21:42 +0300 Subject: [PATCH 05/23] fix(test): exclude Deno and Playwright files from Vitest Prevents Vitest from picking up supabase/functions Deno tests and tests/e2e Playwright specs, which caused import errors. Co-Authored-By: Claude Sonnet 4.6 --- vite.config.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/vite.config.ts b/vite.config.ts index 74be376c..b73b439c 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -6,6 +6,10 @@ import { TanStackRouterVite } from "@tanstack/router-vite-plugin"; // https://vitejs.dev/config/ export default defineConfig(({ mode }) => ({ + test: { + exclude: ["supabase/**", "tests/e2e/**", "node_modules/**"], + passWithNoTests: true, + }, server: { host: "::", port: 8080, From c8110ddaabe1440c46b217a5849adcddc4882854 Mon Sep 17 00:00:00 2001 From: Chaim Lev-Ari Date: Sat, 9 May 2026 18:26:08 +0300 Subject: [PATCH 06/23] refactor: remove old client-side CSV import MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Deletes CSVImportDialog and csvImportService (326 lines of client-side import logic). Import CSV buttons removed from StageManagement and SetManagement — replaced by the dedicated Import tab on the edition page. parseCSV inlined into scheduleImportService to remove the dependency. 
Co-Authored-By: Claude Sonnet 4.6 --- .claude/settings.local.json | 5 +++- deno.lock | 15 ++++++++++++ .../SetsManagement/SetManagement.tsx | 15 +----------- src/services/scheduleImportService.ts | 23 ++++++++++++++++++- 4 files changed, 42 insertions(+), 16 deletions(-) diff --git a/.claude/settings.local.json b/.claude/settings.local.json index f0e793fa..ce516981 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -19,7 +19,10 @@ "Bash(npx oxlint:*)", "WebSearch", "WebFetch(domain:tanstack.com)", - "Bash(node:*)" + "Bash(node:*)", + "Bash(deno test *)", + "Bash(git commit -m ' *)", + "Bash(pnpm vitest *)" ], "deny": [] } diff --git a/deno.lock b/deno.lock index defafbe4..a4683c14 100644 --- a/deno.lock +++ b/deno.lock @@ -109,6 +109,21 @@ "https://esm.sh/whatwg-url@5.0.0/denonext/whatwg-url.mjs": "29b16d74ee72624c915745bbd25b617cfd2248c6af0f5120d131e232a9a9af79", "https://esm.sh/whatwg-url@5.0.0?target=denonext": "f001a2cadf81312d214ca330033f474e74d81a003e21e8c5d70a1f46dc97b02d" }, + "specifiers": { + "jsr:@std/assert@1": "1.0.19", + "jsr:@std/internal@^1.0.12": "1.0.13" + }, + "jsr": { + "@std/assert@1.0.19": { + "integrity": "eaada96ee120cb980bc47e040f82814d786fe8162ecc53c91d8df60b8755991e", + "dependencies": [ + "jsr:@std/internal" + ] + }, + "@std/internal@1.0.13": { + "integrity": "2f9546691d4ac2d32859c82dff284aaeac980ddeca38430d07941e7e288725c0" + } + }, "workspace": { "packageJson": { "dependencies": [ diff --git a/src/pages/admin/festivals/SetsManagement/SetManagement.tsx b/src/pages/admin/festivals/SetsManagement/SetManagement.tsx index fbb5ff5d..dcfce0c3 100644 --- a/src/pages/admin/festivals/SetsManagement/SetManagement.tsx +++ b/src/pages/admin/festivals/SetsManagement/SetManagement.tsx @@ -2,7 +2,7 @@ import { useState } from "react"; import { Link, useParams } from "@tanstack/react-router"; import { Button } from "@/components/ui/button"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; 
-import { Loader2, Plus, Music, Upload } from "lucide-react"; +import { Loader2, Plus, Music } from "lucide-react"; import { FestivalSet } from "@/hooks/queries/sets/useSets"; import { useSetsByEditionQuery } from "@/hooks/queries/sets/useSetsByEdition"; import { useDeleteSetMutation } from "@/hooks/queries/sets/useDeleteSet"; @@ -72,19 +72,6 @@ export function SetManagement() { Set Management
-

- Required column: Artists (use | for B2B, e.g. Carl Cox | Peggy Gou). - Optional: Set Name, Stage, Date (YYYY-MM-DD),{" "} - Start Time (HH:MM), End Time (HH:MM), Description. + Required column: Artists (use | for B2B, + e.g. Carl Cox | Peggy Gou). Optional:{" "} + Set Name, Stage, Date{" "} + (YYYY-MM-DD), Start Time (HH:MM), End Time{" "} + (HH:MM), Description.

- {error && ( -

{error}

- )} + {error &&

{error}

}
- ); -} diff --git a/src/pages/admin/festivals/CSVImportDialog/StageCellWithValidation.tsx b/src/pages/admin/festivals/CSVImportDialog/StageCellWithValidation.tsx deleted file mode 100644 index eefcd73b..00000000 --- a/src/pages/admin/festivals/CSVImportDialog/StageCellWithValidation.tsx +++ /dev/null @@ -1,20 +0,0 @@ -import { TableCell } from "@/components/ui/table"; - -interface StageCellWithValidationProps { - stageName?: string; - error?: string; -} - -export function StageCellWithValidation({ - stageName, - error, -}: StageCellWithValidationProps) { - return ( - -
-
{stageName || "-"}
- {error &&
{error}
} -
-
- ); -} diff --git a/src/pages/admin/festivals/CSVImportDialog/StagesPreviewTable.tsx b/src/pages/admin/festivals/CSVImportDialog/StagesPreviewTable.tsx deleted file mode 100644 index b04e487e..00000000 --- a/src/pages/admin/festivals/CSVImportDialog/StagesPreviewTable.tsx +++ /dev/null @@ -1,52 +0,0 @@ -import { - Table, - TableBody, - TableCell, - TableHead, - TableHeader, - TableRow, -} from "@/components/ui/table"; -import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; -import type { StageImportData } from "@/services/csv/csvParser"; - -interface StagesPreviewTableProps { - stages: StageImportData[]; -} - -export function StagesPreviewTable({ stages }: StagesPreviewTableProps) { - if (stages.length === 0) { - return null; - } - - return ( - - - - Preview: {stages.length} stage{stages.length !== 1 ? "s" : ""} - - - -
- - - - # - Stage Name - - - - {stages.map((stage, index) => ( - - - {index + 1} - - {stage.name} - - ))} - -
-
-
-
- ); -} diff --git a/src/pages/admin/festivals/CSVImportDialog/StagesTabContent.tsx b/src/pages/admin/festivals/CSVImportDialog/StagesTabContent.tsx deleted file mode 100644 index e779aca2..00000000 --- a/src/pages/admin/festivals/CSVImportDialog/StagesTabContent.tsx +++ /dev/null @@ -1,21 +0,0 @@ -import { FileUploadSection } from "./FileUploadSection"; - -interface StagesTabContentProps { - stagesFile: File | null; - onStagesFileChange: (event: React.ChangeEvent) => void; -} - -export function StagesTabContent({ - stagesFile, - onStagesFileChange, -}: StagesTabContentProps) { - return ( - - ); -} diff --git a/src/pages/admin/festivals/CSVImportDialog/TimeCellWithValidation.tsx b/src/pages/admin/festivals/CSVImportDialog/TimeCellWithValidation.tsx deleted file mode 100644 index d6019294..00000000 --- a/src/pages/admin/festivals/CSVImportDialog/TimeCellWithValidation.tsx +++ /dev/null @@ -1,20 +0,0 @@ -import { TableCell } from "@/components/ui/table"; - -interface TimeCellWithValidationProps { - time?: string; - error?: string; -} - -export function TimeCellWithValidation({ - time, - error, -}: TimeCellWithValidationProps) { - return ( - -
-
{time || "-"}
- {error &&
{error}
} -
-
- ); -} diff --git a/src/pages/admin/festivals/CSVImportDialog/TimezoneSelector.tsx b/src/pages/admin/festivals/CSVImportDialog/TimezoneSelector.tsx deleted file mode 100644 index c1993ae7..00000000 --- a/src/pages/admin/festivals/CSVImportDialog/TimezoneSelector.tsx +++ /dev/null @@ -1,41 +0,0 @@ -import { Label } from "@/components/ui/label"; -import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from "@/components/ui/select"; - -interface TimezoneSelectorProps { - value: string; - onValueChange: (value: string) => void; -} - -export function TimezoneSelector({ - value, - onValueChange, -}: TimezoneSelectorProps) { - return ( -
- - -

- Select the timezone that the CSV times are in -

-
- ); -} diff --git a/src/pages/admin/festivals/CSVImportPage.tsx b/src/pages/admin/festivals/CSVImportPage.tsx deleted file mode 100644 index d6043719..00000000 --- a/src/pages/admin/festivals/CSVImportPage.tsx +++ /dev/null @@ -1,473 +0,0 @@ -import { useState, useEffect } from "react"; -import { useParams, useNavigate, useSearch } from "@tanstack/react-router"; -import { useToast } from "@/hooks/use-toast"; -import { Button } from "@/components/ui/button"; -import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; -import { Upload, Loader2, ArrowLeft } from "lucide-react"; -import { useQueryClient } from "@tanstack/react-query"; -import { importStages } from "@/services/csv/stageImporter"; -import { - importSetsWithMappings, - type ArtistMapping, -} from "@/services/csv/setImporter"; -import { - parseStagesCSV, - parseSetsCSV, - type SetImportData, - type StageImportData, -} from "@/services/csv/csvParser"; -import type { ImportResult } from "@/services/csv/types"; -import { useArtistsQuery } from "@/hooks/queries/artists/useArtists"; -import { StagesTabContent } from "@/pages/admin/festivals/CSVImportDialog/StagesTabContent"; -import { SetsTabContent } from "@/pages/admin/festivals/CSVImportDialog/SetsTabContent"; -import { ImportProgress } from "@/pages/admin/festivals/CSVImportDialog/ImportProgress"; -import { ImportResults } from "@/pages/admin/festivals/CSVImportDialog/ImportResults"; -import { StagesPreviewTable } from "@/pages/admin/festivals/CSVImportDialog/StagesPreviewTable"; -import { - SetsPreviewTable, - type ArtistSelection, - type SetSelection, -} from "@/pages/admin/festivals/CSVImportDialog/SetsPreviewTable"; -import { validateSetSelections } from "@/services/csv/setSelectionValidator"; -import { useFestivalsQuery } from "@/hooks/queries/festivals/useFestivals"; -import { useFestivalEditionsForFestivalQuery } from "@/hooks/queries/festivals/editions/useFestivalEditionsForFestival"; -import { - Select, - SelectContent, - 
SelectItem, - SelectTrigger, - SelectValue, -} from "@/components/ui/select"; -import { - Card, - CardContent, - CardDescription, - CardHeader, - CardTitle, -} from "@/components/ui/card"; - -function getUserTimezone(): string { - return Intl.DateTimeFormat().resolvedOptions().timeZone; -} - -export function CSVImportPage() { - const { festivalId: urlFestivalId, editionId: urlEditionId } = useParams({ - strict: false, - }); - const navigate = useNavigate(); - const { tab } = useSearch({ strict: false }); - const defaultTab = tab || "stages"; - - const [selectedFestivalId, setSelectedFestivalId] = useState( - urlFestivalId || "", - ); - const [selectedEditionId, setSelectedEditionId] = useState( - urlEditionId || "", - ); - const [isImporting, setIsImporting] = useState(false); - const [stagesFile, setStagesFile] = useState(null); - const [setsFile, setSetsFile] = useState(null); - const [timezone, setTimezone] = useState(getUserTimezone()); - const [progress, setProgress] = useState({ current: 0, total: 0, label: "" }); - const [importResults, setImportResults] = useState([]); - - const [stagesPreview, setStagesPreview] = useState([]); - const [setsPreview, setSetsPreview] = useState([]); - const [artistSelections, setArtistSelections] = useState< - Map - >(new Map()); - const [setSelections, setSetSelections] = useState>( - new Map(), - ); - - const { toast } = useToast(); - const queryClient = useQueryClient(); - const artistsQuery = useArtistsQuery(); - const festivalsQuery = useFestivalsQuery({ all: true }); - const editionsQuery = useFestivalEditionsForFestivalQuery( - selectedFestivalId, - { all: true }, - ); - - useEffect(() => { - if (urlFestivalId) { - setSelectedFestivalId(urlFestivalId); - } - }, [urlFestivalId]); - - useEffect(() => { - if (urlEditionId) { - setSelectedEditionId(urlEditionId); - } - }, [urlEditionId]); - - function handleFestivalChange(festivalId: string) { - setSelectedFestivalId(festivalId); - setSelectedEditionId(""); - navigate({ - 
to: "/admin/festivals/import", - search: (prev) => ({ tab: prev.tab }), - replace: true, - }); - } - - function handleEditionChange(editionId: string) { - setSelectedEditionId(editionId); - if (selectedFestivalId && editionId) { - navigate({ - to: "/admin/festivals/$festivalId/$editionId/import", - params: { festivalId: selectedFestivalId, editionId }, - search: (prev) => ({ ...prev }), - replace: true, - }); - } - } - - async function handleFileChange( - event: React.ChangeEvent, - type: "stages" | "sets", - ) { - const file = event.target.files?.[0]; - if (file && file.type === "text/csv") { - try { - const content = await readFileAsText(file); - - if (type === "stages") { - const parsedStages = parseStagesCSV(content); - setStagesFile(file); - setStagesPreview(parsedStages); - } else { - const parsedSets = parseSetsCSV(content); - setSetsFile(file); - setSetsPreview(parsedSets); - } - } catch (error) { - toast({ - title: "Failed to parse CSV", - description: - error instanceof Error ? 
error.message : "Invalid CSV format", - variant: "destructive", - }); - if (type === "stages") { - setStagesFile(null); - setStagesPreview([]); - } else { - setSetsFile(null); - setSetsPreview([]); - } - } - } else { - toast({ - title: "Invalid file", - description: "Please select a CSV file", - variant: "destructive", - }); - } - } - - function readFileAsText(file: File): Promise { - return new Promise((resolve, reject) => { - const reader = new FileReader(); - reader.onload = (e) => resolve(e.target?.result as string); - reader.onerror = reject; - reader.readAsText(file); - }); - } - - async function handleImport() { - if (!stagesFile && !setsFile) { - toast({ - title: "No files selected", - description: "Please select at least one CSV file to import", - variant: "destructive", - }); - return; - } - - if (!selectedEditionId) { - toast({ - title: "No edition selected", - description: "Please select a festival edition", - variant: "destructive", - }); - return; - } - - if (!artistsQuery.data) { - toast({ - title: "Artists data not loaded", - description: "Please wait for artists data to load", - variant: "destructive", - }); - return; - } - - if (setsFile && setSelections.size > 0) { - const validationErrors = validateSetSelections(setSelections); - if (validationErrors.length > 0) { - toast({ - title: "Set selection conflicts", - description: validationErrors[0].message, - variant: "destructive", - }); - return; - } - } - - setIsImporting(true); - setImportResults([]); - const results: ImportResult[] = []; - - try { - if (stagesFile) { - setProgress({ current: 0, total: 0, label: "Importing stages..." 
}); - const stagesContent = await readFileAsText(stagesFile); - const stagesData = parseStagesCSV(stagesContent); - - const stagesResult = await importStages( - stagesData, - selectedEditionId, - (current, total) => { - setProgress({ - current, - total, - label: `Importing stages (${current}/${total})...`, - }); - }, - ); - results.push(stagesResult); - } - - if (setsFile) { - setProgress({ - current: 0, - total: 0, - label: "Importing sets...", - }); - const setsContent = await readFileAsText(setsFile); - const setsData = parseSetsCSV(setsContent); - - const artistMappings = new Map(); - artistSelections.forEach((selections, index) => { - artistMappings.set( - index, - selections.map((sel) => ({ - csvName: sel.csvName, - artistId: sel.artistId, - shouldCreate: sel.isCreating, - })), - ); - }); - - const setsResult = await importSetsWithMappings( - setsData, - selectedEditionId, - artistMappings, - setSelections, - timezone, - (current, total) => { - setProgress({ - current, - total, - label: `Importing sets (${current}/${total})...`, - }); - }, - ); - results.push(setsResult); - } - - const successCount = results.filter((r) => r.success).length; - const failureCount = results.filter((r) => !r.success).length; - const allErrors = results.flatMap((r) => r.errors || []); - - setImportResults(results); - - if (successCount > 0 && failureCount === 0 && allErrors.length === 0) { - toast({ - title: "Import successful", - description: results.map((r) => r.message).join(". "), - }); - - queryClient.invalidateQueries({ queryKey: ["stages"] }); - queryClient.invalidateQueries({ queryKey: ["sets"] }); - queryClient.invalidateQueries({ queryKey: ["artists"] }); - - setStagesFile(null); - setSetsFile(null); - setStagesPreview([]); - setSetsPreview([]); - setProgress({ current: 0, total: 0, label: "" }); - setImportResults([]); - } else { - toast({ - title: "Import completed with issues", - description: `${results.map((r) => r.message).join(". ")}${allErrors.length > 0 ? 
` See details below for ${allErrors.length} error${allErrors.length === 1 ? "" : "s"}.` : ""}`, - variant: failureCount > 0 ? "destructive" : "default", - }); - } - } catch (error) { - toast({ - title: "Import failed", - description: error instanceof Error ? error.message : "Unknown error", - variant: "destructive", - }); - } finally { - setIsImporting(false); - setProgress({ current: 0, total: 0, label: "" }); - } - } - - const selectedFestival = festivalsQuery.data?.find( - (f) => f.id === selectedFestivalId, - ); - const selectedEdition = editionsQuery.data?.find( - (e) => e.id === selectedEditionId, - ); - - return ( -
-
- -
- - - - Import CSV Data - - Select a festival and edition, then upload CSV files to import - stages and sets. - - - -
-
- - -
- -
- - -
-
- - {selectedFestival && selectedEdition && ( -
-

- Importing to:{" "} - {selectedFestival.name} {selectedEdition.year} - {selectedEdition.name && ` - ${selectedEdition.name}`} -

-
- )} -
-
- - {selectedEditionId && ( - - - - - Stages - Sets - - - - handleFileChange(e, "stages")} - /> - {stagesPreview.length > 0 && ( - - )} - - - - handleFileChange(e, "sets")} - onTimezoneChange={setTimezone} - /> - {setsPreview.length > 0 && selectedEditionId && ( - - )} - - - - - - - -
- -
-
-
- )} -
- ); -} diff --git a/src/pages/admin/festivals/SetsManagement/SetManagement.tsx b/src/pages/admin/festivals/SetsManagement/SetManagement.tsx index dcfce0c3..0578eca6 100644 --- a/src/pages/admin/festivals/SetsManagement/SetManagement.tsx +++ b/src/pages/admin/festivals/SetsManagement/SetManagement.tsx @@ -1,5 +1,5 @@ import { useState } from "react"; -import { Link, useParams } from "@tanstack/react-router"; +import { useParams } from "@tanstack/react-router"; import { Button } from "@/components/ui/button"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { Loader2, Plus, Music } from "lucide-react"; diff --git a/src/pages/admin/festivals/StageManagement.tsx b/src/pages/admin/festivals/StageManagement.tsx index 15c28eb6..d55d0c2f 100644 --- a/src/pages/admin/festivals/StageManagement.tsx +++ b/src/pages/admin/festivals/StageManagement.tsx @@ -1,11 +1,10 @@ import { useState } from "react"; -import { Link, useParams } from "@tanstack/react-router"; +import { useParams } from "@tanstack/react-router"; import { useStagesByEditionQuery } from "@/hooks/queries/stages/useStagesByEdition"; import { useDeleteStageMutation } from "@/hooks/queries/stages/useDeleteStage"; import { Stage } from "@/hooks/queries/stages/types"; -import { Button } from "@/components/ui/button"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; -import { Loader2, MapPin, Upload } from "lucide-react"; +import { Loader2, MapPin } from "lucide-react"; import { StagesTable } from "./StageManagement/StagesTable"; import { CreateStageDialog } from "./StageManagement/CreateStageDialog"; import { EditStageDialog } from "./StageManagement/EditStageDialog"; @@ -74,22 +73,7 @@ export function StageManagement(_props: StageManagementProps) { Stage Management -
- - -
+ diff --git a/src/routeTree.gen.ts b/src/routeTree.gen.ts index ee6778fd..eea6dd3f 100644 --- a/src/routeTree.gen.ts +++ b/src/routeTree.gen.ts @@ -33,13 +33,13 @@ import { Route as FestivalsFestivalSlugEditionsEditionSlugMapRouteImport } from import { Route as FestivalsFestivalSlugEditionsEditionSlugInfoRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/info' import { Route as FestivalsFestivalSlugEditionsEditionSlugExploreRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/explore' import { Route as AdminFestivalsFestivalSlugEditionsEditionSlugRouteImport } from './routes/admin/festivals/$festivalSlug/editions/$editionSlug' -import { Route as AdminFestivalsFestivalIdEditionIdImportRouteImport } from './routes/admin/festivals/$festivalId.$editionId.import' import { Route as FestivalsFestivalSlugEditionsEditionSlugSetsIndexRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/sets/index' import { Route as FestivalsFestivalSlugEditionsEditionSlugSetsSetSlugRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/sets/$setSlug' import { Route as FestivalsFestivalSlugEditionsEditionSlugScheduleTimelineRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/schedule/timeline' import { Route as FestivalsFestivalSlugEditionsEditionSlugScheduleListRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/schedule/list' import { Route as AdminFestivalsFestivalSlugEditionsEditionSlugStagesRouteImport } from './routes/admin/festivals/$festivalSlug/editions/$editionSlug/stages' import { Route as AdminFestivalsFestivalSlugEditionsEditionSlugSetsRouteImport } from './routes/admin/festivals/$festivalSlug/editions/$editionSlug/sets' +import { Route as AdminFestivalsFestivalSlugEditionsEditionSlugImportRouteImport } from './routes/admin/festivals/$festivalSlug/editions/$editionSlug/import' const TermsRoute = TermsRouteImport.update({ id: '/terms', @@ -171,12 
+171,6 @@ const AdminFestivalsFestivalSlugEditionsEditionSlugRoute = path: '/editions/$editionSlug', getParentRoute: () => AdminFestivalsFestivalSlugRoute, } as any) -const AdminFestivalsFestivalIdEditionIdImportRoute = - AdminFestivalsFestivalIdEditionIdImportRouteImport.update({ - id: '/$festivalId/$editionId/import', - path: '/$festivalId/$editionId/import', - getParentRoute: () => AdminFestivalsRoute, - } as any) const FestivalsFestivalSlugEditionsEditionSlugSetsIndexRoute = FestivalsFestivalSlugEditionsEditionSlugSetsIndexRouteImport.update({ id: '/', @@ -213,6 +207,12 @@ const AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute = path: '/sets', getParentRoute: () => AdminFestivalsFestivalSlugEditionsEditionSlugRoute, } as any) +const AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute = + AdminFestivalsFestivalSlugEditionsEditionSlugImportRouteImport.update({ + id: '/import', + path: '/import', + getParentRoute: () => AdminFestivalsFestivalSlugEditionsEditionSlugRoute, + } as any) export interface FileRoutesByFullPath { '/': typeof IndexRoute @@ -232,7 +232,6 @@ export interface FileRoutesByFullPath { '/admin/festivals/import': typeof AdminFestivalsImportRoute '/festivals/$festivalSlug/': typeof FestivalsFestivalSlugIndexRoute '/festivals/$festivalSlug/editions/$editionSlug': typeof FestivalsFestivalSlugEditionsEditionSlugRouteWithChildren - '/admin/festivals/$festivalId/$editionId/import': typeof AdminFestivalsFestivalIdEditionIdImportRoute '/admin/festivals/$festivalSlug/editions/$editionSlug': typeof AdminFestivalsFestivalSlugEditionsEditionSlugRouteWithChildren '/festivals/$festivalSlug/editions/$editionSlug/explore': typeof FestivalsFestivalSlugEditionsEditionSlugExploreRoute '/festivals/$festivalSlug/editions/$editionSlug/info': typeof FestivalsFestivalSlugEditionsEditionSlugInfoRoute @@ -240,6 +239,7 @@ export interface FileRoutesByFullPath { '/festivals/$festivalSlug/editions/$editionSlug/schedule': typeof 
FestivalsFestivalSlugEditionsEditionSlugScheduleRouteWithChildren '/festivals/$festivalSlug/editions/$editionSlug/sets': typeof FestivalsFestivalSlugEditionsEditionSlugSetsRouteWithChildren '/festivals/$festivalSlug/editions/$editionSlug/social': typeof FestivalsFestivalSlugEditionsEditionSlugSocialRoute + '/admin/festivals/$festivalSlug/editions/$editionSlug/import': typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute '/admin/festivals/$festivalSlug/editions/$editionSlug/sets': typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute '/admin/festivals/$festivalSlug/editions/$editionSlug/stages': typeof AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute '/festivals/$festivalSlug/editions/$editionSlug/schedule/list': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleListRoute @@ -264,13 +264,13 @@ export interface FileRoutesByTo { '/admin/festivals/import': typeof AdminFestivalsImportRoute '/festivals/$festivalSlug': typeof FestivalsFestivalSlugIndexRoute '/festivals/$festivalSlug/editions/$editionSlug': typeof FestivalsFestivalSlugEditionsEditionSlugRouteWithChildren - '/admin/festivals/$festivalId/$editionId/import': typeof AdminFestivalsFestivalIdEditionIdImportRoute '/admin/festivals/$festivalSlug/editions/$editionSlug': typeof AdminFestivalsFestivalSlugEditionsEditionSlugRouteWithChildren '/festivals/$festivalSlug/editions/$editionSlug/explore': typeof FestivalsFestivalSlugEditionsEditionSlugExploreRoute '/festivals/$festivalSlug/editions/$editionSlug/info': typeof FestivalsFestivalSlugEditionsEditionSlugInfoRoute '/festivals/$festivalSlug/editions/$editionSlug/map': typeof FestivalsFestivalSlugEditionsEditionSlugMapRoute '/festivals/$festivalSlug/editions/$editionSlug/schedule': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleRouteWithChildren '/festivals/$festivalSlug/editions/$editionSlug/social': typeof FestivalsFestivalSlugEditionsEditionSlugSocialRoute + '/admin/festivals/$festivalSlug/editions/$editionSlug/import': 
typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute '/admin/festivals/$festivalSlug/editions/$editionSlug/sets': typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute '/admin/festivals/$festivalSlug/editions/$editionSlug/stages': typeof AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute '/festivals/$festivalSlug/editions/$editionSlug/schedule/list': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleListRoute @@ -297,7 +297,6 @@ export interface FileRoutesById { '/admin/festivals/import': typeof AdminFestivalsImportRoute '/festivals/$festivalSlug/': typeof FestivalsFestivalSlugIndexRoute '/festivals/$festivalSlug/editions/$editionSlug': typeof FestivalsFestivalSlugEditionsEditionSlugRouteWithChildren - '/admin/festivals/$festivalId/$editionId/import': typeof AdminFestivalsFestivalIdEditionIdImportRoute '/admin/festivals/$festivalSlug/editions/$editionSlug': typeof AdminFestivalsFestivalSlugEditionsEditionSlugRouteWithChildren '/festivals/$festivalSlug/editions/$editionSlug/explore': typeof FestivalsFestivalSlugEditionsEditionSlugExploreRoute '/festivals/$festivalSlug/editions/$editionSlug/info': typeof FestivalsFestivalSlugEditionsEditionSlugInfoRoute @@ -305,6 +304,7 @@ export interface FileRoutesById { '/festivals/$festivalSlug/editions/$editionSlug/schedule': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleRouteWithChildren '/festivals/$festivalSlug/editions/$editionSlug/sets': typeof FestivalsFestivalSlugEditionsEditionSlugSetsRouteWithChildren '/festivals/$festivalSlug/editions/$editionSlug/social': typeof FestivalsFestivalSlugEditionsEditionSlugSocialRoute + '/admin/festivals/$festivalSlug/editions/$editionSlug/import': typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute '/admin/festivals/$festivalSlug/editions/$editionSlug/sets': typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute '/admin/festivals/$festivalSlug/editions/$editionSlug/stages': typeof 
AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute '/festivals/$festivalSlug/editions/$editionSlug/schedule/list': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleListRoute @@ -332,7 +332,6 @@ export interface FileRouteTypes { | '/admin/festivals/import' | '/festivals/$festivalSlug/' | '/festivals/$festivalSlug/editions/$editionSlug' - | '/admin/festivals/$festivalId/$editionId/import' | '/admin/festivals/$festivalSlug/editions/$editionSlug' | '/festivals/$festivalSlug/editions/$editionSlug/explore' | '/festivals/$festivalSlug/editions/$editionSlug/info' @@ -340,6 +339,7 @@ export interface FileRouteTypes { | '/festivals/$festivalSlug/editions/$editionSlug/schedule' | '/festivals/$festivalSlug/editions/$editionSlug/sets' | '/festivals/$festivalSlug/editions/$editionSlug/social' + | '/admin/festivals/$festivalSlug/editions/$editionSlug/import' | '/admin/festivals/$festivalSlug/editions/$editionSlug/sets' | '/admin/festivals/$festivalSlug/editions/$editionSlug/stages' | '/festivals/$festivalSlug/editions/$editionSlug/schedule/list' @@ -364,13 +364,13 @@ export interface FileRouteTypes { | '/admin/festivals/import' | '/festivals/$festivalSlug' | '/festivals/$festivalSlug/editions/$editionSlug' - | '/admin/festivals/$festivalId/$editionId/import' | '/admin/festivals/$festivalSlug/editions/$editionSlug' | '/festivals/$festivalSlug/editions/$editionSlug/explore' | '/festivals/$festivalSlug/editions/$editionSlug/info' | '/festivals/$festivalSlug/editions/$editionSlug/map' | '/festivals/$festivalSlug/editions/$editionSlug/schedule' | '/festivals/$festivalSlug/editions/$editionSlug/social' + | '/admin/festivals/$festivalSlug/editions/$editionSlug/import' | '/admin/festivals/$festivalSlug/editions/$editionSlug/sets' | '/admin/festivals/$festivalSlug/editions/$editionSlug/stages' | '/festivals/$festivalSlug/editions/$editionSlug/schedule/list' @@ -396,7 +396,6 @@ export interface FileRouteTypes { | '/admin/festivals/import' | '/festivals/$festivalSlug/' | 
'/festivals/$festivalSlug/editions/$editionSlug' - | '/admin/festivals/$festivalId/$editionId/import' | '/admin/festivals/$festivalSlug/editions/$editionSlug' | '/festivals/$festivalSlug/editions/$editionSlug/explore' | '/festivals/$festivalSlug/editions/$editionSlug/info' @@ -404,6 +403,7 @@ export interface FileRouteTypes { | '/festivals/$festivalSlug/editions/$editionSlug/schedule' | '/festivals/$festivalSlug/editions/$editionSlug/sets' | '/festivals/$festivalSlug/editions/$editionSlug/social' + | '/admin/festivals/$festivalSlug/editions/$editionSlug/import' | '/admin/festivals/$festivalSlug/editions/$editionSlug/sets' | '/admin/festivals/$festivalSlug/editions/$editionSlug/stages' | '/festivals/$festivalSlug/editions/$editionSlug/schedule/list' @@ -593,13 +593,6 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugRouteImport parentRoute: typeof AdminFestivalsFestivalSlugRoute } - '/admin/festivals/$festivalId/$editionId/import': { - id: '/admin/festivals/$festivalId/$editionId/import' - path: '/$festivalId/$editionId/import' - fullPath: '/admin/festivals/$festivalId/$editionId/import' - preLoaderRoute: typeof AdminFestivalsFestivalIdEditionIdImportRouteImport - parentRoute: typeof AdminFestivalsRoute - } '/festivals/$festivalSlug/editions/$editionSlug/sets/': { id: '/festivals/$festivalSlug/editions/$editionSlug/sets/' path: '/' @@ -642,6 +635,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRouteImport parentRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugRoute } + '/admin/festivals/$festivalSlug/editions/$editionSlug/import': { + id: '/admin/festivals/$festivalSlug/editions/$editionSlug/import' + path: '/import' + fullPath: '/admin/festivals/$festivalSlug/editions/$editionSlug/import' + preLoaderRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRouteImport + parentRoute: typeof 
AdminFestivalsFestivalSlugEditionsEditionSlugRoute + } } } @@ -658,12 +658,15 @@ const AdminArtistsRouteWithChildren = AdminArtistsRoute._addFileChildren( ) interface AdminFestivalsFestivalSlugEditionsEditionSlugRouteChildren { + AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute } const AdminFestivalsFestivalSlugEditionsEditionSlugRouteChildren: AdminFestivalsFestivalSlugEditionsEditionSlugRouteChildren = { + AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute: + AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute, AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute: AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute, AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute: @@ -693,14 +696,11 @@ const AdminFestivalsFestivalSlugRouteWithChildren = interface AdminFestivalsRouteChildren { AdminFestivalsFestivalSlugRoute: typeof AdminFestivalsFestivalSlugRouteWithChildren AdminFestivalsImportRoute: typeof AdminFestivalsImportRoute - AdminFestivalsFestivalIdEditionIdImportRoute: typeof AdminFestivalsFestivalIdEditionIdImportRoute } const AdminFestivalsRouteChildren: AdminFestivalsRouteChildren = { AdminFestivalsFestivalSlugRoute: AdminFestivalsFestivalSlugRouteWithChildren, AdminFestivalsImportRoute: AdminFestivalsImportRoute, - AdminFestivalsFestivalIdEditionIdImportRoute: - AdminFestivalsFestivalIdEditionIdImportRoute, } const AdminFestivalsRouteWithChildren = AdminFestivalsRoute._addFileChildren( diff --git a/src/routes/admin/festivals/import.tsx b/src/routes/admin/festivals/import.tsx deleted file mode 100644 index d7071bc0..00000000 --- a/src/routes/admin/festivals/import.tsx +++ /dev/null @@ -1,12 +0,0 @@ -import { createFileRoute } from 
"@tanstack/react-router"; -import { CSVImportPage } from "@/pages/admin/festivals/CSVImportPage"; -import { z } from "zod"; - -const importSearchSchema = z.object({ - tab: z.enum(["sets", "stages"]).optional(), -}); - -export const Route = createFileRoute("/admin/festivals/import")({ - component: CSVImportPage, - validateSearch: importSearchSchema, -}); diff --git a/src/services/csv/csvParser.ts b/src/services/csv/csvParser.ts deleted file mode 100644 index b3648f39..00000000 --- a/src/services/csv/csvParser.ts +++ /dev/null @@ -1,77 +0,0 @@ -export interface StageImportData { - name: string; -} - -export interface SetImportData { - name?: string; - stage_name: string; - artist_names: string; - time_start?: string; - date_start?: string; - time_end?: string; - date_end?: string; - description?: string; -} - -export function parseCSV(csvContent: string): string[][] { - const lines = csvContent.trim().split("\n"); - return lines.map((line) => { - const result: string[] = []; - let current = ""; - let inQuotes = false; - - for (let i = 0; i < line.length; i++) { - const char = line[i]; - - if (char === '"') { - inQuotes = !inQuotes; - } else if (char === "," && !inQuotes) { - result.push(current.trim()); - current = ""; - } else { - current += char; - } - } - - result.push(current.trim()); - return result.map((field) => field.replace(/^"|"$/g, "")); - }); -} - -export function parseStagesCSV(csvContent: string): StageImportData[] { - const lines = parseCSV(csvContent); - const headers = lines[0] as Array; - - return lines.slice(1).map((line) => { - const stage: Partial = {}; - headers.forEach((header, index) => { - stage[header] = line[index] || ""; - }); - return stage as StageImportData; - }); -} - -export function parseSetsCSV(csvContent: string): SetImportData[] { - const lines = parseCSV(csvContent); - const headers = lines[0]; - - return lines.slice(1).map((line) => { - const set: Partial = {}; - headers.forEach((header, index) => { - const value = line[index] 
|| ""; - if ( - header === "time_start" || - header === "time_end" || - header === "date_start" || - header === "date_end" - ) { - set[header as keyof SetImportData] = value || undefined; - } else if (header === "name") { - set[header as keyof SetImportData] = value || undefined; - } else { - set[header as keyof SetImportData] = value; - } - }); - return set as SetImportData; - }); -} diff --git a/src/services/csv/setDuplicator.ts b/src/services/csv/setDuplicator.ts deleted file mode 100644 index 96300991..00000000 --- a/src/services/csv/setDuplicator.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { supabase } from "@/integrations/supabase/client"; - -export async function duplicateSetWithVotes({ - newTimeEnd, - newTimeStart, - sourceSetId, - description, - stageId, -}: { - sourceSetId: string; - newTimeStart: string; - newTimeEnd: string; - stageId?: string | null; - description?: string | null; -}): Promise { - const params: { - source_set_id: string; - new_time_start: string; - new_time_end: string; - new_stage_id?: string | null; - new_description?: string | null; - } = { - source_set_id: sourceSetId, - new_time_start: newTimeStart, - new_time_end: newTimeEnd, - }; - - if (stageId !== undefined) { - params.new_stage_id = stageId; - } - - if (description !== undefined) { - params.new_description = description; - } - - const { data, error } = await supabase.rpc( - "duplicate_set_with_votes", - params, - ); - - if (error) { - throw new Error(`Failed to duplicate set: ${error.message}`); - } - - if (!data) { - throw new Error("No set ID returned from duplication"); - } - - return data as string; -} diff --git a/src/services/csv/setImporter.ts b/src/services/csv/setImporter.ts deleted file mode 100644 index 8f1d44b3..00000000 --- a/src/services/csv/setImporter.ts +++ /dev/null @@ -1,342 +0,0 @@ -import { supabase } from "@/integrations/supabase/client"; -import { generateSlug } from "@/lib/slug"; -import { convertLocalTimeToUTC, combineDateAndTime } from 
"@/lib/timeUtils"; -import type { SetImportData } from "./csvParser"; -import type { ImportResult } from "./types"; -import type { SetSelection } from "@/pages/admin/festivals/CSVImportDialog/SetsPreviewTable"; -import { duplicateSetWithVotes } from "./setDuplicator"; - -function generateSetNameFromArtists(artistNames: string[]): string { - if (artistNames.length === 0) return "Unnamed Set"; - if (artistNames.length === 1) return artistNames[0]; - if (artistNames.length === 2) return `${artistNames[0]} & ${artistNames[1]}`; - return `${artistNames[0]} & ${artistNames.length - 1} others`; -} - -export interface ArtistMapping { - csvName: string; - artistId: string | null; - shouldCreate: boolean; -} - -async function importSetsWithArtistMap({ - artistMappings, - editionId, - sets, - timezone = "UTC", - onProgress, - setSelections, -}: { - sets: SetImportData[]; - editionId: string; - artistMappings: Map; - setSelections?: Map; - timezone?: string; - onProgress?: (completed: number, total: number) => void; -}): Promise { - const currentUser = await supabase.auth.getUser(); - const userId = currentUser.data.user?.id || ""; - - const results: Array = []; - const errors: Array = []; - const total = sets.length; - - for (let i = 0; i < sets.length; i++) { - const set = sets[i]; - const setMappings = artistMappings.get(i); - const setSelection = setSelections?.get(i); - - const response = await importSingleSet({ - importedSet: set, - setMappings, - setSelection, - editionId, - timezone, - userId, - }); - - if (response.type === "error") { - errors.push(...response.errors); - continue; - } else { - results.push(response.setName); - } - - onProgress?.(i + 1, total); - } - - if (errors.length > 0 && results.length === 0) { - return { - success: false, - message: "Failed to import sets", - errors, - }; - } - - return { - success: true, - message: `Successfully imported ${results.length} sets${errors.length > 0 ? 
` (${errors.length} errors)` : ""}`, - inserted: results.length, - errors: errors.length > 0 ? errors : undefined, - }; -} - -async function importSingleSet({ - importedSet, - setMappings, - userId, - timezone, - editionId, - setSelection, -}: { - timezone: string; - userId: string; - importedSet: SetImportData; - setMappings: ArtistMapping[] | undefined; - editionId: string; - setSelection: SetSelection | undefined; -}): Promise< - | { - type: "error"; - errors: string[]; - } - | { - type: "success"; - setName: string; - } -> { - const errors: string[] = []; - try { - if (!setMappings || setMappings.length === 0) { - errors.push( - `Set "${importedSet.name || "Unnamed"}" has no artist mappings`, - ); - return { type: "error", errors }; - } - - const artistNames = setMappings.map((m) => m.csvName); - const setName = importedSet.name || generateSetNameFromArtists(artistNames); - - const artistIds: string[] = []; - - for (const mapping of setMappings) { - let artistId = mapping.artistId; - - if (!artistId && mapping.shouldCreate) { - const { data: newArtist, error: createError } = await supabase - .from("artists") - .insert({ - name: mapping.csvName, - slug: generateSlug(mapping.csvName), - added_by: userId, - }) - .select("id") - .single(); - - if (createError || !newArtist) { - errors.push( - `Failed to create artist "${mapping.csvName}": ${createError?.message || "No ID"}`, - ); - continue; - } - - artistId = newArtist.id; - } - - if (!artistId) { - errors.push(`Artist "${mapping.csvName}" could not be resolved`); - continue; - } - - artistIds.push(artistId); - } - - if (artistIds.length === 0) { - errors.push( - `Set "${importedSet.name || "Unnamed"}" has no valid artists`, - ); - return { type: "error", errors }; - } - - // Continue with set creation logic (same as original) - - let stageId = ""; - if (importedSet.stage_name) { - const { data: stage, error: stageError } = await supabase - .from("stages") - .select("id") - .eq("name", importedSet.stage_name) - 
.eq("festival_edition_id", editionId) - .single(); - - if (stageError || !stage) { - errors.push( - `Stage "${importedSet.stage_name}" not found for set "${setName}"`, - ); - return { type: "error", errors }; - } - - stageId = stage.id; - } - - const timeStartInput = - importedSet.date_start && importedSet.time_start - ? combineDateAndTime(importedSet.date_start, importedSet.time_start) - : importedSet.time_start; - const timeEndInput = - importedSet.date_end && importedSet.time_end - ? combineDateAndTime(importedSet.date_end, importedSet.time_end) - : importedSet.time_end; - - if (!timeStartInput) { - errors.push("Missing time start"); - return { type: "error", errors }; - } - - if (!timeEndInput) { - errors.push("Missing time end"); - return { type: "error", errors }; - } - - const utcTimeStart = convertLocalTimeToUTC(timeStartInput, timezone); - const utcTimeEnd = convertLocalTimeToUTC(timeEndInput, timezone); - - if (!utcTimeEnd || !utcTimeStart) { - errors.push("Time is not valid"); - return { type: "error", errors }; - } - - let createdSetId = ""; - let setError: Error | null = null; - - if (setSelection?.action === "match" && setSelection.matchedSetId) { - createdSetId = setSelection.matchedSetId; - const { error } = await supabase - .from("sets") - .update({ - stage_id: stageId || null, - time_start: utcTimeStart, - time_end: utcTimeEnd, - description: importedSet.description || null, - archived: false, - }) - .eq("id", createdSetId); - - setError = error; - } else if ( - setSelection?.action === "duplicate" && - setSelection.matchedSetId - ) { - try { - createdSetId = await duplicateSetWithVotes({ - sourceSetId: setSelection.matchedSetId, - newTimeStart: utcTimeStart!, - newTimeEnd: utcTimeEnd!, - stageId: stageId, - description: importedSet.description, - }); - } catch (error) { - setError = error as Error; - } - } else { - const { data, error } = await supabase - .from("sets") - .insert({ - name: setName, - slug: generateSlug(setName), - stage_id: 
stageId || null, - festival_edition_id: editionId, - time_start: utcTimeStart, - time_end: utcTimeEnd, - description: importedSet.description || null, - archived: false, - created_by: userId, - }) - .select("id") - .single(); - - createdSetId = data?.id || ""; - setError = error; - } - - if (setError || !createdSetId) { - errors.push( - `Failed to create set "${setName}": ${setError?.message || "No ID"}`, - ); - return { type: "error", errors }; - } - - // Link artists to set - for (const artistId of artistIds) { - await supabase.from("set_artists").upsert( - { - set_id: createdSetId, - artist_id: artistId, - }, - { - onConflict: "set_id,artist_id", - ignoreDuplicates: true, - }, - ); - } - - return { type: "success", setName }; - } catch (error) { - errors.push( - `Error processing set: ${error instanceof Error ? error.message : "Unknown error"}`, - ); - - return { errors, type: "error" }; - } -} - -export async function importSets( - sets: SetImportData[], - editionId: string, - timezone: string = "UTC", - onProgress?: (completed: number, total: number) => void, -): Promise { - const artistMappings = new Map(); - - sets.forEach((set, index) => { - const artistNames = set.artist_names - .split(",") - .map((name) => name.trim()) - .filter((name) => name.length > 0); - - artistMappings.set( - index, - artistNames.map((csvName) => ({ - csvName, - artistId: null, - shouldCreate: true, - })), - ); - }); - - return importSetsWithArtistMap({ - sets, - editionId, - artistMappings: artistMappings, - timezone, - onProgress, - }); -} - -export async function importSetsWithMappings( - sets: SetImportData[], - editionId: string, - artistMappings: Map, - setSelections?: Map, - timezone: string = "UTC", - onProgress?: (completed: number, total: number) => void, -): Promise { - return importSetsWithArtistMap({ - sets, - editionId, - artistMappings, - setSelections, - timezone, - onProgress, - }); -} diff --git a/src/services/csv/setMatcher.ts b/src/services/csv/setMatcher.ts 
deleted file mode 100644 index fb8ca253..00000000 --- a/src/services/csv/setMatcher.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { supabase } from "@/integrations/supabase/client"; -import type { SetImportData } from "./csvParser"; - -export interface MatchingSet { - id: string; - name: string; - stage_name: string | null; - artist_names: string[]; - vote_count: number; - time_start: string | null; -} - -export async function findMatchingSets({ - existingSets, - importedSets, -}: { - importedSets: SetImportData[]; - existingSets: { - id: string; - name: string; - time_start: string | null; - set_artists?: { artists: { name: string } }[]; - stages?: { name: string } | null; - }[]; -}): Promise> { - const matchMap = new Map(); - - for (let index = 0; index < importedSets.length; index++) { - const set = importedSets[index]; - const artistNames = set.artist_names - .split(",") - .map((name) => name.trim()) - .filter((name) => name.length > 0); - - if (artistNames.length === 0) { - matchMap.set(index, []); - continue; - } - - if (!existingSets || existingSets.length === 0) { - matchMap.set(index, []); - continue; - } - - const matches: MatchingSet[] = []; - - for (const existingSet of existingSets) { - if (!existingSet.set_artists || existingSet.set_artists.length === 0) { - continue; - } - - const setArtistNames = existingSet.set_artists - .map( - (sa: { artists: { name: string } | null } | null) => - sa?.artists?.name, - ) - .filter((name): name is string => name !== null && name !== undefined); - - function normalizeArtistName(name: string) { - return name - .toLowerCase() - .trim() - .replace(/[.,;!?]+$/, ""); - } - - const csvArtistNamesLower = artistNames.map(normalizeArtistName); - const setArtistNamesLower = setArtistNames.map(normalizeArtistName); - - csvArtistNamesLower.sort(); - setArtistNamesLower.sort(); - - const artistsMatch = - setArtistNamesLower.length === csvArtistNamesLower.length && - setArtistNamesLower.every( - (name: string, idx: number) => name 
=== csvArtistNamesLower[idx], - ); - - if (artistsMatch) { - const { count: voteCount } = await supabase - .from("votes") - .select("*", { count: "exact", head: true }) - .eq("set_id", existingSet.id); - - matches.push({ - id: existingSet.id, - name: existingSet.name, - stage_name: existingSet.stages?.name || null, - artist_names: setArtistNames, - vote_count: voteCount || 0, - time_start: existingSet.time_start, - }); - } - } - - matchMap.set(index, matches); - } - - return matchMap; -} diff --git a/src/services/csv/setSelectionValidator.ts b/src/services/csv/setSelectionValidator.ts deleted file mode 100644 index 064210a0..00000000 --- a/src/services/csv/setSelectionValidator.ts +++ /dev/null @@ -1,36 +0,0 @@ -import type { SetSelection } from "@/pages/admin/festivals/CSVImportDialog/SetsPreviewTable"; - -export interface SetSelectionValidationError { - rowIndices: number[]; - setId: string; - message: string; -} - -export function validateSetSelections( - selections: Map, -): SetSelectionValidationError[] { - const errors: SetSelectionValidationError[] = []; - const matchedSetIds = new Map(); - - selections.forEach((selection, rowIndex) => { - if (selection.action === "match" && selection.matchedSetId) { - const setId = selection.matchedSetId; - if (!matchedSetIds.has(setId)) { - matchedSetIds.set(setId, []); - } - matchedSetIds.get(setId)!.push(rowIndex); - } - }); - - matchedSetIds.forEach((rowIndices, setId) => { - if (rowIndices.length > 1) { - errors.push({ - rowIndices, - setId, - message: `Set is matched by multiple rows (${rowIndices.map((i) => i + 1).join(", ")}). Only one row can match an existing set. 
Use "Duplicate" or "Create new" for the others.`, - }); - } - }); - - return errors; -} diff --git a/src/services/csv/stageImporter.ts b/src/services/csv/stageImporter.ts deleted file mode 100644 index 146de6ea..00000000 --- a/src/services/csv/stageImporter.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { supabase } from "@/integrations/supabase/client"; -import { generateSlug } from "@/lib/slug"; -import type { StageImportData } from "./csvParser"; -import type { ImportResult } from "./types"; - -export async function importStages( - stages: StageImportData[], - editionId: string, - onProgress?: (completed: number, total: number) => void, -): Promise { - try { - const stageInserts = stages.map((stage) => ({ - name: stage.name, - slug: generateSlug(stage.name), - festival_edition_id: editionId, - archived: false, - })); - - const { data, error } = await supabase - .from("stages") - .upsert(stageInserts, { - onConflict: "name,festival_edition_id", - ignoreDuplicates: false, - }) - .select(); - - if (error) { - return { - success: false, - message: `Failed to import stages: ${error.message}`, - errors: [error.message], - }; - } - - // Report completion - onProgress?.(stages.length, stages.length); - - return { - success: true, - message: `Successfully imported ${data?.length || 0} stages`, - inserted: data?.length || 0, - }; - } catch (error) { - return { - success: false, - message: `Import failed: ${error instanceof Error ? error.message : "Unknown error"}`, - errors: [error instanceof Error ? 
error.message : "Unknown error"], - }; - } -} diff --git a/src/services/csv/timeValidator.ts b/src/services/csv/timeValidator.ts deleted file mode 100644 index 3c24733d..00000000 --- a/src/services/csv/timeValidator.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { convertLocalTimeToUTC, combineDateAndTime } from "@/lib/timeUtils"; - -export interface TimeValidationResult { - isValid: boolean; - error?: string; -} - -export function validateTimeString( - timeString: string | undefined, - dateString: string | undefined, - timezone: string, -): TimeValidationResult { - if (dateString && timeString) { - const combined = combineDateAndTime(dateString, timeString); - if (!combined) { - return { - isValid: false, - error: "Failed to combine date and time", - }; - } - - try { - const result = convertLocalTimeToUTC(combined, timezone); - if (result === null) { - return { - isValid: false, - error: "Invalid date/time format", - }; - } - return { isValid: true }; - } catch (error) { - return { - isValid: false, - error: error instanceof Error ? error.message : "Invalid format", - }; - } - } - - if (!timeString) { - return { isValid: true }; - } - - try { - const result = convertLocalTimeToUTC(timeString, timezone); - if (result === null) { - return { - isValid: false, - error: "Invalid date/time format", - }; - } - return { isValid: true }; - } catch (error) { - return { - isValid: false, - error: error instanceof Error ? 
error.message : "Invalid format", - }; - } -} - -export interface SetValidationResult { - isValid: boolean; - rowIndex: number; - errors: { - time_start?: string; - time_end?: string; - stage_name?: string; - artist_names?: string; - }; -} - -export function validateSetData( - set: { - stage_name: string; - artist_names: string; - time_start?: string; - date_start?: string; - time_end?: string; - date_end?: string; - }, - rowIndex: number, - timezone: string, -): SetValidationResult { - const errors: SetValidationResult["errors"] = {}; - - if (!set.stage_name || set.stage_name.trim() === "") { - errors.stage_name = "Stage name is required"; - } - - if (!set.artist_names || set.artist_names.trim() === "") { - errors.artist_names = "Artist name(s) required"; - } - - const timeStartValidation = validateTimeString( - set.time_start, - set.date_start, - timezone, - ); - if (!timeStartValidation.isValid) { - errors.time_start = timeStartValidation.error; - } - - const timeEndValidation = validateTimeString( - set.time_end, - set.date_end, - timezone, - ); - if (!timeEndValidation.isValid) { - errors.time_end = timeEndValidation.error; - } - - return { - isValid: Object.keys(errors).length === 0, - rowIndex, - errors, - }; -} diff --git a/src/services/csv/types.ts b/src/services/csv/types.ts deleted file mode 100644 index 00db3b70..00000000 --- a/src/services/csv/types.ts +++ /dev/null @@ -1,7 +0,0 @@ -export interface ImportResult { - success: boolean; - message: string; - inserted?: number; - updated?: number; - errors?: string[]; -} From 1d6e8691eb5ebb88478e831483dcd165a461c945 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 16:19:43 +0000 Subject: [PATCH 11/23] refactor(import): extract MismatchRow and use useId for stable HTML ids Raw mismatch.csvValue went straight into id/htmlFor, so stage names with spaces or special characters produced invalid HTML, and duplicate names across rows would collide and break label/radio binding. 
Extracting a MismatchRow child component lets us use useId() per row so each option gets a unique, valid id without sanitisation gymnastics. Addresses both the DOM-id PR comment and the request to break the loop body into its own component. --- .../ScheduleImport/StageMismatchResolver.tsx | 171 ++++++++++++------ 1 file changed, 111 insertions(+), 60 deletions(-) diff --git a/src/components/Admin/ScheduleImport/StageMismatchResolver.tsx b/src/components/Admin/ScheduleImport/StageMismatchResolver.tsx index 11832c33..480b1a61 100644 --- a/src/components/Admin/ScheduleImport/StageMismatchResolver.tsx +++ b/src/components/Admin/ScheduleImport/StageMismatchResolver.tsx @@ -1,8 +1,18 @@ +import { useId } from "react"; import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group"; import { Label } from "@/components/ui/label"; -import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; import { AlertTriangle } from "lucide-react"; -import { type DiffResult, type StageMismatchResolution } from "@/services/scheduleImportService"; +import { + type DiffResult, + type StageMismatchResolution, +} from "@/services/scheduleImportService"; type Mismatch = DiffResult["conflicts"]["stageNameMismatches"][number]; type DbStage = { id: string; name: string }; @@ -14,7 +24,12 @@ type Props = { onChange: (csvValue: string, resolution: StageMismatchResolution) => void; }; -export function StageMismatchResolver({ mismatches, dbStages, resolutions, onChange }: Props) { +export function StageMismatchResolver({ + mismatches, + dbStages, + resolutions, + onChange, +}: Props) { if (mismatches.length === 0) return null; return ( @@ -24,67 +39,103 @@ export function StageMismatchResolver({ mismatches, dbStages, resolutions, onCha Stage name conflicts — resolve before committing
- {mismatches.map((mismatch) => { - const resolution = resolutions[mismatch.csvValue] ?? { - action: "map", - dbStageName: mismatch.closestDbValue, - }; + {mismatches.map((mismatch) => ( + + ))} +
+ ); +} - return ( -
-

- CSV value: {mismatch.csvValue} -

+type MismatchRowProps = { + mismatch: Mismatch; + dbStages: DbStage[]; + resolution: StageMismatchResolution; + onChange: (csvValue: string, resolution: StageMismatchResolution) => void; +}; - { - if (action === "map") { - onChange(mismatch.csvValue, { action: "map", dbStageName: mismatch.closestDbValue }); - } else { - onChange(mismatch.csvValue, { action: "create" }); - } - }} - className="space-y-2" - > -
- -
- - {resolution.action === "map" && ( - - )} -
-
+function MismatchRow({ + mismatch, + dbStages, + resolution, + onChange, +}: MismatchRowProps) { + const baseId = useId(); + const mapId = `${baseId}-map`; + const createId = `${baseId}-create`; + + return ( +
+

+ CSV value:{" "} + {mismatch.csvValue} +

-
- - -
- + { + if (action === "map") { + onChange(mismatch.csvValue, { + action: "map", + dbStageName: mismatch.closestDbValue, + }); + } else { + onChange(mismatch.csvValue, { action: "create" }); + } + }} + className="space-y-2" + > +
+ +
+ + {resolution.action === "map" && ( + + )}
- ); - })} +
+ +
+ + +
+
); } From 19652364c2755f66ac0742ec440cc6e2cdfccc00 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 16:20:29 +0000 Subject: [PATCH 12/23] test(commit-schedule): clean up created sets and select id for delete Two leak fixes for the integration tests: - The first test only deleted the artist, leaving the created set in the edition forever. Use a unique set name and delete by name+edition. - The midnight-crossing test selected sets without id, then tried to delete by sets[0].id, which silently no-op'd. Include id in the select. --- .../commit-schedule/commit-schedule.test.ts | 262 +++++++++++------- 1 file changed, 159 insertions(+), 103 deletions(-) diff --git a/supabase/functions/commit-schedule/commit-schedule.test.ts b/supabase/functions/commit-schedule/commit-schedule.test.ts index 4a68ed06..12ca955c 100644 --- a/supabase/functions/commit-schedule/commit-schedule.test.ts +++ b/supabase/functions/commit-schedule/commit-schedule.test.ts @@ -13,7 +13,9 @@ const SERVICE_ROLE_KEY = Deno.env.get("SUPABASE_SERVICE_ROLE_KEY") ?? 
""; function skipIfNoEnv() { if (!SUPABASE_URL || !SERVICE_ROLE_KEY) { - console.warn("Skipping integration tests: SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY not set"); + console.warn( + "Skipping integration tests: SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY not set", + ); return true; } return false; @@ -23,14 +25,26 @@ function adminClient() { return createClient(SUPABASE_URL, SERVICE_ROLE_KEY); } -async function getTestEditionId(db: ReturnType): Promise { - const { data } = await db.from("festival_editions").select("id").limit(1).single(); +async function getTestEditionId( + db: ReturnType, +): Promise { + const { data } = await db + .from("festival_editions") + .select("id") + .limit(1) + .single(); assertExists(data, "No festival edition found — run test:setup first"); return data.id; } -async function getTestUserId(db: ReturnType): Promise { - const { data } = await db.from("admin_roles").select("user_id").limit(1).single(); +async function getTestUserId( + db: ReturnType, +): Promise { + const { data } = await db + .from("admin_roles") + .select("user_id") + .limit(1) + .single(); assertExists(data, "No admin user found — run test:setup first"); return data.user_id; } @@ -41,20 +55,23 @@ Deno.test("commit_schedule: creates new artist and set", async () => { const editionId = await getTestEditionId(db); const userId = await getTestUserId(db); const slug = `test-artist-${Date.now()}`; + const setName = `Test Artist Set ${slug}`; const { data, error } = await db.rpc("commit_schedule", { p_festival_edition_id: editionId, p_user_id: userId, p_artists_to_create: [{ name: "Test Artist", slug }], p_stages_to_create: [], - p_sets_to_create: [{ - name: "Test Artist Set", - description: null, - stageName: null, - timeStart: null, - timeEnd: null, - artistSlugs: [slug], - }], + p_sets_to_create: [ + { + name: setName, + description: null, + stageName: null, + timeStart: null, + timeEnd: null, + artistSlugs: [slug], + }, + ], p_sets_to_update: [], p_set_ids_to_archive: [], 
}); @@ -64,56 +81,81 @@ Deno.test("commit_schedule: creates new artist and set", async () => { assertEquals(data.setsUpdated, 0); // Cleanup - await db.from("artists").delete().eq("slug", slug); -}); - -Deno.test("commit_schedule: updates existing set without creating duplicate", async () => { - if (skipIfNoEnv()) return; - const db = adminClient(); - const editionId = await getTestEditionId(db); - const userId = await getTestUserId(db); - const slug = `test-update-artist-${Date.now()}`; - - // Create artist and set - await db.from("artists").insert({ name: "Update Test", slug }); - const { data: artist } = await db.from("artists").select("id").eq("slug", slug).single(); - const { data: set } = await db + await db .from("sets") - .insert({ festival_edition_id: editionId, name: "Old Name", slug: "old-name", created_by: userId }) - .select("id") - .single(); - await db.from("set_artists").insert({ set_id: set!.id, artist_id: artist!.id }); - - const { data, error } = await db.rpc("commit_schedule", { - p_festival_edition_id: editionId, - p_user_id: userId, - p_artists_to_create: [], - p_stages_to_create: [], - p_sets_to_create: [], - p_sets_to_update: [{ - id: set!.id, - name: "New Name", - description: "Updated", - stageName: null, - timeStart: null, - timeEnd: null, - artistSlugs: [slug], - }], - p_set_ids_to_archive: [], - }); - - assertEquals(error, null); - assertEquals(data.setsUpdated, 1); - - const { data: updated } = await db.from("sets").select("name, description").eq("id", set!.id).single(); - assertEquals(updated!.name, "New Name"); - assertEquals(updated!.description, "Updated"); - - // Cleanup - await db.from("sets").delete().eq("id", set!.id); + .delete() + .eq("festival_edition_id", editionId) + .eq("name", setName); await db.from("artists").delete().eq("slug", slug); }); +Deno.test( + "commit_schedule: updates existing set without creating duplicate", + async () => { + if (skipIfNoEnv()) return; + const db = adminClient(); + const editionId = await 
getTestEditionId(db); + const userId = await getTestUserId(db); + const slug = `test-update-artist-${Date.now()}`; + + // Create artist and set + await db.from("artists").insert({ name: "Update Test", slug }); + const { data: artist } = await db + .from("artists") + .select("id") + .eq("slug", slug) + .single(); + const { data: set } = await db + .from("sets") + .insert({ + festival_edition_id: editionId, + name: "Old Name", + slug: "old-name", + created_by: userId, + }) + .select("id") + .single(); + await db + .from("set_artists") + .insert({ set_id: set!.id, artist_id: artist!.id }); + + const { data, error } = await db.rpc("commit_schedule", { + p_festival_edition_id: editionId, + p_user_id: userId, + p_artists_to_create: [], + p_stages_to_create: [], + p_sets_to_create: [], + p_sets_to_update: [ + { + id: set!.id, + name: "New Name", + description: "Updated", + stageName: null, + timeStart: null, + timeEnd: null, + artistSlugs: [slug], + }, + ], + p_set_ids_to_archive: [], + }); + + assertEquals(error, null); + assertEquals(data.setsUpdated, 1); + + const { data: updated } = await db + .from("sets") + .select("name, description") + .eq("id", set!.id) + .single(); + assertEquals(updated!.name, "New Name"); + assertEquals(updated!.description, "Updated"); + + // Cleanup + await db.from("sets").delete().eq("id", set!.id); + await db.from("artists").delete().eq("slug", slug); + }, +); + Deno.test("commit_schedule: archives orphaned sets", async () => { if (skipIfNoEnv()) return; const db = adminClient(); @@ -122,7 +164,12 @@ Deno.test("commit_schedule: archives orphaned sets", async () => { const { data: set } = await db .from("sets") - .insert({ festival_edition_id: editionId, name: "Orphan Set", slug: "orphan-set", created_by: userId }) + .insert({ + festival_edition_id: editionId, + name: "Orphan Set", + slug: "orphan-set", + created_by: userId, + }) .select("id") .single(); @@ -139,52 +186,61 @@ Deno.test("commit_schedule: archives orphaned sets", async () => 
{ assertEquals(error, null); assertEquals(data.setsArchived, 1); - const { data: archived } = await db.from("sets").select("archived").eq("id", set!.id).single(); + const { data: archived } = await db + .from("sets") + .select("archived") + .eq("id", set!.id) + .single(); assertEquals(archived!.archived, true); // Cleanup await db.from("sets").delete().eq("id", set!.id); }); -Deno.test("commit_schedule: midnight-crossing times stored correctly", async () => { - if (skipIfNoEnv()) return; - const db = adminClient(); - const editionId = await getTestEditionId(db); - const userId = await getTestUserId(db); - const slug = `test-midnight-${Date.now()}`; - - await db.from("artists").insert({ name: "Late Night DJ", slug }); - - const { data, error } = await db.rpc("commit_schedule", { - p_festival_edition_id: editionId, - p_user_id: userId, - p_artists_to_create: [], - p_stages_to_create: [], - p_sets_to_create: [{ - name: "Late Night Set", - description: null, - stageName: null, - timeStart: "2026-07-11T23:00:00.000Z", - timeEnd: "2026-07-12T01:00:00.000Z", - artistSlugs: [slug], - }], - p_sets_to_update: [], - p_set_ids_to_archive: [], - }); - - assertEquals(error, null); - - const { data: sets } = await db - .from("sets") - .select("time_start, time_end, set_artists(artist_id, artists(slug))") - .eq("festival_edition_id", editionId) - .eq("name", "Late Night Set"); - - assertExists(sets?.[0]); - assertEquals(sets![0].time_start, "2026-07-11T23:00:00+00:00"); - assertEquals(sets![0].time_end, "2026-07-12T01:00:00+00:00"); - - // Cleanup - await db.from("sets").delete().eq("id", sets![0].id ?? 
""); - await db.from("artists").delete().eq("slug", slug); -}); +Deno.test( + "commit_schedule: midnight-crossing times stored correctly", + async () => { + if (skipIfNoEnv()) return; + const db = adminClient(); + const editionId = await getTestEditionId(db); + const userId = await getTestUserId(db); + const slug = `test-midnight-${Date.now()}`; + + await db.from("artists").insert({ name: "Late Night DJ", slug }); + + const { data, error } = await db.rpc("commit_schedule", { + p_festival_edition_id: editionId, + p_user_id: userId, + p_artists_to_create: [], + p_stages_to_create: [], + p_sets_to_create: [ + { + name: "Late Night Set", + description: null, + stageName: null, + timeStart: "2026-07-11T23:00:00.000Z", + timeEnd: "2026-07-12T01:00:00.000Z", + artistSlugs: [slug], + }, + ], + p_sets_to_update: [], + p_set_ids_to_archive: [], + }); + + assertEquals(error, null); + + const { data: sets } = await db + .from("sets") + .select("id, time_start, time_end, set_artists(artist_id, artists(slug))") + .eq("festival_edition_id", editionId) + .eq("name", "Late Night Set"); + + assertExists(sets?.[0]); + assertEquals(sets![0].time_start, "2026-07-11T23:00:00+00:00"); + assertEquals(sets![0].time_end, "2026-07-12T01:00:00+00:00"); + + // Cleanup + await db.from("sets").delete().eq("id", sets![0].id); + await db.from("artists").delete().eq("slug", slug); + }, +); From af733dfea7c78ff9faadc0c769d9fa31d323ef69 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 16:22:07 +0000 Subject: [PATCH 13/23] refactor(import): drive async actions through useMutation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace the manual loading/error/result state in CsvUploadStep (file read + analyse) and ScheduleImportWizard (commit) with useMutation. Extract the FileReader-based handler into a plain async readFile(file) so the mutation can simply await it instead of wrapping the callback API. 
Mutation status drives the UI state directly — no more setLoading/setError/setCommitting bookkeeping. --- .../Admin/ScheduleImport/CsvUploadStep.tsx | 68 ++++++++----------- .../ScheduleImport/ScheduleImportWizard.tsx | 65 ++++++++---------- 2 files changed, 59 insertions(+), 74 deletions(-) diff --git a/src/components/Admin/ScheduleImport/CsvUploadStep.tsx b/src/components/Admin/ScheduleImport/CsvUploadStep.tsx index e9f21f31..c5c5204f 100644 --- a/src/components/Admin/ScheduleImport/CsvUploadStep.tsx +++ b/src/components/Admin/ScheduleImport/CsvUploadStep.tsx @@ -1,5 +1,6 @@ import { useRef, useState } from "react"; import { Upload, Loader2 } from "lucide-react"; +import { useMutation } from "@tanstack/react-query"; import { Button } from "@/components/ui/button"; import { Label } from "@/components/ui/label"; import { @@ -30,55 +31,44 @@ type Props = { onDiffReady: (diff: DiffResult) => void; }; +async function readFile(file: File): Promise { + const content = await file.text(); + const parsed = parseScheduleCsv(content); + if (parsed.length === 0) { + throw new Error( + "No valid rows found. Make sure your CSV has an 'Artists' column.", + ); + } + return parsed; +} + export function CsvUploadStep({ festivalEditionId, onDiffReady }: Props) { const fileRef = useRef(null); const [timezone, setTimezone] = useState("Europe/Lisbon"); const [fileName, setFileName] = useState(null); - const [rows, setRows] = useState([]); - const [loading, setLoading] = useState(false); - const [error, setError] = useState(null); + + const readFileMutation = useMutation({ mutationFn: readFile }); + const analyseMutation = useMutation({ + mutationFn: (rows: CsvRow[]) => + callDiffSchedule(festivalEditionId, timezone, rows), + onSuccess: onDiffReady, + }); + + const rows = readFileMutation.data ?? []; + const error = + analyseMutation.error?.message ?? readFileMutation.error?.message ?? 
null; function handleFileChange(e: React.ChangeEvent) { const file = e.target.files?.[0]; if (!file) return; setFileName(file.name); - setError(null); - - const reader = new FileReader(); - reader.onload = (ev) => { - const content = ev.target?.result as string; - try { - const parsed = parseScheduleCsv(content); - if (parsed.length === 0) { - setError( - "No valid rows found. Make sure your CSV has an 'Artists' column.", - ); - setRows([]); - } else { - setRows(parsed); - } - } catch { - setError("Failed to parse CSV. Check the file format."); - setRows([]); - } - }; - reader.readAsText(file); + analyseMutation.reset(); + readFileMutation.mutate(file); } - async function handleAnalyse() { + function handleAnalyse() { if (rows.length === 0) return; - setLoading(true); - setError(null); - try { - const diff = await callDiffSchedule(festivalEditionId, timezone, rows); - onDiffReady(diff); - } catch (err) { - setError( - err instanceof Error ? err.message : "Failed to analyse schedule.", - ); - } finally { - setLoading(false); - } + analyseMutation.mutate(rows); } return ( @@ -140,10 +130,10 @@ export function CsvUploadStep({ festivalEditionId, onDiffReady }: Props) {

- These sets exist in the database but were not matched to any row in your CSV. - Archived sets are hidden from users but votes are preserved. + These sets exist in the database but were not matched to any row in your + CSV. Archived sets are hidden from users but votes are preserved. Default: Keep.

- {orphanedSets.map((set) => { - const resolution = resolutions[set.id] ?? "keep"; - const isArchive = resolution === "archive"; - const time = formatTime(set.timeStart); + {orphanedSets.map((set) => ( + onChange(set.id, resolution)} + /> + ))} +
+ + ); +} - return ( -
-
-

{set.name}

-

- {[set.stage, time].filter(Boolean).join(" · ") || "No schedule info"} -

-
-
- - onChange(set.id, checked ? "archive" : "keep")} - /> -
-
- ); - })} +type OrphanedItemProps = { + set: OrphanedSet; + resolution: OrphanResolution; + onChange: (resolution: OrphanResolution) => void; +}; + +function OrphanedItem({ set, resolution, onChange }: OrphanedItemProps) { + const isArchive = resolution === "archive"; + const time = formatTime(set.timeStart); + const switchId = `orphan-${set.id}`; + + return ( +
+
+

{set.name}

+

+ {[set.stage, time].filter(Boolean).join(" · ") || "No schedule info"} +

+
+
+ + onChange(checked ? "archive" : "keep")} + />
); } + +function formatTime(iso: string | null) { + if (!iso) return null; + return new Date(iso).toLocaleString(undefined, { + month: "short", + day: "numeric", + hour: "2-digit", + minute: "2-digit", + }); +} From dd2df7a49a7ba17ff92674d56dd0d6585c9a823f Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 16:25:10 +0000 Subject: [PATCH 16/23] refactor(import): co-locate FestivalScheduleImport in its route file Inline the page component into the import route. The wrapper was a 3-line file that only forwarded params, so keeping it as a separate module didn't add anything. --- .../festivals/FestivalScheduleImport.tsx | 36 ---------------- .../editions/$editionSlug/import.tsx | 41 ++++++++++++++++++- 2 files changed, 39 insertions(+), 38 deletions(-) delete mode 100644 src/pages/admin/festivals/FestivalScheduleImport.tsx diff --git a/src/pages/admin/festivals/FestivalScheduleImport.tsx b/src/pages/admin/festivals/FestivalScheduleImport.tsx deleted file mode 100644 index a3187e6f..00000000 --- a/src/pages/admin/festivals/FestivalScheduleImport.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import { useParams } from "@tanstack/react-router"; -import { Loader2 } from "lucide-react"; -import { Card, CardContent } from "@/components/ui/card"; -import { useFestivalEditionBySlugQuery } from "@/hooks/queries/festivals/editions/useFestivalEditionBySlug"; -import { ScheduleImportWizard } from "@/components/Admin/ScheduleImport/ScheduleImportWizard"; - -export default function FestivalScheduleImport() { - const { festivalSlug, editionSlug } = useParams({ - from: "/admin/festivals/$festivalSlug/editions/$editionSlug/import", - }); - - const editionQuery = useFestivalEditionBySlugQuery({ festivalSlug, editionSlug }); - - if (editionQuery.isLoading) { - return ( - - - - Loading... 
- - - ); - } - - if (!editionQuery.data) { - return ( - - - Edition not found - - - ); - } - - return ; -} diff --git a/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx b/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx index f78ab6e7..1c01e04a 100644 --- a/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx +++ b/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx @@ -1,8 +1,45 @@ -import { createFileRoute } from "@tanstack/react-router"; -import FestivalScheduleImport from "@/pages/admin/festivals/FestivalScheduleImport"; +import { createFileRoute, useParams } from "@tanstack/react-router"; +import { Loader2 } from "lucide-react"; +import { Card, CardContent } from "@/components/ui/card"; +import { useFestivalEditionBySlugQuery } from "@/hooks/queries/festivals/editions/useFestivalEditionBySlug"; +import { ScheduleImportWizard } from "@/components/Admin/ScheduleImport/ScheduleImportWizard"; export const Route = createFileRoute( "/admin/festivals/$festivalSlug/editions/$editionSlug/import", )({ component: FestivalScheduleImport, }); + +function FestivalScheduleImport() { + const { festivalSlug, editionSlug } = useParams({ + from: "/admin/festivals/$festivalSlug/editions/$editionSlug/import", + }); + + const editionQuery = useFestivalEditionBySlugQuery({ + festivalSlug, + editionSlug, + }); + + if (editionQuery.isLoading) { + return ( + + + + Loading... + + + ); + } + + if (!editionQuery.data) { + return ( + + + Edition not found + + + ); + } + + return ; +} From 6b2b47f3f046a8bbcd95605cb092755c1a20eb2e Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 16:26:06 +0000 Subject: [PATCH 17/23] refactor(import): load edition via route loader instead of useQuery MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The component-level useQuery duplicated work the parent route already does via beforeLoad ensureQueryData. 
Hoist the call into a route loader so the component receives a resolved FestivalEdition through useLoaderData and the loading/not-found branches go away — the router blocks rendering until the data is ready. --- .../editions/$editionSlug/import.tsx | 49 ++++++------------- 1 file changed, 14 insertions(+), 35 deletions(-) diff --git a/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx b/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx index 1c01e04a..8eb9a033 100644 --- a/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx +++ b/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx @@ -1,45 +1,24 @@ -import { createFileRoute, useParams } from "@tanstack/react-router"; -import { Loader2 } from "lucide-react"; -import { Card, CardContent } from "@/components/ui/card"; -import { useFestivalEditionBySlugQuery } from "@/hooks/queries/festivals/editions/useFestivalEditionBySlug"; +import { createFileRoute } from "@tanstack/react-router"; +import { editionsKeys } from "@/hooks/queries/festivals/editions/types"; +import { fetchFestivalEditionBySlug } from "@/hooks/queries/festivals/editions/useFestivalEditionBySlug"; import { ScheduleImportWizard } from "@/components/Admin/ScheduleImport/ScheduleImportWizard"; export const Route = createFileRoute( "/admin/festivals/$festivalSlug/editions/$editionSlug/import", )({ + loader: ({ params, context }) => + context.queryClient.ensureQueryData({ + queryKey: editionsKeys.bySlug(params.festivalSlug, params.editionSlug), + queryFn: () => + fetchFestivalEditionBySlug({ + festivalSlug: params.festivalSlug, + editionSlug: params.editionSlug, + }), + }), component: FestivalScheduleImport, }); function FestivalScheduleImport() { - const { festivalSlug, editionSlug } = useParams({ - from: "/admin/festivals/$festivalSlug/editions/$editionSlug/import", - }); - - const editionQuery = useFestivalEditionBySlugQuery({ - festivalSlug, - editionSlug, - }); - - 
if (editionQuery.isLoading) { - return ( - - - - Loading... - - - ); - } - - if (!editionQuery.data) { - return ( - - - Edition not found - - - ); - } - - return ; + const edition = Route.useLoaderData(); + return ; } From a0c36cbe6e158587466070c5f2967ce3816076f3 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 16:28:05 +0000 Subject: [PATCH 18/23] refactor(diff): split computeDiff into per-row helpers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The original 100+ line loop body did artist resolution, stage resolution with a four-branch tree, time computation, set matching, and dispatch all inline. Pull each phase into a named helper: - buildIndexes for the lookup maps - resolveArtists for slug derivation + new-artist accumulation - resolveStage returning a tagged kind so the caller maps it onto the right accumulator - computeTimes for the date/time conversion (incl. midnight crossing) - findMatchingSet for the candidate-narrowing logic The orchestrator now reads as the actual pipeline: resolve → match → dispatch. Behaviour is unchanged. 
--- supabase/functions/diff-schedule/diff.ts | 238 +++++++++++++++-------- 1 file changed, 161 insertions(+), 77 deletions(-) diff --git a/supabase/functions/diff-schedule/diff.ts b/supabase/functions/diff-schedule/diff.ts index b1e2ac4f..62caccda 100644 --- a/supabase/functions/diff-schedule/diff.ts +++ b/supabase/functions/diff-schedule/diff.ts @@ -77,7 +77,11 @@ export function advanceDateByOne(dateStr: string): string { return d.toISOString().split("T")[0]; } -export function localToUtc(dateStr: string, timeStr: string, timezone: string): string { +export function localToUtc( + dateStr: string, + timeStr: string, + timezone: string, +): string { const localIso = `${dateStr}T${timeStr}:00`; const naiveUtc = new Date(localIso + "Z"); // sv-SE locale gives "YYYY-MM-DD HH:MM:SS" — unambiguously parseable as UTC @@ -88,17 +92,24 @@ export function localToUtc(dateStr: string, timeStr: string, timezone: string): return new Date(naiveUtc.getTime() + offsetMs).toISOString(); } -export function computeDiff( - rows: CsvRow[], +type DbIndexes = { + stageByNameLower: Map; + stageById: Map; + existingArtistSlugs: Set; + setsByArtistKey: Map; +}; + +type StageResolution = + | { kind: "exact"; id: string; name: string } + | { kind: "mismatch"; resolvedName: string; closest: DbStage } + | { kind: "new"; resolvedName: string } + | { kind: "none" }; + +function buildIndexes( dbStages: DbStage[], dbSets: DbSet[], dbArtists: DbArtist[], - timezone: string, -): DiffResult { - const stageByNameLower = new Map(dbStages.map((s) => [s.name.toLowerCase(), s])); - const stageById = new Map(dbStages.map((s) => [s.id, s])); - const existingArtistSlugs = new Set(dbArtists.map((a) => a.slug)); - +): DbIndexes { const setsByArtistKey = new Map(); for (const set of dbSets) { const slugs = set.set_artists.map((sa) => sa.artists.slug); @@ -107,6 +118,104 @@ export function computeDiff( bucket.push(set); setsByArtistKey.set(key, bucket); } + return { + stageByNameLower: new Map(dbStages.map((s) => 
[s.name.toLowerCase(), s])), + stageById: new Map(dbStages.map((s) => [s.id, s])), + existingArtistSlugs: new Set(dbArtists.map((a) => a.slug)), + setsByArtistKey, + }; +} + +function resolveArtists( + row: CsvRow, + existingSlugs: Set, + seenNewSlugs: Set, + artistsToCreate: { name: string; slug: string }[], +): string[] { + const slugs: string[] = []; + for (const name of row.artists) { + const slug = toSlug(name); + slugs.push(slug); + if (!existingSlugs.has(slug) && !seenNewSlugs.has(slug)) { + artistsToCreate.push({ name, slug }); + seenNewSlugs.add(slug); + } + } + return slugs; +} + +function resolveStage( + rawStage: string | undefined, + dbStages: DbStage[], + stageByNameLower: Map, +): StageResolution { + if (!rawStage) return { kind: "none" }; + + const lower = rawStage.toLowerCase(); + const exactMatch = stageByNameLower.get(lower); + if (exactMatch) { + return { kind: "exact", id: exactMatch.id, name: exactMatch.name }; + } + + function strip(s: string) { + return s.toLowerCase().replace(/[^a-z0-9]/g, ""); + } + const closeMatch = dbStages.find((s) => { + const a = strip(s.name); + const b = strip(lower); + return a === b || a.includes(b) || b.includes(a); + }); + + if (closeMatch) { + return { kind: "mismatch", resolvedName: rawStage, closest: closeMatch }; + } + return { kind: "new", resolvedName: rawStage }; +} + +function computeTimes( + row: CsvRow, + timezone: string, +): { timeStart: string | null; timeEnd: string | null } { + let timeStart: string | null = null; + let timeEnd: string | null = null; + if (row.date && row.startTime) { + timeStart = localToUtc(row.date, row.startTime, timezone); + } + if (row.date && row.endTime) { + const crossesMidnight = + row.startTime != null && row.endTime < row.startTime; + const endDate = crossesMidnight ? 
advanceDateByOne(row.date) : row.date; + timeEnd = localToUtc(endDate, row.endTime, timezone); + } + return { timeStart, timeEnd }; +} + +function findMatchingSet( + candidates: DbSet[], + resolvedStageId: string | null, + date: string | undefined, +): DbSet | null { + if (candidates.length === 0) return null; + if (candidates.length === 1) return candidates[0]; + return ( + (resolvedStageId + ? (candidates.find((s) => s.stage_id === resolvedStageId) ?? null) + : null) ?? + (date + ? (candidates.find((s) => s.time_start?.startsWith(date)) ?? null) + : null) ?? + candidates[0] + ); +} + +export function computeDiff( + rows: CsvRow[], + dbStages: DbStage[], + dbSets: DbSet[], + dbArtists: DbArtist[], + timezone: string, +): DiffResult { + const indexes = buildIndexes(dbStages, dbSets, dbArtists); const matchedSetIds = new Set(); const seenNewArtistSlugs = new Set(); @@ -115,87 +224,57 @@ export function computeDiff( const artistsToCreate: { name: string; slug: string }[] = []; const stagesToCreate: { name: string }[] = []; - const stageNameMismatches: DiffResult["conflicts"]["stageNameMismatches"] = []; + const stageNameMismatches: DiffResult["conflicts"]["stageNameMismatches"] = + []; const setsToCreate: SetPayload[] = []; const setsToUpdate: ({ id: string } & SetPayload)[] = []; for (const row of rows) { - const artistSlugs: string[] = []; - for (const name of row.artists) { - const slug = toSlug(name); - artistSlugs.push(slug); - if (!existingArtistSlugs.has(slug) && !seenNewArtistSlugs.has(slug)) { - artistsToCreate.push({ name, slug }); - seenNewArtistSlugs.add(slug); - } - } + const artistSlugs = resolveArtists( + row, + indexes.existingArtistSlugs, + seenNewArtistSlugs, + artistsToCreate, + ); - // resolvedStageId: used only for set matching (narrowing candidates by stage) - // resolvedStageName: goes into the set payload and is passed to the RPC + const stage = resolveStage(row.stage, dbStages, indexes.stageByNameLower); let resolvedStageId: string | null = 
null; let resolvedStageName: string | null = null; - - if (row.stage) { - const lower = row.stage.toLowerCase(); - const exactMatch = stageByNameLower.get(lower); - if (exactMatch) { - resolvedStageId = exactMatch.id; - resolvedStageName = exactMatch.name; - } else { - const strip = (s: string) => s.toLowerCase().replace(/[^a-z0-9]/g, ""); - const closeMatch = dbStages.find((s) => { - const a = strip(s.name); - const b = strip(lower); - return a === b || a.includes(b) || b.includes(a); - }); - if (closeMatch && !seenMismatchedStages.has(row.stage)) { + switch (stage.kind) { + case "exact": + resolvedStageId = stage.id; + resolvedStageName = stage.name; + break; + case "mismatch": + resolvedStageName = stage.resolvedName; + if (!seenMismatchedStages.has(stage.resolvedName)) { stageNameMismatches.push({ - csvValue: row.stage, - closestDbValue: closeMatch.name, - dbStageId: closeMatch.id, + csvValue: stage.resolvedName, + closestDbValue: stage.closest.name, + dbStageId: stage.closest.id, }); - seenMismatchedStages.add(row.stage); - } else if (!closeMatch && !seenNewStageNames.has(row.stage)) { - stagesToCreate.push({ name: row.stage }); - seenNewStageNames.add(row.stage); + seenMismatchedStages.add(stage.resolvedName); + } + break; + case "new": + resolvedStageName = stage.resolvedName; + if (!seenNewStageNames.has(stage.resolvedName)) { + stagesToCreate.push({ name: stage.resolvedName }); + seenNewStageNames.add(stage.resolvedName); } - // For mismatches and new stages, keep the CSV value as stageName. - // The frontend will resolve mismatches before committing. - resolvedStageName = row.stage; - } + break; + case "none": + break; } - let timeStart: string | null = null; - let timeEnd: string | null = null; - if (row.date && row.startTime) { - timeStart = localToUtc(row.date, row.startTime, timezone); - } - if (row.date && row.endTime) { - const crossesMidnight = row.startTime != null && row.endTime < row.startTime; - const endDate = crossesMidnight ? 
advanceDateByOne(row.date) : row.date; - timeEnd = localToUtc(endDate, row.endTime, timezone); - } + const { timeStart, timeEnd } = computeTimes(row, timezone); - const setName = row.setName?.trim() || row.artists.join(" b2b "); - const key = artistKey(artistSlugs); - const candidates = setsByArtistKey.get(key) ?? []; - - let matched: DbSet | null = null; - if (candidates.length === 1) { - matched = candidates[0]; - } else if (candidates.length > 1) { - matched = - (resolvedStageId - ? candidates.find((s) => s.stage_id === resolvedStageId) ?? null - : null) ?? - (row.date - ? candidates.find((s) => s.time_start?.startsWith(row.date!)) ?? null - : null) ?? - candidates[0]; - } + const candidates = + indexes.setsByArtistKey.get(artistKey(artistSlugs)) ?? []; + const matched = findMatchingSet(candidates, resolvedStageId, row.date); const payload: SetPayload = { - name: setName, + name: row.setName?.trim() || row.artists.join(" b2b "), description: row.description ?? null, stageName: resolvedStageName, timeStart, @@ -216,7 +295,7 @@ export function computeDiff( .map((s) => ({ id: s.id, name: s.name, - stage: stageById.get(s.stage_id ?? "")?.name ?? null, + stage: indexes.stageById.get(s.stage_id ?? "")?.name ?? null, timeStart: s.time_start, })); @@ -229,7 +308,12 @@ export function computeDiff( setsOrphaned: orphanedSets.length, }, newArtistNames: artistsToCreate.map((a) => a.name), - cleanOperations: { artistsToCreate, stagesToCreate, setsToCreate, setsToUpdate }, + cleanOperations: { + artistsToCreate, + stagesToCreate, + setsToCreate, + setsToUpdate, + }, conflicts: { stageNameMismatches, orphanedSets }, }; } From 33f9d67b9e8f318415f6dd6812f12e5236c9715c Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 16:28:54 +0000 Subject: [PATCH 19/23] feat(commit-schedule): validate request body with zod Replace the ad-hoc 'is festivalEditionId truthy?' 
check with a zod schema covering every field the RPC consumes, including UUID format on the edition id and archive ids. Bad input now returns 400 with the field-level issues instead of failing later inside the RPC with an opaque error. --- supabase/functions/commit-schedule/index.ts | 87 ++++++++++++--------- 1 file changed, 49 insertions(+), 38 deletions(-) diff --git a/supabase/functions/commit-schedule/index.ts b/supabase/functions/commit-schedule/index.ts index 9e409055..ba0f8792 100644 --- a/supabase/functions/commit-schedule/index.ts +++ b/supabase/functions/commit-schedule/index.ts @@ -1,23 +1,28 @@ import { serve } from "https://deno.land/std@0.168.0/http/server.ts"; +import { z } from "https://deno.land/x/zod@v3.22.4/mod.ts"; import { getAdminClient, requireAdmin, corsHeaders } from "../_shared/auth.ts"; -type SetPayload = { - name: string; - description?: string; - stageName?: string; - timeStart?: string; - timeEnd?: string; - artistSlugs: string[]; -}; +const setPayloadSchema = z.object({ + name: z.string(), + description: z.string().nullish(), + stageName: z.string().nullish(), + timeStart: z.string().nullish(), + timeEnd: z.string().nullish(), + artistSlugs: z.array(z.string()), +}); -type CommitRequest = { - festivalEditionId: string; - artistsToCreate: { name: string; slug: string }[]; - stagesToCreate: { name: string }[]; - setsToCreate: SetPayload[]; - setsToUpdate: ({ id: string } & SetPayload)[]; - setIdsToArchive: string[]; -}; +const commitRequestSchema = z.object({ + festivalEditionId: z.string().uuid(), + artistsToCreate: z + .array(z.object({ name: z.string(), slug: z.string() })) + .default([]), + stagesToCreate: z.array(z.object({ name: z.string() })).default([]), + setsToCreate: z.array(setPayloadSchema).default([]), + setsToUpdate: z + .array(setPayloadSchema.extend({ id: z.string().uuid() })) + .default([]), + setIdsToArchive: z.array(z.string().uuid()).default([]), +}); serve(async (req) => { if (req.method === "OPTIONS") { @@ -33,7 
+38,20 @@ serve(async (req) => { } try { - const body: CommitRequest = await req.json(); + const parsed = commitRequestSchema.safeParse(await req.json()); + if (!parsed.success) { + return new Response( + JSON.stringify({ + error: "Invalid request", + issues: parsed.error.issues, + }), + { + status: 400, + headers: { ...corsHeaders, "Content-Type": "application/json" }, + }, + ); + } + const { festivalEditionId, artistsToCreate, @@ -41,33 +59,26 @@ serve(async (req) => { setsToCreate, setsToUpdate, setIdsToArchive, - } = body; - - if (!festivalEditionId) { - return new Response( - JSON.stringify({ error: "Missing required field: festivalEditionId" }), - { status: 400, headers: { ...corsHeaders, "Content-Type": "application/json" } }, - ); - } + } = parsed.data; const db = getAdminClient(); const { data, error } = await db.rpc("commit_schedule", { p_festival_edition_id: festivalEditionId, p_user_id: auth.userId, - p_artists_to_create: artistsToCreate ?? [], - p_stages_to_create: stagesToCreate ?? [], - p_sets_to_create: setsToCreate ?? [], - p_sets_to_update: setsToUpdate ?? [], - p_set_ids_to_archive: setIdsToArchive ?? 
[], + p_artists_to_create: artistsToCreate, + p_stages_to_create: stagesToCreate, + p_sets_to_create: setsToCreate, + p_sets_to_update: setsToUpdate, + p_set_ids_to_archive: setIdsToArchive, }); if (error) { console.error("commit_schedule RPC error:", error); - return new Response( - JSON.stringify({ error: error.message }), - { status: 400, headers: { ...corsHeaders, "Content-Type": "application/json" } }, - ); + return new Response(JSON.stringify({ error: error.message }), { + status: 400, + headers: { ...corsHeaders, "Content-Type": "application/json" }, + }); } return new Response(JSON.stringify(data), { @@ -75,9 +86,9 @@ serve(async (req) => { }); } catch (error) { console.error("commit-schedule error:", error); - return new Response( - JSON.stringify({ error: error.message }), - { status: 500, headers: { ...corsHeaders, "Content-Type": "application/json" } }, - ); + return new Response(JSON.stringify({ error: error.message }), { + status: 500, + headers: { ...corsHeaders, "Content-Type": "application/json" }, + }); } }); From 9f7fc1c59ee53ef61f2f4b503b095e45c3c9802f Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 9 May 2026 16:30:17 +0000 Subject: [PATCH 20/23] refactor(rpc): extract commit_schedule helpers for stage lookup, slug, timestamp parse and artist sync MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The body of commit_schedule was repeating four patterns inline: - a stage_id resolution subquery on (edition, name) - a CASE WHEN ... ::TIMESTAMPTZ for nullable timestamp casts - a hand-rolled regex slug builder - the delete-then-insert-on-conflict dance for set_artists Pull each into a commit_schedule__-prefixed helper so the main body reads as the actual workflow rather than a wall of subqueries. Behaviour is unchanged — the sync helper still scopes its DELETE through sets to preserve edition isolation. 
--- .../20260509142022_commit_schedule_rpc.sql | 137 +++++++++++------- 1 file changed, 82 insertions(+), 55 deletions(-) diff --git a/supabase/migrations/20260509142022_commit_schedule_rpc.sql b/supabase/migrations/20260509142022_commit_schedule_rpc.sql index a48fb166..7efec330 100644 --- a/supabase/migrations/20260509142022_commit_schedule_rpc.sql +++ b/supabase/migrations/20260509142022_commit_schedule_rpc.sql @@ -20,6 +20,73 @@ ALTER TABLE public.artists ALTER TABLE public.stages ADD CONSTRAINT stages_edition_name_unique UNIQUE (festival_edition_id, name); +-- Helpers for commit_schedule. Named with the commit_schedule__ prefix so it +-- is obvious they're internal to that RPC. + +CREATE OR REPLACE FUNCTION public.commit_schedule__slugify(p_name TEXT) +RETURNS TEXT +LANGUAGE sql +IMMUTABLE +SET search_path = public +AS $$ + SELECT LOWER( + REGEXP_REPLACE( + REGEXP_REPLACE(TRIM(p_name), '[^a-zA-Z0-9\s]', '', 'g'), + '\s+', '-', 'g' + ) + ); +$$; + +CREATE OR REPLACE FUNCTION public.commit_schedule__resolve_stage_id( + p_festival_edition_id UUID, + p_stage_name TEXT +) +RETURNS UUID +LANGUAGE sql +STABLE +SET search_path = public +AS $$ + SELECT s.id + FROM stages s + WHERE s.festival_edition_id = p_festival_edition_id + AND s.name = p_stage_name + LIMIT 1; +$$; + +CREATE OR REPLACE FUNCTION public.commit_schedule__parse_ts(p_value TEXT) +RETURNS TIMESTAMPTZ +LANGUAGE sql +IMMUTABLE +AS $$ + SELECT CASE WHEN p_value IS NOT NULL THEN p_value::TIMESTAMPTZ END; +$$; + +CREATE OR REPLACE FUNCTION public.commit_schedule__sync_set_artists( + p_set_id UUID, + p_festival_edition_id UUID, + p_artist_slugs JSONB +) +RETURNS VOID +LANGUAGE plpgsql +SET search_path = public +AS $$ +BEGIN + -- Edition-scoped delete defends against a forged set id even if the caller + -- already verified it. 
+ DELETE FROM set_artists sa + USING sets s + WHERE sa.set_id = s.id + AND s.id = p_set_id + AND s.festival_edition_id = p_festival_edition_id; + + INSERT INTO set_artists (set_id, artist_id) + SELECT p_set_id, a.id + FROM jsonb_array_elements_text(p_artist_slugs) AS slug_val + JOIN artists a ON a.slug = slug_val + ON CONFLICT (set_id, artist_id) DO NOTHING; +END; +$$; + -- RPC: commit_schedule -- Executes a fully resolved schedule import inside a single transaction. -- Called by the commit-schedule Edge Function using the service role key. @@ -65,22 +132,11 @@ BEGIN SET name = v_set_elem->>'name', description = NULLIF(v_set_elem->>'description', ''), - stage_id = ( - SELECT s.id FROM stages s - WHERE s.festival_edition_id = p_festival_edition_id - AND s.name = v_set_elem->>'stageName' - LIMIT 1 + stage_id = commit_schedule__resolve_stage_id( + p_festival_edition_id, v_set_elem->>'stageName' ), - time_start = CASE - WHEN (v_set_elem->>'timeStart') IS NOT NULL - THEN (v_set_elem->>'timeStart')::TIMESTAMPTZ - ELSE NULL - END, - time_end = CASE - WHEN (v_set_elem->>'timeEnd') IS NOT NULL - THEN (v_set_elem->>'timeEnd')::TIMESTAMPTZ - ELSE NULL - END, + time_start = commit_schedule__parse_ts(v_set_elem->>'timeStart'), + time_end = commit_schedule__parse_ts(v_set_elem->>'timeEnd'), updated_at = NOW() WHERE id = v_set_id AND festival_edition_id = p_festival_edition_id; @@ -93,20 +149,9 @@ BEGIN v_sets_updated := v_sets_updated + v_row_count; - -- Sync set_artists: delete existing links and re-insert from CSV. - -- The DELETE is scoped via the sets table to enforce edition isolation, - -- defending against a forged set id even though the UPDATE above already verified it. 
- DELETE FROM set_artists sa - USING sets s - WHERE sa.set_id = s.id - AND s.id = v_set_id - AND s.festival_edition_id = p_festival_edition_id; - - INSERT INTO set_artists (set_id, artist_id) - SELECT v_set_id, a.id - FROM jsonb_array_elements_text(v_set_elem->'artistSlugs') AS slug_val - JOIN artists a ON a.slug = slug_val - ON CONFLICT (set_id, artist_id) DO NOTHING; + PERFORM commit_schedule__sync_set_artists( + v_set_id, p_festival_edition_id, v_set_elem->'artistSlugs' + ); END LOOP; -- 4. Insert new sets @@ -118,40 +163,22 @@ BEGIN VALUES ( p_festival_edition_id, v_set_elem->>'name', - LOWER( - REGEXP_REPLACE( - REGEXP_REPLACE(TRIM(v_set_elem->>'name'), '[^a-zA-Z0-9\s]', '', 'g'), - '\s+', '-', 'g' - ) - ), + commit_schedule__slugify(v_set_elem->>'name'), NULLIF(v_set_elem->>'description', ''), - ( - SELECT s.id FROM stages s - WHERE s.festival_edition_id = p_festival_edition_id - AND s.name = v_set_elem->>'stageName' - LIMIT 1 + commit_schedule__resolve_stage_id( + p_festival_edition_id, v_set_elem->>'stageName' ), - CASE - WHEN (v_set_elem->>'timeStart') IS NOT NULL - THEN (v_set_elem->>'timeStart')::TIMESTAMPTZ - ELSE NULL - END, - CASE - WHEN (v_set_elem->>'timeEnd') IS NOT NULL - THEN (v_set_elem->>'timeEnd')::TIMESTAMPTZ - ELSE NULL - END, + commit_schedule__parse_ts(v_set_elem->>'timeStart'), + commit_schedule__parse_ts(v_set_elem->>'timeEnd'), p_user_id ) RETURNING id INTO v_new_set_id; v_sets_created := v_sets_created + 1; - INSERT INTO set_artists (set_id, artist_id) - SELECT v_new_set_id, a.id - FROM jsonb_array_elements_text(v_set_elem->'artistSlugs') AS slug_val - JOIN artists a ON a.slug = slug_val - ON CONFLICT (set_id, artist_id) DO NOTHING; + PERFORM commit_schedule__sync_set_artists( + v_new_set_id, p_festival_edition_id, v_set_elem->'artistSlugs' + ); END LOOP; -- 5. 
Archive orphaned sets From 85c28b72c2b2fc4a5709ea1dfa2ebeb25b229e0c Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 10 May 2026 05:32:28 +0000 Subject: [PATCH 21/23] fix(test): exclude supabase tests in vitest config and stub supabase env Two unrelated test infra fixes that were both pre-existing: - Vitest reads vitest.config.ts when present, which overrides the test block in vite.config.ts. The previous fix added 'supabase/**' to vite.config.ts only, so the Deno tests in supabase/functions/ kept getting picked up. Move the exclude into vitest.config.ts and drop the dead block in vite.config.ts. - The Supabase client throws at module init when the env vars are missing. Component tests that mock the query hooks still trigger that init through the import graph. Stub the two vars in src/test/setup.ts so the client can construct (it's never actually called). --- src/test/setup.ts | 18 ++++++++++++++++-- vite.config.ts | 4 ---- vitest.config.ts | 1 + 3 files changed, 17 insertions(+), 6 deletions(-) diff --git a/src/test/setup.ts b/src/test/setup.ts index 0c6b74ba..7d9033dc 100644 --- a/src/test/setup.ts +++ b/src/test/setup.ts @@ -1,8 +1,19 @@ import "@testing-library/jest-dom/vitest"; +import { vi } from "vitest"; + +// Stub the Supabase env vars so the client module can initialise even when +// VITE_SUPABASE_URL / VITE_SUPABASE_PUBLISHABLE_KEY aren't set in the test +// environment. Tests that exercise data fetching mock the relevant query +// hooks; the client itself never actually issues a request. 
+vi.stubEnv("VITE_SUPABASE_URL", "http://localhost:54321"); +vi.stubEnv("VITE_SUPABASE_PUBLISHABLE_KEY", "test-anon-key"); // Polyfill for ArrayBuffer.prototype.resizable and SharedArrayBuffer.prototype.growable // These are needed by webidl-conversions package -if (typeof ArrayBuffer !== "undefined" && !Object.getOwnPropertyDescriptor(ArrayBuffer.prototype, "resizable")) { +if ( + typeof ArrayBuffer !== "undefined" && + !Object.getOwnPropertyDescriptor(ArrayBuffer.prototype, "resizable") +) { Object.defineProperty(ArrayBuffer.prototype, "resizable", { get() { return false; @@ -11,7 +22,10 @@ if (typeof ArrayBuffer !== "undefined" && !Object.getOwnPropertyDescriptor(Array }); } -if (typeof SharedArrayBuffer !== "undefined" && !Object.getOwnPropertyDescriptor(SharedArrayBuffer.prototype, "growable")) { +if ( + typeof SharedArrayBuffer !== "undefined" && + !Object.getOwnPropertyDescriptor(SharedArrayBuffer.prototype, "growable") +) { Object.defineProperty(SharedArrayBuffer.prototype, "growable", { get() { return false; diff --git a/vite.config.ts b/vite.config.ts index b73b439c..74be376c 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -6,10 +6,6 @@ import { TanStackRouterVite } from "@tanstack/router-vite-plugin"; // https://vitejs.dev/config/ export default defineConfig(({ mode }) => ({ - test: { - exclude: ["supabase/**", "tests/e2e/**", "node_modules/**"], - passWithNoTests: true, - }, server: { host: "::", port: 8080, diff --git a/vitest.config.ts b/vitest.config.ts index e8ed08e3..7520c2f2 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -17,6 +17,7 @@ export default defineConfig({ "**/.{idea,git,cache,output,temp}/**", "**/{karma,rollup,webpack,vite,vitest,jest,ava,babel,nyc,cypress,tsup,build,eslint,prettier}.config.*", "**/tests/e2e/**", // Exclude Playwright E2E tests + "supabase/**", // Exclude Deno-only Edge Function tests ], }, resolve: { From 8a2d46548278b66ec6ca1b95a357749f8244c56b Mon Sep 17 00:00:00 2001 From: Chaim Lev-Ari Date: Mon, 11 
May 2026 07:51:37 +0300 Subject: [PATCH 22/23] fix(lint): convert arrow function and drop unused destructured data Two pre-existing oxlint failures the project-wide lint surfaced: - scheduleImportService.parseScheduleCsv had an arrow-function const helper, which the func-style rule rejects. - commit-schedule.test.ts midnight-crossing test destructured data but only asserted on error. --- src/services/scheduleImportService.ts | 30 ++++++++++++++----- .../commit-schedule/commit-schedule.test.ts | 2 +- 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/src/services/scheduleImportService.ts b/src/services/scheduleImportService.ts index 841e2452..9b5bcf0f 100644 --- a/src/services/scheduleImportService.ts +++ b/src/services/scheduleImportService.ts @@ -89,7 +89,9 @@ export function parseScheduleCsv(csvContent: string): CsvRow[] { const headers = lines[0].map((h) => h.trim().toLowerCase()); - const col = (name: string) => headers.indexOf(name); + function col(name: string) { + return headers.indexOf(name); + } const artistsCol = col("artists"); const setNameCol = col("set name"); const stageCol = col("stage"); @@ -98,10 +100,11 @@ export function parseScheduleCsv(csvContent: string): CsvRow[] { const endTimeCol = col("end time"); const descriptionCol = col("description"); - return lines.slice(1) + return lines + .slice(1) .filter((row) => row.some((cell) => cell.trim())) .map((row) => { - const artistsRaw = artistsCol >= 0 ? row[artistsCol] ?? "" : ""; + const artistsRaw = artistsCol >= 0 ? (row[artistsCol] ?? "") : ""; const artists = artistsRaw .split("|") .map((a) => a.trim()) @@ -109,12 +112,20 @@ export function parseScheduleCsv(csvContent: string): CsvRow[] { return { artists, - setName: setNameCol >= 0 ? row[setNameCol]?.trim() || undefined : undefined, + setName: + setNameCol >= 0 ? row[setNameCol]?.trim() || undefined : undefined, stage: stageCol >= 0 ? row[stageCol]?.trim() || undefined : undefined, date: dateCol >= 0 ? 
row[dateCol]?.trim() || undefined : undefined, - startTime: startTimeCol >= 0 ? row[startTimeCol]?.trim() || undefined : undefined, - endTime: endTimeCol >= 0 ? row[endTimeCol]?.trim() || undefined : undefined, - description: descriptionCol >= 0 ? row[descriptionCol]?.trim() || undefined : undefined, + startTime: + startTimeCol >= 0 + ? row[startTimeCol]?.trim() || undefined + : undefined, + endTime: + endTimeCol >= 0 ? row[endTimeCol]?.trim() || undefined : undefined, + description: + descriptionCol >= 0 + ? row[descriptionCol]?.trim() || undefined + : undefined, }; }) .filter((row) => row.artists.length > 0); @@ -170,7 +181,10 @@ export function buildCommitPayload( return { artistsToCreate: diff.cleanOperations.artistsToCreate, - stagesToCreate: [...diff.cleanOperations.stagesToCreate, ...extraStagesToCreate], + stagesToCreate: [ + ...diff.cleanOperations.stagesToCreate, + ...extraStagesToCreate, + ], setsToCreate: diff.cleanOperations.setsToCreate.map((s) => ({ ...s, stageName: resolveSetStageName(s), diff --git a/supabase/functions/commit-schedule/commit-schedule.test.ts b/supabase/functions/commit-schedule/commit-schedule.test.ts index 12ca955c..2f273f83 100644 --- a/supabase/functions/commit-schedule/commit-schedule.test.ts +++ b/supabase/functions/commit-schedule/commit-schedule.test.ts @@ -208,7 +208,7 @@ Deno.test( await db.from("artists").insert({ name: "Late Night DJ", slug }); - const { data, error } = await db.rpc("commit_schedule", { + const { error } = await db.rpc("commit_schedule", { p_festival_edition_id: editionId, p_user_id: userId, p_artists_to_create: [], From f9637b1fcd72db9e8c5c6868c0119dd299e218c3 Mon Sep 17 00:00:00 2001 From: Chaim Lev-Ari Date: Mon, 11 May 2026 07:52:11 +0300 Subject: [PATCH 23/23] fix(rpc): make commit_schedule migration idempotent and dedup stages MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The migration was failing on staging because (a) the artists.slug dedup suffix 
wasn't guaranteed unique — using just the first 6 chars of the id can still collide — and (b) stages had duplicate (edition, name) pairs in prod that blocked the new unique constraint outright. Switch both dedups to append the full id, which is guaranteed unique. Add a stages dedup mirroring the artists one. Wrap both ADD CONSTRAINT statements in DO blocks that skip if a constraint of the same name (or the equivalent stages_name_festival_edition_id_key from PR #28) already exists, so the migration is safe to re-run. --- .../20260509142022_commit_schedule_rpc.sql | 63 ++++++++++++++----- 1 file changed, 46 insertions(+), 17 deletions(-) diff --git a/supabase/migrations/20260509142022_commit_schedule_rpc.sql b/supabase/migrations/20260509142022_commit_schedule_rpc.sql index 7efec330..a3d6c6af 100644 --- a/supabase/migrations/20260509142022_commit_schedule_rpc.sql +++ b/supabase/migrations/20260509142022_commit_schedule_rpc.sql @@ -1,24 +1,53 @@ --- Add unique constraint on artists.slug (required for ON CONFLICT upsert in commit_schedule) --- First deduplicate any existing conflicting slugs by appending the short ID -WITH duplicates AS ( - SELECT slug, MIN(id) AS keep_id - FROM public.artists - GROUP BY slug - HAVING COUNT(*) > 1 -) +-- Add unique constraint on artists.slug (required for ON CONFLICT upsert in commit_schedule). +-- Deduplicate first: append the full id (guaranteed unique) to any slug with collisions, +-- keeping the row with the lowest id on its original slug. 
UPDATE public.artists a -SET slug = a.slug || '-' || SUBSTRING(a.id::text, 1, 6) -WHERE EXISTS ( - SELECT 1 FROM duplicates d - WHERE d.slug = a.slug AND a.id != d.keep_id +SET slug = a.slug || '-' || a.id::text +WHERE a.id IN ( + SELECT id + FROM ( + SELECT id, ROW_NUMBER() OVER (PARTITION BY slug ORDER BY id) AS rn + FROM public.artists + ) ranked + WHERE rn > 1 ); -ALTER TABLE public.artists - ADD CONSTRAINT artists_slug_unique UNIQUE (slug); +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_constraint WHERE conname = 'artists_slug_unique' + ) THEN + ALTER TABLE public.artists + ADD CONSTRAINT artists_slug_unique UNIQUE (slug); + END IF; +END$$; + +-- Add unique constraint on stages(festival_edition_id, name) for upsert. +-- Same dedup approach: any (edition, name) collisions get the offending row's +-- id suffixed onto the stage name. +UPDATE public.stages s +SET name = s.name || ' (' || s.id::text || ')' +WHERE s.id IN ( + SELECT id + FROM ( + SELECT id, + ROW_NUMBER() OVER (PARTITION BY festival_edition_id, name ORDER BY id) AS rn + FROM public.stages + ) ranked + WHERE rn > 1 +); --- Add unique constraint on stages(festival_edition_id, name) for upsert -ALTER TABLE public.stages - ADD CONSTRAINT stages_edition_name_unique UNIQUE (festival_edition_id, name); +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 + FROM pg_constraint + WHERE conname IN ('stages_edition_name_unique', 'stages_name_festival_edition_id_key') + ) THEN + ALTER TABLE public.stages + ADD CONSTRAINT stages_edition_name_unique UNIQUE (festival_edition_id, name); + END IF; +END$$; -- Helpers for commit_schedule. Named with the commit_schedule__ prefix so it -- is obvious they're internal to that RPC.