diff --git a/src/pages/admin/festivals/SetsManagement/SetManagement.tsx b/src/pages/admin/festivals/SetsManagement/SetManagement.tsx
index fbb5ff5d..0578eca6 100644
--- a/src/pages/admin/festivals/SetsManagement/SetManagement.tsx
+++ b/src/pages/admin/festivals/SetsManagement/SetManagement.tsx
@@ -1,8 +1,8 @@
import { useState } from "react";
-import { Link, useParams } from "@tanstack/react-router";
+import { useParams } from "@tanstack/react-router";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
-import { Loader2, Plus, Music, Upload } from "lucide-react";
+import { Loader2, Plus, Music } from "lucide-react";
import { FestivalSet } from "@/hooks/queries/sets/useSets";
import { useSetsByEditionQuery } from "@/hooks/queries/sets/useSetsByEdition";
import { useDeleteSetMutation } from "@/hooks/queries/sets/useDeleteSet";
@@ -72,19 +72,6 @@ export function SetManagement() {
Set Management
-
-
-
- Import CSV
-
-
Stage Management
-
-
-
-
- Import CSV
-
-
-
-
+
diff --git a/src/routeTree.gen.ts b/src/routeTree.gen.ts
index ee6778fd..47dc86ce 100644
--- a/src/routeTree.gen.ts
+++ b/src/routeTree.gen.ts
@@ -22,7 +22,6 @@ import { Route as AdminArtistsRouteImport } from './routes/admin/artists'
import { Route as AdminAnalyticsRouteImport } from './routes/admin/analytics'
import { Route as AdminAdminsRouteImport } from './routes/admin/admins'
import { Route as FestivalsFestivalSlugIndexRouteImport } from './routes/festivals/$festivalSlug/index'
-import { Route as AdminFestivalsImportRouteImport } from './routes/admin/festivals/import'
import { Route as AdminFestivalsFestivalSlugRouteImport } from './routes/admin/festivals/$festivalSlug'
import { Route as AdminArtistsDuplicatesRouteImport } from './routes/admin/artists/duplicates'
import { Route as FestivalsFestivalSlugEditionsEditionSlugRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug'
@@ -33,13 +32,13 @@ import { Route as FestivalsFestivalSlugEditionsEditionSlugMapRouteImport } from
import { Route as FestivalsFestivalSlugEditionsEditionSlugInfoRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/info'
import { Route as FestivalsFestivalSlugEditionsEditionSlugExploreRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/explore'
import { Route as AdminFestivalsFestivalSlugEditionsEditionSlugRouteImport } from './routes/admin/festivals/$festivalSlug/editions/$editionSlug'
-import { Route as AdminFestivalsFestivalIdEditionIdImportRouteImport } from './routes/admin/festivals/$festivalId.$editionId.import'
import { Route as FestivalsFestivalSlugEditionsEditionSlugSetsIndexRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/sets/index'
import { Route as FestivalsFestivalSlugEditionsEditionSlugSetsSetSlugRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/sets/$setSlug'
import { Route as FestivalsFestivalSlugEditionsEditionSlugScheduleTimelineRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/schedule/timeline'
import { Route as FestivalsFestivalSlugEditionsEditionSlugScheduleListRouteImport } from './routes/festivals/$festivalSlug/editions/$editionSlug/schedule/list'
import { Route as AdminFestivalsFestivalSlugEditionsEditionSlugStagesRouteImport } from './routes/admin/festivals/$festivalSlug/editions/$editionSlug/stages'
import { Route as AdminFestivalsFestivalSlugEditionsEditionSlugSetsRouteImport } from './routes/admin/festivals/$festivalSlug/editions/$editionSlug/sets'
+import { Route as AdminFestivalsFestivalSlugEditionsEditionSlugImportRouteImport } from './routes/admin/festivals/$festivalSlug/editions/$editionSlug/import'
const TermsRoute = TermsRouteImport.update({
id: '/terms',
@@ -107,11 +106,6 @@ const FestivalsFestivalSlugIndexRoute =
path: '/',
getParentRoute: () => FestivalsFestivalSlugRoute,
} as any)
-const AdminFestivalsImportRoute = AdminFestivalsImportRouteImport.update({
- id: '/import',
- path: '/import',
- getParentRoute: () => AdminFestivalsRoute,
-} as any)
const AdminFestivalsFestivalSlugRoute =
AdminFestivalsFestivalSlugRouteImport.update({
id: '/$festivalSlug',
@@ -171,12 +165,6 @@ const AdminFestivalsFestivalSlugEditionsEditionSlugRoute =
path: '/editions/$editionSlug',
getParentRoute: () => AdminFestivalsFestivalSlugRoute,
} as any)
-const AdminFestivalsFestivalIdEditionIdImportRoute =
- AdminFestivalsFestivalIdEditionIdImportRouteImport.update({
- id: '/$festivalId/$editionId/import',
- path: '/$festivalId/$editionId/import',
- getParentRoute: () => AdminFestivalsRoute,
- } as any)
const FestivalsFestivalSlugEditionsEditionSlugSetsIndexRoute =
FestivalsFestivalSlugEditionsEditionSlugSetsIndexRouteImport.update({
id: '/',
@@ -213,6 +201,12 @@ const AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute =
path: '/sets',
getParentRoute: () => AdminFestivalsFestivalSlugEditionsEditionSlugRoute,
} as any)
+const AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute =
+ AdminFestivalsFestivalSlugEditionsEditionSlugImportRouteImport.update({
+ id: '/import',
+ path: '/import',
+ getParentRoute: () => AdminFestivalsFestivalSlugEditionsEditionSlugRoute,
+ } as any)
export interface FileRoutesByFullPath {
'/': typeof IndexRoute
@@ -229,10 +223,8 @@ export interface FileRoutesByFullPath {
'/groups': typeof GroupsIndexRoute
'/admin/artists/duplicates': typeof AdminArtistsDuplicatesRoute
'/admin/festivals/$festivalSlug': typeof AdminFestivalsFestivalSlugRouteWithChildren
- '/admin/festivals/import': typeof AdminFestivalsImportRoute
'/festivals/$festivalSlug/': typeof FestivalsFestivalSlugIndexRoute
'/festivals/$festivalSlug/editions/$editionSlug': typeof FestivalsFestivalSlugEditionsEditionSlugRouteWithChildren
- '/admin/festivals/$festivalId/$editionId/import': typeof AdminFestivalsFestivalIdEditionIdImportRoute
'/admin/festivals/$festivalSlug/editions/$editionSlug': typeof AdminFestivalsFestivalSlugEditionsEditionSlugRouteWithChildren
'/festivals/$festivalSlug/editions/$editionSlug/explore': typeof FestivalsFestivalSlugEditionsEditionSlugExploreRoute
'/festivals/$festivalSlug/editions/$editionSlug/info': typeof FestivalsFestivalSlugEditionsEditionSlugInfoRoute
@@ -240,6 +232,7 @@ export interface FileRoutesByFullPath {
'/festivals/$festivalSlug/editions/$editionSlug/schedule': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleRouteWithChildren
'/festivals/$festivalSlug/editions/$editionSlug/sets': typeof FestivalsFestivalSlugEditionsEditionSlugSetsRouteWithChildren
'/festivals/$festivalSlug/editions/$editionSlug/social': typeof FestivalsFestivalSlugEditionsEditionSlugSocialRoute
+ '/admin/festivals/$festivalSlug/editions/$editionSlug/import': typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute
'/admin/festivals/$festivalSlug/editions/$editionSlug/sets': typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute
'/admin/festivals/$festivalSlug/editions/$editionSlug/stages': typeof AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute
'/festivals/$festivalSlug/editions/$editionSlug/schedule/list': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleListRoute
@@ -261,16 +254,15 @@ export interface FileRoutesByTo {
'/groups': typeof GroupsIndexRoute
'/admin/artists/duplicates': typeof AdminArtistsDuplicatesRoute
'/admin/festivals/$festivalSlug': typeof AdminFestivalsFestivalSlugRouteWithChildren
- '/admin/festivals/import': typeof AdminFestivalsImportRoute
'/festivals/$festivalSlug': typeof FestivalsFestivalSlugIndexRoute
'/festivals/$festivalSlug/editions/$editionSlug': typeof FestivalsFestivalSlugEditionsEditionSlugRouteWithChildren
- '/admin/festivals/$festivalId/$editionId/import': typeof AdminFestivalsFestivalIdEditionIdImportRoute
'/admin/festivals/$festivalSlug/editions/$editionSlug': typeof AdminFestivalsFestivalSlugEditionsEditionSlugRouteWithChildren
'/festivals/$festivalSlug/editions/$editionSlug/explore': typeof FestivalsFestivalSlugEditionsEditionSlugExploreRoute
'/festivals/$festivalSlug/editions/$editionSlug/info': typeof FestivalsFestivalSlugEditionsEditionSlugInfoRoute
'/festivals/$festivalSlug/editions/$editionSlug/map': typeof FestivalsFestivalSlugEditionsEditionSlugMapRoute
'/festivals/$festivalSlug/editions/$editionSlug/schedule': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleRouteWithChildren
'/festivals/$festivalSlug/editions/$editionSlug/social': typeof FestivalsFestivalSlugEditionsEditionSlugSocialRoute
+ '/admin/festivals/$festivalSlug/editions/$editionSlug/import': typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute
'/admin/festivals/$festivalSlug/editions/$editionSlug/sets': typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute
'/admin/festivals/$festivalSlug/editions/$editionSlug/stages': typeof AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute
'/festivals/$festivalSlug/editions/$editionSlug/schedule/list': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleListRoute
@@ -294,10 +286,8 @@ export interface FileRoutesById {
'/groups/': typeof GroupsIndexRoute
'/admin/artists/duplicates': typeof AdminArtistsDuplicatesRoute
'/admin/festivals/$festivalSlug': typeof AdminFestivalsFestivalSlugRouteWithChildren
- '/admin/festivals/import': typeof AdminFestivalsImportRoute
'/festivals/$festivalSlug/': typeof FestivalsFestivalSlugIndexRoute
'/festivals/$festivalSlug/editions/$editionSlug': typeof FestivalsFestivalSlugEditionsEditionSlugRouteWithChildren
- '/admin/festivals/$festivalId/$editionId/import': typeof AdminFestivalsFestivalIdEditionIdImportRoute
'/admin/festivals/$festivalSlug/editions/$editionSlug': typeof AdminFestivalsFestivalSlugEditionsEditionSlugRouteWithChildren
'/festivals/$festivalSlug/editions/$editionSlug/explore': typeof FestivalsFestivalSlugEditionsEditionSlugExploreRoute
'/festivals/$festivalSlug/editions/$editionSlug/info': typeof FestivalsFestivalSlugEditionsEditionSlugInfoRoute
@@ -305,6 +295,7 @@ export interface FileRoutesById {
'/festivals/$festivalSlug/editions/$editionSlug/schedule': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleRouteWithChildren
'/festivals/$festivalSlug/editions/$editionSlug/sets': typeof FestivalsFestivalSlugEditionsEditionSlugSetsRouteWithChildren
'/festivals/$festivalSlug/editions/$editionSlug/social': typeof FestivalsFestivalSlugEditionsEditionSlugSocialRoute
+ '/admin/festivals/$festivalSlug/editions/$editionSlug/import': typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute
'/admin/festivals/$festivalSlug/editions/$editionSlug/sets': typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute
'/admin/festivals/$festivalSlug/editions/$editionSlug/stages': typeof AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute
'/festivals/$festivalSlug/editions/$editionSlug/schedule/list': typeof FestivalsFestivalSlugEditionsEditionSlugScheduleListRoute
@@ -329,10 +320,8 @@ export interface FileRouteTypes {
| '/groups'
| '/admin/artists/duplicates'
| '/admin/festivals/$festivalSlug'
- | '/admin/festivals/import'
| '/festivals/$festivalSlug/'
| '/festivals/$festivalSlug/editions/$editionSlug'
- | '/admin/festivals/$festivalId/$editionId/import'
| '/admin/festivals/$festivalSlug/editions/$editionSlug'
| '/festivals/$festivalSlug/editions/$editionSlug/explore'
| '/festivals/$festivalSlug/editions/$editionSlug/info'
@@ -340,6 +329,7 @@ export interface FileRouteTypes {
| '/festivals/$festivalSlug/editions/$editionSlug/schedule'
| '/festivals/$festivalSlug/editions/$editionSlug/sets'
| '/festivals/$festivalSlug/editions/$editionSlug/social'
+ | '/admin/festivals/$festivalSlug/editions/$editionSlug/import'
| '/admin/festivals/$festivalSlug/editions/$editionSlug/sets'
| '/admin/festivals/$festivalSlug/editions/$editionSlug/stages'
| '/festivals/$festivalSlug/editions/$editionSlug/schedule/list'
@@ -361,16 +351,15 @@ export interface FileRouteTypes {
| '/groups'
| '/admin/artists/duplicates'
| '/admin/festivals/$festivalSlug'
- | '/admin/festivals/import'
| '/festivals/$festivalSlug'
| '/festivals/$festivalSlug/editions/$editionSlug'
- | '/admin/festivals/$festivalId/$editionId/import'
| '/admin/festivals/$festivalSlug/editions/$editionSlug'
| '/festivals/$festivalSlug/editions/$editionSlug/explore'
| '/festivals/$festivalSlug/editions/$editionSlug/info'
| '/festivals/$festivalSlug/editions/$editionSlug/map'
| '/festivals/$festivalSlug/editions/$editionSlug/schedule'
| '/festivals/$festivalSlug/editions/$editionSlug/social'
+ | '/admin/festivals/$festivalSlug/editions/$editionSlug/import'
| '/admin/festivals/$festivalSlug/editions/$editionSlug/sets'
| '/admin/festivals/$festivalSlug/editions/$editionSlug/stages'
| '/festivals/$festivalSlug/editions/$editionSlug/schedule/list'
@@ -393,10 +382,8 @@ export interface FileRouteTypes {
| '/groups/'
| '/admin/artists/duplicates'
| '/admin/festivals/$festivalSlug'
- | '/admin/festivals/import'
| '/festivals/$festivalSlug/'
| '/festivals/$festivalSlug/editions/$editionSlug'
- | '/admin/festivals/$festivalId/$editionId/import'
| '/admin/festivals/$festivalSlug/editions/$editionSlug'
| '/festivals/$festivalSlug/editions/$editionSlug/explore'
| '/festivals/$festivalSlug/editions/$editionSlug/info'
@@ -404,6 +391,7 @@ export interface FileRouteTypes {
| '/festivals/$festivalSlug/editions/$editionSlug/schedule'
| '/festivals/$festivalSlug/editions/$editionSlug/sets'
| '/festivals/$festivalSlug/editions/$editionSlug/social'
+ | '/admin/festivals/$festivalSlug/editions/$editionSlug/import'
| '/admin/festivals/$festivalSlug/editions/$editionSlug/sets'
| '/admin/festivals/$festivalSlug/editions/$editionSlug/stages'
| '/festivals/$festivalSlug/editions/$editionSlug/schedule/list'
@@ -516,13 +504,6 @@ declare module '@tanstack/react-router' {
preLoaderRoute: typeof FestivalsFestivalSlugIndexRouteImport
parentRoute: typeof FestivalsFestivalSlugRoute
}
- '/admin/festivals/import': {
- id: '/admin/festivals/import'
- path: '/import'
- fullPath: '/admin/festivals/import'
- preLoaderRoute: typeof AdminFestivalsImportRouteImport
- parentRoute: typeof AdminFestivalsRoute
- }
'/admin/festivals/$festivalSlug': {
id: '/admin/festivals/$festivalSlug'
path: '/$festivalSlug'
@@ -593,13 +574,6 @@ declare module '@tanstack/react-router' {
preLoaderRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugRouteImport
parentRoute: typeof AdminFestivalsFestivalSlugRoute
}
- '/admin/festivals/$festivalId/$editionId/import': {
- id: '/admin/festivals/$festivalId/$editionId/import'
- path: '/$festivalId/$editionId/import'
- fullPath: '/admin/festivals/$festivalId/$editionId/import'
- preLoaderRoute: typeof AdminFestivalsFestivalIdEditionIdImportRouteImport
- parentRoute: typeof AdminFestivalsRoute
- }
'/festivals/$festivalSlug/editions/$editionSlug/sets/': {
id: '/festivals/$festivalSlug/editions/$editionSlug/sets/'
path: '/'
@@ -642,6 +616,13 @@ declare module '@tanstack/react-router' {
preLoaderRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRouteImport
parentRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugRoute
}
+ '/admin/festivals/$festivalSlug/editions/$editionSlug/import': {
+ id: '/admin/festivals/$festivalSlug/editions/$editionSlug/import'
+ path: '/import'
+ fullPath: '/admin/festivals/$festivalSlug/editions/$editionSlug/import'
+ preLoaderRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRouteImport
+ parentRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugRoute
+ }
}
}
@@ -658,12 +639,15 @@ const AdminArtistsRouteWithChildren = AdminArtistsRoute._addFileChildren(
)
interface AdminFestivalsFestivalSlugEditionsEditionSlugRouteChildren {
+ AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute
AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute
AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute: typeof AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute
}
const AdminFestivalsFestivalSlugEditionsEditionSlugRouteChildren: AdminFestivalsFestivalSlugEditionsEditionSlugRouteChildren =
{
+ AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute:
+ AdminFestivalsFestivalSlugEditionsEditionSlugImportRoute,
AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute:
AdminFestivalsFestivalSlugEditionsEditionSlugSetsRoute,
AdminFestivalsFestivalSlugEditionsEditionSlugStagesRoute:
@@ -692,15 +676,10 @@ const AdminFestivalsFestivalSlugRouteWithChildren =
interface AdminFestivalsRouteChildren {
AdminFestivalsFestivalSlugRoute: typeof AdminFestivalsFestivalSlugRouteWithChildren
- AdminFestivalsImportRoute: typeof AdminFestivalsImportRoute
- AdminFestivalsFestivalIdEditionIdImportRoute: typeof AdminFestivalsFestivalIdEditionIdImportRoute
}
const AdminFestivalsRouteChildren: AdminFestivalsRouteChildren = {
AdminFestivalsFestivalSlugRoute: AdminFestivalsFestivalSlugRouteWithChildren,
- AdminFestivalsImportRoute: AdminFestivalsImportRoute,
- AdminFestivalsFestivalIdEditionIdImportRoute:
- AdminFestivalsFestivalIdEditionIdImportRoute,
}
const AdminFestivalsRouteWithChildren = AdminFestivalsRoute._addFileChildren(
diff --git a/src/routes/admin/festivals/$festivalId.$editionId.import.tsx b/src/routes/admin/festivals/$festivalId.$editionId.import.tsx
deleted file mode 100644
index 90715693..00000000
--- a/src/routes/admin/festivals/$festivalId.$editionId.import.tsx
+++ /dev/null
@@ -1,14 +0,0 @@
-import { createFileRoute } from "@tanstack/react-router";
-import { z } from "zod";
-import { CSVImportPage } from "@/pages/admin/festivals/CSVImportPage";
-
-const importSearchSchema = z.object({
- tab: z.enum(["stages", "sets"]).optional(),
-});
-
-export const Route = createFileRoute(
- "/admin/festivals/$festivalId/$editionId/import",
-)({
- component: CSVImportPage,
- validateSearch: importSearchSchema,
-});
diff --git a/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx b/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx
new file mode 100644
index 00000000..8eb9a033
--- /dev/null
+++ b/src/routes/admin/festivals/$festivalSlug/editions/$editionSlug/import.tsx
@@ -0,0 +1,24 @@
+import { createFileRoute } from "@tanstack/react-router";
+import { editionsKeys } from "@/hooks/queries/festivals/editions/types";
+import { fetchFestivalEditionBySlug } from "@/hooks/queries/festivals/editions/useFestivalEditionBySlug";
+import { ScheduleImportWizard } from "@/components/Admin/ScheduleImport/ScheduleImportWizard";
+
+export const Route = createFileRoute(
+ "/admin/festivals/$festivalSlug/editions/$editionSlug/import",
+)({
+ loader: ({ params, context }) =>
+ context.queryClient.ensureQueryData({
+ queryKey: editionsKeys.bySlug(params.festivalSlug, params.editionSlug),
+ queryFn: () =>
+ fetchFestivalEditionBySlug({
+ festivalSlug: params.festivalSlug,
+ editionSlug: params.editionSlug,
+ }),
+ }),
+ component: FestivalScheduleImport,
+});
+
+function FestivalScheduleImport() {
+ const edition = Route.useLoaderData();
+ return ;
+}
diff --git a/src/routes/admin/festivals/import.tsx b/src/routes/admin/festivals/import.tsx
deleted file mode 100644
index d7071bc0..00000000
--- a/src/routes/admin/festivals/import.tsx
+++ /dev/null
@@ -1,12 +0,0 @@
-import { createFileRoute } from "@tanstack/react-router";
-import { CSVImportPage } from "@/pages/admin/festivals/CSVImportPage";
-import { z } from "zod";
-
-const importSearchSchema = z.object({
- tab: z.enum(["sets", "stages"]).optional(),
-});
-
-export const Route = createFileRoute("/admin/festivals/import")({
- component: CSVImportPage,
- validateSearch: importSearchSchema,
-});
diff --git a/src/services/csv/csvParser.ts b/src/services/csv/csvParser.ts
deleted file mode 100644
index b3648f39..00000000
--- a/src/services/csv/csvParser.ts
+++ /dev/null
@@ -1,77 +0,0 @@
-export interface StageImportData {
- name: string;
-}
-
-export interface SetImportData {
- name?: string;
- stage_name: string;
- artist_names: string;
- time_start?: string;
- date_start?: string;
- time_end?: string;
- date_end?: string;
- description?: string;
-}
-
-export function parseCSV(csvContent: string): string[][] {
- const lines = csvContent.trim().split("\n");
- return lines.map((line) => {
- const result: string[] = [];
- let current = "";
- let inQuotes = false;
-
- for (let i = 0; i < line.length; i++) {
- const char = line[i];
-
- if (char === '"') {
- inQuotes = !inQuotes;
- } else if (char === "," && !inQuotes) {
- result.push(current.trim());
- current = "";
- } else {
- current += char;
- }
- }
-
- result.push(current.trim());
- return result.map((field) => field.replace(/^"|"$/g, ""));
- });
-}
-
-export function parseStagesCSV(csvContent: string): StageImportData[] {
- const lines = parseCSV(csvContent);
- const headers = lines[0] as Array;
-
- return lines.slice(1).map((line) => {
- const stage: Partial = {};
- headers.forEach((header, index) => {
- stage[header] = line[index] || "";
- });
- return stage as StageImportData;
- });
-}
-
-export function parseSetsCSV(csvContent: string): SetImportData[] {
- const lines = parseCSV(csvContent);
- const headers = lines[0];
-
- return lines.slice(1).map((line) => {
- const set: Partial = {};
- headers.forEach((header, index) => {
- const value = line[index] || "";
- if (
- header === "time_start" ||
- header === "time_end" ||
- header === "date_start" ||
- header === "date_end"
- ) {
- set[header as keyof SetImportData] = value || undefined;
- } else if (header === "name") {
- set[header as keyof SetImportData] = value || undefined;
- } else {
- set[header as keyof SetImportData] = value;
- }
- });
- return set as SetImportData;
- });
-}
diff --git a/src/services/csv/setDuplicator.ts b/src/services/csv/setDuplicator.ts
deleted file mode 100644
index 96300991..00000000
--- a/src/services/csv/setDuplicator.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-import { supabase } from "@/integrations/supabase/client";
-
-export async function duplicateSetWithVotes({
- newTimeEnd,
- newTimeStart,
- sourceSetId,
- description,
- stageId,
-}: {
- sourceSetId: string;
- newTimeStart: string;
- newTimeEnd: string;
- stageId?: string | null;
- description?: string | null;
-}): Promise {
- const params: {
- source_set_id: string;
- new_time_start: string;
- new_time_end: string;
- new_stage_id?: string | null;
- new_description?: string | null;
- } = {
- source_set_id: sourceSetId,
- new_time_start: newTimeStart,
- new_time_end: newTimeEnd,
- };
-
- if (stageId !== undefined) {
- params.new_stage_id = stageId;
- }
-
- if (description !== undefined) {
- params.new_description = description;
- }
-
- const { data, error } = await supabase.rpc(
- "duplicate_set_with_votes",
- params,
- );
-
- if (error) {
- throw new Error(`Failed to duplicate set: ${error.message}`);
- }
-
- if (!data) {
- throw new Error("No set ID returned from duplication");
- }
-
- return data as string;
-}
diff --git a/src/services/csv/setImporter.ts b/src/services/csv/setImporter.ts
deleted file mode 100644
index 8f1d44b3..00000000
--- a/src/services/csv/setImporter.ts
+++ /dev/null
@@ -1,342 +0,0 @@
-import { supabase } from "@/integrations/supabase/client";
-import { generateSlug } from "@/lib/slug";
-import { convertLocalTimeToUTC, combineDateAndTime } from "@/lib/timeUtils";
-import type { SetImportData } from "./csvParser";
-import type { ImportResult } from "./types";
-import type { SetSelection } from "@/pages/admin/festivals/CSVImportDialog/SetsPreviewTable";
-import { duplicateSetWithVotes } from "./setDuplicator";
-
-function generateSetNameFromArtists(artistNames: string[]): string {
- if (artistNames.length === 0) return "Unnamed Set";
- if (artistNames.length === 1) return artistNames[0];
- if (artistNames.length === 2) return `${artistNames[0]} & ${artistNames[1]}`;
- return `${artistNames[0]} & ${artistNames.length - 1} others`;
-}
-
-export interface ArtistMapping {
- csvName: string;
- artistId: string | null;
- shouldCreate: boolean;
-}
-
-async function importSetsWithArtistMap({
- artistMappings,
- editionId,
- sets,
- timezone = "UTC",
- onProgress,
- setSelections,
-}: {
- sets: SetImportData[];
- editionId: string;
- artistMappings: Map;
- setSelections?: Map;
- timezone?: string;
- onProgress?: (completed: number, total: number) => void;
-}): Promise {
- const currentUser = await supabase.auth.getUser();
- const userId = currentUser.data.user?.id || "";
-
- const results: Array = [];
- const errors: Array = [];
- const total = sets.length;
-
- for (let i = 0; i < sets.length; i++) {
- const set = sets[i];
- const setMappings = artistMappings.get(i);
- const setSelection = setSelections?.get(i);
-
- const response = await importSingleSet({
- importedSet: set,
- setMappings,
- setSelection,
- editionId,
- timezone,
- userId,
- });
-
- if (response.type === "error") {
- errors.push(...response.errors);
- continue;
- } else {
- results.push(response.setName);
- }
-
- onProgress?.(i + 1, total);
- }
-
- if (errors.length > 0 && results.length === 0) {
- return {
- success: false,
- message: "Failed to import sets",
- errors,
- };
- }
-
- return {
- success: true,
- message: `Successfully imported ${results.length} sets${errors.length > 0 ? ` (${errors.length} errors)` : ""}`,
- inserted: results.length,
- errors: errors.length > 0 ? errors : undefined,
- };
-}
-
-async function importSingleSet({
- importedSet,
- setMappings,
- userId,
- timezone,
- editionId,
- setSelection,
-}: {
- timezone: string;
- userId: string;
- importedSet: SetImportData;
- setMappings: ArtistMapping[] | undefined;
- editionId: string;
- setSelection: SetSelection | undefined;
-}): Promise<
- | {
- type: "error";
- errors: string[];
- }
- | {
- type: "success";
- setName: string;
- }
-> {
- const errors: string[] = [];
- try {
- if (!setMappings || setMappings.length === 0) {
- errors.push(
- `Set "${importedSet.name || "Unnamed"}" has no artist mappings`,
- );
- return { type: "error", errors };
- }
-
- const artistNames = setMappings.map((m) => m.csvName);
- const setName = importedSet.name || generateSetNameFromArtists(artistNames);
-
- const artistIds: string[] = [];
-
- for (const mapping of setMappings) {
- let artistId = mapping.artistId;
-
- if (!artistId && mapping.shouldCreate) {
- const { data: newArtist, error: createError } = await supabase
- .from("artists")
- .insert({
- name: mapping.csvName,
- slug: generateSlug(mapping.csvName),
- added_by: userId,
- })
- .select("id")
- .single();
-
- if (createError || !newArtist) {
- errors.push(
- `Failed to create artist "${mapping.csvName}": ${createError?.message || "No ID"}`,
- );
- continue;
- }
-
- artistId = newArtist.id;
- }
-
- if (!artistId) {
- errors.push(`Artist "${mapping.csvName}" could not be resolved`);
- continue;
- }
-
- artistIds.push(artistId);
- }
-
- if (artistIds.length === 0) {
- errors.push(
- `Set "${importedSet.name || "Unnamed"}" has no valid artists`,
- );
- return { type: "error", errors };
- }
-
- // Continue with set creation logic (same as original)
-
- let stageId = "";
- if (importedSet.stage_name) {
- const { data: stage, error: stageError } = await supabase
- .from("stages")
- .select("id")
- .eq("name", importedSet.stage_name)
- .eq("festival_edition_id", editionId)
- .single();
-
- if (stageError || !stage) {
- errors.push(
- `Stage "${importedSet.stage_name}" not found for set "${setName}"`,
- );
- return { type: "error", errors };
- }
-
- stageId = stage.id;
- }
-
- const timeStartInput =
- importedSet.date_start && importedSet.time_start
- ? combineDateAndTime(importedSet.date_start, importedSet.time_start)
- : importedSet.time_start;
- const timeEndInput =
- importedSet.date_end && importedSet.time_end
- ? combineDateAndTime(importedSet.date_end, importedSet.time_end)
- : importedSet.time_end;
-
- if (!timeStartInput) {
- errors.push("Missing time start");
- return { type: "error", errors };
- }
-
- if (!timeEndInput) {
- errors.push("Missing time end");
- return { type: "error", errors };
- }
-
- const utcTimeStart = convertLocalTimeToUTC(timeStartInput, timezone);
- const utcTimeEnd = convertLocalTimeToUTC(timeEndInput, timezone);
-
- if (!utcTimeEnd || !utcTimeStart) {
- errors.push("Time is not valid");
- return { type: "error", errors };
- }
-
- let createdSetId = "";
- let setError: Error | null = null;
-
- if (setSelection?.action === "match" && setSelection.matchedSetId) {
- createdSetId = setSelection.matchedSetId;
- const { error } = await supabase
- .from("sets")
- .update({
- stage_id: stageId || null,
- time_start: utcTimeStart,
- time_end: utcTimeEnd,
- description: importedSet.description || null,
- archived: false,
- })
- .eq("id", createdSetId);
-
- setError = error;
- } else if (
- setSelection?.action === "duplicate" &&
- setSelection.matchedSetId
- ) {
- try {
- createdSetId = await duplicateSetWithVotes({
- sourceSetId: setSelection.matchedSetId,
- newTimeStart: utcTimeStart!,
- newTimeEnd: utcTimeEnd!,
- stageId: stageId,
- description: importedSet.description,
- });
- } catch (error) {
- setError = error as Error;
- }
- } else {
- const { data, error } = await supabase
- .from("sets")
- .insert({
- name: setName,
- slug: generateSlug(setName),
- stage_id: stageId || null,
- festival_edition_id: editionId,
- time_start: utcTimeStart,
- time_end: utcTimeEnd,
- description: importedSet.description || null,
- archived: false,
- created_by: userId,
- })
- .select("id")
- .single();
-
- createdSetId = data?.id || "";
- setError = error;
- }
-
- if (setError || !createdSetId) {
- errors.push(
- `Failed to create set "${setName}": ${setError?.message || "No ID"}`,
- );
- return { type: "error", errors };
- }
-
- // Link artists to set
- for (const artistId of artistIds) {
- await supabase.from("set_artists").upsert(
- {
- set_id: createdSetId,
- artist_id: artistId,
- },
- {
- onConflict: "set_id,artist_id",
- ignoreDuplicates: true,
- },
- );
- }
-
- return { type: "success", setName };
- } catch (error) {
- errors.push(
- `Error processing set: ${error instanceof Error ? error.message : "Unknown error"}`,
- );
-
- return { errors, type: "error" };
- }
-}
-
-export async function importSets(
- sets: SetImportData[],
- editionId: string,
- timezone: string = "UTC",
- onProgress?: (completed: number, total: number) => void,
-): Promise {
- const artistMappings = new Map();
-
- sets.forEach((set, index) => {
- const artistNames = set.artist_names
- .split(",")
- .map((name) => name.trim())
- .filter((name) => name.length > 0);
-
- artistMappings.set(
- index,
- artistNames.map((csvName) => ({
- csvName,
- artistId: null,
- shouldCreate: true,
- })),
- );
- });
-
- return importSetsWithArtistMap({
- sets,
- editionId,
- artistMappings: artistMappings,
- timezone,
- onProgress,
- });
-}
-
-export async function importSetsWithMappings(
- sets: SetImportData[],
- editionId: string,
- artistMappings: Map,
- setSelections?: Map,
- timezone: string = "UTC",
- onProgress?: (completed: number, total: number) => void,
-): Promise {
- return importSetsWithArtistMap({
- sets,
- editionId,
- artistMappings,
- setSelections,
- timezone,
- onProgress,
- });
-}
diff --git a/src/services/csv/setMatcher.ts b/src/services/csv/setMatcher.ts
deleted file mode 100644
index fb8ca253..00000000
--- a/src/services/csv/setMatcher.ts
+++ /dev/null
@@ -1,99 +0,0 @@
-import { supabase } from "@/integrations/supabase/client";
-import type { SetImportData } from "./csvParser";
-
-export interface MatchingSet {
- id: string;
- name: string;
- stage_name: string | null;
- artist_names: string[];
- vote_count: number;
- time_start: string | null;
-}
-
-export async function findMatchingSets({
- existingSets,
- importedSets,
-}: {
- importedSets: SetImportData[];
- existingSets: {
- id: string;
- name: string;
- time_start: string | null;
- set_artists?: { artists: { name: string } }[];
- stages?: { name: string } | null;
- }[];
-}): Promise> {
- const matchMap = new Map();
-
- for (let index = 0; index < importedSets.length; index++) {
- const set = importedSets[index];
- const artistNames = set.artist_names
- .split(",")
- .map((name) => name.trim())
- .filter((name) => name.length > 0);
-
- if (artistNames.length === 0) {
- matchMap.set(index, []);
- continue;
- }
-
- if (!existingSets || existingSets.length === 0) {
- matchMap.set(index, []);
- continue;
- }
-
- const matches: MatchingSet[] = [];
-
- for (const existingSet of existingSets) {
- if (!existingSet.set_artists || existingSet.set_artists.length === 0) {
- continue;
- }
-
- const setArtistNames = existingSet.set_artists
- .map(
- (sa: { artists: { name: string } | null } | null) =>
- sa?.artists?.name,
- )
- .filter((name): name is string => name !== null && name !== undefined);
-
- function normalizeArtistName(name: string) {
- return name
- .toLowerCase()
- .trim()
- .replace(/[.,;!?]+$/, "");
- }
-
- const csvArtistNamesLower = artistNames.map(normalizeArtistName);
- const setArtistNamesLower = setArtistNames.map(normalizeArtistName);
-
- csvArtistNamesLower.sort();
- setArtistNamesLower.sort();
-
- const artistsMatch =
- setArtistNamesLower.length === csvArtistNamesLower.length &&
- setArtistNamesLower.every(
- (name: string, idx: number) => name === csvArtistNamesLower[idx],
- );
-
- if (artistsMatch) {
- const { count: voteCount } = await supabase
- .from("votes")
- .select("*", { count: "exact", head: true })
- .eq("set_id", existingSet.id);
-
- matches.push({
- id: existingSet.id,
- name: existingSet.name,
- stage_name: existingSet.stages?.name || null,
- artist_names: setArtistNames,
- vote_count: voteCount || 0,
- time_start: existingSet.time_start,
- });
- }
- }
-
- matchMap.set(index, matches);
- }
-
- return matchMap;
-}
diff --git a/src/services/csv/setSelectionValidator.ts b/src/services/csv/setSelectionValidator.ts
deleted file mode 100644
index 064210a0..00000000
--- a/src/services/csv/setSelectionValidator.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import type { SetSelection } from "@/pages/admin/festivals/CSVImportDialog/SetsPreviewTable";
-
-export interface SetSelectionValidationError {
- rowIndices: number[];
- setId: string;
- message: string;
-}
-
-export function validateSetSelections(
- selections: Map,
-): SetSelectionValidationError[] {
- const errors: SetSelectionValidationError[] = [];
- const matchedSetIds = new Map();
-
- selections.forEach((selection, rowIndex) => {
- if (selection.action === "match" && selection.matchedSetId) {
- const setId = selection.matchedSetId;
- if (!matchedSetIds.has(setId)) {
- matchedSetIds.set(setId, []);
- }
- matchedSetIds.get(setId)!.push(rowIndex);
- }
- });
-
- matchedSetIds.forEach((rowIndices, setId) => {
- if (rowIndices.length > 1) {
- errors.push({
- rowIndices,
- setId,
- message: `Set is matched by multiple rows (${rowIndices.map((i) => i + 1).join(", ")}). Only one row can match an existing set. Use "Duplicate" or "Create new" for the others.`,
- });
- }
- });
-
- return errors;
-}
diff --git a/src/services/csv/stageImporter.ts b/src/services/csv/stageImporter.ts
deleted file mode 100644
index 146de6ea..00000000
--- a/src/services/csv/stageImporter.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-import { supabase } from "@/integrations/supabase/client";
-import { generateSlug } from "@/lib/slug";
-import type { StageImportData } from "./csvParser";
-import type { ImportResult } from "./types";
-
-export async function importStages(
- stages: StageImportData[],
- editionId: string,
- onProgress?: (completed: number, total: number) => void,
-): Promise {
- try {
- const stageInserts = stages.map((stage) => ({
- name: stage.name,
- slug: generateSlug(stage.name),
- festival_edition_id: editionId,
- archived: false,
- }));
-
- const { data, error } = await supabase
- .from("stages")
- .upsert(stageInserts, {
- onConflict: "name,festival_edition_id",
- ignoreDuplicates: false,
- })
- .select();
-
- if (error) {
- return {
- success: false,
- message: `Failed to import stages: ${error.message}`,
- errors: [error.message],
- };
- }
-
- // Report completion
- onProgress?.(stages.length, stages.length);
-
- return {
- success: true,
- message: `Successfully imported ${data?.length || 0} stages`,
- inserted: data?.length || 0,
- };
- } catch (error) {
- return {
- success: false,
- message: `Import failed: ${error instanceof Error ? error.message : "Unknown error"}`,
- errors: [error instanceof Error ? error.message : "Unknown error"],
- };
- }
-}
diff --git a/src/services/csv/timeValidator.ts b/src/services/csv/timeValidator.ts
deleted file mode 100644
index 3c24733d..00000000
--- a/src/services/csv/timeValidator.ts
+++ /dev/null
@@ -1,116 +0,0 @@
-import { convertLocalTimeToUTC, combineDateAndTime } from "@/lib/timeUtils";
-
-export interface TimeValidationResult {
- isValid: boolean;
- error?: string;
-}
-
-export function validateTimeString(
- timeString: string | undefined,
- dateString: string | undefined,
- timezone: string,
-): TimeValidationResult {
- if (dateString && timeString) {
- const combined = combineDateAndTime(dateString, timeString);
- if (!combined) {
- return {
- isValid: false,
- error: "Failed to combine date and time",
- };
- }
-
- try {
- const result = convertLocalTimeToUTC(combined, timezone);
- if (result === null) {
- return {
- isValid: false,
- error: "Invalid date/time format",
- };
- }
- return { isValid: true };
- } catch (error) {
- return {
- isValid: false,
- error: error instanceof Error ? error.message : "Invalid format",
- };
- }
- }
-
- if (!timeString) {
- return { isValid: true };
- }
-
- try {
- const result = convertLocalTimeToUTC(timeString, timezone);
- if (result === null) {
- return {
- isValid: false,
- error: "Invalid date/time format",
- };
- }
- return { isValid: true };
- } catch (error) {
- return {
- isValid: false,
- error: error instanceof Error ? error.message : "Invalid format",
- };
- }
-}
-
-export interface SetValidationResult {
- isValid: boolean;
- rowIndex: number;
- errors: {
- time_start?: string;
- time_end?: string;
- stage_name?: string;
- artist_names?: string;
- };
-}
-
-export function validateSetData(
- set: {
- stage_name: string;
- artist_names: string;
- time_start?: string;
- date_start?: string;
- time_end?: string;
- date_end?: string;
- },
- rowIndex: number,
- timezone: string,
-): SetValidationResult {
- const errors: SetValidationResult["errors"] = {};
-
- if (!set.stage_name || set.stage_name.trim() === "") {
- errors.stage_name = "Stage name is required";
- }
-
- if (!set.artist_names || set.artist_names.trim() === "") {
- errors.artist_names = "Artist name(s) required";
- }
-
- const timeStartValidation = validateTimeString(
- set.time_start,
- set.date_start,
- timezone,
- );
- if (!timeStartValidation.isValid) {
- errors.time_start = timeStartValidation.error;
- }
-
- const timeEndValidation = validateTimeString(
- set.time_end,
- set.date_end,
- timezone,
- );
- if (!timeEndValidation.isValid) {
- errors.time_end = timeEndValidation.error;
- }
-
- return {
- isValid: Object.keys(errors).length === 0,
- rowIndex,
- errors,
- };
-}
diff --git a/src/services/csv/types.ts b/src/services/csv/types.ts
deleted file mode 100644
index 00db3b70..00000000
--- a/src/services/csv/types.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-export interface ImportResult {
- success: boolean;
- message: string;
- inserted?: number;
- updated?: number;
- errors?: string[];
-}
diff --git a/src/services/scheduleImportService.ts b/src/services/scheduleImportService.ts
new file mode 100644
index 00000000..9b5bcf0f
--- /dev/null
+++ b/src/services/scheduleImportService.ts
@@ -0,0 +1,210 @@
+import { supabase } from "@/integrations/supabase/client";
+
+function parseCSV(csvContent: string): string[][] {
+ const lines = csvContent.trim().split("\n");
+ return lines.map((line) => {
+ const result: string[] = [];
+ let current = "";
+ let inQuotes = false;
+ for (let i = 0; i < line.length; i++) {
+ const char = line[i];
+ if (char === '"') {
+ inQuotes = !inQuotes;
+ } else if (char === "," && !inQuotes) {
+ result.push(current.trim());
+ current = "";
+ } else {
+ current += char;
+ }
+ }
+ result.push(current.trim());
+ return result.map((field) => field.replace(/^"|"$/g, ""));
+ });
+}
+
+export type CsvRow = {
+ artists: string[];
+ setName?: string;
+ stage?: string;
+ date?: string;
+ startTime?: string;
+ endTime?: string;
+ description?: string;
+};
+
+export type SetPayload = {
+ name: string;
+ description: string | null;
+ stageName: string | null;
+ timeStart: string | null;
+ timeEnd: string | null;
+ artistSlugs: string[];
+};
+
+export type DiffResult = {
+ summary: {
+ newArtists: number;
+ newStages: number;
+ setsMatched: number;
+ setsToCreate: number;
+ setsOrphaned: number;
+ };
+ newArtistNames: string[];
+ cleanOperations: {
+ artistsToCreate: { name: string; slug: string }[];
+ stagesToCreate: { name: string }[];
+ setsToCreate: SetPayload[];
+ setsToUpdate: ({ id: string } & SetPayload)[];
+ };
+ conflicts: {
+ stageNameMismatches: {
+ csvValue: string;
+ closestDbValue: string;
+ dbStageId: string;
+ }[];
+ orphanedSets: {
+ id: string;
+ name: string;
+ stage: string | null;
+ timeStart: string | null;
+ }[];
+ };
+};
+
+export type CommitResult = {
+ setsCreated: number;
+ setsUpdated: number;
+ setsArchived: number;
+};
+
+export type StageMismatchResolution =
+ | { action: "map"; dbStageName: string }
+ | { action: "create" };
+
+export type OrphanResolution = "archive" | "keep";
+
+export function parseScheduleCsv(csvContent: string): CsvRow[] {
+ const lines = parseCSV(csvContent);
+ if (lines.length < 2) return [];
+
+ const headers = lines[0].map((h) => h.trim().toLowerCase());
+
+ function col(name: string) {
+ return headers.indexOf(name);
+ }
+ const artistsCol = col("artists");
+ const setNameCol = col("set name");
+ const stageCol = col("stage");
+ const dateCol = col("date");
+ const startTimeCol = col("start time");
+ const endTimeCol = col("end time");
+ const descriptionCol = col("description");
+
+ return lines
+ .slice(1)
+ .filter((row) => row.some((cell) => cell.trim()))
+ .map((row) => {
+ const artistsRaw = artistsCol >= 0 ? (row[artistsCol] ?? "") : "";
+ const artists = artistsRaw
+ .split("|")
+ .map((a) => a.trim())
+ .filter(Boolean);
+
+ return {
+ artists,
+ setName:
+ setNameCol >= 0 ? row[setNameCol]?.trim() || undefined : undefined,
+ stage: stageCol >= 0 ? row[stageCol]?.trim() || undefined : undefined,
+ date: dateCol >= 0 ? row[dateCol]?.trim() || undefined : undefined,
+ startTime:
+ startTimeCol >= 0
+ ? row[startTimeCol]?.trim() || undefined
+ : undefined,
+ endTime:
+ endTimeCol >= 0 ? row[endTimeCol]?.trim() || undefined : undefined,
+ description:
+ descriptionCol >= 0
+ ? row[descriptionCol]?.trim() || undefined
+ : undefined,
+ };
+ })
+ .filter((row) => row.artists.length > 0);
+}
+
+export async function callDiffSchedule(
+ festivalEditionId: string,
+ timezone: string,
+ rows: CsvRow[],
+): Promise {
+ const { data, error } = await supabase.functions.invoke("diff-schedule", {
+ body: { festivalEditionId, timezone, rows },
+ });
+ if (error) throw new Error(error.message);
+ if (data?.error) throw new Error(data.error);
+ return data as DiffResult;
+}
+
+export function buildCommitPayload(
+ diff: DiffResult,
+ stageMismatchResolutions: Record,
+ orphanResolutions: Record,
+): {
+ artistsToCreate: { name: string; slug: string }[];
+ stagesToCreate: { name: string }[];
+ setsToCreate: SetPayload[];
+ setsToUpdate: ({ id: string } & SetPayload)[];
+ setIdsToArchive: string[];
+} {
+ const mismatchedCsvValues = new Set(
+ diff.conflicts.stageNameMismatches.map((m) => m.csvValue),
+ );
+
+ function resolveSetStageName(set: SetPayload): string | null {
+ if (!set.stageName) return null;
+ if (!mismatchedCsvValues.has(set.stageName)) return set.stageName;
+ const resolution = stageMismatchResolutions[set.stageName];
+ if (!resolution) return set.stageName;
+ return resolution.action === "map" ? resolution.dbStageName : set.stageName;
+ }
+
+ const extraStagesToCreate: { name: string }[] = [];
+ for (const mismatch of diff.conflicts.stageNameMismatches) {
+ const resolution = stageMismatchResolutions[mismatch.csvValue];
+ if (resolution?.action === "create") {
+ extraStagesToCreate.push({ name: mismatch.csvValue });
+ }
+ }
+
+ const setIdsToArchive = diff.conflicts.orphanedSets
+ .filter((s) => (orphanResolutions[s.id] ?? "keep") === "archive")
+ .map((s) => s.id);
+
+ return {
+ artistsToCreate: diff.cleanOperations.artistsToCreate,
+ stagesToCreate: [
+ ...diff.cleanOperations.stagesToCreate,
+ ...extraStagesToCreate,
+ ],
+ setsToCreate: diff.cleanOperations.setsToCreate.map((s) => ({
+ ...s,
+ stageName: resolveSetStageName(s),
+ })),
+ setsToUpdate: diff.cleanOperations.setsToUpdate.map((s) => ({
+ ...s,
+ stageName: resolveSetStageName(s),
+ })),
+ setIdsToArchive,
+ };
+}
+
+export async function callCommitSchedule(
+ festivalEditionId: string,
+ payload: ReturnType,
+): Promise {
+ const { data, error } = await supabase.functions.invoke("commit-schedule", {
+ body: { festivalEditionId, ...payload },
+ });
+ if (error) throw new Error(error.message);
+ if (data?.error) throw new Error(data.error);
+ return data as CommitResult;
+}
diff --git a/src/test/setup.ts b/src/test/setup.ts
index 0c6b74ba..7d9033dc 100644
--- a/src/test/setup.ts
+++ b/src/test/setup.ts
@@ -1,8 +1,19 @@
import "@testing-library/jest-dom/vitest";
+import { vi } from "vitest";
+
+// Stub the Supabase env vars so the client module can initialise even when
+// VITE_SUPABASE_URL / VITE_SUPABASE_PUBLISHABLE_KEY aren't set in the test
+// environment. Tests that exercise data fetching mock the relevant query
+// hooks; the client itself never actually issues a request.
+vi.stubEnv("VITE_SUPABASE_URL", "http://localhost:54321");
+vi.stubEnv("VITE_SUPABASE_PUBLISHABLE_KEY", "test-anon-key");
// Polyfill for ArrayBuffer.prototype.resizable and SharedArrayBuffer.prototype.growable
// These are needed by webidl-conversions package
-if (typeof ArrayBuffer !== "undefined" && !Object.getOwnPropertyDescriptor(ArrayBuffer.prototype, "resizable")) {
+if (
+ typeof ArrayBuffer !== "undefined" &&
+ !Object.getOwnPropertyDescriptor(ArrayBuffer.prototype, "resizable")
+) {
Object.defineProperty(ArrayBuffer.prototype, "resizable", {
get() {
return false;
@@ -11,7 +22,10 @@ if (typeof ArrayBuffer !== "undefined" && !Object.getOwnPropertyDescriptor(Array
});
}
-if (typeof SharedArrayBuffer !== "undefined" && !Object.getOwnPropertyDescriptor(SharedArrayBuffer.prototype, "growable")) {
+if (
+ typeof SharedArrayBuffer !== "undefined" &&
+ !Object.getOwnPropertyDescriptor(SharedArrayBuffer.prototype, "growable")
+) {
Object.defineProperty(SharedArrayBuffer.prototype, "growable", {
get() {
return false;
diff --git a/supabase/functions/_shared/auth.ts b/supabase/functions/_shared/auth.ts
new file mode 100644
index 00000000..5bf260ff
--- /dev/null
+++ b/supabase/functions/_shared/auth.ts
@@ -0,0 +1,50 @@
+import { createClient } from "https://esm.sh/@supabase/supabase-js@2";
+
+export const corsHeaders = {
+ "Access-Control-Allow-Origin": "*",
+ "Access-Control-Allow-Headers":
+ "authorization, x-client-info, apikey, content-type",
+};
+
+export function getAdminClient() {
+ return createClient(
+ Deno.env.get("SUPABASE_URL") ?? "",
+ Deno.env.get("SUPABASE_SERVICE_ROLE_KEY") ?? "",
+ );
+}
+
+type AuthResult =
+ | { userId: string; errorResponse: null }
+ | { userId: null; errorResponse: { status: number; body: string } };
+
+export async function requireAdmin(req: Request): Promise {
+ const authHeader = req.headers.get("Authorization");
+ if (!authHeader) {
+ return { userId: null, errorResponse: { status: 401, body: JSON.stringify({ error: "Unauthorized" }) } };
+ }
+
+ const userClient = createClient(
+ Deno.env.get("SUPABASE_URL") ?? "",
+ Deno.env.get("SUPABASE_ANON_KEY") ?? "",
+ { global: { headers: { Authorization: authHeader } } },
+ );
+
+ const { data: { user }, error: userError } = await userClient.auth.getUser();
+ if (userError || !user) {
+ return { userId: null, errorResponse: { status: 401, body: JSON.stringify({ error: "Unauthorized" }) } };
+ }
+
+ const adminClient = getAdminClient();
+ const { data: adminRole } = await adminClient
+ .from("admin_roles")
+ .select("role")
+ .eq("user_id", user.id)
+ .in("role", ["admin", "super_admin"])
+ .maybeSingle();
+
+ if (!adminRole) {
+ return { userId: null, errorResponse: { status: 403, body: JSON.stringify({ error: "Forbidden" }) } };
+ }
+
+ return { userId: user.id, errorResponse: null };
+}
diff --git a/supabase/functions/commit-schedule/commit-schedule.test.ts b/supabase/functions/commit-schedule/commit-schedule.test.ts
new file mode 100644
index 00000000..2f273f83
--- /dev/null
+++ b/supabase/functions/commit-schedule/commit-schedule.test.ts
@@ -0,0 +1,246 @@
+// Integration tests for commit-schedule.
+// Run against a local Supabase instance: deno test --allow-env --allow-net commit-schedule.test.ts
+//
+// These tests need SUPABASE_URL and SUPABASE_SERVICE_ROLE_KEY env vars; when either is unset they skip themselves with a console warning rather than fail.
+// They test the commit_schedule RPC directly, which is the meaningful logic layer.
+// The Edge Function itself is a thin auth + dispatch wrapper.
+
+import { assertEquals, assertExists } from "jsr:@std/assert@1";
+import { createClient } from "https://esm.sh/@supabase/supabase-js@2";
+
+const SUPABASE_URL = Deno.env.get("SUPABASE_URL") ?? "";
+const SERVICE_ROLE_KEY = Deno.env.get("SUPABASE_SERVICE_ROLE_KEY") ?? "";
+
+function skipIfNoEnv() {
+ if (!SUPABASE_URL || !SERVICE_ROLE_KEY) {
+ console.warn(
+ "Skipping integration tests: SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY not set",
+ );
+ return true;
+ }
+ return false;
+}
+
+function adminClient() {
+ return createClient(SUPABASE_URL, SERVICE_ROLE_KEY);
+}
+
+async function getTestEditionId(
+ db: ReturnType,
+): Promise {
+ const { data } = await db
+ .from("festival_editions")
+ .select("id")
+ .limit(1)
+ .single();
+ assertExists(data, "No festival edition found — run test:setup first");
+ return data.id;
+}
+
+async function getTestUserId(
+ db: ReturnType,
+): Promise {
+ const { data } = await db
+ .from("admin_roles")
+ .select("user_id")
+ .limit(1)
+ .single();
+ assertExists(data, "No admin user found — run test:setup first");
+ return data.user_id;
+}
+
+Deno.test("commit_schedule: creates new artist and set", async () => {
+ if (skipIfNoEnv()) return;
+ const db = adminClient();
+ const editionId = await getTestEditionId(db);
+ const userId = await getTestUserId(db);
+ const slug = `test-artist-${Date.now()}`;
+ const setName = `Test Artist Set ${slug}`;
+
+ const { data, error } = await db.rpc("commit_schedule", {
+ p_festival_edition_id: editionId,
+ p_user_id: userId,
+ p_artists_to_create: [{ name: "Test Artist", slug }],
+ p_stages_to_create: [],
+ p_sets_to_create: [
+ {
+ name: setName,
+ description: null,
+ stageName: null,
+ timeStart: null,
+ timeEnd: null,
+ artistSlugs: [slug],
+ },
+ ],
+ p_sets_to_update: [],
+ p_set_ids_to_archive: [],
+ });
+
+ assertEquals(error, null);
+ assertEquals(data.setsCreated, 1);
+ assertEquals(data.setsUpdated, 0);
+
+ // Cleanup
+ await db
+ .from("sets")
+ .delete()
+ .eq("festival_edition_id", editionId)
+ .eq("name", setName);
+ await db.from("artists").delete().eq("slug", slug);
+});
+
+Deno.test(
+ "commit_schedule: updates existing set without creating duplicate",
+ async () => {
+ if (skipIfNoEnv()) return;
+ const db = adminClient();
+ const editionId = await getTestEditionId(db);
+ const userId = await getTestUserId(db);
+ const slug = `test-update-artist-${Date.now()}`;
+
+ // Create artist and set
+ await db.from("artists").insert({ name: "Update Test", slug });
+ const { data: artist } = await db
+ .from("artists")
+ .select("id")
+ .eq("slug", slug)
+ .single();
+ const { data: set } = await db
+ .from("sets")
+ .insert({
+ festival_edition_id: editionId,
+ name: "Old Name",
+ slug: "old-name",
+ created_by: userId,
+ })
+ .select("id")
+ .single();
+ await db
+ .from("set_artists")
+ .insert({ set_id: set!.id, artist_id: artist!.id });
+
+ const { data, error } = await db.rpc("commit_schedule", {
+ p_festival_edition_id: editionId,
+ p_user_id: userId,
+ p_artists_to_create: [],
+ p_stages_to_create: [],
+ p_sets_to_create: [],
+ p_sets_to_update: [
+ {
+ id: set!.id,
+ name: "New Name",
+ description: "Updated",
+ stageName: null,
+ timeStart: null,
+ timeEnd: null,
+ artistSlugs: [slug],
+ },
+ ],
+ p_set_ids_to_archive: [],
+ });
+
+ assertEquals(error, null);
+ assertEquals(data.setsUpdated, 1);
+
+ const { data: updated } = await db
+ .from("sets")
+ .select("name, description")
+ .eq("id", set!.id)
+ .single();
+ assertEquals(updated!.name, "New Name");
+ assertEquals(updated!.description, "Updated");
+
+ // Cleanup
+ await db.from("sets").delete().eq("id", set!.id);
+ await db.from("artists").delete().eq("slug", slug);
+ },
+);
+
+Deno.test("commit_schedule: archives orphaned sets", async () => {
+ if (skipIfNoEnv()) return;
+ const db = adminClient();
+ const editionId = await getTestEditionId(db);
+ const userId = await getTestUserId(db);
+
+ const { data: set } = await db
+ .from("sets")
+ .insert({
+ festival_edition_id: editionId,
+ name: "Orphan Set",
+ slug: "orphan-set",
+ created_by: userId,
+ })
+ .select("id")
+ .single();
+
+ const { data, error } = await db.rpc("commit_schedule", {
+ p_festival_edition_id: editionId,
+ p_user_id: userId,
+ p_artists_to_create: [],
+ p_stages_to_create: [],
+ p_sets_to_create: [],
+ p_sets_to_update: [],
+ p_set_ids_to_archive: [set!.id],
+ });
+
+ assertEquals(error, null);
+ assertEquals(data.setsArchived, 1);
+
+ const { data: archived } = await db
+ .from("sets")
+ .select("archived")
+ .eq("id", set!.id)
+ .single();
+ assertEquals(archived!.archived, true);
+
+ // Cleanup
+ await db.from("sets").delete().eq("id", set!.id);
+});
+
+Deno.test(
+ "commit_schedule: midnight-crossing times stored correctly",
+ async () => {
+ if (skipIfNoEnv()) return;
+ const db = adminClient();
+ const editionId = await getTestEditionId(db);
+ const userId = await getTestUserId(db);
+ const slug = `test-midnight-${Date.now()}`;
+
+ await db.from("artists").insert({ name: "Late Night DJ", slug });
+
+ const { error } = await db.rpc("commit_schedule", {
+ p_festival_edition_id: editionId,
+ p_user_id: userId,
+ p_artists_to_create: [],
+ p_stages_to_create: [],
+ p_sets_to_create: [
+ {
+ name: "Late Night Set",
+ description: null,
+ stageName: null,
+ timeStart: "2026-07-11T23:00:00.000Z",
+ timeEnd: "2026-07-12T01:00:00.000Z",
+ artistSlugs: [slug],
+ },
+ ],
+ p_sets_to_update: [],
+ p_set_ids_to_archive: [],
+ });
+
+ assertEquals(error, null);
+
+ const { data: sets } = await db
+ .from("sets")
+ .select("id, time_start, time_end, set_artists(artist_id, artists(slug))")
+ .eq("festival_edition_id", editionId)
+ .eq("name", "Late Night Set");
+
+ assertExists(sets?.[0]);
+ assertEquals(sets![0].time_start, "2026-07-11T23:00:00+00:00");
+ assertEquals(sets![0].time_end, "2026-07-12T01:00:00+00:00");
+
+ // Cleanup
+ await db.from("sets").delete().eq("id", sets![0].id);
+ await db.from("artists").delete().eq("slug", slug);
+ },
+);
diff --git a/supabase/functions/commit-schedule/index.ts b/supabase/functions/commit-schedule/index.ts
new file mode 100644
index 00000000..3165540b
--- /dev/null
+++ b/supabase/functions/commit-schedule/index.ts
@@ -0,0 +1,95 @@
+import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
+import { z } from "https://deno.land/x/zod@v3.22.4/mod.ts";
+import { getAdminClient, requireAdmin, corsHeaders } from "../_shared/auth.ts";
+
+const setPayloadSchema = z.object({
+ name: z.string(),
+ description: z.string().nullish(),
+ stageName: z.string().nullish(),
+ timeStart: z.string().nullish(),
+ timeEnd: z.string().nullish(),
+ artistSlugs: z.array(z.string()),
+});
+
+const commitRequestSchema = z.object({
+ festivalEditionId: z.string().uuid(),
+ artistsToCreate: z
+ .array(z.object({ name: z.string(), slug: z.string() }))
+ .default([]),
+ stagesToCreate: z.array(z.object({ name: z.string() })).default([]),
+ setsToCreate: z.array(setPayloadSchema).default([]),
+ setsToUpdate: z
+ .array(setPayloadSchema.extend({ id: z.string().uuid() }))
+ .default([]),
+ setIdsToArchive: z.array(z.string().uuid()).default([]),
+});
+
+serve(async (req) => {
+ if (req.method === "OPTIONS") {
+ return new Response("ok", { headers: corsHeaders });
+ }
+
+ const auth = await requireAdmin(req);
+ if (auth.errorResponse) {
+ return new Response(auth.errorResponse.body, {
+ status: auth.errorResponse.status,
+ headers: { ...corsHeaders, "Content-Type": "application/json" },
+ });
+ }
+
+ try {
+ const parsed = commitRequestSchema.safeParse(await req.json());
+ if (!parsed.success) {
+ return new Response(
+ JSON.stringify({
+ error: "Invalid request",
+ issues: parsed.error.issues,
+ }),
+ {
+ status: 400,
+ headers: { ...corsHeaders, "Content-Type": "application/json" },
+ },
+ );
+ }
+
+ const {
+ festivalEditionId,
+ artistsToCreate,
+ stagesToCreate,
+ setsToCreate,
+ setsToUpdate,
+ setIdsToArchive,
+ } = parsed.data;
+
+ const db = getAdminClient();
+
+ const { data, error } = await db.rpc("commit_schedule", {
+ p_festival_edition_id: festivalEditionId,
+ p_user_id: auth.userId,
+ p_artists_to_create: artistsToCreate,
+ p_stages_to_create: stagesToCreate,
+ p_sets_to_create: setsToCreate,
+ p_sets_to_update: setsToUpdate,
+ p_set_ids_to_archive: setIdsToArchive,
+ });
+
+ if (error) {
+ console.error("commit_schedule RPC error:", error);
+ return new Response(JSON.stringify({ error: error.message }), {
+ status: 400,
+ headers: { ...corsHeaders, "Content-Type": "application/json" },
+ });
+ }
+
+ return new Response(JSON.stringify(data), {
+ headers: { ...corsHeaders, "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ console.error("commit-schedule error:", error);
+ const message = error instanceof Error ? error.message : String(error);
+ return new Response(JSON.stringify({ error: message }), {
+ status: 500,
+ headers: { ...corsHeaders, "Content-Type": "application/json" },
+ });
+ }
+});
diff --git a/supabase/functions/diff-schedule/diff.test.ts b/supabase/functions/diff-schedule/diff.test.ts
new file mode 100644
index 00000000..92bdffbd
--- /dev/null
+++ b/supabase/functions/diff-schedule/diff.test.ts
@@ -0,0 +1,305 @@
+import { assertEquals } from "jsr:@std/assert@1";
+import {
+ advanceDateByOne,
+ artistKey,
+ computeDiff,
+ localToUtc,
+ toSlug,
+ type DbArtist,
+ type DbSet,
+ type DbStage,
+} from "./diff.ts";
+
+Deno.test("toSlug converts name to lowercase hyphenated slug", () => {
+ assertEquals(toSlug("Carl Cox"), "carl-cox");
+ assertEquals(toSlug("DJ Tennis"), "dj-tennis");
+ assertEquals(toSlug(" Peggy Gou "), "peggy-gou");
+ assertEquals(toSlug("Aphex Twin"), "aphex-twin");
+ assertEquals(toSlug("deadmau5"), "deadmau5");
+ assertEquals(toSlug("Four Tet"), "four-tet");
+});
+
+Deno.test("artistKey sorts slugs and joins with pipe", () => {
+ assertEquals(artistKey(["carl-cox"]), "carl-cox");
+ assertEquals(artistKey(["carl-cox", "peggy-gou"]), "carl-cox|peggy-gou");
+ assertEquals(artistKey(["peggy-gou", "carl-cox"]), "carl-cox|peggy-gou");
+ assertEquals(artistKey(["c", "b", "a"]), "a|b|c");
+});
+
+Deno.test("advanceDateByOne advances date by one day", () => {
+ assertEquals(advanceDateByOne("2026-07-11"), "2026-07-12");
+ assertEquals(advanceDateByOne("2026-07-31"), "2026-08-01");
+ assertEquals(advanceDateByOne("2026-12-31"), "2027-01-01");
+});
+
+Deno.test("localToUtc converts Lisbon summer time (UTC+1) to UTC", () => {
+ const result = localToUtc("2026-07-11", "23:00", "Europe/Lisbon");
+ assertEquals(result, "2026-07-11T22:00:00.000Z");
+});
+
+Deno.test("localToUtc converts Lisbon winter time (UTC+0) to UTC", () => {
+ const result = localToUtc("2026-01-15", "22:00", "Europe/Lisbon");
+ assertEquals(result, "2026-01-15T22:00:00.000Z");
+});
+
+Deno.test("localToUtc converts midnight correctly", () => {
+ const result = localToUtc("2026-07-11", "00:00", "Europe/Lisbon");
+ assertEquals(result, "2026-07-10T23:00:00.000Z");
+});
+
+// --- computeDiff: pure diff-logic tests on in-memory fixtures (no network, no Supabase) ---
+
+function makeArtist(name: string): DbArtist {
+ const slug = name.toLowerCase().replace(/\s+/g, "-");
+ return { id: `id-${slug}`, name, slug };
+}
+
+function makeStage(id: string, name: string): DbStage {
+ return { id, name };
+}
+
+function makeSet(
+ id: string,
+ name: string,
+ artists: DbArtist[],
+ stageId: string | null = null,
+ timeStart: string | null = null,
+): DbSet {
+ return {
+ id,
+ name,
+ description: null,
+ stage_id: stageId,
+ time_start: timeStart,
+ time_end: null,
+ set_artists: artists.map((a) => ({ artist_id: a.id, artists: a })),
+ };
+}
+
+Deno.test("computeDiff: new artist in CSV creates artist", () => {
+ const result = computeDiff(
+ [{ artists: ["New DJ"] }],
+ [],
+ [],
+ [],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.cleanOperations.artistsToCreate.length, 1);
+ assertEquals(result.cleanOperations.artistsToCreate[0].name, "New DJ");
+ assertEquals(result.cleanOperations.artistsToCreate[0].slug, "new-dj");
+ assertEquals(result.summary.newArtists, 1);
+});
+
+Deno.test("computeDiff: existing artist is not duplicated", () => {
+ const artist = makeArtist("Carl Cox");
+ const result = computeDiff(
+ [{ artists: ["Carl Cox"] }],
+ [],
+ [],
+ [artist],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.cleanOperations.artistsToCreate.length, 0);
+ assertEquals(result.summary.newArtists, 0);
+});
+
+Deno.test("computeDiff: same new artist in multiple rows is created once", () => {
+ const result = computeDiff(
+ [{ artists: ["New DJ"] }, { artists: ["New DJ"] }],
+ [],
+ [],
+ [],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.cleanOperations.artistsToCreate.length, 1);
+});
+
+Deno.test("computeDiff: CSV row with no DB match creates new set", () => {
+ const result = computeDiff(
+ [{ artists: ["Carl Cox"] }],
+ [],
+ [],
+ [makeArtist("Carl Cox")],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.cleanOperations.setsToCreate.length, 1);
+ assertEquals(result.cleanOperations.setsToUpdate.length, 0);
+ assertEquals(result.summary.setsToCreate, 1);
+});
+
+Deno.test("computeDiff: CSV row matching existing set produces update", () => {
+ const artist = makeArtist("Carl Cox");
+ const set = makeSet("set-1", "Carl Cox", [artist]);
+ const result = computeDiff(
+ [{ artists: ["Carl Cox"] }],
+ [],
+ [set],
+ [artist],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.cleanOperations.setsToUpdate.length, 1);
+ assertEquals(result.cleanOperations.setsToUpdate[0].id, "set-1");
+ assertEquals(result.cleanOperations.setsToCreate.length, 0);
+ assertEquals(result.summary.setsMatched, 1);
+});
+
+Deno.test("computeDiff: set in DB but absent from CSV is orphaned", () => {
+ const artist = makeArtist("DJ Tennis");
+ const set = makeSet("set-2", "DJ Tennis", [artist]);
+ const result = computeDiff(
+ [],
+ [],
+ [set],
+ [artist],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.conflicts.orphanedSets.length, 1);
+ assertEquals(result.conflicts.orphanedSets[0].id, "set-2");
+ assertEquals(result.summary.setsOrphaned, 1);
+});
+
+Deno.test("computeDiff: B2B set matched by combined artist key", () => {
+ const cox = makeArtist("Carl Cox");
+ const gou = makeArtist("Peggy Gou");
+ const set = makeSet("set-b2b", "Carl Cox b2b Peggy Gou", [cox, gou]);
+ const result = computeDiff(
+ [{ artists: ["Carl Cox", "Peggy Gou"] }],
+ [],
+ [set],
+ [cox, gou],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.cleanOperations.setsToUpdate.length, 1);
+ assertEquals(result.cleanOperations.setsToUpdate[0].id, "set-b2b");
+});
+
+Deno.test("computeDiff: B2B artist order in CSV does not affect match", () => {
+ const cox = makeArtist("Carl Cox");
+ const gou = makeArtist("Peggy Gou");
+ const set = makeSet("set-b2b", "Carl Cox b2b Peggy Gou", [cox, gou]);
+ const result = computeDiff(
+ [{ artists: ["Peggy Gou", "Carl Cox"] }],
+ [],
+ [set],
+ [cox, gou],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.cleanOperations.setsToUpdate.length, 1);
+});
+
+Deno.test("computeDiff: exact stage name match uses canonical DB name in payload", () => {
+ const artist = makeArtist("Carl Cox");
+ const stage = makeStage("stage-1", "Main Stage");
+ const result = computeDiff(
+ [{ artists: ["Carl Cox"], stage: "Main Stage" }],
+ [stage],
+ [],
+ [artist],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.cleanOperations.setsToCreate[0].stageName, "Main Stage");
+});
+
+Deno.test("computeDiff: stage name mismatch surfaced as conflict", () => {
+ const artist = makeArtist("Carl Cox");
+ const stage = makeStage("stage-1", "Main Stage");
+ const result = computeDiff(
+ [{ artists: ["Carl Cox"], stage: "Mainstage" }],
+ [stage],
+ [],
+ [artist],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.conflicts.stageNameMismatches.length, 1);
+ assertEquals(result.conflicts.stageNameMismatches[0].csvValue, "Mainstage");
+ assertEquals(result.conflicts.stageNameMismatches[0].closestDbValue, "Main Stage");
+});
+
+Deno.test("computeDiff: unknown stage creates new stage", () => {
+ const artist = makeArtist("Carl Cox");
+ const result = computeDiff(
+ [{ artists: ["Carl Cox"], stage: "Secret Forest" }],
+ [],
+ [],
+ [artist],
+ "Europe/Lisbon",
+ );
+ assertEquals(result.cleanOperations.stagesToCreate.length, 1);
+ assertEquals(result.cleanOperations.stagesToCreate[0].name, "Secret Forest");
+});
+
+Deno.test("computeDiff: end time before start time triggers midnight advance", () => {
+ const artist = makeArtist("Carl Cox");
+ const result = computeDiff(
+ [{ artists: ["Carl Cox"], date: "2026-07-11", startTime: "23:00", endTime: "01:00" }],
+ [],
+ [],
+ [artist],
+ "UTC",
+ );
+ const created = result.cleanOperations.setsToCreate[0];
+ // start should be 2026-07-11T23:00:00Z, end should be 2026-07-12T01:00:00Z
+ assertEquals(created.timeStart, "2026-07-11T23:00:00.000Z");
+ assertEquals(created.timeEnd, "2026-07-12T01:00:00.000Z");
+});
+
+Deno.test("computeDiff: set name falls back to b2b join when not provided", () => {
+ const artist1 = makeArtist("Carl Cox");
+ const artist2 = makeArtist("Peggy Gou");
+ const result = computeDiff(
+ [{ artists: ["Carl Cox", "Peggy Gou"] }],
+ [],
+ [],
+ [artist1, artist2],
+ "UTC",
+ );
+ assertEquals(result.cleanOperations.setsToCreate[0].name, "Carl Cox b2b Peggy Gou");
+});
+
+Deno.test("computeDiff: explicit set name takes precedence over b2b fallback", () => {
+ const artist = makeArtist("Carl Cox");
+ const result = computeDiff(
+ [{ artists: ["Carl Cox"], setName: "Carl Cox Live" }],
+ [],
+ [],
+ [artist],
+ "UTC",
+ );
+ assertEquals(result.cleanOperations.setsToCreate[0].name, "Carl Cox Live");
+});
+
+Deno.test("computeDiff: same stage mismatch from multiple rows surfaced once", () => {
+ const artist1 = makeArtist("Artist A");
+ const artist2 = makeArtist("Artist B");
+ const stage = makeStage("stage-1", "Main Stage");
+ const result = computeDiff(
+ [
+ { artists: ["Artist A"], stage: "Mainstage" },
+ { artists: ["Artist B"], stage: "Mainstage" },
+ ],
+ [stage],
+ [],
+ [artist1, artist2],
+ "UTC",
+ );
+ assertEquals(result.conflicts.stageNameMismatches.length, 1);
+});
+
+Deno.test("computeDiff: multiple candidates disambiguated by stage", () => {
+ const artist = makeArtist("Carl Cox");
+ const stage1 = makeStage("s1", "Stage One");
+ const stage2 = makeStage("s2", "Stage Two");
+ const set1 = makeSet("set-a", "Carl Cox", [artist], "s1");
+ const set2 = makeSet("set-b", "Carl Cox", [artist], "s2");
+ const result = computeDiff(
+ [{ artists: ["Carl Cox"], stage: "Stage Two" }],
+ [stage1, stage2],
+ [set1, set2],
+ [artist],
+ "UTC",
+ );
+ assertEquals(result.cleanOperations.setsToUpdate.length, 1);
+ assertEquals(result.cleanOperations.setsToUpdate[0].id, "set-b");
+ assertEquals(result.conflicts.orphanedSets.length, 1);
+ assertEquals(result.conflicts.orphanedSets[0].id, "set-a");
+});
diff --git a/supabase/functions/diff-schedule/diff.ts b/supabase/functions/diff-schedule/diff.ts
new file mode 100644
index 00000000..ee93e0e5
--- /dev/null
+++ b/supabase/functions/diff-schedule/diff.ts
@@ -0,0 +1,336 @@
+export type CsvRow = {
+ artists: string[];
+ setName?: string;
+ stage?: string;
+ date?: string;
+ startTime?: string;
+ endTime?: string;
+ description?: string;
+};
+
+export type DbStage = { id: string; name: string };
+export type DbArtist = { id: string; name: string; slug: string };
+export type DbSet = {
+ id: string;
+ name: string;
+ description: string | null;
+ stage_id: string | null;
+ time_start: string | null;
+ time_end: string | null;
+ set_artists: { artist_id: string; artists: DbArtist }[];
+};
+
+export type SetPayload = {
+ name: string;
+ description: string | null;
+ stageName: string | null;
+ timeStart: string | null;
+ timeEnd: string | null;
+ artistSlugs: string[];
+};
+
+export type DiffResult = {
+ summary: {
+ newArtists: number;
+ newStages: number;
+ setsMatched: number;
+ setsToCreate: number;
+ setsOrphaned: number;
+ };
+ newArtistNames: string[];
+ cleanOperations: {
+ artistsToCreate: { name: string; slug: string }[];
+ stagesToCreate: { name: string }[];
+ setsToCreate: SetPayload[];
+ setsToUpdate: ({ id: string } & SetPayload)[];
+ };
+ conflicts: {
+ stageNameMismatches: {
+ csvValue: string;
+ closestDbValue: string;
+ dbStageId: string;
+ }[];
+ orphanedSets: {
+ id: string;
+ name: string;
+ stage: string | null;
+ timeStart: string | null;
+ }[];
+ };
+};
+
+export function toSlug(name: string): string {
+ return name
+ .toLowerCase()
+ .trim()
+ .replace(/[^a-z0-9]+/g, "-")
+ .replace(/^-+|-+$/g, "");
+}
+
+export function artistKey(slugs: string[]): string {
+ return [...slugs].sort().join("|");
+}
+
+export function advanceDateByOne(dateStr: string): string {
+ const d = new Date(dateStr + "T00:00:00Z");
+ d.setUTCDate(d.getUTCDate() + 1);
+ return d.toISOString().split("T")[0];
+}
+
+export function localToUtc(
+ dateStr: string,
+ timeStr: string,
+ timezone: string,
+): string {
+ const localIso = `${dateStr}T${timeStr}:00`;
+ const naiveUtc = new Date(localIso + "Z");
+ // sv-SE locale gives "YYYY-MM-DD HH:MM:SS" — unambiguously parseable as UTC
+ const localInTz = new Date(
+ naiveUtc.toLocaleString("sv-SE", { timeZone: timezone }) + "Z",
+ );
+ const offsetMs = naiveUtc.getTime() - localInTz.getTime();
+ return new Date(naiveUtc.getTime() + offsetMs).toISOString();
+}
+
+export function utcToLocalDate(utcIso: string, timezone: string): string {
+ // sv-SE renders as "YYYY-MM-DD HH:MM:SS" so we can take the date portion.
+ return new Date(utcIso)
+ .toLocaleString("sv-SE", { timeZone: timezone })
+ .split(" ")[0];
+}
+
+type DbIndexes = {
+ stageByNameLower: Map<string, DbStage>;
+ stageById: Map<string, DbStage>;
+ existingArtistSlugs: Set<string>;
+ setsByArtistKey: Map<string, DbSet[]>;
+};
+
+type StageResolution =
+ | { kind: "exact"; id: string; name: string }
+ | { kind: "mismatch"; resolvedName: string; closest: DbStage }
+ | { kind: "new"; resolvedName: string }
+ | { kind: "none" };
+
+function buildIndexes(
+ dbStages: DbStage[],
+ dbSets: DbSet[],
+ dbArtists: DbArtist[],
+): DbIndexes {
+ const setsByArtistKey = new Map<string, DbSet[]>();
+ for (const set of dbSets) {
+ const slugs = set.set_artists.map((sa) => sa.artists.slug);
+ const key = artistKey(slugs);
+ const bucket = setsByArtistKey.get(key) ?? [];
+ bucket.push(set);
+ setsByArtistKey.set(key, bucket);
+ }
+ return {
+ stageByNameLower: new Map(dbStages.map((s) => [s.name.toLowerCase(), s])),
+ stageById: new Map(dbStages.map((s) => [s.id, s])),
+ existingArtistSlugs: new Set(dbArtists.map((a) => a.slug)),
+ setsByArtistKey,
+ };
+}
+
+function resolveArtists(
+ row: CsvRow,
+ existingSlugs: Set<string>,
+ seenNewSlugs: Set<string>,
+ artistsToCreate: { name: string; slug: string }[],
+): string[] {
+ const slugs: string[] = [];
+ for (const name of row.artists) {
+ const slug = toSlug(name);
+ slugs.push(slug);
+ if (!existingSlugs.has(slug) && !seenNewSlugs.has(slug)) {
+ artistsToCreate.push({ name, slug });
+ seenNewSlugs.add(slug);
+ }
+ }
+ return slugs;
+}
+
+function resolveStage(
+ rawStage: string | undefined,
+ dbStages: DbStage[],
+ stageByNameLower: Map<string, DbStage>,
+): StageResolution {
+ if (!rawStage) return { kind: "none" };
+
+ const lower = rawStage.toLowerCase();
+ const exactMatch = stageByNameLower.get(lower);
+ if (exactMatch) {
+ return { kind: "exact", id: exactMatch.id, name: exactMatch.name };
+ }
+
+ function strip(s: string) {
+ return s.toLowerCase().replace(/[^a-z0-9]/g, "");
+ }
+ const closeMatch = dbStages.find((s) => {
+ const a = strip(s.name);
+ const b = strip(lower);
+ return a === b || a.includes(b) || b.includes(a);
+ });
+
+ if (closeMatch) {
+ return { kind: "mismatch", resolvedName: rawStage, closest: closeMatch };
+ }
+ return { kind: "new", resolvedName: rawStage };
+}
+
+function computeTimes(
+ row: CsvRow,
+ timezone: string,
+): { timeStart: string | null; timeEnd: string | null } {
+ let timeStart: string | null = null;
+ let timeEnd: string | null = null;
+ if (row.date && row.startTime) {
+ timeStart = localToUtc(row.date, row.startTime, timezone);
+ }
+ if (row.date && row.endTime) {
+ const crossesMidnight =
+ row.startTime != null && row.endTime < row.startTime;
+ const endDate = crossesMidnight ? advanceDateByOne(row.date) : row.date;
+ timeEnd = localToUtc(endDate, row.endTime, timezone);
+ }
+ return { timeStart, timeEnd };
+}
+
+function findMatchingSet(
+ candidates: DbSet[],
+ resolvedStageId: string | null,
+ date: string | undefined,
+ timezone: string,
+): DbSet | null {
+ if (candidates.length === 0) return null;
+ if (candidates.length === 1) return candidates[0];
+ return (
+ (resolvedStageId
+ ? (candidates.find((s) => s.stage_id === resolvedStageId) ?? null)
+ : null) ??
+ (date
+ ? (candidates.find(
+ (s) =>
+ s.time_start != null &&
+ utcToLocalDate(s.time_start, timezone) === date,
+ ) ?? null)
+ : null) ??
+ candidates[0]
+ );
+}
+
+export function computeDiff(
+ rows: CsvRow[],
+ dbStages: DbStage[],
+ dbSets: DbSet[],
+ dbArtists: DbArtist[],
+ timezone: string,
+): DiffResult {
+ const indexes = buildIndexes(dbStages, dbSets, dbArtists);
+
+ const matchedSetIds = new Set<string>();
+ const seenNewArtistSlugs = new Set<string>();
+ const seenNewStageNames = new Set<string>();
+ const seenMismatchedStages = new Set<string>();
+
+ const artistsToCreate: { name: string; slug: string }[] = [];
+ const stagesToCreate: { name: string }[] = [];
+ const stageNameMismatches: DiffResult["conflicts"]["stageNameMismatches"] =
+ [];
+ const setsToCreate: SetPayload[] = [];
+ const setsToUpdate: ({ id: string } & SetPayload)[] = [];
+
+ for (const row of rows) {
+ const artistSlugs = resolveArtists(
+ row,
+ indexes.existingArtistSlugs,
+ seenNewArtistSlugs,
+ artistsToCreate,
+ );
+
+ const stage = resolveStage(row.stage, dbStages, indexes.stageByNameLower);
+ let resolvedStageId: string | null = null;
+ let resolvedStageName: string | null = null;
+ switch (stage.kind) {
+ case "exact":
+ resolvedStageId = stage.id;
+ resolvedStageName = stage.name;
+ break;
+ case "mismatch":
+ resolvedStageName = stage.resolvedName;
+ if (!seenMismatchedStages.has(stage.resolvedName)) {
+ stageNameMismatches.push({
+ csvValue: stage.resolvedName,
+ closestDbValue: stage.closest.name,
+ dbStageId: stage.closest.id,
+ });
+ seenMismatchedStages.add(stage.resolvedName);
+ }
+ break;
+ case "new":
+ resolvedStageName = stage.resolvedName;
+ if (!seenNewStageNames.has(stage.resolvedName)) {
+ stagesToCreate.push({ name: stage.resolvedName });
+ seenNewStageNames.add(stage.resolvedName);
+ }
+ break;
+ case "none":
+ break;
+ }
+
+ const { timeStart, timeEnd } = computeTimes(row, timezone);
+
+ const candidates =
+ indexes.setsByArtistKey.get(artistKey(artistSlugs)) ?? [];
+ const matched = findMatchingSet(
+ candidates,
+ resolvedStageId,
+ row.date,
+ timezone,
+ );
+
+ const payload: SetPayload = {
+ name: row.setName?.trim() || row.artists.join(" b2b "),
+ description: row.description ?? null,
+ stageName: resolvedStageName,
+ timeStart,
+ timeEnd,
+ artistSlugs,
+ };
+
+ if (matched) {
+ matchedSetIds.add(matched.id);
+ setsToUpdate.push({ id: matched.id, ...payload });
+ } else {
+ setsToCreate.push(payload);
+ }
+ }
+
+ const orphanedSets = dbSets
+ .filter((s) => !matchedSetIds.has(s.id))
+ .map((s) => ({
+ id: s.id,
+ name: s.name,
+ stage: indexes.stageById.get(s.stage_id ?? "")?.name ?? null,
+ timeStart: s.time_start,
+ }));
+
+ return {
+ summary: {
+ newArtists: artistsToCreate.length,
+ newStages: stagesToCreate.length,
+ setsMatched: matchedSetIds.size,
+ setsToCreate: setsToCreate.length,
+ setsOrphaned: orphanedSets.length,
+ },
+ newArtistNames: artistsToCreate.map((a) => a.name),
+ cleanOperations: {
+ artistsToCreate,
+ stagesToCreate,
+ setsToCreate,
+ setsToUpdate,
+ },
+ conflicts: { stageNameMismatches, orphanedSets },
+ };
+}
diff --git a/supabase/functions/diff-schedule/index.ts b/supabase/functions/diff-schedule/index.ts
new file mode 100644
index 00000000..4bd566e1
--- /dev/null
+++ b/supabase/functions/diff-schedule/index.ts
@@ -0,0 +1,80 @@
+import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
+import { getAdminClient, requireAdmin, corsHeaders } from "../_shared/auth.ts";
+import {
+ computeDiff,
+ type DbArtist,
+ type DbSet,
+ type DbStage,
+} from "./diff.ts";
+
+serve(async (req) => {
+ if (req.method === "OPTIONS") {
+ return new Response("ok", { headers: corsHeaders });
+ }
+
+ const auth = await requireAdmin(req);
+ if (auth.errorResponse) {
+ return new Response(auth.errorResponse.body, {
+ status: auth.errorResponse.status,
+ headers: { ...corsHeaders, "Content-Type": "application/json" },
+ });
+ }
+
+ try {
+ const body = await req.json();
+ const { festivalEditionId, timezone, rows } = body;
+
+ if (!festivalEditionId || !timezone || !Array.isArray(rows)) {
+ return new Response(
+ JSON.stringify({
+ error: "Missing required fields: festivalEditionId, timezone, rows",
+ }),
+ {
+ status: 400,
+ headers: { ...corsHeaders, "Content-Type": "application/json" },
+ },
+ );
+ }
+
+ const db = getAdminClient();
+
+ const [stagesRes, setsRes, artistsRes] = await Promise.all([
+ db
+ .from("stages")
+ .select("id, name")
+ .eq("festival_edition_id", festivalEditionId)
+ .eq("archived", false),
+ db
+ .from("sets")
+ .select(
+ "id, name, description, stage_id, time_start, time_end, set_artists(artist_id, artists(id, name, slug))",
+ )
+ .eq("festival_edition_id", festivalEditionId)
+ .eq("archived", false),
+ db.from("artists").select("id, name, slug").eq("archived", false),
+ ]);
+
+ if (stagesRes.error) throw stagesRes.error;
+ if (setsRes.error) throw setsRes.error;
+ if (artistsRes.error) throw artistsRes.error;
+
+ const result = computeDiff(
+ rows,
+ (stagesRes.data ?? []) as DbStage[],
+ (setsRes.data ?? []) as DbSet[],
+ (artistsRes.data ?? []) as DbArtist[],
+ timezone,
+ );
+
+ return new Response(JSON.stringify(result), {
+ headers: { ...corsHeaders, "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ console.error("diff-schedule error:", error);
+ const message = error instanceof Error ? error.message : String(error);
+ return new Response(JSON.stringify({ error: message }), {
+ status: 500,
+ headers: { ...corsHeaders, "Content-Type": "application/json" },
+ });
+ }
+});
diff --git a/supabase/migrations/20260509142022_commit_schedule_rpc.sql b/supabase/migrations/20260509142022_commit_schedule_rpc.sql
new file mode 100644
index 00000000..c012412d
--- /dev/null
+++ b/supabase/migrations/20260509142022_commit_schedule_rpc.sql
@@ -0,0 +1,257 @@
+-- Add unique constraint on artists.slug (required for ON CONFLICT upsert in commit_schedule).
+-- Deduplicate first: append the full id (guaranteed unique) to any slug with collisions,
+-- keeping the row with the lowest id on its original slug.
+UPDATE public.artists a
+SET slug = a.slug || '-' || a.id::text
+WHERE a.id IN (
+ SELECT id
+ FROM (
+ SELECT id, ROW_NUMBER() OVER (PARTITION BY slug ORDER BY id) AS rn
+ FROM public.artists
+ ) ranked
+ WHERE rn > 1
+);
+
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_constraint WHERE conname = 'artists_slug_unique'
+ ) THEN
+ ALTER TABLE public.artists
+ ADD CONSTRAINT artists_slug_unique UNIQUE (slug);
+ END IF;
+END$$;
+
+-- Add unique constraint on stages(festival_edition_id, name) for upsert.
+-- Same dedup approach: any (edition, name) collisions get the offending row's
+-- id suffixed onto the stage name.
+UPDATE public.stages s
+SET name = s.name || ' (' || s.id::text || ')'
+WHERE s.id IN (
+ SELECT id
+ FROM (
+ SELECT id,
+ ROW_NUMBER() OVER (PARTITION BY festival_edition_id, name ORDER BY id) AS rn
+ FROM public.stages
+ ) ranked
+ WHERE rn > 1
+);
+
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1
+ FROM pg_constraint
+ WHERE conname IN ('stages_edition_name_unique', 'stages_name_festival_edition_id_key')
+ ) THEN
+ ALTER TABLE public.stages
+ ADD CONSTRAINT stages_edition_name_unique UNIQUE (festival_edition_id, name);
+ END IF;
+END$$;
+
+-- Helpers for commit_schedule. Named with the commit_schedule__ prefix so it
+-- is obvious they're internal to that RPC.
+
+CREATE OR REPLACE FUNCTION public.commit_schedule__slugify(p_name TEXT)
+RETURNS TEXT
+LANGUAGE sql
+IMMUTABLE
+SET search_path = public
+AS $$
+ -- Matches src/lib/slug.ts generateSlug and diff-schedule's toSlug:
+ -- replace non-alphanumeric runs with a single hyphen, trim, collapse.
+ SELECT TRIM(
+ BOTH '-' FROM
+ REGEXP_REPLACE(
+ REGEXP_REPLACE(LOWER(TRIM(p_name)), '[^a-z0-9]+', '-', 'g'),
+ '-+', '-', 'g'
+ )
+ );
+$$;
+
+CREATE OR REPLACE FUNCTION public.commit_schedule__resolve_stage_id(
+ p_festival_edition_id UUID,
+ p_stage_name TEXT
+)
+RETURNS UUID
+LANGUAGE plpgsql
+STABLE
+SET search_path = public
+AS $$
+DECLARE
+ v_stage_id UUID;
+BEGIN
+ IF p_stage_name IS NULL THEN
+ RETURN NULL;
+ END IF;
+
+ SELECT s.id
+ INTO v_stage_id
+ FROM stages s
+ WHERE s.festival_edition_id = p_festival_edition_id
+ AND s.name = p_stage_name
+ LIMIT 1;
+
+ IF v_stage_id IS NULL THEN
+ RAISE EXCEPTION 'Stage % not found in edition %', p_stage_name, p_festival_edition_id;
+ END IF;
+
+ RETURN v_stage_id;
+END;
+$$;
+
+CREATE OR REPLACE FUNCTION public.commit_schedule__parse_ts(p_value TEXT)
+RETURNS TIMESTAMPTZ
+LANGUAGE sql
+STABLE
+AS $$
+ SELECT CASE WHEN p_value IS NOT NULL THEN p_value::TIMESTAMPTZ END;
+$$;
+
+CREATE OR REPLACE FUNCTION public.commit_schedule__sync_set_artists(
+ p_set_id UUID,
+ p_festival_edition_id UUID,
+ p_artist_slugs JSONB
+)
+RETURNS VOID
+LANGUAGE plpgsql
+SET search_path = public
+AS $$
+BEGIN
+ -- Edition-scoped delete defends against a forged set id even if the caller
+ -- already verified it.
+ DELETE FROM set_artists sa
+ USING sets s
+ WHERE sa.set_id = s.id
+ AND s.id = p_set_id
+ AND s.festival_edition_id = p_festival_edition_id;
+
+ INSERT INTO set_artists (set_id, artist_id)
+ SELECT p_set_id, a.id
+ FROM jsonb_array_elements_text(p_artist_slugs) AS slug_val
+ JOIN artists a ON a.slug = slug_val
+ ON CONFLICT (set_id, artist_id) DO NOTHING;
+END;
+$$;
+
+-- RPC: commit_schedule
+-- Executes a fully resolved schedule import inside a single transaction.
+-- Called by the commit-schedule Edge Function using the service role key.
+CREATE OR REPLACE FUNCTION public.commit_schedule(
+ p_festival_edition_id UUID,
+ p_user_id UUID,
+ p_artists_to_create JSONB, -- [{ name, slug }]
+ p_stages_to_create JSONB, -- [{ name }]
+ p_sets_to_create JSONB, -- [{ name, description, stageName, timeStart, timeEnd, artistSlugs }]
+ p_sets_to_update JSONB, -- [{ id, name, description, stageName, timeStart, timeEnd, artistSlugs }]
+ p_set_ids_to_archive UUID[]
+)
+RETURNS JSONB
+LANGUAGE plpgsql
+SET search_path = public
+AS $$
+DECLARE
+ v_set_elem JSONB;
+ v_new_set_id UUID;
+ v_set_id UUID;
+ v_row_count INT;
+ v_sets_created INT := 0;
+ v_sets_updated INT := 0;
+ v_sets_archived INT := 0;
+BEGIN
+ -- 1. Upsert new artists (matched on slug)
+ INSERT INTO artists (name, slug)
+ SELECT elem->>'name', elem->>'slug'
+ FROM jsonb_array_elements(p_artists_to_create) AS elem
+ ON CONFLICT (slug) DO UPDATE SET name = EXCLUDED.name;
+
+ -- 2. Upsert new stages (matched on edition + name)
+ INSERT INTO stages (festival_edition_id, name)
+ SELECT p_festival_edition_id, elem->>'name'
+ FROM jsonb_array_elements(p_stages_to_create) AS elem
+ ON CONFLICT (festival_edition_id, name) DO NOTHING;
+
+ -- 3. Update existing sets
+ FOR v_set_elem IN SELECT value FROM jsonb_array_elements(p_sets_to_update) LOOP
+ v_set_id := (v_set_elem->>'id')::UUID;
+
+ UPDATE sets
+ SET
+ name = v_set_elem->>'name',
+ description = NULLIF(v_set_elem->>'description', ''),
+ stage_id = commit_schedule__resolve_stage_id(
+ p_festival_edition_id, v_set_elem->>'stageName'
+ ),
+ time_start = commit_schedule__parse_ts(v_set_elem->>'timeStart'),
+ time_end = commit_schedule__parse_ts(v_set_elem->>'timeEnd'),
+ updated_at = NOW()
+ WHERE id = v_set_id
+ AND festival_edition_id = p_festival_edition_id;
+
+ GET DIAGNOSTICS v_row_count = ROW_COUNT;
+
+ IF v_row_count = 0 THEN
+ RAISE EXCEPTION 'Set % not found in edition %', v_set_id, p_festival_edition_id;
+ END IF;
+
+ v_sets_updated := v_sets_updated + v_row_count;
+
+ PERFORM commit_schedule__sync_set_artists(
+ v_set_id, p_festival_edition_id, v_set_elem->'artistSlugs'
+ );
+ END LOOP;
+
+ -- 4. Insert new sets
+ FOR v_set_elem IN SELECT value FROM jsonb_array_elements(p_sets_to_create) LOOP
+ INSERT INTO sets (
+ festival_edition_id, name, slug, description, stage_id,
+ time_start, time_end, created_by
+ )
+ VALUES (
+ p_festival_edition_id,
+ v_set_elem->>'name',
+ commit_schedule__slugify(v_set_elem->>'name'),
+ NULLIF(v_set_elem->>'description', ''),
+ commit_schedule__resolve_stage_id(
+ p_festival_edition_id, v_set_elem->>'stageName'
+ ),
+ commit_schedule__parse_ts(v_set_elem->>'timeStart'),
+ commit_schedule__parse_ts(v_set_elem->>'timeEnd'),
+ p_user_id
+ )
+ RETURNING id INTO v_new_set_id;
+
+ -- Always suffix the slug with a short id chunk so two sets with the same
+ -- name (common when an artist plays multiple days) don't collide on the
+ -- (edition, slug) lookup used by the set detail pages.
+ UPDATE sets
+ SET slug = slug || '-' || SUBSTRING(v_new_set_id::text, 1, 8)
+ WHERE id = v_new_set_id;
+
+ v_sets_created := v_sets_created + 1;
+
+ PERFORM commit_schedule__sync_set_artists(
+ v_new_set_id, p_festival_edition_id, v_set_elem->'artistSlugs'
+ );
+ END LOOP;
+
+ -- 5. Archive orphaned sets
+ IF p_set_ids_to_archive IS NOT NULL AND array_length(p_set_ids_to_archive, 1) > 0 THEN
+ UPDATE sets
+ SET archived = true, updated_at = NOW()
+ WHERE id = ANY(p_set_ids_to_archive)
+ AND festival_edition_id = p_festival_edition_id;
+
+ GET DIAGNOSTICS v_sets_archived = ROW_COUNT;
+ END IF;
+
+ RETURN jsonb_build_object(
+ 'setsCreated', v_sets_created,
+ 'setsUpdated', v_sets_updated,
+ 'setsArchived', v_sets_archived
+ );
+
+EXCEPTION WHEN OTHERS THEN
+ RAISE EXCEPTION 'commit_schedule failed: %', SQLERRM;
+END;
+$$;
diff --git a/vitest.config.ts b/vitest.config.ts
index e8ed08e3..7520c2f2 100644
--- a/vitest.config.ts
+++ b/vitest.config.ts
@@ -17,6 +17,7 @@ export default defineConfig({
"**/.{idea,git,cache,output,temp}/**",
"**/{karma,rollup,webpack,vite,vitest,jest,ava,babel,nyc,cypress,tsup,build,eslint,prettier}.config.*",
"**/tests/e2e/**", // Exclude Playwright E2E tests
+ "supabase/**", // Exclude Deno-only Edge Function tests
],
},
resolve: {