From 50aa460c2ac114230a820b6f69f9903bcc57c099 Mon Sep 17 00:00:00 2001 From: Adam Shiervani Date: Tue, 28 Apr 2026 10:22:31 +0200 Subject: [PATCH 1/3] feat(sync-releases): show artifact details, verify GPG, allow custom rollout MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The previous production prompt was a single y/N with no artifact context, no way to override the hardcoded 10% rollout, and no signature verification. An operator confirming a release this way had to trust that the right files were in S3 and that the .sig was issued by the OTA root key — neither was visible. Changes: * Print full artifact summary before the prompt: URL, sha256, compatibleSkus, and signature status for each artifact. * Verify each .sig file with gpg --status-fd=1 and check the primary key fingerprint against OTA_ROOT_KEY_FPR (mirrored from rv1106-system's release_r2.sh). Reports valid / wrong-root / invalid / missing-pubkey / gpg-unavailable / absent, with a loud WARNING line for wrong-root and invalid signatures so the operator cannot miss them. * Print the latest already-synced release of the same type before the prompt so the operator can confirm this is the next expected version. * Add an interactive rollout-percentage prompt with 10% default, validated to 0-100, replacing the hardcoded 10. * Add an `a`/`abort` answer alongside y/N so operators can stop a multi- release sync mid-run when they spot something wrong, instead of having to N through every remaining version. * Print the DB target and bucket as the first line of main() so a wrong .env.production selection is obvious before any prompts fire. * Print a final run summary with counters: created / skipped-by-user / already-synced / no-artifacts, plus an "aborted at " line when the run was cut short. * Add `npm run sync-releases:production` script and ignore .env.production. 
Verification path runs only when NODE_ENV=production, so non-prod runs and the test suite never spawn gpg or download artifacts. All 52 existing tests still pass; tsc build is clean. --- .gitignore | 1 + package.json | 1 + scripts/sync-releases.ts | 433 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 425 insertions(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index f34b126..2b000c9 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ node_modules dist/ .env .env.development +.env.production diff --git a/package.json b/package.json index a8648af..4cdeb56 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,7 @@ "prisma-migrate": "prisma migrate deploy", "seed": "NODE_ENV=development node -r ts-node/register --env-file=.env.development ./scripts/seed.ts", "sync-releases": "NODE_ENV=development node -r ts-node/register --env-file=.env.development ./scripts/sync-releases.ts", + "sync-releases:production": "NODE_ENV=production node -r ts-node/register --env-file=.env.production ./scripts/sync-releases.ts", "build": "tsc", "test": "vitest run", "test:watch": "vitest", diff --git a/scripts/sync-releases.ts b/scripts/sync-releases.ts index 8ae0624..7c1b290 100644 --- a/scripts/sync-releases.ts +++ b/scripts/sync-releases.ts @@ -1,3 +1,13 @@ +import { spawn } from "node:child_process"; +import { createWriteStream } from "node:fs"; +import { mkdtemp, rm } from "node:fs/promises"; +import { tmpdir } from "node:os"; +import path from "node:path"; +import { stdin, stdout } from "node:process"; +import { Readable } from "node:stream"; +import { pipeline } from "node:stream/promises"; +import { createInterface } from "node:readline/promises"; + import { GetObjectCommand, HeadObjectCommand, @@ -9,6 +19,8 @@ import semver from "semver"; import { streamToString } from "../src/helpers"; +const OTA_ROOT_KEY_FPR = "AF5A36A993D828FEFE7C18C2D1B9856C26A79E95"; + type ReleaseType = "app" | "system"; const DEFAULT_SKU = "jetkvm-v2"; @@ -41,6 +53,346 @@ function 
legacyCompatibleSkus(): string[] { return [DEFAULT_SKU]; } +const DEFAULT_ROLLOUT_PERCENTAGE = 10; + +type ReleaseOutcome = + | "created" + | "already-synced" + | "no-artifacts" + | "skipped" + | "aborted"; + +type ReleaseDecision = + | { kind: "create"; rolloutPercentage: number } + | { kind: "skip" } + | { kind: "abort" }; + +interface LatestExistingRelease { + version: string; + rolloutPercentage: number; +} + +type SignatureStatus = + | { kind: "absent" } + | { kind: "valid"; signingFpr: string; rootFpr: string } + | { kind: "wrong-root"; signingFpr: string; rootFpr: string } + | { kind: "invalid"; reason: string } + | { kind: "missing-pubkey"; rootFpr?: string } + | { kind: "gpg-unavailable" }; + +interface ArtifactDisplayInfo { + artifact: ReleaseArtifactInput; + signature: SignatureStatus; +} + +function s3KeyFromArtifactUrl(artifactUrl: string): string { + return decodeURIComponent(new URL(artifactUrl).pathname.replace(/^\/+/, "")); +} + +function shortFpr(fpr: string): string { + // Keep the leading 16 hex chars (8 bytes) — enough to be unambiguous in a log + // line while staying readable. The full fingerprint is what we actually + // compare against; this is just for display. 
+ return fpr.slice(0, 16); +} + +function describeSignature(status: SignatureStatus): string { + switch (status.kind) { + case "absent": + return "NO (no .sig file in S3)"; + case "valid": + return `yes (root ${shortFpr(status.rootFpr)})`; + case "wrong-root": + return `WRONG ROOT (got ${shortFpr(status.rootFpr)}, expected ${shortFpr(OTA_ROOT_KEY_FPR)})`; + case "invalid": + return `INVALID (${status.reason})`; + case "missing-pubkey": + return `cannot verify (OTA root key ${shortFpr(OTA_ROOT_KEY_FPR)} not in local GPG keyring)`; + case "gpg-unavailable": + return "cannot verify (gpg not installed)"; + } +} + +async function downloadObjectToFile( + s3Client: S3Client, + bucketName: string, + key: string, + destPath: string, +): Promise<void> { + const response = await s3Client.send( + new GetObjectCommand({ Bucket: bucketName, Key: key }), + ); + if (!response.Body) { + throw new Error(`Empty body from S3 for key ${key}`); + } + await pipeline(response.Body as Readable, createWriteStream(destPath)); +} + +function runGpgVerify( + sigPath: string, + artifactPath: string, +): Promise<{ exitCode: number; statusOutput: string; stderrOutput: string }> { + return new Promise((resolve, reject) => { + const proc = spawn( + "gpg", + ["--batch", "--status-fd=1", "--verify", sigPath, artifactPath], + { stdio: ["ignore", "pipe", "pipe"] }, + ); + let statusOutput = ""; + let stderrOutput = ""; + proc.stdout.on("data", chunk => (statusOutput += chunk.toString())); + proc.stderr.on("data", chunk => (stderrOutput += chunk.toString())); + proc.on("error", reject); + proc.on("close", exitCode => { + resolve({ exitCode: exitCode ?? 
-1, statusOutput, stderrOutput }); + }); + }); +} + +interface GpgStatus { + validSig?: { signingFpr: string; rootFpr: string }; + noPubkey?: boolean; + badSig?: boolean; +} + +function parseGpgStatus(statusOutput: string): GpgStatus { + const result: GpgStatus = {}; + for (const rawLine of statusOutput.split("\n")) { + const line = rawLine.replace(/^\[GNUPG:\]\s+/, "").trim(); + + if (line.startsWith("VALIDSIG ")) { + // VALIDSIG <sig-fpr> <sig-creation-date> <sig-timestamp> <expire-timestamp> <sig-version> <reserved> <pubkey-algo> <hash-algo> <sig-class> <primary-key-fpr> + // + // Fields are space-separated; index 10 is the primary key fingerprint. + const parts = line.split(/\s+/); + if (parts.length >= 11) { + result.validSig = { signingFpr: parts[1], rootFpr: parts[10] }; + } + } else if (line.startsWith("NO_PUBKEY ") || line.startsWith("ERRSIG ")) { + // ERRSIG with rc=9 means missing pubkey too; treat both as no-pubkey + // unless we already saw an explicit BADSIG below. + result.noPubkey = true; + } else if (line.startsWith("BADSIG ")) { + result.badSig = true; + } + } + return result; +} + +async function verifySignature( + s3Client: S3Client, + bucketName: string, + artifactKey: string, +): Promise<SignatureStatus> { + const sigKey = `${artifactKey}.sig`; + if (!(await s3ObjectExists(s3Client, bucketName, sigKey))) { + return { kind: "absent" }; + } + + const dir = await mkdtemp(path.join(tmpdir(), "sync-releases-verify-")); + const sigPath = path.join(dir, "artifact.sig"); + const artifactPath = path.join(dir, "artifact"); + + try { + await Promise.all([ + downloadObjectToFile(s3Client, bucketName, sigKey, sigPath), + downloadObjectToFile(s3Client, bucketName, artifactKey, artifactPath), + ]); + + let result: Awaited<ReturnType<typeof runGpgVerify>>; + try { + result = await runGpgVerify(sigPath, artifactPath); + } catch (err: any) { + if (err?.code === "ENOENT") { + return { kind: "gpg-unavailable" }; + } + throw err; + } + + const parsed = parseGpgStatus(result.statusOutput); + + if (parsed.badSig) { + return { kind: "invalid", reason: "BADSIG (signature does not match)" }; + } + if (parsed.validSig) { + const rootFprUpper = 
parsed.validSig.rootFpr.toUpperCase(); + if (rootFprUpper !== OTA_ROOT_KEY_FPR.toUpperCase()) { + return { kind: "wrong-root", ...parsed.validSig }; + } + return { kind: "valid", ...parsed.validSig }; + } + if (parsed.noPubkey) { + return { kind: "missing-pubkey" }; + } + const stderrFirstLine = + result.stderrOutput.split("\n").find(l => l.trim().length > 0)?.trim() ?? + `gpg exited ${result.exitCode}`; + return { kind: "invalid", reason: stderrFirstLine }; + } finally { + await rm(dir, { recursive: true, force: true }); + } +} + +async function loadArtifactDisplayInfo( + clients: Pick<SyncClients, "s3Client">, + config: SyncConfig, + artifacts: ReleaseArtifactInput[], +): Promise<ArtifactDisplayInfo[]> { + return Promise.all( + artifacts.map(async artifact => { + const signature = await verifySignature( + clients.s3Client, + config.bucketName, + s3KeyFromArtifactUrl(artifact.url), + ); + return { artifact, signature }; + }), + ); +} + +async function findLatestExistingRelease( + prisma: PrismaClient, + type: ReleaseType, +): Promise<LatestExistingRelease | null> { + const releases = await prisma.release.findMany({ + where: { type }, + select: { version: true, rolloutPercentage: true }, + }); + if (releases.length === 0) return null; + + const latestVersion = semver.maxSatisfying( + releases.map(r => r.version), + "*", + { includePrerelease: true }, + ); + if (!latestVersion) return null; + + return releases.find(r => r.version === latestVersion) ?? 
null; +} + +function printArtifactSummary( + type: ReleaseType, + version: string, + artifactInfos: ArtifactDisplayInfo[], + latestExisting: LatestExistingRelease | null, +): void { + console.log(""); + console.log( + `[sync-releases] About to create production ${type} release ${version}:`, + ); + + if (latestExisting) { + console.log( + ` latest existing: ${latestExisting.version} at ${latestExisting.rolloutPercentage}% rollout`, + ); + } else { + console.log(` latest existing: (none — this will be the first ${type} release)`); + } + + console.log(` artifacts (${artifactInfos.length}):`); + artifactInfos.forEach(({ artifact, signature }, index) => { + console.log(` [${index + 1}] url: ${artifact.url}`); + console.log(` hash: ${artifact.hash}`); + console.log(` skus: ${artifact.compatibleSkus.join(", ")}`); + console.log(` signed: ${describeSignature(signature)}`); + }); + + const warnings = artifactInfos.flatMap(({ signature }, index) => { + const label = `artifact [${index + 1}]`; + switch (signature.kind) { + case "wrong-root": + return [ + `WARNING: ${label} signed by an UNTRUSTED root (got ${signature.rootFpr}, expected ${OTA_ROOT_KEY_FPR}). Devices that enforce the OTA root will reject this firmware.`, + ]; + case "invalid": + return [ + `WARNING: ${label} signature is INVALID: ${signature.reason}. 
Do not publish unless you have verified this manually.`, + ]; + default: + return []; + } + }); + + if (warnings.length > 0) { + console.log(""); + for (const warning of warnings) { + console.log(` ${warning}`); + } + } + console.log(""); +} + +async function promptRolloutPercentage( + readline: ReturnType<typeof createInterface>, +): Promise<number> { + while (true) { + const answer = ( + await readline.question( + ` Rollout percentage [${DEFAULT_ROLLOUT_PERCENTAGE}]: `, + ) + ).trim(); + + if (answer === "") { + return DEFAULT_ROLLOUT_PERCENTAGE; + } + + const parsed = Number(answer); + if (!Number.isInteger(parsed) || parsed < 0 || parsed > 100) { + console.log(" Error: enter an integer between 0 and 100"); + continue; + } + return parsed; + } +} + +async function confirmProductionCreate( + clients: SyncClients, + config: SyncConfig, + type: ReleaseType, + version: string, + artifacts: ReleaseArtifactInput[], +): Promise<ReleaseDecision> { + if (process.env.NODE_ENV !== "production") { + return { kind: "create", rolloutPercentage: DEFAULT_ROLLOUT_PERCENTAGE }; + } + + if (!stdin.isTTY || !stdout.isTTY) { + throw new Error( + "Production release sync requires an interactive terminal for DB write confirmation.", + ); + } + + const [artifactInfos, latestExisting] = await Promise.all([ + loadArtifactDisplayInfo(clients, config, artifacts), + findLatestExistingRelease(clients.prisma, type), + ]); + + printArtifactSummary(type, version, artifactInfos, latestExisting); + + const readline = createInterface({ input: stdin, output: stdout }); + try { + const rolloutPercentage = await promptRolloutPercentage(readline); + + const confirmation = ( + await readline.question( + ` Create production ${type} release ${version} at ${rolloutPercentage}% rollout? 
[y/N/a (abort run)] `, + ) + ) + .trim() + .toLowerCase(); + + if (["a", "abort"].includes(confirmation)) { + return { kind: "abort" }; + } + if (!["y", "yes"].includes(confirmation)) { + return { kind: "skip" }; + } + return { kind: "create", rolloutPercentage }; + } finally { + readline.close(); + } +} + function isS3NotFound(error: any): boolean { return ( error.name === "NotFound" || @@ -184,35 +536,52 @@ async function listStableVersions( } async function syncRelease( - prisma: PrismaClient, + clients: SyncClients, + config: SyncConfig, type: ReleaseType, version: string, artifacts: ReleaseArtifactInput[], -): Promise<void> { +): Promise<ReleaseOutcome> { if (artifacts.length === 0) { console.log(`[sync-releases] ${type} ${version}: skipped, no compatible artifacts`); - return; + return "no-artifacts"; } // Sync only registers brand-new releases. Existing rows (rollout state, URLs, // artifact compatibility) are left untouched — backfills/repairs are handled // by one-off scripts so a routine sync run can never rewrite production data. 
- const existing = await prisma.release.findUnique({ + const existing = await clients.prisma.release.findUnique({ where: { version_type: { version, type } }, select: { id: true }, }); if (existing) { console.log(`[sync-releases] ${type} ${version}: already synced, skipping`); - return; + return "already-synced"; + } + + const decision = await confirmProductionCreate( + clients, + config, + type, + version, + artifacts, + ); + if (decision.kind === "abort") { + console.log(`[sync-releases] ${type} ${version}: aborted by user`); + return "aborted"; + } + if (decision.kind === "skip") { + console.log(`[sync-releases] ${type} ${version}: skipped by user`); + return "skipped"; } const primaryArtifact = artifacts[0]; - await prisma.release.create({ + await clients.prisma.release.create({ data: { version, type, - rolloutPercentage: 10, + rolloutPercentage: decision.rolloutPercentage, url: primaryArtifact.url, hash: primaryArtifact.hash, artifacts: { @@ -226,25 +595,69 @@ }); console.log( - `[sync-releases] ${type} ${version}: created with ${artifacts.length} artifact(s)`, + `[sync-releases] ${type} ${version}: created with ${artifacts.length} artifact(s) at ${decision.rolloutPercentage}% rollout`, ); + return "created"; } export async function syncReleases( clients: SyncClients, config: SyncConfig, ): Promise<void> { - for (const type of ["app", "system"] as const) { + const stats: Record<ReleaseOutcome, number> = { + created: 0, + "already-synced": 0, + "no-artifacts": 0, + skipped: 0, + aborted: 0, + }; + let abortedAt: { type: ReleaseType; version: string } | null = null; + + outer: for (const type of ["app", "system"] as const) { const versions = await listStableVersions(clients.s3Client, config.bucketName, type); for (const version of versions) { const artifacts = await collectReleaseArtifacts(clients, config, type, version); - await syncRelease(clients.prisma, type, version, artifacts); + const outcome = await syncRelease(clients, config, type, version, artifacts); + 
stats[outcome]++; + + if (outcome === "aborted") { + abortedAt = { type, version }; + break outer; + } } } + + if (abortedAt) { + console.log( + `[sync-releases] aborted at ${abortedAt.type} ${abortedAt.version}; remaining versions in this run were not processed`, + ); + } + console.log( + `[sync-releases] done: created=${stats.created} skipped-by-user=${stats.skipped} already-synced=${stats["already-synced"]} no-artifacts=${stats["no-artifacts"]}`, + ); +} + +function describeDbTarget(): string { + const raw = process.env.DATABASE_URL; + if (!raw) return "(DATABASE_URL not set)"; + + try { + const parsed = new URL(raw); + const host = parsed.hostname || "?"; + const port = parsed.port ? `:${parsed.port}` : ""; + const dbName = parsed.pathname.replace(/^\/+/, "") || "?"; + return `${host}${port}/${dbName}`; + } catch { + return "(unparseable DATABASE_URL)"; + } } async function main(): Promise<void> { + console.log( + `[sync-releases] env=${process.env.NODE_ENV ?? "(unset)"} db=${describeDbTarget()} bucket=${process.env.R2_BUCKET ?? "(unset)"}`, + ); + const prisma = new PrismaClient(); const s3Client = new S3Client({ endpoint: process.env.R2_ENDPOINT!, From e6cb7952d8c3847f649867b36070e513c1c641e3 Mon Sep 17 00:00:00 2001 From: Adam Shiervani Date: Tue, 28 Apr 2026 10:39:30 +0200 Subject: [PATCH 2/3] refactor: hoist objectKeyFromArtifactUrl into helpers Both src/releases.ts and scripts/sync-releases.ts had their own copy of the same URL-to-S3-key conversion. Moved into src/helpers.ts and imported from both call sites so a future change (e.g. CDN path prefix handling) only needs to land once. 
--- scripts/sync-releases.ts | 8 ++------ src/helpers.ts | 8 ++++++++ src/releases.ts | 6 +----- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/scripts/sync-releases.ts b/scripts/sync-releases.ts index 7c1b290..f462f17 100644 --- a/scripts/sync-releases.ts +++ b/scripts/sync-releases.ts @@ -17,7 +17,7 @@ import { import { PrismaClient } from "@prisma/client"; import semver from "semver"; -import { streamToString } from "../src/helpers"; +import { objectKeyFromArtifactUrl, streamToString } from "../src/helpers"; const OTA_ROOT_KEY_FPR = "AF5A36A993D828FEFE7C18C2D1B9856C26A79E95"; @@ -85,10 +85,6 @@ interface ArtifactDisplayInfo { signature: SignatureStatus; } -function s3KeyFromArtifactUrl(artifactUrl: string): string { - return decodeURIComponent(new URL(artifactUrl).pathname.replace(/^\/+/, "")); -} - function shortFpr(fpr: string): string { // Keep the leading 16 hex chars (8 bytes) — enough to be unambiguous in a log // line while staying readable. The full fingerprint is what we actually @@ -243,7 +239,7 @@ async function loadArtifactDisplayInfo( const signature = await verifySignature( clients.s3Client, config.bucketName, - s3KeyFromArtifactUrl(artifact.url), + objectKeyFromArtifactUrl(artifact.url), ); return { artifact, signature }; }), diff --git a/src/helpers.ts b/src/helpers.ts index 453e040..9d7a542 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -56,4 +56,12 @@ export function getDeviceRolloutBucket(deviceId: string): number { const hash = createHash("md5").update(deviceId).digest("hex"); const hashPrefix = hash.substring(0, 8); return parseInt(hashPrefix, 16) % 100; +} + +/** + * Extracts the S3 object key from an artifact URL like + * `https://cdn.example.com/app/0.5.0/jetkvm_app` → `app/0.5.0/jetkvm_app`. 
+ */ +export function objectKeyFromArtifactUrl(artifactUrl: string): string { + return decodeURIComponent(new URL(artifactUrl).pathname.replace(/^\/+/, "")); } \ No newline at end of file diff --git a/src/releases.ts b/src/releases.ts index 1985b6c..7088d55 100644 --- a/src/releases.ts +++ b/src/releases.ts @@ -13,6 +13,7 @@ import { LRUCache } from "lru-cache"; import { getDeviceRolloutBucket, + objectKeyFromArtifactUrl, streamToString, toSemverRange, verifyHash, @@ -388,11 +389,6 @@ function toRelease( return release as Release; } -function objectKeyFromArtifactUrl(artifactUrl: string): string { - const parsed = new URL(artifactUrl); - return decodeURIComponent(parsed.pathname.replace(/^\/+/, "")); -} - async function resolveSigUrlFromArtifactUrl( artifactUrl: string, ): Promise { From d51c76a576848c4416f212f86fe841a7c7cb184a Mon Sep 17 00:00:00 2001 From: Adam Shiervani Date: Tue, 28 Apr 2026 11:02:36 +0200 Subject: [PATCH 3/3] fix(sync-releases): only treat ERRSIG rc=9 as missing pubkey MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit GnuPG's ERRSIG line carries an `rc` reason code. rc=9 is the only one that means "we don't have the signer's key" — rc=4 (unsupported algorithm) and other codes are real verification failures. The previous implementation collapsed every ERRSIG into noPubkey, which would have falsely told the operator the OTA root key was missing when the actual problem was e.g. an unsupported pubkey algorithm. Now parses the rc field and surfaces non-9 ERRSIG codes as `invalid` with a human reason (rc=4 → "unsupported algorithm", others → "gpg error code N"). 
--- scripts/sync-releases.ts | 38 ++++++++++++++++++++++++++++++++++---- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/scripts/sync-releases.ts b/scripts/sync-releases.ts index f462f17..30e965a 100644 --- a/scripts/sync-releases.ts +++ b/scripts/sync-releases.ts @@ -148,9 +148,22 @@ function runGpgVerify( interface GpgStatus { validSig?: { signingFpr: string; rootFpr: string }; noPubkey?: boolean; + // ERRSIG `rc` field. GnuPG documents rc=4 (unsupported algorithm), + // rc=9 (missing public key); other codes are possible and we leave + // them as raw strings for the caller to format. + errSigRc?: string; badSig?: boolean; } +const ERRSIG_RC_REASONS: Record<string, string> = { + "4": "unsupported algorithm", + "9": "missing public key", +}; + +function describeErrSigRc(rc: string): string { + return ERRSIG_RC_REASONS[rc] ?? `gpg error code ${rc}`; +} + function parseGpgStatus(statusOutput: string): GpgStatus { const result: GpgStatus = {}; for (const rawLine of statusOutput.split("\n")) { @@ -164,10 +177,17 @@ function parseGpgStatus(statusOutput: string): GpgStatus { if (line.startsWith("VALIDSIG ")) { // VALIDSIG