diff --git a/apps/dev-playground/client/package-lock.json b/apps/dev-playground/client/package-lock.json index 80bd5ad40..0f2eae0eb 100644 --- a/apps/dev-playground/client/package-lock.json +++ b/apps/dev-playground/client/package-lock.json @@ -19,8 +19,10 @@ "class-variance-authority": "0.7.1", "clsx": "2.1.1", "lucide-react": "0.546.0", + "plotly.js": "^3.5.0", "react": "19.2.0", "react-dom": "19.2.0", + "react-plotly.js": "^2.6.0", "recharts": "3.4.1", "tailwind-merge": "3.3.1", "tailwindcss-animate": "1.0.7", @@ -33,6 +35,7 @@ "@types/node": "24.6.0", "@types/react": "19.2.2", "@types/react-dom": "19.2.2", + "@types/react-plotly.js": "^2.6.4", "@vitejs/plugin-react": "5.0.4", "autoprefixer": "10.4.21", "eslint": "9.36.0", @@ -47,35 +50,8 @@ "vite": "npm:rolldown-vite@7.1.14" } }, - "../../../packages/appkit-ui": { - "name": "@databricks/appkit-ui", - "version": "1.0.0", - "extraneous": true, - "dependencies": { - "clsx": "^2.1.1", - "shared": "workspace:*", - "tailwind-merge": "^3.4.0" - }, - "devDependencies": { - "@types/react": "^19.0.0", - "@types/react-dom": "^19.0.0", - "react": "^19.0.0", - "react-dom": "^19.0.0", - "recharts": "^3.4.1" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0", - "recharts": "^2.0.0 || ^3.0.0" - } - }, - "../../../packages/appkit-ui/dist": { - "extraneous": true - }, "node_modules/@alloc/quick-lru": { "version": "5.2.0", - "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", - "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", "dev": true, "license": "MIT", "engines": { @@ -87,8 +63,6 @@ }, "node_modules/@babel/code-frame": { "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", - "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", "license": "MIT", "dependencies": { 
"@babel/helper-validator-identifier": "^7.28.5", @@ -101,8 +75,6 @@ }, "node_modules/@babel/compat-data": { "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.4.tgz", - "integrity": "sha512-YsmSKC29MJwf0gF8Rjjrg5LQCmyh+j/nD8/eP7f+BeoQTKYqs9RoWbjGOdy0+1Ekr68RJZMUOPVQaQisnIo4Rw==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -110,8 +82,6 @@ }, "node_modules/@babel/core": { "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz", - "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==", "license": "MIT", "dependencies": { "@babel/code-frame": "^7.27.1", @@ -140,8 +110,6 @@ }, "node_modules/@babel/generator": { "version": "7.29.1", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", - "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", "license": "MIT", "dependencies": { "@babel/parser": "^7.29.0", @@ -156,8 +124,6 @@ }, "node_modules/@babel/helper-annotate-as-pure": { "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", - "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", "license": "MIT", "dependencies": { "@babel/types": "^7.27.3" @@ -168,8 +134,6 @@ }, "node_modules/@babel/helper-compilation-targets": { "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", - "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", "license": "MIT", "dependencies": { "@babel/compat-data": "^7.27.2", @@ -184,8 +148,6 @@ }, "node_modules/@babel/helper-create-class-features-plugin": { "version": "7.28.6", - "resolved": 
"https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.6.tgz", - "integrity": "sha512-dTOdvsjnG3xNT9Y0AUg1wAl38y+4Rl4sf9caSQZOXdNqVn+H+HbbJ4IyyHaIqNR6SW9oJpA/RuRjsjCw2IdIow==", "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.27.3", @@ -205,8 +167,6 @@ }, "node_modules/@babel/helper-globals": { "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", - "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -214,8 +174,6 @@ }, "node_modules/@babel/helper-member-expression-to-functions": { "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.28.5.tgz", - "integrity": "sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==", "license": "MIT", "dependencies": { "@babel/traverse": "^7.28.5", @@ -227,8 +185,6 @@ }, "node_modules/@babel/helper-module-imports": { "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", - "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", "license": "MIT", "dependencies": { "@babel/traverse": "^7.28.6", @@ -240,8 +196,6 @@ }, "node_modules/@babel/helper-module-transforms": { "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", - "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", "license": "MIT", "dependencies": { "@babel/helper-module-imports": "^7.28.6", @@ -257,8 +211,6 @@ }, "node_modules/@babel/helper-optimise-call-expression": { "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", - "integrity": "sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==", "license": "MIT", "dependencies": { "@babel/types": "^7.27.1" @@ -269,8 +221,6 @@ }, "node_modules/@babel/helper-plugin-utils": { "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", - "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -278,8 +228,6 @@ }, "node_modules/@babel/helper-replace-supers": { "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.28.6.tgz", - "integrity": "sha512-mq8e+laIk94/yFec3DxSjCRD2Z0TAjhVbEJY3UQrlwVo15Lmt7C2wAUbK4bjnTs4APkwsYLTahXRraQXhb1WCg==", "license": "MIT", "dependencies": { "@babel/helper-member-expression-to-functions": "^7.28.5", @@ -295,8 +243,6 @@ }, "node_modules/@babel/helper-skip-transparent-expression-wrappers": { "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", - "integrity": "sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==", "license": "MIT", "dependencies": { "@babel/traverse": "^7.27.1", @@ -308,8 +254,6 @@ }, "node_modules/@babel/helper-string-parser": { "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", - "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -317,8 +261,6 @@ }, "node_modules/@babel/helper-validator-identifier": { "version": "7.28.5", - "resolved": 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", - "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -326,8 +268,6 @@ }, "node_modules/@babel/helper-validator-option": { "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", - "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -335,8 +275,6 @@ }, "node_modules/@babel/helpers": { "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", - "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", "license": "MIT", "dependencies": { "@babel/template": "^7.27.2", @@ -348,8 +286,6 @@ }, "node_modules/@babel/parser": { "version": "7.29.2", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.2.tgz", - "integrity": "sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==", "license": "MIT", "dependencies": { "@babel/types": "^7.29.0" @@ -363,8 +299,6 @@ }, "node_modules/@babel/plugin-syntax-jsx": { "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.28.6.tgz", - "integrity": "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==", "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.28.6" @@ -378,8 +312,6 @@ }, "node_modules/@babel/plugin-syntax-typescript": { "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.28.6.tgz", - "integrity": 
"sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==", "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.28.6" @@ -393,8 +325,6 @@ }, "node_modules/@babel/plugin-transform-modules-commonjs": { "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.28.6.tgz", - "integrity": "sha512-jppVbf8IV9iWWwWTQIxJMAJCWBuuKx71475wHwYytrRGQ2CWiDvYlADQno3tcYpS/T2UUWFQp3nVtYfK/YBQrA==", "license": "MIT", "dependencies": { "@babel/helper-module-transforms": "^7.28.6", @@ -409,8 +339,6 @@ }, "node_modules/@babel/plugin-transform-react-jsx-self": { "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", - "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", "dev": true, "license": "MIT", "dependencies": { @@ -425,8 +353,6 @@ }, "node_modules/@babel/plugin-transform-react-jsx-source": { "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", - "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", "dev": true, "license": "MIT", "dependencies": { @@ -441,8 +367,6 @@ }, "node_modules/@babel/plugin-transform-typescript": { "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.6.tgz", - "integrity": "sha512-0YWL2RFxOqEm9Efk5PvreamxPME8OyY0wM5wh5lHjF+VtVhdneCWGzZeSqzOfiobVqQaNCd2z0tQvnI9DaPWPw==", "license": "MIT", "dependencies": { "@babel/helper-annotate-as-pure": "^7.27.3", @@ -460,8 +384,6 @@ }, "node_modules/@babel/preset-typescript": { "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.28.5.tgz", - 
"integrity": "sha512-+bQy5WOI2V6LJZpPVxY+yp66XdZ2yifu0Mc1aP5CQKgjn4QM5IN2i5fAZ4xKop47pr8rpVhiAeu+nDQa12C8+g==", "license": "MIT", "dependencies": { "@babel/helper-plugin-utils": "^7.27.1", @@ -479,8 +401,6 @@ }, "node_modules/@babel/template": { "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", - "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", "license": "MIT", "dependencies": { "@babel/code-frame": "^7.28.6", @@ -493,8 +413,6 @@ }, "node_modules/@babel/traverse": { "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", - "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", "license": "MIT", "dependencies": { "@babel/code-frame": "^7.29.0", @@ -511,8 +429,6 @@ }, "node_modules/@babel/types": { "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", - "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", "license": "MIT", "dependencies": { "@babel/helper-string-parser": "^7.27.1", @@ -522,10 +438,18 @@ "node": ">=6.9.0" } }, + "node_modules/@choojs/findup": { + "version": "0.2.1", + "license": "MIT", + "dependencies": { + "commander": "^2.15.1" + }, + "bin": { + "findup": "bin/findup.js" + } + }, "node_modules/@emnapi/core": { "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.5.0.tgz", - "integrity": "sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==", "license": "MIT", "optional": true, "dependencies": { @@ -535,8 +459,6 @@ }, "node_modules/@emnapi/runtime": { "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.5.0.tgz", - "integrity": "sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==", 
"license": "MIT", "optional": true, "dependencies": { @@ -545,8 +467,6 @@ }, "node_modules/@emnapi/wasi-threads": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", - "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", "license": "MIT", "optional": true, "dependencies": { @@ -619,8 +539,6 @@ }, "node_modules/@esbuild/darwin-arm64": { "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", - "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", "cpu": [ "arm64" ], @@ -971,8 +889,6 @@ }, "node_modules/@eslint-community/eslint-utils": { "version": "4.9.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", - "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", "dev": true, "license": "MIT", "dependencies": { @@ -990,8 +906,6 @@ }, "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { "version": "3.4.3", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", - "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1003,8 +917,6 @@ }, "node_modules/@eslint-community/regexpp": { "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", - "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", "dev": true, "license": "MIT", "engines": { @@ -1013,8 +925,6 @@ }, "node_modules/@eslint/config-array": { "version": "0.21.0", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", - "integrity": 
"sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1028,8 +938,6 @@ }, "node_modules/@eslint/config-helpers": { "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.1.tgz", - "integrity": "sha512-xR93k9WhrDYpXHORXpxVL5oHj3Era7wo6k/Wd8/IsQNnZUTzkGS29lyn3nAT05v6ltUuTFVCCYDEGfy2Or/sPA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1038,8 +946,6 @@ }, "node_modules/@eslint/core": { "version": "0.15.2", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.2.tgz", - "integrity": "sha512-78Md3/Rrxh83gCxoUc0EiciuOHsIITzLy53m3d9UyiW8y9Dj2D29FeETqyKA+BRK76tnTp6RXWb3pCay8Oyomg==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1051,8 +957,6 @@ }, "node_modules/@eslint/eslintrc": { "version": "3.3.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", - "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", "dev": true, "license": "MIT", "dependencies": { @@ -1075,8 +979,6 @@ }, "node_modules/@eslint/eslintrc/node_modules/globals": { "version": "14.0.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", - "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", "dev": true, "license": "MIT", "engines": { @@ -1088,8 +990,6 @@ }, "node_modules/@eslint/js": { "version": "9.36.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.36.0.tgz", - "integrity": "sha512-uhCbYtYynH30iZErszX78U+nR3pJU3RHGQ57NXy5QupD4SBVwDeU8TNBy+MjMngc1UyIW9noKqsRqfjQTBU2dw==", "dev": true, "license": "MIT", "engines": { @@ -1101,8 +1001,6 @@ }, "node_modules/@eslint/object-schema": { "version": "2.1.6", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", - "integrity": 
"sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1111,8 +1009,6 @@ }, "node_modules/@eslint/plugin-kit": { "version": "0.3.5", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.5.tgz", - "integrity": "sha512-Z5kJ+wU3oA7MMIqVR9tyZRtjYPr4OC004Q4Rw7pgOKUOKkJfZ3O24nz3WYfGRpMDNmcOi3TwQOmgm7B7Tpii0w==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1125,8 +1021,6 @@ }, "node_modules/@floating-ui/core": { "version": "1.7.3", - "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.3.tgz", - "integrity": "sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==", "license": "MIT", "dependencies": { "@floating-ui/utils": "^0.2.10" @@ -1134,8 +1028,6 @@ }, "node_modules/@floating-ui/dom": { "version": "1.7.4", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.4.tgz", - "integrity": "sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==", "license": "MIT", "dependencies": { "@floating-ui/core": "^1.7.3", @@ -1144,8 +1036,6 @@ }, "node_modules/@floating-ui/react-dom": { "version": "2.1.6", - "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.6.tgz", - "integrity": "sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw==", "license": "MIT", "dependencies": { "@floating-ui/dom": "^1.7.4" @@ -1157,14 +1047,10 @@ }, "node_modules/@floating-ui/utils": { "version": "0.2.10", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", - "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", "license": "MIT" }, "node_modules/@humanfs/core": { "version": "0.19.1", - "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", - "integrity": 
"sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1173,8 +1059,6 @@ }, "node_modules/@humanfs/node": { "version": "0.16.7", - "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", - "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1187,8 +1071,6 @@ }, "node_modules/@humanwhocodes/module-importer": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1201,8 +1083,6 @@ }, "node_modules/@humanwhocodes/retry": { "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", - "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1215,8 +1095,6 @@ }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.13", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", - "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "license": "MIT", "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", @@ -1225,8 +1103,6 @@ }, "node_modules/@jridgewell/remapping": { "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", - "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", "license": "MIT", "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", @@ -1235,8 +1111,6 @@ }, "node_modules/@jridgewell/resolve-uri": { "version": "3.1.2", - 
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", "license": "MIT", "engines": { "node": ">=6.0.0" @@ -1244,24 +1118,98 @@ }, "node_modules/@jridgewell/sourcemap-codec": { "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", - "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { "version": "0.3.31", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", - "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", "license": "MIT", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@mapbox/geojson-rewind": { + "version": "0.5.2", + "license": "ISC", + "dependencies": { + "get-stream": "^6.0.1", + "minimist": "^1.2.6" + }, + "bin": { + "geojson-rewind": "geojson-rewind" + } + }, + "node_modules/@mapbox/geojson-types": { + "version": "1.0.2", + "license": "ISC" + }, + "node_modules/@mapbox/jsonlint-lines-primitives": { + "version": "2.0.2", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@mapbox/mapbox-gl-supported": { + "version": "1.5.0", + "license": "BSD-3-Clause", + "peerDependencies": { + "mapbox-gl": ">=0.32.1 <2.0.0" + } + }, + "node_modules/@mapbox/point-geometry": { + "version": "0.1.0", + "license": "ISC" + }, + "node_modules/@mapbox/tiny-sdf": { + "version": "1.2.5", + "license": "BSD-2-Clause" + }, + "node_modules/@mapbox/unitbezier": { + "version": "0.0.0", + "license": "BSD-2-Clause" + }, + "node_modules/@mapbox/vector-tile": { + "version": "1.3.1", + "license": "BSD-3-Clause", + "dependencies": { + "@mapbox/point-geometry": "~0.1.0" + } + }, + 
"node_modules/@mapbox/whoots-js": { + "version": "3.1.0", + "license": "ISC", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@maplibre/maplibre-gl-style-spec": { + "version": "20.4.0", + "license": "ISC", + "dependencies": { + "@mapbox/jsonlint-lines-primitives": "~2.0.2", + "@mapbox/unitbezier": "^0.0.1", + "json-stringify-pretty-compact": "^4.0.0", + "minimist": "^1.2.8", + "quickselect": "^2.0.0", + "rw": "^1.3.3", + "tinyqueue": "^3.0.0" + }, + "bin": { + "gl-style-format": "dist/gl-style-format.mjs", + "gl-style-migrate": "dist/gl-style-migrate.mjs", + "gl-style-validate": "dist/gl-style-validate.mjs" + } + }, + "node_modules/@maplibre/maplibre-gl-style-spec/node_modules/@mapbox/unitbezier": { + "version": "0.0.1", + "license": "BSD-2-Clause" + }, + "node_modules/@maplibre/maplibre-gl-style-spec/node_modules/tinyqueue": { + "version": "3.0.0", + "license": "ISC" + }, "node_modules/@napi-rs/wasm-runtime": { "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.0.7.tgz", - "integrity": "sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw==", "license": "MIT", "optional": true, "dependencies": { @@ -1272,8 +1220,6 @@ }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "dev": true, "license": "MIT", "dependencies": { @@ -1286,8 +1232,6 @@ }, "node_modules/@nodelib/fs.stat": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", "dev": true, "license": "MIT", "engines": { @@ -1296,8 +1240,6 @@ }, "node_modules/@nodelib/fs.walk": { "version": "1.2.8", - "resolved": 
"https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "dev": true, "license": "MIT", "dependencies": { @@ -1310,8 +1252,6 @@ }, "node_modules/@oxc-project/runtime": { "version": "0.92.0", - "resolved": "https://registry.npmjs.org/@oxc-project/runtime/-/runtime-0.92.0.tgz", - "integrity": "sha512-Z7x2dZOmznihvdvCvLKMl+nswtOSVxS2H2ocar+U9xx6iMfTp0VGIrX6a4xB1v80IwOPC7dT1LXIJrY70Xu3Jw==", "license": "MIT", "engines": { "node": "^20.19.0 || >=22.12.0" @@ -1319,29 +1259,125 @@ }, "node_modules/@oxc-project/types": { "version": "0.93.0", - "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.93.0.tgz", - "integrity": "sha512-yNtwmWZIBtJsMr5TEfoZFDxIWV6OdScOpza/f5YxbqUMJk+j6QX3Cf3jgZShGEFYWQJ5j9mJ6jM0tZHu2J9Yrg==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/Boshen" } }, + "node_modules/@plotly/d3": { + "version": "3.8.2", + "license": "BSD-3-Clause" + }, + "node_modules/@plotly/d3-sankey": { + "version": "0.7.2", + "license": "BSD-3-Clause", + "dependencies": { + "d3-array": "1", + "d3-collection": "1", + "d3-shape": "^1.2.0" + } + }, + "node_modules/@plotly/d3-sankey-circular": { + "version": "0.33.1", + "license": "MIT", + "dependencies": { + "d3-array": "^1.2.1", + "d3-collection": "^1.0.4", + "d3-shape": "^1.2.0", + "elementary-circuits-directed-graph": "^1.0.4" + } + }, + "node_modules/@plotly/d3-sankey-circular/node_modules/d3-array": { + "version": "1.2.4", + "license": "BSD-3-Clause" + }, + "node_modules/@plotly/d3-sankey-circular/node_modules/d3-path": { + "version": "1.0.9", + "license": "BSD-3-Clause" + }, + "node_modules/@plotly/d3-sankey-circular/node_modules/d3-shape": { + "version": "1.3.7", + "license": "BSD-3-Clause", + "dependencies": { + "d3-path": "1" + } + }, + "node_modules/@plotly/d3-sankey/node_modules/d3-array": { + "version": "1.2.4", + "license": "BSD-3-Clause" + }, + 
"node_modules/@plotly/d3-sankey/node_modules/d3-path": { + "version": "1.0.9", + "license": "BSD-3-Clause" + }, + "node_modules/@plotly/d3-sankey/node_modules/d3-shape": { + "version": "1.3.7", + "license": "BSD-3-Clause", + "dependencies": { + "d3-path": "1" + } + }, + "node_modules/@plotly/mapbox-gl": { + "version": "1.13.4", + "license": "SEE LICENSE IN LICENSE.txt", + "dependencies": { + "@mapbox/geojson-rewind": "^0.5.2", + "@mapbox/geojson-types": "^1.0.2", + "@mapbox/jsonlint-lines-primitives": "^2.0.2", + "@mapbox/mapbox-gl-supported": "^1.5.0", + "@mapbox/point-geometry": "^0.1.0", + "@mapbox/tiny-sdf": "^1.1.1", + "@mapbox/unitbezier": "^0.0.0", + "@mapbox/vector-tile": "^1.3.1", + "@mapbox/whoots-js": "^3.1.0", + "csscolorparser": "~1.0.3", + "earcut": "^2.2.2", + "geojson-vt": "^3.2.1", + "gl-matrix": "^3.2.1", + "grid-index": "^1.1.0", + "murmurhash-js": "^1.0.0", + "pbf": "^3.2.1", + "potpack": "^1.0.1", + "quickselect": "^2.0.0", + "rw": "^1.3.3", + "supercluster": "^7.1.0", + "tinyqueue": "^2.0.3", + "vt-pbf": "^3.1.1" + }, + "engines": { + "node": ">=6.4.0" + } + }, + "node_modules/@plotly/point-cluster": { + "version": "3.1.9", + "license": "MIT", + "dependencies": { + "array-bounds": "^1.0.1", + "binary-search-bounds": "^2.0.4", + "clamp": "^1.0.1", + "defined": "^1.0.0", + "dtype": "^2.0.0", + "flatten-vertex-data": "^1.0.2", + "is-obj": "^1.0.1", + "math-log2": "^1.0.1", + "parse-rect": "^1.2.0", + "pick-by-alias": "^1.2.0" + } + }, + "node_modules/@plotly/regl": { + "version": "2.1.2", + "license": "MIT" + }, "node_modules/@radix-ui/number": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz", - "integrity": "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==", "license": "MIT" }, "node_modules/@radix-ui/primitive": { "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", - "integrity": 
"sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==", "license": "MIT" }, "node_modules/@radix-ui/react-arrow": { "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz", - "integrity": "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==", "license": "MIT", "dependencies": { "@radix-ui/react-primitive": "2.1.3" @@ -1363,8 +1399,6 @@ }, "node_modules/@radix-ui/react-collection": { "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.7.tgz", - "integrity": "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==", "license": "MIT", "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", @@ -1389,8 +1423,6 @@ }, "node_modules/@radix-ui/react-compose-refs": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz", - "integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==", "license": "MIT", "peerDependencies": { "@types/react": "*", @@ -1404,8 +1436,6 @@ }, "node_modules/@radix-ui/react-context": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", - "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", "license": "MIT", "peerDependencies": { "@types/react": "*", @@ -1419,8 +1449,6 @@ }, "node_modules/@radix-ui/react-direction": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz", - "integrity": "sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==", "license": "MIT", "peerDependencies": { "@types/react": "*", @@ -1434,8 +1462,6 @@ }, 
"node_modules/@radix-ui/react-dismissable-layer": { "version": "1.1.11", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz", - "integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==", "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.3", @@ -1461,8 +1487,6 @@ }, "node_modules/@radix-ui/react-dropdown-menu": { "version": "2.1.16", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.16.tgz", - "integrity": "sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==", "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.3", @@ -1490,8 +1514,6 @@ }, "node_modules/@radix-ui/react-focus-guards": { "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz", - "integrity": "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==", "license": "MIT", "peerDependencies": { "@types/react": "*", @@ -1505,8 +1527,6 @@ }, "node_modules/@radix-ui/react-focus-scope": { "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz", - "integrity": "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==", "license": "MIT", "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", @@ -1530,8 +1550,6 @@ }, "node_modules/@radix-ui/react-id": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz", - "integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==", "license": "MIT", "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" @@ -1548,8 +1566,6 @@ }, "node_modules/@radix-ui/react-menu": { "version": "2.1.16", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.16.tgz", - "integrity": "sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==", "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.3", @@ -1588,8 +1604,6 @@ }, "node_modules/@radix-ui/react-popper": { "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz", - "integrity": "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==", "license": "MIT", "dependencies": { "@floating-ui/react-dom": "^2.0.0", @@ -1620,8 +1634,6 @@ }, "node_modules/@radix-ui/react-portal": { "version": "1.1.9", - "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz", - "integrity": "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==", "license": "MIT", "dependencies": { "@radix-ui/react-primitive": "2.1.3", @@ -1644,8 +1656,6 @@ }, "node_modules/@radix-ui/react-presence": { "version": "1.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", - "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", "license": "MIT", "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", @@ -1668,8 +1678,6 @@ }, "node_modules/@radix-ui/react-primitive": { "version": "2.1.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", - "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", "license": "MIT", "dependencies": { "@radix-ui/react-slot": "1.2.3" @@ -1691,8 +1699,6 @@ }, "node_modules/@radix-ui/react-roving-focus": { "version": "1.1.11", - "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz", - "integrity": 
"sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==", "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.3", @@ -1722,8 +1728,6 @@ }, "node_modules/@radix-ui/react-select": { "version": "2.2.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-select/-/react-select-2.2.6.tgz", - "integrity": "sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ==", "license": "MIT", "dependencies": { "@radix-ui/number": "1.1.1", @@ -1765,8 +1769,6 @@ }, "node_modules/@radix-ui/react-slot": { "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", - "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", "license": "MIT", "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" @@ -1783,8 +1785,6 @@ }, "node_modules/@radix-ui/react-tooltip": { "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.2.8.tgz", - "integrity": "sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==", "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.3", @@ -1817,8 +1817,6 @@ }, "node_modules/@radix-ui/react-use-callback-ref": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz", - "integrity": "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==", "license": "MIT", "peerDependencies": { "@types/react": "*", @@ -1832,8 +1830,6 @@ }, "node_modules/@radix-ui/react-use-controllable-state": { "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", - "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", "license": "MIT", 
"dependencies": { "@radix-ui/react-use-effect-event": "0.0.2", @@ -1851,8 +1847,6 @@ }, "node_modules/@radix-ui/react-use-effect-event": { "version": "0.0.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz", - "integrity": "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==", "license": "MIT", "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" @@ -1869,8 +1863,6 @@ }, "node_modules/@radix-ui/react-use-escape-keydown": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz", - "integrity": "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==", "license": "MIT", "dependencies": { "@radix-ui/react-use-callback-ref": "1.1.1" @@ -1887,8 +1879,6 @@ }, "node_modules/@radix-ui/react-use-layout-effect": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", - "integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", "license": "MIT", "peerDependencies": { "@types/react": "*", @@ -1902,8 +1892,6 @@ }, "node_modules/@radix-ui/react-use-previous": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.1.1.tgz", - "integrity": "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==", "license": "MIT", "peerDependencies": { "@types/react": "*", @@ -1917,8 +1905,6 @@ }, "node_modules/@radix-ui/react-use-rect": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz", - "integrity": "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==", "license": "MIT", "dependencies": { "@radix-ui/rect": "1.1.1" @@ 
-1935,8 +1921,6 @@ }, "node_modules/@radix-ui/react-use-size": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz", - "integrity": "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==", "license": "MIT", "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" @@ -1953,8 +1937,6 @@ }, "node_modules/@radix-ui/react-visually-hidden": { "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.2.3.tgz", - "integrity": "sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==", "license": "MIT", "dependencies": { "@radix-ui/react-primitive": "2.1.3" @@ -1976,14 +1958,10 @@ }, "node_modules/@radix-ui/rect": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz", - "integrity": "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==", "license": "MIT" }, "node_modules/@reduxjs/toolkit": { "version": "2.10.1", - "resolved": "https://registry.npmjs.org/@reduxjs/toolkit/-/toolkit-2.10.1.tgz", - "integrity": "sha512-/U17EXQ9Do9Yx4DlNGU6eVNfZvFJfYpUtRRdLf19PbPjdWBxNlxGZXywQZ1p1Nz8nMkWplTI7iD/23m07nolDA==", "license": "MIT", "dependencies": { "@standard-schema/spec": "^1.0.0", @@ -2024,8 +2002,6 @@ }, "node_modules/@rolldown/binding-darwin-arm64": { "version": "1.0.0-beta.41", - "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-beta.41.tgz", - "integrity": "sha512-XGCzqfjdk7550PlyZRTBKbypXrB7ATtXhw/+bjtxnklLQs0mKP/XkQVOKyn9qGKSlvH8I56JLYryVxl0PCvSNw==", "cpu": [ "arm64" ], @@ -2232,15 +2208,11 @@ }, "node_modules/@rolldown/pluginutils": { "version": "1.0.0-beta.38", - "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.38.tgz", - "integrity": 
"sha512-N/ICGKleNhA5nc9XXQG/kkKHJ7S55u0x0XUJbbkmdCnFuoRkM1Il12q9q0eX19+M7KKUEPw/daUPIRnxhcxAIw==", "dev": true, "license": "MIT" }, "node_modules/@shikijs/core": { "version": "3.15.0", - "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.15.0.tgz", - "integrity": "sha512-8TOG6yG557q+fMsSVa8nkEDOZNTSxjbbR8l6lF2gyr6Np+jrPlslqDxQkN6rMXCECQ3isNPZAGszAfYoJOPGlg==", "dev": true, "license": "MIT", "dependencies": { @@ -2252,8 +2224,6 @@ }, "node_modules/@shikijs/engine-javascript": { "version": "3.15.0", - "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-3.15.0.tgz", - "integrity": "sha512-ZedbOFpopibdLmvTz2sJPJgns8Xvyabe2QbmqMTz07kt1pTzfEvKZc5IqPVO/XFiEbbNyaOpjPBkkr1vlwS+qg==", "dev": true, "license": "MIT", "dependencies": { @@ -2264,8 +2234,6 @@ }, "node_modules/@shikijs/engine-oniguruma": { "version": "3.15.0", - "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-3.15.0.tgz", - "integrity": "sha512-HnqFsV11skAHvOArMZdLBZZApRSYS4LSztk2K3016Y9VCyZISnlYUYsL2hzlS7tPqKHvNqmI5JSUJZprXloMvA==", "dev": true, "license": "MIT", "dependencies": { @@ -2275,8 +2243,6 @@ }, "node_modules/@shikijs/langs": { "version": "3.15.0", - "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.15.0.tgz", - "integrity": "sha512-WpRvEFvkVvO65uKYW4Rzxs+IG0gToyM8SARQMtGGsH4GDMNZrr60qdggXrFOsdfOVssG/QQGEl3FnJ3EZ+8w8A==", "dev": true, "license": "MIT", "dependencies": { @@ -2285,8 +2251,6 @@ }, "node_modules/@shikijs/themes": { "version": "3.15.0", - "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.15.0.tgz", - "integrity": "sha512-8ow2zWb1IDvCKjYb0KiLNrK4offFdkfNVPXb1OZykpLCzRU6j+efkY+Y7VQjNlNFXonSw+4AOdGYtmqykDbRiQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2295,8 +2259,6 @@ }, "node_modules/@shikijs/types": { "version": "3.15.0", - "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.15.0.tgz", - "integrity": 
"sha512-BnP+y/EQnhihgHy4oIAN+6FFtmfTekwOLsQbRw9hOKwqgNy8Bdsjq8B05oAt/ZgvIWWFrshV71ytOrlPfYjIJw==", "dev": true, "license": "MIT", "dependencies": { @@ -2306,27 +2268,19 @@ }, "node_modules/@shikijs/vscode-textmate": { "version": "10.0.2", - "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz", - "integrity": "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==", "dev": true, "license": "MIT" }, "node_modules/@standard-schema/spec": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", - "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", "license": "MIT" }, "node_modules/@standard-schema/utils": { "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@standard-schema/utils/-/utils-0.3.0.tgz", - "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==", "license": "MIT" }, "node_modules/@tailwindcss/node": { "version": "4.1.17", - "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.17.tgz", - "integrity": "sha512-csIkHIgLb3JisEFQ0vxr2Y57GUNYh447C8xzwj89U/8fdW8LhProdxvnVH6U8M2Y73QKiTIH+LWbK3V2BBZsAg==", "dev": true, "license": "MIT", "dependencies": { @@ -2341,8 +2295,6 @@ }, "node_modules/@tailwindcss/oxide": { "version": "4.1.17", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.17.tgz", - "integrity": "sha512-F0F7d01fmkQhsTjXezGBLdrl1KresJTcI3DB8EkScCldyKp3Msz4hub4uyYaVnk88BAS1g5DQjjF6F5qczheLA==", "dev": true, "license": "MIT", "engines": { @@ -2382,8 +2334,6 @@ }, "node_modules/@tailwindcss/oxide-darwin-arm64": { "version": "4.1.17", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.17.tgz", - "integrity": "sha512-EquyumkQweUBNk1zGEU/wfZo2qkp/nQKRZM8bUYO0J+Lums5+wl2CcG1f9BgAjn/u9pJzdYddHWBiFXJTcxmOg==", "cpu": [ "arm64" 
], @@ -2546,6 +2496,66 @@ "node": ">=14.0.0" } }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/core": { + "version": "1.6.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.1.0", + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/runtime": { + "version": "1.6.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@emnapi/wasi-threads": { + "version": "1.1.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { + "version": "1.0.7", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.5.0", + "@emnapi/runtime": "^1.5.0", + "@tybys/wasm-util": "^0.10.1" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/@tybys/wasm-util": { + "version": "0.10.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi/node_modules/tslib": { + "version": "2.8.1", + "dev": true, + "inBundle": true, + "license": "0BSD", + "optional": true + }, "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { "version": "4.1.17", "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.17.tgz", @@ -2582,8 +2592,6 @@ }, "node_modules/@tailwindcss/postcss": { "version": "4.1.17", - "resolved": "https://registry.npmjs.org/@tailwindcss/postcss/-/postcss-4.1.17.tgz", - "integrity": "sha512-+nKl9N9mN5uJ+M7dBOOCzINw94MPstNR/GtIhz1fpZysxL/4a+No64jCBD6CPN+bIHWFx3KWuu8XJRrj/572Dw==", "dev": true, "license": "MIT", "dependencies": { @@ 
-2596,8 +2604,6 @@ }, "node_modules/@tanstack/history": { "version": "1.133.19", - "resolved": "https://registry.npmjs.org/@tanstack/history/-/history-1.133.19.tgz", - "integrity": "sha512-Y866qBVVprdQkmO0/W1AFBI8tiQy398vFeIwP+VrRWCOzs3VecxSVzAvaOM4iHfkJz81fFAZMhLLjDVoPikD+w==", "license": "MIT", "engines": { "node": ">=12" @@ -2609,8 +2615,6 @@ }, "node_modules/@tanstack/react-router": { "version": "1.133.22", - "resolved": "https://registry.npmjs.org/@tanstack/react-router/-/react-router-1.133.22.tgz", - "integrity": "sha512-0tg2yoXVMvvgR3UdOhEX9ICmgZ/Ou/I8VOl07exSYEJYfyCr5nhtB/62F9NGbuUZVrJnCzc8Rz0e4/MYU18pIg==", "license": "MIT", "dependencies": { "@tanstack/history": "1.133.19", @@ -2634,8 +2638,6 @@ }, "node_modules/@tanstack/react-router-devtools": { "version": "1.133.22", - "resolved": "https://registry.npmjs.org/@tanstack/react-router-devtools/-/react-router-devtools-1.133.22.tgz", - "integrity": "sha512-YG498dyttY7yszEGo0iE4S3ymNrX+PSWXbP7zy94RhLf3mizupInxlKaypxhIU16toKiyOQzgFgOqi6v4RqfEQ==", "license": "MIT", "dependencies": { "@tanstack/router-devtools-core": "1.133.22", @@ -2656,8 +2658,6 @@ }, "node_modules/@tanstack/react-store": { "version": "0.7.7", - "resolved": "https://registry.npmjs.org/@tanstack/react-store/-/react-store-0.7.7.tgz", - "integrity": "sha512-qqT0ufegFRDGSof9D/VqaZgjNgp4tRPHZIJq2+QIHkMUtHjaJ0lYrrXjeIUJvjnTbgPfSD1XgOMEt0lmANn6Zg==", "license": "MIT", "dependencies": { "@tanstack/store": "0.7.7", @@ -2674,8 +2674,6 @@ }, "node_modules/@tanstack/react-table": { "version": "8.21.3", - "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.21.3.tgz", - "integrity": "sha512-5nNMTSETP4ykGegmVkhjcS8tTLW6Vl4axfEGQN3v0zdHYbK4UfoqfPChclTrJ4EoK9QynqAu9oUf8VEmrpZ5Ww==", "license": "MIT", "dependencies": { "@tanstack/table-core": "8.21.3" @@ -2694,8 +2692,6 @@ }, "node_modules/@tanstack/router-cli": { "version": "1.133.20", - "resolved": "https://registry.npmjs.org/@tanstack/router-cli/-/router-cli-1.133.20.tgz", - 
"integrity": "sha512-XFghXTGUDzBhLbe5UWikLDbcAcuDfqWtlJvyVhDl7rYV7Pvkdb8hGgbxsriUpaVKPx5nmud8JGINIW56lQUTyA==", "dev": true, "license": "MIT", "dependencies": { @@ -2716,8 +2712,6 @@ }, "node_modules/@tanstack/router-core": { "version": "1.133.20", - "resolved": "https://registry.npmjs.org/@tanstack/router-core/-/router-core-1.133.20.tgz", - "integrity": "sha512-cO8E6XA0vMX2BaPZck9kfgXK76e6Lqo13GmXEYxtXshmW8cIlgcLHhBDKnI/sCjIy9OPY2sV1qrGHtcxJy/4ew==", "license": "MIT", "dependencies": { "@tanstack/history": "1.133.19", @@ -2738,8 +2732,6 @@ }, "node_modules/@tanstack/router-devtools-core": { "version": "1.133.22", - "resolved": "https://registry.npmjs.org/@tanstack/router-devtools-core/-/router-devtools-core-1.133.22.tgz", - "integrity": "sha512-Pcpyrd3rlNA6C1jnL6jy4pC/8s4PN7270RM7+krnlKex1Rk3REgQ5LXAaAJJxOXS2coY14tiQtfQS3gx+H3b4w==", "license": "MIT", "dependencies": { "clsx": "^2.1.1", @@ -2767,8 +2759,6 @@ }, "node_modules/@tanstack/router-generator": { "version": "1.133.20", - "resolved": "https://registry.npmjs.org/@tanstack/router-generator/-/router-generator-1.133.20.tgz", - "integrity": "sha512-63lhmNNoVfqTgnSx5MUnEl/QBKSN6hA1sWLhZSQhCjLp9lrWbCXM8l9QpG3Tgzq/LdX7jjDMf783sUL4p4NbYw==", "license": "MIT", "dependencies": { "@tanstack/router-core": "1.133.20", @@ -2790,8 +2780,6 @@ }, "node_modules/@tanstack/router-plugin": { "version": "1.133.22", - "resolved": "https://registry.npmjs.org/@tanstack/router-plugin/-/router-plugin-1.133.22.tgz", - "integrity": "sha512-VVUazrxqFyon9bFSFY2mysgTbQAH5BV8kP8Gq1IHd7AxlboRW9tnj6TQcy8KGgG/KPCbKB9CFZtvSheKqrAVQg==", "license": "MIT", "dependencies": { "@babel/core": "^7.27.7", @@ -2843,8 +2831,6 @@ }, "node_modules/@tanstack/router-utils": { "version": "1.133.19", - "resolved": "https://registry.npmjs.org/@tanstack/router-utils/-/router-utils-1.133.19.tgz", - "integrity": "sha512-WEp5D2gPxvlLDRXwD/fV7RXjYtqaqJNXKB/L6OyZEbT+9BG/Ib2d7oG9GSUZNNMGPGYAlhBUOi3xutySsk6rxA==", "license": "MIT", "dependencies": { "@babel/core": 
"^7.27.4", @@ -2866,8 +2852,6 @@ }, "node_modules/@tanstack/store": { "version": "0.7.7", - "resolved": "https://registry.npmjs.org/@tanstack/store/-/store-0.7.7.tgz", - "integrity": "sha512-xa6pTan1bcaqYDS9BDpSiS63qa6EoDkPN9RsRaxHuDdVDNntzq3xNwR5YKTU/V3SkSyC9T4YVOPh2zRQN0nhIQ==", "license": "MIT", "funding": { "type": "github", @@ -2876,8 +2860,6 @@ }, "node_modules/@tanstack/table-core": { "version": "8.21.3", - "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.21.3.tgz", - "integrity": "sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg==", "license": "MIT", "engines": { "node": ">=12" @@ -2889,8 +2871,6 @@ }, "node_modules/@tanstack/virtual-file-routes": { "version": "1.133.19", - "resolved": "https://registry.npmjs.org/@tanstack/virtual-file-routes/-/virtual-file-routes-1.133.19.tgz", - "integrity": "sha512-IKwZENsK7owmW1Lm5FhuHegY/SyQ8KqtL/7mTSnzoKJgfzhrrf9qwKB1rmkKkt+svUuy/Zw3uVEpZtUzQruWtA==", "license": "MIT", "engines": { "node": ">=12" @@ -2900,10 +2880,70 @@ "url": "https://github.com/sponsors/tannerlinsley" } }, + "node_modules/@turf/area": { + "version": "7.3.5", + "license": "MIT", + "dependencies": { + "@turf/helpers": "7.3.5", + "@turf/meta": "7.3.5", + "@types/geojson": "^7946.0.10", + "tslib": "^2.8.1" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@turf/bbox": { + "version": "7.3.5", + "license": "MIT", + "dependencies": { + "@turf/helpers": "7.3.5", + "@turf/meta": "7.3.5", + "@types/geojson": "^7946.0.10", + "tslib": "^2.8.1" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@turf/centroid": { + "version": "7.3.5", + "license": "MIT", + "dependencies": { + "@turf/helpers": "7.3.5", + "@turf/meta": "7.3.5", + "@types/geojson": "^7946.0.10", + "tslib": "^2.8.1" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@turf/helpers": { + "version": "7.3.5", + "license": 
"MIT", + "dependencies": { + "@types/geojson": "^7946.0.10", + "tslib": "^2.8.1" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@turf/meta": { + "version": "7.3.5", + "license": "MIT", + "dependencies": { + "@turf/helpers": "7.3.5", + "@types/geojson": "^7946.0.10", + "tslib": "^2.8.1" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, "node_modules/@tybys/wasm-util": { "version": "0.10.1", - "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", - "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", "license": "MIT", "optional": true, "dependencies": { @@ -2912,8 +2952,6 @@ }, "node_modules/@types/babel__core": { "version": "7.20.5", - "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", - "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", "dev": true, "license": "MIT", "dependencies": { @@ -2926,8 +2964,6 @@ }, "node_modules/@types/babel__generator": { "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", - "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", "dev": true, "license": "MIT", "dependencies": { @@ -2936,8 +2972,6 @@ }, "node_modules/@types/babel__template": { "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", - "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", "dev": true, "license": "MIT", "dependencies": { @@ -2947,8 +2981,6 @@ }, "node_modules/@types/babel__traverse": { "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", - "integrity": 
"sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", "dev": true, "license": "MIT", "dependencies": { @@ -2957,26 +2989,18 @@ }, "node_modules/@types/d3-array": { "version": "3.2.2", - "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", - "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", "license": "MIT" }, "node_modules/@types/d3-color": { "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", - "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", "license": "MIT" }, "node_modules/@types/d3-ease": { "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", - "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", "license": "MIT" }, "node_modules/@types/d3-interpolate": { "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", - "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", "license": "MIT", "dependencies": { "@types/d3-color": "*" @@ -2984,14 +3008,10 @@ }, "node_modules/@types/d3-path": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", - "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", "license": "MIT" }, "node_modules/@types/d3-scale": { "version": "4.0.9", - "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", - "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", "license": "MIT", "dependencies": { "@types/d3-time": "*" @@ -2999,8 +3019,6 @@ }, "node_modules/@types/d3-shape": { "version": "3.1.7", - "resolved": 
"https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.7.tgz", - "integrity": "sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==", "license": "MIT", "dependencies": { "@types/d3-path": "*" @@ -3008,27 +3026,30 @@ }, "node_modules/@types/d3-time": { "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", - "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", "license": "MIT" }, "node_modules/@types/d3-timer": { "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", - "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", "license": "MIT" }, "node_modules/@types/estree": { "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", - "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "dev": true, "license": "MIT" }, + "node_modules/@types/geojson": { + "version": "7946.0.16", + "license": "MIT" + }, + "node_modules/@types/geojson-vt": { + "version": "3.2.5", + "license": "MIT", + "dependencies": { + "@types/geojson": "*" + } + }, "node_modules/@types/hast": { "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", - "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3037,15 +3058,24 @@ }, "node_modules/@types/json-schema": { "version": "7.0.15", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", - "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", "dev": true, "license": "MIT" }, + "node_modules/@types/mapbox__point-geometry": { + "version": "0.1.4", + "license": "MIT" + }, + 
"node_modules/@types/mapbox__vector-tile": { + "version": "1.3.4", + "license": "MIT", + "dependencies": { + "@types/geojson": "*", + "@types/mapbox__point-geometry": "*", + "@types/pbf": "*" + } + }, "node_modules/@types/mdast": { "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", - "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", "dev": true, "license": "MIT", "dependencies": { @@ -3054,18 +3084,23 @@ }, "node_modules/@types/node": { "version": "24.6.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.6.0.tgz", - "integrity": "sha512-F1CBxgqwOMc4GKJ7eY22hWhBVQuMYTtqI8L0FcszYcpYX0fzfDGpez22Xau8Mgm7O9fI+zA/TYIdq3tGWfweBA==", "devOptional": true, "license": "MIT", "dependencies": { "undici-types": "~7.13.0" } }, + "node_modules/@types/pbf": { + "version": "3.0.5", + "license": "MIT" + }, + "node_modules/@types/plotly.js": { + "version": "3.0.10", + "dev": true, + "license": "MIT" + }, "node_modules/@types/react": { "version": "19.2.2", - "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.2.tgz", - "integrity": "sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA==", "devOptional": true, "license": "MIT", "dependencies": { @@ -3074,31 +3109,39 @@ }, "node_modules/@types/react-dom": { "version": "19.2.2", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.2.tgz", - "integrity": "sha512-9KQPoO6mZCi7jcIStSnlOWn2nEF3mNmyr3rIAsGnAbQKYbRLyqmeSc39EVgtxXVia+LMT8j3knZLAZAh+xLmrw==", "devOptional": true, "license": "MIT", "peerDependencies": { "@types/react": "^19.2.0" } }, + "node_modules/@types/react-plotly.js": { + "version": "2.6.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/plotly.js": "*", + "@types/react": "*" + } + }, + "node_modules/@types/supercluster": { + "version": "7.1.3", + "license": "MIT", + "dependencies": { + "@types/geojson": "*" + } 
+ }, "node_modules/@types/unist": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", - "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", "dev": true, "license": "MIT" }, "node_modules/@types/use-sync-external-store": { "version": "0.0.6", - "resolved": "https://registry.npmjs.org/@types/use-sync-external-store/-/use-sync-external-store-0.0.6.tgz", - "integrity": "sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==", "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.45.0.tgz", - "integrity": "sha512-HC3y9CVuevvWCl/oyZuI47dOeDF9ztdMEfMH8/DW/Mhwa9cCLnK1oD7JoTVGW/u7kFzNZUKUoyJEqkaJh5y3Wg==", "dev": true, "license": "MIT", "dependencies": { @@ -3127,8 +3170,6 @@ }, "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { "version": "7.0.5", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", - "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", "dev": true, "license": "MIT", "engines": { @@ -3137,8 +3178,6 @@ }, "node_modules/@typescript-eslint/parser": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.45.0.tgz", - "integrity": "sha512-TGf22kon8KW+DeKaUmOibKWktRY8b2NSAZNdtWh798COm1NWx8+xJ6iFBtk3IvLdv6+LGLJLRlyhrhEDZWargQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3162,8 +3201,6 @@ }, "node_modules/@typescript-eslint/project-service": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.45.0.tgz", - "integrity": "sha512-3pcVHwMG/iA8afdGLMuTibGR7pDsn9RjDev6CCB+naRsSYs2pns5QbinF4Xqw6YC/Sj3lMrm/Im0eMfaa61WUg==", "dev": true, "license": "MIT", "dependencies": { @@ -3184,8 
+3221,6 @@ }, "node_modules/@typescript-eslint/scope-manager": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.45.0.tgz", - "integrity": "sha512-clmm8XSNj/1dGvJeO6VGH7EUSeA0FMs+5au/u3lrA3KfG8iJ4u8ym9/j2tTEoacAffdW1TVUzXO30W1JTJS7dA==", "dev": true, "license": "MIT", "dependencies": { @@ -3202,8 +3237,6 @@ }, "node_modules/@typescript-eslint/tsconfig-utils": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.45.0.tgz", - "integrity": "sha512-aFdr+c37sc+jqNMGhH+ajxPXwjv9UtFZk79k8pLoJ6p4y0snmYpPA52GuWHgt2ZF4gRRW6odsEj41uZLojDt5w==", "dev": true, "license": "MIT", "engines": { @@ -3219,8 +3252,6 @@ }, "node_modules/@typescript-eslint/type-utils": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.45.0.tgz", - "integrity": "sha512-bpjepLlHceKgyMEPglAeULX1vixJDgaKocp0RVJ5u4wLJIMNuKtUXIczpJCPcn2waII0yuvks/5m5/h3ZQKs0A==", "dev": true, "license": "MIT", "dependencies": { @@ -3244,8 +3275,6 @@ }, "node_modules/@typescript-eslint/types": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.45.0.tgz", - "integrity": "sha512-WugXLuOIq67BMgQInIxxnsSyRLFxdkJEJu8r4ngLR56q/4Q5LrbfkFRH27vMTjxEK8Pyz7QfzuZe/G15qQnVRA==", "dev": true, "license": "MIT", "engines": { @@ -3258,8 +3287,6 @@ }, "node_modules/@typescript-eslint/typescript-estree": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.45.0.tgz", - "integrity": "sha512-GfE1NfVbLam6XQ0LcERKwdTTPlLvHvXXhOeUGC1OXi4eQBoyy1iVsW+uzJ/J9jtCz6/7GCQ9MtrQ0fml/jWCnA==", "dev": true, "license": "MIT", "dependencies": { @@ -3287,8 +3314,6 @@ }, "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - 
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3297,8 +3322,6 @@ }, "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { "version": "9.0.9", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", - "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", "dev": true, "license": "ISC", "dependencies": { @@ -3313,8 +3336,6 @@ }, "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { "version": "7.7.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", - "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -3326,8 +3347,6 @@ }, "node_modules/@typescript-eslint/utils": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.45.0.tgz", - "integrity": "sha512-bxi1ht+tLYg4+XV2knz/F7RVhU0k6VrSMc9sb8DQ6fyCTrGQLHfo7lDtN0QJjZjKkLA2ThrKuCdHEvLReqtIGg==", "dev": true, "license": "MIT", "dependencies": { @@ -3350,8 +3369,6 @@ }, "node_modules/@typescript-eslint/visitor-keys": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.45.0.tgz", - "integrity": "sha512-qsaFBA3e09MIDAGFUrTk+dzqtfv1XPVz8t8d1f0ybTzrCY7BKiMC5cjrl1O/P7UmHsNyW90EYSkU/ZWpmXelag==", "dev": true, "license": "MIT", "dependencies": { @@ -3368,15 +3385,11 @@ }, "node_modules/@ungap/structured-clone": { "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", - "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", "dev": true, "license": "ISC" }, "node_modules/@vitejs/plugin-react": { "version": "5.0.4", - "resolved": 
"https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.0.4.tgz", - "integrity": "sha512-La0KD0vGkVkSk6K+piWDKRUyg8Rl5iAIKRMH0vMJI0Eg47bq1eOxmoObAaQG37WMW9MSyk7Cs8EIWwJC1PtzKA==", "dev": true, "license": "MIT", "dependencies": { @@ -3394,10 +3407,12 @@ "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, + "node_modules/abs-svg-path": { + "version": "0.1.1", + "license": "MIT" + }, "node_modules/acorn": { "version": "8.15.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", - "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "license": "MIT", "bin": { "acorn": "bin/acorn" @@ -3408,8 +3423,6 @@ }, "node_modules/acorn-jsx": { "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, "license": "MIT", "peerDependencies": { @@ -3418,8 +3431,6 @@ }, "node_modules/ajv": { "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, "license": "MIT", "dependencies": { @@ -3435,8 +3446,6 @@ }, "node_modules/ansi-styles": { "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "license": "MIT", "dependencies": { @@ -3451,8 +3460,6 @@ }, "node_modules/ansis": { "version": "4.2.0", - "resolved": "https://registry.npmjs.org/ansis/-/ansis-4.2.0.tgz", - "integrity": "sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig==", "license": "ISC", "engines": { "node": ">=14" @@ -3460,8 +3467,6 @@ }, "node_modules/anymatch": { "version": "3.1.3", - "resolved": 
"https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", "license": "ISC", "dependencies": { "normalize-path": "^3.0.0", @@ -3473,15 +3478,11 @@ }, "node_modules/argparse": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true, "license": "Python-2.0" }, "node_modules/aria-hidden": { "version": "1.2.6", - "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz", - "integrity": "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==", "license": "MIT", "dependencies": { "tslib": "^2.0.0" @@ -3490,10 +3491,30 @@ "node": ">=10" } }, + "node_modules/array-bounds": { + "version": "1.0.1", + "license": "MIT" + }, + "node_modules/array-find-index": { + "version": "1.0.2", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-normalize": { + "version": "1.1.4", + "license": "MIT", + "dependencies": { + "array-bounds": "^1.0.0" + } + }, + "node_modules/array-range": { + "version": "1.0.1", + "license": "MIT" + }, "node_modules/ast-types": { "version": "0.16.1", - "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.16.1.tgz", - "integrity": "sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==", "license": "MIT", "dependencies": { "tslib": "^2.0.1" @@ -3504,8 +3525,6 @@ }, "node_modules/autoprefixer": { "version": "10.4.21", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.21.tgz", - "integrity": "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==", "dev": true, "funding": [ { @@ -3542,8 +3561,6 @@ }, "node_modules/babel-dead-code-elimination": { "version": "1.0.10", - 
"resolved": "https://registry.npmjs.org/babel-dead-code-elimination/-/babel-dead-code-elimination-1.0.10.tgz", - "integrity": "sha512-DV5bdJZTzZ0zn0DC24v3jD7Mnidh6xhKa4GfKCbq3sfW8kaWhDdZjP3i81geA8T33tdYqWKw4D3fVv0CwEgKVA==", "license": "MIT", "dependencies": { "@babel/core": "^7.23.7", @@ -3554,15 +3571,18 @@ }, "node_modules/balanced-match": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true, "license": "MIT" }, + "node_modules/base64-arraybuffer": { + "version": "1.0.2", + "license": "MIT", + "engines": { + "node": ">= 0.6.0" + } + }, "node_modules/baseline-browser-mapping": { "version": "2.8.16", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.16.tgz", - "integrity": "sha512-OMu3BGQ4E7P1ErFsIPpbJh0qvDudM/UuJeHgkAvfWe+0HFJCXh+t/l8L6fVLR55RI/UbKrVLnAXZSVwd9ysWYw==", "license": "Apache-2.0", "bin": { "baseline-browser-mapping": "dist/cli.js" @@ -3570,8 +3590,6 @@ }, "node_modules/binary-extensions": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", "license": "MIT", "engines": { "node": ">=8" @@ -3580,10 +3598,28 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/binary-search-bounds": { + "version": "2.0.5", + "license": "MIT" + }, + "node_modules/bit-twiddle": { + "version": "1.0.2", + "license": "MIT" + }, + "node_modules/bitmap-sdf": { + "version": "1.0.4", + "license": "MIT" + }, + "node_modules/bl": { + "version": "2.2.1", + "license": "MIT", + "dependencies": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, "node_modules/brace-expansion": { "version": "1.1.12", - "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { @@ -3593,8 +3629,6 @@ }, "node_modules/braces": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "license": "MIT", "dependencies": { "fill-range": "^7.1.1" @@ -3605,8 +3639,6 @@ }, "node_modules/browserslist": { "version": "4.26.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.26.3.tgz", - "integrity": "sha512-lAUU+02RFBuCKQPj/P6NgjlbCnLBMp4UtgTx7vNHd3XSIJF87s9a5rA3aH2yw3GS9DqZAUbOtZdCCiZeVRqt0w==", "funding": [ { "type": "opencollective", @@ -3636,10 +3668,12 @@ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, + "node_modules/buffer-from": { + "version": "1.1.2", + "license": "MIT" + }, "node_modules/callsites": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "dev": true, "license": "MIT", "engines": { @@ -3648,8 +3682,6 @@ }, "node_modules/caniuse-lite": { "version": "1.0.30001750", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001750.tgz", - "integrity": "sha512-cuom0g5sdX6rw00qOoLNSFCJ9/mYIsuSOA+yzpDw8eopiFqcVwQvZHqov0vmEighRxX++cfC0Vg1G+1Iy/mSpQ==", "funding": [ { "type": "opencollective", @@ -3666,10 +3698,15 @@ ], "license": "CC-BY-4.0" }, + "node_modules/canvas-fit": { + "version": "1.5.0", + "license": "MIT", + "dependencies": { + "element-size": "^1.1.1" + } + }, "node_modules/ccount": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", - "integrity": 
"sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", "dev": true, "license": "MIT", "funding": { @@ -3679,8 +3716,6 @@ }, "node_modules/chalk": { "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "license": "MIT", "dependencies": { @@ -3696,8 +3731,6 @@ }, "node_modules/character-entities-html4": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", - "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", "dev": true, "license": "MIT", "funding": { @@ -3707,8 +3740,6 @@ }, "node_modules/character-entities-legacy": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", - "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", "dev": true, "license": "MIT", "funding": { @@ -3718,8 +3749,6 @@ }, "node_modules/chokidar": { "version": "3.6.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", "license": "MIT", "dependencies": { "anymatch": "~3.1.2", @@ -3742,8 +3771,6 @@ }, "node_modules/chokidar/node_modules/glob-parent": { "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "license": "ISC", "dependencies": { "is-glob": "^4.0.1" @@ -3752,10 +3779,12 @@ "node": ">= 6" } }, + "node_modules/clamp": { + "version": "1.0.1", + "license": "MIT" + }, "node_modules/class-variance-authority": { "version": "0.7.1", - 
"resolved": "https://registry.npmjs.org/class-variance-authority/-/class-variance-authority-0.7.1.tgz", - "integrity": "sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==", "license": "Apache-2.0", "dependencies": { "clsx": "^2.1.1" @@ -3766,8 +3795,6 @@ }, "node_modules/cliui": { "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", "dev": true, "license": "ISC", "dependencies": { @@ -3781,8 +3808,6 @@ }, "node_modules/cliui/node_modules/ansi-regex": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true, "license": "MIT", "engines": { @@ -3791,15 +3816,11 @@ }, "node_modules/cliui/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true, "license": "MIT" }, "node_modules/cliui/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "license": "MIT", "dependencies": { @@ -3813,8 +3834,6 @@ }, "node_modules/cliui/node_modules/strip-ansi": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "license": "MIT", "dependencies": { @@ -3826,8 +3845,6 @@ }, "node_modules/cliui/node_modules/wrap-ansi": { "version": "7.0.0", - "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, "license": "MIT", "dependencies": { @@ -3844,17 +3861,27 @@ }, "node_modules/clsx": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", - "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", "license": "MIT", "engines": { "node": ">=6" } }, + "node_modules/color-alpha": { + "version": "1.0.4", + "license": "MIT", + "dependencies": { + "color-parse": "^1.3.8" + } + }, + "node_modules/color-alpha/node_modules/color-parse": { + "version": "1.4.3", + "license": "MIT", + "dependencies": { + "color-name": "^1.0.0" + } + }, "node_modules/color-convert": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3864,47 +3891,109 @@ "node": ">=7.0.0" } }, + "node_modules/color-id": { + "version": "1.1.0", + "license": "MIT", + "dependencies": { + "clamp": "^1.0.1" + } + }, "node_modules/color-name": { "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true, "license": "MIT" }, - "node_modules/comma-separated-tokens": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", - "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", - "dev": true, + "node_modules/color-normalize": { + "version": "1.5.0", "license": "MIT", - "funding": { - "type": "github", - "url": 
"https://github.com/sponsors/wooorm" + "dependencies": { + "clamp": "^1.0.1", + "color-rgba": "^2.1.1", + "dtype": "^2.0.0" } }, + "node_modules/color-normalize/node_modules/color-parse": { + "version": "1.4.3", + "license": "MIT", + "dependencies": { + "color-name": "^1.0.0" + } + }, + "node_modules/color-normalize/node_modules/color-rgba": { + "version": "2.4.0", + "license": "MIT", + "dependencies": { + "color-parse": "^1.4.2", + "color-space": "^2.0.0" + } + }, + "node_modules/color-parse": { + "version": "2.0.0", + "license": "MIT", + "dependencies": { + "color-name": "^1.0.0" + } + }, + "node_modules/color-rgba": { + "version": "3.0.0", + "license": "MIT", + "dependencies": { + "color-parse": "^2.0.0", + "color-space": "^2.0.0" + } + }, + "node_modules/color-space": { + "version": "2.3.2", + "license": "Unlicense" + }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/commander": { + "version": "2.20.3", + "license": "MIT" + }, "node_modules/concat-map": { "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "dev": true, "license": "MIT" }, + "node_modules/concat-stream": { + "version": "1.6.2", + "engines": [ + "node >= 0.8" + ], + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, "node_modules/convert-source-map": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", "license": "MIT" }, "node_modules/cookie-es": { "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/cookie-es/-/cookie-es-2.0.0.tgz", - "integrity": "sha512-RAj4E421UYRgqokKUmotqAwuplYw15qtdXfY+hGzgCJ/MBjCVZcSoHK/kH9kocfjRjcDME7IiDWR/1WX1TM2Pg==", + "license": "MIT" + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "license": "MIT" + }, + "node_modules/country-regex": { + "version": "1.1.0", "license": "MIT" }, "node_modules/cross-spawn": { "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, "license": "MIT", "dependencies": { @@ -3916,16 +4005,66 @@ "node": ">= 8" } }, + "node_modules/css-font": { + "version": "1.2.0", + "license": "MIT", + "dependencies": { + "css-font-size-keywords": "^1.0.0", + "css-font-stretch-keywords": "^1.0.1", + "css-font-style-keywords": "^1.0.1", + "css-font-weight-keywords": "^1.0.0", + "css-global-keywords": "^1.0.1", + "css-system-font-keywords": "^1.0.0", + "pick-by-alias": "^1.2.0", + "string-split-by": "^1.0.0", + "unquote": "^1.1.0" + } + }, + "node_modules/css-font-size-keywords": { + "version": "1.0.0", + "license": "MIT" + }, + "node_modules/css-font-stretch-keywords": { + "version": "1.0.1", + "license": "MIT" + }, + "node_modules/css-font-style-keywords": { + "version": "1.0.1", + "license": "MIT" + }, + "node_modules/css-font-weight-keywords": { + "version": "1.0.0", + "license": "MIT" + }, + "node_modules/css-global-keywords": { + "version": "1.0.1", + "license": "MIT" + }, + "node_modules/css-system-font-keywords": { + "version": "1.0.0", + "license": "MIT" + }, + "node_modules/csscolorparser": { + "version": "1.0.3", + "license": "MIT" + }, "node_modules/csstype": { "version": "3.1.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", "license": "MIT" }, + "node_modules/d": 
{ + "version": "1.0.2", + "license": "ISC", + "dependencies": { + "es5-ext": "^0.10.64", + "type": "^2.7.2" + }, + "engines": { + "node": ">=0.12" + } + }, "node_modules/d3-array": { "version": "3.2.4", - "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", - "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", "license": "ISC", "dependencies": { "internmap": "1 - 2" @@ -3934,37 +4073,87 @@ "node": ">=12" } }, + "node_modules/d3-collection": { + "version": "1.0.7", + "license": "BSD-3-Clause" + }, "node_modules/d3-color": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", - "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", "license": "ISC", "engines": { "node": ">=12" } }, + "node_modules/d3-dispatch": { + "version": "1.0.6", + "license": "BSD-3-Clause" + }, "node_modules/d3-ease": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", - "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", "license": "BSD-3-Clause", "engines": { "node": ">=12" } }, + "node_modules/d3-force": { + "version": "1.2.1", + "license": "BSD-3-Clause", + "dependencies": { + "d3-collection": "1", + "d3-dispatch": "1", + "d3-quadtree": "1", + "d3-timer": "1" + } + }, + "node_modules/d3-force/node_modules/d3-timer": { + "version": "1.0.10", + "license": "BSD-3-Clause" + }, "node_modules/d3-format": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", - "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", "license": "ISC", "engines": { "node": ">=12" } }, + "node_modules/d3-geo": { + "version": "1.12.1", + "license": "BSD-3-Clause", + "dependencies": { + "d3-array": "1" + } + }, + "node_modules/d3-geo-projection": 
{ + "version": "2.9.0", + "license": "BSD-3-Clause", + "dependencies": { + "commander": "2", + "d3-array": "1", + "d3-geo": "^1.12.0", + "resolve": "^1.1.10" + }, + "bin": { + "geo2svg": "bin/geo2svg", + "geograticule": "bin/geograticule", + "geoproject": "bin/geoproject", + "geoquantize": "bin/geoquantize", + "geostitch": "bin/geostitch" + } + }, + "node_modules/d3-geo-projection/node_modules/d3-array": { + "version": "1.2.4", + "license": "BSD-3-Clause" + }, + "node_modules/d3-geo/node_modules/d3-array": { + "version": "1.2.4", + "license": "BSD-3-Clause" + }, + "node_modules/d3-hierarchy": { + "version": "1.1.9", + "license": "BSD-3-Clause" + }, "node_modules/d3-interpolate": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", - "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", "license": "ISC", "dependencies": { "d3-color": "1 - 3" @@ -3975,17 +4164,17 @@ }, "node_modules/d3-path": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", - "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", "license": "ISC", "engines": { "node": ">=12" } }, + "node_modules/d3-quadtree": { + "version": "1.0.7", + "license": "BSD-3-Clause" + }, "node_modules/d3-scale": { "version": "4.0.2", - "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", - "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", "license": "ISC", "dependencies": { "d3-array": "2.10.0 - 3", @@ -4000,8 +4189,6 @@ }, "node_modules/d3-shape": { "version": "3.2.0", - "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", - "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", "license": "ISC", "dependencies": { "d3-path": "^3.1.0" @@ -4012,8 +4199,6 @@ 
}, "node_modules/d3-time": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", - "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", "license": "ISC", "dependencies": { "d3-array": "2 - 3" @@ -4024,8 +4209,6 @@ }, "node_modules/d3-time-format": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", - "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", "license": "ISC", "dependencies": { "d3-time": "1 - 3" @@ -4036,8 +4219,6 @@ }, "node_modules/d3-timer": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", - "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", "license": "ISC", "engines": { "node": ">=12" @@ -4045,8 +4226,6 @@ }, "node_modules/debug": { "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -4062,31 +4241,34 @@ }, "node_modules/decimal.js-light": { "version": "2.5.1", - "resolved": "https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz", - "integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==", "license": "MIT" }, "node_modules/deep-is": { "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true, "license": "MIT" }, + "node_modules/defined": { + "version": "1.0.1", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/dequal": { "version": "2.0.3", - 
"resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", "dev": true, "license": "MIT", "engines": { "node": ">=6" } }, + "node_modules/detect-kerning": { + "version": "2.1.2", + "license": "MIT" + }, "node_modules/detect-libc": { "version": "2.1.2", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", - "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", "license": "Apache-2.0", "engines": { "node": ">=8" @@ -4094,14 +4276,10 @@ }, "node_modules/detect-node-es": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", - "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", "license": "MIT" }, "node_modules/devlop": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", - "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", "dev": true, "license": "MIT", "dependencies": { @@ -4114,23 +4292,68 @@ }, "node_modules/diff": { "version": "8.0.3", - "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.3.tgz", - "integrity": "sha512-qejHi7bcSD4hQAZE0tNAawRK1ZtafHDmMTMkrrIGgSLl7hTnQHmKCeB45xAcbfTqK2zowkM3j3bHt/4b/ARbYQ==", "license": "BSD-3-Clause", "engines": { "node": ">=0.3.1" } }, + "node_modules/draw-svg-path": { + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "abs-svg-path": "~0.1.1", + "normalize-svg-path": "~0.1.0" + } + }, + "node_modules/dtype": { + "version": "2.0.0", + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/dup": { + "version": "1.0.0", + "license": "MIT" + }, + "node_modules/duplexify": { + "version": "3.7.1", + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.0.0", + "inherits": 
"^2.0.1", + "readable-stream": "^2.0.0", + "stream-shift": "^1.0.0" + } + }, + "node_modules/earcut": { + "version": "2.2.4", + "license": "ISC" + }, "node_modules/electron-to-chromium": { "version": "1.5.237", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.237.tgz", - "integrity": "sha512-icUt1NvfhGLar5lSWH3tHNzablaA5js3HVHacQimfP8ViEBOQv+L7DKEuHdbTZ0SKCO1ogTJTIL1Gwk9S6Qvcg==", "license": "ISC" }, + "node_modules/element-size": { + "version": "1.1.1", + "license": "MIT" + }, + "node_modules/elementary-circuits-directed-graph": { + "version": "1.3.1", + "license": "MIT", + "dependencies": { + "strongly-connected-components": "^1.0.1" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, "node_modules/enhanced-resolve": { "version": "5.18.3", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", - "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", "dev": true, "license": "MIT", "dependencies": { @@ -4141,20 +4364,67 @@ "node": ">=10.13.0" } }, + "node_modules/es-errors": { + "version": "1.3.0", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-toolkit": { "version": "1.42.0", - "resolved": "https://registry.npmjs.org/es-toolkit/-/es-toolkit-1.42.0.tgz", - "integrity": "sha512-SLHIyY7VfDJBM8clz4+T2oquwTQxEzu263AyhVK4jREOAwJ+8eebaa4wM3nlvnAqhDrMm2EsA6hWHaQsMPQ1nA==", "license": "MIT", "workspaces": [ "docs", "benchmarks" ] }, + "node_modules/es5-ext": { + "version": "0.10.64", + "hasInstallScript": true, + "license": "ISC", + "dependencies": { + "es6-iterator": "^2.0.3", + "es6-symbol": "^3.1.3", + "esniff": "^2.0.1", + "next-tick": "^1.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/es6-iterator": { + "version": "2.0.3", + "license": "MIT", + "dependencies": { + "d": "1", + "es5-ext": "^0.10.35", + 
"es6-symbol": "^3.1.1" + } + }, + "node_modules/es6-symbol": { + "version": "3.1.4", + "license": "ISC", + "dependencies": { + "d": "^1.0.2", + "ext": "^1.7.0" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/es6-weak-map": { + "version": "2.0.3", + "license": "ISC", + "dependencies": { + "d": "1", + "es5-ext": "^0.10.46", + "es6-iterator": "^2.0.3", + "es6-symbol": "^3.1.1" + } + }, "node_modules/esbuild": { "version": "0.25.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", - "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", "hasInstallScript": true, "license": "MIT", "bin": { @@ -4194,8 +4464,6 @@ }, "node_modules/escalade": { "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "license": "MIT", "engines": { "node": ">=6" @@ -4203,8 +4471,6 @@ }, "node_modules/escape-string-regexp": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true, "license": "MIT", "engines": { @@ -4214,10 +4480,35 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/escodegen": { + "version": "2.1.0", + "license": "BSD-2-Clause", + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/escodegen/node_modules/source-map": { + "version": "0.6.1", + "license": "BSD-3-Clause", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/eslint": { "version": "9.36.0", - 
"resolved": "https://registry.npmjs.org/eslint/-/eslint-9.36.0.tgz", - "integrity": "sha512-hB4FIzXovouYzwzECDcUkJ4OcfOEkXTv2zRY6B9bkwjx/cprAq0uvm1nl7zvQ0/TsUk0zQiN4uPfJpB9m+rPMQ==", "dev": true, "license": "MIT", "dependencies": { @@ -4277,8 +4568,6 @@ }, "node_modules/eslint-plugin-react-hooks": { "version": "5.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.2.0.tgz", - "integrity": "sha512-+f15FfK64YQwZdJNELETdn5ibXEUQmW1DZL6KXhNnc2heoy/sg9VJJeT7n8TlMWouzWqSWavFkIhHyIbIAEapg==", "dev": true, "license": "MIT", "engines": { @@ -4290,8 +4579,6 @@ }, "node_modules/eslint-plugin-react-refresh": { "version": "0.4.22", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.22.tgz", - "integrity": "sha512-atkAG6QaJMGoTLc4MDAP+rqZcfwQuTIh2IqHWFLy2TEjxr0MOK+5BSG4RzL2564AAPpZkDRsZXAUz68kjnU6Ug==", "dev": true, "license": "MIT", "peerDependencies": { @@ -4300,8 +4587,6 @@ }, "node_modules/eslint-scope": { "version": "8.4.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", - "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -4317,8 +4602,6 @@ }, "node_modules/eslint-visitor-keys": { "version": "4.2.1", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", - "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -4328,10 +4611,21 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/esniff": { + "version": "2.0.1", + "license": "ISC", + "dependencies": { + "d": "^1.0.1", + "es5-ext": "^0.10.62", + "event-emitter": "^0.3.5", + "type": "^2.7.2" + }, + "engines": { + "node": ">=0.10" + } + }, "node_modules/espree": { "version": "10.4.0", - "resolved": 
"https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", - "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -4348,8 +4642,6 @@ }, "node_modules/esprima": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", "license": "BSD-2-Clause", "bin": { "esparse": "bin/esparse.js", @@ -4361,8 +4653,6 @@ }, "node_modules/esquery": { "version": "1.6.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", - "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", "dev": true, "license": "BSD-3-Clause", "dependencies": { @@ -4374,8 +4664,6 @@ }, "node_modules/esrecurse": { "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -4387,9 +4675,6 @@ }, "node_modules/estraverse": { "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, "license": "BSD-2-Clause", "engines": { "node": ">=4.0" @@ -4397,31 +4682,65 @@ }, "node_modules/esutils": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, "license": "BSD-2-Clause", "engines": { "node": ">=0.10.0" } }, + "node_modules/event-emitter": { + "version": "0.3.5", + "license": "MIT", + "dependencies": { + "d": "1", + "es5-ext": "~0.10.14" + } + 
}, "node_modules/eventemitter3": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", - "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", "license": "MIT" }, + "node_modules/events": { + "version": "3.3.0", + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/ext": { + "version": "1.7.0", + "license": "ISC", + "dependencies": { + "type": "^2.7.2" + } + }, + "node_modules/falafel": { + "version": "2.2.5", + "license": "MIT", + "dependencies": { + "acorn": "^7.1.1", + "isarray": "^2.0.1" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/falafel/node_modules/acorn": { + "version": "7.4.1", + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/fast-deep-equal": { "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", "dev": true, "license": "MIT" }, "node_modules/fast-glob": { "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", "dev": true, "license": "MIT", "dependencies": { @@ -4437,8 +4756,6 @@ }, "node_modules/fast-glob/node_modules/glob-parent": { "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, "license": "ISC", "dependencies": { @@ -4448,24 +4765,25 @@ "node": ">= 6" } }, + "node_modules/fast-isnumeric": { + "version": "1.1.4", + "license": "MIT", + "dependencies": { + "is-string-blank": "^1.0.1" + } + }, "node_modules/fast-json-stable-stringify": { 
"version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true, "license": "MIT" }, "node_modules/fast-levenshtein": { "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true, "license": "MIT" }, "node_modules/fastq": { "version": "1.20.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", - "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", "dev": true, "license": "ISC", "dependencies": { @@ -4474,8 +4792,6 @@ }, "node_modules/file-entry-cache": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", - "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", "dev": true, "license": "MIT", "dependencies": { @@ -4487,8 +4803,6 @@ }, "node_modules/fill-range": { "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" @@ -4499,8 +4813,6 @@ }, "node_modules/find-up": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, "license": "MIT", "dependencies": { @@ -4516,8 +4828,6 @@ }, "node_modules/flat-cache": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", - "integrity": 
"sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", "dev": true, "license": "MIT", "dependencies": { @@ -4530,15 +4840,32 @@ }, "node_modules/flatted": { "version": "3.3.3", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", - "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", "dev": true, "license": "ISC" }, + "node_modules/flatten-vertex-data": { + "version": "1.0.2", + "license": "MIT", + "dependencies": { + "dtype": "^2.0.0" + } + }, + "node_modules/font-atlas": { + "version": "2.1.0", + "license": "MIT", + "dependencies": { + "css-font": "^1.0.0" + } + }, + "node_modules/font-measure": { + "version": "1.2.2", + "license": "MIT", + "dependencies": { + "css-font": "^1.2.0" + } + }, "node_modules/fraction.js": { "version": "4.3.7", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", - "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", "dev": true, "license": "MIT", "engines": { @@ -4549,11 +4876,16 @@ "url": "https://github.com/sponsors/rawify" } }, + "node_modules/from2": { + "version": "2.3.0", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.0" + } + }, "node_modules/fsevents": { "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "hasInstallScript": true, "license": "MIT", "optional": true, "os": [ @@ -4563,38 +4895,55 @@ "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, + "node_modules/function-bind": { + "version": "1.1.2", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/gensync": { "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - 
"integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, + "node_modules/geojson-vt": { + "version": "3.2.1", + "license": "ISC" + }, "node_modules/get-caller-file": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true, "license": "ISC", "engines": { "node": "6.* || 8.* || >= 10.*" } }, + "node_modules/get-canvas-context": { + "version": "1.0.2", + "license": "MIT" + }, "node_modules/get-nonce": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", - "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", "license": "MIT", "engines": { "node": ">=6" } }, + "node_modules/get-stream": { + "version": "6.0.1", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/get-tsconfig": { "version": "4.13.0", - "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.0.tgz", - "integrity": "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==", "license": "MIT", "dependencies": { "resolve-pkg-maps": "^1.0.0" @@ -4603,10 +4952,52 @@ "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" } }, + "node_modules/gl-mat4": { + "version": "1.2.0", + "license": "Zlib" + }, + "node_modules/gl-matrix": { + "version": "3.4.4", + "license": "MIT" + }, + "node_modules/gl-text": { + "version": "1.4.0", + "license": "MIT", + "dependencies": { + "bit-twiddle": "^1.0.2", + "color-normalize": "^1.5.0", + "css-font": "^1.2.0", + "detect-kerning": "^2.1.2", + "es6-weak-map": "^2.0.3", + "flatten-vertex-data": "^1.0.2", + "font-atlas": "^2.1.0", + 
"font-measure": "^1.2.2", + "gl-util": "^3.1.2", + "is-plain-obj": "^1.1.0", + "object-assign": "^4.1.1", + "parse-rect": "^1.2.0", + "parse-unit": "^1.0.1", + "pick-by-alias": "^1.2.0", + "regl": "^2.0.0", + "to-px": "^1.0.1", + "typedarray-pool": "^1.1.0" + } + }, + "node_modules/gl-util": { + "version": "3.1.3", + "license": "MIT", + "dependencies": { + "is-browser": "^2.0.1", + "is-firefox": "^1.0.3", + "is-plain-obj": "^1.1.0", + "number-is-integer": "^1.0.1", + "object-assign": "^4.1.0", + "pick-by-alias": "^1.2.0", + "weak-map": "^1.0.5" + } + }, "node_modules/glob-parent": { "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, "license": "ISC", "dependencies": { @@ -4616,10 +5007,40 @@ "node": ">=10.13.0" } }, + "node_modules/global-prefix": { + "version": "4.0.0", + "license": "MIT", + "dependencies": { + "ini": "^4.1.3", + "kind-of": "^6.0.3", + "which": "^4.0.0" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/global-prefix/node_modules/isexe": { + "version": "3.1.5", + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } + }, + "node_modules/global-prefix/node_modules/which": { + "version": "4.0.0", + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, "node_modules/globals": { "version": "16.4.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-16.4.0.tgz", - "integrity": "sha512-ob/2LcVVaVGCYN+r14cnwnoDPUufjiYgSqRhiFD0Q1iI4Odora5RE8Iv1D24hAz5oMophRGkGz+yuvQmmUMnMw==", "dev": true, "license": "MIT", "engines": { @@ -4629,10 +5050,167 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/glsl-inject-defines": { + "version": "1.0.3", + "license": "MIT", + "dependencies": { + "glsl-token-inject-block": "^1.0.0", + 
"glsl-token-string": "^1.0.1", + "glsl-tokenizer": "^2.0.2" + } + }, + "node_modules/glsl-resolve": { + "version": "0.0.1", + "license": "MIT", + "dependencies": { + "resolve": "^0.6.1", + "xtend": "^2.1.2" + } + }, + "node_modules/glsl-resolve/node_modules/resolve": { + "version": "0.6.3", + "license": "MIT" + }, + "node_modules/glsl-resolve/node_modules/xtend": { + "version": "2.2.0", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/glsl-token-assignments": { + "version": "2.0.2", + "license": "MIT" + }, + "node_modules/glsl-token-defines": { + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "glsl-tokenizer": "^2.0.0" + } + }, + "node_modules/glsl-token-depth": { + "version": "1.1.2", + "license": "MIT" + }, + "node_modules/glsl-token-descope": { + "version": "1.0.2", + "license": "MIT", + "dependencies": { + "glsl-token-assignments": "^2.0.0", + "glsl-token-depth": "^1.1.0", + "glsl-token-properties": "^1.0.0", + "glsl-token-scope": "^1.1.0" + } + }, + "node_modules/glsl-token-inject-block": { + "version": "1.1.0", + "license": "MIT" + }, + "node_modules/glsl-token-properties": { + "version": "1.0.1", + "license": "MIT" + }, + "node_modules/glsl-token-scope": { + "version": "1.1.2", + "license": "MIT" + }, + "node_modules/glsl-token-string": { + "version": "1.0.1", + "license": "MIT" + }, + "node_modules/glsl-token-whitespace-trim": { + "version": "1.0.0", + "license": "MIT" + }, + "node_modules/glsl-tokenizer": { + "version": "2.1.5", + "license": "MIT", + "dependencies": { + "through2": "^0.6.3" + } + }, + "node_modules/glsl-tokenizer/node_modules/isarray": { + "version": "0.0.1", + "license": "MIT" + }, + "node_modules/glsl-tokenizer/node_modules/readable-stream": { + "version": "1.0.34", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/glsl-tokenizer/node_modules/string_decoder": { + "version": "0.10.31", + "license": 
"MIT" + }, + "node_modules/glsl-tokenizer/node_modules/through2": { + "version": "0.6.5", + "license": "MIT", + "dependencies": { + "readable-stream": ">=1.0.33-1 <1.1.0-0", + "xtend": ">=4.0.0 <4.1.0-0" + } + }, + "node_modules/glslify": { + "version": "7.1.1", + "license": "MIT", + "dependencies": { + "bl": "^2.2.1", + "concat-stream": "^1.5.2", + "duplexify": "^3.4.5", + "falafel": "^2.1.0", + "from2": "^2.3.0", + "glsl-resolve": "0.0.1", + "glsl-token-whitespace-trim": "^1.0.0", + "glslify-bundle": "^5.0.0", + "glslify-deps": "^1.2.5", + "minimist": "^1.2.5", + "resolve": "^1.1.5", + "stack-trace": "0.0.9", + "static-eval": "^2.0.5", + "through2": "^2.0.1", + "xtend": "^4.0.0" + }, + "bin": { + "glslify": "bin.js" + } + }, + "node_modules/glslify-bundle": { + "version": "5.1.1", + "license": "MIT", + "dependencies": { + "glsl-inject-defines": "^1.0.1", + "glsl-token-defines": "^1.0.0", + "glsl-token-depth": "^1.1.1", + "glsl-token-descope": "^1.0.2", + "glsl-token-scope": "^1.1.1", + "glsl-token-string": "^1.0.1", + "glsl-token-whitespace-trim": "^1.0.0", + "glsl-tokenizer": "^2.0.2", + "murmurhash-js": "^1.0.0", + "shallow-copy": "0.0.1" + } + }, + "node_modules/glslify-deps": { + "version": "1.3.2", + "license": "ISC", + "dependencies": { + "@choojs/findup": "^0.2.0", + "events": "^3.2.0", + "glsl-resolve": "0.0.1", + "glsl-tokenizer": "^2.0.0", + "graceful-fs": "^4.1.2", + "inherits": "^2.0.1", + "map-limit": "0.0.1", + "resolve": "^1.0.0" + } + }, "node_modules/goober": { "version": "2.1.18", - "resolved": "https://registry.npmjs.org/goober/-/goober-2.1.18.tgz", - "integrity": "sha512-2vFqsaDVIT9Gz7N6kAL++pLpp41l3PfDuusHcjnGLfR6+huZkl6ziX+zgVC3ZxpqWhzH6pyDdGrCeDhMIvwaxw==", "license": "MIT", "peerDependencies": { "csstype": "^3.0.10" @@ -4640,32 +5218,51 @@ }, "node_modules/graceful-fs": { "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": 
"sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true, "license": "ISC" }, "node_modules/graphemer": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "dev": true, "license": "MIT" }, + "node_modules/grid-index": { + "version": "1.1.0", + "license": "ISC" + }, "node_modules/has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/has-hover": { + "version": "1.0.1", + "license": "MIT", + "dependencies": { + "is-browser": "^2.0.1" + } + }, + "node_modules/has-passive-events": { + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "is-browser": "^2.0.1" + } + }, + "node_modules/hasown": { + "version": "2.0.3", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/hast-util-to-html": { "version": "9.0.5", - "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz", - "integrity": "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==", "dev": true, "license": "MIT", "dependencies": { @@ -4688,8 +5285,6 @@ }, "node_modules/hast-util-whitespace": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", - "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", "dev": true, "license": "MIT", "dependencies": { @@ -4702,8 +5297,6 @@ }, "node_modules/html-void-elements": { "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", - "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", "dev": true, "license": "MIT", "funding": { @@ -4711,10 +5304,36 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, "node_modules/ignore": { "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", "dev": true, "license": "MIT", "engines": { @@ -4723,8 +5342,6 @@ }, "node_modules/immer": { "version": "10.2.0", - "resolved": "https://registry.npmjs.org/immer/-/immer-10.2.0.tgz", - "integrity": "sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw==", "license": "MIT", "funding": { "type": "opencollective", @@ -4733,8 +5350,6 @@ }, "node_modules/import-fresh": { "version": "3.3.1", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", - "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", "dev": true, "license": "MIT", "dependencies": { @@ -4750,18 +5365,25 @@ }, "node_modules/imurmurhash": { "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true, 
"license": "MIT", "engines": { "node": ">=0.8.19" } }, + "node_modules/inherits": { + "version": "2.0.4", + "license": "ISC" + }, + "node_modules/ini": { + "version": "4.1.3", + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/internmap": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", - "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", "license": "ISC", "engines": { "node": ">=12" @@ -4769,8 +5391,6 @@ }, "node_modules/is-binary-path": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "license": "MIT", "dependencies": { "binary-extensions": "^2.0.0" @@ -4779,10 +5399,42 @@ "node": ">=8" } }, + "node_modules/is-browser": { + "version": "2.1.0", + "license": "MIT" + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-extglob": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-finite": { + "version": "1.1.0", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-firefox": { + "version": "1.0.3", "license": "MIT", "engines": { "node": ">=0.10.0" @@ -4790,8 +5442,6 @@ }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, "license": "MIT", "engines": { @@ -4800,8 +5450,6 @@ }, "node_modules/is-glob": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" @@ -4810,19 +5458,45 @@ "node": ">=0.10.0" } }, + "node_modules/is-mobile": { + "version": "4.0.0", + "license": "MIT" + }, "node_modules/is-number": { "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "license": "MIT", "engines": { "node": ">=0.12.0" } }, + "node_modules/is-obj": { + "version": "1.0.1", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-string-blank": { + "version": "1.0.1", + "license": "MIT" + }, + "node_modules/is-svg-path": { + "version": "1.0.2", + "license": "MIT" + }, + "node_modules/isarray": { + "version": "2.0.5", + "license": "MIT" + }, "node_modules/isbot": { "version": "5.1.31", - "resolved": "https://registry.npmjs.org/isbot/-/isbot-5.1.31.tgz", - "integrity": "sha512-DPgQshehErHAqSCKDb3rNW03pa2wS/v5evvUqtxt6TTnHRqAG8FdzcSSJs9656pK6Y+NT7K9R4acEYXLHYfpUQ==", "license": "Unlicense", "engines": { "node": ">=18" @@ -4830,15 +5504,11 @@ }, "node_modules/isexe": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true, 
"license": "ISC" }, "node_modules/jiti": { "version": "2.6.1", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", - "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", "devOptional": true, "license": "MIT", "bin": { @@ -4847,14 +5517,10 @@ }, "node_modules/js-tokens": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", "license": "MIT" }, "node_modules/js-yaml": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dev": true, "license": "MIT", "dependencies": { @@ -4866,8 +5532,6 @@ }, "node_modules/jsesc": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", "license": "MIT", "bin": { "jsesc": "bin/jsesc" @@ -4878,29 +5542,25 @@ }, "node_modules/json-buffer": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", "dev": true, "license": "MIT" }, "node_modules/json-schema-traverse": { "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true, "license": "MIT" }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": 
"sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true, "license": "MIT" }, + "node_modules/json-stringify-pretty-compact": { + "version": "4.0.0", + "license": "MIT" + }, "node_modules/json5": { "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", "license": "MIT", "bin": { "json5": "lib/cli.js" @@ -4909,20 +5569,27 @@ "node": ">=6" } }, + "node_modules/kdbush": { + "version": "4.0.2", + "license": "ISC" + }, "node_modules/keyv": { "version": "4.5.4", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", "dev": true, "license": "MIT", "dependencies": { "json-buffer": "3.0.1" } }, + "node_modules/kind-of": { + "version": "6.0.3", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/levn": { "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, "license": "MIT", "dependencies": { @@ -4935,8 +5602,6 @@ }, "node_modules/lightningcss": { "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz", - "integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==", "license": "MPL-2.0", "dependencies": { "detect-libc": "^2.0.3" @@ -4984,8 +5649,6 @@ }, "node_modules/lightningcss-darwin-arm64": { "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.2.tgz", - "integrity": "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==", "cpu": [ "arm64" ], @@ -5184,8 +5847,6 @@ 
}, "node_modules/locate-path": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dev": true, "license": "MIT", "dependencies": { @@ -5200,15 +5861,20 @@ }, "node_modules/lodash.merge": { "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true, "license": "MIT" }, + "node_modules/loose-envify": { + "version": "1.4.0", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, "node_modules/lru-cache": { "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "license": "ISC", "dependencies": { "yallist": "^3.0.2" @@ -5216,8 +5882,6 @@ }, "node_modules/lucide-react": { "version": "0.546.0", - "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.546.0.tgz", - "integrity": "sha512-Z94u6fKT43lKeYHiVyvyR8fT7pwCzDu7RyMPpTvh054+xahSgj4HFQ+NmflvzdXsoAjYGdCguGaFKYuvq0ThCQ==", "license": "ISC", "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" @@ -5225,18 +5889,141 @@ }, "node_modules/magic-string": { "version": "0.30.21", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", - "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", "dev": true, "license": "MIT", "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, + "node_modules/map-limit": { + "version": "0.0.1", + "license": "MIT", + "dependencies": { + "once": "~1.3.0" + } + }, + "node_modules/map-limit/node_modules/once": { + 
"version": "1.3.3", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/mapbox-gl": { + "version": "1.13.3", + "license": "SEE LICENSE IN LICENSE.txt", + "peer": true, + "dependencies": { + "@mapbox/geojson-rewind": "^0.5.2", + "@mapbox/geojson-types": "^1.0.2", + "@mapbox/jsonlint-lines-primitives": "^2.0.2", + "@mapbox/mapbox-gl-supported": "^1.5.0", + "@mapbox/point-geometry": "^0.1.0", + "@mapbox/tiny-sdf": "^1.1.1", + "@mapbox/unitbezier": "^0.0.0", + "@mapbox/vector-tile": "^1.3.1", + "@mapbox/whoots-js": "^3.1.0", + "csscolorparser": "~1.0.3", + "earcut": "^2.2.2", + "geojson-vt": "^3.2.1", + "gl-matrix": "^3.2.1", + "grid-index": "^1.1.0", + "murmurhash-js": "^1.0.0", + "pbf": "^3.2.1", + "potpack": "^1.0.1", + "quickselect": "^2.0.0", + "rw": "^1.3.3", + "supercluster": "^7.1.0", + "tinyqueue": "^2.0.3", + "vt-pbf": "^3.1.1" + }, + "engines": { + "node": ">=6.4.0" + } + }, + "node_modules/maplibre-gl": { + "version": "4.7.1", + "license": "BSD-3-Clause", + "dependencies": { + "@mapbox/geojson-rewind": "^0.5.2", + "@mapbox/jsonlint-lines-primitives": "^2.0.2", + "@mapbox/point-geometry": "^0.1.0", + "@mapbox/tiny-sdf": "^2.0.6", + "@mapbox/unitbezier": "^0.0.1", + "@mapbox/vector-tile": "^1.3.1", + "@mapbox/whoots-js": "^3.1.0", + "@maplibre/maplibre-gl-style-spec": "^20.3.1", + "@types/geojson": "^7946.0.14", + "@types/geojson-vt": "3.2.5", + "@types/mapbox__point-geometry": "^0.1.4", + "@types/mapbox__vector-tile": "^1.3.4", + "@types/pbf": "^3.0.5", + "@types/supercluster": "^7.1.3", + "earcut": "^3.0.0", + "geojson-vt": "^4.0.2", + "gl-matrix": "^3.4.3", + "global-prefix": "^4.0.0", + "kdbush": "^4.0.2", + "murmurhash-js": "^1.0.0", + "pbf": "^3.3.0", + "potpack": "^2.0.0", + "quickselect": "^3.0.0", + "supercluster": "^8.0.1", + "tinyqueue": "^3.0.0", + "vt-pbf": "^3.1.3" + }, + "engines": { + "node": ">=16.14.0", + "npm": ">=8.1.0" + }, + "funding": { + "url": "https://github.com/maplibre/maplibre-gl-js?sponsor=1" + } + }, 
+ "node_modules/maplibre-gl/node_modules/@mapbox/tiny-sdf": { + "version": "2.1.0", + "license": "BSD-2-Clause" + }, + "node_modules/maplibre-gl/node_modules/@mapbox/unitbezier": { + "version": "0.0.1", + "license": "BSD-2-Clause" + }, + "node_modules/maplibre-gl/node_modules/earcut": { + "version": "3.0.2", + "license": "ISC" + }, + "node_modules/maplibre-gl/node_modules/geojson-vt": { + "version": "4.0.2", + "license": "ISC" + }, + "node_modules/maplibre-gl/node_modules/potpack": { + "version": "2.1.0", + "license": "ISC" + }, + "node_modules/maplibre-gl/node_modules/quickselect": { + "version": "3.0.0", + "license": "ISC" + }, + "node_modules/maplibre-gl/node_modules/supercluster": { + "version": "8.0.1", + "license": "ISC", + "dependencies": { + "kdbush": "^4.0.2" + } + }, + "node_modules/maplibre-gl/node_modules/tinyqueue": { + "version": "3.0.0", + "license": "ISC" + }, + "node_modules/math-log2": { + "version": "1.0.1", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/mdast-util-to-hast": { "version": "13.2.1", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", - "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", "dev": true, "license": "MIT", "dependencies": { @@ -5257,8 +6044,6 @@ }, "node_modules/merge2": { "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", "dev": true, "license": "MIT", "engines": { @@ -5267,8 +6052,6 @@ }, "node_modules/micromark-util-character": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", "dev": true, "funding": [ { @@ -5288,8 +6071,6 @@ }, 
"node_modules/micromark-util-encode": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", - "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", "dev": true, "funding": [ { @@ -5305,8 +6086,6 @@ }, "node_modules/micromark-util-sanitize-uri": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", - "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", "dev": true, "funding": [ { @@ -5327,8 +6106,6 @@ }, "node_modules/micromark-util-symbol": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", "dev": true, "funding": [ { @@ -5344,8 +6121,6 @@ }, "node_modules/micromark-util-types": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", - "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", "dev": true, "funding": [ { @@ -5361,8 +6136,6 @@ }, "node_modules/micromatch": { "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "license": "MIT", "dependencies": { @@ -5375,8 +6148,6 @@ }, "node_modules/minimatch": { "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, "license": "ISC", "dependencies": { @@ -5386,16 +6157,47 @@ "node": "*" } }, + "node_modules/minimist": { + 
"version": "1.2.8", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mouse-change": { + "version": "1.4.0", + "license": "MIT", + "dependencies": { + "mouse-event": "^1.0.0" + } + }, + "node_modules/mouse-event": { + "version": "1.0.5", + "license": "MIT" + }, + "node_modules/mouse-event-offset": { + "version": "3.0.2", + "license": "MIT" + }, + "node_modules/mouse-wheel": { + "version": "1.2.0", + "license": "MIT", + "dependencies": { + "right-now": "^1.0.0", + "signum": "^1.0.0", + "to-px": "^1.0.1" + } + }, "node_modules/ms": { "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/murmurhash-js": { + "version": "1.0.0", "license": "MIT" }, "node_modules/nanoid": { "version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "funding": [ { "type": "github", @@ -5410,23 +6212,47 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, + "node_modules/native-promise-only": { + "version": "0.8.1", + "license": "MIT" + }, "node_modules/natural-compare": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true, "license": "MIT" }, + "node_modules/needle": { + "version": "2.9.1", + "license": "MIT", + "dependencies": { + "debug": "^3.2.6", + "iconv-lite": "^0.4.4", + "sax": "^1.2.4" + }, + "bin": { + "needle": "bin/needle" + }, + "engines": { + "node": ">= 4.4.x" + } + }, + "node_modules/needle/node_modules/debug": { + "version": "3.2.7", + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + 
"node_modules/next-tick": { + "version": "1.1.0", + "license": "ISC" + }, "node_modules/node-releases": { "version": "2.0.23", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.23.tgz", - "integrity": "sha512-cCmFDMSm26S6tQSDpBCg/NR8NENrVPhAJSf+XbxBG4rPFaaonlEoE9wHQmun+cls499TQGSb7ZyPBRlzgKfpeg==", "license": "MIT" }, "node_modules/normalize-path": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", "license": "MIT", "engines": { "node": ">=0.10.0" @@ -5434,25 +6260,47 @@ }, "node_modules/normalize-range": { "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" } }, + "node_modules/normalize-svg-path": { + "version": "0.1.0", + "license": "MIT" + }, + "node_modules/number-is-integer": { + "version": "1.0.1", + "license": "MIT", + "dependencies": { + "is-finite": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, "node_modules/oniguruma-parser": { "version": "0.12.1", - "resolved": "https://registry.npmjs.org/oniguruma-parser/-/oniguruma-parser-0.12.1.tgz", - "integrity": "sha512-8Unqkvk1RYc6yq2WBYRj4hdnsAxVze8i7iPfQr8e4uSP3tRv0rpZcbGUDvxfQQcdwHt/e9PrMvGCsa8OqG9X3w==", "dev": true, "license": "MIT" }, "node_modules/oniguruma-to-es": { "version": "4.3.4", - "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-4.3.4.tgz", - "integrity": 
"sha512-3VhUGN3w2eYxnTzHn+ikMI+fp/96KoRSVK9/kMTcFqj1NRDh2IhQCKvYxDnWePKRXY/AqH+Fuiyb7VHSzBjHfA==", "dev": true, "license": "MIT", "dependencies": { @@ -5463,8 +6311,6 @@ }, "node_modules/optionator": { "version": "0.9.4", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", - "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, "license": "MIT", "dependencies": { @@ -5481,8 +6327,6 @@ }, "node_modules/p-limit": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, "license": "MIT", "dependencies": { @@ -5497,8 +6341,6 @@ }, "node_modules/p-locate": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, "license": "MIT", "dependencies": { @@ -5513,8 +6355,6 @@ }, "node_modules/parent-module": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dev": true, "license": "MIT", "dependencies": { @@ -5524,10 +6364,27 @@ "node": ">=6" } }, + "node_modules/parenthesis": { + "version": "3.1.8", + "license": "MIT" + }, + "node_modules/parse-rect": { + "version": "1.2.0", + "license": "MIT", + "dependencies": { + "pick-by-alias": "^1.2.0" + } + }, + "node_modules/parse-svg-path": { + "version": "0.1.2", + "license": "MIT" + }, + "node_modules/parse-unit": { + "version": "1.0.1", + "license": "MIT" + }, "node_modules/path-exists": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": 
"sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true, "license": "MIT", "engines": { @@ -5536,30 +6393,45 @@ }, "node_modules/path-key": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "dev": true, "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/path-parse": { + "version": "1.0.7", + "license": "MIT" + }, "node_modules/pathe": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "license": "MIT" + }, + "node_modules/pbf": { + "version": "3.3.0", + "license": "BSD-3-Clause", + "dependencies": { + "ieee754": "^1.1.12", + "resolve-protobuf-schema": "^2.1.0" + }, + "bin": { + "pbf": "bin/pbf" + } + }, + "node_modules/performance-now": { + "version": "2.1.0", + "license": "MIT" + }, + "node_modules/pick-by-alias": { + "version": "1.2.0", "license": "MIT" }, "node_modules/picocolors": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "license": "ISC" }, "node_modules/picomatch": { "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "license": "MIT", "engines": { "node": ">=8.6" @@ -5568,10 +6440,90 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/plotly.js": { + "version": "3.5.0", + "license": "MIT", + "dependencies": { + "@plotly/d3": "3.8.2", + "@plotly/d3-sankey": "0.7.2", + "@plotly/d3-sankey-circular": "0.33.1", + "@plotly/mapbox-gl": 
"1.13.4", + "@plotly/regl": "^2.1.2", + "@turf/area": "^7.1.0", + "@turf/bbox": "^7.1.0", + "@turf/centroid": "^7.1.0", + "base64-arraybuffer": "^1.0.2", + "canvas-fit": "^1.5.0", + "color-alpha": "1.0.4", + "color-normalize": "1.5.0", + "color-parse": "2.0.0", + "color-rgba": "3.0.0", + "country-regex": "^1.1.0", + "d3-force": "^1.2.1", + "d3-format": "^1.4.5", + "d3-geo": "^1.12.1", + "d3-geo-projection": "^2.9.0", + "d3-hierarchy": "^1.1.9", + "d3-interpolate": "^3.0.1", + "d3-time": "^1.1.0", + "d3-time-format": "^2.2.3", + "fast-isnumeric": "^1.1.4", + "gl-mat4": "^1.2.0", + "gl-text": "^1.4.0", + "has-hover": "^1.0.1", + "has-passive-events": "^1.0.0", + "is-mobile": "^4.0.0", + "maplibre-gl": "^4.7.1", + "mouse-change": "^1.4.0", + "mouse-event-offset": "^3.0.2", + "mouse-wheel": "^1.2.0", + "native-promise-only": "^0.8.1", + "parse-svg-path": "^0.1.2", + "point-in-polygon": "^1.1.0", + "polybooljs": "^1.2.2", + "probe-image-size": "^7.2.3", + "regl-error2d": "^2.0.12", + "regl-line2d": "^3.1.3", + "regl-scatter2d": "^3.3.1", + "regl-splom": "^1.0.14", + "strongly-connected-components": "^1.0.1", + "superscript-text": "^1.0.0", + "svg-path-sdf": "^1.1.3", + "tinycolor2": "^1.4.2", + "to-px": "1.0.1", + "topojson-client": "^3.1.0", + "webgl-context": "^2.2.0", + "world-calendars": "^1.0.4" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/plotly.js/node_modules/d3-format": { + "version": "1.4.5", + "license": "BSD-3-Clause" + }, + "node_modules/plotly.js/node_modules/d3-time": { + "version": "1.1.0", + "license": "BSD-3-Clause" + }, + "node_modules/plotly.js/node_modules/d3-time-format": { + "version": "2.3.0", + "license": "BSD-3-Clause", + "dependencies": { + "d3-time": "1" + } + }, + "node_modules/point-in-polygon": { + "version": "1.1.0", + "license": "MIT" + }, + "node_modules/polybooljs": { + "version": "1.2.2", + "license": "MIT" + }, "node_modules/postcss": { "version": "8.5.6", - "resolved": 
"https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "funding": [ { "type": "opencollective", @@ -5598,15 +6550,15 @@ }, "node_modules/postcss-value-parser": { "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", "dev": true, "license": "MIT" }, + "node_modules/potpack": { + "version": "1.0.2", + "license": "ISC" + }, "node_modules/prelude-ls": { "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true, "license": "MIT", "engines": { @@ -5615,8 +6567,6 @@ }, "node_modules/prettier": { "version": "3.8.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.8.1.tgz", - "integrity": "sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==", "license": "MIT", "bin": { "prettier": "bin/prettier.cjs" @@ -5628,10 +6578,34 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, + "node_modules/probe-image-size": { + "version": "7.2.3", + "license": "MIT", + "dependencies": { + "lodash.merge": "^4.6.2", + "needle": "^2.5.2", + "stream-parser": "~0.3.1" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "license": "MIT" + }, + "node_modules/prop-types": { + "version": "15.8.1", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/prop-types/node_modules/react-is": { + "version": "16.13.1", + "license": "MIT" + }, "node_modules/property-information": { "version": "7.1.0", - "resolved": 
"https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", - "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", "dev": true, "license": "MIT", "funding": { @@ -5639,10 +6613,12 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/protocol-buffers-schema": { + "version": "3.6.1", + "license": "MIT" + }, "node_modules/punycode": { "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": true, "license": "MIT", "engines": { @@ -5651,8 +6627,6 @@ }, "node_modules/queue-microtask": { "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", "dev": true, "funding": [ { @@ -5670,10 +6644,19 @@ ], "license": "MIT" }, + "node_modules/quickselect": { + "version": "2.0.0", + "license": "ISC" + }, + "node_modules/raf": { + "version": "3.4.1", + "license": "MIT", + "dependencies": { + "performance-now": "^2.1.0" + } + }, "node_modules/react": { "version": "19.2.0", - "resolved": "https://registry.npmjs.org/react/-/react-19.2.0.tgz", - "integrity": "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ==", "license": "MIT", "engines": { "node": ">=0.10.0" @@ -5681,8 +6664,6 @@ }, "node_modules/react-dom": { "version": "19.2.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.0.tgz", - "integrity": "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ==", "license": "MIT", "dependencies": { "scheduler": "^0.27.0" @@ -5693,15 +6674,22 @@ }, "node_modules/react-is": { "version": "18.3.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", - 
"integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", "license": "MIT", "peer": true }, + "node_modules/react-plotly.js": { + "version": "2.6.0", + "license": "MIT", + "dependencies": { + "prop-types": "^15.8.1" + }, + "peerDependencies": { + "plotly.js": ">1.34.0", + "react": ">0.13.0" + } + }, "node_modules/react-redux": { "version": "9.2.0", - "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.2.0.tgz", - "integrity": "sha512-ROY9fvHhwOD9ySfrF0wmvu//bKCQ6AeZZq1nJNtbDC+kk5DuSuNX/n6YWYF/SYy7bSba4D4FSz8DJeKY/S/r+g==", "license": "MIT", "dependencies": { "@types/use-sync-external-store": "^0.0.6", @@ -5723,8 +6711,6 @@ }, "node_modules/react-refresh": { "version": "0.17.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", - "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", "dev": true, "license": "MIT", "engines": { @@ -5733,8 +6719,6 @@ }, "node_modules/react-remove-scroll": { "version": "2.7.1", - "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.1.tgz", - "integrity": "sha512-HpMh8+oahmIdOuS5aFKKY6Pyog+FNaZV/XyJOq7b4YFwsFHe5yYfdbIalI4k3vU2nSDql7YskmUseHsRrJqIPA==", "license": "MIT", "dependencies": { "react-remove-scroll-bar": "^2.3.7", @@ -5758,8 +6742,6 @@ }, "node_modules/react-remove-scroll-bar": { "version": "2.3.8", - "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.8.tgz", - "integrity": "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==", "license": "MIT", "dependencies": { "react-style-singleton": "^2.2.2", @@ -5780,8 +6762,6 @@ }, "node_modules/react-style-singleton": { "version": "2.2.3", - "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.3.tgz", - "integrity": 
"sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==", "license": "MIT", "dependencies": { "get-nonce": "^1.0.0", @@ -5800,10 +6780,29 @@ } } }, + "node_modules/readable-stream": { + "version": "2.3.8", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/isarray": { + "version": "1.0.0", + "license": "MIT" + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "license": "MIT" + }, "node_modules/readdirp": { "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", "license": "MIT", "dependencies": { "picomatch": "^2.2.1" @@ -5814,8 +6813,6 @@ }, "node_modules/recast": { "version": "0.23.11", - "resolved": "https://registry.npmjs.org/recast/-/recast-0.23.11.tgz", - "integrity": "sha512-YTUo+Flmw4ZXiWfQKGcwwc11KnoRAYgzAE2E7mXKCjSviTKShtxBsN6YUUBB2gtaBzKzeKunxhUwNHQuRryhWA==", "license": "MIT", "dependencies": { "ast-types": "^0.16.1", @@ -5830,8 +6827,6 @@ }, "node_modules/recast/node_modules/source-map": { "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "license": "BSD-3-Clause", "engines": { "node": ">=0.10.0" @@ -5839,8 +6834,6 @@ }, "node_modules/recharts": { "version": "3.4.1", - "resolved": "https://registry.npmjs.org/recharts/-/recharts-3.4.1.tgz", - "integrity": "sha512-35kYg6JoOgwq8sE4rhYkVWwa6aAIgOtT+Ob0gitnShjwUwZmhrmy7Jco/5kJNF4PnLXgt9Hwq+geEMS+WrjU1g==", "license": "MIT", "workspaces": [ "www" @@ -5869,14 +6862,10 @@ }, 
"node_modules/redux": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/redux/-/redux-5.0.1.tgz", - "integrity": "sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w==", "license": "MIT" }, "node_modules/redux-thunk": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/redux-thunk/-/redux-thunk-3.1.0.tgz", - "integrity": "sha512-NW2r5T6ksUKXCabzhL9z+h206HQw/NJkcLm1GPImRQ8IzfXwRGqjVhKJGauHirT0DAuyy6hjdnMZaRoAcy0Klw==", "license": "MIT", "peerDependencies": { "redux": "^5.0.0" @@ -5884,8 +6873,6 @@ }, "node_modules/regex": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/regex/-/regex-6.0.1.tgz", - "integrity": "sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==", "dev": true, "license": "MIT", "dependencies": { @@ -5894,8 +6881,6 @@ }, "node_modules/regex-recursion": { "version": "6.0.2", - "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-6.0.2.tgz", - "integrity": "sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==", "dev": true, "license": "MIT", "dependencies": { @@ -5904,15 +6889,75 @@ }, "node_modules/regex-utilities": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/regex-utilities/-/regex-utilities-2.3.0.tgz", - "integrity": "sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==", "dev": true, "license": "MIT" }, + "node_modules/regl": { + "version": "2.1.1", + "license": "MIT" + }, + "node_modules/regl-error2d": { + "version": "2.0.12", + "license": "MIT", + "dependencies": { + "array-bounds": "^1.0.1", + "color-normalize": "^1.5.0", + "flatten-vertex-data": "^1.0.2", + "object-assign": "^4.1.1", + "pick-by-alias": "^1.2.0", + "to-float32": "^1.1.0", + "update-diff": "^1.1.0" + } + }, + "node_modules/regl-line2d": { + "version": "3.1.3", + "license": "MIT", + "dependencies": { + "array-bounds": "^1.0.1", + 
"array-find-index": "^1.0.2", + "array-normalize": "^1.1.4", + "color-normalize": "^1.5.0", + "earcut": "^2.1.5", + "es6-weak-map": "^2.0.3", + "flatten-vertex-data": "^1.0.2", + "object-assign": "^4.1.1", + "parse-rect": "^1.2.0", + "pick-by-alias": "^1.2.0", + "to-float32": "^1.1.0" + } + }, + "node_modules/regl-scatter2d": { + "version": "3.4.0", + "license": "MIT", + "dependencies": { + "@plotly/point-cluster": "^3.1.9", + "array-bounds": "^1.0.1", + "color-id": "^1.1.0", + "color-normalize": "^1.5.0", + "flatten-vertex-data": "^1.0.2", + "glslify": "^7.0.0", + "parse-rect": "^1.2.0", + "pick-by-alias": "^1.2.0", + "to-float32": "^1.1.0", + "update-diff": "^1.1.0" + } + }, + "node_modules/regl-splom": { + "version": "1.0.14", + "license": "MIT", + "dependencies": { + "array-bounds": "^1.0.1", + "array-range": "^1.0.1", + "color-alpha": "^1.0.4", + "flatten-vertex-data": "^1.0.2", + "parse-rect": "^1.2.0", + "pick-by-alias": "^1.2.0", + "raf": "^3.4.1", + "regl-scatter2d": "^3.2.3" + } + }, "node_modules/require-directory": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true, "license": "MIT", "engines": { @@ -5921,14 +6966,29 @@ }, "node_modules/reselect": { "version": "5.1.1", - "resolved": "https://registry.npmjs.org/reselect/-/reselect-5.1.1.tgz", - "integrity": "sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w==", "license": "MIT" }, + "node_modules/resolve": { + "version": "1.22.12", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, 
"node_modules/resolve-from": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true, "license": "MIT", "engines": { @@ -5937,17 +6997,20 @@ }, "node_modules/resolve-pkg-maps": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", - "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", "license": "MIT", "funding": { "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" } }, + "node_modules/resolve-protobuf-schema": { + "version": "2.1.0", + "license": "MIT", + "dependencies": { + "protocol-buffers-schema": "^3.3.1" + } + }, "node_modules/reusify": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", - "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", "dev": true, "license": "MIT", "engines": { @@ -5955,10 +7018,12 @@ "node": ">=0.10.0" } }, + "node_modules/right-now": { + "version": "1.0.0", + "license": "MIT" + }, "node_modules/rolldown": { "version": "1.0.0-beta.41", - "resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-beta.41.tgz", - "integrity": "sha512-U+NPR0Bkg3wm61dteD2L4nAM1U9dtaqVrpDXwC36IKRHpEO/Ubpid4Nijpa2imPchcVNHfxVFwSSMJdwdGFUbg==", "license": "MIT", "dependencies": { "@oxc-project/types": "=0.93.0", @@ -5990,14 +7055,10 @@ }, "node_modules/rolldown/node_modules/@rolldown/pluginutils": { "version": "1.0.0-beta.41", - "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.41.tgz", - "integrity": "sha512-ycMEPrS3StOIeb87BT3/+bu+blEtyvwQ4zmo2IcJQy0Rd1DAAhKksA0iUZ3MYSpJtjlPhg0Eo6mvVS6ggPhRbw==", "license": "MIT" }, "node_modules/run-parallel": { "version": "1.2.0", - "resolved": 
"https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", "dev": true, "funding": [ { @@ -6018,16 +7079,45 @@ "queue-microtask": "^1.2.2" } }, + "node_modules/rw": { + "version": "1.3.3", + "license": "BSD-3-Clause" + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "license": "MIT" + }, + "node_modules/sax": { + "version": "1.6.0", + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=11.0.0" + } + }, "node_modules/scheduler": { "version": "0.27.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", - "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", "license": "MIT" }, "node_modules/semver": { "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -6035,8 +7125,6 @@ }, "node_modules/seroval": { "version": "1.5.1", - "resolved": "https://registry.npmjs.org/seroval/-/seroval-1.5.1.tgz", - "integrity": "sha512-OwrZRZAfhHww0WEnKHDY8OM0U/Qs8OTfIDWhUD4BLpNJUfXK4cGmjiagGze086m+mhI+V2nD0gfbHEnJjb9STA==", "license": "MIT", "engines": { "node": ">=10" @@ -6044,8 +7132,6 @@ }, "node_modules/seroval-plugins": { "version": "1.5.1", - "resolved": "https://registry.npmjs.org/seroval-plugins/-/seroval-plugins-1.5.1.tgz", - "integrity": "sha512-4FbuZ/TMl02sqv0RTFexu0SP6V+ywaIe5bAWCCEik0fk17BhALgwvUDVF7e3Uvf9pxmwCEJsRPmlkUE6HdzLAw==", "license": 
"MIT", "engines": { "node": ">=10" @@ -6054,10 +7140,12 @@ "seroval": "^1.0" } }, + "node_modules/shallow-copy": { + "version": "0.0.1", + "license": "MIT" + }, "node_modules/shebang-command": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dev": true, "license": "MIT", "dependencies": { @@ -6069,8 +7157,6 @@ }, "node_modules/shebang-regex": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "dev": true, "license": "MIT", "engines": { @@ -6079,8 +7165,6 @@ }, "node_modules/shiki": { "version": "3.15.0", - "resolved": "https://registry.npmjs.org/shiki/-/shiki-3.15.0.tgz", - "integrity": "sha512-kLdkY6iV3dYbtPwS9KXU7mjfmDm25f5m0IPNFnaXO7TBPcvbUOY72PYXSuSqDzwp+vlH/d7MXpHlKO/x+QoLXw==", "dev": true, "license": "MIT", "dependencies": { @@ -6094,10 +7178,12 @@ "@types/hast": "^3.0.4" } }, + "node_modules/signum": { + "version": "1.0.0", + "license": "MIT" + }, "node_modules/solid-js": { "version": "1.9.11", - "resolved": "https://registry.npmjs.org/solid-js/-/solid-js-1.9.11.tgz", - "integrity": "sha512-WEJtcc5mkh/BnHA6Yrg4whlF8g6QwpmXXRg4P2ztPmcKeHHlH4+djYecBLhSpecZY2RRECXYUwIc/C2r3yzQ4Q==", "license": "MIT", "peer": true, "dependencies": { @@ -6108,8 +7194,6 @@ }, "node_modules/source-map": { "version": "0.7.6", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz", - "integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==", "license": "BSD-3-Clause", "engines": { "node": ">= 12" @@ -6117,8 +7201,6 @@ }, "node_modules/source-map-js": { "version": "1.2.1", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", - "integrity": 
"sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "license": "BSD-3-Clause", "engines": { "node": ">=0.10.0" @@ -6126,8 +7208,6 @@ }, "node_modules/space-separated-tokens": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", - "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", "dev": true, "license": "MIT", "funding": { @@ -6135,10 +7215,61 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/stack-trace": { + "version": "0.0.9", + "engines": { + "node": "*" + } + }, + "node_modules/static-eval": { + "version": "2.1.1", + "license": "MIT", + "dependencies": { + "escodegen": "^2.1.0" + } + }, + "node_modules/stream-parser": { + "version": "0.3.1", + "license": "MIT", + "dependencies": { + "debug": "2" + } + }, + "node_modules/stream-parser/node_modules/debug": { + "version": "2.6.9", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/stream-parser/node_modules/ms": { + "version": "2.0.0", + "license": "MIT" + }, + "node_modules/stream-shift": { + "version": "1.0.3", + "license": "MIT" + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "license": "MIT" + }, + "node_modules/string-split-by": { + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "parenthesis": "^3.1.5" + } + }, "node_modules/stringify-entities": { "version": "4.0.4", - "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", - "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", "dev": true, "license": "MIT", "dependencies": { @@ -6152,8 +7283,6 @@ }, "node_modules/strip-json-comments": { "version": "3.1.1", 
- "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true, "license": "MIT", "engines": { @@ -6163,10 +7292,27 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strongly-connected-components": { + "version": "1.0.1", + "license": "MIT" + }, + "node_modules/supercluster": { + "version": "7.1.5", + "license": "ISC", + "dependencies": { + "kdbush": "^3.0.0" + } + }, + "node_modules/supercluster/node_modules/kdbush": { + "version": "3.0.0", + "license": "ISC" + }, + "node_modules/superscript-text": { + "version": "1.0.0", + "license": "MIT" + }, "node_modules/supports-color": { "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "license": "MIT", "dependencies": { @@ -6176,10 +7322,50 @@ "node": ">=8" } }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/svg-arc-to-cubic-bezier": { + "version": "3.2.0", + "license": "ISC" + }, + "node_modules/svg-path-bounds": { + "version": "1.0.2", + "license": "MIT", + "dependencies": { + "abs-svg-path": "^0.1.1", + "is-svg-path": "^1.0.1", + "normalize-svg-path": "^1.0.0", + "parse-svg-path": "^0.1.2" + } + }, + "node_modules/svg-path-bounds/node_modules/normalize-svg-path": { + "version": "1.1.0", + "license": "MIT", + "dependencies": { + "svg-arc-to-cubic-bezier": "^3.0.0" + } + }, + "node_modules/svg-path-sdf": { + "version": "1.1.3", + "license": "MIT", + "dependencies": { + "bitmap-sdf": "^1.0.0", + "draw-svg-path": "^1.0.0", + "is-svg-path": "^1.0.1", + "parse-svg-path": "^0.1.2", + 
"svg-path-bounds": "^1.0.1" + } + }, "node_modules/tailwind-merge": { "version": "3.3.1", - "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.3.1.tgz", - "integrity": "sha512-gBXpgUm/3rp1lMZZrM/w7D8GKqshif0zAymAhbCyIt8KMe+0v9DQ7cdYLR4FHH/cKpdTXb+A/tKKU3eolfsI+g==", "license": "MIT", "funding": { "type": "github", @@ -6188,14 +7374,10 @@ }, "node_modules/tailwindcss": { "version": "4.1.17", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.17.tgz", - "integrity": "sha512-j9Ee2YjuQqYT9bbRTfTZht9W/ytp5H+jJpZKiYdP/bpnXARAuELt9ofP0lPnmHjbga7SNQIxdTAXCmtKVYjN+Q==", "license": "MIT" }, "node_modules/tailwindcss-animate": { "version": "1.0.7", - "resolved": "https://registry.npmjs.org/tailwindcss-animate/-/tailwindcss-animate-1.0.7.tgz", - "integrity": "sha512-bl6mpH3T7I3UFxuvDEXLxy/VuFxBk5bbzplh7tXI68mwMokNYd1t9qPBHlnyTwfa4JGC4zP516I1hYYtQ/vspA==", "license": "MIT", "peerDependencies": { "tailwindcss": ">=3.0.0 || insiders" @@ -6203,8 +7385,6 @@ }, "node_modules/tapable": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", - "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", "dev": true, "license": "MIT", "engines": { @@ -6215,22 +7395,28 @@ "url": "https://opencollective.com/webpack" } }, + "node_modules/through2": { + "version": "2.0.5", + "license": "MIT", + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, "node_modules/tiny-invariant": { "version": "1.3.3", - "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", - "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", "license": "MIT" }, "node_modules/tiny-warning": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": 
"sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", + "license": "MIT" + }, + "node_modules/tinycolor2": { + "version": "1.6.0", "license": "MIT" }, "node_modules/tinyglobby": { "version": "0.2.15", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", - "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", "license": "MIT", "dependencies": { "fdir": "^6.5.0", @@ -6245,8 +7431,6 @@ }, "node_modules/tinyglobby/node_modules/fdir": { "version": "6.5.0", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", - "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", "license": "MIT", "engines": { "node": ">=12.0.0" @@ -6262,8 +7446,6 @@ }, "node_modules/tinyglobby/node_modules/picomatch": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", "engines": { "node": ">=12" @@ -6272,10 +7454,23 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/tinyqueue": { + "version": "2.0.3", + "license": "ISC" + }, + "node_modules/to-float32": { + "version": "1.1.0", + "license": "MIT" + }, + "node_modules/to-px": { + "version": "1.0.1", + "license": "MIT", + "dependencies": { + "parse-unit": "^1.0.1" + } + }, "node_modules/to-regex-range": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "license": "MIT", "dependencies": { "is-number": "^7.0.0" @@ -6284,10 +7479,20 @@ "node": ">=8.0" } }, + "node_modules/topojson-client": { + "version": "3.1.0", + "license": "ISC", + "dependencies": { + "commander": "2" + }, + "bin": { + 
"topo2geo": "bin/topo2geo", + "topomerge": "bin/topomerge", + "topoquantize": "bin/topoquantize" + } + }, "node_modules/trim-lines": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", - "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", "dev": true, "license": "MIT", "funding": { @@ -6297,8 +7502,6 @@ }, "node_modules/ts-api-utils": { "version": "2.4.0", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz", - "integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==", "dev": true, "license": "MIT", "engines": { @@ -6310,14 +7513,10 @@ }, "node_modules/tslib": { "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", "license": "0BSD" }, "node_modules/tsx": { "version": "4.20.6", - "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.20.6.tgz", - "integrity": "sha512-ytQKuwgmrrkDTFP4LjR0ToE2nqgy886GpvRSpU0JAnrdBYppuY5rLkRUYPU1yCryb24SsKBTL/hlDQAEFVwtZg==", "license": "MIT", "dependencies": { "esbuild": "~0.25.0", @@ -6335,17 +7534,17 @@ }, "node_modules/tw-animate-css": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/tw-animate-css/-/tw-animate-css-1.4.0.tgz", - "integrity": "sha512-7bziOlRqH0hJx80h/3mbicLW7o8qLsH5+RaLR2t+OHM3D0JlWGODQKQ4cxbK7WlvmUxpcj6Kgu6EKqjrGFe3QQ==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/Wombosvideo" } }, + "node_modules/type": { + "version": "2.7.3", + "license": "ISC" + }, "node_modules/type-check": { "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "dev": true, "license": "MIT", "dependencies": { @@ -6355,10 
+7554,20 @@ "node": ">= 0.8.0" } }, + "node_modules/typedarray": { + "version": "0.0.6", + "license": "MIT" + }, + "node_modules/typedarray-pool": { + "version": "1.2.0", + "license": "MIT", + "dependencies": { + "bit-twiddle": "^1.0.0", + "dup": "^1.0.0" + } + }, "node_modules/typescript": { "version": "5.9.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", - "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "dev": true, "license": "Apache-2.0", "bin": { @@ -6371,8 +7580,6 @@ }, "node_modules/typescript-eslint": { "version": "8.45.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.45.0.tgz", - "integrity": "sha512-qzDmZw/Z5beNLUrXfd0HIW6MzIaAV5WNDxmMs9/3ojGOpYavofgNAAD/nC6tGV2PczIi0iw8vot2eAe/sBn7zg==", "dev": true, "license": "MIT", "dependencies": { @@ -6395,15 +7602,11 @@ }, "node_modules/undici-types": { "version": "7.13.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.13.0.tgz", - "integrity": "sha512-Ov2Rr9Sx+fRgagJ5AX0qvItZG/JKKoBRAVITs1zk7IqZGTJUwgUr7qoYBpWwakpWilTZFM98rG/AFRocu10iIQ==", "devOptional": true, "license": "MIT" }, "node_modules/unist-util-is": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", - "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", "dev": true, "license": "MIT", "dependencies": { @@ -6416,8 +7619,6 @@ }, "node_modules/unist-util-position": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", - "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", "dev": true, "license": "MIT", "dependencies": { @@ -6430,8 +7631,6 @@ }, "node_modules/unist-util-stringify-position": { "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", - "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", "dev": true, "license": "MIT", "dependencies": { @@ -6444,8 +7643,6 @@ }, "node_modules/unist-util-visit": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", - "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", "dev": true, "license": "MIT", "dependencies": { @@ -6460,8 +7657,6 @@ }, "node_modules/unist-util-visit-parents": { "version": "6.0.2", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", - "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", "dev": true, "license": "MIT", "dependencies": { @@ -6475,8 +7670,6 @@ }, "node_modules/unplugin": { "version": "2.3.10", - "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-2.3.10.tgz", - "integrity": "sha512-6NCPkv1ClwH+/BGE9QeoTIl09nuiAt0gS28nn1PvYXsGKRwM2TCbFA2QiilmehPDTXIe684k4rZI1yl3A1PCUw==", "license": "MIT", "dependencies": { "@jridgewell/remapping": "^2.3.5", @@ -6490,8 +7683,6 @@ }, "node_modules/unplugin/node_modules/picomatch": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", "engines": { "node": ">=12" @@ -6500,10 +7691,12 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/unquote": { + "version": "1.1.1", + "license": "MIT" + }, "node_modules/update-browserslist-db": { "version": "1.1.3", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", - "integrity": 
"sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", "funding": [ { "type": "opencollective", @@ -6530,10 +7723,12 @@ "browserslist": ">= 4.21.0" } }, + "node_modules/update-diff": { + "version": "1.1.0", + "license": "MIT" + }, "node_modules/uri-js": { "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -6542,8 +7737,6 @@ }, "node_modules/use-callback-ref": { "version": "1.3.3", - "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz", - "integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==", "license": "MIT", "dependencies": { "tslib": "^2.0.0" @@ -6563,8 +7756,6 @@ }, "node_modules/use-sidecar": { "version": "1.1.3", - "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz", - "integrity": "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==", "license": "MIT", "dependencies": { "detect-node-es": "^1.1.0", @@ -6585,17 +7776,17 @@ }, "node_modules/use-sync-external-store": { "version": "1.6.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz", - "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==", "license": "MIT", "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "license": "MIT" + }, "node_modules/vfile": { "version": "6.0.3", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", - "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", "dev": true, "license": "MIT", "dependencies": { @@ 
-6609,8 +7800,6 @@ }, "node_modules/vfile-message": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", - "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", "dev": true, "license": "MIT", "dependencies": { @@ -6624,8 +7813,6 @@ }, "node_modules/victory-vendor": { "version": "37.3.6", - "resolved": "https://registry.npmjs.org/victory-vendor/-/victory-vendor-37.3.6.tgz", - "integrity": "sha512-SbPDPdDBYp+5MJHhBCAyI7wKM3d5ivekigc2Dk2s7pgbZ9wIgIBYGVw4zGHBml/qTFbexrofXW6Gu4noGxrOwQ==", "license": "MIT AND ISC", "dependencies": { "@types/d3-array": "^3.0.3", @@ -6647,8 +7834,6 @@ "node_modules/vite": { "name": "rolldown-vite", "version": "7.1.14", - "resolved": "https://registry.npmjs.org/rolldown-vite/-/rolldown-vite-7.1.14.tgz", - "integrity": "sha512-eSiiRJmovt8qDJkGyZuLnbxAOAdie6NCmmd0NkTC0RJI9duiSBTfr8X2mBYJOUFzxQa2USaHmL99J9uMxkjCyw==", "license": "MIT", "dependencies": { "@oxc-project/runtime": "0.92.0", @@ -6722,8 +7907,6 @@ }, "node_modules/vite/node_modules/fdir": { "version": "6.5.0", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", - "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", "license": "MIT", "engines": { "node": ">=12.0.0" @@ -6739,8 +7922,6 @@ }, "node_modules/vite/node_modules/picomatch": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", "engines": { "node": ">=12" @@ -6749,16 +7930,32 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/vt-pbf": { + "version": "3.1.3", + "license": "MIT", + "dependencies": { + "@mapbox/point-geometry": "0.1.0", + "@mapbox/vector-tile": "^1.3.1", + "pbf": "^3.2.1" + } + }, + "node_modules/weak-map": { + "version": "1.0.8", + 
"license": "Apache-2.0" + }, + "node_modules/webgl-context": { + "version": "2.2.0", + "license": "MIT", + "dependencies": { + "get-canvas-context": "^1.0.1" + } + }, "node_modules/webpack-virtual-modules": { "version": "0.6.2", - "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.2.tgz", - "integrity": "sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ==", "license": "MIT" }, "node_modules/which": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dev": true, "license": "ISC", "dependencies": { @@ -6773,18 +7970,32 @@ }, "node_modules/word-wrap": { "version": "1.2.5", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", - "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" } }, + "node_modules/world-calendars": { + "version": "1.0.4", + "license": "MIT", + "dependencies": { + "object-assign": "^4.1.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "license": "ISC" + }, + "node_modules/xtend": { + "version": "4.0.2", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, "node_modules/y18n": { "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", "dev": true, "license": "ISC", "engines": { @@ -6793,14 +8004,10 @@ }, "node_modules/yallist": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", "license": "ISC" }, "node_modules/yargs": { "version": "17.7.2", - "resolved": 
"https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", "dev": true, "license": "MIT", "dependencies": { @@ -6818,8 +8025,6 @@ }, "node_modules/yargs-parser": { "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", "dev": true, "license": "ISC", "engines": { @@ -6828,8 +8033,6 @@ }, "node_modules/yargs/node_modules/ansi-regex": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true, "license": "MIT", "engines": { @@ -6838,15 +8041,11 @@ }, "node_modules/yargs/node_modules/emoji-regex": { "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true, "license": "MIT" }, "node_modules/yargs/node_modules/string-width": { "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "license": "MIT", "dependencies": { @@ -6860,8 +8059,6 @@ }, "node_modules/yargs/node_modules/strip-ansi": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "license": "MIT", "dependencies": { @@ -6873,8 +8070,6 @@ }, "node_modules/yocto-queue": { "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - 
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "dev": true, "license": "MIT", "engines": { @@ -6886,8 +8081,6 @@ }, "node_modules/zod": { "version": "3.25.76", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", - "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/colinhacks" @@ -6895,35 +8088,12 @@ }, "node_modules/zwitch": { "version": "2.0.4", - "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", - "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", "dev": true, "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } - }, - "package": { - "name": "@databricks/appkit-ui", - "version": "1.0.0", - "extraneous": true, - "dependencies": { - "clsx": "^2.1.1", - "tailwind-merge": "^3.4.0" - }, - "devDependencies": { - "@types/react": "^19.0.0", - "@types/react-dom": "^19.0.0", - "react": "^19.0.0", - "react-dom": "^19.0.0", - "recharts": "^3.4.1" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0", - "recharts": "^2.0.0 || ^3.0.0" - } } } } diff --git a/apps/dev-playground/client/package.json b/apps/dev-playground/client/package.json index 9bf90c3fd..bf0470a20 100644 --- a/apps/dev-playground/client/package.json +++ b/apps/dev-playground/client/package.json @@ -21,8 +21,10 @@ "class-variance-authority": "0.7.1", "clsx": "2.1.1", "lucide-react": "0.546.0", + "plotly.js": "^3.5.0", "react": "19.2.0", "react-dom": "19.2.0", + "react-plotly.js": "^2.6.0", "recharts": "3.4.1", "tailwind-merge": "3.3.1", "tailwindcss-animate": "1.0.7", @@ -30,10 +32,12 @@ }, "devDependencies": { "@eslint/js": "9.36.0", + "@tailwindcss/postcss": "4.1.17", "@tanstack/router-cli": "1.133.20", "@types/node": "24.6.0", "@types/react": 
"19.2.2", "@types/react-dom": "19.2.2", + "@types/react-plotly.js": "^2.6.4", "@vitejs/plugin-react": "5.0.4", "autoprefixer": "10.4.21", "eslint": "9.36.0", @@ -43,7 +47,6 @@ "postcss": "8.5.6", "shiki": "3.15.0", "tailwindcss": "4.1.17", - "@tailwindcss/postcss": "4.1.17", "typescript": "5.9.3", "typescript-eslint": "8.45.0", "vite": "npm:rolldown-vite@7.1.14" diff --git a/apps/dev-playground/client/src/main.tsx b/apps/dev-playground/client/src/main.tsx index 5297b637a..30d056abc 100644 --- a/apps/dev-playground/client/src/main.tsx +++ b/apps/dev-playground/client/src/main.tsx @@ -1,9 +1,26 @@ +import { + type MetricsMetadataBundle, + registerMetricsMetadata, +} from "@databricks/appkit-ui/format"; import { createRouter, RouterProvider } from "@tanstack/react-router"; import React from "react"; import ReactDOM from "react-dom/client"; +// Build-time-emitted metadata bundle. In a production app this file is +// regenerated by `npx @databricks/appkit metric sync` (or the Vite plugin) +// from `config/queries/metric.json`. Phase 7 ships a hand-authored copy so +// the demo route at `/metrics` can wire format specs into Plotly even when +// the dev workspace does not host the underlying UC metric views. +import metricsMetadata from "../../shared/appkit-types/metrics.metadata.json"; import { routeTree } from "./routeTree.gen"; import "./index.css"; +// Register the metric metadata bundle once at startup so `useMetricView()`'s +// `metadata` field is populated for every consumer in the app. The cast is +// load-bearing: TypeScript widens JSON-imported string literals (`"sp"`) to +// `string`, which the bundle's `lane: "sp" | "obo"` discriminant rejects. +// The actual JSON values are hand-checked against the schema. 
+registerMetricsMetadata(metricsMetadata as unknown as MetricsMetadataBundle); + const router = createRouter({ routeTree, defaultPreload: "intent", diff --git a/apps/dev-playground/client/src/routeTree.gen.ts b/apps/dev-playground/client/src/routeTree.gen.ts index 45e280700..0ccf094cc 100644 --- a/apps/dev-playground/client/src/routeTree.gen.ts +++ b/apps/dev-playground/client/src/routeTree.gen.ts @@ -16,6 +16,7 @@ import { Route as SqlHelpersRouteRouteImport } from './routes/sql-helpers.route' import { Route as ServingRouteRouteImport } from './routes/serving.route' import { Route as ReconnectRouteRouteImport } from './routes/reconnect.route' import { Route as PolicyMatrixRouteRouteImport } from './routes/policy-matrix.route' +import { Route as MetricsRouteRouteImport } from './routes/metrics.route' import { Route as LakebaseRouteRouteImport } from './routes/lakebase.route' import { Route as JobsRouteRouteImport } from './routes/jobs.route' import { Route as GenieRouteRouteImport } from './routes/genie.route' @@ -61,6 +62,11 @@ const PolicyMatrixRouteRoute = PolicyMatrixRouteRouteImport.update({ path: '/policy-matrix', getParentRoute: () => rootRouteImport, } as any) +const MetricsRouteRoute = MetricsRouteRouteImport.update({ + id: '/metrics', + path: '/metrics', + getParentRoute: () => rootRouteImport, +} as any) const LakebaseRouteRoute = LakebaseRouteRouteImport.update({ id: '/lakebase', path: '/lakebase', @@ -117,6 +123,7 @@ export interface FileRoutesByFullPath { '/genie': typeof GenieRouteRoute '/jobs': typeof JobsRouteRoute '/lakebase': typeof LakebaseRouteRoute + '/metrics': typeof MetricsRouteRoute '/policy-matrix': typeof PolicyMatrixRouteRoute '/reconnect': typeof ReconnectRouteRoute '/serving': typeof ServingRouteRoute @@ -135,6 +142,7 @@ export interface FileRoutesByTo { '/genie': typeof GenieRouteRoute '/jobs': typeof JobsRouteRoute '/lakebase': typeof LakebaseRouteRoute + '/metrics': typeof MetricsRouteRoute '/policy-matrix': typeof 
PolicyMatrixRouteRoute '/reconnect': typeof ReconnectRouteRoute '/serving': typeof ServingRouteRoute @@ -154,6 +162,7 @@ export interface FileRoutesById { '/genie': typeof GenieRouteRoute '/jobs': typeof JobsRouteRoute '/lakebase': typeof LakebaseRouteRoute + '/metrics': typeof MetricsRouteRoute '/policy-matrix': typeof PolicyMatrixRouteRoute '/reconnect': typeof ReconnectRouteRoute '/serving': typeof ServingRouteRoute @@ -174,6 +183,7 @@ export interface FileRouteTypes { | '/genie' | '/jobs' | '/lakebase' + | '/metrics' | '/policy-matrix' | '/reconnect' | '/serving' @@ -192,6 +202,7 @@ export interface FileRouteTypes { | '/genie' | '/jobs' | '/lakebase' + | '/metrics' | '/policy-matrix' | '/reconnect' | '/serving' @@ -210,6 +221,7 @@ export interface FileRouteTypes { | '/genie' | '/jobs' | '/lakebase' + | '/metrics' | '/policy-matrix' | '/reconnect' | '/serving' @@ -229,6 +241,7 @@ export interface RootRouteChildren { GenieRouteRoute: typeof GenieRouteRoute JobsRouteRoute: typeof JobsRouteRoute LakebaseRouteRoute: typeof LakebaseRouteRoute + MetricsRouteRoute: typeof MetricsRouteRoute PolicyMatrixRouteRoute: typeof PolicyMatrixRouteRoute ReconnectRouteRoute: typeof ReconnectRouteRoute ServingRouteRoute: typeof ServingRouteRoute @@ -289,6 +302,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof PolicyMatrixRouteRouteImport parentRoute: typeof rootRouteImport } + '/metrics': { + id: '/metrics' + path: '/metrics' + fullPath: '/metrics' + preLoaderRoute: typeof MetricsRouteRouteImport + parentRoute: typeof rootRouteImport + } '/lakebase': { id: '/lakebase' path: '/lakebase' @@ -365,6 +385,7 @@ const rootRouteChildren: RootRouteChildren = { GenieRouteRoute: GenieRouteRoute, JobsRouteRoute: JobsRouteRoute, LakebaseRouteRoute: LakebaseRouteRoute, + MetricsRouteRoute: MetricsRouteRoute, PolicyMatrixRouteRoute: PolicyMatrixRouteRoute, ReconnectRouteRoute: ReconnectRouteRoute, ServingRouteRoute: ServingRouteRoute, diff --git 
a/apps/dev-playground/client/src/routes/__root.tsx b/apps/dev-playground/client/src/routes/__root.tsx index db42fdafb..ea7659ef2 100644 --- a/apps/dev-playground/client/src/routes/__root.tsx +++ b/apps/dev-playground/client/src/routes/__root.tsx @@ -40,6 +40,14 @@ function RootComponent() { Analytics + + + + + +

diff --git a/apps/dev-playground/client/src/routes/metrics.route.tsx b/apps/dev-playground/client/src/routes/metrics.route.tsx new file mode 100644 index 000000000..32d66aa4f --- /dev/null +++ b/apps/dev-playground/client/src/routes/metrics.route.tsx @@ -0,0 +1,450 @@ +import { + formatLabel, + formatValue, + toD3Format, +} from "@databricks/appkit-ui/format"; +import { + Card, + CardContent, + CardDescription, + CardHeader, + CardTitle, + useMetricView, +} from "@databricks/appkit-ui/react"; +import { createFileRoute } from "@tanstack/react-router"; +import { useEffect, useMemo, useState } from "react"; +import Plot from "react-plotly.js"; +import { Header } from "@/components/layout/header"; + +export const Route = createFileRoute("/metrics")({ + component: MetricsRoute, +}); + +/** + * Phase 7 demo route — exercises the full UC Metric View stack: + * + * 1. `useMetricView("revenue", { measures, dimensions, timeGrain, filter })` + * against the SP-lane revenue metric. Plotly chart wires `metadata.measures.arr.format` + * into `layout.yaxis.tickformat` via `toD3Format()` and `metadata.measures.arr.display_name` + * into the trace name + axis title via `formatLabel()`. + * + * 2. `useMetricView("customer_metrics", ...)` against the OBO-lane customer metric. + * The dev-playground exposes `/whoami` so the route can show "executing as ". + * Cache keys for OBO entries incorporate the hashed user identity (Phase 4), + * so the SP and OBO panels live independently. + * + * 3. A hardcoded structured filter (`region in [EMEA, APAC]`) demonstrates the + * 12-op filter spec — the server validates the predicate and parameterizes + * the values before they reach the SQL Warehouse. + * + * Graceful degradation: the demo workspace does not host the underlying UC + * metric views, so both queries surface a server error in real dev sessions. 
+ * The route renders the metadata flow + the typed surface either way; the + * "Could not load metric" panel is the v1 demo's expected behavior. + */ + +interface WhoamiResponse { + xForwardedUser: string | null; + adminUserId: string | null; + isAdmin: boolean; +} + +function useWhoami() { + const [user, setUser] = useState(null); + const [loading, setLoading] = useState(true); + + useEffect(() => { + let cancelled = false; + fetch("/whoami") + .then((res) => res.json() as Promise) + .then((data) => { + if (cancelled) return; + setUser(data.xForwardedUser); + setLoading(false); + }) + .catch(() => { + if (!cancelled) setLoading(false); + }); + return () => { + cancelled = true; + }; + }, []); + + return { user, loading }; +} + +/** + * Plotly trace data shape for the revenue line chart. Built from the result of + * `useMetricView("revenue", ...)`. Each row carries the chosen measure (`arr`) + * and the chosen dimensions (`region`, `created_at`). + */ +type RevenueRow = { + arr: number; + region: string; + created_at: string; +}; + +/** + * Watch the dark-mode class on `` so Plotly's layout can swap palettes + * when the ThemeSelector flips. Plotly does not auto-detect themes — the + * defaults assume a light background, which clash with the playground's dark + * mode (white text-on-card becomes invisible on a near-black plot bg). + */ +function useIsDarkMode(): boolean { + const [isDark, setIsDark] = useState(() => + typeof document !== "undefined" + ? document.documentElement.classList.contains("dark") + : false, + ); + + useEffect(() => { + if (typeof document === "undefined") return; + const root = document.documentElement; + const update = () => setIsDark(root.classList.contains("dark")); + const observer = new MutationObserver(update); + observer.observe(root, { attributes: true, attributeFilter: ["class"] }); + update(); + return () => observer.disconnect(); + }, []); + + return isDark; +} + +/** + * Theme-aware color palette for Plotly. 
Backgrounds are transparent so the + * Card behind the plot shows through (the card's bg is already theme-aware). + * Foreground / grid / axis colors are hardcoded to neutral hex matching the + * AppKit tailwind palette — close enough to the design tokens to feel native + * without plumbing CSS-var reads at render time. + */ +function plotlyThemeColors(isDark: boolean) { + return isDark + ? { + paper: "transparent", + plot: "transparent", + font: "#e5e7eb", // zinc-200 + grid: "#27272a", // zinc-800 + axis: "#71717a", // zinc-500 + } + : { + paper: "transparent", + plot: "transparent", + font: "#18181b", // zinc-900 + grid: "#e4e4e7", // zinc-200 + axis: "#71717a", // zinc-500 + }; +} + +function RevenueChart() { + // Wrap args in `useMemo` so reference stability prevents infinite refetches. + const args = useMemo( + () => + ({ + measures: ["arr"] as const, + dimensions: ["region", "created_at"] as const, + timeGrain: "month" as const, + filter: { + member: "region", + operator: "in", + values: ["EMEA", "APAC", "AMER"], + }, + }) as const, + [], + ); + + const { data, metadata, loading, error } = useMetricView("revenue", args); + const isDark = useIsDarkMode(); + const colors = plotlyThemeColors(isDark); + + if (loading) { + return ( +
+ Loading revenue metric… +
+ ); + } + + if (error) { + return ( +
+

+ Could not load the revenue metric. +

+

+ The dev workspace does not host the demo metric view at{" "} + + appkit_demo.public.revenue_metrics + + . The typed surface and metadata flow still compile — this panel would + render a Plotly line chart with{" "} + + $#,##0.00 + {" "} + tick formatting once the metric view exists in your warehouse. +

+

{error}

+
+ ); + } + + if (!data || data.length === 0) { + return ( +
+ No rows returned. +
+ ); + } + + // Group by region — one Plotly trace per series. + const rows = data as RevenueRow[]; + const byRegion = new Map(); + for (const row of rows) { + if (!byRegion.has(row.region)) { + byRegion.set(row.region, { x: [], y: [] }); + } + const entry = byRegion.get(row.region); + if (!entry) continue; + entry.x.push(row.created_at); + entry.y.push(row.arr); + } + + const traces = Array.from(byRegion.entries()).map(([region, series]) => ({ + type: "scatter" as const, + mode: "lines+markers" as const, + name: region, + x: series.x, + y: series.y, + hovertemplate: `${region}
%{x|%b %Y}
%{y}`, + })); + + // Wire metadata into Plotly layout. `formatLabel` returns the YAML-defined + // display name; `toD3Format` converts the YAML's printf-style format spec + // into the d3-format syntax that Plotly's `tickformat` understands. + // Defensive: `metric sync` may have emitted an empty measures/dimensions + // record (e.g., DESCRIBE response shape mismatch for joined metric views). + // formatLabel + toD3Format both fall back gracefully when given undefined, + // so this stays safe even when the metadata bundle is incomplete. + const arrLabel = formatLabel("arr", metadata?.measures.arr); + const arrTickFormat = toD3Format(metadata?.measures.arr?.format); + + return ( + + ); +} + +function CustomerMetricsPanel({ user }: { user: string | null }) { + const args = useMemo( + () => + ({ + measures: ["active_accounts", "churn_rate"] as const, + dimensions: ["segment"] as const, + }) as const, + [], + ); + + const { data, metadata, loading, error } = useMetricView( + "customer_metrics", + args, + ); + + return ( +
+

+ Executing as{" "} + + {user ?? ""} + + . OBO entries scope cache keys per user — different users see different + rows, even with identical args. +

+ {loading && ( +
+ Loading customer metrics… +
+ )} + {error && ( +
+

+ Could not load customer metrics. +

+

+ The dev workspace does not host the demo metric view at{" "} + + appkit_demo.public.customer_metrics + + . When wired to a real OBO-lane metric view, this panel would show + row-level scoping driven by{" "} + + x-forwarded-access-token + + . +

+

{error}

+
+ )} + {data && data.length > 0 && ( + + + + + + + + + + {( + data as Array<{ + segment: string; + active_accounts: number; + churn_rate: number; + }> + ).map((row) => ( + + + + + + ))} + +
+ {formatLabel("segment", metadata?.dimensions.segment)} + + {formatLabel( + "active_accounts", + metadata?.measures.active_accounts, + )} + + {formatLabel("churn_rate", metadata?.measures.churn_rate)} +
{row.segment} + {formatValue( + row.active_accounts, + metadata?.measures.active_accounts?.format, + )} + + {formatValue( + row.churn_rate, + metadata?.measures.churn_rate?.format, + )} +
+ )} +
+ ); +} + +function MetricsRoute() { + const { user } = useWhoami(); + + return ( +
+
+
+ +
+ + + Revenue (SP lane) + + Annual Recurring Revenue by region, monthly grain. Filter: + region in {`{EMEA, APAC, AMER}`}. The Y-axis tick format and + trace name are sourced from the metric view's YAML metadata via{" "} + toD3Format() and{" "} + formatLabel(). + + + + + + + + + + Customer Metrics (OBO lane) + + Active accounts and churn rate, grouped by segment. OBO entries + scope cache keys per requesting user. + + + + + + + + + + How this demo wires together + + +
    +
  1. + config/queries/metric.json declares two metric + sources — revenue (SP lane) and{" "} + customer_metrics (OBO lane). +
  2. +
  3. + npx appkit metric sync regenerates a typed{" "} + metric.d.ts (augmenting{" "} + MetricRegistry) and a{" "} + metrics.metadata.json bundle. +
  4. +
  5. + main.tsx imports the metadata bundle once at + startup and calls registerMetricsMetadata(). +
  6. +
  7. + useMetricView("revenue", ...) narrows + measures, dimensions, and time grains to the registry-known + literals — typos fail at compile time. +
  8. +
  9. + formatLabel, formatValue, and{" "} + toD3Format turn the YAML metadata into Plotly / + table-cell strings — no chart-library lock-in. +
  10. +
+
+
+
+
+
+ ); +} diff --git a/apps/dev-playground/client/tsconfig.app.json b/apps/dev-playground/client/tsconfig.app.json index 6e2a3b464..ddf430cdc 100644 --- a/apps/dev-playground/client/tsconfig.app.json +++ b/apps/dev-playground/client/tsconfig.app.json @@ -15,6 +15,7 @@ "moduleDetection": "force", "noEmit": true, "jsx": "react-jsx", + "resolveJsonModule": true, /* Linting */ "strict": true, diff --git a/apps/dev-playground/config/queries/metric.json b/apps/dev-playground/config/queries/metric.json new file mode 100644 index 000000000..05d1218c0 --- /dev/null +++ b/apps/dev-playground/config/queries/metric.json @@ -0,0 +1,13 @@ +{ + "$schema": "https://databricks.github.io/appkit/schemas/metric-source.schema.json", + "sp": { + "revenue": { + "source": "appkit_demo.public.revenue_metrics" + } + }, + "obo": { + "customer_metrics": { + "source": "appkit_demo.public.customer_metrics" + } + } +} diff --git a/apps/dev-playground/shared/appkit-types/metric.d.ts b/apps/dev-playground/shared/appkit-types/metric.d.ts new file mode 100644 index 000000000..31e893cec --- /dev/null +++ b/apps/dev-playground/shared/appkit-types/metric.d.ts @@ -0,0 +1,129 @@ +// Auto-generated by AppKit - DO NOT EDIT +// Generated by 'npx @databricks/appkit generate-types' or Vite plugin during build +import "@databricks/appkit-ui/react"; +declare module "@databricks/appkit-ui/react" { + interface MetricRegistry { + "revenue": { + key: "revenue"; + source: "appkit_demo.public.revenue_metrics"; + lane: "sp"; + measures: { + /** @sqlType double */ + "mrr": number; + /** @sqlType double */ + "arr": number; + /** @sqlType double */ + "new_arr": number; + /** @sqlType double */ + "churned_arr": number; + }; + dimensions: { + /** @sqlType string */ + "region": string; + /** @sqlType string */ + "segment": string; + /** @sqlType timestamp_ltz @timeGrain day|hour|minute|month|quarter|week|year */ + "created_at": string; + }; + measureKeys: "mrr" | "arr" | "new_arr" | "churned_arr"; + dimensionKeys: "region" | 
"segment" | "created_at"; + timeGrains: "day" | "hour" | "minute" | "month" | "quarter" | "week" | "year"; + metadata: { + measures: { + "mrr": { + type: "double"; + display_name: "Monthly Recurring Revenue"; + format: "$#,##0.00"; + }; + "arr": { + type: "double"; + display_name: "Annual Recurring Revenue"; + format: "$#,##0.00"; + description: "Annualized contract value across all active subscriptions"; + }; + "new_arr": { + type: "double"; + display_name: "New ARR"; + format: "$#,##0.00"; + }; + "churned_arr": { + type: "double"; + display_name: "Churned ARR"; + format: "$#,##0.00"; + }; + }; + dimensions: { + "region": { + type: "string"; + display_name: "Region"; + }; + "segment": { + type: "string"; + display_name: "Customer Segment"; + }; + "created_at": { + type: "timestamp_ltz"; + display_name: "Subscription Start"; + time_grain: readonly ["day", "hour", "minute", "month", "quarter", "week", "year"]; + }; + }; + }; + }; + "customer_metrics": { + key: "customer_metrics"; + source: "appkit_demo.public.customer_metrics"; + lane: "obo"; + measures: { + /** @sqlType bigint */ + "active_accounts": number; + /** @sqlType decimal */ + "churn_rate": number; + /** @sqlType double */ + "avg_ltv": number; + }; + dimensions: { + /** @sqlType string */ + "segment": string; + /** @sqlType string */ + "region": string; + /** @sqlType string */ + "csm_email": string; + }; + measureKeys: "active_accounts" | "churn_rate" | "avg_ltv"; + dimensionKeys: "segment" | "region" | "csm_email"; + timeGrains: never; + metadata: { + measures: { + "active_accounts": { + type: "bigint"; + display_name: "Active Accounts"; + format: "#,##0"; + }; + "churn_rate": { + type: "decimal"; + display_name: "Churn Rate"; + }; + "avg_ltv": { + type: "double"; + display_name: "Average LTV"; + format: "$#,##0.00"; + }; + }; + dimensions: { + "segment": { + type: "string"; + display_name: "Customer Segment"; + }; + "region": { + type: "string"; + display_name: "Region"; + }; + "csm_email": { + type: 
"string"; + display_name: "CSM Email"; + }; + }; + }; + }; + } +} diff --git a/apps/dev-playground/shared/appkit-types/metrics.metadata.json b/apps/dev-playground/shared/appkit-types/metrics.metadata.json new file mode 100644 index 000000000..457ed04a5 --- /dev/null +++ b/apps/dev-playground/shared/appkit-types/metrics.metadata.json @@ -0,0 +1,82 @@ +{ + "customer_metrics": { + "measures": { + "active_accounts": { + "type": "bigint", + "display_name": "Active Accounts", + "format": "#,##0" + }, + "churn_rate": { + "type": "decimal", + "display_name": "Churn Rate" + }, + "avg_ltv": { + "type": "double", + "display_name": "Average LTV", + "format": "$#,##0.00" + } + }, + "dimensions": { + "segment": { + "type": "string", + "display_name": "Customer Segment" + }, + "region": { + "type": "string", + "display_name": "Region" + }, + "csm_email": { + "type": "string", + "display_name": "CSM Email" + } + } + }, + "revenue": { + "measures": { + "mrr": { + "type": "double", + "display_name": "Monthly Recurring Revenue", + "format": "$#,##0.00" + }, + "arr": { + "type": "double", + "display_name": "Annual Recurring Revenue", + "format": "$#,##0.00", + "description": "Annualized contract value across all active subscriptions" + }, + "new_arr": { + "type": "double", + "display_name": "New ARR", + "format": "$#,##0.00" + }, + "churned_arr": { + "type": "double", + "display_name": "Churned ARR", + "format": "$#,##0.00" + } + }, + "dimensions": { + "region": { + "type": "string", + "display_name": "Region" + }, + "segment": { + "type": "string", + "display_name": "Customer Segment" + }, + "created_at": { + "type": "timestamp_ltz", + "display_name": "Subscription Start", + "time_grain": [ + "day", + "hour", + "minute", + "month", + "quarter", + "week", + "year" + ] + } + } + } +} diff --git a/apps/dev-playground/tests/metrics.spec.ts b/apps/dev-playground/tests/metrics.spec.ts new file mode 100644 index 000000000..2092a7afb --- /dev/null +++ 
b/apps/dev-playground/tests/metrics.spec.ts @@ -0,0 +1,156 @@ +import { expect, test } from "@playwright/test"; + +/** + * Phase 7 acceptance test for the `/metrics` demo route. Exercises the full + * metric-view path through dev mode for one happy-path case (revenue, SP lane + * with metadata flow) and one error case (customer_metrics, OBO lane returns + * 404 because the demo workspace does not host the metric view). + * + * The mocks bypass the real SQL Warehouse so the test does not require live + * Databricks credentials. SSE response envelopes match the existing analytics + * route's shape (`{ type: "result", data }`). + */ + +const SSE_HEADERS = { + "Content-Type": "text/event-stream", + "Cache-Control": "no-cache", + Connection: "keep-alive", +}; + +function sseEvent(payload: unknown): string { + return `data: ${JSON.stringify(payload)}\n\n`; +} + +const REVENUE_ROWS = [ + { region: "EMEA", created_at: "2026-01-01T00:00:00Z", arr: 1_250_000 }, + { region: "EMEA", created_at: "2026-02-01T00:00:00Z", arr: 1_310_000 }, + { region: "APAC", created_at: "2026-01-01T00:00:00Z", arr: 720_000 }, + { region: "APAC", created_at: "2026-02-01T00:00:00Z", arr: 760_000 }, + { region: "AMER", created_at: "2026-01-01T00:00:00Z", arr: 2_400_000 }, + { region: "AMER", created_at: "2026-02-01T00:00:00Z", arr: 2_470_000 }, +]; + +test.describe("Metric Views Route Tests", () => { + test.beforeEach(async ({ page }) => { + // Happy-path mock: revenue (SP lane) returns six rows. + await page.route("**/api/analytics/metric/revenue", async (route) => { + // Best-effort: confirm the request body carries the expected + // measures/dimensions/timeGrain/filter shape — the demo's call site is + // contractual. 
+ const body = route.request().postDataJSON(); + expect(body).toMatchObject({ + measures: ["arr"], + dimensions: ["region", "created_at"], + timeGrain: "month", + filter: { + member: "region", + operator: "in", + values: ["EMEA", "APAC", "AMER"], + }, + }); + + return route.fulfill({ + status: 200, + headers: SSE_HEADERS, + body: sseEvent({ type: "result", data: REVENUE_ROWS }), + }); + }); + + // Error-path mock: customer_metrics (OBO lane) returns a 404-shaped error + // event. Mirrors the experience when the dev workspace does not host the + // OBO metric view. + await page.route( + "**/api/analytics/metric/customer_metrics", + async (route) => { + return route.fulfill({ + status: 200, + headers: SSE_HEADERS, + body: sseEvent({ + type: "error", + error: "Metric view not found", + code: "METRIC_NOT_FOUND", + }), + }); + }, + ); + + // /whoami stub — the OBO panel surfaces the user identity. + await page.route("**/whoami", async (route) => { + return route.fulfill({ + json: { + xForwardedUser: "demo-user@databricks.com", + adminUserId: null, + isAdmin: false, + }, + }); + }); + }); + + test("metrics page loads and renders the route header", async ({ page }) => { + await page.goto("/metrics", { waitUntil: "networkidle" }); + + await expect(page).toHaveURL("/metrics"); + await expect( + page.getByRole("heading", { name: "Metric Views" }), + ).toBeVisible(); + }); + + test("revenue chart renders with metadata-formatted axis", async ({ + page, + }) => { + await page.goto("/metrics", { waitUntil: "networkidle" }); + + // Plotly renders an SVG inside `.js-plotly-plot`; its presence is the + // load-bearing assertion that the metric query resolved + the metadata + // flowed into the chart layout. + const plotContainer = page.locator(".js-plotly-plot").first(); + await expect(plotContainer).toBeVisible({ timeout: 10000 }); + + // The Y-axis title comes from `formatLabel("arr", metadata.measures.arr)` + // — the metadata's `display_name` field. 
Two instances appear (chart title + // + Y-axis title), so we assert at least one. + await expect( + page.getByText("Annual Recurring Revenue").first(), + ).toBeVisible(); + }); + + test("OBO panel shows the requesting user identity", async ({ page }) => { + await page.goto("/metrics", { waitUntil: "networkidle" }); + + // The /whoami response surfaces the mock user; the OBO panel exposes it. + await expect(page.getByText("demo-user@databricks.com")).toBeVisible(); + }); + + test("OBO error path renders the graceful fallback banner", async ({ + page, + }) => { + await page.goto("/metrics", { waitUntil: "networkidle" }); + + // The error mock returns `code: "METRIC_NOT_FOUND"`. The route renders + // a banner with the literal "Could not load customer metrics." message + // when the OBO query fails — the v1 demo's expected fallback. + await expect( + page.getByText("Could not load customer metrics."), + ).toBeVisible({ timeout: 10000 }); + }); + + test("calls expected metric endpoints on page load", async ({ page }) => { + // Match the codebase idiom for "this navigation should fire request X" — + // see reconnect.spec.ts. `waitForRequest` registers a one-shot matcher + // before the navigation triggers it, so listener-attachment ordering + // (a known sharp edge with `page.on("request")` plus `page.route()` mocks + // attached in beforeEach) is no longer in play. Each await doubles as the + // "received >= 1" assertion — it throws on timeout otherwise. 
+ const revenuePromise = page.waitForRequest((req) => + req.url().endsWith("/api/analytics/metric/revenue"), + ); + const customerPromise = page.waitForRequest((req) => + req.url().endsWith("/api/analytics/metric/customer_metrics"), + ); + + await page.goto("/metrics", { waitUntil: "networkidle" }); + + await revenuePromise; + await customerPromise; + }); +}); diff --git a/docs/docs/plugins/analytics-metric-views.md b/docs/docs/plugins/analytics-metric-views.md new file mode 100644 index 000000000..4d7d2cd21 --- /dev/null +++ b/docs/docs/plugins/analytics-metric-views.md @@ -0,0 +1,585 @@ +--- +sidebar_position: 4 +--- + +# Analytics — Metric Views + +UC Metric View consumption built on top of the analytics plugin: declarative metric registration, a typed React hook, structured filters, time-grain truncation, and library-agnostic format utilities. + +**Key features:** +- Declarative `metric.json` config with `sp` and `obo` execution lanes +- `useMetricView` React hook with measure/dimension narrowing at the call site +- Structured filter spec — 12 operators, AND/OR composition, schema-validated members +- Time-grain truncation on time-typed dimensions +- Build-time semantic metadata bundle + library-agnostic format utilities +- `npx appkit metric sync` CLI for non-Vite builds, CI checks, pre-commit hooks +- OBO row scoping with cross-user cache isolation + +The metric-view path lives inside the existing analytics plugin — apps without `metric.json` pay no bundle or runtime cost. See the [Analytics plugin](./analytics.md) for the underlying SQL execution machinery. 
+ +## Configuration: `metric.json` + +Place a `metric.json` file alongside your `.sql` query files: + +```json title="config/queries/metric.json" +{ + "$schema": "https://databricks.github.io/appkit/schemas/metric-source.schema.json", + "sp": { + "revenue": { + "source": "appkit_demo.public.revenue_metrics" + } + }, + "obo": { + "customer_metrics": { + "source": "appkit_demo.public.customer_metrics" + } + } +} +``` + +| Field | Type | Description | +|-------|------|-------------| +| `sp` | `Record` | Metrics executed as the service principal — shared cache. | +| `obo` | `Record` | Metrics executed on-behalf-of the requesting user — per-user cache. | +| `.source` | `string` | Three-part Unity Catalog FQN of the metric view (`..`). | + +The map key (`revenue`, `customer_metrics`) is the **single identity** that flows through every other surface: the route key in `POST /api/analytics/metric/:key`, the hook argument in `useMetricView("", ...)`, the `MetricRegistry` augmentation key, and the cache-key segment. + +The entry-object shape (`{ source }` at v1) is the forward-compat seam — future per-entry options (`cacheTtl`, `defaultFilter`, allowlists) grow non-breakingly. v1 deliberately rejects unknown fields. + +The [JSON Schema](https://databricks.github.io/appkit/schemas/metric-source.schema.json) ships with AppKit; configure your IDE to validate `metric.json` against it. + +## HTTP endpoint + +The analytics plugin exposes one new endpoint (mounted under `/api/analytics`): + +- `POST /api/analytics/metric/:key` + +The Arrow secondary path (`GET /api/analytics/arrow-result/:jobId`) is reused unchanged. + +### Request body + +```ts +{ + measures: string[]; // Required. Subset of declared measures. + dimensions?: string[]; // Optional. Subset of declared dimensions. + filter?: Filter; // Optional. Recursive AND/OR/Predicate tree. + timeGrain?: string; // Optional. Applies to time-typed dimensions. + limit?: number; // Optional. Row cap. + format?: "JSON"; // Optional. 
ARROW deferred to a future release. +} +``` + +### Response envelope + +The route emits the same SSE event shape as `/api/analytics/query/:query_key`: + +| Event | Description | +|-------|-------------| +| `result` | Final result payload (JSON rows). | +| `arrow` | Reserved — ARROW format is out of scope at v1. | +| `error` | Error event with `code` + `message`. | +| `warning` | Non-fatal advisory (e.g., row cap applied). | + +## Frontend usage + +### `useMetricView` + +```ts +import { useMetricView } from "@databricks/appkit-ui/react"; + +const { data, metadata, loading, error } = useMetricView(metricKey, args, options); +``` + +Signature: + +```ts +function useMetricView< + K extends MetricKey, + M extends ReadonlyArray>, + D extends ReadonlyArray>, + F extends AnalyticsFormat = "JSON", +>( + metricKey: K, + args: { + measures: M; + dimensions?: D; + filter?: Filter; + timeGrain?: TimeGrain; + limit?: number; + }, + options?: { + format?: F; + autoStart?: boolean; + maxParametersSize?: number; + }, +): { + data: Pick, M[number] | D[number]>[] | null; + metadata: MetricMetadata | null; + loading: boolean; + error: string | null; +}; +``` + +**Generic narrowing:** +- `K` narrows to a registered metric key when `MetricRegistry` is augmented. +- `M` and `D` carry `const` modifiers — pass `as const` on the arrays to preserve literal types. +- The result row type is `Pick, M[number] | D[number]>` — the IDE shows exactly the columns you projected. + +**Return shape:** + +| Field | Type | Description | +|-------|------|-------------| +| `data` | Row array \| `null` | Picked-down rows once the query completes. | +| `metadata` | `MetricMetadata` \| `null` | Build-time metadata for the queried metric (measures + dimensions). Available **before** `data` loads; stable across re-renders. | +| `loading` | `boolean` | `true` while the request is in flight. | +| `error` | `string \| null` | Error message; `null` on success. 
| + +**Options:** + +| Option | Type | Default | Description | +|--------|------|---------|-------------| +| `format` | `"JSON"` | `"JSON"` | Response format. ARROW deferred. | +| `autoStart` | `boolean` | `true` | Fire the request on mount. | +| `maxParametersSize` | `number` | `102400` | Max serialized request body size in bytes. | + +**Example:** + +```tsx +import { useMetricView } from "@databricks/appkit-ui/react"; +import { useMemo } from "react"; + +function RevenueChart() { + const args = useMemo( + () => + ({ + measures: ["arr"] as const, + dimensions: ["region", "created_at"] as const, + timeGrain: "month" as const, + }) as const, + [], + ); + + const { data, metadata, loading, error } = useMetricView("revenue", args); + + if (loading) return ; + if (error) return ; + if (!data?.length) return ; + + // data: Array<{ arr: number; region: string; created_at: string }> + // metadata.measures.arr.format → "$#,##0.00" + // metadata.measures.arr.display_name → "Annual Recurring Revenue" + return ; +} +``` + +:::tip Memoize args (optional) + +The hook deduplicates `args` by content (JSON-serialized), so re-rendering with a fresh-but-equivalent `args` object does not refetch. Wrapping `args` in `useMemo` is still recommended for very hot render paths to skip the per-render serialization, but it is no longer required for correctness. +::: + +### Type-safe registration + +The build-time pipeline augments the `MetricRegistry` interface declared in `@databricks/appkit-ui/react`. 
The generated `metrics.d.ts` looks like: + +```ts +declare module "@databricks/appkit-ui/react" { + interface MetricRegistry { + revenue: { + key: "revenue"; + source: "appkit_demo.public.revenue_metrics"; + lane: "sp"; + measures: { arr: number; mrr: number }; + dimensions: { region: string; created_at: string }; + measureKeys: "arr" | "mrr"; + dimensionKeys: "region" | "created_at"; + timeGrains: "day" | "week" | "month"; + metadata: { + measures: { + arr: { + type: "DECIMAL(38,2)"; + display_name: "Annual Recurring Revenue"; + format: "$#,##0.00"; + }; + }; + dimensions: { + region: { type: "STRING" }; + created_at: { + type: "TIMESTAMP"; + time_grain: readonly ["day", "week", "month"]; + }; + }; + }; + }; + } +} +``` + +Once augmented, `useMetricView("revenue", { measures: ["arr"] })` autocompletes measure names, rejects typos at compile time, and narrows the result row type at the call site. + +## Filter spec + +The structured filter is a recursive type: + +```ts +type Filter = + | Predicate + | { and: ReadonlyArray> } + | { or: ReadonlyArray> }; + +interface Predicate { + member: DimensionKey; + operator: MetricFilterOperator; + values?: ReadonlyArray; +} +``` + +The 12 v1 operators: + +| Category | Operators | Cardinality | Notes | +|----------|-----------|-------------|-------| +| Equality | `equals`, `notEquals` | exactly one value | Any dimension type. | +| Set membership | `in`, `notIn` | one or more values | Any dimension type. | +| Range | `gt`, `gte`, `lt`, `lte` | exactly one value | Numeric / date-typed dimensions only. | +| String search | `contains`, `notContains` | exactly one value | String-typed dimensions only. | +| NULL checks | `set`, `notSet` | no values (rejected if present) | Any dimension type. | + +`startsWith`, `endsWith`, `between`, and date-range helpers are reserved for v1.5. 
+ +### Examples + +**Single predicate:** + +```ts +filter: { member: "region", operator: "in", values: ["EMEA", "APAC"] } +``` + +**Implicit AND (predicate list inside an `and` group):** + +```ts +filter: { + and: [ + { member: "region", operator: "in", values: ["EMEA"] }, + { member: "segment", operator: "equals", values: ["Enterprise"] }, + ], +} +``` + +**Nested OR:** + +```ts +filter: { + or: [ + { + and: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { member: "segment", operator: "equals", values: ["Enterprise"] }, + ], + }, + { member: "region", operator: "equals", values: ["APAC"] }, + ], +} +``` + +**NULL check:** + +```ts +filter: { member: "csm_email", operator: "set" } +``` + +The server enforces: +- `member` is a registered dimension on the metric (typos return 400). +- `operator` is one of the 12 v1 names. +- Operator-vs-type compatibility (e.g., `gt` on a string dimension returns 400). +- `values` cardinality matches the operator. +- Recursion depth ≤ 8 (defense against malformed payloads). +- All values bind as parameters via the Statement Execution bind-var path — **no value from the request body flows into the rendered SQL string**. + +## Time grain + +`timeGrain` is a single optional top-level field on the request body. When set, it applies to every time-typed dimension in `dimensions`: + +```ts +useMetricView("revenue", { + measures: ["arr"] as const, + dimensions: ["created_at"] as const, + timeGrain: "month", +}); +``` + +Generated SQL: + +```sql +SELECT date_trunc('month', created_at) AS created_at, MEASURE(arr) AS arr +FROM appkit_demo.public.revenue_metrics +GROUP BY ALL +``` + +The `TimeGrain` type narrows to the union of grains the metric view's YAML 1.1 `time_grain` attribute declares. Setting `timeGrain` without including a time-typed dimension in `dimensions` returns 400 with `timeGrain specified but no time-typed dimension grouped`. 
+ +Date ranges are expressed via the structured filter spec (`gte`/`lte` predicates on the time dimension), not a separate `dateRange` field. + +## Semantic metadata + format utilities + +Build-time, the type-generator emits `metrics.metadata.json` alongside the typed `.d.ts`: + +```json +{ + "revenue": { + "source": "appkit_demo.public.revenue_metrics", + "lane": "sp", + "measures": { + "arr": { + "type": "DECIMAL(38,2)", + "display_name": "Annual Recurring Revenue", + "format": "$#,##0.00", + "description": "Annualized contract value across active subscriptions" + } + }, + "dimensions": { + "created_at": { + "type": "TIMESTAMP", + "display_name": "Subscription Start", + "time_grain": ["day", "week", "month"] + } + } + } +} +``` + +Register the bundle once at app startup: + +```ts title="src/main.tsx" +import { registerMetricsMetadata } from "@databricks/appkit-ui/format"; +import metricsMetadata from "../shared/appkit-types/metrics.metadata.json"; + +registerMetricsMetadata(metricsMetadata); +``` + +`useMetricView` then returns the relevant subset (measures + dimensions for the queried metric) in `metadata`. The reference is stable across re-renders for the same metric key. + +### Library-agnostic format utilities + +Three pure functions in `@databricks/appkit-ui/format`: + +```ts +import { formatLabel, formatValue, toD3Format } from "@databricks/appkit-ui/format"; +``` + +| Function | Signature | Purpose | +|----------|-----------|---------| +| `formatValue(value, format?)` | `(value, format?) => string` | Turns a raw value + UC format spec into a display string. | +| `formatLabel(name, columnMetadata?)` | `(name, columnMetadata?) => string` | Returns `display_name` or humanizes the column name. | +| `toD3Format(format?)` | `(format?) => string` | Converts a UC printf-style spec to a d3-format-compatible string. 
| + +Recognized format specs (passthrough — UC's YAML 1.1 emits printf-style strings, AppKit forwards them): + +| YAML format | `formatValue(1234.56, ...)` | `toD3Format(...)` | +|-------------|----------------------------|-------------------| +| `$#,##0.00` | `"$1,234.56"` | `"$,.2f"` | +| `0.00%` | `"123,456.00%"` (use `0.0%` for ratios) | `".2%"` | +| `0.0%` | `"42.7%"` (input `0.427`) | `".1%"` | +| `#,##0` | `"1,235"` | `",.0f"` | +| `0.000` | `"1234.560"` | `".3f"` | +| (omitted) | localized number formatting | `""` (let chart use defaults) | + +### Plotly example + +```tsx +import { formatLabel, toD3Format } from "@databricks/appkit-ui/format"; +import { useMetricView } from "@databricks/appkit-ui/react"; +import Plot from "react-plotly.js"; + +function ARRChart() { + const { data, metadata } = useMetricView("revenue", { + measures: ["arr"] as const, + dimensions: ["created_at"] as const, + timeGrain: "month", + }); + + if (!data || !metadata) return null; + + return ( + row.created_at), + y: data.map((row) => row.arr), + }, + ]} + layout={{ + title: { text: formatLabel("arr", metadata.measures.arr) }, + yaxis: { tickformat: toD3Format(metadata.measures.arr.format) }, + }} + /> + ); +} +``` + +### ECharts example + +```tsx +import { formatLabel, toD3Format } from "@databricks/appkit-ui/format"; +import { useMetricView } from "@databricks/appkit-ui/react"; +import ReactECharts from "echarts-for-react"; + +function ARRChart() { + const { data, metadata } = useMetricView("revenue", { + measures: ["arr"] as const, + dimensions: ["created_at"] as const, + timeGrain: "month", + }); + + if (!data || !metadata) return null; + + return ( + r.created_at) }, + yAxis: { + type: "value", + axisLabel: { + // ECharts accepts a d3-format-compatible string via formatter, + // or a function form for full control. 
+ formatter: toD3Format(metadata.measures.arr.format), + }, + }, + series: [ + { + type: "line", + data: data.map((r) => r.arr), + name: formatLabel("arr", metadata.measures.arr), + }, + ], + }} + /> + ); +} +``` + +The format utilities are deliberately library-agnostic — they emit strings the consumer's chart library decides how to consume. Wrapping a specific chart-library API is glue customers can write in tens of lines, not the framework's responsibility. + +## CLI + +```bash +npx appkit metric sync +``` + +The `metric sync` subcommand calls the same `syncMetrics()` core that the Vite type-generator runs in dev mode. Useful for: + +- CI checks (verify generated types are committed and match the warehouse). +- Non-Vite builds (Webpack, Rspack, Turbopack, raw `tsc`). +- Manual refresh after a teammate's `metric.json` change. +- Pre-commit hooks. + +Flags: + +| Flag | Description | +|------|-------------| +| `--warehouse-id ` | Override the default warehouse. | +| `--metric-json-path ` | Override the default `config/queries/metric.json` location. | +| `--output-dir ` | Override where the generated `metrics.d.ts` and `metrics.metadata.json` land. | +| `--silent` | Suppress non-error output. | + +The CLI exits with: +- `0` on success. +- Non-zero with a recognizable message for missing FQN, unreachable warehouse, malformed `metric.json`, or schema-fetch authentication failure. + +Future subcommands (`list`, `validate`, `describe`) plug into the same parent command. + +## Security model + +The metric-view path inherits AppKit's plugin-best-practices defaults and adds a few metric-specific reinforcements: + +1. **Validator-first.** Every column name (`measures`, `dimensions`, filter `member`) is checked against the build-time schema snapshot before SQL construction. **No user-supplied string is ever interpolated into the generated SQL.** Unknown columns return 400. + +2. 
**Operator allowlist.** The 12 v1 operator names are an exhaustive enum — any other string in `operator` returns 400. + +3. **Operator-vs-type compatibility.** `gt` on a string dim returns 400. `contains` on a numeric dim returns 400. The validator is the source of truth. + +4. **Parameterized values.** Every value in a predicate is bound as a parameter via the Statement Execution bind-var path. SQL injection via filter values is structurally impossible. + +5. **Recursion depth cap.** AND/OR nesting is limited to 8 levels — defense against stack-abuse via hostile payloads. + +6. **OBO row scoping.** Entries in the `obo` lane dispatch via the `asUser(req)` Proxy, threading the user's `x-forwarded-access-token` through every Databricks call. The warehouse executes the query under the end-user's identity. + +7. **Cross-user cache isolation.** OBO cache keys take the form `metric:{key}:{argsHash}:{sha256(userIdentity)}`. The raw email/principal name never reaches the cache layer. SP-lane keys use literal `"sp"` as the executor key — shared cache by design. + +8. **Sort-before-hash on order-insensitive args.** Measures, dimensions, and filter predicates within each AND/OR group are stable-sorted before hashing, so semantically equivalent calls collapse to the same cache entry. + +The server emits four metric-specific telemetry spans: `analytics.metric.query`, `analytics.metric.validate`, `analytics.metric.cache.hit`, `analytics.metric.cache.miss`. Metrics: `metric_query_duration_seconds`, `metric_cache_hit_ratio`, `metric_validation_failures_total`. + +## Migration from hand-rolled metric SQL + +If you previously consumed metric views by hand-writing SQL with `MEASURE(...)` in `.sql` files: + +```sql title="config/queries/revenue.sql (legacy approach)" +SELECT + date_trunc(:grain, created_at) AS created_at, + region, + MEASURE(arr) AS arr +FROM appkit_demo.public.revenue_metrics +WHERE region IN (:r1, :r2) +GROUP BY ALL +``` + +Migrate by: + +1. 
**Move the FQN into `metric.json`** under `sp` or `obo`: + + ```json title="config/queries/metric.json" + { + "sp": { + "revenue": { "source": "appkit_demo.public.revenue_metrics" } + } + } + ``` + +2. **Replace `useAnalyticsQuery` with `useMetricView`** at the call site: + + ```tsx + // Before + const { data } = useAnalyticsQuery("revenue", { + grain: sql.string("month"), + r1: sql.string("EMEA"), + r2: sql.string("APAC"), + }); + + // After + const { data, metadata } = useMetricView("revenue", { + measures: ["arr"] as const, + dimensions: ["region", "created_at"] as const, + timeGrain: "month", + filter: { + member: "region", + operator: "in", + values: ["EMEA", "APAC"], + }, + }); + ``` + +3. **Delete the `.sql` file.** The server constructs SQL deterministically from the structured args. + +4. **Run `npx appkit metric sync`** (or rely on the Vite plugin) to regenerate `metrics.d.ts` and `metrics.metadata.json`. The `MetricRegistry` augmentation lights up call-site narrowing. + +5. **Optional: wire metadata into your chart.** Use `formatLabel` / `formatValue` / `toD3Format` to consume the YAML's `display_name` and `format` instead of re-typing them in TypeScript. + +The metric-view path is purely additive — your other `.sql` files keep working unchanged. Apps that don't use metric views never load `useMetricView` or the format utilities. + +## Out of scope at v1 + +- **ARROW format.** v1 is JSON-only; metric-view results are typically aggregated and small. +- **Per-entry growth options** (`cacheTtl`, `defaultFilter`, `dimensions` allowlist). +- **Filter ops beyond v1** (`startsWith`, `endsWith`, `between`, date-range family). +- **HAVING (filtering on measures).** v1 restricts `member` to dimensions. +- **Runtime schema refresh.** Build-time only; deploys reset the snapshot. +- **Metric view CRUD.** Read-only consumption at v1. +- **Auto-discovery from UC.** Explicit declaration in `metric.json` is required. 
+- **Multi-view joins.** One query targets one metric view. +- **Chart-library adapters.** Format utilities are the framework's contribution; chart wrapping is glue customers write in tens of lines. + +Each of these is a non-breaking additive change when concrete demand arrives. diff --git a/docs/static/appkit-ui/styles.gen.css b/docs/static/appkit-ui/styles.gen.css index a2192039d..60da8251b 100644 --- a/docs/static/appkit-ui/styles.gen.css +++ b/docs/static/appkit-ui/styles.gen.css @@ -219,6 +219,9 @@ .pointer-events-none { pointer-events: none; } + .collapse { + visibility: collapse; + } .invisible { visibility: hidden; } diff --git a/docs/static/schemas/metric-source.schema.json b/docs/static/schemas/metric-source.schema.json new file mode 100644 index 000000000..a41ef9679 --- /dev/null +++ b/docs/static/schemas/metric-source.schema.json @@ -0,0 +1,58 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://databricks.github.io/appkit/schemas/metric-source.schema.json", + "title": "AppKit Metric Source Configuration", + "description": "Schema for AppKit metric.json — declares Unity Catalog Metric View sources for the analytics plugin's metric-view path. Each entry under sp/obo binds a metric key to a UC metric view FQN. Object form (rather than bare string) at v1 enables future per-entry option growth without breaking changes.", + "type": "object", + "properties": { + "$schema": { + "type": "string", + "description": "Reference to the JSON Schema for validation" + }, + "sp": { + "type": "object", + "description": "Metric views queried as the service principal. Cache scope is shared across all users.", + "additionalProperties": { + "$ref": "#/$defs/metricEntry" + }, + "propertyNames": { + "$ref": "#/$defs/metricKey" + } + }, + "obo": { + "type": "object", + "description": "Metric views queried as the requesting user (on-behalf-of). 
Cache scope is per-user.", + "additionalProperties": { + "$ref": "#/$defs/metricEntry" + }, + "propertyNames": { + "$ref": "#/$defs/metricKey" + } + } + }, + "additionalProperties": false, + "$defs": { + "metricKey": { + "type": "string", + "pattern": "^[a-zA-Z_][a-zA-Z0-9_]*$", + "description": "Metric key. Must be a valid identifier (letters, digits, underscores; cannot start with a digit). Becomes the route key in POST /api/analytics/metric/:key, the hook argument in useMetricView('<key>', ...), and the MetricRegistry augmentation key." + }, + "metricEntry": { + "type": "object", + "description": "A single metric view source declaration. v1 only accepts the 'source' field; future per-entry options (cacheTtl, defaultFilter, allowlists) ship as additive properties.", + "required": ["source"], + "properties": { + "source": { + "type": "string", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9_-]*\\.[a-zA-Z0-9_][a-zA-Z0-9_-]*\\.[a-zA-Z0-9_][a-zA-Z0-9_-]*$", + "description": "Three-part Unity Catalog FQN of the metric view: <catalog>.<schema>.<view>", + "examples": [ + "appkit_demo.public.revenue_metrics", + "main.analytics.customer_metrics" + ] + } + }, + "additionalProperties": false + } + } +} diff --git a/knip.json b/knip.json index b777d8c2a..bde14adc6 100644 --- a/knip.json +++ b/knip.json @@ -8,19 +8,15 @@ ], "workspaces": { "packages/appkit": {}, - "packages/appkit-ui": { - "ignoreDependencies": ["tailwindcss", "tw-animate-css"] - } + "packages/appkit-ui": {} }, "ignore": [ "**/*.generated.ts", "**/*.example.tsx", - "**/*.css", "packages/appkit/src/plugins/vector-search/**", "template/**", "tools/**", "docs/**" ], - "ignoreDependencies": ["json-schema-to-typescript"], "ignoreBinaries": ["tarball"] } diff --git a/packages/appkit-ui/package.json b/packages/appkit-ui/package.json index e7e91d5d0..2105929cd 100644 --- a/packages/appkit-ui/package.json +++ b/packages/appkit-ui/package.json @@ -23,6 +23,10 @@ "sbom.cdx.json" ], "exports": { + "./format": { + "development": "./src/format/index.ts", + 
"default": "./dist/format/index.js" + }, "./js": { "development": "./src/js/index.ts", "default": "./dist/js/index.js" @@ -118,6 +122,7 @@ }, "publishConfig": { "exports": { + "./format": "./dist/format/index.js", "./js": "./dist/js/index.js", "./js/beta": "./dist/js/beta.js", "./react": "./dist/react/index.js", diff --git a/packages/appkit-ui/src/format/__tests__/format.test.ts b/packages/appkit-ui/src/format/__tests__/format.test.ts new file mode 100644 index 000000000..d1c48cd39 --- /dev/null +++ b/packages/appkit-ui/src/format/__tests__/format.test.ts @@ -0,0 +1,269 @@ +import { describe, expect, test } from "vitest"; +import { formatLabel, formatValue, toD3Format } from "../format"; + +/** + * Format utility tests for Phase 5 of the metric-view PRD. + * + * Coverage matrix per the task acceptance criteria: + * - formatValue: currency / percent / number / unknown-fallback / null cases + * - formatLabel: camelCase / snake_case / display-name-override + * - toD3Format: currency / percent / integer / unknown-fallback + */ + +describe("formatValue", () => { + test("formats currency with two decimals", () => { + expect(formatValue(1234.56, "$#,##0.00")).toBe("$1,234.56"); + }); + + test("formats currency with thousands separator", () => { + expect(formatValue(1234567.89, "$#,##0.00")).toBe("$1,234,567.89"); + }); + + test("formats negative currency with sign before symbol", () => { + expect(formatValue(-1234.56, "$#,##0.00")).toBe("-$1,234.56"); + }); + + test("formats zero currency correctly", () => { + expect(formatValue(0, "$#,##0.00")).toBe("$0.00"); + }); + + test("formats percent with one decimal", () => { + expect(formatValue(0.427, "0.0%")).toBe("42.7%"); + }); + + test("formats percent with no decimals", () => { + expect(formatValue(0.5, "0%")).toBe("50%"); + }); + + test("formats percent with two decimals", () => { + expect(formatValue(0.12345, "0.00%")).toBe("12.35%"); + }); + + test("formats integer with thousands separator", () => { + 
expect(formatValue(1234, "#,##0")).toBe("1,234"); + }); + + test("formats fixed-precision number", () => { + expect(formatValue(1.23456, "0.000")).toBe("1.235"); + }); + + test("formats number without grouping when format omits comma", () => { + expect(formatValue(1234, "0")).toBe("1234"); + }); + + test("falls back to localized formatting for unrecognized format spec", () => { + // Unknown spec → Intl.NumberFormat default. Just assert it's a non-empty + // string that contains the digits — locale-specific separators vary. + const result = formatValue(1234.5, "weird-spec-xyz"); + expect(typeof result).toBe("string"); + expect(result.length).toBeGreaterThan(0); + }); + + test("falls back to localized formatting when format is undefined", () => { + const result = formatValue(42); + expect(typeof result).toBe("string"); + expect(result).toContain("42"); + }); + + test("returns empty string for null value", () => { + expect(formatValue(null, "$#,##0.00")).toBe(""); + }); + + test("returns empty string for undefined value", () => { + expect(formatValue(undefined, "$#,##0.00")).toBe(""); + }); + + test("returns String(value) for string input regardless of format", () => { + expect(formatValue("EMEA", "$#,##0.00")).toBe("EMEA"); + }); + + test("returns String(value) for boolean input", () => { + expect(formatValue(true)).toBe("true"); + }); + + test("handles bigint input by converting to number", () => { + expect(formatValue(1234n, "#,##0")).toBe("1,234"); + }); + + test("returns String(NaN) when value is non-finite", () => { + expect(formatValue(Number.NaN)).toBe("NaN"); + expect(formatValue(Number.POSITIVE_INFINITY)).toBe("Infinity"); + }); + + test("recognizes suffix-style currency (e.g. 
0.00 kr)", () => { + // Common Nordic format + expect(formatValue(1234.56, "#,##0.00 kr")).toContain("1,234.56"); + expect(formatValue(1234.56, "#,##0.00 kr")).toContain("kr"); + }); +}); + +describe("formatLabel", () => { + test("returns display_name from metadata when present", () => { + expect( + formatLabel("arr", { + type: "DECIMAL", + display_name: "Annual Recurring Revenue", + }), + ).toBe("Annual Recurring Revenue"); + }); + + test("falls back to humanization when display_name is absent", () => { + expect(formatLabel("arr", { type: "DECIMAL" })).toBe("Arr"); + }); + + test("falls back to humanization when metadata is undefined", () => { + expect(formatLabel("revenue")).toBe("Revenue"); + }); + + test("humanizes snake_case", () => { + expect(formatLabel("total_revenue")).toBe("Total Revenue"); + expect(formatLabel("user_name")).toBe("User Name"); + expect(formatLabel("annual_recurring_revenue")).toBe( + "Annual Recurring Revenue", + ); + }); + + test("humanizes camelCase", () => { + expect(formatLabel("customerCount")).toBe("Customer Count"); + expect(formatLabel("totalCost")).toBe("Total Cost"); + expect(formatLabel("annualRecurringRevenue")).toBe( + "Annual Recurring Revenue", + ); + }); + + test("humanizes PascalCase", () => { + expect(formatLabel("UserId")).toBe("User Id"); + expect(formatLabel("CustomerCount")).toBe("Customer Count"); + }); + + test("humanizes SCREAMING_SNAKE_CASE", () => { + expect(formatLabel("USER_ID")).toBe("User Id"); + expect(formatLabel("ANNUAL_REVENUE")).toBe("Annual Revenue"); + }); + + test("preserves already-spaced input with title-case normalization", () => { + expect(formatLabel("annual revenue")).toBe("Annual Revenue"); + }); + + test("ignores empty/whitespace display_name and falls back to humanization", () => { + expect(formatLabel("arr", { type: "DECIMAL", display_name: " " })).toBe( + "Arr", + ); + expect(formatLabel("arr", { type: "DECIMAL", display_name: "" })).toBe( + "Arr", + ); + }); + + test("strips dangerous 
non-identifier characters before humanizing", () => { + expect(formatLabel("user<script>name</script>")).toBe( + "Userscriptnamescript", + ); + }); + + test("returns empty string for an empty input name", () => { + expect(formatLabel("")).toBe(""); + }); + + test("handles single-word lowercase identifier", () => { + expect(formatLabel("revenue")).toBe("Revenue"); + }); + + test("handles consecutive capitals (acronyms)", () => { + expect(formatLabel("ARRGrowth")).toBe("Arr Growth"); + }); +}); + +describe("toD3Format", () => { + test("converts currency with two decimals", () => { + expect(toD3Format("$#,##0.00")).toBe("$,.2f"); + }); + + test("converts currency with no decimals", () => { + expect(toD3Format("$#,##0")).toBe("$,.0f"); + }); + + test("converts percent with one decimal", () => { + expect(toD3Format("0.0%")).toBe(".1%"); + }); + + test("converts percent with two decimals", () => { + expect(toD3Format("0.00%")).toBe(".2%"); + }); + + test("converts percent with thousands separator", () => { + expect(toD3Format("#,##0.0%")).toBe(",.1%"); + }); + + test("converts integer with thousands separator", () => { + expect(toD3Format("#,##0")).toBe(",.0f"); + }); + + test("converts integer without thousands separator", () => { + expect(toD3Format("0")).toBe(".0f"); + }); + + test("converts fixed-precision number", () => { + expect(toD3Format("0.000")).toBe(".3f"); + }); + + test("falls back to identity for unrecognized format spec", () => { + expect(toD3Format("weird-spec-xyz")).toBe("weird-spec-xyz"); + }); + + test("returns empty string for undefined format", () => { + expect(toD3Format()).toBe(""); + }); + + test("returns empty string for empty format", () => { + expect(toD3Format("")).toBe(""); + }); + + test("treats already-d3 format as identity (acceptable: chart consumes it)", () => { + expect(toD3Format(".2f")).toBe(".2f"); + }); +}); + +// ── End-to-end utility flow: simulating chart consumption ──────────────── +describe("library-agnostic chart consumption flow", () => { + 
test("Plotly tickformat workflow: metadata → toD3Format → tickformat string", () => { + // Customer would do: { tickformat: toD3Format(metadata.measures.arr.format) } + const metadataFormat = "$#,##0.00"; + const tickformat = toD3Format(metadataFormat); + expect(tickformat).toBe("$,.2f"); + }); + + test("ECharts valueFormatter workflow: format function from metadata", () => { + const metadata = { + type: "DECIMAL", + display_name: "ARR", + format: "$#,##0.00", + }; + // ECharts valueFormatter receives raw values and returns strings. + const valueFormatter = (v: number) => formatValue(v, metadata.format); + expect(valueFormatter(1234.56)).toBe("$1,234.56"); + }); + + test("Table cell workflow: formatValue per row, formatLabel per column", () => { + const arrMetadata: import("../types").ColumnMetadata = { + type: "DECIMAL", + display_name: "Annual Recurring Revenue", + format: "$#,##0.00", + }; + const regionMetadata: import("../types").ColumnMetadata = { + type: "STRING", + }; + + expect(formatLabel("arr", arrMetadata)).toBe("Annual Recurring Revenue"); + expect(formatLabel("region", regionMetadata)).toBe("Region"); + expect(formatValue(1234.56, arrMetadata.format)).toBe("$1,234.56"); + // No format spec on the dimension; passes through value as-is. + expect(formatValue("EMEA", regionMetadata.format)).toBe("EMEA"); + }); + + test("KPI tile workflow: scalar value with optional unknown format", () => { + // Customer KPI tile is a single value display. + expect(formatValue(0.427, "0.0%")).toBe("42.7%"); + // Falls back gracefully when the metric YAML lacks a format spec. 
+ expect(formatValue(0.427)).toBeTruthy(); + }); +}); diff --git a/packages/appkit-ui/src/format/__tests__/registry.test.ts b/packages/appkit-ui/src/format/__tests__/registry.test.ts new file mode 100644 index 000000000..4c7d37024 --- /dev/null +++ b/packages/appkit-ui/src/format/__tests__/registry.test.ts @@ -0,0 +1,103 @@ +import { afterEach, describe, expect, test } from "vitest"; +import { + _getRegisteredBundleForTesting, + clearMetricsMetadata, + getMetricMetadata, + registerMetricsMetadata, +} from "../registry"; +import type { MetricsMetadataBundle } from "../types"; + +afterEach(() => { + clearMetricsMetadata(); +}); + +const sampleBundle: MetricsMetadataBundle = { + revenue: { + measures: { + arr: { + type: "DECIMAL(38,2)", + display_name: "Annual Recurring Revenue", + format: "$#,##0.00", + }, + }, + dimensions: { + region: { type: "STRING" }, + }, + }, + customer_metrics: { + measures: { + churn: { type: "DOUBLE", format: "0.0%" }, + }, + dimensions: { + csm_email: { type: "STRING" }, + }, + }, +}; + +describe("registerMetricsMetadata + getMetricMetadata", () => { + test("returns null for any key when no bundle has been registered", () => { + expect(getMetricMetadata("revenue")).toBeNull(); + }); + + test("returns the registered metadata for a known metric key", () => { + registerMetricsMetadata(sampleBundle); + const metadata = getMetricMetadata("revenue"); + expect(metadata).not.toBeNull(); + expect(metadata?.measures.arr.format).toBe("$#,##0.00"); + expect(metadata?.measures.arr.display_name).toBe( + "Annual Recurring Revenue", + ); + }); + + test("returns null for an unregistered metric key", () => { + registerMetricsMetadata(sampleBundle); + expect(getMetricMetadata("nonexistent")).toBeNull(); + }); + + test("returns the same object reference on repeated lookups (stable identity)", () => { + registerMetricsMetadata(sampleBundle); + const ref1 = getMetricMetadata("revenue"); + const ref2 = getMetricMetadata("revenue"); + expect(ref1).toBe(ref2); + 
}); + + test("calling register replaces the previous bundle wholesale", () => { + registerMetricsMetadata(sampleBundle); + expect(getMetricMetadata("revenue")).not.toBeNull(); + + const newBundle: MetricsMetadataBundle = { + orders: { + measures: { count: { type: "BIGINT" } }, + dimensions: {}, + }, + }; + registerMetricsMetadata(newBundle); + expect(getMetricMetadata("revenue")).toBeNull(); + expect(getMetricMetadata("orders")).not.toBeNull(); + }); + + test("registering null clears the bundle", () => { + registerMetricsMetadata(sampleBundle); + expect(getMetricMetadata("revenue")).not.toBeNull(); + registerMetricsMetadata(null); + expect(getMetricMetadata("revenue")).toBeNull(); + }); + + test("clearMetricsMetadata resets the registry to unregistered state", () => { + registerMetricsMetadata(sampleBundle); + clearMetricsMetadata(); + expect(getMetricMetadata("revenue")).toBeNull(); + expect(_getRegisteredBundleForTesting()).toBeNull(); + }); + + test("returns metadata for any registered key regardless of execution lane", () => { + // Lane is a server-side concern (lives in metric.json) and is not part + // of the client-facing bundle. The hook returns metadata uniformly for + // both SP-lane and OBO-lane metrics. + registerMetricsMetadata(sampleBundle); + expect(getMetricMetadata("revenue")?.measures.arr.format).toBe("$#,##0.00"); + expect(getMetricMetadata("customer_metrics")?.measures.churn.format).toBe( + "0.0%", + ); + }); +}); diff --git a/packages/appkit-ui/src/format/format.ts b/packages/appkit-ui/src/format/format.ts new file mode 100644 index 000000000..6bc5ea677 --- /dev/null +++ b/packages/appkit-ui/src/format/format.ts @@ -0,0 +1,362 @@ +import type { ColumnMetadata, FormatSpec } from "./types"; + +/** + * Module-level cache for parsed format specs. Format strings are pinned by + * the metric view's YAML and reused for every cell render — without caching, + * `parseFormatSpec` runs ~5 regex matches per call. 
Cardinality in + production is tiny (one entry per measure/dim column with a format), so + an unbounded `Map` is safe; the cache lives for the lifetime of the + module and clears with the page. + */ +const parsedFormatCache = new Map<FormatSpec, ParsedFormat | null>(); + +/** + * Module-level cache for `Intl.NumberFormat` instances. Allocation is + * notoriously slow in V8, and chart cells call `formatValue` per row. A + * 1000-row × 5-column table would otherwise pay ~5000 instantiations per + * render. Keyed on a stringified options bundle so identical option sets + * share an instance. + */ +const numberFormatCache = new Map<string, Intl.NumberFormat>(); + +function getNumberFormat(options: Intl.NumberFormatOptions): Intl.NumberFormat { + // Locale is left at the runtime default (`undefined`) — same as the + // pre-cache code — so options serialization is the only key dimension. + const key = JSON.stringify(options); + let fmt = numberFormatCache.get(key); + if (fmt === undefined) { + fmt = new Intl.NumberFormat(undefined, options); + numberFormatCache.set(key, fmt); + } + return fmt; +} + +/** + * Library-agnostic format utilities for UC Metric View consumption. + * + * Phase 5 of the analytics-metric-view PRD: customers wire metric metadata + * into Plotly / ECharts / table cells / KPI tiles via these three helpers. + * No chart-library lock-in, no AppKit-specific chart prop — the utilities + * accept the YAML 1.1 format spec verbatim and produce strings the consumer + * passes into their chart of choice. + * + * Design decisions: + * - **Format-string passthrough.** UC YAML emits printf-style strings; we + * forward them. We do NOT design our own format DSL. Consumers see exactly + * what their data engineers wrote in the metric view spec. + * - **Tolerant fallbacks.** Unrecognized format strings fall back to + * sensible defaults (`Intl.NumberFormat` for `formatValue`, identity for + * `toD3Format`) rather than throwing. 
Charts continue to render even when + * the metric view's format spec uses an unsupported pattern. + * - **No `d3-format` dependency.** `toD3Format` is a pure string conversion + * — d3-format itself is the consumer (Plotly's tickformat, ECharts' + * valueFormatter, etc.). + * - **No null/undefined surprises.** All three helpers handle nullish + * inputs gracefully so chart code can pass values straight through + * without pre-checking. + */ + +/** + * Format a raw value into a display string per a YAML 1.1 format spec. + * + * When `format` is provided and recognized: + * - `$#,##0.00` style → currency (`"$1,234.56"`) + * - `#,##0.00` / `0.000` style → fixed-precision number (`"1,234.57"`) + * - `0.0%` / `#,##0%` style → percentage (`"42.7%"`) + * - `#,##0` style → integer with thousands separator (`"1,234"`) + * + * When `format` is unset / unrecognized / unparseable, falls back to: + * - localized number formatting via `Intl.NumberFormat` for numeric values + * - `String(value)` for non-numeric values + * + * Null / undefined input always returns the empty string — chart code can + * pass row cells straight through without pre-checking. + * + * @example + * formatValue(1234.56, "$#,##0.00") // "$1,234.56" + * formatValue(0.427, "0.0%") // "42.7%" + * formatValue(1234, "#,##0") // "1,234" + * formatValue(42, undefined) // "42" + * formatValue("EMEA", undefined) // "EMEA" + * formatValue(null, "$#,##0.00") // "" + */ +export function formatValue(value: unknown, format?: FormatSpec): string { + if (value == null) return ""; + + // Non-numeric values are returned as their string form regardless of format + // spec — the spec only makes sense for numeric output and the printf style + // does not have a defined meaning over strings/booleans/dates. + if (typeof value !== "number" && typeof value !== "bigint") { + return String(value); + } + + const numeric = typeof value === "bigint" ? 
Number(value) : value; + if (!Number.isFinite(numeric)) return String(numeric); + + const parsed = format ? parseFormatSpec(format) : null; + if (parsed == null) { + // No format / unrecognized format → localized number formatting. Using + // the user's locale (no explicit "en-US") so numbers render correctly in + // EU/JP/etc apps without the customer wiring locale plumbing. + return getNumberFormat({ maximumFractionDigits: 6 }).format(numeric); + } + + const { kind, fractionDigits, useGrouping, currencyPrefix, currencySuffix } = + parsed; + + switch (kind) { + case "percent": + return getNumberFormat({ + style: "percent", + minimumFractionDigits: fractionDigits, + maximumFractionDigits: fractionDigits, + useGrouping, + }).format(numeric); + case "currency": { + // We emit the currency symbol verbatim from the format spec rather than + // relying on `Intl.NumberFormat({ style: "currency", currency: "USD" })` + // — the YAML's `$#,##0.00` does not specify ISO currency code, and + // assuming USD would be wrong for non-US deployments. Passthrough lets + // data engineers pin the symbol they intend. + const numberPart = getNumberFormat({ + minimumFractionDigits: fractionDigits, + maximumFractionDigits: fractionDigits, + useGrouping, + }).format(Math.abs(numeric)); + const sign = numeric < 0 ? "-" : ""; + return `${sign}${currencyPrefix ?? ""}${numberPart}${currencySuffix ?? ""}`; + } + case "number": + return getNumberFormat({ + minimumFractionDigits: fractionDigits, + maximumFractionDigits: fractionDigits, + useGrouping, + }).format(numeric); + } +} + +/** + * Render a column's display label. + * + * Returns `display_name` from the metadata when present (the YAML 1.1 + * canonical label). 
When metadata is absent or `display_name` is missing, + * humanizes the column name: + * - snake_case (`total_revenue`) → "Total Revenue" + * - camelCase (`customerCount`) → "Customer Count" + * - PascalCase (`UserId`) → "User Id" + * - SCREAMING_SNAKE (`USER_ID`) → "User Id" + * - already-spaced (`Annual Recurring Revenue`) → unchanged (title-case) + * + * @example + * formatLabel("arr", { type: "DECIMAL", display_name: "Annual Recurring Revenue" }) + * // "Annual Recurring Revenue" + * formatLabel("total_revenue") // "Total Revenue" + * formatLabel("customerCount") // "Customer Count" + * formatLabel("revenue") // "Revenue" + */ +export function formatLabel( + name: string, + columnMetadata?: ColumnMetadata, +): string { + if ( + columnMetadata?.display_name && + columnMetadata.display_name.trim().length > 0 + ) { + return columnMetadata.display_name; + } + return humanizeIdentifier(name); +} + +/** + * Convert a UC YAML 1.1 printf-style format spec to a d3-format-compatible + * string. The output is consumed by Plotly's `tickformat`, ECharts' + * `valueFormatter`, table cell formatters, and any other library that + * understands d3-format syntax. + * + * Conversions: + * - `$#,##0.00` → `"$,.2f"` + * - `0.00%` → `".2%"` + * - `#,##0` → `",.0f"` + * - `0.000` → `".3f"` + * + * Unrecognized specs fall back to identity (the input string) so the chart + * library either consumes it directly (if it happens to be d3-format already) + * or surfaces its own warning. Returns the empty string for nullish input + * (chart libraries treat `""` as "use default"). + * + * @example + * toD3Format("$#,##0.00") // "$,.2f" + * toD3Format("0.0%") // ".1%" + * toD3Format("#,##0") // ",.0f" + * toD3Format(undefined) // "" + */ +export function toD3Format(format?: FormatSpec): string { + if (!format) return ""; + const parsed = parseFormatSpec(format); + if (parsed == null) { + // Unrecognized → identity. The consumer's chart library decides whether + // to consume it (e.g. 
Plotly silently ignores invalid tickformats) or to + // surface its own warning. We don't throw because chart libraries + // typically can't propagate exceptions out of their render path. + return format; + } + + const groupPart = parsed.useGrouping ? "," : ""; + switch (parsed.kind) { + case "currency": + // d3-format's `$` prefix is the standard "use locale's currency + // symbol" — most Plotly users want the YAML's literal symbol though. + // We emit `$` here so existing d3-format docs match; consumers that + // need a non-USD symbol pass `format` directly into their chart. + return `$${groupPart}.${parsed.fractionDigits}f`; + case "percent": + return `${groupPart}.${parsed.fractionDigits}%`; + case "number": + return `${groupPart}.${parsed.fractionDigits}f`; + } +} + +/** + * Parsed shape of a printf-style format spec. The parser is intentionally + * narrow: it recognizes the shapes UC documents (`$#,##0.00`, `0.0%`, + * `#,##0`, `0.000`, etc.) and returns null for anything else so callers can + * fall back to a sensible default. + * + * @internal + */ +interface ParsedFormat { + kind: "currency" | "percent" | "number"; + fractionDigits: number; + useGrouping: boolean; + /** Currency prefix (e.g. `"$"`, `"€"`) when the format starts with a symbol. */ + currencyPrefix?: string; + /** Currency suffix (e.g. `" kr"`) when the format ends with a non-digit token. */ + currencySuffix?: string; +} + +/** + * Recognize the small grammar of printf-style format specs we forward. + * + * Approach: strip percent / currency markers, count fractional digits via + * the substring after `.`, detect grouping via the presence of `,`. Anything + * not matching the recognized shape returns null. + * + * Result is memoized in {@link parsedFormatCache} — format strings are + * pinned by the metric view's YAML and reused for every cell render, so we + * pay the regex cost once per distinct spec. 
+ */ +function parseFormatSpec(spec: FormatSpec): ParsedFormat | null { + const cached = parsedFormatCache.get(spec); + if (cached !== undefined) return cached; + const result = parseFormatSpecImpl(spec); + parsedFormatCache.set(spec, result); + return result; +} + +function parseFormatSpecImpl(spec: FormatSpec): ParsedFormat | null { + const trimmed = spec.trim(); + if (trimmed.length === 0) return null; + + // Percent forms: `0.00%`, `#,##0%`, `0.0%`, `0%`. + const percentMatch = trimmed.match(/^([#,]*[0]+(?:\.[0]+)?)\s*%$/); + if (percentMatch) { + const numericPart = percentMatch[1]; + return { + kind: "percent", + fractionDigits: countFractionDigits(numericPart), + useGrouping: numericPart.includes(","), + }; + } + + // Currency forms: `$#,##0.00`, `€#,##0`, `$0.000`. Currency prefix is one + // or more leading non-digit/non-`#`/non-`,`/non-`.` characters, followed by + // the numeric portion. + const currencyPrefixMatch = trimmed.match(/^([^#,0.]+)([#,0.]+)$/); + if (currencyPrefixMatch) { + const prefix = currencyPrefixMatch[1]; + const numericPart = currencyPrefixMatch[2]; + if (isNumericFormat(numericPart)) { + return { + kind: "currency", + fractionDigits: countFractionDigits(numericPart), + useGrouping: numericPart.includes(","), + currencyPrefix: prefix, + }; + } + } + + // Suffix-symbol currency: `#,##0.00 kr`, `0.00 €`. Numeric portion first, + // suffix second (separated by a space or directly adjacent). + const currencySuffixMatch = trimmed.match(/^([#,0.]+)(\s*[^#,0.]+)$/); + if (currencySuffixMatch) { + const numericPart = currencySuffixMatch[1]; + const suffix = currencySuffixMatch[2]; + if (isNumericFormat(numericPart)) { + return { + kind: "currency", + fractionDigits: countFractionDigits(numericPart), + useGrouping: numericPart.includes(","), + currencySuffix: suffix, + }; + } + } + + // Plain number forms: `#,##0`, `#,##0.00`, `0.000`, `0`. 
+ if (isNumericFormat(trimmed)) { + return { + kind: "number", + fractionDigits: countFractionDigits(trimmed), + useGrouping: trimmed.includes(","), + }; + } + + return null; +} + +/** + * Whether a string is a printf-numeric pattern of `#`, `0`, `,`, and `.`. + * A valid pattern has at least one digit placeholder (`0` or `#`). + */ +function isNumericFormat(s: string): boolean { + if (!/^[#,0.]+$/.test(s)) return false; + return /[0#]/.test(s); +} + +/** Count the number of `0` or `#` placeholders after the decimal point. */ +function countFractionDigits(s: string): number { + const dotIdx = s.indexOf("."); + if (dotIdx === -1) return 0; + const fractional = s.slice(dotIdx + 1); + // Fractional part should be all `0` and `#` after the decimal — count the + // total digit-placeholder count to determine fraction width. + return (fractional.match(/[0#]/g) ?? []).length; +} + +/** + * Humanize a column identifier into a Title-Case display string. + * + * Handles snake_case, camelCase, PascalCase, SCREAMING_SNAKE_CASE, and + * already-spaced inputs. Sanitizes non-identifier characters (the same + * pattern as the existing `formatFieldLabel`'s safe-key regex) so user- + * supplied names cannot inject markup. + */ +function humanizeIdentifier(name: string): string { + const safe = name.replace(/[^a-zA-Z0-9_\- ]/g, ""); + if (safe.length === 0) return ""; + + // Insert a space before capitals (camelCase / PascalCase boundaries), + // replace `_` and `-` with spaces, collapse runs, then title-case each word. 
+ const withSpaces = safe + .replace(/([a-z0-9])([A-Z])/g, "$1 $2") + .replace(/([A-Z]+)([A-Z][a-z])/g, "$1 $2") + .replace(/[_-]+/g, " ") + .replace(/\s+/g, " ") + .trim(); + + if (withSpaces.length === 0) return ""; + + return withSpaces + .split(" ") + .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()) + .join(" "); +} diff --git a/packages/appkit-ui/src/format/index.ts b/packages/appkit-ui/src/format/index.ts new file mode 100644 index 000000000..fabc10ac8 --- /dev/null +++ b/packages/appkit-ui/src/format/index.ts @@ -0,0 +1,32 @@ +/** + * Library-agnostic format utilities for UC Metric View consumption. + * + * Phase 5 of `prd/analytics-metric-view-source.md` ships these so customers + * can wire metric metadata into Plotly / ECharts / table cells / KPI tiles + * without an AppKit-specific chart-prop lock-in. + * + * Three primary functions: + * - {@link formatValue} — turns a raw value + format spec into a display string. + * - {@link formatLabel} — returns `display_name` from metadata, falls back to humanized column name. + * - {@link toD3Format} — converts UC printf-style specs to d3-format strings. + * + * Plus a registration API for the build-time metadata bundle: + * - {@link registerMetricsMetadata} — call once at app startup with the + * imported `metrics.metadata.json`. + * - {@link getMetricMetadata} — used by `useMetricView` (and any custom + * glue code) to read per-metric metadata back out. 
+ */ + +export { formatLabel, formatValue, toD3Format } from "./format"; +export { + _getRegisteredBundleForTesting, + clearMetricsMetadata, + getMetricMetadata, + registerMetricsMetadata, +} from "./registry"; +export type { + ColumnMetadata, + FormatSpec, + MetricMetadata, + MetricsMetadataBundle, +} from "./types"; diff --git a/packages/appkit-ui/src/format/registry.ts b/packages/appkit-ui/src/format/registry.ts new file mode 100644 index 000000000..33c0827c8 --- /dev/null +++ b/packages/appkit-ui/src/format/registry.ts @@ -0,0 +1,78 @@ +import type { MetricMetadata, MetricsMetadataBundle } from "./types"; + +/** + * In-memory store for the build-time-bundled metric semantic metadata. + * + * The `metrics.metadata.json` artifact emitted by the AppKit type-generator + * is opt-in: the consuming app imports it and calls + * {@link registerMetricsMetadata} once at startup (typically in the same + * module that mounts the React tree). The `useMetricView` hook reads from + * this store on every render via {@link getMetricMetadata}; the returned + * object reference is stable across re-renders for the same metric key. + * + * The store is process-global by design — the metadata is inert data + * (display names, format specs, descriptions) and there is no per-user or + * per-request variation. Using a module-level singleton keeps the surface + * minimal: the customer touches this once, the hook reads it many times. + */ +let registeredBundle: MetricsMetadataBundle | null = null; + +/** + * Register the build-time semantic-metadata bundle for the running app. 
+ * + * Typical usage at app startup: + * + * ```ts + * import metricsMetadata from "../shared/appkit-types/metrics.metadata.json"; + * import { registerMetricsMetadata } from "@databricks/appkit-ui/format"; + * + * registerMetricsMetadata(metricsMetadata); + * ``` + * + * Calling this multiple times replaces the previous bundle — useful in dev + * mode if the type-generator regenerates the file mid-session, but the hook + * is intentionally not reactive to bundle changes (the metadata is + * build-time-frozen at deploy by the PRD's contract). Tests reset between + * runs via {@link clearMetricsMetadata}. + */ +export function registerMetricsMetadata( + bundle: MetricsMetadataBundle | null, +): void { + registeredBundle = bundle ?? null; +} + +/** + * Retrieve the metadata for one registered metric. + * + * Returns `null` when: + * - no bundle has been registered (the app didn't opt into the metadata flow), or + * - the bundle has no entry for `metricKey` (typo / out-of-sync registration). + * + * The returned object is a direct reference into the registered bundle — + * {@link useMetricView} relies on this for stable identity across re-renders. + * Callers must not mutate it. + */ +export function getMetricMetadata(metricKey: string): MetricMetadata | null { + if (registeredBundle == null) return null; + const entry = registeredBundle[metricKey]; + return entry ?? null; +} + +/** + * Test-only seam: reset the registry between tests so leftover state from a + * previous test cannot affect the next one. Production code never calls this. + * + * @internal + */ +export function clearMetricsMetadata(): void { + registeredBundle = null; +} + +/** + * Test-only seam: introspect the registered bundle. 
+ * + * @internal + */ +export function _getRegisteredBundleForTesting(): MetricsMetadataBundle | null { + return registeredBundle; +} diff --git a/packages/appkit-ui/src/format/types.ts b/packages/appkit-ui/src/format/types.ts new file mode 100644 index 000000000..6a51d85c9 --- /dev/null +++ b/packages/appkit-ui/src/format/types.ts @@ -0,0 +1,72 @@ +/** + * Library-agnostic semantic-metadata types used by the format utilities and + * the `useMetricView` hook's `metadata` return field. + * + * Lives in `@databricks/appkit-ui/format` — no React dependency, no SSE + * dependency. The shape mirrors the build-time `metrics.metadata.json` + * artifact one-for-one so consumers can typecheck against the file they + * imported without an extra cast. + * + * Source of truth: the YAML 1.1 metric-view spec on Unity Catalog. Every + * field except `type` is optional in the YAML, so every consumer is required + * to defend against absence (the format utilities all have sensible + * fallbacks; the hook returns `null` when the bundle has not been registered). + */ + +/** + * Printf-style format spec sourced from a YAML 1.1 metric view. The framework + * forwards the verbatim string — we deliberately do not invent a format DSL. + * + * Examples (from the UC metric-view docs): + * - `"$#,##0.00"` — currency with two decimals (`"$1,234.56"`) + * - `"0.0%"` — percentage with one decimal (`"42.7%"`) + * - `"#,##0"` — integer with thousands separator (`"1,234"`) + * - `"0.000"` — fixed-precision number (`"1.235"`) + * + * Unrecognized specs fall back to localized number formatting (`formatValue`) + * or identity (`toD3Format`). + */ +export type FormatSpec = string; + +/** + * Per-column metadata as emitted into the build-time bundle and returned by + * the hook. Mirrors {@link MetricColumnMetadata} in + * `@databricks/appkit-ui/react` — duplicated here because format utilities + * must not import from the React subpath. 
+ */
+export interface ColumnMetadata {
+  /** Databricks SQL type ("STRING", "DECIMAL(38,2)", "TIMESTAMP", ...). */
+  type: string;
+  /** YAML 1.1 `display_name` — used by `formatLabel` as the canonical title. */
+  display_name?: string;
+  /** YAML 1.1 `format` spec — printf-style passthrough. */
+  format?: FormatSpec;
+  /** Column-level documentation. */
+  description?: string;
+  /** Allowed time-grains (only present on time-typed dimensions). */
+  time_grain?: readonly string[];
+}
+
+/**
+ * One metric's complete semantic-metadata bundle.
+ *
+ * Top-level matches the shape in the build-time `metrics.metadata.json` file:
+ * `Record<string, MetricMetadata>`. Each entry carries per-column metadata
+ * for measures and dimensions — display names, format specs, descriptions,
+ * time-grain hints. Server-side concerns (UC FQN, execution lane) live in
+ * `metric.json` and are deliberately NOT part of this artifact: it ships to
+ * the client.
+ */
+export interface MetricMetadata {
+  measures: Record<string, ColumnMetadata>;
+  dimensions: Record<string, ColumnMetadata>;
+}
+
+/**
+ * The full registered metadata bundle.
+ *
+ * Top-level keys are metric keys; each entry is a {@link MetricMetadata} for
+ * one metric. Pass the imported JSON to `registerMetricsMetadata()` once at
+ * app startup; the hook reads it back on every render via a stable lookup.
+ */
+export type MetricsMetadataBundle = Record<string, MetricMetadata>;
diff --git a/packages/appkit-ui/src/react/hooks/__tests__/use-metric-view-metadata.test.ts b/packages/appkit-ui/src/react/hooks/__tests__/use-metric-view-metadata.test.ts
new file mode 100644
index 000000000..3e0fedf6a
--- /dev/null
+++ b/packages/appkit-ui/src/react/hooks/__tests__/use-metric-view-metadata.test.ts
@@ -0,0 +1,209 @@
+import { renderHook } from "@testing-library/react";
+import { afterEach, describe, expect, test, vi } from "vitest";
+import {
+  clearMetricsMetadata,
+  type MetricsMetadataBundle,
+  registerMetricsMetadata,
+} from "@/format";
+
+// Mock connectSSE so the hook's render path doesn't fire a real network call
+// (we don't need the data flow here — just metadata reading).
+vi.mock("@/js", () => ({
+  connectSSE: vi.fn().mockImplementation(
+    () =>
+      new Promise((resolve) => {
+        setTimeout(resolve, 0);
+      }),
+  ),
+}));
+
+import { useMetricView } from "../use-metric-view";
+
+const REVENUE_BUNDLE: MetricsMetadataBundle = {
+  revenue: {
+    measures: {
+      arr: {
+        type: "DECIMAL(38,2)",
+        display_name: "Annual Recurring Revenue",
+        format: "$#,##0.00",
+      },
+      mrr: {
+        type: "DECIMAL(38,2)",
+        display_name: "Monthly Recurring Revenue",
+        format: "$#,##0.00",
+      },
+    },
+    dimensions: {
+      region: { type: "STRING", display_name: "Region" },
+      created_at: {
+        type: "TIMESTAMP",
+        display_name: "Period",
+        time_grain: ["day", "week", "month"],
+      },
+    },
+  },
+  other_metric: {
+    measures: { count: { type: "BIGINT" } },
+    dimensions: {},
+  },
+};
+
+describe("useMetricView — Phase 5 metadata return field", () => {
+  afterEach(() => {
+    clearMetricsMetadata();
+    vi.clearAllMocks();
+  });
+
+  test("metadata is null when no bundle has been registered", () => {
+    const { result } = renderHook(() =>
+      useMetricView("revenue", { measures: ["arr"] }),
+    );
+    expect(result.current.metadata).toBeNull();
+  });
+
+  test("metadata returns the per-metric subset when registered", () => {
+    registerMetricsMetadata(REVENUE_BUNDLE);
+    const { result } = renderHook(() =>
+      useMetricView("revenue", { measures: ["arr"] }),
+    );
+    expect(result.current.metadata).not.toBeNull();
+    expect(result.current.metadata?.measures.arr.format).toBe("$#,##0.00");
+    expect(result.current.metadata?.measures.arr.display_name).toBe(
+      "Annual Recurring Revenue",
+    );
+  });
+
+  test("metadata excludes other metrics in the same bundle", () => {
+    registerMetricsMetadata(REVENUE_BUNDLE);
+    const { result } = renderHook(() =>
+      useMetricView("revenue", { measures: ["arr"] }),
+    );
+    const meta = result.current.metadata;
+    expect(meta).not.toBeNull();
+    // The metadata is the revenue entry only — `other_metric` is not nested in.
+    expect(Object.keys(meta?.measures ?? {})).toEqual(["arr", "mrr"]);
+    // No "other_metric" key leaks through.
+    expect(
+      (meta as unknown as Record<string, unknown>).other_metric,
+    ).toBeUndefined();
+  });
+
+  test("metadata is available immediately on first render (before data resolves)", () => {
+    registerMetricsMetadata(REVENUE_BUNDLE);
+    const { result } = renderHook(() =>
+      useMetricView("revenue", { measures: ["arr"] }),
+    );
+    // PRD contract: metadata is build-time-bundled, not fetched, so it's
+    // available even when the data is still loading.
+ expect(result.current.data).toBeNull(); + expect(result.current.loading).toBe(true); + expect(result.current.metadata).not.toBeNull(); + }); + + test("metadata is stable across re-renders for the same metric key", () => { + registerMetricsMetadata(REVENUE_BUNDLE); + const { result, rerender } = renderHook( + ({ measures }) => + useMetricView("revenue", { measures } as { measures: ["arr" | "mrr"] }), + { + initialProps: { measures: ["arr"] as ["arr" | "mrr"] }, + }, + ); + + const firstRef = result.current.metadata; + rerender({ measures: ["mrr"] }); + const secondRef = result.current.metadata; + rerender({ measures: ["arr"] }); + const thirdRef = result.current.metadata; + + // Same metric key → same metadata reference across re-renders, regardless + // of how `args` changes. + expect(firstRef).toBe(secondRef); + expect(firstRef).toBe(thirdRef); + }); + + test("metadata changes when the metric key changes", () => { + registerMetricsMetadata(REVENUE_BUNDLE); + // The cast escapes the cross-file MetricRegistry augmentation that the + // sibling type-tests file declares — those augmentations leak into the + // global type universe of the test project, but we want this hook test to + // exercise the runtime metadata-resolution logic with synthetic keys. + const { result, rerender } = renderHook( + ({ key }: { key: string }) => + useMetricView(key as never, { measures: ["count"] } as never), + { + initialProps: { key: "revenue" }, + }, + ); + + // The `key as never` cast above narrows the metadata return type to + // `never`; assert on the runtime shape with a structural cast. 
+    const revenueMetadata = result.current.metadata as unknown as {
+      measures: Record<string, unknown>;
+    } | null;
+    rerender({ key: "other_metric" });
+    const otherMetadata = result.current.metadata as unknown as {
+      measures: Record<string, unknown>;
+    } | null;
+
+    expect(revenueMetadata).not.toBe(otherMetadata);
+    expect(revenueMetadata?.measures).toHaveProperty("arr");
+    expect(otherMetadata?.measures).toHaveProperty("count");
+  });
+
+  test("metadata is null when the metric key is not in the registered bundle", () => {
+    registerMetricsMetadata(REVENUE_BUNDLE);
+    const { result } = renderHook(() =>
+      // Deliberate test of runtime fallback when the metric key is missing
+      // from the registered bundle. The cast escapes the augmented-registry
+      // type narrowing — the runtime semantics are what matter here.
+      useMetricView("not_in_bundle" as never, { measures: ["x"] } as never),
+    );
+    expect(result.current.metadata).toBeNull();
+  });
+
+  test("metadata exposes time_grain on time-typed dimensions", () => {
+    registerMetricsMetadata(REVENUE_BUNDLE);
+    const { result } = renderHook(() =>
+      useMetricView("revenue", { measures: ["arr"] }),
+    );
+    // The cross-file MetricRegistry augmentation narrows the dimensions
+    // shape, so we read it back as the structural metadata type to inspect
+    // runtime values.
+    const dims = (result.current.metadata?.dimensions ?? {}) as Record<
+      string,
+      { time_grain?: readonly string[] }
+    >;
+    expect(dims.created_at?.time_grain).toEqual(["day", "week", "month"]);
+    expect(dims.region?.time_grain).toBeUndefined();
+  });
+
+  test("metadata reference is stable when bundle is re-registered with the same metric key (PRD's stable-not-reactive contract)", () => {
+    registerMetricsMetadata(REVENUE_BUNDLE);
+    const { result, rerender } = renderHook(() =>
+      useMetricView("revenue", { measures: ["arr"] }),
+    );
+    const firstRef = result.current.metadata;
+    expect(firstRef).not.toBeNull();
+
+    // Re-register a new bundle with the same key but different data.
The + // hook is intentionally NOT reactive to bundle changes — the PRD says + // metadata is build-time-frozen and stable for the lifetime of a metric + // key. Re-registration during a session is a dev-mode hot-reload signal + // that requires a remount to pick up; mid-render swaps would break the + // "stable across re-renders" contract that downstream memoization + // depends on. + const newBundle: MetricsMetadataBundle = { + revenue: { + measures: { arr: { type: "DECIMAL", format: "0.00" } }, + dimensions: {}, + }, + }; + registerMetricsMetadata(newBundle); + rerender(); + const refAfterRegister = result.current.metadata; + // Same reference — useMemo keys on metricKey, so within a single mount + // the hook returns the originally-resolved metadata until a remount. + expect(refAfterRegister).toBe(firstRef); + }); +}); diff --git a/packages/appkit-ui/src/react/hooks/__tests__/use-metric-view-types.test.ts b/packages/appkit-ui/src/react/hooks/__tests__/use-metric-view-types.test.ts new file mode 100644 index 000000000..30271c44d --- /dev/null +++ b/packages/appkit-ui/src/react/hooks/__tests__/use-metric-view-types.test.ts @@ -0,0 +1,293 @@ +import { describe, expectTypeOf, test } from "vitest"; +import type { + DimensionKey, + Filter, + MeasureKey, + MetricFilterOperator, + MetricMetadata, + MetricSemanticMetadata, + Predicate, + TimeGrain, + UseMetricViewArgs, + UseMetricViewResult, + UseMetricViewRow, +} from "../types"; + +/** + * Compile-time type tests for useMetricView's narrowing behaviour. + * + * These tests use `expectTypeOf` and never invoke the hook at runtime — they + * verify that the registry-derived helpers (`MeasureKey`, `DimensionKey`, + * `TimeGrain`, `UseMetricViewRow`) compose correctly when the registry is + * augmented. + * + * The MetricRegistry interface is augmented locally inside this file via + * module declaration. The augmentation only affects the type universe of + * this test file; production code is untouched. 
+ */
+
+declare module "../types" {
+  interface MetricRegistry {
+    revenue: {
+      key: "revenue";
+      source: "appkit_demo.public.revenue_metrics";
+      lane: "sp";
+      measures: { arr: number; mrr: number };
+      dimensions: { region: string; segment: string; created_at: string };
+      measureKeys: "arr" | "mrr";
+      dimensionKeys: "region" | "segment" | "created_at";
+      timeGrains: "day" | "month" | "week";
+      metadata: {
+        measures: {
+          arr: {
+            type: "DECIMAL(38,2)";
+            display_name: "Annual Recurring Revenue";
+            format: "$#,##0.00";
+          };
+          mrr: { type: "DECIMAL(38,2)" };
+        };
+        dimensions: {
+          region: { type: "STRING" };
+          segment: { type: "STRING" };
+          created_at: {
+            type: "TIMESTAMP";
+            time_grain: readonly ["day", "month", "week"];
+          };
+        };
+      };
+    };
+    flat_metric: {
+      key: "flat_metric";
+      source: "demo.public.flat";
+      lane: "sp";
+      measures: { count: number };
+      dimensions: Record<string, never>;
+      measureKeys: "count";
+      dimensionKeys: never;
+      timeGrains: never;
+      metadata: {
+        measures: { count: { type: "BIGINT" } };
+        dimensions: Record<string, never>;
+      };
+    };
+  }
+}
+
+describe("MeasureKey / DimensionKey / TimeGrain", () => {
+  test("MeasureKey narrows to the registry's declared measure union", () => {
+    expectTypeOf<MeasureKey<"revenue">>().toEqualTypeOf<"arr" | "mrr">();
+  });
+
+  test("DimensionKey narrows to the registry's declared dimension union", () => {
+    expectTypeOf<DimensionKey<"revenue">>().toEqualTypeOf<
+      "region" | "segment" | "created_at"
+    >();
+  });
+
+  test("TimeGrain narrows to the union of YAML-allowed grains", () => {
+    expectTypeOf<TimeGrain<"revenue">>().toEqualTypeOf<
+      "day" | "month" | "week"
+    >();
+  });
+
+  test("DimensionKey is `never` when the registry declares no dimensions", () => {
+    expectTypeOf<DimensionKey<"flat_metric">>().toEqualTypeOf<never>();
+  });
+
+  test("TimeGrain is `never` when the registry declares no time-typed dims", () => {
+    expectTypeOf<TimeGrain<"flat_metric">>().toEqualTypeOf<never>();
+  });
+
+  test("TimeGrain falls back to `string` for unregistered keys", () => {
+    type DynamicGrain = TimeGrain<"unregistered_key">;
+    expectTypeOf<DynamicGrain>().toEqualTypeOf<string>();
+  });
+});
+
+describe("UseMetricViewArgs — call-site narrowing", () => {
+  test("measures + dimensions tuples preserve literal types under `const` modifiers", () => {
+    type Args = UseMetricViewArgs<
+      "revenue",
+      readonly ["arr"],
+      readonly ["region"]
+    >;
+    expectTypeOf<Args["measures"]>().toEqualTypeOf<readonly ["arr"]>();
+    expectTypeOf<Args["dimensions"]>().toEqualTypeOf<
+      readonly ["region"] | undefined
+    >();
+  });
+
+  test("timeGrain is constrained to TimeGrain when provided", () => {
+    type Args = UseMetricViewArgs<
+      "revenue",
+      readonly ["arr"],
+      readonly ["created_at"]
+    >;
+    expectTypeOf<Args["timeGrain"]>().toEqualTypeOf<
+      "day" | "month" | "week" | undefined
+    >();
+  });
+});
+
+describe("UseMetricViewRow — row narrowing via Pick", () => {
+  test("measures-only call narrows the row to just the chosen measures", () => {
+    type Row = UseMetricViewRow<"revenue", readonly ["arr"], readonly []>;
+    expectTypeOf<Row>().toEqualTypeOf<{ arr: number }>();
+  });
+
+  test("measures + one dimension narrows to the union of both", () => {
+    type Row = UseMetricViewRow<
+      "revenue",
+      readonly ["arr"],
+      readonly ["region"]
+    >;
+    expectTypeOf<Row>().toEqualTypeOf<{ arr: number; region: string }>();
+  });
+
+  test("multiple measures + multiple dimensions composes correctly", () => {
+    type Row = UseMetricViewRow<
+      "revenue",
+      readonly ["arr", "mrr"],
+      readonly ["region", "created_at"]
+    >;
+    expectTypeOf<Row>().toEqualTypeOf<{
+      arr: number;
+      mrr: number;
+      region: string;
+      created_at: string;
+    }>();
+  });
+
+  test("dimensions-only call narrows the row to just the dimensions", () => {
+    type Row = UseMetricViewRow<"revenue", readonly [], readonly ["segment"]>;
+    expectTypeOf<Row>().toEqualTypeOf<{ segment: string }>();
+  });
+});
+
+describe("Filter / Predicate — recursive shape and registry narrowing", () => {
+  test("Predicate.member narrows to DimensionKey", () => {
+    type RevenueMember = Predicate<"revenue">["member"];
+    expectTypeOf<RevenueMember>().toEqualTypeOf<
+      "region" | "segment" | "created_at"
+    >();
+  });
+
+  test("Predicate.operator narrows to MetricFilterOperator (12 v1 ops)", () => {
+    type Op = Predicate<"revenue">["operator"];
+    expectTypeOf<Op>().toEqualTypeOf<MetricFilterOperator>();
+  });
+
+  test("MetricFilterOperator union has exactly 12 members", () => {
+    type Op = MetricFilterOperator;
+    // exactness guard: assignability both ways
+    expectTypeOf<Op>().toEqualTypeOf<
+      | "equals"
+      | "notEquals"
+      | "in"
+      | "notIn"
+      | "gt"
+      | "gte"
+      | "lt"
+      | "lte"
+      | "contains"
+      | "notContains"
+      | "set"
+      | "notSet"
+    >();
+  });
+
+  test("Filter accepts a leaf Predicate", () => {
+    const leaf: Filter<"revenue"> = {
+      member: "region",
+      operator: "equals",
+      values: ["EMEA"],
+    };
+    expectTypeOf(leaf).toMatchTypeOf<Filter<"revenue">>();
+  });
+
+  test("Filter accepts an { and: Filter[] } group (recursive)", () => {
+    const grouped: Filter<"revenue"> = {
+      and: [
+        { member: "region", operator: "equals", values: ["EMEA"] },
+        { member: "segment", operator: "equals", values: ["Enterprise"] },
+      ],
+    };
+    expectTypeOf(grouped).toMatchTypeOf<Filter<"revenue">>();
+  });
+
+  test("Filter accepts an { or: Filter[] } group with nested AND (recursive)", () => {
+    const nested: Filter<"revenue"> = {
+      or: [
+        {
+          and: [
+            { member: "region", operator: "equals", values: ["EMEA"] },
+            { member: "segment", operator: "equals", values: ["Enterprise"] },
+          ],
+        },
+        { member: "region", operator: "equals", values: ["APAC"] },
+      ],
+    };
+    expectTypeOf(nested).toMatchTypeOf<Filter<"revenue">>();
+  });
+
+  test("UseMetricViewArgs accepts an optional filter narrowing to DimensionKey", () => {
+    type Args = UseMetricViewArgs<
+      "revenue",
+      readonly ["arr"],
+      readonly ["region"]
+    >;
+    expectTypeOf<Args["filter"]>().toEqualTypeOf<
+      Filter<"revenue"> | undefined
+    >();
+  });
+
+  test("Predicate.member is `never` when the registry declares no dimensions", () => {
+    type Member = Predicate<"flat_metric">["member"];
+    expectTypeOf<Member>().toEqualTypeOf<never>();
+  });
+});
+
+// ── Phase 5: MetricMetadata narrows per-metric, hook return shape carries metadata ──
+describe("MetricMetadata — Phase 5 metadata narrowing", () => {
+  test("MetricMetadata narrows to the registry's metadata shape for registered keys", () => {
+    type Meta = MetricMetadata<"revenue">;
+    expectTypeOf<
+      Meta["measures"]["arr"]["format"]
+    >().toEqualTypeOf<"$#,##0.00">();
+    expectTypeOf<
+      Meta["measures"]["arr"]["display_name"]
+    >().toEqualTypeOf<"Annual Recurring Revenue">();
+  });
+
+  test("MetricMetadata exposes time_grain literal tuple on time-typed dims", () => {
+    type Meta = MetricMetadata<"revenue">;
+    expectTypeOf<
+      Meta["dimensions"]["created_at"]["time_grain"]
+    >().toEqualTypeOf<readonly ["day", "month", "week"]>();
+  });
+
+  test("MetricMetadata's measures only contain the metric's own keys (not other metrics')", () => {
+    type Meta = MetricMetadata<"revenue">;
+    type MeasureKeys = keyof Meta["measures"];
+    expectTypeOf<MeasureKeys>().toEqualTypeOf<"arr" | "mrr">();
+
+    type FlatMeta = MetricMetadata<"flat_metric">;
+    type FlatKeys = keyof FlatMeta["measures"];
+    expectTypeOf<FlatKeys>().toEqualTypeOf<"count">();
+  });
+
+  test("MetricMetadata falls back to the structural shape for unregistered keys", () => {
+    type Meta = MetricMetadata<"unregistered_key">;
+    expectTypeOf<Meta>().toEqualTypeOf<MetricSemanticMetadata>();
+  });
+
+  test("UseMetricViewResult carries metadata typed per K", () => {
+    type Result = UseMetricViewResult<
+      { arr: number },
+      MetricMetadata<"revenue">
+    >;
+    type MetaField = Result["metadata"];
+    // metadata is the metric's literal-typed metadata or null.
+    expectTypeOf<MetaField>().toEqualTypeOf<MetricMetadata<"revenue"> | null>();
+  });
+});
diff --git a/packages/appkit-ui/src/react/hooks/__tests__/use-metric-view.test.ts b/packages/appkit-ui/src/react/hooks/__tests__/use-metric-view.test.ts
new file mode 100644
index 000000000..37c2412b5
--- /dev/null
+++ b/packages/appkit-ui/src/react/hooks/__tests__/use-metric-view.test.ts
@@ -0,0 +1,464 @@
+import { act, renderHook, waitFor } from "@testing-library/react";
+import { afterEach, describe, expect, test, vi } from "vitest";
+import { clearMetricsMetadata } from "@/format";
+
+// Mock connectSSE — capture callbacks so we can simulate SSE events.
+let capturedCallbacks: { + onMessage?: (msg: { data: string }) => void; + onError?: (err: Error) => void; + signal?: AbortSignal; +} = {}; + +const mockConnectSSE = vi.fn().mockImplementation((opts: any) => { + capturedCallbacks = { + onMessage: opts.onMessage, + onError: opts.onError, + signal: opts.signal, + }; + return new Promise((resolve) => { + setTimeout(resolve, 0); + }); +}); + +vi.mock("@/js", () => ({ + connectSSE: (...args: unknown[]) => mockConnectSSE(...args), +})); + +import { useMetricView } from "../use-metric-view"; + +describe("useMetricView", () => { + afterEach(() => { + capturedCallbacks = {}; + vi.clearAllMocks(); + clearMetricsMetadata(); + }); + + test("initial state is loading=true with autoStart (default)", () => { + const { result } = renderHook(() => + useMetricView("revenue", { measures: ["arr"] }), + ); + + expect(result.current.data).toBeNull(); + // autoStart triggers connect synchronously inside useEffect, so + // loading flips to true before the test inspects state. + expect(result.current.loading).toBe(true); + expect(result.current.error).toBeNull(); + // Phase 5: metadata is null when no bundle has been registered. 
+ expect(result.current.metadata).toBeNull(); + }); + + test("connects to /api/analytics/metric/ with the request payload", () => { + renderHook(() => useMetricView("revenue", { measures: ["arr"] })); + + expect(mockConnectSSE).toHaveBeenCalledWith( + expect.objectContaining({ + url: "/api/analytics/metric/revenue", + payload: JSON.stringify({ + measures: ["arr"], + format: "JSON", + }), + }), + ); + }); + + test("includes limit in the payload when provided", () => { + renderHook(() => + useMetricView("revenue", { measures: ["arr"], limit: 10 }), + ); + + const payload = JSON.parse( + (mockConnectSSE.mock.calls[0][0] as any).payload, + ); + expect(payload).toEqual({ + measures: ["arr"], + limit: 10, + format: "JSON", + }); + }); + + // ── Phase 2: dimensions + timeGrain payload assembly ──────────────────── + test("includes dimensions in the payload when non-empty", () => { + renderHook(() => + useMetricView("revenue", { + measures: ["arr"], + dimensions: ["region", "segment"], + }), + ); + + const payload = JSON.parse( + (mockConnectSSE.mock.calls[0][0] as any).payload, + ); + expect(payload).toEqual({ + measures: ["arr"], + dimensions: ["region", "segment"], + format: "JSON", + }); + }); + + test("omits dimensions from the payload when empty (ungrouped query)", () => { + renderHook(() => + useMetricView("revenue", { measures: ["arr"], dimensions: [] }), + ); + + const payload = JSON.parse( + (mockConnectSSE.mock.calls[0][0] as any).payload, + ); + expect(payload).toEqual({ + measures: ["arr"], + format: "JSON", + }); + expect(payload.dimensions).toBeUndefined(); + }); + + test("includes timeGrain in the payload when provided", () => { + renderHook(() => + useMetricView("revenue", { + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "month", + }), + ); + + const payload = JSON.parse( + (mockConnectSSE.mock.calls[0][0] as any).payload, + ); + expect(payload).toEqual({ + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "month", + format: 
"JSON", + }); + }); + + test("combines dimensions, timeGrain, and limit in the payload", () => { + renderHook(() => + useMetricView("revenue", { + measures: ["arr", "mrr"], + dimensions: ["created_at"], + timeGrain: "week", + limit: 50, + }), + ); + + const payload = JSON.parse( + (mockConnectSSE.mock.calls[0][0] as any).payload, + ); + expect(payload).toEqual({ + measures: ["arr", "mrr"], + dimensions: ["created_at"], + timeGrain: "week", + limit: 50, + format: "JSON", + }); + }); + + // ── Phase 3: filter payload assembly ───────────────────────────────────── + test("includes a leaf Predicate filter in the payload", () => { + renderHook(() => + useMetricView("revenue", { + measures: ["arr"], + dimensions: ["region"], + filter: { + member: "region", + operator: "equals", + values: ["EMEA"], + }, + } as any), + ); + + const payload = JSON.parse( + (mockConnectSSE.mock.calls[0][0] as any).payload, + ); + expect(payload.filter).toEqual({ + member: "region", + operator: "equals", + values: ["EMEA"], + }); + }); + + test("preserves recursive { and: [...] 
} filter structure verbatim", () => { + const filter = { + and: [ + { member: "region", operator: "in", values: ["EMEA", "APAC"] }, + { member: "segment", operator: "equals", values: ["Enterprise"] }, + ], + }; + renderHook(() => + useMetricView("revenue", { + measures: ["arr"], + filter, + } as any), + ); + + const payload = JSON.parse( + (mockConnectSSE.mock.calls[0][0] as any).payload, + ); + expect(payload.filter).toEqual(filter); + }); + + test("preserves deeply-nested OR-of-AND structure", () => { + const filter = { + or: [ + { + and: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { member: "segment", operator: "equals", values: ["Enterprise"] }, + ], + }, + { member: "region", operator: "equals", values: ["APAC"] }, + ], + }; + renderHook(() => + useMetricView("revenue", { + measures: ["arr"], + filter, + } as any), + ); + + const payload = JSON.parse( + (mockConnectSSE.mock.calls[0][0] as any).payload, + ); + expect(payload.filter).toEqual(filter); + }); + + test("omits filter from the payload when not provided", () => { + renderHook(() => useMetricView("revenue", { measures: ["arr"] })); + const payload = JSON.parse( + (mockConnectSSE.mock.calls[0][0] as any).payload, + ); + expect(payload.filter).toBeUndefined(); + }); + + test("populates data on a result event", async () => { + const { result } = renderHook(() => + useMetricView("revenue", { measures: ["arr"] }), + ); + + act(() => { + capturedCallbacks.onMessage?.({ + data: JSON.stringify({ + type: "result", + data: [{ arr: 1234567 }], + }), + }); + }); + + await waitFor(() => { + expect(result.current.loading).toBe(false); + }); + expect(result.current.data).toEqual([{ arr: 1234567 }]); + expect(result.current.error).toBeNull(); + }); + + test("sets error on a server error event", async () => { + const { result } = renderHook(() => + useMetricView("revenue", { measures: ["arr"] }), + ); + + act(() => { + capturedCallbacks.onMessage?.({ + data: JSON.stringify({ + type: "error", + 
error: "Bad measures", + code: "VALIDATION_ERROR", + }), + }); + }); + + await waitFor(() => { + expect(result.current.loading).toBe(false); + }); + expect(result.current.error).toBe("Bad measures"); + expect(result.current.data).toBeNull(); + }); + + test("surfaces a network failure via onError", async () => { + const { result } = renderHook(() => + useMetricView("revenue", { measures: ["arr"] }), + ); + + act(() => { + capturedCallbacks.onError?.(new Error("Failed to fetch")); + }); + + await waitFor(() => { + expect(result.current.loading).toBe(false); + }); + expect(result.current.error).toMatch(/Network error/); + }); + + test("in dev, surfaces the actual error message via onError", async () => { + // Vitest sets import.meta.env.DEV = true by default, mirroring Vite dev. + const { result } = renderHook(() => + useMetricView("revenue", { measures: ["arr"] }), + ); + + act(() => { + capturedCallbacks.onError?.( + new Error( + "[TABLE_OR_VIEW_NOT_FOUND] appkit_demo.public.revenue_metrics", + ), + ); + }); + + await waitFor(() => { + expect(result.current.loading).toBe(false); + }); + expect(result.current.error).toBe( + "[TABLE_OR_VIEW_NOT_FOUND] appkit_demo.public.revenue_metrics", + ); + }); + + test("in prod, falls back to the generic message via onError", async () => { + vi.stubEnv("DEV", false); + try { + const { result } = renderHook(() => + useMetricView("revenue", { measures: ["arr"] }), + ); + + act(() => { + capturedCallbacks.onError?.( + new Error("[TABLE_OR_VIEW_NOT_FOUND] schema.foo.bar"), + ); + }); + + await waitFor(() => { + expect(result.current.loading).toBe(false); + }); + expect(result.current.error).toBe( + "Unable to load data, please try again", + ); + } finally { + vi.unstubAllEnvs(); + } + }); + + test("does NOT auto-start when autoStart=false", () => { + renderHook(() => + useMetricView("revenue", { measures: ["arr"] }, { autoStart: false }), + ); + expect(mockConnectSSE).not.toHaveBeenCalled(); + }); + + test("aborts the in-flight 
request on unmount", () => { + const { unmount } = renderHook(() => + useMetricView("revenue", { measures: ["arr"] }), + ); + + expect(capturedCallbacks.signal?.aborted).toBe(false); + unmount(); + expect(capturedCallbacks.signal?.aborted).toBe(true); + }); + + test("ignores late SSE events arriving after the controller was aborted", async () => { + // Regression: under React StrictMode the first mount's cleanup aborts + // the controller it owns, but the server-side SSE writer can still + // emit a final `event: error` envelope on the already-open stream + // (cancellation hand-off). Without an early `aborted` guard in + // onMessage, that envelope hit the error branch and surfaced a + // transient user-visible error before the second mount's data arrived. + // The fix mirrors the guard already present at the top of `onError`. + const { result } = renderHook(() => + useMetricView("revenue", { measures: ["arr"] }), + ); + + // Simulate the abort that StrictMode's cleanup phase performs. + const sig = capturedCallbacks.signal as + | (AbortSignal & { _override?: boolean }) + | undefined; + Object.defineProperty(sig, "aborted", { value: true, configurable: true }); + + // Now deliver a late error event for that aborted stream. 
+ act(() => { + capturedCallbacks.onMessage?.({ + data: JSON.stringify({ + type: "error", + error: "Statement was canceled", + code: "EXECUTION_ERROR", + }), + }); + }); + + expect(result.current.error).toBeNull(); + expect(result.current.loading).toBe(true); + }); + + test("does not refetch when re-rendered with structurally identical inline args", () => { + const { rerender } = renderHook( + ({ args }: { args: any }) => useMetricView("revenue", args), + { + initialProps: { + args: { + measures: ["arr"], + dimensions: ["region"], + filter: { member: "region", operator: "in", values: ["EMEA"] }, + }, + }, + }, + ); + expect(mockConnectSSE).toHaveBeenCalledTimes(1); + + // Fresh `args` reference with identical content — simulates a consumer + // that forgot to wrap args in useMemo. + rerender({ + args: { + measures: ["arr"], + dimensions: ["region"], + filter: { member: "region", operator: "in", values: ["EMEA"] }, + }, + }); + rerender({ + args: { + measures: ["arr"], + dimensions: ["region"], + filter: { member: "region", operator: "in", values: ["EMEA"] }, + }, + }); + + expect(mockConnectSSE).toHaveBeenCalledTimes(1); + }); + + test("refetches when args content actually changes", () => { + const { rerender } = renderHook( + ({ args }: { args: any }) => useMetricView("revenue", args), + { + initialProps: { args: { measures: ["arr"] } }, + }, + ); + expect(mockConnectSSE).toHaveBeenCalledTimes(1); + + rerender({ args: { measures: ["mrr"] } }); + expect(mockConnectSSE).toHaveBeenCalledTimes(2); + }); + + test("rejects an empty metric key", () => { + expect(() => + // Cast to any so the runtime guard ("non-empty string") is what fails, + // not the compile-time MetricKey union (which is augmented in the + // sibling type-tests file). 
+ renderHook(() => useMetricView("" as any, { measures: ["arr"] } as any)), + ).toThrowError(/non-empty string/); + }); + + test("rejects ARROW format with a clear error (out of v1 scope)", async () => { + const { result } = renderHook(() => + useMetricView("revenue", { measures: ["arr"] }, { + format: "ARROW", + } as any), + ); + + act(() => { + capturedCallbacks.onMessage?.({ + data: JSON.stringify({ + type: "arrow", + statement_id: "s-1", + }), + }); + }); + + await waitFor(() => { + expect(result.current.loading).toBe(false); + }); + expect(result.current.error).toMatch(/ARROW format is not supported/); + }); +}); diff --git a/packages/appkit-ui/src/react/hooks/index.ts b/packages/appkit-ui/src/react/hooks/index.ts index a425b0109..7e586855b 100644 --- a/packages/appkit-ui/src/react/hooks/index.ts +++ b/packages/appkit-ui/src/react/hooks/index.ts @@ -1,17 +1,33 @@ export type { AnalyticsFormat, + DimensionKey, + Filter, InferResultByFormat, InferRowType, InferServingChunk, InferServingRequest, InferServingResponse, + MeasureKey, + MetricColumnMetadata, + MetricFilterOperator, + MetricKey, + MetricMetadata, + MetricRegistry, + MetricRow, + MetricSemanticMetadata, PluginRegistry, + Predicate, QueryRegistry, ServingAlias, ServingEndpointRegistry, + TimeGrain, TypedArrowTable, UseAnalyticsQueryOptions, UseAnalyticsQueryResult, + UseMetricViewArgs, + UseMetricViewOptions, + UseMetricViewResult, + UseMetricViewRow, } from "./types"; export { useAnalyticsQuery } from "./use-analytics-query"; export { @@ -19,6 +35,7 @@ export { type UseChartDataResult, useChartData, } from "./use-chart-data"; +export { useMetricView } from "./use-metric-view"; export { usePluginClientConfig } from "./use-plugin-config"; export { type UseServingInvokeOptions, diff --git a/packages/appkit-ui/src/react/hooks/types.ts b/packages/appkit-ui/src/react/hooks/types.ts index 03e943e2a..2da830587 100644 --- a/packages/appkit-ui/src/react/hooks/types.ts +++ 
b/packages/appkit-ui/src/react/hooks/types.ts @@ -140,6 +140,354 @@ export interface ServingClientConfig { aliases: string[]; } +// ============================================================================ +// Metric View Registry (Phase 2 — measures + dimensions + time grain) +// ============================================================================ + +/** + * Metric View Registry — populated via TypeScript module augmentation by the + * AppKit type-generator (parallel to {@link QueryRegistry}). + * + * Each registered metric key contributes an entry whose shape carries the + * FQN, lane, the structured measure / dimension lists harvested from the + * build-time DESCRIBE TABLE EXTENDED ... AS JSON call, and (Phase 5) the + * per-column semantic metadata bundle (display name, format spec, + * description, time-grain options). + * + * @example + * ```ts + * declare module "@databricks/appkit-ui/react" { + * interface MetricRegistry { + * revenue: { + * key: "revenue"; + * source: "appkit_demo.public.revenue_metrics"; + * lane: "sp"; + * measures: { arr: number; mrr: number }; + * dimensions: { region: string; created_at: string }; + * measureKeys: "arr" | "mrr"; + * dimensionKeys: "region" | "created_at"; + * timeGrains: "day" | "week" | "month"; + * metadata: { + * measures: { + * arr: { type: "DECIMAL(38,2)"; display_name: "Annual Recurring Revenue"; format: "$#,##0.00" }; + * mrr: { type: "DECIMAL(38,2)" }; + * }; + * dimensions: { + * region: { type: "STRING" }; + * created_at: { type: "TIMESTAMP"; time_grain: readonly ["day", "week", "month"] }; + * }; + * }; + * }; + * } + * } + * ``` + */ +// biome-ignore lint/suspicious/noEmptyInterface: intentionally empty — populated via module augmentation +export interface MetricRegistry {} + +/** Resolves to MetricRegistry keys if any are populated, otherwise string. */ +export type MetricKey = AugmentedRegistry extends never + ? 
string + : AugmentedRegistry; + +/** The union of declared measure names for a registered metric key. */ +export type MeasureKey = K extends AugmentedRegistry + ? MetricRegistry[K] extends { measureKeys: infer M } + ? M extends string + ? M + : string + : string + : string; + +/** The union of declared dimension names for a registered metric key. */ +export type DimensionKey = K extends AugmentedRegistry + ? MetricRegistry[K] extends { dimensionKeys: infer D } + ? D extends string + ? D + : never + : never + : never; + +/** + * The union of allowed time-grains for a registered metric key — derived from + * the YAML 1.1 `time_grain` attributes on time-typed dimensions. Resolves to + * `string` for unregistered keys (so dynamic callers don't compile-error) and + * to `never` for registered metrics that have zero time-typed dimensions. + */ +export type TimeGrain = K extends AugmentedRegistry + ? MetricRegistry[K] extends { timeGrains: infer G } + ? G extends string + ? G + : never + : never + : string; + +/** The "measures" entry on a registered metric — a record of name → row type. */ +type MetricMeasureMap = K extends AugmentedRegistry + ? MetricRegistry[K] extends { measures: infer M } + ? M extends Record + ? M + : Record + : Record + : Record; + +/** The "dimensions" entry on a registered metric — a record of name → row type. */ +type MetricDimensionMap = K extends AugmentedRegistry + ? MetricRegistry[K] extends { dimensions: infer D } + ? D extends Record + ? D + : Record + : Record + : Record; + +/** Full result row type for a registered metric (measures + dimensions). */ +export type MetricRow = MetricMeasureMap & MetricDimensionMap; + +// ============================================================================ +// Metric View Semantic Metadata (Phase 5 — display names, format specs, ...) 
+// ============================================================================ + +/** + * The per-column semantic-metadata shape exposed via `useMetricView`'s + * `metadata` return field and `formatLabel` / `formatValue`'s second argument. + * + * Mirrors the `metrics.metadata.json` build-time artifact one-for-one. Every + * field except `type` is optional — the YAML 1.1 metric view spec marks them + * as opt-in, so the consumer's chart code defends against absence (e.g. + * `formatLabel` falls back to camelCase humanization when `display_name` is + * absent). + */ +export interface MetricColumnMetadata { + /** Databricks SQL type ("STRING", "DECIMAL(38,2)", "TIMESTAMP", ...). */ + type: string; + /** YAML 1.1 `display_name` — used by `formatLabel` as the canonical title. */ + display_name?: string; + /** + * YAML 1.1 `format` — printf-style spec (`"$#,##0.00"`, `"0.0%"`, etc). + * Consumed by `formatValue` (returns formatted string) and `toD3Format` + * (returns d3-format-compatible string for Plotly's `tickformat` / + * ECharts' `valueFormatter`). + */ + format?: string; + /** Column-level documentation, surfaced in tooltips by chart components. */ + description?: string; + /** + * Allowed time-grains (only present on time-typed dimensions). Phase 2 + * widening: lets the call-site narrow `timeGrain` to the dim's allowed list. + */ + time_grain?: readonly string[]; +} + +/** + * One metric's complete semantic-metadata bundle. + * + * Mirrors the entry shape inside `metrics.metadata.json`. Returned verbatim + * by `useMetricView` in its `metadata` field — TypeScript narrows + * `metadata.measures.` and `metadata.dimensions.` from the + * registry's per-metric `metadata` augmentation when `K` is a registered key. + * + * Server-side concerns (UC FQN, execution lane) are deliberately NOT part of + * this shape — they live in `metric.json` and never reach the client bundle. 
+ */ +export interface MetricSemanticMetadata { + measures: Record; + dimensions: Record; +} + +/** + * Type-narrowed metadata for a registered metric `K`. + * + * When `K` is a registered key, resolves to the registry's `metadata` shape + * (per-column literal-typed `display_name` / `format` / `time_grain`). When + * `K` is `string` (no augmentation), resolves to the structural + * {@link MetricSemanticMetadata}. + * + * Consumers usually destructure: `metadata.measures.arr.format`, + * `metadata.dimensions.created_at.time_grain`, etc. + */ +export type MetricMetadata = K extends AugmentedRegistry + ? MetricRegistry[K] extends { metadata: infer Meta } + ? Meta + : MetricSemanticMetadata + : MetricSemanticMetadata; + +// ============================================================================ +// Filter Specification (Phase 3 — recursive AND/OR with 12 v1 operators) +// ============================================================================ + +/** + * The v1 filter operator vocabulary. Twelve operators, exactly: + * + * - Equality: `equals`, `notEquals` + * - Set membership: `in`, `notIn` + * - Range: `gt`, `gte`, `lt`, `lte` + * - String search: `contains`, `notContains` + * - NULL checks: `set`, `notSet` + * + * Operator-vs-type rules (enforced server-side): + * - Range ops (`gt`, `gte`, `lt`, `lte`) require a numeric / date-typed dim. + * - String ops (`contains`, `notContains`) require a string-typed dim. + * - The remaining six accept any dimension type. + * + * Cardinality rules (enforced server-side): + * - Single-value ops (`equals`, `notEquals`, `gt`, `gte`, `lt`, `lte`, + * `contains`, `notContains`) require exactly one value. + * - List ops (`in`, `notIn`) require at least one value. + * - NULL ops (`set`, `notSet`) reject `values` entirely. 
+ */ +export type MetricFilterOperator = + | "equals" + | "notEquals" + | "in" + | "notIn" + | "gt" + | "gte" + | "lt" + | "lte" + | "contains" + | "notContains" + | "set" + | "notSet"; + +/** + * A single filter predicate — leaf node of the recursive {@link Filter} tree. + * + * `member` narrows to the union of dimension names declared on the metric + * view (HAVING — filtering on measures — is reserved for v1.5). + * + * `values` is optional; the validator rejects requests where `values` is + * present for `set`/`notSet` and absent for every other operator. + */ +export interface Predicate { + member: DimensionKey; + operator: MetricFilterOperator; + values?: ReadonlyArray; +} + +/** + * The recursive filter type for metric views. + * + * A `Filter` is one of: + * - a leaf {@link Predicate} + * - an `{ and: Filter[] }` group — every child predicate must match + * - an `{ or: Filter[] }` group — at least one child predicate must match + * + * The shape supports nesting from v1; flat consumers can pass an array of + * predicates either via `{ and: [...] }` (explicit AND) or — since the wire + * shape carries the full union — by composing a single-level `{ and }` + * wrapper on the client. + * + * Server-side, recursion is depth-capped so a malformed or hostile payload + * cannot stack-overflow the validator. + */ +export type Filter = + | Predicate + | { and: ReadonlyArray> } + | { or: ReadonlyArray> }; + +/** + * Phase 2 args: measures + dimensions + optional time grain. + * + * Generics: + * - `K` — the metric key (narrows to the registry literal at the call site). + * - `M` — the chosen measure tuple (narrows to the literal subset). + * - `D` — the chosen dimension tuple (narrows to the literal subset). 
+ * + * Use `const` modifiers on `M` and `D` at the call site for literal-preserving + * inference (matches the Phase 1 measures-only pattern): + * + * ```tsx + * useMetricView("revenue", { + * measures: ["arr"] as const, + * dimensions: ["region", "created_at"] as const, + * timeGrain: "month", + * }); + * ``` + */ +export interface UseMetricViewArgs< + K extends MetricKey, + M extends ReadonlyArray> = ReadonlyArray>, + D extends ReadonlyArray> = ReadonlyArray>, +> { + measures: M; + /** + * Dimensions to GROUP BY. Empty (or omitted) → ungrouped query. Only + * dimensions declared on the metric view are accepted. + */ + dimensions?: D; + /** + * Time-grain truncation applied to every time-typed dimension in + * `dimensions`. Narrows to the union of grains the metric view declares. + * + * If the metric view has no time-typed dimensions, this field cannot be set + * (the type resolves to `never`). + * + * Setting `timeGrain` without including any time-typed dimension in + * `dimensions` is a server-side 400. + */ + timeGrain?: TimeGrain; + /** + * Optional structured filter — recursive AND/OR composition of predicates. + * + * `member` narrows to the metric's declared dimension names (the IDE + * catches typos at the call site). `operator` narrows to the 12 v1 + * operators. All `values` are bound as parameters server-side; nothing + * from the request body flows into the rendered SQL string. + * + * The filter shape is recursive from day one — flat callers can wrap a + * predicate list in `{ and: [...] }`; nested callers can mix `and`/`or` + * groups freely. The server enforces a depth cap to prevent stack abuse. + */ + filter?: Filter; + /** Optional row cap. */ + limit?: number; +} + +/** + * Row narrowing helper: produce the row type containing only the chosen + * measures and dimensions, matching what the server projects. 
+ * + * If callers omit dimensions, the row contains only measures; if callers omit + * measures (not allowed at v1, but the type stays sound), the row contains + * only dimensions. + */ +export type UseMetricViewRow< + K extends MetricKey, + M extends ReadonlyArray>, + D extends ReadonlyArray>, +> = Pick, (M[number] | D[number]) & keyof MetricRow>; + +/** Phase 2 options: format passthrough + autoStart toggle. */ +export interface UseMetricViewOptions { + format?: F; + /** Whether to fire the request automatically on mount. Default: true. */ + autoStart?: boolean; + /** Maximum size of the serialized request body in bytes. Default: 100 KiB. */ + maxParametersSize?: number; +} + +/** + * Phase 5 result shape: `{ data, metadata, loading, error }`. + * + * `metadata` is the build-time-bundled semantic metadata for the queried + * metric (measures + dimensions only — not other metrics in the registry). + * It is available **before** the data loads (it comes from the build-time + * artifact, not the network) and is stable across re-renders for the same + * metric key (the runtime registry returns the same object reference). + * + * When the consuming app has not registered the metadata bundle (via + * `registerMetricsMetadata` in `@databricks/appkit-ui/format`), `metadata` + * resolves to `null`. The PRD's contract is "available even when data is + * null" — but the bundle itself is opt-in, so a `null` here is the + * unregistered-app signal. 
+ */ +export interface UseMetricViewResult { + data: TRow[] | null; + metadata: TMetadata | null; + loading: boolean; + error: string | null; +} + // ============================================================================ // Serving Endpoint Registry // ============================================================================ diff --git a/packages/appkit-ui/src/react/hooks/use-metric-view.ts b/packages/appkit-ui/src/react/hooks/use-metric-view.ts new file mode 100644 index 000000000..039e23ae9 --- /dev/null +++ b/packages/appkit-ui/src/react/hooks/use-metric-view.ts @@ -0,0 +1,265 @@ +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { getMetricMetadata } from "@/format"; +import { connectSSE } from "@/js"; +import type { + AnalyticsFormat, + DimensionKey, + MeasureKey, + MetricKey, + MetricMetadata, + UseMetricViewArgs, + UseMetricViewOptions, + UseMetricViewResult, + UseMetricViewRow, +} from "./types"; + +/** + * Module-level singleton — `new TextEncoder()` is cheap but constructing + * one per byte-count call is still wasted allocation. The encoder is + * stateless, so a single shared instance is safe. + */ +const TEXT_ENCODER = new TextEncoder(); + +/** + * Subscribe to a metric-view query over SSE. + * + * Phase 5 surface — accepts `{ measures, dimensions?, timeGrain?, filter?, limit? }`. + * The result row type narrows at the call site to + * `Pick, M[number] | D[number]>` based on the chosen measures + * and dimensions, so chart code receives the exact shape it asked for. + * + * Returns `{ data, metadata, loading, error }`. The `metadata` field carries + * the build-time-bundled semantic metadata for the queried metric (display + * names, format specs, descriptions). `metadata` is available **before** the + * data loads and is stable across re-renders for the same metric key. 
+ * + * Use `as const` on the `measures` and `dimensions` arrays at the call site + * to preserve literal types (the same pattern used elsewhere in AppKit for + * registry-narrowed APIs). + * + * @example + * ```tsx + * const { data, metadata, loading, error } = useMetricView("revenue", { + * measures: ["arr"] as const, + * dimensions: ["region", "created_at"] as const, + * timeGrain: "month", + * }); + * // data: Array<{ arr: number; region: string; created_at: string }> | null + * // metadata.measures.arr.format → "$#,##0.00" + * // metadata.measures.arr.display_name → "Annual Recurring Revenue" + * ``` + */ +export function useMetricView< + K extends MetricKey = MetricKey, + const M extends ReadonlyArray> = ReadonlyArray>, + const D extends ReadonlyArray> = ReadonlyArray< + DimensionKey + >, + F extends AnalyticsFormat = "JSON", +>( + metricKey: K, + args: UseMetricViewArgs, + options: UseMetricViewOptions = {} as UseMetricViewOptions, +): UseMetricViewResult, MetricMetadata> { + if (!metricKey || metricKey.trim().length === 0) { + throw new Error("useMetricView: 'metricKey' must be a non-empty string."); + } + + const format = options.format ?? "JSON"; + const autoStart = options.autoStart ?? true; + const maxParametersSize = options.maxParametersSize ?? 100 * 1024; + + const url = `/api/analytics/metric/${encodeURIComponent(metricKey)}`; + + type ResultType = UseMetricViewRow; + const [data, setData] = useState(null); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const abortControllerRef = useRef(null); + + // Read the build-time semantic-metadata bundle from the format registry. + // The lookup is keyed only by `metricKey`, so the returned reference is + // stable across re-renders for the same metric (the PRD's contract: + // "metadata is stable, not reactive"). 
Memoizing here is also defense- + // in-depth — even if a customer hot-reloads the metadata bundle, this hook + // still returns the same object reference for the lifetime of the render + // cycle. + const metadata = useMemo( + () => getMetricMetadata(metricKey) as MetricMetadata | null, + [metricKey], + ); + + // Stable serialization key — defends against consumers passing inline + // `args` (new object every render) without `useMemo`. JSON.stringify runs + // once per render and is bounded by `maxParametersSize`; the payload memo + // (and the downstream effect) only re-fires when the request body actually + // changes by content. Without this, every render with fresh references + // would reset state and refetch, producing an infinite loop. + const argsKey = JSON.stringify(args); + + // Hold the latest `args` in a ref so the payload memo can read fresh + // values without listing each `args.*` field as a dep. The ref always + // matches the closed-over `argsKey`: when content changes, both update + // in the same render before the memo body runs. + const argsRef = useRef(args); + argsRef.current = args; + + // biome-ignore lint/correctness/useExhaustiveDependencies: argsKey is the trigger; args read via argsRef + const payload = useMemo(() => { + try { + const a = argsRef.current; + const dimensions = a.dimensions ? [...a.dimensions] : undefined; + const body: Record = { + measures: [...a.measures], + format, + }; + if (dimensions && dimensions.length > 0) { + body.dimensions = dimensions; + } + if (typeof a.timeGrain === "string" && a.timeGrain.length > 0) { + body.timeGrain = a.timeGrain; + } + if (a.filter !== undefined) { + // Filter is a recursive AND/OR/Predicate tree; preserve structure + // verbatim — the server validates and translates it into SQL. 
+ body.filter = a.filter; + } + if (typeof a.limit === "number") { + body.limit = a.limit; + } + const serialized = JSON.stringify(body); + // Avoid the Blob allocation just to count bytes — it's a hot path + // on dashboards with many metric tiles. `TextEncoder.encode()` is + // O(n) over the serialized bytes (same big-O as Blob's internal + // encoding) but skips the Blob wrapper allocation. The encoder is + // hoisted to module scope so we don't allocate one per call either. + const sizeInBytes = TEXT_ENCODER.encode(serialized).length; + if (sizeInBytes > maxParametersSize) { + throw new Error( + "useMetricView: Request body size exceeds the maximum allowed size", + ); + } + return serialized; + } catch (err) { + console.error("useMetricView: Failed to serialize request body", err); + return null; + } + }, [argsKey, format, maxParametersSize]); + + const start = useCallback(() => { + if (payload === null) { + setError("Failed to serialize metric request body"); + return; + } + + if (abortControllerRef.current) { + abortControllerRef.current.abort(); + } + + setLoading(true); + setError(null); + setData(null); + + const abortController = new AbortController(); + abortControllerRef.current = abortController; + + connectSSE({ + url, + payload, + signal: abortController.signal, + onMessage: async (message) => { + // Bail silently if the controller we own is already aborted. Mirrors + // the guard at the top of `onError` below. Without this, the server- + // side SSE writer's final error envelope (emitted in response to our + // own cleanup-driven abort) lands on this handler, hits the + // `parsed.type === "error"` branch, and surfaces a transient error to + // the user — visible in dev under React StrictMode's double-mount, + // hidden in prod where StrictMode is a no-op. The next mount/refetch + // creates a fresh controller and runs cleanly. 
+ if (abortController.signal.aborted) return; + try { + const parsed = JSON.parse(message.data); + + if (parsed.type === "result") { + setLoading(false); + setData(parsed.data as ResultType[]); + return; + } + + if (parsed.type === "arrow") { + // Arrow path is wired by the analytics route but Phase 1 of + // metric views does not officially support ARROW (out-of-scope + // per the PRD). Surface the absence as a clear error so apps + // using a future ARROW path get a deterministic signal. + setLoading(false); + setError( + "useMetricView: ARROW format is not supported at v1. Use format: 'JSON'.", + ); + return; + } + + if (parsed.type === "error" || parsed.error || parsed.code) { + const rawMsg = + parsed.error || parsed.message || "Unable to execute metric"; + // Defense-in-depth: do not echo raw warehouse / server error + // text (which can contain SQL fragments, FQNs, schema detail) to + // the user in production. Dev mode keeps the passthrough so + // developers can diagnose schema-not-found, auth-failed, etc. + // The full message is still logged via console.error for ops. + const userMsg = import.meta.env.DEV + ? rawMsg + : "Unable to execute metric"; + setLoading(false); + setError(userMsg); + if (parsed.code || rawMsg !== userMsg) { + console.error( + `[useMetricView] Code: ${parsed.code ?? "(none)"}, Message: ${rawMsg}`, + ); + } + return; + } + } catch (err) { + console.warn("[useMetricView] Malformed message received", err); + } + }, + onError: (err) => { + if (abortController.signal.aborted) return; + setLoading(false); + + let userMessage = "Unable to load data, please try again"; + + if (err instanceof Error) { + if (err.name === "AbortError") { + userMessage = "Request timed out, please try again"; + } else if (err.message.includes("Failed to fetch")) { + userMessage = "Network error. 
Please check your connection."; + } else if (import.meta.env.DEV && err.message) { + // In dev, surface the actual error so developers can diagnose + // schema-not-found, auth-failed, and other server-thrown + // failures that didn't make it into an SSE error event. + // Production keeps the generic message — the full error is + // still in the console.error below for ops. + userMessage = err.message; + } + console.error("[useMetricView] Error", { + metricKey, + error: err.message, + stack: err.stack, + }); + } + setError(userMessage); + }, + }); + }, [metricKey, payload, url]); + + useEffect(() => { + if (autoStart) { + start(); + } + return () => { + abortControllerRef.current?.abort(); + }; + }, [start, autoStart]); + + return { data, metadata, loading, error }; +} diff --git a/packages/appkit-ui/tsdown.config.ts b/packages/appkit-ui/tsdown.config.ts index f55a51457..50f2f8a49 100644 --- a/packages/appkit-ui/tsdown.config.ts +++ b/packages/appkit-ui/tsdown.config.ts @@ -9,6 +9,7 @@ export default defineConfig([ "src/js/beta.ts", "src/react/index.ts", "src/react/beta.ts", + "src/format/index.ts", ], outDir: "dist", platform: "browser", diff --git a/packages/appkit/src/connectors/sql-warehouse/client.ts b/packages/appkit/src/connectors/sql-warehouse/client.ts index d0a1c1816..ce52a93dc 100644 --- a/packages/appkit/src/connectors/sql-warehouse/client.ts +++ b/packages/appkit/src/connectors/sql-warehouse/client.ts @@ -243,6 +243,15 @@ export class SQLWarehouseConnector { ); } + // Preserve native AbortError identity. Without this, the wrap below + // overwrites `name` (to "ExecutionError") and the downstream + // stream-manager._categorizeError can no longer distinguish a + // legitimate client cancellation from a real upstream failure — + // the duck-type `statusCode` fallback would route every aborted + // SQL through SSEErrorCode.UPSTREAM_ERROR. 
+ if (error instanceof Error && error.name === "AbortError") { + throw error; + } if (error instanceof AppKitError) { throw error; } @@ -383,6 +392,11 @@ export class SQLWarehouseConnector { }); // error logging is handled by executeStatement's catch block (gated on isAborted) + if (error instanceof Error && error.name === "AbortError") { + // Preserve AbortError identity for stream-manager classification — + // see executeStatement's catch for the rationale. + throw error; + } if (error instanceof AppKitError) { throw error; } diff --git a/packages/appkit/src/plugin/plugin.ts b/packages/appkit/src/plugin/plugin.ts index 75d994d88..f5f825431 100644 --- a/packages/appkit/src/plugin/plugin.ts +++ b/packages/appkit/src/plugin/plugin.ts @@ -323,8 +323,12 @@ export abstract class Plugin< * @throws AuthenticationError in production when no user header is present. */ protected resolveUserId(req: express.Request): string { - const userId = req.header("x-forwarded-user"); - if (userId) return userId; + // Trim before truthiness — a whitespace-only `x-forwarded-user` would + // otherwise pass through as a valid identity, and downstream consumers + // (cache key derivation, OBO proxy, telemetry) would treat distinct + // misconfigured callers as one. Reject it as a missing identity instead. + const userId = req.header("x-forwarded-user")?.trim(); + if (userId && userId.length > 0) return userId; if (process.env.NODE_ENV === "development") return getCurrentUserId(); throw AuthenticationError.missingToken( "Missing x-forwarded-user header. Cannot resolve user ID.", @@ -342,8 +346,12 @@ export abstract class Plugin< * In development mode (`NODE_ENV=development`), skips user impersonation instead of throwing. 
*/ asUser(req: express.Request): this { - const token = req.header("x-forwarded-access-token"); - const userId = req.header("x-forwarded-user"); + const token = req.header("x-forwarded-access-token")?.trim(); + // Trim before truthiness — a whitespace-only header would otherwise + // pass downstream as a valid identity / token. The cache-key path + // would collapse distinct misconfigured callers into a shared scope, + // and the OBO proxy would attempt to authenticate with whitespace. + const userId = req.header("x-forwarded-user")?.trim(); const isDev = process.env.NODE_ENV === "development"; // In local development, skip user impersonation diff --git a/packages/appkit/src/plugins/analytics/analytics.ts b/packages/appkit/src/plugins/analytics/analytics.ts index d591e32f0..59a962d69 100644 --- a/packages/appkit/src/plugins/analytics/analytics.ts +++ b/packages/appkit/src/plugins/analytics/analytics.ts @@ -8,16 +8,25 @@ import type { } from "shared"; import { SQLWarehouseConnector } from "../../connectors"; import { getWarehouseId, getWorkspaceClient } from "../../context"; +import { AppKitError, ExecutionError } from "../../errors"; import { createLogger } from "../../logging/logger"; import { Plugin, toPlugin } from "../../plugin"; import type { PluginManifest } from "../../registry"; import { queryDefaults } from "./defaults"; import manifest from "./manifest.json"; +import { + buildMetricSql, + composeMetricCacheKey, + deriveMetricExecutorKey, + loadMetricRegistry, + validateMetricRequest, +} from "./metric"; import { QueryProcessor } from "./query"; import type { AnalyticsQueryResponse, IAnalyticsConfig, IAnalyticsQueryRequest, + MetricRegistration, } from "./types"; const logger = createLogger("analytics"); @@ -33,6 +42,25 @@ export class AnalyticsPlugin extends Plugin { private SQLClient: SQLWarehouseConnector; private queryProcessor: QueryProcessor; + /** + * Metric-view registry loaded from `config/queries/metric.json` at server + * startup. 
Keys are stable; values carry the FQN, lane, and known + * measure/dimension names. Empty when no `metric.json` is present. + */ + private metricRegistry: Record = {}; + + /** + * Latched error from the most recent `loadMetricRegistry()` attempt. + * `null` means the registry loaded cleanly (or `metric.json` was absent + * — also fine; metric views are an opt-in feature). When non-null, every + * `/metric/:key` request returns 503 with code `METRIC_REGISTRY_LOAD_FAILED` + * so deployment errors (malformed JSON, schema violations, missing + * required fields) surface as a clear server status rather than + * masquerading as 404s for every metric. Surfaces via the route only — + * the rest of the analytics plugin stays available. + */ + private metricRegistryLoadError: string | null = null; + constructor(config: IAnalyticsConfig) { super(config); this.config = config; @@ -44,6 +72,34 @@ export class AnalyticsPlugin extends Plugin { }); } + /** + * Eagerly load the metric registry. + * + * `setup()` does not throw — failures here would otherwise prevent the + * whole app (including unrelated plugins) from starting, which is too + * blunt for what is conceptually a single-route configuration error. + * Instead, latch the failure on `metricRegistryLoadError`: the metric + * route then returns 503 with a clear code so deployment pipelines + the + * /metric/:key surface itself reflect the broken state. Other analytics + * routes (`/query/:key`, `/arrow-result/:jobId`) continue to work. + * + * The previous behavior — empty `metricRegistry` plus a warn log — made + * malformed `metric.json` indistinguishable from missing keys (every + * metric returned 404 "Metric not found"), which masked deployment + * errors and matched a recurring review pattern across multiple rounds. + */ + async setup(): Promise { + try { + this.metricRegistry = await loadMetricRegistry(); + this.metricRegistryLoadError = null; + } catch (err) { + const reason = err instanceof Error ? 
err.message : String(err); + logger.warn("Failed to load metric registry: %s", reason); + this.metricRegistry = {}; + this.metricRegistryLoadError = reason; + } + } + injectRoutes(router: IAppRouter) { // Arrow data downloads always run as service principal and bypass the // interceptor chain (execute/executeStream). The original query execution @@ -66,6 +122,15 @@ export class AnalyticsPlugin extends Plugin { await this._handleQueryRoute(req, res); }, }); + + this.route(router, { + name: "metric", + method: "post", + path: "/metric/:key", + handler: async (req: express.Request, res: express.Response) => { + await this._handleMetricRoute(req, res); + }, + }); } /** @@ -209,6 +274,301 @@ export class AnalyticsPlugin extends Plugin { ); } + /** + * Handle a metric-view query against `POST /api/analytics/metric/:key`. + * + * Phase 4 surface: + * - body validated by zod (rejects unknown measures, dimensions, + * operators, and timeGrain values per the registry's build-time + * metadata) + * - SQL constructed via {@link buildMetricSql} with sorted SELECT list, + * parameterized filter, and `GROUP BY ALL` when dimensions are present + * - response uses the same SSE envelope as the existing query route + * - reuses the interceptor chain via `executeStream()` (telemetry, + * timeout, retry, cache) — default 1-hour TTL via `queryDefaults` + * - OBO dispatch: `lane === "obo"` entries route through `this.asUser(req)`, + * same Proxy pattern that `.obo.sql` files use today; SP entries route + * through the plugin's default executor. + * - Cache executor key: `"sp"` for SP-lane entries; sha256 hash of the + * user identity for OBO entries (raw `x-forwarded-user` value never + * reaches the cache layer — see {@link deriveMetricExecutorKey}). 
+ */ + async _handleMetricRoute( + req: express.Request, + res: express.Response, + ): Promise { + const { key } = req.params; + + logger.debug(req, "Executing metric: %s", key); + + const event = logger.event(req); + event?.setComponent("analytics", "executeMetric").setContext("analytics", { + metric_key: key, + plugin: this.name, + }); + + if (!key) { + res.status(400).json({ error: "metric key is required" }); + return; + } + + // Surface a startup-time registry-load failure on the route. Without + // this, a malformed metric.json would yield 404 for every key — which + // looks identical to "key never registered" and hides the deployment + // error. The full reason goes to telemetry only. + if (this.metricRegistryLoadError !== null) { + event?.setContext("analytics", { + metric_registry_load_error: this.metricRegistryLoadError, + }); + res.status(503).json({ + error: "Metric registry not available", + code: "METRIC_REGISTRY_LOAD_FAILED", + }); + return; + } + + const registration = this.metricRegistry[key]; + if (!registration) { + // Don't echo the user-supplied `key` back in the public response. + // Confirming "metric X is not registered" lets an unauthenticated + // probe enumerate registered keys by elimination. The 404 status + // stays — it's useful for tooling — but the body is generic; full + // detail goes to telemetry only. + event?.setContext("analytics", { unknown_metric_key: key }); + res.status(404).json({ error: "Metric not found" }); + return; + } + + // Fail-closed: if the build-time DESCRIBE never produced a measure list + // for this metric, the body validator falls open (no allowlist) and the + // SQL constructor would let arbitrary measure references through to + // the warehouse. Refuse the request so an empty/missing + // `metrics.metadata.json` cannot become a schema-enumeration vector. + // The clear server-side fix is to (re-)run `pnpm exec appkit metric sync`. 
+ // + // We deliberately do NOT gate on `knownDimensions.length === 0` here — + // a measure-only KPI metric legitimately has zero dimensions and must + // continue to work. The validator-side tightening below rejects + // `dimensions` / `filter` payloads against an empty `knownDimensions`, + // which closes the fall-open path without blocking the legitimate case. + if (registration.knownMeasures.length === 0) { + logger.warn( + req, + "Metric %s registered but build-time metadata is empty — refusing the request. Run `appkit metric sync` to populate metrics.metadata.json.", + key, + ); + res.status(503).json({ + error: "Metric registry not initialized", + code: "METRIC_REGISTRY_NOT_READY", + }); + return; + } + + // Single try/catch covering both body validation and executor setup — + // OBO lane's `asUser(req)` and `resolveUserId(req)` can throw on a + // missing/invalid `x-forwarded-access-token` (AuthenticationError). If + // they bubble up unwrapped, the route returns a malformed response + // outside the canonical error envelope. We compute the executor inside + // the same `try` so the auth error lands on the canonical 401 path. + let request: ReturnType; + let executor: AnalyticsPlugin; + let executorKey: string; + let isAsUser: boolean; + try { + request = validateMetricRequest(registration, req.body ?? {}); + isAsUser = registration.lane === "obo"; + // OBO lane: dispatch via the existing asUser(req) Proxy — same pattern + // used by .obo.sql files in `_handleQueryRoute`. The Proxy threads the + // user's `x-forwarded-access-token` through every Databricks call so + // the warehouse executes the query under the end user's identity. + executor = isAsUser ? this.asUser(req) : this; + // OBO cache key: hash the user identity so the raw email/principal name + // never reaches the cache layer. SP cache key: literal "sp" — the cache + // is shared across every caller of the SP-lane metric. 
+ executorKey = deriveMetricExecutorKey({ + lane: registration.lane, + userIdentity: isAsUser ? this.resolveUserId(req) : null, + }); + } catch (err) { + if (err instanceof AppKitError) { + res.status(err.statusCode).json({ + error: err.message, + code: err.code, + }); + return; + } + // Validator throws ValidationError; asUser/resolveUserId throw + // AuthenticationError — both are AppKitError. This branch only fires + // for unexpected errors. Hard-code the public message (do not echo + // err.message — it could contain stack-adjacent internals from any + // unwrapped throw site). The full detail goes to telemetry only. + event?.setContext("analytics", { + unexpected_error: err instanceof Error ? err.message : String(err), + metric_key: key, + }); + logger.warn( + req, + "Unexpected throw during metric request setup for %s: %s", + key, + err instanceof Error ? err.message : String(err), + ); + res.status(400).json({ error: "Invalid request body" }); + return; + } + + const format = request.format ?? "JSON"; + + const queryParameters = + format === "ARROW" + ? { + formatParameters: { + disposition: "EXTERNAL_LINKS", + format: "ARROW_STREAM", + }, + type: "arrow", + } + : { type: "result" }; + + const cacheKey = composeMetricCacheKey({ + metricKey: key, + measures: request.measures, + dimensions: request.dimensions, + timeGrain: request.timeGrain, + filter: request.filter, + format, + executorKey, + limit: request.limit, + }); + + const defaultConfig: PluginExecuteConfig = { + ...queryDefaults, + cache: { + ...queryDefaults.cache, + cacheKey, + }, + }; + + const streamExecutionSettings: StreamExecutionSettings = { + default: defaultConfig, + }; + + await executor.executeStream( + res, + async (signal) => { + try { + const { statement, parameters } = buildMetricSql( + registration, + request, + ); + const result = await executor.query( + statement, + Object.keys(parameters).length > 0 ? 
parameters : undefined, + queryParameters.formatParameters, + signal, + ); + return { type: queryParameters.type, ...result }; + } catch (err) { + // Cancellation must pass through to the framework's stream layer + // SHAPED AS an AbortError so `StreamManager._categorizeError` + // classifies it as STREAM_ABORTED (it checks `name === "AbortError"` + // or `message.includes("operation was aborted")`). Re-throwing + // unchanged would otherwise let `ExecutionError.canceled()` fall + // through to UPSTREAM_ERROR. + // + // We identify cancellations by error CLASS/MESSAGE rather than + // by `signal.aborted`. The signal-state check raced concurrent + // warehouse errors: if `TimeoutInterceptor` aborted the signal + // exactly when the warehouse returned `TABLE_OR_VIEW_NOT_FOUND`, + // the bypass would rethrow the raw warehouse text unscrubbed + // (round-6 security finding). Two narrow shapes: + // + // 1. real `AbortError` (`fetch` / `AbortController.abort()`) + // 2. `ExecutionError` with the static message constructed by + // `ExecutionError.canceled()` ("Statement was canceled") — + // the only ExecutionError variant that means cancellation. + // Match the message exactly so warehouse errors that happen + // to mention "canceled" cannot trick the bypass. + const isCancellation = + (err instanceof Error && err.name === "AbortError") || + (err instanceof ExecutionError && + err.message === "Statement was canceled"); + if (isCancellation) { + if (err instanceof Error && err.name === "AbortError") { + throw err; + } + // Normalize the connector's `ExecutionError.canceled()` to the + // AbortError shape `_categorizeError` recognizes. + const normalized = new Error("operation was aborted"); + normalized.name = "AbortError"; + throw normalized; + } + // Server-side scrub for the SSE error envelope. Without this, any + // 4xx from the warehouse (e.g. 
TABLE_OR_VIEW_NOT_FOUND with a UC + // FQN attached) flows verbatim through the framework's pass-through + // for client-status errors — visible to anyone hitting the route, + // including non-React consumers that the round-1 client-side scrub + // does not protect. Production gets a generic message; dev keeps + // the original for diagnostics. Telemetry always carries the raw. + // + // Fail-closed env check: only an explicit "development" treats as + // dev. Containers / serverless runtimes that leave NODE_ENV unset + // must not leak warehouse internals. + event?.setContext("analytics", { + metric_query_error: + err instanceof Error ? err.message : String(err), + metric_key: key, + }); + const isProd = process.env.NODE_ENV !== "development"; + if (err instanceof AppKitError) { + // 5xx-class AppKitErrors (notably ExecutionError raised by the + // SQL connector on warehouse failures) carry raw warehouse text + // in their message. Scrub those in prod; let 4xx-class + // AppKitErrors (ValidationError, AuthenticationError) through + // since their messages are constructed by us with known-clean + // content. + if (isProd && err.statusCode >= 500) { + throw new ExecutionError("Failed to execute metric query"); + } + throw err; + } + throw new ExecutionError( + isProd + ? "Failed to execute metric query" + : err instanceof Error + ? err.message + : "Failed to execute metric query", + ); + } + }, + streamExecutionSettings, + executorKey, + ); + } + + /** + * Test-only seam: populate the metric registry without going through + * `setup()` (which reads `config/queries/metric.json` from disk). Tests + * exercise the route handler directly with synthetic registrations. + * + * @internal + */ + _setMetricRegistryForTesting( + registry: Record, + ): void { + this.metricRegistry = registry; + } + + /** + * Test-only seam: simulate a `loadMetricRegistry()` failure latched by + * `setup()`. 
Production code never calls this — `setup()` is the sole + * setter of `metricRegistryLoadError`. + * + * @internal + */ + _setMetricRegistryLoadErrorForTesting(reason: string | null): void { + this.metricRegistryLoadError = reason; + } + /** * Execute a SQL query using the current execution context. * diff --git a/packages/appkit/src/plugins/analytics/index.ts b/packages/appkit/src/plugins/analytics/index.ts index 9ad02125e..78d793663 100644 --- a/packages/appkit/src/plugins/analytics/index.ts +++ b/packages/appkit/src/plugins/analytics/index.ts @@ -1,2 +1,3 @@ export * from "./analytics"; +export * from "./metric"; export * from "./types"; diff --git a/packages/appkit/src/plugins/analytics/metric.ts b/packages/appkit/src/plugins/analytics/metric.ts new file mode 100644 index 000000000..756ecfdab --- /dev/null +++ b/packages/appkit/src/plugins/analytics/metric.ts @@ -0,0 +1,1615 @@ +import { createHash } from "node:crypto"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { type SQLTypeMarker, sql as sqlHelpers } from "shared"; +import { z } from "zod"; +import { AuthenticationError, ValidationError } from "../../errors"; +import { createLogger } from "../../logging/logger"; +import type { + IAnalyticsMetricRequest, + MetricDimensionTypeClass, + MetricFilter, + MetricFilterOperatorName, + MetricLane, + MetricPredicate, + MetricRegistration, +} from "./types"; + +const logger = createLogger("analytics:metric"); + +/** + * The exact twelve filter operators allowed at v1. The runtime tuple is the + * server-side source of truth; the client-side type union + * `MetricFilterOperator` mirrors these names statically. + */ +const METRIC_FILTER_OPERATORS = [ + "equals", + "notEquals", + "in", + "notIn", + "gt", + "gte", + "lt", + "lte", + "contains", + "notContains", + "set", + "notSet", +] as const satisfies readonly MetricFilterOperatorName[]; + +/** + * Maximum AND/OR nesting depth. 
The PRD documents 8 as a sensible cap — + * enough for any real BI filter UI, low enough that a hostile or malformed + * payload cannot stack-overflow the recursive validator or translator. + * + * The depth count is the number of nested `{ and }` / `{ or }` wrappers + * encountered while descending — leaf predicates do not count toward depth. + */ +const METRIC_FILTER_MAX_DEPTH = 8; + +/** + * Cardinality caps on user-controlled arrays. Closes the recurring + * `unbounded-request-parameters` finding: a hostile caller could otherwise + * send `values: [...10M items...]` and exhaust the validator + the named + * bind-var binding step. The limits below are deliberately generous — + * higher than any real BI UI would emit — so legitimate traffic never trips + * them. If a customer scenario needs more, expose a per-metric override. + */ +const METRIC_MEASURES_MAX = 50; +const METRIC_DIMENSIONS_MAX = 20; +const METRIC_FILTER_VALUES_MAX = 1000; +const METRIC_LIMIT_MAX = 100_000; + +/** + * Maximum number of children per AND/OR group node. Without this cap a + * single flat group like `{ and: [...10M empty objects...] }` would push + * tens of millions of frames onto the iterative pre-check's stack — OOM + * before validation even gets to Zod. The Zod schema enforces the same + * cap so the rejection point is consistent regardless of which validator + * catches it first. + */ +const METRIC_FILTER_GROUP_MAX = 100; + +/** + * Range ops — require numeric or date-typed dimensions. The remaining ops + * split into: + * - any-type: equals, notEquals, in, notIn, set, notSet + * - string-only: contains, notContains + */ +const RANGE_OPERATORS = new Set([ + "gt", + "gte", + "lt", + "lte", +]); + +/** String ops — require string-typed dimensions. */ +const STRING_OPERATORS = new Set([ + "contains", + "notContains", +]); + +/** Operators that require exactly one value. 
*/ +const SINGLE_VALUE_OPERATORS = new Set([ + "equals", + "notEquals", + "gt", + "gte", + "lt", + "lte", + "contains", + "notContains", +]); + +/** Operators that require at least one value. */ +const LIST_VALUE_OPERATORS = new Set(["in", "notIn"]); + +/** Operators that reject `values` entirely. */ +const NULL_OPERATORS = new Set(["set", "notSet"]); + +/** + * Default queries directory. Mirrors `AppManager.queriesDir` so dev mode and + * production share a single source of truth. + */ +const QUERIES_DIR = path.resolve(process.cwd(), "config/queries"); +const METRIC_CONFIG_FILE = "metric.json"; +/** + * Default location of the build-time metadata bundle emitted by + * `metric sync` and the Vite type-generator plugin. The path mirrors the + * default `metricMetadataOutFile` in `packages/appkit/src/type-generator/`. + */ +const METRIC_METADATA_PATH = path.resolve( + process.cwd(), + "shared/appkit-types/metrics.metadata.json", +); + +const METRIC_KEY_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]*$/; +const FQN_PATTERN = + /^[a-zA-Z0-9_][a-zA-Z0-9_-]*\.[a-zA-Z0-9_][a-zA-Z0-9_-]*\.[a-zA-Z0-9_][a-zA-Z0-9_-]*$/; + +/** + * v1 entry shape — only `source` is allowed. Future per-entry options grow + * additively without breaking changes. + */ +const metricEntrySchema = z + .object({ + source: z.string().regex(FQN_PATTERN, { + message: + "metric.source must be a three-part UC FQN ..", + }), + }) + .strict(); + +const metricLaneSchema = z + .record( + z.string().regex(METRIC_KEY_PATTERN, { + message: + "metric key must match /^[a-zA-Z_][a-zA-Z0-9_]*$/ (letters, digits, underscores; cannot start with a digit)", + }), + metricEntrySchema, + ) + .optional(); + +/** Top-level shape of metric.json. */ +const metricConfigSchema = z + .object({ + $schema: z.string().optional(), + sp: metricLaneSchema, + obo: metricLaneSchema, + }) + .strict(); + +/** + * Per-metric metadata threaded from the type-generator into the runtime + * registry. 
Phase 1 supplied measures + dimensions; Phase 2 adds the + * per-dim time-grain map for time-typed dimensions. + * + * Internal to this module — the type-generator wires the JSON metadata blob + * (Phase 5) into `loadMetricRegistry` via the inferred function parameter + * shape, so external consumers never name this interface directly. + */ +interface MetricBuildTimeMetadata { + measures?: string[]; + dimensions?: string[]; + /** + * Dimension name → allowed time-grains. Only populated for time-typed + * dimensions; regular dimensions are absent from this map. + */ + timeGrainsByDim?: Record; + /** + * Dimension name → SQL type. Drives op-vs-type compatibility checks in the + * filter validator. Empty/missing → validator falls open on type checks. + */ + dimensionTypes?: Record; +} + +/** + * Read the build-time metadata bundle (`metrics.metadata.json`) emitted by + * `metric sync` / the Vite type-generator plugin, and transform it into the + * shape `loadMetricRegistry` expects. + * + * Returns `null` when the file is absent — apps that haven't run `metric sync` + * fall back to the validator's open mode. Logs and returns null on parse + * failures so a stale bundle never takes the server down. + */ +async function readMetricsMetadataBundle( + metadataPath: string = METRIC_METADATA_PATH, +): Promise | null> { + let raw: string; + try { + raw = await fs.readFile(metadataPath, "utf8"); + } catch (err) { + if ((err as NodeJS.ErrnoException).code === "ENOENT") { + return null; + } + logger.warn( + "Failed to read metrics.metadata.json at %s: %s", + metadataPath, + err instanceof Error ? err.message : String(err), + ); + return null; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch (err) { + logger.warn( + "metrics.metadata.json at %s is not valid JSON: %s", + metadataPath, + err instanceof Error ? 
err.message : String(err), + ); + return null; + } + + if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) { + return null; + } + + const result: Record = {}; + for (const [metricKey, value] of Object.entries( + parsed as Record, + )) { + if (!value || typeof value !== "object") continue; + const v = value as Record; + const measuresObj = + v.measures && typeof v.measures === "object" && !Array.isArray(v.measures) + ? (v.measures as Record) + : {}; + const dimensionsObj = + v.dimensions && + typeof v.dimensions === "object" && + !Array.isArray(v.dimensions) + ? (v.dimensions as Record) + : {}; + + const measures = Object.keys(measuresObj).sort(); + const dimensions = Object.keys(dimensionsObj).sort(); + + const timeGrainsByDim: Record = {}; + const dimensionTypes: Record = {}; + for (const [dimName, dimMeta] of Object.entries(dimensionsObj)) { + if (!dimMeta || typeof dimMeta !== "object") continue; + const m = dimMeta as Record; + if (typeof m.type === "string") { + dimensionTypes[dimName] = m.type; + } + if (Array.isArray(m.time_grain)) { + const grains = m.time_grain.filter( + (g): g is string => typeof g === "string", + ); + if (grains.length > 0) { + timeGrainsByDim[dimName] = grains; + } + } + } + + result[metricKey] = { + measures, + dimensions, + timeGrainsByDim, + dimensionTypes, + }; + } + + return result; +} + +/** + * Read and validate `config/queries/metric.json`. + * + * Returns an empty registry when the file is absent — the metric-view path is + * additive; apps that never adopt metric views must not pay any cost. + * + * The optional `metadata` argument carries build-time-extracted measure / + * dimension names produced by the type-generator. When omitted, the registry + * still loads but `knownMeasures` is empty and the validator can only do + * structural checks. 
+ */ +export async function loadMetricRegistry( + metadata?: Record, + queriesDir: string = QUERIES_DIR, +): Promise> { + const metricPath = path.join(queriesDir, METRIC_CONFIG_FILE); + + // Auto-discover the build-time metadata bundle if the caller didn't + // pass one explicitly. This wires up Phase 5's metrics.metadata.json + // to the server-side validator so it knows which dimensions are time- + // typed (and therefore which `timeGrain` values to accept). + const resolvedMetadata = + metadata ?? (await readMetricsMetadataBundle()) ?? undefined; + + let raw: string; + try { + raw = await fs.readFile(metricPath, "utf8"); + } catch (err) { + if ((err as NodeJS.ErrnoException).code === "ENOENT") { + return {}; + } + throw err; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch (err) { + throw new Error( + `Failed to parse metric.json at ${metricPath}: ${(err as Error).message}`, + ); + } + + const result = metricConfigSchema.safeParse(parsed); + if (!result.success) { + const issues = result.error.issues + .map((i) => `${i.path.join(".")}: ${i.message}`) + .join("; "); + throw new Error(`Invalid metric.json at ${metricPath}: ${issues}`); + } + + const registry: Record = {}; + const lanes: Array<[MetricLane, Record]> = [ + ["sp", result.data.sp ?? {}], + ["obo", result.data.obo ?? {}], + ]; + + for (const [lane, laneMap] of lanes) { + for (const [key, entry] of Object.entries(laneMap)) { + if (key in registry) { + throw new Error( + `Duplicate metric key "${key}": cannot appear in both sp and obo lanes.`, + ); + } + const meta = resolvedMetadata?.[key]; + registry[key] = { + key, + source: entry.source, + lane, + knownMeasures: meta?.measures ?? [], + knownDimensions: meta?.dimensions ?? [], + knownTimeGrainsByDim: meta?.timeGrainsByDim ?? 
{}, + knownDimensionTypes: meta?.dimensionTypes, + }; + } + } + + logger.debug( + "Loaded metric registry: %d entry(ies)", + Object.keys(registry).length, + ); + return registry; +} + +/** + * Build a zod schema for the request body of POST /api/analytics/metric/:key. + * + * The schema is dynamic per metric: when `knownMeasures` is non-empty the + * `measures` array is constrained to that set. When empty (no build-time + * metadata available) any non-empty string is accepted and validation defers + * to the warehouse. + * + * Phase 3 body shape: `{ measures, dimensions?, timeGrain?, filter?, format?, limit? }`. + * + * Validation matrix: + * - `measures` — must be a non-empty array; constrained to `knownMeasures` + * when build-time metadata is available. + * - `dimensions` — optional array; constrained to `knownDimensions`. + * - `timeGrain` — optional string; constrained to the union of grains + * declared across all time-typed dimensions; rejected unless the + * `dimensions` array contains at least one time-typed dimension. + * - `filter` — optional recursive AND/OR tree of predicates; `member` + * constrained to `knownDimensions`; `operator` constrained to the v1 + * twelve; op⇄type compatibility enforced when dimension types are + * available; values cardinality enforced per operator; AND/OR depth + * capped at {@link METRIC_FILTER_MAX_DEPTH}. + */ +export function makeMetricRequestSchema( + registration: MetricRegistration, +): z.ZodType { + const baseMeasureSchema = z + .string() + .min(1, { message: "measure name cannot be empty" }); + + // When the registry has build-time metadata, narrow the measure schema to + // the declared measure names. Use a refinement (rather than `z.enum`) so we + // can construct the schema dynamically at runtime. + const knownMeasures = registration.knownMeasures; + const measureItemSchema = + knownMeasures.length > 0 + ? 
baseMeasureSchema.refine( + (name: string) => knownMeasures.includes(name), + { + message: `measure must be one of: ${knownMeasures.join(", ")}`, + }, + ) + : baseMeasureSchema; + + const knownDimensions = registration.knownDimensions; + const baseDimensionSchema = z + .string() + .min(1, { message: "dimension name cannot be empty" }); + // When the metric has no registered dimensions (a measure-only KPI), + // `dimensions` must be empty/omitted: any non-empty entry is rejected. + // This closes the previous fall-open path (where empty `knownDimensions` + // skipped the allowlist refinement) without blocking the legitimate + // measure-only case — `dimensions: undefined` and `dimensions: []` still + // pass the surrounding `.optional()` and `.min(0)` shape. + const dimensionItemSchema = + knownDimensions.length > 0 + ? baseDimensionSchema.refine( + (name: string) => knownDimensions.includes(name), + { + message: `dimension must be one of: ${knownDimensions.join(", ")}`, + }, + ) + : (z.never() as unknown as z.ZodType); + + // Aggregate the union of grains the metric view supports. Empty union means + // no time-typed dimensions are declared — `timeGrain` cannot be set. + // Mirrors the dimensions/filter tightening: when the metric has no + // time-typed dimensions, `timeGrain` is rejected at validation time + // (typed as `z.never()`). The previous fall-open path silently accepted + // arbitrary grain tokens; the SQL came out identical (no time-typed dim + // → no `date_trunc` clause), but `composeMetricCacheKey` salts the + // cache entry with the raw token, so a hostile caller could vary + // `timeGrain` to force unbounded cache misses + warehouse re-execution. + const grainsByDim = registration.knownTimeGrainsByDim; + const allowedGrains = collectAllowedGrains(grainsByDim); + const baseTimeGrainSchema = z + .string() + .min(1, { message: "timeGrain cannot be empty" }); + const timeGrainSchema = + allowedGrains.length > 0 + ? 
baseTimeGrainSchema.refine((g: string) => allowedGrains.includes(g), { + message: `timeGrain must be one of: ${allowedGrains.join(", ")}`, + }) + : (z.never() as unknown as z.ZodType); + + // ── Filter sub-schema (Phase 3) ────────────────────────────────────────── + // + // The filter shape is recursive (`Predicate | { and: [...] } | { or: [...] }`). + // Zod's recursive support uses `z.lazy(() => ...)` — the depth cap and the + // op⇄type compatibility check live in a `superRefine` on the parent (so we + // can walk the tree once with full context). + const filterPredicateSchema: z.ZodType = z + .object({ + member: z + .string() + .min(1, { message: "filter predicate 'member' cannot be empty" }), + operator: z.string().min(1, { + message: "filter predicate 'operator' cannot be empty", + }) as z.ZodType, + values: z + .array(z.union([z.string(), z.number()])) + .max(METRIC_FILTER_VALUES_MAX, { + message: `filter predicate 'values' length exceeds the maximum of ${METRIC_FILTER_VALUES_MAX}`, + }) + .optional(), + }) + .strict(); + + const filterSchema: z.ZodType = z.lazy(() => + z.union([ + filterPredicateSchema, + z + .object({ + and: z.array(filterSchema).max(METRIC_FILTER_GROUP_MAX, { + message: `filter 'and' group exceeds the maximum of ${METRIC_FILTER_GROUP_MAX} children`, + }), + }) + .strict(), + z + .object({ + or: z.array(filterSchema).max(METRIC_FILTER_GROUP_MAX, { + message: `filter 'or' group exceeds the maximum of ${METRIC_FILTER_GROUP_MAX} children`, + }), + }) + .strict(), + ]), + ); + + const knownDimensionTypes = registration.knownDimensionTypes ?? 
{}; + + const baseObject = z + .object({ + measures: z + .array(measureItemSchema) + .min(1, { message: "measures must contain at least one entry" }) + .max(METRIC_MEASURES_MAX, { + message: `measures length exceeds the maximum of ${METRIC_MEASURES_MAX}`, + }), + dimensions: z + .array(dimensionItemSchema) + .max(METRIC_DIMENSIONS_MAX, { + message: `dimensions length exceeds the maximum of ${METRIC_DIMENSIONS_MAX}`, + }) + .optional(), + timeGrain: timeGrainSchema.optional(), + filter: filterSchema.optional(), + format: z.enum(["JSON", "ARROW"]).optional(), + limit: z + .number() + .int({ message: "limit must be an integer" }) + .positive({ message: "limit must be positive" }) + .max(METRIC_LIMIT_MAX, { + message: `limit exceeds the maximum of ${METRIC_LIMIT_MAX}`, + }) + .optional(), + }) + .strict(); + + // Cross-field rules: + // 1. timeGrain is meaningless without a time-typed dimension in the + // dimensions list. Failing fast here keeps the SQL constructor honest + // (no `date_trunc(, )` without a real column to truncate). + // 2. The recursive `filter` tree is depth-walked once: every predicate's + // member must be a registered dimension; every operator must be one of + // the twelve; op⇄type compatibility is enforced when dimension types + // are available; values cardinality is enforced per operator; AND/OR + // nesting is capped at METRIC_FILTER_MAX_DEPTH. + return baseObject.superRefine((value, ctx) => { + // Cross-field rule for timeGrain. Tight check whenever the registry has + // ANY dimension metadata: if the metric has dims registered but none are + // time-typed (`grainsByDim` empty), `timeGrain` is meaningless on this + // metric and we reject. The pure-fall-open path now only fires when no + // dimension metadata is available at all — which the route's fail-closed + // gate (`knownMeasures.length === 0` → 503) prevents in practice. 
+ if (value.timeGrain != null && knownDimensions.length > 0) { + const grainsByDimKeys = Object.keys(grainsByDim); + if (grainsByDimKeys.length === 0) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["timeGrain"], + message: + "timeGrain specified but the metric has no time-typed dimensions", + }); + } else { + const dims = value.dimensions ?? []; + const hasTimeDim = dims.some( + (d) => Array.isArray(grainsByDim[d]) && grainsByDim[d].length > 0, + ); + if (!hasTimeDim) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["timeGrain"], + message: + "timeGrain specified but no time-typed dimension is included in 'dimensions'", + }); + } + } + } + + if (value.filter != null) { + validateFilterTree(value.filter, ctx, ["filter"], 0, { + knownDimensions, + knownDimensionTypes, + }); + } + }) as z.ZodType; +} + +/** + * Recursive zod-time validator for the filter tree. + * + * Pushes structured issues into the zod refinement context with stable paths + * (`filter.and.0.or.2.member`, etc.) so the canonical 400 error shape carries + * actionable diagnostics. Keeps three concerns in one descent: + * + * 1. Member is a registered dimension (when registry has metadata). + * 2. Operator is one of the twelve; values cardinality matches. + * 3. Op⇄type compatibility (string ops on string-typed dims, range ops on + * numeric/date-typed dims, equality/set/null ops on any type). + * 4. Depth cap (AND/OR nesting limit). + * + * Returns void; issues are accumulated on `ctx`. The caller's + * `safeParse(...).success` flips false when any issue is added. + */ +function validateFilterTree( + node: MetricFilter, + ctx: z.RefinementCtx, + path: Array, + depth: number, + registry: { + knownDimensions: string[]; + knownDimensionTypes: Record; + }, +): void { + if (node === null || typeof node !== "object") { + // The base schema rejects this case earlier via the union, but be + // defensive in case a future refactor leaves the door ajar. 
+ ctx.addIssue({ + code: z.ZodIssueCode.custom, + path, + message: "filter node must be a Predicate or { and } / { or } group", + }); + return; + } + + if ("and" in node || "or" in node) { + if (depth + 1 > METRIC_FILTER_MAX_DEPTH) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path, + message: `filter AND/OR nesting exceeds the maximum depth of ${METRIC_FILTER_MAX_DEPTH}`, + }); + return; + } + + const groupKey = "and" in node ? "and" : "or"; + const children = ( + node as { and?: ReadonlyArray } & { + or?: ReadonlyArray; + } + )[groupKey]; + + if (!Array.isArray(children)) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, groupKey], + message: `filter ${groupKey} group must be an array of predicates or nested groups`, + }); + return; + } + + // Reject empty `or` groups: SQL-wise an empty disjunction is vacuously + // false, which silently drops the surrounding intent. Empty `and` is OK + // (vacuously true → no constraint contributed). Forcing the caller to + // omit the predicate entirely is the only unambiguous choice. + if (groupKey === "or" && children.length === 0) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "or"], + message: "filter 'or' group must contain at least one predicate", + }); + return; + } + + children.forEach((child, idx) => { + validateFilterTree( + child, + ctx, + [...path, groupKey, idx], + depth + 1, + registry, + ); + }); + return; + } + + // Leaf predicate. The base schema already enforced shape; here we layer in + // the registry-aware constraints. + const predicate = node as MetricPredicate; + + if (registry.knownDimensions.length === 0) { + // The metric has no registered dimensions (measure-only KPI) — any + // filter predicate references a member that cannot exist. Reject + // rather than fall open. This complements the validator-level + // dimensionItemSchema tightening: filter and dimensions are both + // gated by the same registry signal. 
+ ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "member"], + message: `filter member "${predicate.member}" is not a declared dimension`, + }); + } else if (!registry.knownDimensions.includes(predicate.member)) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "member"], + message: `filter member "${predicate.member}" is not a declared dimension (allowed: ${registry.knownDimensions.join(", ")})`, + }); + } + + if ( + !METRIC_FILTER_OPERATORS.includes( + predicate.operator as MetricFilterOperatorName, + ) + ) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "operator"], + message: `filter operator "${predicate.operator}" is not one of: ${METRIC_FILTER_OPERATORS.join(", ")}`, + }); + // No further checks meaningful when the operator is unknown. + return; + } + + const op = predicate.operator; + const values = predicate.values; + const valuesLen = values?.length ?? 0; + + if (NULL_OPERATORS.has(op)) { + if (values != null && valuesLen > 0) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "values"], + message: `filter operator "${op}" must not carry values`, + }); + } + } else if (SINGLE_VALUE_OPERATORS.has(op)) { + if (valuesLen !== 1) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "values"], + message: `filter operator "${op}" requires exactly one value (got ${valuesLen})`, + }); + } + } else if (LIST_VALUE_OPERATORS.has(op)) { + if (valuesLen < 1) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "values"], + message: `filter operator "${op}" requires at least one value`, + }); + } + } + + // Op⇄type compatibility — only enforced when we have a registered type. + // Falls open (no error) when the registry didn't supply a type for the dim. 
+ const declaredType = registry.knownDimensionTypes[predicate.member]; + if (declaredType) { + const cls = classifyDimensionType(declaredType); + if (RANGE_OPERATORS.has(op) && cls === "string") { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "operator"], + message: `filter operator "${op}" is incompatible with string-typed dimension "${predicate.member}"`, + }); + } + if (STRING_OPERATORS.has(op) && cls !== "string" && cls !== "unknown") { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "operator"], + message: `filter operator "${op}" is incompatible with non-string dimension "${predicate.member}" (type ${declaredType})`, + }); + } + } + + // Op⇄value-type compatibility. Catches the malformed-value case at + // validation time (returns 400) instead of letting it surface as a + // synchronous Error from `buildMetricSql` (which would render as 500). + // String operators always require a string value regardless of the + // dimension's declared type. Range operators require a numeric value when + // the dim is numeric — date-typed dims accept ISO date strings, so we + // don't tighten there. + if (STRING_OPERATORS.has(op) && valuesLen > 0) { + const v = predicate.values?.[0]; + if (typeof v !== "string") { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "values"], + message: `filter operator "${op}" requires a string value (got ${typeof v})`, + }); + } + } + if ( + RANGE_OPERATORS.has(op) && + declaredType && + classifyDimensionType(declaredType) === "numeric" && + valuesLen > 0 + ) { + const v = predicate.values?.[0]; + if (typeof v !== "number") { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: [...path, "values"], + message: `filter operator "${op}" on numeric dimension "${predicate.member}" requires a numeric value (got ${typeof v})`, + }); + } + } +} + +/** + * Classify a Databricks SQL type string into a coarse compatibility class. 
+ * + * The classification is conservative: `STRING` and adjacent text types map to + * `string`; numeric, integral, and float types map to `numeric`; `DATE` and + * `TIMESTAMP` map to `date`; everything else maps to `unknown`. Accepting the + * fallback as `unknown` lets the validator stay deterministic when the + * registry has no type metadata for the dim. + */ +function classifyDimensionType(sqlType: string): MetricDimensionTypeClass { + const normalized = sqlType + .toUpperCase() + .replace(/\(.*\)$/, "") + .replace(/<.*>$/, "") + .split(" ")[0]; + + switch (normalized) { + case "STRING": + case "VARCHAR": + case "CHAR": + case "TEXT": + return "string"; + case "TINYINT": + case "SMALLINT": + case "INT": + case "INTEGER": + case "BIGINT": + case "FLOAT": + case "DOUBLE": + case "DECIMAL": + case "NUMERIC": + return "numeric"; + case "DATE": + case "TIMESTAMP": + case "TIMESTAMP_NTZ": + case "TIMESTAMP_LTZ": + return "date"; + default: + return "unknown"; + } +} + +/** + * Aggregate the set of allowed time-grains across every time-typed dimension. + * + * Sorted + deduplicated so the validator's error messages and the cache-key + * construction are deterministic. Memoized per `grainsByDim` reference: the + * input is static per `MetricRegistration`, so steady-state hits reuse the + * sorted array without re-walking + re-sorting on every `buildMetricSql` + * call. (The validator's path only invokes this once per metric thanks to + * the schema cache; the SQL builder still calls it per request.) 
+ */ +const collectAllowedGrainsCache = new WeakMap< + Record<string, string[]>, + string[] +>(); + +function collectAllowedGrains(grainsByDim: Record<string, string[]>): string[] { + const cached = collectAllowedGrainsCache.get(grainsByDim); + if (cached !== undefined) return cached; + const set = new Set<string>(); + for (const grains of Object.values(grainsByDim)) { + for (const g of grains) { + set.add(g); + } + } + const result = [...set].sort(); + collectAllowedGrainsCache.set(grainsByDim, result); + return result; +} + +/** + * Validate the request body against the metric's schema. + * + * Returns the parsed body on success; throws {@link ValidationError} with the + * canonical 400 shape on failure. Throwing keeps the route handler simple — + * the AppKit error pipeline handles the response shape. + * + * The thrown error's public `message` carries only the offending field paths + * (`measures.0`, `filter.and.0.member`, etc.) — never the registry's allowed + * values or the metric's measure/dimension names. The full Zod issue list, + * including allowlists embedded in per-issue messages, is preserved on + * `context.issues` for server-side telemetry. This prevents an unauthenticated + * caller from enumerating the registered schema by sending malformed bodies. + */ +/** + * Per-registration Zod schema cache. The schema is recursive (filter tree + * with `z.lazy`) and constructs ~10 chained refinements, which is non-trivial + * to rebuild on every request. Keyed on the registration object so the cache + * empties automatically when the registry is reloaded (e.g., dev hot-reload + * of `metric.json`) — old registration objects become unreferenced and the + * `WeakMap` entry is garbage-collected. + */ +const metricRequestSchemaCache = new WeakMap< + MetricRegistration, + z.ZodType +>(); + +/** + * Iterative pre-parse depth check. Zod's union/object parsers walk the input + * recursively before our `superRefine` depth cap fires, so a deeply-nested + * `{ and: [{ and: [...]
}] }` payload could stack-overflow during parse, + never reaching the validator's depth check. This walk is iterative (uses + an explicit stack) and aborts as soon as `METRIC_FILTER_MAX_DEPTH` is + exceeded, so a hostile payload of any size cannot drive the call stack. + * + * Walks BOTH `and` and `or` branches when both are present on the same node + * — Zod's `.strict()` will reject the multi-key shape downstream, but the + * pre-check has to inspect every branch Zod might recurse into. An earlier + * version used `else if` and was bypassed by `{ and: [], or: <deeply nested> }`. + * + * Group-children breadth is also capped: a flat `{ and: [...10M items...] }` + * payload cannot push 10M frames onto the explicit stack here. The Zod + * schema enforces the same `.max()` so the failure surfaces at the same + * point regardless of which validator catches it first. + * + * Predicate leaves do NOT count toward depth — only nested `and` / `or` + * wrappers — matching the rule the in-tree validator enforces in + * {@link validateFilterTree}. + */ +function preCheckFilterDepth(filter: unknown): void { + if (filter == null || typeof filter !== "object") return; + const stack: Array<[unknown, number]> = [[filter, 0]]; + while (stack.length > 0) { + const popped = stack.pop(); + if (popped === undefined) continue; + const [node, depth] = popped; + if (node == null || typeof node !== "object") continue; + const obj = node as Record<string, unknown>; + // Inspect BOTH `and` and `or` if present. Using `else if` here was a + // critical bypass: a payload of `{ and: [], or: <deeply nested> }` slid + // past the pre-check (empty `and` walked, `or` ignored) and Zod's + // union recursion then stack-overflowed on the `or` branch.
+ for (const groupKey of ["and", "or"] as const) { + const children = obj[groupKey]; + if (!Array.isArray(children)) continue; + if (children.length > METRIC_FILTER_GROUP_MAX) { + throw new ValidationError( + "Invalid metric request body (fields: filter)", + { + context: { + reason: `filter ${groupKey} group has ${children.length} children; the maximum is ${METRIC_FILTER_GROUP_MAX}`, + }, + }, + ); + } + if (depth + 1 > METRIC_FILTER_MAX_DEPTH) { + throw new ValidationError( + "Invalid metric request body (fields: filter)", + { + context: { + reason: `filter AND/OR nesting exceeds the maximum depth of ${METRIC_FILTER_MAX_DEPTH}`, + }, + }, + ); + } + for (const child of children) { + stack.push([child, depth + 1]); + } + } + } +} + +export function validateMetricRequest( + registration: MetricRegistration, + body: unknown, +): IAnalyticsMetricRequest { + // Bound the recursion depth before Zod sees the input — the schema's + // own depth check fires inside `superRefine` which only runs after Zod's + // recursive parse has already walked the tree on the call stack. + if (body != null && typeof body === "object") { + preCheckFilterDepth((body as { filter?: unknown }).filter); + } + let schema = metricRequestSchemaCache.get(registration); + if (schema === undefined) { + schema = makeMetricRequestSchema(registration); + metricRequestSchemaCache.set(registration, schema); + } + const result = schema.safeParse(body); + if (!result.success) { + const fieldPaths = result.error.issues + .map((i) => i.path.join(".") || "(root)") + .join(", "); + throw new ValidationError( + fieldPaths.length > 0 + ? `Invalid metric request body (fields: ${fieldPaths})` + : "Invalid metric request body", + { + context: { + metric: registration.key, + issues: result.error.issues, + }, + }, + ); + } + return result.data; +} + +/** + * SQL identifier safety guard — the FQN ships in the SQL string (it cannot be + * parameterized) so we belt-and-suspender the regex check at construction time. 
+ * + * The build-time loader already enforces FQN_PATTERN; this is a runtime fence + * for any future code path that constructs SQL outside of the registry. + */ +function assertSafeFqn(fqn: string): void { + if (!FQN_PATTERN.test(fqn)) { + throw new Error( + `Refusing to build SQL: "${fqn}" is not a valid three-part UC FQN.`, + ); + } +} + +/** + * Validate measure names before they are interpolated into MEASURE(). + * + * Measure names cannot be parameterized — they are SQL identifiers, not + * literals. We restrict to a conservative identifier shape and assert + * presence in the build-time registry when known. + */ +const MEASURE_NAME_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]*$/; + +/** + * Dimension name pattern. Matches the identifier shape we accept for measures + * — column references cannot be parameterized in SQL, so they must be + * conservatively safe identifiers (no spaces, no quotes, no SQL operators). + */ +const DIMENSION_NAME_PATTERN = /^[a-zA-Z_][a-zA-Z0-9_]*$/; + +/** + * Time-grain enum values that are safe to interpolate into `date_trunc()`. + * The build-time metadata supplies these as YAML 1.1 lowercase tokens — we + * only accept that shape; anything else (mixed case, quoted strings, + * SQL operators) is rejected before reaching the SQL string. + */ +const TIME_GRAIN_PATTERN = /^[a-z][a-z_]*$/; + +/** + * Construct the Phase 3 metric SQL. + * + * Shape: + * + * SELECT MEASURE(m), date_trunc('<grain>', <dim>) AS <dim>, <dims> + * FROM <three-part metric-view FQN> + * [WHERE <filter>] + * [GROUP BY ALL] + * [LIMIT n] + * + * Notes: + * - All column references (measures, dimensions, filter members) are + * validated against the registry and against the conservative identifier + * pattern. No user-supplied string flows into the SQL string without + * passing both gates. + * - `date_trunc('<grain>', col) AS col` is emitted for every time-typed + * dimension when `timeGrain` is set.
The grain literal is single-quoted in + * the SQL — we cannot use a bind variable for `date_trunc`'s first + * argument, so we restrict to the registry's allowed grain enum. + * - `GROUP BY ALL` is added when at least one dimension is requested. UC + * requires GROUP BY when MEASURE() is mixed with non-aggregated columns; + * `GROUP BY ALL` is the documented form that works without re-listing each + * dimension. + * - `WHERE` clause is rendered from the recursive filter tree. Every value + * flows through Statement Execution's named bind-var path (`:f_`); + * no value is ever interpolated as a literal. Member identifiers come + * from the validated registry, not the request body. + * + * Returns `{ statement, parameters }` where `parameters` is the named + * bind-var dictionary the analytics plugin's `query()` method consumes. + */ +export function buildMetricSql( + registration: MetricRegistration, + request: IAnalyticsMetricRequest, +): { + statement: string; + parameters: Record; +} { + assertSafeFqn(registration.source); + + if (request.measures.length === 0) { + throw new Error("buildMetricSql requires at least one measure."); + } + + for (const m of request.measures) { + if (!MEASURE_NAME_PATTERN.test(m)) { + throw new Error( + `Refusing to build SQL: measure "${m}" is not a valid identifier.`, + ); + } + if ( + registration.knownMeasures.length > 0 && + !registration.knownMeasures.includes(m) + ) { + throw new Error( + `Refusing to build SQL: unknown measure "${m}" for metric "${registration.key}".`, + ); + } + } + + const dimensions = request.dimensions ?? 
[]; + for (const d of dimensions) { + if (!DIMENSION_NAME_PATTERN.test(d)) { + throw new Error( + `Refusing to build SQL: dimension "${d}" is not a valid identifier.`, + ); + } + if ( + registration.knownDimensions.length > 0 && + !registration.knownDimensions.includes(d) + ) { + throw new Error( + `Refusing to build SQL: unknown dimension "${d}" for metric "${registration.key}".`, + ); + } + } + + if (request.timeGrain !== undefined) { + if (!TIME_GRAIN_PATTERN.test(request.timeGrain)) { + throw new Error( + `Refusing to build SQL: timeGrain "${request.timeGrain}" is not a valid grain token.`, + ); + } + const allowed = collectAllowedGrains(registration.knownTimeGrainsByDim); + if (allowed.length > 0 && !allowed.includes(request.timeGrain)) { + throw new Error( + `Refusing to build SQL: unknown timeGrain "${request.timeGrain}" for metric "${registration.key}".`, + ); + } + // Same fall-open rule as the validator: only enforce when metadata is + // available. Without registry knowledge we trust the warehouse to reject + // an incompatible grain at SQL execution time. + if (Object.keys(registration.knownTimeGrainsByDim).length > 0) { + const hasTimeDim = dimensions.some((d) => + isTimeTypedDim(registration, d), + ); + if (!hasTimeDim) { + throw new Error( + `Refusing to build SQL: timeGrain "${request.timeGrain}" set but no time-typed dimension is in 'dimensions'.`, + ); + } + } + } + + // Deterministic order so cache keys collapse semantically equivalent calls. + // Sort-before-hash composition is finalized in Phase 4; sorting the SELECT + // list here is the same idea applied to the SQL itself. + // Alias each measure to its plain name so result rows have keys matching + // the registered measure (`{ arr: 1234 }`) rather than the SQL-function + // serialization Databricks returns by default (`{ "measure(arr)": 1234 }`). 
+ // The measure name has already been validated against MEASURE_NAME_PATTERN + // and the registry's known measure list, so it's safe to interpolate. + const measureClauses = [...request.measures] + .sort() + .map((m) => `MEASURE(${m}) AS ${m}`); + + const dimensionClauses = [...dimensions] + .sort() + .map((d) => renderDimensionClause(registration, d, request.timeGrain)); + + const selectList = [...measureClauses, ...dimensionClauses].join(", "); + const groupByClause = dimensions.length > 0 ? " GROUP BY ALL" : ""; + + const limitClause = + typeof request.limit === "number" && request.limit > 0 + ? ` LIMIT ${Math.floor(request.limit)}` + : ""; + + // Filter translation. Every value is bound through `:f_` named params; + // every column identifier is gated by the registry-membership check above + // (recursively, via `renderFilter`). Empty filter or no filter → no WHERE. + const parameters: Record = {}; + let whereClause = ""; + if (request.filter !== undefined) { + const fragment = renderFilter(request.filter, registration, parameters, { + counter: 0, + depth: 0, + }); + if (fragment !== null && fragment.length > 0) { + whereClause = ` WHERE ${fragment}`; + } + } + + const statement = `SELECT ${selectList} FROM ${registration.source}${whereClause}${groupByClause}${limitClause}`; + return { statement, parameters }; +} + +/** + * Mutable counter / depth threaded through {@link renderFilter}. Fresh per + * `buildMetricSql` call, so two requests never share bind-var indexes. + */ +interface FilterRenderState { + counter: number; + depth: number; +} + +/** + * Recursively render a filter tree into a SQL fragment, pushing bind values + * into `params` keyed by `:f_` names. + * + * Returns `null` for an empty group (no WHERE clause needed). The caller's + * `buildMetricSql` only emits `WHERE` when this returns a non-null, + * non-empty fragment. 
Empty `and: []` and `or: []` groups collapse to null — + * matching SQL's vacuous-truth semantics for AND, and the validator-permitted + * "no predicates" shape. + * + * Defense-in-depth: even though the request body's filter has already been + * validated by the zod schema, every member name is re-checked against the + * registry here. If validation is ever bypassed, the SQL constructor still + * refuses to interpolate an unknown identifier. + */ +function renderFilter( + node: MetricFilter, + registration: MetricRegistration, + params: Record, + state: FilterRenderState, +): string | null { + if (node === null || typeof node !== "object") { + throw new Error( + "Refusing to build SQL: filter node must be an object Predicate or { and } / { or } group.", + ); + } + + if ("and" in node || "or" in node) { + const groupKey = "and" in node ? "and" : "or"; + if (state.depth + 1 > METRIC_FILTER_MAX_DEPTH) { + throw new Error( + `Refusing to build SQL: filter AND/OR nesting exceeds the maximum depth of ${METRIC_FILTER_MAX_DEPTH}.`, + ); + } + + const children = ( + node as { and?: ReadonlyArray } & { + or?: ReadonlyArray; + } + )[groupKey]; + + if (!Array.isArray(children) || children.length === 0) { + // Empty AND → vacuously true, render as no constraint (null). + // Empty OR → vacuously false. The validator rejects this case before + // reaching the SQL builder, but if it slips through, render `1 = 0` + // rather than dropping the predicate silently — defense in depth so a + // future validator bypass cannot turn `or: []` into "match everything". + if (groupKey === "or") { + return "1 = 0"; + } + return null; + } + + // Sort-before-hash discipline (Phase 3 incremental). Within a group, + // predicate leaves are stable-sorted by (member, operator) before + // contributing to the rendered fragment, so semantically equivalent calls + // produce the same SQL string and (downstream) the same cache key. 
+ const sortedChildren = sortFilterChildren(children); + + const fragments: string[] = []; + const childState: FilterRenderState = { + counter: state.counter, + depth: state.depth + 1, + }; + for (const child of sortedChildren) { + const rendered = renderFilter(child, registration, params, childState); + if (rendered != null && rendered.length > 0) { + fragments.push(rendered); + } + } + state.counter = childState.counter; + + if (fragments.length === 0) return null; + if (fragments.length === 1) return fragments[0]; + const joiner = groupKey === "and" ? " AND " : " OR "; + return `(${fragments.join(joiner)})`; + } + + // Leaf predicate — validate against the registry one more time, then render. + const predicate = node as MetricPredicate; + + if (!DIMENSION_NAME_PATTERN.test(predicate.member)) { + throw new Error( + `Refusing to build SQL: filter member "${predicate.member}" is not a valid identifier.`, + ); + } + if ( + registration.knownDimensions.length > 0 && + !registration.knownDimensions.includes(predicate.member) + ) { + throw new Error( + `Refusing to build SQL: unknown filter member "${predicate.member}" for metric "${registration.key}".`, + ); + } + if ( + !METRIC_FILTER_OPERATORS.includes( + predicate.operator as MetricFilterOperatorName, + ) + ) { + throw new Error( + `Refusing to build SQL: unknown filter operator "${predicate.operator}".`, + ); + } + + return renderPredicate(predicate, params, state); +} + +/** + * Stable-sort filter children inside an AND/OR group by `(member, operator)`. + * + * Predicates carry both fields and sort by their pair; nested groups sort + * after predicates and stay in their original relative order (a nested group + * is opaque from the outside — we cannot collapse it to a single key). This + * is the sort-before-hash invariant applied at the SQL-fragment level so + * downstream cache keys collapse semantically equivalent calls. 
+ */ +function sortFilterChildren( + children: ReadonlyArray, +): MetricFilter[] { + const indexed = children.map((child, idx) => { + let key: string; + let isPredicate: boolean; + if ( + child !== null && + typeof child === "object" && + !("and" in child) && + !("or" in child) + ) { + const p = child as MetricPredicate; + key = `${p.member}${p.operator}`; + isPredicate = true; + } else { + // Nested groups don't have a single (member, operator) — keep their + // original index so multiple nested groups within the same parent + // remain stable relative to each other. + key = ""; + isPredicate = false; + } + return { child, idx, key, isPredicate }; + }); + + indexed.sort((a, b) => { + if (a.isPredicate && !b.isPredicate) return -1; + if (!a.isPredicate && b.isPredicate) return 1; + if (a.isPredicate && b.isPredicate) { + if (a.key < b.key) return -1; + if (a.key > b.key) return 1; + } + return a.idx - b.idx; + }); + + return indexed.map((entry) => entry.child); +} + +/** + * Translate a single predicate into a SQL fragment. + * + * Every value flows through a freshly-allocated `:f_` named bind var. + * Nothing from the request body is ever interpolated as a literal — the + * fragment carries identifiers (registry-validated) and operators + * (whitelisted), then references the bind name for each value. + * + * `set` and `notSet` emit `IS NULL` / `IS NOT NULL` with no bind value. + * `in` and `notIn` emit `IN (:f_0, :f_1, ...)`. `contains` and `notContains` + * emit `LIKE :f_0` and pre-bind the value with `%` wrapping. + */ +function renderPredicate( + predicate: MetricPredicate, + params: Record, + state: FilterRenderState, +): string { + const col = predicate.member; + const op = predicate.operator; + const values = predicate.values ?? 
[]; + + switch (op) { + case "equals": + return `${col} = ${bindValue(values[0], params, state)}`; + case "notEquals": + return `${col} <> ${bindValue(values[0], params, state)}`; + case "gt": + return `${col} > ${bindValue(values[0], params, state)}`; + case "gte": + return `${col} >= ${bindValue(values[0], params, state)}`; + case "lt": + return `${col} < ${bindValue(values[0], params, state)}`; + case "lte": + return `${col} <= ${bindValue(values[0], params, state)}`; + case "in": { + const placeholders = values.map((v) => bindValue(v, params, state)); + return `${col} IN (${placeholders.join(", ")})`; + } + case "notIn": { + const placeholders = values.map((v) => bindValue(v, params, state)); + return `${col} NOT IN (${placeholders.join(", ")})`; + } + case "contains": { + const raw = values[0]; + if (typeof raw !== "string") { + throw new Error( + `Refusing to build SQL: filter operator "contains" requires a string value (got ${typeof raw}).`, + ); + } + return `${col} LIKE ${bindLikeValue(raw, params, state)}`; + } + case "notContains": { + const raw = values[0]; + if (typeof raw !== "string") { + throw new Error( + `Refusing to build SQL: filter operator "notContains" requires a string value (got ${typeof raw}).`, + ); + } + return `${col} NOT LIKE ${bindLikeValue(raw, params, state)}`; + } + case "set": + return `${col} IS NOT NULL`; + case "notSet": + return `${col} IS NULL`; + default: { + // Exhaustiveness — the operator union is closed; if this is reached + // the operator vocabulary widened without updating the switch. + const _exhaustive: never = op; + throw new Error( + `Refusing to build SQL: unhandled filter operator "${_exhaustive as string}".`, + ); + } + } +} + +/** + * Allocate a fresh `:f_` bind name for `value`, push the typed marker + * into `params`, and return the placeholder string. Bumps the counter. 
+ */ +function bindValue( + value: string | number | undefined, + params: Record, + state: FilterRenderState, +): string { + if (value === undefined) { + throw new Error( + "Refusing to build SQL: filter predicate is missing a required value.", + ); + } + const name = `f_${state.counter}`; + state.counter += 1; + if (typeof value === "number") { + params[name] = sqlHelpers.number(value); + } else if (typeof value === "string") { + params[name] = sqlHelpers.string(value); + } else { + throw new Error( + `Refusing to build SQL: filter value must be a string or number (got ${typeof value}).`, + ); + } + return `:${name}`; +} + +/** + * Like {@link bindValue}, but wraps the value in `%...%` for `LIKE` / + * `NOT LIKE`. SQL wildcards in the user-supplied string remain in the value + * (matching the documented "contains" semantics) — escape-on-receive could + * be added later as an opt-in if customers request strict-substring matching. + */ +function bindLikeValue( + value: string, + params: Record, + state: FilterRenderState, +): string { + const name = `f_${state.counter}`; + state.counter += 1; + params[name] = sqlHelpers.string(`%${value}%`); + return `:${name}`; +} + +/** + * Whether a dimension name is registered as time-typed (carries a non-empty + * `time_grain` attribute in the YAML). + */ +function isTimeTypedDim( + registration: MetricRegistration, + dim: string, +): boolean { + const grains = registration.knownTimeGrainsByDim[dim]; + return Array.isArray(grains) && grains.length > 0; +} + +/** + * Render a single SELECT-list clause for a dimension. + * + * Time-typed dimensions are wrapped in `date_trunc('', ) AS ` + * when `timeGrain` is set; non-time dimensions render as the bare column name. + * + * The grain literal is whitelisted by `collectAllowedGrains(registration)` and + * the column name has already passed the identifier-pattern guard above, so + * neither flows through user-controlled bytes. 
+ */ +function renderDimensionClause( + registration: MetricRegistration, + dim: string, + timeGrain: string | undefined, +): string { + if (timeGrain && isTimeTypedDim(registration, dim)) { + return `date_trunc('${timeGrain}', ${dim}) AS ${dim}`; + } + return dim; +} + +/** + * Compose the cache key — final Phase 4 form. + * + * Reserved namespace `metric:` separates metric-view caches from query + * caches. The key shape is `metric:{metric_key}:{argsHash}:{executorKey}`, + * where: + * - `metric_key` is the registry's stable map key (readable in debug logs). + * - `argsHash` is a deterministic serialization of the request body's + * canonical form. Order-insensitive components are sorted before they + * contribute to the hash so semantically equivalent calls collapse to the + * same cache entry. + * - `executorKey` is `"sp"` for SP-lane entries and a sha256 hash of the + * end-user's identity for OBO-lane entries. The raw identity is never + * placed in the cache key (privacy concern: cache stores log keys). + * + * Sort-before-hash applies to: + * - `measures`: lexicographic sort + * - `dimensions`: lexicographic sort + * - `filter`: predicates inside each AND/OR group are stable-sorted by + * `(member, operator)`; group kind (`and` vs `or`) is preserved by + * {@link canonicalizeFilter} + * + * The returned array is consumed by `CacheManager.generateKey` which + * concatenates and sha256-hashes the parts. The structure (one element per + * concern) makes the cache key inspectable in tests and debug logs without + * giving up determinism. + */ +export function composeMetricCacheKey(input: { + metricKey: string; + measures: string[]; + dimensions?: string[]; + timeGrain?: string; + filter?: MetricFilter; + format: string; + executorKey: string; + limit?: number; +}): string[] { + const sortedMeasures = [...input.measures].sort(); + const sortedDimensions = [...(input.dimensions ?? [])].sort(); + const filterFingerprint = + input.filter !== undefined ? 
canonicalizeFilter(input.filter) : "_"; + return [ + "metric", + input.metricKey, + input.format, + sortedMeasures.join(","), + sortedDimensions.join(","), + input.timeGrain ?? "_", + filterFingerprint, + typeof input.limit === "number" ? String(input.limit) : "_", + input.executorKey, + ]; +} + +/** + * Derive the cache executor key from a metric registration's lane and the + * caller's user identity. + * + * Returns `"sp"` for SP-lane entries (every caller shares the cache) and a + * sha256 hex digest of the user identity for OBO-lane entries (each user + * gets an isolated cache scope). + * + * The user identity is hashed — never stored verbatim — so the cache layer + * (which logs keys at debug level and persists them in any cache backend) + * never sees raw user emails or principal names. A stable, opaque token is + * what we need: same user → same key (so cache hits work), different users + * → different keys (so isolation holds), and reverse lookup is infeasible. + * + * For OBO requests without a resolvable identity (missing or whitespace- + * only `x-forwarded-user`), throw `AuthenticationError.missingUserId()` + * rather than falling back to a shared `"anonymous"` sentinel — distinct + * misconfigured callers would otherwise share the same hash and read each + * other's cached results. The route's existing try/catch wraps this call, + * so the throw lands on the canonical 401 envelope. + */ +export function deriveMetricExecutorKey(input: { + lane: MetricLane; + userIdentity?: string | null; +}): string { + if (input.lane === "sp") { + return "sp"; + } + // OBO lane — hash the user identity so the raw email/principal never + // reaches the cache layer. Missing/whitespace identity is treated as a + // hard auth failure: the alternative ("anonymous" sentinel) collides + // every misconfigured caller into a single cache scope, so user A's + // results could leak to user B. 
+ const identity = input.userIdentity?.trim(); + if (!identity || identity.length === 0) { + throw AuthenticationError.missingUserId(); + } + return createHash("sha256").update(identity).digest("hex"); +} + +/** + * Produce a deterministic string fingerprint of the filter tree. + * + * The fingerprint sorts predicates within each AND/OR group by + * `(member, operator)` and recursively canonicalizes nested groups. Values + * are included verbatim so cache entries differ when the filter targets + * different values (`region in [EMEA]` vs `region in [APAC]` — different + * keys; `equals A` vs `equals B` — different keys), while order-insensitive + * predicate lists collapse to the same key. + */ +function canonicalizeFilter(node: MetricFilter): string { + if (node === null || typeof node !== "object") { + return "_"; + } + + if ("and" in node || "or" in node) { + const groupKey = "and" in node ? "and" : "or"; + const children = ( + node as { and?: ReadonlyArray } & { + or?: ReadonlyArray; + } + )[groupKey]; + + if (!Array.isArray(children) || children.length === 0) { + return `${groupKey}()`; + } + + const sorted = sortFilterChildren(children); + const childFingerprints = sorted.map(canonicalizeFilter); + return `${groupKey}(${childFingerprints.join(",")})`; + } + + // Leaf predicate. Use JSON.stringify (not String) for the value segment so + // strings carrying the `|` separator cannot collide with split arrays — + // e.g. `["a", "b"]` and `["a|string:b"]` are now distinct fingerprints. + const p = node as MetricPredicate; + const valuesPart = p.values + ? 
p.values.map((v) => `${typeof v}:${JSON.stringify(v)}`).join("|") + : ""; + return `p(${p.member}/${p.operator}/${valuesPart})`; +} diff --git a/packages/appkit/src/plugins/analytics/tests/analytics.test.ts b/packages/appkit/src/plugins/analytics/tests/analytics.test.ts index ce351021e..cf2eb97b2 100644 --- a/packages/appkit/src/plugins/analytics/tests/analytics.test.ts +++ b/packages/appkit/src/plugins/analytics/tests/analytics.test.ts @@ -79,18 +79,23 @@ describe("Analytics Plugin", () => { }); describe("injectRoutes", () => { - test("should register single POST route for queries", () => { + test("should register POST routes for queries and metrics", () => { const plugin = new AnalyticsPlugin(config); const { router } = createMockRouter(); plugin.injectRoutes(router); - // Only 1 POST route - asUser is determined by .obo.sql file convention - expect(router.post).toHaveBeenCalledTimes(1); + // 2 POST routes: /query/:query_key (asUser via .obo.sql convention) + // and /metric/:key (asUser via metric.json lane). 
+ expect(router.post).toHaveBeenCalledTimes(2); expect(router.post).toHaveBeenCalledWith( "/query/:query_key", expect.any(Function), ); + expect(router.post).toHaveBeenCalledWith( + "/metric/:key", + expect.any(Function), + ); }); test("should register GET route for arrow results", () => { diff --git a/packages/appkit/src/plugins/analytics/tests/metric.test.ts b/packages/appkit/src/plugins/analytics/tests/metric.test.ts new file mode 100644 index 000000000..be861bd3a --- /dev/null +++ b/packages/appkit/src/plugins/analytics/tests/metric.test.ts @@ -0,0 +1,2604 @@ +import { + createMockRequest, + createMockResponse, + createMockRouter, + mockServiceContext, + setupDatabricksEnv, +} from "@tools/test-helpers"; +import { afterEach, beforeEach, describe, expect, test, vi } from "vitest"; +import { ServiceContext } from "../../../context/service-context"; +import { AnalyticsPlugin } from "../analytics"; +import { + buildMetricSql, + composeMetricCacheKey, + deriveMetricExecutorKey, + loadMetricRegistry, + makeMetricRequestSchema, + validateMetricRequest, +} from "../metric"; +import type { IAnalyticsConfig, MetricRegistration } from "../types"; + +// Mirror the analytics test cache mock so the interceptor chain wiring is +// real but storage is in-memory and synchronous. 
+const { mockCacheStore, mockCacheInstance } = vi.hoisted(() => { + const store = new Map(); + + const generateKey = (parts: unknown[], userKey: string): string => { + const { createHash } = require("node:crypto"); + const allParts = [userKey, ...parts]; + const serialized = JSON.stringify(allParts); + return createHash("sha256").update(serialized).digest("hex"); + }; + + const instance = { + get: vi.fn(), + set: vi.fn(), + delete: vi.fn(), + getOrExecute: vi.fn( + async (key: unknown[], fn: () => Promise<unknown>, userKey: string) => { + const cacheKey = generateKey(key, userKey); + if (store.has(cacheKey)) { + return store.get(cacheKey); + } + const result = await fn(); + store.set(cacheKey, result); + return result; + }, + ), + generateKey: vi.fn((parts: unknown[], userKey: string) => + generateKey(parts, userKey), + ), + }; + + return { mockCacheStore: store, mockCacheInstance: instance }; +}); + +vi.mock("../../../cache", () => ({ + CacheManager: { + getInstanceSync: vi.fn(() => mockCacheInstance), + }, +})); + +const REVENUE_REGISTRATION: MetricRegistration = { + key: "revenue", + source: "appkit_demo.public.revenue_metrics", + lane: "sp", + knownMeasures: ["arr", "mrr"], + knownDimensions: ["region", "segment", "created_at"], + knownTimeGrainsByDim: { + created_at: ["day", "month", "week"], + }, +}; + +/** + * Phase 3 fixture — adds a numeric dim (`deal_size`) and registered + * `knownDimensionTypes` so op⇄type compatibility tests can exercise both + * branches (range ops on numeric dim, string ops on string dim). 
+ */ +const REVENUE_PHASE3_REGISTRATION: MetricRegistration = { + ...REVENUE_REGISTRATION, + knownDimensions: ["region", "segment", "created_at", "deal_size"], + knownDimensionTypes: { + region: "STRING", + segment: "STRING", + created_at: "TIMESTAMP", + deal_size: "DOUBLE", + }, +}; + +describe("metric — pure helpers", () => { + describe("makeMetricRequestSchema / validateMetricRequest", () => { + test("accepts a request with a known measure", () => { + const parsed = validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + }); + expect(parsed.measures).toEqual(["arr"]); + expect(parsed.format).toBeUndefined(); + }); + + test("accepts format=ARROW (handled, even if hook discourages it)", () => { + const parsed = validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + format: "ARROW", + }); + expect(parsed.format).toBe("ARROW"); + }); + + test("rejects an unknown measure with a clear error", () => { + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["bogus"], + }), + ).toThrowError(/measures\.0/); + }); + + test("rejects an empty measures array", () => { + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: [], + }), + ).toThrowError(/fields:.*measures/); + }); + + test("rejects a non-positive limit", () => { + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + limit: -1, + }), + ).toThrowError(/fields:.*limit/); + }); + + test("rejects limit exceeding the cap (unbounded-request-parameters)", () => { + // Recurring pattern from prior reviews — caps prevent a hostile caller + // from passing absurdly large `limit` values that would force the + // warehouse to materialize unbounded result sets. 
+ expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + limit: 10_000_000, + }), + ).toThrowError(/fields:.*limit/); + }); + + test("rejects measures exceeding the cap", () => { + const tooMany = Array.from({ length: 100 }, () => "arr"); + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { measures: tooMany }), + ).toThrowError(/fields:.*measures/); + }); + + test("rejects a filter predicate with too many values (DoS guard)", () => { + const big = Array.from({ length: 2000 }, (_, i) => `v${i}`); + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + filter: { member: "region", operator: "in", values: big }, + }), + ).toThrowError(/fields:.*filter\.values/); + }); + + test("rejects unknown top-level fields (strict)", () => { + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + // 'someUnknownField' is not in the v1 contract and the strict() + // schema must reject it. (filter is now a Phase 3 field.) 
+ someUnknownField: 123, + } as any), + ).toThrowError(); + }); + + test("rejects filter passed as a bare array (not a Predicate or { and }/{or} group)", () => { + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + filter: [{ member: "region", operator: "in", values: ["EMEA"] }], + } as any), + ).toThrowError(); + }); + + test("falls open when knownMeasures is empty", () => { + const looseRegistration: MetricRegistration = { + ...REVENUE_REGISTRATION, + knownMeasures: [], + }; + const parsed = validateMetricRequest(looseRegistration, { + measures: ["anything"], + }); + expect(parsed.measures).toEqual(["anything"]); + }); + + test("schema construction is stable across calls", () => { + const a = makeMetricRequestSchema(REVENUE_REGISTRATION); + const b = makeMetricRequestSchema(REVENUE_REGISTRATION); + expect(a.safeParse({ measures: ["arr"] }).success).toBe(true); + expect(b.safeParse({ measures: ["arr"] }).success).toBe(true); + }); + + // ── Phase 2: dimensions ───────────────────────────────────────────── + test("accepts a request with known dimensions", () => { + const parsed = validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["region"], + }); + expect(parsed.dimensions).toEqual(["region"]); + }); + + test("accepts an empty dimensions array (ungrouped)", () => { + const parsed = validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: [], + }); + expect(parsed.dimensions).toEqual([]); + }); + + test("rejects an unknown dimension with a clear error", () => { + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["nonexistent"], + }), + ).toThrowError(/dimensions\.0/); + }); + + test("rejects non-empty `dimensions` when knownDimensions is empty (measure-only metric)", () => { + // Round-4 tightening: a measure-only metric registers + // `knownDimensions: []`, and any non-empty `dimensions` request + // must be rejected. 
The old fall-open behavior here was the + // round-4 security finding ("knownDimensions=[] fall-open" — empty + // registry let arbitrary dimension identifiers through to SQL). + const looseRegistration: MetricRegistration = { + ...REVENUE_REGISTRATION, + knownDimensions: [], + knownTimeGrainsByDim: {}, + }; + expect(() => + validateMetricRequest(looseRegistration, { + measures: ["arr"], + dimensions: ["any_column"], + }), + ).toThrowError(/fields:.*dimensions/); + }); + + test("accepts measure-only requests when knownDimensions is empty", () => { + // Complement of the above — a measure-only metric must still + // accept requests that simply omit dimensions and filter. + const looseRegistration: MetricRegistration = { + ...REVENUE_REGISTRATION, + knownDimensions: [], + knownTimeGrainsByDim: {}, + }; + const parsed = validateMetricRequest(looseRegistration, { + measures: ["arr"], + }); + expect(parsed.dimensions).toBeUndefined(); + }); + + // ── Phase 2: time grain ───────────────────────────────────────────── + test("accepts a known timeGrain when a time-typed dim is present", () => { + const parsed = validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "month", + }); + expect(parsed.timeGrain).toBe("month"); + }); + + test("rejects a timeGrain not in the metric's allowed enum", () => { + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "year", + }), + ).toThrowError(/fields:.*timeGrain/); + }); + + test("rejects timeGrain when no time-typed dim is in dimensions", () => { + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["region"], + timeGrain: "month", + }), + ).toThrowError(/fields:.*timeGrain/); + }); + + test("rejects timeGrain when dimensions is omitted entirely", () => { + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + timeGrain: "month", + 
}), + ).toThrowError(/fields:.*timeGrain/); + }); + + test("rejects timeGrain on a measure-only metric (cache-bypass guard)", () => { + // Round-5 finding: a measure-only metric has empty + // knownTimeGrainsByDim, so allowedGrains is empty. The previous + // schema fell open and accepted any timeGrain string. The SQL came + // out identical (no time-typed dim → no date_trunc), but + // composeMetricCacheKey salts the cache key with the raw token — + // an attacker could vary `month/week/foo_bar/...` to force + // unbounded cache misses + warehouse re-execution. + const measureOnlyRegistration: MetricRegistration = { + ...REVENUE_REGISTRATION, + knownDimensions: [], + knownTimeGrainsByDim: {}, + }; + expect(() => + validateMetricRequest(measureOnlyRegistration, { + measures: ["arr"], + timeGrain: "month", + }), + ).toThrowError(/fields:.*timeGrain/); + }); + + test("rejects timeGrain when metric has registered dims but none are time-typed", () => { + // Tighter validation: when the registry knows the metric's dims but + // none of them carry a time-grain set, `timeGrain` is meaningless on + // this metric. Earlier this case fell open (validator skipped on empty + // grainsByDim). + const noTimeRegistration: MetricRegistration = { + ...REVENUE_REGISTRATION, + knownDimensions: ["region", "segment"], + knownTimeGrainsByDim: {}, + }; + expect(() => + validateMetricRequest(noTimeRegistration, { + measures: ["arr"], + dimensions: ["region"], + timeGrain: "month", + }), + ).toThrowError(/fields:.*timeGrain/); + }); + + test("rejects timeGrain when none of the requested dims are time-typed (metadata available)", () => { + // Some dims are time-typed in the registry, but the request only + // includes a non-time dim. The validator must catch the mismatch. 
+ const partialRegistration: MetricRegistration = { + ...REVENUE_REGISTRATION, + knownDimensions: ["region", "segment", "created_at"], + knownTimeGrainsByDim: { created_at: ["day", "week", "month"] }, + }; + expect(() => + validateMetricRequest(partialRegistration, { + measures: ["arr"], + dimensions: ["region"], + timeGrain: "month", + }), + ).toThrowError(); + }); + + // Note: the previous "falls open on timeGrain when metadata is empty" + // test was deleted in round 4. Its premise depended on `dimensions` + // also falling open, which the round-4 validator tightening removed: + // a request with empty `knownDimensions` and a non-empty `dimensions` + // array now hits the rejection path before timeGrain is reached. + // Empty-metadata registrations are also blocked at the route's 503 + // fail-closed gate via `knownMeasures.length === 0`, so this code + // path is no longer reachable in practice. + }); + + describe("buildMetricSql", () => { + test("renders SELECT MEASURE() FROM ", () => { + const { statement, parameters } = buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + }); + expect(statement).toBe( + "SELECT MEASURE(arr) AS arr FROM appkit_demo.public.revenue_metrics", + ); + // No filter present → no bind params. 
+ expect(parameters).toEqual({}); + }); + + test("sorts measures lexicographically for deterministic SQL", () => { + const { statement } = buildMetricSql(REVENUE_REGISTRATION, { + measures: ["mrr", "arr"], + }); + expect(statement).toBe( + "SELECT MEASURE(arr) AS arr, MEASURE(mrr) AS mrr FROM appkit_demo.public.revenue_metrics", + ); + }); + + test("appends LIMIT clause when limit is provided", () => { + const { statement } = buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + limit: 10, + }); + expect(statement).toBe( + "SELECT MEASURE(arr) AS arr FROM appkit_demo.public.revenue_metrics LIMIT 10", + ); + }); + + test("rejects unknown measures (defense in depth past the validator)", () => { + expect(() => + buildMetricSql(REVENUE_REGISTRATION, { + measures: ["bogus"], + }), + ).toThrowError(/unknown measure/i); + }); + + test("rejects measures that are not valid identifiers", () => { + const looseRegistration: MetricRegistration = { + ...REVENUE_REGISTRATION, + knownMeasures: [], + }; + expect(() => + buildMetricSql(looseRegistration, { + measures: ["arr; DROP TABLE foo --"], + }), + ).toThrowError(/not a valid identifier/); + }); + + test("rejects FQNs that are not three-part", () => { + const badRegistration: MetricRegistration = { + ...REVENUE_REGISTRATION, + source: "some.bad", + knownMeasures: ["arr"], + }; + expect(() => + buildMetricSql(badRegistration, { measures: ["arr"] }), + ).toThrowError(/three-part UC FQN/); + }); + + test("rejects empty measures", () => { + expect(() => + buildMetricSql(REVENUE_REGISTRATION, { measures: [] }), + ).toThrowError(/at least one measure/); + }); + + // ── Phase 2: dimensions + GROUP BY ALL ────────────────────────────── + test("emits GROUP BY ALL when dimensions are present (snapshot — measures-only Phase 1 case)", () => { + const { statement } = buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + }); + expect(statement).toMatchInlineSnapshot( + `"SELECT MEASURE(arr) AS arr FROM 
appkit_demo.public.revenue_metrics"`, + ); + }); + + test("emits dimensions + GROUP BY ALL (snapshot — dims-only)", () => { + const { statement } = buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["region"], + }); + expect(statement).toMatchInlineSnapshot( + `"SELECT MEASURE(arr) AS arr, region FROM appkit_demo.public.revenue_metrics GROUP BY ALL"`, + ); + }); + + test("emits date_trunc for time-typed dim with timeGrain (snapshot — dims+time-grain)", () => { + const { statement } = buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["created_at", "region"], + timeGrain: "month", + }); + expect(statement).toMatchInlineSnapshot( + `"SELECT MEASURE(arr) AS arr, date_trunc('month', created_at) AS created_at, region FROM appkit_demo.public.revenue_metrics GROUP BY ALL"`, + ); + }); + + test("emits dims + time-grain + limit together (snapshot — dims+time-grain+limit)", () => { + const { statement } = buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr", "mrr"], + dimensions: ["created_at"], + timeGrain: "week", + limit: 50, + }); + expect(statement).toMatchInlineSnapshot( + `"SELECT MEASURE(arr) AS arr, MEASURE(mrr) AS mrr, date_trunc('week', created_at) AS created_at FROM appkit_demo.public.revenue_metrics GROUP BY ALL LIMIT 50"`, + ); + }); + + test("does not wrap regular (non-time) dims in date_trunc when timeGrain is set", () => { + const { statement } = buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["region", "created_at"], + timeGrain: "day", + }); + // Only created_at is wrapped; region renders as the bare column. 
+ expect(statement).toContain("date_trunc('day', created_at)"); + expect(statement).toContain(", region"); + expect(statement).not.toContain("date_trunc('day', region)"); + }); + + test("rejects unknown dimensions (defense in depth past the validator)", () => { + expect(() => + buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["nonexistent"], + }), + ).toThrowError(/unknown dimension/i); + }); + + test("rejects dimensions that are not valid identifiers", () => { + const looseRegistration: MetricRegistration = { + ...REVENUE_REGISTRATION, + knownDimensions: [], + knownTimeGrainsByDim: {}, + }; + expect(() => + buildMetricSql(looseRegistration, { + measures: ["arr"], + dimensions: ["region; DROP TABLE foo --"], + }), + ).toThrowError(/not a valid identifier/); + }); + + test("rejects unknown timeGrain values", () => { + expect(() => + buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "year", + }), + ).toThrowError(/unknown timeGrain/i); + }); + + test("rejects timeGrain when no time-typed dim is in dimensions", () => { + expect(() => + buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["region"], + timeGrain: "month", + }), + ).toThrowError(/no time-typed dimension/); + }); + + test("rejects timeGrain values that do not match the safe token shape", () => { + expect(() => + buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "Month' OR 1=1 --", + }), + ).toThrowError(/not a valid grain token/); + }); + + test("sorts dimensions lexicographically for deterministic SQL", () => { + const { statement } = buildMetricSql(REVENUE_REGISTRATION, { + measures: ["arr"], + dimensions: ["segment", "region"], + }); + // region comes before segment alphabetically. 
+ expect(statement).toBe( + "SELECT MEASURE(arr) AS arr, region, segment FROM appkit_demo.public.revenue_metrics GROUP BY ALL", + ); + }); + }); + + describe("composeMetricCacheKey", () => { + test("reserves the metric: namespace prefix", () => { + const key = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + }); + expect(key[0]).toBe("metric"); + }); + + test("normalizes measure order for cache hits across equivalent calls", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr", "mrr"], + format: "JSON", + executorKey: "sp", + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["mrr", "arr"], + format: "JSON", + executorKey: "sp", + }); + expect(a).toEqual(b); + }); + + test("differentiates SP vs OBO via executorKey", () => { + const sp = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + }); + const obo = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "user-1", + }); + expect(sp).not.toEqual(obo); + }); + + test("differentiates calls with different limit values", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + limit: 10, + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + limit: 100, + }); + expect(a).not.toEqual(b); + }); + + test("filter values containing the `|` separator do not collide across distinct shapes", () => { + // Regression: an earlier `String(v)` join used `|` as a separator, + // making `["a", "b"]` collapse with `["a|string:b"]`. The fingerprint + // must distinguish them so the SP cache cannot serve a different + // user's results to a caller with a colliding-shaped filter. 
+ const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + filter: { member: "region", operator: "in", values: ["a", "b"] }, + format: "JSON", + executorKey: "sp", + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + filter: { + member: "region", + operator: "in", + values: ["a|string:b"], + }, + format: "JSON", + executorKey: "sp", + }); + expect(a).not.toEqual(b); + }); + + // ── Phase 2: dimensions + timeGrain ───────────────────────────────── + test("normalizes dimension order for cache hits across equivalent calls", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: ["region", "segment"], + format: "JSON", + executorKey: "sp", + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: ["segment", "region"], + format: "JSON", + executorKey: "sp", + }); + expect(a).toEqual(b); + }); + + test("differentiates calls with different dimensions", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: ["region"], + format: "JSON", + executorKey: "sp", + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: ["segment"], + format: "JSON", + executorKey: "sp", + }); + expect(a).not.toEqual(b); + }); + + test("differentiates calls with different timeGrain values", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "day", + format: "JSON", + executorKey: "sp", + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "month", + format: "JSON", + executorKey: "sp", + }); + expect(a).not.toEqual(b); + }); + + test("differentiates a request with timeGrain from one without", () => { + const withGrain = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: 
["created_at"], + timeGrain: "day", + format: "JSON", + executorKey: "sp", + }); + const withoutGrain = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: ["created_at"], + format: "JSON", + executorKey: "sp", + }); + expect(withGrain).not.toEqual(withoutGrain); + }); + }); +}); + +describe("loadMetricRegistry", () => { + let tmpDir: string; + + beforeEach(async () => { + const fs = await import("node:fs/promises"); + const os = await import("node:os"); + const path = await import("node:path"); + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "appkit-metric-test-")); + }); + + afterEach(async () => { + const fs = await import("node:fs/promises"); + await fs.rm(tmpDir, { recursive: true, force: true }); + }); + + test("returns an empty object when metric.json is absent", async () => { + const registry = await loadMetricRegistry(undefined, tmpDir); + expect(registry).toEqual({}); + }); + + test("loads a basic metric.json with a single SP entry", async () => { + const fs = await import("node:fs/promises"); + const path = await import("node:path"); + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ + sp: { revenue: { source: "demo.public.revenue" } }, + }), + ); + const registry = await loadMetricRegistry(undefined, tmpDir); + expect(registry.revenue).toEqual({ + key: "revenue", + source: "demo.public.revenue", + lane: "sp", + knownMeasures: [], + knownDimensions: [], + knownTimeGrainsByDim: {}, + }); + }); + + test("merges build-time metadata into knownMeasures/knownDimensions", async () => { + const fs = await import("node:fs/promises"); + const path = await import("node:path"); + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ + sp: { revenue: { source: "demo.public.revenue" } }, + }), + ); + const registry = await loadMetricRegistry( + { revenue: { measures: ["arr"], dimensions: ["region"] } }, + tmpDir, + ); + expect(registry.revenue.knownMeasures).toEqual(["arr"]); + 
expect(registry.revenue.knownDimensions).toEqual(["region"]); + }); + + test("merges build-time time-grain metadata into knownTimeGrainsByDim", async () => { + const fs = await import("node:fs/promises"); + const path = await import("node:path"); + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ + sp: { revenue: { source: "demo.public.revenue" } }, + }), + ); + const registry = await loadMetricRegistry( + { + revenue: { + measures: ["arr"], + dimensions: ["region", "created_at"], + timeGrainsByDim: { created_at: ["day", "month"] }, + }, + }, + tmpDir, + ); + expect(registry.revenue.knownTimeGrainsByDim).toEqual({ + created_at: ["day", "month"], + }); + }); + + test("rejects unknown fields on entries (strict)", async () => { + const fs = await import("node:fs/promises"); + const path = await import("node:path"); + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ + sp: { + revenue: { + source: "demo.public.revenue", + cacheTtl: 60, // not allowed at v1 + }, + }, + }), + ); + await expect(loadMetricRegistry(undefined, tmpDir)).rejects.toThrowError( + /Invalid metric.json/, + ); + }); + + test("rejects bad FQN format", async () => { + const fs = await import("node:fs/promises"); + const path = await import("node:path"); + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ + sp: { revenue: { source: "not.a.three.part" } }, + }), + ); + await expect(loadMetricRegistry(undefined, tmpDir)).rejects.toThrowError( + /three-part UC FQN/, + ); + }); + + test("rejects duplicate keys across sp/obo lanes", async () => { + const fs = await import("node:fs/promises"); + const path = await import("node:path"); + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ + sp: { revenue: { source: "demo.public.revenue" } }, + obo: { revenue: { source: "demo.public.revenue" } }, + }), + ); + await expect(loadMetricRegistry(undefined, tmpDir)).rejects.toThrowError( + /Duplicate metric key/, + ); 
+ }); + + test("rejects an obo entry with the same key as sp", async () => { + const fs = await import("node:fs/promises"); + const path = await import("node:path"); + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ + sp: { foo: { source: "demo.public.foo" } }, + obo: { foo: { source: "demo.public.foo" } }, + }), + ); + await expect(loadMetricRegistry(undefined, tmpDir)).rejects.toThrow(); + }); + + test("rejects malformed JSON", async () => { + const fs = await import("node:fs/promises"); + const path = await import("node:path"); + await fs.writeFile(path.join(tmpDir, "metric.json"), "{not json"); + await expect(loadMetricRegistry(undefined, tmpDir)).rejects.toThrowError( + /parse metric.json/, + ); + }); + + test("rejects metric keys that start with a digit", async () => { + const fs = await import("node:fs/promises"); + const path = await import("node:path"); + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ + sp: { "1revenue": { source: "demo.public.revenue" } }, + }), + ); + await expect(loadMetricRegistry(undefined, tmpDir)).rejects.toThrow(); + }); +}); + +describe("AnalyticsPlugin — metric route handler", () => { + let config: IAnalyticsConfig; + let serviceContextMock: Awaited<ReturnType<typeof mockServiceContext>>; + + beforeEach(async () => { + config = { timeout: 5000 }; + setupDatabricksEnv(); + mockCacheStore.clear(); + ServiceContext.reset(); + serviceContextMock = await mockServiceContext(); + }); + + afterEach(() => { + serviceContextMock?.restore(); + }); + + test("returns 404 for an unregistered metric key", async () => { + const plugin = new AnalyticsPlugin(config); + const { router, getHandler } = createMockRouter(); + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "ghost" }, + body: { measures: ["arr"] }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + 
expect(mockRes.status).toHaveBeenCalledWith(404); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + expect(errorPayload.error).toBe("Metric not found"); + // Defense-in-depth: the public 404 must not echo the user-supplied key + // back. Confirming "metric X is not registered" lets unauthenticated + // probes enumerate registered keys by elimination. + expect(errorPayload.error).not.toMatch(/ghost/); + }); + + test("returns 503 when setup() failed to load metric.json (no silent 404)", async () => { + // Recurring review pattern: a malformed metric.json silently turned + // every metric request into 404 "Metric not found" because setup() + // swallowed the error and reset the registry to {}. That hid the + // deployment-config error from operators. Now a startup-time load + // failure is latched and surfaced on the route as 503 with a + // distinct code so deployment pipelines + the route reflect the + // broken state. The full reason stays in telemetry only. + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({}); + plugin._setMetricRegistryLoadErrorForTesting( + "Invalid metric.json at /path: sp.0.source must be a three-part FQN", + ); + const { router, getHandler } = createMockRouter(); + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["arr"] }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(mockRes.status).toHaveBeenCalledWith(503); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + expect(errorPayload.code).toBe("METRIC_REGISTRY_LOAD_FAILED"); + // Public message must not echo the parser failure (which would + // include filesystem paths). The detail is logged via setContext. 
+ expect(errorPayload.error).toBe("Metric registry not available"); + expect(errorPayload.error).not.toContain("metric.json"); + }); + + test("returns 503 when the registered metric has no build-time metadata (fail-closed)", async () => { + // Defense-in-depth: when `metrics.metadata.json` is missing or didn't + // populate measures for this metric, the validator falls open and the + // SQL constructor would let arbitrary identifiers through to the + // warehouse — a schema-enumeration vector. Refuse the request. + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: { + ...REVENUE_REGISTRATION, + knownMeasures: [], + knownDimensions: [], + }, + }); + const { router, getHandler } = createMockRouter(); + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["arr"] }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(mockRes.status).toHaveBeenCalledWith(503); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + expect(errorPayload.code).toBe("METRIC_REGISTRY_NOT_READY"); + // Generic message — does not name the metric or the build-time tooling. + expect(errorPayload.error).toBe("Metric registry not initialized"); + }); + + test("accepts a measure-only request when knownDimensions is empty (KPI metric)", async () => { + // Measure-only metric views are a legitimate shape — the public + // contract declares `dimensions?: string[]`. The route must not 503 + // a request that omits dimensions just because the registry has none. 
+ const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: { + ...REVENUE_REGISTRATION, + knownMeasures: ["arr"], + knownDimensions: [], + }, + }); + (plugin as any).SQLClient.executeStatement = vi + .fn() + .mockResolvedValue({ result: { data: [{ arr: 1234 }] } }); + const { router, getHandler } = createMockRouter(); + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["arr"] }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + expect(mockRes.status).not.toHaveBeenCalledWith(503); + }); + + test("rejects a non-empty `dimensions` request against a measure-only metric", async () => { + // Closes the round-3 fall-open path: when knownDimensions is empty, + // the validator must reject any non-empty `dimensions` entry — those + // identifiers would otherwise flow into the SELECT/GROUP BY clauses + // of a metric view that has no dimensions registered. 
+ const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: { + ...REVENUE_REGISTRATION, + knownMeasures: ["arr"], + knownDimensions: [], + }, + }); + const { router, getHandler } = createMockRouter(); + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["arr"], dimensions: ["secret_col"] }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + expect(mockRes.status).toHaveBeenCalledWith(400); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + expect(errorPayload.code).toBe("VALIDATION_ERROR"); + expect(errorPayload.error).toMatch(/fields:.*dimensions/); + }); + + test("rejects a `filter` request against a measure-only metric", async () => { + // Same fall-open closure as the dimensions case: filter members + // would otherwise interpolate into the WHERE clause for a metric + // that has no dimensions to filter on. 
+ const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: { + ...REVENUE_REGISTRATION, + knownMeasures: ["arr"], + knownDimensions: [], + }, + }); + const { router, getHandler } = createMockRouter(); + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { + measures: ["arr"], + filter: { + member: "secret_col", + operator: "equals", + values: ["x"], + }, + }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + expect(mockRes.status).toHaveBeenCalledWith(400); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + expect(errorPayload.code).toBe("VALIDATION_ERROR"); + expect(errorPayload.error).toMatch(/fields:.*filter/); + }); + + test("returns 400 with the canonical error shape on validator failure", async () => { + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["bogus"] }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(mockRes.status).toHaveBeenCalledWith(400); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + expect(errorPayload.error).toMatch(/Invalid metric request body/); + expect(errorPayload.code).toBe("VALIDATION_ERROR"); + }); + + test("returns 400 when measures array is missing", async () => { + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { 
key: "revenue" }, + body: {}, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(mockRes.status).toHaveBeenCalledWith(400); + }); + + test("executes a valid SP metric request and streams a result event", async () => { + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + const executeMock = vi.fn().mockResolvedValue({ + result: { data: [{ arr: 1234567 }] }, + }); + (plugin as any).SQLClient.executeStatement = executeMock; + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["arr"] }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + // Verify the constructed SQL hit the warehouse connector. + expect(executeMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ + statement: + "SELECT MEASURE(arr) AS arr FROM appkit_demo.public.revenue_metrics", + warehouse_id: "test-warehouse-id", + }), + expect.any(AbortSignal), + ); + + expect(mockRes.setHeader).toHaveBeenCalledWith( + "Content-Type", + expect.stringContaining("text/event-stream"), + ); + expect(mockRes.write).toHaveBeenCalledWith("event: result\n"); + expect(mockRes.write).toHaveBeenCalledWith( + expect.stringContaining('"arr":1234567'), + ); + expect(mockRes.end).toHaveBeenCalled(); + }); + + test("hits the cache on the second identical SP request", async () => { + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + const executeMock = vi.fn().mockResolvedValue({ + result: { data: [{ arr: 1234567 }] }, + }); + (plugin as any).SQLClient.executeStatement = executeMock; + + plugin.injectRoutes(router); + + const handler = getHandler("POST", 
"/metric/:key"); + + const mockReq1 = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["arr"] }, + }); + const mockReq2 = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["arr"] }, + }); + + await handler(mockReq1, createMockResponse()); + await handler(mockReq2, createMockResponse()); + + expect(executeMock).toHaveBeenCalledTimes(1); + }); + + // ── Phase 2: dimensions + time grain via the full route ─────────────── + test("constructs GROUP BY ALL SQL when dimensions are requested", async () => { + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + const executeMock = vi.fn().mockResolvedValue({ + result: { data: [{ arr: 1, region: "EMEA" }] }, + }); + (plugin as any).SQLClient.executeStatement = executeMock; + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["arr"], dimensions: ["region"] }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(executeMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ + statement: + "SELECT MEASURE(arr) AS arr, region FROM appkit_demo.public.revenue_metrics GROUP BY ALL", + }), + expect.any(AbortSignal), + ); + }); + + test("constructs date_trunc SQL when timeGrain is set on a time-typed dim", async () => { + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + const executeMock = vi.fn().mockResolvedValue({ + result: { data: [{ arr: 1, created_at: "2026-01-01" }] }, + }); + (plugin as any).SQLClient.executeStatement = executeMock; + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = 
createMockRequest({ + params: { key: "revenue" }, + body: { + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "month", + }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(executeMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ + statement: + "SELECT MEASURE(arr) AS arr, date_trunc('month', created_at) AS created_at FROM appkit_demo.public.revenue_metrics GROUP BY ALL", + }), + expect.any(AbortSignal), + ); + }); + + test("returns 400 when timeGrain is requested but no time-typed dim is grouped", async () => { + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { + measures: ["arr"], + dimensions: ["region"], + timeGrain: "month", + }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(mockRes.status).toHaveBeenCalledWith(400); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + expect(errorPayload.error).toMatch(/fields:.*timeGrain/); + expect(errorPayload.code).toBe("VALIDATION_ERROR"); + // Defense-in-depth: the public 400 must not enumerate the registered + // schema (allowed grain enum, dim allowlist, etc.) — only the field path. 
+ expect(errorPayload.error).not.toMatch(/must be one of|no time-typed/); + }); + + test("returns 400 when an unknown dimension is requested", async () => { + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { measures: ["arr"], dimensions: ["nonexistent"] }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(mockRes.status).toHaveBeenCalledWith(400); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + expect(errorPayload.code).toBe("VALIDATION_ERROR"); + }); + + test("returns 400 when an unknown timeGrain is requested", async () => { + const plugin = new AnalyticsPlugin(config); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { + measures: ["arr"], + dimensions: ["created_at"], + timeGrain: "year", + }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(mockRes.status).toHaveBeenCalledWith(400); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + expect(errorPayload.code).toBe("VALIDATION_ERROR"); + }); +}); + +// ============================================================================ +// Phase 3 — Filter spec (recursive AND/OR with 12 v1 operators) +// ============================================================================ + +describe("metric — filter translator", () => { + // Helper: render filter via buildMetricSql and return WHERE fragment + params. 
+ function render( + filter: any, + registration: MetricRegistration = REVENUE_PHASE3_REGISTRATION, + ) { + const { statement, parameters } = buildMetricSql(registration, { + measures: ["arr"], + filter, + }); + // Pull just the WHERE portion (between ` WHERE ` and ` GROUP BY` / ` LIMIT` / end). + const match = statement.match(/ WHERE (.+?)( GROUP BY| LIMIT|$)/); + const where = match ? match[1] : null; + return { statement, where, parameters }; + } + + describe("operators (12 unit tests)", () => { + test("equals → ` = :f_0`", () => { + const { where, parameters } = render({ + member: "region", + operator: "equals", + values: ["EMEA"], + }); + expect(where).toBe("region = :f_0"); + expect(parameters).toEqual({ + f_0: { __sql_type: "STRING", value: "EMEA" }, + }); + }); + + test("notEquals → ` <> :f_0`", () => { + const { where, parameters } = render({ + member: "region", + operator: "notEquals", + values: ["EMEA"], + }); + expect(where).toBe("region <> :f_0"); + expect(parameters.f_0).toEqual({ __sql_type: "STRING", value: "EMEA" }); + }); + + test("in → ` IN (:f_0, :f_1, ...)`", () => { + const { where, parameters } = render({ + member: "region", + operator: "in", + values: ["EMEA", "APAC", "AMER"], + }); + expect(where).toBe("region IN (:f_0, :f_1, :f_2)"); + expect(parameters.f_0).toEqual({ __sql_type: "STRING", value: "EMEA" }); + expect(parameters.f_1).toEqual({ __sql_type: "STRING", value: "APAC" }); + expect(parameters.f_2).toEqual({ __sql_type: "STRING", value: "AMER" }); + }); + + test("notIn → ` NOT IN (:f_0, :f_1, ...)`", () => { + const { where, parameters } = render({ + member: "region", + operator: "notIn", + values: ["EMEA", "APAC"], + }); + expect(where).toBe("region NOT IN (:f_0, :f_1)"); + expect(Object.keys(parameters)).toHaveLength(2); + }); + + test("gt → ` > :f_0`", () => { + const { where, parameters } = render({ + member: "deal_size", + operator: "gt", + values: [10000], + }); + expect(where).toBe("deal_size > :f_0"); + 
expect(parameters.f_0).toEqual({ __sql_type: "NUMERIC", value: "10000" }); + }); + + test("gte → ` >= :f_0`", () => { + const { where } = render({ + member: "deal_size", + operator: "gte", + values: [5000], + }); + expect(where).toBe("deal_size >= :f_0"); + }); + + test("lt → ` < :f_0`", () => { + const { where } = render({ + member: "deal_size", + operator: "lt", + values: [100], + }); + expect(where).toBe("deal_size < :f_0"); + }); + + test("lte → ` <= :f_0`", () => { + const { where } = render({ + member: "deal_size", + operator: "lte", + values: [50000], + }); + expect(where).toBe("deal_size <= :f_0"); + }); + + test("contains → ` LIKE :f_0` (value wrapped in %...%)", () => { + const { where, parameters } = render({ + member: "region", + operator: "contains", + values: ["MEA"], + }); + expect(where).toBe("region LIKE :f_0"); + expect(parameters.f_0).toEqual({ __sql_type: "STRING", value: "%MEA%" }); + }); + + test("notContains → ` NOT LIKE :f_0`", () => { + const { where, parameters } = render({ + member: "region", + operator: "notContains", + values: ["test"], + }); + expect(where).toBe("region NOT LIKE :f_0"); + expect(parameters.f_0).toEqual({ + __sql_type: "STRING", + value: "%test%", + }); + }); + + test("set → ` IS NOT NULL` (no bind)", () => { + const { where, parameters } = render({ + member: "region", + operator: "set", + }); + expect(where).toBe("region IS NOT NULL"); + expect(parameters).toEqual({}); + }); + + test("notSet → ` IS NULL` (no bind)", () => { + const { where, parameters } = render({ + member: "region", + operator: "notSet", + }); + expect(where).toBe("region IS NULL"); + expect(parameters).toEqual({}); + }); + }); + + describe("AND/OR composition", () => { + test("flat AND group renders predicates joined by AND", () => { + const { where, parameters } = render({ + and: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { member: "segment", operator: "equals", values: ["Enterprise"] }, + ], + }); + 
expect(where).toBe("(region = :f_0 AND segment = :f_1)"); + expect(parameters.f_0.value).toBe("EMEA"); + expect(parameters.f_1.value).toBe("Enterprise"); + }); + + test("flat OR group renders predicates joined by OR", () => { + const { where } = render({ + or: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { member: "region", operator: "equals", values: ["APAC"] }, + ], + }); + // Sort-before-hash: same member+operator pair sorts stably; both are + // (region, equals). The OR fragment renders both predicates. + expect(where).toBe("(region = :f_0 OR region = :f_1)"); + }); + + test("AND-of-OR composes nested groups", () => { + const { where } = render({ + and: [ + { member: "region", operator: "in", values: ["EMEA", "APAC"] }, + { + or: [ + { member: "segment", operator: "equals", values: ["Enterprise"] }, + { member: "deal_size", operator: "gt", values: [50000] }, + ], + }, + ], + }); + expect(where).toContain("(region IN ("); + expect(where).toContain(" AND "); + expect(where).toContain("OR"); + }); + + test("OR-of-AND composes nested groups", () => { + const { where } = render({ + or: [ + { + and: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { member: "segment", operator: "equals", values: ["Enterprise"] }, + ], + }, + { member: "region", operator: "equals", values: ["APAC"] }, + ], + }); + // Outer is OR of (AND group, leaf predicate). + expect(where).toMatch(/^\(.+ OR .+\)$/); + expect(where).toContain(" AND "); + }); + + test("deeply nested mix of AND/OR (4 levels)", () => { + const { where, parameters } = render({ + and: [ + { + or: [ + { + and: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { + or: [ + { + member: "segment", + operator: "equals", + values: ["Enterprise"], + }, + ], + }, + ], + }, + ], + }, + ], + }); + // Single-leaf groups collapse; multi-leaf groups parenthesize. + expect(where).toBeTruthy(); + // All values are bound. 
+ expect(parameters.f_0.value).toBe("EMEA"); + expect(parameters.f_1.value).toBe("Enterprise"); + }); + + test("empty AND/OR group emits no WHERE clause", () => { + const { statement, parameters } = buildMetricSql( + REVENUE_PHASE3_REGISTRATION, + { + measures: ["arr"], + filter: { and: [] }, + }, + ); + expect(statement).not.toContain("WHERE"); + expect(parameters).toEqual({}); + }); + }); + + describe("depth cap", () => { + test("rejects 9 levels of AND nesting (validator)", () => { + // Build 9-deep AND nesting: { and: [ { and: [ ... { equals } ] } ] } + let node: any = { + member: "region", + operator: "equals", + values: ["EMEA"], + }; + for (let i = 0; i < 9; i += 1) { + node = { and: [node] }; + } + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: node, + }), + ).toThrowError(/fields:.*filter/); + }); + + test("accepts exactly 8 levels of AND nesting (validator)", () => { + let node: any = { + member: "region", + operator: "equals", + values: ["EMEA"], + }; + for (let i = 0; i < 8; i += 1) { + node = { and: [node] }; + } + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: node, + }), + ).not.toThrow(); + }); + }); + + describe("parameterization safety (no values in rendered SQL)", () => { + test("string values do not appear verbatim in the SQL string", () => { + const sneaky = "EMEA' OR '1'='1"; + const { statement } = render({ + member: "region", + operator: "equals", + values: [sneaky], + }); + expect(statement).not.toContain(sneaky); + expect(statement).toContain(":f_0"); + }); + + test("numeric values do not appear verbatim in the SQL string", () => { + const { statement } = render({ + member: "deal_size", + operator: "gt", + values: [987654321], + }); + expect(statement).not.toContain("987654321"); + expect(statement).toContain(":f_0"); + }); + + test("LIKE wildcard is the only value transformation; the original string is not in SQL", () => { + const { 
statement } = render({ + member: "region", + operator: "contains", + values: ["dangerous%"], + }); + expect(statement).not.toContain("dangerous"); + expect(statement).toContain(":f_0"); + }); + + test("IN values are individually bound (not concatenated)", () => { + const { statement, parameters } = render({ + member: "region", + operator: "in", + values: ["A", "B", "C"], + }); + // No raw value appears in the SQL string. + expect(statement).not.toMatch(/region IN \([^:]/); + expect(Object.keys(parameters)).toHaveLength(3); + }); + + test("identifier names in SQL come from the registry, not the request", () => { + // Even if a hostile member somehow bypasses validation, the SQL + // constructor's identifier guard rejects it before SQL emission. + expect(() => + render({ + member: "region; DROP TABLE foo --", + operator: "equals", + values: ["x"], + }), + ).toThrowError(/not a valid identifier|unknown filter member/); + }); + }); + + describe("validator rejection cases", () => { + test("rejects an unknown member", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "ghost", + operator: "equals", + values: ["x"], + }, + }), + ).toThrowError(/fields:.*filter\.member/); + }); + + test("rejects an unknown operator", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "startsWith" as any, + values: ["E"], + }, + }), + ).toThrowError(/fields:.*filter\.operator/); + }); + + test("rejects gt on a string-typed dimension (op⇄type)", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "gt", + values: ["EMEA"], + }, + }), + ).toThrowError(/fields:.*filter\.operator/); + }); + + test("rejects contains on a numeric-typed dimension (op⇄type)", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: 
["arr"], + filter: { + member: "deal_size", + operator: "contains", + values: ["1000"], + }, + }), + ).toThrowError(/fields:.*filter\.operator/); + }); + + test("rejects contains on a date-typed dimension (op⇄type)", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "created_at", + operator: "contains", + values: ["2026"], + }, + }), + ).toThrowError(/fields:.*filter\.operator/); + }); + + test("accepts gt on a date-typed dimension", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "created_at", + operator: "gt", + values: ["2026-01-01"], + }, + }), + ).not.toThrow(); + }); + + test("rejects equals with zero values (cardinality)", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "equals", + values: [], + }, + }), + ).toThrowError(/fields:.*filter\.values/); + }); + + test("rejects equals with multiple values (cardinality)", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "equals", + values: ["A", "B"], + }, + }), + ).toThrowError(/fields:.*filter\.values/); + }); + + test("rejects in with empty values (cardinality)", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "in", + values: [], + }, + }), + ).toThrowError(/fields:.*filter\.values/); + }); + + test("rejects set with values (cardinality — must be absent)", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "set", + values: ["EMEA"], + }, + }), + ).toThrowError(/fields:.*filter\.values/); + }); + + test("accepts set with no values", () => { + expect(() => + 
validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "set", + }, + }), + ).not.toThrow(); + }); + + test("accepts notSet with empty values array", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "notSet", + values: [], + }, + }), + ).not.toThrow(); + }); + + test("falls open on op⇄type when registry has no type metadata", () => { + // Without knownDimensionTypes, the validator cannot enforce op⇄type + // and accepts any op on any registered dim (defense-in-depth — the + // SQL constructor still enforces identifier shape and registry + // membership). + expect(() => + validateMetricRequest(REVENUE_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "gt", + values: ["EMEA"], + }, + }), + ).not.toThrow(); + }); + + test("rejects deep `or` even when paired with empty `and` (else-if bypass guard)", () => { + // Regression for the round-4 finding: the previous pre-check used + // `if (and) ... else if (or) ...` and walked only one branch. A + // payload of `{ and: [], or: }` slid past the + // empty-`and` walk and Zod's union recursion then stack-overflowed + // on the `or` branch. The pre-check now inspects BOTH keys. + let deepOr: any = { + member: "region", + operator: "equals", + values: ["EMEA"], + }; + for (let i = 0; i < 10_000; i += 1) { + deepOr = { or: [deepOr] }; + } + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { and: [], or: [deepOr] } as any, + }), + ).toThrowError(/fields:.*filter/); + }); + + test("rejects breadth-DoS: a single group with too many children", () => { + // Without the breadth cap, `{ and: [...100k empty nodes...] }` would + // push 100k frames onto the iterative pre-check's stack — eventual + // OOM. Cap the per-group child count at the validation boundary. 
+ const wide = Array.from({ length: 1000 }, () => ({ + member: "region", + operator: "equals" as const, + values: ["EMEA"], + })); + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { and: wide }, + }), + ).toThrowError(/fields:.*filter/); + }); + + test("rejects pathologically deep filter without stack-overflow (pre-parse cap)", () => { + // Without the iterative pre-parse depth check, Zod's recursive parse + // walks the union/object tree on the call stack BEFORE the validator's + // own depth cap fires inside `superRefine`. A 10000-deep payload would + // stack-overflow the Node process. The pre-walk caps it iteratively. + let node: any = { + member: "region", + operator: "equals", + values: ["EMEA"], + }; + for (let i = 0; i < 10_000; i += 1) { + node = { and: [node] }; + } + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: node, + }), + ).toThrowError(/fields:.*filter/); + }); + + test("rejects empty `or` group (empty disjunction is vacuously false)", () => { + // Empty AND is vacuously true (no constraint). Empty OR would be + // vacuously false — silently dropping the predicate. Force the caller + // to omit the predicate entirely so intent stays explicit. + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { or: [] }, + }), + ).toThrowError(/fields:.*filter\.or/); + }); + + test("accepts empty `and` group (no constraint contributed)", () => { + // Empty AND is the validator's "do not contribute" shape — accepted. 
+ expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { and: [] }, + }), + ).not.toThrow(); + }); + + test("rejects `contains` with a non-string value", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "region", + operator: "contains", + values: [42 as unknown as string], + }, + }), + ).toThrowError(/fields:.*filter\.values/); + }); + + test("rejects range op with a non-numeric value on a numeric dim", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + member: "deal_size", + operator: "gt", + values: ["large" as unknown as number], + }, + }), + ).toThrowError(/fields:.*filter\.values/); + }); + + test("rejects member at depth — nested filter with unknown member", () => { + expect(() => + validateMetricRequest(REVENUE_PHASE3_REGISTRATION, { + measures: ["arr"], + filter: { + and: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { member: "ghost", operator: "equals", values: ["X"] }, + ], + }, + }), + ).toThrowError(/fields:.*filter\.and\.1\.member/); + }); + }); + + describe("sort-before-hash (predicate ordering inside groups)", () => { + test("predicate order does not affect the rendered SQL within an AND group", () => { + const a = render({ + and: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { member: "segment", operator: "equals", values: ["Ent"] }, + ], + }); + const b = render({ + and: [ + { member: "segment", operator: "equals", values: ["Ent"] }, + { member: "region", operator: "equals", values: ["EMEA"] }, + ], + }); + // The bind-var indices may differ but the textual fragment shape + // sorts predicates by (member, operator), so both calls render the + // same WHERE clause. 
+ expect(a.where).toBe(b.where); + }); + + test("predicate order does not affect cache key", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + filter: { + and: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { member: "segment", operator: "equals", values: ["Ent"] }, + ], + }, + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + filter: { + and: [ + { member: "segment", operator: "equals", values: ["Ent"] }, + { member: "region", operator: "equals", values: ["EMEA"] }, + ], + }, + }); + expect(a).toEqual(b); + }); + + test("differentiates filtered vs unfiltered cache keys", () => { + const filtered = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + filter: { + member: "region", + operator: "equals", + values: ["EMEA"], + }, + }); + const unfiltered = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + }); + expect(filtered).not.toEqual(unfiltered); + }); + + test("differentiates filters with different values", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + filter: { + member: "region", + operator: "equals", + values: ["EMEA"], + }, + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + filter: { + member: "region", + operator: "equals", + values: ["APAC"], + }, + }); + expect(a).not.toEqual(b); + }); + }); + + describe("route handler — filter integration", () => { + let serviceContextMock: Awaited>; + beforeEach(async () => { + setupDatabricksEnv(); + mockCacheStore.clear(); + ServiceContext.reset(); + serviceContextMock = await mockServiceContext(); + }); + afterEach(() => { + serviceContextMock?.restore(); + }); + + 
test("constructs WHERE clause from a structured filter", async () => { + const plugin = new AnalyticsPlugin({ timeout: 5000 }); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_PHASE3_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + const executeMock = vi.fn().mockResolvedValue({ + result: { data: [{ arr: 1234567 }] }, + }); + (plugin as any).SQLClient.executeStatement = executeMock; + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { + measures: ["arr"], + filter: { + member: "region", + operator: "in", + values: ["EMEA", "APAC"], + }, + }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(executeMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ + statement: expect.stringContaining("WHERE region IN (:f_0, :f_1)"), + parameters: expect.arrayContaining([ + expect.objectContaining({ + name: "f_0", + value: "EMEA", + type: "STRING", + }), + expect.objectContaining({ + name: "f_1", + value: "APAC", + type: "STRING", + }), + ]), + }), + expect.any(AbortSignal), + ); + }); + + test("returns 400 with the canonical error shape on filter rejection", async () => { + const plugin = new AnalyticsPlugin({ timeout: 5000 }); + plugin._setMetricRegistryForTesting({ + revenue: REVENUE_PHASE3_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + const mockReq = createMockRequest({ + params: { key: "revenue" }, + body: { + measures: ["arr"], + filter: { + member: "ghost", + operator: "equals", + values: ["X"], + }, + }, + }); + const mockRes = createMockResponse(); + + await handler(mockReq, mockRes); + + expect(mockRes.status).toHaveBeenCalledWith(400); + const errorPayload = (mockRes.json as any).mock.calls[0][0]; + 
expect(errorPayload.code).toBe("VALIDATION_ERROR"); + expect(errorPayload.error).toMatch(/fields:.*filter\.member/); + // Defense-in-depth: the public 400 must not name the registry's + // allowed dimensions. The full Zod issues stay in telemetry context. + expect(errorPayload.error).not.toMatch( + /not a declared dimension|allowed:|must be one of/, + ); + }); + }); +}); + +// ============================================================================ +// Phase 4 — OBO lane + cache key composition (final form) +// +// Activates the OBO execution lane and finalizes cache-key composition. The +// cache executor key for OBO entries is a sha256 hash of the user identity — +// the raw header value never reaches the cache layer (privacy). Cross-user +// isolation, cross-lane isolation, and sort-before-hash on measures and +// dimensions are exercised here. +// ============================================================================ + +const CUSTOMER_OBO_REGISTRATION: MetricRegistration = { + key: "customer_metrics", + source: "appkit_demo.public.customer_metrics", + lane: "obo", + knownMeasures: ["churn_rate", "arpu"], + knownDimensions: ["csm_email", "region"], + knownTimeGrainsByDim: {}, +}; + +describe("metric — Phase 4 cache executor key", () => { + describe("deriveMetricExecutorKey", () => { + test("returns the literal 'sp' for SP-lane entries", () => { + const key = deriveMetricExecutorKey({ lane: "sp" }); + expect(key).toBe("sp"); + }); + + test("ignores userIdentity for SP-lane entries (caller cannot escalate)", () => { + // Even if a caller passes a userIdentity for an SP-lane entry, the + // function must return "sp" — SP-lane caches are inherently shared. 
+ const key = deriveMetricExecutorKey({ + lane: "sp", + userIdentity: "alice@example.com", + }); + expect(key).toBe("sp"); + }); + + test("returns a sha256 hex digest for OBO-lane entries", () => { + const key = deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "alice@example.com", + }); + // sha256 hex digest is 64 chars long. + expect(key).toMatch(/^[0-9a-f]{64}$/); + }); + + test("OBO digest is stable across calls for the same identity", () => { + const a = deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "alice@example.com", + }); + const b = deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "alice@example.com", + }); + expect(a).toBe(b); + }); + + test("OBO digests differ for different identities", () => { + const alice = deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "alice@example.com", + }); + const bob = deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "bob@example.com", + }); + expect(alice).not.toBe(bob); + }); + + test("does not contain the raw user identity (privacy)", () => { + // The hash output must not include the raw email — the whole point of + // hashing is that the cache layer (which logs keys) never sees PII. + const identity = "alice@example.com"; + const key = deriveMetricExecutorKey({ + lane: "obo", + userIdentity: identity, + }); + expect(key).not.toContain(identity); + expect(key).not.toContain("alice"); + expect(key).not.toContain("@"); + }); + + test("OBO-lane missing/whitespace identity throws AuthenticationError (no shared sentinel)", () => { + // Round-5 hardening: the previous "anonymous" sentinel let multiple + // misconfigured OBO callers share the same hashed cache scope — + // user A's results could leak to user B if both arrived with bad + // headers. Reject the request hard instead so missing identity + // fails fast on the canonical 401 path. 
+ for (const userIdentity of [null, undefined, "", " "]) { + expect(() => + deriveMetricExecutorKey({ lane: "obo", userIdentity }), + ).toThrowError(/user/i); + } + }); + + test("SP key differs from any OBO key (cross-lane isolation)", () => { + const sp = deriveMetricExecutorKey({ lane: "sp" }); + const obo = deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "alice@example.com", + }); + expect(sp).not.toBe(obo); + }); + }); + + describe("composeMetricCacheKey — Phase 4 invariants", () => { + test("same args, different measure order → same key", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr", "mrr"], + format: "JSON", + executorKey: "sp", + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["mrr", "arr"], + format: "JSON", + executorKey: "sp", + }); + expect(a).toEqual(b); + }); + + test("same args, different dimension order → same key", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: ["region", "segment"], + format: "JSON", + executorKey: "sp", + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + dimensions: ["segment", "region"], + format: "JSON", + executorKey: "sp", + }); + expect(a).toEqual(b); + }); + + test("same args, different filter predicate order → same key", () => { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + filter: { + and: [ + { member: "region", operator: "equals", values: ["EMEA"] }, + { member: "segment", operator: "equals", values: ["Ent"] }, + ], + }, + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + filter: { + and: [ + { member: "segment", operator: "equals", values: ["Ent"] }, + { member: "region", operator: "equals", values: ["EMEA"] }, + ], + }, + }); + expect(a).toEqual(b); + }); + + test("different args → different key", () 
=> { + const a = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: "sp", + }); + const b = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["mrr"], + format: "JSON", + executorKey: "sp", + }); + expect(a).not.toEqual(b); + }); + + test("SP vs OBO same args → different keys (cross-lane isolation)", () => { + const sp = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: deriveMetricExecutorKey({ lane: "sp" }), + }); + const obo = composeMetricCacheKey({ + metricKey: "revenue", + measures: ["arr"], + format: "JSON", + executorKey: deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "alice@example.com", + }), + }); + expect(sp).not.toEqual(obo); + }); + + test("OBO different users → different keys (cross-user isolation)", () => { + const alice = composeMetricCacheKey({ + metricKey: "customer_metrics", + measures: ["churn_rate"], + format: "JSON", + executorKey: deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "alice@example.com", + }), + }); + const bob = composeMetricCacheKey({ + metricKey: "customer_metrics", + measures: ["churn_rate"], + format: "JSON", + executorKey: deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "bob@example.com", + }), + }); + expect(alice).not.toEqual(bob); + }); + + test("OBO same user, same args → same key (cache hit)", () => { + const a = composeMetricCacheKey({ + metricKey: "customer_metrics", + measures: ["churn_rate"], + format: "JSON", + executorKey: deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "alice@example.com", + }), + }); + const b = composeMetricCacheKey({ + metricKey: "customer_metrics", + measures: ["churn_rate"], + format: "JSON", + executorKey: deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "alice@example.com", + }), + }); + expect(a).toEqual(b); + }); + + test("the raw user identity is not present in the cache key (privacy)", () => { + const identity = 
"alice@example.com"; + const key = composeMetricCacheKey({ + metricKey: "customer_metrics", + measures: ["churn_rate"], + format: "JSON", + executorKey: deriveMetricExecutorKey({ + lane: "obo", + userIdentity: identity, + }), + }); + // Inspect every part — none should contain the raw identity. + for (const part of key) { + expect(part).not.toContain(identity); + expect(part).not.toContain("alice"); + expect(part).not.toContain("@example.com"); + } + }); + }); +}); + +describe("AnalyticsPlugin — Phase 4 OBO + cache executor key", () => { + let serviceContextMock: Awaited>; + + beforeEach(async () => { + setupDatabricksEnv(); + mockCacheStore.clear(); + ServiceContext.reset(); + serviceContextMock = await mockServiceContext(); + }); + + afterEach(() => { + serviceContextMock?.restore(); + }); + + test("OBO lane: same args, different mock users → both queries execute (no cache leak)", async () => { + const plugin = new AnalyticsPlugin({ timeout: 5000 }); + plugin._setMetricRegistryForTesting({ + customer_metrics: CUSTOMER_OBO_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + const executeMock = vi + .fn() + .mockResolvedValueOnce({ + result: { data: [{ csm_email: "alice@x.com", churn_rate: 0.1 }] }, + }) + .mockResolvedValueOnce({ + result: { data: [{ csm_email: "bob@x.com", churn_rate: 0.2 }] }, + }); + (plugin as any).SQLClient.executeStatement = executeMock; + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + + const aliceReq = createMockRequest({ + params: { key: "customer_metrics" }, + body: { measures: ["churn_rate"] }, + headers: { + "x-forwarded-access-token": "alice-token", + "x-forwarded-user": "alice@example.com", + }, + }); + const aliceRes = createMockResponse(); + await handler(aliceReq, aliceRes); + + const bobReq = createMockRequest({ + params: { key: "customer_metrics" }, + body: { measures: ["churn_rate"] }, + headers: { + "x-forwarded-access-token": "bob-token", + "x-forwarded-user": 
"bob@example.com", + }, + }); + const bobRes = createMockResponse(); + await handler(bobReq, bobRes); + + // Different users, same query — the OBO cache must be partitioned per + // user, so both calls hit the warehouse. + expect(executeMock).toHaveBeenCalledTimes(2); + + // Each user sees their own row (no cache cross-contamination). + expect(aliceRes.write).toHaveBeenCalledWith( + expect.stringContaining("alice@x.com"), + ); + expect(bobRes.write).toHaveBeenCalledWith( + expect.stringContaining("bob@x.com"), + ); + }); + + test("OBO lane: same user, same args twice → second request hits cache", async () => { + const plugin = new AnalyticsPlugin({ timeout: 5000 }); + plugin._setMetricRegistryForTesting({ + customer_metrics: CUSTOMER_OBO_REGISTRATION, + }); + const { router, getHandler } = createMockRouter(); + + const executeMock = vi.fn().mockResolvedValue({ + result: { data: [{ csm_email: "alice@x.com", churn_rate: 0.1 }] }, + }); + (plugin as any).SQLClient.executeStatement = executeMock; + + plugin.injectRoutes(router); + + const handler = getHandler("POST", "/metric/:key"); + + const makeReq = () => + createMockRequest({ + params: { key: "customer_metrics" }, + body: { measures: ["churn_rate"] }, + headers: { + "x-forwarded-access-token": "alice-token", + "x-forwarded-user": "alice@example.com", + }, + }); + + await handler(makeReq(), createMockResponse()); + await handler(makeReq(), createMockResponse()); + + // Second request is served from cache. + expect(executeMock).toHaveBeenCalledTimes(1); + }); + + test("cross-lane isolation: SP user 'sp' literal does not collide with an OBO user named 'sp'", async () => { + // Defense-in-depth — the executor-key derivation must not let a user + // identity collide with the literal "sp" cache scope. Hashing the + // identity ensures this collision is structurally impossible. 
+ const sp = deriveMetricExecutorKey({ lane: "sp" }); + const obo = deriveMetricExecutorKey({ + lane: "obo", + userIdentity: "sp", + }); + expect(sp).not.toBe(obo); + }); + + test("cache TTL defaults to 1 hour (3600 seconds) — matches existing analytics", async () => { + // The route handler builds its `defaultConfig` from `queryDefaults` — + // assert the TTL is unchanged so a future refactor that swaps defaults + // is caught by this test. + const { queryDefaults } = await import("../defaults"); + expect(queryDefaults.cache?.ttl).toBe(3600); + }); + + test("metric.json registry rejects same key in both sp and obo lanes (cross-lane key uniqueness)", async () => { + // Acceptance criterion 7: a metric key registered in both `sp` and + // `obo` is rejected at config-load time. Re-exercise the existing + // loader test here under the Phase 4 banner so the requirement is + // discoverable when reading Phase 4 tests. + const fs = await import("node:fs/promises"); + const os = await import("node:os"); + const path = await import("node:path"); + const tmpDir = await fs.mkdtemp( + path.join(os.tmpdir(), "appkit-metric-phase4-"), + ); + try { + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ + sp: { revenue: { source: "demo.public.revenue" } }, + obo: { revenue: { source: "demo.public.revenue" } }, + }), + ); + await expect(loadMetricRegistry(undefined, tmpDir)).rejects.toThrowError( + /Duplicate metric key/, + ); + } finally { + await fs.rm(tmpDir, { recursive: true, force: true }); + } + }); +}); diff --git a/packages/appkit/src/plugins/analytics/types.ts b/packages/appkit/src/plugins/analytics/types.ts index c58b6ecfe..52bad4664 100644 --- a/packages/appkit/src/plugins/analytics/types.ts +++ b/packages/appkit/src/plugins/analytics/types.ts @@ -16,3 +16,145 @@ export interface AnalyticsQueryResponse { row_count: number; data: any[]; } + +/** + * Lane an entry sits in inside metric.json. 
SP = service principal (shared + * cache), OBO = on-behalf-of (per-user cache). + */ +export type MetricLane = "sp" | "obo"; + +/** + * Resolved metric-view registration loaded at server startup. + * + * The registration carries the FQN (used by the SQL constructor) plus the + * known measure/dimension names produced by the build-time DESCRIBE call + * (used by the body validator to reject unknown measures fast). + */ +export interface MetricRegistration { + /** Stable map key from metric.json. */ + key: string; + /** Three-part Unity Catalog FQN of the metric view. */ + source: string; + /** Lane this metric was registered under. */ + lane: MetricLane; + /** + * Names of measures known at build time. Empty array means "unknown" — the + * server falls open in that case and the warehouse rejects bad column + * references. + */ + knownMeasures: string[]; + /** + * Names of dimensions known at build time. Phase 2 consumes this for + * dimension validation and `GROUP BY ALL` emission. + */ + knownDimensions: string[]; + /** + * Map of dimension name → allowed time-grains for that dimension. Only + * populated for time-typed dimensions (those with a `time_grain` attribute + * in the YAML); regular dimensions do not appear in this map. + * + * Empty map means "no time-typed dimensions" — `timeGrain` cannot be set + * on requests for this metric. + */ + knownTimeGrainsByDim: Record; + /** + * Map of dimension name → registered SQL type (Phase 3). Drives op-vs-type + * compatibility checks in the filter validator (string ops on string-typed + * dims, range ops on numeric/date-typed dims). Empty map means + * "compatibility checks fall open"; the dimension still passes the + * identifier guard and the registry-membership check. + */ + knownDimensionTypes?: Record; +} + +/** + * Coarse classification of a dimension's column type, used by the filter + * validator to enforce op-vs-type compatibility. 
+ * + * - `string` — STRING / VARCHAR / CHAR / TEXT (accepts string ops) + * - `numeric` — INT / BIGINT / DOUBLE / DECIMAL / etc (accepts range ops) + * - `date` — DATE / TIMESTAMP (accepts range ops) + * - `unknown` — fall-open: validator only enforces structural rules + */ +export type MetricDimensionTypeClass = + | "string" + | "numeric" + | "date" + | "unknown"; + +/** + * A single filter predicate — leaf node of the recursive {@link MetricFilter}. + * + * Server-side `IAnalyticsMetricRequest` uses the structural shape (no + * registry generic); the per-metric narrowing lives client-side via + * `Predicate` in `@databricks/appkit-ui/react`. + */ +export interface MetricPredicate { + member: string; + operator: MetricFilterOperatorName; + values?: ReadonlyArray; +} + +/** + * The recursive filter type for the metric-view request body. + * + * Server-side use of this shape is intentionally non-generic — the registry + * generic only affects compile-time autocomplete and lives in + * `@databricks/appkit-ui/react`. + */ +export type MetricFilter = + | MetricPredicate + | { and: ReadonlyArray } + | { or: ReadonlyArray }; + +/** + * v1 filter operator vocabulary — exactly twelve names. Mirrored on the + * client as `MetricFilterOperator` in `@databricks/appkit-ui/react`. The + * runtime tuple `METRIC_FILTER_OPERATORS` lives next to the validator in + * `metric.ts`. + */ +export type MetricFilterOperatorName = + | "equals" + | "notEquals" + | "in" + | "notIn" + | "gt" + | "gte" + | "lt" + | "lte" + | "contains" + | "notContains" + | "set" + | "notSet"; + +/** + * Body of POST /api/analytics/metric/:key at Phase 3. + * + * Phase 1 shape: `{ measures, format?, limit? }`. Phase 2 added + * `dimensions: string[]` and optional `timeGrain`. Phase 3 adds optional + * structured `filter`. + */ +export interface IAnalyticsMetricRequest { + measures: string[]; + /** + * Dimensions to GROUP BY. When non-empty the SQL constructor adds + * `GROUP BY ALL`. 
When omitted the query is ungrouped (Phase 1 behaviour). + */ + dimensions?: string[]; + /** + * Time-grain truncation applied to every time-typed dimension in + * `dimensions`. The validator rejects this field when no time-typed + * dimension is in `dimensions` (400) and when the value is not in the + * metric view's allowed grain enum (400). + */ + timeGrain?: string; + /** + * Structured filter expression — recursive AND/OR composition of predicates. + * All values are bound as parameters via the existing Statement Execution + * bind-var path; no value flows into the rendered SQL string. + */ + filter?: MetricFilter; + format?: AnalyticsFormat; + /** Optional row cap. */ + limit?: number; +} diff --git a/packages/appkit/src/stream/stream-manager.ts b/packages/appkit/src/stream/stream-manager.ts index 6a1a70d06..d042e35ba 100644 --- a/packages/appkit/src/stream/stream-manager.ts +++ b/packages/appkit/src/stream/stream-manager.ts @@ -400,6 +400,19 @@ export class StreamManager { return SSEErrorCode.STREAM_ABORTED; } + // Defense-in-depth: when an upstream layer (e.g., SQL warehouse client) + // wraps an AbortError into a domain error, the original `name` is lost + // but the message survives. Detect aborts via message substring before + // falling through to the statusCode-based UPSTREAM_ERROR classification — + // otherwise legitimate client cancellations get logged at error level + // and surfaced to consumers as if the warehouse failed. 
+ if ( + message.includes("operation was aborted") || + message.includes("the request was aborted") + ) { + return SSEErrorCode.STREAM_ABORTED; + } + // Detect upstream API errors (e.g., from Databricks SDK ApiError) if ( "statusCode" in error && diff --git a/packages/appkit/src/stream/tests/stream.test.ts b/packages/appkit/src/stream/tests/stream.test.ts index a10e1edc4..9da079cc0 100644 --- a/packages/appkit/src/stream/tests/stream.test.ts +++ b/packages/appkit/src/stream/tests/stream.test.ts @@ -346,6 +346,96 @@ describe("StreamManager", () => { }); }); + describe("error categorization", () => { + // Helper: capture the SSE error code emitted by streaming a generator + // that throws the supplied error. + async function captureCategorizedCode( + manager: StreamManager, + thrown: unknown, + ): Promise { + const { mockRes, events } = createMockResponse(); + async function* generator() { + yield { type: "start" }; + throw thrown; + } + await manager.stream(mockRes as any, generator); + const errorEvent = events.find((e) => e.startsWith("event: error")); + if (!errorEvent) return undefined; + const dataLine = events + .find((e) => e.startsWith("data:") && e.includes('"code"')) + ?.replace(/^data: /, "") + .replace(/\n\n$/, ""); + if (!dataLine) return undefined; + const parsed = JSON.parse(dataLine); + return parsed.code as string; + } + + test("classifies native AbortError as STREAM_ABORTED", async () => { + const err = new Error("operation aborted"); + err.name = "AbortError"; + const code = await captureCategorizedCode(streamManager, err); + expect(code).toBe("STREAM_ABORTED"); + }); + + test("classifies wrapped AbortError (whose name was overwritten) as STREAM_ABORTED", async () => { + // Simulates the SQL warehouse client's wrap behavior: an AbortError + // gets re-wrapped as ExecutionError, losing `name === "AbortError"` — + // but the message survives. The classifier MUST detect this via the + // message substring fallback. 
+ class FakeExecutionError extends Error { + statusCode = 500; + constructor() { + super("Statement failed: The operation was aborted."); + this.name = "ExecutionError"; + } + } + const code = await captureCategorizedCode( + streamManager, + new FakeExecutionError(), + ); + expect(code).toBe("STREAM_ABORTED"); + }); + + test("classifies real upstream API errors (statusCode + non-abort message) as UPSTREAM_ERROR", async () => { + class FakeApiError extends Error { + statusCode = 503; + constructor() { + super("Statement failed: Internal warehouse error"); + this.name = "ExecutionError"; + } + } + const code = await captureCategorizedCode( + streamManager, + new FakeApiError(), + ); + expect(code).toBe("UPSTREAM_ERROR"); + }); + + test("classifies timeouts as TIMEOUT", async () => { + const code = await captureCategorizedCode( + streamManager, + new Error("Request timed out"), + ); + expect(code).toBe("TIMEOUT"); + }); + + test("classifies ECONNREFUSED / unavailable as TEMPORARY_UNAVAILABLE", async () => { + const code = await captureCategorizedCode( + streamManager, + new Error("connect ECONNREFUSED 127.0.0.1:443"), + ); + expect(code).toBe("TEMPORARY_UNAVAILABLE"); + }); + + test("falls through to INTERNAL_ERROR for opaque errors", async () => { + const code = await captureCategorizedCode( + streamManager, + new Error("something unexpected"), + ); + expect(code).toBe("INTERNAL_ERROR"); + }); + }); + describe("heartbeat", () => { test("should send heartbeat messages periodically", async () => { vi.useFakeTimers(); diff --git a/packages/appkit/src/type-generator/index.ts b/packages/appkit/src/type-generator/index.ts index c9a528fe7..384924dc5 100644 --- a/packages/appkit/src/type-generator/index.ts +++ b/packages/appkit/src/type-generator/index.ts @@ -2,6 +2,20 @@ import fs from "node:fs/promises"; import path from "node:path"; import dotenv from "dotenv"; import { createLogger } from "../logging/logger"; +import { + createWorkspaceDescribeFetcher, + type 
DescribeFetcher, + generateMetricsMetadataJson, + generateMetricTypeDeclarations, + type MetricColumnMetadata, + type MetricLane, + type MetricSchema, + type MetricSyncFailure, + type MetricSyncResult, + readMetricConfig, + resolveMetricConfig, + syncMetrics, +} from "./metric-registry"; import { migrateProjectConfig, removeOldGeneratedTypes, @@ -50,15 +64,35 @@ declare module "@databricks/appkit-ui/react" { * @param options - the options for the generation * @param options.entryPoint - the entry point file * @param options.outFile - the output file - * @param options.querySchemaFile - optional path to query schema file (e.g. config/queries/schema.ts) + * @param options.metricOutFile - optional output file for the MetricRegistry + * augmentation. Defaults to a sibling `metric.d.ts` file under the same + * directory as `outFile`. Skipped entirely if `metric.json` is absent. + * @param options.metricMetadataOutFile - optional output file for the + * build-time semantic metadata JSON bundle (`metrics.metadata.json`). + * Defaults to a sibling of `metricOutFile`. Skipped entirely if + * `metric.json` is absent. + * @param options.metricFetcher - optional DescribeFetcher used by + * {@link syncMetrics}. Tests inject a mock; production builds let the + * default WorkspaceClient-backed fetcher be created lazily. 
*/ export async function generateFromEntryPoint(options: { outFile: string; queryFolder?: string; warehouseId: string; noCache?: boolean; + metricOutFile?: string; + metricMetadataOutFile?: string; + metricFetcher?: DescribeFetcher; }) { - const { outFile, queryFolder, warehouseId, noCache } = options; + const { + outFile, + queryFolder, + warehouseId, + noCache, + metricOutFile, + metricMetadataOutFile, + metricFetcher, + } = options; const projectRoot = resolveProjectRoot(outFile); logger.debug("Starting type generation..."); @@ -93,6 +127,58 @@ export async function generateFromEntryPoint(options: { await fs.mkdir(path.dirname(outFile), { recursive: true }); await fs.writeFile(outFile, typeDeclarations, "utf-8"); + // Metric-view types: only emit when metric.json exists. The path is purely + // additive — apps that never adopt metric views must not produce empty noise. + if (queryFolder) { + const metricConfig = await readMetricConfig(queryFolder); + if (metricConfig) { + const resolution = resolveMetricConfig(metricConfig); + const fetcher = + metricFetcher ?? createWorkspaceDescribeFetcher(warehouseId); + const { schemas: metricSchemas, failures } = await syncMetrics( + resolution, + fetcher, + ); + + // Surface DESCRIBE failures loudly so a misconfigured metric.json or a + // workspace-side typo doesn't silently ship an empty bundle entry. The + // route's runtime fail-closed gate would 503 these in production — + // catching the issue at type-gen time is the cheaper signal. + if (failures.length > 0) { + for (const f of failures) { + logger.warn( + "metric sync failed for %s (%s): %s", + f.key, + f.source, + f.reason, + ); + } + } + + const metricFile = + metricOutFile ?? 
path.join(path.dirname(outFile), METRIC_TYPES_FILE); + const metricDeclarations = generateMetricTypeDeclarations(metricSchemas); + await fs.mkdir(path.dirname(metricFile), { recursive: true }); + await fs.writeFile(metricFile, metricDeclarations, "utf-8"); + + // Phase 5: emit the semantic-metadata JSON bundle alongside the .d.ts. + // The hook imports this artifact (via a registration call from the + // consuming app) and exposes the per-metric subset on its return value. + const metadataFile = + metricMetadataOutFile ?? + path.join(path.dirname(metricFile), METRIC_METADATA_FILE); + const metadataJson = generateMetricsMetadataJson(metricSchemas); + await fs.mkdir(path.dirname(metadataFile), { recursive: true }); + await fs.writeFile(metadataFile, metadataJson, "utf-8"); + + logger.debug( + "Wrote MetricRegistry augmentation + metadata bundle for %d metric(s)%s", + metricSchemas.length, + failures.length > 0 ? ` (${failures.length} failure(s))` : "", + ); + } + } + // One-time migration: remove old generated file and patch project configs await removeOldGeneratedTypes(projectRoot, "appKitTypes.d.ts"); await migrateProjectConfig(projectRoot); @@ -105,9 +191,34 @@ export async function generateFromEntryPoint(options: { // mirroring how generateFromEntryPoint (also defined here) is preserved via the analytics vite plugin. export const generateServingTypes = generateServingTypesImpl; +// Re-export the metric-registry types so consumers (CLI, the type-generator +// .d.ts shim in `packages/shared`) can pick them up from this entry point — +// the .d.ts shim documents these as part of the package's public surface. +export type { + MetricColumnMetadata, + MetricLane, + MetricSchema, + MetricSyncFailure, + MetricSyncResult, +}; + /** Directory name for generated AppKit type declaration files. */ export const TYPES_DIR = "appkit-types"; /** Default filename for analytics query type declarations. 
*/ export const ANALYTICS_TYPES_FILE = "analytics.d.ts"; /** Default filename for serving endpoint type declarations. */ export const SERVING_TYPES_FILE = "serving.d.ts"; +/** Default filename for metric-view registry type declarations. */ +export const METRIC_TYPES_FILE = "metric.d.ts"; +/** + * Default filename for the build-time semantic-metadata JSON bundle. + * + * Sibling of {@link METRIC_TYPES_FILE}. The JSON shape is + * `Record` — see + * `MetricsMetadataBundle` in `metric-registry.ts`. The consuming app imports + * this file at build time (via Vite's JSON loader / Webpack's `import` etc.) + * and registers it through `@databricks/appkit-ui/format`'s + * `registerMetricsMetadata()` so the React hook can return per-metric + * `metadata` without a second network round-trip. + */ +export const METRIC_METADATA_FILE = "metrics.metadata.json"; diff --git a/packages/appkit/src/type-generator/metric-registry.ts b/packages/appkit/src/type-generator/metric-registry.ts new file mode 100644 index 000000000..95a6773fe --- /dev/null +++ b/packages/appkit/src/type-generator/metric-registry.ts @@ -0,0 +1,1057 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import { WorkspaceClient } from "@databricks/sdk-experimental"; +import { createLogger } from "../logging/logger"; +import type { DatabricksStatementExecutionResponse } from "./types"; + +const logger = createLogger("type-generator:metric-registry"); + +/** + * Default filename for the metric source declarations. + * Lives at config/queries/metric.json by convention. + */ +const METRIC_CONFIG_FILE = "metric.json"; + +/** + * The lane an entry sits in: `sp` (service principal, shared cache) + * or `obo` (on-behalf-of, per-user cache). + */ +export type MetricLane = "sp" | "obo"; + +/** + * Single entry in metric.json. + * + * v1 only allows `source`. Object form (rather than bare string) is the + * forward-compat seam for future per-entry options (cacheTtl, defaultFilter, ...). 
+ */ +interface MetricEntryConfig { + source: string; +} + +/** + * Shape of metric.json (matches MetricSourceConfiguration generated from the JSON Schema). + * Inlined here so the type-generator does not pull in the shared schema package at runtime. + */ +interface MetricSourceConfig { + $schema?: string; + sp?: Record; + obo?: Record; +} + +/** + * Resolved entry consumed by the rest of the metric-view pipeline. + * Lane is denormalized onto the entry so downstream code does not have to + * track which top-level key it came from. + */ +interface ResolvedMetricEntry { + /** Stable map key shared across route, hook, registry, and cache. */ + key: string; + /** Three-part Unity Catalog FQN of the metric view. */ + source: string; + /** Execution lane — sp = service principal, obo = on-behalf-of. */ + lane: MetricLane; +} + +/** + * Per-column metadata extracted from DESCRIBE TABLE EXTENDED ... AS JSON. + * + * Phase 1 captured measure flags + types. Phase 2 widens to time-typed + * dimensions: grain qualification is inferred from the column's SQL type + * (TIMESTAMP* / DATE) — the UC metric-view YAML schema has no per-column + * `time_grain` attribute, so the type is the only signal available. + * + * Phase 5 captures the YAML 1.1 semantic-metadata fields so the build-time + * artifact is a complete record of what the metric view declares: display name + * (used by `formatLabel` to render axis titles / legend entries / tooltips), + * format spec (printf-like string consumed by `formatValue` and `toD3Format`), + * and description (column-level documentation). All three are optional in the + * YAML; the extractor leaves the field undefined when absent. + */ +export interface MetricColumnMetadata { + name: string; + type: string; + /** UC marks columns produced by `MEASURE()` as measures; everything else is a dimension. */ + isMeasure: boolean; + /** Optional column comment / display description (best-effort). 
*/ + description?: string; + /** + * Human-readable display name from the YAML 1.1 `display_name` attribute. + * Used by `formatLabel` as the canonical axis / legend / tooltip text; + * absent → callers fall back to camelCase / snake_case humanization of `name`. + */ + displayName?: string; + /** + * Printf-style format spec from the YAML 1.1 `format` attribute (e.g. + * `"$#,##0.00"`, `"0.0%"`, `"#,##0"`). `formatValue` and `toD3Format` + * consume this passthrough — the framework deliberately does not invent a + * format DSL; we forward the YAML's verbatim string and fall back to + * sensible defaults when the spec is absent or unrecognized. + */ + format?: string; + /** + * Standard time-grain set for this column, inferred from the SQL data type: + * TIMESTAMP* → 7 grains (minute..year); DATE → 5 grains (day..year). + * Undefined means the column is not time-typed. Measures never get grains. + */ + timeGrains?: string[]; +} + +/** + * Per-metric schema captured at type-generation time. + * + * The full row type is the union of measure + dimension column types. Phase 1 + * uses only `measures`; Phase 2 widens to `dimensions` and `timeGrains`. + */ +export interface MetricSchema { + /** Stable metric key (the map key in metric.json). */ + key: string; + /** Three-part FQN of the metric view. */ + source: string; + /** Execution lane this metric was registered under. */ + lane: MetricLane; + /** Measure columns (those exposed by MEASURE()). */ + measures: MetricColumnMetadata[]; + /** Dimension columns (everything that is not a measure). */ + dimensions: MetricColumnMetadata[]; +} + +/** + * Result of reading and resolving metric.json — split by lane plus a flat + * list with lane denormalized for iteration. + */ +interface MetricConfigResolution { + entries: ResolvedMetricEntry[]; +} + +/** + * Read metric.json from a queries folder. 
+ * + * Returns `null` if the file does not exist (the metric-view path is + * additive — apps without metric.json must not be penalized). + * + * Throws on JSON parse errors so misconfiguration surfaces loudly. + */ +export async function readMetricConfig( + queryFolder: string, +): Promise { + const metricPath = path.join(queryFolder, METRIC_CONFIG_FILE); + let raw: string; + try { + raw = await fs.readFile(metricPath, "utf8"); + } catch (err) { + if ((err as NodeJS.ErrnoException).code === "ENOENT") { + return null; + } + throw err; + } + + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch (err) { + throw new Error( + `Failed to parse metric.json at ${metricPath}: ${(err as Error).message}`, + ); + } + + if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) { + throw new Error( + `Invalid metric.json at ${metricPath}: expected an object with sp/obo keys.`, + ); + } + + return parsed as MetricSourceConfig; +} + +/** + * Validate a key against the JSON Schema's metricKey pattern. Phase 1 keeps + * this lightweight — the JSON Schema is the canonical contract for IDE/CI. + */ +function isValidMetricKey(key: string): boolean { + return /^[a-zA-Z_][a-zA-Z0-9_]*$/.test(key); +} + +/** + * Validate a UC FQN against the JSON Schema's source pattern. + */ +function isValidFqn(fqn: string): boolean { + return /^[a-zA-Z0-9_][a-zA-Z0-9_-]*\.[a-zA-Z0-9_][a-zA-Z0-9_-]*\.[a-zA-Z0-9_][a-zA-Z0-9_-]*$/.test( + fqn, + ); +} + +/** + * Flatten the sp/obo map into a single list of resolved entries. + * + * Throws on duplicate keys across lanes (the same key cannot live in both), + * invalid keys, or invalid FQNs. Stable ordering: sp lane first, alphabetical. + */ +export function resolveMetricConfig( + config: MetricSourceConfig, +): MetricConfigResolution { + const entries: ResolvedMetricEntry[] = []; + const seen = new Set(); + + const lanes: Array<[MetricLane, Record]> = [ + ["sp", config.sp ?? {}], + ["obo", config.obo ?? 
{}], + ]; + + for (const [lane, laneMap] of lanes) { + const sortedKeys = Object.keys(laneMap).sort(); + for (const key of sortedKeys) { + if (!isValidMetricKey(key)) { + throw new Error( + `Invalid metric key "${key}" in lane "${lane}": must match /^[a-zA-Z_][a-zA-Z0-9_]*$/.`, + ); + } + + if (seen.has(key)) { + throw new Error( + `Duplicate metric key "${key}": cannot appear in both sp and obo lanes.`, + ); + } + seen.add(key); + + const entry = laneMap[key]; + if (!entry || typeof entry !== "object" || Array.isArray(entry)) { + throw new Error( + `Invalid metric entry "${key}" in lane "${lane}": expected an object with a 'source' field.`, + ); + } + + // v1 explicitly rejects unknown fields so future additions cannot be + // silently consumed today. + const allowed = new Set(["source"]); + for (const field of Object.keys(entry)) { + if (!allowed.has(field)) { + throw new Error( + `Invalid field "${field}" on metric entry "${key}": only 'source' is allowed at v1.`, + ); + } + } + + if (typeof entry.source !== "string" || entry.source.trim() === "") { + throw new Error( + `Invalid metric entry "${key}" in lane "${lane}": 'source' must be a non-empty string.`, + ); + } + + if (!isValidFqn(entry.source)) { + throw new Error( + `Invalid metric source "${entry.source}" for "${key}": expected a three-part UC FQN ...`, + ); + } + + entries.push({ key, source: entry.source, lane }); + } + } + + return { entries }; +} + +/** + * Parse the JSON payload returned by DESCRIBE TABLE EXTENDED ... AS JSON. + * + * The Statement Execution API returns a single string cell — this normalizer + * unwraps it. Handles both the production (real warehouse) shape and the + * shape produced by mocked test responses. + */ +export function parseDescribeTableExtendedJson( + response: DatabricksStatementExecutionResponse, +): unknown { + if (response.status?.state === "FAILED") { + const msg = response.status.error?.message ?? 
"DESCRIBE failed"; + throw new Error(`DESCRIBE TABLE EXTENDED failed: ${msg}`); + } + + const rows = response.result?.data_array ?? []; + if (rows.length === 0) { + throw new Error( + "DESCRIBE TABLE EXTENDED returned no rows. Verify the FQN points to a metric view.", + ); + } + + const cell = rows[0]?.[0]; + if (typeof cell !== "string") { + throw new Error( + "DESCRIBE TABLE EXTENDED first cell was not a JSON string. Confirm the AS JSON suffix is supported.", + ); + } + + try { + return JSON.parse(cell); + } catch (err) { + throw new Error( + `Failed to parse DESCRIBE TABLE EXTENDED JSON: ${(err as Error).message}`, + ); + } +} + +/** + * Pure function: turn the parsed DESCRIBE JSON into structured column metadata. + * + * Tolerant of multiple JSON shapes (the field may be `columns` or `schema.fields`, + * type may be a string or `{ name }` object, the measure marker may be `is_measure` + * or under `metadata.is_measure`). Phase 1's job is to find names + measure flags; + * later phases can tighten this if a more authoritative shape stabilizes. + */ +export function extractMetricColumns(parsed: unknown): MetricColumnMetadata[] { + if (!parsed || typeof parsed !== "object") { + return []; + } + + const root = parsed as Record; + const columnsCandidate = (root.columns ?? + (root.schema && typeof root.schema === "object" + ? (root.schema as Record).fields + : undefined)) as unknown; + + if (!Array.isArray(columnsCandidate)) { + return []; + } + + const columns: MetricColumnMetadata[] = []; + for (const raw of columnsCandidate) { + if (!raw || typeof raw !== "object") continue; + const obj = raw as Record; + const name = + typeof obj.name === "string" + ? obj.name + : typeof obj.column_name === "string" + ? obj.column_name + : undefined; + if (!name) continue; + + const typeRaw = obj.type ?? obj.data_type ?? 
obj.type_name; + let type = "STRING"; + if (typeof typeRaw === "string") { + type = typeRaw; + } else if (typeRaw && typeof typeRaw === "object") { + const inner = (typeRaw as Record).name; + if (typeof inner === "string") type = inner; + } + + let isMeasure = false; + if (typeof obj.is_measure === "boolean") { + isMeasure = obj.is_measure; + } else if ( + obj.metadata && + typeof obj.metadata === "object" && + typeof (obj.metadata as Record).is_measure === "boolean" + ) { + isMeasure = (obj.metadata as Record) + .is_measure as boolean; + } else if (obj.kind === "measure" || obj.role === "measure") { + isMeasure = true; + } + + const description = + typeof obj.comment === "string" + ? obj.comment + : typeof obj.description === "string" + ? obj.description + : undefined; + + const displayName = extractStringFromAny(obj, [ + "display_name", + "displayName", + ]); + const format = extractFormatString(obj); + + // Time-grain inference is type-driven, not YAML-attribute-driven. + // Earlier versions of this code looked for a `time_grain` field on each + // column, but that field does not exist in UC's metric-view schema — + // the Rust serde at universe/reyden/metric-view-serde/src/v11/column.rs + // enumerates the 7 known column properties (window, expr, format, + // display_name, name, comment, synonyms). CREATE rejects `time_grain` + // with "Unrecognized field". Measures don't get grouped, so skip them. + const timeGrains = isMeasure ? undefined : inferTimeGrains(type); + + columns.push({ + name, + type, + isMeasure, + description, + ...(displayName ? { displayName } : {}), + ...(format ? { format } : {}), + ...(timeGrains ? { timeGrains } : {}), + }); + } + + return columns; +} + +/** + * Read a non-empty string attribute from a DESCRIBE column entry, tolerating + * the multiple shapes UC has shipped for this metadata over time. + * + * For each candidate name, we check the column object directly, then under + * `metadata.`. The first non-empty trimmed string wins. 
Empty / missing + * → undefined (the caller leaves the field off the emitted artifact). + */ +function extractStringFromAny( + obj: Record, + candidates: readonly string[], +): string | undefined { + for (const key of candidates) { + const direct = obj[key]; + if (typeof direct === "string" && direct.trim().length > 0) { + return direct; + } + const meta = obj.metadata; + if (meta && typeof meta === "object" && !Array.isArray(meta)) { + const nested = (meta as Record)[key]; + if (typeof nested === "string" && nested.trim().length > 0) { + return nested; + } + } + } + return undefined; +} + +/** + * Read the column's `format` attribute from a DESCRIBE entry and return a + * printf-like format string suitable for `formatValue` and `toD3Format`. + * + * Tolerates two source shapes: + * + * 1. **Legacy / hand-authored** — `format: "$#,##0.00"` (already a printf + * string). Returned as-is. + * + * 2. **YAML 1.1 structured** — DESCRIBE TABLE EXTENDED ... AS JSON for a + * UC Metric View wraps the column's format type as the outer key: + * + * ``` + * { "currency": { "decimal_places": { "places": 2 }, "currency_code": "USD" } } + * { "percent": { "decimal_places": { "places": 1 } } } + * { "number": { "decimal_places": { "places": 0 } } } + * ``` + * + * Both shapes are checked at top-level (`obj.format` / `obj.format_spec`) + * and under `metadata.` for parity with extractStringFromAny. + * + * Unrecognized objects return undefined; downstream consumers fall back to + * default locale formatting. 
+ */ +function extractFormatString(obj: Record): string | undefined { + for (const key of ["format", "format_spec"]) { + const direct = obj[key]; + const fromDirect = formatStringFromValue(direct); + if (fromDirect) return fromDirect; + + const meta = obj.metadata; + if (meta && typeof meta === "object" && !Array.isArray(meta)) { + const nested = (meta as Record)[key]; + const fromMeta = formatStringFromValue(nested); + if (fromMeta) return fromMeta; + } + } + return undefined; +} + +function formatStringFromValue(value: unknown): string | undefined { + if (typeof value === "string" && value.trim().length > 0) return value.trim(); + if (value && typeof value === "object" && !Array.isArray(value)) { + return translateStructuredFormat(value as Record); + } + return undefined; +} + +/** + * Translate the structured `format` object emitted by DESCRIBE TABLE EXTENDED + * AS JSON into a printf-like format string. Recognizes the three YAML 1.1 + * shapes; returns undefined for anything else. + */ +function translateStructuredFormat( + spec: Record, +): string | undefined { + if (spec.currency && typeof spec.currency === "object") { + return currencyFormatString(spec.currency as Record); + } + if (spec.percent && typeof spec.percent === "object") { + return percentFormatString(spec.percent as Record); + } + if (spec.number && typeof spec.number === "object") { + return numberFormatString(spec.number as Record); + } + return undefined; +} + +function currencyFormatString(c: Record): string { + const places = readDecimalPlaces(c) ?? 2; + const codeRaw = c.currency_code; + const code = + typeof codeRaw === "string" && codeRaw.trim().length > 0 + ? codeRaw.toUpperCase() + : "USD"; + const symbol = currencySymbol(code); + return `${symbol}#,##0${fractionalSuffix(places)}`; +} + +function percentFormatString(p: Record): string { + const places = readDecimalPlaces(p) ?? 
0; + return `0${fractionalSuffix(places)}%`; +} + +function numberFormatString(n: Record): string { + const places = readDecimalPlaces(n) ?? 0; + return `#,##0${fractionalSuffix(places)}`; +} + +function fractionalSuffix(places: number): string { + return places > 0 ? `.${"0".repeat(places)}` : ""; +} + +function readDecimalPlaces(obj: Record): number | undefined { + const dp = obj.decimal_places; + if (typeof dp === "number" && Number.isFinite(dp) && dp >= 0) { + return Math.floor(dp); + } + if (dp && typeof dp === "object" && !Array.isArray(dp)) { + const places = (dp as Record).places; + if (typeof places === "number" && Number.isFinite(places) && places >= 0) { + return Math.floor(places); + } + } + return undefined; +} + +/** + * Map ISO currency codes to their conventional prefix symbol. Unknown codes + * fall back to the literal code + space (e.g., "AUD #,##0.00") so the value + * is never lost — `formatValue` and `toD3Format` will still render correctly, + * just without a single-character glyph. + */ +function currencySymbol(code: string): string { + switch (code) { + case "USD": + return "$"; + case "EUR": + return "€"; + case "GBP": + return "£"; + case "JPY": + case "CNY": + return "¥"; + case "INR": + return "₹"; + case "BRL": + return "R$"; + default: + return `${code} `; + } +} + +/** + * Infer the standard set of valid time grains for a dimension based on its + * SQL data type. + * + * TIMESTAMP / TIMESTAMP_LTZ / TIMESTAMP_NTZ → all 7 standard grains + * DATE → [day, week, month, quarter, year] (no sub-day grains) + * anything else → undefined (not time-typed) + * + * Earlier code looked for a `time_grain` attribute on the YAML column. That + * field does not exist in the UC metric-view schema (see the v11 Rust serde + * — Column has 7 known properties: window, expr, format, display_name, + * name, comment, synonyms; CREATE fails with "Unrecognized field + * 'time_grain'"). 
So grain qualification has to come from the column's + * resolved SQL type instead. + */ +function inferTimeGrains(type: string): string[] | undefined { + // Strip parameterized suffixes ("TIMESTAMP(6)" → "TIMESTAMP") and trim. + const normalized = type + .toLowerCase() + .replace(/\(.*\)$/, "") + .trim(); + if ( + normalized === "timestamp" || + normalized === "timestamp_ltz" || + normalized === "timestamp_ntz" + ) { + return ["day", "hour", "minute", "month", "quarter", "week", "year"]; + } + if (normalized === "date") { + return ["day", "month", "quarter", "week", "year"]; + } + return undefined; +} + +/** + * Map a Databricks SQL type to a TypeScript primitive. + * Centralized here (not imported from query-registry) so this module + * stays self-contained at Phase 1. + */ +function tsTypeFor(sqlType: string): string { + const normalized = sqlType + .toUpperCase() + .replace(/\(.*\)$/, "") + .replace(/<.*>$/, "") + .split(" ")[0]; + + switch (normalized) { + case "BOOLEAN": + return "boolean"; + case "TINYINT": + case "SMALLINT": + case "INT": + case "INTEGER": + case "BIGINT": + case "FLOAT": + case "DOUBLE": + case "DECIMAL": + case "NUMERIC": + return "number"; + default: + return "string"; + } +} + +/** + * Render a MetricRegistry interface entry from a MetricSchema. + */ +function renderMetricEntry(schema: MetricSchema): string { + const indent = " "; + const measures = + schema.measures.length > 0 + ? schema.measures + .map( + (m) => `${indent}/** @sqlType ${m.type} */ +${indent}${JSON.stringify(m.name)}: ${tsTypeFor(m.type)}`, + ) + .join(";\n") + : ""; + const dimensions = + schema.dimensions.length > 0 + ? schema.dimensions + .map((d) => { + const grainComment = d.timeGrains?.length + ? 
` @timeGrain ${d.timeGrains.join("|")}` + : ""; + return `${indent}/** @sqlType ${d.type}${grainComment} */ +${indent}${JSON.stringify(d.name)}: ${tsTypeFor(d.type)}`; + }) + .join(";\n") + : ""; + + const measureKeys = schema.measures.map((m) => JSON.stringify(m.name)); + const dimensionKeys = schema.dimensions.map((d) => JSON.stringify(d.name)); + + const measuresBlock = measures + ? `{ +${measures}; + }` + : "Record"; + + const dimensionsBlock = dimensions + ? `{ +${dimensions}; + }` + : "Record"; + + const measureUnion = + measureKeys.length > 0 ? measureKeys.join(" | ") : "never"; + const dimensionUnion = + dimensionKeys.length > 0 ? dimensionKeys.join(" | ") : "never"; + + // Union of allowed time-grains across every time-typed dimension. The PRD + // documents the v1 contract: a single top-level `timeGrain` applies to all + // time-typed dims. Therefore the type-level constraint is the union (any of + // the dim-allowed grains is acceptable; per-dim narrowing is a future + // widening to `TimeGrain | Record, TimeGrain>`). + const timeGrainSet = new Set(); + for (const d of schema.dimensions) { + for (const g of d.timeGrains ?? []) { + timeGrainSet.add(g); + } + } + const timeGrainUnion = + timeGrainSet.size > 0 + ? 
[...timeGrainSet] + .sort() + .map((g) => JSON.stringify(g)) + .join(" | ") + : "never"; + + const measureMetadata = renderMetadataMap(schema.measures, indent); + const dimensionMetadata = renderMetadataMap(schema.dimensions, indent, true); + + return ` ${JSON.stringify(schema.key)}: { + key: ${JSON.stringify(schema.key)}; + source: ${JSON.stringify(schema.source)}; + lane: ${JSON.stringify(schema.lane)}; + measures: ${measuresBlock}; + dimensions: ${dimensionsBlock}; + measureKeys: ${measureUnion}; + dimensionKeys: ${dimensionUnion}; + timeGrains: ${timeGrainUnion}; + metadata: { + measures: ${measureMetadata}; + dimensions: ${dimensionMetadata}; + }; + }`; +} + +/** + * Render the type-level shape of a column's semantic-metadata map for the + * `metadata` field of a MetricRegistry entry. + * + * The shape mirrors {@link MetricColumnSemanticMetadata}: each column emits an + * object literal with `type` (string literal) plus optional `display_name`, + * `format`, `description` (string literals when known, dropped when absent), + * and — for dimensions only — `time_grain` (the column's allowed-grain tuple + * literal). + * + * When the column list is empty, the type collapses to `Record` + * so consumers can still index into `metadata.measures` / `metadata.dimensions` + * without TypeScript errors. 
+ */ +function renderMetadataMap( + cols: MetricColumnMetadata[], + indent: string, + includeTimeGrain = false, +): string { + if (cols.length === 0) return "Record"; + + const inner = cols + .map((col) => { + const fields: string[] = [`type: ${JSON.stringify(col.type)}`]; + if (col.displayName) { + fields.push(`display_name: ${JSON.stringify(col.displayName)}`); + } + if (col.format) { + fields.push(`format: ${JSON.stringify(col.format)}`); + } + if (col.description) { + fields.push(`description: ${JSON.stringify(col.description)}`); + } + if (includeTimeGrain && col.timeGrains && col.timeGrains.length > 0) { + const grainTuple = col.timeGrains + .map((g) => JSON.stringify(g)) + .join(", "); + fields.push(`time_grain: readonly [${grainTuple}]`); + } + const fieldsBlock = fields.map((f) => `${indent} ${f}`).join(";\n"); + return `${indent}${JSON.stringify(col.name)}: { +${fieldsBlock}; +${indent}}`; + }) + .join(";\n"); + + return `{ +${inner}; + }`; +} + +/** + * Render the augmentation block for the appkit-ui MetricRegistry interface. + * + * Mirrors the pattern in `generateTypeDeclarations` for QueryRegistry — emits + * a `declare module` block that consumers in `@databricks/appkit-ui/react` + * pick up via TypeScript module augmentation. + */ +function renderMetricRegistry(schemas: MetricSchema[]): string { + if (schemas.length === 0) { + return `declare module "@databricks/appkit-ui/react" { + interface MetricRegistry {} +} +`; + } + const entries = schemas.map(renderMetricEntry).join(";\n"); + return `declare module "@databricks/appkit-ui/react" { + interface MetricRegistry { +${entries}; + } +} +`; +} + +/** + * Default header for the generated metric.d.ts file. The file is consumed by + * TypeScript via module augmentation only, so no runtime import is needed. 
+ */ +function metricFileHeader(): string { + return `// Auto-generated by AppKit - DO NOT EDIT +// Generated by 'npx @databricks/appkit generate-types' or Vite plugin during build +import "@databricks/appkit-ui/react"; +`; +} + +/** + * Build the full metric.d.ts file from a list of metric schemas. + */ +export function generateMetricTypeDeclarations( + schemas: MetricSchema[], +): string { + return metricFileHeader() + renderMetricRegistry(schemas); +} + +/** + * Per-column metadata as emitted into the build-time JSON artifact. + * + * The shape is deliberately narrow — we forward what the YAML 1.1 declared + * (type, display name, format spec, description) plus the time-grain list for + * dimensions. Consumers (the React hook, the format utilities) destructure + * only the fields they need; absent fields stay absent rather than carrying + * empty-string sentinels so JSON.stringify output is minimal. + * + * Internal — exposed via the {@link buildMetricsMetadataBundle} return shape. + * Library consumers see this shape mirrored verbatim in + * `@databricks/appkit-ui/format`'s `ColumnMetadata` (they import there, not + * here). + */ +interface MetricColumnSemanticMetadata { + type: string; + display_name?: string; + format?: string; + description?: string; + /** Only emitted on dimension entries that resolved to a TIMESTAMP* or DATE SQL type (grain set inferred from type). */ + time_grain?: readonly string[]; +} + +/** + * One metric's complete semantic-metadata bundle. + * + * Splits cleanly into measures + dimensions so the consuming hook can return + * the exact subset for the queried metric without scanning the rest of the + * registry. + * + * Server-side concerns — UC FQN (`source`) and execution lane (`lane`) — are + * deliberately NOT part of this artifact. They live in `metric.json` and are + * consumed by the server only. 
The bundle ships to the client in + * `metrics.metadata.json` and must contain frontend-safe metadata only + * (display names, format specs, descriptions, time-grain hints). + */ +interface MetricSemanticMetadataEntry { + measures: Record; + dimensions: Record; +} + +/** + * Top-level shape of `metrics.metadata.json` — keyed by metric key. + * + * Loaded by: + * - the server-side `loadMetricRegistry` (for body-validator awareness of + * display names + types in error messages, when wired up in a follow-on) + * - the client-side `useMetricView` hook (returned in the `metadata` field) + * - any chart-library glue code that wants direct access to format specs / + * display names (Plotly tickformat, ECharts valueFormatter, table cells, ...) + */ +type MetricsMetadataBundle = Record; + +/** + * Pure function: turn a list of metric schemas into the JSON metadata bundle. + * + * Deterministic key order: outer object keys are sorted alphabetically; + * measures and dimensions are emitted in the order they appeared in DESCRIBE + * (Phase 1's preserved-from-YAML order), but each per-column object's fields + * follow a fixed declaration order so snapshot diffs are stable. + * + * The output is `JSON.stringify`'d with two-space indentation by the file + * emitter — keeping the data structure pure here lets unit tests assert on the + * structure without parsing. 
+ */ +export function buildMetricsMetadataBundle( + schemas: MetricSchema[], +): MetricsMetadataBundle { + const bundle: MetricsMetadataBundle = {}; + const sortedSchemas = [...schemas].sort((a, b) => a.key.localeCompare(b.key)); + + for (const schema of sortedSchemas) { + const measures: Record = {}; + for (const m of schema.measures) { + measures[m.name] = buildColumnMetadata(m); + } + + const dimensions: Record = {}; + for (const d of schema.dimensions) { + dimensions[d.name] = buildColumnMetadata(d); + } + + bundle[schema.key] = { + measures, + dimensions, + }; + } + + return bundle; +} + +/** + * Render one column's emitted semantic-metadata object. + * + * Field order is fixed (`type`, `display_name`, `format`, `description`, + * `time_grain`) and absent fields are simply not included, so the snapshot + * diff is always minimal — consumers receive only what the YAML declared. + * + * `time_grain` is only emitted on dimensions whose SQL type is TIMESTAMP* or + * DATE — measures never receive a grain since they aren't grouped on. The + * caller (extractMetricColumns) skips inference for `isMeasure: true` columns. + */ +function buildColumnMetadata( + col: MetricColumnMetadata, +): MetricColumnSemanticMetadata { + const entry: MetricColumnSemanticMetadata = { type: col.type }; + if (col.displayName) entry.display_name = col.displayName; + if (col.format) entry.format = col.format; + if (col.description) entry.description = col.description; + if (!col.isMeasure && col.timeGrains && col.timeGrains.length > 0) { + entry.time_grain = [...col.timeGrains]; + } + return entry; +} + +/** + * Serialize the metadata bundle to a stable, human-readable JSON string. + * + * Uses two-space indentation and a trailing newline so file diffs are clean + * across regenerations; the bundle's own key order is already sorted by + * {@link buildMetricsMetadataBundle}. 
+ */ +export function generateMetricsMetadataJson(schemas: MetricSchema[]): string { + const bundle = buildMetricsMetadataBundle(schemas); + return `${JSON.stringify(bundle, null, 2)}\n`; +} + +/** + * Optional dependency-injection seam: the function used to fetch DESCRIBE + * results for a given FQN. Production wires this through the WorkspaceClient; + * tests inject a mock that returns a representative DESCRIBE response. + */ +export type DescribeFetcher = ( + fqn: string, +) => Promise; + +/** + * Build a DescribeFetcher from a real WorkspaceClient + warehouseId. + * + * Kept narrow so it does not require importing the SDK at test time. + * + * `wait_timeout: "30s"` makes the API wait synchronously for the statement + * to complete (matching the SDK's own example pattern). Without an explicit + * wait, the call can return while the statement is still PENDING/RUNNING — + * the response carries no `data_array` yet, `parseDescribeTableExtendedJson` + * reads that as "returned no rows", and the registry ships empty. The + * runtime fail-closed gate then 503s every metric request, which is exactly + * the symptom we hit on a cold warehouse. + */ +export function createWorkspaceDescribeFetcher( + warehouseId: string, +): DescribeFetcher { + const client = new WorkspaceClient({}); + return async (fqn: string) => { + const result = (await client.statementExecution.executeStatement({ + statement: `DESCRIBE TABLE EXTENDED ${fqn} AS JSON`, + warehouse_id: warehouseId, + wait_timeout: "30s", + })) as DatabricksStatementExecutionResponse; + return result; + }; +} + +/** + * One per-entry sync failure recorded by {@link syncMetrics}. Failures are + * surfaced to the caller (CLI / Vite plugin) so they can decide whether to + * exit non-zero. Without this, a silently-empty bundle would ship to + * production and the route's runtime fail-closed gate would 503 every + * affected metric. 
+ */ +export interface MetricSyncFailure { + /** Stable metric key — matches the key in metric.json. */ + key: string; + /** Three-part FQN that failed to resolve. */ + source: string; + /** Single human-readable reason (DESCRIBE failed, parse failed, zero columns). */ + reason: string; +} + +/** + * Result shape from {@link syncMetrics}: the schemas (one per entry, possibly + * empty if the entry failed) plus a list of per-entry failures so the caller + * can emit a non-zero exit / build error when something didn't resolve. + */ +export interface MetricSyncResult { + schemas: MetricSchema[]; + failures: MetricSyncFailure[]; +} + +/** + * Run schema synchronization for every entry in `metric.json`. + * + * `fetcher` is injected so the same code path serves Vite, the CLI, and unit + * tests with a mock that returns a representative DESCRIBE response. + * + * Returns `{ schemas, failures }`. The schemas array always carries one + * entry per registered metric (even on failure — the entry has empty + * measures/dimensions). The failures array is populated for any entry that + * (a) the DESCRIBE call rejected, (b) the response could not be parsed, or + * (c) extraction yielded zero columns. Callers (the CLI, the Vite plugin) + * inspect `failures` to decide whether to exit non-zero. 
+ */ +export async function syncMetrics( + resolution: MetricConfigResolution, + fetcher: DescribeFetcher, +): Promise { + const schemas: MetricSchema[] = []; + const failures: MetricSyncFailure[] = []; + + for (const entry of resolution.entries) { + let response: DatabricksStatementExecutionResponse; + try { + response = await fetcher(entry.source); + } catch (err) { + const reason = `DESCRIBE TABLE EXTENDED failed: ${(err as Error).message}`; + logger.warn("%s for %s", reason, entry.source); + failures.push({ key: entry.key, source: entry.source, reason }); + schemas.push({ + key: entry.key, + source: entry.source, + lane: entry.lane, + measures: [], + dimensions: [], + }); + continue; + } + + let columns: MetricColumnMetadata[] = []; + let parseError: string | null = null; + try { + const parsed = parseDescribeTableExtendedJson(response); + columns = extractMetricColumns(parsed); + } catch (err) { + parseError = `Failed to extract columns from DESCRIBE response: ${(err as Error).message}`; + logger.warn("%s for %s", parseError, entry.source); + } + + const measures = columns.filter((c) => c.isMeasure); + const dimensions = columns.filter((c) => !c.isMeasure); + + if (parseError) { + failures.push({ + key: entry.key, + source: entry.source, + reason: parseError, + }); + } else if (columns.length === 0) { + // Extraction succeeded but yielded no columns. The most common cause + // is a DESCRIBE response shape that `extractMetricColumns` doesn't + // recognize. Treat as a failure so CI catches it instead of letting an + // empty bundle entry ship — the route's fail-closed gate would then + // 503 every request to this metric in production. 
+ const reason = + "DESCRIBE response yielded zero columns — check the response shape (top-level `columns` array or `schema.fields`)."; + logger.warn("%s for %s", reason, entry.source); + failures.push({ key: entry.key, source: entry.source, reason }); + } + + schemas.push({ + key: entry.key, + source: entry.source, + lane: entry.lane, + measures, + dimensions, + }); + } + + return { schemas, failures }; +} diff --git a/packages/appkit/src/type-generator/tests/__snapshots__/metric-registry.test.ts.snap b/packages/appkit/src/type-generator/tests/__snapshots__/metric-registry.test.ts.snap new file mode 100644 index 000000000..f8fbaef9c --- /dev/null +++ b/packages/appkit/src/type-generator/tests/__snapshots__/metric-registry.test.ts.snap @@ -0,0 +1,168 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`generateMetricTypeDeclarations — snapshot > emits TimeGrain union for a metric view with time-typed + regular dimensions 1`] = ` +"// Auto-generated by AppKit - DO NOT EDIT +// Generated by 'npx @databricks/appkit generate-types' or Vite plugin during build +import "@databricks/appkit-ui/react"; +declare module "@databricks/appkit-ui/react" { + interface MetricRegistry { + "revenue": { + key: "revenue"; + source: "appkit_demo.public.revenue_metrics_v2"; + lane: "sp"; + measures: { + /** @sqlType DECIMAL(38,2) */ + "arr": number; + }; + dimensions: { + /** @sqlType TIMESTAMP @timeGrain day|hour|minute|month|quarter|week|year */ + "created_at": string; + /** @sqlType STRING */ + "region": string; + /** @sqlType STRING */ + "segment": string; + }; + measureKeys: "arr"; + dimensionKeys: "created_at" | "region" | "segment"; + timeGrains: "day" | "hour" | "minute" | "month" | "quarter" | "week" | "year"; + metadata: { + measures: { + "arr": { + type: "DECIMAL(38,2)"; + description: "Annual recurring revenue"; + }; + }; + dimensions: { + "created_at": { + type: "TIMESTAMP"; + time_grain: readonly ["day", "hour", "minute", "month", "quarter", "week", 
"year"]; + }; + "region": { + type: "STRING"; + }; + "segment": { + type: "STRING"; + }; + }; + }; + }; + } +} +" +`; + +exports[`generateMetricTypeDeclarations — snapshot > emits a stable MetricRegistry augmentation for a representative input 1`] = ` +"// Auto-generated by AppKit - DO NOT EDIT +// Generated by 'npx @databricks/appkit generate-types' or Vite plugin during build +import "@databricks/appkit-ui/react"; +declare module "@databricks/appkit-ui/react" { + interface MetricRegistry { + "revenue": { + key: "revenue"; + source: "appkit_demo.public.revenue_metrics"; + lane: "sp"; + measures: { + /** @sqlType DECIMAL(38,2) */ + "arr": number; + /** @sqlType DECIMAL(38,2) */ + "mrr": number; + }; + dimensions: { + /** @sqlType STRING */ + "region": string; + /** @sqlType STRING */ + "segment": string; + }; + measureKeys: "arr" | "mrr"; + dimensionKeys: "region" | "segment"; + timeGrains: never; + metadata: { + measures: { + "arr": { + type: "DECIMAL(38,2)"; + description: "Annual recurring revenue"; + }; + "mrr": { + type: "DECIMAL(38,2)"; + description: "Monthly recurring revenue"; + }; + }; + dimensions: { + "region": { + type: "STRING"; + }; + "segment": { + type: "STRING"; + }; + }; + }; + }; + } +} +" +`; + +exports[`generateMetricTypeDeclarations — snapshot > emits an empty MetricRegistry interface when no metrics are registered 1`] = ` +"// Auto-generated by AppKit - DO NOT EDIT +// Generated by 'npx @databricks/appkit generate-types' or Vite plugin during build +import "@databricks/appkit-ui/react"; +declare module "@databricks/appkit-ui/react" { + interface MetricRegistry {} +} +" +`; + +exports[`generateMetricsMetadataJson — snapshot > serializes a representative metric view with display_name + format + time_grain 1`] = ` +"{ + "customer_metrics": { + "measures": { + "churn_rate": { + "type": "DOUBLE", + "display_name": "Churn Rate", + "format": "0.0%" + } + }, + "dimensions": { + "csm_email": { + "type": "STRING", + "display_name": "CSM Email" + } + } 
+ }, + "revenue": { + "measures": { + "arr": { + "type": "DECIMAL(38,2)", + "display_name": "Annual Recurring Revenue", + "format": "$#,##0.00", + "description": "ARR per quarter" + }, + "growth_rate": { + "type": "DOUBLE", + "display_name": "Growth Rate", + "format": "0.0%" + } + }, + "dimensions": { + "region": { + "type": "STRING", + "display_name": "Region" + }, + "created_at": { + "type": "TIMESTAMP", + "display_name": "Period", + "time_grain": [ + "day", + "hour", + "minute", + "month", + "quarter", + "week", + "year" + ] + } + } + } +} +" +`; diff --git a/packages/appkit/src/type-generator/tests/metric-registry.test.ts b/packages/appkit/src/type-generator/tests/metric-registry.test.ts new file mode 100644 index 000000000..ec4d32608 --- /dev/null +++ b/packages/appkit/src/type-generator/tests/metric-registry.test.ts @@ -0,0 +1,978 @@ +import fs from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import { afterEach, beforeEach, describe, expect, test } from "vitest"; +import { + buildMetricsMetadataBundle, + extractMetricColumns, + generateMetricsMetadataJson, + generateMetricTypeDeclarations, + parseDescribeTableExtendedJson, + readMetricConfig, + resolveMetricConfig, + syncMetrics, +} from "../metric-registry"; +import type { DatabricksStatementExecutionResponse } from "../types"; + +/** + * Build a representative DESCRIBE TABLE EXTENDED ... AS JSON response. + * + * The Statement Execution API returns one row, one cell — a JSON string + * payload. The shape is broadly: + * + * ```json + * { + * "table_name": "...", + * "columns": [ + * { "name": "arr", "type": "DECIMAL(38,2)", "is_measure": true, "comment": "..." }, + * { "name": "region", "type": "STRING", "is_measure": false } + * ] + * } + * ``` + * + * Phase 1 mocks this. Live integration ships in Phase 7. 
+ */ +function mockDescribeResponse( + payload: unknown, +): DatabricksStatementExecutionResponse { + return { + statement_id: "stmt-mock", + status: { state: "SUCCEEDED" }, + result: { + data_array: [[JSON.stringify(payload)]], + }, + }; +} + +describe("readMetricConfig", () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), "appkit-metric-typegen-")); + }); + + afterEach(async () => { + await fs.rm(tmpDir, { recursive: true, force: true }); + }); + + test("returns null when metric.json is absent", async () => { + expect(await readMetricConfig(tmpDir)).toBeNull(); + }); + + test("parses a valid metric.json", async () => { + await fs.writeFile( + path.join(tmpDir, "metric.json"), + JSON.stringify({ sp: { revenue: { source: "demo.public.revenue" } } }), + ); + const cfg = await readMetricConfig(tmpDir); + expect(cfg?.sp?.revenue.source).toBe("demo.public.revenue"); + }); + + test("throws on malformed JSON", async () => { + await fs.writeFile(path.join(tmpDir, "metric.json"), "{not json"); + await expect(readMetricConfig(tmpDir)).rejects.toThrowError( + /parse metric.json/, + ); + }); +}); + +describe("resolveMetricConfig", () => { + test("flattens sp + obo lanes into a sorted entries list", () => { + const cfg = { + sp: { b_metric: { source: "a.b.c" }, a_metric: { source: "a.b.d" } }, + obo: { c_metric: { source: "a.b.e" } }, + }; + const { entries } = resolveMetricConfig(cfg); + expect(entries.map((e) => e.key)).toEqual([ + "a_metric", + "b_metric", + "c_metric", + ]); + expect(entries[0].lane).toBe("sp"); + expect(entries[2].lane).toBe("obo"); + }); + + test("rejects duplicate keys across lanes", () => { + const cfg = { + sp: { revenue: { source: "a.b.c" } }, + obo: { revenue: { source: "a.b.d" } }, + }; + expect(() => resolveMetricConfig(cfg)).toThrowError(/Duplicate metric/); + }); + + test("rejects unknown entry fields", () => { + const cfg = { + sp: { revenue: { source: "a.b.c", cacheTtl: 60 } as any }, + 
}; + expect(() => resolveMetricConfig(cfg)).toThrowError(/'source' is allowed/); + }); + + test("rejects bad FQN format", () => { + const cfg = { sp: { revenue: { source: "not.three.part.parts" } } }; + expect(() => resolveMetricConfig(cfg)).toThrowError(/three-part UC FQN/); + }); + + test("rejects a metric key starting with a digit", () => { + const cfg = { sp: { "1revenue": { source: "a.b.c" } } }; + expect(() => resolveMetricConfig(cfg)).toThrowError(/Invalid metric key/); + }); +}); + +describe("parseDescribeTableExtendedJson", () => { + test("parses the JSON payload from the first cell", () => { + const response = mockDescribeResponse({ + columns: [{ name: "arr", type: "DECIMAL", is_measure: true }], + }); + const parsed = parseDescribeTableExtendedJson(response); + expect(parsed).toMatchObject({ + columns: [{ name: "arr", type: "DECIMAL", is_measure: true }], + }); + }); + + test("throws on a FAILED status", () => { + expect(() => + parseDescribeTableExtendedJson({ + statement_id: "x", + status: { state: "FAILED", error: { message: "no such table" } }, + }), + ).toThrowError(/no such table/); + }); + + test("throws when the response is empty", () => { + expect(() => + parseDescribeTableExtendedJson({ + statement_id: "x", + status: { state: "SUCCEEDED" }, + result: { data_array: [] }, + }), + ).toThrowError(/no rows/); + }); + + test("throws when the cell is not a JSON string", () => { + expect(() => + parseDescribeTableExtendedJson({ + statement_id: "x", + status: { state: "SUCCEEDED" }, + result: { data_array: [[null]] }, + }), + ).toThrowError(/JSON string/); + }); +}); + +describe("extractMetricColumns", () => { + test("extracts measures and dimensions from the standard shape", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "arr", + type: "DECIMAL(38,2)", + is_measure: true, + comment: "Annual recurring revenue", + }, + { name: "region", type: "STRING", is_measure: false }, + ], + }); + expect(cols).toHaveLength(2); + 
expect(cols[0]).toMatchObject({ + name: "arr", + type: "DECIMAL(38,2)", + isMeasure: true, + description: "Annual recurring revenue", + }); + expect(cols[1]).toMatchObject({ + name: "region", + type: "STRING", + isMeasure: false, + }); + }); + + test("falls back to schema.fields shape", () => { + const cols = extractMetricColumns({ + schema: { + fields: [ + { + name: "mrr", + type: { name: "DOUBLE" }, + metadata: { is_measure: true }, + }, + ], + }, + }); + expect(cols).toHaveLength(1); + expect(cols[0]).toMatchObject({ + name: "mrr", + type: "DOUBLE", + isMeasure: true, + }); + }); + + test("returns empty array on unrecognized shape", () => { + expect(extractMetricColumns({ unrelated: true })).toEqual([]); + }); + + // ── Phase 2: time-typed dimensions ──────────────────────────────────── + test("infers all 7 standard grains for a TIMESTAMP dimension", () => { + const cols = extractMetricColumns({ + columns: [ + { name: "created_at", type: "TIMESTAMP", is_measure: false }, + { name: "region", type: "STRING", is_measure: false }, + ], + }); + expect(cols).toHaveLength(2); + expect(cols[0]).toMatchObject({ + name: "created_at", + isMeasure: false, + timeGrains: ["day", "hour", "minute", "month", "quarter", "week", "year"], + }); + // Non-time dim has no timeGrains key. 
+ expect(cols[1].timeGrains).toBeUndefined(); + }); + + test("infers 5 standard grains (no sub-day) for a DATE dimension", () => { + const cols = extractMetricColumns({ + columns: [{ name: "billing_date", type: "DATE", is_measure: false }], + }); + expect(cols[0].timeGrains).toEqual([ + "day", + "month", + "quarter", + "week", + "year", + ]); + }); + + test("recognizes TIMESTAMP_LTZ and TIMESTAMP_NTZ aliases", () => { + const cols = extractMetricColumns({ + columns: [ + { name: "ts_ltz", type: "TIMESTAMP_LTZ", is_measure: false }, + { name: "ts_ntz", type: "TIMESTAMP_NTZ", is_measure: false }, + ], + }); + expect(cols[0].timeGrains).toEqual([ + "day", + "hour", + "minute", + "month", + "quarter", + "week", + "year", + ]); + expect(cols[1].timeGrains).toEqual([ + "day", + "hour", + "minute", + "month", + "quarter", + "week", + "year", + ]); + }); + + test("type matching is case-insensitive", () => { + const cols = extractMetricColumns({ + columns: [ + { name: "a", type: "timestamp", is_measure: false }, + { name: "b", type: "Timestamp", is_measure: false }, + { name: "c", type: "DATE", is_measure: false }, + { name: "d", type: "date", is_measure: false }, + ], + }); + expect(cols[0].timeGrains?.length).toBe(7); + expect(cols[1].timeGrains?.length).toBe(7); + expect(cols[2].timeGrains?.length).toBe(5); + expect(cols[3].timeGrains?.length).toBe(5); + }); + + test("strips parameterized type suffixes like TIMESTAMP(6)", () => { + const cols = extractMetricColumns({ + columns: [{ name: "ts", type: "TIMESTAMP(6)", is_measure: false }], + }); + expect(cols[0].timeGrains?.length).toBe(7); + }); + + test("does not infer grains for non-temporal types", () => { + const cols = extractMetricColumns({ + columns: [ + { name: "id", type: "BIGINT", is_measure: false }, + { name: "name", type: "STRING", is_measure: false }, + { name: "amount", type: "DECIMAL(38,2)", is_measure: false }, + ], + }); + for (const col of cols) { + expect(col.timeGrains).toBeUndefined(); + } + }); + + 
test("does not infer grains on measures even if their type is TIMESTAMP", () => { + // Measures are aggregated, never grouped on — grain inference is + // dimension-only. Defends against an unusual MEASURE() expression + // resolving to a temporal type. + const cols = extractMetricColumns({ + columns: [{ name: "last_event_at", type: "TIMESTAMP", is_measure: true }], + }); + expect(cols[0].timeGrains).toBeUndefined(); + }); +}); + +describe("syncMetrics", () => { + test("returns one schema per resolved entry, columns split by measure flag", async () => { + const resolution = resolveMetricConfig({ + sp: { revenue: { source: "demo.public.revenue" } }, + }); + + const fetcher = async () => + mockDescribeResponse({ + columns: [ + { name: "arr", type: "DECIMAL(38,2)", is_measure: true }, + { name: "mrr", type: "DECIMAL(38,2)", is_measure: true }, + { name: "region", type: "STRING", is_measure: false }, + ], + }); + + const { schemas } = await syncMetrics(resolution, fetcher); + expect(schemas).toHaveLength(1); + const [schema] = schemas; + expect(schema.key).toBe("revenue"); + expect(schema.measures.map((m) => m.name)).toEqual(["arr", "mrr"]); + expect(schema.dimensions.map((d) => d.name)).toEqual(["region"]); + }); + + test("falls back to empty columns when DESCRIBE throws (does not crash typegen)", async () => { + const resolution = resolveMetricConfig({ + sp: { revenue: { source: "demo.public.revenue" } }, + }); + + const fetcher = async () => { + throw new Error("warehouse unreachable"); + }; + + const { schemas } = await syncMetrics(resolution, fetcher); + expect(schemas[0].measures).toEqual([]); + expect(schemas[0].dimensions).toEqual([]); + }); +}); + +describe("generateMetricTypeDeclarations — snapshot", () => { + test("emits a stable MetricRegistry augmentation for a representative input", async () => { + const resolution = resolveMetricConfig({ + sp: { revenue: { source: "appkit_demo.public.revenue_metrics" } }, + }); + + const fetcher = async () => + 
mockDescribeResponse({ + columns: [ + { + name: "arr", + type: "DECIMAL(38,2)", + is_measure: true, + comment: "Annual recurring revenue", + }, + { + name: "mrr", + type: "DECIMAL(38,2)", + is_measure: true, + comment: "Monthly recurring revenue", + }, + { name: "region", type: "STRING", is_measure: false }, + { name: "segment", type: "STRING", is_measure: false }, + ], + }); + + const { schemas } = await syncMetrics(resolution, fetcher); + const output = generateMetricTypeDeclarations(schemas); + expect(output).toMatchSnapshot(); + }); + + test("emits an empty MetricRegistry interface when no metrics are registered", () => { + const output = generateMetricTypeDeclarations([]); + expect(output).toMatchSnapshot(); + }); + + // ── Phase 2: time-typed dim + multiple non-time dims fixture ───────── + test("emits TimeGrain union for a metric view with time-typed + regular dimensions", async () => { + const resolution = resolveMetricConfig({ + sp: { + revenue: { source: "appkit_demo.public.revenue_metrics_v2" }, + }, + }); + + const fetcher = async () => + mockDescribeResponse({ + columns: [ + { + name: "arr", + type: "DECIMAL(38,2)", + is_measure: true, + comment: "Annual recurring revenue", + }, + { name: "created_at", type: "TIMESTAMP", is_measure: false }, + { name: "region", type: "STRING", is_measure: false }, + { name: "segment", type: "STRING", is_measure: false }, + ], + }); + + const { schemas } = await syncMetrics(resolution, fetcher); + const output = generateMetricTypeDeclarations(schemas); + expect(output).toMatchSnapshot(); + + // Sanity assertions in addition to the snapshot, so future drift surfaces + // even when snapshots are blindly updated. TIMESTAMP → all 7 standard grains. 
+ expect(output).toContain( + 'timeGrains: "day" | "hour" | "minute" | "month" | "quarter" | "week" | "year"', + ); + expect(output).toContain( + "@timeGrain day|hour|minute|month|quarter|week|year", + ); + expect(output).toContain('"created_at": string'); + expect(output).toContain('"region": string'); + }); +}); + +// ── Phase 5: semantic-metadata extraction (display_name + format) ───────── +describe("extractMetricColumns — Phase 5 semantic metadata", () => { + test("captures display_name from a measure column", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "arr", + type: "DECIMAL(38,2)", + is_measure: true, + display_name: "Annual Recurring Revenue", + comment: "ARR for the period", + }, + ], + }); + expect(cols[0]).toMatchObject({ + name: "arr", + type: "DECIMAL(38,2)", + isMeasure: true, + displayName: "Annual Recurring Revenue", + description: "ARR for the period", + }); + }); + + test("captures format spec from a measure column", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "arr", + type: "DECIMAL(38,2)", + is_measure: true, + format: "$#,##0.00", + }, + ], + }); + expect(cols[0].format).toBe("$#,##0.00"); + }); + + test("captures display_name + format on a dimension column", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "region", + type: "STRING", + is_measure: false, + display_name: "Region", + format: undefined, + }, + ], + }); + expect(cols[0]).toMatchObject({ + name: "region", + isMeasure: false, + displayName: "Region", + }); + expect(cols[0].format).toBeUndefined(); + }); + + test("falls back to displayName camelCase variant", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "mrr", + type: "DECIMAL", + is_measure: true, + displayName: "Monthly Recurring Revenue", + }, + ], + }); + expect(cols[0].displayName).toBe("Monthly Recurring Revenue"); + }); + + test("reads display_name + format from metadata. 
(DESCRIBE wrap)", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "arr", + type: "DECIMAL(38,2)", + metadata: { + is_measure: true, + display_name: "ARR", + format: "$#,##0.00", + }, + }, + ], + }); + expect(cols[0]).toMatchObject({ + isMeasure: true, + displayName: "ARR", + format: "$#,##0.00", + }); + }); + + test("treats empty / whitespace display_name as absent", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "arr", + type: "DECIMAL", + is_measure: true, + display_name: " ", + format: "", + }, + ], + }); + expect(cols[0].displayName).toBeUndefined(); + expect(cols[0].format).toBeUndefined(); + }); + + test("captures format from format_spec alias", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "arr", + type: "DECIMAL", + is_measure: true, + format_spec: "$#,##0.00", + }, + ], + }); + expect(cols[0].format).toBe("$#,##0.00"); + }); + + // ── Structured-format translation (UC YAML 1.1 → printf string) ──────── + // DESCRIBE TABLE EXTENDED ... AS JSON wraps the format type as the outer + // key: { currency: { ... } } / { percent: { ... } } / { number: { ... } }. + // The extractor translates these into printf strings consumable by + // formatValue / toD3Format. 
+ test("translates structured currency format with USD", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "arr", + type: "DOUBLE", + is_measure: true, + metadata: { + format: { + currency: { + decimal_places: { type: "EXACT", places: 2 }, + currency_code: "USD", + }, + }, + }, + }, + ], + }); + expect(cols[0].format).toBe("$#,##0.00"); + }); + + test("translates structured currency format with EUR + 0 decimal places", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "ticket_price", + type: "DOUBLE", + is_measure: true, + metadata: { + format: { + currency: { + decimal_places: { places: 0 }, + currency_code: "EUR", + }, + }, + }, + }, + ], + }); + expect(cols[0].format).toBe("€#,##0"); + }); + + test("falls back to ISO code as literal prefix for unknown currencies", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "amount", + type: "DOUBLE", + is_measure: true, + metadata: { + format: { + currency: { + decimal_places: { places: 2 }, + currency_code: "AUD", + }, + }, + }, + }, + ], + }); + expect(cols[0].format).toBe("AUD #,##0.00"); + }); + + test("translates structured percent format with 1 decimal place", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "churn_rate", + type: "DECIMAL", + is_measure: true, + metadata: { + format: { + percent: { decimal_places: { places: 1 } }, + }, + }, + }, + ], + }); + expect(cols[0].format).toBe("0.0%"); + }); + + test("translates structured percent with 0 decimal places", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "rate", + type: "DECIMAL", + is_measure: true, + metadata: { format: { percent: { decimal_places: { places: 0 } } } }, + }, + ], + }); + expect(cols[0].format).toBe("0%"); + }); + + test("translates structured number format with comma grouping", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "active_accounts", + type: "BIGINT", + is_measure: true, + metadata: { + format: { number: 
{ decimal_places: { places: 0 } } }, + }, + }, + ], + }); + expect(cols[0].format).toBe("#,##0"); + }); + + test("returns undefined for unrecognized structured format shapes", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "weirdo", + type: "DOUBLE", + is_measure: true, + metadata: { + format: { custom_thing: { whatever: 1 } }, + }, + }, + ], + }); + expect(cols[0].format).toBeUndefined(); + }); + + test("currency format defaults to USD + 2 places when fields are missing", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "amount", + type: "DOUBLE", + is_measure: true, + metadata: { format: { currency: {} } }, + }, + ], + }); + expect(cols[0].format).toBe("$#,##0.00"); + }); + + test("accepts decimal_places as a bare number (legacy shape)", () => { + const cols = extractMetricColumns({ + columns: [ + { + name: "amount", + type: "DOUBLE", + is_measure: true, + metadata: { + format: { currency: { decimal_places: 4, currency_code: "USD" } }, + }, + }, + ], + }); + expect(cols[0].format).toBe("$#,##0.0000"); + }); +}); + +// ── Phase 5: metadata bundle generation ─────────────────────────────────── +describe("buildMetricsMetadataBundle", () => { + test("emits per-metric measures + dimensions records keyed by name", async () => { + const resolution = resolveMetricConfig({ + sp: { revenue: { source: "appkit_demo.public.revenue_metrics" } }, + }); + + const fetcher = async () => + mockDescribeResponse({ + columns: [ + { + name: "arr", + type: "DECIMAL(38,2)", + is_measure: true, + display_name: "Annual Recurring Revenue", + format: "$#,##0.00", + comment: "ARR for the period", + }, + { name: "region", type: "STRING", is_measure: false }, + { name: "created_at", type: "TIMESTAMP", is_measure: false }, + ], + }); + + const { schemas } = await syncMetrics(resolution, fetcher); + const bundle = buildMetricsMetadataBundle(schemas); + + expect(bundle.revenue).toMatchObject({ + measures: { + arr: { + type: "DECIMAL(38,2)", + 
display_name: "Annual Recurring Revenue", + format: "$#,##0.00", + description: "ARR for the period", + }, + }, + dimensions: { + region: { + type: "STRING", + }, + created_at: { + type: "TIMESTAMP", + time_grain: [ + "day", + "hour", + "minute", + "month", + "quarter", + "week", + "year", + ], + }, + }, + }); + // Defense-in-depth: the client-shipped bundle must not carry server-side + // concerns (UC FQN, execution lane). They live in metric.json server-side. + expect(bundle.revenue).not.toHaveProperty("source"); + expect(bundle.revenue).not.toHaveProperty("lane"); + }); + + test("preserves stable alphabetical key order across metrics", async () => { + const resolution = resolveMetricConfig({ + sp: { + z_metric: { source: "demo.public.z_metric" }, + a_metric: { source: "demo.public.a_metric" }, + }, + }); + + const fetcher = async () => + mockDescribeResponse({ + columns: [{ name: "v", type: "DECIMAL", is_measure: true }], + }); + + const { schemas } = await syncMetrics(resolution, fetcher); + const bundle = buildMetricsMetadataBundle(schemas); + expect(Object.keys(bundle)).toEqual(["a_metric", "z_metric"]); + }); + + test("omits absent fields rather than emitting null/empty placeholders", async () => { + const resolution = resolveMetricConfig({ + sp: { revenue: { source: "demo.public.revenue" } }, + }); + + const fetcher = async () => + mockDescribeResponse({ + columns: [{ name: "arr", type: "DECIMAL", is_measure: true }], + }); + + const { schemas } = await syncMetrics(resolution, fetcher); + const bundle = buildMetricsMetadataBundle(schemas); + const arr = bundle.revenue.measures.arr; + expect(arr.type).toBe("DECIMAL"); + expect(arr.display_name).toBeUndefined(); + expect(arr.format).toBeUndefined(); + expect(arr.description).toBeUndefined(); + expect(arr.time_grain).toBeUndefined(); + }); + + test("only emits time_grain on time-typed dimensions, never on measures", async () => { + const resolution = resolveMetricConfig({ + sp: { revenue: { source: 
"demo.public.revenue" } }, + }); + + const fetcher = async () => + mockDescribeResponse({ + columns: [ + // Even when a measure resolves to a temporal type (rare but possible + // for MEASURE() expressions like MAX(event_at)), no grains should be + // emitted — measures aren't grouped on. Grain inference is gated on + // is_measure: false in extractMetricColumns. + { name: "last_event_at", type: "TIMESTAMP", is_measure: true }, + { name: "ts", type: "TIMESTAMP", is_measure: false }, + ], + }); + + const { schemas } = await syncMetrics(resolution, fetcher); + const bundle = buildMetricsMetadataBundle(schemas); + expect(bundle.revenue.measures.last_event_at.time_grain).toBeUndefined(); + expect(bundle.revenue.dimensions.ts.time_grain).toEqual([ + "day", + "hour", + "minute", + "month", + "quarter", + "week", + "year", + ]); + }); +}); + +// ── Phase 5: metadata JSON serialization ────────────────────────────────── +describe("generateMetricsMetadataJson — snapshot", () => { + test("serializes a representative metric view with display_name + format + time_grain", async () => { + const resolution = resolveMetricConfig({ + sp: { + revenue: { source: "appkit_demo.public.revenue_metrics" }, + }, + obo: { + customer_metrics: { + source: "appkit_demo.public.customer_metrics", + }, + }, + }); + + const fetcher = async (fqn: string) => + fqn.endsWith("revenue_metrics") + ? 
mockDescribeResponse({ + columns: [ + { + name: "arr", + type: "DECIMAL(38,2)", + is_measure: true, + display_name: "Annual Recurring Revenue", + format: "$#,##0.00", + comment: "ARR per quarter", + }, + { + name: "growth_rate", + type: "DOUBLE", + is_measure: true, + display_name: "Growth Rate", + format: "0.0%", + }, + { + name: "region", + type: "STRING", + is_measure: false, + display_name: "Region", + }, + { + name: "created_at", + type: "TIMESTAMP", + is_measure: false, + display_name: "Period", + }, + ], + }) + : mockDescribeResponse({ + columns: [ + { + name: "churn_rate", + type: "DOUBLE", + is_measure: true, + display_name: "Churn Rate", + format: "0.0%", + }, + { + name: "csm_email", + type: "STRING", + is_measure: false, + display_name: "CSM Email", + }, + ], + }); + + const { schemas } = await syncMetrics(resolution, fetcher); + const json = generateMetricsMetadataJson(schemas); + expect(json).toMatchSnapshot(); + + // Guard against snapshot blind-update: structural assertions on the parsed JSON. + const parsed = JSON.parse(json); + expect(Object.keys(parsed)).toEqual(["customer_metrics", "revenue"]); + expect(parsed.revenue.measures.arr.format).toBe("$#,##0.00"); + expect(parsed.revenue.measures.arr.display_name).toBe( + "Annual Recurring Revenue", + ); + // Time grains are inferred from the SQL type and ordered lexicographically. + // TIMESTAMP → all 7 standard grains. + expect(parsed.revenue.dimensions.created_at.time_grain).toEqual([ + "day", + "hour", + "minute", + "month", + "quarter", + "week", + "year", + ]); + // The client-shipped artifact must not carry server-side concerns: + // UC FQN (`source`) and execution lane (`lane`) live in metric.json + // and are consumed only on the server. Asserting their absence catches + // accidental re-introduction in code review or refactors. 
+ expect(parsed.revenue).not.toHaveProperty("source"); + expect(parsed.revenue).not.toHaveProperty("lane"); + expect(parsed.customer_metrics).not.toHaveProperty("source"); + expect(parsed.customer_metrics).not.toHaveProperty("lane"); + }); + + test("emits `{}` when no metrics are registered", () => { + expect(generateMetricsMetadataJson([])).toBe("{}\n"); + }); +}); + +// ── Phase 2: syncMetrics propagates timeGrains end-to-end ──────────────── +describe("syncMetrics — time-typed dimension propagation", () => { + test("propagates inferred grains onto the resulting MetricSchema", async () => { + const resolution = resolveMetricConfig({ + sp: { revenue: { source: "demo.public.revenue" } }, + }); + + const fetcher = async () => + mockDescribeResponse({ + columns: [ + { name: "arr", type: "DECIMAL", is_measure: true }, + { name: "ts", type: "TIMESTAMP", is_measure: false }, + { name: "region", type: "STRING", is_measure: false }, + ], + }); + + const { schemas } = await syncMetrics(resolution, fetcher); + expect(schemas[0].dimensions).toHaveLength(2); + const tsDim = schemas[0].dimensions.find((d) => d.name === "ts"); + expect(tsDim?.timeGrains).toEqual([ + "day", + "hour", + "minute", + "month", + "quarter", + "week", + "year", + ]); + const regionDim = schemas[0].dimensions.find((d) => d.name === "region"); + expect(regionDim?.timeGrains).toBeUndefined(); + }); +}); diff --git a/packages/appkit/src/type-generator/vite-plugin.ts b/packages/appkit/src/type-generator/vite-plugin.ts index 5f4a0d4b1..f9e75496e 100644 --- a/packages/appkit/src/type-generator/vite-plugin.ts +++ b/packages/appkit/src/type-generator/vite-plugin.ts @@ -5,6 +5,8 @@ import { createLogger } from "../logging/logger"; import { ANALYTICS_TYPES_FILE, generateFromEntryPoint, + METRIC_METADATA_FILE, + METRIC_TYPES_FILE, TYPES_DIR, } from "./index"; @@ -16,6 +18,14 @@ const logger = createLogger("type-generator:vite-plugin"); interface AppKitTypesPluginOptions { /* Path to the output d.ts file (relative 
to client folder). */ outFile?: string; + /** Path to the metric registry d.ts file (relative to client folder). */ + metricOutFile?: string; + /** + * Path to the metric semantic-metadata JSON file (relative to client folder). + * Phase 5 build-time artifact — sibling of {@link metricOutFile}. Skipped + * automatically when `metric.json` is absent. + */ + metricMetadataOutFile?: string; /** Folders to watch for changes. */ watchFolders?: string[]; } @@ -28,6 +38,8 @@ interface AppKitTypesPluginOptions { */ export function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin { let outFile: string; + let metricOutFile: string; + let metricMetadataOutFile: string; let watchFolders: string[]; async function generate() { @@ -44,6 +56,8 @@ export function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin { queryFolder: watchFolders[0], warehouseId, noCache: false, + metricOutFile, + metricMetadataOutFile, }); } catch (error) { // throw in production to fail the build @@ -78,6 +92,15 @@ export function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin { projectRoot, options?.outFile ?? `shared/${TYPES_DIR}/${ANALYTICS_TYPES_FILE}`, ); + metricOutFile = path.resolve( + projectRoot, + options?.metricOutFile ?? `shared/${TYPES_DIR}/${METRIC_TYPES_FILE}`, + ); + metricMetadataOutFile = path.resolve( + projectRoot, + options?.metricMetadataOutFile ?? + `shared/${TYPES_DIR}/${METRIC_METADATA_FILE}`, + ); watchFolders = options?.watchFolders ?? 
[ path.join(process.cwd(), "config", "queries"), ]; @@ -95,7 +118,10 @@ export function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin { changedFile.startsWith(folder), ); - if (isWatchedFile && changedFile.endsWith(".sql")) { + if ( + isWatchedFile && + (changedFile.endsWith(".sql") || changedFile.endsWith("metric.json")) + ) { generate(); } }); diff --git a/packages/shared/src/cli/commands/metric/index.ts b/packages/shared/src/cli/commands/metric/index.ts new file mode 100644 index 000000000..ce57743d9 --- /dev/null +++ b/packages/shared/src/cli/commands/metric/index.ts @@ -0,0 +1,23 @@ +import { Command } from "commander"; +import { metricSyncCommand } from "./sync/sync"; + +/** + * Parent command for metric-view operations. + * + * Currently exposes a single subcommand (`sync`); future v1+ subcommands + * (`list`, `validate`, `describe`) plug in here so users have a single + * top-level surface for everything related to UC Metric Views. + * + * Sibling of `plugin`, `setup`, `generate-types`, `lint`, `docs`, `codemod`. + */ +export const metricCommand = new Command("metric") + .description("Metric-view management commands (UC Metric Views)") + .addCommand(metricSyncCommand) + .addHelpText( + "after", + ` +Examples: + $ appkit metric sync + $ appkit metric sync --warehouse-id 1234abcd5678efgh --metric-json-path config/queries/metric.json + $ appkit metric sync --silent`, + ); diff --git a/packages/shared/src/cli/commands/metric/sync/__snapshots__/sync.test.ts.snap b/packages/shared/src/cli/commands/metric/sync/__snapshots__/sync.test.ts.snap new file mode 100644 index 000000000..6eab08804 --- /dev/null +++ b/packages/shared/src/cli/commands/metric/sync/__snapshots__/sync.test.ts.snap @@ -0,0 +1,9 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`runMetricSync — success > produces a stable success-stdout snapshot 1`] = ` +"Syncing N metric(s) from config/queries/metric.json via warehouse stub-warehouse... 
+✓ Wrote shared/appkit-types/metric.d.ts +✓ Wrote shared/appkit-types/metrics.metadata.json" +`; + +exports[`runMetricSync — success > treats an empty metric.json as a no-op (no fetch call) 1`] = `"No metric entries found. Nothing to sync."`; diff --git a/packages/shared/src/cli/commands/metric/sync/__snapshots__/validate-metric-source.test.ts.snap b/packages/shared/src/cli/commands/metric/sync/__snapshots__/validate-metric-source.test.ts.snap new file mode 100644 index 000000000..3fefd9e39 --- /dev/null +++ b/packages/shared/src/cli/commands/metric/sync/__snapshots__/validate-metric-source.test.ts.snap @@ -0,0 +1,8 @@ +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html + +exports[`formatMetricSourceErrors > produces a stable error message for a multi-issue input 1`] = ` +" sp: does not match expected pattern (must match pattern "^[a-zA-Z_][a-zA-Z0-9_]*$") + sp: property name must be valid + sp.revenue: missing required property "source" + sp.1bad.source: does not match expected pattern (must match pattern "^[a-zA-Z0-9_][a-zA-Z0-9_-]*\\.[a-zA-Z0-9_][a-zA-Z0-9_-]*\\.[a-zA-Z0-9_][a-zA-Z0-9_-]*$")" +`; diff --git a/packages/shared/src/cli/commands/metric/sync/sync.test.ts b/packages/shared/src/cli/commands/metric/sync/sync.test.ts new file mode 100644 index 000000000..4a6602ced --- /dev/null +++ b/packages/shared/src/cli/commands/metric/sync/sync.test.ts @@ -0,0 +1,551 @@ +import fs from "node:fs"; +import os from "node:os"; +import path from "node:path"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { + classifyFetchError, + exitCodeFor, + type MetricSyncDependencies, + MetricSyncError, + runMetricSync, +} from "./sync"; + +// ── Helpers ──────────────────────────────────────────────────────────────── + +function makeTempDir(prefix: string): string { + return fs.mkdtempSync(path.join(os.tmpdir(), `${prefix}-`)); +} + +function cleanDir(dir: string): void { + try { + fs.rmSync(dir, { recursive: true, force: true }); + } 
catch { + // best-effort + } +} + +/** + * Build a fully-mocked {@link MetricSyncDependencies} that records what was + * called. Tests override individual fields (e.g. swap in a throwing fetcher + * factory to simulate auth-failed) without re-stating the rest. + */ +function makeDeps( + overrides: Partial = {}, +): MetricSyncDependencies { + return { + syncMetrics: vi.fn(async (resolution, fetcher) => { + // Default: call the fetcher once per entry and emit a stub schema. + const schemas: Array<{ + key: string; + source: string; + lane: "sp" | "obo"; + measures: never[]; + dimensions: never[]; + }> = []; + for (const entry of resolution.entries) { + try { + await fetcher(entry.source); + } catch { + // mirror the real syncMetrics behavior — it tolerates per-entry + // failures and emits empty schemas. The CLI's wrapped fetcher + // captures the first failure and re-throws after this returns. + } + schemas.push({ + key: entry.key, + source: entry.source, + lane: entry.lane, + measures: [], + dimensions: [], + }); + } + // The mock returns no failures by default — tests that need to + // exercise the failures-surfacing path override this seam. + return { schemas, failures: [] }; + }), + resolveMetricConfig: vi.fn((config) => { + const cfg = config as { + sp?: Record; + obo?: Record; + }; + // Mirror the real `resolveMetricConfig`: sp first, then obo, each + // alphabetically sorted by key. This is the contract callers (and + // syncMetrics) rely on for deterministic ordering. + const entries: Array<{ + key: string; + source: string; + lane: "sp" | "obo"; + }> = []; + for (const lane of ["sp", "obo"] as const) { + const laneMap = cfg[lane] ?? 
{}; + for (const key of Object.keys(laneMap).sort()) { + entries.push({ key, source: laneMap[key].source, lane }); + } + } + return { entries }; + }), + createWorkspaceDescribeFetcher: vi.fn(() => async (_fqn: string) => ({ + ok: true, + })), + generateMetricTypeDeclarations: vi.fn(() => "// generated metric.d.ts\n"), + generateMetricsMetadataJson: vi.fn(() => "{}\n"), + metricTypesFile: "metric.d.ts", + metricMetadataFile: "metrics.metadata.json", + ...overrides, + }; +} + +/** + * Capture console writes through the IO seam so snapshots are deterministic. + */ +function captureIO() { + const log: string[] = []; + const error: string[] = []; + return { + log: (msg: string) => log.push(msg), + error: (msg: string) => error.push(msg), + output: () => log.join("\n"), + errors: () => error.join("\n"), + }; +} + +const VALID_METRIC_JSON = { + $schema: + "https://databricks.github.io/appkit/schemas/metric-source.schema.json", + sp: { + revenue: { source: "demo.public.revenue" }, + }, + obo: { + customer_metrics: { source: "demo.public.customer_metrics" }, + }, +}; + +// ── classifyFetchError ───────────────────────────────────────────────────── + +describe("classifyFetchError", () => { + it("classifies 401 unauthorized as auth-failed", () => { + const err = new Error("Request failed: 401 Unauthorized"); + const classified = classifyFetchError(err, "demo.public.x"); + expect(classified.code).toBe("auth-failed"); + expect(classified.message).toMatch(/Authentication failed/); + }); + + it("classifies 403 forbidden as auth-failed", () => { + const err = new Error("HTTP 403 forbidden"); + expect(classifyFetchError(err, "x.y.z").code).toBe("auth-failed"); + }); + + it("classifies token-expired as auth-failed", () => { + const err = new Error("OAuth token expired; please refresh"); + expect(classifyFetchError(err, "x.y.z").code).toBe("auth-failed"); + }); + + it("classifies 'not found' as missing-fqn", () => { + const err = new Error("TABLE_OR_VIEW_NOT_FOUND: relation x.y.z not 
found"); + const classified = classifyFetchError(err, "x.y.z"); + expect(classified.code).toBe("missing-fqn"); + expect(classified.message).toContain("'x.y.z'"); + }); + + it("classifies 'does not exist' as missing-fqn", () => { + const err = new Error("Table x.y.z does not exist"); + expect(classifyFetchError(err, "x.y.z").code).toBe("missing-fqn"); + }); + + it("classifies ECONNREFUSED as warehouse-unreach", () => { + const err = new Error("connect ECONNREFUSED 127.0.0.1:443"); + expect(classifyFetchError(err, "x.y.z").code).toBe("warehouse-unreach"); + }); + + it("classifies ETIMEDOUT as warehouse-unreach", () => { + const err = new Error("Request ETIMEDOUT after 30s"); + expect(classifyFetchError(err, "x.y.z").code).toBe("warehouse-unreach"); + }); + + it("classifies unknown errors as unknown", () => { + const err = new Error("Unexpected internal error"); + expect(classifyFetchError(err, "x.y.z").code).toBe("unknown"); + }); +}); + +// ── exitCodeFor ──────────────────────────────────────────────────────────── + +describe("exitCodeFor", () => { + it("maps each MetricSyncErrorCode to its canonical exit code", () => { + expect(exitCodeFor("missing-fqn")).toBe(1); + expect(exitCodeFor("warehouse-unreach")).toBe(2); + expect(exitCodeFor("malformed-config")).toBe(3); + expect(exitCodeFor("auth-failed")).toBe(4); + expect(exitCodeFor("unknown")).toBe(5); + }); +}); + +// ── runMetricSync — happy paths ──────────────────────────────────────────── + +describe("runMetricSync — success", () => { + let tmp: string; + + beforeEach(() => { + tmp = makeTempDir("metric-sync-success"); + fs.mkdirSync(path.join(tmp, "config", "queries"), { recursive: true }); + fs.writeFileSync( + path.join(tmp, "config", "queries", "metric.json"), + JSON.stringify(VALID_METRIC_JSON, null, 2), + ); + }); + + afterEach(() => cleanDir(tmp)); + + it("emits metric.d.ts and metrics.metadata.json on success", async () => { + const io = captureIO(); + const deps = makeDeps(); + const ctx = await 
runMetricSync( + { + warehouseId: "stub-warehouse", + rootDir: tmp, + silent: true, + }, + { ...io, deps, interactive: false }, + ); + + expect(ctx.warehouseId).toBe("stub-warehouse"); + expect(fs.existsSync(ctx.metricTypesPath)).toBe(true); + expect(fs.existsSync(ctx.metricMetadataPath)).toBe(true); + expect(deps.syncMetrics).toHaveBeenCalledTimes(1); + }); + + it("produces a stable success-stdout snapshot", async () => { + const io = captureIO(); + const deps = makeDeps(); + await runMetricSync( + { + warehouseId: "stub-warehouse", + rootDir: tmp, + }, + { ...io, deps, interactive: false }, + ); + + // Normalize the warehouse-relative path component for portability. + const snapshot = io + .output() + .replace(/Syncing \d+ metric\(s\) from /g, "Syncing N metric(s) from "); + expect(snapshot).toMatchSnapshot(); + }); + + it("treats an empty metric.json as a no-op (no fetch call)", async () => { + fs.writeFileSync( + path.join(tmp, "config", "queries", "metric.json"), + JSON.stringify({ sp: {}, obo: {} }, null, 2), + ); + + const io = captureIO(); + const deps = makeDeps(); + await runMetricSync( + { + warehouseId: "stub-warehouse", + rootDir: tmp, + }, + { ...io, deps, interactive: false }, + ); + + expect(deps.syncMetrics).not.toHaveBeenCalled(); + expect(io.output()).toMatchSnapshot(); + }); + + it("respects --metric-json-path and --output-dir overrides", async () => { + const altDir = path.join(tmp, "alt-config"); + fs.mkdirSync(altDir, { recursive: true }); + const altPath = path.join(altDir, "metrics.json"); + fs.writeFileSync(altPath, JSON.stringify(VALID_METRIC_JSON, null, 2)); + + const altOut = path.join(tmp, "build-out"); + + const io = captureIO(); + const deps = makeDeps(); + const ctx = await runMetricSync( + { + warehouseId: "stub-warehouse", + metricJsonPath: altPath, + outputDir: altOut, + rootDir: tmp, + silent: true, + }, + { ...io, deps, interactive: false }, + ); + + expect(ctx.metricJsonPath).toBe(altPath); + 
expect(ctx.outputDir).toBe(altOut); + expect(fs.existsSync(path.join(altOut, "metric.d.ts"))).toBe(true); + expect(fs.existsSync(path.join(altOut, "metrics.metadata.json"))).toBe( + true, + ); + }); +}); + +// ── runMetricSync — failure modes ────────────────────────────────────────── + +describe("runMetricSync — failure modes", () => { + let tmp: string; + + beforeEach(() => { + tmp = makeTempDir("metric-sync-failure"); + fs.mkdirSync(path.join(tmp, "config", "queries"), { recursive: true }); + fs.writeFileSync( + path.join(tmp, "config", "queries", "metric.json"), + JSON.stringify(VALID_METRIC_JSON, null, 2), + ); + }); + + afterEach(() => cleanDir(tmp)); + + it("rejects malformed JSON with malformed-config", async () => { + fs.writeFileSync( + path.join(tmp, "config", "queries", "metric.json"), + "{not valid json", + ); + + const io = captureIO(); + await expect( + runMetricSync( + { + warehouseId: "stub-warehouse", + rootDir: tmp, + silent: true, + }, + { ...io, deps: makeDeps(), interactive: false }, + ), + ).rejects.toMatchObject({ + code: "malformed-config", + }); + }); + + it("rejects missing metric.json with malformed-config", async () => { + fs.unlinkSync(path.join(tmp, "config", "queries", "metric.json")); + + const io = captureIO(); + await expect( + runMetricSync( + { + warehouseId: "stub-warehouse", + rootDir: tmp, + silent: true, + }, + { ...io, deps: makeDeps(), interactive: false }, + ), + ).rejects.toMatchObject({ + code: "malformed-config", + }); + }); + + it("rejects schema-invalid metric.json with malformed-config", async () => { + fs.writeFileSync( + path.join(tmp, "config", "queries", "metric.json"), + JSON.stringify({ sp: { "1bad-key": { source: "x" } } }), + ); + + const io = captureIO(); + let captured: MetricSyncError | null = null; + try { + await runMetricSync( + { + warehouseId: "stub-warehouse", + rootDir: tmp, + silent: true, + }, + { ...io, deps: makeDeps(), interactive: false }, + ); + } catch (err) { + captured = err as 
MetricSyncError; + } + expect(captured).toBeInstanceOf(MetricSyncError); + expect(captured?.code).toBe("malformed-config"); + // Stable summary line, message body varies by AJV version so we don't + // snapshot the full error. + expect(captured?.message).toContain("Invalid metric.json"); + }); + + it("rejects metric.json with bare-string source as malformed-config", async () => { + fs.writeFileSync( + path.join(tmp, "config", "queries", "metric.json"), + JSON.stringify({ sp: { revenue: "demo.public.revenue" } }), + ); + + const io = captureIO(); + await expect( + runMetricSync( + { + warehouseId: "stub-warehouse", + rootDir: tmp, + silent: true, + }, + { ...io, deps: makeDeps(), interactive: false }, + ), + ).rejects.toMatchObject({ + code: "malformed-config", + }); + }); + + it("surfaces missing-fqn when the fetcher rejects with 'not found'", async () => { + const deps = makeDeps({ + createWorkspaceDescribeFetcher: vi.fn(() => async (_fqn: string) => { + throw new Error("TABLE_OR_VIEW_NOT_FOUND: relation does not exist"); + }), + }); + + const io = captureIO(); + let captured: MetricSyncError | null = null; + try { + await runMetricSync( + { + warehouseId: "stub-warehouse", + rootDir: tmp, + silent: true, + }, + { ...io, deps, interactive: false }, + ); + } catch (err) { + captured = err as MetricSyncError; + } + expect(captured?.code).toBe("missing-fqn"); + expect(captured?.fqn).toBe("demo.public.revenue"); + expect(captured?.message).toContain("demo.public.revenue"); + }); + + it("surfaces warehouse-unreach with the warehouse ID embedded", async () => { + const deps = makeDeps({ + createWorkspaceDescribeFetcher: vi.fn(() => async (_fqn: string) => { + throw new Error( + "connect ECONNREFUSED 127.0.0.1:443 unreachable warehouse", + ); + }), + }); + + const io = captureIO(); + let captured: MetricSyncError | null = null; + try { + await runMetricSync( + { + warehouseId: "wh-12345", + rootDir: tmp, + silent: true, + }, + { ...io, deps, interactive: false }, + ); + 
} catch (err) { + captured = err as MetricSyncError; + } + expect(captured?.code).toBe("warehouse-unreach"); + expect(captured?.message).toContain("'wh-12345'"); + }); + + it("surfaces auth-failed when the fetcher rejects with 401", async () => { + const deps = makeDeps({ + createWorkspaceDescribeFetcher: vi.fn(() => async (_fqn: string) => { + throw new Error("401 Unauthorized — invalid OAuth token"); + }), + }); + + const io = captureIO(); + let captured: MetricSyncError | null = null; + try { + await runMetricSync( + { + warehouseId: "wh-12345", + rootDir: tmp, + silent: true, + }, + { ...io, deps, interactive: false }, + ); + } catch (err) { + captured = err as MetricSyncError; + } + expect(captured?.code).toBe("auth-failed"); + expect(captured?.message).toContain("Authentication failed"); + }); + + it("surfaces unknown for an unexpected error", async () => { + const deps = makeDeps({ + createWorkspaceDescribeFetcher: vi.fn(() => async (_fqn: string) => { + throw new Error("internal error: kernel panic"); + }), + }); + + const io = captureIO(); + let captured: MetricSyncError | null = null; + try { + await runMetricSync( + { + warehouseId: "wh-12345", + rootDir: tmp, + silent: true, + }, + { ...io, deps, interactive: false }, + ); + } catch (err) { + captured = err as MetricSyncError; + } + expect(captured?.code).toBe("unknown"); + }); + + it("rejects --silent with no warehouse ID resolved", async () => { + const previousEnv = process.env.DATABRICKS_WAREHOUSE_ID; + delete process.env.DATABRICKS_WAREHOUSE_ID; + + try { + const io = captureIO(); + await expect( + runMetricSync( + { + rootDir: tmp, + silent: true, + }, + { ...io, deps: makeDeps(), interactive: false }, + ), + ).rejects.toMatchObject({ + code: "warehouse-unreach", + }); + } finally { + if (previousEnv !== undefined) { + process.env.DATABRICKS_WAREHOUSE_ID = previousEnv; + } + } + }); + + it("surfaces per-entry sync failures (parse / zero-column) as a typed error", async () => { + // Simulates the 
case where DESCRIBE returned successfully but extraction + // produced an empty bundle — without surfacing this, an empty bundle + // would ship and the runtime fail-closed gate would 503 every request. + const io = captureIO(); + const deps = makeDeps({ + syncMetrics: vi.fn( + async (resolution: { + entries: Array<{ key: string; source: string; lane: "sp" | "obo" }>; + }) => ({ + schemas: resolution.entries.map((e) => ({ + key: e.key, + source: e.source, + lane: e.lane, + measures: [], + dimensions: [], + })), + failures: [ + { + key: "revenue", + source: "appkit_demo.public.revenue_metrics", + reason: "DESCRIBE response yielded zero columns", + }, + ], + }), + ), + }); + + await expect( + runMetricSync( + { + warehouseId: "wh-x", + rootDir: tmp, + }, + { ...io, deps, interactive: false }, + ), + ).rejects.toThrowError(/zero columns/); + }); +}); diff --git a/packages/shared/src/cli/commands/metric/sync/sync.ts b/packages/shared/src/cli/commands/metric/sync/sync.ts new file mode 100644 index 000000000..2b20d9358 --- /dev/null +++ b/packages/shared/src/cli/commands/metric/sync/sync.ts @@ -0,0 +1,697 @@ +import fs from "node:fs"; +import path from "node:path"; +import process from "node:process"; +import { cancel, intro, isCancel, outro, text } from "@clack/prompts"; +import { Command } from "commander"; +import { + formatMetricSourceErrors, + validateMetricSource, +} from "./validate-metric-source"; + +/** + * Recognizable error categories surfaced from `syncMetrics()` and the CLI's + * preflight checks. The taxonomy maps 1:1 onto exit codes so wrappers + * (CI scripts, pre-commit hooks) can branch on the failure mode. 
+ * + * Mapping: + * missing-fqn → 1 "Metric view '' not found or not accessible" + * warehouse-unreach → 2 "Could not reach SQL warehouse ''" + * malformed-config → 3 "Invalid metric.json" + * auth-failed → 4 "Authentication failed" + * unknown → 5 catch-all + */ +export type MetricSyncErrorCode = + | "missing-fqn" + | "warehouse-unreach" + | "malformed-config" + | "auth-failed" + | "unknown"; + +const EXIT_CODE_BY_CATEGORY: Record = { + "missing-fqn": 1, + "warehouse-unreach": 2, + "malformed-config": 3, + "auth-failed": 4, + unknown: 5, +}; + +/** + * Typed error wrapper used by the CLI to bubble a recognizable failure mode + * (and its associated exit code) up from helper functions to the command's + * top-level catch. + */ +export class MetricSyncError extends Error { + readonly code: MetricSyncErrorCode; + readonly fqn?: string; + + constructor(code: MetricSyncErrorCode, message: string, fqn?: string) { + super(message); + this.name = "MetricSyncError"; + this.code = code; + if (fqn !== undefined) this.fqn = fqn; + } +} + +/** + * Classify an arbitrary error thrown by the DescribeFetcher (i.e. the + * Statement Execution API call inside `createWorkspaceDescribeFetcher`) into + * a recognizable {@link MetricSyncErrorCode}. + * + * The classification is intentionally string-shaped (no SDK type imports) + * because: + * - the CLI runs as a thin wrapper and we don't want to pull the Databricks + * SDK into the shared CLI package's hot path; + * - the error shapes are fluid across SDK releases — matching on substrings + * of the message gives us a stable contract even when the SDK shifts. + * + * The resulting categorization is conservative: when nothing matches we fall + * through to "unknown" so the catch-all exit code (5) carries the original + * message verbatim. Callers should always preserve the underlying message in + * stderr — the category is just a routing key. 
+ */ +export function classifyFetchError(err: Error, fqn: string): MetricSyncError { + const msg = (err.message ?? "").toLowerCase(); + + // Auth failure signals — these come from the SDK's bearer/OAuth flows or + // from the workspace returning 401/403 directly. Match before the more + // generic "not found" / "unreachable" buckets so an auth-flavored 403 + // doesn't get bucketed as warehouse-unreach. + if ( + msg.includes("unauthorized") || + msg.includes("authentication") || + msg.includes("403") || + msg.includes("401") || + msg.includes("forbidden") || + msg.includes("invalid_grant") || + (msg.includes("token") && + (msg.includes("expired") || msg.includes("invalid"))) + ) { + return new MetricSyncError( + "auth-failed", + `Authentication failed: ${err.message}`, + fqn, + ); + } + + // Missing FQN signals — a SQL "table not found" / "doesn't exist" comes + // back as a FAILED statement, but if the SDK throws on a 404-style HTTP + // we catch it here. Match on the FQN word itself when present. + if ( + msg.includes("not found") || + msg.includes("does not exist") || + msg.includes("doesn't exist") || + msg.includes("no such table") + ) { + return new MetricSyncError( + "missing-fqn", + `Metric view '${fqn}' not found or not accessible: ${err.message}`, + fqn, + ); + } + + // Warehouse-reach signals — connection failures, host unreachable, timeouts. + // The warehouse ID itself isn't part of the message, so we can't echo it + // here; the caller appends it when constructing the final message. + if ( + msg.includes("econnrefused") || + msg.includes("etimedout") || + msg.includes("enotfound") || + msg.includes("network") || + msg.includes("unreachable") || + (msg.includes("warehouse") && msg.includes("not")) + ) { + return new MetricSyncError( + "warehouse-unreach", + `Could not reach SQL warehouse: ${err.message}`, + fqn, + ); + } + + return new MetricSyncError("unknown", err.message, fqn); +} + +/** + * Read and parse `metric.json` from a path. 
Throws a {@link MetricSyncError} + * with `malformed-config` on missing/parse errors so the top-level catch can + * route to the right exit code. + */ +function readMetricJson(metricJsonPath: string): unknown { + let raw: string; + try { + raw = fs.readFileSync(metricJsonPath, "utf-8"); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + throw new MetricSyncError( + "malformed-config", + `Could not read metric.json at ${metricJsonPath}: ${msg}`, + ); + } + + try { + return JSON.parse(raw); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + throw new MetricSyncError( + "malformed-config", + `Failed to parse metric.json at ${metricJsonPath}: ${msg}`, + ); + } +} + +/** + * Resolve the metric.json path. Honors --metric-json-path, otherwise looks at + * the conventional `/config/queries/metric.json` location. + */ +function resolveMetricJsonPath(rootDir: string, override?: string): string { + if (override) { + return path.isAbsolute(override) + ? override + : path.resolve(rootDir, override); + } + return path.resolve(rootDir, "config", "queries", "metric.json"); +} + +/** + * Resolve the output directory for the generated `.d.ts` and metadata bundle. + * Defaults to `/shared/appkit-types` to match the Vite-plugin output + * convention (`appKitTypesPlugin` writes to that location by default). + */ +function resolveOutputDir(rootDir: string, override?: string): string { + if (override) { + return path.isAbsolute(override) + ? override + : path.resolve(rootDir, override); + } + return path.resolve(rootDir, "shared", "appkit-types"); +} + +/** + * Inputs produced by `runMetricSync`'s preflight phase: everything the + * implementation needs after env vars / flags / prompts have been resolved. + * + * Exported (along with `runMetricSync`) for snapshot tests that want to + * inject deterministic seams. 
+ */ +export interface MetricSyncContext { + warehouseId: string; + metricJsonPath: string; + outputDir: string; + metricTypesPath: string; + metricMetadataPath: string; +} + +/** + * Minimal column-metadata shape — mirrors `MetricColumnMetadata` from + * `@databricks/appkit/type-generator`. Kept here (rather than imported) so + * the shared CLI package compiles even when @databricks/appkit isn't built. + */ +export interface MetricSyncColumnMetadata { + name: string; + type: string; + isMeasure: boolean; + description?: string; + displayName?: string; + format?: string; + timeGrains?: string[]; +} + +/** + * Minimal MetricSchema shape — mirrors `MetricSchema` from the type-generator + * package. Tests construct stubs with empty `measures` / `dimensions` arrays, + * and the structural compatibility carries through to the real implementation. + */ +export interface MetricSyncSchema { + key: string; + source: string; + lane: "sp" | "obo"; + measures: MetricSyncColumnMetadata[]; + dimensions: MetricSyncColumnMetadata[]; +} + +/** + * The subset of `@databricks/appkit/type-generator` that the CLI consumes. + * Defined as a structural interface so tests can substitute a mock without + * loading the full ESM module graph (which would require `@databricks/appkit` + * to be built before tests run). + */ +/** + * Per-entry sync failure surfaced by `syncMetrics()`. Mirrors the shape in + * `@databricks/appkit/type-generator`'s `MetricSyncFailure`. Defined + * structurally here so the CLI doesn't load the appkit package at type-time. 
+ */ +export interface MetricSyncFailureLite { + key: string; + source: string; + reason: string; +} + +export interface MetricSyncDependencies { + syncMetrics: ( + resolution: { + entries: Array<{ key: string; source: string; lane: "sp" | "obo" }>; + }, + fetcher: (fqn: string) => Promise, + ) => Promise<{ + schemas: MetricSyncSchema[]; + failures: MetricSyncFailureLite[]; + }>; + resolveMetricConfig: (config: unknown) => { + entries: Array<{ key: string; source: string; lane: "sp" | "obo" }>; + }; + createWorkspaceDescribeFetcher: ( + warehouseId: string, + ) => (fqn: string) => Promise; + generateMetricTypeDeclarations: (schemas: MetricSyncSchema[]) => string; + generateMetricsMetadataJson: (schemas: MetricSyncSchema[]) => string; + metricTypesFile: string; + metricMetadataFile: string; +} + +/** + * Lazy-load `@databricks/appkit/type-generator`. Mirrors the dynamic-import + * pattern in `generate-types.ts` so the CLI does not hard-depend on the + * appkit package being installed (the published `appkit` CLI does, but the + * raw `shared` CLI package needs to compile cleanly without it). + */ +async function loadDefaultDependencies(): Promise { + try { + const mod = await import("@databricks/appkit/type-generator"); + return { + syncMetrics: mod.syncMetrics, + resolveMetricConfig: + mod.resolveMetricConfig as MetricSyncDependencies["resolveMetricConfig"], + createWorkspaceDescribeFetcher: mod.createWorkspaceDescribeFetcher, + generateMetricTypeDeclarations: mod.generateMetricTypeDeclarations, + generateMetricsMetadataJson: mod.generateMetricsMetadataJson, + metricTypesFile: mod.METRIC_TYPES_FILE, + metricMetadataFile: mod.METRIC_METADATA_FILE, + }; + } catch (err) { + if (err instanceof Error && err.message.includes("Cannot find module")) { + throw new MetricSyncError( + "unknown", + "The 'metric sync' command requires @databricks/appkit. 
Install it to use this command.", + ); + } + throw err; + } +} + +/** + * The fully-resolved set of flag/env/prompt inputs the command needs. + * + * `metricJsonPath` and `outputDir` are required (env-var-or-flag-or-prompt + * resolved); `warehouseId` is also required because `syncMetrics()` cannot + * issue the DESCRIBE without it. + */ +interface ResolvedInputs { + warehouseId: string; + metricJsonPath: string; + outputDir: string; + rootDir: string; +} + +/** + * Resolve inputs from the priority chain: explicit flags > env vars > interactive + * prompts. Matches the convention used by `plugin sync` and `plugin add-resource` + * — no prompt fires when the value is already known via flag or env. + * + * In `--silent` / `--json` modes we skip prompts entirely and surface a + * malformed-config error if a required field is unresolved (the wrapper script + * shouldn't see TTY prompts). + */ +async function resolveInputs( + options: MetricSyncFlags, + rootDir: string, + silent: boolean, +): Promise { + // Warehouse ID: --warehouse-id > DATABRICKS_WAREHOUSE_ID env var > prompt + let warehouseId = + options.warehouseId ?? process.env.DATABRICKS_WAREHOUSE_ID ?? ""; + + // metric.json path: --metric-json-path > /config/queries/metric.json + let metricJsonPath = resolveMetricJsonPath(rootDir, options.metricJsonPath); + + // Output dir: --output-dir > /shared/appkit-types + const outputDir = resolveOutputDir(rootDir, options.outputDir); + + if (silent) { + if (!warehouseId) { + throw new MetricSyncError( + "warehouse-unreach", + "No warehouse ID. Set DATABRICKS_WAREHOUSE_ID, pass --warehouse-id , or run interactively.", + ); + } + return { warehouseId, metricJsonPath, outputDir, rootDir }; + } + + // Interactive: only prompt for fields that weren't already resolved. + if (!warehouseId) { + const answer = await text({ + message: "Databricks SQL Warehouse ID?", + placeholder: "e.g. 
1234abcd5678efgh", + validate(value) { + if (!value || value.trim().length === 0) { + return "Warehouse ID is required"; + } + return undefined; + }, + }); + if (isCancel(answer)) { + cancel("Cancelled."); + process.exit(0); + } + warehouseId = (answer as string).trim(); + } + + if (!options.metricJsonPath) { + // Only prompt if the conventional location does not exist; otherwise we + // assume the user meant the default and proceed without nagging. + if (!fs.existsSync(metricJsonPath)) { + const answer = await text({ + message: "Path to metric.json?", + placeholder: "config/queries/metric.json", + initialValue: path.relative(rootDir, metricJsonPath), + validate(value) { + if (!value || value.trim().length === 0) { + return "metric.json path is required"; + } + return undefined; + }, + }); + if (isCancel(answer)) { + cancel("Cancelled."); + process.exit(0); + } + const resolved = (answer as string).trim(); + metricJsonPath = path.isAbsolute(resolved) + ? resolved + : path.resolve(rootDir, resolved); + } + } + + if (!options.outputDir) { + // Use the default — no prompt unless the user explicitly opts in via flag. + // Mirroring `generate-types.ts`'s convention. + } + + return { warehouseId, metricJsonPath, outputDir, rootDir }; +} + +/** + * CLI flags accepted by `appkit metric sync`. Exposed for test wiring. + */ +export interface MetricSyncFlags { + warehouseId?: string; + metricJsonPath?: string; + outputDir?: string; + rootDir?: string; + silent?: boolean; + json?: boolean; +} + +/** + * The full implementation of `appkit metric sync`. Pure-ish: takes a `deps` + * seam so tests can inject a mock {@link MetricSyncDependencies} and a mock + * console writer. Production wires {@link loadDefaultDependencies} and + * `console.log` / `console.error`. + * + * Design notes: + * - We deliberately do **not** start the dependency load until after the + * metric.json path / schema validation step. This keeps the CLI usable + * for "did I write a valid metric.json?" 
/**
 * The full implementation of `appkit metric sync`. Pure-ish: takes a `deps`
 * seam so tests can inject a mock {@link MetricSyncDependencies} and a mock
 * console writer. Production wires {@link loadDefaultDependencies} and
 * `console.log` / `console.error`.
 *
 * Design notes:
 * - We deliberately do **not** start the dependency load until after the
 *   metric.json path / schema validation step. This keeps the CLI usable
 *   for "did I write a valid metric.json?" checks even in environments
 *   where `@databricks/appkit` is missing.
 * - `syncMetrics()` is tolerant by design (it returns empty schemas on a
 *   per-entry fetch error). To surface those errors at the CLI seam, we
 *   wrap the fetcher to capture the first failure and re-throw a typed
 *   {@link MetricSyncError}; subsequent entries are skipped.
 *
 * @param options resolved CLI flags (see {@link MetricSyncFlags})
 * @param io      output sinks plus optional dependency/interactivity seams
 * @returns the resolved {@link MetricSyncContext} describing what was written
 * @throws MetricSyncError tagged with the recognized failure category
 */
export async function runMetricSync(
  options: MetricSyncFlags,
  io: {
    log: (msg: string) => void;
    error: (msg: string) => void;
    deps?: MetricSyncDependencies;
    interactive?: boolean;
  },
): Promise<MetricSyncContext> {
  const rootDir = options.rootDir
    ? path.resolve(options.rootDir)
    : process.cwd();
  const silent = Boolean(options.silent || options.json);
  const interactive = io.interactive ?? !silent;

  if (interactive) {
    intro("Sync metric-view types");
  }

  const inputs = await resolveInputs(options, rootDir, !interactive);

  // Step 1: Read + validate metric.json against the JSON Schema.
  const parsed = readMetricJson(inputs.metricJsonPath);
  const schemaResult = validateMetricSource(parsed);
  if (!schemaResult.valid || !schemaResult.config) {
    const details = schemaResult.errors?.length
      ? formatMetricSourceErrors(schemaResult.errors)
      : "(no validator output)";
    throw new MetricSyncError(
      "malformed-config",
      `Invalid metric.json at ${inputs.metricJsonPath}:\n${details}`,
    );
  }

  // Step 2: Load deps (or use the injected seam) and resolve the config into
  // a MetricConfigResolution. `resolveMetricConfig` performs the additional
  // structural checks the JSON Schema can't express (duplicate keys across
  // lanes, unknown fields). It throws plain Error; we re-shape into
  // malformed-config so the CLI surfaces the right exit code.
  const deps = io.deps ?? (await loadDefaultDependencies());

  let resolution: ReturnType<MetricSyncDependencies["resolveMetricConfig"]>;
  try {
    resolution = deps.resolveMetricConfig(schemaResult.config);
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    throw new MetricSyncError(
      "malformed-config",
      `Invalid metric.json at ${inputs.metricJsonPath}: ${msg}`,
    );
  }

  if (resolution.entries.length === 0) {
    if (!silent) {
      io.log("No metric entries found. Nothing to sync.");
    }
    if (interactive) {
      outro("Done.");
    }
    return {
      warehouseId: inputs.warehouseId,
      metricJsonPath: inputs.metricJsonPath,
      outputDir: inputs.outputDir,
      metricTypesPath: path.join(inputs.outputDir, deps.metricTypesFile),
      metricMetadataPath: path.join(inputs.outputDir, deps.metricMetadataFile),
    };
  }

  // Step 3: Build a fetcher that classifies the first failure into a typed
  // MetricSyncError. We can't rely on `syncMetrics()` to throw — it captures
  // and continues — so we wrap before passing it in. Only the *first* failure
  // wins so the surfaced exit code reflects the earliest problem the user
  // hit (subsequent entries are best-effort and may show different symptoms).
  const baseFetcher = deps.createWorkspaceDescribeFetcher(inputs.warehouseId);
  let firstFailure: MetricSyncError | null = null;
  const wrappedFetcher: (fqn: string) => Promise<unknown> = async (fqn) => {
    try {
      return await baseFetcher(fqn);
    } catch (err) {
      const e = err instanceof Error ? err : new Error(String(err));
      if (firstFailure === null) {
        const classified = classifyFetchError(e, fqn);
        // Refine warehouse-unreach to include the warehouse ID in the message
        // (the SDK's error doesn't carry it).
        firstFailure =
          classified.code === "warehouse-unreach"
            ? new MetricSyncError(
                "warehouse-unreach",
                `Could not reach SQL warehouse '${inputs.warehouseId}': ${e.message}`,
                fqn,
              )
            : classified;
      }
      throw e;
    }
  };

  if (!silent) {
    io.log(
      `Syncing ${resolution.entries.length} metric(s) from ${path.relative(rootDir, inputs.metricJsonPath)} via warehouse ${inputs.warehouseId}...`,
    );
  }

  const { schemas, failures } = await deps.syncMetrics(
    resolution,
    wrappedFetcher,
  );

  // If any entry's fetch failed, surface the first failure as a typed error.
  // We deliberately defer this until after `syncMetrics()` returns so the
  // emitted artifact (if we choose to emit it) reflects what we know.
  if (firstFailure) {
    throw firstFailure;
  }

  // Even when no fetch threw (typed network errors above), `syncMetrics` may
  // record per-entry failures: response parse errors, zero-column extraction.
  // These represent silent corruption — the bundle would ship empty and the
  // route's runtime fail-closed gate would 503 every request. Surface them
  // as a typed CLI error so CI catches it instead of letting an empty bundle
  // ship.
  if (failures.length > 0) {
    const summary = failures
      .map((f) => `  ${f.key} (${f.source}): ${f.reason}`)
      .join("\n");
    throw new MetricSyncError(
      "unknown",
      `metric sync produced ${failures.length} failure(s):\n${summary}`,
      failures[0]?.source,
    );
  }

  // Step 4: Emit artifacts. `outputDir` is created (recursively) on first use.
  fs.mkdirSync(inputs.outputDir, { recursive: true });

  const metricTypesPath = path.join(inputs.outputDir, deps.metricTypesFile);
  const metricMetadataPath = path.join(
    inputs.outputDir,
    deps.metricMetadataFile,
  );

  fs.writeFileSync(
    metricTypesPath,
    deps.generateMetricTypeDeclarations(schemas),
    "utf-8",
  );
  fs.writeFileSync(
    metricMetadataPath,
    deps.generateMetricsMetadataJson(schemas),
    "utf-8",
  );

  if (!silent) {
    io.log(`✓ Wrote ${path.relative(rootDir, metricTypesPath)}`);
    io.log(`✓ Wrote ${path.relative(rootDir, metricMetadataPath)}`);
  }

  if (interactive) {
    outro(`Synced ${schemas.length} metric(s).`);
  }

  return {
    warehouseId: inputs.warehouseId,
    metricJsonPath: inputs.metricJsonPath,
    outputDir: inputs.outputDir,
    metricTypesPath,
    metricMetadataPath,
  };
}

/**
 * Map a {@link MetricSyncErrorCode} to the canonical exit code. Test
 * consumers import this directly to assert exit-code expectations without
 * spawning a subprocess.
+ */ +export function exitCodeFor(code: MetricSyncErrorCode): number { + return EXIT_CODE_BY_CATEGORY[code]; +} + +export const metricSyncCommand = new Command("sync") + .description( + "Sync metric-view schemas from Databricks: fetch DESCRIBE TABLE EXTENDED for every entry in metric.json, then emit metric.d.ts + metrics.metadata.json.", + ) + .option( + "--warehouse-id ", + "Databricks SQL Warehouse ID (overrides DATABRICKS_WAREHOUSE_ID env var)", + ) + .option( + "--metric-json-path ", + "Path to metric.json (default: config/queries/metric.json)", + ) + .option( + "--output-dir ", + "Output directory for metric.d.ts and metrics.metadata.json (default: shared/appkit-types)", + ) + .option( + "--root-dir ", + "Project root used to resolve relative defaults (default: cwd)", + ) + .option( + "-s, --silent", + "Suppress non-error output and never enter interactive mode", + ) + .option("--json", "Emit a single-line JSON summary on success") + .addHelpText( + "after", + ` +Examples: + $ appkit metric sync + $ appkit metric sync --warehouse-id 1234abcd5678efgh + $ appkit metric sync --metric-json-path config/queries/metric.json + $ appkit metric sync --output-dir shared/appkit-types --silent + +Environment variables: + DATABRICKS_WAREHOUSE_ID SQL warehouse ID (used when --warehouse-id is omitted) + DATABRICKS_HOST Databricks workspace URL (consumed by the SDK)`, + ) + .action((opts: MetricSyncFlags) => { + runMetricSync(opts, { + log: (msg) => { + if (opts.json) return; + console.log(msg); + }, + error: (msg) => console.error(msg), + }) + .then((ctx) => { + if (opts.json) { + console.log( + JSON.stringify({ + ok: true, + warehouseId: ctx.warehouseId, + metricJsonPath: ctx.metricJsonPath, + outputDir: ctx.outputDir, + metricTypesPath: ctx.metricTypesPath, + metricMetadataPath: ctx.metricMetadataPath, + }), + ); + } + process.exit(0); + }) + .catch((err: unknown) => { + if (err instanceof MetricSyncError) { + if (opts.json) { + console.log( + JSON.stringify({ + ok: false, 
+ code: err.code, + message: err.message, + ...(err.fqn ? { fqn: err.fqn } : {}), + }), + ); + } else { + console.error(`Error: ${err.message}`); + } + process.exit(exitCodeFor(err.code)); + } + + // Unexpected — preserve the raw error and exit 5. + const msg = err instanceof Error ? err.message : String(err); + if (opts.json) { + console.log( + JSON.stringify({ ok: false, code: "unknown", message: msg }), + ); + } else { + console.error(`Error: ${msg}`); + } + process.exit(exitCodeFor("unknown")); + }); + }); diff --git a/packages/shared/src/cli/commands/metric/sync/validate-metric-source.test.ts b/packages/shared/src/cli/commands/metric/sync/validate-metric-source.test.ts new file mode 100644 index 000000000..0986b192c --- /dev/null +++ b/packages/shared/src/cli/commands/metric/sync/validate-metric-source.test.ts @@ -0,0 +1,102 @@ +import { describe, expect, it } from "vitest"; +import { + formatMetricSourceErrors, + validateMetricSource, +} from "./validate-metric-source"; + +describe("validateMetricSource", () => { + it("accepts a valid SP-only configuration", () => { + const result = validateMetricSource({ + sp: { revenue: { source: "demo.public.revenue" } }, + }); + expect(result.valid).toBe(true); + expect(result.config).toBeDefined(); + }); + + it("accepts an empty configuration", () => { + expect(validateMetricSource({}).valid).toBe(true); + expect(validateMetricSource({ sp: {}, obo: {} }).valid).toBe(true); + }); + + it("rejects null/non-object inputs", () => { + expect(validateMetricSource(null).valid).toBe(false); + expect(validateMetricSource(undefined).valid).toBe(false); + expect(validateMetricSource("not an object").valid).toBe(false); + expect(validateMetricSource([1, 2, 3]).valid).toBe(false); + }); + + it("rejects bare-string source entries (must be {source})", () => { + const result = validateMetricSource({ + sp: { revenue: "demo.public.revenue" }, + }); + expect(result.valid).toBe(false); + expect(result.errors?.length).toBeGreaterThan(0); + 
}); + + it("rejects entries with unknown fields (closed v1 contract)", () => { + const result = validateMetricSource({ + sp: { revenue: { source: "demo.public.revenue", cacheTtl: 60 } }, + }); + expect(result.valid).toBe(false); + }); + + it("rejects metric keys starting with a digit", () => { + const result = validateMetricSource({ + sp: { "1bad-key": { source: "demo.public.revenue" } }, + }); + expect(result.valid).toBe(false); + }); + + it("rejects non-three-part FQNs", () => { + const result = validateMetricSource({ + sp: { revenue: { source: "two.parts" } }, + }); + expect(result.valid).toBe(false); + }); + + it("rejects unknown top-level keys", () => { + const result = validateMetricSource({ + sp: {}, + obo: {}, + extra: {}, + }); + expect(result.valid).toBe(false); + }); +}); + +describe("formatMetricSourceErrors", () => { + it("formats a 'required' error with property name", () => { + const result = validateMetricSource({ + sp: { revenue: {} }, + }); + expect(result.valid).toBe(false); + const formatted = formatMetricSourceErrors(result.errors ?? []); + expect(formatted).toContain('missing required property "source"'); + }); + + it("formats an 'additionalProperties' error with property name", () => { + const result = validateMetricSource({ + sp: { revenue: { source: "demo.public.revenue", cacheTtl: 60 } }, + }); + expect(result.valid).toBe(false); + const formatted = formatMetricSourceErrors(result.errors ?? []); + expect(formatted).toContain('unknown property "cacheTtl"'); + }); + + it("formats a 'pattern' error", () => { + const result = validateMetricSource({ + sp: { revenue: { source: "two.parts" } }, + }); + expect(result.valid).toBe(false); + const formatted = formatMetricSourceErrors(result.errors ?? 
[]); + expect(formatted).toContain("does not match expected pattern"); + }); + + it("produces a stable error message for a multi-issue input", () => { + const result = validateMetricSource({ + sp: { revenue: {}, "1bad": { source: "two.parts" } }, + }); + expect(result.valid).toBe(false); + expect(formatMetricSourceErrors(result.errors ?? [])).toMatchSnapshot(); + }); +}); diff --git a/packages/shared/src/cli/commands/metric/sync/validate-metric-source.ts b/packages/shared/src/cli/commands/metric/sync/validate-metric-source.ts new file mode 100644 index 000000000..d87b145f0 --- /dev/null +++ b/packages/shared/src/cli/commands/metric/sync/validate-metric-source.ts @@ -0,0 +1,151 @@ +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import Ajv, { type ErrorObject } from "ajv"; +import addFormats from "ajv-formats"; +import type { MetricSourceConfiguration } from "../../../../schemas/metric-source.generated"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); + +/** + * Resolve the metric-source schema path. The schema is copied to `dist/schemas` + * by tsdown at build time so the published CLI can locate it; in dev (running + * from `src/`) we walk back to the `src/schemas` checkout. + */ +const SCHEMAS_DIR = path.join(__dirname, "..", "..", "..", "..", "schemas"); +const METRIC_SOURCE_SCHEMA_PATH = path.join( + SCHEMAS_DIR, + "metric-source.schema.json", +); + +export interface ValidateMetricSourceResult { + valid: boolean; + config?: MetricSourceConfiguration; + errors?: ErrorObject[]; +} + +let compiledValidator: ReturnType | null = null; +let schemaLoadWarned = false; + +function loadSchema(schemaPath: string): object | null { + try { + return JSON.parse(fs.readFileSync(schemaPath, "utf-8")) as object; + } catch (err) { + if (!schemaLoadWarned) { + schemaLoadWarned = true; + const msg = err instanceof Error ? 
err.message : String(err); + console.warn( + `Warning: Could not load metric-source schema at ${schemaPath}: ${msg}. Falling back to basic validation.`, + ); + } + return null; + } +} + +function getValidator(): ReturnType | null { + if (compiledValidator) return compiledValidator; + const schema = loadSchema(METRIC_SOURCE_SCHEMA_PATH); + if (!schema) return null; + try { + const ajv = new Ajv({ allErrors: true, strict: false }); + addFormats(ajv); + compiledValidator = ajv.compile(schema); + return compiledValidator; + } catch { + return null; + } +} + +/** + * Validate a parsed metric.json object against the metric-source JSON Schema. + * + * The schema is the canonical contract — any malformed input is rejected at the + * CLI seam before we hand off to `syncMetrics()`. This mirrors the plugin + * `validate-manifest` pattern: when the schema cannot be loaded (e.g. dist not + * built yet) we fall back to a structural check so the CLI is still usable in + * mid-build situations, but the full schema is the source of truth. + */ +export function validateMetricSource(obj: unknown): ValidateMetricSourceResult { + if (!obj || typeof obj !== "object" || Array.isArray(obj)) { + return { + valid: false, + errors: [ + { + instancePath: "", + message: "Metric source is not a valid object", + } as ErrorObject, + ], + }; + } + + const validate = getValidator(); + if (!validate) { + // Defensive fallback when the schema can't be loaded — accept any object + // that has the rough sp/obo shape so the CLI does not hard-fail in a + // partially-built tree. The dedicated schema test exercises the strict + // path; this branch only fires in development edge cases. 
+ const m = obj as Record; + const spOk = + m.sp == null || (typeof m.sp === "object" && !Array.isArray(m.sp)); + const oboOk = + m.obo == null || (typeof m.obo === "object" && !Array.isArray(m.obo)); + if (spOk && oboOk) { + return { valid: true, config: obj as MetricSourceConfiguration }; + } + return { + valid: false, + errors: [ + { + instancePath: "", + message: "Invalid metric.json structure", + } as ErrorObject, + ], + }; + } + + const valid = validate(obj); + if (valid) return { valid: true, config: obj as MetricSourceConfiguration }; + return { valid: false, errors: validate.errors ?? [] }; +} + +/** + * Convert a JSON pointer like /sp/revenue/source to a readable path + * like sp.revenue.source for CLI output. + */ +function humanizePath(instancePath: string): string { + if (!instancePath) return "(root)"; + return instancePath.replace(/^\//, "").replace(/\//g, "."); +} + +/** + * Format AJV errors for CLI output. + * + * The output is a multi-line block (one issue per line, two-space indent) so + * it can be embedded directly under a "Invalid metric.json:" header by the + * caller. Mirrors the plugin manifest validator's formatter shape so the CLI + * UX stays consistent across `plugin validate` and `metric sync`. + */ +export function formatMetricSourceErrors(errors: ErrorObject[]): string { + const lines: string[] = []; + for (const err of errors) { + const readable = humanizePath(err.instancePath); + if (err.keyword === "required") { + lines.push( + ` ${readable}: missing required property "${err.params?.missingProperty}"`, + ); + } else if (err.keyword === "additionalProperties") { + lines.push( + ` ${readable}: unknown property "${err.params?.additionalProperty}"`, + ); + } else if (err.keyword === "pattern") { + lines.push( + ` ${readable}: does not match expected pattern${err.message ? 
` (${err.message})` : ""}`, + ); + } else if (err.keyword === "type") { + lines.push(` ${readable}: expected type "${err.params?.type}"`); + } else { + lines.push(` ${readable}: ${err.message ?? "validation error"}`); + } + } + return lines.join("\n"); +} diff --git a/packages/shared/src/cli/commands/type-generator.d.ts b/packages/shared/src/cli/commands/type-generator.d.ts index ce69781fa..b67e5d525 100644 --- a/packages/shared/src/cli/commands/type-generator.d.ts +++ b/packages/shared/src/cli/commands/type-generator.d.ts @@ -11,4 +11,81 @@ declare module "@databricks/appkit/type-generator" { outFile: string; noCache?: boolean; }): Promise; + + // ── Metric-view sync seam (consumed by the `metric sync` CLI subcommand) ── + /** + * Single column emitted into the build-time metric registry / metadata bundle. + * Mirrors `MetricColumnMetadata` in the type-generator package. + */ + export interface MetricColumnMetadata { + name: string; + type: string; + isMeasure: boolean; + description?: string; + displayName?: string; + format?: string; + timeGrains?: string[]; + } + + /** Per-metric schema captured at type-generation time. */ + export interface MetricSchema { + key: string; + source: string; + lane: "sp" | "obo"; + measures: MetricColumnMetadata[]; + dimensions: MetricColumnMetadata[]; + } + + /** Resolved entry consumed by the metric-view pipeline. */ + export interface MetricConfigResolution { + entries: Array<{ + key: string; + source: string; + lane: "sp" | "obo"; + }>; + } + + /** Shape of metric.json. */ + export interface MetricSourceConfig { + $schema?: string; + sp?: Record; + obo?: Record; + } + + export type DescribeFetcher = (fqn: string) => Promise; + + /** Per-entry sync failure surfaced by `syncMetrics()`. */ + export interface MetricSyncFailure { + key: string; + source: string; + reason: string; + } + + /** Result of `syncMetrics()`: schemas + per-entry failures. 
*/ + export interface MetricSyncResult { + schemas: MetricSchema[]; + failures: MetricSyncFailure[]; + } + + export function readMetricConfig( + queryFolder: string, + ): Promise; + export function resolveMetricConfig( + config: MetricSourceConfig, + ): MetricConfigResolution; + export function syncMetrics( + resolution: MetricConfigResolution, + fetcher: DescribeFetcher, + ): Promise; + export function createWorkspaceDescribeFetcher( + warehouseId: string, + ): DescribeFetcher; + export function generateMetricTypeDeclarations( + schemas: MetricSchema[], + ): string; + export function generateMetricsMetadataJson(schemas: MetricSchema[]): string; + + export const METRIC_TYPES_FILE: string; + export const METRIC_METADATA_FILE: string; + export const TYPES_DIR: string; } diff --git a/packages/shared/src/cli/index.ts b/packages/shared/src/cli/index.ts index 4d0ed65b7..64cbadd48 100644 --- a/packages/shared/src/cli/index.ts +++ b/packages/shared/src/cli/index.ts @@ -8,6 +8,7 @@ import { codemodCommand } from "./commands/codemod/index.js"; import { docsCommand } from "./commands/docs.js"; import { generateTypesCommand } from "./commands/generate-types.js"; import { lintCommand } from "./commands/lint.js"; +import { metricCommand } from "./commands/metric/index.js"; import { pluginCommand } from "./commands/plugin/index.js"; import { setupCommand } from "./commands/setup.js"; @@ -27,6 +28,7 @@ cmd.addCommand(generateTypesCommand); cmd.addCommand(lintCommand); cmd.addCommand(docsCommand); cmd.addCommand(pluginCommand); +cmd.addCommand(metricCommand); cmd.addCommand(codemodCommand); cmd.parse(); diff --git a/packages/shared/src/schemas/metric-source.generated.ts b/packages/shared/src/schemas/metric-source.generated.ts new file mode 100644 index 000000000..8a5a68eaf --- /dev/null +++ b/packages/shared/src/schemas/metric-source.generated.ts @@ -0,0 +1,43 @@ +// AUTO-GENERATED from metric-source.schema.json — do not edit. 
+// Run: pnpm exec tsx tools/generate-schema-types.ts +/** + * Metric key. Must be a valid identifier (letters, digits, underscores; cannot start with a digit). Becomes the route key in POST /api/analytics/metric/:key, the hook argument in useMetricView('', ...), and the MetricRegistry augmentation key. + * + * This interface was referenced by `MetricSourceConfiguration`'s JSON-Schema + * via the `definition` "metricKey". + */ +export type MetricKey = string; + +/** + * Schema for AppKit metric.json — declares Unity Catalog Metric View sources for the analytics plugin's metric-view path. Each entry under sp/obo binds a metric key to a UC metric view FQN. Object form (rather than bare string) at v1 enables future per-entry option growth without breaking changes. + */ +export interface MetricSourceConfiguration { + /** + * Reference to the JSON Schema for validation + */ + $schema?: string; + /** + * Metric views queried as the service principal. Cache scope is shared across all users. + */ + sp?: { + [k: string]: MetricEntry; + }; + /** + * Metric views queried as the requesting user (on-behalf-of). Cache scope is per-user. + */ + obo?: { + [k: string]: MetricEntry; + }; +} +/** + * A single metric view source declaration. v1 only accepts the 'source' field; future per-entry options (cacheTtl, defaultFilter, allowlists) ship as additive properties. + * + * This interface was referenced by `MetricSourceConfiguration`'s JSON-Schema + * via the `definition` "metricEntry". + */ +export interface MetricEntry { + /** + * Three-part Unity Catalog FQN of the metric view: .. 
+ */ + source: string; +} diff --git a/packages/shared/src/schemas/metric-source.schema.json b/packages/shared/src/schemas/metric-source.schema.json new file mode 100644 index 000000000..a41ef9679 --- /dev/null +++ b/packages/shared/src/schemas/metric-source.schema.json @@ -0,0 +1,58 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://databricks.github.io/appkit/schemas/metric-source.schema.json", + "title": "AppKit Metric Source Configuration", + "description": "Schema for AppKit metric.json — declares Unity Catalog Metric View sources for the analytics plugin's metric-view path. Each entry under sp/obo binds a metric key to a UC metric view FQN. Object form (rather than bare string) at v1 enables future per-entry option growth without breaking changes.", + "type": "object", + "properties": { + "$schema": { + "type": "string", + "description": "Reference to the JSON Schema for validation" + }, + "sp": { + "type": "object", + "description": "Metric views queried as the service principal. Cache scope is shared across all users.", + "additionalProperties": { + "$ref": "#/$defs/metricEntry" + }, + "propertyNames": { + "$ref": "#/$defs/metricKey" + } + }, + "obo": { + "type": "object", + "description": "Metric views queried as the requesting user (on-behalf-of). Cache scope is per-user.", + "additionalProperties": { + "$ref": "#/$defs/metricEntry" + }, + "propertyNames": { + "$ref": "#/$defs/metricKey" + } + } + }, + "additionalProperties": false, + "$defs": { + "metricKey": { + "type": "string", + "pattern": "^[a-zA-Z_][a-zA-Z0-9_]*$", + "description": "Metric key. Must be a valid identifier (letters, digits, underscores; cannot start with a digit). Becomes the route key in POST /api/analytics/metric/:key, the hook argument in useMetricView('', ...), and the MetricRegistry augmentation key." + }, + "metricEntry": { + "type": "object", + "description": "A single metric view source declaration. 
v1 only accepts the 'source' field; future per-entry options (cacheTtl, defaultFilter, allowlists) ship as additive properties.", + "required": ["source"], + "properties": { + "source": { + "type": "string", + "pattern": "^[a-zA-Z0-9_][a-zA-Z0-9_-]*\\.[a-zA-Z0-9_][a-zA-Z0-9_-]*\\.[a-zA-Z0-9_][a-zA-Z0-9_-]*$", + "description": "Three-part Unity Catalog FQN of the metric view: ..", + "examples": [ + "appkit_demo.public.revenue_metrics", + "main.analytics.customer_metrics" + ] + } + }, + "additionalProperties": false + } + } +} diff --git a/packages/shared/src/schemas/metric-source.schema.test.ts b/packages/shared/src/schemas/metric-source.schema.test.ts new file mode 100644 index 000000000..c818490c2 --- /dev/null +++ b/packages/shared/src/schemas/metric-source.schema.test.ts @@ -0,0 +1,108 @@ +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import Ajv from "ajv"; +import addFormats from "ajv-formats"; +import { describe, expect, test } from "vitest"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const SCHEMA_PATH = path.join(__dirname, "metric-source.schema.json"); + +function loadValidator() { + const schema = JSON.parse(fs.readFileSync(SCHEMA_PATH, "utf-8")); + const ajv = new Ajv({ allErrors: true, strict: false }); + addFormats(ajv); + return ajv.compile(schema); +} + +describe("metric-source.schema.json", () => { + const validate = loadValidator(); + + test("accepts a valid SP-only configuration", () => { + const config = { + $schema: + "https://databricks.github.io/appkit/schemas/metric-source.schema.json", + sp: { + revenue: { source: "appkit_demo.public.revenue_metrics" }, + }, + obo: {}, + }; + expect(validate(config)).toBe(true); + }); + + test("accepts mixed sp + obo lanes", () => { + const config = { + sp: { revenue: { source: "demo.public.revenue" } }, + obo: { customer: { source: "demo.public.customer_metrics" } }, + }; + expect(validate(config)).toBe(true); + }); + + 
test("accepts an empty configuration", () => { + expect(validate({})).toBe(true); + expect(validate({ sp: {}, obo: {} })).toBe(true); + }); + + test("rejects a bare-string entry (must be an object)", () => { + const config = { + sp: { revenue: "demo.public.revenue" as any }, + }; + expect(validate(config)).toBe(false); + }); + + test("rejects an entry without source", () => { + const config = { + sp: { revenue: {} }, + }; + expect(validate(config)).toBe(false); + }); + + test("rejects unknown fields on entries", () => { + const config = { + sp: { + revenue: { + source: "demo.public.revenue", + cacheTtl: 60, // future option, not in v1 + }, + }, + }; + expect(validate(config)).toBe(false); + }); + + test("rejects unknown top-level keys", () => { + const config = { + sp: {}, + obo: {}, + unknown: {}, + }; + expect(validate(config)).toBe(false); + }); + + test("rejects a non-three-part FQN", () => { + const cases = [ + "single", + "two.parts", + "four.parts.really.bad", + ".starts.with.dot", + "ends.with.dot.", + ]; + for (const source of cases) { + const config = { sp: { revenue: { source } as any } }; + expect(validate(config)).toBe(false); + } + }); + + test("rejects metric keys that start with a digit", () => { + const config = { + sp: { "1revenue": { source: "demo.public.revenue" } }, + }; + expect(validate(config)).toBe(false); + }); + + test("accepts metric keys with underscores", () => { + const config = { + sp: { customer_metrics: { source: "demo.public.customer_metrics" } }, + }; + expect(validate(config)).toBe(true); + }); +}); diff --git a/packages/shared/tsdown.config.ts b/packages/shared/tsdown.config.ts index d118f7ab9..28e3cd604 100644 --- a/packages/shared/tsdown.config.ts +++ b/packages/shared/tsdown.config.ts @@ -30,5 +30,9 @@ export default defineConfig({ from: "src/schemas/template-plugins.schema.json", to: "dist/schemas", }, + { + from: "src/schemas/metric-source.schema.json", + to: "dist/schemas", + }, ], }); diff --git a/pnpm-lock.yaml 
b/pnpm-lock.yaml index 684f6e2e4..947f00758 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -2157,15 +2157,9 @@ packages: resolution: {integrity: sha512-lBSBiRruFurFKXr5Hbsl2thmGweAPmddhF3jb99U4EMDA5L+e5Y1rAkOS07Nvrup7HUMBDrCV45meaxZnt28nQ==} engines: {node: '>=20.0'} - '@emnapi/core@1.7.1': - resolution: {integrity: sha512-o1uhUASyo921r2XtHYOHy7gdkGLge8ghBEQHMWmyJFoXlpU58kIrhhN3w26lpQb6dspetweapMn2CSNwQ8I4wg==} - '@emnapi/core@1.8.1': resolution: {integrity: sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==} - '@emnapi/runtime@1.7.1': - resolution: {integrity: sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==} - '@emnapi/runtime@1.8.1': resolution: {integrity: sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==} @@ -2667,9 +2661,6 @@ packages: '@mermaid-js/parser@0.6.3': resolution: {integrity: sha512-lnjOhe7zyHjc+If7yT4zoedx2vo4sHaTmtkl1+or8BRTnCtDmcTpAjpzDSfCZrshM5bCoz0GyidzadJAH1xobA==} - '@napi-rs/wasm-runtime@1.0.7': - resolution: {integrity: sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw==} - '@napi-rs/wasm-runtime@1.1.1': resolution: {integrity: sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==} @@ -5002,6 +4993,9 @@ packages: '@types/node@24.10.1': resolution: {integrity: sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==} + '@types/node@24.6.0': + resolution: {integrity: sha512-F1CBxgqwOMc4GKJ7eY22hWhBVQuMYTtqI8L0FcszYcpYX0fzfDGpez22Xau8Mgm7O9fI+zA/TYIdq3tGWfweBA==} + '@types/node@24.7.2': resolution: {integrity: sha512-/NbVmcGTP+lj5oa4yiYxxeBjRivKQ5Ns1eSZeB99ExsEQ6rX5XYU1Zy/gGxY/ilqtD4Etx9mKyrPxZRetiahhA==} @@ -5540,10 +5534,6 @@ packages: base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - 
baseline-browser-mapping@2.8.32: - resolution: {integrity: sha512-OPz5aBThlyLFgxyhdwf/s2+8ab3OvT7AdTNvKHBwpXomIYeXqpUUuT8LrdtxZSsWJ4R4CU1un4XGh5Ez3nlTpw==} - hasBin: true - baseline-browser-mapping@2.9.7: resolution: {integrity: sha512-k9xFKplee6KIio3IDbwj+uaCLpqzOwakOgmqzPezM0sFJlFKcg30vk2wOiAJtkTSfx0SSQDSe8q+mWA/fSH5Zg==} hasBin: true @@ -5624,11 +5614,6 @@ packages: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} - browserslist@4.28.0: - resolution: {integrity: sha512-tbydkR/CxfMwelN0vwdP/pLkDwyAASZ+VfWm4EOwlB6SWhx1sYnWLqo8N5j0rAzPfzfRaxt0mM/4wPU/Su84RQ==} - engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} - hasBin: true - browserslist@4.28.1: resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} @@ -5724,9 +5709,6 @@ packages: caniuse-api@3.0.0: resolution: {integrity: sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==} - caniuse-lite@1.0.30001757: - resolution: {integrity: sha512-r0nnL/I28Zi/yjk1el6ilj27tKcdjLsNqAOZr0yVjWPrSQyHgKI2INaEWw21bAQSv2LXRt1XuCS/GomNpWOxsQ==} - caniuse-lite@1.0.30001760: resolution: {integrity: sha512-7AAMPcueWELt1p3mi13HR/LHH0TJLT11cnwDJEs3xA4+CK/PLKeO9Kl1oru24htkyUKtkGCvAx4ohB0Ttry8Dw==} @@ -6797,9 +6779,6 @@ packages: ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} - electron-to-chromium@1.5.262: - resolution: {integrity: sha512-NlAsMteRHek05jRUxUR0a5jpjYq9ykk6+kO0yRaMi5moe7u0fVIOeQ3Y30A8dIiWFBNUoQGi1ljb1i5VtS9WQQ==} - electron-to-chromium@1.5.267: resolution: {integrity: sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==} @@ -11299,6 +11278,9 @@ packages: undici-types@6.21.0: resolution: {integrity: 
sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + undici-types@7.13.0: + resolution: {integrity: sha512-Ov2Rr9Sx+fRgagJ5AX0qvItZG/JKKoBRAVITs1zk7IqZGTJUwgUr7qoYBpWwakpWilTZFM98rG/AFRocu10iIQ==} + undici-types@7.14.0: resolution: {integrity: sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==} @@ -11404,12 +11386,6 @@ packages: synckit: optional: true - update-browserslist-db@1.1.4: - resolution: {integrity: sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A==} - hasBin: true - peerDependencies: - browserslist: '>= 4.21.0' - update-browserslist-db@1.2.2: resolution: {integrity: sha512-E85pfNzMQ9jpKkA7+TJAi4TJN+tBCuWh5rUcS/sv6cFi+1q9LYDwDI5dpUL0u/73EElyQ8d3TEaeW4sPedBqYA==} hasBin: true @@ -12200,15 +12176,15 @@ snapshots: '@babel/core@7.28.5': dependencies: - '@babel/code-frame': 7.27.1 - '@babel/generator': 7.28.5 + '@babel/code-frame': 7.29.0 + '@babel/generator': 7.29.1 '@babel/helper-compilation-targets': 7.27.2 '@babel/helper-module-transforms': 7.28.3(@babel/core@7.28.5) '@babel/helpers': 7.28.4 - '@babel/parser': 7.28.5 - '@babel/template': 7.27.2 - '@babel/traverse': 7.28.5 - '@babel/types': 7.28.5 + '@babel/parser': 7.29.0 + '@babel/template': 7.28.6 + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 '@jridgewell/remapping': 2.3.5 convert-source-map: 2.0.0 debug: 4.4.3 @@ -12245,13 +12221,13 @@ snapshots: '@babel/helper-annotate-as-pure@7.27.3': dependencies: - '@babel/types': 7.28.5 + '@babel/types': 7.29.0 '@babel/helper-compilation-targets@7.27.2': dependencies: '@babel/compat-data': 7.28.5 '@babel/helper-validator-option': 7.27.1 - browserslist: 4.28.0 + browserslist: 4.28.1 lru-cache: 5.1.1 semver: 6.3.1 @@ -12263,7 +12239,7 @@ snapshots: '@babel/helper-optimise-call-expression': 7.27.1 '@babel/helper-replace-supers': 7.27.1(@babel/core@7.28.5) '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 - 
'@babel/traverse': 7.28.5 + '@babel/traverse': 7.29.0 semver: 6.3.1 transitivePeerDependencies: - supports-color @@ -12290,15 +12266,15 @@ snapshots: '@babel/helper-member-expression-to-functions@7.28.5': dependencies: - '@babel/traverse': 7.28.5 - '@babel/types': 7.28.5 + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 transitivePeerDependencies: - supports-color '@babel/helper-module-imports@7.27.1': dependencies: - '@babel/traverse': 7.28.5 - '@babel/types': 7.28.5 + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 transitivePeerDependencies: - supports-color @@ -12307,13 +12283,13 @@ snapshots: '@babel/core': 7.28.5 '@babel/helper-module-imports': 7.27.1 '@babel/helper-validator-identifier': 7.28.5 - '@babel/traverse': 7.28.5 + '@babel/traverse': 7.29.0 transitivePeerDependencies: - supports-color '@babel/helper-optimise-call-expression@7.27.1': dependencies: - '@babel/types': 7.28.5 + '@babel/types': 7.29.0 '@babel/helper-plugin-utils@7.27.1': {} @@ -12331,14 +12307,14 @@ snapshots: '@babel/core': 7.28.5 '@babel/helper-member-expression-to-functions': 7.28.5 '@babel/helper-optimise-call-expression': 7.27.1 - '@babel/traverse': 7.28.5 + '@babel/traverse': 7.29.0 transitivePeerDependencies: - supports-color '@babel/helper-skip-transparent-expression-wrappers@7.27.1': dependencies: - '@babel/traverse': 7.28.5 - '@babel/types': 7.28.5 + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 transitivePeerDependencies: - supports-color @@ -12364,8 +12340,8 @@ snapshots: '@babel/helpers@7.28.4': dependencies: - '@babel/template': 7.27.2 - '@babel/types': 7.28.5 + '@babel/template': 7.28.6 + '@babel/types': 7.29.0 '@babel/parser@7.28.4': dependencies: @@ -14430,23 +14406,12 @@ snapshots: - uglify-js - webpack-cli - '@emnapi/core@1.7.1': - dependencies: - '@emnapi/wasi-threads': 1.1.0 - tslib: 2.8.1 - optional: true - '@emnapi/core@1.8.1': dependencies: '@emnapi/wasi-threads': 1.1.0 tslib: 2.8.1 optional: true - '@emnapi/runtime@1.7.1': - dependencies: - tslib: 2.8.1 
- optional: true - '@emnapi/runtime@1.8.1': dependencies: tslib: 2.8.1 @@ -14916,13 +14881,6 @@ snapshots: dependencies: langium: 3.3.1 - '@napi-rs/wasm-runtime@1.0.7': - dependencies: - '@emnapi/core': 1.7.1 - '@emnapi/runtime': 1.7.1 - '@tybys/wasm-util': 0.10.1 - optional: true - '@napi-rs/wasm-runtime@1.1.1': dependencies: '@emnapi/core': 1.8.1 @@ -16653,7 +16611,7 @@ snapshots: '@rolldown/binding-wasm32-wasi@1.0.0-beta.41': dependencies: - '@napi-rs/wasm-runtime': 1.0.7 + '@napi-rs/wasm-runtime': 1.1.1 optional: true '@rolldown/binding-wasm32-wasi@1.0.0-rc.3': @@ -17062,24 +17020,24 @@ snapshots: '@types/babel__core@7.20.5': dependencies: - '@babel/parser': 7.28.5 - '@babel/types': 7.28.5 + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 '@types/babel__generator': 7.27.0 '@types/babel__template': 7.4.4 '@types/babel__traverse': 7.28.0 '@types/babel__generator@7.27.0': dependencies: - '@babel/types': 7.28.5 + '@babel/types': 7.29.0 '@types/babel__template@7.4.4': dependencies: - '@babel/parser': 7.28.5 - '@babel/types': 7.28.5 + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 '@types/babel__traverse@7.28.0': dependencies: - '@babel/types': 7.28.5 + '@babel/types': 7.29.0 '@types/body-parser@1.19.6': dependencies: @@ -17351,6 +17309,10 @@ snapshots: dependencies: undici-types: 7.16.0 + '@types/node@24.6.0': + dependencies: + undici-types: 7.13.0 + '@types/node@24.7.2': dependencies: undici-types: 7.14.0 @@ -17799,9 +17761,9 @@ snapshots: '@opentelemetry/api': 1.9.0 zod: 4.3.6 - ajv-formats@2.1.1(ajv@8.17.1): + ajv-formats@2.1.1(ajv@8.18.0): optionalDependencies: - ajv: 8.17.1 + ajv: 8.18.0 ajv-formats@3.0.1(ajv@8.17.1): optionalDependencies: @@ -17815,9 +17777,9 @@ snapshots: dependencies: ajv: 6.12.6 - ajv-keywords@5.1.0(ajv@8.17.1): + ajv-keywords@5.1.0(ajv@8.18.0): dependencies: - ajv: 8.17.1 + ajv: 8.18.0 fast-deep-equal: 3.1.3 ajv@6.12.6: @@ -18020,8 +17982,6 @@ snapshots: base64-js@1.5.1: {} - baseline-browser-mapping@2.8.32: {} - 
baseline-browser-mapping@2.9.7: {} basic-ftp@5.0.5: {} @@ -18139,14 +18099,6 @@ snapshots: dependencies: fill-range: 7.1.1 - browserslist@4.28.0: - dependencies: - baseline-browser-mapping: 2.8.32 - caniuse-lite: 1.0.30001757 - electron-to-chromium: 1.5.262 - node-releases: 2.0.27 - update-browserslist-db: 1.1.4(browserslist@4.28.0) - browserslist@4.28.1: dependencies: baseline-browser-mapping: 2.9.7 @@ -18275,8 +18227,6 @@ snapshots: lodash.memoize: 4.1.2 lodash.uniq: 4.5.0 - caniuse-lite@1.0.30001757: {} - caniuse-lite@1.0.30001760: {} ccount@2.0.1: {} @@ -19308,8 +19258,6 @@ snapshots: ee-first@1.1.1: {} - electron-to-chromium@1.5.262: {} - electron-to-chromium@1.5.267: {} embla-carousel-react@8.6.0(react@19.2.0): @@ -20870,7 +20818,7 @@ snapshots: jest-worker@27.5.1: dependencies: - '@types/node': 25.2.3 + '@types/node': 24.6.0 merge-stream: 2.0.0 supports-color: 8.1.1 @@ -23772,9 +23720,9 @@ snapshots: schema-utils@4.3.3: dependencies: '@types/json-schema': 7.0.15 - ajv: 8.17.1 - ajv-formats: 2.1.1(ajv@8.17.1) - ajv-keywords: 5.1.0(ajv@8.17.1) + ajv: 8.18.0 + ajv-formats: 2.1.1(ajv@8.18.0) + ajv-keywords: 5.1.0(ajv@8.18.0) search-insights@2.17.3: {} @@ -24592,6 +24540,8 @@ snapshots: undici-types@6.21.0: {} + undici-types@7.13.0: {} + undici-types@7.14.0: {} undici-types@7.16.0: {} @@ -24702,12 +24652,6 @@ snapshots: dependencies: rolldown: 1.0.0-rc.5 - update-browserslist-db@1.1.4(browserslist@4.28.0): - dependencies: - browserslist: 4.28.0 - escalade: 3.2.0 - picocolors: 1.1.1 - update-browserslist-db@1.2.2(browserslist@4.28.1): dependencies: browserslist: 4.28.1 diff --git a/tools/generate-schema-types.ts b/tools/generate-schema-types.ts index 18360fb2f..f9f5e7e3d 100644 --- a/tools/generate-schema-types.ts +++ b/tools/generate-schema-types.ts @@ -1,7 +1,11 @@ /** - * Generates TypeScript interfaces from plugin-manifest.schema.json using + * Generates TypeScript interfaces from JSON Schemas using * json-schema-to-typescript. 
Single source of truth for structural types - * (ResourceFieldEntry, ResourceRequirement, PluginManifest). + * shared between packages. + * + * Currently generates: + * - plugin-manifest.generated.ts (PluginManifest, ResourceRequirement, ...) + * - metric-source.generated.ts (MetricSourceConfiguration) * * Run from repo root: pnpm exec tsx tools/generate-schema-types.ts */ @@ -13,31 +17,63 @@ import { formatWithBiome } from "./format-with-biome.ts"; const __dirname = path.dirname(fileURLToPath(import.meta.url)); const REPO_ROOT = path.join(__dirname, ".."); -const SCHEMA_PATH = path.join( - REPO_ROOT, - "packages/shared/src/schemas/plugin-manifest.schema.json", -); -const OUT_PATH = path.join( - REPO_ROOT, - "packages/shared/src/schemas/plugin-manifest.generated.ts", -); - -const BANNER = `// AUTO-GENERATED from plugin-manifest.schema.json — do not edit. + +interface SchemaJob { + schemaPath: string; + outPath: string; + bannerSource: string; + rootRename?: { fromTitle: string; toName: string }; +} + +const JOBS: SchemaJob[] = [ + { + schemaPath: path.join( + REPO_ROOT, + "packages/shared/src/schemas/plugin-manifest.schema.json", + ), + outPath: path.join( + REPO_ROOT, + "packages/shared/src/schemas/plugin-manifest.generated.ts", + ), + bannerSource: "plugin-manifest.schema.json", + rootRename: { + fromTitle: "AppKit Plugin Manifest", + toName: "PluginManifest", + }, + }, + { + schemaPath: path.join( + REPO_ROOT, + "packages/shared/src/schemas/metric-source.schema.json", + ), + outPath: path.join( + REPO_ROOT, + "packages/shared/src/schemas/metric-source.generated.ts", + ), + bannerSource: "metric-source.schema.json", + rootRename: { + fromTitle: "AppKit Metric Source Configuration", + toName: "MetricSourceConfiguration", + }, + }, +]; + +async function compileOne(job: SchemaJob): Promise { + const banner = `// AUTO-GENERATED from ${job.bannerSource} — do not edit. 
// Run: pnpm exec tsx tools/generate-schema-types.ts `; -async function main(): Promise { - const raw = await compileFromFile(SCHEMA_PATH, { + const raw = await compileFromFile(job.schemaPath, { bannerComment: "", additionalProperties: false, strictIndexSignatures: false, unreachableDefinitions: true, format: false, style: { semi: true, singleQuote: false }, - // Rename the root type (derived from schema title "AppKit Plugin Manifest") - // to "PluginManifest" for ergonomic imports. customName: (schema) => - schema.title === "AppKit Plugin Manifest" ? "PluginManifest" : undefined, + job.rootRename && schema.title === job.rootRename.fromTitle + ? job.rootRename.toName + : undefined, }); // Post-processing: work around json-schema-to-typescript limitations that @@ -45,12 +81,18 @@ async function main(): Promise { // allOf/if-then produces `{ [k: string]: unknown } & { … }` — strip the index-signature part. const output = raw.replace(/\{\s*\[k: string\]: unknown;?\s*\}\s*&\s*/g, ""); - const result = BANNER + output; + const result = banner + output; + + fs.mkdirSync(path.dirname(job.outPath), { recursive: true }); + fs.writeFileSync(job.outPath, result, "utf-8"); + formatWithBiome(job.outPath); + console.log("Wrote", job.outPath); +} - fs.mkdirSync(path.dirname(OUT_PATH), { recursive: true }); - fs.writeFileSync(OUT_PATH, result, "utf-8"); - formatWithBiome(OUT_PATH); - console.log("Wrote", OUT_PATH); +async function main(): Promise { + for (const job of JOBS) { + await compileOne(job); + } } main();