From c003f41d63860291e9645c0d9d9e4738208664c4 Mon Sep 17 00:00:00 2001 From: Alex Simonok Date: Wed, 29 Nov 2023 07:31:21 +0300 Subject: [PATCH] Add Grafana utils (#14) * Add grafana utils package * Copy grafana 10.2.1 transformers from public/app/features/transformers * Adopt private transformers * Add query utils * Update readme and changelog for grafana-utils --- package-lock.json | 748 ++++++++++++++++-- package.json | 3 +- packages/grafana-utils/.eslintignore | 4 + packages/grafana-utils/.eslintrc | 12 + packages/grafana-utils/.prettierrc.js | 10 + packages/grafana-utils/CHANGELOG.md | 7 + packages/grafana-utils/LICENSE | 201 +++++ packages/grafana-utils/README.md | 9 + packages/grafana-utils/jest-setup.ts | 6 + packages/grafana-utils/jest.config.js | 63 ++ packages/grafana-utils/package.json | 55 ++ packages/grafana-utils/rollup.config.mjs | 53 ++ .../calculateHeatmap/heatmap.test.ts | 111 +++ .../transformers/calculateHeatmap/heatmap.ts | 586 ++++++++++++++ .../transformers/calculateHeatmap/utils.ts | 119 +++ .../configFromQuery/configFromQuery.test.ts | 159 ++++ .../configFromQuery/configFromQuery.ts | 106 +++ .../extractFields/extractFields.test.ts | 307 +++++++ .../extractFields/extractFields.ts | 120 +++ .../extractFields/fieldExtractor.test.ts | 114 +++ .../extractFields/fieldExtractors.ts | 132 ++++ .../transformers/extractFields/types.ts | 17 + .../fieldToConfigMapping.ts | 345 ++++++++ .../joinByLabels/joinByLabels.test.ts | 152 ++++ .../transformers/joinByLabels/joinByLabels.ts | 138 ++++ .../partitionByValues/partition.ts | 52 ++ .../partitionByValues.test.ts | 267 +++++++ .../partitionByValues/partitionByValues.ts | 171 ++++ .../prepareTimeSeries.test.ts.snap | 142 ++++ .../prepareTimeSeries.test.ts | 433 ++++++++++ .../prepareTimeSeries/prepareTimeSeries.ts | 352 +++++++++ .../rowsToFields/rowsToFields.test.ts | 177 +++++ .../transformers/rowsToFields/rowsToFields.ts | 89 +++ .../timeSeriesTableTransformer.test.ts | 133 ++++ .../timeSeriesTableTransformer.ts | 137 ++++ .../src/grafana/transformers/utils.test.ts | 47 ++ .../src/grafana/transformers/utils.ts | 84 ++ .../src/grafana/utils/dimensions.ts | 24 + .../src/grafana/utils/filterByName.ts | 36 + packages/grafana-utils/src/index.test.ts | 7 + packages/grafana-utils/src/index.ts | 4 + .../grafana-utils/src/privateTransformers.ts | 22 + packages/grafana-utils/src/query.ts | 117 +++ packages/grafana-utils/src/types.ts | 63 ++ packages/grafana-utils/tsconfig.json | 16 + 45 files changed, 5878 insertions(+), 72 deletions(-) create mode 100644 packages/grafana-utils/.eslintignore create mode 100644 packages/grafana-utils/.eslintrc create mode 100644 packages/grafana-utils/.prettierrc.js create mode 100644 packages/grafana-utils/CHANGELOG.md create mode 100644 packages/grafana-utils/LICENSE create mode 100644 packages/grafana-utils/README.md create mode 100644 packages/grafana-utils/jest-setup.ts create mode 100644 packages/grafana-utils/jest.config.js create mode 100644 packages/grafana-utils/package.json create mode 100644 packages/grafana-utils/rollup.config.mjs create mode 100644 packages/grafana-utils/src/grafana/transformers/calculateHeatmap/heatmap.test.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/calculateHeatmap/heatmap.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/calculateHeatmap/utils.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/configFromQuery/configFromQuery.test.ts create mode 100644 
packages/grafana-utils/src/grafana/transformers/configFromQuery/configFromQuery.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/extractFields/extractFields.test.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/extractFields/extractFields.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/extractFields/fieldExtractor.test.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/extractFields/fieldExtractors.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/extractFields/types.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/fieldToConfigMapping/fieldToConfigMapping.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/joinByLabels/joinByLabels.test.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/joinByLabels/joinByLabels.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/partitionByValues/partition.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/partitionByValues/partitionByValues.test.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/partitionByValues/partitionByValues.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/__snapshots__/prepareTimeSeries.test.ts.snap create mode 100644 packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/prepareTimeSeries.test.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/prepareTimeSeries.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/rowsToFields/rowsToFields.test.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/rowsToFields/rowsToFields.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/timeSeriesTable/timeSeriesTableTransformer.test.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/timeSeriesTable/timeSeriesTableTransformer.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/utils.test.ts create mode 100644 packages/grafana-utils/src/grafana/transformers/utils.ts create mode 100644 packages/grafana-utils/src/grafana/utils/dimensions.ts create mode 100644 packages/grafana-utils/src/grafana/utils/filterByName.ts create mode 100644 packages/grafana-utils/src/index.test.ts create mode 100644 packages/grafana-utils/src/index.ts create mode 100644 packages/grafana-utils/src/privateTransformers.ts create mode 100644 packages/grafana-utils/src/query.ts create mode 100644 packages/grafana-utils/src/types.ts create mode 100644 packages/grafana-utils/tsconfig.json diff --git a/package-lock.json b/package-lock.json index 3f2c3bf..8ba6586 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,8 @@ "workspaces": [ "packages/eslint-config", "packages/jest-selectors", - "packages/components" + "packages/components", + "packages/grafana-utils" ] }, "node_modules/@aashutoshrathi/word-wrap": { @@ -2461,6 +2462,7 @@ "os": [ "android" ], + "peer": true, "engines": { "node": ">=12" } @@ -2477,6 +2479,7 @@ "os": [ "android" ], + "peer": true, "engines": { "node": ">=12" } @@ -2493,6 +2496,7 @@ "os": [ "android" ], + "peer": true, "engines": { "node": ">=12" } @@ -2509,6 +2513,7 @@ "os": [ "darwin" ], + "peer": true, "engines": { "node": ">=12" } @@ -2525,6 +2530,7 @@ "os": [ "darwin" ], + "peer": true, "engines": { "node": ">=12" } @@ -2541,6 +2547,7 @@ "os": [ "freebsd" ], + "peer": true, "engines": { "node": ">=12" } @@ -2557,6 +2564,7 @@ "os": [ "freebsd" ], + 
"peer": true, "engines": { "node": ">=12" } @@ -2573,6 +2581,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=12" } @@ -2589,6 +2598,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=12" } @@ -2605,6 +2615,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=12" } @@ -2621,6 +2632,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=12" } @@ -2637,6 +2649,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=12" } @@ -2653,6 +2666,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=12" } @@ -2669,6 +2683,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=12" } @@ -2685,6 +2700,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=12" } @@ -2701,6 +2717,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=12" } @@ -2717,6 +2734,7 @@ "os": [ "netbsd" ], + "peer": true, "engines": { "node": ">=12" } @@ -2733,6 +2751,7 @@ "os": [ "openbsd" ], + "peer": true, "engines": { "node": ">=12" } @@ -2749,6 +2768,7 @@ "os": [ "sunos" ], + "peer": true, "engines": { "node": ">=12" } @@ -2765,6 +2785,7 @@ "os": [ "win32" ], + "peer": true, "engines": { "node": ">=12" } @@ -2781,6 +2802,7 @@ "os": [ "win32" ], + "peer": true, "engines": { "node": ">=12" } @@ -2797,6 +2819,7 @@ "os": [ "win32" ], + "peer": true, "engines": { "node": ">=12" } @@ -2866,9 +2889,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.53.0.tgz", - "integrity": "sha512-Kn7K8dx/5U6+cT1yEhpX1w4PCSg0M+XyRILPgvwcEBjerFWCwQj5sbr3/VmxqV0JGHCBCzyd6LxypEuehypY1w==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.54.0.tgz", + "integrity": "sha512-ut5V+D+fOoWPgGGNj83GGjnntO39xDy6DWxO0wb7Jp3DcMX0TfIqdzHF85VTQkerdyGmuuMD9AKAo5KiNlf/AQ==", "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } @@ -3484,7 +3507,6 @@ "version": "27.5.1", "resolved": "https://registry.npmjs.org/@jest/create-cache-key-function/-/create-cache-key-function-27.5.1.tgz", "integrity": "sha512-dmH1yW+makpTSURTy8VzdUwFnfQh1G8R+DxO2Ho2FFmBbKFEVm+3jWdvFhE2VqB/LATCTokkP0dotjyQyw5/AQ==", - "dev": true, "dependencies": { "@jest/types": "^27.5.1" }, @@ -3496,7 +3518,6 @@ "version": "27.5.1", "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.5.1.tgz", "integrity": "sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw==", - "dev": true, "dependencies": { "@types/istanbul-lib-coverage": "^2.0.0", "@types/istanbul-reports": "^3.0.0", @@ -3512,7 +3533,6 @@ "version": "16.0.8", "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.8.tgz", "integrity": "sha512-1GwLEkmFafeb/HbE6pC7tFlgYSQ4Iqh2qlWCq8xN+Qfaiaxr2PcLfuhfRFRYqI6XJyeFoLYyKnhFbNsst9FMtQ==", - "dev": true, "dependencies": { "@types/yargs-parser": "*" } @@ -5342,6 +5362,109 @@ "react": "^16.8.0 || ^17.0.0-rc.1 || ^18.0.0" } }, + "node_modules/@rollup/plugin-commonjs": { + "version": "25.0.7", + "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-25.0.7.tgz", + "integrity": "sha512-nEvcR+LRjEjsaSsc4x3XZfCCvZIaSMenZu/OiwOKGN2UhQpAYI7ru7czFvyWbErlpoGjnSX3D5Ch5FcMA3kRWQ==", + "dev": true, + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "commondir": "^1.0.1", + "estree-walker": "^2.0.2", + "glob": "^8.0.3", + "is-reference": "1.2.1", + "magic-string": "^0.30.3" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.68.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + 
"optional": true + } + } + }, + "node_modules/@rollup/plugin-commonjs/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@rollup/plugin-commonjs/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@rollup/plugin-inject": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/@rollup/plugin-inject/-/plugin-inject-5.0.5.tgz", + "integrity": "sha512-2+DEJbNBoPROPkgTDNe8/1YXWcqxbN5DTjASVIOx8HS+pITXushyNiBV56RB08zuptzz8gT3YfkqriTBVycepg==", + "dev": true, + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "estree-walker": "^2.0.2", + "magic-string": "^0.30.3" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/plugin-node-resolve": { + "version": "15.2.3", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.2.3.tgz", + "integrity": "sha512-j/lym8nf5E21LwBT4Df1VD6hRO2L2iwUeUmP7litikRsVp1H6NWx20NEp0Y7su+7XGc476GnXXc4kFeZNGmaSQ==", + "dev": true, + "dependencies": { + "@rollup/pluginutils": "^5.0.1", + "@types/resolve": "1.20.2", + "deepmerge": "^4.2.2", + "is-builtin-module": "^3.2.1", + "is-module": "^1.0.0", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^2.78.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, "node_modules/@rollup/plugin-terser": { "version": "0.4.4", "resolved": "https://registry.npmjs.org/@rollup/plugin-terser/-/plugin-terser-0.4.4.tgz", @@ -7217,7 +7340,6 @@ "version": "1.3.96", "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.96.tgz", "integrity": "sha512-zwE3TLgoZwJfQygdv2SdCK9mRLYluwDOM53I+dT6Z5ZvrgVENmY3txvWDvduzkV+/8IuvrRbVezMpxcojadRdQ==", - "dev": true, "hasInstallScript": true, "dependencies": { "@swc/counter": "^0.1.1", @@ -7258,11 +7380,11 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "darwin" ], + "peer": true, "engines": { "node": ">=10" } @@ -7274,11 +7396,11 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "darwin" ], + "peer": true, "engines": { "node": ">=10" } @@ -7290,11 +7412,11 @@ "cpu": [ "arm" ], - "dev": true, "optional": true, "os": [ "linux" ], + "peer": true, "engines": { "node": ">=10" } @@ -7306,11 +7428,11 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "linux" ], + "peer": true, "engines": { "node": ">=10" } @@ -7322,11 +7444,11 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "linux" ], + "peer": true, "engines": { "node": ">=10" } @@ -7338,11 +7460,11 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "linux" ], + "peer": true, "engines": { "node": ">=10" } @@ -7354,11 +7476,11 @@ 
"cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "linux" ], + "peer": true, "engines": { "node": ">=10" } @@ -7370,11 +7492,11 @@ "cpu": [ "arm64" ], - "dev": true, "optional": true, "os": [ "win32" ], + "peer": true, "engines": { "node": ">=10" } @@ -7386,11 +7508,11 @@ "cpu": [ "ia32" ], - "dev": true, "optional": true, "os": [ "win32" ], + "peer": true, "engines": { "node": ">=10" } @@ -7402,11 +7524,11 @@ "cpu": [ "x64" ], - "dev": true, "optional": true, "os": [ "win32" ], + "peer": true, "engines": { "node": ">=10" } @@ -7414,8 +7536,7 @@ "node_modules/@swc/counter": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.2.tgz", - "integrity": "sha512-9F4ys4C74eSTEUNndnER3VJ15oru2NumfQxS8geE+f3eB5xvfxpWyqE5XlVnxb/R14uoXi6SLbBwwiDSkv+XEw==", - "dev": true + "integrity": "sha512-9F4ys4C74eSTEUNndnER3VJ15oru2NumfQxS8geE+f3eB5xvfxpWyqE5XlVnxb/R14uoXi6SLbBwwiDSkv+XEw==" }, "node_modules/@swc/helpers": { "version": "0.5.3", @@ -7429,7 +7550,6 @@ "version": "0.2.29", "resolved": "https://registry.npmjs.org/@swc/jest/-/jest-0.2.29.tgz", "integrity": "sha512-8reh5RvHBsSikDC3WGCd5ZTd2BXKkyOdK7QwynrCH58jk2cQFhhHhFBg/jvnWZehUQe/EoOImLENc9/DwbBFow==", - "dev": true, "dependencies": { "@jest/create-cache-key-function": "^27.4.2", "jsonc-parser": "^3.2.0" @@ -7444,8 +7564,7 @@ "node_modules/@swc/types": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", - "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", - "dev": true + "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==" }, "node_modules/@testing-library/dom": { "version": "9.3.3", @@ -7764,14 +7883,12 @@ "node_modules/@types/istanbul-lib-coverage": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", - "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", - "dev": true + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==" }, "node_modules/@types/istanbul-lib-report": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", - "dev": true, "dependencies": { "@types/istanbul-lib-coverage": "*" } @@ -7780,15 +7897,14 @@ "version": "3.0.4", "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", - "dev": true, "dependencies": { "@types/istanbul-lib-report": "*" } }, "node_modules/@types/jest": { - "version": "29.5.8", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.8.tgz", - "integrity": "sha512-fXEFTxMV2Co8ZF5aYFJv+YeA08RTYJfhtN5c9JSv/mFEMe+xxjufCb+PHL+bJcMs/ebPUsBu+UNTEz+ydXrR6g==", + "version": "29.5.10", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.10.tgz", + "integrity": "sha512-tE4yxKEphEyxj9s4inideLHktW/x6DwesIwWZ9NN1FKf9zbJYsnhBoA9vrHA/IuIOKwPa5PcFBNV4lpMIOEzyQ==", "dev": true, "dependencies": { "expect": "^29.0.0", @@ -7849,9 +7965,9 @@ "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==" }, "node_modules/@types/lodash": { - "version": 
"4.14.201", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.201.tgz", - "integrity": "sha512-y9euML0cim1JrykNxADLfaG0FgD1g/yTHwUs/Jg9ZIU7WKj2/4IW9Lbb1WZbvck78W/lfGXFfe+u2EGfIJXdLQ==" + "version": "4.14.202", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.202.tgz", + "integrity": "sha512-OvlIYQK9tNneDlS0VN54LLd5uiPCBOp7gS5Z0f1mjoJYBrtStzgmJBxONW3U6OZqdtNzZPmn9BS/7WI7BFFcFQ==" }, "node_modules/@types/lodash.memoize": { "version": "4.1.9", @@ -7880,10 +7996,9 @@ "dev": true }, "node_modules/@types/node": { - "version": "20.9.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.9.1.tgz", - "integrity": "sha512-HhmzZh5LSJNS5O8jQKpJ/3ZcrrlG6L70hpGqMIAoM9YVD0YBRNWYsfwcXq8VnSjlNpCpgLzMXdiPo+dxcvSmiA==", - "dev": true, + "version": "20.10.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.0.tgz", + "integrity": "sha512-D0WfRmU9TQ8I9PFx9Yc+EBHw+vSpIub4IDvQivcp26PtPrdMGAq5SDcpXEo/epqa/DXotVpekHiLNTg3iaKXBQ==", "dependencies": { "undici-types": "~5.26.4" } @@ -7969,6 +8084,12 @@ "@types/react": "*" } }, + "node_modules/@types/resolve": { + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", + "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", + "dev": true + }, "node_modules/@types/scheduler": { "version": "0.16.6", "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.6.tgz", @@ -8035,19 +8156,18 @@ "node_modules/@types/yargs-parser": { "version": "21.0.3", "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", - "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", - "dev": true + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.11.0.tgz", - "integrity": "sha512-uXnpZDc4VRjY4iuypDBKzW1rz9T5YBBK0snMn8MaTSNd2kMlj50LnLBABELjJiOL5YHk7ZD8hbSpI9ubzqYI0w==", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.12.0.tgz", + "integrity": "sha512-XOpZ3IyJUIV1b15M7HVOpgQxPPF7lGXgsfcEIu3yDxFPaf/xZKt7s9QO/pbk7vpWQyVulpJbu4E5LwpZiQo4kA==", "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.11.0", - "@typescript-eslint/type-utils": "6.11.0", - "@typescript-eslint/utils": "6.11.0", - "@typescript-eslint/visitor-keys": "6.11.0", + "@typescript-eslint/scope-manager": "6.12.0", + "@typescript-eslint/type-utils": "6.12.0", + "@typescript-eslint/utils": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -8072,6 +8192,50 @@ } } }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz", + "integrity": "sha512-5gUvjg+XdSj8pcetdL9eXJzQNTl3RD7LgUiYTl8Aabdi8hFkaGSYnaS6BLc0BGNaDH+tVzVwmKtWvu0jLgWVbw==", + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" 
+ } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", + "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", + "integrity": "sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/parser": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.11.0.tgz", @@ -8116,12 +8280,12 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.11.0.tgz", - "integrity": "sha512-nA4IOXwZtqBjIoYrJcYxLRO+F9ri+leVGoJcMW1uqr4r1Hq7vW5cyWrA43lFbpRvQ9XgNrnfLpIkO3i1emDBIA==", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.12.0.tgz", + "integrity": "sha512-WWmRXxhm1X8Wlquj+MhsAG4dU/Blvf1xDgGaYCzfvStP2NwPQh6KBvCDbiOEvaE0filhranjIlK/2fSTVwtBng==", "dependencies": { - "@typescript-eslint/typescript-estree": "6.11.0", - "@typescript-eslint/utils": "6.11.0", + "@typescript-eslint/typescript-estree": "6.12.0", + "@typescript-eslint/utils": "6.12.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -8141,6 +8305,60 @@ } } }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", + "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.12.0.tgz", + "integrity": "sha512-vw9E2P9+3UUWzhgjyyVczLWxZ3GuQNT7QpnIY3o5OMeLO/c8oHljGc8ZpryBMIyympiAAaKgw9e5Hl9dCWFOYw==", + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.12.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", + "integrity": "sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/types": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.11.0.tgz", @@ -8180,16 +8398,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.11.0.tgz", - "integrity": "sha512-p23ibf68fxoZy605dc0dQAEoUsoiNoP3MD9WQGiHLDuTSOuqoTsa4oAy+h3KDkTcxbbfOtUjb9h3Ta0gT4ug2g==", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.12.0.tgz", + "integrity": "sha512-LywPm8h3tGEbgfyjYnu3dauZ0U7R60m+miXgKcZS8c7QALO9uWJdvNoP+duKTk2XMWc7/Q3d/QiCuLN9X6SWyQ==", "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.11.0", - "@typescript-eslint/types": "6.11.0", - "@typescript-eslint/typescript-estree": "6.11.0", + "@typescript-eslint/scope-manager": "6.12.0", + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/typescript-estree": "6.12.0", "semver": "^7.5.4" }, "engines": { @@ -8203,6 +8421,76 @@ "eslint": "^7.0.0 || ^8.0.0" } }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz", + "integrity": "sha512-5gUvjg+XdSj8pcetdL9eXJzQNTl3RD7LgUiYTl8Aabdi8hFkaGSYnaS6BLc0BGNaDH+tVzVwmKtWvu0jLgWVbw==", + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", + "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.12.0.tgz", + "integrity": "sha512-vw9E2P9+3UUWzhgjyyVczLWxZ3GuQNT7QpnIY3o5OMeLO/c8oHljGc8ZpryBMIyympiAAaKgw9e5Hl9dCWFOYw==", + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + 
"node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", + "integrity": "sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/visitor-keys": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.11.0.tgz", @@ -8232,6 +8520,10 @@ "resolved": "packages/eslint-config", "link": true }, + "node_modules/@volkovlabs/grafana-utils": { + "resolved": "packages/grafana-utils", + "link": true + }, "node_modules/@volkovlabs/jest-selectors": { "resolved": "packages/jest-selectors", "link": true @@ -9574,6 +9866,18 @@ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, + "node_modules/builtin-modules": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/bundle-name": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", @@ -11613,6 +11917,11 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, + "node_modules/dom-walk": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", + "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" + }, "node_modules/domelementtype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", @@ -12038,14 +12347,14 @@ } }, "node_modules/eslint": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.53.0.tgz", - "integrity": "sha512-N4VuiPjXDUa4xVeV/GC/RV3hQW9Nw+Y463lkWaKKXKYMvmRiRDAtfpuPFLN+E1/6ZhyR8J2ig+eVREnYgUsiag==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.54.0.tgz", + "integrity": "sha512-NY0DfAkM8BIZDVl6PgSa1ttZbx3xHgJzSNJKYcQglem6CppHyMhRIQkBVSSMaSRnLhig3jsDbEzOjwCVt4AmmA==", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.3", - "@eslint/js": "8.53.0", + "@eslint/js": "8.54.0", "@humanwhocodes/config-array": "^0.11.13", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -13454,6 +13763,15 @@ "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", "dev": true }, + "node_modules/global": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", + "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", + "dependencies": { + "min-document": "^2.19.0", + "process": "^0.11.10" + } + }, "node_modules/globals": { "version": "13.23.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.23.0.tgz", @@ -14162,6 +14480,21 @@ "url": 
"https://github.com/sponsors/ljharb" } }, + "node_modules/is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dev": true, + "dependencies": { + "builtin-modules": "^3.3.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-callable": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", @@ -14325,6 +14658,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", + "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", + "dev": true + }, "node_modules/is-nan": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/is-nan/-/is-nan-1.3.2.tgz", @@ -14397,6 +14736,15 @@ "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", "dev": true }, + "node_modules/is-reference": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz", + "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", + "dev": true, + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -15991,8 +16339,7 @@ "node_modules/jsonc-parser": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", - "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", - "dev": true + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==" }, "node_modules/jsonfile": { "version": "6.1.0", @@ -16432,6 +16779,14 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/min-document": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", + "integrity": "sha512-9Wy1B3m3f66bPPmU5hdA4DR4PB2OfDU/+GS3yAB7IQozE3tqXaVv2zOjgla7MEGSRv95+ILmOuvhLkOK6wJtCQ==", + "dependencies": { + "dom-walk": "^0.1.0" + } + }, "node_modules/min-indent": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", @@ -17562,7 +17917,6 @@ "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true, "engines": { "node": ">= 0.6.0" } @@ -21212,9 +21566,9 @@ "dev": true }, "node_modules/typescript": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", - "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.2.tgz", + "integrity": "sha512-6l+RyNy7oAHDfxC4FzSJcz9vnjTKxrLpDG5M2Vu4SHRVNg6xzqZp6LYSR9zjqQTu8DU/f5xwxUdADOkbrIX2gQ==", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -21261,8 +21615,7 @@ "node_modules/undici-types": { "version": "5.26.5", "resolved": 
"https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "dev": true + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" }, "node_modules/unicode-canonical-property-names-ecmascript": { "version": "2.0.0", @@ -22281,7 +22634,7 @@ }, "packages/components": { "name": "@volkovlabs/components", - "version": "1.1.0", + "version": "1.2.0", "license": "Apache-2.0", "dependencies": { "@emotion/css": "^11.11.2", @@ -22797,7 +23150,7 @@ }, "packages/eslint-config": { "name": "@volkovlabs/eslint-config", - "version": "1.1.0", + "version": "1.2.1", "license": "Apache-2.0", "dependencies": { "@typescript-eslint/eslint-plugin": "^6.0.0", @@ -22809,6 +23162,259 @@ "eslint": "^8.0.0" } }, + "packages/grafana-utils": { + "version": "10.2.1", + "license": "Apache-2.0", + "dependencies": { + "@grafana/data": "^10.2.1", + "@swc/jest": "^0.2.29", + "global": "^4.4.0" + }, + "devDependencies": { + "@rollup/plugin-commonjs": "^25.0.7", + "@rollup/plugin-inject": "^5.0.5", + "@rollup/plugin-node-resolve": "^15.2.3", + "@rollup/plugin-terser": "^0.4.4", + "@types/jest": "^29.5.10", + "@types/lodash": "^4.14.202", + "@types/node": "^20.10.0", + "@typescript-eslint/eslint-plugin": "^6.12.0", + "@volkovlabs/eslint-config": "^1.2.1", + "eslint": "^8.54.0", + "eslint-config-prettier": "^9.0.0", + "eslint-plugin-prettier": "^5.0.1", + "jest": "^29.7.0", + "rollup": "^4.6.0", + "rollup-plugin-dts": "^6.1.0", + "rollup-plugin-esbuild": "^6.1.0", + "typescript": "^5.3.2" + } + }, + "packages/grafana-utils/node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.6.0.tgz", + "integrity": "sha512-keHkkWAe7OtdALGoutLY3utvthkGF+Y17ws9LYT8pxMBYXaCoH/8dXS2uzo6e8+sEhY7y/zi5RFo22Dy2lFpDw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-android-arm64": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.6.0.tgz", + "integrity": "sha512-y3Kt+34smKQNWilicPbBz/MXEY7QwDzMFNgwEWeYiOhUt9MTWKjHqe3EVkXwT2fR7izOvHpDWZ0o2IyD9SWX7A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.6.0.tgz", + "integrity": "sha512-oLzzxcUIHltHxOCmaXl+pkIlU+uhSxef5HfntW7RsLh1eHm+vJzjD9Oo4oUKso4YuP4PpbFJNlZjJuOrxo8dPg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-darwin-x64": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.6.0.tgz", + "integrity": "sha512-+ANnmjkcOBaV25n0+M0Bere3roeVAnwlKW65qagtuAfIxXF9YxUneRyAn/RDcIdRa7QrjRNJL3jR7T43ObGe8Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.6.0.tgz", + "integrity": 
"sha512-tBTSIkjSVUyrekddpkAqKOosnj1Fc0ZY0rJL2bIEWPKqlEQk0paORL9pUIlt7lcGJi3LzMIlUGXvtNi1Z6MOCQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.6.0.tgz", + "integrity": "sha512-Ed8uJI3kM11de9S0j67wAV07JUNhbAqIrDYhQBrQW42jGopgheyk/cdcshgGO4fW5Wjq97COCY/BHogdGvKVNQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.6.0.tgz", + "integrity": "sha512-mZoNQ/qK4D7SSY8v6kEsAAyDgznzLLuSFCA3aBHZTmf3HP/dW4tNLTtWh9+LfyO0Z1aUn+ecpT7IQ3WtIg3ViQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.6.0.tgz", + "integrity": "sha512-rouezFHpwCqdEXsqAfNsTgSWO0FoZ5hKv5p+TGO5KFhyN/dvYXNMqMolOb8BkyKcPqjYRBeT+Z6V3aM26rPaYg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.6.0.tgz", + "integrity": "sha512-Bbm+fyn3S6u51urfj3YnqBXg5vI2jQPncRRELaucmhBVyZkbWClQ1fEsRmdnCPpQOQfkpg9gZArvtMVkOMsh1w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.6.0.tgz", + "integrity": "sha512-+MRMcyx9L2kTrTUzYmR61+XVsliMG4odFb5UmqtiT8xOfEicfYAGEuF/D1Pww1+uZkYhBqAHpvju7VN+GnC3ng==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.6.0.tgz", + "integrity": "sha512-rxfeE6K6s/Xl2HGeK6cO8SiQq3k/3BYpw7cfhW5Bk2euXNEpuzi2cc7llxx1si1QgwfjNtdRNTGqdBzGlFZGFw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "packages/grafana-utils/node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.6.0.tgz", + "integrity": "sha512-QqmCsydHS172Y0Kc13bkMXvipbJSvzeglBncJG3LsYJSiPlxYACz7MmJBs4A8l1oU+jfhYEIC/+AUSlvjmiX/g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "packages/grafana-utils/node_modules/rollup": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.6.0.tgz", + "integrity": "sha512-R8i5Her4oO1LiMQ3jKf7MUglYV/mhQ5g5OKeld5CnkmPdIGo79FDDQYqPhq/PCVuTQVuxsWgIbDy9F+zdHn80w==", + "dev": true, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.6.0", + "@rollup/rollup-android-arm64": "4.6.0", + 
"@rollup/rollup-darwin-arm64": "4.6.0", + "@rollup/rollup-darwin-x64": "4.6.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.6.0", + "@rollup/rollup-linux-arm64-gnu": "4.6.0", + "@rollup/rollup-linux-arm64-musl": "4.6.0", + "@rollup/rollup-linux-x64-gnu": "4.6.0", + "@rollup/rollup-linux-x64-musl": "4.6.0", + "@rollup/rollup-win32-arm64-msvc": "4.6.0", + "@rollup/rollup-win32-ia32-msvc": "4.6.0", + "@rollup/rollup-win32-x64-msvc": "4.6.0", + "fsevents": "~2.3.2" + } + }, + "packages/grafana-utils/node_modules/rollup-plugin-dts": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/rollup-plugin-dts/-/rollup-plugin-dts-6.1.0.tgz", + "integrity": "sha512-ijSCPICkRMDKDLBK9torss07+8dl9UpY9z1N/zTeA1cIqdzMlpkV3MOOC7zukyvQfDyxa1s3Dl2+DeiP/G6DOw==", + "dev": true, + "dependencies": { + "magic-string": "^0.30.4" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/Swatinem" + }, + "optionalDependencies": { + "@babel/code-frame": "^7.22.13" + }, + "peerDependencies": { + "rollup": "^3.29.4 || ^4", + "typescript": "^4.5 || ^5.0" + } + }, + "packages/grafana-utils/node_modules/rollup-plugin-esbuild": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/rollup-plugin-esbuild/-/rollup-plugin-esbuild-6.1.0.tgz", + "integrity": "sha512-HPpXU65V8bSpW8eSYPahtUJaJHmbxJGybuf/M8B3bz/6i11YaYHlNNJIQ38gSEV0FyohQOgVxJ2YMEEZtEmwvA==", + "dev": true, + "dependencies": { + "@rollup/pluginutils": "^5.0.5", + "debug": "^4.3.4", + "es-module-lexer": "^1.3.1", + "get-tsconfig": "^4.7.2" + }, + "engines": { + "node": ">=14.18.0" + }, + "peerDependencies": { + "esbuild": ">=0.18.0", + "rollup": "^1.20.0 || ^2.0.0 || ^3.0.0 || ^4.0.0" + } + }, "packages/jest-selectors": { "name": "@volkovlabs/jest-selectors", "version": "1.2.0", diff --git a/package.json b/package.json index 81fa992..4fbe1f7 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,7 @@ "workspaces": [ "packages/eslint-config", "packages/jest-selectors", - "packages/components" + "packages/components", + "packages/grafana-utils" ] } diff --git a/packages/grafana-utils/.eslintignore b/packages/grafana-utils/.eslintignore new file mode 100644 index 0000000..0215f2f --- /dev/null +++ b/packages/grafana-utils/.eslintignore @@ -0,0 +1,4 @@ +dist/ + +src/grafana/ +test/matchers/ diff --git a/packages/grafana-utils/.eslintrc b/packages/grafana-utils/.eslintrc new file mode 100644 index 0000000..e19a44a --- /dev/null +++ b/packages/grafana-utils/.eslintrc @@ -0,0 +1,12 @@ +{ + "env": { + "jest": true + }, + "extends": ["plugin:prettier/recommended", "@volkovlabs/eslint-config"], + "plugins": ["@typescript-eslint/eslint-plugin"], + "parser": "@typescript-eslint/parser", + "parserOptions": { + "project": "tsconfig.json", + "sourceType": "module" + } +} diff --git a/packages/grafana-utils/.prettierrc.js b/packages/grafana-utils/.prettierrc.js new file mode 100644 index 0000000..b76db6d --- /dev/null +++ b/packages/grafana-utils/.prettierrc.js @@ -0,0 +1,10 @@ +module.exports = { + endOfLine: 'auto', + printWidth: 120, + trailingComma: 'es5', + semi: true, + jsxSingleQuote: false, + singleQuote: true, + useTabs: false, + tabWidth: 2, +}; diff --git a/packages/grafana-utils/CHANGELOG.md b/packages/grafana-utils/CHANGELOG.md new file mode 100644 index 0000000..426b1c0 --- /dev/null +++ b/packages/grafana-utils/CHANGELOG.md @@ -0,0 +1,7 @@ +# Change Log + +## 1.0.0 (2023-11-28) + +### Features / Enhancements + +- Initial Release based on @grafana/data 10.2.1 (#14) diff --git a/packages/grafana-utils/LICENSE 
b/packages/grafana-utils/LICENSE new file mode 100644 index 0000000..9130e4c --- /dev/null +++ b/packages/grafana-utils/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 Volkov Labs + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/grafana-utils/README.md b/packages/grafana-utils/README.md new file mode 100644 index 0000000..74911f3 --- /dev/null +++ b/packages/grafana-utils/README.md @@ -0,0 +1,9 @@ +# Grafana Utils + +- Export `@grafana/data` as a commonjs module +- Transformations from `@grafana/grafana` +- Query utils + +## License + +Apache License Version 2.0. diff --git a/packages/grafana-utils/jest-setup.ts b/packages/grafana-utils/jest-setup.ts new file mode 100644 index 0000000..b9a23d2 --- /dev/null +++ b/packages/grafana-utils/jest-setup.ts @@ -0,0 +1,6 @@ +/** + * Mock window to fix failing tests + */ +Object.defineProperty(global, 'window', { + get() {}, +}); diff --git a/packages/grafana-utils/jest.config.js b/packages/grafana-utils/jest.config.js new file mode 100644 index 0000000..ccc5479 --- /dev/null +++ b/packages/grafana-utils/jest.config.js @@ -0,0 +1,63 @@ +// We set this specifically for 2 reasons. +// 1. It makes sense for both CI tests and local tests to behave the same so issues are found earlier +// 2. Any wrong timezone handling could be hidden if we use UTC/GMT local time (which would happen in CI). +process.env.TZ = 'Pacific/Easter'; // UTC-06:00 or UTC-05:00 depending on daylight savings + +/** + * Node Modules To Transform + * @param moduleNames + * @returns {`node_modules\/(?!.*(${*})\/.*)`} + */ +const nodeModulesToTransform = (moduleNames) => `node_modules\/(?!.*(${moduleNames.join('|')})\/.*)`; + +/** + * Array of known nested grafana package dependencies that only bundle an ESM version + */ +const grafanaESModules = [ + '.pnpm', // Support using pnpm symlinked packages + '@grafana/schema', + 'd3', + 'd3-color', + 'd3-force', + 'd3-interpolate', + 'd3-scale-chromatic', + 'ol', + 'react-colorful', + 'rxjs', + 'uuid', +]; + +module.exports = { + moduleNameMapper: { + '\\.(css|scss|sass)$': 'identity-obj-proxy', + }, + modulePaths: ['/src'], + setupFilesAfterEnv: ['/jest-setup.ts'], + testEnvironment: 'node', + testMatch: [ + '/src/**/__tests__/**/*.{js,jsx,ts,tsx}', + '/src/**/*.{spec,test,jest}.{js,jsx,ts,tsx}', + '/src/**/*.{spec,test,jest}.{js,jsx,ts,tsx}', + ], + transform: { + '^.+\\.(t|j)sx?$': [ + '@swc/jest', + { + sourceMaps: 'inline', + jsc: { + parser: { + syntax: 'typescript', + tsx: true, + decorators: false, + dynamicImport: true, + }, + }, + }, + ], + }, + /** + * Jest will throw `Cannot use import statement outside module` if it tries to load an + * ES module without it being transformed first. 
./config/README.md#esm-errors-with-jest + */ + transformIgnorePatterns: [nodeModulesToTransform(grafanaESModules)], +}; diff --git a/packages/grafana-utils/package.json b/packages/grafana-utils/package.json new file mode 100644 index 0000000..f361332 --- /dev/null +++ b/packages/grafana-utils/package.json @@ -0,0 +1,55 @@ +{ + "author": "Volkov Labs", + "dependencies": { + "@grafana/data": "^10.2.1", + "@swc/jest": "^0.2.29", + "global": "^4.4.0" + }, + "description": "Utils for Grafana", + "devDependencies": { + "@rollup/plugin-commonjs": "^25.0.7", + "@rollup/plugin-inject": "^5.0.5", + "@rollup/plugin-node-resolve": "^15.2.3", + "@rollup/plugin-terser": "^0.4.4", + "@types/jest": "^29.5.10", + "@types/lodash": "^4.14.202", + "@types/node": "^20.10.0", + "@typescript-eslint/eslint-plugin": "^6.12.0", + "@volkovlabs/eslint-config": "^1.2.1", + "eslint": "^8.54.0", + "eslint-config-prettier": "^9.0.0", + "eslint-plugin-prettier": "^5.0.1", + "jest": "^29.7.0", + "rollup": "^4.6.0", + "rollup-plugin-dts": "^6.1.0", + "rollup-plugin-esbuild": "^6.1.0", + "typescript": "^5.3.2" + }, + "files": [ + "dist", + "./README.md", + "./CHANGELOG.md", + "./LICENSE" + ], + "keywords": [], + "license": "Apache-2.0", + "main": "dist/index.js", + "name": "@volkovlabs/grafana-utils", + "publishConfig": { + "access": "public", + "main": "dist/index.js", + "types": "dist/index.d.ts" + }, + "scripts": { + "build": "rollup -c", + "lint": "eslint \"{src,apps,libs,test}/**/*.{ts,tsx}\"", + "lint:fix": "eslint \"{src,apps,libs,test}/**/*.{ts,tsx}\" --fix", + "prepack": "npm run clean & npm run build", + "prettier": "prettier . --write", + "test": "jest --watch --onlyChanged", + "test:ci": "jest --maxWorkers 4 --coverage", + "typecheck": "tsc --emitDeclarationOnly false --noEmit" + }, + "types": "dist/index.d.ts", + "version": "1.0.0" +} diff --git a/packages/grafana-utils/rollup.config.mjs b/packages/grafana-utils/rollup.config.mjs new file mode 100644 index 0000000..4f1c8b5 --- /dev/null +++ b/packages/grafana-utils/rollup.config.mjs @@ -0,0 +1,53 @@ +import dts from 'rollup-plugin-dts'; +import esbuild from 'rollup-plugin-esbuild'; +import terser from '@rollup/plugin-terser'; +import { nodeResolve } from '@rollup/plugin-node-resolve'; +import commonjs from '@rollup/plugin-commonjs'; +import inject from '@rollup/plugin-inject'; +import packageJson from './package.json' assert { type: 'json' }; + +const name = packageJson.main.replace(/\.js$/, ''); + +export default [ + { + input: `src/index.ts`, + plugins: [ + commonjs(), + nodeResolve({ + resolveOnly: [ + '@grafana/data', + '@grafana/schema', + 'd3', + 'd3-color', + 'd3-force', + 'd3-interpolate', + 'd3-scale-chromatic', + 'ol', + 'react-colorful', + 'rxjs', + 'uuid', + ], + }), + inject({ + window: 'global/window', + }), + esbuild(), + terser(), + ], + output: [ + { + file: `${name}.js`, + format: 'cjs', + sourcemap: true, + }, + ], + }, + { + input: `src/index.ts`, + plugins: [dts()], + output: { + file: `${name}.d.ts`, + format: 'es', + }, + }, +]; diff --git a/packages/grafana-utils/src/grafana/transformers/calculateHeatmap/heatmap.test.ts b/packages/grafana-utils/src/grafana/transformers/calculateHeatmap/heatmap.test.ts new file mode 100644 index 0000000..256cd57 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/calculateHeatmap/heatmap.test.ts @@ -0,0 +1,111 @@ +import { FieldType, toDataFrame } from '@grafana/data'; +import { HeatmapCalculationOptions } from '@grafana/schema'; + +import { rowsToCellsHeatmap, calculateHeatmapFromData } 
from './heatmap'; + +describe('Heatmap transformer', () => { + it('calculate heatmap from input data', async () => { + const options: HeatmapCalculationOptions = { + // + }; + + const data = toDataFrame({ + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3, 4] }, + { name: 'temp', type: FieldType.number, config: { unit: 'm2' }, values: [1.1, 2.2, 3.3, 4.4] }, + ], + }); + + const heatmap = calculateHeatmapFromData([data], options); + expect(heatmap.fields.map((f) => ({ name: f.name, type: f.type, config: f.config }))).toMatchInlineSnapshot(` + [ + { + "config": {}, + "name": "xMin", + "type": "time", + }, + { + "config": { + "custom": { + "scaleDistribution": { + "type": "linear", + }, + }, + "unit": "m2", + }, + "name": "yMin", + "type": "number", + }, + { + "config": { + "unit": "short", + }, + "name": "Count", + "type": "number", + }, + ] + `); + }); + + it('convert heatmap buckets to scanlines', async () => { + const frame = toDataFrame({ + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3] }, + { name: 'A', type: FieldType.number, config: { unit: 'm2' }, values: [1.1, 1.2, 1.3] }, + { name: 'B', type: FieldType.number, config: { unit: 'm2' }, values: [2.1, 2.2, 2.3] }, + { name: 'C', type: FieldType.number, config: { unit: 'm2' }, values: [3.1, 3.2, 3.3] }, + ], + }); + + const heatmap = rowsToCellsHeatmap({ frame, value: 'Speed' }); + expect(heatmap.fields.map((f) => ({ name: f.name, type: f.type, config: f.config }))).toMatchInlineSnapshot(` + [ + { + "config": {}, + "name": "xMax", + "type": "time", + }, + { + "config": { + "unit": "short", + }, + "name": "y", + "type": "number", + }, + { + "config": { + "unit": "m2", + }, + "name": "Speed", + "type": "number", + }, + ] + `); + expect(heatmap.meta).toMatchInlineSnapshot(` + { + "custom": { + "yMatchWithLabel": undefined, + "yOrdinalDisplay": [ + "A", + "B", + "C", + ], + }, + "type": "heatmap-cells", + } + `); + expect(heatmap.fields[1].values).toMatchInlineSnapshot(` + [ + 0, + 1, + 2, + 0, + 1, + 2, + 0, + 1, + 2, + ] + `); + }); +}); diff --git a/packages/grafana-utils/src/grafana/transformers/calculateHeatmap/heatmap.ts b/packages/grafana-utils/src/grafana/transformers/calculateHeatmap/heatmap.ts new file mode 100644 index 0000000..36f38ae --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/calculateHeatmap/heatmap.ts @@ -0,0 +1,586 @@ +import { map } from 'rxjs'; + +import { + DataFrame, + DataTransformerID, + FieldType, + incrRoundUp, + incrRoundDn, + SynchronousDataTransformerInfo, + DataFrameType, + getFieldDisplayName, + Field, + getValueFormat, + formattedValueToString, + durationToMilliseconds, + parseDuration, + TransformationApplicabilityLevels, +} from '@grafana/data'; +import { isLikelyAscendingVector } from '@grafana/data'; +import { + ScaleDistribution, + HeatmapCellLayout, + HeatmapCalculationMode, + HeatmapCalculationOptions, +} from '@grafana/schema'; + +import { niceLinearIncrs, niceTimeIncrs } from './utils'; + +export interface HeatmapTransformerOptions extends HeatmapCalculationOptions { + /** the raw values will still exist in results after transformation */ + keepOriginalData?: boolean; +} + +const transformationsVariableSupport = true; + +export const heatmapTransformer: SynchronousDataTransformerInfo = { + id: DataTransformerID.heatmap, + name: 'Create heatmap', + description: 'Generate heatmap data from source data.', + defaultOptions: {}, + isApplicable: (data) => { + const { xField, yField, xs, ys } = findHeatmapFields(data); + + if (xField || yField) 
{ + return TransformationApplicabilityLevels.NotPossible; + } + + if (!xs.length || !ys.length) { + return TransformationApplicabilityLevels.NotPossible; + } + + return TransformationApplicabilityLevels.Applicable; + }, + isApplicableDescription: + 'The Heatmap transformation requires fields with Heatmap compatible data. No fields with Heatmap data could be found.', + operator: (options, ctx) => (source) => + source.pipe( + map((data) => { + if (transformationsVariableSupport) { + const optionsCopy = { + ...options, + xBuckets: { ...options.xBuckets } ?? undefined, + yBuckets: { ...options.yBuckets } ?? undefined, + }; + + if (optionsCopy.xBuckets?.value) { + optionsCopy.xBuckets.value = ctx.interpolate(optionsCopy.xBuckets.value); + } + + if (optionsCopy.yBuckets?.value) { + optionsCopy.yBuckets.value = ctx.interpolate(optionsCopy.yBuckets.value); + } + + return heatmapTransformer.transformer(optionsCopy, ctx)(data); + } else { + return heatmapTransformer.transformer(options, ctx)(data); + } + }) + ), + + transformer: (options: HeatmapTransformerOptions) => { + return (data: DataFrame[]) => { + const v = calculateHeatmapFromData(data, options); + if (options.keepOriginalData) { + return [v, ...data]; + } + return [v]; + }; + }, +}; + +function parseNumeric(v?: string | null) { + return v === '+Inf' ? Infinity : v === '-Inf' ? -Infinity : +(v ?? 0); +} + +export function sortAscStrInf(aName?: string | null, bName?: string | null) { + return parseNumeric(aName) - parseNumeric(bName); +} + +export interface HeatmapRowsCustomMeta { + /** This provides the lookup values */ + yOrdinalDisplay: string[]; + yOrdinalLabel?: string[]; + yMatchWithLabel?: string; + yMinDisplay?: string; +} + +/** simple utility to get heatmap metadata from a frame */ +export function readHeatmapRowsCustomMeta(frame?: DataFrame): HeatmapRowsCustomMeta { + return (frame?.meta?.custom ?? {}) as HeatmapRowsCustomMeta; +} + +export function isHeatmapCellsDense(frame: DataFrame) { + let foundY = false; + + for (let field of frame.fields) { + // dense heatmap frames can only have one of these fields + switch (field.name) { + case 'y': + case 'yMin': + case 'yMax': + if (foundY) { + return false; + } + + foundY = true; + } + } + + return foundY; +} + +export interface RowsHeatmapOptions { + frame: DataFrame; + value?: string; // the field value name + unit?: string; + decimals?: number; + layout?: HeatmapCellLayout; +} + +/** Given existing buckets, create a values style frame */ +// Assumes frames have already been sorted ASC and de-accumulated. +export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame { + // TODO: handle null-filling w/ fields[0].config.interval? 
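+  // The first field of the incoming frame is used as the x axis (typically time);
+  // every remaining numeric field is treated as one y bucket, and the per-bucket
+  // value arrays are transposed into a single flat cell array (one entry per
+  // x/bucket pair).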
+ const xField = opts.frame.fields[0]; + const xValues = xField.values; + const yFields = opts.frame.fields.filter((f, idx) => f.type === FieldType.number && idx > 0); + + // similar to initBins() below + const len = xValues.length * yFields.length; + const xs = new Array(len); + const ys = new Array(len); + const counts2 = new Array(len); + + const counts = yFields.map((field) => field.values.slice()); + + // transpose + counts.forEach((bucketCounts, bi) => { + for (let i = 0; i < bucketCounts.length; i++) { + counts2[counts.length * i + bi] = bucketCounts[i]; + } + }); + + const bucketBounds = Array.from({ length: yFields.length }, (v, i) => i); + + // fill flat/repeating array + for (let i = 0, yi = 0, xi = 0; i < len; yi = ++i % bucketBounds.length) { + ys[i] = bucketBounds[yi]; + + if (yi === 0 && i >= bucketBounds.length) { + xi++; + } + + xs[i] = xValues[xi]; + } + + // this name determines whether cells are drawn above, below, or centered on the values + let ordinalFieldName = yFields[0].labels?.le != null ? 'yMax' : 'y'; + switch (opts.layout) { + case HeatmapCellLayout.le: + ordinalFieldName = 'yMax'; + break; + case HeatmapCellLayout.ge: + ordinalFieldName = 'yMin'; + break; + case HeatmapCellLayout.unknown: + ordinalFieldName = 'y'; + break; + } + + const custom: HeatmapRowsCustomMeta = { + yOrdinalDisplay: yFields.map((f) => getFieldDisplayName(f, opts.frame)), + yMatchWithLabel: Object.keys(yFields[0].labels ?? {})[0], + }; + if (custom.yMatchWithLabel) { + custom.yOrdinalLabel = yFields.map((f) => f.labels?.[custom.yMatchWithLabel!] ?? ''); + if (custom.yMatchWithLabel === 'le') { + custom.yMinDisplay = '0.0'; + } + } + + // Format the labels as a value + // TODO: this leaves the internally prepended '0.0' without this formatting treatment + if (opts.unit?.length || opts.decimals != null) { + const fmt = getValueFormat(opts.unit ?? 'short'); + if (custom.yMinDisplay) { + custom.yMinDisplay = formattedValueToString(fmt(0, opts.decimals)); + } + custom.yOrdinalDisplay = custom.yOrdinalDisplay.map((name) => { + let num = +name; + + if (!Number.isNaN(num)) { + return formattedValueToString(fmt(num, opts.decimals)); + } + + return name; + }); + } + + const valueCfg = { + ...yFields[0].config, + }; + + if (valueCfg.displayNameFromDS) { + delete valueCfg.displayNameFromDS; + } + + return { + length: xs.length, + refId: opts.frame.refId, + meta: { + type: DataFrameType.HeatmapCells, + custom, + }, + fields: [ + { + name: xField.type === FieldType.time ? 'xMax' : 'x', + type: xField.type, + values: xs, + config: xField.config, + }, + { + name: ordinalFieldName, + type: FieldType.number, + values: ys, + config: { + unit: 'short', // ordinal lookup + }, + }, + { + name: opts.value?.length ? opts.value : 'Value', + type: FieldType.number, + values: counts2, + config: valueCfg, + display: yFields[0].display, + }, + ], + }; +} + +// Sorts frames ASC by numeric bucket name and de-accumulates values in each frame's Value field [1] +// similar to Prometheus result_transformer.ts -> transformToHistogramOverTime() +export function prepBucketFrames(frames: DataFrame[]): DataFrame[] { + frames = frames.slice(); + + // sort ASC by frame.name (Prometheus bucket bound) + // or use frame.fields[1].config.displayNameFromDS ? 
+ frames.sort((a, b) => sortAscStrInf(a.name, b.name)); + + // cumulative counts + const counts = frames.map((frame) => frame.fields[1].values.slice()); + + // de-accumulate + counts.reverse(); + counts.forEach((bucketCounts, bi) => { + if (bi < counts.length - 1) { + for (let i = 0; i < bucketCounts.length; i++) { + bucketCounts[i] -= counts[bi + 1][i]; + } + } + }); + counts.reverse(); + + return frames.map((frame, i) => ({ + ...frame, + fields: [ + frame.fields[0], + { + ...frame.fields[1], + values: counts[i], + }, + ], + })); +} + +export function calculateHeatmapFromData(frames: DataFrame[], options: HeatmapCalculationOptions): DataFrame { + // Find fields in the heatmap + const { xField, yField, xs, ys } = findHeatmapFields(frames); + + if (!xField || !yField) { + throw 'no heatmap fields found'; + } + + if (!xs.length || !ys.length) { + throw 'no values found'; + } + + const xBucketsCfg = options.xBuckets ?? {}; + const yBucketsCfg = options.yBuckets ?? {}; + + if (xBucketsCfg.scale?.type === ScaleDistribution.Log) { + throw 'X axis only supports linear buckets'; + } + + const scaleDistribution = options.yBuckets?.scale ?? { + type: ScaleDistribution.Linear, + }; + + const heat2d = heatmap(xs, ys, { + xSorted: isLikelyAscendingVector(xs), + xTime: xField.type === FieldType.time, + xMode: xBucketsCfg.mode, + xSize: + xBucketsCfg.mode === HeatmapCalculationMode.Size + ? durationToMilliseconds(parseDuration(xBucketsCfg.value ?? '')) + : xBucketsCfg.value + ? +xBucketsCfg.value + : undefined, + yMode: yBucketsCfg.mode, + ySize: yBucketsCfg.value ? +yBucketsCfg.value : undefined, + yLog: scaleDistribution?.type === ScaleDistribution.Log ? (scaleDistribution?.log as any) : undefined, + }); + + const frame = { + length: heat2d.x.length, + name: getFieldDisplayName(yField), + meta: { + type: DataFrameType.HeatmapCells, + }, + fields: [ + { + name: 'xMin', + type: xField.type, + values: heat2d.x, + config: xField.config, + }, + { + name: 'yMin', + type: FieldType.number, + values: heat2d.y, + config: { + ...yField.config, // keep units from the original source + custom: { + scaleDistribution, + }, + }, + }, + { + name: 'Count', + type: FieldType.number, + values: heat2d.count, + config: { + unit: 'short', // always integer + }, + }, + ], + }; + + return frame; +} + +/** + * Find fields that can be used within a heatmap + * + * @param frames + * An array of DataFrames + */ +function findHeatmapFields(frames: DataFrame[]) { + let xField: Field | undefined = undefined; + let yField: Field | undefined = undefined; + let dataLen = 0; + + // pre-allocate arrays + for (let frame of frames) { + // TODO: assumes numeric timestamps, ordered asc, without nulls + const x = frame.fields.find((f) => f.type === FieldType.time); + if (x) { + dataLen += frame.length; + } + } + + let xs: number[] = Array(dataLen); + let ys: number[] = Array(dataLen); + let j = 0; + + for (let frame of frames) { + // TODO: assumes numeric timestamps, ordered asc, without nulls + const x = frame.fields.find((f) => f.type === FieldType.time); + if (!x) { + continue; + } + + if (!xField) { + xField = x; // the first X + } + + const xValues = x.values; + for (let field of frame.fields) { + if (field !== x && field.type === FieldType.number) { + const yValues = field.values; + + for (let i = 0; i < xValues.length; i++, j++) { + xs[j] = xValues[i]; + ys[j] = yValues[i]; + } + + if (!yField) { + yField = field; + } + } + } + } + + return { xField, yField, xs, ys }; +} + +interface HeatmapOpts { + // default is 10% of data range, 
snapped to a "nice" increment + xMode?: HeatmapCalculationMode; + yMode?: HeatmapCalculationMode; + xSize?: number; + ySize?: number; + + // use Math.ceil instead of Math.floor for bucketing + xCeil?: boolean; + yCeil?: boolean; + + // log2 or log10 buckets + xLog?: 2 | 10; + yLog?: 2 | 10; + + xTime?: boolean; + yTime?: boolean; + + // optimization hints for known data ranges (sorted, pre-scanned, etc) + xMin?: number; + xMax?: number; + yMin?: number; + yMax?: number; + + xSorted?: boolean; + ySorted?: boolean; +} + +// TODO: handle NaN, Inf, -Inf, null, undefined values in xs & ys +function heatmap(xs: number[], ys: number[], opts?: HeatmapOpts) { + let len = xs.length; + + let xSorted = opts?.xSorted ?? false; + let ySorted = opts?.ySorted ?? false; + + // find x and y limits to pre-compute buckets struct + let minX = xSorted ? xs[0] : Infinity; + let minY = ySorted ? ys[0] : Infinity; + let maxX = xSorted ? xs[len - 1] : -Infinity; + let maxY = ySorted ? ys[len - 1] : -Infinity; + + let yExp = opts?.yLog; + + for (let i = 0; i < len; i++) { + if (!xSorted) { + minX = Math.min(minX, xs[i]); + maxX = Math.max(maxX, xs[i]); + } + + if (!ySorted) { + if (!yExp || ys[i] > 0) { + minY = Math.min(minY, ys[i]); + maxY = Math.max(maxY, ys[i]); + } + } + } + + //let scaleX = opts?.xLog === 10 ? Math.log10 : opts?.xLog === 2 ? Math.log2 : (v: number) => v; + //let scaleY = opts?.yLog === 10 ? Math.log10 : opts?.yLog === 2 ? Math.log2 : (v: number) => v; + + let xBinIncr = opts?.xSize ?? 0; + let yBinIncr = opts?.ySize ?? 0; + let xMode = opts?.xMode; + let yMode = opts?.yMode; + + // fall back to 10 buckets if invalid settings + if (!Number.isFinite(xBinIncr) || xBinIncr <= 0) { + xMode = HeatmapCalculationMode.Count; + xBinIncr = 20; + } + if (!Number.isFinite(yBinIncr) || yBinIncr <= 0) { + yMode = HeatmapCalculationMode.Count; + yBinIncr = 10; + } + + if (xMode === HeatmapCalculationMode.Count) { + // TODO: optionally use view range min/max instead of data range for bucket sizing + let approx = (maxX - minX) / Math.max(xBinIncr - 1, 1); + // nice-ify + let xIncrs = opts?.xTime ? niceTimeIncrs : niceLinearIncrs; + let xIncrIdx = xIncrs.findIndex((bucketSize) => bucketSize > approx) - 1; + xBinIncr = xIncrs[Math.max(xIncrIdx, 0)]; + } + + if (yMode === HeatmapCalculationMode.Count) { + // TODO: optionally use view range min/max instead of data range for bucket sizing + let approx = (maxY - minY) / Math.max(yBinIncr - 1, 1); + // nice-ify + let yIncrs = opts?.yTime ? niceTimeIncrs : niceLinearIncrs; + let yIncrIdx = yIncrs.findIndex((bucketSize) => bucketSize > approx) - 1; + yBinIncr = yIncrs[Math.max(yIncrIdx, 0)]; + } + + // console.log({ + // yBinIncr, + // xBinIncr, + // }); + + let binX = opts?.xCeil ? (v: number) => incrRoundUp(v, xBinIncr) : (v: number) => incrRoundDn(v, xBinIncr); + let binY = opts?.yCeil ? (v: number) => incrRoundUp(v, yBinIncr) : (v: number) => incrRoundDn(v, yBinIncr); + + if (yExp) { + yBinIncr = 1 / (opts?.ySize ?? 1); // sub-divides log exponents + let yLog = yExp === 2 ? Math.log2 : Math.log10; + binY = opts?.yCeil ? 
(v: number) => incrRoundUp(yLog(v), yBinIncr) : (v: number) => incrRoundDn(yLog(v), yBinIncr); + } + + let minXBin = binX(minX); + let maxXBin = binX(maxX); + let minYBin = binY(minY); + let maxYBin = binY(maxY); + + let xBinQty = Math.round((maxXBin - minXBin) / xBinIncr) + 1; + let yBinQty = Math.round((maxYBin - minYBin) / yBinIncr) + 1; + + let [xs2, ys2, counts] = initBins(xBinQty, yBinQty, minXBin, xBinIncr, minYBin, yBinIncr, yExp); + + for (let i = 0; i < len; i++) { + if (yExp && ys[i] <= 0) { + continue; + } + + const xi = (binX(xs[i]) - minXBin) / xBinIncr; + const yi = (binY(ys[i]) - minYBin) / yBinIncr; + const ci = xi * yBinQty + yi; + + counts[ci]++; + } + + return { + x: xs2, + y: ys2, + count: counts, + }; +} + +function initBins(xQty: number, yQty: number, xMin: number, xIncr: number, yMin: number, yIncr: number, yExp?: number) { + const len = xQty * yQty; + const xs = new Array(len); + const ys = new Array(len); + const counts = new Array(len); + + for (let i = 0, yi = 0, x = xMin; i < len; yi = ++i % yQty) { + counts[i] = 0; + + if (yExp) { + ys[i] = yExp ** (yMin + yi * yIncr); + } else { + ys[i] = yMin + yi * yIncr; + } + + if (yi === 0 && i >= yQty) { + x += xIncr; + } + + xs[i] = x; + } + + return [xs, ys, counts]; +} diff --git a/packages/grafana-utils/src/grafana/transformers/calculateHeatmap/utils.ts b/packages/grafana-utils/src/grafana/transformers/calculateHeatmap/utils.ts new file mode 100644 index 0000000..07a457d --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/calculateHeatmap/utils.ts @@ -0,0 +1,119 @@ +import { guessDecimals, roundDecimals } from '@grafana/data'; + +const { abs, pow } = Math; + +export const fixedDec = new Map(); + +export function genIncrs(base: number, minExp: number, maxExp: number, mults: number[]) { + let incrs = []; + + let multDec = mults.map(guessDecimals); + + for (let exp = minExp; exp < maxExp; exp++) { + let expa = abs(exp); + let mag = roundDecimals(pow(base, exp), expa); + + for (let i = 0; i < mults.length; i++) { + let _incr = mults[i] * mag; + let dec = (_incr >= 0 && exp >= 0 ? 0 : expa) + (exp >= multDec[i] ? 0 : multDec[i]); + let incr = roundDecimals(_incr, dec); + incrs.push(incr); + fixedDec.set(incr, dec); + } + } + + return incrs; +} + +const onlyWhole = (v: number) => v % 1 === 0; + +const allMults = [1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5, 5.5, 6, 6.5, 7, 7.5, 8, 8.5, 9, 9.5]; + +// ...0.01, 0.02, 0.025, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.2, 0.25, 0.3, 0.4, 0.5... +export const decIncrs = genIncrs(10, -16, 0, allMults); + +// 1, 2, 2.5, 3, 4, 5, 6, 7, 8, 9, 10, 20, 25, 30, 40, 50... +export const oneIncrs = genIncrs(10, 0, 16, allMults); + +// 1, 2, 3, 4, 5, 10, 20, 25, 50... 
+export const wholeIncrs = oneIncrs.filter(onlyWhole); + +export const numIncrs = decIncrs.concat(oneIncrs); + +export const niceLinearIncrs = decIncrs.concat(wholeIncrs); + +const sec = 1 * 1e3; +const min = 60 * sec; +const hour = 60 * min; +const day = 24 * hour; +const year = 365 * day; + +// in milliseconds +export const niceTimeIncrs = [ + 1, + 2, + 4, + 5, + 10, + 20, + 25, + 40, + 50, + 100, + 200, + 250, + 400, + 500, + + sec, + 2 * sec, + 4 * sec, + 5 * sec, + 10 * sec, + 15 * sec, + 20 * sec, + 30 * sec, + + min, + 2 * min, + 4 * min, + 5 * min, + 10 * min, + 15 * min, + 20 * min, + 30 * min, + + hour, + 2 * hour, + 4 * hour, + 6 * hour, + 8 * hour, + 12 * hour, + 18 * hour, + + day, + 2 * day, + 3 * day, + 4 * day, + 5 * day, + 6 * day, + 7 * day, + 10 * day, + 15 * day, + 30 * day, + 45 * day, + 60 * day, + 90 * day, + 180 * day, + + year, + 2 * year, + 3 * year, + 4 * year, + 5 * year, + 6 * year, + 7 * year, + 8 * year, + 9 * year, + 10 * year, +]; diff --git a/packages/grafana-utils/src/grafana/transformers/configFromQuery/configFromQuery.test.ts b/packages/grafana-utils/src/grafana/transformers/configFromQuery/configFromQuery.test.ts new file mode 100644 index 0000000..08f0f7e --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/configFromQuery/configFromQuery.test.ts @@ -0,0 +1,159 @@ +import { toDataFrame, FieldType, ReducerID } from '@grafana/data'; + +import { FieldConfigHandlerKey } from '../fieldToConfigMapping/fieldToConfigMapping'; + +import { extractConfigFromQuery, ConfigFromQueryTransformOptions } from './configFromQuery'; + +describe('config from data', () => { + const config = toDataFrame({ + fields: [ + { name: 'Time', type: FieldType.time, values: [1, 2] }, + { name: 'Max', type: FieldType.number, values: [1, 10, 50] }, + { name: 'Min', type: FieldType.number, values: [1, 10, 5] }, + { name: 'Names', type: FieldType.string, values: ['first-name', 'middle', 'last-name'] }, + ], + refId: 'A', + }); + + const seriesA = toDataFrame({ + fields: [ + { name: 'Time', type: FieldType.time, values: [1, 2, 3] }, + { + name: 'Value', + type: FieldType.number, + values: [2, 3, 4], + config: { displayName: 'SeriesA' }, + }, + ], + }); + + it('Select and apply with two frames and default mappings and reducer', () => { + const options: ConfigFromQueryTransformOptions = { + configRefId: 'A', + mappings: [], + }; + + const results = extractConfigFromQuery(options, [config, seriesA]); + expect(results.length).toBe(1); + expect(results[0].fields[1].config.max).toBe(50); + expect(results[0].fields[1].config.min).toBe(5); + }); + + it('Can apply to config frame if there is only one frame', () => { + const options: ConfigFromQueryTransformOptions = { + configRefId: 'A', + mappings: [], + }; + + const results = extractConfigFromQuery(options, [config]); + expect(results.length).toBe(1); + expect(results[0].fields[1].name).toBe('Max'); + expect(results[0].fields[1].config.max).toBe(50); + }); + + it('With ignore mappings', () => { + const options: ConfigFromQueryTransformOptions = { + configRefId: 'A', + mappings: [{ fieldName: 'Min', handlerKey: FieldConfigHandlerKey.Ignore }], + }; + + const results = extractConfigFromQuery(options, [config, seriesA]); + expect(results.length).toBe(1); + expect(results[0].fields[1].config.min).toEqual(undefined); + expect(results[0].fields[1].config.max).toEqual(50); + }); + + it('With custom mappings', () => { + const options: ConfigFromQueryTransformOptions = { + configRefId: 'A', + mappings: [{ fieldName: 'Min', handlerKey: 
'decimals' }], + }; + + const results = extractConfigFromQuery(options, [config, seriesA]); + expect(results.length).toBe(1); + expect(results[0].fields[1].config.decimals).toBe(5); + }); + + it('With custom reducer', () => { + const options: ConfigFromQueryTransformOptions = { + configRefId: 'A', + mappings: [{ fieldName: 'Max', handlerKey: 'max', reducerId: ReducerID.min }], + }; + + const results = extractConfigFromQuery(options, [config, seriesA]); + expect(results.length).toBe(1); + expect(results[0].fields[1].config.max).toBe(1); + }); + + it('With custom matcher and displayName mapping', () => { + const options: ConfigFromQueryTransformOptions = { + configRefId: 'A', + mappings: [{ fieldName: 'Names', handlerKey: 'displayName', reducerId: ReducerID.first }], + applyTo: { id: 'byName', options: 'Value' }, + }; + + const results = extractConfigFromQuery(options, [config, seriesA]); + expect(results.length).toBe(1); + expect(results[0].fields[1].config.displayName).toBe('first-name'); + }); +}); + +describe('value mapping from data', () => { + const config = toDataFrame({ + fields: [ + { name: 'value', type: FieldType.number, values: [1, 2, 3] }, + { name: 'text', type: FieldType.string, values: ['one', 'two', 'three'] }, + { name: 'color', type: FieldType.string, values: ['red', 'blue', 'green'] }, + ], + refId: 'config', + }); + + const seriesA = toDataFrame({ + fields: [ + { name: 'Time', type: FieldType.time, values: [1, 2, 3] }, + { + name: 'Value', + type: FieldType.number, + values: [1, 2, 3], + config: {}, + }, + ], + }); + + it('Should take all field values and map to value mappings', () => { + const options: ConfigFromQueryTransformOptions = { + configRefId: 'config', + mappings: [ + { fieldName: 'value', handlerKey: 'mappings.value' }, + { fieldName: 'color', handlerKey: 'mappings.color' }, + { fieldName: 'text', handlerKey: 'mappings.text' }, + ], + }; + + const results = extractConfigFromQuery(options, [config, seriesA]); + expect(results[0].fields[1].config.mappings).toMatchInlineSnapshot(` + [ + { + "options": { + "1": { + "color": "red", + "index": 0, + "text": "one", + }, + "2": { + "color": "blue", + "index": 1, + "text": "two", + }, + "3": { + "color": "green", + "index": 2, + "text": "three", + }, + }, + "type": "value", + }, + ] + `); + }); +}); diff --git a/packages/grafana-utils/src/grafana/transformers/configFromQuery/configFromQuery.ts b/packages/grafana-utils/src/grafana/transformers/configFromQuery/configFromQuery.ts new file mode 100644 index 0000000..511e525 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/configFromQuery/configFromQuery.ts @@ -0,0 +1,106 @@ +import { map } from 'rxjs/operators'; + +import { + DataFrame, + DataTransformerID, + DataTransformerInfo, + FieldMatcherID, + getFieldDisplayName, + getFieldMatcher, + MatcherConfig, + reduceField, +} from '@grafana/data'; + +import { + evaluteFieldMappings, + FieldToConfigMapping, + getFieldConfigFromFrame, +} from '../fieldToConfigMapping/fieldToConfigMapping'; + +export interface ConfigFromQueryTransformOptions { + configRefId?: string; + mappings: FieldToConfigMapping[]; + applyTo?: MatcherConfig; +} + +export function extractConfigFromQuery(options: ConfigFromQueryTransformOptions, data: DataFrame[]) { + let configFrame: DataFrame | null = null; + + for (const frame of data) { + if (frame.refId === options.configRefId) { + configFrame = frame; + break; + } + } + + if (!configFrame) { + return data; + } + + const reducedConfigFrame: DataFrame = { + fields: [], + length: 1, + }; 
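+  // Each config field is reduced to a single value (using the reducer resolved in
+  // the mapping evaluation below) so that it can be applied as field config to the
+  // matching fields of the other frames.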
+ + const mappingResult = evaluteFieldMappings(configFrame, options.mappings ?? [], false); + + // reduce config frame + for (const field of configFrame.fields) { + const newField = { ...field }; + const fieldName = getFieldDisplayName(field, configFrame); + const fieldMapping = mappingResult.index[fieldName]; + const result = reduceField({ field, reducers: [fieldMapping.reducerId] }); + newField.values = [result[fieldMapping.reducerId]]; + reducedConfigFrame.fields.push(newField); + } + + const output: DataFrame[] = []; + const matcher = getFieldMatcher(options.applyTo || { id: FieldMatcherID.numeric }); + + for (const frame of data) { + // Skip config frame in output + if (frame === configFrame && data.length > 1) { + continue; + } + + const outputFrame: DataFrame = { + fields: [], + length: frame.length, + refId: frame.refId, + }; + + for (const field of frame.fields) { + if (matcher(field, frame, data)) { + const dataConfig = getFieldConfigFromFrame(reducedConfigFrame, 0, mappingResult); + outputFrame.fields.push({ + ...field, + config: { + ...field.config, + ...dataConfig, + }, + }); + } else { + outputFrame.fields.push(field); + } + } + + output.push(outputFrame); + } + return output; +} + +export const configFromDataTransformer: DataTransformerInfo = { + id: DataTransformerID.configFromData, + name: 'Config from query results', + description: 'Set unit, min, max and more from data.', + defaultOptions: { + configRefId: 'config', + mappings: [], + }, + + /** + * Return a modified copy of the series. If the transform is not or should not + * be applied, just return the input series + */ + operator: (options) => (source) => source.pipe(map((data) => extractConfigFromQuery(options, data))), +}; diff --git a/packages/grafana-utils/src/grafana/transformers/extractFields/extractFields.test.ts b/packages/grafana-utils/src/grafana/transformers/extractFields/extractFields.test.ts new file mode 100644 index 0000000..fccfcc5 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/extractFields/extractFields.test.ts @@ -0,0 +1,307 @@ +import { DataFrame, Field, FieldType, toDataFrame } from '@grafana/data'; + +import { extractFieldsTransformer } from './extractFields'; +import { ExtractFieldsOptions, FieldExtractorID } from './types'; + +describe('Fields from JSON', () => { + it('adds fields from JSON in string', async () => { + const cfg: ExtractFieldsOptions = { + source: 'line', + replace: true, + }; + const ctx = { interpolate: (v: string) => v }; + const data = toDataFrame({ + columns: ['ts', 'line'], + rows: appl, + }); + + const frames = extractFieldsTransformer.transformer(cfg, ctx)([data]); + expect(frames.length).toEqual(1); + expect( + frames[0].fields.reduce>((acc, v) => { + acc[v.name] = v.type; + return acc; + }, {}) + ).toMatchInlineSnapshot(` + { + "a": "string", + "av": "number", + "c": "string", + "e": "number", + "ev": "string", + "h": "string", + "l": "string", + "o": "string", + "op": "string", + "s": "number", + "sym": "string", + "v": "number", + "vw": "string", + "z": "number", + } + `); + }); + + it('Get nested path values', () => { + const cfg: ExtractFieldsOptions = { + replace: true, + source: 'JSON', + format: FieldExtractorID.JSON, + jsonPaths: [ + { path: 'object.nestedArray[0]' }, + { path: 'object.nestedArray[1]' }, + { path: 'object.nestedString' }, + ], + }; + const ctx = { interpolate: (v: string) => v }; + + const frames = extractFieldsTransformer.transformer(cfg, ctx)([testDataFrame]); + expect(frames.length).toEqual(1); + 
expect(frames[0]).toMatchInlineSnapshot(` + { + "fields": [ + { + "config": {}, + "name": "object.nestedArray[0]", + "type": "number", + "values": [ + 1, + ], + }, + { + "config": {}, + "name": "object.nestedArray[1]", + "type": "number", + "values": [ + 2, + ], + }, + { + "config": {}, + "name": "object.nestedString", + "type": "string", + "values": [ + "Hallo World", + ], + }, + ], + "length": 1, + } + `); + }); + + it('Keep time field on replace', () => { + const cfg: ExtractFieldsOptions = { + replace: true, + keepTime: true, + source: 'JSON', + format: FieldExtractorID.JSON, + jsonPaths: [ + { path: 'object.nestedArray[2]' }, + { path: 'object.nestedArray[3]' }, + { path: 'object.nestedString' }, + ], + }; + const ctx = { interpolate: (v: string) => v }; + + const frames = extractFieldsTransformer.transformer(cfg, ctx)([testDataFrame]); + expect(frames.length).toEqual(1); + expect(frames[0]).toMatchInlineSnapshot(` + { + "fields": [ + { + "config": {}, + "name": "Time", + "state": { + "displayName": "Time", + "multipleFrames": false, + }, + "type": "time", + "values": [ + 1669638911691, + ], + }, + { + "config": {}, + "name": "object.nestedArray[2]", + "type": "number", + "values": [ + 3, + ], + }, + { + "config": {}, + "name": "object.nestedArray[3]", + "type": "number", + "values": [ + 4, + ], + }, + { + "config": {}, + "name": "object.nestedString", + "type": "string", + "values": [ + "Hallo World", + ], + }, + ], + "length": 1, + } + `); + }); + + it('Path is invalid', () => { + const cfg: ExtractFieldsOptions = { + replace: true, + source: 'JSON', + format: FieldExtractorID.JSON, + jsonPaths: [{ path: 'object.nestedString' }, { path: 'invalid.path' }], + }; + const ctx = { interpolate: (v: string) => v }; + + const frames = extractFieldsTransformer.transformer(cfg, ctx)([testDataFrame]); + expect(frames.length).toEqual(1); + expect(frames[0]).toMatchInlineSnapshot(` + { + "fields": [ + { + "config": {}, + "name": "object.nestedString", + "type": "string", + "values": [ + "Hallo World", + ], + }, + { + "config": {}, + "name": "invalid.path", + "type": "string", + "values": [ + "Not Found", + ], + }, + ], + "length": 1, + } + `); + }); + + it('skips null values', async () => { + const cfg: ExtractFieldsOptions = { + source: 'line', + replace: false, + }; + const ctx = { interpolate: (v: string) => v }; + + const testDataFrame: DataFrame = { + fields: [ + { config: {}, name: 'Time', type: FieldType.time, values: [1, 2] }, + { config: {}, name: 'line', type: FieldType.other, values: ['{"foo":"bar"}', null] }, + ], + length: 2, + }; + + const frames = extractFieldsTransformer.transformer(cfg, ctx)([testDataFrame]); + expect(frames.length).toEqual(1); + expect(frames[0]).toEqual({ + fields: [ + { + config: {}, + name: 'Time', + type: 'time', + values: [1, 2], + state: { + displayName: 'Time', + multipleFrames: false, + }, + }, + { + config: {}, + name: 'line', + type: 'other', + values: ['{"foo":"bar"}', null], + }, + { + name: 'foo', + values: ['bar', undefined], + type: 'string', + config: {}, + }, + ], + length: 2, + }); + }); +}); + +const testFieldTime: Field = { + config: {}, + name: 'Time', + type: FieldType.time, + values: [1669638911691], +}; + +const testFieldString: Field = { + config: {}, + name: 'String', + type: FieldType.string, + values: ['Hallo World'], +}; + +const testFieldJSON: Field = { + config: {}, + name: 'JSON', + type: FieldType.string, + values: [ + JSON.stringify({ + object: { + nestedArray: [1, 2, 3, 4], + nestedString: 'Hallo World', + }, + }), + ], +}; + 
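The JSON-path tests above only exercise the `path` option, but the extractor (see `extractFields.ts` below) also honours an optional `alias` per path that renames the resulting field. A minimal sketch of that usage, assuming an illustrative string field named `payload` and the same `ctx` stub the tests use:

```typescript
import { toDataFrame, FieldType } from '@grafana/data';

import { extractFieldsTransformer } from './extractFields';
import { ExtractFieldsOptions, FieldExtractorID } from './types';

// Illustrative frame: one JSON document per row inside a string field.
const frame = toDataFrame({
  fields: [
    { name: 'Time', type: FieldType.time, values: [1669638911691] },
    { name: 'payload', type: FieldType.string, values: ['{"object":{"nestedString":"Hallo World"}}'] },
  ],
});

const cfg: ExtractFieldsOptions = {
  source: 'payload',
  format: FieldExtractorID.JSON,
  // `alias` renames the extracted field: the output is called 'message'
  // rather than the full path 'object.nestedString'.
  jsonPaths: [{ path: 'object.nestedString', alias: 'message' }],
  keepTime: true,
  replace: true,
};

const ctx = { interpolate: (v: string) => v };
const [result] = extractFieldsTransformer.transformer(cfg, ctx)([frame]);
// result.fields -> 'Time' and 'message', with 'message' values ['Hallo World']
```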
+const testDataFrame: DataFrame = { + fields: [testFieldTime, testFieldString, testFieldJSON], + length: 1, +}; + +const appl = [ + [ + '1636678740000000000', + '{"a":"148.1673","av":41941752,"c":"148.25","e":1636678800000,"ev":"AM","h":"148.28","l":"148.22","o":"148.25","op":"148.96","s":1636678740000,"sym":"AAPL","v":2903,"vw":"148.2545","z":152}', + ], + [ + '1636678680000000000', + '{"a":"148.1673","av":41938849,"c":"148.25","e":1636678740000,"ev":"AM","h":"148.27","l":"148.25","o":"148.26","op":"148.96","s":1636678680000,"sym":"AAPL","v":7589,"vw":"148.2515","z":329}', + ], + [ + '1636678620000000000', + '{"a":"148.1672","av":41931260,"c":"148.27","e":1636678680000,"ev":"AM","h":"148.27","l":"148.25","o":"148.27","op":"148.96","s":1636678620000,"sym":"AAPL","v":6138,"vw":"148.2541","z":245}', + ], + [ + '1636678560000000000', + '{"a":"148.1672","av":41925122,"c":"148.28","e":1636678620000,"ev":"AM","h":"148.29","l":"148.27","o":"148.27","op":"148.96","s":1636678560000,"sym":"AAPL","v":1367,"vw":"148.2816","z":56}', + ], + [ + '1636678500000000000', + '{"a":"148.1672","av":41923755,"c":"148.25","e":1636678560000,"ev":"AM","h":"148.27","l":"148.25","o":"148.25","op":"148.96","s":1636678500000,"sym":"AAPL","v":556,"vw":"148.2539","z":55}', + ], + [ + '1636678440000000000', + '{"a":"148.1672","av":41923199,"c":"148.28","e":1636678500000,"ev":"AM","h":"148.28","l":"148.25","o":"148.25","op":"148.96","s":1636678440000,"sym":"AAPL","v":451,"vw":"148.2614","z":56}', + ], + [ + '1636678380000000000', + '{"a":"148.1672","av":41922748,"c":"148.24","e":1636678440000,"ev":"AM","h":"148.24","l":"148.24","o":"148.24","op":"148.96","s":1636678380000,"sym":"AAPL","v":344,"vw":"148.2521","z":24}', + ], + [ + '1636678320000000000', + '{"a":"148.1672","av":41922404,"c":"148.28","e":1636678380000,"ev":"AM","h":"148.28","l":"148.24","o":"148.24","op":"148.96","s":1636678320000,"sym":"AAPL","v":705,"vw":"148.2543","z":64}', + ], + [ + '1636678260000000000', + '{"a":"148.1672","av":41921699,"c":"148.25","e":1636678320000,"ev":"AM","h":"148.25","l":"148.25","o":"148.25","op":"148.96","s":1636678260000,"sym":"AAPL","v":1054,"vw":"148.2513","z":131}', + ], +]; diff --git a/packages/grafana-utils/src/grafana/transformers/extractFields/extractFields.ts b/packages/grafana-utils/src/grafana/transformers/extractFields/extractFields.ts new file mode 100644 index 0000000..812f938 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/extractFields/extractFields.ts @@ -0,0 +1,120 @@ +import { isString, get } from 'lodash'; +import { map } from 'rxjs/operators'; + +import { + DataFrame, + DataTransformerID, + Field, + FieldType, + getFieldTypeFromValue, + SynchronousDataTransformerInfo, +} from '@grafana/data'; +import { findField } from '../../utils/dimensions'; + +import { fieldExtractors } from './fieldExtractors'; +import { ExtractFieldsOptions, FieldExtractorID, JSONPath } from './types'; + +export const extractFieldsTransformer: SynchronousDataTransformerInfo = { + id: DataTransformerID.extractFields, + name: 'Extract fields', + description: 'Parse fields from the contends of another', + defaultOptions: {}, + + operator: (options, ctx) => (source) => + source.pipe(map((data) => extractFieldsTransformer.transformer(options, ctx)(data))), + + transformer: (options: ExtractFieldsOptions) => { + return (data: DataFrame[]) => { + return data.map((v) => addExtractedFields(v, options)); + }; + }, +}; + +function addExtractedFields(frame: DataFrame, options: ExtractFieldsOptions): DataFrame { + if 
(!options.source) { + return frame; + } + + const source = findField(frame, options.source); + + if (!source) { + // this case can happen when there are multiple queries + return frame; + } + + const ext = fieldExtractors.getIfExists(options.format ?? FieldExtractorID.Auto); + if (!ext) { + throw new Error('unkonwn extractor'); + } + + const count = frame.length; + const names: string[] = []; // keep order + const values = new Map(); + + for (let i = 0; i < count; i++) { + let obj = source.values[i]; + + if (isString(obj)) { + try { + obj = ext.parse(obj); + } catch { + obj = {}; // empty + } + } + + if (obj == null) { + continue; + } + + if (options.format === FieldExtractorID.JSON && options.jsonPaths && options.jsonPaths?.length > 0) { + const newObj: { [k: string]: unknown } = {}; + // filter out empty paths + const filteredPaths = options.jsonPaths.filter((path: JSONPath) => path.path); + + if (filteredPaths.length > 0) { + filteredPaths.forEach((path: JSONPath) => { + const key = path.alias && path.alias.length > 0 ? path.alias : path.path; + newObj[key] = get(obj, path.path) ?? 'Not Found'; + }); + + obj = newObj; + } + } + + for (const [key, val] of Object.entries(obj)) { + let buffer = values.get(key); + if (buffer == null) { + buffer = new Array(count); + values.set(key, buffer); + names.push(key); + } + buffer[i] = val; + } + } + + const fields = names.map((name) => { + const buffer = values.get(name); + return { + name, + values: buffer, + type: buffer ? getFieldTypeFromValue(buffer.find((v) => v != null)) : FieldType.other, + config: {}, + } as Field; + }); + + if (options.keepTime) { + const sourceTime = findField(frame, 'Time') || findField(frame, 'time'); + if (sourceTime) { + fields.unshift(sourceTime); + } + } + + if (!options.replace) { + fields.unshift(...frame.fields); + } + + return { + ...frame, + fields, + }; +} diff --git a/packages/grafana-utils/src/grafana/transformers/extractFields/fieldExtractor.test.ts b/packages/grafana-utils/src/grafana/transformers/extractFields/fieldExtractor.test.ts new file mode 100644 index 0000000..8cb5838 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/extractFields/fieldExtractor.test.ts @@ -0,0 +1,114 @@ +import { fieldExtractors } from './fieldExtractors'; +import { FieldExtractorID } from './types'; + +describe('Extract fields from text', () => { + it('JSON extractor', async () => { + const extractor = fieldExtractors.get(FieldExtractorID.JSON); + const out = extractor.parse('{"a":"148.1672","av":41923755,"c":148.25}'); + + expect(out).toMatchInlineSnapshot(` + { + "a": "148.1672", + "av": 41923755, + "c": 148.25, + } + `); + }); + + it('Test key-values with single/double quotes', async () => { + const extractor = fieldExtractors.get(FieldExtractorID.KeyValues); + const out = extractor.parse('a="1", "b"=\'2\',c=3 x:y ;\r\nz="d and 4"'); + expect(out).toMatchInlineSnapshot(` + { + "a": "1", + "b": "2", + "c": "3", + "x": "y", + "z": "d and 4", + } + `); + }); + + it('Test key-values with nested single/double quotes', async () => { + const extractor = fieldExtractors.get(FieldExtractorID.KeyValues); + const out = extractor.parse( + `a="1", "b"=\'2\',c=3 x:y ;\r\nz="dbl_quotes=\\"Double Quotes\\" sgl_quotes='Single Quotes'"` + ); + + expect(out).toMatchInlineSnapshot(` + { + "a": "1", + "b": "2", + "c": "3", + "x": "y", + "z": "dbl_quotes="Double Quotes" sgl_quotes='Single Quotes'", + } + `); + }); + + it('Test key-values with nested separator characters', async () => { + const extractor = 
fieldExtractors.get(FieldExtractorID.KeyValues); + const out = extractor.parse(`a="1", "b"=\'2\',c=3 x:y ;\r\nz="This is; testing& validating, 1=:2"`); + + expect(out).toMatchInlineSnapshot(` + { + "a": "1", + "b": "2", + "c": "3", + "x": "y", + "z": "This is; testing& validating, 1=:2", + } + `); + }); + + it('Test key-values where some values are null', async () => { + const extractor = fieldExtractors.get(FieldExtractorID.KeyValues); + const out = extractor.parse(`a=, "b"=\'2\',c=3 x: `); + + expect(out).toMatchInlineSnapshot(` + { + "a": "", + "b": "2", + "c": "3", + "x": "", + } + `); + }); + + it('Split key+values', async () => { + const extractor = fieldExtractors.get(FieldExtractorID.KeyValues); + const out = extractor.parse('a="1", "b"=\'2\',c=3 x:y ;\r\nz="7"'); + expect(out).toMatchInlineSnapshot(` + { + "a": "1", + "b": "2", + "c": "3", + "x": "y", + "z": "7", + } + `); + }); + + it('Split URL style parameters', async () => { + const extractor = fieldExtractors.get(FieldExtractorID.KeyValues); + const out = extractor.parse('a=b&c=d&x=123'); + expect(out).toMatchInlineSnapshot(` + { + "a": "b", + "c": "d", + "x": "123", + } + `); + }); + + it('Prometheus labels style (not really supported)', async () => { + const extractor = fieldExtractors.get(FieldExtractorID.KeyValues); + const out = extractor.parse('{foo="bar", baz="42"}'); + expect(out).toMatchInlineSnapshot(` + { + "baz": "42", + "foo": "bar", + } + `); + }); +}); diff --git a/packages/grafana-utils/src/grafana/transformers/extractFields/fieldExtractors.ts b/packages/grafana-utils/src/grafana/transformers/extractFields/fieldExtractors.ts new file mode 100644 index 0000000..ba8af2b --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/extractFields/fieldExtractors.ts @@ -0,0 +1,132 @@ +import { Registry, RegistryItem } from '@grafana/data'; + +import { FieldExtractorID } from './types'; + +export interface FieldExtractor extends RegistryItem { + parse: (v: string) => Record | undefined; +} + +const extJSON: FieldExtractor = { + id: FieldExtractorID.JSON, + name: 'JSON', + description: 'Parse JSON string', + parse: (v: string) => { + return JSON.parse(v); + }, +}; + +function parseKeyValuePairs(raw: string): Record { + const buff: string[] = []; // array of characters + let esc = ''; + let key = ''; + const obj: Record = {}; + for (let i = 0; i < raw.length; i++) { + let c = raw[i]; + if (c === esc) { + esc = ''; + c = raw[++i]; + } + + const isEscaped = c === '\\'; + if (isEscaped) { + c = raw[++i]; + } + + // When escaped just append + if (isEscaped || esc.length) { + buff.push(c); + continue; + } + + if (c === `"` || c === `'`) { + esc = c; + } + + switch (c) { + case ':': + case '=': + if (buff.length) { + if (key) { + obj[key] = ''; + } + key = buff.join(''); + buff.length = 0; // clear values + } + break; + + // escape chars + case `"`: + case `'`: + // whitespace + case ` `: + case `\n`: + case `\t`: + case `\r`: + case `\n`: + if (buff.length && key === '') { + obj[buff.join('')] = ''; + buff.length = 0; + } + // seperators + case ',': + case ';': + case '&': + case '{': + case '}': + if (buff.length) { + const val = buff.join(''); + if (key.length) { + obj[key] = val; + key = ''; + } else { + key = val; + } + buff.length = 0; // clear values + } + break; + + // append our buffer + default: + buff.push(c); + if (i === raw.length - 1) { + if (key === '' && buff.length) { + obj[buff.join('')] = ''; + buff.length = 0; + } + } + } + } + + if (key.length) { + obj[key] = buff.join(''); + } + return obj; +} + 
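A minimal sketch of consuming the registry assembled at the end of this file: the `Auto` extractor tries JSON first and only then falls back to the key/value parser above (the input strings here are illustrative):

```typescript
import { fieldExtractors } from './fieldExtractors';
import { FieldExtractorID } from './types';

const auto = fieldExtractors.get(FieldExtractorID.Auto);

// Valid JSON is handled by the JSON extractor...
auto.parse('{"status":"ok","count":3}'); // -> { status: 'ok', count: 3 }

// ...anything else falls through to the key/value parser implemented above.
auto.parse('status=ok count=3'); // -> { status: 'ok', count: '3' }
```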
+const extLabels: FieldExtractor = { + id: FieldExtractorID.KeyValues, + name: 'Key+value pairs', + description: 'Look for a=b, c: d values in the line', + parse: parseKeyValuePairs, +}; + +const fmts = [extJSON, extLabels]; + +const extAuto: FieldExtractor = { + id: FieldExtractorID.Auto, + name: 'Auto', + description: 'parse new fields automatically', + parse: (v: string) => { + for (const f of fmts) { + try { + const r = f.parse(v); + if (r != null) { + return r; + } + } catch {} // ignore errors + } + return undefined; + }, +}; + +export const fieldExtractors = new Registry(() => [...fmts, extAuto]); diff --git a/packages/grafana-utils/src/grafana/transformers/extractFields/types.ts b/packages/grafana-utils/src/grafana/transformers/extractFields/types.ts new file mode 100644 index 0000000..e0f63aa --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/extractFields/types.ts @@ -0,0 +1,17 @@ +export enum FieldExtractorID { + JSON = 'json', + KeyValues = 'kvp', + Auto = 'auto', +} + +export interface JSONPath { + path: string; + alias?: string; +} +export interface ExtractFieldsOptions { + source?: string; + jsonPaths?: JSONPath[]; + format?: FieldExtractorID; + replace?: boolean; + keepTime?: boolean; +} diff --git a/packages/grafana-utils/src/grafana/transformers/fieldToConfigMapping/fieldToConfigMapping.ts b/packages/grafana-utils/src/grafana/transformers/fieldToConfigMapping/fieldToConfigMapping.ts new file mode 100644 index 0000000..8eca18f --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/fieldToConfigMapping/fieldToConfigMapping.ts @@ -0,0 +1,345 @@ +import { isArray } from 'lodash'; + +import { + anyToNumber, + DataFrame, + FieldColorModeId, + FieldConfig, + getFieldDisplayName, + MappingType, + ReducerID, + ThresholdsMode, + ValueMapping, + ValueMap, + Field, + FieldType, +} from '@grafana/data'; + +export interface FieldToConfigMapping { + fieldName: string; + reducerId?: ReducerID; + handlerKey: string | null; +} + +/** + * Transforms a frame with fields to a map of field configs + * + * Input + * | Unit | Min | Max | + * -------------------------------- + * | Temperature | 0 | 30 | + * | Pressure | 0 | 100 | + * + * Outputs + * { + { min: 0, max: 100 }, + * } + */ + +export function getFieldConfigFromFrame( + frame: DataFrame, + rowIndex: number, + evaluatedMappings: EvaluatedMappingResult +): FieldConfig { + const config: FieldConfig = {}; + const context: FieldToConfigContext = {}; + + for (const field of frame.fields) { + const fieldName = getFieldDisplayName(field, frame); + const mapping = evaluatedMappings.index[fieldName]; + const handler = mapping.handler; + + if (!handler) { + continue; + } + + const configValue = field.values[rowIndex]; + + if (configValue === null || configValue === undefined) { + continue; + } + + const newValue = handler.processor(configValue, config, context); + if (newValue != null) { + (config as any)[handler.targetProperty ?? 
handler.key] = newValue; + } + } + + if (context.mappingValues) { + config.mappings = combineValueMappings(context); + } + + return config; +} + +interface FieldToConfigContext { + mappingValues?: any[]; + mappingColors?: string[]; + mappingTexts?: string[]; +} + +type FieldToConfigMapHandlerProcessor = (value: any, config: FieldConfig, context: FieldToConfigContext) => any; + +export interface FieldToConfigMapHandler { + key: string; + targetProperty?: string; + name?: string; + processor: FieldToConfigMapHandlerProcessor; + defaultReducer?: ReducerID; +} + +export enum FieldConfigHandlerKey { + Name = 'field.name', + Value = 'field.value', + Label = 'field.label', + Ignore = '__ignore', +} + +export const configMapHandlers: FieldToConfigMapHandler[] = [ + { + key: FieldConfigHandlerKey.Name, + name: 'Field name', + processor: () => {}, + }, + { + key: FieldConfigHandlerKey.Value, + name: 'Field value', + processor: () => {}, + }, + { + key: FieldConfigHandlerKey.Label, + name: 'Field label', + processor: () => {}, + }, + { + key: FieldConfigHandlerKey.Ignore, + name: 'Ignore', + processor: () => {}, + }, + { + key: 'max', + processor: toNumericOrUndefined, + }, + { + key: 'min', + processor: toNumericOrUndefined, + }, + { + key: 'unit', + processor: (value) => value.toString(), + }, + { + key: 'decimals', + processor: toNumericOrUndefined, + }, + { + key: 'displayName', + name: 'Display name', + processor: (value) => value.toString(), + }, + { + key: 'color', + processor: (value) => ({ fixedColor: value, mode: FieldColorModeId.Fixed }), + }, + { + key: 'threshold1', + targetProperty: 'thresholds', + processor: (value, config) => { + const numeric = anyToNumber(value); + + if (isNaN(numeric)) { + return; + } + + if (!config.thresholds) { + config.thresholds = { + mode: ThresholdsMode.Absolute, + steps: [{ value: -Infinity, color: 'green' }], + }; + } + + config.thresholds.steps.push({ + value: numeric, + color: 'red', + }); + + return config.thresholds; + }, + }, + { + key: 'mappings.value', + name: 'Value mappings / Value', + targetProperty: 'mappings', + defaultReducer: ReducerID.allValues, + processor: (value, config, context) => { + if (!isArray(value)) { + return; + } + + context.mappingValues = value; + return config.mappings; + }, + }, + { + key: 'mappings.color', + name: 'Value mappings / Color', + targetProperty: 'mappings', + defaultReducer: ReducerID.allValues, + processor: (value, config, context) => { + if (!isArray(value)) { + return; + } + + context.mappingColors = value; + return config.mappings; + }, + }, + { + key: 'mappings.text', + name: 'Value mappings / Display text', + targetProperty: 'mappings', + defaultReducer: ReducerID.allValues, + processor: (value, config, context) => { + if (!isArray(value)) { + return; + } + + context.mappingTexts = value; + return config.mappings; + }, + }, +]; + +function combineValueMappings(context: FieldToConfigContext): ValueMapping[] { + const valueMap: ValueMap = { + type: MappingType.ValueToText, + options: {}, + }; + + if (!context.mappingValues) { + return []; + } + + for (let i = 0; i < context.mappingValues.length; i++) { + const value = context.mappingValues[i]; + if (value != null) { + valueMap.options[value.toString()] = { + color: context.mappingColors && context.mappingColors[i], + text: context.mappingTexts && context.mappingTexts[i], + index: i, + }; + } + } + + return [valueMap]; +} + +let configMapHandlersIndex: Record | null = null; + +export function getConfigMapHandlersIndex() { + if (configMapHandlersIndex === 
null) { + configMapHandlersIndex = {}; + for (const def of configMapHandlers) { + configMapHandlersIndex[def.key] = def; + } + } + + return configMapHandlersIndex; +} + +function toNumericOrUndefined(value: unknown) { + const numeric = anyToNumber(value); + + if (isNaN(numeric)) { + return; + } + + return numeric; +} + +export function getConfigHandlerKeyForField(fieldName: string, mappings: FieldToConfigMapping[]) { + for (const map of mappings) { + if (fieldName === map.fieldName) { + return map.handlerKey; + } + } + + return fieldName.toLowerCase(); +} + +export function lookUpConfigHandler(key: string | null): FieldToConfigMapHandler | null { + if (!key) { + return null; + } + + return getConfigMapHandlersIndex()[key]; +} + +export interface EvaluatedMapping { + automatic: boolean; + handler: FieldToConfigMapHandler | null; + reducerId: ReducerID; +} +export interface EvaluatedMappingResult { + index: Record; + nameField?: Field; + valueField?: Field; +} + +export function evaluteFieldMappings( + frame: DataFrame, + mappings: FieldToConfigMapping[], + withNameAndValue?: boolean +): EvaluatedMappingResult { + const result: EvaluatedMappingResult = { + index: {}, + }; + + // Look up name and value field in mappings + let nameFieldMappping = mappings.find((x) => x.handlerKey === FieldConfigHandlerKey.Name); + let valueFieldMapping = mappings.find((x) => x.handlerKey === FieldConfigHandlerKey.Value); + + for (const field of frame.fields) { + const fieldName = getFieldDisplayName(field, frame); + const mapping = mappings.find((x) => x.fieldName === fieldName); + const key = mapping ? mapping.handlerKey : fieldName.toLowerCase(); + let handler = lookUpConfigHandler(key); + + // Name and value handlers are a special as their auto logic is based on first matching criteria + if (withNameAndValue) { + // If we have a handler it means manually specified field + if (handler) { + if (handler.key === FieldConfigHandlerKey.Name) { + result.nameField = field; + } + if (handler.key === FieldConfigHandlerKey.Value) { + result.valueField = field; + } + } else if (!mapping) { + // We have no name field and no mapping for it, pick first string + if (!result.nameField && !nameFieldMappping && field.type === FieldType.string) { + result.nameField = field; + handler = lookUpConfigHandler(FieldConfigHandlerKey.Name); + } + + if (!result.valueField && !valueFieldMapping && field.type === FieldType.number) { + result.valueField = field; + handler = lookUpConfigHandler(FieldConfigHandlerKey.Value); + } + } + } + + // If no handle and when in name and value mode (Rows to fields) default to labels + if (!handler && withNameAndValue) { + handler = lookUpConfigHandler(FieldConfigHandlerKey.Label); + } + + result.index[fieldName] = { + automatic: !mapping, + handler: handler, + reducerId: mapping?.reducerId ?? handler?.defaultReducer ?? 
ReducerID.lastNotNull, + }; + } + + return result; +} diff --git a/packages/grafana-utils/src/grafana/transformers/joinByLabels/joinByLabels.test.ts b/packages/grafana-utils/src/grafana/transformers/joinByLabels/joinByLabels.test.ts new file mode 100644 index 0000000..d2d0574 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/joinByLabels/joinByLabels.test.ts @@ -0,0 +1,152 @@ +import { toDataFrame, FieldType, DataFrame } from '@grafana/data'; + +import { joinByLabels } from './joinByLabels'; + +describe('Join by labels', () => { + it('Simple join', () => { + const input = [ + toDataFrame({ + fields: [ + { name: 'Time', type: FieldType.time, values: [1, 2] }, + { + name: 'Value', + type: FieldType.number, + config: { + displayNameFromDS: '111', + }, + values: [10, 200], + labels: { what: 'Temp', cluster: 'A', job: 'J1' }, + }, + ], + }), + toDataFrame({ + fields: [ + { name: 'Time', type: FieldType.time, values: [1, 2] }, + { + name: 'Value', + type: FieldType.number, + config: { + displayNameFromDS: '222', + }, + values: [10, 200], + labels: { what: 'Temp', cluster: 'B', job: 'J1' }, + }, + ], + }), + toDataFrame({ + fields: [ + { name: 'Time', type: FieldType.time, values: [22, 28] }, + { + name: 'Value', + type: FieldType.number, + config: { + displayNameFromDS: '333', + }, + values: [22, 77], + labels: { what: 'Speed', cluster: 'B', job: 'J1' }, + }, + ], + }), + ]; + + const result = joinByLabels( + { + value: 'what', + }, + input + ); + expect(result.fields[result.fields.length - 1].config).toMatchInlineSnapshot(`{}`); + expect(toRowsSnapshow(result)).toMatchInlineSnapshot(` + { + "columns": [ + "cluster", + "job", + "Temp", + "Speed", + ], + "rows": [ + [ + "A", + "J1", + 10, + undefined, + ], + [ + "A", + "J1", + 200, + undefined, + ], + [ + "B", + "J1", + 10, + 22, + ], + [ + "B", + "J1", + 200, + 77, + ], + ], + } + `); + }); + + it('Error handling (no labels)', () => { + const input = [ + toDataFrame({ + fields: [ + { name: 'Time', type: FieldType.time, values: [1, 2] }, + { + name: 'Value', + type: FieldType.number, + values: [10, 200], + }, + ], + }), + ]; + + const result = joinByLabels( + { + value: 'what', + }, + input + ); + expect(result).toMatchInlineSnapshot(` + { + "fields": [ + { + "config": {}, + "name": "Error", + "type": "string", + "values": [ + "No labels in result", + ], + }, + ], + "length": 0, + "meta": { + "notices": [ + { + "severity": "error", + "text": "No labels in result", + }, + ], + }, + } + `); + }); +}); + +function toRowsSnapshow(frame: DataFrame) { + const columns = frame.fields.map((f) => f.name); + const rows = frame.fields[0].values.map((v, idx) => { + return frame.fields.map((f) => f.values[idx]); + }); + return { + columns, + rows, + }; +} diff --git a/packages/grafana-utils/src/grafana/transformers/joinByLabels/joinByLabels.ts b/packages/grafana-utils/src/grafana/transformers/joinByLabels/joinByLabels.ts new file mode 100644 index 0000000..463c6d5 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/joinByLabels/joinByLabels.ts @@ -0,0 +1,138 @@ +import { map } from 'rxjs/operators'; + +import { DataFrame, DataTransformerID, Field, FieldType, SynchronousDataTransformerInfo } from '@grafana/data'; + +import { getDistinctLabels } from '../utils'; + +export interface JoinByLabelsTransformOptions { + value: string; // something must be defined + join?: string[]; +} + +export const joinByLabelsTransformer: SynchronousDataTransformerInfo = { + id: DataTransformerID.joinByLabels, + name: 'Join by labels', + 
description: 'Flatten labeled results into a table joined by labels.', + defaultOptions: {}, + + operator: (options, ctx) => (source) => + source.pipe(map((data) => joinByLabelsTransformer.transformer(options, ctx)(data))), + + transformer: (options: JoinByLabelsTransformOptions) => { + return (data: DataFrame[]) => { + if (!data || !data.length) { + return data; + } + return [joinByLabels(options, data)]; + }; + }, +}; + +interface JoinValues { + keys: string[]; + values: Record; +} + +export function joinByLabels(options: JoinByLabelsTransformOptions, data: DataFrame[]): DataFrame { + if (!options.value?.length) { + return getErrorFrame('No value labele configured'); + } + const distinctLabels = getDistinctLabels(data); + if (distinctLabels.size < 1) { + return getErrorFrame('No labels in result'); + } + if (!distinctLabels.has(options.value)) { + return getErrorFrame('Value label not found'); + } + + let join = options.join?.length ? options.join : Array.from(distinctLabels); + join = join.filter((f) => f !== options.value); + + const names = new Set(); + const found = new Map(); + const inputFields: Record = {}; + for (const frame of data) { + for (const field of frame.fields) { + if (field.labels && field.type !== FieldType.time) { + const keys = join.map((v) => field.labels![v]); + const key = keys.join(','); + let item = found.get(key); + if (!item) { + item = { + keys, + values: {}, + }; + found.set(key, item); + } + const name = field.labels[options.value]; + const vals = field.values; + const old = item.values[name]; + if (old) { + item.values[name] = old.concat(vals); + } else { + item.values[name] = vals; + } + if (!inputFields[name]) { + inputFields[name] = field; // keep the config + } + names.add(name); + } + } + } + + const allNames = Array.from(names); + const joinValues = join.map((): string[] => []); + const nameValues = allNames.map((): number[] => []); + + for (const item of found.values()) { + let valueOffset = -1; + let done = false; + while (!done) { + valueOffset++; + done = true; + for (let i = 0; i < join.length; i++) { + joinValues[i].push(item.keys[i]); + } + for (let i = 0; i < allNames.length; i++) { + const name = allNames[i]; + const values = item.values[name] ?? []; + nameValues[i].push(values[valueOffset]); + if (values.length > valueOffset + 1) { + done = false; + } + } + } + } + + const frame: DataFrame = { fields: [], length: nameValues[0].length }; + for (let i = 0; i < join.length; i++) { + frame.fields.push({ + name: join[i], + config: {}, + type: FieldType.string, + values: joinValues[i], + }); + } + + for (let i = 0; i < allNames.length; i++) { + const old = inputFields[allNames[i]]; + frame.fields.push({ + name: allNames[i], + config: {}, + type: old.type ?? 
FieldType.number, + values: nameValues[i], + }); + } + + return frame; +} + +function getErrorFrame(text: string): DataFrame { + return { + meta: { + notices: [{ severity: 'error', text }], + }, + fields: [{ name: 'Error', type: FieldType.string, config: {}, values: [text] }], + length: 0, + }; +} diff --git a/packages/grafana-utils/src/grafana/transformers/partitionByValues/partition.ts b/packages/grafana-utils/src/grafana/transformers/partitionByValues/partition.ts new file mode 100644 index 0000000..3980e5a --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/partitionByValues/partition.ts @@ -0,0 +1,52 @@ +type Idxs = number[]; +type KeyMap = Map; +type Accum = Idxs[]; + +/** The totally type-aware flavor is much slower, so we prefer to disable the lint rule in this case */ +/* eslint-disable @typescript-eslint/consistent-type-assertions */ +const digArrs = (map: KeyMap | Idxs, depth: number, acc: Accum = []) => { + // the leaf nodes are always Idxs + if (depth === 0) { + acc.push(map as Idxs); + } + // the branch nodes are always KeyMaps + else { + (map as KeyMap).forEach((v) => { + digArrs(v, depth - 1, acc); + }); + } + + return acc; +}; + +// in: [['a','b','z','b'], ['c','c','x','c']] +// out: [[0], [1,3], [2]] +export function partition(keys: unknown[][]) { + const len = keys[0].length; + const klen = keys.length; + + const rootMap: KeyMap = new Map(); + + for (let i = 0; i < len; i++) { + let cur: KeyMap | Idxs = rootMap; + + for (let j = 0; j < klen; j++) { + let key = keys[j][i]; + + let next: KeyMap | Idxs | undefined = (cur as KeyMap).get(key); + + if (next == null) { + next = j === klen - 1 ? [] : new Map(); + (cur as KeyMap).set(key, next); + } + + cur = next; + } + + (cur as Idxs).push(i); + } + + return digArrs(rootMap, klen); +} + +/* eslint-enable @typescript-eslint/consistent-type-assertions */ diff --git a/packages/grafana-utils/src/grafana/transformers/partitionByValues/partitionByValues.test.ts b/packages/grafana-utils/src/grafana/transformers/partitionByValues/partitionByValues.test.ts new file mode 100644 index 0000000..3741ff8 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/partitionByValues/partitionByValues.test.ts @@ -0,0 +1,267 @@ +import { toDataFrame, FieldType } from '@grafana/data'; + +import { partitionByValuesTransformer, PartitionByValuesTransformerOptions } from './partitionByValues'; + +const ctx = { + interpolate: (v: string) => v, +}; + +describe('Partition by values transformer', () => { + it('should partition by one field', () => { + const source = [ + toDataFrame({ + name: 'XYZ', + refId: 'A', + fields: [ + { name: 'model', type: FieldType.string, values: ['E1', 'E2', 'C1', 'E3', 'C2', 'C3'] }, + { name: 'region', type: FieldType.string, values: ['Europe', 'Europe', 'China', 'Europe', 'China', 'China'] }, + ], + }), + ]; + + const config: PartitionByValuesTransformerOptions = { + fields: ['region'], + keepFields: true, + naming: { + asLabels: false, + }, + }; + + let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source); + + expect(partitioned.length).toEqual(2); + + expect(partitioned[0].length).toEqual(3); + expect(partitioned[0].name).toEqual('Europe'); + expect(partitioned[0].fields[0].name).toEqual('model'); + expect(partitioned[0].fields[1].name).toEqual('region'); + expect(partitioned[0].fields[0].values).toEqual(['E1', 'E2', 'E3']); + expect(partitioned[0].fields[1].values).toEqual(['Europe', 'Europe', 'Europe']); + + expect(partitioned[1].length).toEqual(3); + 
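+    // second partition: the China rows, with the original row order preserved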
expect(partitioned[1].name).toEqual('China'); + expect(partitioned[1].fields[0].name).toEqual('model'); + expect(partitioned[1].fields[1].name).toEqual('region'); + expect(partitioned[1].fields[0].values).toEqual(['C1', 'C2', 'C3']); + expect(partitioned[1].fields[1].values).toEqual(['China', 'China', 'China']); + }); + + it('should partition by multiple fields', () => { + const source = [ + toDataFrame({ + name: 'XYZ', + refId: 'A', + fields: [ + { name: 'model', type: FieldType.string, values: ['E1', 'E2', 'C1', 'E3', 'C2', 'C3'] }, + { name: 'region', type: FieldType.string, values: ['Europe', 'Europe', 'China', 'Europe', 'China', 'China'] }, + { name: 'status', type: FieldType.string, values: ['OK', 'FAIL', 'OK', 'FAIL', 'OK', 'FAIL'] }, + ], + }), + ]; + + const config: PartitionByValuesTransformerOptions = { + fields: ['region', 'status'], + keepFields: true, + naming: { + asLabels: false, + }, + }; + + let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source); + + expect(partitioned.length).toEqual(4); + + expect(partitioned[0].length).toEqual(1); + expect(partitioned[0].name).toEqual('Europe OK'); + expect(partitioned[0].fields[0].name).toEqual('model'); + expect(partitioned[0].fields[1].name).toEqual('region'); + expect(partitioned[0].fields[2].name).toEqual('status'); + expect(partitioned[0].fields[0].values).toEqual(['E1']); + expect(partitioned[0].fields[1].values).toEqual(['Europe']); + expect(partitioned[0].fields[2].values).toEqual(['OK']); + + expect(partitioned[1].length).toEqual(2); + expect(partitioned[1].name).toEqual('Europe FAIL'); + expect(partitioned[1].fields[0].name).toEqual('model'); + expect(partitioned[1].fields[1].name).toEqual('region'); + expect(partitioned[1].fields[2].name).toEqual('status'); + expect(partitioned[1].fields[0].values).toEqual(['E2', 'E3']); + expect(partitioned[1].fields[1].values).toEqual(['Europe', 'Europe']); + expect(partitioned[1].fields[2].values).toEqual(['FAIL', 'FAIL']); + + expect(partitioned[2].length).toEqual(2); + expect(partitioned[2].name).toEqual('China OK'); + expect(partitioned[2].fields[0].name).toEqual('model'); + expect(partitioned[2].fields[1].name).toEqual('region'); + expect(partitioned[2].fields[2].name).toEqual('status'); + expect(partitioned[2].fields[0].values).toEqual(['C1', 'C2']); + expect(partitioned[2].fields[1].values).toEqual(['China', 'China']); + expect(partitioned[2].fields[2].values).toEqual(['OK', 'OK']); + + expect(partitioned[3].length).toEqual(1); + expect(partitioned[3].name).toEqual('China FAIL'); + expect(partitioned[3].fields[0].name).toEqual('model'); + expect(partitioned[3].fields[1].name).toEqual('region'); + expect(partitioned[3].fields[2].name).toEqual('status'); + expect(partitioned[3].fields[0].values).toEqual(['C3']); + expect(partitioned[3].fields[1].values).toEqual(['China']); + expect(partitioned[3].fields[2].values).toEqual(['FAIL']); + }); + + it('should partition by multiple fields with custom frame naming {withNames: true}', () => { + const source = [ + toDataFrame({ + name: 'XYZ', + refId: 'A', + fields: [ + { name: 'model', type: FieldType.string, values: ['E1', 'E2', 'C1', 'E3', 'C2', 'C3'] }, + { name: 'region', type: FieldType.string, values: ['Europe', 'Europe', 'China', 'Europe', 'China', 'China'] }, + { name: 'status', type: FieldType.string, values: ['OK', 'FAIL', 'OK', 'FAIL', 'OK', 'FAIL'] }, + ], + }), + ]; + + const config: PartitionByValuesTransformerOptions = { + fields: ['region', 'status'], + keepFields: true, + naming: { + asLabels: false, 
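+          // prefix each value with its field name, e.g. 'region=Europe status=OK'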
+ withNames: true, + }, + }; + + let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source); + + expect(partitioned[0].name).toEqual('region=Europe status=OK'); + expect(partitioned[1].name).toEqual('region=Europe status=FAIL'); + expect(partitioned[2].name).toEqual('region=China status=OK'); + expect(partitioned[3].name).toEqual('region=China status=FAIL'); + }); + + it('should partition by multiple fields with custom frame naming {append: true}', () => { + const source = [ + toDataFrame({ + name: 'XYZ', + refId: 'A', + fields: [ + { name: 'model', type: FieldType.string, values: ['E1', 'E2', 'C1', 'E3', 'C2', 'C3'] }, + { name: 'region', type: FieldType.string, values: ['Europe', 'Europe', 'China', 'Europe', 'China', 'China'] }, + { name: 'status', type: FieldType.string, values: ['OK', 'FAIL', 'OK', 'FAIL', 'OK', 'FAIL'] }, + ], + }), + ]; + + const config: PartitionByValuesTransformerOptions = { + fields: ['region', 'status'], + keepFields: true, + naming: { + asLabels: false, + append: true, + }, + }; + + let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source); + + expect(partitioned[0].name).toEqual('XYZ Europe OK'); + expect(partitioned[1].name).toEqual('XYZ Europe FAIL'); + expect(partitioned[2].name).toEqual('XYZ China OK'); + expect(partitioned[3].name).toEqual('XYZ China FAIL'); + }); + + it('should partition by multiple fields with custom frame naming {withNames: true, append: true}', () => { + const source = [ + toDataFrame({ + name: 'XYZ', + refId: 'A', + fields: [ + { name: 'model', type: FieldType.string, values: ['E1', 'E2', 'C1', 'E3', 'C2', 'C3'] }, + { name: 'region', type: FieldType.string, values: ['Europe', 'Europe', 'China', 'Europe', 'China', 'China'] }, + { name: 'status', type: FieldType.string, values: ['OK', 'FAIL', 'OK', 'FAIL', 'OK', 'FAIL'] }, + ], + }), + ]; + + const config: PartitionByValuesTransformerOptions = { + fields: ['region', 'status'], + keepFields: true, + naming: { + asLabels: false, + withNames: true, + append: true, + }, + }; + + let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source); + + expect(partitioned[0].name).toEqual('XYZ region=Europe status=OK'); + expect(partitioned[1].name).toEqual('XYZ region=Europe status=FAIL'); + expect(partitioned[2].name).toEqual('XYZ region=China status=OK'); + expect(partitioned[3].name).toEqual('XYZ region=China status=FAIL'); + }); + + it('should partition by multiple fields naming: {asLabels: true}', () => { + const source = [ + toDataFrame({ + name: 'XYZ', + refId: 'A', + fields: [ + { name: 'model', type: FieldType.string, values: ['E1', 'E2', 'C1', 'E3', 'C2', 'C3'] }, + { name: 'region', type: FieldType.string, values: ['Europe', 'Europe', 'China', 'Europe', 'China', 'China'] }, + { name: 'status', type: FieldType.string, values: ['OK', 'FAIL', 'OK', 'FAIL', 'OK', 'FAIL'] }, + ], + }), + ]; + + const config: PartitionByValuesTransformerOptions = { + fields: ['region', 'status'], + keepFields: true, + naming: { + asLabels: true, + }, + }; + + let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source); + + // all frame names are same + expect(partitioned[0].name).toEqual('XYZ'); + expect(partitioned[1].name).toEqual('XYZ'); + expect(partitioned[2].name).toEqual('XYZ'); + expect(partitioned[3].name).toEqual('XYZ'); + + // all frames contain all fields + expect(partitioned[0].fields[0].name).toEqual('model'); + expect(partitioned[0].fields[1].name).toEqual('region'); + 
expect(partitioned[0].fields[2].name).toEqual('status'); + + // in each frame, every field has same labels + expect(partitioned[0].fields[0].labels).toEqual({ region: 'Europe', status: 'OK' }); + expect(partitioned[1].fields[0].labels).toEqual({ region: 'Europe', status: 'FAIL' }); + expect(partitioned[2].fields[0].labels).toEqual({ region: 'China', status: 'OK' }); + expect(partitioned[3].fields[0].labels).toEqual({ region: 'China', status: 'FAIL' }); + }); + + it('should partition by multiple fields and omit those fields in result', () => { + const source = [ + toDataFrame({ + name: 'XYZ', + refId: 'A', + fields: [ + { name: 'model', type: FieldType.string, values: ['E1', 'E2', 'C1', 'E3', 'C2', 'C3'] }, + { name: 'region', type: FieldType.string, values: ['Europe', 'Europe', 'China', 'Europe', 'China', 'China'] }, + { name: 'status', type: FieldType.string, values: ['OK', 'FAIL', 'OK', 'FAIL', 'OK', 'FAIL'] }, + ], + }), + ]; + + const config: PartitionByValuesTransformerOptions = { + fields: ['region', 'status'], + }; + + let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source); + + // all frames contain only model field + expect(partitioned[0].fields.length).toEqual(1); + expect(partitioned[0].fields[0].name).toEqual('model'); + expect(partitioned[0].fields[0].labels).toEqual({ region: 'Europe', status: 'OK' }); + }); +}); diff --git a/packages/grafana-utils/src/grafana/transformers/partitionByValues/partitionByValues.ts b/packages/grafana-utils/src/grafana/transformers/partitionByValues/partitionByValues.ts new file mode 100644 index 0000000..0b4b781 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/partitionByValues/partitionByValues.ts @@ -0,0 +1,171 @@ +import { map } from 'rxjs'; + +import { + DataFrame, + DataTransformerID, + SynchronousDataTransformerInfo, + getFieldMatcher, + DataTransformContext, + FieldMatcher, + standardTransformers, +} from '@grafana/data'; +import { getMatcherConfig } from '../../utils/filterByName'; + +import { partition } from './partition'; + +const noopTransformer = standardTransformers.noopTransformer; + +export interface FrameNamingOptions { + /** when true, the frame name is copied unmodified, and discriminator fields' names+values become field labels in new frames */ + asLabels?: boolean; + + /** opts below are used only when asLabels: false */ + + /** whether to append to existing frame name, false -> replace */ + append?: boolean; // false + /** whether to include discriminator field names, e.g. true -> Region=Europe Profession=Chef, false -> 'Europe Chef' */ + withNames?: boolean; // false + /** name/value separator, e.g. '=' in 'Region=Europe' */ + separator1?: string; + /** name/value pair separator, e.g. ' ' in 'Region=Europe Profession=Chef' */ + separator2?: string; +} + +const defaultFrameNameOptions: FrameNamingOptions = { + asLabels: true, + + append: false, + withNames: false, + separator1: '=', + separator2: ' ', +}; + +export interface PartitionByValuesTransformerOptions { + /** field names whose values should be used as discriminator keys (typically enum fields) */ + fields: string[]; + /** how the split frames' names should be suffixed (ends up as field prefixes) */ + naming?: FrameNamingOptions; + /** should the discriminator fields be kept in the output */ + keepFields?: boolean; +} + +function buildFrameName(opts: FrameNamingOptions, names: string[], values: unknown[]): string { + return names + .map((name, i) => (opts.withNames ? 
`${name}${opts.separator1}${values[i]}` : values[i])) + .join(opts.separator2); +} + +function buildFieldLabels(names: string[], values: unknown[]) { + const labels: Record = {}; + + names.forEach((name, i) => { + labels[name] = String(values[i]); + }); + + return labels; +} + +export const partitionByValuesTransformer: SynchronousDataTransformerInfo = { + id: DataTransformerID.partitionByValues, + name: 'Partition by values', + description: `Splits a one-frame dataset into multiple series discriminated by unique/enum values in one or more fields.`, + defaultOptions: {}, + + operator: (options, ctx) => (source) => + source.pipe(map((data) => partitionByValuesTransformer.transformer(options, ctx)(data))), + + transformer: (options: PartitionByValuesTransformerOptions, ctx: DataTransformContext) => { + const matcherConfig = getMatcherConfig(ctx, { names: options.fields }); + + if (!matcherConfig) { + return noopTransformer.transformer({}, ctx); + } + + const matcher = getFieldMatcher(matcherConfig); + + return (data: DataFrame[]) => { + if (!data.length) { + return data; + } + // error if > 1 frame? + return partitionByValues(data[0], matcher, options); + }; + }, +}; + +// Split a single frame dataset into multiple frames based on values in a set of fields +export function partitionByValues( + frame: DataFrame, + matcher: FieldMatcher, + options?: PartitionByValuesTransformerOptions +): DataFrame[] { + const keyFields = frame.fields.filter((f) => matcher(f, frame, [frame]))!; + + if (!keyFields.length) { + return [frame]; + } + + const keyFieldsVals = keyFields.map((f) => f.values); + const names = keyFields.map((f) => f.name); + + const frameNameOpts = { + ...defaultFrameNameOptions, + ...options?.naming, + }; + + return partition(keyFieldsVals).map((idxs: number[]) => { + let frameName = frame.name; + let fieldLabels = {}; + + if (frameNameOpts.asLabels) { + fieldLabels = buildFieldLabels( + names, + keyFields.map((f, i) => keyFieldsVals[i][idxs[0]]) + ); + } else { + let name = buildFrameName( + frameNameOpts, + names, + keyFields.map((f, i) => keyFieldsVals[i][idxs[0]]) + ); + + if (frameNameOpts?.append && frame.name) { + name = `${frame.name} ${name}`; + } + + frameName = name; + } + + let filteredFields = frame.fields; + + if (!options?.keepFields) { + const keyFieldNames = new Set(names); + filteredFields = frame.fields.filter((field) => !keyFieldNames.has(field.name)); + } + + return { + name: frameName, + meta: frame.meta, + length: idxs.length, + fields: filteredFields.map((f) => { + const vals = f.values; + const vals2 = Array(idxs.length); + + for (let i = 0; i < idxs.length; i++) { + vals2[i] = vals[idxs[i]]; + } + + return { + name: f.name, + type: f.type, + config: f.config, + labels: { + ...f.labels, + ...fieldLabels, + }, + values: vals2, + }; + }), + }; + }); +} diff --git a/packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/__snapshots__/prepareTimeSeries.test.ts.snap b/packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/__snapshots__/prepareTimeSeries.test.ts.snap new file mode 100644 index 0000000..c368e43 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/__snapshots__/prepareTimeSeries.test.ts.snap @@ -0,0 +1,142 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Prepare time series transformer should handle long to multi 1`] = ` +[ + { + "fields": [ + { + "config": {}, + "name": "time", + "type": "time", + "values": [ + 1, + 2, + 3, + ], + }, + { + "config": {}, + "labels": { + "sensor": "a", + 
}, + "name": "speed", + "type": "number", + "values": [ + 4, + 6, + 8, + ], + }, + ], + "length": 3, + "meta": { + "type": "timeseries-multi", + }, + "name": undefined, + "refId": "A", + }, + { + "fields": [ + { + "config": {}, + "name": "time", + "type": "time", + "values": [ + 1, + 2, + 3, + ], + }, + { + "config": {}, + "labels": { + "sensor": "b", + }, + "name": "speed", + "type": "number", + "values": [ + 5, + 7, + 9, + ], + }, + ], + "length": 3, + "meta": { + "type": "timeseries-multi", + }, + "name": undefined, + "refId": "A", + }, +] +`; + +exports[`Prepare time series transformer should handle long to wide 1`] = ` +[ + { + "fields": [ + { + "config": {}, + "labels": { + "sensor": "a", + }, + "name": "time", + "state": { + "origin": { + "fieldIndex": 0, + "frameIndex": 0, + }, + }, + "type": "time", + "values": [ + 1, + 2, + 3, + ], + }, + { + "config": {}, + "labels": { + "sensor": "a", + }, + "name": "speed", + "state": { + "origin": { + "fieldIndex": 1, + "frameIndex": 0, + }, + }, + "type": "number", + "values": [ + 4, + 6, + 8, + ], + }, + { + "config": {}, + "labels": { + "sensor": "b", + }, + "name": "speed", + "state": { + "origin": { + "fieldIndex": 1, + "frameIndex": 1, + }, + }, + "type": "number", + "values": [ + 5, + 7, + 9, + ], + }, + ], + "length": 3, + "meta": { + "type": "timeseries-wide", + }, + }, +] +`; diff --git a/packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/prepareTimeSeries.test.ts b/packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/prepareTimeSeries.test.ts new file mode 100644 index 0000000..c27ea9c --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/prepareTimeSeries.test.ts @@ -0,0 +1,433 @@ +import { + toDataFrame, + DataFrame, + FieldType, + toDataFrameDTO, + DataFrameDTO, + DataFrameType, + getFrameDisplayName, +} from '@grafana/data'; + +import { prepareTimeSeriesTransformer, PrepareTimeSeriesOptions, timeSeriesFormat } from './prepareTimeSeries'; + +const ctx = { + interpolate: (v: string) => v, +}; + +describe('Prepare time series transformer', () => { + it('should transform wide to multi', () => { + const source = [ + toDataFrame({ + name: 'wide', + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3, 4, 5, 6] }, + { name: 'count', type: FieldType.number, values: [10, 20, 30, 40, 50, 60] }, + { name: 'more', type: FieldType.number, values: [2, 3, 4, 5, 6, 7] }, + ], + }), + ]; + + const config: PrepareTimeSeriesOptions = { + format: timeSeriesFormat.TimeSeriesMulti, + }; + + expect(prepareTimeSeriesTransformer.transformer(config, ctx)(source)).toEqual([ + toEquableDataFrame({ + name: 'wide', + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3, 4, 5, 6] }, + { name: 'count', type: FieldType.number, values: [10, 20, 30, 40, 50, 60] }, + ], + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + length: 6, + }), + toEquableDataFrame({ + name: 'wide', + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3, 4, 5, 6] }, + { name: 'more', type: FieldType.number, values: [2, 3, 4, 5, 6, 7] }, + ], + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + length: 6, + }), + ]); + }); + + it('should treat string fields as labels', () => { + const source = [ + toDataFrame({ + name: 'wide', + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 1, 2, 2] }, + { name: 'region', type: FieldType.string, values: ['a', 'b', 'a', 'b'] }, + { name: 'count', type: FieldType.number, values: 
[10, 20, 30, 40] }, + { name: 'more', type: FieldType.number, values: [2, 3, 4, 5] }, + ], + }), + ]; + + const config: PrepareTimeSeriesOptions = { + format: timeSeriesFormat.TimeSeriesMulti, + }; + + const frames = prepareTimeSeriesTransformer.transformer(config, ctx)(source); + expect(frames.length).toEqual(4); + expect( + frames.map((f) => ({ + name: getFrameDisplayName(f), + labels: f.fields[1].labels, + time: f.fields[0].values, + values: f.fields[1].values, + })) + ).toMatchInlineSnapshot(` + [ + { + "labels": { + "region": "a", + }, + "name": "wide", + "time": [ + 1, + 2, + ], + "values": [ + 10, + 30, + ], + }, + { + "labels": { + "region": "b", + }, + "name": "wide", + "time": [ + 1, + 2, + ], + "values": [ + 20, + 40, + ], + }, + { + "labels": { + "region": "a", + }, + "name": "wide", + "time": [ + 1, + 2, + ], + "values": [ + 2, + 4, + ], + }, + { + "labels": { + "region": "b", + }, + "name": "wide", + "time": [ + 1, + 2, + ], + "values": [ + 3, + 5, + ], + }, + ] + `); + }); + + it('should transform all wide to multi when mixed', () => { + const source = [ + toDataFrame({ + name: 'wide', + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [0, 1, 2, 3, 4, 5] }, + { name: 'count', type: FieldType.number, values: [10, 20, 30, 40, 50, 60] }, + { name: 'another', type: FieldType.number, values: [2, 3, 4, 5, 6, 7] }, + ], + }), + toDataFrame({ + name: 'long', + refId: 'B', + fields: [ + { name: 'time', type: FieldType.time, values: [4, 5, 6, 7, 8, 9] }, + { name: 'value', type: FieldType.number, values: [2, 3, 4, 5, 6, 7] }, + ], + }), + ]; + + const config: PrepareTimeSeriesOptions = { + format: timeSeriesFormat.TimeSeriesMulti, + }; + + expect(prepareTimeSeriesTransformer.transformer(config, ctx)(source)).toEqual([ + toEquableDataFrame({ + name: 'wide', + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [0, 1, 2, 3, 4, 5] }, + { name: 'another', type: FieldType.number, values: [2, 3, 4, 5, 6, 7] }, + ], + length: 6, + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + }), + toEquableDataFrame({ + name: 'wide', + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [0, 1, 2, 3, 4, 5] }, + { name: 'count', type: FieldType.number, values: [10, 20, 30, 40, 50, 60] }, + ], + length: 6, + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + }), + toEquableDataFrame({ + name: 'long', + refId: 'B', + fields: [ + { name: 'time', type: FieldType.time, values: [4, 5, 6, 7, 8, 9] }, + { name: 'value', type: FieldType.number, values: [2, 3, 4, 5, 6, 7] }, + ], + length: 6, + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + }), + ]); + }); + + it('should transform none when source only has long frames', () => { + const source = [ + toDataFrame({ + name: 'long', + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3, 4, 5, 6] }, + { name: 'count', type: FieldType.number, values: [10, 20, 30, 40, 50, 60] }, + ], + }), + toDataFrame({ + name: 'long', + refId: 'B', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3, 4, 5, 6] }, + { name: 'count', type: FieldType.number, values: [10, 20, 30, 40, 50, 60] }, + ], + }), + ]; + + const config: PrepareTimeSeriesOptions = { + format: timeSeriesFormat.TimeSeriesMulti, + }; + + expect(toEquableDataFrames(prepareTimeSeriesTransformer.transformer(config, ctx)(source))).toEqual( + toEquableDataFrames( + source.map((frame) => ({ + ...frame, + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + })) + ) + ); + }); + + it('should return empty 
array when no timeseries exist', () => { + const source = [ + toDataFrame({ + name: 'wide', + refId: 'A', + fields: [ + { name: 'text', type: FieldType.string, values: ['a', 'z', 'b', 'x', 'c', 'b'] }, + { name: 'text', type: FieldType.string, values: ['a', 'z', 'b', 'x', 'c', 'b'] }, + { name: 'text', type: FieldType.string, values: ['a', 'z', 'b', 'x', 'c', 'b'] }, + ], + }), + toDataFrame({ + name: 'wide', + refId: 'B', + fields: [ + { name: 'text', type: FieldType.string, values: ['a', 'z', 'b', 'x', 'c', 'b'] }, + { name: 'text', type: FieldType.string, values: ['a', 'z', 'b', 'x', 'c', 'b'] }, + { name: 'text', type: FieldType.string, values: ['a', 'z', 'b', 'x', 'c', 'b'] }, + ], + }), + ]; + + const config: PrepareTimeSeriesOptions = { + format: timeSeriesFormat.TimeSeriesMulti, + }; + + expect(prepareTimeSeriesTransformer.transformer(config, ctx)(source)).toEqual([]); + }); + + it('should convert long to multi', () => { + const source = [ + toDataFrame({ + name: 'long', + refId: 'X', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 1, 2, 2, 3, 3] }, + { name: 'value', type: FieldType.number, values: [10, 20, 30, 40, 50, 60] }, + { name: 'region', type: FieldType.string, values: ['a', 'b', 'a', 'b', 'a', 'b'] }, + ], + }), + ]; + + const config: PrepareTimeSeriesOptions = { + format: timeSeriesFormat.TimeSeriesMulti, + }; + + const frames = prepareTimeSeriesTransformer.transformer(config, ctx)(source); + expect(frames).toEqual([ + toEquableDataFrame({ + name: 'long', + refId: 'X', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3] }, + { name: 'value', labels: { region: 'a' }, type: FieldType.number, values: [10, 30, 50] }, + ], + length: 3, + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + }), + toEquableDataFrame({ + name: 'long', + refId: 'X', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3] }, + { name: 'value', labels: { region: 'b' }, type: FieldType.number, values: [20, 40, 60] }, + ], + length: 3, + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + }), + ]); + }); + + it('should migrate many to multi and still convert correctly', () => { + const source = [ + toDataFrame({ + name: 'wants-to-be-many', + refId: 'X', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 1, 2, 2, 3, 3] }, + { name: 'value', type: FieldType.number, values: [10, 20, 30, 40, 50, 60] }, + { name: 'region', type: FieldType.string, values: ['a', 'b', 'a', 'b', 'a', 'b'] }, + ], + }), + ]; + + const config: PrepareTimeSeriesOptions = { + format: timeSeriesFormat.TimeSeriesMany, + }; + + const frames = prepareTimeSeriesTransformer.transformer(config, ctx)(source); + + expect(frames).toEqual([ + toEquableDataFrame({ + name: 'wants-to-be-many', + refId: 'X', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3] }, + { name: 'value', labels: { region: 'a' }, type: FieldType.number, values: [10, 30, 50] }, + ], + length: 3, + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + }), + toEquableDataFrame({ + name: 'wants-to-be-many', + refId: 'X', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 2, 3] }, + { name: 'value', labels: { region: 'b' }, type: FieldType.number, values: [20, 40, 60] }, + ], + length: 3, + meta: { + type: DataFrameType.TimeSeriesMulti, + }, + }), + ]); + }); + + it('should handle long to wide', () => { + expect( + prepareTimeSeriesTransformer.transformer( + { + format: timeSeriesFormat.TimeSeriesWide, + }, + ctx + )([ + toDataFrame({ + meta: { type: DataFrameType.TimeSeriesLong 
}, + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 1, 2, 2, 3, 3] }, + { name: 'speed', type: FieldType.number, values: [4, 5, 6, 7, 8, 9] }, + { name: 'sensor', type: FieldType.string, values: ['a', 'b', 'a', 'b', 'a', 'b'] }, + ], + }), + ]) + ).toMatchSnapshot(); + }); + + it('should handle long to multi', () => { + expect( + prepareTimeSeriesTransformer.transformer( + { + format: timeSeriesFormat.TimeSeriesMulti, + }, + ctx + )([ + toDataFrame({ + meta: { type: DataFrameType.TimeSeriesLong }, + refId: 'A', + fields: [ + { name: 'time', type: FieldType.time, values: [1, 1, 2, 2, 3, 3] }, + { name: 'speed', type: FieldType.number, values: [4, 5, 6, 7, 8, 9] }, + { name: 'sensor', type: FieldType.string, values: ['a', 'b', 'a', 'b', 'a', 'b'] }, + ], + }), + ]) + ).toMatchSnapshot(); // ???? expecting a single frame!!!! + }); +}); + +function toEquableDataFrame(source: any): DataFrame { + return toDataFrame({ + meta: undefined, + ...source, + fields: source.fields.map((field: any) => { + return { + ...field, + config: {}, + }; + }), + }); +} + +function toEquableDataFrames(data: DataFrame[]): DataFrameDTO[] { + return data.map((frame) => toDataFrameDTO(frame)); +} diff --git a/packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/prepareTimeSeries.ts b/packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/prepareTimeSeries.ts new file mode 100644 index 0000000..e191827 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/prepareTimeSeries/prepareTimeSeries.ts @@ -0,0 +1,352 @@ +import { map } from 'rxjs/operators'; + +import { + SynchronousDataTransformerInfo, + DataFrame, + DataFrameType, + FieldType, + DataTransformerID, + outerJoinDataFrames, + fieldMatchers, + FieldMatcherID, + Field, + MutableDataFrame, + Labels, +} from '@grafana/data'; + +import { partitionByValues } from '../partitionByValues/partitionByValues'; + +/** + * There is currently an effort to figure out consistent names + * for the various formats/types we produce and use. 
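+ * (Roughly: "wide" keeps one frame with a shared time field and a value field per
+ * series, "multi" emits one frame per series with labels, and "long" stores one
+ * row per time/label combination.)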
+ * + * This transformer will eventually include the required metadata that can assert + * a DataFrame[] is of a given type + * + * @internal -- TBD + */ + +export enum timeSeriesFormat { + TimeSeriesWide = 'wide', + TimeSeriesLong = 'long', + TimeSeriesMulti = 'multi', + + /** @deprecated use multi */ + TimeSeriesMany = 'many', +} + +export type PrepareTimeSeriesOptions = { + format: timeSeriesFormat; +}; + +/** + * Convert to [][time,number] + */ +export function toTimeSeriesMulti(data: DataFrame[]): DataFrame[] { + if (!Array.isArray(data) || data.length === 0) { + return data; + } + + const result: DataFrame[] = []; + for (const frame of toTimeSeriesLong(data)) { + const timeField = frame.fields[0]; + if (!timeField || timeField.type !== FieldType.time) { + continue; + } + const valueFields: Field[] = []; + const labelFields: Field[] = []; + for (const field of frame.fields) { + switch (field.type) { + case FieldType.number: + case FieldType.boolean: + valueFields.push(field); + break; + case FieldType.string: + labelFields.push(field); + break; + } + } + + for (const field of valueFields) { + if (labelFields.length) { + // new frame for each label key + type frameBuilder = { + time: number[]; + value: number[]; + key: string; + labels: Labels; + }; + const builders = new Map(); + for (let i = 0; i < frame.length; i++) { + const time = timeField.values[i]; + const value = field.values[i]; + if (value === undefined || time == null) { + continue; // skip values left over from join + } + + const key = labelFields.map((f) => f.values[i]).join('/'); + let builder = builders.get(key); + if (!builder) { + builder = { + key, + time: [], + value: [], + labels: {}, + }; + for (const label of labelFields) { + builder.labels[label.name] = label.values[i]; + } + builders.set(key, builder); + } + builder.time.push(time); + builder.value.push(value); + } + + // Add a frame for each distinct value + for (const b of builders.values()) { + result.push({ + name: frame.name, + refId: frame.refId, + meta: { + ...frame.meta, + type: DataFrameType.TimeSeriesMulti, + }, + fields: [ + { + ...timeField, + values: b.time, + }, + { + ...field, + values: b.value, + labels: b.labels, + }, + ], + length: b.time.length, + }); + } + } else { + result.push({ + name: frame.name, + refId: frame.refId, + meta: { + ...frame.meta, + type: DataFrameType.TimeSeriesMulti, + }, + fields: [timeField, field], + length: frame.length, + }); + } + } + } + return result; +} + +export function toTimeSeriesLong(data: DataFrame[]): DataFrame[] { + if (!Array.isArray(data) || data.length === 0) { + return data; + } + + const result: DataFrame[] = []; + for (const frame of data) { + let timeField: Field | undefined; + const uniqueValueNames: string[] = []; + const uniqueValueNamesToType: Record = {}; + const uniqueLabelKeys: Record = {}; + const labelKeyToWideIndices: Record = {}; + const uniqueFactorNamesToWideIndex: Record = {}; + + for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) { + const field = frame.fields[fieldIndex]; + + switch (field.type) { + case FieldType.string: + case FieldType.boolean: + if (field.name in uniqueFactorNamesToWideIndex) { + // TODO error? + } else { + uniqueFactorNamesToWideIndex[field.name] = fieldIndex; + uniqueLabelKeys[field.name] = true; + } + break; + case FieldType.time: + if (!timeField) { + timeField = field; + break; + } + default: + if (field.name in uniqueValueNamesToType) { + const type = uniqueValueNamesToType[field.name]; + + if (field.type !== type) { + // TODO error? 
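+              // same field name seen again with a different type: skip it rather than mix types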
+ continue; + } + } else { + uniqueValueNamesToType[field.name] = field.type; + uniqueValueNames.push(field.name); + } + + const tKey = JSON.stringify(field.labels); + const wideIndices = labelKeyToWideIndices[tKey]; + + if (wideIndices !== undefined) { + wideIndices.push(fieldIndex); + } else { + labelKeyToWideIndices[tKey] = [fieldIndex]; + } + + if (field.labels != null) { + for (const labelKey in field.labels) { + uniqueLabelKeys[labelKey] = true; + } + } + } + } + + if (!timeField) { + continue; + } + + type TimeWideRowIndex = { + time: any; + wideRowIndex: number; + }; + const sortedTimeRowIndices: TimeWideRowIndex[] = []; + const sortedUniqueLabelKeys: string[] = []; + const uniqueFactorNames: string[] = []; + const uniqueFactorNamesWithWideIndices: string[] = []; + + for (let wideRowIndex = 0; wideRowIndex < frame.length; wideRowIndex++) { + sortedTimeRowIndices.push({ time: timeField.values[wideRowIndex], wideRowIndex: wideRowIndex }); + } + + for (const labelKeys in labelKeyToWideIndices) { + sortedUniqueLabelKeys.push(labelKeys); + } + for (const labelKey in uniqueLabelKeys) { + uniqueFactorNames.push(labelKey); + } + for (const name in uniqueFactorNamesToWideIndex) { + uniqueFactorNamesWithWideIndices.push(name); + } + + sortedTimeRowIndices.sort((a, b) => a.time - b.time); + sortedUniqueLabelKeys.sort(); + uniqueFactorNames.sort(); + uniqueValueNames.sort(); + + const longFrame = new MutableDataFrame({ + ...frame, + meta: { ...frame.meta, type: DataFrameType.TimeSeriesLong }, + fields: [{ name: timeField.name, type: timeField.type }], + }); + + for (const name of uniqueValueNames) { + longFrame.addField({ name: name, type: uniqueValueNamesToType[name] }); + } + + for (const name of uniqueFactorNames) { + longFrame.addField({ name: name, type: FieldType.string }); + } + + for (const timeWideRowIndex of sortedTimeRowIndices) { + const { time, wideRowIndex } = timeWideRowIndex; + + for (const labelKeys of sortedUniqueLabelKeys) { + const rowValues: Record = {}; + + for (const name of uniqueFactorNamesWithWideIndices) { + rowValues[name] = frame.fields[uniqueFactorNamesToWideIndex[name]].values[wideRowIndex]; + } + + let index = 0; + + for (const wideFieldIndex of labelKeyToWideIndices[labelKeys]) { + const wideField = frame.fields[wideFieldIndex]; + + if (index++ === 0 && wideField.labels != null) { + for (const labelKey in wideField.labels) { + rowValues[labelKey] = wideField.labels[labelKey]; + } + } + + rowValues[wideField.name] = wideField.values[wideRowIndex]; + } + + rowValues[timeField.name] = time; + longFrame.add(rowValues); + } + } + + result.push(longFrame); + } + + return result; +} + +export function longToMultiTimeSeries(frame: DataFrame): DataFrame[] { + // All the string fields + const matcher = (field: Field) => field.type === FieldType.string; + + // transform one dataFrame at a time and concat into DataFrame[] + return partitionByValues(frame, matcher).map((frame) => { + if (!frame.meta) { + frame.meta = {}; + } + frame.meta.type = DataFrameType.TimeSeriesMulti; + return frame; + }); +} + +export const prepareTimeSeriesTransformer: SynchronousDataTransformerInfo = { + id: DataTransformerID.prepareTimeSeries, + name: 'Prepare time series', + description: `Will stretch data frames from the wide format into the long format. 
This is really helpful to be able to keep backwards compatibility for panels not supporting the new wide format.`, + defaultOptions: {}, + + operator: (options, ctx) => (source) => + source.pipe(map((data) => prepareTimeSeriesTransformer.transformer(options, ctx)(data))), + + transformer: (options: PrepareTimeSeriesOptions) => { + const format = options?.format ?? timeSeriesFormat.TimeSeriesWide; + if (format === timeSeriesFormat.TimeSeriesMany || format === timeSeriesFormat.TimeSeriesMulti) { + return toTimeSeriesMulti; + } else if (format === timeSeriesFormat.TimeSeriesLong) { + return toTimeSeriesLong; + } + const joinBy = fieldMatchers.get(FieldMatcherID.firstTimeField).get({}); + + // Single TimeSeriesWide frame (joined by time) + return (data: DataFrame[]) => { + if (!data.length) { + return []; + } + + // Convert long to wide first + const join: DataFrame[] = []; + for (const df of data) { + if (df.meta?.type === DataFrameType.TimeSeriesLong) { + longToMultiTimeSeries(df).forEach((v) => join.push(v)); + } else { + join.push(df); + } + } + + // Join by the first frame + const frame = outerJoinDataFrames({ + frames: join, + joinBy, + keepOriginIndices: true, + }); + if (frame) { + if (!frame.meta) { + frame.meta = {}; + } + frame.meta.type = DataFrameType.TimeSeriesWide; + return [frame]; + } + return []; + }; + }, +}; diff --git a/packages/grafana-utils/src/grafana/transformers/rowsToFields/rowsToFields.test.ts b/packages/grafana-utils/src/grafana/transformers/rowsToFields/rowsToFields.test.ts new file mode 100644 index 0000000..333fa1a --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/rowsToFields/rowsToFields.test.ts @@ -0,0 +1,177 @@ +import { toDataFrame, FieldType } from '@grafana/data'; + +import { rowsToFields } from './rowsToFields'; + +describe('Rows to fields', () => { + it('Will extract min & max from field', () => { + const input = toDataFrame({ + fields: [ + { name: 'Name', type: FieldType.string, values: ['Temperature', 'Pressure'] }, + { name: 'Value', type: FieldType.number, values: [10, 200] }, + { name: 'Unit', type: FieldType.string, values: ['degree', 'pressurebar'] }, + { name: 'Miiin', type: FieldType.number, values: [3, 100] }, + { name: 'max', type: FieldType.string, values: [15, 200] }, + ], + }); + + const result = rowsToFields( + { + mappings: [ + { + fieldName: 'Miiin', + handlerKey: 'min', + }, + ], + }, + input + ); + + expect(result).toMatchInlineSnapshot(` + { + "fields": [ + { + "config": { + "max": 15, + "min": 3, + "unit": "degree", + }, + "labels": {}, + "name": "Temperature", + "type": "number", + "values": [ + 10, + ], + }, + { + "config": { + "max": 200, + "min": 100, + "unit": "pressurebar", + }, + "labels": {}, + "name": "Pressure", + "type": "number", + "values": [ + 200, + ], + }, + ], + "length": 1, + } + `); + }); + + it('Can handle custom name and value field mapping', () => { + const input = toDataFrame({ + fields: [ + { name: 'Name', type: FieldType.string, values: ['Ignore'] }, + { name: 'SensorName', type: FieldType.string, values: ['Temperature'] }, + { name: 'Value', type: FieldType.number, values: [10] }, + { name: 'SensorReading', type: FieldType.number, values: [100] }, + ], + }); + + const result = rowsToFields( + { + mappings: [ + { fieldName: 'SensorName', handlerKey: 'field.name' }, + { fieldName: 'SensorReading', handlerKey: 'field.value' }, + ], + }, + input + ); + + expect(result.fields[0].name).toBe('Temperature'); + expect(result.fields[0].config).toEqual({}); + 
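+    // the value should come from the mapped SensorReading field, not the default Value field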
expect(result.fields[0].values[0]).toBe(100); + }); + + it('Can handle colors', () => { + const input = toDataFrame({ + fields: [ + { name: 'Name', type: FieldType.string, values: ['Temperature'] }, + { name: 'Value', type: FieldType.number, values: [10] }, + { name: 'Color', type: FieldType.string, values: ['blue'] }, + ], + }); + + const result = rowsToFields({}, input); + + expect(result.fields[0].config.color?.fixedColor).toBe('blue'); + }); + + it('Can handle thresholds', () => { + const input = toDataFrame({ + fields: [ + { name: 'Name', type: FieldType.string, values: ['Temperature'] }, + { name: 'Value', type: FieldType.number, values: [10] }, + { name: 'threshold1', type: FieldType.string, values: [30] }, + { name: 'threshold2', type: FieldType.string, values: [500] }, + ], + }); + + const result = rowsToFields({}, input); + expect(result.fields[0].config.thresholds?.steps[1].value).toBe(30); + }); + + it('Will extract other string fields to labels', () => { + const input = toDataFrame({ + fields: [ + { name: 'Name', type: FieldType.string, values: ['Temperature', 'Pressure'] }, + { name: 'Value', type: FieldType.number, values: [10, 200] }, + { name: 'City', type: FieldType.string, values: ['Stockholm', 'New York'] }, + ], + }); + + const result = rowsToFields({}, input); + + expect(result.fields[0].labels).toEqual({ City: 'Stockholm' }); + expect(result.fields[1].labels).toEqual({ City: 'New York' }); + }); + + it('Can ignore field as auto picked for value or name', () => { + const input = toDataFrame({ + fields: [ + { name: 'Name', type: FieldType.string, values: ['Temperature'] }, + { name: 'Value', type: FieldType.number, values: [10] }, + { name: 'City', type: FieldType.string, values: ['Stockholm'] }, + { name: 'Value2', type: FieldType.number, values: [20] }, + ], + }); + + const result = rowsToFields( + { + mappings: [ + { fieldName: 'Name', handlerKey: '__ignore' }, + { fieldName: 'Value', handlerKey: '__ignore' }, + ], + }, + input + ); + + expect(result.fields[0].name).toEqual('Stockholm'); + expect(result.fields[0].values[0]).toEqual(20); + }); + + it('Can handle number fields as name field', () => { + const input = toDataFrame({ + fields: [ + { name: 'SensorID', type: FieldType.number, values: [10, 20, 30] }, + { name: 'Value', type: FieldType.number, values: [1, 2, 3] }, + ], + }); + + const result = rowsToFields( + { + mappings: [ + { fieldName: 'SensorID', handlerKey: 'field.name' }, + { fieldName: 'Value', handlerKey: 'field.value' }, + ], + }, + input + ); + + expect(result.fields[0].name).toEqual('10'); + expect(result.fields[0].values[0]).toEqual(1); + }); +}); diff --git a/packages/grafana-utils/src/grafana/transformers/rowsToFields/rowsToFields.ts b/packages/grafana-utils/src/grafana/transformers/rowsToFields/rowsToFields.ts new file mode 100644 index 0000000..51f0b9f --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/rowsToFields/rowsToFields.ts @@ -0,0 +1,89 @@ +import { map } from 'rxjs/operators'; + +import { DataFrame, DataTransformerID, DataTransformerInfo, Field, getFieldDisplayName, Labels } from '@grafana/data'; + +import { + EvaluatedMappingResult, + evaluteFieldMappings, + FieldConfigHandlerKey, + FieldToConfigMapping, + getFieldConfigFromFrame, +} from '../fieldToConfigMapping/fieldToConfigMapping'; + +export interface RowToFieldsTransformOptions { + nameField?: string; + valueField?: string; + mappings?: FieldToConfigMapping[]; +} + +export const rowsToFieldsTransformer: DataTransformerInfo = { + id: 
DataTransformerID.rowsToFields, + name: 'Rows to fields', + description: 'Convert each row into a field with dynamic config.', + defaultOptions: {}, + + /** + * Return a modified copy of the series. If the transform is not or should not + * be applied, just return the input series + */ + operator: (options) => (source) => + source.pipe( + map((data) => { + return data.map((frame) => rowsToFields(options, frame)); + }) + ), +}; + +export function rowsToFields(options: RowToFieldsTransformOptions, data: DataFrame): DataFrame { + const mappingResult = evaluteFieldMappings(data, options.mappings ?? [], true); + const { nameField, valueField } = mappingResult; + + if (!nameField || !valueField) { + return data; + } + + const outFields: Field[] = []; + + for (let index = 0; index < nameField.values.length; index++) { + const name = nameField.values[index]; + const value = valueField.values[index]; + const config = getFieldConfigFromFrame(data, index, mappingResult); + const labels = getLabelsFromRow(data, index, mappingResult); + + const field: Field = { + name: `${name}`, + type: valueField.type, + values: [value], + config: config, + labels, + }; + + outFields.push(field); + } + + return { + fields: outFields, + length: 1, + }; +} + +function getLabelsFromRow(frame: DataFrame, index: number, mappingResult: EvaluatedMappingResult): Labels { + const labels = { ...mappingResult.nameField!.labels }; + + for (let i = 0; i < frame.fields.length; i++) { + const field = frame.fields[i]; + const fieldName = getFieldDisplayName(field, frame); + const fieldMapping = mappingResult.index[fieldName]; + + if (fieldMapping.handler && fieldMapping.handler.key !== FieldConfigHandlerKey.Label) { + continue; + } + + const value = field.values[index]; + if (value != null) { + labels[fieldName] = value; + } + } + + return labels; +} diff --git a/packages/grafana-utils/src/grafana/transformers/timeSeriesTable/timeSeriesTableTransformer.test.ts b/packages/grafana-utils/src/grafana/transformers/timeSeriesTable/timeSeriesTableTransformer.test.ts new file mode 100644 index 0000000..a281c4d --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/timeSeriesTable/timeSeriesTableTransformer.test.ts @@ -0,0 +1,133 @@ +import { toDataFrame, FieldType, Labels, DataFrame, Field, ReducerID } from '@grafana/data'; + +import { timeSeriesToTableTransform } from './timeSeriesTableTransformer'; + +describe('timeSeriesTableTransformer', () => { + it('Will transform a single query', () => { + const series = [ + getTimeSeries('A', { instance: 'A', pod: 'B' }), + getTimeSeries('A', { instance: 'A', pod: 'C' }), + getTimeSeries('A', { instance: 'A', pod: 'D' }), + ]; + + const results = timeSeriesToTableTransform({}, series); + expect(results).toHaveLength(1); + const result = results[0]; + expect(result.refId).toBe('A'); + expect(result.fields).toHaveLength(3); + expect(result.fields[0].values).toEqual(['A', 'A', 'A']); + expect(result.fields[1].values).toEqual(['B', 'C', 'D']); + assertDataFrameField(result.fields[2], series); + }); + + it('Will pass through non time series frames', () => { + const series = [ + getTable('B', ['foo', 'bar']), + getTimeSeries('A', { instance: 'A', pod: 'B' }), + getTimeSeries('A', { instance: 'A', pod: 'C' }), + getTable('C', ['bar', 'baz', 'bad']), + ]; + + const results = timeSeriesToTableTransform({}, series); + expect(results).toHaveLength(3); + expect(results[0]).toEqual(series[0]); + expect(results[1].refId).toBe('A'); + expect(results[1].fields).toHaveLength(3); + 
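+    // both refId 'A' series collapse into a single table frame: instance, pod and the Trend frame field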
expect(results[1].fields[0].values).toEqual(['A', 'A']); + expect(results[1].fields[1].values).toEqual(['B', 'C']); + expect(results[2]).toEqual(series[3]); + }); + + it('Will group by refId', () => { + const series = [ + getTimeSeries('A', { instance: 'A', pod: 'B' }), + getTimeSeries('A', { instance: 'A', pod: 'C' }), + getTimeSeries('A', { instance: 'A', pod: 'D' }), + getTimeSeries('B', { instance: 'B', pod: 'F', cluster: 'A' }), + getTimeSeries('B', { instance: 'B', pod: 'G', cluster: 'B' }), + ]; + + const results = timeSeriesToTableTransform({}, series); + expect(results).toHaveLength(2); + expect(results[0].refId).toBe('A'); + expect(results[0].fields).toHaveLength(3); + expect(results[0].fields[0].values).toEqual(['A', 'A', 'A']); + expect(results[0].fields[1].values).toEqual(['B', 'C', 'D']); + assertDataFrameField(results[0].fields[2], series.slice(0, 3)); + expect(results[1].refId).toBe('B'); + expect(results[1].fields).toHaveLength(4); + expect(results[1].fields[0].values).toEqual(['B', 'B']); + expect(results[1].fields[1].values).toEqual(['F', 'G']); + expect(results[1].fields[2].values).toEqual(['A', 'B']); + assertDataFrameField(results[1].fields[3], series.slice(3, 5)); + }); + + it('Will include last value by deault', () => { + const series = [ + getTimeSeries('A', { instance: 'A', pod: 'B' }, [4, 2, 3]), + getTimeSeries('A', { instance: 'A', pod: 'C' }, [3, 4, 5]), + ]; + + const results = timeSeriesToTableTransform({}, series); + expect(results[0].fields[2].values[0].value).toEqual(3); + expect(results[0].fields[2].values[1].value).toEqual(5); + }); + + it('Will calculate average value if configured', () => { + const series = [ + getTimeSeries('A', { instance: 'A', pod: 'B' }, [4, 2, 3]), + getTimeSeries('B', { instance: 'A', pod: 'C' }, [3, 4, 5]), + ]; + + const results = timeSeriesToTableTransform( + { + refIdToStat: { + B: ReducerID.mean, + }, + }, + series + ); + expect(results[0].fields[2].values[0].value).toEqual(3); + expect(results[1].fields[2].values[0].value).toEqual(4); + }); +}); + +function assertFieldsEqual(field1: Field, field2: Field) { + expect(field1.type).toEqual(field2.type); + expect(field1.name).toEqual(field2.name); + expect(field1.values).toEqual(field2.values); + expect(field1.labels ?? {}).toEqual(field2.labels ?? 
{}); +} + +function assertDataFrameField(field: Field, matchesFrames: DataFrame[]) { + const frames: DataFrame[] = field.values; + expect(frames).toHaveLength(matchesFrames.length); + frames.forEach((frame, idx) => { + const matchingFrame = matchesFrames[idx]; + expect(frame.fields).toHaveLength(matchingFrame.fields.length); + frame.fields.forEach((field, fidx) => assertFieldsEqual(field, matchingFrame.fields[fidx])); + }); +} + +function getTimeSeries(refId: string, labels: Labels, values: number[] = [10]) { + return toDataFrame({ + refId, + fields: [ + { name: 'Time', type: FieldType.time, values: [10] }, + { + name: 'Value', + type: FieldType.number, + values, + labels, + }, + ], + }); +} + +function getTable(refId: string, fields: string[]) { + return toDataFrame({ + refId, + fields: fields.map((f) => ({ name: f, type: FieldType.string, values: ['value'] })), + labels: {}, + }); +} diff --git a/packages/grafana-utils/src/grafana/transformers/timeSeriesTable/timeSeriesTableTransformer.ts b/packages/grafana-utils/src/grafana/transformers/timeSeriesTable/timeSeriesTableTransformer.ts new file mode 100644 index 0000000..5137b15 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/timeSeriesTable/timeSeriesTableTransformer.ts @@ -0,0 +1,137 @@ +import { map } from 'rxjs/operators'; + +import { + DataFrame, + DataFrameWithValue, + DataTransformerID, + DataTransformerInfo, + Field, + FieldType, + MutableDataFrame, + isTimeSeriesFrame, + ReducerID, + reduceField, +} from '@grafana/data'; + +export interface TimeSeriesTableTransformerOptions { + refIdToStat?: Record; +} + +export const timeSeriesTableTransformer: DataTransformerInfo = { + id: DataTransformerID.timeSeriesTable, + name: 'Time series to table transform', + description: 'Time series to table rows.', + defaultOptions: {}, + + operator: (options) => (source) => + source.pipe( + map((data) => { + return timeSeriesToTableTransform(options, data); + }) + ), +}; + +/** + * Converts time series frames to table frames for use with sparkline chart type. + * + * @remarks + * For each refId (queryName) convert all time series frames into a single table frame, adding each series + * as values of a "Trend" frame field. This allows "Trend" to be rendered as area chart type. + * Any non time series frames are returned as is. + * + * @param options - Transform options, currently not used + * @param data - Array of data frames to transform + * @returns Array of transformed data frames + * + * @alpha + */ +export function timeSeriesToTableTransform(options: TimeSeriesTableTransformerOptions, data: DataFrame[]): DataFrame[] { + // initialize fields from labels for each refId + const refId2LabelFields = getLabelFields(data); + + const refId2frameField: Record> = {}; + + const result: DataFrame[] = []; + + for (const frame of data) { + if (!isTimeSeriesFrame(frame)) { + result.push(frame); + continue; + } + + const refId = frame.refId ?? ''; + + const labelFields = refId2LabelFields[refId] ?? {}; + // initialize a new frame for this refId with fields per label and a Trend frame field, if it doesn't exist yet + let frameField = refId2frameField[refId]; + if (!frameField) { + frameField = { + name: 'Trend' + (refId && Object.keys(refId2LabelFields).length > 1 ? 
` #${refId}` : ''), + type: FieldType.frame, + config: {}, + values: [], + }; + refId2frameField[refId] = frameField; + + const table = new MutableDataFrame(); + for (const label of Object.values(labelFields)) { + table.addField(label); + } + table.addField(frameField); + table.refId = refId; + result.push(table); + } + + // add values to each label based field of this frame + const labels = frame.fields[1].labels; + for (const labelKey of Object.keys(labelFields)) { + const labelValue = labels?.[labelKey] ?? null; + labelFields[labelKey].values.push(labelValue!); + } + const reducerId = options.refIdToStat?.[refId] ?? ReducerID.lastNotNull; + const valueField = frame.fields.find((f) => f.type === FieldType.number); + const value = (valueField && reduceField({ field: valueField, reducers: [reducerId] })[reducerId]) || null; + frameField.values.push({ + ...frame, + value, + }); + } + return result; +} + +// For each refId, initialize a field for each label name +function getLabelFields(frames: DataFrame[]): Record>> { + // refId -> label name -> field + const labelFields: Record>> = {}; + + for (const frame of frames) { + if (!isTimeSeriesFrame(frame)) { + continue; + } + + const refId = frame.refId ?? ''; + + if (!labelFields[refId]) { + labelFields[refId] = {}; + } + + for (const field of frame.fields) { + if (!field.labels) { + continue; + } + + for (const labelName of Object.keys(field.labels)) { + if (!labelFields[refId][labelName]) { + labelFields[refId][labelName] = { + name: labelName, + type: FieldType.string, + config: {}, + values: [], + }; + } + } + } + } + + return labelFields; +} diff --git a/packages/grafana-utils/src/grafana/transformers/utils.test.ts b/packages/grafana-utils/src/grafana/transformers/utils.test.ts new file mode 100644 index 0000000..40bdd23 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/utils.test.ts @@ -0,0 +1,47 @@ +import { numberOrVariableValidator } from './utils'; + +describe('validator', () => { + it('validates a positive number', () => { + expect(numberOrVariableValidator(1)).toBe(true); + }); + + it('validates a negative number', () => { + expect(numberOrVariableValidator(-1)).toBe(true); + }); + + it('validates zero', () => { + expect(numberOrVariableValidator(0)).toBe(true); + }); + + it('validates a float', () => { + expect(numberOrVariableValidator(1.2)).toBe(true); + }); + + it('validates a negative float', () => { + expect(numberOrVariableValidator(1.2)).toBe(true); + }); + + it('validates a string that is a positive integer', () => { + expect(numberOrVariableValidator('1')).toBe(true); + }); + + it('validats a string that is a negative integer', () => { + expect(numberOrVariableValidator('-1')).toBe(true); + }); + + it('validats a string that is zero', () => { + expect(numberOrVariableValidator('0')).toBe(true); + }); + + it('validats a string that is a float', () => { + expect(numberOrVariableValidator('1.2')).toBe(true); + }); + + it('validats a string that is a negative float', () => { + expect(numberOrVariableValidator('-1.2')).toBe(true); + }); + + it('fails a string that is not a number', () => { + expect(numberOrVariableValidator('foo')).toBe(false); + }); +}); diff --git a/packages/grafana-utils/src/grafana/transformers/utils.ts b/packages/grafana-utils/src/grafana/transformers/utils.ts new file mode 100644 index 0000000..64e7d62 --- /dev/null +++ b/packages/grafana-utils/src/grafana/transformers/utils.ts @@ -0,0 +1,84 @@ +import { useMemo } from 'react'; + +import { DataFrame, getFieldDisplayName, 
+import { DataFrame, getFieldDisplayName, TransformerCategory, SelectableValue, getTimeZones } from '@grafana/data';
+
+const transformationsVariableSupport = true;
+
+export function useAllFieldNamesFromDataFrames(input: DataFrame[]): string[] {
+  return useMemo(() => {
+    if (!Array.isArray(input)) {
+      return [];
+    }
+
+    return Object.keys(
+      input.reduce<Record<string, boolean>>((names, frame) => {
+        if (!frame || !Array.isArray(frame.fields)) {
+          return names;
+        }
+
+        return frame.fields.reduce((names, field) => {
+          const t = getFieldDisplayName(field, frame, input);
+          names[t] = true;
+          return names;
+        }, names);
+      }, {})
+    );
+  }, [input]);
+}
+
+export function getDistinctLabels(input: DataFrame[]): Set<string> {
+  const distinct = new Set<string>();
+  for (const frame of input) {
+    for (const field of frame.fields) {
+      if (field.labels) {
+        for (const k of Object.keys(field.labels)) {
+          distinct.add(k);
+        }
+      }
+    }
+  }
+  return distinct;
+}
+
+export const categoriesLabels: { [K in TransformerCategory]: string } = {
+  combine: 'Combine',
+  calculateNewFields: 'Calculate new fields',
+  createNewVisualization: 'Create new visualization',
+  filter: 'Filter',
+  performSpatialOperations: 'Perform spatial operations',
+  reformat: 'Reformat',
+  reorderAndRename: 'Reorder and rename',
+};
+
+export const numberOrVariableValidator = (value: string | number) => {
+  if (typeof value === 'number') {
+    return true;
+  }
+  if (!Number.isNaN(Number(value))) {
+    return true;
+  }
+  if (/^\$[A-Za-z0-9_]+$/.test(value) && transformationsVariableSupport) {
+    return true;
+  }
+  return false;
+};
+
+export function getTimezoneOptions(includeInternal: boolean) {
+  const timeZoneOptions: Array<SelectableValue<string>> = [];
+
+  // There are currently only two internal timezones
+  // Browser and UTC. We add them manually to avoid
+  // funky string manipulation.
+  if (includeInternal) {
+    timeZoneOptions.push({ label: 'Browser', value: 'browser' });
+    timeZoneOptions.push({ label: 'UTC', value: 'utc' });
+  }
+
+  // Add all other timezones
+  const tzs = getTimeZones();
+  for (const tz of tzs) {
+    timeZoneOptions.push({ label: tz, value: tz });
+  }
+
+  return timeZoneOptions;
+}
diff --git a/packages/grafana-utils/src/grafana/utils/dimensions.ts b/packages/grafana-utils/src/grafana/utils/dimensions.ts
new file mode 100644
index 0000000..33bc0e5
--- /dev/null
+++ b/packages/grafana-utils/src/grafana/utils/dimensions.ts
@@ -0,0 +1,24 @@
+import { DataFrame, Field, getFieldDisplayName } from '@grafana/data';
+
+export function findField(frame?: DataFrame, name?: string): Field | undefined {
+  const idx = findFieldIndex(frame, name);
+  return idx == null ? undefined : frame!.fields[idx];
+}
+
+export function findFieldIndex(frame?: DataFrame, name?: string): number | undefined {
+  if (!frame || !name?.length) {
+    return undefined;
+  }
+
+  for (let i = 0; i < frame.fields.length; i++) {
+    const field = frame.fields[i];
+    if (name === field.name) {
+      return i;
+    }
+    const disp = getFieldDisplayName(field, frame);
+    if (name === disp) {
+      return i;
+    }
+  }
+  return undefined;
+}
diff --git a/packages/grafana-utils/src/grafana/utils/filterByName.ts b/packages/grafana-utils/src/grafana/utils/filterByName.ts
new file mode 100644
index 0000000..465e2c9
--- /dev/null
+++ b/packages/grafana-utils/src/grafana/utils/filterByName.ts
@@ -0,0 +1,36 @@
+import { DataTransformContext, RegexpOrNamesMatcherOptions, MatcherConfig, FieldMatcherID } from '@grafana/data';
+
+export const getMatcherConfig = (
+  ctx: DataTransformContext,
+  options?: RegexpOrNamesMatcherOptions,
+  byVariable?: boolean
+): MatcherConfig | undefined => {
+  if (!options) {
+    return undefined;
+  }
+
+  const { names, pattern, variable } = options;
+
+  if (byVariable && variable) {
+    const stringOfNames = ctx.interpolate(variable);
+    if (/\{.*\}/.test(stringOfNames)) {
+      const namesFromString = stringOfNames.slice(1).slice(0, -1).split(',');
+      return { id: FieldMatcherID.byNames, options: { names: namesFromString } };
+    }
+    return { id: FieldMatcherID.byNames, options: { names: stringOfNames.split(',') } };
+  }
+
+  if ((!Array.isArray(names) || names.length === 0) && !pattern) {
+    return undefined;
+  }
+
+  if (!pattern) {
+    return { id: FieldMatcherID.byNames, options: { names } };
+  }
+
+  if (!Array.isArray(names) || names.length === 0) {
+    return { id: FieldMatcherID.byRegexp, options: pattern };
+  }
+
+  return { id: FieldMatcherID.byRegexpOrNames, options };
+};
diff --git a/packages/grafana-utils/src/index.test.ts b/packages/grafana-utils/src/index.test.ts
new file mode 100644
index 0000000..36f0186
--- /dev/null
+++ b/packages/grafana-utils/src/index.test.ts
@@ -0,0 +1,7 @@
+import * as module from './index';
+
+describe('Export module', () => {
+  it('Should not throw window not defined error', () => {
+    expect(module).toBeTruthy();
+  });
+});
diff --git a/packages/grafana-utils/src/index.ts b/packages/grafana-utils/src/index.ts
new file mode 100644
index 0000000..8da7418
--- /dev/null
+++ b/packages/grafana-utils/src/index.ts
@@ -0,0 +1,4 @@
+export * from './privateTransformers';
+export * from './query';
+export * from './types';
+export * from '@grafana/data';
diff --git a/packages/grafana-utils/src/privateTransformers.ts b/packages/grafana-utils/src/privateTransformers.ts
new file mode 100644
index 0000000..6894fc6
--- /dev/null
+++ b/packages/grafana-utils/src/privateTransformers.ts
@@ -0,0 +1,22 @@
+import { heatmapTransformer } from './grafana/transformers/calculateHeatmap/heatmap';
+import { configFromDataTransformer } from './grafana/transformers/configFromQuery/configFromQuery';
+import { extractFieldsTransformer } from './grafana/transformers/extractFields/extractFields';
+import { joinByLabelsTransformer } from './grafana/transformers/joinByLabels/joinByLabels';
+import { partitionByValuesTransformer } from './grafana/transformers/partitionByValues/partitionByValues';
+import { prepareTimeSeriesTransformer } from './grafana/transformers/prepareTimeSeries/prepareTimeSeries';
+import { rowsToFieldsTransformer } from './grafana/transformers/rowsToFields/rowsToFields';
+import { timeSeriesTableTransformer } from './grafana/transformers/timeSeriesTable/timeSeriesTableTransformer';
+
+/**
+ * Private Transformers
+ */
+export const privateTransformers = {
+  configFromDataTransformer,
+  heatmapTransformer,
+  extractFieldsTransformer,
+  joinByLabelsTransformer,
+  partitionByValuesTransformer,
+  prepareTimeSeriesTransformer,
+  rowsToFieldsTransformer,
+  timeSeriesTableTransformer,
+};
diff --git a/packages/grafana-utils/src/query.ts b/packages/grafana-utils/src/query.ts
new file mode 100644
index 0000000..d7fa40d
--- /dev/null
+++ b/packages/grafana-utils/src/query.ts
@@ -0,0 +1,117 @@
+import { dataFrameFromJSON, DataQueryError, DataQueryResponse, LoadingState, toDataFrame } from '@grafana/data';
+
+import { FetchResponse, ResponseData } from './types';
+
+/**
+ * To Data Query Error
+ * @param err
+ */
+export function toDataQueryError(err: DataQueryError | string | unknown): DataQueryError {
+  const error: DataQueryError = err || {};
+
+  if (!error.message) {
+    if (typeof err === 'string') {
+      return { message: err };
+    }
+
+    let message = 'Query error';
+    if (error.message) {
+      message = error.message;
+    } else if (error.data && error.data.message && error.data?.message !== 'Query data error') {
+      message = error.data.message;
+    } else if (error?.data?.message === 'Query data error' && error?.data?.error) {
+      message = error.data.error;
+    } else if (error.data && error.data.error) {
+      message = error.data.error;
+    } else if (error.status) {
+      message = `Query error: ${error.status} ${error.statusText}`;
+    }
+    error.message = message;
+  }
+
+  return error;
+}
+
+/**
+ * To Data Query Response
+ * @param res
+ */
+export function toDataQueryResponse(res: FetchResponse): DataQueryResponse {
+  const rsp: DataQueryResponse = { data: [], state: LoadingState.Done };
+
+  // If the response isn't in a correct shape we just ignore the data and pass empty DataQueryResponse.
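+  // Each results entry is keyed by refId and may carry frames (DataFrameJSON), legacy series/tables, or an error with status (see ResponseData in types.ts).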
+  if ('results' in res.data) {
+    const results = res.data.results;
+    const refIds = Object.keys(results);
+    const data: ResponseData[] = [];
+
+    for (const refId of refIds) {
+      const dr = results[refId];
+      if (!dr) {
+        continue;
+      }
+      dr.refId = refId;
+      data.push(dr);
+    }
+
+    for (const dr of data) {
+      if (dr.error) {
+        const errorObj: DataQueryError = {
+          refId: dr.refId,
+          message: dr.error,
+          status: dr.status,
+        };
+        if (!rsp.error) {
+          rsp.error = { ...errorObj };
+        }
+        if (rsp.errors) {
+          rsp.errors.push({ ...errorObj });
+        } else {
+          rsp.errors = [{ ...errorObj }];
+        }
+        rsp.state = LoadingState.Error;
+      }
+
+      if (dr.frames?.length) {
+        for (const js of dr.frames) {
+          const df = dataFrameFromJSON(js);
+          if (!df.refId) {
+            df.refId = dr.refId;
+          }
+          rsp.data.push(df);
+        }
+        continue; // the other tests are legacy
+      }
+
+      if (dr.series?.length) {
+        for (const s of dr.series) {
+          if (!s.refId) {
+            s.refId = dr.refId;
+          }
+          rsp.data.push(toDataFrame(s));
+        }
+      }
+
+      if (dr.tables?.length) {
+        for (const s of dr.tables) {
+          if (!s.refId) {
+            s.refId = dr.refId;
+          }
+          rsp.data.push(toDataFrame(s));
+        }
+      }
+    }
+  }
+
+  // When it is not an OK response, make sure the error gets added
+  if (res.status && res.status !== 200) {
+    if (rsp.state !== LoadingState.Error) {
+      rsp.state = LoadingState.Error;
+    }
+    if (!rsp.error) {
+      rsp.error = toDataQueryError(res);
+    }
+  }
+
+  return rsp;
+}
diff --git a/packages/grafana-utils/src/types.ts b/packages/grafana-utils/src/types.ts
new file mode 100644
index 0000000..ebe2ff7
--- /dev/null
+++ b/packages/grafana-utils/src/types.ts
@@ -0,0 +1,63 @@
+import { DataFrameJSON } from '@grafana/data';
+
+/**
+ * Query Result
+ */
+export interface QueryResult {
+  /**
+   * Status
+   *
+   * @type {number}
+   */
+  status: number;
+
+  /**
+   * Frames
+   *
+   * @type {DataFrameJSON[]}
+   */
+  frames: DataFrameJSON[];
+
+  /**
+   * Ref ID
+   *
+   * @type {string}
+   */
+  refId?: string;
+}
+
+/**
+ * Fetch Data Query Response
+ */
+export interface FetchDataQueryResponse {
+  /**
+   * Results
+   */
+  results: Record<string, QueryResult>;
+}
+
+/**
+ * Response Data
+ */
+export interface ResponseData {
+  error?: string;
+  refId?: string;
+  frames?: DataFrameJSON[];
+  status?: number;
+
+  // Legacy TSDB format...
+  series?: any[];
+  tables?: any[];
+}
+
+export interface FetchResponse<T = any> {
+  data: T;
+  readonly status: number;
+  readonly statusText: string;
+  readonly ok?: boolean;
+  readonly headers: object;
+  readonly redirected?: boolean;
+  readonly type?: ResponseType;
+  readonly url?: string;
+  readonly traceId?: string;
+}
diff --git a/packages/grafana-utils/tsconfig.json b/packages/grafana-utils/tsconfig.json
new file mode 100644
index 0000000..f219192
--- /dev/null
+++ b/packages/grafana-utils/tsconfig.json
@@ -0,0 +1,16 @@
+{
+  "compilerOptions": {
+    "target": "esnext",
+    "module": "esnext",
+    "jsx": "react",
+    "sourceMap": true,
+    "outDir": "dist",
+    "strict": true,
+    "moduleResolution": "node",
+    "allowSyntheticDefaultImports": true,
+    "esModuleInterop": true,
+    "skipLibCheck": true,
+    "forceConsistentCasingInFileNames": true
+  },
+  "include": ["./src", "./types", "./jest-setup.ts"]
+}
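
For reference, a minimal usage sketch of the exported timeSeriesToTableTransform (not part of the patch; the relative import path below is an assumption and would normally be the published package entry point):

import { toDataFrame, FieldType, ReducerID } from '@grafana/data';
import { timeSeriesToTableTransform } from './packages/grafana-utils/src/grafana/transformers/timeSeriesTable/timeSeriesTableTransformer';

// A single time series frame: label values become table columns and the series itself
// becomes the "Trend" frame value, reduced with the reducer chosen per refId.
const series = toDataFrame({
  refId: 'A',
  fields: [
    { name: 'Time', type: FieldType.time, values: [1, 2, 3] },
    { name: 'Value', type: FieldType.number, values: [4, 5, 6], labels: { instance: 'server-1' } },
  ],
});

// Produces one table frame for refId "A" with an "instance" column and a "Trend" frame field
// whose reduced value is the max of the series (6).
const [table] = timeSeriesToTableTransform({ refIdToStat: { A: ReducerID.max } }, [series]);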