Compare commits

..

21 Commits

Author SHA1 Message Date
Ashley Harrison
a34787d2a4 make name and id required in the theme schema 2026-01-09 15:42:57 +00:00
Ashley Harrison
2ad1f04cf3 generate theme list at build time, don't do it at runtime 2026-01-09 14:52:52 +00:00
Ashley Harrison
a52ac632b7 generate theme json schema from zod 2026-01-08 10:42:33 +00:00
Ashley Harrison
2d43af56b5 error tidy up p2 2026-01-07 18:12:43 +00:00
Ashley Harrison
164b927f9d error tidy up 2026-01-07 18:12:04 +00:00
Ashley Harrison
29c5edf3ca use zod 2026-01-07 18:09:41 +00:00
Ashley Harrison
a5747231d3 automatically discover extra themes in go backend 2026-01-06 16:32:57 +00:00
Ashley Harrison
1c2ffcb653 convert all theme files to json 2026-01-06 13:56:24 +00:00
Ayush Kaithwas
92464b2dc8 Dynamic Dashboards: Fix Content outline not being scrollable (#115827)
Enhancement: Add ScrollContainer to DashboardOutline for improved scrolling experience
2026-01-06 14:25:57 +01:00
Joe Elliott
5fe192a893 Tempo: Fix multiple streaming TraceQL metrics queries being conflated into one (#114360)
* Correctly stream multiple metrics series

Signed-off-by: Joe Elliott <number101010@gmail.com>

* cleanup

Signed-off-by: Joe Elliott <number101010@gmail.com>

* prettier fix

---------

Signed-off-by: Joe Elliott <number101010@gmail.com>
Co-authored-by: Andre Pereira <adrapereira@gmail.com>
Co-authored-by: Zoltán Bedi <zoltan.bedi@gmail.com>
2026-01-06 12:47:52 +00:00
Alexander Akhmetov
380154707b Alerting: Fix hyphen escaping in rule labels filter (#115869) 2026-01-06 12:39:28 +01:00
Peter Nguyen
217427e072 Loki Language Provider: Add missing interpolation to fetchLabelsByLabelsEndpoint (#114608)
* Plugins: Implement bug fix for loki label selectors w/ variable interpolation

* Chore: Add test to ensure result is interpolated

---------

Co-authored-by: Zoltán Bedi <zoltan.bedi@gmail.com>
2026-01-06 10:29:51 +00:00
grafana-pr-automation[bot]
585d24dafa I18n: Download translations from Crowdin (#115860)
New Crowdin translations by GitHub Action

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2026-01-06 10:01:38 +00:00
Josh Hunt
fccece3ca0 Refactor: Remove jQuery from AppWrapper (#115842) 2026-01-06 09:58:42 +00:00
Juan Cabanas
d44cab9eaf DashboardLibrary: Add validations to visualize community dashboards (#114562)
* dashboard library check added

* community dashboard section tests in progress

* tests added

* translations added

* pagination removed

* total pages removed

* test updated. pagination removed

* filters applied

* tracking event removed to be created in another pr

* slug added so url is correctly generated

* ui fix

* improvements after review

* improvements after review

* more tests added. new logic created

* fix

* changes applied

* tests removed. pattern updated

* preset of 6 elements applied

* Improve code comments and adjust variable name based on PR feedback

* Fix unit test and add extra case for regex pattern

* Fix interaction event, we were missing contentKind on BasicProvisioned flow and datasources types were not being sent

---------

Co-authored-by: nmarrs <nathanielmarrs@gmail.com>
Co-authored-by: alexandra vargas <alexa1866@gmail.com>
2026-01-06 10:38:15 +01:00
Saurabh Yadav
3d3b4dd213 Clean up packages/grafana-prometheus/src/dashboards (#115861)
* remove: Dashboard JSON files

* removed: dashboards from packages/grafana-prometheus/src/dashboards
2026-01-06 09:26:04 +00:00
Larissa Wandzura
2947d41ea8 Docs: Fixed broken links for Cloudwatch (#115848)
* updates broken links and aliases

* fixed query editor links
2026-01-05 20:56:50 +00:00
Stephanie Hingtgen
0acb030f46 Revert: OSS Seeding (115729) (#115839) 2026-01-05 12:33:55 -06:00
Stephanie Hingtgen
658a1c8228 Dashboards: Allow editing provisioned dashboards if AllowUIUpdates is set (#115804) 2026-01-05 11:46:14 -06:00
Will Browne
618316a2f7 Revert "App Plugins: Allow to define experimental pages" (#115841)
Revert "App Plugins: Allow to define experimental pages (#114232)"

This reverts commit e1a2f178e7.
2026-01-05 17:04:07 +00:00
vesalaakso-oura
a9c2117aa7 Transformers: Add smoothing transformer (#111077)
* Transformers: Add smoothing transformer

Added a smoothing transformer to help clean up noisy time series data.
It uses the ASAP algorithm to pick the most important data points while
keeping the overall shape and trends intact.

The transformer always keeps the first and last points so you get the
complete time range. I also added a test for it.

* Change category

Change category from Reformat to CalculateNewFields

* Remove first/last point preservation

* Fix operator recreation

* Simplify ASAP code

Include performance optimization as well

* Refactor interpolateFromSmoothedCurve

Break function into smaller focused functions and lift functions to the
top level

* Add isApplicable Check

Make sure the transformer is applicable for timeseries data

* Add tests for isApplicable check

* UI/UX improvements: Display effective resolution when limited by data points

Show "Effective: X" indicator when resolution is capped by the 2x data
points multiplier. Includes tooltip explaining the limit.

Memoizes calculation to prevent unnecessary recalculation on re-renders.

Example: With 72 data points and resolution set to 150, displays
"Effective: 144" since the limit is 72 x 2 = 144 (see the sketch after this commit entry).

Plus added tests

* Improve discoverability by adding tags

* Preserve Original Data

Let's preserve original data as well, makes the UX so much better.
Changed from appending (smoothed) to frame names to using a Smoothed frame name. This should match the pattern used by other transformers (e.g., regression)
Updated tests accordingly
Updated tooltip note

* Add asap tests

Basic functionality:
* returns valid DataPoint objects
* Maintain x-axis ordering

Edge cases:
* Empty array
* single data point
* filter NaN values
* all NaN values
* sort unsorted data
* negative values

* Update dark and light images

* Clear state cache

* Add feature toggle

* Conditionally add new transformation to the registry

* chore: update and regenerate feature toggles

* chore: update yarn.lock

* chore: fix transformers and imports
2026-01-05 17:53:45 +01:00
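
The "Effective: X" behaviour described in the smoothing-transformer commit message above comes down to capping the requested resolution at twice the number of data points. A minimal TypeScript sketch of that arithmetic, using illustrative names that are not taken from the PR itself (the real transformer and UI code will differ):

// Illustrative only: mirrors the 2x-data-points cap described in the commit message.
function getEffectiveResolution(requestedResolution: number, dataPointCount: number): number {
  const cap = dataPointCount * 2;
  return Math.min(requestedResolution, cap);
}

// Example from the commit message: 72 data points with resolution set to 150 -> 144.
console.log(getEffectiveResolution(150, 72)); // 144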
140 changed files with 5361 additions and 8398 deletions

View File

@@ -135,7 +135,7 @@ i18n-extract-enterprise:
@echo "Skipping i18n extract for Enterprise: not enabled"
else
i18n-extract-enterprise:
@echo "Extracting i18n strings for Enterprise"
@echo "Extracting i18n strings for Enterprise"
cd public/locales/enterprise && yarn run i18next-cli extract --sync-primary
endif
@@ -227,6 +227,10 @@ fix-cue:
gen-jsonnet:
go generate ./devenv/jsonnet
.PHONY: gen-themes
gen-themes:
go generate ./pkg/services/preference
.PHONY: update-workspace
update-workspace: gen-go
@echo "updating workspace"
@@ -244,6 +248,7 @@ build-go-fast: ## Build all Go binaries without updating workspace.
.PHONY: build-backend
build-backend: ## Build Grafana backend.
@echo "build backend"
$(MAKE) gen-themes
$(GO) run build.go $(GO_BUILD_FLAGS) build-backend
.PHONY: build-air

View File

@@ -1,11 +1,12 @@
---
aliases:
- ../data-sources/aws-CloudWatch/
- ../data-sources/aws-CloudWatch/preconfig-CloudWatch-dashboards/
- ../data-sources/aws-CloudWatch/provision-CloudWatch/
- CloudWatch/
- preconfig-CloudWatch-dashboards/
- provision-CloudWatch/
- ../../data-sources/aws-cloudwatch/configure/
- ../../data-sources/aws-cloudwatch/
- ../../data-sources/aws-cloudwatch/preconfig-cloudwatch-dashboards/
- ../../data-sources/aws-cloudwatch/provision-cloudwatch/
- ../cloudwatch/
- ../preconfig-cloudwatch-dashboards/
- ../provision-cloudwatch/
description: This document provides configuration instructions for the CloudWatch data source.
keywords:
- grafana
@@ -25,11 +26,6 @@ refs:
destination: /docs/grafana/<GRAFANA_VERSION>/panels-visualizations/visualizations/logs/
- pattern: /docs/grafana-cloud/
destination: /docs/grafana/<GRAFANA_VERSION>/panels-visualizations/visualizations/logs/
explore:
- pattern: /docs/grafana/
destination: /docs/grafana/<GRAFANA_VERSION>/explore/
- pattern: /docs/grafana-cloud/
destination: /docs/grafana/<GRAFANA_VERSION>/explore/
provisioning-data-sources:
- pattern: /docs/grafana/
destination: /docs/grafana/<GRAFANA_VERSION>/administration/provisioning/#data-sources
@@ -40,16 +36,6 @@ refs:
destination: /docs/grafana/<GRAFANA_VERSION>/setup-grafana/configure-grafana/#aws
- pattern: /docs/grafana-cloud/
destination: /docs/grafana/<GRAFANA_VERSION>/setup-grafana/configure-grafana/#aws
alerting:
- pattern: /docs/grafana/
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/
- pattern: /docs/grafana-cloud/
destination: /docs/grafana-cloud/alerting-and-irm/alerting/
build-dashboards:
- pattern: /docs/grafana/
destination: /docs/grafana/<GRAFANA_VERSION>/dashboards/build-dashboards/
- pattern: /docs/grafana-cloud/
destination: /docs/grafana/<GRAFANA_VERSION>/dashboards/build-dashboards/
data-source-management:
- pattern: /docs/grafana/
destination: /docs/grafana/<GRAFANA_VERSION>/administration/data-source-management/
@@ -153,7 +139,7 @@ You must use both an access key ID and a secret access key to authenticate.
Grafana automatically creates a link to a trace in X-Ray data source if logs contain the `@xrayTraceId` field. To use this feature, you must already have an X-Ray data source configured. For details, see the [X-Ray data source docs](/grafana/plugins/grafana-X-Ray-datasource/). To view the X-Ray link, select the log row in either the Explore view or dashboard [Logs panel](ref:logs) to view the log details section.
To log the `@xrayTraceId`, refer to the [AWS X-Ray documentation](https://docs.amazonaws.cn/en_us/xray/latest/devguide/xray-services.html). To provide the field to Grafana, your log queries must also contain the `@xrayTraceId` field, for example by using the query `fields @message, @xrayTraceId`.
To log the `@xrayTraceId`, refer to the [AWS X-Ray documentation](https://docs.aws.amazon.com/xray/latest/devguide/xray-services.html). To provide the field to Grafana, your log queries must also contain the `@xrayTraceId` field, for example by using the query `fields @message, @xrayTraceId`.
**Private data source connect** - _Only for Grafana Cloud users._

View File

@@ -34,11 +34,6 @@ refs:
destination: /docs/grafana/<GRAFANA_VERSION>/panels-visualizations/query-transform-data/#navigate-the-query-tab
- pattern: /docs/grafana-cloud/
destination: /docs/grafana/<GRAFANA_VERSION>/panels-visualizations/query-transform-data/#navigate-the-query-tab
explore:
- pattern: /docs/grafana/
destination: /docs/grafana/<GRAFANA_VERSION>/explore/
- pattern: /docs/grafana-cloud/
destination: /docs/grafana/<GRAFANA_VERSION>/explore/
alerting:
- pattern: /docs/grafana/
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/
@@ -183,7 +178,7 @@ If you use the expression field to reference another query, such as `queryA * 2`
When you select `Builder` mode within the Metric search editor, a new Account field is displayed. Use the `Account` field to specify which of the linked monitoring accounts to target for the given query. By default, the `All` option is specified, which will target all linked accounts.
While in `Code` mode, you can specify any math expression. If the Monitoring account badge displays in the query editor header, all `SEARCH` expressions entered in this field will be cross-account by default and can query metrics from linked accounts. Note that while queries run cross-account, the autocomplete feature currently doesn't fetch cross-account resources, so you'll need to manually specify resource names when writing cross-account queries.
You can limit the search to one or a set of accounts, as documented in the [AWS documentation](http://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-Unified-Cross-Account.html).
You can limit the search to one or a set of accounts, as documented in the [AWS documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-Unified-Cross-Account.html).
### Period macro
@@ -198,7 +193,7 @@ The link provided is valid for any account but displays the expected metrics onl
{{< figure src="/media/docs/cloudwatch/cloudwatch-deep-link-v12.1.png" caption="CloudWatch deep linking" >}}
This feature is not available for metrics based on [metric math expressions](#metric-math-expressions).
This feature is not available for metrics based on [metric math expressions](#use-metric-math-expressions).
### Use Metric Insights syntax
@@ -319,9 +314,9 @@ The CloudWatch plugin monitors and troubleshoots applications that span multiple
To enable cross-account observability, complete the following steps:
1. Go to the [Amazon CloudWatch documentation](http://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-Unified-Cross-Account.html) and follow the instructions for enabling cross-account observability.
1. Go to the [Amazon CloudWatch documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-Unified-Cross-Account.html) and follow the instructions for enabling cross-account observability.
1. Add [two API actions](https://grafana.com//docs/grafana/latest/datasources/aws-cloudwatch/configure/#cross-account-observability-permissions) to the IAM policy attached to the role/user running the plugin.
1. Add [two API actions](https://grafana.com/docs/grafana/latest/datasources/aws-cloudwatch/configure/#cross-account-observability-permissions) to the IAM policy attached to the role/user running the plugin.
Cross-account querying is available in the plugin through the **Logs**, **Metric search**, and **Metric Insights** modes.
After you have configured it, you'll see a **Monitoring account** badge in the query editor header.

View File

@@ -62,8 +62,7 @@
"stats": "webpack --mode production --config scripts/webpack/webpack.prod.js --profile --json > compilation-stats.json",
"storybook": "yarn workspace @grafana/ui storybook --ci",
"storybook:build": "yarn workspace @grafana/ui storybook:build",
"themes-schema": "typescript-json-schema ./tsconfig.json NewThemeOptions --include 'packages/grafana-data/src/themes/createTheme.ts' --out public/app/features/theme-playground/schema.generated.json",
"themes-generate": "yarn themes-schema && esbuild --target=es6 ./scripts/cli/generateSassVariableFiles.ts --bundle --conditions=@grafana-app/source --platform=node --tsconfig=./scripts/cli/tsconfig.json | node",
"themes-generate": "yarn workspace @grafana/data themes-schema && esbuild --target=es6 ./scripts/cli/generateSassVariableFiles.ts --bundle --conditions=@grafana-app/source --platform=node --tsconfig=./scripts/cli/tsconfig.json | node",
"themes:usage": "eslint . --ignore-pattern '*.test.ts*' --ignore-pattern '*.spec.ts*' --cache --plugin '@grafana' --rule '{ @grafana/theme-token-usage: \"error\" }'",
"typecheck": "tsc --noEmit && yarn run packages:typecheck",
"plugins:build-bundled": "echo 'bundled plugins are no longer supported'",
@@ -254,7 +253,6 @@
"ts-jest": "29.4.0",
"ts-node": "10.9.2",
"typescript": "5.9.2",
"typescript-json-schema": "^0.65.1",
"webpack": "5.101.0",
"webpack-assets-manifest": "^5.1.0",
"webpack-cli": "6.0.1",
@@ -265,7 +263,7 @@
"webpackbar": "^7.0.0",
"yaml": "^2.0.0",
"yargs": "^18.0.0",
"zod": "^4.0.0"
"zod": "^4.3.0"
},
"dependencies": {
"@bsull/augurs": "^0.10.0",
@@ -347,6 +345,7 @@
"date-fns": "4.1.0",
"debounce-promise": "3.1.2",
"diff": "^8.0.0",
"downsample": "1.4.0",
"fast-deep-equal": "^3.1.3",
"fast-json-patch": "3.1.1",
"file-saver": "2.0.5",

View File

@@ -47,11 +47,12 @@
"LICENSE_APACHE2"
],
"scripts": {
"build": "tsc -p ./tsconfig.build.json && rollup -c rollup.config.ts --configPlugin esbuild",
"build": "yarn themes-schema && tsc -p ./tsconfig.build.json && rollup -c rollup.config.ts --configPlugin esbuild",
"clean": "rimraf ./dist ./compiled ./unstable ./package.tgz",
"typecheck": "tsc --emitDeclarationOnly false --noEmit",
"prepack": "cp package.json package.json.bak && node ../../scripts/prepare-npm-package.js",
"postpack": "mv package.json.bak package.json"
"postpack": "mv package.json.bak package.json",
"themes-schema": "tsx ./src/themes/scripts/generateSchema.ts"
},
"dependencies": {
"@braintree/sanitize-url": "7.0.1",
@@ -81,10 +82,12 @@
"tinycolor2": "1.6.0",
"tslib": "2.8.1",
"uplot": "1.6.32",
"xss": "^1.0.14"
"xss": "^1.0.14",
"zod": "^4.3.0"
},
"devDependencies": {
"@grafana/scenes": "6.38.0",
"@rollup/plugin-json": "6.1.0",
"@rollup/plugin-node-resolve": "16.0.1",
"@testing-library/react": "16.3.0",
"@types/history": "4.7.11",
@@ -101,6 +104,7 @@
"rollup": "^4.22.4",
"rollup-plugin-esbuild": "6.2.1",
"rollup-plugin-node-externals": "^8.0.0",
"tsx": "^4.21.0",
"typescript": "5.9.2"
},
"peerDependencies": {

View File

@@ -1,3 +1,4 @@
import json from '@rollup/plugin-json';
import { createRequire } from 'node:module';
import { entryPoint, plugins, esmOutput, cjsOutput } from '../rollup.config.parts';
@@ -8,13 +9,13 @@ const pkg = rq('./package.json');
export default [
{
input: entryPoint,
plugins,
plugins: [...plugins, json()],
output: [cjsOutput(pkg, 'grafana-data'), esmOutput(pkg, 'grafana-data')],
treeshake: false,
},
{
input: 'src/unstable.ts',
plugins,
plugins: [...plugins, json()],
output: [cjsOutput(pkg, 'grafana-data'), esmOutput(pkg, 'grafana-data')],
treeshake: false,
},

View File

@@ -106,3 +106,4 @@ export { findNumericFieldMinMax } from '../field/fieldOverrides';
export { type PanelOptionsSupplier } from '../panel/PanelPlugin';
export { sanitize, sanitizeUrl } from '../text/sanitize';
export { type NestedValueAccess, type NestedPanelOptions, isNestedPanelOptions } from '../utils/OptionsUIBuilders';
export { NewThemeOptionsSchema } from '../themes/createTheme';

View File

@@ -1,83 +1,103 @@
import { merge } from 'lodash';
import { z } from 'zod';
import { alpha, darken, emphasize, getContrastRatio, lighten } from './colorManipulator';
import { palette } from './palette';
import { DeepPartial, ThemeRichColor } from './types';
import { DeepRequired, ThemeRichColor, ThemeRichColorInputSchema } from './types';
const ThemeColorsModeSchema = z.enum(['light', 'dark']);
/** @internal */
export type ThemeColorsMode = 'light' | 'dark';
export type ThemeColorsMode = z.infer<typeof ThemeColorsModeSchema>;
const createThemeColorsBaseSchema = <TColor>(color: TColor) =>
z
.object({
mode: ThemeColorsModeSchema,
primary: color,
secondary: color,
info: color,
error: color,
success: color,
warning: color,
text: z.object({
primary: z.string().optional(),
secondary: z.string().optional(),
disabled: z.string().optional(),
link: z.string().optional(),
/** Used for auto white or dark text on colored backgrounds */
maxContrast: z.string().optional(),
}),
background: z.object({
/** Dashboard and body background */
canvas: z.string().optional(),
/** Primary content pane background (panels etc) */
primary: z.string().optional(),
/** Cards and elements that need to stand out on the primary background */
secondary: z.string().optional(),
/**
* For popovers and menu backgrounds. This is the same color as primary in most light themes but in dark
* themes it has a brighter shade to help give it contrast against the primary background.
**/
elevated: z.string().optional(),
}),
border: z.object({
weak: z.string().optional(),
medium: z.string().optional(),
strong: z.string().optional(),
}),
gradients: z.object({
brandVertical: z.string().optional(),
brandHorizontal: z.string().optional(),
}),
action: z.object({
/** Used for selected menu item / select option */
selected: z.string().optional(),
/**
* @alpha (Do not use from plugins)
* Used for selected items when background only change is not enough (Currently only used for FilterPill)
**/
selectedBorder: z.string().optional(),
/** Used for hovered menu item / select option */
hover: z.string().optional(),
/** Used for button/colored background hover opacity */
hoverOpacity: z.number().optional(),
/** Used focused menu item / select option */
focus: z.string().optional(),
/** Used for disabled buttons and inputs */
disabledBackground: z.string().optional(),
/** Disabled text */
disabledText: z.string().optional(),
/** Disablerd opacity */
disabledOpacity: z.number().optional(),
}),
hoverFactor: z.number(),
contrastThreshold: z.number(),
tonalOffset: z.number(),
})
.partial();
// Need to override the zod type to include the generic properly
/** @internal */
export interface ThemeColorsBase<TColor> {
mode: ThemeColorsMode;
export type ThemeColorsBase<TColor> = DeepRequired<
Omit<
z.infer<ReturnType<typeof createThemeColorsBaseSchema>>,
'primary' | 'secondary' | 'info' | 'error' | 'success' | 'warning'
>
> & {
primary: TColor;
secondary: TColor;
info: TColor;
error: TColor;
success: TColor;
warning: TColor;
text: {
primary: string;
secondary: string;
disabled: string;
link: string;
/** Used for auto white or dark text on colored backgrounds */
maxContrast: string;
};
background: {
/** Dashboard and body background */
canvas: string;
/** Primary content pane background (panels etc) */
primary: string;
/** Cards and elements that need to stand out on the primary background */
secondary: string;
/**
* For popovers and menu backgrounds. This is the same color as primary in most light themes but in dark
* themes it has a brighter shade to help give it contrast against the primary background.
**/
elevated: string;
};
border: {
weak: string;
medium: string;
strong: string;
};
gradients: {
brandVertical: string;
brandHorizontal: string;
};
action: {
/** Used for selected menu item / select option */
selected: string;
/**
* @alpha (Do not use from plugins)
* Used for selected items when background only change is not enough (Currently only used for FilterPill)
**/
selectedBorder: string;
/** Used for hovered menu item / select option */
hover: string;
/** Used for button/colored background hover opacity */
hoverOpacity: number;
/** Used focused menu item / select option */
focus: string;
/** Used for disabled buttons and inputs */
disabledBackground: string;
/** Disabled text */
disabledText: string;
/** Disablerd opacity */
disabledOpacity: number;
};
hoverFactor: number;
contrastThreshold: number;
tonalOffset: number;
}
};
export interface ThemeHoverStrengh {}
@@ -89,8 +109,10 @@ export interface ThemeColors extends ThemeColorsBase<ThemeRichColor> {
emphasize(color: string, amount?: number): string;
}
export const ThemeColorsInputSchema = createThemeColorsBaseSchema(ThemeRichColorInputSchema);
/** @internal */
export type ThemeColorsInput = DeepPartial<ThemeColorsBase<ThemeRichColor>>;
export type ThemeColorsInput = z.infer<typeof ThemeColorsInputSchema>;
class DarkColors implements ThemeColorsBase<Partial<ThemeRichColor>> {
mode: ThemeColorsMode = 'dark';

View File

@@ -1,3 +1,5 @@
import { z } from 'zod';
/** @beta */
export interface ThemeShape {
/**
@@ -34,9 +36,12 @@ export interface Radii {
}
/** @internal */
export interface ThemeShapeInput {
borderRadius?: number;
}
export const ThemeShapeInputSchema = z.object({
borderRadius: z.int().nonnegative().optional(),
});
/** @internal */
export type ThemeShapeInput = z.infer<typeof ThemeShapeInputSchema>;
export function createShape(options: ThemeShapeInput): ThemeShape {
const baseBorderRadius = options.borderRadius ?? 6;

View File

@@ -1,11 +1,15 @@
// Code based on Material UI
// The MIT License (MIT)
// Copyright (c) 2014 Call-Em-All
import { z } from 'zod';
/** @internal */
export type ThemeSpacingOptions = {
gridSize?: number;
};
export const ThemeSpacingOptionsSchema = z.object({
gridSize: z.int().positive().optional(),
});
/** @internal */
export type ThemeSpacingOptions = z.infer<typeof ThemeSpacingOptionsSchema>;
/** @internal */
export type ThemeSpacingArgument = number | string;

View File

@@ -1,28 +1,37 @@
import * as z from 'zod';
import { createBreakpoints } from './breakpoints';
import { createColors, ThemeColorsInput } from './createColors';
import { createColors, ThemeColorsInputSchema } from './createColors';
import { createComponents } from './createComponents';
import { createShadows } from './createShadows';
import { createShape, ThemeShapeInput } from './createShape';
import { createSpacing, ThemeSpacingOptions } from './createSpacing';
import { createShape, ThemeShapeInputSchema } from './createShape';
import { createSpacing, ThemeSpacingOptionsSchema } from './createSpacing';
import { createTransitions } from './createTransitions';
import { createTypography, ThemeTypographyInput } from './createTypography';
import { createTypography, ThemeTypographyInputSchema } from './createTypography';
import { createV1Theme } from './createV1Theme';
import { createVisualizationColors, ThemeVisualizationColorsInput } from './createVisualizationColors';
import { createVisualizationColors, ThemeVisualizationColorsInputSchema } from './createVisualizationColors';
import { GrafanaTheme2 } from './types';
import { zIndex } from './zIndex';
/** @internal */
export interface NewThemeOptions {
name?: string;
colors?: ThemeColorsInput;
spacing?: ThemeSpacingOptions;
shape?: ThemeShapeInput;
typography?: ThemeTypographyInput;
visualization?: ThemeVisualizationColorsInput;
}
export const NewThemeOptionsSchema = z.object({
name: z.string(),
id: z.string(),
colors: ThemeColorsInputSchema.optional(),
spacing: ThemeSpacingOptionsSchema.optional(),
shape: ThemeShapeInputSchema.optional(),
typography: ThemeTypographyInputSchema.optional(),
visualization: ThemeVisualizationColorsInputSchema.optional(),
});
/** @internal */
export function createTheme(options: NewThemeOptions = {}): GrafanaTheme2 {
export type NewThemeOptions = z.infer<typeof NewThemeOptionsSchema>;
/** @internal */
export function createTheme(
options: Omit<NewThemeOptions, 'id' | 'name'> & {
name?: NewThemeOptions['name'];
} = {}
): GrafanaTheme2 {
const {
name,
colors: colorsInput = {},

View File

@@ -1,6 +1,7 @@
// Code based on Material UI
// The MIT License (MIT)
// Copyright (c) 2014 Call-Em-All
import { z } from 'zod';
import { ThemeColors } from './createColors';
@@ -40,18 +41,20 @@ export interface ThemeTypographyVariant {
letterSpacing?: string;
}
export interface ThemeTypographyInput {
fontFamily?: string;
fontFamilyMonospace?: string;
fontSize?: number;
fontWeightLight?: number;
fontWeightRegular?: number;
fontWeightMedium?: number;
fontWeightBold?: number;
// hat's the font-size on the html element.
export const ThemeTypographyInputSchema = z.object({
fontFamily: z.string().optional(),
fontFamilyMonospace: z.string().optional(),
fontSize: z.number().positive().optional(),
fontWeightLight: z.number().positive().optional(),
fontWeightRegular: z.number().positive().optional(),
fontWeightMedium: z.number().positive().optional(),
fontWeightBold: z.number().positive().optional(),
// what's the font-size on the html element.
// 16px is the default font-size used by browsers.
htmlFontSize?: number;
}
htmlFontSize: z.number().positive().optional(),
});
export type ThemeTypographyInput = z.infer<typeof ThemeTypographyInputSchema>;
const defaultFontFamily = "'Inter', 'Helvetica', 'Arial', sans-serif";
const defaultFontFamilyMonospace = "'Roboto Mono', monospace";

View File

@@ -1,3 +1,5 @@
import { z } from 'zod';
import { FALLBACK_COLOR } from '../types/fieldColor';
import { ThemeColors } from './createColors';
@@ -26,29 +28,44 @@ export interface ThemeVizColor<T extends ThemeVizColorName> {
type ThemeVizColorName = 'red' | 'orange' | 'yellow' | 'green' | 'blue' | 'purple';
type ThemeVizColorShadeName<T extends ThemeVizColorName> =
| `super-light-${T}`
| `light-${T}`
| T
| `semi-dark-${T}`
| `dark-${T}`;
const createShadeSchema = <T>(color: T extends ThemeVizColorName ? T : never) =>
z.enum([`super-light-${color}`, `light-${color}`, color, `semi-dark-${color}`, `dark-${color}`]);
type ThemeVizHueGeneric<T> = T extends ThemeVizColorName
? {
name: T;
shades: Array<ThemeVizColor<T>>;
}
: never;
type ThemeVizColorShadeName<T extends ThemeVizColorName> = z.infer<ReturnType<typeof createShadeSchema<T>>>;
const createHueSchema = <T>(color: T extends ThemeVizColorName ? T : never) =>
z.object({
name: z.literal(color),
shades: z.array(
z.object({
color: z.string(),
name: createShadeSchema(color),
aliases: z.array(z.string()).optional(),
primary: z.boolean().optional(),
})
),
});
const ThemeVizHueSchema = z.union([
createHueSchema('red'),
createHueSchema('orange'),
createHueSchema('yellow'),
createHueSchema('green'),
createHueSchema('blue'),
createHueSchema('purple'),
]);
/**
* @alpha
*/
export type ThemeVizHue = ThemeVizHueGeneric<ThemeVizColorName>;
export type ThemeVizHue = z.infer<typeof ThemeVizHueSchema>;
export type ThemeVisualizationColorsInput = {
hues?: ThemeVizHue[];
palette?: string[];
};
export const ThemeVisualizationColorsInputSchema = z.object({
hues: z.array(ThemeVizHueSchema).optional(),
palette: z.array(z.string()).optional(),
});
export type ThemeVisualizationColorsInput = z.infer<typeof ThemeVisualizationColorsInputSchema>;
/**
* @internal

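Because the hue schema is a union of one object schema per color family, a hue only parses when its shade names belong to that same family. A small sketch of how that plays out with safeParse (color values are made up):

import { ThemeVisualizationColorsInputSchema } from './createVisualizationColors';

// 'dark-red' is a valid shade name for the red hue...
const ok = ThemeVisualizationColorsInputSchema.safeParse({
  hues: [{ name: 'red', shades: [{ color: '#c4162a', name: 'dark-red', primary: true }] }],
});
console.log(ok.success); // true

// ...but a blue shade name under the red hue fails every branch of the union.
const bad = ThemeVisualizationColorsInputSchema.safeParse({
  hues: [{ name: 'red', shades: [{ color: '#c4162a', name: 'dark-blue' }] }],
});
console.log(bad.success); // false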
View File

@@ -1,6 +1,6 @@
import { Registry, RegistryItem } from '../utils/Registry';
import { createTheme } from './createTheme';
import { createTheme, NewThemeOptionsSchema } from './createTheme';
import * as extraThemes from './themeDefinitions';
import { GrafanaTheme2 } from './types';
@@ -42,9 +42,6 @@ export function getBuiltInThemes(allowedExtras: string[]) {
return sortedThemes;
}
/**
* There is also a backend list at pkg/services/preference/themes.go
*/
const themeRegistry = new Registry<ThemeRegistryItem>(() => {
return [
{ id: 'system', name: 'System preference', build: getSystemPreferenceTheme },
@@ -53,13 +50,19 @@ const themeRegistry = new Registry<ThemeRegistryItem>(() => {
];
});
for (const [id, theme] of Object.entries(extraThemes)) {
themeRegistry.register({
id,
name: theme.name ?? '',
build: () => createTheme(theme),
isExtra: true,
});
for (const [name, json] of Object.entries(extraThemes)) {
const result = NewThemeOptionsSchema.safeParse(json);
if (!result.success) {
console.error(`Invalid theme definition for theme ${name}: ${result.error.message}`);
} else {
const theme = result.data;
themeRegistry.register({
id: theme.id,
name: theme.name,
build: () => createTheme(theme),
isExtra: true,
});
}
}
function getSystemPreferenceTheme() {

View File

@@ -0,0 +1,608 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {
"name": {
"type": "string"
},
"id": {
"type": "string"
},
"colors": {
"type": "object",
"properties": {
"mode": {
"type": "string",
"enum": ["light", "dark"]
},
"primary": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"main": {
"type": "string"
},
"shade": {
"type": "string"
},
"text": {
"type": "string"
},
"border": {
"type": "string"
},
"transparent": {
"type": "string"
},
"borderTransparent": {
"type": "string"
},
"contrastText": {
"type": "string"
}
},
"additionalProperties": false
},
"secondary": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"main": {
"type": "string"
},
"shade": {
"type": "string"
},
"text": {
"type": "string"
},
"border": {
"type": "string"
},
"transparent": {
"type": "string"
},
"borderTransparent": {
"type": "string"
},
"contrastText": {
"type": "string"
}
},
"additionalProperties": false
},
"info": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"main": {
"type": "string"
},
"shade": {
"type": "string"
},
"text": {
"type": "string"
},
"border": {
"type": "string"
},
"transparent": {
"type": "string"
},
"borderTransparent": {
"type": "string"
},
"contrastText": {
"type": "string"
}
},
"additionalProperties": false
},
"error": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"main": {
"type": "string"
},
"shade": {
"type": "string"
},
"text": {
"type": "string"
},
"border": {
"type": "string"
},
"transparent": {
"type": "string"
},
"borderTransparent": {
"type": "string"
},
"contrastText": {
"type": "string"
}
},
"additionalProperties": false
},
"success": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"main": {
"type": "string"
},
"shade": {
"type": "string"
},
"text": {
"type": "string"
},
"border": {
"type": "string"
},
"transparent": {
"type": "string"
},
"borderTransparent": {
"type": "string"
},
"contrastText": {
"type": "string"
}
},
"additionalProperties": false
},
"warning": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"main": {
"type": "string"
},
"shade": {
"type": "string"
},
"text": {
"type": "string"
},
"border": {
"type": "string"
},
"transparent": {
"type": "string"
},
"borderTransparent": {
"type": "string"
},
"contrastText": {
"type": "string"
}
},
"additionalProperties": false
},
"text": {
"type": "object",
"properties": {
"primary": {
"type": "string"
},
"secondary": {
"type": "string"
},
"disabled": {
"type": "string"
},
"link": {
"type": "string"
},
"maxContrast": {
"type": "string"
}
},
"additionalProperties": false
},
"background": {
"type": "object",
"properties": {
"canvas": {
"type": "string"
},
"primary": {
"type": "string"
},
"secondary": {
"type": "string"
},
"elevated": {
"type": "string"
}
},
"additionalProperties": false
},
"border": {
"type": "object",
"properties": {
"weak": {
"type": "string"
},
"medium": {
"type": "string"
},
"strong": {
"type": "string"
}
},
"additionalProperties": false
},
"gradients": {
"type": "object",
"properties": {
"brandVertical": {
"type": "string"
},
"brandHorizontal": {
"type": "string"
}
},
"additionalProperties": false
},
"action": {
"type": "object",
"properties": {
"selected": {
"type": "string"
},
"selectedBorder": {
"type": "string"
},
"hover": {
"type": "string"
},
"hoverOpacity": {
"type": "number"
},
"focus": {
"type": "string"
},
"disabledBackground": {
"type": "string"
},
"disabledText": {
"type": "string"
},
"disabledOpacity": {
"type": "number"
}
},
"additionalProperties": false
},
"hoverFactor": {
"type": "number"
},
"contrastThreshold": {
"type": "number"
},
"tonalOffset": {
"type": "number"
}
},
"additionalProperties": false
},
"spacing": {
"type": "object",
"properties": {
"gridSize": {
"type": "integer",
"exclusiveMinimum": 0,
"maximum": 9007199254740991
}
},
"additionalProperties": false
},
"shape": {
"type": "object",
"properties": {
"borderRadius": {
"type": "integer",
"minimum": 0,
"maximum": 9007199254740991
}
},
"additionalProperties": false
},
"typography": {
"type": "object",
"properties": {
"fontFamily": {
"type": "string"
},
"fontFamilyMonospace": {
"type": "string"
},
"fontSize": {
"type": "number",
"exclusiveMinimum": 0
},
"fontWeightLight": {
"type": "number",
"exclusiveMinimum": 0
},
"fontWeightRegular": {
"type": "number",
"exclusiveMinimum": 0
},
"fontWeightMedium": {
"type": "number",
"exclusiveMinimum": 0
},
"fontWeightBold": {
"type": "number",
"exclusiveMinimum": 0
},
"htmlFontSize": {
"type": "number",
"exclusiveMinimum": 0
}
},
"additionalProperties": false
},
"visualization": {
"type": "object",
"properties": {
"hues": {
"type": "array",
"items": {
"anyOf": [
{
"type": "object",
"properties": {
"name": {
"type": "string",
"const": "red"
},
"shades": {
"type": "array",
"items": {
"type": "object",
"properties": {
"color": {
"type": "string"
},
"name": {
"type": "string",
"enum": ["super-light-red", "light-red", "red", "semi-dark-red", "dark-red"]
},
"aliases": {
"type": "array",
"items": {
"type": "string"
}
},
"primary": {
"type": "boolean"
}
},
"required": ["color", "name"],
"additionalProperties": false
}
}
},
"required": ["name", "shades"],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"name": {
"type": "string",
"const": "orange"
},
"shades": {
"type": "array",
"items": {
"type": "object",
"properties": {
"color": {
"type": "string"
},
"name": {
"type": "string",
"enum": ["super-light-orange", "light-orange", "orange", "semi-dark-orange", "dark-orange"]
},
"aliases": {
"type": "array",
"items": {
"type": "string"
}
},
"primary": {
"type": "boolean"
}
},
"required": ["color", "name"],
"additionalProperties": false
}
}
},
"required": ["name", "shades"],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"name": {
"type": "string",
"const": "yellow"
},
"shades": {
"type": "array",
"items": {
"type": "object",
"properties": {
"color": {
"type": "string"
},
"name": {
"type": "string",
"enum": ["super-light-yellow", "light-yellow", "yellow", "semi-dark-yellow", "dark-yellow"]
},
"aliases": {
"type": "array",
"items": {
"type": "string"
}
},
"primary": {
"type": "boolean"
}
},
"required": ["color", "name"],
"additionalProperties": false
}
}
},
"required": ["name", "shades"],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"name": {
"type": "string",
"const": "green"
},
"shades": {
"type": "array",
"items": {
"type": "object",
"properties": {
"color": {
"type": "string"
},
"name": {
"type": "string",
"enum": ["super-light-green", "light-green", "green", "semi-dark-green", "dark-green"]
},
"aliases": {
"type": "array",
"items": {
"type": "string"
}
},
"primary": {
"type": "boolean"
}
},
"required": ["color", "name"],
"additionalProperties": false
}
}
},
"required": ["name", "shades"],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"name": {
"type": "string",
"const": "blue"
},
"shades": {
"type": "array",
"items": {
"type": "object",
"properties": {
"color": {
"type": "string"
},
"name": {
"type": "string",
"enum": ["super-light-blue", "light-blue", "blue", "semi-dark-blue", "dark-blue"]
},
"aliases": {
"type": "array",
"items": {
"type": "string"
}
},
"primary": {
"type": "boolean"
}
},
"required": ["color", "name"],
"additionalProperties": false
}
}
},
"required": ["name", "shades"],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"name": {
"type": "string",
"const": "purple"
},
"shades": {
"type": "array",
"items": {
"type": "object",
"properties": {
"color": {
"type": "string"
},
"name": {
"type": "string",
"enum": ["super-light-purple", "light-purple", "purple", "semi-dark-purple", "dark-purple"]
},
"aliases": {
"type": "array",
"items": {
"type": "string"
}
},
"primary": {
"type": "boolean"
}
},
"required": ["color", "name"],
"additionalProperties": false
}
}
},
"required": ["name", "shades"],
"additionalProperties": false
}
]
}
},
"palette": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false
}
},
"required": ["name", "id"],
"additionalProperties": false
}

View File

@@ -0,0 +1,19 @@
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { NewThemeOptionsSchema } from '../createTheme';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
fs.writeFileSync(
path.join(__dirname, '../schema.generated.json'),
JSON.stringify(
NewThemeOptionsSchema.toJSONSchema({
target: 'draft-07',
}),
undefined,
2
)
);
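Since the output targets draft-07, the generated file can also serve as a validation artifact outside of zod. A hedged sketch of a cross-check over the theme JSON files, assuming a standard JSON Schema validator such as ajv is available; the import paths and the idea of a separate check are illustrative, not part of this change:

import Ajv from 'ajv';

import schema from '../schema.generated.json';
import aubergine from '../themeDefinitions/aubergine.json';

const ajv = new Ajv();
const validate = ajv.compile(schema);

if (!validate(aubergine)) {
  // ajv collects structural errors, e.g. a missing required "id" or an unknown property.
  console.error(validate.errors);
}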

View File

@@ -0,0 +1,50 @@
{
"name": "Aubergine",
"id": "aubergine",
"colors": {
"mode": "dark",
"border": {
"weak": "#4F2A3D",
"medium": "#6A3C4B",
"strong": "#8C5A69"
},
"text": {
"primary": "#E5D0D6",
"secondary": "#D1A8C4",
"disabled": "#B7A0A6",
"link": "#A56BB6",
"maxContrast": "#FFFFFF"
},
"primary": {
"main": "#8C5A69"
},
"secondary": {
"main": "#6A3C4B",
"text": "#D1A8C4",
"border": "#8C5A69"
},
"background": {
"canvas": "#2E1F2D",
"primary": "#3C2136",
"secondary": "#4A2D47",
"elevated": "#4A2D47"
},
"action": {
"hover": "#6A3C4B",
"selected": "#8C5A69",
"selectedBorder": "#FFB300",
"focus": "#A56BB6",
"hoverOpacity": 0.1,
"disabledText": "#B7A0A6",
"disabledBackground": "#4A2D47",
"disabledOpacity": 0.38
},
"gradients": {
"brandHorizontal": "linear-gradient(270deg, #6A3C4B 0%, #A56BB6 100%)",
"brandVertical": "linear-gradient(0deg, #6A3C4B 0%, #A56BB6 100%)"
},
"contrastThreshold": 4,
"hoverFactor": 0.07,
"tonalOffset": 0.15
}
}

View File

@@ -1,53 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const aubergineTheme: NewThemeOptions = {
name: 'Aubergine',
colors: {
mode: 'dark',
border: {
weak: '#4F2A3D',
medium: '#6A3C4B',
strong: '#8C5A69',
},
text: {
primary: '#E5D0D6',
secondary: '#D1A8C4',
disabled: '#B7A0A6',
link: '#A56BB6',
maxContrast: '#FFFFFF',
},
primary: {
main: '#8C5A69',
},
secondary: {
main: '#6A3C4B',
text: '#D1A8C4',
border: '#8C5A69',
},
background: {
canvas: '#2E1F2D',
primary: '#3C2136',
secondary: '#4A2D47',
elevated: '#4A2D47',
},
action: {
hover: '#6A3C4B',
selected: '#8C5A69',
selectedBorder: '#FFB300',
focus: '#A56BB6',
hoverOpacity: 0.1,
disabledText: '#B7A0A6',
disabledBackground: '#4A2D47',
disabledOpacity: 0.38,
},
gradients: {
brandHorizontal: 'linear-gradient(270deg, #6A3C4B 0%, #A56BB6 100%)',
brandVertical: 'linear-gradient(0deg, #6A3C4B 0%, #A56BB6 100%)',
},
contrastThreshold: 4,
hoverFactor: 0.07,
tonalOffset: 0.15,
},
};
export default aubergineTheme;

View File

@@ -0,0 +1,60 @@
{
"name": "Debug",
"id": "debug",
"colors": {
"mode": "dark",
"background": {
"canvas": "#000033",
"primary": "#000044",
"secondary": "#000055",
"elevated": "#000055"
},
"text": {
"primary": "#bbbb00",
"secondary": "#888800",
"disabled": "#444400",
"link": "#dddd00",
"maxContrast": "#ffff00"
},
"border": {
"weak": "#ff000044",
"medium": "#ff000088",
"strong": "#ff0000ff"
},
"primary": {
"border": "#ff000088",
"text": "#cccc00",
"contrastText": "#ffff00",
"shade": "#9900dd"
},
"secondary": {
"border": "#ff000088",
"text": "#cccc00",
"contrastText": "#ffff00",
"shade": "#9900dd"
},
"info": {
"shade": "#9900dd"
},
"warning": {
"shade": "#9900dd"
},
"success": {
"shade": "#9900dd"
},
"error": {
"shade": "#9900dd"
},
"action": {
"hover": "#9900dd",
"focus": "#6600aa",
"selected": "#440088"
}
},
"shape": {
"borderRadius": 8
},
"spacing": {
"gridSize": 10
}
}

View File

@@ -1,71 +0,0 @@
import { NewThemeOptions } from '../createTheme';
/**
* a very ugly theme that is useful for debugging and checking if the theme is applied correctly
* borders are red,
* backgrounds are blue,
* text is yellow,
* and grafana loves you <3
* (also corners are rounded, action states (hover, focus, selected) are purple)
*/
const debugTheme: NewThemeOptions = {
name: 'Debug',
colors: {
mode: 'dark',
background: {
canvas: '#000033',
primary: '#000044',
secondary: '#000055',
elevated: '#000055',
},
text: {
primary: '#bbbb00',
secondary: '#888800',
disabled: '#444400',
link: '#dddd00',
maxContrast: '#ffff00',
},
border: {
weak: '#ff000044',
medium: '#ff000088',
strong: '#ff0000ff',
},
primary: {
border: '#ff000088',
text: '#cccc00',
contrastText: '#ffff00',
shade: '#9900dd',
},
secondary: {
border: '#ff000088',
text: '#cccc00',
contrastText: '#ffff00',
shade: '#9900dd',
},
info: {
shade: '#9900dd',
},
warning: {
shade: '#9900dd',
},
success: {
shade: '#9900dd',
},
error: {
shade: '#9900dd',
},
action: {
hover: '#9900dd',
focus: '#6600aa',
selected: '#440088',
},
},
shape: {
borderRadius: 8,
},
spacing: {
gridSize: 10,
},
};
export default debugTheme;

View File

@@ -0,0 +1,71 @@
{
"name": "Desert bloom",
"id": "desertbloom",
"colors": {
"mode": "light",
"border": {
"weak": "rgba(0, 0, 0, 0.12)",
"medium": "rgba(0, 0, 0, 0.20)",
"strong": "rgba(0, 0, 0, 0.30)"
},
"text": {
"primary": "#333333",
"secondary": "#555555",
"disabled": "rgba(0, 0, 0, 0.5)",
"link": "#1A82E2",
"maxContrast": "#000000"
},
"primary": {
"main": "#FF6F61",
"text": "#FE6F61",
"border": "#E55B4D",
"name": "primary",
"shade": "#E55B4D",
"transparent": "#FF6F6126",
"contrastText": "#FFFFFF",
"borderTransparent": "#FF6F6140"
},
"secondary": {
"main": "#FFFFFF",
"text": "#695f53",
"border": "#d9cec0",
"name": "secondary",
"shade": "#d9cec0",
"transparent": "#FFFFFF26",
"contrastText": "#4c4339",
"borderTransparent": "#FFFFFF40"
},
"info": {
"main": "#1A82E2"
},
"success": {
"main": "#4CAF50"
},
"warning": {
"main": "#FFC107"
},
"background": {
"canvas": "#FFF8F0",
"primary": "#FFFFFF",
"secondary": "#f9f3e8",
"elevated": "#FFFFFF"
},
"action": {
"hover": "rgba(168, 156, 134, 0.12)",
"selected": "rgba(168, 156, 134, 0.36)",
"selectedBorder": "#FF6F61",
"focus": "rgba(168, 156, 134, 0.50)",
"hoverOpacity": 0.08,
"disabledText": "rgba(168, 156, 134, 0.5)",
"disabledBackground": "rgba(168, 156, 134, 0.06)",
"disabledOpacity": 0.38
},
"gradients": {
"brandHorizontal": "linear-gradient(270deg,rgba(255, 111, 97, 1) 0%, rgba(255, 167, 58, 1) 100%)",
"brandVertical": "linear-gradient(0deg, rgba(255, 111, 97, 1) 0%, rgba(255, 167, 58, 1) 100%)"
},
"contrastThreshold": 3,
"hoverFactor": 0.03,
"tonalOffset": 0.15
}
}

View File

@@ -1,75 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const desertBloomTheme: NewThemeOptions = {
name: 'Desert bloom',
colors: {
mode: 'light',
border: {
weak: 'rgba(0, 0, 0, 0.12)',
medium: 'rgba(0, 0, 0, 0.20)',
strong: 'rgba(0, 0, 0, 0.30)',
},
text: {
primary: '#333333',
secondary: '#555555',
disabled: 'rgba(0, 0, 0, 0.5)',
link: '#1A82E2',
maxContrast: '#000000',
},
primary: {
main: '#FF6F61',
text: '#FE6F61',
border: '#E55B4D',
name: 'primary',
shade: '#E55B4D',
transparent: '#FF6F6126',
contrastText: '#FFFFFF',
borderTransparent: '#FF6F6140',
},
secondary: {
main: '#FFFFFF',
text: '#695f53',
border: '#d9cec0',
name: 'secondary',
shade: '#d9cec0',
transparent: '#FFFFFF26',
contrastText: '#4c4339',
borderTransparent: '#FFFFFF40',
},
info: {
main: '#1A82E2',
},
success: {
main: '#4CAF50',
},
warning: {
main: '#FFC107',
},
background: {
canvas: '#FFF8F0',
primary: '#FFFFFF',
secondary: '#f9f3e8',
elevated: '#FFFFFF',
},
action: {
hover: 'rgba(168, 156, 134, 0.12)',
selected: 'rgba(168, 156, 134, 0.36)',
selectedBorder: '#FF6F61',
focus: 'rgba(168, 156, 134, 0.50)',
hoverOpacity: 0.08,
disabledText: 'rgba(168, 156, 134, 0.5)',
disabledBackground: 'rgba(168, 156, 134, 0.06)',
disabledOpacity: 0.38,
},
gradients: {
brandHorizontal: 'linear-gradient(270deg,rgba(255, 111, 97, 1) 0%, rgba(255, 167, 58, 1) 100%)',
brandVertical: 'linear-gradient(0deg, rgba(255, 111, 97, 1) 0%, rgba(255, 167, 58, 1) 100%)',
},
contrastThreshold: 3,
hoverFactor: 0.03,
tonalOffset: 0.15,
},
};
export default desertBloomTheme;

View File

@@ -0,0 +1,62 @@
{
"name": "Gilded grove",
"id": "gildedgrove",
"colors": {
"mode": "dark",
"border": {
"weak": "rgba(200, 200, 180, 0.12)",
"medium": "rgba(200, 200, 180, 0.20)",
"strong": "rgba(200, 200, 180, 0.30)"
},
"text": {
"primary": "rgb(250, 250, 239)",
"secondary": "rgba(200, 200, 180, 0.85)",
"disabled": "rgba(200, 200, 180, 0.6)",
"link": "#FEAC34",
"maxContrast": "#FFFFFF"
},
"primary": {
"main": "#FEAC34",
"text": "#FFD783",
"border": "#FFD783",
"name": "primary",
"shade": "rgb(255, 173, 80)",
"transparent": "#FEAC3426",
"contrastText": "#111614",
"borderTransparent": "#FFD78340"
},
"secondary": {
"main": "rgba(200, 200, 180, 0.10)",
"shade": "rgba(200, 200, 180, 0.14)",
"transparent": "rgba(200, 200, 180, 0.08)",
"text": "rgb(200, 200, 180)",
"contrastText": "rgb(200, 200, 180)",
"border": "rgba(200, 200, 180, 0.08)",
"name": "secondary",
"borderTransparent": "rgba(200, 200, 180, 0.25)"
},
"background": {
"canvas": "#111614",
"primary": "#1d2220",
"secondary": "#27312E",
"elevated": "#27312E"
},
"action": {
"hover": "rgba(200, 200, 180, 0.16)",
"selected": "rgba(200, 200, 180, 0.12)",
"selectedBorder": "#FEAC34",
"focus": "rgba(200, 200, 180, 0.16)",
"hoverOpacity": 0.08,
"disabledText": "rgba(200, 200, 180, 0.6)",
"disabledBackground": "rgba(200, 200, 180, 0.04)",
"disabledOpacity": 0.38
},
"gradients": {
"brandHorizontal": "linear-gradient(270deg, #FEAC34 0%, #FFD783 100%)",
"brandVertical": "linear-gradient(0.01deg, #FEAC34 0.01%, #FFD783 99.99%)"
},
"contrastThreshold": 3,
"hoverFactor": 0.03,
"tonalOffset": 0.15
}
}

View File

@@ -1,65 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const gildedGroveTheme: NewThemeOptions = {
name: 'Gilded grove',
colors: {
mode: 'dark',
border: {
weak: 'rgba(200, 200, 180, 0.12)',
medium: 'rgba(200, 200, 180, 0.20)',
strong: 'rgba(200, 200, 180, 0.30)',
},
text: {
primary: 'rgb(250, 250, 239)',
secondary: 'rgba(200, 200, 180, 0.85)',
disabled: 'rgba(200, 200, 180, 0.6)',
link: '#FEAC34',
maxContrast: '#FFFFFF',
},
primary: {
main: '#FEAC34',
text: '#FFD783',
border: '#FFD783',
name: 'primary',
shade: 'rgb(255, 173, 80)',
transparent: '#FEAC3426',
contrastText: '#111614',
borderTransparent: '#FFD78340',
},
secondary: {
main: 'rgba(200, 200, 180, 0.10)',
shade: 'rgba(200, 200, 180, 0.14)',
transparent: 'rgba(200, 200, 180, 0.08)',
text: 'rgb(200, 200, 180)',
contrastText: 'rgb(200, 200, 180)',
border: 'rgba(200, 200, 180, 0.08)',
name: 'secondary',
borderTransparent: 'rgba(200, 200, 180, 0.25)',
},
background: {
canvas: '#111614',
primary: '#1d2220',
secondary: '#27312E',
elevated: '#27312E',
},
action: {
hover: 'rgba(200, 200, 180, 0.16)',
selected: 'rgba(200, 200, 180, 0.12)',
selectedBorder: '#FEAC34',
focus: 'rgba(200, 200, 180, 0.16)',
hoverOpacity: 0.08,
disabledText: 'rgba(200, 200, 180, 0.6)',
disabledBackground: 'rgba(200, 200, 180, 0.04)',
disabledOpacity: 0.38,
},
gradients: {
brandHorizontal: 'linear-gradient(270deg, #FEAC34 0%, #FFD783 100%)',
brandVertical: 'linear-gradient(0.01deg, #FEAC34 0.01%, #FFD783 99.99%)',
},
contrastThreshold: 3,
hoverFactor: 0.03,
tonalOffset: 0.15,
},
};
export default gildedGroveTheme;

View File

@@ -0,0 +1,52 @@
{
"name": "Gloom",
"id": "gloom",
"colors": {
"mode": "dark",
"border": {
"weak": "rgba(210, 210, 220, 0.12)",
"medium": "rgba(210, 210, 220, 0.20)",
"strong": "rgba(210, 210, 220, 0.30)"
},
"text": {
"primary": "rgb(210, 210, 220)",
"secondary": "rgba(210, 210, 220, 0.65)",
"disabled": "rgba(210, 210, 220, 0.48)",
"link": "#f99a5c",
"maxContrast": "#FFF"
},
"primary": {
"main": "#ff934d",
"text": "#f99a5c",
"border": "#ff934d",
"name": "primary"
},
"secondary": {
"main": "rgba(195, 195, 245, 0.10)",
"shade": "rgba(195, 195, 245, 0.14)",
"transparent": "rgba(195, 195, 245, 0.08)",
"text": "rgba(195, 195, 245)",
"contrastText": "rgb(195, 195, 245)",
"border": "rgba(195, 195, 245, 0.08)"
},
"background": {
"canvas": "#000",
"primary": "#121118",
"secondary": "#211e28",
"elevated": "#211e28"
},
"action": {
"hover": "rgba(195, 195, 245, 0.07)",
"selected": "rgba(195, 195, 245, 0.11)",
"selectedBorder": "#ff934d",
"focus": "rgba(195, 195, 245, 0.07)",
"hoverOpacity": 0.05,
"disabledText": "rgba(210, 210, 220, 0.48)",
"disabledBackground": "rgba(210, 210, 220, 0.04)",
"disabledOpacity": 0.38
},
"contrastThreshold": 3,
"hoverFactor": 0.03,
"tonalOffset": 0.15
}
}

View File

@@ -1,80 +0,0 @@
import { NewThemeOptions } from '../createTheme';
/**
* Torkel's GrafanaCon theme
* very WIP state
*/
const whiteBase = `210, 210, 220`;
const secondaryBase = `195, 195, 245`;
//const brandMain = '#3d71d9';
//const brandText = '#6e9fff';
const brandMain = '#ff934d';
const brandText = '#f99a5c';
const disabledText = `rgba(${whiteBase}, 0.48)`;
const gloomTheme: NewThemeOptions = {
name: 'Gloom',
colors: {
mode: 'dark',
border: {
weak: `rgba(${whiteBase}, 0.12)`,
medium: `rgba(${whiteBase}, 0.20)`,
strong: `rgba(${whiteBase}, 0.30)`,
},
text: {
primary: `rgb(${whiteBase})`,
secondary: `rgba(${whiteBase}, 0.65)`,
disabled: disabledText,
link: brandText,
maxContrast: '#FFF',
},
primary: {
main: brandMain,
text: brandText,
border: brandMain,
name: 'primary',
},
secondary: {
main: `rgba(${secondaryBase}, 0.10)`,
shade: `rgba(${secondaryBase}, 0.14)`,
transparent: `rgba(${secondaryBase}, 0.08)`,
text: `rgba(${secondaryBase})`,
contrastText: `rgb(${secondaryBase})`,
border: `rgba(${secondaryBase}, 0.08)`,
},
background: {
canvas: '#000',
primary: '#121118',
secondary: '#211e28',
elevated: '#211e28',
},
action: {
hover: `rgba(${secondaryBase}, 0.07)`,
selected: `rgba(${secondaryBase}, 0.11)`,
selectedBorder: brandMain,
focus: `rgba(${secondaryBase}, 0.07)`,
hoverOpacity: 0.05,
disabledText: disabledText,
disabledBackground: `rgba(${whiteBase}, 0.04)`,
disabledOpacity: 0.38,
},
// gradients: {
// brandHorizontal: 'linear-gradient(270deg, #ff934d 0%, #FEAC34 100%)',
// brandVertical: 'linear-gradient(0.01deg, #ff934d 0.01%, #FEAC34 99.99%)',
// },
contrastThreshold: 3,
hoverFactor: 0.03,
tonalOffset: 0.15,
},
};
export default gloomTheme;

View File

@@ -1,12 +1,12 @@
export { default as aubergine } from './aubergine';
export { default as debug } from './debug';
export { default as desertbloom } from './desertbloom';
export { default as gildedgrove } from './gildedgrove';
export { default as mars } from './mars';
export { default as matrix } from './matrix';
export { default as sapphiredusk } from './sapphiredusk';
export { default as synthwave } from './synthwave';
export { default as tron } from './tron';
export { default as victorian } from './victorian';
export { default as zen } from './zen';
export { default as gloom } from './gloom';
export { default as aubergine } from './aubergine.json';
export { default as debug } from './debug.json';
export { default as desertbloom } from './desertbloom.json';
export { default as gildedgrove } from './gildedgrove.json';
export { default as mars } from './mars.json';
export { default as matrix } from './matrix.json';
export { default as sapphiredusk } from './sapphiredusk.json';
export { default as synthwave } from './synthwave.json';
export { default as tron } from './tron.json';
export { default as victorian } from './victorian.json';
export { default as zen } from './zen.json';
export { default as gloom } from './gloom.json';
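With the registry now discovering themes from these exports, adding another one should in principle only take a JSON file that satisfies the required name and id plus a matching export line; the registry loop validates and registers it automatically. A hypothetical sketch (the theme file and its ids are made up):

// themeDefinitions/midnight.json (hypothetical) would contain at minimum:
//   { "name": "Midnight", "id": "midnight", "colors": { "mode": "dark" } }
export { default as midnight } from './midnight.json';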

View File

@@ -0,0 +1,50 @@
{
"name": "Mars",
"id": "mars",
"colors": {
"mode": "dark",
"border": {
"weak": "rgba(210, 90, 60, 0.2)",
"medium": "rgba(210, 90, 60, 0.35)",
"strong": "rgba(210, 90, 60, 0.5)"
},
"text": {
"primary": "#DDDDDD",
"secondary": "#BBBBBB",
"disabled": "rgba(221, 221, 221, 0.5)",
"link": "#FF6F61",
"maxContrast": "#FFFFFF"
},
"primary": {
"main": "#FF6F61"
},
"secondary": {
"main": "#6a2f2f",
"text": "#BBBBBB",
"border": "rgba(210, 90, 60, 0.2)"
},
"background": {
"canvas": "#3C1E1E",
"primary": "#522626",
"secondary": "#6A2F2F",
"elevated": "#6A2F2F"
},
"action": {
"hover": "rgba(210, 90, 60, 0.16)",
"selected": "rgba(210, 90, 60, 0.12)",
"selectedBorder": "#FF6F61",
"focus": "rgba(210, 90, 60, 0.16)",
"hoverOpacity": 0.08,
"disabledText": "rgba(221, 221, 221, 0.5)",
"disabledBackground": "rgba(210, 90, 60, 0.08)",
"disabledOpacity": 0.38
},
"gradients": {
"brandHorizontal": "linear-gradient(270deg, #FF6F61 0%, #D25A3C 100%)",
"brandVertical": "linear-gradient(0.01deg, #FF6F61 0.01%, #D25A3C 99.99%)"
},
"contrastThreshold": 3,
"hoverFactor": 0.05,
"tonalOffset": 0.2
}
}

View File

@@ -1,53 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const marsTheme: NewThemeOptions = {
name: 'Mars',
colors: {
mode: 'dark',
border: {
weak: 'rgba(210, 90, 60, 0.2)',
medium: 'rgba(210, 90, 60, 0.35)',
strong: 'rgba(210, 90, 60, 0.5)',
},
text: {
primary: '#DDDDDD',
secondary: '#BBBBBB',
disabled: 'rgba(221, 221, 221, 0.5)',
link: '#FF6F61',
maxContrast: '#FFFFFF',
},
primary: {
main: '#FF6F61',
},
secondary: {
main: '#6a2f2f',
text: '#BBBBBB',
border: 'rgba(210, 90, 60, 0.2)',
},
background: {
canvas: '#3C1E1E',
primary: '#522626',
secondary: '#6A2F2F',
elevated: '#6A2F2F',
},
action: {
hover: 'rgba(210, 90, 60, 0.16)',
selected: 'rgba(210, 90, 60, 0.12)',
selectedBorder: '#FF6F61',
focus: 'rgba(210, 90, 60, 0.16)',
hoverOpacity: 0.08,
disabledText: 'rgba(221, 221, 221, 0.5)',
disabledBackground: 'rgba(210, 90, 60, 0.08)',
disabledOpacity: 0.38,
},
gradients: {
brandHorizontal: 'linear-gradient(270deg, #FF6F61 0%, #D25A3C 100%)',
brandVertical: 'linear-gradient(0.01deg, #FF6F61 0.01%, #D25A3C 99.99%)',
},
contrastThreshold: 3,
hoverFactor: 0.05,
tonalOffset: 0.2,
},
};
export default marsTheme;

View File

@@ -0,0 +1,41 @@
{
"name": "Matrix",
"id": "matrix",
"colors": {
"mode": "dark",
"background": {
"canvas": "#000000",
"primary": "#020202",
"secondary": "#080808",
"elevated": "#080808"
},
"text": {
"primary": "#00c017",
"secondary": "#008910",
"disabled": "#006a0c",
"link": "#00ff41",
"maxContrast": "#00ff41"
},
"border": {
"weak": "#008f1144",
"medium": "#008f1188",
"strong": "#008910"
},
"primary": {
"main": "#008910"
},
"secondary": {
"text": "#008910"
},
"gradients": {
"brandVertical": "linear-gradient(0deg, #008910 0%, #00ff41 100%)",
"brandHorizontal": "linear-gradient(90deg, #008910 0%, #00ff41 100%)"
}
},
"shape": {
"borderRadius": 0
},
"typography": {
"fontFamily": "monospace"
}
}

View File

@@ -1,44 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const matrixTheme: NewThemeOptions = {
name: 'Matrix',
colors: {
mode: 'dark',
background: {
canvas: '#000000',
primary: '#020202',
secondary: '#080808',
elevated: '#080808',
},
text: {
primary: '#00c017',
secondary: '#008910',
disabled: '#006a0c',
link: '#00ff41',
maxContrast: '#00ff41',
},
border: {
weak: '#008f1144',
medium: '#008f1188',
strong: '#008910',
},
primary: {
main: '#008910',
},
secondary: {
text: '#008910',
},
gradients: {
brandVertical: 'linear-gradient(0deg, #008910 0%, #00ff41 100%)',
brandHorizontal: 'linear-gradient(90deg, #008910 0%, #00ff41 100%)',
},
},
shape: {
borderRadius: 0,
},
typography: {
fontFamily: 'monospace',
},
};
export default matrixTheme;

View File

@@ -0,0 +1,76 @@
{
"name": "Sapphire dusk",
"id": "sapphiredusk",
"colors": {
"mode": "dark",
"border": {
"weak": "#232e47",
"medium": "#2c3853",
"strong": "#404d6b"
},
"text": {
"primary": "#FFFFFF",
"secondary": "#bcccdd",
"disabled": "#838da5",
"link": "#93EBF0",
"maxContrast": "#FFFFFF"
},
"primary": {
"main": "#93EBF0",
"text": "#a8e9ed",
"border": "#93ebf0",
"name": "primary",
"shade": "#c0f5d9",
"transparent": "#93EBF029",
"contrastText": "#111614",
"borderTransparent": "#93ebf040"
},
"secondary": {
"main": "#2c364f",
"shade": "#36415e",
"transparent": "rgba(200, 200, 180, 0.08)",
"text": "#d1dfff",
"contrastText": "#acfeff",
"border": "rgba(200, 200, 180, 0.08)",
"name": "secondary",
"borderTransparent": "rgba(200, 200, 180, 0.25)"
},
"info": {
"main": "#4d4593",
"text": "#a8e9ed",
"border": "#5d54a7"
},
"error": {
"main": "#c63370"
},
"success": {
"main": "#1A7F4B"
},
"warning": {
"main": "#D448EA"
},
"background": {
"canvas": "#1e273d",
"primary": "#12192e",
"secondary": "#212c47",
"elevated": "#212c47"
},
"action": {
"hover": "#364057",
"selected": "#364260",
"selectedBorder": "#D448EA",
"focus": "#364057",
"hoverOpacity": 0.08,
"disabledText": "#838da5",
"disabledBackground": "rgba(54, 64, 87, 0.2)",
"disabledOpacity": 0.38
},
"gradients": {
"brandHorizontal": "linear-gradient(270deg, #D346EF 0%, #2C83FE 100%)",
"brandVertical": "linear-gradient(0deg, #D346EF 0%, #2C83FE 100%)"
},
"contrastThreshold": 3,
"hoverFactor": 0.03,
"tonalOffset": 0.15
}
}

View File

@@ -1,79 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const sapphireDuskTheme: NewThemeOptions = {
name: 'Sapphire dusk',
colors: {
mode: 'dark',
border: {
weak: '#232e47',
medium: '#2c3853',
strong: '#404d6b',
},
text: {
primary: '#FFFFFF',
secondary: '#bcccdd',
disabled: '#838da5',
link: '#93EBF0',
maxContrast: '#FFFFFF',
},
primary: {
main: '#93EBF0',
text: '#a8e9ed',
border: '#93ebf0',
name: 'primary',
shade: '#c0f5d9',
transparent: '#93EBF029',
contrastText: '#111614',
borderTransparent: '#93ebf040',
},
secondary: {
main: '#2c364f',
shade: '#36415e',
transparent: 'rgba(200, 200, 180, 0.08)',
text: '#d1dfff',
contrastText: '#acfeff',
border: 'rgba(200, 200, 180, 0.08)',
name: 'secondary',
borderTransparent: 'rgba(200, 200, 180, 0.25)',
},
info: {
main: '#4d4593',
text: '#a8e9ed',
border: '#5d54a7',
},
error: {
main: '#c63370',
},
success: {
main: '#1A7F4B',
},
warning: {
main: '#D448EA',
},
background: {
canvas: '#1e273d',
primary: '#12192e',
secondary: '#212c47',
elevated: '#212c47',
},
action: {
hover: '#364057',
selected: '#364260',
selectedBorder: '#D448EA',
focus: '#364057',
hoverOpacity: 0.08,
disabledText: '#838da5',
disabledBackground: 'rgba(54, 64, 87, 0.2)',
disabledOpacity: 0.38,
},
gradients: {
brandHorizontal: 'linear-gradient(270deg, #D346EF 0%, #2C83FE 100%)',
brandVertical: 'linear-gradient(0deg, #D346EF 0%, #2C83FE 100%)',
},
contrastThreshold: 3,
hoverFactor: 0.03,
tonalOffset: 0.15,
},
};
export default sapphireDuskTheme;

View File

@@ -0,0 +1,50 @@
{
"name": "Synthwave",
"id": "synthwave",
"colors": {
"mode": "dark",
"border": {
"weak": "rgba(255, 20, 147, 0.12)",
"medium": "rgba(255, 20, 147, 0.20)",
"strong": "rgba(255, 20, 147, 0.30)"
},
"text": {
"primary": "#E0E0E0",
"secondary": "rgba(224, 224, 224, 0.75)",
"disabled": "rgba(224, 224, 224, 0.5)",
"link": "#FF69B4",
"maxContrast": "#FFFFFF"
},
"primary": {
"main": "#FF1493"
},
"secondary": {
"main": "#37183a",
"text": "rgba(224, 224, 224, 0.75)",
"border": "rgba(255, 20, 147, 0.10)"
},
"background": {
"canvas": "#1A1A2E",
"primary": "#16213E",
"secondary": "#0F3460",
"elevated": "#0F3460"
},
"action": {
"hover": "rgba(255, 20, 147, 0.16)",
"selected": "rgba(255, 20, 147, 0.12)",
"selectedBorder": "#FF1493",
"focus": "rgba(255, 20, 147, 0.16)",
"hoverOpacity": 0.08,
"disabledText": "rgba(224, 224, 224, 0.5)",
"disabledBackground": "rgba(255, 20, 147, 0.08)",
"disabledOpacity": 0.38
},
"gradients": {
"brandHorizontal": "linear-gradient(270deg, #FF1493 0%, #1E90FF 100%)",
"brandVertical": "linear-gradient(0.01deg, #FF1493 0.01%, #1E90FF 99.99%)"
},
"contrastThreshold": 3,
"hoverFactor": 0.03,
"tonalOffset": 0.15
}
}

View File

@@ -1,53 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const synthwaveTheme: NewThemeOptions = {
name: 'Synthwave',
colors: {
mode: 'dark',
border: {
weak: 'rgba(255, 20, 147, 0.12)',
medium: 'rgba(255, 20, 147, 0.20)',
strong: 'rgba(255, 20, 147, 0.30)',
},
text: {
primary: '#E0E0E0',
secondary: 'rgba(224, 224, 224, 0.75)',
disabled: 'rgba(224, 224, 224, 0.5)',
link: '#FF69B4',
maxContrast: '#FFFFFF',
},
primary: {
main: '#FF1493',
},
secondary: {
main: '#37183a',
text: 'rgba(224, 224, 224, 0.75)',
border: 'rgba(255, 20, 147, 0.10)',
},
background: {
canvas: '#1A1A2E',
primary: '#16213E',
secondary: '#0F3460',
elevated: '#0F3460',
},
action: {
hover: 'rgba(255, 20, 147, 0.16)',
selected: 'rgba(255, 20, 147, 0.12)',
selectedBorder: '#FF1493',
focus: 'rgba(255, 20, 147, 0.16)',
hoverOpacity: 0.08,
disabledText: 'rgba(224, 224, 224, 0.5)',
disabledBackground: 'rgba(255, 20, 147, 0.08)',
disabledOpacity: 0.38,
},
gradients: {
brandHorizontal: 'linear-gradient(270deg, #FF1493 0%, #1E90FF 100%)',
brandVertical: 'linear-gradient(0.01deg, #FF1493 0.01%, #1E90FF 99.99%)',
},
contrastThreshold: 3,
hoverFactor: 0.03,
tonalOffset: 0.15,
},
};
export default synthwaveTheme;

View File

@@ -0,0 +1,50 @@
{
"name": "Tron",
"id": "tron",
"colors": {
"mode": "dark",
"border": {
"weak": "rgba(0, 255, 255, 0.12)",
"medium": "rgba(0, 255, 255, 0.20)",
"strong": "rgba(0, 255, 255, 0.30)"
},
"text": {
"primary": "#E0E0E0",
"secondary": "rgba(224, 224, 224, 0.75)",
"disabled": "rgba(224, 224, 224, 0.5)",
"link": "#00FFFF",
"maxContrast": "#FFFFFF"
},
"primary": {
"main": "#00FFFF"
},
"secondary": {
"main": "#0b2e36",
"text": "rgba(224, 224, 224, 0.75)",
"border": "rgba(0, 255, 255, 0.10)"
},
"background": {
"canvas": "#0A0F18",
"primary": "#0F1B2A",
"secondary": "#152234",
"elevated": "#152234"
},
"action": {
"hover": "rgba(0, 255, 255, 0.16)",
"selected": "rgba(0, 255, 255, 0.12)",
"selectedBorder": "#00FFFF",
"focus": "rgba(0, 255, 255, 0.16)",
"hoverOpacity": 0.08,
"disabledText": "rgba(224, 224, 224, 0.5)",
"disabledBackground": "rgba(0, 255, 255, 0.08)",
"disabledOpacity": 0.38
},
"gradients": {
"brandHorizontal": "linear-gradient(270deg, #00FFFF 0%, #29ABE2 100%)",
"brandVertical": "linear-gradient(0.01deg, #00FFFF 0.01%, #29ABE2 99.99%)"
},
"contrastThreshold": 3,
"hoverFactor": 0.05,
"tonalOffset": 0.2
}
}

View File

@@ -1,53 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const tronTheme: NewThemeOptions = {
name: 'Tron',
colors: {
mode: 'dark',
border: {
weak: 'rgba(0, 255, 255, 0.12)',
medium: 'rgba(0, 255, 255, 0.20)',
strong: 'rgba(0, 255, 255, 0.30)',
},
text: {
primary: '#E0E0E0',
secondary: 'rgba(224, 224, 224, 0.75)',
disabled: 'rgba(224, 224, 224, 0.5)',
link: '#00FFFF',
maxContrast: '#FFFFFF',
},
primary: {
main: '#00FFFF',
},
secondary: {
main: '#0b2e36',
text: 'rgba(224, 224, 224, 0.75)',
border: 'rgba(0, 255, 255, 0.10)',
},
background: {
canvas: '#0A0F18',
primary: '#0F1B2A',
secondary: '#152234',
elevated: '#152234',
},
action: {
hover: 'rgba(0, 255, 255, 0.16)',
selected: 'rgba(0, 255, 255, 0.12)',
selectedBorder: '#00FFFF',
focus: 'rgba(0, 255, 255, 0.16)',
hoverOpacity: 0.08,
disabledText: 'rgba(224, 224, 224, 0.5)',
disabledBackground: 'rgba(0, 255, 255, 0.08)',
disabledOpacity: 0.38,
},
gradients: {
brandHorizontal: 'linear-gradient(270deg, #00FFFF 0%, #29ABE2 100%)',
brandVertical: 'linear-gradient(0.01deg, #00FFFF 0.01%, #29ABE2 99.99%)',
},
contrastThreshold: 3,
hoverFactor: 0.05,
tonalOffset: 0.2,
},
};
export default tronTheme;

View File

@@ -0,0 +1,54 @@
{
"name": "Victorian",
"id": "victorian",
"colors": {
"mode": "dark",
"border": {
"weak": "#3A2C22",
"medium": "#3A2C22",
"strong": "#4B3D32"
},
"text": {
"primary": "#D9D0A2",
"secondary": "#C4B89B",
"disabled": "#A89F91",
"link": "#C28A4D",
"maxContrast": "#FFFFFF"
},
"primary": {
"main": "#C28A4D"
},
"secondary": {
"main": "#3A2C22",
"text": "#C4B89B",
"border": "#4B3D32"
},
"background": {
"canvas": "#1F1510",
"primary": "#2C1A13",
"secondary": "#402A21",
"elevated": "#402A21"
},
"action": {
"hover": "#3A2C22",
"selected": "#4B3D32",
"selectedBorder": "#C28A4D",
"focus": "#C28A4D",
"hoverOpacity": 0.1,
"disabledText": "#A89F91",
"disabledBackground": "#402A21",
"disabledOpacity": 0.38
},
"gradients": {
"brandHorizontal": "linear-gradient(270deg, #D9D0a1 0%, #C28A4D 100%)",
"brandVertical": "linear-gradient(0.01deg, #D9D0a1 0.01%, #C28A4D 99.99%)"
},
"contrastThreshold": 4,
"hoverFactor": 0.07,
"tonalOffset": 0.15
},
"typography": {
"fontFamily": "\"Georgia\", \"Times New Roman\", serif",
"fontFamilyMonospace": "'Courier New', monospace"
}
}

View File

@@ -1,57 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const victorianTheme: NewThemeOptions = {
name: 'Victorian',
colors: {
mode: 'dark',
border: {
weak: '#3A2C22',
medium: '#3A2C22',
strong: '#4B3D32',
},
text: {
primary: '#D9D0A2',
secondary: '#C4B89B',
disabled: '#A89F91',
link: '#C28A4D',
maxContrast: '#FFFFFF',
},
primary: {
main: '#C28A4D',
},
secondary: {
main: '#3A2C22',
text: '#C4B89B',
border: '#4B3D32',
},
background: {
canvas: '#1F1510',
primary: '#2C1A13',
secondary: '#402A21',
elevated: '#402A21',
},
action: {
hover: '#3A2C22',
selected: '#4B3D32',
selectedBorder: '#C28A4D',
focus: '#C28A4D',
hoverOpacity: 0.1,
disabledText: '#A89F91',
disabledBackground: '#402A21',
disabledOpacity: 0.38,
},
gradients: {
brandHorizontal: 'linear-gradient(270deg, #D9D0a1 0%, #C28A4D 100%)',
brandVertical: 'linear-gradient(0.01deg, #D9D0a1 0.01%, #C28A4D 99.99%)',
},
contrastThreshold: 4,
hoverFactor: 0.07,
tonalOffset: 0.15,
},
typography: {
fontFamily: '"Georgia", "Times New Roman", serif',
fontFamilyMonospace: "'Courier New', monospace",
},
};
export default victorianTheme;

View File

@@ -0,0 +1,50 @@
{
"name": "Zen",
"id": "zen",
"colors": {
"mode": "light",
"text": {
"primary": "#333333",
"secondary": "#666666",
"disabled": "#B8B8B8",
"link": "#4F9F6E",
"maxContrast": "#000000"
},
"border": {
"weak": "#B1B7B3",
"medium": "#A2A8A2",
"strong": "#7C7F7A"
},
"primary": {
"main": "#6D8E6D"
},
"secondary": {
"main": "#E0E0E0",
"text": "#666666",
"border": "#A2A8A2"
},
"background": {
"canvas": "#F4F4F4",
"primary": "#E9E9E9",
"secondary": "#D8D8D8",
"elevated": "#E9E9E9"
},
"action": {
"hover": "#D1D1D1",
"selected": "#B8B8B8",
"selectedBorder": "#88B88B",
"hoverOpacity": 0.1,
"focus": "#D1D1D1",
"disabledBackground": "#E0E0E0",
"disabledText": "#B8B8B8",
"disabledOpacity": 0.5
},
"gradients": {
"brandHorizontal": "linear-gradient(270deg, #88B88B 0%, #6D8E6D 100%)",
"brandVertical": "linear-gradient(0.01deg, #88B88B 0.01%, #6D8E6D 99.99%)"
},
"contrastThreshold": 3,
"hoverFactor": 0.03,
"tonalOffset": 0.2
}
}

View File

@@ -1,53 +0,0 @@
import { NewThemeOptions } from '../createTheme';
const zenTheme: NewThemeOptions = {
name: 'Zen',
colors: {
mode: 'light',
text: {
primary: '#333333',
secondary: '#666666',
disabled: '#B8B8B8',
link: '#4F9F6E',
maxContrast: '#000000',
},
border: {
weak: '#B1B7B3',
medium: '#A2A8A2',
strong: '#7C7F7A',
},
primary: {
main: '#6D8E6D',
},
secondary: {
main: '#E0E0E0',
text: '#666666',
border: '#A2A8A2',
},
background: {
canvas: '#F4F4F4',
primary: '#E9E9E9',
secondary: '#D8D8D8',
elevated: '#E9E9E9',
},
action: {
hover: '#D1D1D1',
selected: '#B8B8B8',
selectedBorder: '#88B88B',
hoverOpacity: 0.1,
focus: '#D1D1D1',
disabledBackground: '#E0E0E0',
disabledText: '#B8B8B8',
disabledOpacity: 0.5,
},
gradients: {
brandHorizontal: 'linear-gradient(270deg, #88B88B 0%, #6D8E6D 100%)',
brandVertical: 'linear-gradient(0.01deg, #88B88B 0.01%, #6D8E6D 99.99%)',
},
contrastThreshold: 3,
hoverFactor: 0.03,
tonalOffset: 0.2,
},
};
export default zenTheme;

View File

@@ -1,3 +1,5 @@
import { z } from 'zod';
import { GrafanaTheme } from '../types/theme';
import { ThemeBreakpoints } from './breakpoints';
@@ -35,27 +37,36 @@ export interface GrafanaTheme2 {
flags: {};
}
/** @alpha */
export interface ThemeRichColor {
export const ThemeRichColorInputSchema = z.object({
/** color intent (primary, secondary, info, error, etc) */
name: string;
name: z.string().optional(),
/** Main color */
main: string;
main: z.string().optional(),
/** Used for hover */
shade: string;
shade: z.string().optional(),
/** Used for text */
text: string;
text: z.string().optional(),
/** Used for borders */
border: string;
border: z.string().optional(),
/** Used subtly colored backgrounds */
transparent: string;
transparent: z.string().optional(),
/** Used for weak colored borders like larger alert/banner boxes and smaller badges and tags */
borderTransparent: string;
borderTransparent: z.string().optional(),
/** Text color for text ontop of main */
contrastText: string;
}
contrastText: z.string().optional(),
});
export const ThemeRichColorSchema = ThemeRichColorInputSchema.required();
/** @alpha */
export type ThemeRichColor = z.infer<typeof ThemeRichColorSchema>;
/** @internal */
export type DeepPartial<T> = {
[P in keyof T]?: DeepPartial<T[P]>;
};
/** @internal */
export type DeepRequired<T> = Required<{
[P in keyof T]: T[P] extends Required<T[P]> ? T[P] : DeepRequired<T[P]>;
}>;
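The split between the input schema and its .required() counterpart mirrors the difference between a partial, author-supplied color and a fully resolved ThemeRichColor. A small sketch of that difference (values made up):

import { ThemeRichColorInputSchema, ThemeRichColorSchema } from './types';

// A theme author only has to provide the fields they want to override...
const partial = { main: '#FF6F61', name: 'primary' };
console.log(ThemeRichColorInputSchema.safeParse(partial).success); // true

// ...while the fully resolved rich color must carry every field.
console.log(ThemeRichColorSchema.safeParse(partial).success); // false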

View File

@@ -42,5 +42,6 @@ export enum DataTransformerID {
formatTime = 'formatTime',
formatString = 'formatString',
regression = 'regression',
smoothing = 'smoothing',
groupToNestedTable = 'groupToNestedTable',
}

View File

@@ -1255,4 +1255,8 @@ export interface FeatureToggles {
* Enables support for variables whose values can have multiple properties
*/
multiPropsVariables?: boolean;
/**
* Enables the ASAP smoothing transformation for time series data
*/
smoothingTransformation?: boolean;
}
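As with other experimental features, the new transformation is presumably gated at runtime on this toggle. A hedged sketch of what a frontend check might look like, using the existing config object from @grafana/runtime; the helper name is made up:

import { config } from '@grafana/runtime';

export function isSmoothingEnabled(): boolean {
  // Defaults to false when the toggle is absent from the instance settings.
  return Boolean(config.featureToggles.smoothingTransformation);
}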

View File

@@ -9,5 +9,4 @@
* and be subject to the standard policies
*/
// This is a dummy export so typescript doesn't error importing an "empty module"
export const unstable = {};
export { default as themeJsonSchema } from './themes/schema.generated.json';
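Downstream code can then consume the schema through the unstable entry point rather than reaching into package internals. A sketch, assuming the entry point resolves as @grafana/data/unstable:

import { themeJsonSchema } from '@grafana/data/unstable';

// e.g. hand the schema to an editor or validator that understands JSON Schema draft-07.
console.log(themeJsonSchema.$schema); // "http://json-schema.org/draft-07/schema#"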

File diff suppressed because it is too large

View File

@@ -1,834 +0,0 @@
{
"_comment": "Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/dashboards/prometheus_stats.json",
"__inputs": [
{
"name": "DS_GDEV-PROMETHEUS",
"label": "gdev-prometheus",
"description": "",
"type": "datasource",
"pluginId": "prometheus",
"pluginName": "Prometheus"
}
],
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "8.1.0-pre"
},
{
"type": "datasource",
"id": "prometheus",
"name": "Prometheus",
"version": "1.0.0"
},
{
"type": "panel",
"id": "stat",
"name": "Stat",
"version": ""
},
{
"type": "panel",
"id": "text",
"name": "Text",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": null,
"iteration": 1624859749459,
"links": [
{
"icon": "info",
"tags": [],
"targetBlank": true,
"title": "Grafana Docs",
"tooltip": "",
"type": "link",
"url": "https://grafana.com/docs/grafana/latest/"
},
{
"icon": "info",
"tags": [],
"targetBlank": true,
"title": "Prometheus Docs",
"type": "link",
"url": "http://prometheus.io/docs/introduction/overview/"
}
],
"panels": [
{
"cacheTimeout": null,
"datasource": "${DS_GDEV-PROMETHEUS}",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"decimals": 1,
"mappings": [
{
"options": {
"match": "null",
"result": {
"text": "N/A"
}
},
"type": "special"
}
],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": {
"h": 5,
"w": 6,
"x": 0,
"y": 0
},
"id": 5,
"interval": null,
"links": [],
"maxDataPoints": 100,
"options": {
"colorMode": "none",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "horizontal",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"expr": "(time() - process_start_time_seconds{job=\"prometheus\", instance=~\"$node\"})",
"intervalFactor": 2,
"refId": "A"
}
],
"title": "Uptime",
"type": "stat"
},
{
"cacheTimeout": null,
"datasource": "${DS_GDEV-PROMETHEUS}",
"fieldConfig": {
"defaults": {
"color": {
"fixedColor": "rgb(31, 120, 193)",
"mode": "fixed"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "rgba(50, 172, 45, 0.97)",
"value": null
},
{
"color": "rgba(237, 129, 40, 0.89)",
"value": 1
},
{
"color": "rgba(245, 54, 54, 0.9)",
"value": 5
}
]
},
"unit": "none"
},
"overrides": []
},
"gridPos": {
"h": 5,
"w": 6,
"x": 6,
"y": 0
},
"id": 6,
"interval": null,
"links": [],
"maxDataPoints": 100,
"options": {
"colorMode": "none",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "horizontal",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"expr": "prometheus_local_storage_memory_series{instance=~\"$node\"}",
"intervalFactor": 2,
"refId": "A"
}
],
"title": "Local Storage Memory Series",
"type": "stat"
},
{
"cacheTimeout": null,
"datasource": "${DS_GDEV-PROMETHEUS}",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [
{
"options": {
"0": {
"text": "Empty"
}
},
"type": "value"
}
],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "rgba(50, 172, 45, 0.97)",
"value": null
},
{
"color": "rgba(237, 129, 40, 0.89)",
"value": 500
},
{
"color": "rgba(245, 54, 54, 0.9)",
"value": 4000
}
]
},
"unit": "none"
},
"overrides": []
},
"gridPos": {
"h": 5,
"w": 6,
"x": 12,
"y": 0
},
"id": 7,
"interval": null,
"links": [],
"maxDataPoints": 100,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "horizontal",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"expr": "prometheus_local_storage_indexing_queue_length{instance=~\"$node\"}",
"intervalFactor": 2,
"refId": "A"
}
],
"title": "Internal Storage Queue Length",
"type": "stat"
},
{
"datasource": null,
"editable": true,
"error": false,
"gridPos": {
"h": 5,
"w": 6,
"x": 18,
"y": 0
},
"id": 9,
"links": [],
"options": {
"content": "<span style=\"font-family: 'Open Sans', 'Helvetica Neue', Helvetica; font-size: 25px;vertical-align: text-top;color: #bbbfc2;margin-left: 10px;\">Prometheus</span>\n\n<p style=\"margin-top: 10px;\">You're using Prometheus, an open-source systems monitoring and alerting toolkit originally built at SoundCloud. For more information, check out the <a href=\"https://grafana.com/\">Grafana</a> and <a href=\"http://prometheus.io/\">Prometheus</a> projects.</p>",
"mode": "html"
},
"pluginVersion": "8.1.0-pre",
"style": {},
"transparent": true,
"type": "text"
},
{
"datasource": "${DS_GDEV-PROMETHEUS}",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "prometheus"
},
"properties": [
{
"id": "color",
"value": {
"fixedColor": "#C15C17",
"mode": "fixed"
}
}
]
},
{
"matcher": {
"id": "byName",
"options": "{instance=\"localhost:9090\",job=\"prometheus\"}"
},
"properties": [
{
"id": "color",
"value": {
"fixedColor": "#C15C17",
"mode": "fixed"
}
}
]
}
]
},
"gridPos": {
"h": 6,
"w": 18,
"x": 0,
"y": 5
},
"id": 3,
"links": [],
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
},
"tooltip": {
"mode": "single"
}
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"expr": "rate(prometheus_local_storage_ingested_samples_total{instance=~\"$node\"}[5m])",
"interval": "",
"intervalFactor": 2,
"legendFormat": "{{job}}",
"metric": "",
"refId": "A"
}
],
"timeFrom": null,
"timeShift": null,
"title": "Samples ingested (rate-5m)",
"type": "timeseries"
},
{
"datasource": null,
"editable": true,
"error": false,
"gridPos": {
"h": 6,
"w": 4,
"x": 18,
"y": 5
},
"id": 8,
"links": [],
"options": {
"content": "#### Samples Ingested\nThis graph displays the count of samples ingested by the Prometheus server, as measured over the last 5 minutes, per time series in the range vector. When troubleshooting an issue on IRC or GitHub, this is often the first stat requested by the Prometheus team. ",
"mode": "markdown"
},
"pluginVersion": "8.1.0-pre",
"style": {},
"transparent": true,
"type": "text"
},
{
"datasource": "${DS_GDEV-PROMETHEUS}",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "prometheus"
},
"properties": [
{
"id": "color",
"value": {
"fixedColor": "#F9BA8F",
"mode": "fixed"
}
}
]
},
{
"matcher": {
"id": "byName",
"options": "{instance=\"localhost:9090\",interval=\"5s\",job=\"prometheus\"}"
},
"properties": [
{
"id": "color",
"value": {
"fixedColor": "#F9BA8F",
"mode": "fixed"
}
}
]
}
]
},
"gridPos": {
"h": 7,
"w": 10,
"x": 0,
"y": 11
},
"id": 2,
"links": [],
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
},
"tooltip": {
"mode": "single"
}
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"expr": "rate(prometheus_target_interval_length_seconds_count{instance=~\"$node\"}[5m])",
"intervalFactor": 2,
"legendFormat": "{{job}}",
"refId": "A"
}
],
"timeFrom": null,
"timeShift": null,
"title": "Target Scrapes (last 5m)",
"type": "timeseries"
},
{
"datasource": "${DS_GDEV-PROMETHEUS}",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 8,
"x": 10,
"y": 11
},
"id": 14,
"links": [],
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
},
"tooltip": {
"mode": "single"
}
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"expr": "prometheus_target_interval_length_seconds{quantile!=\"0.01\", quantile!=\"0.05\",instance=~\"$node\"}",
"interval": "",
"intervalFactor": 2,
"legendFormat": "{{quantile}} ({{interval}})",
"metric": "",
"refId": "A"
}
],
"timeFrom": null,
"timeShift": null,
"title": "Scrape Duration",
"type": "timeseries"
},
{
"datasource": null,
"editable": true,
"error": false,
"gridPos": {
"h": 7,
"w": 6,
"x": 18,
"y": 11
},
"id": 11,
"links": [],
"options": {
"content": "#### Scrapes\nPrometheus scrapes metrics from instrumented jobs, either directly or via an intermediary push gateway for short-lived jobs. Target scrapes will show how frequently targets are scraped, as measured over the last 5 minutes, per time series in the range vector. Scrape Duration will show how long the scrapes are taking, with percentiles available as series. ",
"mode": "markdown"
},
"pluginVersion": "8.1.0-pre",
"style": {},
"transparent": true,
"type": "text"
},
{
"datasource": "${DS_GDEV-PROMETHEUS}",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"links": [],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 18,
"x": 0,
"y": 18
},
"id": 12,
"links": [],
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
},
"tooltip": {
"mode": "single"
}
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"expr": "prometheus_evaluator_duration_seconds{quantile!=\"0.01\", quantile!=\"0.05\",instance=~\"$node\"}",
"interval": "",
"intervalFactor": 2,
"legendFormat": "{{quantile}}",
"refId": "A"
}
],
"timeFrom": null,
"timeShift": null,
"title": "Rule Eval Duration",
"type": "timeseries"
},
{
"datasource": null,
"editable": true,
"error": false,
"gridPos": {
"h": 7,
"w": 6,
"x": 18,
"y": 18
},
"id": 15,
"links": [],
"options": {
"content": "#### Rule Evaluation Duration\nThis graph panel plots the duration for all evaluations to execute. The 50th percentile, 90th percentile and 99th percentile are shown as three separate series to help identify outliers that may be skewing the data.",
"mode": "markdown"
},
"pluginVersion": "8.1.0-pre",
"style": {},
"transparent": true,
"type": "text"
}
],
"refresh": false,
"revision": "1.0",
"schemaVersion": 30,
"tags": ["prometheus"],
"templating": {
"list": [
{
"allValue": null,
"current": {},
"datasource": "${DS_GDEV-PROMETHEUS}",
"definition": "",
"description": null,
"error": null,
"hide": 0,
"includeAll": false,
"label": "HOST:",
"multi": false,
"name": "node",
"options": [],
"query": {
"query": "label_values(prometheus_build_info, instance)",
"refId": "gdev-prometheus-node-Variable-Query"
},
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 1,
"tagValuesQuery": "",
"tagsQuery": "",
"type": "query",
"useTags": false
}
]
},
"time": {
"from": "now-5m",
"to": "now"
},
"timepicker": {
"now": true,
"refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"]
},
"timezone": "browser",
"title": "Prometheus Stats",
"uid": "rpfmFFz7z",
"version": 2
}

View File

@@ -1,7 +1,6 @@
package middleware
import (
"context"
"errors"
"net/http"
"net/url"
@@ -22,13 +21,6 @@ import (
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/web"
"github.com/open-feature/go-sdk/openfeature"
)
var openfeatureClient = openfeature.NewDefaultClient()
const (
pluginPageFeatureFlagPrefix = "plugin-page-visible."
)
type AuthOptions struct {
@@ -154,12 +146,6 @@ func RoleAppPluginAuth(accessControl ac.AccessControl, ps pluginstore.Store, log
return
}
if !PageIsFeatureToggleEnabled(c.Req.Context(), c.Req.URL.Path) {
logger.Debug("Forbidden experimental plugin page", "plugin", pluginID, "path", c.Req.URL.Path)
accessForbidden(c)
return
}
permitted := true
path := normalizeIncludePath(c.Req.URL.Path)
hasAccess := ac.HasAccess(accessControl, c)
@@ -308,18 +294,3 @@ func shouldForceLogin(c *contextmodel.ReqContext) bool {
return forceLogin
}
// PageIsFeatureToggleEnabled checks if a page is enabled via OpenFeature feature flags.
// It returns false only if the feature flag exists and is explicitly set to false; otherwise the page defaults to enabled.
// The feature flag key format is: "plugin-page-visible.<path>"
func PageIsFeatureToggleEnabled(ctx context.Context, path string) bool {
flagKey := pluginPageFeatureFlagPrefix + filepath.Clean(path)
enabled := openfeatureClient.Boolean(
ctx,
flagKey,
true,
openfeature.TransactionContext(ctx),
)
return enabled
}
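
The removed helper above gates experimental plugin pages behind an OpenFeature flag keyed by the cleaned request path, defaulting to visible when no flag exists. Below is a minimal, dependency-free sketch of that rule; the map lookup is a hypothetical stand-in for the OpenFeature client, and pageVisible is an illustrative name, not a Grafana API.

package main

import (
    "fmt"
    "path/filepath"
)

const pluginPageFeatureFlagPrefix = "plugin-page-visible."

// pageVisible returns false only when a flag exists for the cleaned path and is
// explicitly set to false; unknown pages default to visible (default variant true).
func pageVisible(flags map[string]bool, path string) bool {
    key := pluginPageFeatureFlagPrefix + filepath.Clean(path)
    if v, ok := flags[key]; ok {
        return v
    }
    return true
}

func main() {
    flags := map[string]bool{"plugin-page-visible./a/my-plugin/settings": false}
    fmt.Println(pageVisible(flags, "/a/my-plugin/settings/")) // false: trailing slash is cleaned away
    fmt.Println(pageVisible(flags, "/a/my-plugin/config"))    // true: no flag, default open
}

Cleaning the path is what makes a trailing-slash request resolve to the same flag key, which the removed tests below cover explicitly.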

View File

@@ -1,17 +1,12 @@
package middleware
import (
"context"
"errors"
"fmt"
"net/http"
"net/http/httptest"
"sync"
"testing"
"github.com/open-feature/go-sdk/openfeature"
"github.com/open-feature/go-sdk/openfeature/memprovider"
oftesting "github.com/open-feature/go-sdk/openfeature/testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -33,8 +28,6 @@ import (
"github.com/grafana/grafana/pkg/web"
)
var openfeatureTestMutex sync.Mutex
func setupAuthMiddlewareTest(t *testing.T, identity *authn.Identity, authErr error) *contexthandler.ContextHandler {
return contexthandler.ProvideService(setting.NewCfg(), &authntest.FakeService{
ExpectedErr: authErr,
@@ -429,60 +422,6 @@ func TestCanAdminPlugin(t *testing.T) {
}
}
func TestPageIsFeatureToggleEnabled(t *testing.T) {
type testCase struct {
desc string
path string
flags map[string]bool
expectedResult bool
}
tests := []testCase{
{
desc: "returns true when feature flag is enabled",
path: "/a/my-plugin/settings",
flags: map[string]bool{
pluginPageFeatureFlagPrefix + "/a/my-plugin/settings": true,
},
expectedResult: true,
},
{
desc: "returns false when feature flag is disabled",
path: "/a/my-plugin/settings",
flags: map[string]bool{
pluginPageFeatureFlagPrefix + "/a/my-plugin/settings": false,
},
expectedResult: false,
},
{
desc: "returns false when feature flag is disabled with trailing slash",
path: "/a/my-plugin/settings/",
flags: map[string]bool{
pluginPageFeatureFlagPrefix + "/a/my-plugin/settings": false,
},
expectedResult: false,
},
{
desc: "returns true when feature flag does not exist",
path: "/a/my-plugin/settings",
flags: map[string]bool{},
expectedResult: true,
},
}
for _, tt := range tests {
t.Run(tt.desc, func(t *testing.T) {
ctx := context.Background()
setupTestProvider(t, tt.flags)
result := PageIsFeatureToggleEnabled(ctx, tt.path)
assert.Equal(t, tt.expectedResult, result)
})
}
}
func contextProvider(modifiers ...func(c *contextmodel.ReqContext)) web.Handler {
return func(c *web.Context) {
reqCtx := &contextmodel.ReqContext{
@@ -498,38 +437,3 @@ func contextProvider(modifiers ...func(c *contextmodel.ReqContext)) web.Handler
c.Req = c.Req.WithContext(ctxkey.Set(c.Req.Context(), reqCtx))
}
}
// setupTestProvider creates a test OpenFeature provider with the given flags.
// Uses a global lock to prevent concurrent provider changes across tests.
func setupTestProvider(t *testing.T, flags map[string]bool) oftesting.TestProvider {
t.Helper()
// Lock to prevent concurrent provider changes
openfeatureTestMutex.Lock()
testProvider := oftesting.NewTestProvider()
flagsMap := map[string]memprovider.InMemoryFlag{}
for key, value := range flags {
flagsMap[key] = memprovider.InMemoryFlag{
DefaultVariant: "defaultVariant",
Variants: map[string]any{
"defaultVariant": value,
},
}
}
testProvider.UsingFlags(t, flagsMap)
err := openfeature.SetProviderAndWait(testProvider)
require.NoError(t, err)
t.Cleanup(func() {
testProvider.Cleanup()
_ = openfeature.SetProviderAndWait(openfeature.NoopProvider{})
// Unlock after cleanup to allow other tests to run
openfeatureTestMutex.Unlock()
})
return testProvider
}

View File

@@ -507,64 +507,6 @@ func TestFolderMetaAccessor(t *testing.T) {
})
}
func TestPathCreationError(t *testing.T) {
t.Run("Error method returns formatted message", func(t *testing.T) {
underlyingErr := fmt.Errorf("underlying error")
pathErr := &resources.PathCreationError{
Path: "grafana/folder-1",
Err: underlyingErr,
}
expectedMsg := "failed to create path grafana/folder-1: underlying error"
require.Equal(t, expectedMsg, pathErr.Error())
})
t.Run("Unwrap returns underlying error", func(t *testing.T) {
underlyingErr := fmt.Errorf("underlying error")
pathErr := &resources.PathCreationError{
Path: "grafana/folder-1",
Err: underlyingErr,
}
unwrapped := pathErr.Unwrap()
require.Equal(t, underlyingErr, unwrapped)
require.EqualError(t, unwrapped, "underlying error")
})
t.Run("errors.Is finds underlying error", func(t *testing.T) {
underlyingErr := fmt.Errorf("underlying error")
pathErr := &resources.PathCreationError{
Path: "grafana/folder-1",
Err: underlyingErr,
}
require.True(t, errors.Is(pathErr, underlyingErr))
require.False(t, errors.Is(pathErr, fmt.Errorf("different error")))
})
t.Run("errors.As extracts PathCreationError", func(t *testing.T) {
underlyingErr := fmt.Errorf("underlying error")
pathErr := &resources.PathCreationError{
Path: "grafana/folder-1",
Err: underlyingErr,
}
var extractedErr *resources.PathCreationError
require.True(t, errors.As(pathErr, &extractedErr))
require.NotNil(t, extractedErr)
require.Equal(t, "grafana/folder-1", extractedErr.Path)
require.Equal(t, underlyingErr, extractedErr.Err)
})
t.Run("errors.As returns false for non-PathCreationError", func(t *testing.T) {
regularErr := fmt.Errorf("regular error")
var extractedErr *resources.PathCreationError
require.False(t, errors.As(regularErr, &extractedErr))
require.Nil(t, extractedErr)
})
}
// mockDynamicInterface implements a simplified version of the dynamic.ResourceInterface
type mockDynamicInterface struct {
dynamic.ResourceInterface

View File

@@ -71,98 +71,6 @@ func (_c *MockJobProgressRecorder_Complete_Call) RunAndReturn(run func(context.C
return _c
}
// HasDirPathFailedDeletion provides a mock function with given fields: folderPath
func (_m *MockJobProgressRecorder) HasDirPathFailedDeletion(folderPath string) bool {
ret := _m.Called(folderPath)
if len(ret) == 0 {
panic("no return value specified for HasDirPathFailedDeletion")
}
var r0 bool
if rf, ok := ret.Get(0).(func(string) bool); ok {
r0 = rf(folderPath)
} else {
r0 = ret.Get(0).(bool)
}
return r0
}
// MockJobProgressRecorder_HasDirPathFailedDeletion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HasDirPathFailedDeletion'
type MockJobProgressRecorder_HasDirPathFailedDeletion_Call struct {
*mock.Call
}
// HasDirPathFailedDeletion is a helper method to define mock.On call
// - folderPath string
func (_e *MockJobProgressRecorder_Expecter) HasDirPathFailedDeletion(folderPath interface{}) *MockJobProgressRecorder_HasDirPathFailedDeletion_Call {
return &MockJobProgressRecorder_HasDirPathFailedDeletion_Call{Call: _e.mock.On("HasDirPathFailedDeletion", folderPath)}
}
func (_c *MockJobProgressRecorder_HasDirPathFailedDeletion_Call) Run(run func(folderPath string)) *MockJobProgressRecorder_HasDirPathFailedDeletion_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(string))
})
return _c
}
func (_c *MockJobProgressRecorder_HasDirPathFailedDeletion_Call) Return(_a0 bool) *MockJobProgressRecorder_HasDirPathFailedDeletion_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockJobProgressRecorder_HasDirPathFailedDeletion_Call) RunAndReturn(run func(string) bool) *MockJobProgressRecorder_HasDirPathFailedDeletion_Call {
_c.Call.Return(run)
return _c
}
// HasDirPathFailedCreation provides a mock function with given fields: path
func (_m *MockJobProgressRecorder) HasDirPathFailedCreation(path string) bool {
ret := _m.Called(path)
if len(ret) == 0 {
panic("no return value specified for HasDirPathFailedCreation")
}
var r0 bool
if rf, ok := ret.Get(0).(func(string) bool); ok {
r0 = rf(path)
} else {
r0 = ret.Get(0).(bool)
}
return r0
}
// MockJobProgressRecorder_HasDirPathFailedCreation_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HasDirPathFailedCreation'
type MockJobProgressRecorder_HasDirPathFailedCreation_Call struct {
*mock.Call
}
// HasDirPathFailedCreation is a helper method to define mock.On call
// - path string
func (_e *MockJobProgressRecorder_Expecter) HasDirPathFailedCreation(path interface{}) *MockJobProgressRecorder_HasDirPathFailedCreation_Call {
return &MockJobProgressRecorder_HasDirPathFailedCreation_Call{Call: _e.mock.On("HasDirPathFailedCreation", path)}
}
func (_c *MockJobProgressRecorder_HasDirPathFailedCreation_Call) Run(run func(path string)) *MockJobProgressRecorder_HasDirPathFailedCreation_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(string))
})
return _c
}
func (_c *MockJobProgressRecorder_HasDirPathFailedCreation_Call) Return(_a0 bool) *MockJobProgressRecorder_HasDirPathFailedCreation_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockJobProgressRecorder_HasDirPathFailedCreation_Call) RunAndReturn(run func(string) bool) *MockJobProgressRecorder_HasDirPathFailedCreation_Call {
_c.Call.Return(run)
return _c
}
// Record provides a mock function with given fields: ctx, result
func (_m *MockJobProgressRecorder) Record(ctx context.Context, result JobResourceResult) {
_m.Called(ctx, result)

View File

@@ -2,7 +2,6 @@ package jobs
import (
"context"
"errors"
"fmt"
"sync"
"time"
@@ -10,8 +9,6 @@ import (
"github.com/grafana/grafana-app-sdk/logging"
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/apps/provisioning/pkg/safepath"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
// maybeNotifyProgress will only notify if a certain amount of time has passed
@@ -61,8 +58,6 @@ type jobProgressRecorder struct {
notifyImmediatelyFn ProgressFn
maybeNotifyFn ProgressFn
summaries map[string]*provisioning.JobResourceSummary
failedCreations []string // Tracks folder paths that failed to be created
failedDeletions []string // Tracks resource paths that failed to be deleted
}
func newJobProgressRecorder(ProgressFn ProgressFn) JobProgressRecorder {
@@ -89,26 +84,10 @@ func (r *jobProgressRecorder) Record(ctx context.Context, result JobResourceResu
if result.Error != nil {
shouldLogError = true
logErr = result.Error
// Don't count ignored actions as errors in error count or error list
if result.Action != repository.FileActionIgnored {
if len(r.errors) < 20 {
r.errors = append(r.errors, result.Error.Error())
}
r.errorCount++
}
// Automatically track failed operations based on error type and action
// Check if this is a PathCreationError (folder creation failure)
var pathErr *resources.PathCreationError
if errors.As(result.Error, &pathErr) {
r.failedCreations = append(r.failedCreations, pathErr.Path)
}
// Track failed deletions; any failed child deletion will prevent the parent folder from being deleted (as it won't be empty)
if result.Action == repository.FileActionDeleted {
r.failedDeletions = append(r.failedDeletions, result.Path)
if len(r.errors) < 20 {
r.errors = append(r.errors, result.Error.Error())
}
r.errorCount++
}
r.updateSummary(result)
@@ -133,8 +112,6 @@ func (r *jobProgressRecorder) ResetResults() {
r.errorCount = 0
r.errors = nil
r.summaries = make(map[string]*provisioning.JobResourceSummary)
r.failedCreations = nil
r.failedDeletions = nil
}
func (r *jobProgressRecorder) SetMessage(ctx context.Context, msg string) {
@@ -332,29 +309,3 @@ func (r *jobProgressRecorder) Complete(ctx context.Context, err error) provision
return jobStatus
}
// HasDirPathFailedCreation checks if a path is nested under any failed folder creation
func (r *jobProgressRecorder) HasDirPathFailedCreation(path string) bool {
r.mu.RLock()
defer r.mu.RUnlock()
for _, failedCreation := range r.failedCreations {
if safepath.InDir(path, failedCreation) {
return true
}
}
return false
}
// HasDirPathFailedDeletion checks if any resource deletions failed under a folder path
func (r *jobProgressRecorder) HasDirPathFailedDeletion(folderPath string) bool {
r.mu.RLock()
defer r.mu.RUnlock()
for _, failedDeletion := range r.failedDeletions {
if safepath.InDir(failedDeletion, folderPath) {
return true
}
}
return false
}
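
The deleted methods above implement a simple containment rule: a path is affected by a failed creation when it sits underneath the failed folder, and a folder is affected by failed deletions when any failed deletion sits underneath it. Here is a rough sketch of those two checks, using a prefix comparison as a simplified stand-in for safepath.InDir; the helper names are illustrative, not the provisioning package's API.

package main

import (
    "fmt"
    "strings"
)

// inDir reports whether path sits underneath dir (dir is expected to end with "/").
// This prefix check is a simplification; the real safepath.InDir may handle more edge cases.
func inDir(path, dir string) bool {
    return strings.HasPrefix(path, dir)
}

// hasDirPathFailedCreation mirrors HasDirPathFailedCreation: a path is affected
// when it is nested under any folder whose creation failed.
func hasDirPathFailedCreation(failedCreations []string, path string) bool {
    for _, failed := range failedCreations {
        if inDir(path, failed) {
            return true
        }
    }
    return false
}

// hasDirPathFailedDeletion mirrors HasDirPathFailedDeletion: a folder is affected
// when any failed resource deletion happened underneath it.
func hasDirPathFailedDeletion(failedDeletions []string, folderPath string) bool {
    for _, failed := range failedDeletions {
        if inDir(failed, folderPath) {
            return true
        }
    }
    return false
}

func main() {
    failedCreations := []string{"folder1/"}
    failedDeletions := []string{"folder3/nested/deep/file3.json"}

    fmt.Println(hasDirPathFailedCreation(failedCreations, "folder1/nested/file.json")) // true
    fmt.Println(hasDirPathFailedCreation(failedCreations, "folder2/file.json"))        // false
    fmt.Println(hasDirPathFailedDeletion(failedDeletions, "folder3/"))                 // true
    fmt.Println(hasDirPathFailedDeletion(failedDeletions, "other/"))                   // false
}

The expected outputs mirror the cases exercised by the removed recorder tests further down.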

View File

@@ -7,7 +7,6 @@ import (
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -253,221 +252,3 @@ func TestJobProgressRecorderWarningOnlyNoErrors(t *testing.T) {
require.NotNil(t, finalStatus.Warnings)
assert.Len(t, finalStatus.Warnings, 1)
}
func TestJobProgressRecorderFolderFailureTracking(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Record a folder creation failure with PathCreationError
pathErr := &resources.PathCreationError{
Path: "folder1/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file.json",
Action: repository.FileActionCreated,
Error: pathErr,
})
// Record another PathCreationError for a different folder
pathErr2 := &resources.PathCreationError{
Path: "folder2/subfolder/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder2/subfolder/file.json",
Action: repository.FileActionCreated,
Error: pathErr2,
})
// Record a deletion failure
recorder.Record(ctx, JobResourceResult{
Path: "folder3/file1.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
// Record another deletion failure
recorder.Record(ctx, JobResourceResult{
Path: "folder4/subfolder/file2.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
// Verify failed creations are tracked
recorder.mu.RLock()
assert.Len(t, recorder.failedCreations, 2)
assert.Contains(t, recorder.failedCreations, "folder1/")
assert.Contains(t, recorder.failedCreations, "folder2/subfolder/")
// Verify failed deletions are tracked
assert.Len(t, recorder.failedDeletions, 2)
assert.Contains(t, recorder.failedDeletions, "folder3/file1.json")
assert.Contains(t, recorder.failedDeletions, "folder4/subfolder/file2.json")
recorder.mu.RUnlock()
}
func TestJobProgressRecorderHasDirPathFailedCreation(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Add failed creations via Record
pathErr1 := &resources.PathCreationError{
Path: "folder1/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file.json",
Action: repository.FileActionCreated,
Error: pathErr1,
})
pathErr2 := &resources.PathCreationError{
Path: "folder2/subfolder/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder2/subfolder/file.json",
Action: repository.FileActionCreated,
Error: pathErr2,
})
// Test nested paths
assert.True(t, recorder.HasDirPathFailedCreation("folder1/file.json"))
assert.True(t, recorder.HasDirPathFailedCreation("folder1/nested/file.json"))
assert.True(t, recorder.HasDirPathFailedCreation("folder2/subfolder/file.json"))
// Test non-nested paths
assert.False(t, recorder.HasDirPathFailedCreation("folder2/file2.json"))
assert.False(t, recorder.HasDirPathFailedCreation("folder2/othersubfolder/inside.json"))
assert.False(t, recorder.HasDirPathFailedCreation("other/file.json"))
assert.False(t, recorder.HasDirPathFailedCreation("folder3/file.json"))
assert.False(t, recorder.HasDirPathFailedCreation("file.json"))
}
func TestJobProgressRecorderHasDirPathFailedDeletion(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Add failed deletions via Record
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file1.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
recorder.Record(ctx, JobResourceResult{
Path: "folder2/subfolder/file2.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
recorder.Record(ctx, JobResourceResult{
Path: "folder3/nested/deep/file3.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
// Test folder paths with failed deletions
assert.True(t, recorder.HasDirPathFailedDeletion("folder1/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder2/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder2/subfolder/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder3/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder3/nested/"))
assert.True(t, recorder.HasDirPathFailedDeletion("folder3/nested/deep/"))
// Test folder paths without failed deletions
assert.False(t, recorder.HasDirPathFailedDeletion("other/"))
assert.False(t, recorder.HasDirPathFailedDeletion("different/"))
assert.False(t, recorder.HasDirPathFailedDeletion("folder2/othersubfolder/"))
assert.False(t, recorder.HasDirPathFailedDeletion("folder2/subfolder/othersubfolder/"))
assert.False(t, recorder.HasDirPathFailedDeletion("folder3/nested/anotherdeep/"))
assert.False(t, recorder.HasDirPathFailedDeletion("folder3/nested/deep/insidedeep/"))
}
func TestJobProgressRecorderResetResults(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Add some data via Record
pathErr := &resources.PathCreationError{
Path: "folder1/",
Err: assert.AnError,
}
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file.json",
Action: repository.FileActionCreated,
Error: pathErr,
})
recorder.Record(ctx, JobResourceResult{
Path: "folder2/file.json",
Action: repository.FileActionDeleted,
Error: assert.AnError,
})
// Verify data is stored
recorder.mu.RLock()
assert.Len(t, recorder.failedCreations, 1)
assert.Len(t, recorder.failedDeletions, 1)
recorder.mu.RUnlock()
// Reset results
recorder.ResetResults()
// Verify data is cleared
recorder.mu.RLock()
assert.Nil(t, recorder.failedCreations)
assert.Nil(t, recorder.failedDeletions)
recorder.mu.RUnlock()
}
func TestJobProgressRecorderIgnoredActionsDontCountAsErrors(t *testing.T) {
ctx := context.Background()
// Create a progress recorder
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
return nil
}
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
// Record an ignored action with error
recorder.Record(ctx, JobResourceResult{
Path: "folder1/file1.json",
Action: repository.FileActionIgnored,
Error: assert.AnError,
})
// Record a real error for comparison
recorder.Record(ctx, JobResourceResult{
Path: "folder2/file2.json",
Action: repository.FileActionCreated,
Error: assert.AnError,
})
// Verify error count doesn't include ignored actions
recorder.mu.RLock()
assert.Equal(t, 1, recorder.errorCount, "ignored actions should not be counted as errors")
assert.Len(t, recorder.errors, 1, "ignored action errors should not be in error list")
recorder.mu.RUnlock()
}

View File

@@ -29,10 +29,6 @@ type JobProgressRecorder interface {
StrictMaxErrors(maxErrors int)
SetRefURLs(ctx context.Context, refURLs *provisioning.RepositoryURLs)
Complete(ctx context.Context, err error) provisioning.JobStatus
// HasDirPathFailedCreation checks if the path is nested under any folder creation that failed
HasDirPathFailedCreation(path string) bool
// HasDirPathFailedDeletion checks if any resource deletions failed under the given folderPath
HasDirPathFailedDeletion(folderPath string) bool
}
// Worker is a worker that can process a job

View File

@@ -75,47 +75,11 @@ func FullSync(
return applyChanges(ctx, changes, clients, repositoryResources, progress, tracer, maxSyncWorkers, metrics)
}
// shouldSkipChange checks if a change should be skipped based on previous failures on parent/child folders.
// If there is a previous failure on the path, we don't need to process the change as it will fail anyway.
func shouldSkipChange(ctx context.Context, change ResourceFileChange, progress jobs.JobProgressRecorder, tracer tracing.Tracer) bool {
if change.Action != repository.FileActionDeleted && progress.HasDirPathFailedCreation(change.Path) {
skipCtx, skipSpan := tracer.Start(ctx, "provisioning.sync.full.apply_changes.skip_nested_resource")
skipSpan.SetAttributes(attribute.String("path", change.Path))
progress.Record(skipCtx, jobs.JobResourceResult{
Path: change.Path,
Action: repository.FileActionIgnored,
Warning: fmt.Errorf("resource was not processed because the parent folder could not be created"),
})
skipSpan.End()
return true
}
if change.Action == repository.FileActionDeleted && safepath.IsDir(change.Path) && progress.HasDirPathFailedDeletion(change.Path) {
skipCtx, skipSpan := tracer.Start(ctx, "provisioning.sync.full.apply_changes.skip_folder_with_failed_deletions")
skipSpan.SetAttributes(attribute.String("path", change.Path))
progress.Record(skipCtx, jobs.JobResourceResult{
Path: change.Path,
Action: repository.FileActionIgnored,
Group: resources.FolderKind.Group,
Kind: resources.FolderKind.Kind,
Warning: fmt.Errorf("folder was not processed because children resources in its path could not be deleted"),
})
skipSpan.End()
return true
}
return false
}
func applyChange(ctx context.Context, change ResourceFileChange, clients resources.ResourceClients, repositoryResources resources.RepositoryResources, progress jobs.JobProgressRecorder, tracer tracing.Tracer) {
if ctx.Err() != nil {
return
}
if shouldSkipChange(ctx, change, progress, tracer) {
return
}
if change.Action == repository.FileActionDeleted {
deleteCtx, deleteSpan := tracer.Start(ctx, "provisioning.sync.full.apply_changes.delete")
result := jobs.JobResourceResult{
@@ -174,7 +138,6 @@ func applyChange(ctx context.Context, change ResourceFileChange, clients resourc
ensureFolderSpan.RecordError(err)
ensureFolderSpan.End()
progress.Record(ctx, result)
return
}
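
Taken together with the recorder changes above, the removed shouldSkipChange encoded two rules: writes under a folder that failed to be created are ignored, while deletions still proceed (the resource may exist from an earlier sync), and a folder deletion is ignored when one of its children failed to delete. A condensed sketch of that decision follows, with illustrative types, predicates passed in as functions, and a HasSuffix check standing in for safepath.IsDir.

package main

import (
    "fmt"
    "strings"
)

type action string

const (
    actionCreated action = "created"
    actionDeleted action = "deleted"
)

type change struct {
    path   string
    action action
}

func shouldSkip(c change, failedCreation, failedDeletion func(string) bool) (bool, string) {
    // Creations, updates and renames under a folder that failed to be created would fail anyway.
    if c.action != actionDeleted && failedCreation(c.path) {
        return true, "parent folder could not be created"
    }
    // A folder deletion is skipped when a child resource failed to delete; removing the
    // still-populated folder would orphan its remaining children.
    if c.action == actionDeleted && strings.HasSuffix(c.path, "/") && failedDeletion(c.path) {
        return true, "children could not be deleted"
    }
    return false, ""
}

func main() {
    failedCreation := func(p string) bool { return strings.HasPrefix(p, "folder1/") }
    failedDeletion := func(dir string) bool { return strings.HasPrefix("folder3/file.json", dir) }

    fmt.Println(shouldSkip(change{"folder1/new.json", actionCreated}, failedCreation, failedDeletion)) // skipped
    fmt.Println(shouldSkip(change{"folder1/old.json", actionDeleted}, failedCreation, failedDeletion)) // not skipped: deletions still proceed
    fmt.Println(shouldSkip(change{"folder3/", actionDeleted}, failedCreation, failedDeletion))         // skipped: a child deletion failed
}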

View File

@@ -1,432 +0,0 @@
package sync
import (
"context"
"fmt"
"testing"
"github.com/prometheus/client_golang/prometheus"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
dynamicfake "k8s.io/client-go/dynamic/fake"
k8testing "k8s.io/client-go/testing"
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/jobs"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
/*
TestFullSync_HierarchicalErrorHandling tests the hierarchical error handling behavior:
FOLDER CREATION FAILURES:
- When a folder fails to be created with PathCreationError, all nested resources are skipped
- Nested resources are recorded with FileActionIgnored and the warning "resource was not processed because the parent folder could not be created"
- Only the folder creation error counts toward error limits
- Nested resource skips do NOT count toward error limits
FOLDER DELETION FAILURES:
- When a file deletion fails, it's tracked in failedDeletions
- When cleaning up folders, we check HasDirPathFailedDeletion()
- If children failed to delete, folder deletion is skipped with FileActionIgnored
- This prevents orphaning resources that still exist
DELETIONS NOT AFFECTED BY CREATION FAILURES:
- If a folder creation fails, deletion operations for resources in that folder still proceed
- This is because the resource might already exist from a previous sync
- Only creations/updates/renames are affected by failed folder creation
AUTOMATIC TRACKING:
- Record() automatically detects PathCreationError and adds to failedCreations
- Record() automatically detects deletion failures and adds to failedDeletions
- No manual calls to AddFailedCreation/AddFailedDeletion needed
*/
func TestFullSync_HierarchicalErrorHandling(t *testing.T) { // nolint:gocyclo
tests := []struct {
name string
setupMocks func(*repository.MockRepository, *resources.MockRepositoryResources, *resources.MockResourceClients, *jobs.MockJobProgressRecorder, *dynamicfake.FakeDynamicClient)
changes []ResourceFileChange
description string
expectError bool
errorContains string
}{
{
name: "folder creation fails, nested file skipped",
description: "When folder1/ fails to create, folder1/file.json should be skipped with FileActionIgnored",
changes: []ResourceFileChange{
{Path: "folder1/file.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
// First, check if nested under failed creation - not yet
progress.On("HasDirPathFailedCreation", "folder1/file.json").Return(false).Once()
// WriteResourceFromFile fails with PathCreationError for folder1/
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
// File will be recorded with error, triggering automatic tracking of folder1/ failure
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file.json" && r.Error != nil && r.Action == repository.FileActionCreated
})).Return().Once()
},
},
{
name: "folder creation fails, multiple nested resources skipped",
description: "When folder1/ fails to create, all nested resources (subfolder, files) are skipped",
changes: []ResourceFileChange{
{Path: "folder1/file1.json", Action: repository.FileActionCreated},
{Path: "folder1/subfolder/file2.json", Action: repository.FileActionCreated},
{Path: "folder1/file3.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
// First file triggers folder creation failure
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file1.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" && r.Error != nil
})).Return().Once()
// Subsequent files in same folder are skipped
progress.On("HasDirPathFailedCreation", "folder1/subfolder/file2.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/subfolder/file2.json" &&
r.Action == repository.FileActionIgnored &&
r.Warning != nil &&
r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
progress.On("HasDirPathFailedCreation", "folder1/file3.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file3.json" &&
r.Action == repository.FileActionIgnored &&
r.Warning != nil &&
r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
},
},
{
name: "file deletion failure tracked",
description: "When a file deletion fails, it's automatically tracked in failedDeletions",
changes: []ResourceFileChange{
{
Path: "folder1/file.json",
Action: repository.FileActionDeleted,
Existing: &provisioning.ResourceListItem{
Name: "file1",
Group: "dashboard.grafana.app",
Resource: "dashboards",
},
},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
// File deletion fails
dynamicClient.PrependReactor("delete", "dashboards", func(action k8testing.Action) (bool, runtime.Object, error) {
return true, nil, fmt.Errorf("permission denied")
})
// File deletion recorded with error, automatically tracked in failedDeletions
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file.json" &&
r.Action == repository.FileActionDeleted &&
r.Error != nil
})).Return().Once()
},
},
{
name: "deletion proceeds despite creation failure",
description: "When folder1/ fails to create, deletion of folder1/file2.json still proceeds (resource might exist from previous sync)",
changes: []ResourceFileChange{
{Path: "folder1/file1.json", Action: repository.FileActionCreated},
{
Path: "folder1/file2.json",
Action: repository.FileActionDeleted,
Existing: &provisioning.ResourceListItem{
Name: "file2",
Group: "dashboard.grafana.app",
Resource: "dashboards",
},
},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
// Creation fails
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file1.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" && r.Error != nil
})).Return().Once()
// Deletion proceeds (NOT checking HasDirPathFailedCreation for deletions)
// Note: deletion will fail because resource doesn't exist, but that's fine for this test
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
// Record deletion attempt (will have error since resource doesn't exist, but that's ok)
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file2.json" &&
r.Action == repository.FileActionDeleted
// Not checking r.Error because resource doesn't exist in fake client
})).Return().Once()
},
},
{
name: "multi-level nesting - all skipped",
description: "When level1/ fails, level1/level2/level3/file.json is also skipped",
changes: []ResourceFileChange{
{Path: "level1/file1.json", Action: repository.FileActionCreated},
{Path: "level1/level2/file2.json", Action: repository.FileActionCreated},
{Path: "level1/level2/level3/file3.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
// First file triggers level1/ failure
progress.On("HasDirPathFailedCreation", "level1/file1.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "level1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "level1/file1.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/file1.json" && r.Error != nil
})).Return().Once()
// All nested files are skipped
for _, path := range []string{"level1/level2/file2.json", "level1/level2/level3/file3.json"} {
progress.On("HasDirPathFailedCreation", path).Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Action == repository.FileActionIgnored
})).Return().Once()
}
},
},
{
name: "mixed success and failure",
description: "When success/ works and failure/ fails, only failure/* are skipped",
changes: []ResourceFileChange{
{Path: "success/file1.json", Action: repository.FileActionCreated},
{Path: "failure/file2.json", Action: repository.FileActionCreated},
{Path: "failure/nested/file3.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
// Success path works
progress.On("HasDirPathFailedCreation", "success/file1.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/file1.json", "").
Return("resource1", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/file1.json" && r.Error == nil
})).Return().Once()
// Failure path fails
progress.On("HasDirPathFailedCreation", "failure/file2.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "failure/", Err: fmt.Errorf("disk full")}
repoResources.On("WriteResourceFromFile", mock.Anything, "failure/file2.json", "").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/file2.json" && r.Error != nil
})).Return().Once()
// Nested file in failure path is skipped
progress.On("HasDirPathFailedCreation", "failure/nested/file3.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/nested/file3.json" && r.Action == repository.FileActionIgnored
})).Return().Once()
},
},
{
name: "folder creation fails with explicit folder in changes",
description: "When folder1/ is explicitly in changes and fails to create, all nested resources (subfolders and files) are skipped",
changes: []ResourceFileChange{
{Path: "folder1/", Action: repository.FileActionCreated},
{Path: "folder1/subfolder/", Action: repository.FileActionCreated},
{Path: "folder1/file1.json", Action: repository.FileActionCreated},
{Path: "folder1/subfolder/file2.json", Action: repository.FileActionCreated},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, _ *dynamicfake.FakeDynamicClient) {
progress.On("HasDirPathFailedCreation", "folder1/").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("EnsureFolderPathExist", mock.Anything, "folder1/").Return("", folderErr).Once()
progress.On("HasDirPathFailedCreation", "folder1/subfolder/").Return(true).Once()
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(true).Once()
progress.On("HasDirPathFailedCreation", "folder1/subfolder/file2.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/" && r.Error != nil
})).Return().Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/subfolder/" && r.Action == repository.FileActionIgnored
})).Return().Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" && r.Action == repository.FileActionIgnored
})).Return().Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/subfolder/file2.json" && r.Action == repository.FileActionIgnored
})).Return().Once()
},
},
{
name: "folder deletion prevented when child deletion fails",
description: "When a file deletion fails, folder deletion is skipped with FileActionIgnored to prevent orphaning resources",
changes: []ResourceFileChange{
{
Path: "folder1/file1.json",
Action: repository.FileActionDeleted,
Existing: &provisioning.ResourceListItem{Name: "file1", Group: "dashboard.grafana.app", Resource: "dashboards"},
},
{Path: "folder1/", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "folder1", Group: "folder.grafana.app", Resource: "Folder"}},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
dynamicClient.PrependReactor("delete", "dashboards", func(action k8testing.Action) (bool, runtime.Object, error) {
return true, nil, fmt.Errorf("permission denied")
})
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" && r.Error != nil
})).Return().Once()
progress.On("HasDirPathFailedDeletion", "folder1/").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/" && r.Action == repository.FileActionIgnored
})).Return().Once()
},
},
{
name: "multiple folder deletion failures",
description: "When multiple independent folders have child deletion failures, all folder deletions are skipped",
changes: []ResourceFileChange{
{Path: "folder1/file1.json", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "file1", Group: "dashboard.grafana.app", Resource: "dashboards"}},
{Path: "folder1/", Action: repository.FileActionDeleted},
{Path: "folder2/file2.json", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "file2", Group: "dashboard.grafana.app", Resource: "dashboards"}},
{Path: "folder2/", Action: repository.FileActionDeleted},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
dynamicClient.PrependReactor("delete", "dashboards", func(action k8testing.Action) (bool, runtime.Object, error) {
return true, nil, fmt.Errorf("permission denied")
})
for _, path := range []string{"folder1/file1.json", "folder2/file2.json"} {
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Error != nil
})).Return().Once()
}
progress.On("HasDirPathFailedDeletion", "folder1/").Return(true).Once()
progress.On("HasDirPathFailedDeletion", "folder2/").Return(true).Once()
for _, path := range []string{"folder1/", "folder2/"} {
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Action == repository.FileActionIgnored
})).Return().Once()
}
},
},
{
name: "nested subfolder deletion failure",
description: "When a file deletion fails in a nested subfolder, both the subfolder and parent folder deletions are skipped",
changes: []ResourceFileChange{
{Path: "parent/subfolder/file.json", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "file1", Group: "dashboard.grafana.app", Resource: "dashboards"}},
{Path: "parent/subfolder/", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "subfolder", Group: "folder.grafana.app", Resource: "Folder"}},
{Path: "parent/", Action: repository.FileActionDeleted, Existing: &provisioning.ResourceListItem{Name: "parent", Group: "folder.grafana.app", Resource: "Folder"}},
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, dynamicClient *dynamicfake.FakeDynamicClient) {
gvk := schema.GroupVersionKind{Group: "dashboard.grafana.app", Kind: "Dashboard", Version: "v1"}
gvr := schema.GroupVersionResource{Group: "dashboard.grafana.app", Resource: "dashboards", Version: "v1"}
clients.On("ForResource", mock.Anything, mock.MatchedBy(func(gvr schema.GroupVersionResource) bool {
return gvr.Group == "dashboard.grafana.app"
})).Return(dynamicClient.Resource(gvr), gvk, nil)
dynamicClient.PrependReactor("delete", "dashboards", func(action k8testing.Action) (bool, runtime.Object, error) {
return true, nil, fmt.Errorf("permission denied")
})
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "parent/subfolder/file.json" && r.Error != nil
})).Return().Once()
progress.On("HasDirPathFailedDeletion", "parent/subfolder/").Return(true).Once()
progress.On("HasDirPathFailedDeletion", "parent/").Return(true).Once()
for _, path := range []string{"parent/subfolder/", "parent/"} {
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Action == repository.FileActionIgnored
})).Return().Once()
}
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
scheme := runtime.NewScheme()
dynamicClient := dynamicfake.NewSimpleDynamicClient(scheme)
repo := repository.NewMockRepository(t)
repoResources := resources.NewMockRepositoryResources(t)
clients := resources.NewMockResourceClients(t)
progress := jobs.NewMockJobProgressRecorder(t)
compareFn := NewMockCompareFn(t)
repo.On("Config").Return(&provisioning.Repository{
ObjectMeta: metav1.ObjectMeta{Name: "test-repo"},
Spec: provisioning.RepositorySpec{Title: "Test Repo"},
})
tt.setupMocks(repo, repoResources, clients, progress, dynamicClient)
compareFn.On("Execute", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(tt.changes, nil)
progress.On("SetTotal", mock.Anything, len(tt.changes)).Return()
progress.On("TooManyErrors").Return(nil).Maybe()
err := FullSync(context.Background(), repo, compareFn.Execute, clients, "ref", repoResources, progress, tracing.NewNoopTracerService(), 10, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
if tt.expectError {
require.Error(t, err)
if tt.errorContains != "" {
require.Contains(t, err.Error(), tt.errorContains)
}
} else {
require.NoError(t, err)
}
progress.AssertExpectations(t)
repoResources.AssertExpectations(t)
})
}
}

View File

@@ -213,10 +213,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
return nil
})
progress.On("HasDirPathFailedCreation", mock.MatchedBy(func(path string) bool {
return path == "dashboards/one.json" || path == "dashboards/two.json" || path == "dashboards/three.json"
})).Return(false).Maybe()
repoResources.On("WriteResourceFromFile", mock.Anything, mock.MatchedBy(func(path string) bool {
return path == "dashboards/one.json" || path == "dashboards/two.json" || path == "dashboards/three.json"
}), "").Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil).Maybe()
@@ -239,7 +235,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
@@ -264,7 +259,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, fmt.Errorf("write error"))
@@ -291,7 +285,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
@@ -316,7 +309,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, fmt.Errorf("write error"))
@@ -343,7 +335,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "one/two/three/").Return(false)
repoResources.On("EnsureFolderPathExist", mock.Anything, "one/two/three/").Return("some-folder", nil)
progress.On("Record", mock.Anything, jobs.JobResourceResult{
@@ -366,7 +357,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "one/two/three/").Return(false)
repoResources.On(
"EnsureFolderPathExist",
@@ -591,7 +581,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedDeletion", "to-be-deleted/").Return(false)
scheme := runtime.NewScheme()
require.NoError(t, metav1.AddMetaToScheme(scheme))
@@ -651,7 +640,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedDeletion", "to-be-deleted/").Return(false)
scheme := runtime.NewScheme()
require.NoError(t, metav1.AddMetaToScheme(scheme))
@@ -707,7 +695,6 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
},
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
progress.On("TooManyErrors").Return(nil)
progress.On("HasDirPathFailedCreation", "dashboards/slow.json").Return(false)
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/slow.json", "").
Run(func(args mock.Arguments) {

View File

@@ -60,7 +60,7 @@ func IncrementalSync(ctx context.Context, repo repository.Versioned, previousRef
if len(affectedFolders) > 0 {
cleanupStart := time.Now()
span.AddEvent("checking if impacted folders should be deleted", trace.WithAttributes(attribute.Int("affected_folders", len(affectedFolders))))
err := cleanupOrphanedFolders(ctx, repo, affectedFolders, repositoryResources, tracer, progress)
err := cleanupOrphanedFolders(ctx, repo, affectedFolders, repositoryResources, tracer)
metrics.RecordIncrementalSyncPhase(jobs.IncrementalSyncPhaseCleanup, time.Since(cleanupStart))
if err != nil {
return tracing.Error(span, fmt.Errorf("cleanup orphaned folders: %w", err))
@@ -85,20 +85,6 @@ func applyIncrementalChanges(ctx context.Context, diff []repository.VersionedFil
return nil, tracing.Error(span, err)
}
// Check if this resource is nested under a failed folder creation
// This only applies to creation/update/rename operations, not deletions
if change.Action != repository.FileActionDeleted && progress.HasDirPathFailedCreation(change.Path) {
// Skip this resource since its parent folder failed to be created
skipCtx, skipSpan := tracer.Start(ctx, "provisioning.sync.incremental.skip_nested_resource")
progress.Record(skipCtx, jobs.JobResourceResult{
Path: change.Path,
Action: repository.FileActionIgnored,
Warning: fmt.Errorf("resource was not processed because the parent folder could not be created"),
})
skipSpan.End()
continue
}
if err := resources.IsPathSupported(change.Path); err != nil {
ensureFolderCtx, ensureFolderSpan := tracer.Start(ctx, "provisioning.sync.incremental.ensure_folder_path_exist")
// Maintain the safe segment for empty folders
@@ -112,13 +98,7 @@ func applyIncrementalChanges(ctx context.Context, diff []repository.VersionedFil
if err != nil {
ensureFolderSpan.RecordError(err)
ensureFolderSpan.End()
progress.Record(ensureFolderCtx, jobs.JobResourceResult{
Path: change.Path,
Action: repository.FileActionIgnored,
Error: err,
})
continue
return nil, tracing.Error(span, fmt.Errorf("unable to create empty file folder: %w", err))
}
progress.Record(ensureFolderCtx, jobs.JobResourceResult{
@@ -205,7 +185,6 @@ func cleanupOrphanedFolders(
affectedFolders map[string]string,
repositoryResources resources.RepositoryResources,
tracer tracing.Tracer,
progress jobs.JobProgressRecorder,
) error {
ctx, span := tracer.Start(ctx, "provisioning.sync.incremental.cleanup_orphaned_folders")
defer span.End()
@@ -219,12 +198,6 @@ func cleanupOrphanedFolders(
for path, folderName := range affectedFolders {
span.SetAttributes(attribute.String("folder", folderName))
// Check if any resources under this folder failed to delete
if progress.HasDirPathFailedDeletion(path) {
span.AddEvent("skipping orphaned folder cleanup: a child resource in its path failed to be deleted")
continue
}
// If we can no longer find the folder in Git, then we can delete it from Grafana
_, err := readerRepo.Read(ctx, path, "")
if err != nil && (errors.Is(err, repository.ErrFileNotFound) || apierrors.IsNotFound(err)) {

View File

@@ -1,623 +0,0 @@
package sync
import (
"context"
"fmt"
"testing"
"github.com/prometheus/client_golang/prometheus"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/runtime/schema"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/jobs"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
/*
TestIncrementalSync_HierarchicalErrorHandling tests the hierarchical error handling behavior:
FOLDER CREATION FAILURES:
- When EnsureFolderPathExist fails with PathCreationError, the path is tracked
- Subsequent resources under that path are skipped with FileActionIgnored
- Only the initial folder creation error counts toward error limits
- WriteResourceFromFile can also return PathCreationError for implicit folder creation
FOLDER DELETION FAILURES (cleanupOrphanedFolders):
- When RemoveResourceFromFile fails, path is tracked in failedDeletions
- In cleanupOrphanedFolders, HasDirPathFailedDeletion() is checked before RemoveFolder
- If children failed to delete, folder cleanup is skipped with a span event
DELETIONS NOT AFFECTED BY CREATION FAILURES:
- HasDirPathFailedCreation is NOT checked for FileActionDeleted
- Deletions proceed even if their parent folder failed to be created
- This handles cleanup of resources that exist from previous syncs
RENAME OPERATIONS:
- RenameResourceFile can return PathCreationError for the destination folder
- Renames are affected by failed destination folder creation
- Renames are NOT skipped due to source folder creation failures
AUTOMATIC TRACKING:
- Record() automatically detects PathCreationError via errors.As() and adds to failedCreations
- Record() automatically detects FileActionDeleted with error and adds to failedDeletions
- No manual tracking calls needed
*/
func TestIncrementalSync_HierarchicalErrorHandling(t *testing.T) { // nolint:gocyclo
tests := []struct {
name string
setupMocks func(*repository.MockVersioned, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder)
changes []repository.VersionedFileChange
previousRef string
currentRef string
description string
expectError bool
errorContains string
}{
{
name: "folder creation fails, nested file skipped",
description: "When unsupported/ fails to create via EnsureFolderPathExist, nested file is skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "unsupported/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "unsupported/nested/file2.txt", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// First file triggers folder creation which fails
progress.On("HasDirPathFailedCreation", "unsupported/file.txt").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "unsupported/", Err: fmt.Errorf("permission denied")}
repoResources.On("EnsureFolderPathExist", mock.Anything, "unsupported/").Return("", folderErr).Once()
// First file recorded with error (note: error is from folder creation, but recorded against file)
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/file.txt" &&
r.Action == repository.FileActionIgnored &&
r.Error != nil
})).Return().Once()
// Second file is skipped because parent folder failed
progress.On("HasDirPathFailedCreation", "unsupported/nested/file2.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/nested/file2.txt" &&
r.Action == repository.FileActionIgnored &&
r.Warning != nil &&
r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
},
},
{
name: "WriteResourceFromFile returns PathCreationError, nested resources skipped",
description: "When WriteResourceFromFile implicitly creates a folder and fails, nested resources are skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "folder1/file1.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "folder1/file2.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "folder1/nested/file3.json", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// First file write fails with PathCreationError
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file1.json", "new-ref").
Return("", schema.GroupVersionKind{}, folderErr).Once()
// First file recorded with error, automatically tracked
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" &&
r.Action == repository.FileActionCreated &&
r.Error != nil
})).Return().Once()
// Subsequent files are skipped
progress.On("HasDirPathFailedCreation", "folder1/file2.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file2.json" && r.Action == repository.FileActionIgnored && r.Warning != nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "folder1/nested/file3.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/nested/file3.json" && r.Action == repository.FileActionIgnored && r.Warning != nil
})).Return().Once()
},
},
{
name: "file deletion fails, folder cleanup skipped",
description: "When RemoveResourceFromFile fails, cleanupOrphanedFolders skips folder removal",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionDeleted, Path: "dashboards/file1.json", PreviousRef: "old-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// File deletion fails (deletions don't check HasDirPathFailedCreation)
repoResources.On("RemoveResourceFromFile", mock.Anything, "dashboards/file1.json", "old-ref").
Return("dashboard-1", "folder-uid", schema.GroupVersionKind{Kind: "Dashboard"}, fmt.Errorf("permission denied")).Once()
// Error recorded, automatically tracked in failedDeletions
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "dashboards/file1.json" &&
r.Action == repository.FileActionDeleted &&
r.Error != nil
})).Return().Once()
// During cleanup, folder deletion is skipped
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(true).Once()
// Note: RemoveFolder should NOT be called (verified via AssertNotCalled in test)
},
},
{
name: "deletion proceeds despite creation failure",
description: "When folder1/ creation fails, deletion of folder1/old.json still proceeds",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "folder1/new.json", Ref: "new-ref"},
{Action: repository.FileActionDeleted, Path: "folder1/old.json", PreviousRef: "old-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// Creation fails
progress.On("HasDirPathFailedCreation", "folder1/new.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/new.json", "new-ref").
Return("", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/new.json" && r.Error != nil
})).Return().Once()
// Deletion proceeds (NOT checking HasDirPathFailedCreation for deletions)
repoResources.On("RemoveResourceFromFile", mock.Anything, "folder1/old.json", "old-ref").
Return("old-resource", "", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/old.json" &&
r.Action == repository.FileActionDeleted &&
r.Error == nil // Deletion succeeds!
})).Return().Once()
},
},
{
name: "multi-level nesting cascade",
description: "When level1/ fails, level1/level2/level3/file.json is also skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "level1/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "level1/level2/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "level1/level2/level3/file.txt", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// First file triggers level1/ failure
progress.On("HasDirPathFailedCreation", "level1/file.txt").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "level1/", Err: fmt.Errorf("permission denied")}
repoResources.On("EnsureFolderPathExist", mock.Anything, "level1/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/file.txt" && r.Action == repository.FileActionIgnored
})).Return().Once()
// All nested files are skipped
for _, path := range []string{"level1/level2/file.txt", "level1/level2/level3/file.txt"} {
progress.On("HasDirPathFailedCreation", path).Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == path && r.Action == repository.FileActionIgnored
})).Return().Once()
}
},
},
{
name: "mixed success and failure",
description: "When success/ works and failure/ fails, only failure/* are skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "success/file1.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "success/nested/file2.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "failure/file3.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "failure/nested/file4.txt", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// Success path works
progress.On("HasDirPathFailedCreation", "success/file1.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/file1.json", "new-ref").
Return("resource-1", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/file1.json" && r.Error == nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "success/nested/file2.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/nested/file2.json", "new-ref").
Return("resource-2", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/nested/file2.json" && r.Error == nil
})).Return().Once()
// Failure path fails
progress.On("HasDirPathFailedCreation", "failure/file3.txt").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "failure/", Err: fmt.Errorf("disk full")}
repoResources.On("EnsureFolderPathExist", mock.Anything, "failure/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/file3.txt" && r.Action == repository.FileActionIgnored
})).Return().Once()
// Nested file in failure path is skipped
progress.On("HasDirPathFailedCreation", "failure/nested/file4.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/nested/file4.txt" && r.Action == repository.FileActionIgnored
})).Return().Once()
},
},
{
name: "rename with failed destination folder",
description: "When RenameResourceFile fails with PathCreationError for destination, rename is skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{
Action: repository.FileActionRenamed,
Path: "newfolder/file.json",
PreviousPath: "oldfolder/file.json",
Ref: "new-ref",
PreviousRef: "old-ref",
},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// Rename fails with PathCreationError for destination folder
progress.On("HasDirPathFailedCreation", "newfolder/file.json").Return(false).Once()
folderErr := &resources.PathCreationError{Path: "newfolder/", Err: fmt.Errorf("permission denied")}
repoResources.On("RenameResourceFile", mock.Anything, "oldfolder/file.json", "old-ref", "newfolder/file.json", "new-ref").
Return("", "", schema.GroupVersionKind{}, folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "newfolder/file.json" &&
r.Action == repository.FileActionRenamed &&
r.Error != nil
})).Return().Once()
},
},
{
name: "renamed file still checked, subsequent nested resources skipped",
description: "After rename fails for folder1/file.json, other folder1/* files are skipped",
previousRef: "old-ref",
currentRef: "new-ref",
changes: []repository.VersionedFileChange{
{Action: repository.FileActionRenamed, Path: "folder1/file1.json", PreviousPath: "old/file1.json", Ref: "new-ref", PreviousRef: "old-ref"},
{Action: repository.FileActionCreated, Path: "folder1/file2.json", Ref: "new-ref"},
},
setupMocks: func(repo *repository.MockVersioned, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder) {
// The rename checks HasDirPathFailedCreation on its destination path; folder1/ previously failed to be created, so the rename is skipped
progress.On("HasDirPathFailedCreation", "folder1/file1.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file1.json" &&
r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
// Second file also skipped
progress.On("HasDirPathFailedCreation", "folder1/file2.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file2.json" && r.Action == repository.FileActionIgnored && r.Warning != nil
})).Return().Once()
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
runHierarchicalErrorHandlingTest(t, tt)
})
}
}
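// compositeRepoForTest embeds both mocks so the sync sees a repository that is simultaneously
// a Versioned and a Reader, which the orphaned-folder cleanup path requires.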
type compositeRepoForTest struct {
*repository.MockVersioned
*repository.MockReader
}
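// runHierarchicalErrorHandlingTest applies the shared mock expectations for one table entry,
// runs IncrementalSync against it, and verifies the expectations afterwards.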
func runHierarchicalErrorHandlingTest(t *testing.T, tt struct {
name string
setupMocks func(*repository.MockVersioned, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder)
changes []repository.VersionedFileChange
previousRef string
currentRef string
description string
expectError bool
errorContains string
}) {
var repo repository.Versioned
mockVersioned := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
// For tests that need cleanup (folder deletion), use composite repo
if tt.name == "file deletion fails, folder cleanup skipped" {
mockReader := repository.NewMockReader(t)
repo = &compositeRepoForTest{
MockVersioned: mockVersioned,
MockReader: mockReader,
}
} else {
repo = mockVersioned
}
mockVersioned.On("CompareFiles", mock.Anything, tt.previousRef, tt.currentRef).Return(tt.changes, nil)
progress.On("SetTotal", mock.Anything, len(tt.changes)).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
tt.setupMocks(mockVersioned, repoResources, progress)
err := IncrementalSync(context.Background(), repo, tt.previousRef, tt.currentRef, repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
if tt.expectError {
require.Error(t, err)
if tt.errorContains != "" {
require.Contains(t, err.Error(), tt.errorContains)
}
} else {
require.NoError(t, err)
}
progress.AssertExpectations(t)
repoResources.AssertExpectations(t)
// For deletion tests, verify RemoveFolder was NOT called
if tt.name == "file deletion fails, folder cleanup skipped" {
repoResources.AssertNotCalled(t, "RemoveFolder", mock.Anything, mock.Anything)
}
}
// TestIncrementalSync_HierarchicalErrorHandling_FailedFolderCreation tests nested resource skipping
func TestIncrementalSync_HierarchicalErrorHandling_FailedFolderCreation(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "unsupported/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "unsupported/subfolder/file2.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "unsupported/file3.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "other/file.json", Ref: "new-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 4).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
folderErr := &resources.PathCreationError{Path: "unsupported/", Err: fmt.Errorf("permission denied")}
// The first check happens before any failure has been recorded, so it returns false.
progress.On("HasDirPathFailedCreation", "unsupported/file.txt").Return(false).Once()
repoResources.On("EnsureFolderPathExist", mock.Anything, "unsupported/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/file.txt" && r.Action == repository.FileActionIgnored && r.Error != nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "unsupported/subfolder/file2.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/subfolder/file2.txt" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
progress.On("HasDirPathFailedCreation", "unsupported/file3.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "unsupported/file3.json" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
progress.On("HasDirPathFailedCreation", "other/file.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "other/file.json", "new-ref").
Return("test-resource", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "other/file.json" && r.Action == repository.FileActionCreated && r.Error == nil
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
}
// TestIncrementalSync_HierarchicalErrorHandling_FailedFileDeletion tests folder cleanup prevention
func TestIncrementalSync_HierarchicalErrorHandling_FailedFileDeletion(t *testing.T) {
mockVersioned := repository.NewMockVersioned(t)
mockReader := repository.NewMockReader(t)
repo := &compositeRepoForTest{MockVersioned: mockVersioned, MockReader: mockReader}
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionDeleted, Path: "dashboards/file1.json", PreviousRef: "old-ref"},
}
mockVersioned.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
// Deletions don't check HasDirPathFailedCreation, they go straight to removal
repoResources.On("RemoveResourceFromFile", mock.Anything, "dashboards/file1.json", "old-ref").
Return("dashboard-1", "folder-uid", schema.GroupVersionKind{Kind: "Dashboard"}, fmt.Errorf("permission denied")).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "dashboards/file1.json" && r.Action == repository.FileActionDeleted &&
r.Error != nil && r.Error.Error() == "removing resource from file dashboards/file1.json: permission denied"
})).Return().Once()
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(true).Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
repoResources.AssertNotCalled(t, "RemoveFolder", mock.Anything, mock.Anything)
}
// TestIncrementalSync_HierarchicalErrorHandling_DeletionNotAffectedByCreationFailure tests deletions proceed despite creation failures
func TestIncrementalSync_HierarchicalErrorHandling_DeletionNotAffectedByCreationFailure(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "folder1/file.json", Ref: "new-ref"},
{Action: repository.FileActionDeleted, Path: "folder1/old.json", PreviousRef: "old-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 2).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
// Creation fails
progress.On("HasDirPathFailedCreation", "folder1/file.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "folder1/file.json", "new-ref").
Return("", schema.GroupVersionKind{}, &resources.PathCreationError{Path: "folder1/", Err: fmt.Errorf("permission denied")}).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/file.json" && r.Error != nil
})).Return().Once()
// Deletions don't check HasDirPathFailedCreation, so the deletion is not skipped and goes straight to removal
repoResources.On("RemoveResourceFromFile", mock.Anything, "folder1/old.json", "old-ref").
Return("old-resource", "", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "folder1/old.json" && r.Action == repository.FileActionDeleted && r.Error == nil
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
}
// TestIncrementalSync_HierarchicalErrorHandling_MultiLevelNesting tests multi-level cascade
func TestIncrementalSync_HierarchicalErrorHandling_MultiLevelNesting(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "level1/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "level1/level2/file.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "level1/level2/level3/file.txt", Ref: "new-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 3).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
folderErr := &resources.PathCreationError{Path: "level1/", Err: fmt.Errorf("permission denied")}
// The first check happens before any failure has been recorded, so it returns false.
progress.On("HasDirPathFailedCreation", "level1/file.txt").Return(false).Once()
repoResources.On("EnsureFolderPathExist", mock.Anything, "level1/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/file.txt" && r.Action == repository.FileActionIgnored && r.Error != nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "level1/level2/file.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/level2/file.txt" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
progress.On("HasDirPathFailedCreation", "level1/level2/level3/file.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "level1/level2/level3/file.txt" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
}
// TestIncrementalSync_HierarchicalErrorHandling_MixedSuccessAndFailure tests partial failures
func TestIncrementalSync_HierarchicalErrorHandling_MixedSuccessAndFailure(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionCreated, Path: "success/file1.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "success/nested/file2.json", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "failure/file3.txt", Ref: "new-ref"},
{Action: repository.FileActionCreated, Path: "failure/nested/file4.txt", Ref: "new-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 4).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
progress.On("HasDirPathFailedCreation", "success/file1.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/file1.json", "new-ref").
Return("resource-1", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/file1.json" && r.Action == repository.FileActionCreated && r.Error == nil
})).Return().Once()
progress.On("HasDirPathFailedCreation", "success/nested/file2.json").Return(false).Once()
repoResources.On("WriteResourceFromFile", mock.Anything, "success/nested/file2.json", "new-ref").
Return("resource-2", schema.GroupVersionKind{Kind: "Dashboard"}, nil).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "success/nested/file2.json" && r.Action == repository.FileActionCreated && r.Error == nil
})).Return().Once()
folderErr := &resources.PathCreationError{Path: "failure/", Err: fmt.Errorf("disk full")}
progress.On("HasDirPathFailedCreation", "failure/file3.txt").Return(false).Once()
repoResources.On("EnsureFolderPathExist", mock.Anything, "failure/").Return("", folderErr).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/file3.txt" && r.Action == repository.FileActionIgnored
})).Return().Once()
progress.On("HasDirPathFailedCreation", "failure/nested/file4.txt").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "failure/nested/file4.txt" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
repoResources.AssertExpectations(t)
}
// TestIncrementalSync_HierarchicalErrorHandling_RenameWithFailedFolderCreation tests rename operations affected by folder failures
func TestIncrementalSync_HierarchicalErrorHandling_RenameWithFailedFolderCreation(t *testing.T) {
repo := repository.NewMockVersioned(t)
repoResources := resources.NewMockRepositoryResources(t)
progress := jobs.NewMockJobProgressRecorder(t)
changes := []repository.VersionedFileChange{
{Action: repository.FileActionRenamed, Path: "newfolder/file.json", PreviousPath: "oldfolder/file.json", Ref: "new-ref", PreviousRef: "old-ref"},
}
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
progress.On("TooManyErrors").Return(nil).Maybe()
progress.On("HasDirPathFailedCreation", "newfolder/file.json").Return(true).Once()
progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
return r.Path == "newfolder/file.json" && r.Action == repository.FileActionIgnored &&
r.Warning != nil && r.Warning.Error() == "resource was not processed because the parent folder could not be created"
})).Return().Once()
err := IncrementalSync(context.Background(), repo, "old-ref", "new-ref", repoResources, progress, tracing.NewNoopTracerService(), jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
require.NoError(t, err)
progress.AssertExpectations(t)
}

View File

@@ -92,10 +92,6 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation checks
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
progress.On("HasDirPathFailedCreation", "alerts/alert.yaml").Return(false)
// Mock successful resource writes
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "new-ref").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
@@ -131,9 +127,6 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "unsupported/path/file.txt").Return(false)
// Mock folder creation
repoResources.On("EnsureFolderPathExist", mock.Anything, "unsupported/path/").
Return("test-folder", nil)
@@ -168,9 +161,6 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", ".unsupported/path/file.txt").Return(false)
progress.On("Record", mock.Anything, jobs.JobResourceResult{
Action: repository.FileActionIgnored,
Path: ".unsupported/path/file.txt",
@@ -232,9 +222,6 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "dashboards/new.json").Return(false)
// Mock resource rename
repoResources.On("RenameResourceFile", mock.Anything, "dashboards/old.json", "old-ref", "dashboards/new.json", "new-ref").
Return("renamed-dashboard", "", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
@@ -267,10 +254,6 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "dashboards/ignored.json").Return(false)
progress.On("Record", mock.Anything, jobs.JobResourceResult{
Action: repository.FileActionIgnored,
Path: "dashboards/ignored.json",
@@ -294,28 +277,16 @@ func TestIncrementalSync(t *testing.T) {
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "unsupported/path/file.txt").Return(false)
// Mock folder creation error
repoResources.On("EnsureFolderPathExist", mock.Anything, "unsupported/path/").
Return("", fmt.Errorf("failed to create folder"))
// Mock progress recording with error
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
return result.Action == repository.FileActionIgnored &&
result.Path == "unsupported/path/file.txt" &&
result.Error != nil &&
result.Error.Error() == "failed to create folder"
})).Return()
progress.On("TooManyErrors").Return(nil)
},
previousRef: "old-ref",
currentRef: "new-ref",
expectedCalls: 1,
expectedError: "unable to create empty file folder: failed to create folder",
},
{
name: "error writing resource",
@@ -332,9 +303,6 @@ func TestIncrementalSync(t *testing.T) {
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
// Mock HasDirPathFailedCreation check
progress.On("HasDirPathFailedCreation", "dashboards/test.json").Return(false)
// Mock resource write error
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "new-ref").
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, fmt.Errorf("write failed"))
@@ -404,8 +372,7 @@ func TestIncrementalSync(t *testing.T) {
repo.On("CompareFiles", mock.Anything, "old-ref", "new-ref").Return(changes, nil)
progress.On("SetTotal", mock.Anything, 1).Return()
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
// Mock too many errors - this is checked before processing files, so HasDirPathFailedCreation won't be called
// Mock too many errors
progress.On("TooManyErrors").Return(fmt.Errorf("too many errors occurred"))
},
previousRef: "old-ref",
@@ -461,9 +428,6 @@ func TestIncrementalSync_CleanupOrphanedFolders(t *testing.T) {
repoResources.On("RemoveResourceFromFile", mock.Anything, "dashboards/old.json", "old-ref").
Return("old-dashboard", "folder-uid", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
// Mock HasDirPathFailedDeletion check for cleanup
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(false)
// if the folder is not found in git, there should be a call to remove the folder from grafana
repo.MockReader.On("Read", mock.Anything, "dashboards/", "").
Return((*repository.FileInfo)(nil), repository.ErrFileNotFound)
@@ -489,10 +453,6 @@ func TestIncrementalSync_CleanupOrphanedFolders(t *testing.T) {
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
repoResources.On("RemoveResourceFromFile", mock.Anything, "dashboards/old.json", "old-ref").
Return("old-dashboard", "folder-uid", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
// Mock HasDirPathFailedDeletion check for cleanup
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(false)
// if the folder still exists in git, there should not be a call to delete it from grafana
repo.MockReader.On("Read", mock.Anything, "dashboards/", "").
Return(&repository.FileInfo{}, nil)
@@ -525,13 +485,6 @@ func TestIncrementalSync_CleanupOrphanedFolders(t *testing.T) {
repoResources.On("RemoveResourceFromFile", mock.Anything, "alerts/old-alert.yaml", "old-ref").
Return("old-alert", "folder-uid-2", schema.GroupVersionKind{Kind: "Alert", Group: "alerts"}, nil)
progress.On("Record", mock.Anything, mock.Anything).Return()
progress.On("TooManyErrors").Return(nil)
// Mock HasDirPathFailedDeletion checks for cleanup
progress.On("HasDirPathFailedDeletion", "dashboards/").Return(false)
progress.On("HasDirPathFailedDeletion", "alerts/").Return(false)
// both not found in git, both should be deleted
repo.MockReader.On("Read", mock.Anything, "dashboards/", "").
Return((*repository.FileInfo)(nil), repository.ErrFileNotFound)
@@ -539,6 +492,9 @@ func TestIncrementalSync_CleanupOrphanedFolders(t *testing.T) {
Return((*repository.FileInfo)(nil), repository.ErrFileNotFound)
repoResources.On("RemoveFolder", mock.Anything, "folder-uid-1").Return(nil)
repoResources.On("RemoveFolder", mock.Anything, "folder-uid-2").Return(nil)
progress.On("Record", mock.Anything, mock.Anything).Return()
progress.On("TooManyErrors").Return(nil)
},
},
}

View File

@@ -20,21 +20,6 @@ import (
const MaxNumberOfFolders = 10000
// PathCreationError represents an error that occurred while creating a folder path.
// It contains the path that failed and the underlying error.
type PathCreationError struct {
Path string
Err error
}
func (e *PathCreationError) Unwrap() error {
return e.Err
}
func (e *PathCreationError) Error() string {
return fmt.Sprintf("failed to create path %s: %v", e.Path, e.Err)
}
type FolderManager struct {
repo repository.ReaderWriter
tree FolderTree
@@ -88,11 +73,7 @@ func (fm *FolderManager) EnsureFolderPathExist(ctx context.Context, filePath str
}
if err := fm.EnsureFolderExists(ctx, f, parent); err != nil {
// Wrap in PathCreationError to indicate which path failed
return &PathCreationError{
Path: f.Path,
Err: fmt.Errorf("ensure folder exists: %w", err),
}
return fmt.Errorf("ensure folder exists: %w", err)
}
fm.tree.Add(f, parent)

View File

@@ -1,44 +0,0 @@
package acimpl
import (
"context"
"time"
"github.com/grafana/grafana/pkg/services/accesscontrol"
)
const (
ossBasicRoleSeedLockName = "oss-ac-basic-role-seeder"
ossBasicRoleSeedTimeout = 2 * time.Minute
)
// refreshBasicRolePermissionsInDB ensures basic role permissions are fully derived from in-memory registrations
func (s *Service) refreshBasicRolePermissionsInDB(ctx context.Context, rolesSnapshot map[string][]accesscontrol.Permission) error {
if s.sql == nil || s.seeder == nil {
return nil
}
run := func(ctx context.Context) error {
desired := map[accesscontrol.SeedPermission]struct{}{}
for role, permissions := range rolesSnapshot {
for _, permission := range permissions {
desired[accesscontrol.SeedPermission{BuiltInRole: role, Action: permission.Action, Scope: permission.Scope}] = struct{}{}
}
}
s.seeder.SetDesiredPermissions(desired)
return s.seeder.Seed(ctx)
}
if s.serverLock == nil {
return run(ctx)
}
var err error
errLock := s.serverLock.LockExecuteAndRelease(ctx, ossBasicRoleSeedLockName, ossBasicRoleSeedTimeout, func(ctx context.Context) {
err = run(ctx)
})
if errLock != nil {
return errLock
}
return err
}

View File

@@ -1,128 +0,0 @@
package acimpl
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/localcache"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/database"
"github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
"github.com/grafana/grafana/pkg/services/accesscontrol/resourcepermissions"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/org"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util/testutil"
)
func TestIntegration_OSSBasicRolePermissions_PersistAndRefreshOnRegisterFixedRoles(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
ctx := context.Background()
sql := db.InitTestDB(t)
store := database.ProvideService(sql)
svc := ProvideOSSService(
setting.NewCfg(),
store,
&resourcepermissions.FakeActionSetSvc{},
localcache.ProvideService(),
featuremgmt.WithFeatures(),
tracing.InitializeTracerForTest(),
sql,
permreg.ProvidePermissionRegistry(),
nil,
)
require.NoError(t, svc.DeclareFixedRoles(accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:test:role",
Permissions: []accesscontrol.Permission{
{Action: "test:read", Scope: ""},
},
},
Grants: []string{string(org.RoleViewer)},
}))
require.NoError(t, svc.RegisterFixedRoles(ctx))
// verify permission is persisted to DB for basic:viewer
require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
var role accesscontrol.Role
ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
require.NoError(t, err)
require.True(t, ok)
var count int64
count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
require.NoError(t, err)
require.Equal(t, int64(1), count)
return nil
}))
// ensure RegisterFixedRoles refreshes it back to defaults
require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
ts := time.Now()
var role accesscontrol.Role
ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
require.NoError(t, err)
require.True(t, ok)
_, err = sess.Exec("DELETE FROM permission WHERE role_id = ?", role.ID)
require.NoError(t, err)
p := accesscontrol.Permission{
RoleID: role.ID,
Action: "custom:keep",
Scope: "",
Created: ts,
Updated: ts,
}
p.Kind, p.Attribute, p.Identifier = accesscontrol.SplitScope(p.Scope)
_, err = sess.Table("permission").Insert(&p)
return err
}))
svc2 := ProvideOSSService(
setting.NewCfg(),
store,
&resourcepermissions.FakeActionSetSvc{},
localcache.ProvideService(),
featuremgmt.WithFeatures(),
tracing.InitializeTracerForTest(),
sql,
permreg.ProvidePermissionRegistry(),
nil,
)
require.NoError(t, svc2.DeclareFixedRoles(accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:test:role",
Permissions: []accesscontrol.Permission{
{Action: "test:read", Scope: ""},
},
},
Grants: []string{string(org.RoleViewer)},
}))
require.NoError(t, svc2.RegisterFixedRoles(ctx))
require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
var role accesscontrol.Role
ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
require.NoError(t, err)
require.True(t, ok)
var count int64
count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
require.NoError(t, err)
require.Equal(t, int64(1), count)
count, err = sess.Table("permission").Where("role_id = ? AND action = ?", role.ID, "custom:keep").Count()
require.NoError(t, err)
require.Equal(t, int64(0), count)
return nil
}))
}

View File

@@ -30,7 +30,6 @@ import (
"github.com/grafana/grafana/pkg/services/accesscontrol/migrator"
"github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
"github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
"github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
@@ -97,12 +96,6 @@ func ProvideOSSService(
roles: accesscontrol.BuildBasicRoleDefinitions(),
store: store,
permRegistry: permRegistry,
sql: db,
serverLock: lock,
}
if backend, ok := store.(*database.AccessControlStore); ok {
s.seeder = seeding.New(log.New("accesscontrol.seeder"), backend, backend)
}
return s
@@ -119,11 +112,8 @@ type Service struct {
rolesMu sync.RWMutex
roles map[string]*accesscontrol.RoleDTO
store accesscontrol.Store
seeder *seeding.Seeder
permRegistry permreg.PermissionRegistry
isInitialized bool
sql db.DB
serverLock *serverlock.ServerLockService
}
func (s *Service) GetUsageStats(_ context.Context) map[string]any {
@@ -441,54 +431,17 @@ func (s *Service) RegisterFixedRoles(ctx context.Context) error {
defer span.End()
s.rolesMu.Lock()
registrations := s.registrations.Slice()
defer s.rolesMu.Unlock()
s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
s.registerRolesLocked(registration)
return true
})
s.isInitialized = true
rolesSnapshot := s.getBasicRolePermissionsLocked()
s.rolesMu.Unlock()
if s.seeder != nil {
if err := s.seeder.SeedRoles(ctx, registrations); err != nil {
return err
}
if err := s.seeder.RemoveAbsentRoles(ctx); err != nil {
return err
}
}
if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
return err
}
return nil
}
// getBasicRolePermissionsLocked computes the desired basic role permissions from the
// current registration list, using the shared seeding registration logic.
//
// It must be called while holding the roles lock.
func (s *Service) getBasicRolePermissionsLocked() map[string][]accesscontrol.Permission {
desired := map[accesscontrol.SeedPermission]struct{}{}
s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
seeding.AppendDesiredPermissions(desired, s.log, &registration.Role, registration.Grants, registration.Exclude, true)
return true
})
out := make(map[string][]accesscontrol.Permission)
for sp := range desired {
out[sp.BuiltInRole] = append(out[sp.BuiltInRole], accesscontrol.Permission{
Action: sp.Action,
Scope: sp.Scope,
})
}
return out
}
// registerRolesLocked processes a single role registration and adds permissions to basic roles.
// Must be called with s.rolesMu locked.
func (s *Service) registerRolesLocked(registration accesscontrol.RoleRegistration) {
@@ -521,7 +474,6 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
defer span.End()
acRegs := pluginutils.ToRegistrations(ID, name, regs)
updatedBasicRoles := false
for _, r := range acRegs {
if err := pluginutils.ValidatePluginRole(ID, r.Role); err != nil {
return err
@@ -548,23 +500,11 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
if initialized {
s.rolesMu.Lock()
s.registerRolesLocked(r)
updatedBasicRoles = true
s.rolesMu.Unlock()
s.cache.Flush()
}
}
if updatedBasicRoles {
s.rolesMu.RLock()
rolesSnapshot := s.getBasicRolePermissionsLocked()
s.rolesMu.RUnlock()
// plugin roles can be declared after startup - keep DB in sync
if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
return err
}
}
return nil
}

View File

@@ -1,623 +0,0 @@
package database
import (
"context"
"strings"
"time"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
"github.com/grafana/grafana/pkg/util/xorm/core"
)
const basicRolePermBatchSize = 500
// LoadRoles returns all fixed and plugin roles (global org) with permissions, indexed by role name.
func (s *AccessControlStore) LoadRoles(ctx context.Context) (map[string]*accesscontrol.RoleDTO, error) {
out := map[string]*accesscontrol.RoleDTO{}
err := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
type roleRow struct {
ID int64 `xorm:"id"`
OrgID int64 `xorm:"org_id"`
Version int64 `xorm:"version"`
UID string `xorm:"uid"`
Name string `xorm:"name"`
DisplayName string `xorm:"display_name"`
Description string `xorm:"description"`
Group string `xorm:"group_name"`
Hidden bool `xorm:"hidden"`
Updated time.Time `xorm:"updated"`
Created time.Time `xorm:"created"`
}
roles := []roleRow{}
if err := sess.Table("role").
Where("org_id = ?", accesscontrol.GlobalOrgID).
Where("(name LIKE ? OR name LIKE ?)", accesscontrol.FixedRolePrefix+"%", accesscontrol.PluginRolePrefix+"%").
Find(&roles); err != nil {
return err
}
if len(roles) == 0 {
return nil
}
roleIDs := make([]any, 0, len(roles))
roleByID := make(map[int64]*accesscontrol.RoleDTO, len(roles))
for _, r := range roles {
dto := &accesscontrol.RoleDTO{
ID: r.ID,
OrgID: r.OrgID,
Version: r.Version,
UID: r.UID,
Name: r.Name,
DisplayName: r.DisplayName,
Description: r.Description,
Group: r.Group,
Hidden: r.Hidden,
Updated: r.Updated,
Created: r.Created,
}
out[dto.Name] = dto
roleByID[dto.ID] = dto
roleIDs = append(roleIDs, dto.ID)
}
type permRow struct {
RoleID int64 `xorm:"role_id"`
Action string `xorm:"action"`
Scope string `xorm:"scope"`
}
perms := []permRow{}
if err := sess.Table("permission").In("role_id", roleIDs...).Find(&perms); err != nil {
return err
}
for _, p := range perms {
dto := roleByID[p.RoleID]
if dto == nil {
continue
}
dto.Permissions = append(dto.Permissions, accesscontrol.Permission{
RoleID: p.RoleID,
Action: p.Action,
Scope: p.Scope,
})
}
return nil
})
return out, err
}
func (s *AccessControlStore) SetRole(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
if existingRole == nil {
return nil
}
return s.sql.WithDbSession(ctx, func(sess *db.Session) error {
_, err := sess.Table("role").
Where("id = ? AND org_id = ?", existingRole.ID, accesscontrol.GlobalOrgID).
Update(map[string]any{
"display_name": wantedRole.DisplayName,
"description": wantedRole.Description,
"group_name": wantedRole.Group,
"hidden": wantedRole.Hidden,
"updated": time.Now(),
})
return err
})
}
func (s *AccessControlStore) SetPermissions(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
if existingRole == nil {
return nil
}
type key struct{ Action, Scope string }
existing := map[key]struct{}{}
for _, p := range existingRole.Permissions {
existing[key{p.Action, p.Scope}] = struct{}{}
}
desired := map[key]struct{}{}
for _, p := range wantedRole.Permissions {
desired[key{p.Action, p.Scope}] = struct{}{}
}
toAdd := make([]accesscontrol.Permission, 0)
toRemove := make([]accesscontrol.SeedPermission, 0)
now := time.Now()
for k := range desired {
if _, ok := existing[k]; ok {
continue
}
perm := accesscontrol.Permission{
RoleID: existingRole.ID,
Action: k.Action,
Scope: k.Scope,
Created: now,
Updated: now,
}
perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
toAdd = append(toAdd, perm)
}
for k := range existing {
if _, ok := desired[k]; ok {
continue
}
toRemove = append(toRemove, accesscontrol.SeedPermission{Action: k.Action, Scope: k.Scope})
}
if len(toAdd) == 0 && len(toRemove) == 0 {
return nil
}
return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
if len(toRemove) > 0 {
if err := DeleteRolePermissionTuples(sess, s.sql.GetDBType(), existingRole.ID, toRemove); err != nil {
return err
}
}
if len(toAdd) > 0 {
_, err := sess.InsertMulti(toAdd)
return err
}
return nil
})
}
func (s *AccessControlStore) CreateRole(ctx context.Context, role accesscontrol.RoleDTO) error {
now := time.Now()
uid := role.UID
if uid == "" && (strings.HasPrefix(role.Name, accesscontrol.FixedRolePrefix) || strings.HasPrefix(role.Name, accesscontrol.PluginRolePrefix)) {
uid = accesscontrol.PrefixedRoleUID(role.Name)
}
r := accesscontrol.Role{
OrgID: accesscontrol.GlobalOrgID,
Version: role.Version,
UID: uid,
Name: role.Name,
DisplayName: role.DisplayName,
Description: role.Description,
Group: role.Group,
Hidden: role.Hidden,
Created: now,
Updated: now,
}
if r.Version == 0 {
r.Version = 1
}
return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
if _, err := sess.Insert(&r); err != nil {
return err
}
if len(role.Permissions) == 0 {
return nil
}
// De-duplicate permissions on (action, scope) to avoid unique constraint violations.
// Some role definitions may accidentally include duplicates.
type permKey struct{ Action, Scope string }
seen := make(map[permKey]struct{}, len(role.Permissions))
perms := make([]accesscontrol.Permission, 0, len(role.Permissions))
for _, p := range role.Permissions {
k := permKey{Action: p.Action, Scope: p.Scope}
if _, ok := seen[k]; ok {
continue
}
seen[k] = struct{}{}
perm := accesscontrol.Permission{
RoleID: r.ID,
Action: p.Action,
Scope: p.Scope,
Created: now,
Updated: now,
}
perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
perms = append(perms, perm)
}
_, err := sess.InsertMulti(perms)
return err
})
}
func (s *AccessControlStore) DeleteRoles(ctx context.Context, roleUIDs []string) error {
if len(roleUIDs) == 0 {
return nil
}
uids := make([]any, 0, len(roleUIDs))
for _, uid := range roleUIDs {
uids = append(uids, uid)
}
return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
type row struct {
ID int64 `xorm:"id"`
UID string `xorm:"uid"`
}
rows := []row{}
if err := sess.Table("role").
Where("org_id = ?", accesscontrol.GlobalOrgID).
In("uid", uids...).
Find(&rows); err != nil {
return err
}
if len(rows) == 0 {
return nil
}
roleIDs := make([]any, 0, len(rows))
for _, r := range rows {
roleIDs = append(roleIDs, r.ID)
}
// Remove permissions and assignments first to avoid FK issues (if enabled).
{
args := append([]any{"DELETE FROM permission WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
if _, err := sess.Exec(args...); err != nil {
return err
}
}
{
args := append([]any{"DELETE FROM user_role WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
if _, err := sess.Exec(args...); err != nil {
return err
}
}
{
args := append([]any{"DELETE FROM team_role WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
if _, err := sess.Exec(args...); err != nil {
return err
}
}
{
args := append([]any{"DELETE FROM builtin_role WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
if _, err := sess.Exec(args...); err != nil {
return err
}
}
args := append([]any{"DELETE FROM role WHERE org_id = ? AND uid IN (?" + strings.Repeat(",?", len(uids)-1) + ")", accesscontrol.GlobalOrgID}, uids...)
_, err := sess.Exec(args...)
return err
})
}
// OSS basic-role permission refresh uses seeding.Seeder.Seed() with a desired set computed in memory.
// These methods implement the permission seeding part of seeding.SeedingBackend against the current permission table.
func (s *AccessControlStore) LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
var out map[accesscontrol.SeedPermission]struct{}
err := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
rows, err := LoadBasicRoleSeedPermissions(sess)
if err != nil {
return err
}
out = make(map[accesscontrol.SeedPermission]struct{}, len(rows))
for _, r := range rows {
r.Origin = ""
out[r] = struct{}{}
}
return nil
})
return out, err
}
func (s *AccessControlStore) Apply(ctx context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
rolesToUpgrade := seeding.RolesToUpgrade(added, removed)
// Run the same OSS apply logic as ossBasicRoleSeedBackend.Apply inside a single transaction.
return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
defs := accesscontrol.BuildBasicRoleDefinitions()
builtinToRoleID, err := EnsureBasicRolesExist(sess, defs)
if err != nil {
return err
}
backend := &ossBasicRoleSeedBackend{
sess: sess,
now: time.Now(),
builtinToRoleID: builtinToRoleID,
desired: nil,
dbType: s.sql.GetDBType(),
}
if err := backend.Apply(ctx, added, removed, updated); err != nil {
return err
}
return BumpBasicRoleVersions(sess, rolesToUpgrade)
})
}
// EnsureBasicRolesExist ensures the built-in basic roles exist in the role table and are bound in builtin_role.
// It returns a mapping from builtin role name (for example "Admin") to role ID.
func EnsureBasicRolesExist(sess *db.Session, defs map[string]*accesscontrol.RoleDTO) (map[string]int64, error) {
uidToBuiltin := make(map[string]string, len(defs))
uids := make([]any, 0, len(defs))
for builtin, def := range defs {
uidToBuiltin[def.UID] = builtin
uids = append(uids, def.UID)
}
type roleRow struct {
ID int64 `xorm:"id"`
UID string `xorm:"uid"`
}
rows := []roleRow{}
if err := sess.Table("role").
Where("org_id = ?", accesscontrol.GlobalOrgID).
In("uid", uids...).
Find(&rows); err != nil {
return nil, err
}
ts := time.Now()
builtinToRoleID := make(map[string]int64, len(defs))
for _, r := range rows {
br, ok := uidToBuiltin[r.UID]
if !ok {
continue
}
builtinToRoleID[br] = r.ID
}
for builtin, def := range defs {
roleID, ok := builtinToRoleID[builtin]
if !ok {
role := accesscontrol.Role{
OrgID: def.OrgID,
Version: def.Version,
UID: def.UID,
Name: def.Name,
DisplayName: def.DisplayName,
Description: def.Description,
Group: def.Group,
Hidden: def.Hidden,
Created: ts,
Updated: ts,
}
if _, err := sess.Insert(&role); err != nil {
return nil, err
}
roleID = role.ID
builtinToRoleID[builtin] = roleID
}
has, err := sess.Table("builtin_role").
Where("role_id = ? AND role = ? AND org_id = ?", roleID, builtin, accesscontrol.GlobalOrgID).
Exist()
if err != nil {
return nil, err
}
if !has {
br := accesscontrol.BuiltinRole{
RoleID: roleID,
OrgID: accesscontrol.GlobalOrgID,
Role: builtin,
Created: ts,
Updated: ts,
}
if _, err := sess.Table("builtin_role").Insert(&br); err != nil {
return nil, err
}
}
}
return builtinToRoleID, nil
}
// DeleteRolePermissionTuples deletes permissions for a single role by (action, scope) pairs.
//
// It uses a row-constructor IN clause where supported (MySQL, Postgres, SQLite) and falls back
// to a WHERE ... OR ... form for MSSQL.
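// For example, with two (action, scope) tuples the generated statements look roughly like:
//
//	-- MySQL / Postgres / SQLite
//	DELETE FROM permission WHERE role_id = ? AND (action, scope) IN ((?, ?),(?, ?))
//
//	-- MSSQL fallback
//	DELETE FROM permission WHERE role_id = ? AND ((action = ? AND scope = ?) OR (action = ? AND scope = ?))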
func DeleteRolePermissionTuples(sess *db.Session, dbType core.DbType, roleID int64, perms []accesscontrol.SeedPermission) error {
if len(perms) == 0 {
return nil
}
if dbType == migrator.MSSQL {
// MSSQL doesn't support (action, scope) IN ((?,?),(?,?)) row constructors.
where := make([]string, 0, len(perms))
args := make([]any, 0, 1+len(perms)*2)
args = append(args, roleID)
for _, p := range perms {
where = append(where, "(action = ? AND scope = ?)")
args = append(args, p.Action, p.Scope)
}
_, err := sess.Exec(
append([]any{
"DELETE FROM permission WHERE role_id = ? AND (" + strings.Join(where, " OR ") + ")",
}, args...)...,
)
return err
}
args := make([]any, 0, 1+len(perms)*2)
args = append(args, roleID)
for _, p := range perms {
args = append(args, p.Action, p.Scope)
}
sql := "DELETE FROM permission WHERE role_id = ? AND (action, scope) IN (" +
strings.Repeat("(?, ?),", len(perms)-1) + "(?, ?))"
_, err := sess.Exec(append([]any{sql}, args...)...)
return err
}
type ossBasicRoleSeedBackend struct {
sess *db.Session
now time.Time
builtinToRoleID map[string]int64
desired map[accesscontrol.SeedPermission]struct{}
dbType core.DbType
}
func (b *ossBasicRoleSeedBackend) LoadPrevious(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
rows, err := LoadBasicRoleSeedPermissions(b.sess)
if err != nil {
return nil, err
}
out := make(map[accesscontrol.SeedPermission]struct{}, len(rows))
for _, r := range rows {
// Ensure the key matches what OSS seeding uses (Origin is always empty for basic role refresh).
r.Origin = ""
out[r] = struct{}{}
}
return out, nil
}
func (b *ossBasicRoleSeedBackend) LoadDesired(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
return b.desired, nil
}
func (b *ossBasicRoleSeedBackend) Apply(_ context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
// Delete removed permissions (this includes user-defined permissions that aren't in desired).
if len(removed) > 0 {
permsByRoleID := map[int64][]accesscontrol.SeedPermission{}
for _, p := range removed {
roleID, ok := b.builtinToRoleID[p.BuiltInRole]
if !ok {
continue
}
permsByRoleID[roleID] = append(permsByRoleID[roleID], p)
}
for roleID, perms := range permsByRoleID {
// Chunk to keep statement sizes and parameter counts bounded.
if err := batch(len(perms), basicRolePermBatchSize, func(start, end int) error {
return DeleteRolePermissionTuples(b.sess, b.dbType, roleID, perms[start:end])
}); err != nil {
return err
}
}
}
// Insert added permissions and updated-target permissions.
toInsertSeed := make([]accesscontrol.SeedPermission, 0, len(added)+len(updated))
toInsertSeed = append(toInsertSeed, added...)
for _, v := range updated {
toInsertSeed = append(toInsertSeed, v)
}
if len(toInsertSeed) == 0 {
return nil
}
// De-duplicate on (role_id, action, scope). This avoids unique constraint violations when:
// - the same permission appears in both added and updated
// - multiple plugin origins grant the same permission (Origin is not persisted in permission table)
type permKey struct {
RoleID int64
Action string
Scope string
}
seen := make(map[permKey]struct{}, len(toInsertSeed))
toInsert := make([]accesscontrol.Permission, 0, len(toInsertSeed))
for _, p := range toInsertSeed {
roleID, ok := b.builtinToRoleID[p.BuiltInRole]
if !ok {
continue
}
k := permKey{RoleID: roleID, Action: p.Action, Scope: p.Scope}
if _, ok := seen[k]; ok {
continue
}
seen[k] = struct{}{}
perm := accesscontrol.Permission{
RoleID: roleID,
Action: p.Action,
Scope: p.Scope,
Created: b.now,
Updated: b.now,
}
perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
toInsert = append(toInsert, perm)
}
return batch(len(toInsert), basicRolePermBatchSize, func(start, end int) error {
// MySQL: ignore conflicts to make seeding idempotent under retries/concurrency.
// Conflicts can happen if the same permission already exists (unique on role_id, action, scope).
if b.dbType == migrator.MySQL {
args := make([]any, 0, (end-start)*8)
for i := start; i < end; i++ {
p := toInsert[i]
args = append(args, p.RoleID, p.Action, p.Scope, p.Kind, p.Attribute, p.Identifier, p.Updated, p.Created)
}
sql := append([]any{`INSERT IGNORE INTO permission (role_id, action, scope, kind, attribute, identifier, updated, created) VALUES ` +
strings.Repeat("(?, ?, ?, ?, ?, ?, ?, ?),", end-start-1) + "(?, ?, ?, ?, ?, ?, ?, ?)"}, args...)
_, err := b.sess.Exec(sql...)
return err
}
_, err := b.sess.InsertMulti(toInsert[start:end])
return err
})
}
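// batch invokes eachFn over consecutive [start, end) windows of at most size elements,
// e.g. count=5 and size=2 yield calls with (0, 2), (2, 4) and (4, 5).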
func batch(count, size int, eachFn func(start, end int) error) error {
for i := 0; i < count; {
end := i + size
if end > count {
end = count
}
if err := eachFn(i, end); err != nil {
return err
}
i = end
}
return nil
}
// BumpBasicRoleVersions increments the role version for the given builtin basic roles (Viewer/Editor/Admin/Grafana Admin).
// Unknown role names are ignored.
func BumpBasicRoleVersions(sess *db.Session, basicRoles []string) error {
if len(basicRoles) == 0 {
return nil
}
defs := accesscontrol.BuildBasicRoleDefinitions()
uids := make([]any, 0, len(basicRoles))
for _, br := range basicRoles {
def, ok := defs[br]
if !ok {
continue
}
uids = append(uids, def.UID)
}
if len(uids) == 0 {
return nil
}
sql := "UPDATE role SET version = version + 1 WHERE org_id = ? AND uid IN (?" + strings.Repeat(",?", len(uids)-1) + ")"
_, err := sess.Exec(append([]any{sql, accesscontrol.GlobalOrgID}, uids...)...)
return err
}
// LoadBasicRoleSeedPermissions returns the current (builtin_role, action, scope) permissions granted to basic roles.
// It sets Origin to empty.
func LoadBasicRoleSeedPermissions(sess *db.Session) ([]accesscontrol.SeedPermission, error) {
rows := []accesscontrol.SeedPermission{}
err := sess.SQL(
`SELECT role.display_name AS builtin_role, p.action, p.scope, '' AS origin
FROM role INNER JOIN permission AS p ON p.role_id = role.id
WHERE role.org_id = ? AND role.name LIKE 'basic:%'`,
accesscontrol.GlobalOrgID,
).Find(&rows)
return rows, err
}

View File

@@ -15,7 +15,6 @@ import (
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/serverlock"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/authz/zanzana"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
@@ -131,9 +130,6 @@ func (r *ZanzanaReconciler) Run(ctx context.Context) error {
// Reconcile schedules a job that will run and reconcile resources between
// legacy access control and zanzana.
func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
// Ensure we don't reconcile an empty/partial RBAC state before OSS has seeded basic role permissions.
// This matters most during startup, when fixed-role loading and the basic-role permission refresh run as another background service.
r.waitForBasicRolesSeeded(ctx)
r.reconcile(ctx)
// FIXME:
@@ -149,57 +145,6 @@ func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
}
}
func (r *ZanzanaReconciler) hasBasicRolePermissions(ctx context.Context) bool {
var count int64
// Basic role permissions are stored on "basic:%" roles in the global org (0).
// In a fresh DB, this will be empty until fixed roles are registered and the basic role permission refresh runs.
type row struct {
Count int64 `xorm:"count"`
}
_ = r.store.WithDbSession(ctx, func(sess *db.Session) error {
var rr row
_, err := sess.SQL(
`SELECT COUNT(*) AS count
FROM role INNER JOIN permission AS p ON p.role_id = role.id
WHERE role.org_id = ? AND role.name LIKE ?`,
accesscontrol.GlobalOrgID,
accesscontrol.BasicRolePrefix+"%",
).Get(&rr)
if err != nil {
return err
}
count = rr.Count
return nil
})
return count > 0
}
func (r *ZanzanaReconciler) waitForBasicRolesSeeded(ctx context.Context) {
// Best-effort: don't block forever. If we can't observe basic roles, proceed anyway.
const (
maxWait = 15 * time.Second
interval = 1 * time.Second
)
deadline := time.NewTimer(maxWait)
defer deadline.Stop()
ticker := time.NewTicker(interval)
defer ticker.Stop()
for {
if r.hasBasicRolePermissions(ctx) {
return
}
select {
case <-ctx.Done():
return
case <-deadline.C:
return
case <-ticker.C:
}
}
}
func (r *ZanzanaReconciler) reconcile(ctx context.Context) {
run := func(ctx context.Context, namespace string) (ok bool) {
now := time.Now()

View File

@@ -1,67 +0,0 @@
package dualwrite
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/services/accesscontrol"
)
func TestZanzanaReconciler_hasBasicRolePermissions(t *testing.T) {
env := setupTestEnv(t)
r := &ZanzanaReconciler{
store: env.db,
}
ctx := context.Background()
require.False(t, r.hasBasicRolePermissions(ctx))
err := env.db.WithDbSession(ctx, func(sess *db.Session) error {
now := time.Now()
_, err := sess.Exec(
`INSERT INTO role (org_id, uid, name, display_name, group_name, description, hidden, version, created, updated)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
accesscontrol.GlobalOrgID,
"basic_viewer_uid_test",
accesscontrol.BasicRolePrefix+"viewer",
"Viewer",
"Basic",
"Viewer role",
false,
1,
now,
now,
)
if err != nil {
return err
}
var roleID int64
if _, err := sess.SQL(`SELECT id FROM role WHERE org_id = ? AND uid = ?`, accesscontrol.GlobalOrgID, "basic_viewer_uid_test").Get(&roleID); err != nil {
return err
}
_, err = sess.Exec(
`INSERT INTO permission (role_id, action, scope, kind, attribute, identifier, created, updated)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
roleID,
"dashboards:read",
"dashboards:*",
"",
"",
"",
now,
now,
)
return err
})
require.NoError(t, err)
require.True(t, r.hasBasicRolePermissions(ctx))
}

View File

@@ -1,7 +1,6 @@
package accesscontrol
import (
"context"
"encoding/json"
"errors"
"fmt"
@@ -595,18 +594,3 @@ type QueryWithOrg struct {
OrgId *int64 `json:"orgId"`
Global bool `json:"global"`
}
type SeedPermission struct {
BuiltInRole string `xorm:"builtin_role"`
Action string `xorm:"action"`
Scope string `xorm:"scope"`
Origin string `xorm:"origin"`
}
type RoleStore interface {
LoadRoles(ctx context.Context) (map[string]*RoleDTO, error)
SetRole(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
SetPermissions(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
CreateRole(ctx context.Context, role RoleDTO) error
DeleteRoles(ctx context.Context, roleUIDs []string) error
}

View File

@@ -1,451 +0,0 @@
package seeding
import (
"context"
"fmt"
"regexp"
"slices"
"strings"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginaccesscontrol"
)
type Seeder struct {
log log.Logger
roleStore accesscontrol.RoleStore
backend SeedingBackend
builtinsPermissions map[accesscontrol.SeedPermission]struct{}
seededFixedRoles map[string]bool
seededPluginRoles map[string]bool
seededPlugins map[string]bool
hasSeededAlready bool
}
// SeedingBackend provides the seed-set specific operations needed to seed.
type SeedingBackend interface {
// LoadPrevious returns the currently stored permissions for previously seeded roles.
LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error)
// Apply updates the database to match the desired permissions.
Apply(ctx context.Context,
added, removed []accesscontrol.SeedPermission,
updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission,
) error
}
func New(log log.Logger, roleStore accesscontrol.RoleStore, backend SeedingBackend) *Seeder {
return &Seeder{
log: log,
roleStore: roleStore,
backend: backend,
builtinsPermissions: map[accesscontrol.SeedPermission]struct{}{},
seededFixedRoles: map[string]bool{},
seededPluginRoles: map[string]bool{},
seededPlugins: map[string]bool{},
hasSeededAlready: false,
}
}
// SetDesiredPermissions replaces the in-memory desired permission set used by Seed().
func (s *Seeder) SetDesiredPermissions(desired map[accesscontrol.SeedPermission]struct{}) {
if desired == nil {
s.builtinsPermissions = map[accesscontrol.SeedPermission]struct{}{}
return
}
s.builtinsPermissions = desired
}
// Seed loads current and desired permissions, diffs them (including scope updates), applies changes, and bumps versions.
func (s *Seeder) Seed(ctx context.Context) error {
previous, err := s.backend.LoadPrevious(ctx)
if err != nil {
return err
}
// Filter the previously stored seed set before diffing:
// - Do not remove plugin permissions when the plugin didn't register this run (Origin set but not in seededPlugins).
// - Preserve legacy plugin app access permissions in the persisted seed set (these are granted by default).
if len(previous) > 0 {
filtered := make(map[accesscontrol.SeedPermission]struct{}, len(previous))
for p := range previous {
if p.Action == pluginaccesscontrol.ActionAppAccess {
continue
}
if p.Origin != "" && !s.seededPlugins[p.Origin] {
continue
}
filtered[p] = struct{}{}
}
previous = filtered
}
added, removed, updated := s.permissionDiff(previous, s.builtinsPermissions)
if err := s.backend.Apply(ctx, added, removed, updated); err != nil {
return err
}
return nil
}
// SeedRoles populates the database with the roles and their assignments
// It will create roles that do not exist and update roles that have changed
// Do not use for provisioning. Validation is not enforced.
func (s *Seeder) SeedRoles(ctx context.Context, registrationList []accesscontrol.RoleRegistration) error {
roleMap, err := s.roleStore.LoadRoles(ctx)
if err != nil {
return err
}
missingRoles := make([]accesscontrol.RoleRegistration, 0, len(registrationList))
// Diff existing roles with the ones we want to seed.
// If a role is missing, we add it to the missingRoles list
for _, registration := range registrationList {
registration := registration
role, ok := roleMap[registration.Role.Name]
switch {
case registration.Role.IsFixed():
s.seededFixedRoles[registration.Role.Name] = true
case registration.Role.IsPlugin():
s.seededPluginRoles[registration.Role.Name] = true
// To be resilient to failed plugin loads, we remember the plugins that have registered;
// later we'll ignore permissions and roles from other plugins.
s.seededPlugins[pluginutils.PluginIDFromName(registration.Role.Name)] = true
}
s.rememberPermissionAssignments(&registration.Role, registration.Grants, registration.Exclude)
if !ok {
missingRoles = append(missingRoles, registration)
continue
}
if needsRoleUpdate(role, registration.Role) {
if err := s.roleStore.SetRole(ctx, role, registration.Role); err != nil {
return err
}
}
if needsPermissionsUpdate(role, registration.Role) {
if err := s.roleStore.SetPermissions(ctx, role, registration.Role); err != nil {
return err
}
}
}
for _, registration := range missingRoles {
if err := s.roleStore.CreateRole(ctx, registration.Role); err != nil {
return err
}
}
return nil
}
func needsPermissionsUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
if existingRole == nil {
return true
}
if len(existingRole.Permissions) != len(wantedRole.Permissions) {
return true
}
for _, p := range wantedRole.Permissions {
found := false
for _, ep := range existingRole.Permissions {
if ep.Action == p.Action && ep.Scope == p.Scope {
found = true
break
}
}
if !found {
return true
}
}
return false
}
func needsRoleUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
if existingRole == nil {
return true
}
if existingRole.Name != wantedRole.Name {
return false
}
if existingRole.DisplayName != wantedRole.DisplayName {
return true
}
if existingRole.Description != wantedRole.Description {
return true
}
if existingRole.Group != wantedRole.Group {
return true
}
if existingRole.Hidden != wantedRole.Hidden {
return true
}
return false
}
// Deprecated: SeedRole is deprecated and should not be used.
// SeedRoles only does boot up seeding and should not be used for runtime seeding.
func (s *Seeder) SeedRole(ctx context.Context, role accesscontrol.RoleDTO, builtInRoles []string) error {
addedPermissions := make(map[string]struct{}, len(role.Permissions))
permissions := make([]accesscontrol.Permission, 0, len(role.Permissions))
for _, p := range role.Permissions {
key := fmt.Sprintf("%s:%s", p.Action, p.Scope)
if _, ok := addedPermissions[key]; !ok {
addedPermissions[key] = struct{}{}
permissions = append(permissions, accesscontrol.Permission{Action: p.Action, Scope: p.Scope})
}
}
wantedRole := accesscontrol.RoleDTO{
OrgID: accesscontrol.GlobalOrgID,
Version: role.Version,
UID: role.UID,
Name: role.Name,
DisplayName: role.DisplayName,
Description: role.Description,
Group: role.Group,
Permissions: permissions,
Hidden: role.Hidden,
}
roleMap, err := s.roleStore.LoadRoles(ctx)
if err != nil {
return err
}
existingRole := roleMap[wantedRole.Name]
if existingRole == nil {
if err := s.roleStore.CreateRole(ctx, wantedRole); err != nil {
return err
}
} else {
if needsRoleUpdate(existingRole, wantedRole) {
if err := s.roleStore.SetRole(ctx, existingRole, wantedRole); err != nil {
return err
}
}
if needsPermissionsUpdate(existingRole, wantedRole) {
if err := s.roleStore.SetPermissions(ctx, existingRole, wantedRole); err != nil {
return err
}
}
}
// Remember seeded roles
if wantedRole.IsFixed() {
s.seededFixedRoles[wantedRole.Name] = true
}
isPluginRole := wantedRole.IsPlugin()
if isPluginRole {
s.seededPluginRoles[wantedRole.Name] = true
// To be resilient to failed plugin loads, we remember the plugins that have registered;
// later we'll ignore permissions and roles from other plugins.
s.seededPlugins[pluginutils.PluginIDFromName(role.Name)] = true
}
s.rememberPermissionAssignments(&wantedRole, builtInRoles, []string{})
return nil
}
func (s *Seeder) rememberPermissionAssignments(role *accesscontrol.RoleDTO, builtInRoles []string, excludedRoles []string) {
AppendDesiredPermissions(s.builtinsPermissions, s.log, role, builtInRoles, excludedRoles, true)
}
// AppendDesiredPermissions accumulates permissions from a role registration onto basic roles (Viewer/Editor/Admin/Grafana Admin).
// - It expands parents via accesscontrol.BuiltInRolesWithParents.
// - It can optionally ignore plugin app access permissions (which are granted by default).
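// For example, assuming the usual Viewer -> Editor -> Admin inheritance, a permission granted to
// "Viewer" is also recorded for "Editor" and "Admin", unless those roles are listed in excludedRoles.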
func AppendDesiredPermissions(
out map[accesscontrol.SeedPermission]struct{},
logger log.Logger,
role *accesscontrol.RoleDTO,
builtInRoles []string,
excludedRoles []string,
ignorePluginAppAccess bool,
) {
if out == nil || role == nil {
return
}
for builtInRole := range accesscontrol.BuiltInRolesWithParents(builtInRoles) {
// Skip excluded grants
if slices.Contains(excludedRoles, builtInRole) {
continue
}
for _, perm := range role.Permissions {
if ignorePluginAppAccess && perm.Action == pluginaccesscontrol.ActionAppAccess {
logger.Debug("Role is attempting to grant access permission, but this permission is already granted by default and will be ignored",
"role", role.Name, "permission", perm.Action, "scope", perm.Scope)
continue
}
sp := accesscontrol.SeedPermission{
BuiltInRole: builtInRole,
Action: perm.Action,
Scope: perm.Scope,
}
if role.IsPlugin() {
sp.Origin = pluginutils.PluginIDFromName(role.Name)
}
out[sp] = struct{}{}
}
}
}
// permissionDiff returns:
// - added: present in desired permissions, not in previous permissions
// - removed: present in previous permissions, not in desired permissions
// - updated: same role + action, but scope changed
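// For example (scopes are illustrative): previous {Viewer, dashboards:read, "dashboards:uid:old"}
// and desired {Viewer, dashboards:read, "dashboards:uid:new"} produce updated[old] = new,
// rather than one entry in added and one in removed.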
func (s *Seeder) permissionDiff(previous, desired map[accesscontrol.SeedPermission]struct{}) (added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) {
addedSet := make(map[accesscontrol.SeedPermission]struct{}, 0)
for n := range desired {
if _, already := previous[n]; !already {
addedSet[n] = struct{}{}
} else {
delete(previous, n)
}
}
// Check if any of the new permissions is actually an old permission with an updated scope
updated = make(map[accesscontrol.SeedPermission]accesscontrol.SeedPermission, 0)
for n := range addedSet {
for p := range previous {
if n.BuiltInRole == p.BuiltInRole && n.Action == p.Action {
updated[p] = n
delete(addedSet, n)
}
}
}
for p := range addedSet {
added = append(added, p)
}
for p := range previous {
if p.Action == pluginaccesscontrol.ActionAppAccess &&
p.Scope != pluginaccesscontrol.ScopeProvider.GetResourceAllScope() {
// Allow backward compatibility with plugins that were seeded before the grant-ignore rule was added.
s.log.Info("This permission already existed so it will not be removed",
"role", p.BuiltInRole, "permission", p.Action, "scope", p.Scope)
continue
}
removed = append(removed, p)
}
return added, removed, updated
}
func (s *Seeder) ClearBasicRolesPluginPermissions(ID string) {
removable := []accesscontrol.SeedPermission{}
for key := range s.builtinsPermissions {
if matchPermissionByPluginID(key, ID) {
removable = append(removable, key)
}
}
for _, perm := range removable {
delete(s.builtinsPermissions, perm)
}
}
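// matchPermissionByPluginID reports whether a seeded permission belongs to the given plugin:
// its Origin must equal the plugin ID and either the action contains the plugin ID followed by
// '.' or ':' (e.g. a hypothetical "myorg-app.settings:read" for plugin "myorg-app"), or the
// scope ends with ":" + pluginID.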
func matchPermissionByPluginID(perm accesscontrol.SeedPermission, pluginID string) bool {
if perm.Origin != pluginID {
return false
}
actionTemplate := regexp.MustCompile(fmt.Sprintf("%s[.:]", pluginID))
scopeTemplate := fmt.Sprintf(":%s", pluginID)
return actionTemplate.MatchString(perm.Action) || strings.HasSuffix(perm.Scope, scopeTemplate)
}
// RolesToUpgrade returns the unique basic roles that should have their version incremented.
func RolesToUpgrade(added, removed []accesscontrol.SeedPermission) []string {
set := map[string]struct{}{}
for _, p := range added {
set[p.BuiltInRole] = struct{}{}
}
for _, p := range removed {
set[p.BuiltInRole] = struct{}{}
}
out := make([]string, 0, len(set))
for r := range set {
out = append(out, r)
}
return out
}
func (s *Seeder) ClearPluginRoles(ID string) {
expectedPrefix := fmt.Sprintf("%s%s:", accesscontrol.PluginRolePrefix, ID)
for roleName := range s.seededPluginRoles {
if strings.HasPrefix(roleName, expectedPrefix) {
delete(s.seededPluginRoles, roleName)
}
}
}
func (s *Seeder) MarkSeededAlready() {
s.hasSeededAlready = true
}
func (s *Seeder) HasSeededAlready() bool {
return s.hasSeededAlready
}
func (s *Seeder) RemoveAbsentRoles(ctx context.Context) error {
roleMap, errGet := s.roleStore.LoadRoles(ctx)
if errGet != nil {
s.log.Error("failed to get fixed roles from store", "err", errGet)
return errGet
}
toRemove := []string{}
for _, r := range roleMap {
if r == nil {
continue
}
if r.IsFixed() {
if !s.seededFixedRoles[r.Name] {
s.log.Info("role is not seeded anymore, mark it for deletion", "role", r.Name)
toRemove = append(toRemove, r.UID)
}
continue
}
if r.IsPlugin() {
if !s.seededPlugins[pluginutils.PluginIDFromName(r.Name)] {
// To be resilient to failed plugin loads,
// ignore stored roles related to plugins that have not registered this time.
s.log.Debug("plugin role has not been registered on this run, skipping its removal", "role", r.Name)
continue
}
if !s.seededPluginRoles[r.Name] {
s.log.Info("role is not seeded anymore, mark it for deletion", "role", r.Name)
toRemove = append(toRemove, r.UID)
}
}
}
if errDelete := s.roleStore.DeleteRoles(ctx, toRemove); errDelete != nil {
s.log.Error("failed to delete absent fixed and plugin roles", "err", errDelete)
return errDelete
}
return nil
}
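// Hypothetical wiring sketch (not part of this changeset): how a caller could drive the Seeder with a
// RoleStore and SeedingBackend implementation. SeedRoles creates or updates fixed and plugin roles and
// remembers the desired basic-role grants; Seed then diffs them against the stored seed set and applies
// additions, removals, and scope updates.
func seedBasicRoles(ctx context.Context, logger log.Logger, roleStore accesscontrol.RoleStore, backend SeedingBackend, regs []accesscontrol.RoleRegistration) error {
	s := New(logger, roleStore, backend)
	if err := s.SeedRoles(ctx, regs); err != nil {
		return err
	}
	return s.Seed(ctx)
}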

View File

@@ -294,6 +294,9 @@ type DashboardProvisioning struct {
ExternalID string `xorm:"external_id"`
CheckSum string
Updated int64
// Note: only used when writing metadata to unified storage resources; not saved in the legacy table.
AllowUIUpdates bool `xorm:"-"`
}
type DeleteDashboardCommand struct {

View File

@@ -1942,6 +1942,7 @@ func (dr *DashboardServiceImpl) saveProvisionedDashboardThroughK8s(ctx context.C
// HOWEVER, maybe OK to leave this for now and "fix" it by using file provisioning for mode 4
m.Kind = utils.ManagerKindClassicFP // nolint:staticcheck
m.Identity = provisioning.Name
m.AllowsEdits = provisioning.AllowUIUpdates
s.Path = provisioning.ExternalID
s.Checksum = provisioning.CheckSum
s.TimestampMillis = time.Unix(provisioning.Updated, 0).UnixMilli()

View File

@@ -2075,6 +2075,13 @@ var (
FrontendOnly: true,
Owner: grafanaDashboardsSquad,
},
{
Name: "smoothingTransformation",
Description: "Enables the ASAP smoothing transformation for time series data",
Stage: FeatureStageExperimental,
FrontendOnly: true,
Owner: grafanaDataProSquad,
},
}
)

View File

@@ -281,3 +281,4 @@ rudderstackUpgrade,experimental,@grafana/grafana-frontend-platform,false,false,t
kubernetesAlertingHistorian,experimental,@grafana/alerting-squad,false,true,false
useMTPlugins,experimental,@grafana/plugins-platform-backend,false,false,true
multiPropsVariables,experimental,@grafana/dashboards-squad,false,false,true
smoothingTransformation,experimental,@grafana/datapro,false,false,true

View File

@@ -3293,6 +3293,19 @@
"codeowner": "@grafana/dashboards-squad"
}
},
{
"metadata": {
"name": "smoothingTransformation",
"resourceVersion": "1767349656275",
"creationTimestamp": "2026-01-02T10:27:36Z"
},
"spec": {
"description": "Enables the ASAP smoothing transformation for time series data",
"stage": "experimental",
"codeowner": "@grafana/datapro",
"frontend": true
}
},
{
"metadata": {
"name": "sqlExpressions",

View File

@@ -6,7 +6,6 @@ import (
"strconv"
"strings"
"github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/plugins"
ac "github.com/grafana/grafana/pkg/services/accesscontrol"
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
@@ -129,10 +128,6 @@ func (s *ServiceImpl) processAppPlugin(plugin pluginstore.Plugin, c *contextmode
}
if include.Type == "page" {
if !middleware.PageIsFeatureToggleEnabled(c.Req.Context(), include.Path) {
s.log.Debug("Skipping page", "plugin", plugin.ID, "path", include.Path)
continue
}
link := &navtree.NavLink{
Text: include.Name,
Icon: include.Icon,

View File

@@ -73,21 +73,21 @@ func TestBuildLabelMatcherJSON(t *testing.T) {
name: "MySQL MatchEqual with non-empty value",
dialect: migrator.NewMysqlDialect(),
matcher: &labels.Matcher{Type: labels.MatchEqual, Name: "team", Value: "alerting"},
wantSQL: "JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?))) = ?",
wantSQL: `JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"'))) = ?`,
wantArgs: []any{"team", "alerting"},
},
{
name: "MySQL MatchEqual with empty value",
dialect: migrator.NewMysqlDialect(),
matcher: &labels.Matcher{Type: labels.MatchEqual, Name: "team", Value: ""},
wantSQL: "(JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?))) = ? OR JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?)) IS NULL)",
wantSQL: `(JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"'))) = ? OR JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"')) IS NULL)`,
wantArgs: []any{"team", "", "team"},
},
{
name: "MySQL MatchNotEqual",
dialect: migrator.NewMysqlDialect(),
matcher: &labels.Matcher{Type: labels.MatchNotEqual, Name: "team", Value: "alerting"},
wantSQL: "(JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?))) IS NULL OR JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?))) != ?)",
wantSQL: `(JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"'))) IS NULL OR JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"'))) != ?)`,
wantArgs: []any{"team", "team", "alerting"},
},
{
@@ -149,7 +149,7 @@ func TestBuildLabelKeyExistsCondition(t *testing.T) {
dialect: migrator.NewMysqlDialect(),
column: "labels",
key: "__grafana_origin",
wantSQL: "JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?)) IS NOT NULL",
wantSQL: `JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"')) IS NOT NULL`,
wantArgs: []any{"__grafana_origin"},
},
{
@@ -194,7 +194,7 @@ func TestBuildLabelKeyMissingCondition(t *testing.T) {
dialect: migrator.NewMysqlDialect(),
column: "labels",
key: "__grafana_origin",
wantSQL: "JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?)) IS NULL",
wantSQL: `JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"')) IS NULL`,
wantArgs: []any{"__grafana_origin"},
},
{

View File

@@ -2454,7 +2454,7 @@ func TestIntegration_ListAlertRules(t *testing.T) {
ruleGen.WithLabels(map[string]string{"glob": "*[?]"}),
ruleGen.WithTitle("rule_glob")))
ruleSpecialChars := createRule(t, store, ruleGen.With(
ruleGen.WithLabels(map[string]string{"json": "line1\nline2\\end\"quote"}),
ruleGen.WithLabels(map[string]string{"label-with-hyphen": "line1\nline2\\end\"quote"}),
ruleGen.WithTitle("rule_special_chars")))
ruleEmpty := createRule(t, store, ruleGen.With(
ruleGen.WithLabels(map[string]string{"empty": ""}),
@@ -2531,7 +2531,7 @@ func TestIntegration_ListAlertRules(t *testing.T) {
name: "JSON escape characters are handled correctly",
labelMatchers: labels.Matchers{
func() *labels.Matcher {
m, _ := labels.NewMatcher(labels.MatchEqual, "json", "line1\nline2\\end\"quote")
m, _ := labels.NewMatcher(labels.MatchEqual, "label-with-hyphen", "line1\nline2\\end\"quote")
return m
}(),
},

View File

@@ -13,7 +13,7 @@ import (
func jsonEquals(dialect migrator.Dialect, column, key, value string) (string, []any) {
switch dialect.DriverName() {
case migrator.MySQL:
return fmt.Sprintf("JSON_UNQUOTE(JSON_EXTRACT(NULLIF(%s, ''), CONCAT('$.', ?))) = ?", column), []any{key, value}
return fmt.Sprintf(`JSON_UNQUOTE(JSON_EXTRACT(NULLIF(%s, ''), CONCAT('$."', ?, '"'))) = ?`, column), []any{key, value}
case migrator.Postgres:
return fmt.Sprintf("jsonb_extract_path_text(NULLIF(%s, '')::jsonb, ?) = ?", column), []any{key, value}
default:
@@ -25,7 +25,7 @@ func jsonNotEquals(dialect migrator.Dialect, column, key, value string) (string,
var jx string
switch dialect.DriverName() {
case migrator.MySQL:
jx = fmt.Sprintf("JSON_UNQUOTE(JSON_EXTRACT(NULLIF(%s, ''), CONCAT('$.', ?)))", column)
jx = fmt.Sprintf(`JSON_UNQUOTE(JSON_EXTRACT(NULLIF(%s, ''), CONCAT('$."', ?, '"')))`, column)
case migrator.Postgres:
jx = fmt.Sprintf("jsonb_extract_path_text(NULLIF(%s, '')::jsonb, ?)", column)
default:
@@ -49,7 +49,7 @@ func jsonKeyCondition(dialect migrator.Dialect, column, key string, exists bool)
}
switch dialect.DriverName() {
case migrator.MySQL:
return fmt.Sprintf("JSON_EXTRACT(NULLIF(%s, ''), CONCAT('$.', ?)) %s", column, nullCheck), []any{key}, nil
return fmt.Sprintf(`JSON_EXTRACT(NULLIF(%s, ''), CONCAT('$."', ?, '"')) %s`, column, nullCheck), []any{key}, nil
case migrator.Postgres:
return fmt.Sprintf("jsonb_extract_path_text(NULLIF(%s, '')::jsonb, ?) %s", column, nullCheck), []any{key}, nil
default:

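// Illustration of the MySQL change above (the key is an example value): for the key "label-with-hyphen",
//   CONCAT('$.', ?)        builds '$.label-with-hyphen'    (invalid: unquoted MySQL JSON path members must be identifiers)
//   CONCAT('$."', ?, '"')  builds '$."label-with-hyphen"'  (valid: the member name is double-quoted)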
View File

@@ -23,7 +23,7 @@ func TestJsonEquals(t *testing.T) {
column: "labels",
key: "team",
value: "alerting",
wantSQL: "JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?))) = ?",
wantSQL: `JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"'))) = ?`,
wantArgs: []any{"team", "alerting"},
},
{
@@ -62,7 +62,7 @@ func TestJsonNotEquals(t *testing.T) {
column: "labels",
key: "team",
value: "alerting",
wantSQL: "(JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?))) IS NULL OR JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?))) != ?)",
wantSQL: `(JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"'))) IS NULL OR JSON_UNQUOTE(JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"'))) != ?)`,
wantArgs: []any{"team", "team", "alerting"},
},
{
@@ -99,7 +99,7 @@ func TestJsonKeyMissing(t *testing.T) {
dialect: migrator.NewMysqlDialect(),
column: "labels",
key: "team",
wantSQL: "JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?)) IS NULL",
wantSQL: `JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"')) IS NULL`,
wantArgs: []any{"team"},
},
{
@@ -136,7 +136,7 @@ func TestJsonKeyExists(t *testing.T) {
dialect: migrator.NewMysqlDialect(),
column: "labels",
key: "__grafana_origin",
wantSQL: "JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$.', ?)) IS NOT NULL",
wantSQL: `JSON_EXTRACT(NULLIF(labels, ''), CONCAT('$."', ?, '"')) IS NOT NULL`,
wantArgs: []any{"__grafana_origin"},
},
{

View File

@@ -0,0 +1,90 @@
//go:build ignore
package main
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
)
type Colors struct {
Mode string `json:"mode"`
}
type ThemeDefinition struct {
Colors Colors `json:"colors"`
Id string `json:"id"`
}
func main() {
themesPath := filepath.Join("..", "..", "..", "packages", "grafana-data", "src", "themes", "themeDefinitions")
// Check if the themes directory exists
if _, err := os.Stat(themesPath); os.IsNotExist(err) {
fmt.Fprintf(os.Stderr, "Themes directory not found: %s\n", themesPath)
os.Exit(1)
}
output := `// Code generated by go generate; DO NOT EDIT.
package pref
var themes = []ThemeDTO{
{ID: "light", Type: "light"},
{ID: "dark", Type: "dark"},
{ID: "system", Type: "dark"},
`
err := filepath.WalkDir(themesPath, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
// Only process json files
if d.IsDir() || !strings.HasSuffix(d.Name(), ".json") {
return nil
}
fileBytes, readErr := os.ReadFile(path)
if readErr != nil {
fmt.Fprintf(os.Stderr, "Error reading file %s: %v\n", path, readErr)
return nil // Continue processing other files
}
var themeDef ThemeDefinition
jsonErr := json.Unmarshal(fileBytes, &themeDef)
if jsonErr != nil {
fmt.Fprintf(os.Stderr, "Error parsing JSON from %s: %v\n", path, jsonErr)
return nil // Continue processing other files
}
themeId := themeDef.Id
themeType := "dark" // default fallback
if themeDef.Colors.Mode != "" {
themeType = themeDef.Colors.Mode
}
output += fmt.Sprintf("\t{ID: %q, Type: %q, IsExtra: true},\n", themeId, themeType)
return nil
})
if err != nil {
fmt.Fprintf(os.Stderr, "Error walking themes directory: %v\n", err)
os.Exit(1)
}
output += "}\n"
// Write the generated file
outputPath := filepath.Join("themes_generated.go")
if err := os.WriteFile(outputPath, []byte(output), 0644); err != nil {
fmt.Fprintf(os.Stderr, "Error writing output file: %v\n", err)
os.Exit(1)
}
fmt.Printf("Successfully generated themes_generated.go\n")
}
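// To regenerate themes_generated.go, run `go generate` in this package directory (the //go:generate
// directive in the pref package invokes this script), or run `go run generate_themes.go` directly.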

View File

@@ -1,3 +1,5 @@
//go:generate go run generate_themes.go
package pref
type ThemeDTO struct {
@@ -6,24 +8,6 @@ type ThemeDTO struct {
IsExtra bool `json:"isExtra"`
}
var themes = []ThemeDTO{
{ID: "light", Type: "light"},
{ID: "dark", Type: "dark"},
{ID: "system", Type: "dark"},
{ID: "debug", Type: "dark", IsExtra: true},
{ID: "aubergine", Type: "dark", IsExtra: true},
{ID: "desertbloom", Type: "light", IsExtra: true},
{ID: "gildedgrove", Type: "dark", IsExtra: true},
{ID: "mars", Type: "dark", IsExtra: true},
{ID: "matrix", Type: "dark", IsExtra: true},
{ID: "sapphiredusk", Type: "dark", IsExtra: true},
{ID: "synthwave", Type: "dark", IsExtra: true},
{ID: "tron", Type: "dark", IsExtra: true},
{ID: "victorian", Type: "dark", IsExtra: true},
{ID: "zen", Type: "light", IsExtra: true},
{ID: "gloom", Type: "dark", IsExtra: true},
}
func GetThemeByID(id string) *ThemeDTO {
for _, theme := range themes {
if theme.ID == id {

View File

@@ -0,0 +1,21 @@
// Code generated by go generate; DO NOT EDIT.
package pref
var themes = []ThemeDTO{
{ID: "light", Type: "light"},
{ID: "dark", Type: "dark"},
{ID: "system", Type: "dark"},
{ID: "aubergine", Type: "dark", IsExtra: true},
{ID: "debug", Type: "dark", IsExtra: true},
{ID: "desertbloom", Type: "light", IsExtra: true},
{ID: "gildedgrove", Type: "dark", IsExtra: true},
{ID: "gloom", Type: "dark", IsExtra: true},
{ID: "mars", Type: "dark", IsExtra: true},
{ID: "matrix", Type: "dark", IsExtra: true},
{ID: "sapphiredusk", Type: "dark", IsExtra: true},
{ID: "synthwave", Type: "dark", IsExtra: true},
{ID: "tron", Type: "dark", IsExtra: true},
{ID: "victorian", Type: "dark", IsExtra: true},
{ID: "zen", Type: "light", IsExtra: true},
}

View File

@@ -358,6 +358,8 @@ func (fr *FileReader) saveDashboard(ctx context.Context, path string, folderID i
Name: fr.Cfg.Name,
Updated: resolvedFileInfo.ModTime().Unix(),
CheckSum: jsonFile.checkSum,
// Adds `grafana.app/managerAllowsEdits` to provisioned dashboards in unified storage; not used in legacy storage.
AllowUIUpdates: fr.Cfg.AllowUIUpdates,
}
_, err := fr.dashboardProvisioningService.SaveProvisionedDashboard(ctx, dash, dp)
if err != nil {

View File

@@ -33,6 +33,8 @@ import (
)
func TestIntegrationFolderTreeZanzana(t *testing.T) {
// TODO: Add back OSS seeding and enable this test
t.Skip("Skipping folder tree test with Zanzana")
testutil.SkipIntegrationTestInShortMode(t)
runIntegrationFolderTree(t, testinfra.GrafanaOpts{

View File

@@ -57,7 +57,7 @@ export class AppWrapper extends Component<AppWrapperProps, AppWrapperState> {
async componentDidMount() {
this.setState({ ready: true });
$('.preloader').remove();
this.removePreloader();
// clear any old icon caches
const cacheKeys = (await window.caches?.keys()) ?? [];
@@ -68,6 +68,15 @@ export class AppWrapper extends Component<AppWrapperProps, AppWrapperState> {
}
}
removePreloader() {
const preloader = document.querySelector('.preloader');
if (preloader) {
preloader.remove();
} else {
console.warn('Preloader element not found');
}
}
renderRoute = (route: RouteDescriptor) => {
return (
<Route

View File

@@ -5,7 +5,7 @@ import { GrafanaTheme2 } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { Trans, t } from '@grafana/i18n';
import { SceneObject } from '@grafana/scenes';
import { Box, Icon, Sidebar, Text, useElementSelection, useStyles2 } from '@grafana/ui';
import { Box, Icon, ScrollContainer, Sidebar, Text, useElementSelection, useStyles2 } from '@grafana/ui';
import { isRepeatCloneOrChildOf } from '../utils/clone';
import { DashboardInteractions } from '../utils/interactions';
@@ -24,12 +24,14 @@ export function DashboardOutline({ editPane, isEditing }: Props) {
const dashboard = getDashboardSceneFor(editPane);
return (
<>
<Box display="flex" direction="column" flex={1} height="100%">
<Sidebar.PaneHeader title={t('dashboard.outline.pane-header', 'Content outline')} />
<Box padding={1} gap={0} display="flex" direction="column" element="ul" role="tree" position="relative">
<DashboardOutlineNode sceneObject={dashboard} isEditing={isEditing} editPane={editPane} depth={0} index={0} />
</Box>
</>
<ScrollContainer showScrollIndicators={true}>
<Box padding={1} gap={0} display="flex" direction="column" element="ul" role="tree" position="relative">
<DashboardOutlineNode sceneObject={dashboard} isEditing={isEditing} editPane={editPane} depth={0} index={0} />
</Box>
</ScrollContainer>
</Box>
);
}

View File

@@ -1,7 +1,15 @@
import { GrafanaConfig, locationUtil } from '@grafana/data';
import * as folderHooks from 'app/api/clients/folder/v1beta1/hooks';
import { backendSrv } from 'app/core/services/backend_srv';
import { AnnoKeyFolder, AnnoKeyMessage, AnnoReloadOnParamsChange } from 'app/features/apiserver/types';
import {
AnnoKeyFolder,
AnnoKeyManagerAllowsEdits,
AnnoKeyManagerKind,
AnnoKeyMessage,
AnnoKeySourcePath,
AnnoReloadOnParamsChange,
ManagerKind,
} from 'app/features/apiserver/types';
import { DashboardDataDTO } from 'app/types/dashboard';
import { DashboardWithAccessInfo } from './types';
@@ -215,6 +223,63 @@ describe('v1 dashboard API', () => {
expect(result.meta.reloadOnParamsChange).toBe(true);
});
describe('managed/provisioned dashboards', () => {
it('should not mark dashboard as provisioned when manager allows UI edits', async () => {
mockGet.mockResolvedValueOnce({
...mockDashboardDto,
metadata: {
...mockDashboardDto.metadata,
annotations: {
[AnnoKeyManagerKind]: ManagerKind.Terraform,
[AnnoKeyManagerAllowsEdits]: 'true',
[AnnoKeySourcePath]: 'dashboards/test.json',
},
},
});
const api = new K8sDashboardAPI();
const result = await api.getDashboardDTO('test');
expect(result.meta.provisioned).toBe(false);
expect(result.meta.provisionedExternalId).toBe('dashboards/test.json');
});
it('should mark dashboard as provisioned when manager does not allow UI edits', async () => {
mockGet.mockResolvedValueOnce({
...mockDashboardDto,
metadata: {
...mockDashboardDto.metadata,
annotations: {
[AnnoKeyManagerKind]: ManagerKind.Terraform,
[AnnoKeySourcePath]: 'dashboards/test.json',
},
},
});
const api = new K8sDashboardAPI();
const result = await api.getDashboardDTO('test');
expect(result.meta.provisioned).toBe(true);
expect(result.meta.provisionedExternalId).toBe('dashboards/test.json');
});
it('should not mark repository-managed dashboard as provisioned (locked)', async () => {
mockGet.mockResolvedValueOnce({
...mockDashboardDto,
metadata: {
...mockDashboardDto.metadata,
annotations: {
[AnnoKeyManagerKind]: ManagerKind.Repo,
[AnnoKeySourcePath]: 'dashboards/test.json',
},
},
});
const api = new K8sDashboardAPI();
const result = await api.getDashboardDTO('test');
expect(result.meta.provisioned).toBe(false);
expect(result.meta.provisionedExternalId).toBe('dashboards/test.json');
});
});
describe('saveDashboard', () => {
beforeEach(() => {
locationUtil.initialize({

View File

@@ -164,7 +164,11 @@ export class K8sDashboardAPI implements DashboardAPI<DashboardDTO, Dashboard> {
const managerKind = annotations[AnnoKeyManagerKind];
if (managerKind) {
result.meta.provisioned = annotations[AnnoKeyManagerAllowsEdits] === 'true' || managerKind === ManagerKind.Repo;
// `meta.provisioned` is used by the save/delete UI to decide if a dashboard is locked
// (i.e. it can't be saved from the UI). This should match the legacy behavior where
// `allowUiUpdates: true` keeps the dashboard editable/savable.
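// Resulting flag, per the expression below: managerAllowsEdits === 'true' -> provisioned = false;
// managerKind === ManagerKind.Repo -> provisioned = false; any other managed dashboard -> provisioned = true.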
const allowsEdits = annotations[AnnoKeyManagerAllowsEdits] === 'true';
result.meta.provisioned = !allowsEdits && managerKind !== ManagerKind.Repo;
result.meta.provisionedExternalId = annotations[AnnoKeySourcePath];
}

View File

@@ -78,6 +78,7 @@ export const BasicProvisionedDashboardsEmptyPage = ({ datasourceUid }: Props) =>
sourceEntryPoint: SOURCE_ENTRY_POINTS.DATASOURCE_PAGE,
libraryItemId: dashboard.uid,
creationOrigin: CREATION_ORIGINS.DASHBOARD_LIBRARY_DATASOURCE_DASHBOARD,
contentKind: CONTENT_KINDS.DATASOURCE_DASHBOARD,
});
const templateUrl = `${DASHBOARD_LIBRARY_ROUTES.Template}?${params.toString()}`;

View File

@@ -0,0 +1,125 @@
import { screen, waitFor } from '@testing-library/react';
import React from 'react';
import { render } from 'test/test-utils';
import { CommunityDashboardSection } from './CommunityDashboardSection';
import { fetchCommunityDashboards } from './api/dashboardLibraryApi';
import { GnetDashboard } from './types';
import { onUseCommunityDashboard } from './utils/communityDashboardHelpers';
jest.mock('./api/dashboardLibraryApi', () => ({
fetchCommunityDashboards: jest.fn(),
}));
jest.mock('./utils/communityDashboardHelpers', () => ({
...jest.requireActual('./utils/communityDashboardHelpers'),
onUseCommunityDashboard: jest.fn(),
}));
jest.mock('@grafana/runtime', () => ({
...jest.requireActual('@grafana/runtime'),
getDataSourceSrv: () => ({
getInstanceSettings: jest.fn((uid: string) => ({
uid,
name: `DataSource ${uid}`,
type: 'test',
})),
}),
}));
const mockFetchCommunityDashboards = fetchCommunityDashboards as jest.MockedFunction<typeof fetchCommunityDashboards>;
const mockOnUseCommunityDashboard = onUseCommunityDashboard as jest.MockedFunction<typeof onUseCommunityDashboard>;
const createMockGnetDashboard = (overrides: Partial<GnetDashboard> = {}): GnetDashboard => ({
id: 1,
name: 'Test Dashboard',
description: 'Test Description',
downloads: 2000,
datasource: 'Prometheus',
slug: 'test-dashboard',
...overrides,
});
const setup = async (
props: Partial<React.ComponentProps<typeof CommunityDashboardSection>> = {},
successScenario = true
) => {
const renderResult = render(
<CommunityDashboardSection onShowMapping={jest.fn()} datasourceType="test" {...props} />,
{
historyOptions: {
initialEntries: ['/test?dashboardLibraryDatasourceUid=test-datasource-uid'],
},
}
);
if (successScenario) {
await waitFor(() => {
expect(screen.getByText('Test Dashboard')).toBeInTheDocument();
});
}
return renderResult;
};
describe('CommunityDashboardSection', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should render', async () => {
mockFetchCommunityDashboards.mockResolvedValue({
page: 1,
pages: 5,
items: [
createMockGnetDashboard(),
createMockGnetDashboard({ id: 2, name: 'Test Dashboard 2' }),
createMockGnetDashboard({ id: 3, name: 'Test Dashboard 3' }),
],
});
await setup();
await waitFor(() => {
expect(screen.getByText('Test Dashboard')).toBeInTheDocument();
expect(screen.getByText('Test Dashboard 2')).toBeInTheDocument();
expect(screen.getByText('Test Dashboard 3')).toBeInTheDocument();
});
});
it('should show error when fetching a specific community dashboard after clicking use dashboard button fails', async () => {
mockFetchCommunityDashboards.mockResolvedValue({
page: 1,
pages: 5,
items: [createMockGnetDashboard()],
});
mockOnUseCommunityDashboard.mockRejectedValue(new Error('Failed to use community dashboard'));
const { user } = await setup();
await waitFor(() => {
expect(screen.getByText('Test Dashboard')).toBeInTheDocument();
});
const useDashboardButton = screen.getByRole('button', { name: 'Use dashboard' });
await user.click(useDashboardButton);
await waitFor(() => {
expect(screen.getByText('Error loading community dashboard')).toBeInTheDocument();
});
});
it('should show error when fetching community dashboards list fails', async () => {
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
mockFetchCommunityDashboards.mockRejectedValue(new Error('Failed to fetch community dashboards'));
await setup(undefined, false);
await waitFor(() => {
expect(screen.getByText('Error loading community dashboards')).toBeInTheDocument();
});
expect(consoleErrorSpy).toHaveBeenCalledWith('Error loading community dashboards', expect.any(Error));
consoleErrorSpy.mockRestore();
});
});

View File

@@ -1,12 +1,12 @@
import { css } from '@emotion/css';
import { useEffect, useRef, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { useAsync, useDebounce } from 'react-use';
import { useAsyncFn, useAsyncRetry, useDebounce } from 'react-use';
import { GrafanaTheme2 } from '@grafana/data';
import { Trans, t } from '@grafana/i18n';
import { getDataSourceSrv } from '@grafana/runtime';
import { Button, useStyles2, Stack, Grid, EmptyState, Alert, Pagination, FilterInput } from '@grafana/ui';
import { Button, useStyles2, Stack, Grid, EmptyState, Alert, FilterInput, Box } from '@grafana/ui';
import { DashboardCard } from './DashboardCard';
import { MappingContext } from './SuggestedDashboardsModal';
@@ -24,6 +24,8 @@ import {
getLogoUrl,
buildDashboardDetails,
onUseCommunityDashboard,
COMMUNITY_PAGE_SIZE_QUERY,
COMMUNITY_RESULT_SIZE,
} from './utils/communityDashboardHelpers';
interface Props {
@@ -31,8 +33,6 @@ interface Props {
datasourceType?: string;
}
// Constants for community dashboard pagination and API params
const COMMUNITY_PAGE_SIZE = 9;
const SEARCH_DEBOUNCE_MS = 500;
const DEFAULT_SORT_ORDER = 'downloads';
const DEFAULT_SORT_DIRECTION = 'desc';
@@ -42,7 +42,6 @@ const INCLUDE_SCREENSHOTS = true;
export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Props) => {
const [searchParams] = useSearchParams();
const datasourceUid = searchParams.get('dashboardLibraryDatasourceUid');
const [currentPage, setCurrentPage] = useState(1);
const [searchQuery, setSearchQuery] = useState('');
const hasTrackedLoaded = useRef(false);
@@ -55,18 +54,12 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
[searchQuery]
);
// Reset to page 1 when debounced search query changes
useEffect(() => {
if (debouncedSearchQuery) {
setCurrentPage(1);
}
}, [debouncedSearchQuery]);
const {
value: response,
loading,
error,
} = useAsync(async () => {
retry,
} = useAsyncRetry(async () => {
if (!datasourceUid) {
return null;
}
@@ -80,8 +73,8 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
const apiResponse = await fetchCommunityDashboards({
orderBy: DEFAULT_SORT_ORDER,
direction: DEFAULT_SORT_DIRECTION,
page: currentPage,
pageSize: COMMUNITY_PAGE_SIZE,
page: 1,
pageSize: COMMUNITY_PAGE_SIZE_QUERY,
includeLogo: INCLUDE_LOGO,
includeScreenshots: INCLUDE_SCREENSHOTS,
dataSourceSlugIn: ds.type,
@@ -100,15 +93,14 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
}
return {
dashboards: apiResponse.items,
pages: apiResponse.pages,
dashboards: apiResponse.items.slice(0, COMMUNITY_RESULT_SIZE),
datasourceType: ds.type,
};
} catch (err) {
console.error('Error loading community dashboards', err);
throw err;
}
}, [datasourceUid, currentPage, debouncedSearchQuery]);
}, [datasourceUid, debouncedSearchQuery]);
// Track analytics only once on first successful load
useEffect(() => {
@@ -128,37 +120,49 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
// Determine what to show in results area
const dashboards = Array.isArray(response?.dashboards) ? response.dashboards : [];
const totalPages = response?.pages || 1;
const showEmptyState = !loading && (!response?.dashboards || response.dashboards.length === 0);
const showError = !loading && error;
const onPreviewCommunityDashboard = (dashboard: GnetDashboard) => {
if (!response) {
return;
}
const [{ error: isPreviewDashboardError }, onPreviewCommunityDashboard] = useAsyncFn(
async (dashboard: GnetDashboard) => {
if (!response) {
return;
}
// Track item click
DashboardLibraryInteractions.itemClicked({
contentKind: CONTENT_KINDS.COMMUNITY_DASHBOARD,
datasourceTypes: [response.datasourceType],
libraryItemId: String(dashboard.id),
libraryItemTitle: dashboard.name,
sourceEntryPoint: SOURCE_ENTRY_POINTS.DATASOURCE_PAGE,
eventLocation: EVENT_LOCATIONS.MODAL_COMMUNITY_TAB,
discoveryMethod: debouncedSearchQuery.trim() ? DISCOVERY_METHODS.SEARCH : DISCOVERY_METHODS.BROWSE,
});
// Track item click
DashboardLibraryInteractions.itemClicked({
contentKind: CONTENT_KINDS.COMMUNITY_DASHBOARD,
datasourceTypes: [response.datasourceType],
libraryItemId: String(dashboard.id),
libraryItemTitle: dashboard.name,
sourceEntryPoint: SOURCE_ENTRY_POINTS.DATASOURCE_PAGE,
eventLocation: EVENT_LOCATIONS.MODAL_COMMUNITY_TAB,
discoveryMethod: debouncedSearchQuery.trim() ? DISCOVERY_METHODS.SEARCH : DISCOVERY_METHODS.BROWSE,
});
onUseCommunityDashboard({
dashboard,
datasourceUid: datasourceUid || '',
datasourceType: response.datasourceType,
eventLocation: EVENT_LOCATIONS.MODAL_COMMUNITY_TAB,
onShowMapping,
});
};
await onUseCommunityDashboard({
dashboard,
datasourceUid: datasourceUid || '',
datasourceType: response.datasourceType,
eventLocation: EVENT_LOCATIONS.MODAL_COMMUNITY_TAB,
onShowMapping,
});
},
[response, datasourceUid, debouncedSearchQuery, onShowMapping]
);
return (
<Stack direction="column" gap={2} height="100%">
{isPreviewDashboardError && (
<div>
<Alert
title={t('dashboard-library.community-error-title', 'Error loading community dashboard')}
severity="error"
>
<Trans i18nKey="dashboard-library.community-error-description">Failed to load community dashboard.</Trans>
</Alert>
</div>
)}
<FilterInput
placeholder={
datasourceType
@@ -183,7 +187,7 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
lg: 3,
}}
>
{Array.from({ length: COMMUNITY_PAGE_SIZE }).map((_, i) => (
{Array.from({ length: COMMUNITY_RESULT_SIZE }).map((_, i) => (
<DashboardCard.Skeleton key={`skeleton-${i}`} />
))}
</Grid>
@@ -197,7 +201,7 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
Failed to load community dashboards. Please try again.
</Trans>
</Alert>
<Button variant="secondary" onClick={() => setCurrentPage(1)}>
<Button variant="secondary" onClick={retry}>
<Trans i18nKey="dashboard-library.retry">Retry</Trans>
</Button>
</Stack>
@@ -233,42 +237,47 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
)}
</EmptyState>
) : (
<Grid
gap={4}
columns={{
xs: 1,
sm: dashboards.length >= 2 ? 2 : 1,
lg: dashboards.length >= 3 ? 3 : dashboards.length >= 2 ? 2 : 1,
}}
>
{dashboards.map((dashboard) => {
const thumbnailUrl = getThumbnailUrl(dashboard);
const logoUrl = getLogoUrl(dashboard);
const imageUrl = thumbnailUrl || logoUrl;
const isLogo = !thumbnailUrl;
const details = buildDashboardDetails(dashboard);
<Stack direction="column" gap={2}>
<Grid
gap={4}
columns={{
xs: 1,
sm: dashboards.length >= 2 ? 2 : 1,
lg: dashboards.length >= 3 ? 3 : dashboards.length >= 2 ? 2 : 1,
}}
>
{dashboards.map((dashboard) => {
const thumbnailUrl = getThumbnailUrl(dashboard);
const logoUrl = getLogoUrl(dashboard);
const imageUrl = thumbnailUrl || logoUrl;
const isLogo = !thumbnailUrl;
const details = buildDashboardDetails(dashboard);
return (
<DashboardCard
key={dashboard.id}
title={dashboard.name}
imageUrl={imageUrl}
dashboard={dashboard}
onClick={() => onPreviewCommunityDashboard(dashboard)}
isLogo={isLogo}
details={details}
kind="suggested_dashboard"
/>
);
})}
</Grid>
return (
<DashboardCard
key={dashboard.id}
title={dashboard.name}
imageUrl={imageUrl}
dashboard={dashboard}
onClick={() => onPreviewCommunityDashboard(dashboard)}
isLogo={isLogo}
details={details}
kind="suggested_dashboard"
/>
);
})}
</Grid>
<Box display="flex" justifyContent="end" gap={2} paddingRight={1.5}>
<Button
variant="secondary"
onClick={() => window.open('https://grafana.com/grafana/dashboards/', '_blank')}
>
<Trans i18nKey="dashboard-library.browse-grafana-com">Browse Grafana.com</Trans>
</Button>
</Box>
</Stack>
)}
</div>
{totalPages > 1 && (
<div className={styles.paginationWrapper}>
<Pagination currentPage={currentPage} numberOfPages={totalPages} onNavigate={setCurrentPage} />
</div>
)}
</Stack>
);
};
@@ -277,18 +286,9 @@ function getStyles(theme: GrafanaTheme2) {
return {
resultsContainer: css({
width: '100%',
position: 'relative',
flex: 1,
overflow: 'auto',
}),
paginationWrapper: css({
position: 'sticky',
bottom: 0,
backgroundColor: theme.colors.background.primary,
padding: theme.spacing(2),
display: 'flex',
justifyContent: 'flex-end',
zIndex: 2,
paddingBottom: theme.spacing(2),
}),
};
}

View File

@@ -1,41 +1,8 @@
import { screen } from '@testing-library/react';
import { render } from 'test/test-utils';
import { PluginDashboard } from 'app/types/plugins';
import { DashboardCard } from './DashboardCard';
import { GnetDashboard } from './types';
// Helper functions for creating mock objects
const createMockPluginDashboard = (overrides: Partial<PluginDashboard> = {}): PluginDashboard => ({
dashboardId: 1,
description: 'Test description',
imported: false,
importedRevision: 0,
importedUri: '',
importedUrl: '',
path: '',
pluginId: 'test-plugin',
removed: false,
revision: 1,
slug: 'test-dashboard',
title: 'Test Dashboard',
uid: 'test-uid',
...overrides,
});
const createMockGnetDashboard = (overrides: Partial<GnetDashboard> = {}): GnetDashboard => ({
id: 123,
name: 'Test Dashboard',
description: 'Test description',
datasource: 'Prometheus',
orgName: 'Test Org',
userName: 'testuser',
publishedAt: '',
updatedAt: '',
downloads: 0,
...overrides,
});
import { createMockGnetDashboard, createMockPluginDashboard } from './utils/test-utils';
const createMockDetails = (overrides = {}) => ({
id: '123',

View File

@@ -0,0 +1,273 @@
import { screen, waitFor, within } from '@testing-library/react';
import { render } from 'test/test-utils';
import { locationService } from '@grafana/runtime';
import { DashboardLibrarySection } from './DashboardLibrarySection';
import { fetchProvisionedDashboards } from './api/dashboardLibraryApi';
import { DashboardLibraryInteractions } from './interactions';
import { createMockPluginDashboard } from './utils/test-utils';
jest.mock('./api/dashboardLibraryApi', () => ({
fetchProvisionedDashboards: jest.fn(),
}));
jest.mock('@grafana/runtime', () => ({
...jest.requireActual('@grafana/runtime'),
getDataSourceSrv: () => ({
getInstanceSettings: jest.fn((uid?: string) => {
if (uid) {
return {
uid,
name: `DataSource ${uid}`,
type: 'test-datasource',
};
}
return null;
}),
}),
locationService: {
push: jest.fn(),
getHistory: jest.fn(() => ({
listen: jest.fn(() => jest.fn()),
})),
},
}));
jest.mock('./interactions', () => ({
...jest.requireActual('./interactions'),
DashboardLibraryInteractions: {
loaded: jest.fn(),
itemClicked: jest.fn(),
},
}));
jest.mock('./DashboardCard', () => {
const DashboardCardComponent = ({ title, onClick }: { title: string; onClick: () => void }) => (
<div data-testid={`dashboard-card-${title}`} onClick={onClick}>
{title}
</div>
);
const DashboardCardSkeleton = () => <div data-testid="dashboard-card-skeleton">Skeleton</div>;
return {
DashboardCard: Object.assign(DashboardCardComponent, {
Skeleton: DashboardCardSkeleton,
}),
};
});
const mockFetchProvisionedDashboards = fetchProvisionedDashboards as jest.MockedFunction<
typeof fetchProvisionedDashboards
>;
const mockLocationServicePush = locationService.push as jest.MockedFunction<typeof locationService.push>;
const mockDashboardLibraryInteractionsLoaded = DashboardLibraryInteractions.loaded as jest.MockedFunction<
typeof DashboardLibraryInteractions.loaded
>;
const mockDashboardLibraryInteractionsItemClicked = DashboardLibraryInteractions.itemClicked as jest.MockedFunction<
typeof DashboardLibraryInteractions.itemClicked
>;
describe('DashboardLibrarySection', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should render dashboards when they are available', async () => {
const dashboards = [
createMockPluginDashboard({ title: 'Dashboard 1', uid: 'uid-1' }),
createMockPluginDashboard({ title: 'Dashboard 2', uid: 'uid-2' }),
];
mockFetchProvisionedDashboards.mockResolvedValue(dashboards);
render(<DashboardLibrarySection />, {
historyOptions: {
initialEntries: ['/test?dashboardLibraryDatasourceUid=test-uid'],
},
});
await waitFor(() => {
expect(screen.getByTestId('dashboard-card-Dashboard 1')).toBeInTheDocument();
expect(screen.getByTestId('dashboard-card-Dashboard 2')).toBeInTheDocument();
});
});
it('should show empty state when there are no dashboards', async () => {
mockFetchProvisionedDashboards.mockResolvedValue([]);
render(<DashboardLibrarySection />, {
historyOptions: {
initialEntries: ['/test?dashboardLibraryDatasourceUid=test-uid'],
},
});
await waitFor(() => {
expect(screen.getByText('No test-datasource provisioned dashboards found')).toBeInTheDocument();
expect(
screen.getByText(
'Provisioned dashboards are provided by data source plugins. You can find more plugins on Grafana.com.'
)
).toBeInTheDocument();
const browseButton = screen.getByRole('button', { name: 'Browse plugins' });
expect(browseButton).toBeInTheDocument();
});
});
it('should show empty state without datasource type when datasourceUid is not provided', async () => {
mockFetchProvisionedDashboards.mockResolvedValue([]);
render(<DashboardLibrarySection />, {
historyOptions: {
initialEntries: ['/test'],
},
});
await waitFor(() => {
expect(screen.getByText('No provisioned dashboards found')).toBeInTheDocument();
});
});
it('should render pagination when there are more than 9 dashboards', async () => {
const dashboards = Array.from({ length: 18 }, (_, i) =>
createMockPluginDashboard({ title: `Dashboard ${i + 1}`, uid: `uid-${i + 1}` })
);
mockFetchProvisionedDashboards.mockResolvedValue(dashboards);
render(<DashboardLibrarySection />, {
historyOptions: {
initialEntries: ['/test?dashboardLibraryDatasourceUid=test-uid'],
},
});
await waitFor(() => {
const pagination = screen.getByRole('navigation');
expect(pagination).toBeInTheDocument();
expect(within(pagination).getByText('1')).toBeInTheDocument();
expect(within(pagination).getByText('2')).toBeInTheDocument();
});
});
it('should not render pagination when there are 9 or fewer dashboards', async () => {
const dashboards = Array.from({ length: 9 }, (_, i) =>
createMockPluginDashboard({ title: `Dashboard ${i + 1}`, uid: `uid-${i + 1}` })
);
mockFetchProvisionedDashboards.mockResolvedValue(dashboards);
render(<DashboardLibrarySection />, {
historyOptions: {
initialEntries: ['/test?dashboardLibraryDatasourceUid=test-uid'],
},
});
await waitFor(() => {
expect(screen.getByTestId('dashboard-card-Dashboard 1')).toBeInTheDocument();
});
const pagination = screen.queryByRole('navigation');
expect(pagination).not.toBeInTheDocument();
});
it('should navigate to template route when clicking on a dashboard', async () => {
const dashboard = createMockPluginDashboard({
title: 'Test Dashboard',
uid: 'test-uid-123',
pluginId: 'test-plugin',
path: 'test/path.json',
});
mockFetchProvisionedDashboards.mockResolvedValue([dashboard]);
render(<DashboardLibrarySection />, {
historyOptions: {
initialEntries: ['/test?dashboardLibraryDatasourceUid=test-uid'],
},
});
await waitFor(() => {
expect(screen.getByTestId('dashboard-card-Test Dashboard')).toBeInTheDocument();
});
const dashboardCard = screen.getByTestId('dashboard-card-Test Dashboard');
dashboardCard.click();
await waitFor(() => {
expect(mockLocationServicePush).toHaveBeenCalled();
const callArgs = mockLocationServicePush.mock.calls[0][0];
expect(callArgs).toContain('/dashboard/template');
expect(callArgs).toContain('datasource=test-uid');
expect(callArgs).toContain('title=Test+Dashboard');
expect(callArgs).toContain('pluginId=test-plugin');
expect(callArgs).toContain('path=test%2Fpath.json');
expect(callArgs).toContain('libraryItemId=test-uid-123');
});
});
it('should track analytics when dashboards are loaded', async () => {
const dashboards = [
createMockPluginDashboard({ title: 'Dashboard 1', uid: 'uid-1' }),
createMockPluginDashboard({ title: 'Dashboard 2', uid: 'uid-2' }),
];
mockFetchProvisionedDashboards.mockResolvedValue(dashboards);
render(<DashboardLibrarySection />, {
historyOptions: {
initialEntries: ['/test?dashboardLibraryDatasourceUid=test-uid'],
},
});
await waitFor(() => {
expect(screen.getByTestId('dashboard-card-Dashboard 1')).toBeInTheDocument();
});
await waitFor(() => {
expect(mockDashboardLibraryInteractionsLoaded).toHaveBeenCalledWith({
numberOfItems: 2,
contentKinds: ['datasource_dashboard'],
datasourceTypes: ['test-datasource'],
sourceEntryPoint: 'datasource_page',
eventLocation: 'suggested_dashboards_modal_provisioned_tab',
});
});
});
it('should track analytics when a dashboard is clicked', async () => {
const dashboard = createMockPluginDashboard({
title: 'Test Dashboard',
uid: 'test-uid-123',
pluginId: 'test-plugin',
});
mockFetchProvisionedDashboards.mockResolvedValue([dashboard]);
render(<DashboardLibrarySection />, {
historyOptions: {
initialEntries: ['/test?dashboardLibraryDatasourceUid=test-uid'],
},
});
await waitFor(() => {
expect(screen.getByTestId('dashboard-card-Test Dashboard')).toBeInTheDocument();
});
const dashboardCard = screen.getByTestId('dashboard-card-Test Dashboard');
dashboardCard.click();
await waitFor(() => {
expect(mockDashboardLibraryInteractionsItemClicked).toHaveBeenCalledWith({
contentKind: 'datasource_dashboard',
datasourceTypes: ['test-plugin'],
libraryItemId: 'test-uid-123',
libraryItemTitle: 'Test Dashboard',
sourceEntryPoint: 'datasource_page',
eventLocation: 'suggested_dashboards_modal_provisioned_tab',
discoveryMethod: 'browse',
});
});
});
});

View File

@@ -0,0 +1,186 @@
import { screen, waitFor } from '@testing-library/react';
import { render } from 'test/test-utils';
import { SuggestedDashboards } from './SuggestedDashboards';
import { fetchCommunityDashboards, fetchProvisionedDashboards } from './api/dashboardLibraryApi';
import { createMockGnetDashboard, createMockPluginDashboard } from './utils/test-utils';
jest.mock('./api/dashboardLibraryApi', () => ({
fetchProvisionedDashboards: jest.fn(),
fetchCommunityDashboards: jest.fn(),
}));
jest.mock('./utils/communityDashboardHelpers', () => ({
...jest.requireActual('./utils/communityDashboardHelpers'),
onUseCommunityDashboard: jest.fn(),
}));
jest.mock('./SuggestedDashboardsModal', () => ({
SuggestedDashboardsModal: () => <div data-testid="suggested-dashboards-modal">Modal</div>,
}));
jest.mock('./DashboardCard', () => {
const DashboardCardComponent = ({ title, onClick }: { title: string; onClick: () => void }) => (
<div data-testid={`dashboard-card-${title}`} onClick={onClick}>
{title}
</div>
);
const DashboardCardSkeleton = () => <div data-testid="dashboard-card-skeleton">Skeleton</div>;
return {
DashboardCard: Object.assign(DashboardCardComponent, {
Skeleton: DashboardCardSkeleton,
}),
};
});
jest.mock('@grafana/runtime', () => ({
...jest.requireActual('@grafana/runtime'),
getDataSourceSrv: () => ({
getInstanceSettings: jest.fn((uid?: string) => {
if (uid) {
return {
uid,
name: `DataSource ${uid}`,
type: 'test-datasource',
};
}
return null;
}),
}),
}));
jest.mock('./interactions', () => ({
...jest.requireActual('./interactions'),
DashboardLibraryInteractions: {
loaded: jest.fn(),
itemClicked: jest.fn(),
},
}));
const mockFetchProvisionedDashboards = fetchProvisionedDashboards as jest.MockedFunction<
typeof fetchProvisionedDashboards
>;
const mockFetchCommunityDashboards = fetchCommunityDashboards as jest.MockedFunction<typeof fetchCommunityDashboards>;
describe('SuggestedDashboards', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should render when there are dashboards', async () => {
mockFetchProvisionedDashboards.mockResolvedValue([createMockPluginDashboard()]);
mockFetchCommunityDashboards.mockResolvedValue({
page: 1,
pages: 1,
items: [createMockGnetDashboard()],
});
render(<SuggestedDashboards datasourceUid="test-uid" />);
await waitFor(() => {
expect(screen.getByTestId('suggested-dashboards')).toBeInTheDocument();
});
});
it('should not render when there are no dashboards', async () => {
mockFetchProvisionedDashboards.mockResolvedValue([]);
mockFetchCommunityDashboards.mockResolvedValue({
page: 1,
pages: 1,
items: [],
});
render(<SuggestedDashboards datasourceUid="test-uid" />);
await waitFor(() => {
expect(screen.queryByTestId('suggested-dashboards')).not.toBeInTheDocument();
});
});
it('should render provisioned dashboard cards', async () => {
const provisionedDashboard = createMockPluginDashboard({ title: 'Provisioned Dashboard 1' });
mockFetchProvisionedDashboards.mockResolvedValue([provisionedDashboard]);
mockFetchCommunityDashboards.mockResolvedValue({
page: 1,
pages: 1,
items: [],
});
render(<SuggestedDashboards datasourceUid="test-uid" />);
await waitFor(() => {
expect(screen.getByTestId('dashboard-card-Provisioned Dashboard 1')).toBeInTheDocument();
});
});
it('should render community dashboard cards', async () => {
const communityDashboard = createMockGnetDashboard({ name: 'Community Dashboard 1' });
mockFetchProvisionedDashboards.mockResolvedValue([]);
mockFetchCommunityDashboards.mockResolvedValue({
page: 1,
pages: 1,
items: [communityDashboard],
});
render(<SuggestedDashboards datasourceUid="test-uid" />);
await waitFor(() => {
expect(screen.getByTestId('dashboard-card-Community Dashboard 1')).toBeInTheDocument();
});
});
it('should show "View all" button when hasMoreDashboards is true', async () => {
mockFetchProvisionedDashboards.mockResolvedValue([
createMockPluginDashboard(),
createMockPluginDashboard({ title: 'Provisioned Dashboard 2' }),
]);
mockFetchCommunityDashboards.mockResolvedValue({
page: 1,
pages: 1,
items: [],
});
render(<SuggestedDashboards datasourceUid="test-uid" />);
await waitFor(() => {
expect(screen.getByRole('button', { name: 'View all' })).toBeInTheDocument();
});
});
it('should not show "View all" button when hasMoreDashboards is false', async () => {
mockFetchProvisionedDashboards.mockResolvedValue([createMockPluginDashboard()]);
mockFetchCommunityDashboards.mockResolvedValue({
page: 1,
pages: 1,
items: [createMockGnetDashboard()],
});
render(<SuggestedDashboards datasourceUid="test-uid" />);
await waitFor(() => {
expect(screen.queryByRole('button', { name: 'View all' })).not.toBeInTheDocument();
});
});
it('should render title and subtitle with datasource type when datasourceUid is provided', async () => {
mockFetchProvisionedDashboards.mockResolvedValue([createMockPluginDashboard()]);
mockFetchCommunityDashboards.mockResolvedValue({
page: 1,
pages: 1,
items: [],
});
render(<SuggestedDashboards datasourceUid="test-uid" />);
await waitFor(() => {
expect(
screen.getByText('Build a dashboard using suggested options for your test-datasource data source')
).toBeInTheDocument();
expect(
screen.getByText('Browse and select from data-source provided or community dashboards')
).toBeInTheDocument();
});
});
});

View File

@@ -1,12 +1,12 @@
import { css } from '@emotion/css';
import { useEffect, useMemo, useRef, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { useAsync } from 'react-use';
import { useAsync, useAsyncFn } from 'react-use';
import { GrafanaTheme2 } from '@grafana/data';
import { Trans, t } from '@grafana/i18n';
import { getDataSourceSrv, locationService } from '@grafana/runtime';
import { Button, useStyles2, Grid } from '@grafana/ui';
import { Button, useStyles2, Grid, Alert } from '@grafana/ui';
import { PluginDashboard } from 'app/types/plugins';
import { DashboardCard } from './DashboardCard';
@@ -26,6 +26,8 @@ import {
getLogoUrl,
buildDashboardDetails,
onUseCommunityDashboard,
COMMUNITY_PAGE_SIZE_QUERY,
COMMUNITY_RESULT_SIZE,
} from './utils/communityDashboardHelpers';
import { getProvisionedDashboardImageUrl } from './utils/provisionedDashboardHelpers';
@@ -43,7 +45,7 @@ type SuggestedDashboardsResult = {
};
// Constants for suggested dashboards API params
const SUGGESTED_COMMUNITY_PAGE_SIZE = 2;
const MAX_SUGGESTED_DASHBOARDS_PREVIEW = 2;
const DEFAULT_SORT_ORDER = 'downloads';
const DEFAULT_SORT_DIRECTION = 'desc';
const INCLUDE_SCREENSHOTS = true;
@@ -91,14 +93,14 @@ export const SuggestedDashboards = ({ datasourceUid }: Props) => {
orderBy: DEFAULT_SORT_ORDER,
direction: DEFAULT_SORT_DIRECTION,
page: 1,
pageSize: SUGGESTED_COMMUNITY_PAGE_SIZE,
pageSize: COMMUNITY_PAGE_SIZE_QUERY,
includeScreenshots: INCLUDE_SCREENSHOTS,
dataSourceSlugIn: ds.type,
includeLogo: INCLUDE_LOGO,
}),
]);
const community = communityResponse.items;
const community = communityResponse.items.slice(0, COMMUNITY_RESULT_SIZE);
// Mix: 1 provisioned + 2 community
const mixed: MixedDashboard[] = [];
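The interleaving itself falls between the hunks above and below; as a rough sketch only (mixSuggestions and the simplified MixedDashboard shape are assumed names, not the actual implementation), mixing one provisioned dashboard with up to two community dashboards could look like this:

import { PluginDashboard } from 'app/types/plugins';
import { GnetDashboard } from './types';

// Hypothetical sketch of the "1 provisioned + 2 community" mix; the real code is elided in this diff.
type MixedDashboard =
  | { kind: 'provisioned'; dashboard: PluginDashboard }
  | { kind: 'community'; dashboard: GnetDashboard };

function mixSuggestions(provisioned: PluginDashboard[], community: GnetDashboard[]): MixedDashboard[] {
  const mixed: MixedDashboard[] = [];
  // Take at most one provisioned dashboard for the preview row
  if (provisioned.length > 0) {
    mixed.push({ kind: 'provisioned', dashboard: provisioned[0] });
  }
  // Fill the remaining preview slots with up to two community dashboards
  for (const dashboard of community.slice(0, 2)) {
    mixed.push({ kind: 'community', dashboard });
  }
  return mixed;
}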
@@ -130,7 +132,7 @@ export const SuggestedDashboards = ({ datasourceUid }: Props) => {
// Determine if there are more dashboards available beyond what we're showing
// Show "View all" if more than 1 provisioned dashboard exists OR more community dashboards came back than the preview displays
const hasMoreDashboards = provisioned.length > 1 || community.length >= SUGGESTED_COMMUNITY_PAGE_SIZE;
const hasMoreDashboards = provisioned.length > 1 || community.length > MAX_SUGGESTED_DASHBOARDS_PREVIEW;
return { dashboards: mixed, hasMoreDashboards };
} catch (error) {
@@ -233,35 +235,38 @@ export const SuggestedDashboards = ({ datasourceUid }: Props) => {
locationService.push(`/dashboard/template?${params.toString()}`);
};
const onPreviewCommunityDashboard = (dashboard: GnetDashboard) => {
if (!datasourceUid) {
return;
}
const [{ error: isPreviewCommunityDashboardError }, onPreviewCommunityDashboard] = useAsyncFn(
async (dashboard: GnetDashboard) => {
if (!datasourceUid) {
return;
}
const ds = getDataSourceSrv().getInstanceSettings(datasourceUid);
if (!ds) {
return;
}
const ds = getDataSourceSrv().getInstanceSettings(datasourceUid);
if (!ds) {
return;
}
// Track item click
DashboardLibraryInteractions.itemClicked({
contentKind: CONTENT_KINDS.COMMUNITY_DASHBOARD,
datasourceTypes: [ds.type],
libraryItemId: String(dashboard.id),
libraryItemTitle: dashboard.name,
sourceEntryPoint: SOURCE_ENTRY_POINTS.DATASOURCE_PAGE,
eventLocation: EVENT_LOCATIONS.EMPTY_DASHBOARD,
discoveryMethod: DISCOVERY_METHODS.BROWSE,
});
// Track item click
DashboardLibraryInteractions.itemClicked({
contentKind: CONTENT_KINDS.COMMUNITY_DASHBOARD,
datasourceTypes: [ds.type],
libraryItemId: String(dashboard.id),
libraryItemTitle: dashboard.name,
sourceEntryPoint: SOURCE_ENTRY_POINTS.DATASOURCE_PAGE,
eventLocation: EVENT_LOCATIONS.EMPTY_DASHBOARD,
discoveryMethod: DISCOVERY_METHODS.BROWSE,
});
onUseCommunityDashboard({
dashboard,
datasourceUid,
datasourceType: ds.type,
eventLocation: EVENT_LOCATIONS.EMPTY_DASHBOARD,
onShowMapping: onShowMapping,
});
};
await onUseCommunityDashboard({
dashboard,
datasourceUid,
datasourceType: ds.type,
eventLocation: EVENT_LOCATIONS.EMPTY_DASHBOARD,
onShowMapping: onShowMapping,
});
},
[datasourceUid, onShowMapping]
);
// Don't render if no dashboards or still loading
if (!loading && (!result || result.dashboards.length === 0)) {
@@ -297,7 +302,16 @@ export const SuggestedDashboards = ({ datasourceUid }: Props) => {
</Button>
)}
</div>
{isPreviewCommunityDashboardError && (
<div>
<Alert
title={t('dashboard-library.community-error-title', 'Error loading community dashboard')}
severity="error"
>
<Trans i18nKey="dashboard-library.community-error-description">Failed to load community dashboard.</Trans>
</Alert>
</div>
)}
<Grid
gap={4}
columns={{

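The change above moves the community-dashboard preview handler into react-use's useAsyncFn so a rejected load can be surfaced through an Alert instead of failing silently. A minimal, self-contained sketch of that pattern (component and prop names here are illustrative, not Grafana's):

import { useAsyncFn } from 'react-use';
import { Alert, Button } from '@grafana/ui';

// Illustrative only: PreviewButton and loadDashboard are made-up names demonstrating the
// useAsyncFn + Alert pattern applied in the component above.
export function PreviewButton({ loadDashboard }: { loadDashboard: () => Promise<void> }) {
  // useAsyncFn returns [state, trigger]; state.error is populated when the async callback rejects.
  const [{ loading, error }, onPreview] = useAsyncFn(async () => {
    await loadDashboard();
  }, [loadDashboard]);

  return (
    <>
      {error && (
        <Alert title="Error loading community dashboard" severity="error">
          Failed to load community dashboard.
        </Alert>
      )}
      <Button onClick={onPreview} disabled={loading}>
        Preview
      </Button>
    </>
  );
}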
View File

@@ -0,0 +1,101 @@
import { screen } from '@testing-library/react';
import { render } from 'test/test-utils';
import { DashboardJson } from 'app/features/manage-dashboards/types';
import { SuggestedDashboardsModal } from './SuggestedDashboardsModal';
import { CONTENT_KINDS, EVENT_LOCATIONS } from './interactions';
jest.mock('./DashboardLibrarySection', () => ({
DashboardLibrarySection: () => <div data-testid="dashboard-library-section">Dashboard Library Section</div>,
}));
jest.mock('./CommunityDashboardSection', () => ({
CommunityDashboardSection: () => <div data-testid="community-dashboard-section">Community Dashboard Section</div>,
}));
jest.mock('./CommunityDashboardMappingForm', () => ({
CommunityDashboardMappingForm: () => (
<div data-testid="community-dashboard-mapping-form">Community Dashboard Mapping Form</div>
),
}));
describe('SuggestedDashboardsModal', () => {
const defaultProps = {
isOpen: true,
onDismiss: jest.fn(),
};
beforeEach(() => {
jest.clearAllMocks();
});
it('should render when isOpen is true', () => {
render(<SuggestedDashboardsModal {...defaultProps} />);
expect(screen.getByRole('dialog')).toBeInTheDocument();
});
it('should not render when isOpen is false', () => {
render(<SuggestedDashboardsModal {...defaultProps} isOpen={false} />);
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
});
it('should render both tabs: Data-source provided and Community', () => {
render(<SuggestedDashboardsModal {...defaultProps} />);
expect(screen.getByRole('tab', { name: 'Data-source provided' })).toBeInTheDocument();
expect(screen.getByRole('tab', { name: 'Community' })).toBeInTheDocument();
});
it('should render tablist with both tabs', () => {
render(<SuggestedDashboardsModal {...defaultProps} />);
const tablist = screen.getByRole('tablist');
expect(tablist).toBeInTheDocument();
const tabs = screen.getAllByRole('tab');
expect(tabs).toHaveLength(2);
expect(tabs[0]).toHaveTextContent('Data-source provided');
expect(tabs[1]).toHaveTextContent('Community');
});
it('should render DashboardLibrarySection when activeView is datasource', () => {
render(<SuggestedDashboardsModal {...defaultProps} defaultTab="datasource" />);
expect(screen.getByTestId('dashboard-library-section')).toBeInTheDocument();
expect(screen.queryByTestId('community-dashboard-section')).not.toBeInTheDocument();
expect(screen.queryByTestId('community-dashboard-mapping-form')).not.toBeInTheDocument();
});
it('should render CommunityDashboardSection when activeView is community', () => {
render(<SuggestedDashboardsModal {...defaultProps} defaultTab="community" />);
expect(screen.getByTestId('community-dashboard-section')).toBeInTheDocument();
expect(screen.queryByTestId('dashboard-library-section')).not.toBeInTheDocument();
expect(screen.queryByTestId('community-dashboard-mapping-form')).not.toBeInTheDocument();
});
it('should render CommunityDashboardMappingForm when activeView is mapping', () => {
render(
<SuggestedDashboardsModal
{...defaultProps}
initialMappingContext={{
dashboardName: 'Test Dashboard',
dashboardJson: { title: 'Test Dashboard', panels: [], schemaVersion: 41 } as DashboardJson,
unmappedDsInputs: [],
constantInputs: [],
existingMappings: [],
onInterpolateAndNavigate: jest.fn(),
eventLocation: EVENT_LOCATIONS.MODAL_COMMUNITY_TAB,
contentKind: CONTENT_KINDS.COMMUNITY_DASHBOARD,
}}
/>
);
expect(screen.getByTestId('community-dashboard-mapping-form')).toBeInTheDocument();
expect(screen.queryByTestId('dashboard-library-section')).not.toBeInTheDocument();
expect(screen.queryByTestId('community-dashboard-section')).not.toBeInTheDocument();
});
});

View File

@@ -3,6 +3,7 @@ import { DashboardJson } from 'app/features/manage-dashboards/types';
import { PluginDashboard } from 'app/types/plugins';
import { GnetDashboard } from '../types';
import { createMockGnetDashboard, createMockPluginDashboard } from '../utils/test-utils';
import {
fetchCommunityDashboard,
@@ -14,8 +15,16 @@ import {
jest.mock('@grafana/runtime', () => ({
getBackendSrv: jest.fn(),
reportInteraction: jest.fn(),
}));
jest.mock('../interactions', () => ({
...jest.requireActual('../interactions'),
DashboardLibraryInteractions: {
...jest.requireActual('../interactions').DashboardLibraryInteractions,
communityDashboardFiltered: jest.fn(),
},
}));
const mockGetBackendSrv = getBackendSrv as jest.MockedFunction<typeof getBackendSrv>;
// Helper to create mock BackendSrv
@@ -26,31 +35,9 @@ const createMockBackendSrv = (overrides: Partial<BackendSrv> = {}): BackendSrv =
}) as unknown as BackendSrv;
// Helper functions for creating mock objects
const createMockGnetDashboard = (overrides: Partial<GnetDashboard> = {}): GnetDashboard => ({
id: 1,
name: 'Test Dashboard',
description: 'Test Description',
downloads: 100,
datasource: 'Prometheus',
...overrides,
});
const createMockPluginDashboard = (overrides: Partial<PluginDashboard> = {}): PluginDashboard => ({
dashboardId: 1,
uid: 'dash-uid',
title: 'Test Dashboard',
pluginId: 'prometheus',
path: 'dashboards/test.json',
description: 'Test plugin dashboard',
imported: false,
importedRevision: 0,
importedUri: '',
importedUrl: '',
removed: false,
revision: 1,
slug: 'test-dashboard',
...overrides,
});
const createMockGnetDashboardWithDownloads = (overrides: Partial<GnetDashboard> = {}): GnetDashboard => {
return createMockGnetDashboard({ ...overrides, downloads: 10000 });
};
const defaultFetchParams: FetchCommunityDashboardsParams = {
orderBy: 'downloads',
@@ -80,8 +67,54 @@ describe('dashboardLibraryApi', () => {
});
describe('fetchCommunityDashboards', () => {
describe('filterNotSafeDashboards', () => {
it('should filter out dashboards with panel types that can contain JavaScript code', async () => {
const safeDashboard = createMockGnetDashboardWithDownloads({ id: 1 });
const mockDashboards = [
safeDashboard,
createMockGnetDashboardWithDownloads({ id: 2, panelTypeSlugs: ['ae3e-plotly-panel'] }),
];
const mockResponse = {
page: 1,
pages: 5,
items: mockDashboards,
};
mockGet.mockResolvedValue(mockResponse);
const result = await fetchCommunityDashboards(defaultFetchParams);
expect(result).toEqual({
page: 1,
pages: 5,
items: [safeDashboard],
});
});
it('should filter out dashboards with low downloads', async () => {
const safeDashboard = createMockGnetDashboardWithDownloads({ id: 1 });
const mockDashboards = [safeDashboard, createMockGnetDashboard({ id: 2, downloads: 999 })];
const mockResponse = {
page: 1,
pages: 5,
items: mockDashboards,
};
mockGet.mockResolvedValue(mockResponse);
const result = await fetchCommunityDashboards(defaultFetchParams);
expect(result).toEqual({
page: 1,
pages: 5,
items: [safeDashboard],
});
});
});
it('should fetch community dashboards with correct query parameters', async () => {
const mockDashboards = [createMockGnetDashboard({ id: 1 }), createMockGnetDashboard({ id: 2 })];
const mockDashboards = [
createMockGnetDashboardWithDownloads({ id: 1 }),
createMockGnetDashboardWithDownloads({ id: 2 }),
];
const mockResponse = {
page: 1,
pages: 5,
@@ -93,7 +126,7 @@ describe('dashboardLibraryApi', () => {
const result = await fetchCommunityDashboards(defaultFetchParams);
expect(mockGet).toHaveBeenCalledWith(
'/api/gnet/dashboards?orderBy=downloads&direction=desc&page=1&pageSize=10&includeLogo=1&includeScreenshots=true',
'/api/gnet/dashboards?orderBy=downloads&direction=desc&page=1&pageSize=10&includeLogo=1&includeScreenshots=true&includePanelTypeSlugs=true',
undefined,
undefined,
{ showErrorAlert: false }
@@ -154,7 +187,7 @@ describe('dashboardLibraryApi', () => {
});
it('should use fallback values when page/pages are missing', async () => {
const items = [createMockGnetDashboard()];
const items = [createMockGnetDashboardWithDownloads()];
mockGet.mockResolvedValue({
items,

View File

@@ -2,7 +2,35 @@ import { getBackendSrv } from '@grafana/runtime';
import { DashboardJson } from 'app/features/manage-dashboards/types';
import { PluginDashboard } from 'app/types/plugins';
import { GnetDashboardsResponse, Link } from '../types';
import { GnetDashboard, GnetDashboardsResponse, Link } from '../types';
/**
* Panel types that are known to allow JavaScript code execution.
* These panels are filtered out due to security concerns.
*/
const UNSAFE_PANEL_TYPE_SLUGS = [
'aceiot-svg-panel',
'ae3e-plotly-panel',
'gapit-htmlgraphics-panel',
'marcusolsson-dynamictext-panel',
'volkovlabs-echarts-panel',
'volkovlabs-form-panel',
];
/**
* Minimum number of downloads required for a community dashboard to be shown as a suggestion.
*
* Rationale:
* - Dashboards with higher download counts have been vetted by a larger community
* - This acts as a heuristic for quality and trustworthiness
* - Reduces risk of malicious or poorly-maintained dashboards
*
* Trade-offs:
* - May filter out legitimate but less popular dashboards
* - Newer dashboards with good content but low download counts won't be shown
* - The threshold of 10,000 is somewhat arbitrary and may need tuning based on ecosystem growth
*/
const MIN_DOWNLOADS_FILTER = 10000;
/**
* Parameters for fetching community dashboards from Grafana.com
@@ -56,6 +84,7 @@ export async function fetchCommunityDashboards(
pageSize: params.pageSize.toString(),
includeLogo: params.includeLogo ? '1' : '0',
includeScreenshots: params.includeScreenshots ? 'true' : 'false',
includePanelTypeSlugs: 'true',
});
if (params.dataSourceSlugIn) {
@@ -69,13 +98,13 @@ export async function fetchCommunityDashboards(
showErrorAlert: false,
});
// Grafana.com API returns format: { page: number, pages: number, items: GnetDashboard[] }
// We keep that shape, dropping unsafe or low-download dashboards before returning
if (result && Array.isArray(result.items)) {
const dashboards = filterNonSafeDashboards(result.items);
return {
page: result.page || params.page,
pages: result.pages || 1,
items: result.items,
items: dashboards,
};
}
@@ -109,3 +138,20 @@ export async function fetchProvisionedDashboards(datasourceType: string): Promis
return [];
}
}
// Only keep dashboards with at least MIN_DOWNLOADS_FILTER downloads and without panel types
// that can embed JavaScript. Results arrive from the API already ordered by download count.
const filterNonSafeDashboards = (dashboards: GnetDashboard[]): GnetDashboard[] => {
return dashboards.filter((item: GnetDashboard) => {
const hasUnsafePanelTypes = item.panelTypeSlugs?.some((slug: string) => UNSAFE_PANEL_TYPE_SLUGS.includes(slug));
const hasLowDownloads = typeof item.downloads === 'number' && item.downloads < MIN_DOWNLOADS_FILTER;
if (hasUnsafePanelTypes || hasLowDownloads) {
console.warn(
`Community dashboard ${item.id} ${item.name} filtered out due to low downloads ${item.downloads} or panel types ${item.panelTypeSlugs?.join(', ')} that can embed JavaScript`
);
return false;
}
return true;
});
};
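To make the effect of these rules concrete, here is a small usage sketch; isSuggestible is a hypothetical standalone predicate that mirrors the checks in filterNonSafeDashboards (which is not exported), applied to illustrative data:

// Hypothetical predicate mirroring filterNonSafeDashboards, for illustration only
type CandidateDashboard = { id: number; name: string; downloads?: number; panelTypeSlugs?: string[] };

const isSuggestible = (d: CandidateDashboard): boolean => {
  const hasUnsafePanelTypes = d.panelTypeSlugs?.some((slug) => UNSAFE_PANEL_TYPE_SLUGS.includes(slug));
  const hasLowDownloads = typeof d.downloads === 'number' && d.downloads < MIN_DOWNLOADS_FILTER;
  return !(hasUnsafePanelTypes || hasLowDownloads);
};

const candidates: CandidateDashboard[] = [
  { id: 1, name: 'Popular and safe', downloads: 25000, panelTypeSlugs: ['timeseries'] }, // kept
  { id: 2, name: 'Too few downloads', downloads: 999 }, // dropped: below MIN_DOWNLOADS_FILTER
  { id: 3, name: 'Unsafe panel type', downloads: 50000, panelTypeSlugs: ['ae3e-plotly-panel'] }, // dropped
];

console.log(candidates.filter(isSuggestible).map((d) => d.id)); // [1]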

View File

@@ -8,6 +8,7 @@ export const EVENT_LOCATIONS = {
MODAL_PROVISIONED_TAB: 'suggested_dashboards_modal_provisioned_tab',
MODAL_COMMUNITY_TAB: 'suggested_dashboards_modal_community_tab',
BROWSE_DASHBOARDS_PAGE: 'browse_dashboards_page',
COMMUNITY_DASHBOARD_LOADED: 'community_dashboard_loaded',
} as const;
export const CONTENT_KINDS = {

Some files were not shown because too many files have changed in this diff.