Compare commits

...

11 Commits

Author SHA1 Message Date
Gareth Dawson
045b33c040 fix tests 2026-01-07 19:57:48 +09:00
Gareth Dawson
32023fa243 close response body 2026-01-06 23:18:30 +09:00
Gareth Dawson
6a0f6eee30 OpenTSDB: Add error source 2026-01-06 21:38:36 +09:00
Akshat Sinha
c05e1bd43a Auth: Cleanup duplicate isGroupMember methods from OAuth connectors (#115786)
Remove duplicate isGroupMember methods from OAuth connectors
2026-01-05 11:35:28 +01:00
Gabriel MABILLE
93566ce4ef Chore: Unify token exchange round trippers (#115609)
* Chore: Unify token exchange round trippers

* Remove the conditional provider for now

* Remove unnecessary strategy

* test cleanup

* Lint
2026-01-05 11:23:35 +01:00
Mariell Hoversholm
76a6db818e Frontend: Remove bootstrap (#115813) 2026-01-05 11:07:23 +01:00
Will Browne
1a0bc39ec3 Plugins: Remove some pkg/infra/* dependencies from pkg/plugins (#115795)
* tackle some /pkg/infra/* packages

* run make update-workspace

* add owner for slugify dep
2026-01-05 09:42:47 +00:00
Gareth
3b3e87ff89 OpenTSDB: Migrate frontend requests to data source backend (#115221)
* OpenTSDB: Migrate metadata queries to data source backend

* OpenTSDB: Migrate annotations to the data source backend

* return errors for failed unmarshal

* remove trailing / from metadata requests

* remove console logs
2026-01-05 18:35:19 +09:00
Stephanie Hingtgen
eb2a390425 Unistore: Prevent deadlock on startup errors (#115799)
Some checks failed
Frontend performance tests / performance-tests (push) Has been cancelled
Actionlint / Lint GitHub Actions files (push) Has been cancelled
Backend Code Checks / Detect whether code changed (push) Has been cancelled
Backend Code Checks / Validate Backend Configs (push) Has been cancelled
Backend Unit Tests / Detect whether code changed (push) Has been cancelled
Backend Unit Tests / Grafana (1/8) (push) Has been cancelled
Backend Unit Tests / Grafana (2/8) (push) Has been cancelled
Backend Unit Tests / Grafana (3/8) (push) Has been cancelled
Backend Unit Tests / Grafana (4/8) (push) Has been cancelled
Backend Unit Tests / Grafana (5/8) (push) Has been cancelled
Backend Unit Tests / Grafana (6/8) (push) Has been cancelled
Backend Unit Tests / Grafana (7/8) (push) Has been cancelled
Backend Unit Tests / Grafana (8/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (1/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (2/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (3/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (4/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (5/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (6/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (7/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (8/8) (push) Has been cancelled
Backend Unit Tests / All backend unit tests complete (push) Has been cancelled
CodeQL checks / Detect whether code changed (push) Has been cancelled
CodeQL checks / Analyze (actions) (push) Has been cancelled
CodeQL checks / Analyze (go) (push) Has been cancelled
CodeQL checks / Analyze (javascript) (push) Has been cancelled
Lint Frontend / Detect whether code changed (push) Has been cancelled
Lint Frontend / Lint (push) Has been cancelled
Lint Frontend / Typecheck (push) Has been cancelled
Lint Frontend / Verify API clients (push) Has been cancelled
Lint Frontend / Verify API clients (enterprise) (push) Has been cancelled
golangci-lint / Detect whether code changed (push) Has been cancelled
golangci-lint / go-fmt (push) Has been cancelled
golangci-lint / lint-go (push) Has been cancelled
Verify i18n / verify-i18n (push) Has been cancelled
End-to-end tests / Detect whether code changed (push) Has been cancelled
End-to-end tests / Build & Package Grafana (push) Has been cancelled
End-to-end tests / Build E2E test runner (push) Has been cancelled
End-to-end tests / push-docker-image (push) Has been cancelled
End-to-end tests / dashboards-suite (old arch) (push) Has been cancelled
End-to-end tests / panels-suite (old arch) (push) Has been cancelled
End-to-end tests / smoke-tests-suite (old arch) (push) Has been cancelled
End-to-end tests / various-suite (old arch) (push) Has been cancelled
End-to-end tests / Verify Storybook (Playwright) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (1/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (2/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (3/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (4/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (5/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (6/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (7/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (8/8) (push) Has been cancelled
End-to-end tests / run-azure-monitor-e2e (push) Has been cancelled
End-to-end tests / All Playwright tests complete (push) Has been cancelled
End-to-end tests / A11y test (push) Has been cancelled
End-to-end tests / Publish metrics (push) Has been cancelled
End-to-end tests / All E2E tests complete (push) Has been cancelled
Frontend tests / Detect whether code changed (push) Has been cancelled
Frontend tests / Unit tests (1 / 16) (push) Has been cancelled
Frontend tests / Unit tests (10 / 16) (push) Has been cancelled
Frontend tests / Unit tests (11 / 16) (push) Has been cancelled
Frontend tests / Unit tests (12 / 16) (push) Has been cancelled
Frontend tests / Unit tests (13 / 16) (push) Has been cancelled
Frontend tests / Unit tests (14 / 16) (push) Has been cancelled
Frontend tests / Unit tests (15 / 16) (push) Has been cancelled
Frontend tests / Unit tests (16 / 16) (push) Has been cancelled
Frontend tests / Unit tests (2 / 16) (push) Has been cancelled
Frontend tests / Unit tests (3 / 16) (push) Has been cancelled
Frontend tests / Unit tests (4 / 16) (push) Has been cancelled
Frontend tests / Unit tests (5 / 16) (push) Has been cancelled
Frontend tests / Unit tests (6 / 16) (push) Has been cancelled
Frontend tests / Unit tests (7 / 16) (push) Has been cancelled
Frontend tests / Unit tests (8 / 16) (push) Has been cancelled
Frontend tests / Unit tests (9 / 16) (push) Has been cancelled
Frontend tests / Decoupled plugin tests (push) Has been cancelled
Frontend tests / Packages unit tests (push) Has been cancelled
Frontend tests / All frontend unit tests complete (push) Has been cancelled
Frontend tests / Devenv frontend-service build (push) Has been cancelled
Integration Tests / Detect whether code changed (push) Has been cancelled
Integration Tests / Sqlite (1/4) (push) Has been cancelled
Integration Tests / Sqlite (2/4) (push) Has been cancelled
Integration Tests / Sqlite (3/4) (push) Has been cancelled
Integration Tests / Sqlite (4/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (1/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (2/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (3/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (4/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (profiled) (push) Has been cancelled
Integration Tests / MySQL (1/16) (push) Has been cancelled
Integration Tests / MySQL (10/16) (push) Has been cancelled
Integration Tests / MySQL (11/16) (push) Has been cancelled
Integration Tests / MySQL (12/16) (push) Has been cancelled
Integration Tests / MySQL (13/16) (push) Has been cancelled
Integration Tests / MySQL (14/16) (push) Has been cancelled
Integration Tests / MySQL (15/16) (push) Has been cancelled
Integration Tests / MySQL (16/16) (push) Has been cancelled
Integration Tests / MySQL (2/16) (push) Has been cancelled
Integration Tests / MySQL (3/16) (push) Has been cancelled
Integration Tests / MySQL (4/16) (push) Has been cancelled
Integration Tests / MySQL (5/16) (push) Has been cancelled
Integration Tests / MySQL (6/16) (push) Has been cancelled
Integration Tests / MySQL (7/16) (push) Has been cancelled
Integration Tests / MySQL (8/16) (push) Has been cancelled
Integration Tests / MySQL (9/16) (push) Has been cancelled
Integration Tests / Postgres (1/16) (push) Has been cancelled
Integration Tests / Postgres (10/16) (push) Has been cancelled
Integration Tests / Postgres (11/16) (push) Has been cancelled
Integration Tests / Postgres (12/16) (push) Has been cancelled
Integration Tests / Postgres (13/16) (push) Has been cancelled
Integration Tests / Postgres (14/16) (push) Has been cancelled
Integration Tests / Postgres (15/16) (push) Has been cancelled
Integration Tests / Postgres (16/16) (push) Has been cancelled
Integration Tests / Postgres (2/16) (push) Has been cancelled
Integration Tests / Postgres (3/16) (push) Has been cancelled
Integration Tests / Postgres (4/16) (push) Has been cancelled
Integration Tests / Postgres (5/16) (push) Has been cancelled
Integration Tests / Postgres (6/16) (push) Has been cancelled
Integration Tests / Postgres (7/16) (push) Has been cancelled
Integration Tests / Postgres (8/16) (push) Has been cancelled
Integration Tests / Postgres (9/16) (push) Has been cancelled
Integration Tests / All backend integration tests complete (push) Has been cancelled
Reject GitHub secrets / reject-gh-secrets (push) Has been cancelled
Build Release Packages / setup (push) Has been cancelled
Build Release Packages / Dispatch grafana-enterprise build (push) Has been cancelled
Build Release Packages / / darwin-amd64 (push) Has been cancelled
Build Release Packages / / darwin-arm64 (push) Has been cancelled
Build Release Packages / / linux-amd64 (push) Has been cancelled
Build Release Packages / / linux-armv6 (push) Has been cancelled
Build Release Packages / / linux-armv7 (push) Has been cancelled
Build Release Packages / / linux-arm64 (push) Has been cancelled
Build Release Packages / / linux-s390x (push) Has been cancelled
Build Release Packages / / windows-amd64 (push) Has been cancelled
Build Release Packages / / windows-arm64 (push) Has been cancelled
Build Release Packages / Upload artifacts (push) Has been cancelled
Build Release Packages / publish-dockerhub (push) Has been cancelled
Build Release Packages / Dispatch publish NPM canaries (push) Has been cancelled
Build Release Packages / notify-pr (push) Has been cancelled
Run dashboard schema v2 e2e / dashboard-schema-v2-e2e (push) Has been cancelled
Shellcheck / Shellcheck scripts (push) Has been cancelled
Run Storybook a11y tests / Detect whether code changed (push) Has been cancelled
Run Storybook a11y tests / Run Storybook a11y tests (light theme) (push) Has been cancelled
Run Storybook a11y tests / Run Storybook a11y tests (dark theme) (push) Has been cancelled
Swagger generated code / Detect whether code changed (push) Has been cancelled
Swagger generated code / Verify committed API specs match (push) Has been cancelled
Dispatch sync to mirror / dispatch-job (push) Has been cancelled
trigger-dashboard-search-e2e / trigger-search-e2e (push) Has been cancelled
2026-01-05 00:51:23 -07:00
Kristina Demeshchik
967ba3acaf Dashboard: Fix dashboardUID in conversion logs to use actual dashboard UID (#115797)
Some checks failed
Frontend performance tests / performance-tests (push) Has been cancelled
Relyance Compliance Inspection / relyance-compliance-inspector (push) Has been cancelled
Crowdin Download Action / download-sources-from-crowdin (push) Has been cancelled
Close stale issues and PRs / stale (push) Has been cancelled
Actionlint / Lint GitHub Actions files (push) Has been cancelled
Backend Code Checks / Detect whether code changed (push) Has been cancelled
Backend Unit Tests / Detect whether code changed (push) Has been cancelled
CodeQL checks / Detect whether code changed (push) Has been cancelled
Lint Frontend / Detect whether code changed (push) Has been cancelled
Lint Frontend / Verify API clients (push) Has been cancelled
Lint Frontend / Verify API clients (enterprise) (push) Has been cancelled
Verify i18n / verify-i18n (push) Has been cancelled
Documentation / Build & Verify Docs (push) Has been cancelled
End-to-end tests / Detect whether code changed (push) Has been cancelled
Frontend tests / Detect whether code changed (push) Has been cancelled
Integration Tests / Detect whether code changed (push) Has been cancelled
publish-technical-documentation-next / sync (push) Has been cancelled
Reject GitHub secrets / reject-gh-secrets (push) Has been cancelled
Build Release Packages / setup (push) Has been cancelled
Run dashboard schema v2 e2e / dashboard-schema-v2-e2e (push) Has been cancelled
Shellcheck / Shellcheck scripts (push) Has been cancelled
Run Storybook a11y tests / Detect whether code changed (push) Has been cancelled
Swagger generated code / Detect whether code changed (push) Has been cancelled
Dispatch sync to mirror / dispatch-job (push) Has been cancelled
Update Schema Types / bundle-schema-types (push) Has been cancelled
Backend Code Checks / Validate Backend Configs (push) Has been cancelled
Backend Unit Tests / Grafana (1/8) (push) Has been cancelled
Backend Unit Tests / Grafana (2/8) (push) Has been cancelled
Backend Unit Tests / Grafana (3/8) (push) Has been cancelled
Backend Unit Tests / Grafana (4/8) (push) Has been cancelled
Backend Unit Tests / Grafana (5/8) (push) Has been cancelled
Backend Unit Tests / Grafana (6/8) (push) Has been cancelled
Backend Unit Tests / Grafana (7/8) (push) Has been cancelled
Backend Unit Tests / Grafana (8/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (1/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (2/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (3/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (4/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (5/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (6/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (7/8) (push) Has been cancelled
Backend Unit Tests / Grafana Enterprise (8/8) (push) Has been cancelled
Backend Unit Tests / All backend unit tests complete (push) Has been cancelled
CodeQL checks / Analyze (actions) (push) Has been cancelled
CodeQL checks / Analyze (go) (push) Has been cancelled
CodeQL checks / Analyze (javascript) (push) Has been cancelled
Lint Frontend / Lint (push) Has been cancelled
Lint Frontend / Typecheck (push) Has been cancelled
End-to-end tests / Build & Package Grafana (push) Has been cancelled
End-to-end tests / Build E2E test runner (push) Has been cancelled
End-to-end tests / push-docker-image (push) Has been cancelled
End-to-end tests / dashboards-suite (old arch) (push) Has been cancelled
End-to-end tests / panels-suite (old arch) (push) Has been cancelled
End-to-end tests / smoke-tests-suite (old arch) (push) Has been cancelled
End-to-end tests / various-suite (old arch) (push) Has been cancelled
End-to-end tests / Verify Storybook (Playwright) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (1/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (2/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (3/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (4/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (5/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (6/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (7/8) (push) Has been cancelled
End-to-end tests / Playwright E2E tests (8/8) (push) Has been cancelled
End-to-end tests / run-azure-monitor-e2e (push) Has been cancelled
End-to-end tests / All Playwright tests complete (push) Has been cancelled
End-to-end tests / A11y test (push) Has been cancelled
End-to-end tests / Publish metrics (push) Has been cancelled
End-to-end tests / All E2E tests complete (push) Has been cancelled
Frontend tests / Unit tests (1 / 16) (push) Has been cancelled
Frontend tests / Unit tests (10 / 16) (push) Has been cancelled
Frontend tests / Unit tests (11 / 16) (push) Has been cancelled
Frontend tests / Unit tests (12 / 16) (push) Has been cancelled
Frontend tests / Unit tests (13 / 16) (push) Has been cancelled
Frontend tests / Unit tests (14 / 16) (push) Has been cancelled
Frontend tests / Unit tests (15 / 16) (push) Has been cancelled
Frontend tests / Unit tests (16 / 16) (push) Has been cancelled
Frontend tests / Unit tests (2 / 16) (push) Has been cancelled
Frontend tests / Unit tests (3 / 16) (push) Has been cancelled
Frontend tests / Unit tests (4 / 16) (push) Has been cancelled
Frontend tests / Unit tests (5 / 16) (push) Has been cancelled
Frontend tests / Unit tests (6 / 16) (push) Has been cancelled
Frontend tests / Unit tests (7 / 16) (push) Has been cancelled
Frontend tests / Unit tests (8 / 16) (push) Has been cancelled
Frontend tests / Unit tests (9 / 16) (push) Has been cancelled
Frontend tests / Decoupled plugin tests (push) Has been cancelled
Frontend tests / Packages unit tests (push) Has been cancelled
Frontend tests / All frontend unit tests complete (push) Has been cancelled
Frontend tests / Devenv frontend-service build (push) Has been cancelled
Integration Tests / Sqlite (1/4) (push) Has been cancelled
Integration Tests / Sqlite (2/4) (push) Has been cancelled
Integration Tests / Sqlite (3/4) (push) Has been cancelled
Integration Tests / Sqlite (4/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (1/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (2/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (3/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (4/4) (push) Has been cancelled
Integration Tests / Sqlite Without CGo (profiled) (push) Has been cancelled
Integration Tests / MySQL (1/16) (push) Has been cancelled
Integration Tests / MySQL (10/16) (push) Has been cancelled
Integration Tests / MySQL (11/16) (push) Has been cancelled
Integration Tests / MySQL (12/16) (push) Has been cancelled
Integration Tests / MySQL (13/16) (push) Has been cancelled
Integration Tests / MySQL (14/16) (push) Has been cancelled
Integration Tests / MySQL (15/16) (push) Has been cancelled
Integration Tests / MySQL (16/16) (push) Has been cancelled
Integration Tests / MySQL (2/16) (push) Has been cancelled
Integration Tests / MySQL (3/16) (push) Has been cancelled
Integration Tests / MySQL (4/16) (push) Has been cancelled
Integration Tests / MySQL (5/16) (push) Has been cancelled
Integration Tests / MySQL (6/16) (push) Has been cancelled
Integration Tests / MySQL (7/16) (push) Has been cancelled
Integration Tests / MySQL (8/16) (push) Has been cancelled
Integration Tests / MySQL (9/16) (push) Has been cancelled
Integration Tests / Postgres (1/16) (push) Has been cancelled
Integration Tests / Postgres (10/16) (push) Has been cancelled
Integration Tests / Postgres (11/16) (push) Has been cancelled
Integration Tests / Postgres (12/16) (push) Has been cancelled
Integration Tests / Postgres (13/16) (push) Has been cancelled
Integration Tests / Postgres (14/16) (push) Has been cancelled
Integration Tests / Postgres (15/16) (push) Has been cancelled
Integration Tests / Postgres (16/16) (push) Has been cancelled
Integration Tests / Postgres (2/16) (push) Has been cancelled
Integration Tests / Postgres (3/16) (push) Has been cancelled
Integration Tests / Postgres (4/16) (push) Has been cancelled
Integration Tests / Postgres (5/16) (push) Has been cancelled
Integration Tests / Postgres (6/16) (push) Has been cancelled
Integration Tests / Postgres (7/16) (push) Has been cancelled
Integration Tests / Postgres (8/16) (push) Has been cancelled
Integration Tests / Postgres (9/16) (push) Has been cancelled
Integration Tests / All backend integration tests complete (push) Has been cancelled
Build Release Packages / Dispatch grafana-enterprise build (push) Has been cancelled
Build Release Packages / / darwin-amd64 (push) Has been cancelled
Build Release Packages / / darwin-arm64 (push) Has been cancelled
Build Release Packages / / linux-amd64 (push) Has been cancelled
Build Release Packages / / linux-armv6 (push) Has been cancelled
Build Release Packages / / linux-armv7 (push) Has been cancelled
Build Release Packages / / linux-arm64 (push) Has been cancelled
Build Release Packages / / linux-s390x (push) Has been cancelled
Build Release Packages / / windows-amd64 (push) Has been cancelled
Build Release Packages / / windows-arm64 (push) Has been cancelled
Build Release Packages / Upload artifacts (push) Has been cancelled
Build Release Packages / publish-dockerhub (push) Has been cancelled
Build Release Packages / Dispatch publish NPM canaries (push) Has been cancelled
Build Release Packages / notify-pr (push) Has been cancelled
Run Storybook a11y tests / Run Storybook a11y tests (light theme) (push) Has been cancelled
Run Storybook a11y tests / Run Storybook a11y tests (dark theme) (push) Has been cancelled
Swagger generated code / Verify committed API specs match (push) Has been cancelled
/ Alerting Swagger spec generation cron job (push) Has been cancelled
Update `make docs` procedure / main (push) Has been cancelled
Clean up orphaned branches / cleanup-branches (push) Has been cancelled
update loggers
2026-01-02 13:12:04 -05:00
Will Browne
105b407629 Plugins: Sync validator plugin.json schema copy edits back to source of truth (#115790)
sync validator copy edits back to source of truth
2026-01-02 15:52:10 +00:00
40 changed files with 1103 additions and 1698 deletions

View File

@@ -54,6 +54,7 @@ require (
github.com/Azure/go-ntlmssp v0.0.0-20220621081337-cb9428e4ac1e // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.5.0 // indirect
github.com/BurntSushi/toml v1.5.0 // indirect
github.com/Machiel/slugify v1.0.1 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver v1.5.0 // indirect
github.com/Masterminds/sprig/v3 v3.3.0 // indirect

View File

@@ -115,6 +115,8 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapp
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo=
github.com/IBM/pgxpoolprometheus v1.1.2 h1:sHJwxoL5Lw4R79Zt+H4Uj1zZ4iqXJLdk7XDE7TPs97U=
github.com/IBM/pgxpoolprometheus v1.1.2/go.mod h1:+vWzISN6S9ssgurhUNmm6AlXL9XLah3TdWJktquKTR8=
github.com/Machiel/slugify v1.0.1 h1:EfWSlRWstMadsgzmiV7d0yVd2IFlagWH68Q+DcYCm4E=
github.com/Machiel/slugify v1.0.1/go.mod h1:fTFGn5uWEynW4CUMG7sWkYXOf1UgDxyTM3DbR6Qfg3k=
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=

View File

@@ -85,20 +85,20 @@ func withConversionMetrics(sourceVersionAPI, targetVersionAPI string, conversion
// Only track schema versions for v0/v1 dashboards (v2+ info is redundant with API version)
switch source := a.(type) {
case *dashv0.Dashboard:
dashboardUID = string(source.UID)
dashboardUID = source.Name
if source.Spec.Object != nil {
sourceSchemaVersion = schemaversion.GetSchemaVersion(source.Spec.Object)
}
case *dashv1.Dashboard:
dashboardUID = string(source.UID)
dashboardUID = source.Name
if source.Spec.Object != nil {
sourceSchemaVersion = schemaversion.GetSchemaVersion(source.Spec.Object)
}
case *dashv2alpha1.Dashboard:
dashboardUID = string(source.UID)
dashboardUID = source.Name
// Don't track schema version for v2+ (redundant with API version)
case *dashv2beta1.Dashboard:
dashboardUID = string(source.UID)
dashboardUID = source.Name
// Don't track schema version for v2+ (redundant with API version)
}

View File

@@ -89,6 +89,7 @@ require (
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 // indirect
github.com/HdrHistogram/hdrhistogram-go v1.1.2 // indirect
github.com/IBM/pgxpoolprometheus v1.1.2 // indirect
github.com/Machiel/slugify v1.0.1 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver v1.5.0 // indirect
github.com/Masterminds/semver/v3 v3.4.0 // indirect

View File

@@ -167,6 +167,8 @@ github.com/HdrHistogram/hdrhistogram-go v1.1.2 h1:5IcZpTvzydCQeHzK4Ef/D5rrSqwxob
github.com/HdrHistogram/hdrhistogram-go v1.1.2/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo=
github.com/IBM/pgxpoolprometheus v1.1.2 h1:sHJwxoL5Lw4R79Zt+H4Uj1zZ4iqXJLdk7XDE7TPs97U=
github.com/IBM/pgxpoolprometheus v1.1.2/go.mod h1:+vWzISN6S9ssgurhUNmm6AlXL9XLah3TdWJktquKTR8=
github.com/Machiel/slugify v1.0.1 h1:EfWSlRWstMadsgzmiV7d0yVd2IFlagWH68Q+DcYCm4E=
github.com/Machiel/slugify v1.0.1/go.mod h1:fTFGn5uWEynW4CUMG7sWkYXOf1UgDxyTM3DbR6Qfg3k=
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=

View File

@@ -23,6 +23,7 @@ require (
require (
cel.dev/expr v0.25.1 // indirect
github.com/Machiel/slugify v1.0.1 // indirect
github.com/NYTimes/gziphandler v1.1.1 // indirect
github.com/ProtonMail/go-crypto v1.1.6 // indirect
github.com/antlr4-go/antlr/v4 v4.13.1 // indirect
@@ -191,7 +192,6 @@ require (
go.opentelemetry.io/contrib/propagators/jaeger v1.38.0 // indirect
go.opentelemetry.io/contrib/samplers/jaegerremote v0.32.0 // indirect
go.opentelemetry.io/otel v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.39.0 // indirect

View File

@@ -7,6 +7,8 @@ filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4
github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg=
github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/Machiel/slugify v1.0.1 h1:EfWSlRWstMadsgzmiV7d0yVd2IFlagWH68Q+DcYCm4E=
github.com/Machiel/slugify v1.0.1/go.mod h1:fTFGn5uWEynW4CUMG7sWkYXOf1UgDxyTM3DbR6Qfg3k=
github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=
github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c=
github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw=
@@ -541,8 +543,6 @@ go.opentelemetry.io/contrib/samplers/jaegerremote v0.32.0/go.mod h1:B9Oka5QVD0bn
go.opentelemetry.io/otel v1.21.0/go.mod h1:QZzNPQPm1zLX4gZK4cMi+71eaorMSGT3A4znnUvNNEo=
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
go.opentelemetry.io/otel/exporters/jaeger v1.17.0 h1:D7UpUy2Xc2wsi1Ras6V40q806WM07rqoCWzXu7Sqy+4=
go.opentelemetry.io/otel/exporters/jaeger v1.17.0/go.mod h1:nPCqOnEH9rNLKqH/+rrUjiMzHJdV1BlpKcTwRTyKkKI=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0 h1:f0cb2XPmrqn4XMy9PNliTgRKJgS5WcL/u0/WRYGz4t0=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0/go.mod h1:vnakAaFckOMiMtOIhFI2MNH4FYrZzXCYxmb1LlhoGz8=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.39.0 h1:in9O8ESIOlwJAEGTkkf34DesGRAc/Pn8qJ7k3r/42LM=

View File

@@ -369,7 +369,7 @@
"description": "For data source plugins. Proxy routes used for plugin authentication and adding headers to HTTP requests made by the plugin. For more information, refer to [Authentication for data source plugins](https://grafana.com/developers/plugin-tools/how-to-guides/data-source-plugins/add-authentication-for-data-source-plugins).",
"items": {
"type": "object",
"description": "",
"description": "For data source plugins. Proxy routes used for plugin authentication and adding headers to HTTP requests made by the plugin. For more information, refer to [Authentication for data source plugins](https://grafana.com/developers/plugin-tools/how-to-guides/data-source-plugins/add-authentication-for-data-source-plugins).",
"additionalProperties": false,
"properties": {
"path": {

2
go.mod
View File

@@ -660,6 +660,8 @@ require (
require github.com/grafana/tempo v1.5.1-0.20250529124718-87c2dc380cec // @grafana/observability-traces-and-profiling
require github.com/Machiel/slugify v1.0.1 // @grafana/plugins-platform-backend
require (
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect
github.com/IBM/pgxpoolprometheus v1.1.2 // indirect

2
go.sum
View File

@@ -738,6 +738,8 @@ github.com/HdrHistogram/hdrhistogram-go v1.1.2/go.mod h1:yDgFjdqOqDEKOvasDdhWNXY
github.com/IBM/pgxpoolprometheus v1.1.2 h1:sHJwxoL5Lw4R79Zt+H4Uj1zZ4iqXJLdk7XDE7TPs97U=
github.com/IBM/pgxpoolprometheus v1.1.2/go.mod h1:+vWzISN6S9ssgurhUNmm6AlXL9XLah3TdWJktquKTR8=
github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk=
github.com/Machiel/slugify v1.0.1 h1:EfWSlRWstMadsgzmiV7d0yVd2IFlagWH68Q+DcYCm4E=
github.com/Machiel/slugify v1.0.1/go.mod h1:fTFGn5uWEynW4CUMG7sWkYXOf1UgDxyTM3DbR6Qfg3k=
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=

View File

@@ -0,0 +1,43 @@
package clientauth
import (
"context"
)
// NamespaceProvider is a strategy for determining the namespace to use in
// token exchange requests. The context parameter lets implementations choose
// a namespace per request; static implementations may ignore it.
type NamespaceProvider interface {
	// GetNamespace returns the namespace to place in a token exchange request.
	GetNamespace(ctx context.Context) string
}
// AudienceProvider is a strategy for determining the audiences to use in
// token exchange requests. The context parameter lets implementations choose
// audiences per request; static implementations may ignore it.
type AudienceProvider interface {
	// GetAudiences returns the audiences to place in a token exchange request.
	GetAudiences(ctx context.Context) []string
}
// StaticNamespaceProvider returns a fixed namespace for all requests.
type StaticNamespaceProvider struct {
namespace string
}
// NewStaticNamespaceProvider creates a namespace provider that always returns the same namespace.
func NewStaticNamespaceProvider(namespace string) *StaticNamespaceProvider {
return &StaticNamespaceProvider{namespace: namespace}
}
func (p *StaticNamespaceProvider) GetNamespace(ctx context.Context) string {
return p.namespace
}
// StaticAudienceProvider returns a fixed set of audiences for all requests.
type StaticAudienceProvider struct {
audiences []string
}
// NewStaticAudienceProvider creates an audience provider that always returns the same audiences.
func NewStaticAudienceProvider(audiences ...string) *StaticAudienceProvider {
return &StaticAudienceProvider{audiences: audiences}
}
func (p *StaticAudienceProvider) GetAudiences(ctx context.Context) []string {
return p.audiences
}

View File

@@ -0,0 +1,78 @@
package clientauth
import (
"context"
"testing"
"github.com/stretchr/testify/require"
)
// TestStaticNamespaceProvider verifies the provider echoes back exactly the
// namespace it was constructed with, including the wildcard and empty string.
func TestStaticNamespaceProvider(t *testing.T) {
	cases := []struct {
		name  string
		input string
		want  string
	}{
		{name: "wildcard namespace", input: "*", want: "*"},
		{name: "specific namespace", input: "my-namespace", want: "my-namespace"},
		{name: "empty namespace", input: "", want: ""},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := NewStaticNamespaceProvider(tc.input).GetNamespace(context.Background())
			require.Equal(t, tc.want, got)
		})
	}
}
// TestStaticAudienceProvider checks that the provider echoes back exactly the
// audiences it was constructed with, for zero, one, and many values.
func TestStaticAudienceProvider(t *testing.T) {
	cases := []struct {
		name              string
		audiences         []string
		expectedAudiences []string
	}{
		{name: "single audience", audiences: []string{"folder.grafana.app"}, expectedAudiences: []string{"folder.grafana.app"}},
		{name: "multiple audiences", audiences: []string{"audience1", "audience2", "audience3"}, expectedAudiences: []string{"audience1", "audience2", "audience3"}},
		{name: "empty audiences", audiences: []string{}, expectedAudiences: []string{}},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := NewStaticAudienceProvider(tc.audiences...).GetAudiences(context.Background())
			require.Equal(t, tc.expectedAudiences, got)
		})
	}
}
// TestProviderInterfaces statically verifies that the concrete providers
// satisfy their interfaces; the assignments are compile-time checks and do
// nothing at runtime.
func TestProviderInterfaces(t *testing.T) {
	// Verify that all providers implement their interfaces
	var _ NamespaceProvider = (*StaticNamespaceProvider)(nil)
	var _ AudienceProvider = (*StaticAudienceProvider)(nil)
}

View File

@@ -0,0 +1,90 @@
package clientauth
import (
"fmt"
"net/http"
authnlib "github.com/grafana/authlib/authn"
utilnet "k8s.io/apimachinery/pkg/util/net"
"k8s.io/client-go/transport"
)
// tokenExchangeRoundTripper wraps an http.RoundTripper and injects an exchanged
// access token into outgoing requests via the X-Access-Token header.
type tokenExchangeRoundTripper struct {
	// exchanger performs the token exchange for every request.
	exchanger authnlib.TokenExchanger
	// transport is the wrapped RoundTripper the cloned request is forwarded to.
	transport http.RoundTripper
	// namespaceProvider and audienceProvider supply the per-request
	// token exchange parameters.
	namespaceProvider NamespaceProvider
	audienceProvider  AudienceProvider
}

// Compile-time check that the http.RoundTripper contract is satisfied.
var _ http.RoundTripper = (*tokenExchangeRoundTripper)(nil)
// newTokenExchangeRoundTripperWithStrategies creates a RoundTripper that
// exchanges a token via the given exchanger, using the supplied namespace and
// audience strategies, before delegating to the wrapped transport.
func newTokenExchangeRoundTripperWithStrategies(
	exchanger authnlib.TokenExchanger,
	transport http.RoundTripper,
	namespaceProvider NamespaceProvider,
	audienceProvider AudienceProvider,
) *tokenExchangeRoundTripper {
	rt := &tokenExchangeRoundTripper{
		transport:         transport,
		exchanger:         exchanger,
		audienceProvider:  audienceProvider,
		namespaceProvider: namespaceProvider,
	}
	return rt
}
// RoundTrip implements http.RoundTripper. It exchanges a token using the
// configured strategies, sets it in the X-Access-Token header of a cloned
// request, and forwards the clone to the wrapped transport.
func (t *tokenExchangeRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	ctx := req.Context()
	exchangeReq := authnlib.TokenExchangeRequest{
		Audiences: t.audienceProvider.GetAudiences(ctx),
		Namespace: t.namespaceProvider.GetNamespace(ctx),
	}
	tokenResponse, err := t.exchanger.Exchange(ctx, exchangeReq)
	if err != nil {
		return nil, fmt.Errorf("failed to exchange token: %w", err)
	}
	// RoundTrippers must not mutate the caller's request, so operate on a clone.
	outgoing := utilnet.CloneRequest(req)
	outgoing.Header.Set("X-Access-Token", "Bearer "+tokenResponse.Token)
	return t.transport.RoundTrip(outgoing)
}
// NewStaticTokenExchangeTransportWrapper creates a transport.WrapperFunc that
// wraps an http.RoundTripper with token exchange authentication using a fixed
// audience and namespace, for use with k8s rest.Config.WrapTransport.
func NewStaticTokenExchangeTransportWrapper(
	exchanger authnlib.TokenExchanger,
	audience string,
	namespace string,
) transport.WrapperFunc {
	return func(rt http.RoundTripper) http.RoundTripper {
		nsProvider := NewStaticNamespaceProvider(namespace)
		audProvider := NewStaticAudienceProvider(audience)
		return newTokenExchangeRoundTripperWithStrategies(exchanger, rt, nsProvider, audProvider)
	}
}
// NewTokenExchangeTransportWrapper creates a transport.WrapperFunc that wraps
// an http.RoundTripper with token exchange authentication using the supplied
// custom namespace and audience strategies.
func NewTokenExchangeTransportWrapper(
	exchanger authnlib.TokenExchanger,
	namespaceProvider NamespaceProvider,
	audienceProvider AudienceProvider,
) transport.WrapperFunc {
	return func(rt http.RoundTripper) http.RoundTripper {
		return newTokenExchangeRoundTripperWithStrategies(
			exchanger,
			rt,
			namespaceProvider,
			audienceProvider,
		)
	}
}
// WildcardNamespace is a convenience constant for the wildcard namespace
// ("*") used in token exchange requests.
const WildcardNamespace = "*"

View File

@@ -0,0 +1,259 @@
package clientauth
import (
"context"
"errors"
"net/http"
"net/http/httptest"
"testing"
"github.com/grafana/authlib/authn"
"github.com/stretchr/testify/require"
)
// fakeExchanger is a test double for a TokenExchanger: it records the last
// request it received and returns a canned response/error pair.
type fakeExchanger struct {
	resp   *authn.TokenExchangeResponse // returned from Exchange
	err    error                        // returned from Exchange
	gotReq *authn.TokenExchangeRequest  // last request passed to Exchange
}

// Exchange records req for later inspection and returns the configured
// response and error.
func (f *fakeExchanger) Exchange(_ context.Context, req authn.TokenExchangeRequest) (*authn.TokenExchangeResponse, error) {
	f.gotReq = &req
	return f.resp, f.err
}
// roundTripperFunc allows building a stub transport inline.
type roundTripperFunc func(*http.Request) (*http.Response, error)

// RoundTrip implements http.RoundTripper by invoking the function itself.
func (f roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) { return f(r) }
// TestTokenExchangeRoundTripper_SetsAccessTokenHeader verifies the exchanged
// token is placed in the X-Access-Token header with the "Bearer " prefix.
func TestTokenExchangeRoundTripper_SetsAccessTokenHeader(t *testing.T) {
	exchanger := &fakeExchanger{resp: &authn.TokenExchangeResponse{Token: "test-token-123"}}
	var capturedHeader string
	transport := roundTripperFunc(func(r *http.Request) (*http.Response, error) {
		capturedHeader = r.Header.Get("X-Access-Token")
		rr := httptest.NewRecorder()
		rr.WriteHeader(http.StatusOK)
		return rr.Result(), nil
	})
	rt := newTokenExchangeRoundTripperWithStrategies(exchanger, transport, NewStaticNamespaceProvider("test-namespace"), NewStaticAudienceProvider("test-audience"))
	req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://example.org", nil)
	resp, err := rt.RoundTrip(req)
	require.NoError(t, err)
	// Close the response body exactly once; the previous version closed it
	// twice (once in a nil-guard and once unconditionally).
	if resp != nil {
		_ = resp.Body.Close()
	}
	require.Equal(t, "Bearer test-token-123", capturedHeader)
}
// TestTokenExchangeRoundTripper_PropagatesExchangeError verifies that an
// exchange failure is wrapped and returned, and that the wrapped transport is
// never invoked in that case.
func TestTokenExchangeRoundTripper_PropagatesExchangeError(t *testing.T) {
	expectedErr := errors.New("token exchange failed")
	exchanger := &fakeExchanger{err: expectedErr}
	stub := roundTripperFunc(func(_ *http.Request) (*http.Response, error) {
		t.Fatal("transport should not be called on exchange error")
		return nil, nil
	})
	rt := newTokenExchangeRoundTripperWithStrategies(exchanger, stub, NewStaticNamespaceProvider("test-namespace"), NewStaticAudienceProvider("test-audience"))
	req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://example.org", nil)
	resp, err := rt.RoundTrip(req)
	if resp != nil {
		_ = resp.Body.Close()
	}
	require.Error(t, err)
	require.ErrorContains(t, err, "failed to exchange token")
	require.ErrorIs(t, err, expectedErr)
}
// TestTokenExchangeRoundTripper_SendsCorrectAudienceAndNamespace verifies the
// audience and namespace configured on the providers end up, unchanged, in the
// token exchange request.
func TestTokenExchangeRoundTripper_SendsCorrectAudienceAndNamespace(t *testing.T) {
	cases := []struct {
		name              string
		audience          string
		namespace         string
		expectedAudiences []string
		expectedNamespace string
	}{
		{
			name:              "single audience with wildcard namespace",
			audience:          "folder.grafana.app",
			namespace:         "*",
			expectedAudiences: []string{"folder.grafana.app"},
			expectedNamespace: "*",
		},
		{
			name:              "different audience with wildcard namespace",
			audience:          "dashboard.grafana.app",
			namespace:         "*",
			expectedAudiences: []string{"dashboard.grafana.app"},
			expectedNamespace: "*",
		},
		{
			name:              "audience with specific namespace",
			audience:          "test-audience",
			namespace:         "test-namespace",
			expectedAudiences: []string{"test-audience"},
			expectedNamespace: "test-namespace",
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			exchanger := &fakeExchanger{resp: &authn.TokenExchangeResponse{Token: "token"}}
			stub := roundTripperFunc(func(_ *http.Request) (*http.Response, error) {
				rec := httptest.NewRecorder()
				rec.WriteHeader(http.StatusOK)
				return rec.Result(), nil
			})
			rt := newTokenExchangeRoundTripperWithStrategies(exchanger, stub, NewStaticNamespaceProvider(tc.namespace), NewStaticAudienceProvider(tc.audience))
			req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://example.org", nil)
			resp, err := rt.RoundTrip(req)
			require.NoError(t, err)
			if resp != nil {
				_ = resp.Body.Close()
			}
			require.NotNil(t, exchanger.gotReq)
			require.Equal(t, tc.expectedAudiences, exchanger.gotReq.Audiences)
			require.Equal(t, tc.expectedNamespace, exchanger.gotReq.Namespace)
		})
	}
}
// TestTokenExchangeRoundTripper_DoesNotMutateOriginalRequest verifies the
// round tripper clones the request rather than mutating the caller's copy.
func TestTokenExchangeRoundTripper_DoesNotMutateOriginalRequest(t *testing.T) {
	exchanger := &fakeExchanger{resp: &authn.TokenExchangeResponse{Token: "token"}}
	stub := roundTripperFunc(func(r *http.Request) (*http.Response, error) {
		rec := httptest.NewRecorder()
		rec.WriteHeader(http.StatusOK)
		return rec.Result(), nil
	})
	rt := newTokenExchangeRoundTripperWithStrategies(exchanger, stub, NewStaticNamespaceProvider("namespace"), NewStaticAudienceProvider("audience"))
	originalReq, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://example.org", nil)
	originalReq.Header.Set("X-Custom-Header", "original-value")
	// Precondition: the caller's request carries no access token.
	require.Empty(t, originalReq.Header.Get("X-Access-Token"))
	resp, err := rt.RoundTrip(originalReq)
	require.NoError(t, err)
	_ = resp.Body.Close()
	// The caller's request must be untouched after the round trip.
	require.Empty(t, originalReq.Header.Get("X-Access-Token"))
	require.Equal(t, "original-value", originalReq.Header.Get("X-Custom-Header"))
}
// TestTokenExchangeRoundTripper_PropagatesTransportError verifies that errors
// from the wrapped transport are returned to the caller unchanged.
func TestTokenExchangeRoundTripper_PropagatesTransportError(t *testing.T) {
	expectedErr := errors.New("transport error")
	exchanger := &fakeExchanger{resp: &authn.TokenExchangeResponse{Token: "token"}}
	stub := roundTripperFunc(func(_ *http.Request) (*http.Response, error) {
		return nil, expectedErr
	})
	rt := newTokenExchangeRoundTripperWithStrategies(exchanger, stub, NewStaticNamespaceProvider("namespace"), NewStaticAudienceProvider("audience"))
	req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://example.org", nil)
	resp, err := rt.RoundTrip(req)
	if resp != nil {
		_ = resp.Body.Close()
	}
	require.Error(t, err)
	require.ErrorIs(t, err, expectedErr)
}
// TestNewTokenExchangeTransportWrapper verifies the static wrapper injects the
// exchanged token and forwards the fixed audience/namespace to the exchanger.
func TestNewTokenExchangeTransportWrapper(t *testing.T) {
	exchanger := &fakeExchanger{resp: &authn.TokenExchangeResponse{Token: "wrapped-token"}}
	var capturedHeader string
	base := roundTripperFunc(func(r *http.Request) (*http.Response, error) {
		capturedHeader = r.Header.Get("X-Access-Token")
		rec := httptest.NewRecorder()
		rec.WriteHeader(http.StatusOK)
		return rec.Result(), nil
	})
	wrapped := NewStaticTokenExchangeTransportWrapper(exchanger, "test-audience", "test-namespace")(base)
	req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://example.org", nil)
	resp, err := wrapped.RoundTrip(req)
	require.NoError(t, err)
	_ = resp.Body.Close()
	require.Equal(t, "Bearer wrapped-token", capturedHeader)
	require.NotNil(t, exchanger.gotReq)
	require.Equal(t, []string{"test-audience"}, exchanger.gotReq.Audiences)
	require.Equal(t, "test-namespace", exchanger.gotReq.Namespace)
}
// TestTokenExchangeRoundTripperWithStrategies exercises the round tripper with
// custom provider combinations, checking both the outgoing header and the
// parameters sent to the exchanger.
func TestTokenExchangeRoundTripperWithStrategies(t *testing.T) {
	cases := []struct {
		name              string
		namespaceProvider NamespaceProvider
		audienceProvider  AudienceProvider
		expectedNamespace string
		expectedAudiences []string
		expectedHeader    string
	}{
		{
			name:              "static providers with bearer prefix",
			namespaceProvider: NewStaticNamespaceProvider("*"),
			audienceProvider:  NewStaticAudienceProvider("folder.grafana.app"),
			expectedNamespace: "*",
			expectedAudiences: []string{"folder.grafana.app"},
			expectedHeader:    "Bearer test-token",
		},
		{
			name:              "multiple audiences",
			namespaceProvider: NewStaticNamespaceProvider("*"),
			audienceProvider:  NewStaticAudienceProvider("audience1", "audience2"),
			expectedNamespace: "*",
			expectedAudiences: []string{"audience1", "audience2"},
			expectedHeader:    "Bearer test-token",
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			exchanger := &fakeExchanger{resp: &authn.TokenExchangeResponse{Token: "test-token"}}
			var capturedHeader string
			stub := roundTripperFunc(func(r *http.Request) (*http.Response, error) {
				capturedHeader = r.Header.Get("X-Access-Token")
				rec := httptest.NewRecorder()
				rec.WriteHeader(http.StatusOK)
				return rec.Result(), nil
			})
			rt := newTokenExchangeRoundTripperWithStrategies(exchanger, stub, tc.namespaceProvider, tc.audienceProvider)
			req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://example.org", nil)
			resp, err := rt.RoundTrip(req)
			require.NoError(t, err)
			if resp != nil {
				_ = resp.Body.Close()
			}
			require.Equal(t, tc.expectedHeader, capturedHeader)
			require.NotNil(t, exchanger.gotReq)
			require.Equal(t, tc.expectedAudiences, exchanger.gotReq.Audiences)
			require.Equal(t, tc.expectedNamespace, exchanger.gotReq.Namespace)
		})
	}
}

View File

@@ -162,23 +162,6 @@ func (s *SocialGenericOAuth) Reload(ctx context.Context, settings ssoModels.SSOS
return nil
}
// TODO: remove this in the next PR and use the isGroupMember from social.go
// isGroupMember reports whether any of the user's groups appears in the
// connector's AllowedGroups list. An empty AllowedGroups list allows everyone.
func (s *SocialGenericOAuth) isGroupMember(groups []string) bool {
	if len(s.info.AllowedGroups) == 0 {
		return true
	}
	for _, allowedGroup := range s.info.AllowedGroups {
		for _, group := range groups {
			if group == allowedGroup {
				return true
			}
		}
	}
	return false
}
func (s *SocialGenericOAuth) isTeamMember(ctx context.Context, client *http.Client) bool {
if len(s.teamIds) == 0 {
return true

View File

@@ -205,20 +205,3 @@ func (s *SocialOkta) getGroups(data *OktaUserInfoJson) []string {
}
return groups
}
// TODO: remove this in a separate PR and use the isGroupMember from the social.go
// isGroupMember reports whether any of the user's groups appears in the
// connector's AllowedGroups list. An empty AllowedGroups list allows everyone.
func (s *SocialOkta) isGroupMember(groups []string) bool {
	if len(s.info.AllowedGroups) == 0 {
		return true
	}
	for _, group := range groups {
		for _, allowedGroup := range s.info.AllowedGroups {
			if group == allowedGroup {
				return true
			}
		}
	}
	return false
}

View File

@@ -6,24 +6,22 @@ import (
"errors"
"fmt"
"log/slog"
"net/http"
"os"
"os/signal"
"syscall"
"github.com/prometheus/client_golang/prometheus"
"k8s.io/client-go/rest"
"k8s.io/client-go/transport"
"github.com/grafana/grafana-app-sdk/logging"
"github.com/grafana/grafana-app-sdk/operator"
folder "github.com/grafana/grafana/apps/folder/pkg/apis/folder/v1beta1"
"github.com/grafana/grafana/apps/iam/pkg/app"
"github.com/grafana/grafana/pkg/clientauth"
"github.com/grafana/grafana/pkg/server"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/authlib/authn"
utilnet "k8s.io/apimachinery/pkg/util/net"
)
func RunIAMFolderReconciler(deps server.OperatorDependencies) error {
@@ -151,12 +149,11 @@ func buildKubeConfigFromFolderAppURL(
return &rest.Config{
APIPath: "/apis",
Host: folderAppURL,
WrapTransport: transport.WrapperFunc(func(rt http.RoundTripper) http.RoundTripper {
return &authRoundTripper{
tokenExchangeClient: tokenExchangeClient,
transport: rt,
}
}),
WrapTransport: clientauth.NewStaticTokenExchangeTransportWrapper(
tokenExchangeClient,
folder.GROUP,
clientauth.WildcardNamespace,
),
TLSClientConfig: tlsConfig,
}, nil
}
@@ -189,23 +186,3 @@ func buildTLSConfig(insecure bool, certFile, keyFile, caFile string) (rest.TLSCl
return tlsConfig, nil
}
// authRoundTripper injects an exchanged access token into each outgoing
// request before delegating to the wrapped transport.
type authRoundTripper struct {
	// tokenExchangeClient performs the per-request token exchange.
	tokenExchangeClient *authn.TokenExchangeClient
	// transport is the wrapped RoundTripper the request is forwarded to.
	transport http.RoundTripper
}
// RoundTrip exchanges a token scoped to the folder API group across all
// namespaces, sets it in the X-Access-Token header of a cloned request, and
// forwards the clone to the wrapped transport.
func (t *authRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	exchangeReq := authn.TokenExchangeRequest{
		Audiences: []string{folder.GROUP},
		Namespace: "*",
	}
	tokenResponse, err := t.tokenExchangeClient.Exchange(req.Context(), exchangeReq)
	if err != nil {
		return nil, fmt.Errorf("failed to exchange token: %w", err)
	}
	// Work on a clone: RoundTrippers are not expected to mutate the passed request.
	outgoing := utilnet.CloneRequest(req)
	outgoing.Header.Set("X-Access-Token", "Bearer "+tokenResponse.Token)
	return t.transport.RoundTrip(outgoing)
}

View File

@@ -10,8 +10,8 @@ import (
sdktracing "github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/log"
@@ -94,7 +94,7 @@ func NewRegistry(store map[string]backendplugin.PluginFactoryFunc) *Registry {
}
}
func ProvideCoreRegistry(tracer tracing.Tracer, am *azuremonitor.Service, cw *cloudwatch.Service, cm *cloudmonitoring.Service,
func ProvideCoreRegistry(tracer trace.Tracer, am *azuremonitor.Service, cw *cloudwatch.Service, cm *cloudmonitoring.Service,
es *elasticsearch.Service, grap *graphite.Service, idb *influxdb.Service, lk *loki.Service, otsdb *opentsdb.Service,
pr *prometheus.Service, t *tempo.Service, td *testdatasource.Service, pg *postgres.Service, my *mysql.Service,
ms *mssql.Service, graf *grafanads.Service, pyroscope *pyroscope.Service, parca *parca.Service, zipkin *zipkin.Service, jaeger *jaeger.Service) *Registry {
@@ -204,7 +204,7 @@ var ErrCorePluginNotFound = errors.New("core plugin not found")
// NewPlugin factory for creating and initializing a single core plugin.
// Note: cfg only needed for mssql connection pooling defaults.
func NewPlugin(pluginID string, cfg *setting.Cfg, httpClientProvider *httpclient.Provider, tracer tracing.Tracer, features featuremgmt.FeatureToggles) (*plugins.Plugin, error) {
func NewPlugin(pluginID string, cfg *setting.Cfg, httpClientProvider *httpclient.Provider, tracer trace.Tracer, features featuremgmt.FeatureToggles) (*plugins.Plugin, error) {
jsonData := plugins.JSONData{
ID: pluginID,
AliasIDs: []string{},

View File

@@ -4,8 +4,8 @@ import (
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/plugins/log"
"github.com/grafana/grafana/pkg/plugins/tracing"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/require"
@@ -46,7 +46,7 @@ func TestNewPlugin(t *testing.T) {
tc.ExpectedID = tc.ID
}
p, err := NewPlugin(tc.ID, setting.NewCfg(), httpclient.NewProvider(), tracing.InitializeTracerForTest(), featuremgmt.WithFeatures())
p, err := NewPlugin(tc.ID, setting.NewCfg(), httpclient.NewProvider(), tracing.NoopTracer(), featuremgmt.WithFeatures())
if tc.ExpectedNotFoundErr {
require.ErrorIs(t, err, ErrCorePluginNotFound)
require.Nil(t, p)

View File

@@ -9,7 +9,6 @@ import (
"github.com/hashicorp/go-plugin"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana/pkg/infra/process"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/log"
@@ -90,14 +89,6 @@ func (p *grpcPlugin) Start(_ context.Context) error {
return errors.New("no compatible plugin implementation found")
}
elevated, err := process.IsRunningWithElevatedPrivileges()
if err != nil {
p.logger.Debug("Error checking plugin process execution privilege", "error", err)
}
if elevated {
p.logger.Warn("Plugin process is running with elevated privileges. This is not recommended")
}
p.state = pluginStateStartSuccess
return nil
}

View File

@@ -6,12 +6,12 @@ import (
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/config"
"github.com/grafana/grafana/pkg/plugins/log"
"github.com/grafana/grafana/pkg/plugins/manager/signature"
"github.com/grafana/grafana/pkg/plugins/pluginassets"
"github.com/grafana/grafana/pkg/plugins/tracing"
"github.com/grafana/grafana/pkg/semconv"
)

View File

@@ -5,7 +5,8 @@ import (
"path"
"slices"
"github.com/grafana/grafana/pkg/infra/slugify"
"github.com/Machiel/slugify"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/config"
"github.com/grafana/grafana/pkg/plugins/log"

View File

@@ -7,10 +7,10 @@ import (
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/config"
"github.com/grafana/grafana/pkg/plugins/log"
"github.com/grafana/grafana/pkg/plugins/tracing"
)
// Discoverer is responsible for the Discovery stage of the plugin loader pipeline.

View File

@@ -6,10 +6,10 @@ import (
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/config"
"github.com/grafana/grafana/pkg/plugins/log"
"github.com/grafana/grafana/pkg/plugins/tracing"
"github.com/grafana/grafana/pkg/semconv"
)

View File

@@ -6,10 +6,10 @@ import (
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/config"
"github.com/grafana/grafana/pkg/plugins/log"
"github.com/grafana/grafana/pkg/plugins/tracing"
"github.com/grafana/grafana/pkg/semconv"
)

View File

@@ -6,10 +6,10 @@ import (
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/config"
"github.com/grafana/grafana/pkg/plugins/log"
"github.com/grafana/grafana/pkg/plugins/tracing"
"github.com/grafana/grafana/pkg/semconv"
)

View File

@@ -10,7 +10,6 @@ import (
"slices"
"strings"
"github.com/grafana/grafana/pkg/infra/fs"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/config"
"github.com/grafana/grafana/pkg/plugins/log"
@@ -79,15 +78,14 @@ func (s *LocalSource) Discover(_ context.Context) ([]*plugins.FoundBundle, error
pluginJSONPaths := make([]string, 0, len(s.paths))
for _, path := range s.paths {
exists, err := fs.Exists(path)
if err != nil {
if _, err := os.Stat(path); err != nil {
if os.IsNotExist(err) {
s.log.Warn("Skipping finding plugins as directory does not exist", "path", path)
continue
}
s.log.Warn("Skipping finding plugins as an error occurred", "path", path, "error", err)
continue
}
if !exists {
s.log.Warn("Skipping finding plugins as directory does not exist", "path", path)
continue
}
paths, err := s.getAbsPluginJSONPaths(path)
if err != nil {

View File

@@ -0,0 +1,35 @@
package tracing
import (
"context"
"net/http"
"go.opentelemetry.io/otel/codes"
"go.opentelemetry.io/otel/trace"
"go.opentelemetry.io/otel/trace/noop"
)
// Tracer defines the service used to create new spans. It embeds the
// OpenTelemetry trace.Tracer interface and adds header propagation.
type Tracer interface {
	trace.Tracer

	// Inject adds identifying information for the span to the
	// headers defined in [http.Header] map (this mutates http.Header).
	Inject(context.Context, http.Header, trace.Span)
}
// Error sets the status to error and records the error as an exception in the
// provided span, then returns err unchanged so callers can chain it. A nil
// err is returned as-is without touching the span.
func Error(span trace.Span, err error) error {
	if err != nil {
		// Mark the span failed, then attach the error as an exception event.
		span.SetStatus(codes.Error, err.Error())
		span.RecordError(err)
	}
	return err
}
// NoopTracer returns a no-op tracer that can be used when tracing is not
// available; every span it creates is inert.
func NoopTracer() trace.Tracer {
	provider := noop.NewTracerProvider()
	return provider.Tracer("")
}

View File

@@ -4,7 +4,6 @@ import (
"context"
"errors"
"fmt"
"net/http"
"sync"
"github.com/grafana/authlib/authn"
@@ -15,8 +14,8 @@ import (
dashboardv1 "github.com/grafana/grafana/apps/dashboard/pkg/apis/dashboard/v1beta1"
folderv1 "github.com/grafana/grafana/apps/folder/pkg/apis/folder/v1beta1"
"github.com/grafana/grafana/apps/provisioning/pkg/auth"
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/clientauth"
)
var (
@@ -73,10 +72,8 @@ func NewRemoteConfigProvider(cfg map[schema.GroupResource]DialConfig, exchangeCl
for gr, dialConfig := range cfg {
configProviders[gr] = func(ctx context.Context) (*rest.Config, error) {
return &rest.Config{
Host: dialConfig.Host,
WrapTransport: func(rt http.RoundTripper) http.RoundTripper {
return auth.NewRoundTripper(exchangeClient, rt, dialConfig.Audience)
},
Host: dialConfig.Host,
WrapTransport: clientauth.NewStaticTokenExchangeTransportWrapper(exchangeClient, dialConfig.Audience, clientauth.WildcardNamespace),
TLSClientConfig: rest.TLSClientConfig{
Insecure: dialConfig.Insecure,
CAFile: dialConfig.CAFile,

View File

@@ -390,13 +390,13 @@ func Initialize(ctx context.Context, cfg *setting.Cfg, opts Options, apiOpts api
return nil, err
}
validate := pipeline.ProvideValidationStage(pluginManagementCfg, validation, angularinspectorService)
tracer := otelTracer()
ossDataSourceRequestURLValidator := validations.ProvideURLValidator()
httpclientProvider := httpclientprovider.New(cfg, ossDataSourceRequestURLValidator, tracingService)
azuremonitorService := azuremonitor.ProvideService(httpclientProvider)
cloudwatchService := cloudwatch.ProvideService()
cloudmonitoringService := cloudmonitoring.ProvideService(httpclientProvider)
elasticsearchService := elasticsearch.ProvideService(httpclientProvider)
tracer := otelTracer()
graphiteService := graphite.ProvideService(httpclientProvider, tracer)
influxdbService := influxdb.ProvideService(httpclientProvider)
lokiService := loki.ProvideService(httpclientProvider, tracer)
@@ -556,7 +556,7 @@ func Initialize(ctx context.Context, cfg *setting.Cfg, opts Options, apiOpts api
parcaService := parca.ProvideService(httpclientProvider)
zipkinService := zipkin.ProvideService(httpclientProvider)
jaegerService := jaeger.ProvideService(httpclientProvider)
corepluginRegistry := coreplugin.ProvideCoreRegistry(tracingService, azuremonitorService, cloudwatchService, cloudmonitoringService, elasticsearchService, graphiteService, influxdbService, lokiService, opentsdbService, prometheusService, tempoService, testdatasourceService, postgresService, mysqlService, mssqlService, grafanadsService, pyroscopeService, parcaService, zipkinService, jaegerService)
corepluginRegistry := coreplugin.ProvideCoreRegistry(tracer, azuremonitorService, cloudwatchService, cloudmonitoringService, elasticsearchService, graphiteService, influxdbService, lokiService, opentsdbService, prometheusService, tempoService, testdatasourceService, postgresService, mysqlService, mssqlService, grafanadsService, pyroscopeService, parcaService, zipkinService, jaegerService)
providerService := provider2.ProvideService(corepluginRegistry)
processService := process.ProvideService()
retrieverService := retriever.ProvideService(sqlStore, apikeyService, kvStore, userService, orgService)
@@ -1050,13 +1050,13 @@ func InitializeForTest(ctx context.Context, t sqlutil.ITestDB, testingT interfac
return nil, err
}
validate := pipeline.ProvideValidationStage(pluginManagementCfg, validation, angularinspectorService)
tracer := otelTracer()
ossDataSourceRequestURLValidator := validations.ProvideURLValidator()
httpclientProvider := httpclientprovider.New(cfg, ossDataSourceRequestURLValidator, tracingService)
azuremonitorService := azuremonitor.ProvideService(httpclientProvider)
cloudwatchService := cloudwatch.ProvideService()
cloudmonitoringService := cloudmonitoring.ProvideService(httpclientProvider)
elasticsearchService := elasticsearch.ProvideService(httpclientProvider)
tracer := otelTracer()
graphiteService := graphite.ProvideService(httpclientProvider, tracer)
influxdbService := influxdb.ProvideService(httpclientProvider)
lokiService := loki.ProvideService(httpclientProvider, tracer)
@@ -1216,7 +1216,7 @@ func InitializeForTest(ctx context.Context, t sqlutil.ITestDB, testingT interfac
parcaService := parca.ProvideService(httpclientProvider)
zipkinService := zipkin.ProvideService(httpclientProvider)
jaegerService := jaeger.ProvideService(httpclientProvider)
corepluginRegistry := coreplugin.ProvideCoreRegistry(tracingService, azuremonitorService, cloudwatchService, cloudmonitoringService, elasticsearchService, graphiteService, influxdbService, lokiService, opentsdbService, prometheusService, tempoService, testdatasourceService, postgresService, mysqlService, mssqlService, grafanadsService, pyroscopeService, parcaService, zipkinService, jaegerService)
corepluginRegistry := coreplugin.ProvideCoreRegistry(tracer, azuremonitorService, cloudwatchService, cloudmonitoringService, elasticsearchService, graphiteService, influxdbService, lokiService, opentsdbService, prometheusService, tempoService, testdatasourceService, postgresService, mysqlService, mssqlService, grafanadsService, pyroscopeService, parcaService, zipkinService, jaegerService)
providerService := provider2.ProvideService(corepluginRegistry)
processService := process.ProvideService()
retrieverService := retriever.ProvideService(sqlStore, apikeyService, kvStore, userService, orgService)

View File

@@ -4,7 +4,6 @@ import (
"context"
"errors"
"fmt"
"net/http"
"time"
"github.com/fullstorydev/grpchan/inprocgrpc"
@@ -24,6 +23,7 @@ import (
authlib "github.com/grafana/authlib/types"
"github.com/grafana/dskit/middleware"
"github.com/grafana/grafana/pkg/clientauth"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
@@ -262,9 +262,11 @@ func RegisterRBACAuthZService(
folderStore = store.NewAPIFolderStore(tracer, reg, func(ctx context.Context) (*rest.Config, error) {
return &rest.Config{
Host: cfg.Folder.Host,
WrapTransport: func(rt http.RoundTripper) http.RoundTripper {
return &tokenExhangeRoundTripper{te: exchangeClient, rt: rt}
},
WrapTransport: clientauth.NewStaticTokenExchangeTransportWrapper(
exchangeClient,
"folder.grafana.app",
clientauth.WildcardNamespace,
),
TLSClientConfig: rest.TLSClientConfig{
Insecure: cfg.Folder.Insecure,
CAFile: cfg.Folder.CAFile,
@@ -291,27 +293,6 @@ func RegisterRBACAuthZService(
authzv1.RegisterAuthzServiceServer(srv, server)
}
var _ http.RoundTripper = tokenExhangeRoundTripper{}
type tokenExhangeRoundTripper struct {
te authnlib.TokenExchanger
rt http.RoundTripper
}
func (t tokenExhangeRoundTripper) RoundTrip(r *http.Request) (*http.Response, error) {
res, err := t.te.Exchange(r.Context(), authnlib.TokenExchangeRequest{
Namespace: "*",
Audiences: []string{"folder.grafana.app"},
})
if err != nil {
return nil, fmt.Errorf("create access token: %w", err)
}
r.Header.Set("X-Access-Token", "Bearer "+res.Token)
return t.rt.RoundTrip(r)
}
type NoopCache struct{}
func (lc *NoopCache) Get(ctx context.Context, key string) ([]byte, error) {

View File

@@ -115,6 +115,7 @@ func ProvideUnifiedStorageGrpcService(
cfg: cfg,
features: features,
stopCh: make(chan struct{}),
stoppedCh: make(chan error, 1),
authenticator: authn,
tracing: tracer,
db: db,

View File

@@ -84,7 +84,7 @@ func TestIntegrationOpenTSDB(t *testing.T) {
// nolint:gosec
resp, err := http.Post(u, "application/json", buf1)
require.NoError(t, err)
require.Equal(t, http.StatusInternalServerError, resp.StatusCode)
require.Equal(t, http.StatusBadRequest, resp.StatusCode)
t.Cleanup(func() {
err := resp.Body.Close()
require.NoError(t, err)

View File

@@ -1,10 +1,13 @@
package opentsdb
import (
"encoding/json"
"fmt"
"net/http"
"net/url"
"path"
"sort"
"strings"
"github.com/grafana/grafana-plugin-sdk-go/backend"
)
@@ -65,3 +68,386 @@ func (s *Service) HandleSuggestQuery(rw http.ResponseWriter, req *http.Request)
return
}
}
// HandleAggregatorsQuery proxies a metadata request to the datasource's
// OpenTSDB /api/aggregators endpoint, sorts the returned aggregator names,
// and relays the upstream status code and headers to the caller.
func (s *Service) HandleAggregatorsQuery(rw http.ResponseWriter, req *http.Request) {
	logger := logger.FromContext(req.Context())
	// Resolve the datasource settings (URL and HTTP client) for this request.
	dsInfo, err := s.getDSInfo(req.Context(), backend.PluginConfigFromContext(req.Context()))
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to get datasource info: %v", err), http.StatusInternalServerError)
		return
	}
	u, err := url.Parse(dsInfo.URL)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to parse datasource URL: %v", err), http.StatusInternalServerError)
		return
	}
	u.Path = path.Join(u.Path, "api/aggregators")
	httpReq, err := http.NewRequestWithContext(req.Context(), http.MethodGet, u.String(), nil)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to create request: %v", err), http.StatusInternalServerError)
		return
	}
	res, err := dsInfo.HTTPClient.Do(httpReq)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to execute request: %v", err), http.StatusInternalServerError)
		return
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			logger.Error("Failed to close response body", "error", err)
		}
	}()
	responseBody, err := DecodeResponseBody(res, logger)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to decode response: %v", err), http.StatusInternalServerError)
		return
	}
	// The upstream payload is expected to be a JSON array of aggregator names.
	var aggregators []string
	if err := json.Unmarshal(responseBody, &aggregators); err != nil {
		http.Error(rw, fmt.Sprintf("failed to unmarshal aggregators response: %v", err), http.StatusInternalServerError)
		return
	}
	// Sort for a stable, predictable ordering before re-serializing.
	sort.Strings(aggregators)
	sortedResponse, err := json.Marshal(aggregators)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to marshal response: %v", err), http.StatusInternalServerError)
		return
	}
	// Copy upstream headers before WriteHeader (headers are ignored after it).
	// Content-Encoding and Content-Length are skipped because the body was
	// decoded and re-marshaled, so the upstream values no longer apply.
	for name, values := range res.Header {
		if name == "Content-Encoding" || name == "Content-Length" {
			continue
		}
		for _, value := range values {
			rw.Header().Add(name, value)
		}
	}
	rw.WriteHeader(res.StatusCode)
	if _, err := rw.Write(sortedResponse); err != nil {
		logger.Error("Failed to write response", "error", err)
		return
	}
}
// HandleFiltersQuery proxies a resource call to the OpenTSDB
// /api/config/filters endpoint and responds with the sorted list of
// filter-type names (the keys of the JSON object OpenTSDB returns).
// Upstream response headers are forwarded except Content-Encoding and
// Content-Length, which described the original body rather than the
// re-marshaled key list.
func (s *Service) HandleFiltersQuery(rw http.ResponseWriter, req *http.Request) {
	logger := logger.FromContext(req.Context())
	dsInfo, err := s.getDSInfo(req.Context(), backend.PluginConfigFromContext(req.Context()))
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to get datasource info: %v", err), http.StatusInternalServerError)
		return
	}
	u, err := url.Parse(dsInfo.URL)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to parse datasource URL: %v", err), http.StatusInternalServerError)
		return
	}
	// NOTE(review): the leading slash differs from the sibling handlers
	// ("api/aggregators"); path.Join normalizes it, so the result is the same.
	u.Path = path.Join(u.Path, "/api/config/filters")
	httpReq, err := http.NewRequestWithContext(req.Context(), http.MethodGet, u.String(), nil)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to create request: %v", err), http.StatusInternalServerError)
		return
	}
	res, err := dsInfo.HTTPClient.Do(httpReq)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to execute request: %v", err), http.StatusInternalServerError)
		return
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			logger.Error("Failed to close response body", "error", err)
		}
	}()
	responseBody, err := DecodeResponseBody(res, logger)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to decode response: %v", err), http.StatusInternalServerError)
		return
	}
	// Only the top-level key names matter; the filter descriptions are
	// left unparsed as raw JSON.
	var filters map[string]json.RawMessage
	if err := json.Unmarshal(responseBody, &filters); err != nil {
		http.Error(rw, fmt.Sprintf("failed to unmarshal filters response: %v", err), http.StatusInternalServerError)
		return
	}
	keys := make([]string, 0, len(filters))
	for key := range filters {
		keys = append(keys, key)
	}
	// Map iteration order is random; sort for a stable response.
	sort.Strings(keys)
	sortedResponse, err := json.Marshal(keys)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to marshal response: %v", err), http.StatusInternalServerError)
		return
	}
	// Forward upstream headers before WriteHeader; skip the two that no
	// longer match the re-encoded body.
	for name, values := range res.Header {
		if name == "Content-Encoding" || name == "Content-Length" {
			continue
		}
		for _, value := range values {
			rw.Header().Add(name, value)
		}
	}
	rw.WriteHeader(res.StatusCode)
	if _, err := rw.Write(sortedResponse); err != nil {
		logger.Error("Failed to write response", "error", err)
		return
	}
}
// HandleLookupQuery routes /api/search/lookup resource calls to the
// tag-key or tag-key/value lookup handler based on the "type" query
// parameter ("key" or "keyvalue"). Anything else is a 400.
func (s *Service) HandleLookupQuery(rw http.ResponseWriter, req *http.Request) {
	params := req.URL.Query()
	lookupType := params.Get("type")
	if lookupType == "" {
		http.Error(rw, "missing 'type' parameter", http.StatusBadRequest)
		return
	}
	switch lookupType {
	case "key":
		s.HandleKeyLookup(rw, req, params)
	case "keyvalue":
		s.HandleKeyValueLookup(rw, req, params)
	default:
		http.Error(rw, fmt.Sprintf("unsupported type: %s", lookupType), http.StatusBadRequest)
	}
}
// HandleKeyLookup proxies a tag-key lookup for a metric to the OpenTSDB
// /api/search/lookup endpoint and responds with the sorted, de-duplicated
// set of tag keys seen across all results. Upstream response headers are
// forwarded except Content-Encoding and Content-Length, which described
// the original body rather than the re-marshaled key list.
func (s *Service) HandleKeyLookup(rw http.ResponseWriter, req *http.Request, queryParams url.Values) {
	logger := logger.FromContext(req.Context())
	dsInfo, err := s.getDSInfo(req.Context(), backend.PluginConfigFromContext(req.Context()))
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to get datasource info: %v", err), http.StatusInternalServerError)
		return
	}
	metric := queryParams.Get("metric")
	if metric == "" {
		http.Error(rw, "missing 'metric' parameter", http.StatusBadRequest)
		return
	}
	u, err := url.Parse(dsInfo.URL)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to parse datasource URL: %v", err), http.StatusInternalServerError)
		return
	}
	u.Path = path.Join(u.Path, "api/search/lookup")
	lookupQueryParams := u.Query()
	lookupQueryParams.Set("m", metric)
	// NOTE(review): limit is hard-coded to 1000 here while the keyvalue
	// handler uses dsInfo.LookupLimit — presumably mirroring the old
	// frontend behavior; confirm whether this should be configurable too.
	lookupQueryParams.Set("limit", "1000")
	u.RawQuery = lookupQueryParams.Encode()
	httpReq, err := http.NewRequestWithContext(req.Context(), http.MethodGet, u.String(), nil)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to create request: %v", err), http.StatusInternalServerError)
		return
	}
	res, err := dsInfo.HTTPClient.Do(httpReq)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to execute request: %v", err), http.StatusInternalServerError)
		return
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			logger.Error("Failed to close response body", "error", err)
		}
	}()
	responseBody, err := DecodeResponseBody(res, logger)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to decode response: %v", err), http.StatusInternalServerError)
		return
	}
	// Only the tag maps of each result are needed; ignore the rest of
	// the lookup payload.
	var lookupResponse struct {
		Results []struct {
			Tags map[string]string `json:"tags"`
		} `json:"results"`
	}
	if err := json.Unmarshal(responseBody, &lookupResponse); err != nil {
		http.Error(rw, fmt.Sprintf("failed to unmarshal lookup response: %v", err), http.StatusInternalServerError)
		return
	}
	// Collect the distinct tag keys across all results.
	tagKeysMap := make(map[string]bool)
	for _, result := range lookupResponse.Results {
		for tagKey := range result.Tags {
			tagKeysMap[tagKey] = true
		}
	}
	tagKeys := make([]string, 0, len(tagKeysMap))
	for tagKey := range tagKeysMap {
		tagKeys = append(tagKeys, tagKey)
	}
	// Map iteration order is random; sort for a stable response.
	sort.Strings(tagKeys)
	sortedResponse, err := json.Marshal(tagKeys)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to marshal response: %v", err), http.StatusInternalServerError)
		return
	}
	// Forward upstream headers before WriteHeader; skip the two that no
	// longer match the re-encoded body.
	for name, values := range res.Header {
		if name == "Content-Encoding" || name == "Content-Length" {
			continue
		}
		for _, value := range values {
			rw.Header().Add(name, value)
		}
	}
	// Set explicitly after the copy so our JSON content type wins over
	// whatever the upstream sent.
	rw.Header().Set("Content-Type", "application/json")
	rw.WriteHeader(res.StatusCode)
	if _, err := rw.Write(sortedResponse); err != nil {
		logger.Error("Failed to write response", "error", err)
		return
	}
}
// HandleKeyValueLookup resolves the distinct values of a tag key for a
// given metric via the OpenTSDB /api/search/lookup endpoint. The "keys"
// query parameter is a comma-separated list; the first key is the one
// whose values are returned ("key=*"), any remaining keys are passed
// through as additional filter terms. The response is the sorted,
// de-duplicated list of values for that first key. Upstream response
// headers are forwarded except Content-Encoding and Content-Length,
// which described the original body rather than the re-marshaled list.
func (s *Service) HandleKeyValueLookup(rw http.ResponseWriter, req *http.Request, queryParams url.Values) {
	logger := logger.FromContext(req.Context())
	dsInfo, err := s.getDSInfo(req.Context(), backend.PluginConfigFromContext(req.Context()))
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to get datasource info: %v", err), http.StatusInternalServerError)
		return
	}
	metric := queryParams.Get("metric")
	if metric == "" {
		http.Error(rw, "missing 'metric' parameter", http.StatusBadRequest)
		return
	}
	keys := queryParams.Get("keys")
	if keys == "" {
		http.Error(rw, "missing 'keys' parameter", http.StatusBadRequest)
		return
	}
	keysArray := strings.Split(keys, ",")
	for i := range keysArray {
		keysArray[i] = strings.TrimSpace(keysArray[i])
	}
	// strings.Split on a non-empty string always yields at least one
	// element, so a len check here would be dead code. Guard instead
	// against a blank first key (e.g. keys="," or keys=" "), which would
	// otherwise build a malformed filter like "metric{=*}".
	key := keysArray[0]
	if key == "" {
		http.Error(rw, "keys parameter cannot be empty", http.StatusBadRequest)
		return
	}
	// First key becomes the wildcard term whose values we collect; the
	// remaining keys are forwarded verbatim as extra filter terms.
	keysQuery := key + "=*"
	if len(keysArray) > 1 {
		keysQuery += "," + strings.Join(keysArray[1:], ",")
	}
	m := metric + "{" + keysQuery + "}"
	u, err := url.Parse(dsInfo.URL)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to parse datasource URL: %v", err), http.StatusInternalServerError)
		return
	}
	u.Path = path.Join(u.Path, "api/search/lookup")
	lookupQueryParams := u.Query()
	lookupQueryParams.Set("m", m)
	lookupQueryParams.Set("limit", fmt.Sprintf("%d", dsInfo.LookupLimit))
	u.RawQuery = lookupQueryParams.Encode()
	httpReq, err := http.NewRequestWithContext(req.Context(), http.MethodGet, u.String(), nil)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to create request: %v", err), http.StatusInternalServerError)
		return
	}
	res, err := dsInfo.HTTPClient.Do(httpReq)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to execute request: %v", err), http.StatusInternalServerError)
		return
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			logger.Error("Failed to close response body", "error", err)
		}
	}()
	responseBody, err := DecodeResponseBody(res, logger)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to decode response: %v", err), http.StatusInternalServerError)
		return
	}
	// Only the tag maps of each result are needed; ignore the rest of
	// the lookup payload.
	var lookupResponse struct {
		Results []struct {
			Tags map[string]string `json:"tags"`
		} `json:"results"`
	}
	if err := json.Unmarshal(responseBody, &lookupResponse); err != nil {
		http.Error(rw, fmt.Sprintf("failed to unmarshal lookup response: %v", err), http.StatusInternalServerError)
		return
	}
	// Collect the distinct values of the requested key across results.
	tagValuesMap := make(map[string]bool)
	for _, result := range lookupResponse.Results {
		if tagValue, exists := result.Tags[key]; exists {
			tagValuesMap[tagValue] = true
		}
	}
	tagValues := make([]string, 0, len(tagValuesMap))
	for tagValue := range tagValuesMap {
		tagValues = append(tagValues, tagValue)
	}
	// Map iteration order is random; sort for a stable response.
	sort.Strings(tagValues)
	sortedResponse, err := json.Marshal(tagValues)
	if err != nil {
		http.Error(rw, fmt.Sprintf("failed to marshal response: %v", err), http.StatusInternalServerError)
		return
	}
	// Forward upstream headers before WriteHeader; skip the two that no
	// longer match the re-encoded body.
	for name, values := range res.Header {
		if name == "Content-Encoding" || name == "Content-Length" {
			continue
		}
		for _, value := range values {
			rw.Header().Add(name, value)
		}
	}
	// Set explicitly after the copy so our JSON content type wins over
	// whatever the upstream sent.
	rw.Header().Set("Content-Type", "application/json")
	rw.WriteHeader(res.StatusCode)
	if _, err := rw.Write(sortedResponse); err != nil {
		logger.Error("Failed to write response", "error", err)
		return
	}
}

View File

@@ -3,10 +3,12 @@ package opentsdb
import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"net/url"
"path"
"strings"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
@@ -152,6 +154,9 @@ func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthReque
func (s *Service) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
mux := http.NewServeMux()
mux.HandleFunc("/api/suggest", s.HandleSuggestQuery)
mux.HandleFunc("/api/aggregators", s.HandleAggregatorsQuery)
mux.HandleFunc("/api/config/filters", s.HandleFiltersQuery)
mux.HandleFunc("/api/search/lookup", s.HandleLookupQuery)
handler := httpadapter.New(mux)
return handler.CallResource(ctx, req, sender)
@@ -162,28 +167,52 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
dsInfo, err := s.getDSInfo(ctx, req.PluginContext)
if err != nil {
return nil, err
resp := backend.NewQueryDataResponse()
for _, q := range req.Queries {
resp.Responses[q.RefID] = backend.ErrorResponseWithErrorSource(backend.PluginError(err))
}
return resp, nil
}
result := backend.NewQueryDataResponse()
for _, query := range req.Queries {
metric, err := BuildMetric(query)
if err != nil {
result.Responses[query.RefID] = backend.ErrorResponseWithErrorSource(backend.PluginError(err))
continue
}
tsdbQuery := OpenTsdbQuery{
Start: query.TimeRange.From.Unix(),
End: query.TimeRange.To.Unix(),
Queries: []map[string]any{
BuildMetric(query),
metric,
},
}
httpReq, err := CreateRequest(ctx, logger, dsInfo, tsdbQuery)
if err != nil {
return nil, err
var urlErr *url.Error
if errors.As(err, &urlErr) {
result.Responses[query.RefID] = backend.ErrorResponseWithErrorSource(backend.DownstreamError(err))
} else {
result.Responses[query.RefID] = backend.ErrorResponseWithErrorSource(backend.PluginError(err))
}
continue
}
httpRes, err := dsInfo.HTTPClient.Do(httpReq)
if err != nil {
return nil, err
if backend.IsDownstreamHTTPError(err) {
err = backend.DownstreamError(err)
}
var urlErr *url.Error
if errors.As(err, &urlErr) && urlErr.Err != nil && strings.HasPrefix(urlErr.Err.Error(), "unsupported protocol scheme") {
err = backend.DownstreamError(err)
}
result.Responses[query.RefID] = backend.ErrorResponseWithErrorSource(err)
continue
}
defer func() {
@@ -194,7 +223,8 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
queryRes, err := ParseResponse(logger, httpRes, query.RefID, dsInfo.TSDBVersion)
if err != nil {
return nil, err
result.Responses[query.RefID] = backend.ErrorResponseWithErrorSource(backend.DownstreamError(err))
continue
}
result.Responses[query.RefID] = queryRes.Responses[query.RefID]

View File

@@ -86,7 +86,7 @@ func TestBuildMetric(t *testing.T) {
Interval: 30 * time.Second,
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Equal(t, "30s-avg", metric["downsample"], "should use query interval formatted as seconds")
})
@@ -104,7 +104,7 @@ func TestBuildMetric(t *testing.T) {
),
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Equal(t, "500ms-avg", metric["downsample"], "should convert 0.5s to 500ms")
})
@@ -123,7 +123,7 @@ func TestBuildMetric(t *testing.T) {
Interval: 500 * time.Millisecond,
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Equal(t, "500ms-avg", metric["downsample"], "should use query interval formatted as milliseconds")
})
@@ -142,7 +142,7 @@ func TestBuildMetric(t *testing.T) {
Interval: 5 * time.Minute,
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Equal(t, "5m-sum", metric["downsample"], "should use query interval formatted as minutes")
})
@@ -161,7 +161,7 @@ func TestBuildMetric(t *testing.T) {
Interval: 2 * time.Hour,
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Equal(t, "2h-max", metric["downsample"], "should use query interval formatted as hours")
})
@@ -180,7 +180,7 @@ func TestBuildMetric(t *testing.T) {
Interval: 48 * time.Hour,
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Equal(t, "2d-min", metric["downsample"], "should use query interval formatted as days")
})
@@ -199,7 +199,7 @@ func TestBuildMetric(t *testing.T) {
),
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.True(t, metric["explicitTags"].(bool), "explicitTags should be true")
metricTags := metric["tags"].(map[string]any)
@@ -221,7 +221,7 @@ func TestBuildMetric(t *testing.T) {
),
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Nil(t, metric["explicitTags"], "explicitTags should not be present when false")
})
}
@@ -501,7 +501,7 @@ func TestOpenTsdbExecutor(t *testing.T) {
),
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Len(t, metric, 3)
require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -523,7 +523,7 @@ func TestOpenTsdbExecutor(t *testing.T) {
),
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Len(t, metric, 2)
require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -544,7 +544,7 @@ func TestOpenTsdbExecutor(t *testing.T) {
),
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Len(t, metric, 3)
require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -570,7 +570,7 @@ func TestOpenTsdbExecutor(t *testing.T) {
),
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Len(t, metric, 3)
require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -601,7 +601,7 @@ func TestOpenTsdbExecutor(t *testing.T) {
),
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
require.Len(t, metric, 5)
require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -636,7 +636,7 @@ func TestOpenTsdbExecutor(t *testing.T) {
),
}
metric := BuildMetric(query)
metric, _ := BuildMetric(query)
t.Log(metric)
require.Len(t, metric, 5)

View File

@@ -7,9 +7,16 @@ type OpenTsdbQuery struct {
}
type OpenTsdbCommon struct {
Metric string `json:"metric"`
Tags map[string]string `json:"tags"`
AggregateTags []string `json:"aggregateTags"`
Metric string `json:"metric"`
Tags map[string]string `json:"tags"`
AggregateTags []string `json:"aggregateTags"`
Annotations []OpenTsdbAnnotation `json:"annotations,omitempty"`
GlobalAnnotations []OpenTsdbAnnotation `json:"globalAnnotations,omitempty"`
}
type OpenTsdbAnnotation struct {
Description string `json:"description"`
StartTime float64 `json:"startTime"`
}
type OpenTsdbResponse struct {

View File

@@ -44,12 +44,12 @@ func FormatDownsampleInterval(ms int64) string {
return strconv.FormatInt(days, 10) + "d"
}
func BuildMetric(query backend.DataQuery) map[string]any {
func BuildMetric(query backend.DataQuery) (map[string]any, error) {
metric := make(map[string]any)
var model QueryModel
if err := json.Unmarshal(query.JSON, &model); err != nil {
return nil
return nil, err
}
// Setting metric and aggregator
@@ -126,7 +126,7 @@ func BuildMetric(query backend.DataQuery) map[string]any {
metric["explicitTags"] = true
}
return metric
return metric, nil
}
func CreateRequest(ctx context.Context, logger log.Logger, dsInfo *datasourceInfo, data OpenTsdbQuery) (*http.Request, error) {
@@ -198,11 +198,21 @@ func CreateDataFrame(val OpenTsdbCommon, length int, refID string) *data.Frame {
sort.Strings(tagKeys)
tagKeys = append(tagKeys, val.AggregateTags...)
custom := map[string]any{
"tagKeys": tagKeys,
}
if len(val.Annotations) > 0 {
custom["annotations"] = val.Annotations
}
if len(val.GlobalAnnotations) > 0 {
custom["globalAnnotations"] = val.GlobalAnnotations
}
frame := data.NewFrameOfFieldTypes(val.Metric, length, data.FieldTypeTime, data.FieldTypeFloat64)
frame.Meta = &data.FrameMeta{
Type: data.FrameTypeTimeSeriesMulti,
TypeVersion: data.FrameTypeVersion{0, 1},
Custom: map[string]any{"tagKeys": tagKeys},
Custom: custom,
}
frame.RefID = refID
timeField := frame.Fields[0]

View File

@@ -77,8 +77,28 @@ export default class OpenTsDatasource extends DataSourceWithBackend<OpenTsdbQuer
};
}
// Called once per panel (graph)
query(options: DataQueryRequest<OpenTsdbQuery>): Observable<DataQueryResponse> {
if (options.targets.some((target: OpenTsdbQuery) => target.fromAnnotations)) {
const streams: Array<Observable<DataQueryResponse>> = [];
for (const annotation of options.targets) {
if (annotation.target) {
streams.push(
new Observable((subscriber) => {
this.annotationEvent(options, annotation)
.then((events) => subscriber.next({ data: [toDataFrame(events)] }))
.catch((ex) => {
return subscriber.next({ data: [toDataFrame([])] });
})
.finally(() => subscriber.complete());
})
);
}
}
return merge(...streams);
}
if (config.featureToggles.opentsdbBackendMigration) {
const hasValidTargets = options.targets.some((target) => target.metric && !target.hide);
if (!hasValidTargets) {
@@ -93,31 +113,6 @@ export default class OpenTsDatasource extends DataSourceWithBackend<OpenTsdbQuer
);
}
// migrate annotations
if (options.targets.some((target: OpenTsdbQuery) => target.fromAnnotations)) {
const streams: Array<Observable<DataQueryResponse>> = [];
for (const annotation of options.targets) {
if (annotation.target) {
streams.push(
new Observable((subscriber) => {
this.annotationEvent(options, annotation)
.then((events) => subscriber.next({ data: [toDataFrame(events)] }))
.catch((ex) => {
// grafana fetch throws the error so for annotation consistency among datasources
// we return an empty array which displays as 'no events found'
// in the annotation editor
return subscriber.next({ data: [toDataFrame([])] });
})
.finally(() => subscriber.complete());
})
);
}
}
return merge(...streams);
}
const start = this.convertToTSDBTime(options.range.raw.from, false, options.timezone);
const end = this.convertToTSDBTime(options.range.raw.to, true, options.timezone);
const qs: any[] = [];
@@ -181,6 +176,50 @@ export default class OpenTsDatasource extends DataSourceWithBackend<OpenTsdbQuer
}
annotationEvent(options: DataQueryRequest, annotation: OpenTsdbQuery): Promise<AnnotationEvent[]> {
if (config.featureToggles.opentsdbBackendMigration) {
const query: OpenTsdbQuery = {
refId: annotation.refId ?? 'Anno',
metric: annotation.target,
aggregator: 'sum',
fromAnnotations: true,
isGlobal: annotation.isGlobal,
disableDownsampling: true,
};
const queryRequest: DataQueryRequest<OpenTsdbQuery> = {
...options,
targets: [query],
};
return lastValueFrom(
super.query(queryRequest).pipe(
map((response) => {
const eventList: AnnotationEvent[] = [];
for (const frame of response.data) {
const annotationObject = annotation.isGlobal
? frame.meta?.custom?.globalAnnotations
: frame.meta?.custom?.annotations;
if (annotationObject && isArray(annotationObject)) {
annotationObject.forEach((ann) => {
const event: AnnotationEvent = {
text: ann.description,
time: Math.floor(ann.startTime) * 1000,
annotation: annotation,
};
eventList.push(event);
});
}
}
return eventList;
})
)
);
}
const start = this.convertToTSDBTime(options.range.raw.from, false, options.timezone);
const end = this.convertToTSDBTime(options.range.raw.to, true, options.timezone);
const qs = [];
@@ -306,6 +345,10 @@ export default class OpenTsDatasource extends DataSourceWithBackend<OpenTsdbQuer
return of([]);
}
if (config.featureToggles.opentsdbBackendMigration) {
return from(this.getResource('api/search/lookup', { type: 'keyvalue', metric, keys }));
}
const keysArray = keys.split(',').map((key) => {
return key.trim();
});
@@ -337,6 +380,10 @@ export default class OpenTsDatasource extends DataSourceWithBackend<OpenTsdbQuer
return of([]);
}
if (config.featureToggles.opentsdbBackendMigration) {
return from(this.getResource('api/search/lookup', { type: 'key', metric }));
}
return this._get('/api/search/lookup', { m: metric, limit: 1000 }).pipe(
map((result) => {
result = result.data.results;
@@ -450,6 +497,11 @@ export default class OpenTsDatasource extends DataSourceWithBackend<OpenTsdbQuer
return this.aggregatorsPromise;
}
if (config.featureToggles.opentsdbBackendMigration) {
this.aggregatorsPromise = this.getResource('api/aggregators');
return this.aggregatorsPromise;
}
this.aggregatorsPromise = lastValueFrom(
this._get('/api/aggregators').pipe(
map((result) => {
@@ -468,6 +520,11 @@ export default class OpenTsDatasource extends DataSourceWithBackend<OpenTsdbQuer
return this.filterTypesPromise;
}
if (config.featureToggles.opentsdbBackendMigration) {
this.filterTypesPromise = this.getResource('api/config/filters');
return this.filterTypesPromise;
}
this.filterTypesPromise = lastValueFrom(
this._get('/api/config/filters').pipe(
map((result) => {

File diff suppressed because it is too large Load Diff