Compare commits

...

7 Commits

Author SHA1 Message Date
Matheus Macabu
51d876e5c7 Reporting: Add feature toggle for CSV encoding options 2025-12-19 11:25:38 +01:00
Misi
285f2b1d32 Auth: Allow service accounts to authenticate to ST Grafana (#115536)
* Allow service accounts to authenticate via ext_jwt

* Address feedback
2025-12-19 09:28:20 +00:00
Tania
7360194ab9 Chore: Remove unifiedRequestLog feature flag (#115559)
Chore: Remove unifiedRequestLog feature flag
2025-12-19 09:55:47 +01:00
Will Assis
99f5f14de7 unified-storage: move rvmanager into its own package (#115445)
* unified-storage: move rvmanager into its own package so it can be reused with sqlkv later
2025-12-18 18:35:32 -05:00
Collin Fingar
606a59584a Saved Queries: Pass editor ref for dynamic dropdown display (#114321)
* Saved Queries: Pass editor ref for dynamic dropdown display

* Updated docs per feedback

* Update docs/sources/visualizations/dashboards/build-dashboards/annotate-visualizations/index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/dashboards/build-dashboards/annotate-visualizations/index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/dashboards/build-dashboards/create-dashboard/index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/dashboards/build-dashboards/create-dashboard/index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/explore/get-started-with-explore.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

---------

Co-authored-by: Nathan Marrs <nathanielmarrs@gmail.com>
Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>
2025-12-18 18:18:24 -05:00
Nathan Marrs
0ec716a433 Embedded Dashboard Panels: Add Grafana Branding (#115198)
* feat: add Grafana logo to embedded panels

- Add Grafana logo watermark to solo panel view (embedded panels)
- Logo appears in top-right corner with subtle background container
- Logo hides on hover to avoid interfering with panel content
- Uses React state to track hover for reliable behavior across nested elements
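A minimal TSX sketch of the hover-to-hide behavior described above, assuming a simple wrapper component; the component and prop names here are illustrative, not the final SoloPanelPage API:

import { ReactNode, useState } from 'react';

// Track hover on the panel container with React state so the logo hides reliably
// even when the pointer moves across nested panel elements.
export function HoverAwareSoloPanel({ children }: { children: ReactNode }) {
  const [isHovered, setIsHovered] = useState(false);

  return (
    <div onMouseEnter={() => setIsHovered(true)} onMouseLeave={() => setIsHovered(false)}>
      {children}
      {/* Hide the logo while hovered to avoid covering panel content */}
      {!isHovered && <div aria-label="Grafana logo">Powered by Grafana</div>}
    </div>
  );
}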

* minor formatting

* update changes to match public dashboards styling

* match styles of public dashboards

* feat: add responsive Grafana branding to embedded panels

- Add 'Powered by Grafana' branding with text logo to solo panel view
- Implement responsive scaling based on panel dimensions (0.6x to 1.0x)
- Logo and text scale proportionally with panel size
- Branding hides on hover to avoid interfering with panel content
- Matches public dashboard branding pattern for consistency
- Uses ResizeObserver for efficient responsive updates
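A sketch of the responsive scaling described above, assuming a linear ramp on container width clamped to the 0.6x–1.0x range; the hook name and breakpoints are hypothetical, and only the clamp range and the use of ResizeObserver come from the commit message:

import { useEffect, useRef, useState } from 'react';

// Observe the panel container and derive a branding scale between 0.6 and 1.0.
export function useBrandingScale(minWidth = 200, maxWidth = 800) {
  const ref = useRef<HTMLDivElement | null>(null);
  const [scale, setScale] = useState(1);

  useEffect(() => {
    const el = ref.current;
    if (!el) {
      return;
    }
    const observer = new ResizeObserver((entries) => {
      const width = entries[0].contentRect.width;
      const t = (width - minWidth) / (maxWidth - minWidth);
      setScale(Math.min(1, Math.max(0.6, 0.6 + 0.4 * t)));
    });
    observer.observe(el);
    return () => observer.disconnect(); // clean up the observer on unmount
  }, [minWidth, maxWidth]);

  return { ref, scale };
}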

* feat: add Grafana branding to embedded solo panels

- Add 'Powered by Grafana' branding with text logo to embedded panels
- Create SoloPanelPageLogo component for reusable branding
- Implement responsive scaling based on panel dimensions
- Add hover-to-hide functionality to avoid content overlap
- Logo scales between 0.6x and 1.0x based on panel size

* refactor: move scale calculation into SoloPanelPageLogo component

- Move responsive scale calculation logic from SoloPanelRenderer to SoloPanelPageLogo
- Logo component now manages its own scaling based on container dimensions
- Improves separation of concerns and component encapsulation

* feat: add hideLogo query parameter to disable embedded panel branding

- Add hideLogo query parameter support to SoloPanelPage
- Logo can be hidden via ?hideLogo, ?hideLogo=true, or ?hideLogo=1
- Useful for customers who want to disable branding and for image rendering scenarios
- Update Props interface to include hideLogo in queryParams type
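A standalone sketch of the hideLogo parsing described above, consistent with the helper mocked in the SoloPanelPage test added further down in this diff (the exported name is taken from that mock; the real implementation may differ in detail):

// The raw query-param value may arrive as a boolean (?hideLogo), a string
// (?hideLogo=true, ?hideLogo=1), or an array when the param is repeated.
export function shouldHideSoloPanelLogo(hideLogo?: unknown): boolean {
  if (hideLogo === undefined) {
    return false;
  }
  if (typeof hideLogo === 'boolean') {
    return hideLogo;
  }
  if (Array.isArray(hideLogo)) {
    hideLogo = hideLogo[0] ?? '';
  }
  // Coerce to string before trimming so non-string values never hit trim().
  const normalized = String(hideLogo).trim().toLowerCase();
  return normalized !== 'false' && normalized !== '0';
}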

* feat: hide logo in panel image renderer URLs

- Add hideLogo=true parameter to image renderer URLs in ShareLinkTab
- Ensures logo is hidden when generating panel images through share feature
- Update test to expect hideLogo=true in render URL

* feat: hide logo in old dashboard sharing panel image URLs

- Add hideLogo=true parameter to buildImageUrl in ShareModal utils
- Ensures logo is hidden when generating panel images through old share modal
- Update all ShareLink tests to expect hideLogo=true in render URLs

* test: add comprehensive tests for SoloPanelPage and SoloPanelPageLogo

- Add SoloPanelPageLogo tests covering rendering, hover behavior, theme selection, and scaling
- Add SoloPanelPage tests covering logo visibility based on hideLogo prop
- Test logo hiding functionality (most important behavior)
- Test responsive scaling based on container dimensions
- Test ResizeObserver integration
- All 14 tests passing
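A short Jest sketch of the kind of coverage listed above for the hideLogo helper; the cases mirror the accepted values called out earlier in this PR, and the import path assumes the helper lives next to SoloPanelPageLogo:

import { shouldHideSoloPanelLogo } from './SoloPanelPageLogo';

describe('shouldHideSoloPanelLogo', () => {
  it('hides the logo for ?hideLogo, ?hideLogo=true and ?hideLogo=1', () => {
    expect(shouldHideSoloPanelLogo(true)).toBe(true);
    expect(shouldHideSoloPanelLogo('true')).toBe(true);
    expect(shouldHideSoloPanelLogo('1')).toBe(true);
  });

  it('keeps the logo by default and for explicit opt-outs', () => {
    expect(shouldHideSoloPanelLogo(undefined)).toBe(false);
    expect(shouldHideSoloPanelLogo('false')).toBe(false);
    expect(shouldHideSoloPanelLogo('0')).toBe(false);
  });
});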

* refactor: centralize hideLogo handling in SoloPanelPageLogo

Move hideLogo parsing and decision-making into SoloPanelPageLogo so SoloPanelPage/SoloPanelRenderer only pass through the raw query param value.

* chore: clean up solo logo test and share link params

Remove a duplicate SVG mock in SoloPanelPageLogo.test, and simplify ShareLinkTab image URL building without changing behavior.

* chore: revert ShareLinkTab image query refactor

Restore the previous image URL query-param mutation logic in ShareLinkTab to reduce risk.

* chore: set hideLogo once for ShareLinkTab image URLs

Avoid passing hideLogo twice when building the rendered image URL.

* fix: handle boolean hideLogo query param in SoloPanelPageLogo

Handle query params that are represented as booleans (e.g., ?hideLogo) and arrays, and avoid calling trim() on non-strings.

* fix i18n

* fix(dashboard-scene): address SoloPanelPageLogo review feedback

Avoid double-scaling logo margin, clarify scaling comments, and extend tests for null/array values and ResizeObserver cleanup.

* update margin left on logo to better match text spacing
2025-12-18 15:01:16 -08:00
Leon Sorokin
72e1f1e546 Heatmap: Support for linear y axis (#113337)
* wip

* boop

* Base factor on data

* Add some basic option control

* Remove old comments

* Add feature flag

* Apply feature flag to axis options

* Turn factor calculation into exported function

* Simplify bucket factor function

* Clarify comments

* Fix cell sizing of pre-bucketed heatmaps with log

* Remove unnecessary category change

* Consolidate editor for calculate from data no

* Update bucket function sanity checks

* Wire up scale config from yBucketScale

* Hide bucket controls for heatmap cells

* Fix splits

* Add test coverage

* Fix failing test

* Add basic util test coverage

* Fix tooltip for legacy in linear

* Fix y bucket option width to be consistent

* Hide tick alignment for explicit scale modes

* Clarify comment

* Make sure units are passed properly for linear

* Remove null assertion operator

* Clean up nested ternary

* Add type protection to scaleLog

* Remove repeated code for ySize calcs

* Remove ternary for scaleDistribution

* Add test coverage for YBucketScaleEditor

* Add isHeatmapSparse function to tooltip utils

* Create calculateYSizeDivisor util function

* Fix y axis min and max options and extend to log

* Add toLogBase test coverage

* Create applyExplicitMinMax function

* Add additional test coverage for scale editor

* Run i18n-extract

* Update eslint suppressions

---------

Co-authored-by: Drew Slobodnjak <60050885+drew08t@users.noreply.github.com>
2025-12-18 14:45:00 -08:00
56 changed files with 2429 additions and 258 deletions

View File

@@ -31,7 +31,6 @@ Most [generally available](https://grafana.com/docs/release-life-cycle/#general-
| `logsContextDatasourceUi` | Allow datasource to provide custom UI for context view | Yes |
| `lokiQuerySplitting` | Split large interval queries into subqueries with smaller time intervals | Yes |
| `influxdbBackendMigration` | Query InfluxDB InfluxQL without the proxy | Yes |
| `unifiedRequestLog` | Writes error logs to the request logger | Yes |
| `logsExploreTableVisualisation` | A table visualisation for logs in Explore | Yes |
| `awsDatasourcesTempCredentials` | Support temporary security credentials in AWS plugins for Grafana Cloud customers | Yes |
| `awsAsyncQueryCaching` | Enable caching for async queries for Redshift and Athena. Requires that the datasource has caching and async query support enabled | Yes |

View File

@@ -163,9 +163,9 @@ To add a new annotation query to a dashboard, follow these steps:
1. To create a query, do one of the following:
- Write or construct a query in the query language of your data source. The annotation query options are different for each data source. For information about annotations in a specific data source, refer to the specific [data source](ref:data-source) topic.
- Click **Replace with saved query** to reuse a [saved query](ref:saved-queries).
- Open the **Saved queries** drop-down menu and click **Replace query** to reuse a [saved query](ref:saved-queries).
1. (Optional) To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
1. (Optional) To [save the query](ref:save-query) for reuse, open the **Saved queries** drop-down menu and click the **Save query** option.
1. (Optional) Click **Test annotation query** to ensure that the query is working properly.
1. (Optional) To add subsequent queries, click **+ Add query** or **+ Add from saved queries**, and test them as many times as needed.

View File

@@ -125,9 +125,9 @@ Dashboards and panels allow you to show your data in visual form. Each panel nee
1. To create a query, do one of the following:
- Write or construct a query in the query language of your data source.
- Click **Replace with saved query** to reuse a [saved query](ref:saved-queries).
- Open the **Saved queries** drop-down menu and click **Replace query** to reuse a [saved query](ref:saved-queries).
1. (Optional) To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
1. (Optional) To [save the query](ref:save-query) for reuse, open the **Saved queries** drop-down menu and click the **Save query** option.
1. Click **Refresh** to query the data source.
1. (Optional) To add subsequent queries, click **+ Add query** or **+ Add from saved queries**, and refresh the data source as many times as needed.

View File

@@ -71,8 +71,9 @@ Explore consists of a toolbar, outline, query editor, the ability to add multipl
- **Run query** - Click to run your query.
- **Query editor** - Interface where you construct the query for a specific data source. Query editor elements differ based on data source. In order to run queries across multiple data sources you need to select **Mixed** from the data source picker.
- **Save query** - To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
- **Replace with saved query** - Reuse a saved query.
- **Saved queries**:
- **Save query** - To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
- **Replace query** - Reuse a saved query.
- **+ Add query** - Add an additional query.
- **+ Add from saved queries** - Add an additional query by reusing a saved query.

View File

@@ -88,8 +88,9 @@ The data section contains tabs where you enter queries, transform your data, and
- **Queries**
- Select your data source. You can also set or update the data source in existing dashboards using the drop-down menu in the **Queries** tab.
- **Save query** - To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
- **Replace with saved query** - Reuse a saved query.
- **Saved queries**:
- **Save query** - To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
- **Replace query** - Reuse a saved query.
- **+ Add query** - Add an additional query.
- **+ Add from saved queries** - Add an additional query by reusing a saved query.

View File

@@ -156,11 +156,11 @@ In the **Saved queries** drawer, you can:
- Edit a query title, description, tags, or the availability of the query to other users in your organization. By default, saved queries are locked for editing.
- When you access the **Saved queries** drawer from Explore, you can use the **Edit in Explore** option to edit the body of a query.
To access your saved queries, click **+ Add from saved queries** or **Replace with saved query** in the query editor:
To access your saved queries, click **+ Add from saved queries** or open the **Saved queries** drop-down menu and click **Replace query** in the query editor:
{{< figure src="/media/docs/grafana/dashboards/screenshot-use-saved-queries-v12.3.png" max-width="750px" alt="Access saved queries" >}}
Clicking **+ Add from saved queries** adds an additional query, while clicking **Replace with saved query** updates your existing query.
Clicking **+ Add from saved queries** adds an additional query, while clicking **Replace query** in the **Saved queries** drop-down menu updates your existing query.
{{< admonition type="note" >}}
Users with Admin and Editor roles can create and save queries for reuse.
@@ -172,7 +172,7 @@ Viewers can only reuse queries.
To save a query you've created:
1. From the query editor, click the **Save query** icon:
1. From the query editor, open the **Saved queries** drop-down menu and click the **Save query** option:
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-save-query-v12.2.png" max-width="750px" alt="Save a query" >}}
@@ -227,7 +227,7 @@ To add a query, follow these steps:
1. To create a query, do one of the following:
- Write or construct a query in the query language of your data source.
- Click **Replace with saved query** to reuse a saved query.
- Open the **Saved queries** drop-down menu and click **Replace query** to reuse a saved query.
{{< admonition type="note" >}}
[Saved queries](#saved-queries) is currently in [public preview](https://grafana.com/docs/release-life-cycle/). Grafana Labs offers limited support, and breaking changes might occur prior to the feature being made generally available.
@@ -235,7 +235,7 @@ To add a query, follow these steps:
This feature is only available on Grafana Enterprise and Grafana Cloud.
{{< /admonition >}}
1. (Optional) To [save the query](#save-a-query) for reuse, click the **Save query** button (or icon).
1. (Optional) To [save the query](#save-a-query) for reuse, click the **Save query** option in the **Saved queries** drop-down menu.
1. (Optional) Click **+ Add query** or **Add from saved queries** to add more queries as needed.
1. Click **Run queries**.

View File

@@ -4339,7 +4339,7 @@
},
"public/app/plugins/panel/heatmap/utils.ts": {
"@typescript-eslint/consistent-type-assertions": {
"count": 16
"count": 14
}
},
"public/app/plugins/panel/histogram/Histogram.tsx": {

View File

@@ -126,11 +126,6 @@ export interface FeatureToggles {
*/
disableSSEDataplane?: boolean;
/**
* Writes error logs to the request logger
* @default true
*/
unifiedRequestLog?: boolean;
/**
* Uses JWT-based auth for rendering instead of relying on remote cache
*/
renderAuthJWT?: boolean;
@@ -212,6 +207,10 @@ export interface FeatureToggles {
*/
reportingRetries?: boolean;
/**
* Enables CSV encoding options in the reporting feature
*/
reportingCsvEncodingOptions?: boolean;
/**
* Send query to the same datasource in a single request when using server side expressions. The `cloudWatchBatchQueries` feature toggle should be enabled if this used with CloudWatch.
*/
sseGroupByDatasource?: boolean;
@@ -1169,6 +1168,11 @@ export interface FeatureToggles {
*/
externalVizSuggestions?: boolean;
/**
* Enable Y-axis scale configuration options for pre-bucketed heatmap data (heatmap-rows)
* @default false
*/
heatmapRowsAxisOptions?: boolean;
/**
* Restrict PanelChrome contents with overflow: hidden;
* @default true
*/
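Since heatmapRowsAxisOptions is a frontend-only toggle, a minimal sketch of how panel code would typically gate the new axis options, assuming the usual config.featureToggles accessor from @grafana/runtime:

import { config } from '@grafana/runtime';

// The flag defaults to false (experimental), so the new Y-axis scale editor
// stays hidden unless the toggle is enabled.
export function showYBucketScaleEditor(): boolean {
  return Boolean(config.featureToggles.heatmapRowsAxisOptions);
}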

View File

@@ -185,6 +185,10 @@ export interface RowsHeatmapOptions {
* Sets the name of the cell when not calculating from data
*/
value?: string;
/**
* Controls the scale distribution of the y-axis buckets
*/
yBucketScale?: ui.ScaleDistributionConfig;
}
export interface Options {
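A hypothetical options fragment showing the new yBucketScale field in use; it assumes the rows options are nested under a rowsFrame field in the heatmap panel options and that ScaleDistribution/ScaleDistributionConfig come from @grafana/schema, as the ui.ScaleDistributionConfig reference above suggests:

import { ScaleDistribution, ScaleDistributionConfig } from '@grafana/schema';

// Configure pre-bucketed (heatmap-rows) data to use a linear y-axis.
const yBucketScale: ScaleDistributionConfig = {
  type: ScaleDistribution.Linear,
};

const rowsFrame = {
  value: 'Count', // cell name when not calculated from data
  yBucketScale,
};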

View File

@@ -64,10 +64,7 @@ func (l *loggerImpl) Middleware() web.Middleware {
// put the start time on context so we can measure it later.
r = r.WithContext(log.InitstartTime(r.Context(), time.Now()))
//nolint:staticcheck // not yet migrated to OpenFeature
if l.flags.IsEnabled(r.Context(), featuremgmt.FlagUnifiedRequestLog) {
r = r.WithContext(errutil.SetUnifiedLogging(r.Context()))
}
r = r.WithContext(errutil.SetUnifiedLogging(r.Context()))
rw := web.Rw(w, r)
next.ServeHTTP(rw, r)

View File

@@ -131,7 +131,8 @@ func (s *ExtendedJWT) authenticateAsUser(
return nil, errExtJWTInvalid.Errorf("failed to parse id token subject: %w", err)
}
if !claims.IsIdentityType(t, claims.TypeUser) {
// TODO: How to support other identity types like render and anonymous here?
if !claims.IsIdentityType(t, claims.TypeUser, claims.TypeServiceAccount) {
return nil, errExtJWTInvalidSubject.Errorf("unexpected identity: %s", idTokenClaims.Subject)
}

View File

@@ -53,6 +53,17 @@ var (
Namespace: "default", // org ID of 1 is special and translates to default
},
}
validIDTokenClaimsWithServiceAccount = idTokenClaims{
Claims: jwt.Claims{
Subject: "service-account:3",
Expiry: jwt.NewNumericDate(time.Date(2023, 5, 3, 0, 0, 0, 0, time.UTC)),
IssuedAt: jwt.NewNumericDate(time.Date(2023, 5, 2, 0, 0, 0, 0, time.UTC)),
},
Rest: authnlib.IDTokenClaims{
AuthenticatedBy: "extended_jwt",
Namespace: "default", // org ID of 1 is special and translates to default
},
}
validIDTokenClaimsWithStackSet = idTokenClaims{
Claims: jwt.Claims{
Subject: "user:2",
@@ -118,7 +129,7 @@ var (
}
invalidSubjectIDTokenClaims = idTokenClaims{
Claims: jwt.Claims{
Subject: "service-account:2",
Subject: "anonymous:2",
Expiry: jwt.NewNumericDate(time.Date(2023, 5, 3, 0, 0, 0, 0, time.UTC)),
IssuedAt: jwt.NewNumericDate(time.Date(2023, 5, 2, 0, 0, 0, 0, time.UTC)),
},
@@ -286,6 +297,29 @@ func TestExtendedJWT_Authenticate(t *testing.T) {
},
},
},
{
name: "should authenticate as service account",
accessToken: &validAccessTokenClaims,
idToken: &validIDTokenClaimsWithServiceAccount,
orgID: 1,
want: &authn.Identity{
ID: "3",
Type: claims.TypeServiceAccount,
OrgID: 1,
AccessTokenClaims: &validAccessTokenClaims,
IDTokenClaims: &validIDTokenClaimsWithServiceAccount,
Namespace: "default",
AuthenticatedBy: "extendedjwt",
AuthID: "access-policy:this-uid",
ClientParams: authn.ClientParams{
FetchSyncedUser: true,
SyncPermissions: true,
FetchPermissionsParams: authn.FetchPermissionsParams{
RestrictedActions: []string{"dashboards:create", "folders:read", "datasources:explore", "datasources.insights:read"},
},
},
},
},
{
name: "should authenticate as user in the user namespace",
accessToken: &validAccessTokenClaimsWildcard,

View File

@@ -185,13 +185,6 @@ var (
Stage: FeatureStageExperimental,
Owner: grafanaDatasourcesCoreServicesSquad,
},
{
Name: "unifiedRequestLog",
Description: "Writes error logs to the request logger",
Stage: FeatureStageGeneralAvailability,
Owner: grafanaBackendGroup,
Expression: "true",
},
{
Name: "renderAuthJWT",
Description: "Uses JWT-based auth for rendering instead of relying on remote cache",
@@ -329,6 +322,13 @@ var (
Owner: grafanaOperatorExperienceSquad,
RequiresRestart: true,
},
{
Name: "reportingCsvEncodingOptions",
Description: "Enables CSV encoding options in the reporting feature",
Stage: FeatureStageExperimental,
FrontendOnly: false,
Owner: grafanaOperatorExperienceSquad,
},
{
Name: "sseGroupByDatasource",
Description: "Send query to the same datasource in a single request when using server side expressions. The `cloudWatchBatchQueries` feature toggle should be enabled if this used with CloudWatch.",
@@ -1928,6 +1928,14 @@ var (
Owner: grafanaDatavizSquad,
Expression: "false",
},
{
Name: "heatmapRowsAxisOptions",
Description: "Enable Y-axis scale configuration options for pre-bucketed heatmap data (heatmap-rows)",
Stage: FeatureStageExperimental,
FrontendOnly: true,
Owner: grafanaDatavizSquad,
Expression: "false",
},
{
Name: "preventPanelChromeOverflow",
Description: "Restrict PanelChrome contents with overflow: hidden;",

View File

@@ -24,7 +24,6 @@ influxqlStreamingParser,experimental,@grafana/partner-datasources,false,false,fa
influxdbRunQueriesInParallel,privatePreview,@grafana/partner-datasources,false,false,false
lokiLogsDataplane,experimental,@grafana/observability-logs,false,false,false
disableSSEDataplane,experimental,@grafana/grafana-datasources-core-services,false,false,false
unifiedRequestLog,GA,@grafana/grafana-backend-group,false,false,false
renderAuthJWT,preview,@grafana/grafana-operator-experience-squad,false,false,false
refactorVariablesTimeRange,preview,@grafana/dashboards-squad,false,false,false
faroDatasourceSelector,preview,@grafana/app-o11y,false,false,true
@@ -44,6 +43,7 @@ configurableSchedulerTick,experimental,@grafana/alerting-squad,false,true,false
dashgpt,GA,@grafana/dashboards-squad,false,false,true
aiGeneratedDashboardChanges,experimental,@grafana/dashboards-squad,false,false,true
reportingRetries,preview,@grafana/grafana-operator-experience-squad,false,true,false
reportingCsvEncodingOptions,experimental,@grafana/grafana-operator-experience-squad,false,false,false
sseGroupByDatasource,experimental,@grafana/grafana-datasources-core-services,false,false,false
lokiRunQueriesInParallel,privatePreview,@grafana/observability-logs,false,false,false
externalServiceAccounts,preview,@grafana/identity-access-team,false,false,false
@@ -262,6 +262,7 @@ pluginInstallAPISync,experimental,@grafana/plugins-platform-backend,false,false,
newGauge,experimental,@grafana/dataviz-squad,false,false,true
newVizSuggestions,preview,@grafana/dataviz-squad,false,false,true
externalVizSuggestions,experimental,@grafana/dataviz-squad,false,false,true
heatmapRowsAxisOptions,experimental,@grafana/dataviz-squad,false,false,true
preventPanelChromeOverflow,preview,@grafana/grafana-frontend-platform,false,false,true
jaegerEnableGrpcEndpoint,experimental,@grafana/oss-big-tent,false,false,false
pluginStoreServiceLoading,experimental,@grafana/plugins-platform-backend,false,false,false

View File

@@ -79,10 +79,6 @@ const (
// Disables dataplane specific processing in server side expressions.
FlagDisableSSEDataplane = "disableSSEDataplane"
// FlagUnifiedRequestLog
// Writes error logs to the request logger
FlagUnifiedRequestLog = "unifiedRequestLog"
// FlagRenderAuthJWT
// Uses JWT-based auth for rendering instead of relying on remote cache
FlagRenderAuthJWT = "renderAuthJWT"
@@ -139,6 +135,10 @@ const (
// Enables rendering retries for the reporting feature
FlagReportingRetries = "reportingRetries"
// FlagReportingCsvEncodingOptions
// Enables CSV encoding options in the reporting feature
FlagReportingCsvEncodingOptions = "reportingCsvEncodingOptions"
// FlagSseGroupByDatasource
// Send query to the same datasource in a single request when using server side expressions. The `cloudWatchBatchQueries` feature toggle should be enabled if this used with CloudWatch.
FlagSseGroupByDatasource = "sseGroupByDatasource"

View File

@@ -1647,6 +1647,20 @@
"codeowner": "@grafana/search-and-storage"
}
},
{
"metadata": {
"name": "heatmapRowsAxisOptions",
"resourceVersion": "1765353244400",
"creationTimestamp": "2025-12-10T07:54:04Z"
},
"spec": {
"description": "Enable Y-axis scale configuration options for pre-bucketed heatmap data (heatmap-rows)",
"stage": "experimental",
"codeowner": "@grafana/dataviz-squad",
"frontend": true,
"expression": "false"
}
},
{
"metadata": {
"name": "improvedExternalSessionHandling",
@@ -3067,6 +3081,18 @@
"hideFromDocs": true
}
},
{
"metadata": {
"name": "reportingCsvEncodingOptions",
"resourceVersion": "1766080709938",
"creationTimestamp": "2025-12-18T17:58:29Z"
},
"spec": {
"description": "Enables CSV encoding options in the reporting feature",
"stage": "experimental",
"codeowner": "@grafana/grafana-operator-experience-squad"
}
},
{
"metadata": {
"name": "reportingRetries",
@@ -3501,7 +3527,8 @@
"metadata": {
"name": "unifiedRequestLog",
"resourceVersion": "1764664939750",
"creationTimestamp": "2023-03-31T13:38:09Z"
"creationTimestamp": "2023-03-31T13:38:09Z",
"deletionTimestamp": "2025-12-18T14:21:02Z"
},
"spec": {
"description": "Writes error logs to the request logger",

View File

@@ -31,6 +31,7 @@ import (
"github.com/grafana/grafana/pkg/storage/unified/resourcepb"
"github.com/grafana/grafana/pkg/storage/unified/sql/db"
"github.com/grafana/grafana/pkg/storage/unified/sql/dbutil"
"github.com/grafana/grafana/pkg/storage/unified/sql/rvmanager"
"github.com/grafana/grafana/pkg/storage/unified/sql/sqltemplate"
"github.com/grafana/grafana/pkg/util/debouncer"
)
@@ -126,7 +127,7 @@ type backend struct {
notifier eventNotifier
// resource version manager
rvManager *resourceVersionManager
rvManager *rvmanager.ResourceVersionManager
// testing
simulatedNetworkLatency time.Duration
@@ -163,7 +164,7 @@ func (b *backend) initLocked(ctx context.Context) error {
}
// Initialize ResourceVersionManager
rvManager, err := NewResourceVersionManager(ResourceManagerOptions{
rvManager, err := rvmanager.NewResourceVersionManager(rvmanager.ResourceManagerOptions{
Dialect: b.dialect,
DB: b.db,
})
@@ -928,12 +929,12 @@ func (b *backend) listLatestRVs(ctx context.Context) (groupResourceRV, error) {
func (b *backend) fetchLatestRV(ctx context.Context, x db.ContextExecer, d sqltemplate.Dialect, group, resource string) (int64, error) {
ctx, span := tracer.Start(ctx, "sql.backend.fetchLatestRV")
defer span.End()
res, err := dbutil.QueryRow(ctx, x, sqlResourceVersionGet, sqlResourceVersionGetRequest{
res, err := dbutil.QueryRow(ctx, x, rvmanager.SqlResourceVersionGet, rvmanager.SqlResourceVersionGetRequest{
SQLTemplate: sqltemplate.New(d),
Group: group,
Resource: resource,
ReadOnly: true,
Response: new(resourceVersionResponse),
Response: new(rvmanager.ResourceVersionResponse),
})
if errors.Is(err, sql.ErrNoRows) {
return 1, nil

View File

@@ -40,6 +40,26 @@ type testBackend struct {
test.TestDBProvider
}
func expectSuccessfulResourceVersionLock(t *testing.T, dbp test.TestDBProvider, rv int64, timestamp int64) {
dbp.SQLMock.ExpectQuery("select resource_version, unix_timestamp for update").
WillReturnRows(sqlmock.NewRows([]string{"resource_version", "unix_timestamp"}).
AddRow(rv, timestamp))
}
func expectSuccessfulResourceVersionSaveRV(t *testing.T, dbp test.TestDBProvider) {
dbp.SQLMock.ExpectExec("update resource set resource_version").WillReturnResult(sqlmock.NewResult(1, 1))
dbp.SQLMock.ExpectExec("update resource_history set resource_version").WillReturnResult(sqlmock.NewResult(1, 1))
dbp.SQLMock.ExpectExec("update resource_version set resource_version").WillReturnResult(sqlmock.NewResult(1, 1))
}
func expectSuccessfulResourceVersionExec(t *testing.T, dbp test.TestDBProvider, cbs ...func()) {
for _, cb := range cbs {
cb()
}
expectSuccessfulResourceVersionLock(t, dbp, 100, 200)
expectSuccessfulResourceVersionSaveRV(t, dbp)
}
func (b testBackend) ExecWithResult(expectedSQL string, lastInsertID int64, rowsAffected int64) {
b.SQLMock.ExpectExec(expectedSQL).WillReturnResult(sqlmock.NewResult(lastInsertID, rowsAffected))
}

View File

@@ -281,13 +281,13 @@ func (b *backend) processBulkWithTx(ctx context.Context, tx db.Tx, setting resou
}
if b.dialect.DialectName() == "sqlite" {
nextRV, err := b.rvManager.lock(ctx, tx, key.Group, key.Resource)
nextRV, err := b.rvManager.Lock(ctx, tx, key.Group, key.Resource)
if err != nil {
b.log.Error("error locking RV", "error", err, "key", resource.NSGR(key))
} else {
b.log.Info("successfully locked RV", "nextRV", nextRV, "key", resource.NSGR(key))
// Save the incremented RV
if err := b.rvManager.saveRV(ctx, tx, key.Group, key.Resource, nextRV); err != nil {
if err := b.rvManager.SaveRV(ctx, tx, key.Group, key.Resource, nextRV); err != nil {
b.log.Error("error saving RV", "error", err, "key", resource.NSGR(key))
} else {
b.log.Info("successfully saved RV", "rv", nextRV, "key", resource.NSGR(key))

View File

@@ -17,6 +17,7 @@ import (
dbsql "github.com/grafana/grafana/pkg/storage/unified/sql/db"
"github.com/grafana/grafana/pkg/storage/unified/sql/db/dbimpl"
"github.com/grafana/grafana/pkg/storage/unified/sql/dbutil"
"github.com/grafana/grafana/pkg/storage/unified/sql/rvmanager"
"github.com/grafana/grafana/pkg/storage/unified/sql/sqltemplate"
"github.com/grafana/grafana/pkg/tests/testsuite"
"github.com/grafana/grafana/pkg/util/testutil"
@@ -94,7 +95,7 @@ func TestIntegrationListIter(t *testing.T) {
return fmt.Errorf("failed to insert test data: %w", err)
}
if _, err = dbutil.Exec(ctx, tx, sqlResourceUpdateRV, sqlResourceUpdateRVRequest{
if _, err = dbutil.Exec(ctx, tx, rvmanager.SqlResourceUpdateRV, rvmanager.SqlResourceUpdateRVRequest{
SQLTemplate: sqltemplate.New(dialect),
GUIDToRV: map[string]int64{
item.guid: item.resourceVersion,

View File

@@ -38,10 +38,8 @@ var (
sqlResourceList = mustTemplate("resource_list.sql")
sqlResourceHistoryList = mustTemplate("resource_history_list.sql")
sqlResourceHistoryListModifiedSince = mustTemplate("resource_history_list_since_modified.sql")
sqlResourceUpdateRV = mustTemplate("resource_update_rv.sql")
sqlResourceHistoryRead = mustTemplate("resource_history_read.sql")
sqlResourceHistoryReadLatestRV = mustTemplate("resource_history_read_latest_rv.sql")
sqlResourceHistoryUpdateRV = mustTemplate("resource_history_update_rv.sql")
sqlResourceHistoryInsert = mustTemplate("resource_history_insert.sql")
sqlResourceHistoryPoll = mustTemplate("resource_history_poll.sql")
sqlResourceHistoryGet = mustTemplate("resource_history_get.sql")
@@ -51,10 +49,7 @@ var (
sqlResourceInsertFromHistory = mustTemplate("resource_insert_from_history.sql")
// sqlResourceLabelsInsert = mustTemplate("resource_labels_insert.sql")
sqlResourceVersionGet = mustTemplate("resource_version_get.sql")
sqlResourceVersionUpdate = mustTemplate("resource_version_update.sql")
sqlResourceVersionInsert = mustTemplate("resource_version_insert.sql")
sqlResourceVersionList = mustTemplate("resource_version_list.sql")
sqlResourceVersionList = mustTemplate("resource_version_list.sql")
sqlResourceBlobInsert = mustTemplate("resource_blob_insert.sql")
sqlResourceBlobQuery = mustTemplate("resource_blob_query.sql")
@@ -365,76 +360,11 @@ func (r sqlResourceBlobQueryRequest) Validate() error {
return nil
}
// update RV
type sqlResourceUpdateRVRequest struct {
sqltemplate.SQLTemplate
GUIDToRV map[string]int64
GUIDToSnowflakeRV map[string]int64
}
func (r sqlResourceUpdateRVRequest) Validate() error {
return nil // TODO
}
func (r sqlResourceUpdateRVRequest) SlashFunc() string {
if r.DialectName() == "postgres" {
return "CHR(47)"
}
return "CHAR(47)"
}
func (r sqlResourceUpdateRVRequest) TildeFunc() string {
if r.DialectName() == "postgres" {
return "CHR(126)"
}
return "CHAR(126)"
}
// resource_version table requests.
type resourceVersionResponse struct {
ResourceVersion int64
CurrentEpoch int64
}
func (r *resourceVersionResponse) Results() (*resourceVersionResponse, error) {
return r, nil
}
type groupResourceVersion struct {
Group, Resource string
ResourceVersion int64
}
type sqlResourceVersionUpsertRequest struct {
sqltemplate.SQLTemplate
Group, Resource string
ResourceVersion int64
}
func (r sqlResourceVersionUpsertRequest) Validate() error {
return nil // TODO
}
type sqlResourceVersionGetRequest struct {
sqltemplate.SQLTemplate
Group, Resource string
ReadOnly bool
Response *resourceVersionResponse
}
func (r sqlResourceVersionGetRequest) Validate() error {
return nil // TODO
}
func (r sqlResourceVersionGetRequest) Results() (*resourceVersionResponse, error) {
return &resourceVersionResponse{
ResourceVersion: r.Response.ResourceVersion,
CurrentEpoch: r.Response.CurrentEpoch,
}, nil
}
type sqlResourceVersionListRequest struct {
sqltemplate.SQLTemplate
*groupResourceVersion

View File

@@ -8,6 +8,7 @@ import (
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/storage/unified/resource"
"github.com/grafana/grafana/pkg/storage/unified/resourcepb"
"github.com/grafana/grafana/pkg/storage/unified/sql/rvmanager"
"github.com/grafana/grafana/pkg/storage/unified/sql/sqltemplate/mocks"
)
@@ -162,10 +163,10 @@ func TestUnifiedStorageQueries(t *testing.T) {
},
},
sqlResourceUpdateRV: {
rvmanager.SqlResourceUpdateRV: {
{
Name: "single path",
Data: &sqlResourceUpdateRVRequest{
Data: &rvmanager.SqlResourceUpdateRVRequest{
SQLTemplate: mocks.NewTestingSQLTemplate(),
GUIDToRV: map[string]int64{
"guid1": 123,
@@ -228,10 +229,10 @@ func TestUnifiedStorageQueries(t *testing.T) {
},
},
sqlResourceHistoryUpdateRV: {
rvmanager.SqlResourceHistoryUpdateRV: {
{
Name: "single path",
Data: &sqlResourceUpdateRVRequest{
Data: &rvmanager.SqlResourceUpdateRVRequest{
SQLTemplate: mocks.NewTestingSQLTemplate(),
GUIDToRV: map[string]int64{
"guid1": 123,
@@ -334,23 +335,23 @@ func TestUnifiedStorageQueries(t *testing.T) {
},
},
sqlResourceVersionGet: {
rvmanager.SqlResourceVersionGet: {
{
Name: "single path",
Data: &sqlResourceVersionGetRequest{
Data: &rvmanager.SqlResourceVersionGetRequest{
SQLTemplate: mocks.NewTestingSQLTemplate(),
Resource: "resource",
Group: "group",
Response: new(resourceVersionResponse),
Response: new(rvmanager.ResourceVersionResponse),
ReadOnly: false,
},
},
},
sqlResourceVersionUpdate: {
rvmanager.SqlResourceVersionUpdate: {
{
Name: "increment resource version",
Data: &sqlResourceVersionUpsertRequest{
Data: &rvmanager.SqlResourceVersionUpsertRequest{
SQLTemplate: mocks.NewTestingSQLTemplate(),
Resource: "resource",
Group: "group",
@@ -359,10 +360,10 @@ func TestUnifiedStorageQueries(t *testing.T) {
},
},
sqlResourceVersionInsert: {
rvmanager.SqlResourceVersionInsert: {
{
Name: "single path",
Data: &sqlResourceVersionUpsertRequest{
Data: &rvmanager.SqlResourceVersionUpsertRequest{
SQLTemplate: mocks.NewTestingSQLTemplate(),
ResourceVersion: int64(12354),
},

View File

@@ -0,0 +1,84 @@
package rvmanager
import (
"github.com/grafana/grafana/pkg/storage/unified/sql/sqltemplate"
)
type SqlResourceUpdateRVRequest struct {
sqltemplate.SQLTemplate
GUIDToRV map[string]int64
GUIDToSnowflakeRV map[string]int64
}
func (r SqlResourceUpdateRVRequest) Validate() error {
return nil // TODO
}
func (r SqlResourceUpdateRVRequest) SlashFunc() string {
if r.DialectName() == "postgres" {
return "CHR(47)"
}
return "CHAR(47)"
}
func (r SqlResourceUpdateRVRequest) TildeFunc() string {
if r.DialectName() == "postgres" {
return "CHR(126)"
}
return "CHAR(126)"
}
type ResourceVersionResponse struct {
ResourceVersion int64
CurrentEpoch int64
}
func (r *ResourceVersionResponse) Results() (*ResourceVersionResponse, error) {
return r, nil
}
type sqlResourceVersionGetRequest struct {
sqltemplate.SQLTemplate
Group, Resource string
ReadOnly bool
Response *ResourceVersionResponse
}
func (r sqlResourceVersionGetRequest) Validate() error {
return nil // TODO
}
func (r sqlResourceVersionGetRequest) Results() (*ResourceVersionResponse, error) {
return &ResourceVersionResponse{
ResourceVersion: r.Response.ResourceVersion,
CurrentEpoch: r.Response.CurrentEpoch,
}, nil
}
type SqlResourceVersionUpsertRequest struct {
sqltemplate.SQLTemplate
Group, Resource string
ResourceVersion int64
}
func (r SqlResourceVersionUpsertRequest) Validate() error {
return nil // TODO
}
type SqlResourceVersionGetRequest struct {
sqltemplate.SQLTemplate
Group, Resource string
ReadOnly bool
Response *ResourceVersionResponse
}
func (r SqlResourceVersionGetRequest) Validate() error {
return nil // TODO
}
func (r SqlResourceVersionGetRequest) Results() (*ResourceVersionResponse, error) {
return &ResourceVersionResponse{
ResourceVersion: r.Response.ResourceVersion,
CurrentEpoch: r.Response.CurrentEpoch,
}, nil
}

View File

@@ -1,4 +1,4 @@
package sql
package rvmanager
import (
"context"
@@ -11,6 +11,7 @@ import (
"github.com/bwmarrin/snowflake"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/trace"
@@ -20,6 +21,8 @@ import (
"github.com/grafana/grafana/pkg/storage/unified/sql/sqltemplate"
)
var tracer = otel.Tracer("github.com/grafana/grafana/pkg/storage/unified/sql/rvmanager")
var (
rvmWriteDuration = promauto.NewHistogramVec(prometheus.HistogramOpts{
Name: "rvmanager_write_duration_seconds",
@@ -62,8 +65,8 @@ const (
defaultBatchTimeout = 5 * time.Second
)
// resourceVersionManager handles resource version operations
type resourceVersionManager struct {
// ResourceVersionManager handles resource version operations
type ResourceVersionManager struct {
dialect sqltemplate.Dialect
db db.DB
batchMu sync.RWMutex
@@ -100,7 +103,7 @@ type ResourceManagerOptions struct {
}
// NewResourceVersionManager creates a new ResourceVersionManager
func NewResourceVersionManager(opts ResourceManagerOptions) (*resourceVersionManager, error) {
func NewResourceVersionManager(opts ResourceManagerOptions) (*ResourceVersionManager, error) {
if opts.MaxBatchSize == 0 {
opts.MaxBatchSize = defaultMaxBatchSize
}
@@ -113,7 +116,7 @@ func NewResourceVersionManager(opts ResourceManagerOptions) (*resourceVersionMan
if opts.DB == nil {
return nil, errors.New("db is required")
}
return &resourceVersionManager{
return &ResourceVersionManager{
dialect: opts.Dialect,
db: opts.DB,
batchChMap: make(map[string]chan *writeOp),
@@ -123,7 +126,7 @@ func NewResourceVersionManager(opts ResourceManagerOptions) (*resourceVersionMan
}
// ExecWithRV executes the given function with an incremented resource version
func (m *resourceVersionManager) ExecWithRV(ctx context.Context, key *resourcepb.ResourceKey, fn WriteEventFunc) (rv int64, err error) {
func (m *ResourceVersionManager) ExecWithRV(ctx context.Context, key *resourcepb.ResourceKey, fn WriteEventFunc) (rv int64, err error) {
rvmInflightWrites.WithLabelValues(key.Group, key.Resource).Inc()
defer rvmInflightWrites.WithLabelValues(key.Group, key.Resource).Dec()
@@ -179,7 +182,7 @@ func (m *resourceVersionManager) ExecWithRV(ctx context.Context, key *resourcepb
}
// startBatchProcessor is responsible for processing batches of write operations
func (m *resourceVersionManager) startBatchProcessor(group, resource string) {
func (m *ResourceVersionManager) startBatchProcessor(group, resource string) {
ctx := context.TODO()
batchKey := fmt.Sprintf("%s/%s", group, resource)
@@ -216,7 +219,11 @@ func (m *resourceVersionManager) startBatchProcessor(group, resource string) {
}
}
func (m *resourceVersionManager) execBatch(ctx context.Context, group, resource string, batch []writeOp) {
var readCommitted = &sql.TxOptions{
Isolation: sql.LevelReadCommitted,
}
func (m *ResourceVersionManager) execBatch(ctx context.Context, group, resource string, batch []writeOp) {
ctx, span := tracer.Start(ctx, "sql.resourceVersionManager.execBatch")
defer span.End()
@@ -245,7 +252,7 @@ func (m *resourceVersionManager) execBatch(ctx context.Context, group, resource
guids := make([]string, len(batch)) // The GUIDs of the created resources in the same order as the batch
rvs := make([]int64, len(batch)) // The RVs of the created resources in the same order as the batch
err = m.db.WithTx(ctx, ReadCommitted, func(ctx context.Context, tx db.Tx) error {
err = m.db.WithTx(ctx, readCommitted, func(ctx context.Context, tx db.Tx) error {
span.AddEvent("starting_batch_transaction")
writeTimer := prometheus.NewTimer(prometheus.ObserverFunc(func(v float64) {
@@ -268,7 +275,7 @@ func (m *resourceVersionManager) execBatch(ctx context.Context, group, resource
lockTimer := prometheus.NewTimer(prometheus.ObserverFunc(func(v float64) {
rvmExecBatchPhaseDuration.WithLabelValues(group, resource, "waiting_for_lock").Observe(v)
}))
rv, err := m.lock(ctx, tx, group, resource)
rv, err := m.Lock(ctx, tx, group, resource)
lockTimer.ObserveDuration()
if err != nil {
span.AddEvent("resource_version_lock_failed", trace.WithAttributes(
@@ -292,7 +299,7 @@ func (m *resourceVersionManager) execBatch(ctx context.Context, group, resource
rv++
}
// Update the resource version for the created resources in both the resource and the resource history
if _, err := dbutil.Exec(ctx, tx, sqlResourceUpdateRV, sqlResourceUpdateRVRequest{
if _, err := dbutil.Exec(ctx, tx, SqlResourceUpdateRV, SqlResourceUpdateRVRequest{
SQLTemplate: sqltemplate.New(m.dialect),
GUIDToRV: guidToRV,
}); err != nil {
@@ -303,7 +310,7 @@ func (m *resourceVersionManager) execBatch(ctx context.Context, group, resource
}
span.AddEvent("resource_versions_updated")
if _, err := dbutil.Exec(ctx, tx, sqlResourceHistoryUpdateRV, sqlResourceUpdateRVRequest{
if _, err := dbutil.Exec(ctx, tx, SqlResourceHistoryUpdateRV, SqlResourceUpdateRVRequest{
SQLTemplate: sqltemplate.New(m.dialect),
GUIDToRV: guidToRV,
GUIDToSnowflakeRV: guidToSnowflakeRV,
@@ -316,7 +323,7 @@ func (m *resourceVersionManager) execBatch(ctx context.Context, group, resource
span.AddEvent("resource_history_versions_updated")
// Record the latest RV in the resource version table
err = m.saveRV(ctx, tx, group, resource, rv)
err = m.SaveRV(ctx, tx, group, resource, rv)
if err != nil {
span.AddEvent("save_rv_failed", trace.WithAttributes(
attribute.String("error", err.Error()),
@@ -350,20 +357,20 @@ func snowflakeFromRv(rv int64) int64 {
return (((rv / 1000) - snowflake.Epoch) << (snowflake.NodeBits + snowflake.StepBits)) + (rv % 1000)
}
// lock locks the resource version for the given key
func (m *resourceVersionManager) lock(ctx context.Context, x db.ContextExecer, group, resource string) (nextRV int64, err error) {
// Lock locks the resource version for the given key
func (m *ResourceVersionManager) Lock(ctx context.Context, x db.ContextExecer, group, resource string) (nextRV int64, err error) {
// 1. Lock the row and prevent concurrent updates until the transaction is committed
res, err := dbutil.QueryRow(ctx, x, sqlResourceVersionGet, sqlResourceVersionGetRequest{
res, err := dbutil.QueryRow(ctx, x, SqlResourceVersionGet, sqlResourceVersionGetRequest{
SQLTemplate: sqltemplate.New(m.dialect),
Group: group,
Resource: resource,
Response: new(resourceVersionResponse),
Response: new(ResourceVersionResponse),
ReadOnly: false, // Lock the row for update
})
if errors.Is(err, sql.ErrNoRows) {
// If there wasn't a row for this resource, create it
if _, err = dbutil.Exec(ctx, x, sqlResourceVersionInsert, sqlResourceVersionUpsertRequest{
if _, err = dbutil.Exec(ctx, x, SqlResourceVersionInsert, SqlResourceVersionUpsertRequest{
SQLTemplate: sqltemplate.New(m.dialect),
Group: group,
Resource: resource,
@@ -372,11 +379,11 @@ func (m *resourceVersionManager) lock(ctx context.Context, x db.ContextExecer, g
}
// Fetch the newly created resource version
res, err = dbutil.QueryRow(ctx, x, sqlResourceVersionGet, sqlResourceVersionGetRequest{
res, err = dbutil.QueryRow(ctx, x, SqlResourceVersionGet, sqlResourceVersionGetRequest{
SQLTemplate: sqltemplate.New(m.dialect),
Group: group,
Resource: resource,
Response: new(resourceVersionResponse),
Response: new(ResourceVersionResponse),
ReadOnly: true,
})
if err != nil {
@@ -390,8 +397,8 @@ func (m *resourceVersionManager) lock(ctx context.Context, x db.ContextExecer, g
return max(res.CurrentEpoch, res.ResourceVersion+1), nil
}
func (m *resourceVersionManager) saveRV(ctx context.Context, x db.ContextExecer, group, resource string, rv int64) error {
_, err := dbutil.Exec(ctx, x, sqlResourceVersionUpdate, sqlResourceVersionUpsertRequest{
func (m *ResourceVersionManager) SaveRV(ctx context.Context, x db.ContextExecer, group, resource string, rv int64) error {
_, err := dbutil.Exec(ctx, x, SqlResourceVersionUpdate, SqlResourceVersionUpsertRequest{
SQLTemplate: sqltemplate.New(m.dialect),
Group: group,
Resource: resource,

View File

@@ -1,4 +1,4 @@
package sql
package rvmanager
import (
"testing"

View File

@@ -0,0 +1,30 @@
package rvmanager
import (
"embed"
"fmt"
"text/template"
)
// Templates setup.
var (
//go:embed data/*.sql
sqlTemplatesFS embed.FS
sqlTemplates = template.Must(template.New("sql").ParseFS(sqlTemplatesFS, `data/*.sql`))
)
func mustTemplate(filename string) *template.Template {
if t := sqlTemplates.Lookup(filename); t != nil {
return t
}
panic(fmt.Sprintf("template file not found: %s", filename))
}
var (
SqlResourceUpdateRV = mustTemplate("resource_update_rv.sql")
SqlResourceHistoryUpdateRV = mustTemplate("resource_history_update_rv.sql")
SqlResourceVersionGet = mustTemplate("resource_version_get.sql")
SqlResourceVersionUpdate = mustTemplate("resource_version_update.sql")
SqlResourceVersionInsert = mustTemplate("resource_version_insert.sql")
)

View File

@@ -89,7 +89,7 @@ describe('ShareLinkTab', () => {
await screen.findByRole('link', { name: selectors.pages.SharePanelModal.linkToRenderedImage })
).toHaveAttribute(
'href',
'http://dashboards.grafana.com/grafana/render/d-solo/dash-1?from=2019-02-11T13:00:00.000Z&to=2019-02-11T19:00:00.000Z&panelId=A$panel-12&__feature.dashboardSceneSolo=true&width=1000&height=500&tz=Pacific%2FEaster'
'http://dashboards.grafana.com/grafana/render/d-solo/dash-1?from=2019-02-11T13:00:00.000Z&to=2019-02-11T19:00:00.000Z&panelId=A$panel-12&__feature.dashboardSceneSolo=true&hideLogo=true&width=1000&height=500&tz=Pacific%2FEaster'
);
});
});

View File

@@ -81,6 +81,9 @@ export class ShareLinkTab extends SceneObjectBase<ShareLinkTabState> implements
imageQueryParams['__feature.dashboardSceneSolo'] = true;
}
// hide Grafana logo in the rendered image
urlParamsUpdate.hideLogo = 'true';
const imageUrl = getDashboardUrl({
uid: dashboard.state.uid,
currentQueryParams: window.location.search,

View File

@@ -0,0 +1,152 @@
import { render, screen } from '@testing-library/react';
import { useParams } from 'react-router-dom-v5-compat';
import { SceneTimeRange, VizPanel } from '@grafana/scenes';
import { getDashboardScenePageStateManager } from '../pages/DashboardScenePageStateManager';
import { DashboardScene } from '../scene/DashboardScene';
import { DefaultGridLayoutManager } from '../scene/layout-default/DefaultGridLayoutManager';
import { SoloPanelRenderer } from './SoloPanelPage';
// Mock dependencies
jest.mock('react-router-dom-v5-compat', () => ({
useParams: jest.fn(),
}));
jest.mock('../pages/DashboardScenePageStateManager', () => ({
getDashboardScenePageStateManager: jest.fn(),
}));
jest.mock('../scene/SoloPanelContext', () => ({
SoloPanelContextProvider: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
useDefineSoloPanelContext: jest.fn(() => ({})),
}));
jest.mock('./SoloPanelPageLogo', () => ({
shouldHideSoloPanelLogo: (hideLogo?: unknown) => {
if (hideLogo === undefined) {
return false;
}
if (hideLogo === true) {
return true;
}
if (hideLogo === false) {
return false;
}
if (Array.isArray(hideLogo)) {
hideLogo = hideLogo[0] ?? '';
}
const normalized = String(hideLogo).trim().toLowerCase();
return normalized !== 'false' && normalized !== '0';
},
SoloPanelPageLogo: ({ isHovered, hideLogo }: { isHovered: boolean; hideLogo?: unknown }) => {
if (hideLogo === true) {
return null;
}
if (hideLogo === false) {
return (
<div data-testid="solo-panel-logo" data-hovered={String(isHovered)}>
Logo
</div>
);
}
if (Array.isArray(hideLogo)) {
hideLogo = hideLogo[0] ?? '';
}
if (hideLogo !== undefined) {
const normalized = String(hideLogo).trim().toLowerCase();
if (normalized !== 'false' && normalized !== '0') {
return null;
}
}
return (
<div data-testid="solo-panel-logo" data-hovered={String(isHovered)}>
Logo
</div>
);
},
}));
describe('SoloPanelPage', () => {
const mockStateManager = {
useState: jest.fn(() => ({
dashboard: null,
loadError: null,
})),
loadDashboard: jest.fn(),
clearState: jest.fn(),
};
beforeEach(() => {
jest.clearAllMocks();
(getDashboardScenePageStateManager as jest.Mock).mockReturnValue(mockStateManager);
(useParams as jest.Mock).mockReturnValue({ uid: 'test-uid', type: undefined, slug: undefined });
});
describe('SoloPanelRenderer', () => {
const createMockDashboard = () => {
const panel = new VizPanel({
title: 'Test Panel',
pluginId: 'table',
key: 'panel-1',
});
const dashboard = new DashboardScene({
title: 'Test Dashboard',
uid: 'test-dash',
$timeRange: new SceneTimeRange({}),
body: DefaultGridLayoutManager.fromVizPanels([panel]),
});
// Mock the activate method
dashboard.activate = jest.fn(() => jest.fn());
// Mock useState to return the dashboard state object with required properties
dashboard.useState = jest.fn(() => ({
controls: {
useState: jest.fn(() => ({
refreshPicker: {
activate: jest.fn(() => jest.fn()),
},
})),
},
body: {
Component: () => <div data-testid="panel-content">Panel Content</div>,
},
})) as unknown as typeof dashboard.useState;
return dashboard;
};
it('should render the panel', () => {
const dashboard = createMockDashboard();
render(<SoloPanelRenderer dashboard={dashboard} panelId="panel-1" hideLogo={undefined} />);
// We can't easily assert on the actual panel content without more setup, so verify the renderer mounted (logo is present)
expect(screen.getByTestId('solo-panel-logo')).toBeInTheDocument();
});
it('should render logo when hideLogo is false', () => {
const dashboard = createMockDashboard();
render(<SoloPanelRenderer dashboard={dashboard} panelId="panel-1" hideLogo="false" />);
expect(screen.getByTestId('solo-panel-logo')).toBeInTheDocument();
});
it('should not render logo when hideLogo is true', () => {
const dashboard = createMockDashboard();
render(<SoloPanelRenderer dashboard={dashboard} panelId="panel-1" hideLogo="true" />);
expect(screen.queryByTestId('solo-panel-logo')).not.toBeInTheDocument();
});
it('should initialize with isHovered as false', () => {
const dashboard = createMockDashboard();
render(<SoloPanelRenderer dashboard={dashboard} panelId="panel-1" hideLogo={undefined} />);
const logo = screen.getByTestId('solo-panel-logo');
expect(logo).toHaveAttribute('data-hovered', 'false');
});
});
});

View File

@@ -1,9 +1,9 @@
// Libraries
import { css } from '@emotion/css';
import { useEffect } from 'react';
import { useEffect, useRef, useState } from 'react';
import { useParams } from 'react-router-dom-v5-compat';
import { GrafanaTheme2 } from '@grafana/data';
import { GrafanaTheme2, UrlQueryValue } from '@grafana/data';
import { t } from '@grafana/i18n';
import { UrlSyncContextProvider } from '@grafana/scenes';
import { Alert, Box, useStyles2 } from '@grafana/ui';
@@ -17,7 +17,10 @@ import { getDashboardScenePageStateManager } from '../pages/DashboardScenePageSt
import { DashboardScene } from '../scene/DashboardScene';
import { SoloPanelContextProvider, useDefineSoloPanelContext } from '../scene/SoloPanelContext';
export interface Props extends GrafanaRouteComponentProps<DashboardPageRouteParams, { panelId: string }> {}
import { SoloPanelPageLogo } from './SoloPanelPageLogo';
export interface Props
extends GrafanaRouteComponentProps<DashboardPageRouteParams, { panelId: string; hideLogo?: UrlQueryValue }> {}
/**
* Used for iframe embedding and image rendering of single panels
@@ -52,18 +55,28 @@ export function SoloPanelPage({ queryParams }: Props) {
return (
<UrlSyncContextProvider scene={dashboard}>
<SoloPanelRenderer dashboard={dashboard} panelId={queryParams.panelId} />
<SoloPanelRenderer dashboard={dashboard} panelId={queryParams.panelId} hideLogo={queryParams.hideLogo} />
</UrlSyncContextProvider>
);
}
export default SoloPanelPage;
export function SoloPanelRenderer({ dashboard, panelId }: { dashboard: DashboardScene; panelId: string }) {
export function SoloPanelRenderer({
dashboard,
panelId,
hideLogo,
}: {
dashboard: DashboardScene;
panelId: string;
hideLogo?: UrlQueryValue;
}) {
const { controls, body } = dashboard.useState();
const refreshPicker = controls?.useState()?.refreshPicker;
const styles = useStyles2(getStyles);
const soloPanelContext = useDefineSoloPanelContext(panelId)!;
const [isHovered, setIsHovered] = useState(false);
const containerRef = useRef<HTMLDivElement>(null);
useEffect(() => {
const dashDeactivate = dashboard.activate();
@@ -76,11 +89,19 @@ export function SoloPanelRenderer({ dashboard, panelId }: { dashboard: Dashboard
}, [dashboard, refreshPicker]);
return (
<div className={styles.container}>
<div
ref={containerRef}
className={styles.container}
onMouseEnter={() => setIsHovered(true)}
onMouseLeave={() => setIsHovered(false)}
>
<SoloPanelPageLogo containerRef={containerRef} isHovered={isHovered} hideLogo={hideLogo} />
{renderHiddenVariables(dashboard)}
<SoloPanelContextProvider value={soloPanelContext} dashboard={dashboard} singleMatch={true}>
<body.Component model={body} />
</SoloPanelContextProvider>
<div className={styles.panelWrapper}>
<SoloPanelContextProvider value={soloPanelContext} dashboard={dashboard} singleMatch={true}>
<body.Component model={body} />
</SoloPanelContextProvider>
</div>
</div>
);
}
@@ -107,15 +128,23 @@ function renderHiddenVariables(dashboard: DashboardScene) {
);
}
const getStyles = (theme: GrafanaTheme2) => ({
container: css({
position: 'fixed',
bottom: 0,
right: 0,
margin: 0,
left: 0,
top: 0,
const getStyles = (theme: GrafanaTheme2) => {
const panelWrapper = css({
width: '100%',
height: '100%',
}),
});
});
return {
container: css({
position: 'fixed',
bottom: 0,
right: 0,
margin: 0,
left: 0,
top: 0,
width: '100%',
height: '100%',
}),
panelWrapper,
};
};

View File

@@ -0,0 +1,291 @@
import { render, screen } from '@testing-library/react';
import { createRef } from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { shouldHideSoloPanelLogo, SoloPanelPageLogo } from './SoloPanelPageLogo';
// Mock the theme hook
const mockUseTheme2 = jest.fn();
const mockUseStyles2 = jest.fn((fn) => fn({} as GrafanaTheme2));
jest.mock('@grafana/ui', () => ({
...jest.requireActual('@grafana/ui'),
useTheme2: () => mockUseTheme2(),
useStyles2: (fn: (theme: GrafanaTheme2) => Record<string, unknown>) => mockUseStyles2(fn),
}));
// Mock the logo images for dark and light modes
jest.mock('img/grafana_text_logo_dark.svg', () => 'grafana-text-logo-dark.svg');
jest.mock('img/grafana_text_logo_light.svg', () => 'grafana-text-logo-light.svg');
// Mock ResizeObserver
global.ResizeObserver = jest.fn().mockImplementation((callback) => {
return {
observe: jest.fn(),
unobserve: jest.fn(),
disconnect: jest.fn(),
// Helper to trigger resize
trigger: (width: number, height: number) => {
callback([{ contentRect: { width, height } }]);
},
};
});
// Helper function to assign a mock div to a ref
function assignMockDivToRef(ref: React.RefObject<HTMLDivElement>, mockDiv: HTMLDivElement) {
// Use type assertion to bypass readonly restriction in tests
(ref as { current: HTMLDivElement | null }).current = mockDiv;
}
describe('SoloPanelPageLogo', () => {
describe('shouldHideSoloPanelLogo', () => {
it('treats null as false', () => {
expect(shouldHideSoloPanelLogo(null)).toBe(false);
});
it('treats presence (empty string) as true', () => {
expect(shouldHideSoloPanelLogo('')).toBe(true);
});
it('treats true/1 as true', () => {
expect(shouldHideSoloPanelLogo('true')).toBe(true);
expect(shouldHideSoloPanelLogo('1')).toBe(true);
expect(shouldHideSoloPanelLogo(' TRUE ')).toBe(true);
});
it('treats false/0 as false', () => {
expect(shouldHideSoloPanelLogo('false')).toBe(false);
expect(shouldHideSoloPanelLogo('0')).toBe(false);
expect(shouldHideSoloPanelLogo(' FALSE ')).toBe(false);
});
it('treats boolean true as true and boolean false as false', () => {
expect(shouldHideSoloPanelLogo(true)).toBe(true);
expect(shouldHideSoloPanelLogo(false)).toBe(false);
});
it('treats undefined as false', () => {
expect(shouldHideSoloPanelLogo(undefined)).toBe(false);
});
it('handles array values (uses the first value)', () => {
expect(shouldHideSoloPanelLogo([''])).toBe(true);
expect(shouldHideSoloPanelLogo(['true'])).toBe(true);
expect(shouldHideSoloPanelLogo(['1'])).toBe(true);
expect(shouldHideSoloPanelLogo(['false'])).toBe(false);
expect(shouldHideSoloPanelLogo(['0'])).toBe(false);
expect(shouldHideSoloPanelLogo(['false', 'true'])).toBe(false);
});
});
const mockTheme = {
isDark: false,
colors: {
background: { primary: '#ffffff' },
border: { weak: '#e0e0e0' },
text: { secondary: '#666666' },
},
shape: { radius: { default: '4px' } },
shadows: { z3: '0 2px 4px rgba(0,0,0,0.1)' },
typography: { body: { fontSize: '14px' } },
spacing: jest.fn((n: number) => `${n * 8}px`),
transitions: {
handleMotion: jest.fn(() => ({})),
},
} as unknown as GrafanaTheme2;
beforeEach(() => {
jest.clearAllMocks();
mockUseTheme2.mockReturnValue({
...mockTheme,
isDark: false,
});
mockUseStyles2.mockImplementation((fn) => fn(mockTheme));
});
it('should render the logo component', () => {
const containerRef = createRef<HTMLDivElement>();
const mockDiv = document.createElement('div');
mockDiv.getBoundingClientRect = jest.fn(() => ({
width: 800,
height: 600,
top: 0,
left: 0,
bottom: 600,
right: 800,
x: 0,
y: 0,
toJSON: jest.fn(),
}));
assignMockDivToRef(containerRef, mockDiv);
render(<SoloPanelPageLogo containerRef={containerRef} isHovered={false} hideLogo={undefined} />);
expect(screen.getByText('Powered by')).toBeInTheDocument();
expect(screen.getByAltText('Grafana')).toBeInTheDocument();
});
it('should hide logo when isHovered is true', () => {
const containerRef = createRef<HTMLDivElement>();
const mockDiv = document.createElement('div');
mockDiv.getBoundingClientRect = jest.fn(() => ({
width: 800,
height: 600,
top: 0,
left: 0,
bottom: 600,
right: 800,
x: 0,
y: 0,
toJSON: jest.fn(),
}));
assignMockDivToRef(containerRef, mockDiv);
render(<SoloPanelPageLogo containerRef={containerRef} isHovered={true} hideLogo={undefined} />);
// The logo should still be in the DOM; hovering applies the logoHidden class (opacity 0) rather than unmounting it
const poweredByText = screen.getByText('Powered by');
expect(poweredByText).toBeInTheDocument();
// The logoHidden class should be applied (we can't easily test the class name without more setup)
});
it('should show logo when isHovered is false', () => {
const containerRef = createRef<HTMLDivElement>();
const mockDiv = document.createElement('div');
mockDiv.getBoundingClientRect = jest.fn(() => ({
width: 800,
height: 600,
top: 0,
left: 0,
bottom: 600,
right: 800,
x: 0,
y: 0,
toJSON: jest.fn(),
}));
assignMockDivToRef(containerRef, mockDiv);
render(<SoloPanelPageLogo containerRef={containerRef} isHovered={false} hideLogo={undefined} />);
// The logo should be visible
expect(screen.getByText('Powered by')).toBeInTheDocument();
expect(screen.getByAltText('Grafana')).toBeInTheDocument();
});
it('should use the light text logo in dark theme', () => {
const containerRef = createRef<HTMLDivElement>();
const mockDiv = document.createElement('div');
mockDiv.getBoundingClientRect = jest.fn(() => ({
width: 800,
height: 600,
top: 0,
left: 0,
bottom: 600,
right: 800,
x: 0,
y: 0,
toJSON: jest.fn(),
}));
assignMockDivToRef(containerRef, mockDiv);
mockUseTheme2.mockReturnValue({
...mockTheme,
isDark: true,
});
render(<SoloPanelPageLogo containerRef={containerRef} isHovered={false} hideLogo={undefined} />);
const logo = screen.getByAltText('Grafana');
expect(logo).toHaveAttribute('src', 'grafana-text-logo-light.svg');
});
it('should use correct logo based on theme', () => {
const containerRef = createRef<HTMLDivElement>();
const mockDiv = document.createElement('div');
mockDiv.getBoundingClientRect = jest.fn(() => ({
width: 800,
height: 600,
top: 0,
left: 0,
bottom: 600,
right: 800,
x: 0,
y: 0,
toJSON: jest.fn(),
}));
assignMockDivToRef(containerRef, mockDiv);
// The beforeEach sets isDark: false by default, so this should work
// But the previous test might have changed it, so let's ensure it's reset
mockUseTheme2.mockClear();
mockUseTheme2.mockReturnValue({
...mockTheme,
isDark: false,
});
render(<SoloPanelPageLogo containerRef={containerRef} isHovered={false} hideLogo={undefined} />);
const logo = screen.getByAltText('Grafana');
// Verify logo is rendered (the exact src depends on theme, which is tested in other tests)
expect(logo).toBeInTheDocument();
expect(logo).toHaveAttribute('src');
});
it('should apply scaling styles based on container dimensions', () => {
const containerRef = createRef<HTMLDivElement>();
const mockDiv = document.createElement('div');
mockDiv.getBoundingClientRect = jest.fn(() => ({
width: 400,
height: 300,
top: 0,
left: 0,
bottom: 300,
right: 400,
x: 0,
y: 0,
toJSON: jest.fn(),
}));
assignMockDivToRef(containerRef, mockDiv);
render(<SoloPanelPageLogo containerRef={containerRef} isHovered={false} hideLogo={undefined} />);
// Find the logo container by looking for the "Powered by" text's parent
const poweredByText = screen.getByText('Powered by');
const logoContainer = poweredByText.parentElement as HTMLElement;
expect(logoContainer).toBeInTheDocument();
// Check that inline styles are applied (scaling should be between 0.6 and 1.0)
expect(logoContainer.style.fontSize).toBeTruthy();
expect(logoContainer.style.top).toBeTruthy();
expect(logoContainer.style.right).toBeTruthy();
});
it('should observe container resize', () => {
const containerRef = createRef<HTMLDivElement>();
const mockDiv = document.createElement('div');
mockDiv.getBoundingClientRect = jest.fn(() => ({
width: 800,
height: 600,
top: 0,
left: 0,
bottom: 600,
right: 800,
x: 0,
y: 0,
toJSON: jest.fn(),
}));
assignMockDivToRef(containerRef, mockDiv);
const { unmount } = render(
<SoloPanelPageLogo containerRef={containerRef} isHovered={false} hideLogo={undefined} />
);
expect(ResizeObserver).toHaveBeenCalled();
const resizeObserverInstance = (ResizeObserver as jest.Mock).mock.results[0].value;
expect(resizeObserverInstance.observe).toHaveBeenCalledWith(mockDiv);
unmount();
expect(resizeObserverInstance.disconnect).toHaveBeenCalled();
});
});

View File

@@ -0,0 +1,159 @@
import { css, cx } from '@emotion/css';
import { useEffect, useState } from 'react';
import { GrafanaTheme2, UrlQueryValue } from '@grafana/data';
import { Trans } from '@grafana/i18n';
import { useStyles2, useTheme2 } from '@grafana/ui';
import grafanaTextLogoDarkSvg from 'img/grafana_text_logo_dark.svg';
import grafanaTextLogoLightSvg from 'img/grafana_text_logo_light.svg';
interface SoloPanelPageLogoProps {
containerRef: React.RefObject<HTMLDivElement>;
isHovered: boolean;
hideLogo?: UrlQueryValue;
}
export function shouldHideSoloPanelLogo(hideLogo?: UrlQueryValue): boolean {
if (hideLogo === undefined || hideLogo === null) {
return false;
}
// React-router / locationSearchToObject can represent a "present but no value" query param as boolean true.
if (hideLogo === true) {
return true;
}
if (hideLogo === false) {
return false;
}
const value = Array.isArray(hideLogo) ? String(hideLogo[0] ?? '') : String(hideLogo);
// Treat presence as "true", except explicit disable values.
// Examples:
// - ?hideLogo => hide
// - ?hideLogo=true => hide
// - ?hideLogo=1 => hide
// - ?hideLogo=false => show
// - ?hideLogo=0 => show
const normalized = value.trim().toLowerCase();
return normalized !== 'false' && normalized !== '0';
}
export function SoloPanelPageLogo({ containerRef, isHovered, hideLogo }: SoloPanelPageLogoProps) {
const shouldHide = shouldHideSoloPanelLogo(hideLogo);
const [scale, setScale] = useState(1);
const styles = useStyles2(getStyles);
const theme = useTheme2();
const grafanaLogo = theme.isDark ? grafanaTextLogoLightSvg : grafanaTextLogoDarkSvg;
// Calculate responsive scale based on panel dimensions
useEffect(() => {
const updateScale = () => {
if (!containerRef.current) {
return;
}
const { width, height } = containerRef.current.getBoundingClientRect();
// Use the smaller dimension to ensure it scales appropriately for both wide and tall panels
const minDimension = Math.min(width, height);
// Base scale calculation: scales from 0.6 (for small panels ~200px) up to 1.0 when the smaller dimension is ~800px
// Clamp to a maximum of 1.0 for larger panels
const baseScale = Math.max(0.6, Math.min(1.0, 0.6 + (minDimension - 200) / 600));
// Also consider width specifically for very wide but short panels; reaches 1.0 when width is ~1000px
const widthScale = Math.max(0.6, Math.min(1.0, 0.6 + (width - 200) / 800));
// Use the average of both for balanced scaling; panels around 1000x1000px (or larger in both dimensions) reach a scale of 1.0
const finalScale = Math.min(1.0, (baseScale + widthScale) / 2);
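// Example (illustrative, not from the surrounding code): a 400x300 panel gives minDimension = 300,
// baseScale ≈ 0.77, widthScale = 0.85, so finalScale ≈ 0.81 and the badge renders at roughly 81% of its base size.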
setScale(finalScale);
};
updateScale();
const resizeObserver = new ResizeObserver(updateScale);
if (containerRef.current) {
resizeObserver.observe(containerRef.current);
}
return () => {
resizeObserver.disconnect();
};
}, [containerRef]);
if (shouldHide) {
return null;
}
return (
<div
className={cx(styles.logoContainer, isHovered && styles.logoHidden)}
style={{
fontSize: `${scale * 100}%`,
top: `${8 * scale}px`,
right: `${8 * scale}px`,
padding: `${8 * scale}px ${8 * scale}px`,
}}
>
<span className={styles.text}>
<Trans i18nKey="embedded-panel.powered-by">Powered by</Trans>
</span>
<img
src={grafanaLogo}
alt="Grafana"
className={styles.logo}
style={{
height: `${16 * scale}px`,
marginLeft: '0.25em',
}}
/>
</div>
);
}
const getStyles = (theme: GrafanaTheme2) => {
const logoContainer = css({
position: 'absolute',
// top, right, and padding will be set via inline styles for scaling
backgroundColor: theme.colors.background.primary,
borderRadius: theme.shape.radius.default,
opacity: 0.9,
pointerEvents: 'none',
zIndex: 1000,
display: 'flex',
alignItems: 'center',
boxShadow: theme.shadows.z3,
border: `1px solid ${theme.colors.border.weak}`,
// Base font size - will be scaled via inline style
fontSize: theme.typography.body.fontSize,
lineHeight: 1.2,
[theme.transitions.handleMotion('no-preference', 'reduce')]: {
transition: 'opacity 0.2s ease-in-out',
},
});
const logoHidden = css({
opacity: 0,
});
const text = css({
color: theme.colors.text.secondary,
// fontSize will be inherited from parent container's scale
lineHeight: 1.2,
display: 'block',
});
const logo = css({
// height will be set via inline style (16px * scale) to scale with panel size
display: 'block',
flexShrink: 0,
});
return {
logoContainer,
logoHidden,
text,
logo,
};
};

View File

@@ -106,7 +106,7 @@ describe('ShareModal', () => {
render(<ShareLink {...props} />);
const base = 'http://dashboards.grafana.com/render/d-solo/abcdefghi/my-dash';
const params = '?from=1000&to=2000&orgId=1&panelId=22&width=1000&height=500&scale=1&tz=UTC';
const params = '?from=1000&to=2000&orgId=1&panelId=22&hideLogo=true&width=1000&height=500&scale=1&tz=UTC';
expect(
await screen.findByRole('link', { name: selectors.pages.SharePanelModal.linkToRenderedImage })
).toHaveAttribute('href', base + params);
@@ -117,7 +117,7 @@ describe('ShareModal', () => {
render(<ShareLink {...props} />);
const base = 'http://dashboards.grafana.com/render/dashboard-solo/script/my-dash.js';
const params = '?from=1000&to=2000&orgId=1&panelId=22&width=1000&height=500&scale=1&tz=UTC';
const params = '?from=1000&to=2000&orgId=1&panelId=22&hideLogo=true&width=1000&height=500&scale=1&tz=UTC';
expect(
await screen.findByRole('link', { name: selectors.pages.SharePanelModal.linkToRenderedImage })
).toHaveAttribute('href', base + params);
@@ -154,7 +154,7 @@ describe('ShareModal', () => {
await screen.findByRole('link', { name: selectors.pages.SharePanelModal.linkToRenderedImage })
).toHaveAttribute(
'href',
base + path + '?from=1000&to=2000&orgId=1&panelId=1&width=1000&height=500&scale=1&tz=UTC'
base + path + '?from=1000&to=2000&orgId=1&panelId=1&hideLogo=true&width=1000&height=500&scale=1&tz=UTC'
);
});
@@ -172,7 +172,7 @@ describe('ShareModal', () => {
render(<ShareLink {...props} />);
const base = 'http://dashboards.grafana.com/render/d-solo/abcdefghi/my-dash';
const params = '?from=1000&to=2000&orgId=1&panelId=22&width=1000&height=500&scale=1&tz=UTC';
const params = '?from=1000&to=2000&orgId=1&panelId=22&hideLogo=true&width=1000&height=500&scale=1&tz=UTC';
expect(
await screen.findByRole('link', { name: selectors.pages.SharePanelModal.linkToRenderedImage })
).toHaveAttribute('href', base + params);
@@ -213,7 +213,7 @@ describe('when appUrl is set in the grafana config', () => {
await screen.findByRole('link', { name: selectors.pages.SharePanelModal.linkToRenderedImage })
).toHaveAttribute(
'href',
`http://dashboards.grafana.com/render/d-solo/${mockDashboard.uid}?orgId=1&from=1000&to=2000&panelId=${mockPanel.id}&width=1000&height=500&scale=1&tz=UTC`
`http://dashboards.grafana.com/render/d-solo/${mockDashboard.uid}?orgId=1&from=1000&to=2000&panelId=${mockPanel.id}&hideLogo=true&width=1000&height=500&scale=1&tz=UTC`
);
});
});

View File

@@ -142,6 +142,7 @@ export function buildImageUrl(
let imageUrl = soloUrl.replace(config.appSubUrl + '/dashboard-solo/', config.appSubUrl + '/render/dashboard-solo/');
imageUrl = imageUrl.replace(config.appSubUrl + '/d-solo/', config.appSubUrl + '/render/d-solo/');
imageUrl +=
`&hideLogo=true` +
`&width=${config.rendererDefaultImageWidth}` +
`&height=${config.rendererDefaultImageHeight}` +
`&scale=${config.rendererDefaultImageScale}` +

View File

@@ -43,7 +43,8 @@ export type QueryLibraryContextType = {
app?: CoreApp,
onUpdateSuccess?: () => void,
onSelectQuery?: (query: DataQuery) => void,
datasourceFilters?: string[]
datasourceFilters?: string[],
parentRef?: React.RefObject<HTMLDivElement>
) => ReactNode;
/**

View File

@@ -461,8 +461,7 @@ describe('QueryEditorRow', () => {
render(<QueryEditorRow {...props(testData)} app={CoreApp.UnifiedAlerting} />);
await waitFor(() => {
expect(screen.queryByText('Save query')).not.toBeInTheDocument();
expect(screen.queryByText('Replace with saved query')).not.toBeInTheDocument();
expect(screen.queryByText('Saved queries')).not.toBeInTheDocument();
});
});

View File

@@ -1,7 +1,7 @@
import classNames from 'classnames';
import { cloneDeep, filter, uniqBy, uniqueId } from 'lodash';
import pluralize from 'pluralize';
import { PureComponent, ReactNode, type JSX } from 'react';
import { PureComponent, ReactNode, type JSX, createRef } from 'react';
import {
CoreApp,
@@ -88,6 +88,7 @@ interface State<TQuery extends DataQuery> {
export class QueryEditorRow<TQuery extends DataQuery> extends PureComponent<Props<TQuery>, State<TQuery>> {
dataSourceSrv = getDataSourceSrv();
id = '';
editorRef = createRef<HTMLDivElement>();
state: State<TQuery> = {
datasource: null,
@@ -419,6 +420,7 @@ export class QueryEditorRow<TQuery extends DataQuery> extends PureComponent<Prop
onUpdateSuccess={this.onExitQueryLibraryEditingMode}
onSelectQuery={this.onSelectQueryFromLibrary}
datasourceFilters={datasource?.name ? [datasource.name] : []}
parentRef={this.editorRef}
/>
)}
@@ -542,7 +544,7 @@ export class QueryEditorRow<TQuery extends DataQuery> extends PureComponent<Prop
);
return (
<div data-testid="query-editor-row" aria-label={selectors.components.QueryEditorRows.rows}>
<div data-testid="query-editor-row" aria-label={selectors.components.QueryEditorRows.rows} ref={this.editorRef}>
{queryLibraryRef && (
<MaybeQueryLibraryEditingHeader
query={query}
@@ -611,9 +613,17 @@ function SavedQueryButtons(props: {
onUpdateSuccess?: () => void;
onSelectQuery: (query: DataQuery) => void;
datasourceFilters: string[];
parentRef: React.RefObject<HTMLDivElement>;
}) {
const { renderSavedQueryButtons } = useQueryLibraryContext();
return renderSavedQueryButtons(props.query, props.app, props.onUpdateSuccess, props.onSelectQuery);
return renderSavedQueryButtons(
props.query,
props.app,
props.onUpdateSuccess,
props.onSelectQuery,
undefined,
props.parentRef
);
}
// Will render editing header only if query library is enabled

View File

@@ -1,7 +1,7 @@
import { FieldType, toDataFrame } from '@grafana/data';
import { HeatmapCalculationOptions } from '@grafana/schema';
import { HeatmapCalculationOptions, HeatmapCellLayout, ScaleDistribution } from '@grafana/schema';
import { rowsToCellsHeatmap, calculateHeatmapFromData } from './heatmap';
import { rowsToCellsHeatmap, calculateHeatmapFromData, calculateBucketFactor } from './heatmap';
describe('Heatmap transformer', () => {
it('calculate heatmap from input data', async () => {
@@ -121,4 +121,327 @@ describe('Heatmap transformer', () => {
})
).toThrowErrorMatchingInlineSnapshot(`"No numeric fields found for heatmap"`);
});
describe('calculateBucketFactor', () => {
it('calculates ratio from last two buckets for log2 spacing', () => {
const buckets = [1, 2, 4, 8];
expect(calculateBucketFactor(buckets)).toBe(2);
});
it('calculates ratio from last two buckets for log10 spacing', () => {
const buckets = [1, 10, 100, 1000];
expect(calculateBucketFactor(buckets)).toBe(10);
});
it('calculates ratio for non-uniform spacing', () => {
const buckets = [1, 2.5, 6.25];
expect(calculateBucketFactor(buckets)).toBe(2.5);
});
it('returns default factor for single value array', () => {
expect(calculateBucketFactor([5])).toBe(1.5);
});
it('returns default factor for empty array', () => {
expect(calculateBucketFactor([])).toBe(1.5);
});
it('returns default factor when ratio is not valid expansion (<=1)', () => {
const buckets = [10, 5]; // Descending
expect(calculateBucketFactor(buckets)).toBe(1.5);
});
it('returns default factor when ratio contains zero', () => {
const buckets = [0, 5];
expect(calculateBucketFactor(buckets)).toBe(1.5);
});
it('returns default factor when ratio is infinite', () => {
const buckets = [5, Infinity];
expect(calculateBucketFactor(buckets)).toBe(1.5);
});
it('accepts custom default factor', () => {
expect(calculateBucketFactor([5], 3)).toBe(3);
});
});
describe('rowsToCellsHeatmap with linear scale', () => {
it('converts prometheus-style le labels to numeric buckets with linear scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000, 2000] },
{
name: '1',
type: FieldType.number,
labels: { le: '1' },
values: [10, 15],
},
{
name: '10',
type: FieldType.number,
labels: { le: '10' },
values: [20, 25],
},
{
name: '100',
type: FieldType.number,
labels: { le: '100' },
values: [30, 35],
},
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Linear },
});
expect(heatmap.fields[1].name).toBe('yMin');
expect(heatmap.fields[1].values).toEqual([1, 10, 100, 1, 10, 100]);
});
it('converts ge labels to numeric buckets with linear scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000, 2000] },
{
name: '1',
type: FieldType.number,
labels: { ge: '1' },
values: [10, 15],
},
{
name: '10',
type: FieldType.number,
labels: { ge: '10' },
values: [20, 25],
},
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Linear },
layout: HeatmapCellLayout.ge,
});
expect(heatmap.fields[1].values).toEqual([1, 10, 1, 10]);
expect(heatmap.fields[1].name).toBe('yMin'); // ge layout
});
it('generates yMax field for linear scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: '1', type: FieldType.number, values: [10] },
{ name: '2', type: FieldType.number, values: [20] },
{ name: '4', type: FieldType.number, values: [30] },
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Linear },
});
// Should have xMax, yMin, yMax, and count fields
expect(heatmap.fields.length).toBe(4);
expect(heatmap.fields[2].name).toBe('yMax');
expect(heatmap.fields[2].type).toBe('number');
// yMax should be [2, 4, 8] (shifted buckets + calculated last bucket)
// Last bucket uses factor 2 (from 2→4) to estimate 4→8
expect(heatmap.fields[2].values).toEqual([2, 4, 8]);
});
it('clears yOrdinalDisplay for linear scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: '1', type: FieldType.number, values: [10] },
{ name: '10', type: FieldType.number, values: [20] },
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Linear },
});
expect(heatmap.meta?.custom?.yOrdinalDisplay).toBeUndefined();
});
it('clears yOrdinalDisplay for log scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: '1', type: FieldType.number, values: [10] },
{ name: '10', type: FieldType.number, values: [20] },
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Log, log: 10 },
});
expect(heatmap.meta?.custom?.yOrdinalDisplay).toBeUndefined();
});
it('clears yOrdinalDisplay for symlog scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: '1', type: FieldType.number, values: [10] },
{ name: '10', type: FieldType.number, values: [20] },
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Symlog, log: 10, linearThreshold: 1 },
});
expect(heatmap.meta?.custom?.yOrdinalDisplay).toBeUndefined();
});
it('preserves yOrdinalDisplay for non-numeric scale (auto/ordinal)', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: 'low', type: FieldType.number, values: [10] },
{ name: 'high', type: FieldType.number, values: [20] },
],
});
const heatmap = rowsToCellsHeatmap({ frame });
expect(heatmap.meta?.custom?.yOrdinalDisplay).toEqual(['low', 'high']);
});
it('sets unit to undefined for linear scale when no unit exists', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: '1', type: FieldType.number, values: [10] },
{ name: '10', type: FieldType.number, values: [20] },
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Linear },
});
// No unit → expect undefined (not 'short')
expect(heatmap.fields[1].config.unit).toBeUndefined();
});
it('passes through existing unit for linear scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: '1', type: FieldType.number, values: [10], config: { unit: 'ms' } },
{ name: '10', type: FieldType.number, values: [20], config: { unit: 'ms' } },
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Linear },
});
// Existing unit → pass through unchanged
expect(heatmap.fields[1].config.unit).toBe('ms');
});
it('sets unit to short for ordinal scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: 'low', type: FieldType.number, values: [10] },
{ name: 'high', type: FieldType.number, values: [20] },
],
});
const heatmap = rowsToCellsHeatmap({ frame });
expect(heatmap.fields[1].config.unit).toBe('short');
});
it('uses "count" as value field name for linear scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: '1', type: FieldType.number, values: [10] },
{ name: '10', type: FieldType.number, values: [20] },
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Linear },
});
// Linear scale uses numeric mode, so the value field is named 'count' (fields: xMax, yMin, yMax, count)
const valueField = heatmap.fields.find((f) => f.name === 'count');
expect(valueField).toBeDefined();
});
it('uses "Value" as field name for ordinal scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: 'low', type: FieldType.number, values: [10] },
{ name: 'high', type: FieldType.number, values: [20] },
],
});
const heatmap = rowsToCellsHeatmap({ frame });
const valueField = heatmap.fields.find((f) => f.name === 'Value');
expect(valueField).toBeDefined();
});
it('respects custom value field name for linear scale', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: '1', type: FieldType.number, values: [10] },
{ name: '10', type: FieldType.number, values: [20] },
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Linear },
value: 'Temperature',
});
const valueField = heatmap.fields.find((f) => f.name === 'Temperature');
expect(valueField).toBeDefined();
});
it('calculates yMax upper bound using bucket factor', () => {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [1000] },
{ name: '1', type: FieldType.number, values: [10] },
{ name: '10', type: FieldType.number, values: [20] },
{ name: '100', type: FieldType.number, values: [30] },
],
});
const heatmap = rowsToCellsHeatmap({
frame,
yBucketScale: { type: ScaleDistribution.Linear },
});
// buckets: [1, 10, 100]
// yMax: [10, 100, 1000] - last one calculated as 100 * 10
const yMaxField = heatmap.fields.find((f) => f.name === 'yMax');
expect(yMaxField?.values).toEqual([10, 100, 1000]);
});
});
});

View File

@@ -19,6 +19,7 @@ import { isLikelyAscendingVector } from '@grafana/data/internal';
import { t } from '@grafana/i18n';
import {
ScaleDistribution,
ScaleDistributionConfig,
HeatmapCellLayout,
HeatmapCalculationMode,
HeatmapCalculationOptions,
@@ -72,13 +73,36 @@ function parseNumeric(v?: string | null) {
return v === '+Inf' ? Infinity : v === '-Inf' ? -Infinity : +(v ?? 0);
}
/**
* Calculate the expansion factor from adjacent bucket values.
* This is used to estimate the size/bound of the next bucket based on the spacing of existing buckets.
*
* @param bucketValues - Array of bucket boundary values
* @param defaultFactor - Factor to use if ratio cannot be determined (default: 1.5 for 50% expansion)
* @returns The calculated or default expansion factor
*/
export function calculateBucketFactor(bucketValues: number[], defaultFactor = 1.5): number {
if (bucketValues.length >= 2) {
const last = bucketValues.at(-1)!;
const prev = bucketValues.at(-2)!;
const ratio = last / prev;
// Only use ratio if it represents expansion (>1) and is valid
if (ratio > 1 && Number.isFinite(ratio)) {
return ratio;
}
}
return defaultFactor;
}
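// Worked example (illustrative only, not called by the code below):
//   [1, 10, 100]      -> factor = 100 / 10 = 10, so an open-ended last bucket starting at 100
//                        gets an estimated upper bound of 100 * 10 = 1000
//   [10, 5] or [0, 5] -> ratio is <= 1 or non-finite, so the defaultFactor (1.5) is used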
export function sortAscStrInf(aName?: string | null, bName?: string | null) {
return parseNumeric(aName) - parseNumeric(bName);
}
export interface HeatmapRowsCustomMeta {
/** This provides the lookup values */
yOrdinalDisplay: string[];
yOrdinalDisplay?: string[];
yOrdinalLabel?: string[];
yMatchWithLabel?: string;
yMinDisplay?: string;
@@ -115,6 +139,7 @@ export interface RowsHeatmapOptions {
unit?: string;
decimals?: number;
layout?: HeatmapCellLayout;
yBucketScale?: ScaleDistributionConfig;
}
/** Given existing buckets, create a values style frame */
@@ -129,10 +154,19 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
throw new Error(t('heatmap.error.no-y-fields', 'No numeric fields found for heatmap'));
}
// Determine if we should use numeric scaling based on yBucketScale option
// Default to 'auto' behavior (ordinal) if not specified
const scaleType = opts.yBucketScale?.type;
const useNumericScale =
scaleType === ScaleDistribution.Linear ||
scaleType === ScaleDistribution.Log ||
scaleType === ScaleDistribution.Symlog;
// similar to initBins() below
const len = xValues.length * yFields.length;
const xs = new Array(len);
const ys = new Array(len);
const ys2 = useNumericScale ? new Array(len) : undefined;
const counts2 = new Array(len);
const counts = yFields.map((field) => field.values.slice());
@@ -144,21 +178,8 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
}
});
const bucketBounds = Array.from({ length: yFields.length }, (v, i) => i);
// fill flat/repeating array
for (let i = 0, yi = 0, xi = 0; i < len; yi = ++i % bucketBounds.length) {
ys[i] = bucketBounds[yi];
if (yi === 0 && i >= bucketBounds.length) {
xi++;
}
xs[i] = xValues[xi];
}
// this name determines whether cells are drawn above, below, or centered on the values
let ordinalFieldName = yFields[0].labels?.le != null ? 'yMax' : 'y';
let ordinalFieldName = yFields[0].labels?.le != null ? 'yMax' : yFields[0].labels?.ge != null ? 'yMin' : 'y';
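// le labels -> 'yMax' (each y value is the cell's upper bound), ge labels -> 'yMin' (lower bound),
// otherwise 'y' (centered); the layout option below can still override this.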
switch (opts.layout) {
case HeatmapCellLayout.le:
ordinalFieldName = 'yMax';
@@ -175,6 +196,45 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
yOrdinalDisplay: yFields.map((f) => getFieldDisplayName(f, opts.frame)),
yMatchWithLabel: Object.keys(yFields[0].labels ?? {})[0],
};
let bucketBounds: number[];
let bucketBoundsMax: number[] | undefined;
if (useNumericScale) {
// Numeric mode: use numeric bucket values
bucketBounds = yFields.map((field) => {
const labelKey = custom.yMatchWithLabel;
const labelValue = labelKey ? field.labels?.[labelKey] : undefined;
const valueStr = labelValue ?? field.name;
return Number(valueStr);
});
// Generate upper bounds: shift values + calculate last bucket
bucketBoundsMax = bucketBounds.slice();
bucketBoundsMax.shift();
const factor = calculateBucketFactor(bucketBounds);
bucketBoundsMax.push(bucketBounds[bucketBounds.length - 1] * factor);
custom.yMatchWithLabel = undefined;
} else {
// Auto mode: use ordinal indices (the previous default behavior)
bucketBounds = Array.from({ length: yFields.length }, (v, i) => i);
}
// fill flat/repeating array
for (let i = 0, yi = 0, xi = 0; i < len; yi = ++i % bucketBounds.length) {
ys[i] = bucketBounds[yi];
if (useNumericScale && ys2 && bucketBoundsMax) {
ys2[i] = bucketBoundsMax[yi];
}
if (yi === 0 && i >= bucketBounds.length) {
xi++;
}
xs[i] = xValues[xi];
}
if (custom.yMatchWithLabel) {
custom.yOrdinalLabel = yFields.map((f) => f.labels?.[custom.yMatchWithLabel!] ?? '');
if (custom.yMatchWithLabel === 'le') {
@@ -189,7 +249,7 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
if (custom.yMinDisplay) {
custom.yMinDisplay = formattedValueToString(fmt(0, opts.decimals));
}
custom.yOrdinalDisplay = custom.yOrdinalDisplay.map((name) => {
custom.yOrdinalDisplay = custom.yOrdinalDisplay?.map((name) => {
let num = +name;
if (!Number.isNaN(num)) {
@@ -200,6 +260,11 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
});
}
// Clear yOrdinalDisplay when using numeric scales (linear, log, symlog)
if (useNumericScale) {
custom.yOrdinalDisplay = undefined;
}
const valueCfg = {
...yFields[0].config,
};
@@ -208,6 +273,43 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
delete valueCfg.displayNameFromDS;
}
// Build fields array - only include yMax when a numeric scale (linear/log/symlog) is used
const fields: Field[] = [
{
name: xField.type === FieldType.time ? 'xMax' : 'x',
type: xField.type,
values: xs,
config: xField.config,
},
{
name: useNumericScale ? 'yMin' : ordinalFieldName,
type: FieldType.number,
values: ys,
config: {
unit: useNumericScale ? yFields[0]?.config?.unit : 'short', // preserve original unit for numeric, use 'short' for ordinal
},
},
];
// yMax provides explicit upper bounds for proper rendering, critical for ge layout
if (useNumericScale && ys2) {
fields.push({
name: 'yMax',
type: FieldType.number,
values: ys2,
config: {},
});
}
// Add value/count field
fields.push({
name: opts.value?.length ? opts.value : useNumericScale ? 'count' : 'Value',
type: FieldType.number,
values: counts2,
config: valueCfg,
display: yFields[0].display,
});
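// For example (illustrative), le-labelled buckets 1, 10, 100 at a single timestamp t produce:
// xMax [t, t, t], yMin [1, 10, 100], yMax [10, 100, 1000], count [c1, c2, c3]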
return {
length: xs.length,
refId: opts.frame.refId,
@@ -215,29 +317,7 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
type: DataFrameType.HeatmapCells,
custom,
},
fields: [
{
name: xField.type === FieldType.time ? 'xMax' : 'x',
type: xField.type,
values: xs,
config: xField.config,
},
{
name: ordinalFieldName,
type: FieldType.number,
values: ys,
config: {
unit: 'short', // ordinal lookup
},
},
{
name: opts.value?.length ? opts.value : 'Value',
type: FieldType.number,
values: counts2,
config: valueCfg,
display: yFields[0].display,
},
],
fields,
};
}

View File

@@ -5,7 +5,6 @@ import { DashboardCursorSync, PanelProps, TimeRange } from '@grafana/data';
import { PanelDataErrorView } from '@grafana/runtime';
import { ScaleDistributionConfig } from '@grafana/schema';
import {
ScaleDistribution,
TooltipPlugin2,
TooltipDisplayMode,
UPlotChart,
@@ -29,7 +28,7 @@ import { HeatmapTooltip } from './HeatmapTooltip';
import { HeatmapData, prepareHeatmapData } from './fields';
import { quantizeScheme } from './palettes';
import { Options } from './types';
import { prepConfig } from './utils';
import { calculateYSizeDivisor, prepConfig } from './utils';
interface HeatmapPanelProps extends PanelProps<Options> {}
@@ -141,6 +140,16 @@ const HeatmapPanelViz = ({
const builder = useMemo(() => {
const scaleConfig: ScaleDistributionConfig = dataRef.current?.heatmap?.fields[1].config?.custom?.scaleDistribution;
const activeScaleConfig = options.rowsFrame?.yBucketScale ?? scaleConfig;
// For log/symlog scales: use 1 for pre-bucketed data with explicit scale, otherwise use split value
const hasExplicitScale = options.rowsFrame?.yBucketScale !== undefined;
const ySizeDivisor = calculateYSizeDivisor(
activeScaleConfig?.type,
hasExplicitScale,
options.calculation?.yBuckets?.value
);
return prepConfig({
dataRef,
theme,
@@ -151,9 +160,10 @@ const HeatmapPanelViz = ({
hideGE: options.filterValues?.ge,
exemplarColor: options.exemplars?.color ?? 'rgba(255,0,255,0.7)',
yAxisConfig: options.yAxis,
ySizeDivisor: scaleConfig?.type === ScaleDistribution.Log ? +(options.calculation?.yBuckets?.value || 1) : 1,
ySizeDivisor,
selectionMode: options.selectionMode,
xAxisConfig: getXAxisConfig(annotationsLength),
rowsFrame: options.rowsFrame,
});
// eslint-disable-next-line react-hooks/exhaustive-deps

View File

@@ -4,7 +4,6 @@ import uPlot from 'uplot';
import {
ActionModel,
DataFrameType,
Field,
FieldType,
formattedValueToString,
@@ -26,7 +25,7 @@ import {
} from '@grafana/ui/internal';
import { ColorScale } from 'app/core/components/ColorScale/ColorScale';
import { getDashboardSrv } from 'app/features/dashboard/services/DashboardSrv';
import { isHeatmapCellsDense, readHeatmapRowsCustomMeta } from 'app/features/transformers/calculateHeatmap/heatmap';
import { readHeatmapRowsCustomMeta } from 'app/features/transformers/calculateHeatmap/heatmap';
import { getDisplayValuesAndLinks } from 'app/features/visualization/data-hover/DataHoverView';
import { ExemplarTooltip } from 'app/features/visualization/data-hover/ExemplarTooltip';
@@ -35,7 +34,13 @@ import { isTooltipScrollable } from '../timeseries/utils';
import { HeatmapData } from './fields';
import { renderHistogram } from './renderHistogram';
import { formatMilliseconds, getFieldFromData, getHoverCellColor, getSparseCellMinMax } from './tooltip/utils';
import {
formatMilliseconds,
getFieldFromData,
getHoverCellColor,
getSparseCellMinMax,
isHeatmapSparse,
} from './tooltip/utils';
interface HeatmapTooltipProps {
mode: TooltipDisplayMode;
@@ -99,9 +104,7 @@ const HeatmapHoverCell = ({
const index = dataIdxs[1]!;
const data = dataRef.current;
const [isSparse] = useState(
() => data.heatmap?.meta?.type === DataFrameType.HeatmapCells && !isHeatmapCellsDense(data.heatmap)
);
const [isSparse] = useState(() => isHeatmapSparse(data.heatmap));
const xField = getFieldFromData(data.heatmap!, 'x', isSparse)!;
const yField = getFieldFromData(data.heatmap!, 'y', isSparse)!;

View File

@@ -0,0 +1,277 @@
import { render, screen, within } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import selectEvent from 'react-select-event';
import { StandardEditorContext, StandardEditorsRegistryItem } from '@grafana/data';
import { ScaleDistribution, ScaleDistributionConfig } from '@grafana/schema';
import { YBucketScaleEditor } from './YBucketScaleEditor';
const mockContext: StandardEditorContext<unknown> = {
data: [],
};
const mockItem: StandardEditorsRegistryItem<ScaleDistributionConfig | undefined> = {
id: 'yBucketScale',
name: 'Y Bucket Scale',
editor: YBucketScaleEditor,
};
describe('YBucketScaleEditor', () => {
describe('Scale selection', () => {
it('should render with Auto selected when value is undefined', () => {
const onChange = jest.fn();
render(<YBucketScaleEditor value={undefined} onChange={onChange} context={mockContext} item={mockItem} />);
const autoButton = screen.getByRole('radio', { name: /auto/i });
expect(autoButton).toBeChecked();
});
it('should render with Linear selected when value is Linear', () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Linear }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
const linearButton = screen.getByRole('radio', { name: /linear/i });
expect(linearButton).toBeChecked();
});
it('should call onChange with undefined when Auto is selected', async () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Linear }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
const autoButton = screen.getByRole('radio', { name: /auto/i });
await userEvent.click(autoButton);
expect(onChange).toHaveBeenCalledWith(undefined);
});
it('should call onChange with Linear config when Linear is selected', async () => {
const onChange = jest.fn();
render(<YBucketScaleEditor value={undefined} onChange={onChange} context={mockContext} item={mockItem} />);
const linearButton = screen.getByRole('radio', { name: /linear/i });
await userEvent.click(linearButton);
expect(onChange).toHaveBeenCalledWith({ type: ScaleDistribution.Linear });
});
it('should call onChange with Log config when Log is selected', async () => {
const onChange = jest.fn();
render(<YBucketScaleEditor value={undefined} onChange={onChange} context={mockContext} item={mockItem} />);
const logButton = screen.getByRole('radio', { name: /^log$/i });
await userEvent.click(logButton);
expect(onChange).toHaveBeenCalledWith({ type: ScaleDistribution.Log, log: 2 });
});
it('should call onChange with Symlog config when Symlog is selected', async () => {
const onChange = jest.fn();
render(<YBucketScaleEditor value={undefined} onChange={onChange} context={mockContext} item={mockItem} />);
const symlogButton = screen.getByRole('radio', { name: /symlog/i });
await userEvent.click(symlogButton);
expect(onChange).toHaveBeenCalledWith({ type: ScaleDistribution.Symlog, log: 2, linearThreshold: 1 });
});
});
describe('Log base selection', () => {
it('should show log base selector for Log scale', () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Log, log: 2 }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
expect(screen.getByText('Log base')).toBeInTheDocument();
});
it('should show log base selector for Symlog scale', () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Symlog, log: 2, linearThreshold: 1 }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
expect(screen.getByText('Log base')).toBeInTheDocument();
});
it('should not show log base selector for Linear scale', () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Linear }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
expect(screen.queryByText('Log base')).not.toBeInTheDocument();
});
it('should not show log base selector for Auto', () => {
const onChange = jest.fn();
render(<YBucketScaleEditor value={undefined} onChange={onChange} context={mockContext} item={mockItem} />);
expect(screen.queryByText('Log base')).not.toBeInTheDocument();
});
it('should preserve existing log base when switching to Log', async () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Symlog, log: 10, linearThreshold: 1 }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
const logButton = screen.getByRole('radio', { name: /^log$/i });
await userEvent.click(logButton);
expect(onChange).toHaveBeenCalledWith({ type: ScaleDistribution.Log, log: 10 });
});
it('should update log base when changed for Log scale', async () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Log, log: 2 }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
// Find the log base field container and query the combobox within it
const logBaseLabel = screen.getByText('Log base');
const fieldContainer = logBaseLabel.closest('div[style]') as HTMLElement; // The div with style="margin-top: 8px;"
const selectEl = within(fieldContainer).getByRole('combobox');
await selectEvent.select(selectEl, '10', { container: document.body });
expect(onChange).toHaveBeenCalledWith({ type: ScaleDistribution.Log, log: 10 });
});
it('should update log base when changed for Symlog scale', async () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Symlog, log: 2, linearThreshold: 1 }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
// Find the log base field container and query the combobox within it
const logBaseLabel = screen.getByText('Log base');
const fieldContainer = logBaseLabel.closest('div[style]') as HTMLElement; // The div with style="margin-top: 8px;"
const selectEl = within(fieldContainer).getByRole('combobox');
await selectEvent.select(selectEl, '10', { container: document.body });
expect(onChange).toHaveBeenCalledWith({ type: ScaleDistribution.Symlog, log: 10, linearThreshold: 1 });
});
});
describe('Linear threshold', () => {
it('should show linear threshold input for Symlog scale', () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Symlog, log: 2, linearThreshold: 1 }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
expect(screen.getByText('Linear threshold')).toBeInTheDocument();
});
it('should not show linear threshold input for Log scale', () => {
const onChange = jest.fn();
render(
<YBucketScaleEditor
value={{ type: ScaleDistribution.Log, log: 2 }}
onChange={onChange}
context={mockContext}
item={mockItem}
/>
);
expect(screen.queryByText('Linear threshold')).not.toBeInTheDocument();
});
it('should not update linear threshold for a 0 value', async () => {
const onChange = jest.fn();
const origValue = { type: ScaleDistribution.Symlog, log: 10, linearThreshold: 1 };
render(<YBucketScaleEditor value={{ ...origValue }} onChange={onChange} context={mockContext} item={mockItem} />);
const input = screen.getByPlaceholderText('1');
await userEvent.clear(input);
await userEvent.type(input, '0');
expect(onChange).not.toHaveBeenCalled();
await userEvent.type(input, '.');
expect(onChange).not.toHaveBeenCalled();
await userEvent.type(input, '5');
expect(onChange).toHaveBeenCalledWith({ ...origValue, linearThreshold: 0.5 });
});
it('should update linear threshold for valid non-zero values', async () => {
const onChange = jest.fn();
const origValue = { type: ScaleDistribution.Symlog, log: 2, linearThreshold: 1 };
render(<YBucketScaleEditor value={{ ...origValue }} onChange={onChange} context={mockContext} item={mockItem} />);
const input = screen.getByPlaceholderText('1');
await userEvent.clear(input);
await userEvent.type(input, '5');
expect(onChange).toHaveBeenCalledWith({ ...origValue, linearThreshold: 5 });
});
it('should not dispatch onChange for invalid input', async () => {
const onChange = jest.fn();
const origValue = { type: ScaleDistribution.Symlog, log: 2, linearThreshold: 1 };
render(<YBucketScaleEditor value={{ ...origValue }} onChange={onChange} context={mockContext} item={mockItem} />);
const input = screen.getByPlaceholderText('1');
await userEvent.clear(input);
await userEvent.type(input, 'abc');
expect(onChange).not.toHaveBeenCalled();
});
});
});

View File

@@ -0,0 +1,135 @@
import { useState } from 'react';
import { SelectableValue, StandardEditorProps } from '@grafana/data';
import { t } from '@grafana/i18n';
import { ScaleDistribution, ScaleDistributionConfig } from '@grafana/schema';
import { RadioButtonGroup, Field, Select, Input } from '@grafana/ui';
type ScaleOptionValue = 'auto' | ScaleDistribution;
/**
* Simplified scale editor that shows all options in a single line.
* Includes "Auto" option which returns undefined to use default behavior.
*/
export const YBucketScaleEditor = (props: StandardEditorProps<ScaleDistributionConfig | undefined>) => {
const { value, onChange } = props;
const type = value?.type;
const log = value?.log ?? 2;
const isAuto = value === undefined;
const [localLinearThreshold, setLocalLinearThreshold] = useState<string>(
value?.linearThreshold != null ? String(value.linearThreshold) : ''
);
const currentOption: ScaleOptionValue = isAuto ? 'auto' : type!;
const showLogBase = type === ScaleDistribution.Log || type === ScaleDistribution.Symlog;
const showLinearThreshold = type === ScaleDistribution.Symlog;
const SCALE_OPTIONS: Array<SelectableValue<ScaleOptionValue>> = [
{
label: t('heatmap.y-bucket-scale-editor.scale-options.label-auto', 'Auto'),
value: 'auto',
},
{
label: t('heatmap.y-bucket-scale-editor.scale-options.label-linear', 'Linear'),
value: ScaleDistribution.Linear,
},
{
label: t('heatmap.y-bucket-scale-editor.scale-options.label-log', 'Log'),
value: ScaleDistribution.Log,
},
{
label: t('heatmap.y-bucket-scale-editor.scale-options.label-symlog', 'Symlog'),
value: ScaleDistribution.Symlog,
},
];
const LOG_BASE_OPTIONS: Array<SelectableValue<number>> = [
{
label: '2',
value: 2,
},
{
label: '10',
value: 10,
},
];
const handleScaleChange = (v: ScaleOptionValue) => {
if (v === 'auto') {
onChange(undefined);
return;
}
if (v === ScaleDistribution.Linear) {
onChange({ type: ScaleDistribution.Linear });
return;
}
if (v === ScaleDistribution.Log) {
onChange({ type: ScaleDistribution.Log, log });
return;
}
if (v === ScaleDistribution.Symlog) {
onChange({
type: ScaleDistribution.Symlog,
log,
linearThreshold: value?.linearThreshold ?? 1,
});
return;
}
};
const handleLogBaseChange = (newLog: number) => {
onChange({
...value!,
log: newLog,
});
};
const handleLinearThresholdChange = (newValue: string) => {
setLocalLinearThreshold(newValue);
const numValue = parseFloat(newValue);
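// Only commit parseable, non-zero thresholds; invalid or zero input stays local until corrected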
if (!isNaN(numValue) && numValue !== 0) {
onChange({
...value!,
linearThreshold: numValue,
});
}
};
return (
<>
<RadioButtonGroup value={currentOption} options={SCALE_OPTIONS} onChange={handleScaleChange} />
{showLogBase && (
<Field
label={t('heatmap.y-bucket-scale-editor.log-base-label', 'Log base')}
style={{ marginTop: '8px' }}
noMargin
>
<Select options={LOG_BASE_OPTIONS} value={log} onChange={(v) => handleLogBaseChange(v.value!)} />
</Field>
)}
{showLinearThreshold && (
<Field
label={t('heatmap.y-bucket-scale-editor.linear-threshold-label', 'Linear threshold')}
description={t(
'heatmap.y-bucket-scale-editor.linear-threshold-description',
'Range within which the scale is linear'
)}
style={{ marginTop: '8px' }}
noMargin
>
<Input
type="number"
value={localLinearThreshold}
onChange={(e) => handleLinearThresholdChange(e.currentTarget.value)}
placeholder={t('heatmap.y-bucket-scale-editor.linear-threshold-placeholder', '1')}
/>
</Field>
)}
</>
);
};

View File

@@ -1,4 +1,11 @@
import { DataFrame, FieldConfigProperty, FieldType, identityOverrideProcessor, PanelPlugin } from '@grafana/data';
import {
DataFrame,
DataFrameType,
FieldConfigProperty,
FieldType,
identityOverrideProcessor,
PanelPlugin,
} from '@grafana/data';
import { t } from '@grafana/i18n';
import { config } from '@grafana/runtime';
import {
@@ -15,6 +22,7 @@ import { addHeatmapCalculationOptions } from 'app/features/transformers/calculat
import { readHeatmapRowsCustomMeta } from 'app/features/transformers/calculateHeatmap/heatmap';
import { HeatmapPanel } from './HeatmapPanel';
import { YBucketScaleEditor } from './YBucketScaleEditor';
import { prepareHeatmapData } from './fields';
import { heatmapChangedHandler, heatmapMigrationHandler } from './migrations';
import { colorSchemes, quantizeScheme } from './palettes';
@@ -59,6 +67,7 @@ export const plugin = new PanelPlugin<Options, GraphFieldConfig>(HeatmapPanel)
const opts = context.options ?? defaultOptions;
let isOrdinalY = false;
const isHeatmapCells = context.data.some((frame) => frame.meta?.type === DataFrameType.HeatmapCells);
if (context.data.length > 0) {
try {
@@ -94,6 +103,17 @@ export const plugin = new PanelPlugin<Options, GraphFieldConfig>(HeatmapPanel)
addHeatmapCalculationOptions('calculation.', builder, opts.calculation, category);
}
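// Offer an explicit Y bucket scale only for pre-bucketed rows data, behind the heatmapRowsAxisOptions toggle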
if (!opts.calculate && !isHeatmapCells && config.featureToggles.heatmapRowsAxisOptions) {
builder.addCustomEditor({
id: 'rowsFrame-yBucketScale',
path: 'rowsFrame.yBucketScale',
name: t('heatmap.name-y-bucket-scale', 'Y bucket scale'),
category,
editor: YBucketScaleEditor,
defaultValue: undefined,
});
}
category = [t('heatmap.category-y-axis', 'Y Axis')];
builder
@@ -170,7 +190,9 @@ export const plugin = new PanelPlugin<Options, GraphFieldConfig>(HeatmapPanel)
category,
});
if (!opts.calculate) {
// Hide tick alignment for explicit scales - bucket boundaries are fixed by numeric labels
const hasExplicitScale = context.options?.rowsFrame?.yBucketScale !== undefined;
if (!opts.calculate && !hasExplicitScale) {
builder.addRadio({
path: 'rowsFrame.layout',
name: t('heatmap.name-tick-alignment', 'Tick alignment'),

View File

@@ -105,6 +105,8 @@ composableKinds: PanelCfg: lineage: {
value?: string
// Controls tick alignment when not calculating from data
layout?: ui.HeatmapCellLayout
// Controls the scale distribution of the y-axis buckets
yBucketScale?: ui.ScaleDistributionConfig
} @cuetsy(kind="interface")
Options: {
annotations?: ui.VizAnnotations

View File

@@ -183,6 +183,10 @@ export interface RowsHeatmapOptions {
* Sets the name of the cell when not calculating from data
*/
value?: string;
/**
* Controls the scale distribution of the y-axis buckets
*/
yBucketScale?: ui.ScaleDistributionConfig;
}
export interface Options {

View File

@@ -0,0 +1,47 @@
import { DataFrameType, toDataFrame } from '@grafana/data';
import { isHeatmapSparse } from './utils';
describe('isHeatmapSparse', () => {
it('should return false when heatmap is undefined', () => {
expect(isHeatmapSparse(undefined)).toBe(false);
});
it('should return false for dense HeatmapCells (single Y field)', () => {
const heatmap = toDataFrame({
fields: [{ name: 'y', values: [] }],
meta: { type: DataFrameType.HeatmapCells },
});
expect(isHeatmapSparse(heatmap)).toBe(false);
});
it('should return true for sparse HeatmapCells (yMin and yMax fields)', () => {
const heatmap = toDataFrame({
fields: [
{ name: 'yMin', values: [] },
{ name: 'yMax', values: [] },
],
meta: { type: DataFrameType.HeatmapCells },
});
expect(isHeatmapSparse(heatmap)).toBe(true);
});
it('should return false for non-HeatmapCells data frames', () => {
const heatmap = toDataFrame({
fields: [{ name: 'Value', values: [] }],
meta: { type: DataFrameType.HeatmapRows },
});
expect(isHeatmapSparse(heatmap)).toBe(false);
});
it('should return false when meta is undefined', () => {
const heatmap = toDataFrame({
fields: [{ name: 'value', values: [] }],
});
expect(isHeatmapSparse(heatmap)).toBe(false);
});
});

View File

@@ -1,4 +1,5 @@
import { DataFrame, Field } from '@grafana/data';
import { DataFrame, DataFrameType, Field } from '@grafana/data';
import { isHeatmapCellsDense } from 'app/features/transformers/calculateHeatmap/heatmap';
import { HeatmapData } from '../fields';
@@ -91,3 +92,14 @@ export const getSparseCellMinMax = (data: HeatmapData, index: number): BucketsMi
yBucketMax: yMax.values[index],
};
};
/**
* Determines if a heatmap DataFrame is sparse (has explicit yMin/yMax bounds).
* Sparse heatmaps have HeatmapCells type and are not dense.
*/
export function isHeatmapSparse(heatmap: DataFrame | undefined): boolean {
if (!heatmap) {
return false;
}
return heatmap.meta?.type === DataFrameType.HeatmapCells && !isHeatmapCellsDense(heatmap);
}

View File

@@ -1,5 +1,374 @@
describe('a test', () => {
it('has to have at least one test', () => {
expect(true).toBeTruthy();
import { ScaleDistribution } from '@grafana/schema';
import { applyExplicitMinMax, boundedMinMax, calculateYSizeDivisor, toLogBase, valuesToFills } from './utils';
describe('toLogBase', () => {
it('returns 10 when value is 10', () => {
expect(toLogBase(10)).toBe(10);
});
it('returns 2 when value is 2', () => {
expect(toLogBase(2)).toBe(2);
});
it('returns 2 (default) when value is undefined', () => {
expect(toLogBase(undefined)).toBe(2);
});
it('returns 2 (default) for invalid values', () => {
expect(toLogBase(5)).toBe(2);
expect(toLogBase(0)).toBe(2);
expect(toLogBase(-1)).toBe(2);
expect(toLogBase(100)).toBe(2);
});
});
describe('applyExplicitMinMax', () => {
it('returns original values when no explicit values provided', () => {
const [min, max] = applyExplicitMinMax(0, 100, undefined, undefined);
expect(min).toBe(0);
expect(max).toBe(100);
});
it('applies explicit min only', () => {
const [min, max] = applyExplicitMinMax(0, 100, 10, undefined);
expect(min).toBe(10);
expect(max).toBe(100);
});
it('applies explicit max only', () => {
const [min, max] = applyExplicitMinMax(0, 100, undefined, 90);
expect(min).toBe(0);
expect(max).toBe(90);
});
it('applies both explicit min and max', () => {
const [min, max] = applyExplicitMinMax(0, 100, 20, 80);
expect(min).toBe(20);
expect(max).toBe(80);
});
it('handles negative values', () => {
const [min, max] = applyExplicitMinMax(-50, 50, -10, 10);
expect(min).toBe(-10);
expect(max).toBe(10);
});
it('handles explicit min = 0', () => {
const [min, max] = applyExplicitMinMax(10, 100, 0, undefined);
expect(min).toBe(0);
expect(max).toBe(100);
});
it('handles explicit max = 0', () => {
const [min, max] = applyExplicitMinMax(-100, -10, undefined, 0);
expect(min).toBe(-100);
expect(max).toBe(0);
});
it('handles null scaleMin', () => {
const [min, max] = applyExplicitMinMax(null, 100, 10, undefined);
expect(min).toBe(10);
expect(max).toBe(100);
});
it('handles null scaleMax', () => {
const [min, max] = applyExplicitMinMax(0, null, undefined, 90);
expect(min).toBe(0);
expect(max).toBe(90);
});
it('preserves null when no explicit value provided', () => {
const [min, max] = applyExplicitMinMax(null, null, undefined, undefined);
expect(min).toBe(null);
expect(max).toBe(null);
});
});
describe('calculateYSizeDivisor', () => {
it('returns 1 for linear scale', () => {
expect(calculateYSizeDivisor(ScaleDistribution.Linear, false, 2)).toBe(1);
});
it('returns 1 for log scale with explicit scale', () => {
expect(calculateYSizeDivisor(ScaleDistribution.Log, true, 2)).toBe(1);
});
it('returns 1 for symlog scale with explicit scale', () => {
expect(calculateYSizeDivisor(ScaleDistribution.Symlog, true, 2)).toBe(1);
});
it('returns split value for log scale without explicit scale', () => {
expect(calculateYSizeDivisor(ScaleDistribution.Log, false, 2)).toBe(2);
expect(calculateYSizeDivisor(ScaleDistribution.Log, false, 4)).toBe(4);
});
it('returns split value for symlog scale without explicit scale', () => {
expect(calculateYSizeDivisor(ScaleDistribution.Symlog, false, 2)).toBe(2);
expect(calculateYSizeDivisor(ScaleDistribution.Symlog, false, 3)).toBe(3);
});
it('handles string split values', () => {
expect(calculateYSizeDivisor(ScaleDistribution.Log, false, '2')).toBe(2);
expect(calculateYSizeDivisor(ScaleDistribution.Log, false, '4')).toBe(4);
});
it('returns 1 when split value is undefined', () => {
expect(calculateYSizeDivisor(ScaleDistribution.Log, false, undefined)).toBe(1);
});
it('returns 1 when scale type is undefined', () => {
expect(calculateYSizeDivisor(undefined, false, 2)).toBe(1);
});
it('returns 1 for ordinal scale', () => {
expect(calculateYSizeDivisor(ScaleDistribution.Ordinal, false, 2)).toBe(1);
});
});
describe('boundedMinMax', () => {
describe('when min and max are not provided', () => {
it('calculates min and max from values', () => {
const values = [10, 20, 5, 30, 15];
const [min, max] = boundedMinMax(values);
expect(min).toBe(5);
expect(max).toBe(30);
});
it('handles single value', () => {
const values = [42];
const [min, max] = boundedMinMax(values);
expect(min).toBe(42);
expect(max).toBe(42);
});
it('handles negative values', () => {
const values = [-10, -20, -5, -30];
const [min, max] = boundedMinMax(values);
expect(min).toBe(-30);
expect(max).toBe(-5);
});
it('handles mixed positive and negative values', () => {
const values = [-10, 20, -5, 30];
const [min, max] = boundedMinMax(values);
expect(min).toBe(-10);
expect(max).toBe(30);
});
it('returns Infinity/-Infinity for empty array', () => {
const values: number[] = [];
const [min, max] = boundedMinMax(values);
expect(min).toBe(Infinity);
expect(max).toBe(-Infinity);
});
});
describe('when min is provided', () => {
it('uses provided min value', () => {
const values = [10, 20, 5, 30];
const [min, max] = boundedMinMax(values, 0);
expect(min).toBe(0);
expect(max).toBe(30);
});
it('uses provided min even if higher than data min', () => {
const values = [10, 20, 5, 30];
const [min, max] = boundedMinMax(values, 15);
expect(min).toBe(15);
expect(max).toBe(30);
});
});
describe('when max is provided', () => {
it('uses provided max value', () => {
const values = [10, 20, 5, 30];
const [min, max] = boundedMinMax(values, undefined, 50);
expect(min).toBe(5);
expect(max).toBe(50);
});
it('uses provided max even if lower than data max', () => {
const values = [10, 20, 5, 30];
const [min, max] = boundedMinMax(values, undefined, 25);
expect(min).toBe(5);
expect(max).toBe(25);
});
});
describe('when both min and max are provided', () => {
it('uses both provided values', () => {
const values = [10, 20, 5, 30];
const [min, max] = boundedMinMax(values, 0, 50);
expect(min).toBe(0);
expect(max).toBe(50);
});
});
describe('with hideLE filter', () => {
it('excludes values less than or equal to hideLE', () => {
const values = [5, 10, 15, 20, 25];
const [min, max] = boundedMinMax(values, undefined, undefined, 10);
expect(min).toBe(15);
expect(max).toBe(25);
});
it('excludes all values when hideLE is higher than all values', () => {
const values = [5, 10, 15];
const [min, max] = boundedMinMax(values, undefined, undefined, 20);
expect(min).toBe(Infinity);
expect(max).toBe(-Infinity);
});
});
describe('with hideGE filter', () => {
it('excludes values greater than or equal to hideGE', () => {
const values = [5, 10, 15, 20, 25];
const [min, max] = boundedMinMax(values, undefined, undefined, -Infinity, 20);
expect(min).toBe(5);
expect(max).toBe(15);
});
it('excludes all values when hideGE is lower than all values', () => {
const values = [15, 20, 25];
const [min, max] = boundedMinMax(values, undefined, undefined, -Infinity, 10);
expect(min).toBe(Infinity);
expect(max).toBe(-Infinity);
});
});
describe('with both hideLE and hideGE filters', () => {
it('excludes values outside the range', () => {
const values = [5, 10, 15, 20, 25, 30];
const [min, max] = boundedMinMax(values, undefined, undefined, 10, 25);
expect(min).toBe(15);
expect(max).toBe(20);
});
it('works with provided min/max bounds', () => {
const values = [5, 10, 15, 20, 25, 30];
const [min, max] = boundedMinMax(values, 0, 50, 10, 25);
expect(min).toBe(0);
expect(max).toBe(50);
});
});
});
describe('valuesToFills', () => {
// Fake color palette for testing index mapping
const palette5 = ['c0', 'c1', 'c2', 'c3', 'c4'];
describe('basic mapping', () => {
it('maps values to palette indices', () => {
const values = [0, 25, 50, 75, 100];
const fills = valuesToFills(values, palette5, 0, 100);
expect(fills).toEqual([0, 1, 2, 3, 4]);
});
it('maps min value to first palette index', () => {
const values = [10];
const fills = valuesToFills(values, palette5, 10, 20);
expect(fills[0]).toBe(0);
});
it('maps max value to last palette index', () => {
const values = [20];
const fills = valuesToFills(values, palette5, 10, 20);
expect(fills[0]).toBe(4);
});
it('maps mid-range values proportionally', () => {
const values = [15];
const fills = valuesToFills(values, palette5, 10, 20);
// 15 is the midpoint of 10-20, so it should map to index 2 (the middle color)
expect(fills[0]).toBe(2);
});
});
describe('edge cases', () => {
it('clamps values below min to first index', () => {
const values = [5, 8, 10];
const fills = valuesToFills(values, palette5, 10, 20);
expect(fills[0]).toBe(0); // 5 < 10
expect(fills[1]).toBe(0); // 8 < 10
});
it('clamps values above max to last index', () => {
const values = [20, 25, 30];
const fills = valuesToFills(values, palette5, 10, 20);
expect(fills[0]).toBe(4); // 20 = max
expect(fills[1]).toBe(4); // 25 > max
expect(fills[2]).toBe(4); // 30 > max
});
it('handles zero range (min equals max)', () => {
const values = [10, 10, 10];
const fills = valuesToFills(values, palette5, 10, 10);
// When the range is 0, the divisor defaults to 1, so all values map to index 0
expect(fills).toEqual([0, 0, 0]);
});
it('handles single color palette', () => {
const values = [0, 50, 100];
const palette = ['c0'];
const fills = valuesToFills(values, palette, 0, 100);
expect(fills).toEqual([0, 0, 0]);
});
it('handles large palette', () => {
const values = [50];
const palette = Array.from({ length: 256 }, (_, i) => `c${i}`);
const fills = valuesToFills(values, palette, 0, 100);
// 50 is 50% of the 0-100 range, so it should map to index 128 (the middle of a 256-color palette)
expect(fills[0]).toBe(128);
});
});
describe('negative values', () => {
it('handles negative min and max', () => {
const values = [-10, -5, 0];
const palette = ['c0', 'c1', 'c2'];
const fills = valuesToFills(values, palette, -10, 0);
expect(fills[0]).toBe(0); // -10 is min
expect(fills[1]).toBe(1); // -5 is middle
expect(fills[2]).toBe(2); // 0 is max
});
it('handles range crossing zero', () => {
const values = [-10, 0, 10];
const palette = ['c0', 'c1', 'c2'];
const fills = valuesToFills(values, palette, -10, 10);
expect(fills[0]).toBe(0); // -10 is min
expect(fills[1]).toBe(1); // 0 is middle
expect(fills[2]).toBe(2); // 10 is max
});
});
describe('preserves array length', () => {
it('returns array with same length as input', () => {
const values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
const palette = ['c0', 'c1'];
const fills = valuesToFills(values, palette, 1, 10);
expect(fills.length).toBe(values.length);
});
it('handles empty array', () => {
const values: number[] = [];
const fills = valuesToFills(values, palette5, 0, 100);
expect(fills).toEqual([]);
});
});
});

View File

@@ -14,13 +14,22 @@ import {
} from '@grafana/data';
import { AxisPlacement, ScaleDirection, ScaleDistribution, ScaleOrientation, HeatmapCellLayout } from '@grafana/schema';
import { UPlotConfigBuilder, UPlotConfigPrepFn } from '@grafana/ui';
import { isHeatmapCellsDense, readHeatmapRowsCustomMeta } from 'app/features/transformers/calculateHeatmap/heatmap';
import {
calculateBucketFactor,
isHeatmapCellsDense,
readHeatmapRowsCustomMeta,
} from 'app/features/transformers/calculateHeatmap/heatmap';
import { pointWithin, Quadtree, Rect } from '../barchart/quadtree';
import { HeatmapData } from './fields';
import { FieldConfig, HeatmapSelectionMode, YAxisConfig } from './types';
/** Validates and returns a safe log base (2 or 10), defaults to 2 if invalid */
export function toLogBase(value: number | undefined): 2 | 10 {
return value === 10 ? 10 : 2;
}
interface PathbuilderOpts {
each: (u: uPlot, seriesIdx: number, dataIdx: number, lft: number, top: number, wid: number, hgt: number) => void;
gap?: number | null;
@@ -54,6 +63,7 @@ interface PrepConfigOpts {
ySizeDivisor?: number;
selectionMode?: HeatmapSelectionMode;
xAxisConfig?: Parameters<UPlotConfigPrepFn>[0]['xAxisConfig'];
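// Panel rowsFrame options; yBucketScale carries the explicit scale chosen in the Y bucket scale editor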
rowsFrame?: { yBucketScale?: { type: ScaleDistribution; log?: number; linearThreshold?: number } };
}
export function prepConfig(opts: PrepConfigOpts) {
@@ -69,8 +79,11 @@ export function prepConfig(opts: PrepConfigOpts) {
ySizeDivisor,
selectionMode = HeatmapSelectionMode.X,
xAxisConfig,
rowsFrame,
} = opts;
const yBucketScale = rowsFrame?.yBucketScale;
const xScaleKey = 'x';
let isTime = true;
@@ -196,7 +209,20 @@ export function prepConfig(opts: PrepConfigOpts) {
const yScale = yFieldConfig?.scaleDistribution ?? { type: ScaleDistribution.Linear };
const yAxisReverse = Boolean(yAxisConfig.reverse);
const isSparseHeatmap = heatmapType === DataFrameType.HeatmapCells && !isHeatmapCellsDense(dataRef.current?.heatmap!);
const shouldUseLogScale = yScale.type !== ScaleDistribution.Linear || isSparseHeatmap;
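// An explicit yBucketScale from panel options wins; otherwise fall back to log for non-linear or sparse heatmaps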
const scaleDistribution = (() => {
if (yBucketScale) {
return yBucketScale.type;
}
if (yScale.type !== ScaleDistribution.Linear || isSparseHeatmap) {
return ScaleDistribution.Log;
}
return ScaleDistribution.Linear;
})();
const scaleLog = toLogBase(yBucketScale?.log ?? yScale.log);
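// Symlog only: the range around zero within which the scale stays linear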
const scaleLinearThreshold = yBucketScale?.linearThreshold;
const isOrdinalY = readHeatmapRowsCustomMeta(dataRef.current?.heatmap).yOrdinalDisplay != null;
// random to prevent syncing y in other heatmaps
@@ -210,8 +236,9 @@ export function prepConfig(opts: PrepConfigOpts) {
orientation: ScaleOrientation.Vertical,
direction: yAxisReverse ? ScaleDirection.Down : ScaleDirection.Up,
// should be tweakable manually
distribution: shouldUseLogScale ? ScaleDistribution.Log : ScaleDistribution.Linear,
log: yScale.log ?? 2,
distribution: scaleDistribution,
log: scaleLog,
linearThreshold: scaleLinearThreshold,
range:
// sparse already accounts for le/ge by explicit yMin & yMax cell bounds, so no need to expand y range
isSparseHeatmap
@@ -224,16 +251,16 @@ export function prepConfig(opts: PrepConfigOpts) {
let scaleMin: number | null, scaleMax: number | null;
[scaleMin, scaleMax] = shouldUseLogScale
? uPlot.rangeLog(dataMin, dataMax, (yScale.log ?? 2) as unknown as uPlot.Scale.LogBase, true)
: [dataMin, dataMax];
const isLogScale =
scaleDistribution === ScaleDistribution.Log || scaleDistribution === ScaleDistribution.Symlog;
[scaleMin, scaleMax] = isLogScale ? uPlot.rangeLog(dataMin, dataMax, scaleLog, true) : [dataMin, dataMax];
if (shouldUseLogScale && !isOrdinalY) {
let { min: explicitMin, max: explicitMax } = yAxisConfig;
if (isLogScale && !isOrdinalY) {
let yExp = u.scales[yScaleKey].log!;
let log = yExp === 2 ? Math.log2 : Math.log10;
let { min: explicitMin, max: explicitMax } = yAxisConfig;
// guard against <= 0
if (explicitMin != null && explicitMin > 0) {
// snap to magnitude
@@ -245,6 +272,9 @@ export function prepConfig(opts: PrepConfigOpts) {
let maxLog = log(explicitMax);
scaleMax = yExp ** incrRoundUp(maxLog, 1);
}
} else if (!isOrdinalY) {
// Apply explicit min/max for linear scale
[scaleMin, scaleMax] = applyExplicitMinMax(scaleMin, scaleMax, explicitMin, explicitMax);
}
return [scaleMin, scaleMax];
@@ -257,7 +287,7 @@ export function prepConfig(opts: PrepConfigOpts) {
let { min: explicitMin, max: explicitMax } = yAxisConfig;
// logarithmic expansion
if (shouldUseLogScale) {
if (scaleDistribution === ScaleDistribution.Log || scaleDistribution === ScaleDistribution.Symlog) {
let yExp = u.scales[yScaleKey].log!;
let minExpanded = false;
@@ -280,17 +310,31 @@ export function prepConfig(opts: PrepConfigOpts) {
}
}
// For pre-bucketed data with explicit scale, calculate expansion factor from actual bucket spacing
// For calculated heatmaps, use the full log base
let expansionFactor: number = yExp;
if (yBucketScale !== undefined) {
// Try to infer the bucket factor from the actual data spacing
const yValues = u.data[1]?.[1];
if (Array.isArray(yValues) && yValues.length >= 2 && typeof yValues[0] === 'number') {
expansionFactor = calculateBucketFactor(yValues, yExp);
}
}
if (dataRef.current?.yLayout === HeatmapCellLayout.le) {
if (!minExpanded) {
scaleMin /= yExp;
scaleMin /= expansionFactor;
}
} else if (dataRef.current?.yLayout === HeatmapCellLayout.ge) {
if (!maxExpanded) {
scaleMax *= yExp;
scaleMax *= expansionFactor;
}
} else {
scaleMin /= yExp / 2;
scaleMax *= yExp / 2;
// Unknown layout - expand both directions
const factor = Math.sqrt(expansionFactor); // Use sqrt for balanced expansion
scaleMin /= factor;
scaleMax *= factor;
}
if (!isOrdinalY) {
@@ -383,7 +427,7 @@ export function prepConfig(opts: PrepConfigOpts) {
return splits.map((v) =>
v < 0
? (meta.yMinDisplay ?? '') // Check prometheus style labels
: (meta.yOrdinalDisplay[v] ?? '')
: (meta.yOrdinalDisplay?.[v] ?? '')
);
}
return splits;
@@ -585,15 +629,19 @@ export function heatmapPathsDense(opts: PathbuilderOpts) {
let ySize: number;
if (scaleX.distr === 3) {
xSize = Math.abs(valToPosX(xs[0] * scaleX.log!, scaleX, xDim, xOff) - valToPosX(xs[0], scaleX, xDim, xOff));
// For log scales, calculate cell size from actual adjacent bucket positions
const nextXValue = xs[yBinQty] ?? xs[0] * scaleX.log!;
xSize = Math.abs(valToPosX(nextXValue, scaleX, xDim, xOff) - valToPosX(xs[0], scaleX, xDim, xOff));
} else {
xSize = Math.abs(valToPosX(xBinIncr, scaleX, xDim, xOff) - valToPosX(0, scaleX, xDim, xOff));
}
if (scaleY.distr === 3) {
ySize =
Math.abs(valToPosY(ys[0] * scaleY.log!, scaleY, yDim, yOff) - valToPosY(ys[0], scaleY, yDim, yOff)) /
ySizeDivisor;
// Use actual data spacing for pre-bucketed data, or full magnitude for calculated heatmaps with splits
const nextYValue = ySizeDivisor === 1 ? (ys[1] ?? ys[0] * scaleY.log!) : ys[0] * scaleY.log!;
const baseYSize = Math.abs(valToPosY(nextYValue, scaleY, yDim, yOff) - valToPosY(ys[0], scaleY, yDim, yOff));
ySize = baseYSize / ySizeDivisor;
} else {
ySize = Math.abs(valToPosY(yBinIncr, scaleY, yDim, yOff) - valToPosY(0, scaleY, yDim, yOff)) / ySizeDivisor;
}
@@ -882,3 +930,30 @@ export const valuesToFills = (values: number[], palette: string[], minValue: num
return indexedFills;
};
/**
* Calculates the Y-axis size divisor for heatmap cell rendering.
* For log/symlog scales with calculated data (no explicit scale), divides cells by the split value.
* Otherwise returns 1 (no division).
*/
export function calculateYSizeDivisor(
scaleType: ScaleDistribution | undefined,
hasExplicitScale: boolean,
splitValue: number | string | undefined
): number {
const isLogScale = scaleType === ScaleDistribution.Log || scaleType === ScaleDistribution.Symlog;
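// +(splitValue || 1) coerces string splits (e.g. '2') to numbers and falls back to 1 when the split is missing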
return isLogScale && !hasExplicitScale ? +(splitValue || 1) : 1;
}
/**
* Applies explicit min/max values to scale range for linear scales.
* Returns the original values if explicitMin/explicitMax are undefined.
*/
export function applyExplicitMinMax(
scaleMin: number | null,
scaleMax: number | null,
explicitMin: number | undefined,
explicitMax: number | undefined
): [number | null, number | null] {
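// `??` keeps an explicit 0 (unlike `||`) while still falling back when the explicit value is undefined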
return [explicitMin ?? scaleMin, explicitMax ?? scaleMax];
}

View File

@@ -7166,6 +7166,9 @@
"time-range-label": "Lock time range"
}
},
"embedded-panel": {
"powered-by": "Powered by"
},
"empty-list-cta": {
"pro-tip": "ProTip: {{proTip}}"
},
@@ -9361,6 +9364,7 @@
"name-unit": "Unit",
"name-value-name": "Value name",
"name-y-axis-scale": "Y axis scale",
"name-y-bucket-scale": "Y bucket scale",
"placeholder-axis-label": "Auto",
"placeholder-axis-width": "Auto",
"placeholder-decimals": "Auto",
@@ -9390,6 +9394,18 @@
"label-all": "All",
"label-hidden": "Hidden",
"label-single": "Single"
},
"y-bucket-scale-editor": {
"linear-threshold-description": "Range within which the scale is linear",
"linear-threshold-label": "Linear threshold",
"linear-threshold-placeholder": "1",
"log-base-label": "Log base",
"scale-options": {
"label-auto": "Auto",
"label-linear": "Linear",
"label-log": "Log",
"label-symlog": "Symlog"
}
}
},
"help-modal": {