Mirror of https://github.com/grafana/grafana.git (synced 2026-01-06 17:33:49 +08:00)

Compare commits: v10.4.9 ... v8.1.0-bet (46 commits)
| SHA1 |
| --- |
| f778fdbbe8 |
| d1cd7e7c64 |
| 0f5446d768 |
| 33ab22229f |
| d74d4d6be9 |
| e9bea6053c |
| 376cbf75b0 |
| 65d0e4227e |
| 41315b0935 |
| 0d29421a87 |
| 1bad6de8cc |
| 2fbfe987f0 |
| c2d807a1c8 |
| 3c4754b19f |
| c440fd4f5a |
| 28972eaf4b |
| a7017f2729 |
| 31cc177e31 |
| abf351f776 |
| 1a353a1eea |
| 2b97f6a507 |
| 4478259f70 |
| 38cb26bd5b |
| 1f0339179f |
| f3f8972505 |
| 0a08cf10e5 |
| a8f5445d47 |
| 631c12ec91 |
| d388afece6 |
| b0fe99911a |
| 8392ebdacb |
| 147704deb9 |
| 078d716be9 |
| 04cb471599 |
| 6564f22772 |
| 13cd3ea28b |
| d5e0665081 |
| 909141592d |
| fda235a862 |
| 154231a58d |
| 197e4344da |
| 64b008e28b |
| bc9ac1199b |
| 2b15e1a962 |
| 4308a77e27 |
| f18749927c |
.drone.yml (40 changed lines)

@@ -17,7 +17,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- ./bin/grabpl verify-drone
- curl -fLO https://github.com/jwilder/dockerize/releases/download/v$${DOCKERIZE_VERSION}/dockerize-linux-amd64-v$${DOCKERIZE_VERSION}.tar.gz

@@ -258,7 +258,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- ./bin/grabpl verify-drone
- curl -fLO https://github.com/jwilder/dockerize/releases/download/v$${DOCKERIZE_VERSION}/dockerize-linux-amd64-v$${DOCKERIZE_VERSION}.tar.gz

@@ -589,7 +589,7 @@ steps:
image: grafana/ci-wix:0.1.1
commands:
- $$ProgressPreference = "SilentlyContinue"
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/windows/grabpl.exe -OutFile grabpl.exe
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/windows/grabpl.exe -OutFile grabpl.exe

- name: build-windows-installer
image: grafana/ci-wix:0.1.1

@@ -638,7 +638,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- ./bin/grabpl verify-drone
environment:

@@ -723,7 +723,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- ./bin/grabpl verify-drone
- ./bin/grabpl verify-version ${DRONE_TAG}

@@ -1029,7 +1029,7 @@ steps:
image: grafana/ci-wix:0.1.1
commands:
- $$ProgressPreference = "SilentlyContinue"
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/windows/grabpl.exe -OutFile grabpl.exe
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/windows/grabpl.exe -OutFile grabpl.exe

- name: build-windows-installer
image: grafana/ci-wix:0.1.1

@@ -1079,7 +1079,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- git clone "https://$${GITHUB_TOKEN}@github.com/grafana/grafana-enterprise.git"
- cd grafana-enterprise

@@ -1504,7 +1504,7 @@ steps:
image: grafana/ci-wix:0.1.1
commands:
- $$ProgressPreference = "SilentlyContinue"
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/windows/grabpl.exe -OutFile grabpl.exe
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/windows/grabpl.exe -OutFile grabpl.exe
- git clone "https://$$env:GITHUB_TOKEN@github.com/grafana/grafana-enterprise.git"
- cd grafana-enterprise
- git checkout ${DRONE_TAG}

@@ -1572,7 +1572,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- ./bin/grabpl verify-drone
- ./bin/grabpl verify-version ${DRONE_TAG}

@@ -1677,7 +1677,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- ./bin/grabpl verify-drone
- ./bin/grabpl verify-version v7.3.0-test

@@ -1972,7 +1972,7 @@ steps:
image: grafana/ci-wix:0.1.1
commands:
- $$ProgressPreference = "SilentlyContinue"
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/windows/grabpl.exe -OutFile grabpl.exe
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/windows/grabpl.exe -OutFile grabpl.exe

- name: build-windows-installer
image: grafana/ci-wix:0.1.1

@@ -2022,7 +2022,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- git clone "https://$${GITHUB_TOKEN}@github.com/grafana/grafana-enterprise.git"
- cd grafana-enterprise

@@ -2441,7 +2441,7 @@ steps:
image: grafana/ci-wix:0.1.1
commands:
- $$ProgressPreference = "SilentlyContinue"
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/windows/grabpl.exe -OutFile grabpl.exe
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/windows/grabpl.exe -OutFile grabpl.exe
- git clone "https://$$env:GITHUB_TOKEN@github.com/grafana/grafana-enterprise.git"
- cd grafana-enterprise
- git checkout main

@@ -2509,7 +2509,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- ./bin/grabpl verify-drone
- ./bin/grabpl verify-version v7.3.0-test

@@ -2614,7 +2614,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- ./bin/grabpl verify-drone
- curl -fLO https://github.com/jwilder/dockerize/releases/download/v$${DOCKERIZE_VERSION}/dockerize-linux-amd64-v$${DOCKERIZE_VERSION}.tar.gz

@@ -2884,7 +2884,7 @@ steps:
image: grafana/ci-wix:0.1.1
commands:
- $$ProgressPreference = "SilentlyContinue"
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/windows/grabpl.exe -OutFile grabpl.exe
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/windows/grabpl.exe -OutFile grabpl.exe

- name: build-windows-installer
image: grafana/ci-wix:0.1.1

@@ -2930,7 +2930,7 @@ steps:
image: grafana/build-container:1.4.1
commands:
- mkdir -p bin
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/grabpl
- curl -fL -o bin/grabpl https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/grabpl
- chmod +x bin/grabpl
- git clone "https://$${GITHUB_TOKEN}@github.com/grafana/grafana-enterprise.git"
- cd grafana-enterprise

@@ -3352,7 +3352,7 @@ steps:
image: grafana/ci-wix:0.1.1
commands:
- $$ProgressPreference = "SilentlyContinue"
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.2.8/windows/grabpl.exe -OutFile grabpl.exe
- Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v2.3.2/windows/grabpl.exe -OutFile grabpl.exe
- git clone "https://$$env:GITHUB_TOKEN@github.com/grafana/grafana-enterprise.git"
- cd grafana-enterprise
- git checkout $$env:DRONE_BRANCH

@@ -3494,8 +3494,4 @@ get:
path: infra/data/ci/drone
name: machine-user-token

---
kind: signature
hmac: fa16b4de5ce285e6e9495b3ed797383627ffd4d43539eab58186fe8cc227d3e7

...
CHANGELOG.md (105 changed lines)

@@ -1,4 +1,109 @@

<!-- 8.1.0-beta2 START -->

# 8.1.0-beta2 (2021-07-23)

### Features and enhancements

* **Alerting:** Expand the value string in alert annotations and labels. [#37051](https://github.com/grafana/grafana/pull/37051), [@gerobinson](https://github.com/gerobinson)
* **Auth:** Add Azure HTTP authentication middleware. [#36932](https://github.com/grafana/grafana/pull/36932), [@kostrse](https://github.com/kostrse)
* **Auth:** Auth: Pass user role when using the authentication proxy. [#36729](https://github.com/grafana/grafana/pull/36729), [@yuwaMSFT2](https://github.com/yuwaMSFT2)
* **Gazetteer:** Update countries.json file to allow for linking to 3-letter country codes. [#37129](https://github.com/grafana/grafana/pull/37129), [@bryanuribe](https://github.com/bryanuribe)

### Bug fixes

* **Config:** Fix Docker builds by correcting formatting in sample.ini. [#37106](https://github.com/grafana/grafana/pull/37106), [@FZambia](https://github.com/FZambia)
* **Explore:** Fix encoding of internal URLs. [#36919](https://github.com/grafana/grafana/pull/36919), [@aocenas](https://github.com/aocenas)

<!-- 8.1.0-beta2 END -->

<!-- 8.1.0-beta1 START -->

# 8.1.0-beta1 (2021-07-22)

### Features and enhancements

* **Alerting:** Add Alertmanager notifications tab. [#35759](https://github.com/grafana/grafana/pull/35759), [@nathanrodman](https://github.com/nathanrodman)
* **Alerting:** Add button to deactivate current Alertmanager configuration. [#36951](https://github.com/grafana/grafana/pull/36951), [@domasx2](https://github.com/domasx2)
* **Alerting:** Add toggle in Loki/Prometheus data source configuration to opt out of alerting UI. [#36552](https://github.com/grafana/grafana/pull/36552), [@domasx2](https://github.com/domasx2)
* **Alerting:** Allow any "evaluate for" value >=0 in the alert rule form. [#35807](https://github.com/grafana/grafana/pull/35807), [@domasx2](https://github.com/domasx2)
* **Alerting:** Load default configuration from status endpoint, if Cortex Alertmanager returns empty user configuration. [#35769](https://github.com/grafana/grafana/pull/35769), [@domasx2](https://github.com/domasx2)
* **Alerting:** view to display alert rule and its underlying data. [#35546](https://github.com/grafana/grafana/pull/35546), [@mckn](https://github.com/mckn)
* **Annotation panel:** Release the annotation panel. [#36959](https://github.com/grafana/grafana/pull/36959), [@ryantxu](https://github.com/ryantxu)
* **Annotations:** Add typeahead support for tags in built-in annotations. [#36377](https://github.com/grafana/grafana/pull/36377), [@ashharrison90](https://github.com/ashharrison90)
* **AzureMonitor:** Add curated dashboards for Azure services. [#35356](https://github.com/grafana/grafana/pull/35356), [@avidhanju](https://github.com/avidhanju)
* **AzureMonitor:** Add support for deep links to Microsoft Azure portal for Metrics. [#32273](https://github.com/grafana/grafana/pull/32273), [@shuotli](https://github.com/shuotli)
* **AzureMonitor:** Remove support for different credentials for Azure Monitor Logs. [#35121](https://github.com/grafana/grafana/pull/35121), [@andresmgot](https://github.com/andresmgot)
* **AzureMonitor:** Support querying any Resource for Logs queries. [#33879](https://github.com/grafana/grafana/pull/33879), [@joshhunt](https://github.com/joshhunt)
* **Elasticsearch:** Add frozen indices search support. [#36018](https://github.com/grafana/grafana/pull/36018), [@Elfo404](https://github.com/Elfo404)
* **Elasticsearch:** Name fields after template variables values instead of their name. [#36035](https://github.com/grafana/grafana/pull/36035), [@Elfo404](https://github.com/Elfo404)
* **Elasticsearch:** add rate aggregation. [#33311](https://github.com/grafana/grafana/pull/33311), [@estermv](https://github.com/estermv)
* **Email:** Allow configuration of content types for email notifications. [#34530](https://github.com/grafana/grafana/pull/34530), [@djairhogeuens](https://github.com/djairhogeuens)
* **Explore:** Add more meta information when line limit is hit. [#33069](https://github.com/grafana/grafana/pull/33069), [@ivanahuckova](https://github.com/ivanahuckova)
* **Explore:** UI improvements to trace view. [#34276](https://github.com/grafana/grafana/pull/34276), [@aocenas](https://github.com/aocenas)
* **FieldOverrides:** Added support to change display name in an override field and have it be matched by a later rule. [#35893](https://github.com/grafana/grafana/pull/35893), [@torkelo](https://github.com/torkelo)
* **HTTP Client:** Introduce `dataproxy_max_idle_connections` config variable. [#35864](https://github.com/grafana/grafana/pull/35864), [@dsotirakis](https://github.com/dsotirakis)
* **InfluxDB:** InfluxQL: adds tags to timeseries data. [#36702](https://github.com/grafana/grafana/pull/36702), [@gabor](https://github.com/gabor)
* **InfluxDB:** InfluxQL: make measurement search case insensitive. [#34563](https://github.com/grafana/grafana/pull/34563), [@gabor](https://github.com/gabor)
* **Legacy Alerting:** Replace simplejson with a struct in webhook notification channel. [#34952](https://github.com/grafana/grafana/pull/34952), [@KEVISONG](https://github.com/KEVISONG)
* **Legend:** Updates display name for Last (not null) to just Last*. [#35633](https://github.com/grafana/grafana/pull/35633), [@torkelo](https://github.com/torkelo)
* **Logs panel:** Add option to show common labels. [#36166](https://github.com/grafana/grafana/pull/36166), [@ivanahuckova](https://github.com/ivanahuckova)
* **Loki:** Add $__range variable. [#36175](https://github.com/grafana/grafana/pull/36175), [@ivanahuckova](https://github.com/ivanahuckova)
* **Loki:** Add support for "label_values(log stream selector, label)" in templating. [#35488](https://github.com/grafana/grafana/pull/35488), [@ivanahuckova](https://github.com/ivanahuckova)
* **Loki:** Add support for ad-hoc filtering in dashboard. [#36393](https://github.com/grafana/grafana/pull/36393), [@ivanahuckova](https://github.com/ivanahuckova)
* **MySQL Datasource:** Add timezone parameter. [#27535](https://github.com/grafana/grafana/pull/27535), [@andipabst](https://github.com/andipabst)
* **NodeGraph:** Show gradient fields in legend. [#34078](https://github.com/grafana/grafana/pull/34078), [@aocenas](https://github.com/aocenas)
* **PanelOptions:** Don't mutate panel options/field config object when updating. [#36441](https://github.com/grafana/grafana/pull/36441), [@dprokop](https://github.com/dprokop)
* **PieChart:** Make pie gradient more subtle to match other charts. [#36961](https://github.com/grafana/grafana/pull/36961), [@nikki-kiga](https://github.com/nikki-kiga)
* **Prometheus:** Update PromQL typeahead and highlighting. [#36730](https://github.com/grafana/grafana/pull/36730), [@ekpdt](https://github.com/ekpdt)
* **Prometheus:** interpolate variable for step field. [#36437](https://github.com/grafana/grafana/pull/36437), [@zoltanbedi](https://github.com/zoltanbedi)
* **Provisioning:** Improve validation by validating across all dashboard providers. [#26742](https://github.com/grafana/grafana/pull/26742), [@nabokihms](https://github.com/nabokihms)
* **SQL Datasources:** Allow multiple string/labels columns with time series. [#36485](https://github.com/grafana/grafana/pull/36485), [@kylebrandt](https://github.com/kylebrandt)
* **Select:** Portal select menu to document.body. [#36398](https://github.com/grafana/grafana/pull/36398), [@ashharrison90](https://github.com/ashharrison90)
* **Team Sync:** Add group mapping to support team sync in the Generic OAuth provider. [#36307](https://github.com/grafana/grafana/pull/36307), [@wardbekker](https://github.com/wardbekker)
* **Tooltip:** Make active series more noticeable. [#36824](https://github.com/grafana/grafana/pull/36824), [@nikki-kiga](https://github.com/nikki-kiga)
* **Tracing:** Add support to configure trace to logs start and end time. [#34995](https://github.com/grafana/grafana/pull/34995), [@zoltanbedi](https://github.com/zoltanbedi)
* **Transformations:** Skip merge when there is only a single data frame. [#36407](https://github.com/grafana/grafana/pull/36407), [@edgarpoce](https://github.com/edgarpoce)
* **ValueMapping:** Added support for mapping text to color, boolean values, NaN and Null. Improved UI for value mapping. [#33820](https://github.com/grafana/grafana/pull/33820), [@torkelo](https://github.com/torkelo)
* **Visualizations:** Dynamically set any config (min, max, unit, color, thresholds) from query results. [#36548](https://github.com/grafana/grafana/pull/36548), [@torkelo](https://github.com/torkelo)
* **live:** Add support to handle origin without a value for the port when matching with root_url. [#36834](https://github.com/grafana/grafana/pull/36834), [@FZambia](https://github.com/FZambia)

### Bug fixes

* **Alerting:** Handle marshaling Inf values. [#36947](https://github.com/grafana/grafana/pull/36947), [@kylebrandt](https://github.com/kylebrandt)
* **AzureMonitor:** Fix macro resolution for template variables. [#36944](https://github.com/grafana/grafana/pull/36944), [@andresmgot](https://github.com/andresmgot)
* **AzureMonitor:** Fix queries with Microsoft.NetApp/../../volumes resources. [#32661](https://github.com/grafana/grafana/pull/32661), [@pckls](https://github.com/pckls)
* **AzureMonitor:** Request and concat subsequent resource pages. [#36958](https://github.com/grafana/grafana/pull/36958), [@andresmgot](https://github.com/andresmgot)
* **Bug:** Fix parse duration for day. [#36942](https://github.com/grafana/grafana/pull/36942), [@idafurjes](https://github.com/idafurjes)
* **Datasources:** Improve error handling for error messages. [#35120](https://github.com/grafana/grafana/pull/35120), [@ifrost](https://github.com/ifrost)
* **Explore:** Correct the functionality of shift-enter shortcut across all uses. [#36600](https://github.com/grafana/grafana/pull/36600), [@ivanahuckova](https://github.com/ivanahuckova)
* **Explore:** Show all dataFrames in data tab in Inspector. [#32161](https://github.com/grafana/grafana/pull/32161), [@ivanahuckova](https://github.com/ivanahuckova)
* **GraphNG:** Fix Tooltip mode 'All' for XYChart. [#31260](https://github.com/grafana/grafana/pull/31260), [@Posnet](https://github.com/Posnet)
* **Loki:** Fix highlight of logs when using filter expressions with backticks. [#36024](https://github.com/grafana/grafana/pull/36024), [@ivanahuckova](https://github.com/ivanahuckova)
* **Modal:** Force modal content to overflow with scroll. [#36754](https://github.com/grafana/grafana/pull/36754), [@ashharrison90](https://github.com/ashharrison90)
* **Plugins:** Ignore symlinked folders when verifying plugin signature. [#34434](https://github.com/grafana/grafana/pull/34434), [@wbrowne](https://github.com/wbrowne)

### Breaking changes

When parsing Elasticsearch query responses using template variables, each field gets named after the variable value instead of the name.
For example, executing a `terms` aggregation on a variable named `$groupBy` that has `@hostname` as a value, the resulting column in the table response will be `@hostname` instead of `$groupBy` Issue [#36035](https://github.com/grafana/grafana/issues/36035)

Azure Monitor data source no longer supports different credentials for Metrics and Logs in existing data sources. To use different credentials for Azure Monitor Logs, create another data source. Issue [#35121](https://github.com/grafana/grafana/issues/35121)

Existing Azure Metrics Logs queries for Log Analytics Workspaces should be backward compatible with this change and should not get impacted. Panels will be migrated to use the new resource-centric backend when you first edit and save them.

Application Insights and Insights Analytics queries are now read-only and cannot be modified. To update Application Insights queries, users can manually recreate them as Metrics queries, and Insights Analytics are recreated with Logs.

Issue [#33879](https://github.com/grafana/grafana/issues/33879)

### Plugin development fixes & changes

* **Toolkit:** Improve error messages when tasks fail. [#36381](https://github.com/grafana/grafana/pull/36381), [@joshhunt](https://github.com/joshhunt)

<!-- 8.1.0-beta1 END -->

<!-- 8.0.6 START -->

# 8.0.6 (2021-07-14)
@@ -24,14 +24,12 @@ RUN apk add --no-cache gcc g++
WORKDIR $GOPATH/src/github.com/grafana/grafana

COPY go.mod go.sum embed.go ./

RUN go mod verify

COPY cue cue
COPY public/app/plugins public/app/plugins
COPY pkg pkg
COPY build.go package.json ./

RUN go mod verify
RUN go run build.go build

# Final stage

@@ -22,14 +22,12 @@ FROM golang:1.16 AS go-builder
WORKDIR /src/grafana

COPY go.mod go.sum embed.go ./

RUN go mod verify

COPY build.go package.json ./
COPY pkg pkg/
COPY cue cue/
COPY public/app/plugins public/app/plugins/

RUN go mod verify
RUN go run build.go build

FROM ubuntu:20.04
@@ -995,12 +995,12 @@
[geomap]
# Set the JSON configuration for the default basemap
;default_baselayer_config = `{
"type": "xyz",
"config": {
"attribution": "Open street map",
"url": "https://tile.openstreetmap.org/{z}/{x}/{y}.png"
}
}`
; "type": "xyz",
; "config": {
; "attribution": "Open street map",
; "url": "https://tile.openstreetmap.org/{z}/{x}/{y}.png"
; }
;}`

# Enable or disable loading other base map layers
;enable_custom_baselayers = true
@@ -1529,6 +1529,30 @@ For example:
allowed_origins = "https://*.example.com"

### ha_engine

> **Note**: Available in Grafana v8.1 and later versions.

**Experimental**

The high availability (HA) engine name for Grafana Live. By default, it's not set. The only possible value is "redis".

For more information, refer to [Configure Grafana Live HA setup]({{< relref "../live/live-ha-setup.md" >}}).

### ha_engine_address

> **Note**: Available in Grafana v8.1 and later versions.

**Experimental**

Address string of selected the high availability (HA) Live engine. For Redis, it's a `host:port` string. Example:

```ini
[live]
ha_engine = redis
ha_engine_address = 127.0.0.1:6379
```

<hr>

## [plugin.grafana-image-renderer]
@@ -109,8 +109,9 @@ The following template variables are available when expanding annotations and la

| Name | Description |
| ------- | --------------- |
| $labels | Labels contains the labels from the query or condition. For example, `{{ $labels.instance }}` and `{{ $labels.job }}`. |
| $values | Values contains the values of all reduce and math expressions that were evaluated for this alert rule. For example, `{{ $values.A }}`, `{{ $values.A.Labels }}` and `{{ $values.A.Value }}` where `A` is the `refID` of the expression. |
| $labels | The labels from the query or condition. For example, `{{ $labels.instance }}` and `{{ $labels.job }}`. |
| $values | The values of all reduce and math expressions that were evaluated for this alert rule. For example, `{{ $values.A }}`, `{{ $values.A.Labels }}` and `{{ $values.A.Value }}` where `A` is the `refID` of the expression. |
| $value | The value string of the alert instance. For example, `[ var='A' labels={instance=foo} value=10 ]`. |

## Preview alerts
@@ -8,9 +8,10 @@ weight = 400
# View alert rules

To view alerts:

1. In the Grafana menu hover your cursor over the Alerting (bell) icon.
1. Click **Alert Rules**. You can see all configured Grafana alert rules as well as any rules from Loki or Prometheus data sources.
By default, the group view is shown. You can toggle between group or state views by clicking the relevant **View as** buttons in the options area at the top of the page.

### Group view

@@ -25,9 +26,10 @@ State view shows alert rules grouped by state. Use this view to get an overview



## Filter alert rules

You can use the following filters to view only alert rules that match specific criteria:

- **Filter alerts by name or label -** Type an alert name, label name or value in the **Search** input.
- **Filter alerts by label -** Search by alert labels using label selectors in the **Search** input. eg: `environment=production,region=~US|EU,severity!=warning`
- **Filter alerts by state -** In **States** Select which alert states you want to see. All others are hidden.
- **Filter alerts by data source -** Click the **Select data source** and select an alerting data source. Only alert rules that query selected data source will be visible.

@@ -39,13 +41,13 @@ A rule row shows the rule state, health, and summary annotation if the rule has

### Edit or delete rule

Grafana rules can only be edited or deleted by users with Edit permissions for the folder which contains the rule. Prometheus or Loki rules can be edited or deleted by users with Editor or Admin roles.

To edit or delete a rule:

1. Expand this rule to reveal rule controls.
1. Click **Edit** to go to the rule editing form. Make changes following [instructions listed here]({{< relref "./create-grafana-managed-rule.md" >}}).
1. Click **Delete"** to delete a rule.

## Opt-out a Loki or Prometheus data source
@@ -30,7 +30,7 @@ sync_ttl = 60
# Example `whitelist = 192.168.1.1, 192.168.1.0/24, 2001::23, 2001::0/120`
whitelist =
# Optionally define more headers to sync other user attributes
# Example `headers = Name:X-WEBAUTH-NAME Email:X-WEBAUTH-EMAIL Groups:X-WEBAUTH-GROUPS`
# Example `headers = Name:X-WEBAUTH-NAME Role:X-WEBAUTH-ROLE Email:X-WEBAUTH-EMAIL Groups:X-WEBAUTH-GROUPS`
headers =
# Check out docs on this for more details on the below setting
enable_login_token = false
@@ -237,7 +237,9 @@ Content-Length: 97

`DELETE /api/folders/:uid`

Deletes an existing folder identified by uid together with all dashboards stored in the folder, if any. This operation cannot be reverted.
Deletes an existing folder identified by UID along with all dashboards (and their alerts) stored in the folder. This operation cannot be reverted.

If [Grafana 8 Alerts]({{< relref "../alerting/unified-alerting/_index.md" >}}) are enabled, you can set an optional query parameter `forceDeleteRules=false` so that requests will fail with 400 (Bad Request) error if the folder contains any Grafana 8 Alerts. However, if this parameter is set to `true` then it will delete any Grafana 8 Alerts under this folder.

**Example Request**:

@@ -265,6 +267,7 @@ Status Codes:

- **200** – Deleted
- **401** – Unauthorized
- **400** – Bad Request
- **403** – Access Denied
- **404** – Folder not found
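The folder deletion endpoint and its `forceDeleteRules` parameter described above can be exercised with a short script. The sketch below is illustrative only: the Grafana base URL, API token, and folder UID are placeholders, and the error handling simply mirrors the status codes listed above.

```typescript
// Hedged sketch: base URL, token and folder UID are placeholders, not real values.
async function deleteFolder(uid: string, forceDeleteRules = false): Promise<void> {
  const response = await fetch(
    `http://localhost:3000/api/folders/${uid}?forceDeleteRules=${forceDeleteRules}`,
    {
      method: 'DELETE',
      headers: { Authorization: 'Bearer <api-token>' },
    }
  );
  if (!response.ok) {
    // Per the status codes above, 400 (Bad Request) is returned when the folder
    // contains Grafana 8 Alerts and forceDeleteRules is false.
    throw new Error(`Delete failed with status ${response.status}`);
  }
}

deleteFolder('hypothetical-folder-uid', false).catch(console.error);
```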
@@ -7,10 +7,52 @@ weight = 130

# Configure Grafana Live HA setup

Live features in Grafana v8.0 are designed to work with a single Grafana server instance only. We will add the option for HA configuration in future Grafana releases to eliminate the current limitations.
By default, Grafana Live uses in-memory data structures and in-memory PUB/SUB hub for handling subscriptions.

Currently, if you have several Grafana server instances behind a load balancer, you may come across the following limitations:
In a high availability Grafana setup involving several Grafana server instances behind a load balancer, you can find the following limitations:

- Built-in features like dashboard change notifications will only be broadcasted to users connected to the same Grafana server process instance.
- Streaming from Telegraf will deliver data only to clients connected to the same instance which received Telegraf data, active stream cache is not shared between different Grafana instances.
- A separate unidirectional stream between Grafana and backend data source may be opened on different Grafana servers for the same channel.

To bypass these limitations, Grafana v8.1 has an experimental Live HA engine that requires Redis to work.

## Configure Redis Live engine

When the Redis engine is configured, Grafana Live keeps its state in Redis and uses Redis PUB/SUB functionality to deliver messages to all subscribers throughout all Grafana server nodes.

Here is an example configuration:

```
[live]
ha_engine = redis
ha_engine_address = 127.0.0.1:6379
```

After running:

- All built-in real-time notifications like dashboard changes are delivered to all Grafana server instances and broadcasted to all subscribers.
- Streaming from Telegraf delivers messages to all subscribers.
- A separate unidirectional stream between Grafana and backend data source opens on different Grafana servers. Publishing data to a channel delivers messages to instance subscribers, as a result, publications from different instances on different machines do not produce duplicate data on panels.

At the moment we only support single Redis node.

> **Note:** It's possible to use Redis Sentinel and Haproxy to achieve a highly available Redis setup. Redis nodes should be managed by [Redis Sentinel](https://redis.io/topics/sentinel) to achieve automatic failover. Haproxy configuration example:
> ```
> listen redis
> server redis-01 127.0.0.1:6380 check port 6380 check inter 2s weight 1 inter 2s downinter 5s rise 10 fall 2 on-marked-down shutdown-sessions on-marked-up shutdown-backup-sessions
> server redis-02 127.0.0.1:6381 check port 6381 check inter 2s weight 1 inter 2s downinter 5s rise 10 fall 2 backup
> bind *:6379
> mode tcp
> option tcpka
> option tcplog
> option tcp-check
> tcp-check send PING\r\n
> tcp-check expect string +PONG
> tcp-check send info\ replication\r\n
> tcp-check expect string role:master
> tcp-check send QUIT\r\n
> tcp-check expect string +OK
> balance roundrobin
> ```
> Next, point Grafana Live to Haproxy address:port.
@@ -8,6 +8,8 @@ weight = 10000
Here you can find detailed release notes that list everything that is included in every release as well as notices
about deprecations, breaking changes as well as changes that relate to plugin development.

- [Release notes for 8.1.0-beta2]({{< relref "release-notes-8-1-0-beta2" >}})
- [Release notes for 8.1.0-beta1]({{< relref "release-notes-8-1-0-beta1" >}})
- [Release notes for 8.0.6]({{< relref "release-notes-8-0-6" >}})
- [Release notes for 8.0.5]({{< relref "release-notes-8-0-5" >}})
- [Release notes for 8.0.4]({{< relref "release-notes-8-0-4" >}})
docs/sources/release-notes/release-notes-8-1-0-beta1.md (new file, 91 lines)

@@ -0,0 +1,91 @@
+++
title = "Release notes for Grafana 8.1.0-beta1"
[_build]
list = false
+++

<!-- Auto generated by update changelog github action -->

# Release notes for Grafana 8.1.0-beta1

### Features and enhancements

* **Alerting:** Add Alertmanager notifications tab. [#35759](https://github.com/grafana/grafana/pull/35759), [@nathanrodman](https://github.com/nathanrodman)
* **Alerting:** Add button to deactivate current Alertmanager configuration. [#36951](https://github.com/grafana/grafana/pull/36951), [@domasx2](https://github.com/domasx2)
* **Alerting:** Add toggle in Loki/Prometheus data source configuration to opt out of alerting UI. [#36552](https://github.com/grafana/grafana/pull/36552), [@domasx2](https://github.com/domasx2)
* **Alerting:** Allow any "evaluate for" value >=0 in the alert rule form. [#35807](https://github.com/grafana/grafana/pull/35807), [@domasx2](https://github.com/domasx2)
* **Alerting:** Load default configuration from status endpoint, if Cortex Alertmanager returns empty user configuration. [#35769](https://github.com/grafana/grafana/pull/35769), [@domasx2](https://github.com/domasx2)
* **Alerting:** view to display alert rule and its underlying data. [#35546](https://github.com/grafana/grafana/pull/35546), [@mckn](https://github.com/mckn)
* **Annotation panel:** Release the annotation panel. [#36959](https://github.com/grafana/grafana/pull/36959), [@ryantxu](https://github.com/ryantxu)
* **Annotations:** Add typeahead support for tags in built-in annotations. [#36377](https://github.com/grafana/grafana/pull/36377), [@ashharrison90](https://github.com/ashharrison90)
* **AzureMonitor:** Add curated dashboards for Azure services. [#35356](https://github.com/grafana/grafana/pull/35356), [@avidhanju](https://github.com/avidhanju)
* **AzureMonitor:** Add support for deep links to Microsoft Azure portal for Metrics. [#32273](https://github.com/grafana/grafana/pull/32273), [@shuotli](https://github.com/shuotli)
* **AzureMonitor:** Remove support for different credentials for Azure Monitor Logs. [#35121](https://github.com/grafana/grafana/pull/35121), [@andresmgot](https://github.com/andresmgot)
* **AzureMonitor:** Support querying any Resource for Logs queries. [#33879](https://github.com/grafana/grafana/pull/33879), [@joshhunt](https://github.com/joshhunt)
* **Elasticsearch:** Add frozen indices search support. [#36018](https://github.com/grafana/grafana/pull/36018), [@Elfo404](https://github.com/Elfo404)
* **Elasticsearch:** Name fields after template variables values instead of their name. [#36035](https://github.com/grafana/grafana/pull/36035), [@Elfo404](https://github.com/Elfo404)
* **Elasticsearch:** add rate aggregation. [#33311](https://github.com/grafana/grafana/pull/33311), [@estermv](https://github.com/estermv)
* **Email:** Allow configuration of content types for email notifications. [#34530](https://github.com/grafana/grafana/pull/34530), [@djairhogeuens](https://github.com/djairhogeuens)
* **Explore:** Add more meta information when line limit is hit. [#33069](https://github.com/grafana/grafana/pull/33069), [@ivanahuckova](https://github.com/ivanahuckova)
* **Explore:** UI improvements to trace view. [#34276](https://github.com/grafana/grafana/pull/34276), [@aocenas](https://github.com/aocenas)
* **FieldOverrides:** Added support to change display name in an override field and have it be matched by a later rule. [#35893](https://github.com/grafana/grafana/pull/35893), [@torkelo](https://github.com/torkelo)
* **HTTP Client:** Introduce `dataproxy_max_idle_connections` config variable. [#35864](https://github.com/grafana/grafana/pull/35864), [@dsotirakis](https://github.com/dsotirakis)
* **InfluxDB:** InfluxQL: adds tags to timeseries data. [#36702](https://github.com/grafana/grafana/pull/36702), [@gabor](https://github.com/gabor)
* **InfluxDB:** InfluxQL: make measurement search case insensitive. [#34563](https://github.com/grafana/grafana/pull/34563), [@gabor](https://github.com/gabor)
* **Legacy Alerting:** Replace simplejson with a struct in webhook notification channel. [#34952](https://github.com/grafana/grafana/pull/34952), [@KEVISONG](https://github.com/KEVISONG)
* **Legend:** Updates display name for Last (not null) to just Last*. [#35633](https://github.com/grafana/grafana/pull/35633), [@torkelo](https://github.com/torkelo)
* **Logs panel:** Add option to show common labels. [#36166](https://github.com/grafana/grafana/pull/36166), [@ivanahuckova](https://github.com/ivanahuckova)
* **Loki:** Add $__range variable. [#36175](https://github.com/grafana/grafana/pull/36175), [@ivanahuckova](https://github.com/ivanahuckova)
* **Loki:** Add support for "label_values(log stream selector, label)" in templating. [#35488](https://github.com/grafana/grafana/pull/35488), [@ivanahuckova](https://github.com/ivanahuckova)
* **Loki:** Add support for ad-hoc filtering in dashboard. [#36393](https://github.com/grafana/grafana/pull/36393), [@ivanahuckova](https://github.com/ivanahuckova)
* **MySQL Datasource:** Add timezone parameter. [#27535](https://github.com/grafana/grafana/pull/27535), [@andipabst](https://github.com/andipabst)
* **NodeGraph:** Show gradient fields in legend. [#34078](https://github.com/grafana/grafana/pull/34078), [@aocenas](https://github.com/aocenas)
* **PanelOptions:** Don't mutate panel options/field config object when updating. [#36441](https://github.com/grafana/grafana/pull/36441), [@dprokop](https://github.com/dprokop)
* **PieChart:** Make pie gradient more subtle to match other charts. [#36961](https://github.com/grafana/grafana/pull/36961), [@nikki-kiga](https://github.com/nikki-kiga)
* **Prometheus:** Update PromQL typeahead and highlighting. [#36730](https://github.com/grafana/grafana/pull/36730), [@ekpdt](https://github.com/ekpdt)
* **Prometheus:** interpolate variable for step field. [#36437](https://github.com/grafana/grafana/pull/36437), [@zoltanbedi](https://github.com/zoltanbedi)
* **Provisioning:** Improve validation by validating across all dashboard providers. [#26742](https://github.com/grafana/grafana/pull/26742), [@nabokihms](https://github.com/nabokihms)
* **SQL Datasources:** Allow multiple string/labels columns with time series. [#36485](https://github.com/grafana/grafana/pull/36485), [@kylebrandt](https://github.com/kylebrandt)
* **Select:** Portal select menu to document.body. [#36398](https://github.com/grafana/grafana/pull/36398), [@ashharrison90](https://github.com/ashharrison90)
* **Team Sync:** Add group mapping to support team sync in the Generic OAuth provider. [#36307](https://github.com/grafana/grafana/pull/36307), [@wardbekker](https://github.com/wardbekker)
* **Tooltip:** Make active series more noticeable. [#36824](https://github.com/grafana/grafana/pull/36824), [@nikki-kiga](https://github.com/nikki-kiga)
* **Tracing:** Add support to configure trace to logs start and end time. [#34995](https://github.com/grafana/grafana/pull/34995), [@zoltanbedi](https://github.com/zoltanbedi)
* **Transformations:** Skip merge when there is only a single data frame. [#36407](https://github.com/grafana/grafana/pull/36407), [@edgarpoce](https://github.com/edgarpoce)
* **ValueMapping:** Added support for mapping text to color, boolean values, NaN and Null. Improved UI for value mapping. [#33820](https://github.com/grafana/grafana/pull/33820), [@torkelo](https://github.com/torkelo)
* **Visualizations:** Dynamically set any config (min, max, unit, color, thresholds) from query results. [#36548](https://github.com/grafana/grafana/pull/36548), [@torkelo](https://github.com/torkelo)
* **live:** Add support to handle origin without a value for the port when matching with root_url. [#36834](https://github.com/grafana/grafana/pull/36834), [@FZambia](https://github.com/FZambia)

### Bug fixes

* **Alerting:** Handle marshaling Inf values. [#36947](https://github.com/grafana/grafana/pull/36947), [@kylebrandt](https://github.com/kylebrandt)
* **AzureMonitor:** Fix macro resolution for template variables. [#36944](https://github.com/grafana/grafana/pull/36944), [@andresmgot](https://github.com/andresmgot)
* **AzureMonitor:** Fix queries with Microsoft.NetApp/../../volumes resources. [#32661](https://github.com/grafana/grafana/pull/32661), [@pckls](https://github.com/pckls)
* **AzureMonitor:** Request and concat subsequent resource pages. [#36958](https://github.com/grafana/grafana/pull/36958), [@andresmgot](https://github.com/andresmgot)
* **Bug:** Fix parse duration for day. [#36942](https://github.com/grafana/grafana/pull/36942), [@idafurjes](https://github.com/idafurjes)
* **Datasources:** Improve error handling for error messages. [#35120](https://github.com/grafana/grafana/pull/35120), [@ifrost](https://github.com/ifrost)
* **Explore:** Correct the functionality of shift-enter shortcut across all uses. [#36600](https://github.com/grafana/grafana/pull/36600), [@ivanahuckova](https://github.com/ivanahuckova)
* **Explore:** Show all dataFrames in data tab in Inspector. [#32161](https://github.com/grafana/grafana/pull/32161), [@ivanahuckova](https://github.com/ivanahuckova)
* **GraphNG:** Fix Tooltip mode 'All' for XYChart. [#31260](https://github.com/grafana/grafana/pull/31260), [@Posnet](https://github.com/Posnet)
* **Loki:** Fix highlight of logs when using filter expressions with backticks. [#36024](https://github.com/grafana/grafana/pull/36024), [@ivanahuckova](https://github.com/ivanahuckova)
* **Modal:** Force modal content to overflow with scroll. [#36754](https://github.com/grafana/grafana/pull/36754), [@ashharrison90](https://github.com/ashharrison90)
* **Plugins:** Ignore symlinked folders when verifying plugin signature. [#34434](https://github.com/grafana/grafana/pull/34434), [@wbrowne](https://github.com/wbrowne)

### Breaking changes

When parsing Elasticsearch query responses using template variables, each field gets named after the variable value instead of the name.
For example, executing a `terms` aggregation on a variable named `$groupBy` that has `@hostname` as a value, the resulting column in the table response will be `@hostname` instead of `$groupBy` Issue [#36035](https://github.com/grafana/grafana/issues/36035)

Azure Monitor data source no longer supports different credentials for Metrics and Logs in existing data sources. To use different credentials for Azure Monitor Logs, create another data source. Issue [#35121](https://github.com/grafana/grafana/issues/35121)

Existing Azure Metrics Logs queries for Log Analytics Workspaces should be backward compatible with this change and should not get impacted. Panels will be migrated to use the new resource-centric backend when you first edit and save them.

Application Insights and Insights Analytics queries are now read-only and cannot be modified. To update Application Insights queries, users can manually recreate them as Metrics queries, and Insights Analytics are recreated with Logs.

Issue [#33879](https://github.com/grafana/grafana/issues/33879)

### Plugin development fixes & changes

* **Toolkit:** Improve error messages when tasks fail. [#36381](https://github.com/grafana/grafana/pull/36381), [@joshhunt](https://github.com/joshhunt)
docs/sources/release-notes/release-notes-8-1-0-beta2.md (new file, 21 lines)

@@ -0,0 +1,21 @@
+++
title = "Release notes for Grafana 8.1.0-beta2"
[_build]
list = false
+++

<!-- Auto generated by update changelog github action -->

# Release notes for Grafana 8.1.0-beta2

### Features and enhancements

* **Alerting:** Expand the value string in alert annotations and labels. [#37051](https://github.com/grafana/grafana/pull/37051), [@gerobinson](https://github.com/gerobinson)
* **Auth:** Add Azure HTTP authentication middleware. [#36932](https://github.com/grafana/grafana/pull/36932), [@kostrse](https://github.com/kostrse)
* **Auth:** Auth: Pass user role when using the authentication proxy. [#36729](https://github.com/grafana/grafana/pull/36729), [@yuwaMSFT2](https://github.com/yuwaMSFT2)
* **Gazetteer:** Update countries.json file to allow for linking to 3-letter country codes. [#37129](https://github.com/grafana/grafana/pull/37129), [@bryanuribe](https://github.com/bryanuribe)

### Bug fixes

* **Config:** Fix Docker builds by correcting formatting in sample.ini. [#37106](https://github.com/grafana/grafana/pull/37106), [@FZambia](https://github.com/FZambia)
* **Explore:** Fix encoding of internal URLs. [#36919](https://github.com/grafana/grafana/pull/36919), [@aocenas](https://github.com/aocenas)
@@ -11,6 +11,7 @@ as info on deprecations, breaking changes and plugin development read the [relea

## Grafana 8

- [What's new in 8.1]({{< relref "whats-new-in-v8-1" >}})
- [What's new in 8.0]({{< relref "whats-new-in-v8-0" >}})

## Grafana 7

@@ -10,8 +10,6 @@ list = false

# What’s new in Grafana v8.0

> **Note:** This topic will be updated frequently between now and the final release.

This topic includes the release notes for Grafana v8.0. For all details, read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md).

## Grafana OSS features
docs/sources/whatsnew/whats-new-in-v8-1.md (new file, 79 lines)

@@ -0,0 +1,79 @@
+++
title = "What's new in Grafana v8.1"
description = "Feature and improvement highlights for Grafana v8.1"
keywords = ["grafana", "new", "documentation", "8.1", "release notes"]
weight = -33
aliases = ["/docs/grafana/latest/guides/whats-new-in-v8-1/"]
[_build]
list = false
+++

# What’s new in Grafana v8.1

> **Note:** This topic will be updated frequently between now and the final release.

This topic includes the release notes for Grafana v8.1. For all details, read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md).

## Grafana OSS features

These features are included in the Grafana open source edition.

### Time series panel updates

Time series panels have been updated with the ability to color series and line by thresholds or gradient color scales. This allows users to create time series panels where the line color can change dynamically based on thresholds or using gradient color scales.

Also, we have added possibility to create annotations directly from the panel. For more information, refer to ...

### Geomap panel

Grafana 8.1 introduces the foundation for our new map panel. This new panel leverages [OpenLayers](https://openlayers.org/) and gives us a flexible solution for extending the way we use the new Geomap panel moving forward. We expect to ship this new visualization with the ability to use [Circle Overlays](https://github.com/grafana/grafana/pull/36680) and [Heatmaps](https://github.com/open-o11y/grafana/pull/18).

For more information, refer to [issue 36585](https://github.com/grafana/grafana/issues/36585). For documentation, refer to ...

### Annotation panel

This section is for the new panel...

### Transformations improvements

Grafana 8.1 includes many transformations enhancements.

#### Config from query (Beta)

This transformation enables panel config (Threshold, Min, Max, etc.) to be derived from query results. For more information, refer to [Config from query results transform]({{< relref "../panels/transformations/config-from-query.md" >}}).

#### Rows to fields (Beta)

This transformation enables rows in returned data to be converted into separate fields. Prior to this enhancement, you could style and configure fields individually, but not rows. For more information, refer to [Rows to fields transform]({{< relref "../panels/transformations/rows-to-fields.md" >}}).

#### Contextual & Inline Help

Additional inline help will be available for Transformations. We can now share examples of how to use specific transformations and point users directly to the appropriate place in the docs for more information.

### Data source updates

The following data source updates are included with this Grafana release.

#### MySQL Data Source

We have added timezone support. As a result, you can now specify the time zone used in the database session, such as `Europe/Berlin` or `+02:00`.

### Trace to logs improvements

We have updated the default behavior from creating a one (1) hour span Loki query to only query at the exact time the trace span started for the duration of it. For more fine grained control over this you can shift this time in the tracing data source settings. It is now possible to to shift the start time and end time of the Loki query by the set amount. For more information, refer to [Trace to logs]({{< relref "../datasources/tempo.md#trace-to-logs" >}}).

#### Documentation updates

New panel summaries and preview on the top level [Visualizations]({{< relref "../panels/visualizations/_index.md" >}}) page to help users pick or learn about specific visualizations more easily.

## Enterprise features

These features are included in the Grafana Enterprise edition.

### Oauth2 - Team Sync to Group Mapping

With Team Sync you can map your Generic OAuth groups to teams in Grafana so that the users are automatically added to the correct teams.
@@ -1,6 +1,8 @@
{
"npmClient": "yarn",
"useWorkspaces": true,
"packages": ["packages/*"],
"version": "8.1.0-pre"
"packages": [
"packages/*"
],
"version": "8.1.0-beta.2"
}
@@ -3,7 +3,7 @@
"license": "AGPL-3.0-only",
"private": true,
"name": "grafana",
"version": "8.1.0-pre",
"version": "8.1.0-beta.2",
"repository": "github:grafana/grafana",
"scripts": {
"api-tests": "jest --notify --watch --config=devenv/e2e-api-tests/jest.js",
@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/data",
"version": "8.1.0-pre",
"version": "8.1.0-beta.2",
"description": "Grafana Data Library",
"keywords": [
"typescript"
@@ -4,6 +4,7 @@ import { Vector, QueryResultMeta } from '../types';
import { guessFieldTypeFromNameAndValue, toDataFrameDTO } from './processDataFrame';
import { FunctionalVector } from '../vector/FunctionalVector';

/** @public */
export type ValueConverter<T = any> = (val: any) => T;

const NOOP: ValueConverter = (v) => v;
@@ -1,4 +1,4 @@
import { DataFrame } from '../types';
import { AnnotationEvent, DataFrame } from '../types';
import { BusEventWithPayload } from './types';

/**

@@ -34,3 +34,8 @@ export class DataHoverClearEvent extends BusEventWithPayload<DataHoverPayload> {
export class DataSelectEvent extends BusEventWithPayload<DataHoverPayload> {
static type = 'data-select';
}

/** @alpha */
export class AnnotationChangeEvent extends BusEventWithPayload<Partial<AnnotationEvent>> {
static type = 'annotation-event';
}
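The new `AnnotationChangeEvent` added above is a regular bus event carrying a `Partial<AnnotationEvent>` payload. A minimal usage sketch, assuming the event and `EventBusSrv` are both exported from `@grafana/data` (in a panel you would normally use the `eventBus` provided through `PanelProps` rather than constructing your own bus):

```typescript
import { EventBusSrv, AnnotationChangeEvent } from '@grafana/data';

// Throwaway bus purely for illustration.
const bus = new EventBusSrv();

// React to annotation changes published anywhere on this bus.
const subscription = bus.subscribe(AnnotationChangeEvent, (event) => {
  console.log('annotation changed:', event.payload);
});

// The payload is Partial<AnnotationEvent>, so any subset of its fields is valid.
bus.publish(new AnnotationChangeEvent({ time: Date.now(), text: 'Deploy finished' }));

subscription.unsubscribe();
```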
@@ -644,7 +644,7 @@ describe('getLinksSupplier', () => {
expect(links[0]).toEqual(
expect.objectContaining({
title: 'testDS',
href: '/explore?left={"datasource":"testDS","queries":["12345"]}',
href: `/explore?left=${encodeURIComponent('{"datasource":"testDS","queries":["12345"]}')}`,
onClick: undefined,
})
);
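The test change above reflects the "Explore: Fix encoding of internal URLs" entry from the changelog: the `left` state is now percent-encoded before being placed in the URL. A small sketch of the difference, using `JSON.stringify` as a stand-in for the `serializeStateToUrlParam` helper that the real code (shown further below) uses:

```typescript
// Hypothetical Explore state mirroring the shape used in the test above.
const exploreState = { datasource: 'testDS', queries: ['12345'] };

// Previously the raw JSON was interpolated straight into the URL:
const before = `/explore?left=${JSON.stringify(exploreState)}`;

// Now the state is percent-encoded, so '{', '"' and '}' survive intact:
const after = `/explore?left=${encodeURIComponent(JSON.stringify(exploreState))}`;

console.log(before); // /explore?left={"datasource":"testDS","queries":["12345"]}
console.log(after);  // /explore?left=%7B%22datasource%22%3A%22testDS%22%2C...
```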
@@ -32,6 +32,7 @@ import { getFrameDisplayName } from './fieldState';
import { getTimeField } from '../dataframe/processDataFrame';
import { mapInternalLinkToExplore } from '../utils/dataLinks';
import { getTemplateProxyForField } from './templateProxies';
import { asHexString } from '../themes/colorManipulator';

interface OverrideProps {
match: FieldMatcher;

@@ -216,6 +217,13 @@ function cachingDisplayProcessor(disp: DisplayProcessor, maxCacheSize = 2500): D
}

v = disp(value);

// convert to hex6 or hex8 so downstream we can cheaply test for alpha (and set new alpha)
// via a simple length check (in colorManipulator) rather using slow parsing via tinycolor
if (v.color) {
v.color = asHexString(v.color);
}

cache.set(value, v);
}
return v;
@@ -68,8 +68,8 @@ export interface MapLayerOptions<TConfig = any> {
*/
export interface MapLayerHandler {
init: () => BaseLayer;
legend?: () => ReactNode;
update?: (data: PanelData) => void;
legend?: ReactNode;
}

/**
@@ -102,5 +102,5 @@ export interface MapLayerRegistryItem<TConfig = MapLayerOptions> extends Registr
/**
* Show custom elements in the panel edit UI
*/
registerOptionsUI?: (builder: PanelOptionsEditorBuilder<TConfig>) => void;
registerOptionsUI?: (builder: PanelOptionsEditorBuilder<MapLayerOptions<TConfig>>) => void;
}

@@ -2,6 +2,8 @@
// https://github.com/mui-org/material-ui/blob/1b096070faf102281f8e3c4f9b2bf50acf91f412/packages/material-ui/src/styles/colorManipulator.js#L97
// MIT License Copyright (c) 2014 Call-Em-All

import tinycolor from 'tinycolor2';

/**
* Returns a number whose value is limited to the given range.
* @param value The value to be clamped
@@ -66,6 +68,19 @@ export function rgbToHex(color: string) {
return `#${values.map((n: number) => intToHex(n)).join('')}`;
}

/**
* Converts a color to hex6 format if there is no alpha, hex8 if there is.
* @param color - Hex, RGB, HSL color
* @returns A hex color string, i.e. #ff0000 or #ff0000ff
*/
export function asHexString(color: string): string {
if (color[0] === '#') {
return color;
}
const tColor = tinycolor(color);
return tColor.getAlpha() === 1 ? tColor.toHexString() : tColor.toHex8String();
}

/**
* Converts a color from hsl format to rgb format.
* @param color - HSL color values

@@ -128,7 +128,6 @@ export interface QueryResultBase {
export interface Labels {
[key: string]: string;
}

export interface Column {
text: string; // For a Column, the 'text' is the field name
filterable?: boolean;

@@ -31,7 +31,7 @@ describe('mapInternalLinkToExplore', () => {
expect(link).toEqual(
expect.objectContaining({
title: 'dsName',
href: '/explore?left={"datasource":"dsName","queries":[{"query":"12344"}]}',
href: `/explore?left=${encodeURIComponent('{"datasource":"dsName","queries":[{"query":"12344"}]}')}`,
onClick: undefined,
})
);

@@ -67,11 +67,13 @@ export function mapInternalLinkToExplore(options: LinkToExploreOptions): LinkMod
*/
function generateInternalHref<T extends DataQuery = any>(datasourceName: string, query: T, range: TimeRange): string {
return locationUtil.assureBaseUrl(
`/explore?left=${serializeStateToUrlParam({
range: range.raw,
datasource: datasourceName,
queries: [query],
})}`
`/explore?left=${encodeURIComponent(
serializeStateToUrlParam({
range: range.raw,
datasource: datasourceName,
queries: [query],
})
)}`
);
}

@@ -1,6 +1,7 @@
|
||||
import { vectorToArray } from './vectorToArray';
|
||||
import { Vector } from '../types';
|
||||
|
||||
/** @public */
|
||||
export abstract class FunctionalVector<T = any> implements Vector<T>, Iterable<T> {
|
||||
abstract get length(): number;
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/e2e-selectors",
|
||||
"version": "8.1.0-pre",
|
||||
"version": "8.1.0-beta.2",
|
||||
"description": "Grafana End-to-End Test Selectors Library",
|
||||
"keywords": [
|
||||
"cli",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/e2e",
|
||||
"version": "8.1.0-pre",
|
||||
"version": "8.1.0-beta.2",
|
||||
"description": "Grafana End-to-End Test Library",
|
||||
"keywords": [
|
||||
"cli",
|
||||
@@ -45,7 +45,7 @@
|
||||
"types": "src/index.ts",
|
||||
"dependencies": {
|
||||
"@cypress/webpack-preprocessor": "5.9.0",
|
||||
"@grafana/e2e-selectors": "8.1.0-pre",
|
||||
"@grafana/e2e-selectors": "8.1.0-beta.2",
|
||||
"@grafana/tsconfig": "^1.0.0-rc1",
|
||||
"@mochajs/json-file-reporter": "^1.2.0",
|
||||
"blink-diff": "1.0.13",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/runtime",
|
||||
"version": "8.1.0-pre",
|
||||
"version": "8.1.0-beta.2",
|
||||
"description": "Grafana Runtime Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -22,9 +22,9 @@
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grafana/data": "8.1.0-pre",
|
||||
"@grafana/e2e-selectors": "8.1.0-pre",
|
||||
"@grafana/ui": "8.1.0-pre",
|
||||
"@grafana/data": "8.1.0-beta.2",
|
||||
"@grafana/e2e-selectors": "8.1.0-beta.2",
|
||||
"@grafana/ui": "8.1.0-beta.2",
|
||||
"history": "4.10.1",
|
||||
"systemjs": "0.20.19",
|
||||
"systemjs-plugin-css": "0.1.37"
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/toolkit",
|
||||
"version": "8.1.0-pre",
|
||||
"version": "8.1.0-beta.2",
|
||||
"description": "Grafana Toolkit",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -28,10 +28,10 @@
|
||||
"dependencies": {
|
||||
"@babel/core": "7.13.14",
|
||||
"@babel/preset-env": "7.13.12",
|
||||
"@grafana/data": "8.1.0-pre",
|
||||
"@grafana/data": "8.1.0-beta.2",
|
||||
"@grafana/eslint-config": "2.5.0",
|
||||
"@grafana/tsconfig": "^1.0.0-rc1",
|
||||
"@grafana/ui": "8.1.0-pre",
|
||||
"@grafana/ui": "8.1.0-beta.2",
|
||||
"@types/command-exists": "^1.2.0",
|
||||
"@types/expect-puppeteer": "3.3.1",
|
||||
"@types/fs-extra": "^8.1.0",
|
||||
|
||||
@@ -18,6 +18,7 @@ module.exports = {
|
||||
backgrounds: false,
|
||||
},
|
||||
},
|
||||
'@storybook/addon-a11y',
|
||||
'@storybook/addon-knobs',
|
||||
'@storybook/addon-storysource',
|
||||
'storybook-dark-mode',
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/ui",
|
||||
"version": "8.1.0-pre",
|
||||
"version": "8.1.0-beta.2",
|
||||
"description": "Grafana Components Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -29,8 +29,8 @@
|
||||
"@emotion/css": "11.1.3",
|
||||
"@emotion/react": "11.1.5",
|
||||
"@grafana/aws-sdk": "0.0.3",
|
||||
"@grafana/data": "8.1.0-pre",
|
||||
"@grafana/e2e-selectors": "8.1.0-pre",
|
||||
"@grafana/data": "8.1.0-beta.2",
|
||||
"@grafana/e2e-selectors": "8.1.0-beta.2",
|
||||
"@grafana/slate-react": "0.22.10-grafana",
|
||||
"@grafana/tsconfig": "^1.0.0-rc1",
|
||||
"@monaco-editor/react": "4.1.1",
|
||||
@@ -62,6 +62,7 @@
|
||||
"react-colorful": "5.1.2",
|
||||
"react-custom-scrollbars": "4.2.1",
|
||||
"react-dom": "17.0.1",
|
||||
"react-dropzone": "11.3.4",
|
||||
"react-highlight-words": "0.16.0",
|
||||
"react-hook-form": "7.5.3",
|
||||
"react-inlinesvg": "2.3.0",
|
||||
@@ -79,11 +80,12 @@
|
||||
"@rollup/plugin-commonjs": "16.0.0",
|
||||
"@rollup/plugin-image": "2.0.5",
|
||||
"@rollup/plugin-node-resolve": "10.0.0",
|
||||
"@storybook/addon-essentials": "6.3.0",
|
||||
"@storybook/addon-a11y": "6.3.5",
|
||||
"@storybook/addon-essentials": "6.3.5",
|
||||
"@storybook/addon-knobs": "6.3.0",
|
||||
"@storybook/addon-storysource": "6.3.0",
|
||||
"@storybook/react": "6.3.0",
|
||||
"@storybook/theming": "6.3.0",
|
||||
"@storybook/addon-storysource": "6.3.5",
|
||||
"@storybook/react": "6.3.5",
|
||||
"@storybook/theming": "6.3.5",
|
||||
"@testing-library/jest-dom": "5.11.9",
|
||||
"@types/classnames": "2.2.7",
|
||||
"@types/common-tags": "^1.8.0",
|
||||
|
||||
@@ -2,7 +2,7 @@ import React from 'react';
|
||||
import { mount, ReactWrapper } from 'enzyme';
|
||||
import { ColorPickerPopover } from './ColorPickerPopover';
|
||||
import { ColorSwatch } from './ColorSwatch';
|
||||
import { createTheme, getColorForTheme } from '@grafana/data';
|
||||
import { createTheme } from '@grafana/data';
|
||||
|
||||
describe('ColorPickerPopover', () => {
|
||||
const theme = createTheme();
|
||||
@@ -35,7 +35,7 @@ describe('ColorPickerPopover', () => {
|
||||
basicBlueSwatch.simulate('click');
|
||||
|
||||
expect(onChangeSpy).toBeCalledTimes(1);
|
||||
expect(onChangeSpy).toBeCalledWith(getColorForTheme('green', theme.v1));
|
||||
expect(onChangeSpy).toBeCalledWith(theme.visualization.getColorByName('green'));
|
||||
});
|
||||
|
||||
it('should pass color name to onChange prop when named colors enabled', () => {
|
||||
|
||||
@@ -5,7 +5,7 @@ import SpectrumPalette from './SpectrumPalette';
|
||||
import { Themeable2 } from '../../types/theme';
|
||||
import { warnAboutColorPickerPropsDeprecation } from './warnAboutColorPickerPropsDeprecation';
|
||||
import { css } from '@emotion/css';
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { GrafanaTheme2, colorManipulator } from '@grafana/data';
|
||||
import { stylesFactory, withTheme2 } from '../../themes';
|
||||
|
||||
export type ColorPickerChangeHandler = (color: string) => void;
|
||||
@@ -55,11 +55,10 @@ class UnThemedColorPickerPopover<T extends CustomPickersDescriptor> extends Reac
|
||||
handleChange = (color: any) => {
|
||||
const { onColorChange, onChange, enableNamedColors, theme } = this.props;
|
||||
const changeHandler = onColorChange || onChange;
|
||||
|
||||
if (enableNamedColors) {
|
||||
return changeHandler(color);
|
||||
}
|
||||
changeHandler(theme.visualization.getColorByName(color));
|
||||
changeHandler(colorManipulator.asHexString(theme.visualization.getColorByName(color)));
|
||||
};
|
||||
|
||||
onTabChange = (tab: PickerType | keyof T) => {
|
||||
|
||||
@@ -3,9 +3,9 @@ import React, { useMemo, useState } from 'react';
|
||||
import { RgbaStringColorPicker } from 'react-colorful';
|
||||
import tinycolor from 'tinycolor2';
|
||||
import ColorInput from './ColorInput';
|
||||
import { GrafanaTheme, getColorForTheme } from '@grafana/data';
|
||||
import { GrafanaTheme2, colorManipulator } from '@grafana/data';
|
||||
import { css, cx } from '@emotion/css';
|
||||
import { useStyles, useTheme2 } from '../../themes';
|
||||
import { useStyles2, useTheme2 } from '../../themes';
|
||||
import { useThrottleFn } from 'react-use';
|
||||
|
||||
export interface SpectrumPaletteProps {
|
||||
@@ -15,26 +15,33 @@ export interface SpectrumPaletteProps {
|
||||
|
||||
const SpectrumPalette: React.FunctionComponent<SpectrumPaletteProps> = ({ color, onChange }) => {
|
||||
const [currentColor, setColor] = useState(color);
|
||||
useThrottleFn(onChange, 500, [currentColor]);
|
||||
|
||||
useThrottleFn(
|
||||
(c) => {
|
||||
onChange(colorManipulator.asHexString(theme.visualization.getColorByName(c)));
|
||||
},
|
||||
500,
|
||||
[currentColor]
|
||||
);
|
||||
|
||||
const theme = useTheme2();
|
||||
const styles = useStyles(getStyles);
|
||||
const styles = useStyles2(getStyles);
|
||||
|
||||
const rgbaString = useMemo(() => {
|
||||
return currentColor.startsWith('rgba')
|
||||
? currentColor
|
||||
: tinycolor(getColorForTheme(currentColor, theme.v1)).toRgbString();
|
||||
}, [currentColor, theme]);
|
||||
: tinycolor(theme.visualization.getColorByName(color)).toRgbString();
|
||||
}, [currentColor, theme, color]);
|
||||
|
||||
return (
|
||||
<div className={styles.wrapper}>
|
||||
<RgbaStringColorPicker className={cx(styles.root)} color={rgbaString} onChange={setColor} />
|
||||
<ColorInput theme={theme} color={currentColor} onChange={setColor} className={styles.colorInput} />
|
||||
<ColorInput theme={theme} color={rgbaString} onChange={setColor} className={styles.colorInput} />
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const getStyles = (theme: GrafanaTheme) => ({
|
||||
const getStyles = (theme: GrafanaTheme2) => ({
|
||||
wrapper: css`
|
||||
flex-grow: 1;
|
||||
`,
|
||||
@@ -45,24 +52,24 @@ const getStyles = (theme: GrafanaTheme) => ({
|
||||
|
||||
.react-colorful {
|
||||
&__saturation {
|
||||
border-radius: ${theme.border.radius.sm} ${theme.border.radius.sm} 0 0;
|
||||
border-radius: ${theme.v1.border.radius.sm} ${theme.v1.border.radius.sm} 0 0;
|
||||
}
|
||||
&__alpha {
|
||||
border-radius: 0 0 ${theme.border.radius.sm} ${theme.border.radius.sm};
|
||||
border-radius: 0 0 ${theme.v1.border.radius.sm} ${theme.v1.border.radius.sm};
|
||||
}
|
||||
&__alpha,
|
||||
&__hue {
|
||||
height: ${theme.spacing.md};
|
||||
height: ${theme.spacing(2)};
|
||||
position: relative;
|
||||
}
|
||||
&__pointer {
|
||||
height: ${theme.spacing.md};
|
||||
width: ${theme.spacing.md};
|
||||
height: ${theme.spacing(2)};
|
||||
width: ${theme.spacing(2)};
|
||||
}
|
||||
}
|
||||
`,
|
||||
colorInput: css`
|
||||
margin-top: ${theme.spacing.md};
|
||||
margin-top: ${theme.spacing(2)};
|
||||
`,
|
||||
});
|
||||
|
||||
|
||||
@@ -56,7 +56,14 @@ const HttpAccessHelp = () => (
|
||||
);
|
||||
|
||||
export const DataSourceHttpSettings: React.FC<HttpSettingsProps> = (props) => {
|
||||
const { defaultUrl, dataSourceConfig, onChange, showAccessOptions, sigV4AuthToggleEnabled } = props;
|
||||
const {
|
||||
defaultUrl,
|
||||
dataSourceConfig,
|
||||
onChange,
|
||||
showAccessOptions,
|
||||
sigV4AuthToggleEnabled,
|
||||
azureAuthSettings,
|
||||
} = props;
|
||||
let urlTooltip;
|
||||
const [isAccessHelpVisible, setIsAccessHelpVisible] = useState(false);
|
||||
const theme = useTheme();
|
||||
@@ -207,6 +214,22 @@ export const DataSourceHttpSettings: React.FC<HttpSettingsProps> = (props) => {
|
||||
/>
|
||||
</div>
|
||||
|
||||
{azureAuthSettings?.azureAuthEnabled && (
|
||||
<div className="gf-form-inline">
|
||||
<Switch
|
||||
label="Azure Authentication"
|
||||
labelClass="width-13"
|
||||
checked={dataSourceConfig.jsonData.azureAuth || false}
|
||||
onChange={(event) => {
|
||||
onSettingsChange({
|
||||
jsonData: { ...dataSourceConfig.jsonData, azureAuth: event!.currentTarget.checked },
|
||||
});
|
||||
}}
|
||||
tooltip="Use Azure authentication for Azure endpoint."
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{sigV4AuthToggleEnabled && (
|
||||
<div className="gf-form-inline">
|
||||
<Switch
|
||||
@@ -238,6 +261,12 @@ export const DataSourceHttpSettings: React.FC<HttpSettingsProps> = (props) => {
|
||||
</>
|
||||
)}
|
||||
|
||||
{azureAuthSettings?.azureAuthEnabled &&
|
||||
azureAuthSettings?.azureSettingsUI &&
|
||||
dataSourceConfig.jsonData.azureAuth && (
|
||||
<azureAuthSettings.azureSettingsUI dataSourceConfig={dataSourceConfig} onChange={onChange} />
|
||||
)}
|
||||
|
||||
{dataSourceConfig.jsonData.sigV4Auth && <SigV4AuthSettings {...props} />}
|
||||
|
||||
{(dataSourceConfig.jsonData.tlsAuth || dataSourceConfig.jsonData.tlsAuthWithCACert) && (
|
||||
|
||||
@@ -1,5 +1,11 @@
|
||||
import React from 'react';
|
||||
import { DataSourceSettings } from '@grafana/data';
|
||||
|
||||
export interface AzureAuthSettings {
|
||||
azureAuthEnabled: boolean;
|
||||
azureSettingsUI?: React.ComponentType<HttpSettingsBaseProps>;
|
||||
}
|
||||
|
||||
export interface HttpSettingsBaseProps {
|
||||
/** The configuration object of the data source */
|
||||
dataSourceConfig: DataSourceSettings<any, any>;
|
||||
@@ -14,4 +20,6 @@ export interface HttpSettingsProps extends HttpSettingsBaseProps {
|
||||
showAccessOptions?: boolean;
|
||||
/** Show the SigV4 auth toggle option */
|
||||
sigV4AuthToggleEnabled?: boolean;
|
||||
/** Azure authentication settings **/
|
||||
azureAuthSettings?: AzureAuthSettings;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { ChangeEvent } from 'react';
|
||||
import { css } from '@emotion/css';
|
||||
import { dateTimeFormat } from '@grafana/data';
|
||||
import { dateTime, dateTimeFormat } from '@grafana/data';
|
||||
import { DatePicker } from '../DatePicker/DatePicker';
|
||||
import { Props as InputProps, Input } from '../../Input/Input';
|
||||
import { useStyles } from '../../../themes';
|
||||
@@ -46,7 +46,7 @@ export const DatePickerWithInput = ({
|
||||
/>
|
||||
<DatePicker
|
||||
isOpen={open}
|
||||
value={value && typeof value !== 'string' ? value : new Date()}
|
||||
value={value && typeof value !== 'string' ? value : dateTime().toDate()}
|
||||
onChange={(ev) => {
|
||||
onChange(ev);
|
||||
if (closeOnSelect) {
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
import { Story, Preview, Props } from '@storybook/addon-docs/blocks';
|
||||
import { FileDropzone } from './FileDropzone';
|
||||
|
||||
# FileDropzone
|
||||
|
||||
A dropzone component to use for file uploads.
|
||||
|
||||
### Usage
|
||||
|
||||
```jsx
|
||||
import { FileDropzone } from '@grafana/ui';
|
||||
|
||||
<FileDropzone onLoad={(result) => console.log(result)} />;
|
||||
```
|
||||
|
||||
### Props
|
||||
|
||||
<Props of={FileDropzone} />
|
||||
@@ -0,0 +1,23 @@
|
||||
import { FileDropzone, FileDropzoneProps } from '@grafana/ui';
|
||||
import { Meta, Story } from '@storybook/react';
|
||||
import React from 'react';
|
||||
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
|
||||
import mdx from './FileDropzone.mdx';
|
||||
|
||||
export default {
|
||||
title: 'Forms/FileDropzone',
|
||||
component: FileDropzone,
|
||||
decorators: [withCenteredStory],
|
||||
parameters: {
|
||||
docs: {
|
||||
page: mdx,
|
||||
},
|
||||
},
|
||||
argTypes: {
|
||||
onLoad: { action: 'onLoad' },
|
||||
},
|
||||
} as Meta;
|
||||
|
||||
export const Basic: Story<FileDropzoneProps> = (args) => {
|
||||
return <FileDropzone {...args} />;
|
||||
};
|
||||
@@ -0,0 +1,130 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import React from 'react';
|
||||
import { FileDropzone } from './FileDropzone';
|
||||
import { REMOVE_FILE } from './FileListItem';
|
||||
|
||||
const file = ({
|
||||
fileBits = JSON.stringify({ ping: true }),
|
||||
fileName = 'ping.json',
|
||||
options = { type: 'application/json' },
|
||||
}) => new File([fileBits], fileName, options);
|
||||
|
||||
const files = [
|
||||
file({}),
|
||||
file({ fileName: 'pong.json' }),
|
||||
file({ fileBits: 'something', fileName: 'something.jpg', options: { type: 'image/jpeg' } }),
|
||||
];
|
||||
|
||||
describe('The FileDropzone component', () => {
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
it('should show the default text of the dropzone component when no props passed', () => {
|
||||
render(<FileDropzone />);
|
||||
|
||||
expect(screen.getByText('Upload file')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should show accepted file type when passed in the options as a string', () => {
|
||||
render(<FileDropzone options={{ accept: '.json' }} />);
|
||||
|
||||
expect(screen.getByText('Accepted file type: .json')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should show accepted file types when passed in the options as a string array', () => {
|
||||
render(<FileDropzone options={{ accept: ['.json', '.txt'] }} />);
|
||||
|
||||
expect(screen.getByText('Accepted file types: .json, .txt')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should handle file removal from the list', async () => {
|
||||
render(<FileDropzone />);
|
||||
|
||||
dispatchEvt(screen.getByTestId('dropzone'), 'drop', mockData(files));
|
||||
|
||||
expect(await screen.findAllByLabelText(REMOVE_FILE)).toHaveLength(3);
|
||||
|
||||
fireEvent.click(screen.getAllByLabelText(REMOVE_FILE)[0]);
|
||||
|
||||
expect(await screen.findAllByLabelText(REMOVE_FILE)).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should overwrite selected file when multiple false', async () => {
|
||||
render(<FileDropzone options={{ multiple: false }} />);
|
||||
|
||||
dispatchEvt(screen.getByTestId('dropzone'), 'drop', mockData([file({})]));
|
||||
|
||||
expect(await screen.findAllByLabelText(REMOVE_FILE)).toHaveLength(1);
|
||||
expect(screen.getByText('ping.json')).toBeInTheDocument();
|
||||
|
||||
dispatchEvt(screen.getByTestId('dropzone'), 'drop', mockData([file({ fileName: 'newFile.jpg' })]));
|
||||
|
||||
expect(await screen.findByText('newFile.jpg')).toBeInTheDocument();
|
||||
expect(screen.getAllByLabelText(REMOVE_FILE)).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should use the passed readAs prop with the FileReader API', async () => {
|
||||
render(<FileDropzone readAs="readAsDataURL" />);
|
||||
const fileReaderSpy = jest.spyOn(FileReader.prototype, 'readAsDataURL');
|
||||
|
||||
dispatchEvt(screen.getByTestId('dropzone'), 'drop', mockData([file({})]));
|
||||
|
||||
expect(await screen.findByText('ping.json')).toBeInTheDocument();
|
||||
expect(fileReaderSpy).toBeCalled();
|
||||
});
|
||||
|
||||
it('should use the readAsText FileReader API if no readAs prop passed', async () => {
|
||||
render(<FileDropzone />);
|
||||
const fileReaderSpy = jest.spyOn(FileReader.prototype, 'readAsText');
|
||||
|
||||
dispatchEvt(screen.getByTestId('dropzone'), 'drop', mockData([file({})]));
|
||||
|
||||
expect(await screen.findByText('ping.json')).toBeInTheDocument();
|
||||
expect(fileReaderSpy).toBeCalled();
|
||||
});
|
||||
|
||||
it('should use the onDrop that is passed', async () => {
|
||||
const onDrop = jest.fn();
|
||||
const fileToUpload = file({});
|
||||
render(<FileDropzone options={{ onDrop }} />);
|
||||
const fileReaderSpy = jest.spyOn(FileReader.prototype, 'readAsText');
|
||||
|
||||
dispatchEvt(screen.getByTestId('dropzone'), 'drop', mockData([fileToUpload]));
|
||||
|
||||
expect(await screen.findByText('ping.json')).toBeInTheDocument();
|
||||
expect(fileReaderSpy).not.toBeCalled();
|
||||
expect(onDrop).toBeCalledWith([fileToUpload], [], expect.anything());
|
||||
});
|
||||
|
||||
it('should show children inside the dropzone', () => {
|
||||
const component = (
|
||||
<FileDropzone>
|
||||
<p>Custom dropzone text</p>
|
||||
</FileDropzone>
|
||||
);
|
||||
render(component);
|
||||
|
||||
screen.getByText('Custom dropzone text');
|
||||
});
|
||||
});
|
||||
|
||||
function dispatchEvt(node: HTMLElement, type: string, data: any) {
|
||||
const event = new Event(type, { bubbles: true });
|
||||
Object.assign(event, data);
|
||||
fireEvent(node, event);
|
||||
}
|
||||
|
||||
function mockData(files: File[]) {
|
||||
return {
|
||||
dataTransfer: {
|
||||
files,
|
||||
items: files.map((file) => ({
|
||||
kind: 'file',
|
||||
type: file.type,
|
||||
getAsFile: () => file,
|
||||
})),
|
||||
types: ['Files'],
|
||||
},
|
||||
};
|
||||
}
|
||||
packages/grafana-ui/src/components/FileDropzone/FileDropzone.tsx (new file, 220 lines)
@@ -0,0 +1,220 @@
|
||||
import { css, cx } from '@emotion/css';
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { uniqueId } from 'lodash';
|
||||
import React, { ReactNode, useCallback, useState } from 'react';
|
||||
import { DropEvent, DropzoneOptions, FileRejection, useDropzone } from 'react-dropzone';
|
||||
import { useTheme2 } from '../../themes';
|
||||
import { Icon } from '../Icon/Icon';
|
||||
import { FileListItem } from './FileListItem';
|
||||
|
||||
export interface FileDropzoneProps {
|
||||
/**
|
||||
* Use the children property to have custom dropzone view.
|
||||
*/
|
||||
children?: ReactNode;
|
||||
/**
|
||||
* Use this property to override the default behaviour for the react-dropzone options.
|
||||
* @default {
|
||||
* maxSize: Infinity,
|
||||
* minSize: 0,
|
||||
* multiple: true,
|
||||
* maxFiles: 0,
|
||||
* }
|
||||
*/
|
||||
options?: DropzoneOptions;
|
||||
/**
|
||||
* Use this to change the FileReader's read.
|
||||
*/
|
||||
readAs?: 'readAsArrayBuffer' | 'readAsText' | 'readAsBinaryString' | 'readAsDataURL';
|
||||
/**
|
||||
* Use the onLoad function to get the result from FileReader.
|
||||
*/
|
||||
onLoad?: (result: string | ArrayBuffer | null) => void;
|
||||
}
|
||||
|
||||
export interface DropzoneFile {
|
||||
file: File;
|
||||
id: string;
|
||||
error: DOMException | null;
|
||||
progress?: number;
|
||||
abortUpload?: () => void;
|
||||
retryUpload?: () => void;
|
||||
}
|
||||
|
||||
export function FileDropzone({ options, children, readAs, onLoad }: FileDropzoneProps) {
|
||||
const [files, setFiles] = useState<DropzoneFile[]>([]);
|
||||
|
||||
const setFileProperty = useCallback(
|
||||
(customFile: DropzoneFile, action: (customFileToModify: DropzoneFile) => void) => {
|
||||
setFiles((oldFiles) => {
|
||||
return oldFiles.map((oldFile) => {
|
||||
if (oldFile.id === customFile.id) {
|
||||
action(oldFile);
|
||||
return oldFile;
|
||||
}
|
||||
return oldFile;
|
||||
});
|
||||
});
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const onDrop = useCallback(
|
||||
(acceptedFiles: File[], rejectedFiles: FileRejection[], event: DropEvent) => {
|
||||
let customFiles = acceptedFiles.map(mapToCustomFile);
|
||||
if (options?.multiple === false) {
|
||||
setFiles(customFiles);
|
||||
} else {
|
||||
setFiles((oldFiles) => [...oldFiles, ...customFiles]);
|
||||
}
|
||||
|
||||
if (options?.onDrop) {
|
||||
options.onDrop(acceptedFiles, rejectedFiles, event);
|
||||
} else {
|
||||
for (const customFile of customFiles) {
|
||||
const reader = new FileReader();
|
||||
|
||||
const read = () => {
|
||||
if (readAs) {
|
||||
reader[readAs](customFile.file);
|
||||
} else {
|
||||
reader.readAsText(customFile.file);
|
||||
}
|
||||
};
|
||||
|
||||
// Set abort FileReader
|
||||
setFileProperty(customFile, (fileToModify) => {
|
||||
fileToModify.abortUpload = () => {
|
||||
reader.abort();
|
||||
};
|
||||
fileToModify.retryUpload = () => {
|
||||
setFileProperty(customFile, (fileToModify) => {
|
||||
fileToModify.error = null;
|
||||
fileToModify.progress = undefined;
|
||||
});
|
||||
read();
|
||||
};
|
||||
});
|
||||
|
||||
reader.onabort = () => {
|
||||
setFileProperty(customFile, (fileToModify) => {
|
||||
fileToModify.error = new DOMException('Aborted');
|
||||
});
|
||||
};
|
||||
|
||||
reader.onprogress = (event) => {
|
||||
setFileProperty(customFile, (fileToModify) => {
|
||||
fileToModify.progress = event.loaded;
|
||||
});
|
||||
};
|
||||
|
||||
reader.onload = () => {
|
||||
onLoad?.(reader.result);
|
||||
};
|
||||
|
||||
reader.onerror = () => {
|
||||
setFileProperty(customFile, (fileToModify) => {
|
||||
fileToModify.error = reader.error;
|
||||
});
|
||||
};
|
||||
|
||||
read();
|
||||
}
|
||||
}
|
||||
},
|
||||
[onLoad, options, readAs, setFileProperty]
|
||||
);
|
||||
|
||||
const removeFile = (file: DropzoneFile) => {
|
||||
const newFiles = files.filter((f) => file.id !== f.id);
|
||||
setFiles(newFiles);
|
||||
};
|
||||
|
||||
const { getRootProps, getInputProps, isDragActive } = useDropzone({ ...options, onDrop });
|
||||
const theme = useTheme2();
|
||||
const styles = getStyles(theme, isDragActive);
|
||||
const fileList = files.map((file) => <FileListItem key={file.id} file={file} removeFile={removeFile} />);
|
||||
|
||||
return (
|
||||
<div className={styles.container}>
|
||||
<div data-testid="dropzone" {...getRootProps({ className: styles.dropzone })}>
|
||||
<input {...getInputProps()} />
|
||||
{children ?? <FileDropzoneDefaultChildren primaryText={getPrimaryText(files, options)} />}
|
||||
</div>
|
||||
{options?.accept && (
|
||||
<small className={cx(styles.small, styles.acceptMargin)}>{getAcceptedFileTypeText(options)}</small>
|
||||
)}
|
||||
{fileList}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export function FileDropzoneDefaultChildren({
|
||||
primaryText = 'Upload file',
|
||||
secondaryText = 'Drag and drop here or browse',
|
||||
}) {
|
||||
const theme = useTheme2();
|
||||
const styles = getStyles(theme);
|
||||
|
||||
return (
|
||||
<div className={styles.iconWrapper}>
|
||||
<Icon name="upload" size="xxl" />
|
||||
<h3>{primaryText}</h3>
|
||||
<small className={styles.small}>{secondaryText}</small>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
function getPrimaryText(files: DropzoneFile[], options?: DropzoneOptions) {
|
||||
if (options?.multiple === undefined || options?.multiple) {
|
||||
return 'Upload file';
|
||||
}
|
||||
return files.length ? 'Replace file' : 'Upload file';
|
||||
}
|
||||
|
||||
function getAcceptedFileTypeText(options: DropzoneOptions) {
|
||||
if (Array.isArray(options.accept)) {
|
||||
return `Accepted file types: ${options.accept.join(', ')}`;
|
||||
}
|
||||
|
||||
return `Accepted file type: ${options.accept}`;
|
||||
}
|
||||
|
||||
function mapToCustomFile(file: File): DropzoneFile {
|
||||
return {
|
||||
id: uniqueId('file'),
|
||||
file,
|
||||
error: null,
|
||||
};
|
||||
}
|
||||
|
||||
function getStyles(theme: GrafanaTheme2, isDragActive?: boolean) {
|
||||
return {
|
||||
container: css`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
width: 100%;
|
||||
`,
|
||||
dropzone: css`
|
||||
display: flex;
|
||||
flex: 1;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
padding: ${theme.spacing(6)};
|
||||
border-radius: 2px;
|
||||
border: 2px dashed ${theme.colors.border.medium};
|
||||
background-color: ${isDragActive ? theme.colors.background.secondary : theme.colors.background.primary};
|
||||
cursor: pointer;
|
||||
`,
|
||||
iconWrapper: css`
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
`,
|
||||
acceptMargin: css`
|
||||
margin: ${theme.spacing(2, 0, 1)};
|
||||
`,
|
||||
small: css`
|
||||
color: ${theme.colors.text.secondary};
|
||||
`,
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
import { Story, Preview, Props } from '@storybook/addon-docs/blocks';
|
||||
import { FileListItem } from './FileListItem';
|
||||
|
||||
# FileListItem
|
||||
|
||||
A FileListItem component used for the FileDropzone component to show uploaded files.
|
||||
|
||||
### Usage
|
||||
|
||||
```jsx
|
||||
import { FileListItem } from '@grafana/ui';
|
||||
|
||||
<FileListItem file={{ file: { name: 'someFile.jpg', size: 12345 } }} />;
|
||||
```
|
||||
|
||||
### Props
|
||||
|
||||
<Props of={FileListItem} />
|
||||
@@ -0,0 +1,28 @@
|
||||
import { Meta, Story } from '@storybook/react';
|
||||
import React from 'react';
|
||||
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
|
||||
import { FileListItem as FileListItemComponent, FileListItemProps } from './FileListItem';
|
||||
import mdx from './FileListItem.mdx';
|
||||
|
||||
export default {
|
||||
title: 'Forms/FileListItem',
|
||||
component: FileListItemComponent,
|
||||
decorators: [withCenteredStory],
|
||||
parameters: {
|
||||
docs: {
|
||||
page: mdx,
|
||||
},
|
||||
},
|
||||
argTypes: {
|
||||
abortUpload: { action: 'abortUpload' },
|
||||
retryUpload: { action: 'retryUpload' },
|
||||
removeFile: { action: 'removeFile' },
|
||||
},
|
||||
args: {
|
||||
file: { file: { name: 'some-file.jpg', size: 123456 } as any, id: '1', error: new DOMException('error') },
|
||||
},
|
||||
} as Meta;
|
||||
|
||||
export const FileListItem: Story<FileListItemProps> = (args) => {
|
||||
return <FileListItemComponent {...args} />;
|
||||
};
|
||||
@@ -0,0 +1,62 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import React from 'react';
|
||||
import { FileListItem, REMOVE_FILE } from './FileListItem';
|
||||
|
||||
const file = ({
|
||||
fileBits = 'prettyPicture',
|
||||
fileName = 'someFile.jpg',
|
||||
options = { lastModified: 1604849095696, type: 'image/jpeg' },
|
||||
}) => new File([fileBits], fileName, options);
|
||||
|
||||
describe('The FileListItem component', () => {
|
||||
it('should show an error message when error prop is not null', () => {
|
||||
render(<FileListItem file={{ file: file({}), id: '1', error: new DOMException('error') }} />);
|
||||
|
||||
expect(screen.getByText('error')).toBeInTheDocument();
|
||||
expect(screen.queryByLabelText('Retry')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should show a retry icon when error is not null and retryUpload prop is passed', () => {
|
||||
const retryUpload = jest.fn();
|
||||
render(<FileListItem file={{ file: file({}), id: '1', error: new DOMException('error'), retryUpload }} />);
|
||||
|
||||
fireEvent.click(screen.getByLabelText('Retry'));
|
||||
|
||||
expect(screen.getByText('error')).toBeInTheDocument();
|
||||
expect(screen.getByLabelText('Retry'));
|
||||
expect(retryUpload).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should show a progressbar when the progress prop has a value', () => {
|
||||
render(<FileListItem file={{ file: file({}), id: '1', error: null, progress: 6 }} />);
|
||||
|
||||
expect(screen.queryByText('Cancel')).not.toBeInTheDocument();
|
||||
expect(screen.getByText('46%')).toBeInTheDocument();
|
||||
expect(screen.getByRole('progressbar')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should not show a progressbar when progress is equal to the size', () => {
|
||||
render(<FileListItem file={{ file: file({}), id: '1', error: null, progress: 13 }} />);
|
||||
|
||||
expect(screen.queryByRole('progressbar')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should show a Cancel button when abortUpload prop is passed', () => {
|
||||
const abortUpload = jest.fn();
|
||||
render(<FileListItem file={{ file: file({}), id: '1', error: null, progress: 6, abortUpload }} />);
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Cancel' }));
|
||||
|
||||
expect(abortUpload).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should show a Remove icon when removeFile prop is passed', () => {
|
||||
const removeFile = jest.fn();
|
||||
const customFile = { file: file({}), id: '1', error: null };
|
||||
render(<FileListItem file={customFile} removeFile={removeFile} />);
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: REMOVE_FILE }));
|
||||
|
||||
expect(removeFile).toBeCalledWith(customFile);
|
||||
});
|
||||
});
|
||||
packages/grafana-ui/src/components/FileDropzone/FileListItem.tsx (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
import { css } from '@emotion/css';
|
||||
import { formattedValueToString, getValueFormat, GrafanaTheme2 } from '@grafana/data';
|
||||
import React from 'react';
|
||||
import { useStyles2 } from '../../themes';
|
||||
import { trimFileName } from '../../utils/file';
|
||||
import { Button } from '../Button';
|
||||
import { Icon } from '../Icon/Icon';
|
||||
import { IconButton } from '../IconButton/IconButton';
|
||||
import { DropzoneFile } from './FileDropzone';
|
||||
|
||||
export const REMOVE_FILE = 'Remove file';
|
||||
export interface FileListItemProps {
|
||||
file: DropzoneFile;
|
||||
removeFile?: (file: DropzoneFile) => void;
|
||||
}
|
||||
|
||||
export function FileListItem({ file: customFile, removeFile }: FileListItemProps) {
|
||||
const styles = useStyles2(getStyles);
|
||||
const { file, progress, error, abortUpload, retryUpload } = customFile;
|
||||
|
||||
const renderRightSide = () => {
|
||||
if (error) {
|
||||
return (
|
||||
<>
|
||||
<span className={styles.error}>{error.message}</span>
|
||||
{retryUpload && (
|
||||
<IconButton aria-label="Retry" name="sync" tooltip="Retry" tooltipPlacement="top" onClick={retryUpload} />
|
||||
)}
|
||||
{removeFile && (
|
||||
<IconButton
|
||||
className={retryUpload ? styles.marginLeft : ''}
|
||||
name="trash-alt"
|
||||
onClick={() => removeFile(customFile)}
|
||||
tooltip={REMOVE_FILE}
|
||||
aria-label={REMOVE_FILE}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
if (progress && file.size > progress) {
|
||||
return (
|
||||
<>
|
||||
<progress className={styles.progressBar} max={file.size} value={progress} />
|
||||
<span className={styles.paddingLeft}>{Math.round((progress / file.size) * 100)}%</span>
|
||||
{abortUpload && (
|
||||
<Button variant="secondary" type="button" fill="text" onClick={abortUpload}>
|
||||
Cancel
|
||||
</Button>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
return (
|
||||
removeFile && (
|
||||
<IconButton
|
||||
name="trash-alt"
|
||||
onClick={() => removeFile(customFile)}
|
||||
tooltip={REMOVE_FILE}
|
||||
aria-label={REMOVE_FILE}
|
||||
tooltipPlacement="top"
|
||||
/>
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
const valueFormat = getValueFormat('decbytes')(file.size);
|
||||
|
||||
return (
|
||||
<div className={styles.fileListContainer}>
|
||||
<span className={styles.fileNameWrapper}>
|
||||
<Icon name="file-blank" size="lg" />
|
||||
<span className={styles.padding}>{trimFileName(file.name)}</span>
|
||||
<span>{formattedValueToString(valueFormat)}</span>
|
||||
</span>
|
||||
|
||||
<div className={styles.fileNameWrapper}>{renderRightSide()}</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function getStyles(theme: GrafanaTheme2) {
|
||||
return {
|
||||
fileListContainer: css`
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: ${theme.spacing(2)};
|
||||
border: 1px dashed ${theme.colors.border.medium};
|
||||
background-color: ${theme.colors.background.secondary};
|
||||
margin-top: ${theme.spacing(1)};
|
||||
`,
|
||||
fileNameWrapper: css`
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
`,
|
||||
padding: css`
|
||||
padding: ${theme.spacing(0, 1)};
|
||||
`,
|
||||
paddingLeft: css`
|
||||
padding-left: ${theme.spacing(2)};
|
||||
`,
|
||||
marginLeft: css`
|
||||
margin-left: ${theme.spacing(1)};
|
||||
`,
|
||||
error: css`
|
||||
padding-right: ${theme.spacing(2)};
|
||||
color: ${theme.colors.error.text};
|
||||
`,
|
||||
progressBar: css`
|
||||
border-radius: ${theme.spacing(1)};
|
||||
height: 4px;
|
||||
::-webkit-progress-bar {
|
||||
background-color: ${theme.colors.border.weak};
|
||||
border-radius: ${theme.spacing(1)};
|
||||
}
|
||||
::-webkit-progress-value {
|
||||
background-color: ${theme.colors.primary.main};
|
||||
border-radius: ${theme.spacing(1)};
|
||||
}
|
||||
`,
|
||||
};
|
||||
}
|
||||
packages/grafana-ui/src/components/FileDropzone/index.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
import { FileDropzone, DropzoneFile, FileDropzoneProps, FileDropzoneDefaultChildren } from './FileDropzone';
|
||||
import { FileListItem, FileListItemProps } from './FileListItem';
|
||||
|
||||
export { FileDropzone, FileDropzoneProps, DropzoneFile, FileListItem, FileListItemProps, FileDropzoneDefaultChildren };
|
||||
@@ -5,6 +5,7 @@ import { Icon } from '../index';
|
||||
import { stylesFactory, useTheme2 } from '../../themes';
|
||||
import { ComponentSize } from '../../types/size';
|
||||
import { getButtonStyles } from '../Button';
|
||||
import { trimFileName } from '../../utils/file';
|
||||
|
||||
export interface Props {
|
||||
/** Callback function to handle uploaded file */
|
||||
@@ -17,19 +18,6 @@ export interface Props {
|
||||
size?: ComponentSize;
|
||||
}
|
||||
|
||||
function trimFileName(fileName: string) {
|
||||
const nameLength = 16;
|
||||
const delimiter = fileName.lastIndexOf('.');
|
||||
const extension = fileName.substring(delimiter);
|
||||
const file = fileName.substring(0, delimiter);
|
||||
|
||||
if (file.length < nameLength) {
|
||||
return fileName;
|
||||
}
|
||||
|
||||
return `${file.substring(0, nameLength)}...${extension}`;
|
||||
}
|
||||
|
||||
export const FileUpload: FC<Props> = ({
|
||||
onFileUpload,
|
||||
className,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import React from 'react';
|
||||
import { AlignedData } from 'uplot';
|
||||
import { Themeable2 } from '../../types';
|
||||
import { findMidPointYPosition, pluginLog, preparePlotData } from '../uPlot/utils';
|
||||
import { findMidPointYPosition, pluginLog } from '../uPlot/utils';
|
||||
import {
|
||||
DataFrame,
|
||||
FieldMatcherID,
|
||||
@@ -98,16 +98,20 @@ export class GraphNG extends React.Component<GraphNGProps, GraphNGState> {
|
||||
pluginLog('GraphNG', false, 'data aligned', alignedFrame);
|
||||
|
||||
if (alignedFrame) {
|
||||
state = {
|
||||
alignedFrame,
|
||||
alignedData: preparePlotData(alignedFrame),
|
||||
};
|
||||
pluginLog('GraphNG', false, 'data prepared', state.alignedData);
|
||||
let config = this.state?.config;
|
||||
|
||||
if (withConfig) {
|
||||
state.config = props.prepConfig(alignedFrame, this.props.frames, this.getTimeRange);
|
||||
pluginLog('GraphNG', false, 'config prepared', state.config);
|
||||
config = props.prepConfig(alignedFrame, this.props.frames, this.getTimeRange);
|
||||
pluginLog('GraphNG', false, 'config prepared', config);
|
||||
}
|
||||
|
||||
state = {
|
||||
alignedFrame,
|
||||
alignedData: config!.prepData!(alignedFrame),
|
||||
config,
|
||||
};
|
||||
|
||||
pluginLog('GraphNG', false, 'data prepared', state.alignedData);
|
||||
}
|
||||
|
||||
return state;
|
||||
@@ -123,7 +127,7 @@ export class GraphNG extends React.Component<GraphNGProps, GraphNGState> {
|
||||
.pipe(throttleTime(50))
|
||||
.subscribe({
|
||||
next: (evt) => {
|
||||
const u = this.plotInstance?.current;
|
||||
const u = this.plotInstance.current;
|
||||
if (u) {
|
||||
// Try finding left position on time axis
|
||||
const left = u.valToPos(evt.payload.point.time, 'time');
|
||||
@@ -183,6 +187,7 @@ export class GraphNG extends React.Component<GraphNGProps, GraphNGState> {
|
||||
|
||||
if (shouldReconfig) {
|
||||
newState.config = this.props.prepConfig(newState.alignedFrame, this.props.frames, this.getTimeRange);
|
||||
newState.alignedData = newState.config.prepData!(newState.alignedFrame);
|
||||
pluginLog('GraphNG', false, 'config recreated', newState.config);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -69,6 +69,7 @@ Object {
|
||||
},
|
||||
],
|
||||
"cursor": Object {
|
||||
"dataIdx": [Function],
|
||||
"drag": Object {
|
||||
"setScale": false,
|
||||
},
|
||||
|
||||
@@ -18,7 +18,9 @@ function applySpanNullsThresholds(frame: DataFrame) {
|
||||
let spanNulls = field.config.custom?.spanNulls;
|
||||
|
||||
if (typeof spanNulls === 'number') {
|
||||
field.values = new ArrayVector(nullToUndefThreshold(refValues, field.values.toArray(), spanNulls));
|
||||
if (spanNulls !== -1) {
|
||||
field.values = new ArrayVector(nullToUndefThreshold(refValues, field.values.toArray(), spanNulls));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ import { useTheme } from '../../themes';
|
||||
import mdx from './Icon.mdx';
|
||||
|
||||
export default {
|
||||
title: 'General/Icon',
|
||||
title: 'Docs overview/Icon',
|
||||
component: Icon,
|
||||
decorators: [withCenteredStory],
|
||||
parameters: {
|
||||
|
||||
@@ -26,17 +26,32 @@ export interface Props extends React.ButtonHTMLAttributes<HTMLButtonElement> {
|
||||
tooltipPlacement?: TooltipPlacement;
|
||||
/** Variant to change the color of the Icon */
|
||||
variant?: IconButtonVariant;
|
||||
/** Text avilable ony for screenscreen readers. Will use tooltip text as fallback. */
|
||||
ariaLabel?: string;
|
||||
}
|
||||
|
||||
type SurfaceType = 'dashboard' | 'panel' | 'header';
|
||||
|
||||
export const IconButton = React.forwardRef<HTMLButtonElement, Props>(
|
||||
({ name, size = 'md', iconType, tooltip, tooltipPlacement, className, variant = 'secondary', ...restProps }, ref) => {
|
||||
(
|
||||
{
|
||||
name,
|
||||
size = 'md',
|
||||
iconType,
|
||||
tooltip,
|
||||
tooltipPlacement,
|
||||
ariaLabel,
|
||||
className,
|
||||
variant = 'secondary',
|
||||
...restProps
|
||||
},
|
||||
ref
|
||||
) => {
|
||||
const theme = useTheme2();
|
||||
const styles = getStyles(theme, size, variant);
|
||||
|
||||
const button = (
|
||||
<button ref={ref} {...restProps} className={cx(styles.button, className)}>
|
||||
<button ref={ref} aria-label={ariaLabel || tooltip || ''} {...restProps} className={cx(styles.button, className)}>
|
||||
<Icon name={name} size={size} className={styles.icon} type={iconType} />
|
||||
</button>
|
||||
);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import React from 'react';
|
||||
import { getColorForTheme, GrafanaTheme } from '@grafana/data';
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { ColorPicker } from '../ColorPicker/ColorPicker';
|
||||
import { stylesFactory, useTheme } from '../../themes';
|
||||
import { useTheme2, useStyles2 } from '../../themes';
|
||||
import { css } from '@emotion/css';
|
||||
import { ColorSwatch } from '../ColorPicker/ColorSwatch';
|
||||
|
||||
@@ -17,8 +17,8 @@ export interface ColorValueEditorProps {
|
||||
* @alpha
|
||||
* */
|
||||
export const ColorValueEditor: React.FC<ColorValueEditorProps> = ({ value, onChange }) => {
|
||||
const theme = useTheme();
|
||||
const styles = getStyles(theme);
|
||||
const theme = useTheme2();
|
||||
const styles = useStyles2(getStyles);
|
||||
|
||||
return (
|
||||
<ColorPicker color={value ?? ''} onChange={onChange} enableNamedColors={true}>
|
||||
@@ -30,7 +30,7 @@ export const ColorValueEditor: React.FC<ColorValueEditorProps> = ({ value, onCha
|
||||
ref={ref}
|
||||
onClick={showColorPicker}
|
||||
onMouseLeave={hideColorPicker}
|
||||
color={value ? getColorForTheme(value, theme) : theme.colors.formInputBorder}
|
||||
color={value ? theme.visualization.getColorByName(value) : theme.components.input.borderColor}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
@@ -40,23 +40,23 @@ export const ColorValueEditor: React.FC<ColorValueEditorProps> = ({ value, onCha
|
||||
);
|
||||
};
|
||||
|
||||
const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
const getStyles = (theme: GrafanaTheme2) => {
|
||||
return {
|
||||
spot: css`
|
||||
color: ${theme.colors.text};
|
||||
background: ${theme.colors.formInputBg};
|
||||
background: ${theme.components.input.background};
|
||||
padding: 3px;
|
||||
height: ${theme.spacing.formInputHeight}px;
|
||||
border: 1px solid ${theme.colors.formInputBorder};
|
||||
height: ${theme.v1.spacing.formInputHeight}px;
|
||||
border: 1px solid ${theme.components.input.borderColor};
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
&:hover {
|
||||
border: 1px solid ${theme.colors.formInputBorderHover};
|
||||
border: 1px solid ${theme.components.input.borderHover};
|
||||
}
|
||||
`,
|
||||
colorPicker: css`
|
||||
padding: 0 ${theme.spacing.sm};
|
||||
padding: 0 ${theme.spacing(1)};
|
||||
`,
|
||||
colorText: css`
|
||||
cursor: pointer;
|
||||
@@ -64,10 +64,10 @@ const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
`,
|
||||
trashIcon: css`
|
||||
cursor: pointer;
|
||||
color: ${theme.colors.textWeak};
|
||||
color: ${theme.colors.text.secondary};
|
||||
&:hover {
|
||||
color: ${theme.colors.text};
|
||||
}
|
||||
`,
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
@@ -11,6 +11,7 @@ export interface SegmentSyncProps<T> extends SegmentProps<T>, Omit<HTMLProps<HTM
|
||||
value?: T | SelectableValue<T>;
|
||||
onChange: (item: SelectableValue<T>) => void;
|
||||
options: Array<SelectableValue<T>>;
|
||||
inputMinWidth?: number;
|
||||
}
|
||||
|
||||
export function Segment<T>({
|
||||
@@ -20,12 +21,16 @@ export function Segment<T>({
|
||||
Component,
|
||||
className,
|
||||
allowCustomValue,
|
||||
allowEmptyValue,
|
||||
placeholder,
|
||||
disabled,
|
||||
inputMinWidth,
|
||||
inputPlaceholder,
|
||||
onExpandedChange,
|
||||
autofocus = false,
|
||||
...rest
|
||||
}: React.PropsWithChildren<SegmentSyncProps<T>>) {
|
||||
const [Label, labelWidth, expanded, setExpanded] = useExpandableLabel(false);
|
||||
const [Label, labelWidth, expanded, setExpanded] = useExpandableLabel(autofocus, onExpandedChange);
|
||||
const width = inputMinWidth ? Math.max(inputMinWidth, labelWidth) : labelWidth;
|
||||
const styles = useStyles(getSegmentStyles);
|
||||
|
||||
@@ -59,10 +64,12 @@ export function Segment<T>({
|
||||
<SegmentSelect
|
||||
{...rest}
|
||||
value={value && !isObject(value) ? { value } : value}
|
||||
placeholder={inputPlaceholder}
|
||||
options={options}
|
||||
width={width}
|
||||
onClickOutside={() => setExpanded(false)}
|
||||
allowCustomValue={allowCustomValue}
|
||||
allowEmptyValue={allowEmptyValue}
|
||||
onChange={(item) => {
|
||||
setExpanded(false);
|
||||
onChange(item);
|
||||
|
||||
@@ -15,6 +15,7 @@ export interface SegmentAsyncProps<T> extends SegmentProps<T>, Omit<HTMLProps<HT
|
||||
loadOptions: (query?: string) => Promise<Array<SelectableValue<T>>>;
|
||||
onChange: (item: SelectableValue<T>) => void;
|
||||
noOptionMessageHandler?: (state: AsyncState<Array<SelectableValue<T>>>) => string;
|
||||
inputMinWidth?: number;
|
||||
}
|
||||
|
||||
export function SegmentAsync<T>({
|
||||
@@ -24,14 +25,18 @@ export function SegmentAsync<T>({
|
||||
Component,
|
||||
className,
|
||||
allowCustomValue,
|
||||
allowEmptyValue,
|
||||
disabled,
|
||||
placeholder,
|
||||
inputMinWidth,
|
||||
inputPlaceholder,
|
||||
autofocus = false,
|
||||
onExpandedChange,
|
||||
noOptionMessageHandler = mapStateToNoOptionsMessage,
|
||||
...rest
|
||||
}: React.PropsWithChildren<SegmentAsyncProps<T>>) {
|
||||
const [state, fetchOptions] = useAsyncFn(loadOptions, [loadOptions]);
|
||||
const [Label, labelWidth, expanded, setExpanded] = useExpandableLabel(false);
|
||||
const [Label, labelWidth, expanded, setExpanded] = useExpandableLabel(autofocus, onExpandedChange);
|
||||
const width = inputMinWidth ? Math.max(inputMinWidth, labelWidth) : labelWidth;
|
||||
const styles = useStyles(getSegmentStyles);
|
||||
|
||||
@@ -66,10 +71,12 @@ export function SegmentAsync<T>({
|
||||
<SegmentSelect
|
||||
{...rest}
|
||||
value={value && !isObject(value) ? { value } : value}
|
||||
placeholder={inputPlaceholder}
|
||||
options={state.value ?? []}
|
||||
width={width}
|
||||
noOptionsMessage={noOptionMessageHandler(state)}
|
||||
allowCustomValue={allowCustomValue}
|
||||
allowEmptyValue={allowEmptyValue}
|
||||
onClickOutside={() => {
|
||||
setExpanded(false);
|
||||
}}
|
||||
|
||||
@@ -10,7 +10,6 @@ import { useStyles } from '../../themes';
|
||||
export interface SegmentInputProps<T> extends SegmentProps<T>, Omit<HTMLProps<HTMLInputElement>, 'value' | 'onChange'> {
|
||||
value: string | number;
|
||||
onChange: (text: string | number) => void;
|
||||
autofocus?: boolean;
|
||||
}
|
||||
|
||||
const FONT_SIZE = 14;
|
||||
@@ -21,14 +20,16 @@ export function SegmentInput<T>({
|
||||
Component,
|
||||
className,
|
||||
placeholder,
|
||||
inputPlaceholder,
|
||||
disabled,
|
||||
autofocus = false,
|
||||
onExpandedChange,
|
||||
...rest
|
||||
}: React.PropsWithChildren<SegmentInputProps<T>>) {
|
||||
const ref = useRef<HTMLInputElement>(null);
|
||||
const [value, setValue] = useState<number | string>(initialValue);
|
||||
const [inputWidth, setInputWidth] = useState<number>(measureText((initialValue || '').toString(), FONT_SIZE).width);
|
||||
const [Label, , expanded, setExpanded] = useExpandableLabel(autofocus);
|
||||
const [Label, , expanded, setExpanded] = useExpandableLabel(autofocus, onExpandedChange);
|
||||
const styles = useStyles(getSegmentStyles);
|
||||
|
||||
useClickAway(ref, () => {
|
||||
@@ -71,6 +72,7 @@ export function SegmentInput<T>({
|
||||
autoFocus
|
||||
className={cx(`gf-form gf-form-input`, inputWidthStyle)}
|
||||
value={value}
|
||||
placeholder={inputPlaceholder}
|
||||
onChange={(item) => {
|
||||
const { width } = measureText(item.target.value, FONT_SIZE);
|
||||
setInputWidth(width);
|
||||
|
||||
@@ -15,17 +15,25 @@ export interface Props<T> extends Omit<HTMLProps<HTMLDivElement>, 'value' | 'onC
|
||||
width: number;
|
||||
noOptionsMessage?: string;
|
||||
allowCustomValue?: boolean;
|
||||
/**
|
||||
* If true, empty value will be passed to onChange callback otherwise using empty value
|
||||
* will work as canceling and using the previous value
|
||||
*/
|
||||
allowEmptyValue?: boolean;
|
||||
placeholder?: string;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export function SegmentSelect<T>({
|
||||
value,
|
||||
placeholder = '',
|
||||
options = [],
|
||||
onChange,
|
||||
onClickOutside,
|
||||
width: widthPixels,
|
||||
noOptionsMessage = '',
|
||||
allowCustomValue = false,
|
||||
allowEmptyValue = false,
|
||||
...rest
|
||||
}: React.PropsWithChildren<Props<T>>) {
|
||||
const ref = useRef<HTMLDivElement>(null);
|
||||
@@ -38,7 +46,7 @@ export function SegmentSelect<T>({
|
||||
<Select
|
||||
width={width}
|
||||
noOptionsMessage={noOptionsMessage}
|
||||
placeholder=""
|
||||
placeholder={placeholder}
|
||||
autoFocus={true}
|
||||
isOpen={true}
|
||||
onChange={onChange}
|
||||
@@ -53,7 +61,7 @@ export function SegmentSelect<T>({
|
||||
// https://github.com/JedWatson/react-select/issues/188#issuecomment-279240292
|
||||
// Unfortunately there's no other way of retrieving the value (not yet) created new option
|
||||
const input = ref.current.querySelector('input[id^="react-select-"]') as HTMLInputElement;
if (input && input.value) {
if (input && (input.value || allowEmptyValue)) {
onChange({ value: input.value as any, label: input.value });
} else {
onClickOutside();

@@ -6,5 +6,8 @@ export interface SegmentProps<T> {
allowCustomValue?: boolean;
placeholder?: string;
disabled?: boolean;
inputMinWidth?: number;
onExpandedChange?: (expanded: boolean) => void;
autofocus?: boolean;
allowEmptyValue?: boolean;
inputPlaceholder?: string;
}

@@ -7,12 +7,20 @@ interface LabelProps {
}

export const useExpandableLabel = (
initialExpanded: boolean
initialExpanded: boolean,
onExpandedChange?: (expanded: boolean) => void
): [React.ComponentType<LabelProps>, number, boolean, (expanded: boolean) => void] => {
const ref = useRef<HTMLDivElement>(null);
const [expanded, setExpanded] = useState<boolean>(initialExpanded);
const [width, setWidth] = useState(0);

const setExpandedWrapper = (expanded: boolean) => {
setExpanded(expanded);
if (onExpandedChange) {
onExpandedChange(expanded);
}
};

const Label: React.FC<LabelProps> = ({ Component, onClick, disabled }) => (
<div
ref={ref}
@@ -20,7 +28,7 @@ export const useExpandableLabel = (
disabled
? undefined
: () => {
setExpanded(true);
setExpandedWrapper(true);
if (ref && ref.current) {
setWidth(ref.current.clientWidth * 1.25);
}
@@ -34,5 +42,5 @@ export const useExpandableLabel = (
</div>
);

return [Label, width, expanded, setExpanded];
return [Label, width, expanded, setExpandedWrapper];
};

@@ -1,8 +1,9 @@
import React, { PureComponent } from 'react';
import { AlignedData } from 'uplot';
import { AlignedData, Range } from 'uplot';
import {
compareDataFrameStructures,
DataFrame,
Field,
FieldConfig,
FieldSparkline,
FieldType,
@@ -21,6 +22,7 @@ import { UPlotChart } from '../uPlot/Plot';
import { Themeable2 } from '../../types';
import { preparePlotData } from '../uPlot/utils';
import { preparePlotFrame } from './utils';
import { isEqual } from 'lodash';

export interface SparklineProps extends Themeable2 {
width: number;
@@ -46,10 +48,9 @@ export class Sparkline extends PureComponent<SparklineProps, State> {
super(props);

const alignedDataFrame = preparePlotFrame(props.sparkline, props.config);
const data = preparePlotData(alignedDataFrame);

this.state = {
data,
data: preparePlotData(alignedDataFrame),
alignedDataFrame,
configBuilder: this.prepareConfig(alignedDataFrame),
};
@@ -79,8 +80,8 @@ export class Sparkline extends PureComponent<SparklineProps, State> {

if (prevProps.sparkline !== this.props.sparkline) {
rebuildConfig = !compareDataFrameStructures(this.state.alignedDataFrame, prevState.alignedDataFrame);
} else if (prevProps.config !== this.props.config) {
rebuildConfig = true;
} else {
rebuildConfig = !isEqual(prevProps.config, this.props.config);
}

if (rebuildConfig) {
@@ -88,12 +89,21 @@ export class Sparkline extends PureComponent<SparklineProps, State> {
}
}

getYRange(field: Field) {
let { min, max } = this.state.alignedDataFrame.fields[1].state?.range!;

return [
Math.max(min!, field.config.min ?? -Infinity),
Math.min(max!, field.config.max ?? Infinity),
] as Range.MinMax;
}

prepareConfig(data: DataFrame) {
const { theme } = this.props;
const builder = new UPlotConfigBuilder();

builder.setCursor({
show: true,
show: false,
x: false, // no crosshairs
y: false,
});
@@ -141,9 +151,7 @@ export class Sparkline extends PureComponent<SparklineProps, State> {
scaleKey,
orientation: ScaleOrientation.Vertical,
direction: ScaleDirection.Up,
min: field.config.min,
max: field.config.max,
getDataMinMax: () => field.state?.range,
range: () => this.getYRange(field),
});

builder.addAxis({
@@ -157,6 +165,7 @@ export class Sparkline extends PureComponent<SparklineProps, State> {
const pointsMode = customConfig.drawStyle === DrawStyle.Points ? PointVisibility.Always : customConfig.showPoints;

builder.addSeries({
pxAlign: false,
scaleKey,
theme,
drawStyle: customConfig.drawStyle!,

@@ -24,7 +24,7 @@ import {
ScaleDirection,
ScaleOrientation,
} from '../uPlot/config';
import { collectStackingGroups } from '../uPlot/utils';
import { collectStackingGroups, preparePlotData } from '../uPlot/utils';
import uPlot from 'uplot';

const defaultFormatter = (v: any) => (v == null ? '-' : v.toFixed(1));
@@ -46,6 +46,8 @@ export const preparePlotConfigBuilder: UPlotConfigPrepFn<{ sync: DashboardCursor
}) => {
const builder = new UPlotConfigBuilder(timeZone);

builder.setPrepData(preparePlotData);

// X is the first field in the aligned frame
const xField = frame.fields[0];
if (!xField) {
@@ -262,6 +264,58 @@ export const preparePlotConfigBuilder: UPlotConfigPrepFn<{ sync: DashboardCursor

builder.scaleKeys = [xScaleKey, yScaleKey];

// if hovered value is null, how far we may scan left/right to hover nearest non-null
const hoverProximityPx = 15;

let cursor: Partial<uPlot.Cursor> = {
// this scans left and right from cursor position to find nearest data index with value != null
// TODO: do we want to only scan past undefined values, but halt at explicit null values?
dataIdx: (self, seriesIdx, hoveredIdx, cursorXVal) => {
let seriesData = self.data[seriesIdx];

if (seriesData[hoveredIdx] == null) {
let nonNullLft = hoveredIdx,
nonNullRgt = hoveredIdx,
i;

i = hoveredIdx;
while (nonNullLft === hoveredIdx && i-- > 0) {
if (seriesData[i] != null) {
nonNullLft = i;
}
}

i = hoveredIdx;
while (nonNullRgt === hoveredIdx && i++ < seriesData.length) {
if (seriesData[i] != null) {
nonNullRgt = i;
}
}

let xVals = self.data[0];

let curPos = self.valToPos(cursorXVal, 'x');
let rgtPos = self.valToPos(xVals[nonNullRgt], 'x');
let lftPos = self.valToPos(xVals[nonNullLft], 'x');

let lftDelta = curPos - lftPos;
let rgtDelta = rgtPos - curPos;

if (lftDelta <= rgtDelta) {
if (lftDelta <= hoverProximityPx) {
hoveredIdx = nonNullLft;
}
} else {
if (rgtDelta <= hoverProximityPx) {
hoveredIdx = nonNullRgt;
}
}
}

return hoveredIdx;
},
};

if (sync !== DashboardCursorSync.Off) {
const payload: DataHoverPayload = {
point: {
@@ -271,34 +325,34 @@ export const preparePlotConfigBuilder: UPlotConfigPrepFn<{ sync: DashboardCursor
data: frame,
};
const hoverEvent = new DataHoverEvent(payload);
builder.setSync();
builder.setCursor({
sync: {
key: '__global_',
filters: {
pub: (type: string, src: uPlot, x: number, y: number, w: number, h: number, dataIdx: number) => {
payload.columnIndex = dataIdx;
if (x < 0 && y < 0) {
payload.point[xScaleUnit] = null;
payload.point[yScaleKey] = null;
eventBus.publish(new DataHoverClearEvent(payload));
} else {
// convert the points
payload.point[xScaleUnit] = src.posToVal(x, xScaleKey);
payload.point[yScaleKey] = src.posToVal(y, yScaleKey);
eventBus.publish(hoverEvent);
hoverEvent.payload.down = undefined;
}
return true;
},
cursor.sync = {
key: '__global_',
filters: {
pub: (type: string, src: uPlot, x: number, y: number, w: number, h: number, dataIdx: number) => {
payload.rowIndex = dataIdx;
if (x < 0 && y < 0) {
payload.point[xScaleUnit] = null;
payload.point[yScaleKey] = null;
eventBus.publish(new DataHoverClearEvent(payload));
} else {
// convert the points
payload.point[xScaleUnit] = src.posToVal(x, xScaleKey);
payload.point[yScaleKey] = src.posToVal(y, yScaleKey);
eventBus.publish(hoverEvent);
hoverEvent.payload.down = undefined;
}
return true;
},
// ??? setSeries: syncMode === DashboardCursorSync.Tooltip,
scales: builder.scaleKeys,
match: [() => true, () => true],
},
});
// ??? setSeries: syncMode === DashboardCursorSync.Tooltip,
scales: builder.scaleKeys,
match: [() => true, () => true],
};
}

builder.setSync();
builder.setCursor(cursor);

return builder;
};
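The dataIdx override above scans left and right from the hovered index for the nearest non-null sample and only snaps to it when that sample lies within hoverProximityPx pixels of the cursor. Below is a minimal standalone sketch of the same scan, written in Go to keep all added examples in one language; pixel positions are passed in directly here, which is an assumption (uPlot derives them via valToPos).

```go
package main

import "fmt"

// nearestNonNull mirrors the dataIdx logic above: if the hovered sample is nil,
// scan left and right for the closest non-nil index and snap to it only when
// its pixel distance from the cursor is within proximityPx.
func nearestNonNull(series []*float64, xPos []float64, hovered int, cursorX, proximityPx float64) int {
	if series[hovered] != nil {
		return hovered
	}
	lft, rgt := hovered, hovered
	for i := hovered - 1; i >= 0 && lft == hovered; i-- {
		if series[i] != nil {
			lft = i
		}
	}
	for i := hovered + 1; i < len(series) && rgt == hovered; i++ {
		if series[i] != nil {
			rgt = i
		}
	}
	lftDelta, rgtDelta := cursorX-xPos[lft], xPos[rgt]-cursorX
	if lftDelta <= rgtDelta && lft != hovered && lftDelta <= proximityPx {
		return lft
	}
	if rgtDelta < lftDelta && rgt != hovered && rgtDelta <= proximityPx {
		return rgt
	}
	return hovered
}

func main() {
	v := 3.0
	series := []*float64{&v, nil, nil, &v}
	xPos := []float64{0, 10, 20, 30}
	fmt.Println(nearestNonNull(series, xPos, 1, 12, 15)) // snaps to index 0
}
```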
@@ -201,6 +201,7 @@ export { Checkbox } from './Forms/Checkbox';

export { TextArea } from './TextArea/TextArea';
export { FileUpload } from './FileUpload/FileUpload';
export * from './FileDropzone';
export { TimeRangeInput } from './DateTimePickers/TimeRangeInput';
export { RelativeTimeRangePicker } from './DateTimePickers/RelativeTimeRangePicker/RelativeTimeRangePicker';
export { Card, Props as CardProps, getCardStyles } from './Card/Card';

@@ -285,6 +285,7 @@ export const graphFieldOptions = {
stacking: [
{ label: 'Off', value: StackingMode.None },
{ label: 'Normal', value: StackingMode.Normal },
{ label: '100%', value: StackingMode.Percent },
] as Array<SelectableValue<StackingMode>>,

thresholdsDisplayModes: [

@@ -1,4 +1,4 @@
import uPlot, { Cursor, Band, Hooks, Select } from 'uplot';
import uPlot, { Cursor, Band, Hooks, Select, AlignedData } from 'uplot';
import { merge } from 'lodash';
import {
DataFrame,
@@ -35,6 +35,8 @@ const cursorDefaults: Cursor = {
},
};

type PrepData = (frame: DataFrame) => AlignedData;

export class UPlotConfigBuilder {
private series: UPlotSeriesBuilder[] = [];
private axes: Record<string, UPlotAxisBuilder> = {};
@@ -56,6 +58,8 @@ export class UPlotConfigBuilder {
*/
tooltipInterpolator: PlotTooltipInterpolator | undefined = undefined;

prepData: PrepData | undefined = undefined;

constructor(timeZone: TimeZone = DefaultTimeZone) {
this.tz = getTimeZoneInfo(timeZone, Date.now())?.ianaName;
}
@@ -153,6 +157,10 @@ export class UPlotConfigBuilder {
this.tooltipInterpolator = interpolator;
}

setPrepData(prepData: PrepData) {
this.prepData = prepData;
}

setSync() {
this.sync = true;
}

@@ -2,7 +2,7 @@ import uPlot, { Scale, Range } from 'uplot';
import { PlotConfigBuilder } from '../types';
import { ScaleOrientation, ScaleDirection } from '../config';
import { ScaleDistribution } from '../models.gen';
import { isBooleanUnit, NumericRange } from '@grafana/data';
import { isBooleanUnit } from '@grafana/data';

export interface ScaleProps {
scaleKey: string;
@@ -16,7 +16,6 @@ export interface ScaleProps {
orientation: ScaleOrientation;
direction: ScaleDirection;
log?: number;
getDataMinMax?: () => NumericRange | undefined;
}

export class UPlotScaleBuilder extends PlotConfigBuilder<ScaleProps, Scale> {
@@ -63,15 +62,6 @@ export class UPlotScaleBuilder extends PlotConfigBuilder<ScaleProps, Scale> {

// uPlot range function
const rangeFn = (u: uPlot, dataMin: number, dataMax: number, scaleKey: string) => {
let { getDataMinMax } = this.props;

// cumulative data min/max across multiple charts, usually via VizRepeater
if (getDataMinMax) {
let dataRange = getDataMinMax()!;
dataMin = dataRange.min!;
dataMax = dataRange.max!;
}

const scale = u.scales[scaleKey];

let minMax: uPlot.Range.MinMax = [dataMin, dataMax];

@@ -43,6 +43,7 @@ export const TooltipPlugin: React.FC<TooltipPluginProps> = ({
const plotCtx = usePlotContext();
const [focusedSeriesIdx, setFocusedSeriesIdx] = useState<number | null>(null);
const [focusedPointIdx, setFocusedPointIdx] = useState<number | null>(null);
const [focusedPointIdxs, setFocusedPointIdxs] = useState<Array<number | null>>([]);
const [coords, setCoords] = useState<CartesianCoords2D | null>(null);
const plotInstance = plotCtx.plot;

@@ -93,10 +94,13 @@ export const TooltipPlugin: React.FC<TooltipPluginProps> = ({
})(u);
});
} else {
config.addHook('setLegend', (u) => {
setFocusedPointIdx(u.cursor.idx!);
setFocusedPointIdxs(u.cursor.idxs!.slice());
});

// default series/datapoint idx retrieval
config.addHook('setCursor', (u) => {
setFocusedPointIdx(u.cursor.idx === undefined ? u.posToIdx(u.cursor.left || 0) : u.cursor.idx);

const bbox = plotCtx.getCanvasBoundingBox();
if (!bbox) {
return;
@@ -174,7 +178,7 @@ export const TooltipPlugin: React.FC<TooltipPluginProps> = ({
continue;
}

const display = field.display!(otherProps.data.fields[i].values.get(focusedPointIdx));
const display = field.display!(otherProps.data.fields[i].values.get(focusedPointIdxs[i]!));

series.push({
color: display.color || FALLBACK_COLOR,

@@ -34,7 +34,12 @@ export const DEFAULT_PLOT_CONFIG: Partial<Options> = {
};

/** @internal */
export function preparePlotData(frame: DataFrame): AlignedData {
interface StackMeta {
totals: AlignedData;
}

/** @internal */
export function preparePlotData(frame: DataFrame, onStackMeta?: (meta: StackMeta) => void): AlignedData {
const result: any[] = [];
const stackingGroups: Map<string, number[]> = new Map();
let seriesIndex = 0;
@@ -64,21 +69,48 @@ export function preparePlotData(frame: DataFrame): AlignedData {

// Stacking
if (stackingGroups.size !== 0) {
const byPct = frame.fields[1].config.custom?.stacking?.mode === StackingMode.Percent;
const dataLength = result[0].length;
const alignedTotals = Array(stackingGroups.size);
alignedTotals[0] = null;

// array of stacking groups
for (const [_, seriesIdxs] of stackingGroups.entries()) {
const acc = Array(result[0].length).fill(0);
const groupTotals = byPct ? Array(dataLength).fill(0) : null;

if (byPct) {
for (let j = 0; j < seriesIdxs.length; j++) {
const currentlyStacking = result[seriesIdxs[j]];

for (let k = 0; k < dataLength; k++) {
const v = currentlyStacking[k];
groupTotals![k] += v == null ? 0 : +v;
}
}
}

const acc = Array(dataLength).fill(0);

for (let j = 0; j < seriesIdxs.length; j++) {
const currentlyStacking = result[seriesIdxs[j]];
let seriesIdx = seriesIdxs[j];

for (let k = 0; k < result[0].length; k++) {
alignedTotals[seriesIdx] = groupTotals;

const currentlyStacking = result[seriesIdx];

for (let k = 0; k < dataLength; k++) {
const v = currentlyStacking[k];
acc[k] += v == null ? 0 : +v;
acc[k] += v == null ? 0 : v / (byPct ? groupTotals![k] : 1);
}

result[seriesIdxs[j]] = acc.slice();
result[seriesIdx] = acc.slice();
}
}

onStackMeta &&
onStackMeta({
totals: alignedTotals as AlignedData,
});
}

return result as AlignedData;
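In the percent branch added above, each stacking group first sums per-index totals and then every series adds value / total to the running accumulator, so the stacked values come out as fractions of 1. The sketch below shows just that arithmetic with a hypothetical stackPercent helper; it assumes there are no null samples (the real code treats nulls as 0 when summing).

```go
package main

import "fmt"

// stackPercent stacks the series of one group as fractions of the per-index total,
// mirroring the byPct branch above.
func stackPercent(series [][]float64) [][]float64 {
	n := len(series[0])
	totals := make([]float64, n)
	for _, s := range series {
		for k, v := range s {
			totals[k] += v
		}
	}
	acc := make([]float64, n)
	out := make([][]float64, len(series))
	for i, s := range series {
		for k, v := range s {
			acc[k] += v / totals[k]
		}
		row := make([]float64, n)
		copy(row, acc)
		out[i] = row
	}
	return out
}

func main() {
	// Two series stacked to 100%: totals are {4, 4}, so the stacked rows
	// become [0.25 0.5] and [1 1].
	fmt.Println(stackPercent([][]float64{{1, 2}, {3, 2}}))
}
```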
packages/grafana-ui/src/utils/file.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
/**
* Shortens the file name to 16 characters, keeping the extension
* @param fileName
*/
export function trimFileName(fileName: string): string {
const nameLength = 16;
const delimiter = fileName.lastIndexOf('.');
const extension = fileName.substring(delimiter);
const file = fileName.substring(0, delimiter);

if (file.length < nameLength) {
return fileName;
}

return `${file.substring(0, nameLength)}...${extension}`;
}
@@ -1,6 +1,6 @@
{
"name": "@jaegertracing/jaeger-ui-components",
"version": "8.1.0-pre",
"version": "8.1.0-beta.2",
"main": "src/index.ts",
"types": "src/index.ts",
"license": "Apache-2.0",
@@ -16,8 +16,8 @@
"dependencies": {
"@emotion/css": "11.1.3",
"@emotion/react": "11.1.5",
"@grafana/data": "8.1.0-pre",
"@grafana/ui": "8.1.0-pre",
"@grafana/data": "8.1.0-beta.2",
"@grafana/ui": "8.1.0-beta.2",
"@types/classnames": "^2.2.7",
"@types/deep-freeze": "^0.1.1",
"@types/hoist-non-react-statics": "^3.3.1",

@@ -96,7 +96,7 @@ func (hs *HTTPServer) DeleteFolder(c *models.ReqContext) response.Response { //
return ToFolderErrorResponse(err)
}

f, err := s.DeleteFolder(c.Params(":uid"))
f, err := s.DeleteFolder(c.Params(":uid"), c.QueryBool("forceDeleteRules"))
if err != nil {
return ToFolderErrorResponse(err)
}
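The handler now forwards a forceDeleteRules query parameter, read with c.QueryBool, into the folder service and from there into ForceDeleteFolderRules on the delete command. A rough sketch of calling the endpoint with the flag set follows; the host, credentials, and folder UID are placeholders, and the URL shape follows the integration test near the end of this diff.

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// DELETE /api/folders/:uid?forceDeleteRules=true — the query flag is what
	// ends up in ForceDeleteFolderRules on DeleteDashboardCommand.
	url := "http://admin:admin@localhost:3000/api/folders/my-folder-uid?forceDeleteRules=true"
	req, err := http.NewRequest(http.MethodDelete, url, nil)
	if err != nil {
		panic(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	// Without the flag, a folder containing Grafana 8 alert rules returns 400
	// with "folder contains alert rules"; with it, the folder and rules are deleted.
	fmt.Println(resp.Status)
}
```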
@@ -149,7 +149,8 @@ func ToFolderErrorResponse(err error) response.Response {
if errors.Is(err, models.ErrFolderTitleEmpty) ||
errors.Is(err, models.ErrDashboardTypeMismatch) ||
errors.Is(err, models.ErrDashboardInvalidUid) ||
errors.Is(err, models.ErrDashboardUidTooLong) {
errors.Is(err, models.ErrDashboardUidTooLong) ||
errors.Is(err, models.ErrFolderContainsAlertRules) {
return response.Error(400, err.Error(), nil)
}

@@ -238,7 +238,7 @@ func (s *fakeFolderService) UpdateFolder(existingUID string, cmd *models.UpdateF
return s.UpdateFolderError
}

func (s *fakeFolderService) DeleteFolder(uid string) (*models.Folder, error) {
func (s *fakeFolderService) DeleteFolder(uid string, forceDeleteRules bool) (*models.Folder, error) {
s.DeletedFolderUids = append(s.DeletedFolderUids, uid)
return s.DeleteFolderResult, s.DeleteFolderError
}

pkg/infra/httpclient/httpclientprovider/azure_middleware.go (new file, 113 lines)
@@ -0,0 +1,113 @@
package httpclientprovider

import (
"fmt"
"net/http"
"net/url"
"path"

"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/aztokenprovider"
)

const azureMiddlewareName = "AzureAuthentication.Provider"

func AzureMiddleware(cfg *setting.Cfg) httpclient.Middleware {
return httpclient.NamedMiddlewareFunc(azureMiddlewareName, func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
if enabled, err := isAzureAuthenticationEnabled(opts.CustomOptions); err != nil {
return errorResponse(err)
} else if !enabled {
return next
}

credentials, err := getAzureCredentials(opts.CustomOptions)
if err != nil {
return errorResponse(err)
} else if credentials == nil {
credentials = getDefaultAzureCredentials(cfg)
}

tokenProvider, err := aztokenprovider.NewAzureAccessTokenProvider(cfg, credentials)
if err != nil {
return errorResponse(err)
}

scopes, err := getAzureEndpointScopes(opts.CustomOptions)
if err != nil {
return errorResponse(err)
}

return aztokenprovider.ApplyAuth(tokenProvider, scopes, next)
})
}

func errorResponse(err error) http.RoundTripper {
return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
return nil, fmt.Errorf("invalid Azure configuration: %s", err)
})
}

func isAzureAuthenticationEnabled(customOptions map[string]interface{}) (bool, error) {
if untypedValue, ok := customOptions["_azureAuth"]; !ok {
return false, nil
} else if value, ok := untypedValue.(bool); !ok {
err := fmt.Errorf("the field 'azureAuth' should be a bool")
return false, err
} else {
return value, nil
}
}

func getAzureCredentials(customOptions map[string]interface{}) (azcredentials.AzureCredentials, error) {
if untypedValue, ok := customOptions["_azureCredentials"]; !ok {
return nil, nil
} else if value, ok := untypedValue.(azcredentials.AzureCredentials); !ok {
err := fmt.Errorf("the field 'azureCredentials' should be a valid credentials object")
return nil, err
} else {
return value, nil
}
}

func getDefaultAzureCredentials(cfg *setting.Cfg) azcredentials.AzureCredentials {
if cfg.Azure.ManagedIdentityEnabled {
return &azcredentials.AzureManagedIdentityCredentials{}
} else {
return &azcredentials.AzureClientSecretCredentials{
AzureCloud: cfg.Azure.Cloud,
}
}
}

func getAzureEndpointResourceId(customOptions map[string]interface{}) (*url.URL, error) {
var value string
if untypedValue, ok := customOptions["azureEndpointResourceId"]; !ok {
err := fmt.Errorf("the field 'azureEndpointResourceId' should be set")
return nil, err
} else if value, ok = untypedValue.(string); !ok {
err := fmt.Errorf("the field 'azureEndpointResourceId' should be a string")
return nil, err
}

resourceId, err := url.Parse(value)
if err != nil || resourceId.Scheme == "" || resourceId.Host == "" {
err := fmt.Errorf("invalid endpoint Resource ID URL '%s'", value)
return nil, err
}

return resourceId, nil
}

func getAzureEndpointScopes(customOptions map[string]interface{}) ([]string, error) {
resourceId, err := getAzureEndpointResourceId(customOptions)
if err != nil {
return nil, err
}

resourceId.Path = path.Join(resourceId.Path, ".default")
scopes := []string{resourceId.String()}

return scopes, nil
}
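getAzureEndpointScopes derives the OAuth scope by appending ".default" to the path of the configured azureEndpointResourceId URL. Here is a standalone sketch of just that derivation, using the same net/url and path calls as above; the resource ID value is only an example.

```go
package main

import (
	"fmt"
	"net/url"
	"path"
)

func main() {
	// e.g. a datasource configured with azureEndpointResourceId set to this URL
	resourceId, err := url.Parse("https://api.example.com/abd5c4ce-ca73-41e9-9cb2-bed39aa2adb5")
	if err != nil || resourceId.Scheme == "" || resourceId.Host == "" {
		panic("invalid endpoint Resource ID URL")
	}
	resourceId.Path = path.Join(resourceId.Path, ".default")
	fmt.Println([]string{resourceId.String()})
	// [https://api.example.com/abd5c4ce-ca73-41e9-9cb2-bed39aa2adb5/.default]
}
```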
@@ -34,6 +34,10 @@ func New(cfg *setting.Cfg) httpclient.Provider {

setDefaultTimeoutOptions(cfg)

if cfg.FeatureToggles["httpclientprovider_azure_auth"] {
middlewares = append(middlewares, AzureMiddleware(cfg))
}

return newProviderFunc(sdkhttpclient.ProviderOptions{
Middlewares: middlewares,
ConfigureTransport: func(opts sdkhttpclient.Options, transport *http.Transport) {

@@ -7,6 +7,7 @@ import (
"net"
"net/http"
"path/filepath"
"strconv"
"testing"
"time"

@@ -349,6 +350,8 @@ func TestMiddlewareContext(t *testing.T) {
t.Run("auth_proxy", func(t *testing.T) {
const userID int64 = 33
const orgID int64 = 4
const defaultOrgId int64 = 1
const orgRole = "Admin"

configure := func(cfg *setting.Cfg) {
cfg.AuthProxyEnabled = true
@@ -356,7 +359,7 @@ func TestMiddlewareContext(t *testing.T) {
cfg.LDAPEnabled = true
cfg.AuthProxyHeaderName = "X-WEBAUTH-USER"
cfg.AuthProxyHeaderProperty = "username"
cfg.AuthProxyHeaders = map[string]string{"Groups": "X-WEBAUTH-GROUPS"}
cfg.AuthProxyHeaders = map[string]string{"Groups": "X-WEBAUTH-GROUPS", "Role": "X-WEBAUTH-ROLE"}
}

const hdrName = "markelog"
@@ -432,6 +435,71 @@ func TestMiddlewareContext(t *testing.T) {
cfg.AuthProxyAutoSignUp = true
})

middlewareScenario(t, "Should assign role from header to default org", func(t *testing.T, sc *scenarioContext) {
var storedRoleInfo map[int64]models.RoleType = nil
bus.AddHandlerCtx("test", func(ctx context.Context, query *models.GetSignedInUserQuery) error {
if query.UserId > 0 {
query.Result = &models.SignedInUser{OrgId: defaultOrgId, UserId: userID, OrgRole: storedRoleInfo[defaultOrgId]}
return nil
}
return models.ErrUserNotFound
})

bus.AddHandler("test", func(cmd *models.UpsertUserCommand) error {
cmd.Result = &models.User{Id: userID}
storedRoleInfo = cmd.ExternalUser.OrgRoles
return nil
})

sc.fakeReq("GET", "/")
sc.req.Header.Set(sc.cfg.AuthProxyHeaderName, hdrName)
sc.req.Header.Set("X-WEBAUTH-ROLE", orgRole)
sc.exec()

assert.True(t, sc.context.IsSignedIn)
assert.Equal(t, userID, sc.context.UserId)
assert.Equal(t, defaultOrgId, sc.context.OrgId)
assert.Equal(t, orgRole, string(sc.context.OrgRole))
}, func(cfg *setting.Cfg) {
configure(cfg)
cfg.LDAPEnabled = false
cfg.AuthProxyAutoSignUp = true
})

middlewareScenario(t, "Should NOT assign role from header to non-default org", func(t *testing.T, sc *scenarioContext) {
var storedRoleInfo map[int64]models.RoleType = nil
bus.AddHandlerCtx("test", func(ctx context.Context, query *models.GetSignedInUserQuery) error {
if query.UserId > 0 {
query.Result = &models.SignedInUser{OrgId: orgID, UserId: userID, OrgRole: storedRoleInfo[orgID]}
return nil
}
return models.ErrUserNotFound
})

bus.AddHandler("test", func(cmd *models.UpsertUserCommand) error {
cmd.Result = &models.User{Id: userID}
storedRoleInfo = cmd.ExternalUser.OrgRoles
return nil
})

sc.fakeReq("GET", "/")
sc.req.Header.Set(sc.cfg.AuthProxyHeaderName, hdrName)
sc.req.Header.Set("X-WEBAUTH-ROLE", "Admin")
sc.req.Header.Set("X-Grafana-Org-Id", strconv.FormatInt(orgID, 10))
sc.exec()

assert.True(t, sc.context.IsSignedIn)
assert.Equal(t, userID, sc.context.UserId)
assert.Equal(t, orgID, sc.context.OrgId)

// For non-default org, the user role should be empty
assert.Equal(t, "", string(sc.context.OrgRole))
}, func(cfg *setting.Cfg) {
configure(cfg)
cfg.LDAPEnabled = false
cfg.AuthProxyAutoSignUp = true
})

middlewareScenario(t, "Should get an existing user from header", func(t *testing.T, sc *scenarioContext) {
const userID int64 = 12
const orgID int64 = 2

@@ -363,8 +363,9 @@ type DashboardProvisioning struct {
}

type DeleteDashboardCommand struct {
Id int64
OrgId int64
Id int64
OrgId int64
ForceDeleteFolderRules bool
}

type DeleteOrphanedProvisionedDashboardsCommand struct {

@@ -11,6 +11,7 @@ import (
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
)

func (ds *DataSource) getTimeout() time.Duration {
@@ -66,10 +67,14 @@ func (ds *DataSource) GetHTTPTransport(provider httpclient.Provider, customMiddl
return t.roundTripper, nil
}

opts := ds.HTTPClientOptions()
opts, err := ds.HTTPClientOptions()
if err != nil {
return nil, err
}

opts.Middlewares = customMiddlewares

rt, err := provider.GetTransport(opts)
rt, err := provider.GetTransport(*opts)
if err != nil {
return nil, err
}
@@ -82,7 +87,7 @@ func (ds *DataSource) GetHTTPTransport(provider httpclient.Provider, customMiddl
return rt, nil
}

func (ds *DataSource) HTTPClientOptions() sdkhttpclient.Options {
func (ds *DataSource) HTTPClientOptions() (*sdkhttpclient.Options, error) {
tlsOptions := ds.TLSOptions()
timeouts := &sdkhttpclient.TimeoutOptions{
Timeout: ds.getTimeout(),
@@ -95,7 +100,7 @@ func (ds *DataSource) HTTPClientOptions() sdkhttpclient.Options {
MaxIdleConnsPerHost: sdkhttpclient.DefaultTimeoutOptions.MaxIdleConnsPerHost,
IdleConnTimeout: sdkhttpclient.DefaultTimeoutOptions.IdleConnTimeout,
}
opts := sdkhttpclient.Options{
opts := &sdkhttpclient.Options{
Timeouts: timeouts,
Headers: getCustomHeaders(ds.JsonData, ds.DecryptedValues()),
Labels: map[string]string{
@@ -121,6 +126,19 @@ func (ds *DataSource) HTTPClientOptions() sdkhttpclient.Options {
}
}

if ds.JsonData != nil && ds.JsonData.Get("azureAuth").MustBool() {
credentials, err := azcredentials.FromDatasourceData(ds.JsonData.MustMap(), ds.DecryptedValues())
if err != nil {
err = fmt.Errorf("invalid Azure credentials: %s", err)
return nil, err
}

opts.CustomOptions["_azureAuth"] = true
if credentials != nil {
opts.CustomOptions["_azureCredentials"] = credentials
}
}

if ds.JsonData != nil && ds.JsonData.Get("sigV4Auth").MustBool(false) {
opts.SigV4 = &sdkhttpclient.SigV4Config{
Service: awsServiceNamespace(ds.Type),
@@ -140,7 +158,7 @@ func (ds *DataSource) HTTPClientOptions() sdkhttpclient.Options {
}
}

return opts
return opts, nil
}

func (ds *DataSource) TLSOptions() sdkhttpclient.TLSOptions {
@@ -180,7 +198,11 @@ func (ds *DataSource) TLSOptions() sdkhttpclient.TLSOptions {
}

func (ds *DataSource) GetTLSConfig(httpClientProvider httpclient.Provider) (*tls.Config, error) {
return httpClientProvider.GetTLSConfig(ds.HTTPClientOptions())
opts, err := ds.HTTPClientOptions()
if err != nil {
return nil, err
}
return httpClientProvider.GetTLSConfig(*opts)
}

// getCustomHeaders returns a map with all the headers to be set
@@ -13,6 +13,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
"github.com/grafana/grafana/pkg/util"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -394,6 +395,109 @@ func TestDataSource_DecryptedValue(t *testing.T) {
})
}

func TestDataSource_HTTPClientOptions(t *testing.T) {
emptyJsonData := simplejson.New()
emptySecureJsonData := map[string][]byte{}

ds := DataSource{
Id: 1,
Url: "https://api.example.com",
Type: "prometheus",
}

t.Run("Azure authentication", func(t *testing.T) {
t.Run("should be disabled if not enabled in JsonData", func(t *testing.T) {
t.Cleanup(func() { ds.JsonData = emptyJsonData; ds.SecureJsonData = emptySecureJsonData })

opts, err := ds.HTTPClientOptions()
require.NoError(t, err)

assert.NotEqual(t, true, opts.CustomOptions["_azureAuth"])
assert.NotContains(t, opts.CustomOptions, "_azureCredentials")
})

t.Run("should be enabled if enabled in JsonData without credentials configured", func(t *testing.T) {
t.Cleanup(func() { ds.JsonData = emptyJsonData; ds.SecureJsonData = emptySecureJsonData })

ds.JsonData = simplejson.NewFromAny(map[string]interface{}{
"azureAuth": true,
})

opts, err := ds.HTTPClientOptions()
require.NoError(t, err)

assert.Equal(t, true, opts.CustomOptions["_azureAuth"])
assert.NotContains(t, opts.CustomOptions, "_azureCredentials")
})

t.Run("should be enabled if enabled in JsonData with credentials configured", func(t *testing.T) {
t.Cleanup(func() { ds.JsonData = emptyJsonData; ds.SecureJsonData = emptySecureJsonData })

ds.JsonData = simplejson.NewFromAny(map[string]interface{}{
"azureAuth": true,
"azureCredentials": map[string]interface{}{
"authType": "msi",
},
})

opts, err := ds.HTTPClientOptions()
require.NoError(t, err)

assert.Equal(t, true, opts.CustomOptions["_azureAuth"])

require.Contains(t, opts.CustomOptions, "_azureCredentials")
credentials := opts.CustomOptions["_azureCredentials"]

assert.IsType(t, &azcredentials.AzureManagedIdentityCredentials{}, credentials)
})

t.Run("should be disabled if disabled in JsonData even with credentials configured", func(t *testing.T) {
t.Cleanup(func() { ds.JsonData = emptyJsonData; ds.SecureJsonData = emptySecureJsonData })

ds.JsonData = simplejson.NewFromAny(map[string]interface{}{
"azureAuth": false,
"azureCredentials": map[string]interface{}{
"authType": "msi",
},
})

opts, err := ds.HTTPClientOptions()
require.NoError(t, err)

assert.NotEqual(t, true, opts.CustomOptions["_azureAuth"])
assert.NotContains(t, opts.CustomOptions, "_azureCredentials")
})

t.Run("should fail if credentials are invalid", func(t *testing.T) {
t.Cleanup(func() { ds.JsonData = emptyJsonData; ds.SecureJsonData = emptySecureJsonData })

ds.JsonData = simplejson.NewFromAny(map[string]interface{}{
"azureAuth": true,
"azureCredentials": "invalid",
})

_, err := ds.HTTPClientOptions()
assert.Error(t, err)
})

t.Run("should pass resourceId from JsonData", func(t *testing.T) {
t.Cleanup(func() { ds.JsonData = emptyJsonData; ds.SecureJsonData = emptySecureJsonData })

ds.JsonData = simplejson.NewFromAny(map[string]interface{}{
"azureEndpointResourceId": "https://api.example.com/abd5c4ce-ca73-41e9-9cb2-bed39aa2adb5",
})

opts, err := ds.HTTPClientOptions()
require.NoError(t, err)

require.Contains(t, opts.CustomOptions, "azureEndpointResourceId")
azureEndpointResourceId := opts.CustomOptions["azureEndpointResourceId"]

assert.Equal(t, "https://api.example.com/abd5c4ce-ca73-41e9-9cb2-bed39aa2adb5", azureEndpointResourceId)
})
})
}

func clearDSProxyCache(t *testing.T) {
t.Helper()

@@ -15,6 +15,7 @@ var (
ErrFolderSameNameExists = errors.New("a folder or dashboard in the general folder with the same name already exists")
ErrFolderFailedGenerateUniqueUid = errors.New("failed to generate unique folder ID")
ErrFolderAccessDenied = errors.New("access denied to folder")
ErrFolderContainsAlertRules = errors.New("folder contains alert rules")
)

type Folder struct {

@@ -45,7 +45,7 @@ var isLDAPEnabled = func(cfg *setting.Cfg) bool {
var newLDAP = multildap.New

// supportedHeaders states the supported headers configuration fields
var supportedHeaderFields = []string{"Name", "Email", "Login", "Groups"}
var supportedHeaderFields = []string{"Name", "Email", "Login", "Groups", "Role"}

// AuthProxy struct
type AuthProxy struct {
@@ -152,7 +152,7 @@ func HashCacheKey(key string) (string, error) {

// getKey forms a key for the cache based on the headers received as part of the authentication flow.
// Our configuration supports multiple headers. The main header contains the email or username.
// And the additional ones that allow us to specify extra attributes: Name, Email or Groups.
// And the additional ones that allow us to specify extra attributes: Name, Email, Role, or Groups.
func (auth *AuthProxy) getKey() (string, error) {
key := strings.TrimSpace(auth.header) // start the key with the main header

@@ -278,9 +278,23 @@ func (auth *AuthProxy) LoginViaHeader() (int64, error) {
}

auth.headersIterator(func(field string, header string) {
if field == "Groups" {
switch field {
case "Groups":
extUser.Groups = util.SplitString(header)
} else {
case "Role":
// If Role header is specified, we update the user role of the default org
if header != "" {
rt := models.RoleType(header)
if rt.IsValid() {
extUser.OrgRoles = map[int64]models.RoleType{}
orgID := int64(1)
if setting.AutoAssignOrg && setting.AutoAssignOrgId > 0 {
orgID = int64(setting.AutoAssignOrgId)
}
extUser.OrgRoles[orgID] = rt
}
}
default:
reflect.ValueOf(extUser).Elem().FieldByName(field).SetString(header)
}
})
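The new Role case assigns the header value as the user's role on the default org (org 1, or auto_assign_org_id when auto-assignment is configured), and only when the value parses as a valid role. Below is a self-contained sketch of that mapping with a local RoleType stand-in; the real code uses models.RoleType and the setting package.

```go
package main

import "fmt"

// RoleType is a stand-in for models.RoleType.
type RoleType string

func (r RoleType) IsValid() bool {
	return r == "Viewer" || r == "Editor" || r == "Admin"
}

// roleFromHeader mirrors the "Role" branch above: a valid header value becomes
// the role of the default org; anything else leaves the map empty.
func roleFromHeader(header string, autoAssignOrg bool, autoAssignOrgId int64) map[int64]RoleType {
	orgRoles := map[int64]RoleType{}
	if header == "" {
		return orgRoles
	}
	rt := RoleType(header)
	if !rt.IsValid() {
		return orgRoles
	}
	orgID := int64(1)
	if autoAssignOrg && autoAssignOrgId > 0 {
		orgID = autoAssignOrgId
	}
	orgRoles[orgID] = rt
	return orgRoles
}

func main() {
	fmt.Println(roleFromHeader("Admin", false, 0))     // map[1:Admin]
	fmt.Println(roleFromHeader("Superuser", false, 0)) // map[] (invalid role is ignored)
}
```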

@@ -107,8 +107,9 @@ func TestMiddlewareContext(t *testing.T) {
t.Run("When the cache key contains additional headers", func(t *testing.T) {
const id int64 = 33
const group = "grafana-core-team"
const role = "Admin"

h, err := HashCacheKey(hdrName + "-" + group)
h, err := HashCacheKey(hdrName + "-" + group + "-" + role)
require.NoError(t, err)
key := fmt.Sprintf(CachePrefix, h)
err = cache.Set(key, id, 0)
@@ -116,9 +117,10 @@ func TestMiddlewareContext(t *testing.T) {

auth := prepareMiddleware(t, cache, func(req *http.Request, cfg *setting.Cfg) {
req.Header.Set("X-WEBAUTH-GROUPS", group)
cfg.AuthProxyHeaders = map[string]string{"Groups": "X-WEBAUTH-GROUPS"}
req.Header.Set("X-WEBAUTH-ROLE", role)
cfg.AuthProxyHeaders = map[string]string{"Groups": "X-WEBAUTH-GROUPS", "Role": "X-WEBAUTH-ROLE"}
})
assert.Equal(t, "auth-proxy-sync-ttl:14f69b7023baa0ac98c96b31cec07bc0", key)
assert.Equal(t, "auth-proxy-sync-ttl:f5acfffd56daac98d502ef8c8b8c5d56", key)

gotID, err := auth.Login(logger, false)
require.NoError(t, err)

@@ -19,7 +19,7 @@ type FolderService interface {
GetFolderByTitle(title string) (*models.Folder, error)
CreateFolder(title, uid string) (*models.Folder, error)
UpdateFolder(uid string, cmd *models.UpdateFolderCommand) error
DeleteFolder(uid string) (*models.Folder, error)
DeleteFolder(uid string, forceDeleteRules bool) (*models.Folder, error)
MakeUserAdmin(orgID int64, userID, folderID int64, setViewAndEditPermissions bool) error
}

@@ -192,7 +192,7 @@ func (dr *dashboardServiceImpl) UpdateFolder(existingUid string, cmd *models.Upd
return nil
}

func (dr *dashboardServiceImpl) DeleteFolder(uid string) (*models.Folder, error) {
func (dr *dashboardServiceImpl) DeleteFolder(uid string, forceDeleteRules bool) (*models.Folder, error) {
query := models.GetDashboardQuery{OrgId: dr.orgId, Uid: uid}
dashFolder, err := getFolder(query)
if err != nil {
@@ -207,7 +207,7 @@ func (dr *dashboardServiceImpl) DeleteFolder(uid string) (*models.Folder, error)
return nil, models.ErrFolderAccessDenied
}

deleteCmd := models.DeleteDashboardCommand{OrgId: dr.orgId, Id: dashFolder.Id}
deleteCmd := models.DeleteDashboardCommand{OrgId: dr.orgId, Id: dashFolder.Id, ForceDeleteFolderRules: forceDeleteRules}
if err := bus.Dispatch(&deleteCmd); err != nil {
return nil, toFolderError(err)
}

@@ -67,7 +67,7 @@ func TestFolderService(t *testing.T) {
})

t.Run("When deleting folder by uid should return access denied error", func(t *testing.T) {
_, err := service.DeleteFolder("uid")
_, err := service.DeleteFolder("uid", false)
require.Error(t, err)
require.Equal(t, err, models.ErrFolderAccessDenied)
})
@@ -121,7 +121,7 @@ func TestFolderService(t *testing.T) {
})

t.Run("When deleting folder by uid should not return access denied error", func(t *testing.T) {
_, err := service.DeleteFolder("uid")
_, err := service.DeleteFolder("uid", false)
require.NoError(t, err)
})

@@ -661,7 +661,12 @@ func (am *Alertmanager) createReceiverStage(name string, integrations []notify.I
}

func waitFunc() time.Duration {
return setting.AlertingNotificationTimeout
// When it's a single instance, we don't need additional wait. The routing policies will have their own group wait.
// We need >0 wait here in case we have peers to sync the notification state with. 0 wait in that case can result
// in duplicate notifications being sent.
// TODO: we have setting.AlertingNotificationTimeout in legacy settings. Either use that or separate set of config
// for clustering with intuitive name, like "PeerTimeout".
return 0
}

func timeoutFunc(d time.Duration) time.Duration {

@@ -39,7 +39,7 @@ func (c *cache) getOrCreate(alertRule *ngModels.AlertRule, result eval.Result) *
// clone the labels so we don't change eval.Result
labels := result.Instance.Copy()
attachRuleLabels(labels, alertRule)
ruleLabels, annotations := c.expandRuleLabelsAndAnnotations(alertRule, labels, result.Values)
ruleLabels, annotations := c.expandRuleLabelsAndAnnotations(alertRule, labels, result)

// if duplicate labels exist, alertRule label will take precedence
lbs := mergeLabels(ruleLabels, result.Instance)
@@ -88,11 +88,11 @@ func attachRuleLabels(m map[string]string, alertRule *ngModels.AlertRule) {
m[prometheusModel.AlertNameLabel] = alertRule.Title
}

func (c *cache) expandRuleLabelsAndAnnotations(alertRule *ngModels.AlertRule, labels map[string]string, values map[string]eval.NumberValueCapture) (map[string]string, map[string]string) {
func (c *cache) expandRuleLabelsAndAnnotations(alertRule *ngModels.AlertRule, labels map[string]string, alertInstance eval.Result) (map[string]string, map[string]string) {
expand := func(original map[string]string) map[string]string {
expanded := make(map[string]string, len(original))
for k, v := range original {
ev, err := expandTemplate(alertRule.Title, v, labels, values)
ev, err := expandTemplate(alertRule.Title, v, labels, alertInstance)
expanded[k] = ev
if err != nil {
c.log.Error("error in expanding template", "name", k, "value", v, "err", err.Error())
@@ -122,9 +122,9 @@ func (v templateCaptureValue) String() string {
return "null"
}

func expandTemplate(name, text string, labels map[string]string, values map[string]eval.NumberValueCapture) (result string, resultErr error) {
func expandTemplate(name, text string, labels map[string]string, alertInstance eval.Result) (result string, resultErr error) {
name = "__alert_" + name
text = "{{- $labels := .Labels -}}{{- $values := .Values -}}" + text
text = "{{- $labels := .Labels -}}{{- $values := .Values -}}{{- $value := .Value -}}" + text
// It'd better to have no alert description than to kill the whole process
// if there's a bug in the template.
defer func() {
@@ -145,11 +145,12 @@ func expandTemplate(name, text string, labels map[string]string, values map[stri
if err := tmpl.Execute(&buffer, struct {
Labels map[string]string
Values map[string]templateCaptureValue
Value string
}{
Labels: labels,
Values: func() map[string]templateCaptureValue {
m := make(map[string]templateCaptureValue)
for k, v := range values {
for k, v := range alertInstance.Values {
m[k] = templateCaptureValue{
Labels: v.Labels,
Value: v.Value,
@@ -157,6 +158,7 @@ func expandTemplate(name, text string, labels map[string]str
}
return m
}(),
Value: alertInstance.EvaluationString,
}); err != nil {
return "", fmt.Errorf("error executing template %v: %s", name, err.Error())
}
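After this change, annotation and label templates are executed with $labels, $values, and now $value (the rule's evaluation string) bound up front. Below is a minimal text/template sketch of the same binding; templateCaptureValue is simplified to a float here and error handling is trimmed.

```go
package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	// Same prelude the alerting code prepends so $labels/$values/$value resolve.
	text := "{{- $labels := .Labels -}}{{- $values := .Values -}}{{- $value := .Value -}}" +
		"{{ $labels.instance }} is down: {{ $value }}"

	tmpl := template.Must(template.New("__alert_example").Parse(text))

	var buf bytes.Buffer
	_ = tmpl.Execute(&buf, struct {
		Labels map[string]string
		Values map[string]float64 // simplified stand-in for templateCaptureValue
		Value  string
	}{
		Labels: map[string]string{"instance": "foo"},
		Values: map[string]float64{"A": 10},
		Value:  "[ var='A' labels={instance=foo} value=10 ]",
	})
	fmt.Println(buf.String()) // foo is down: [ var='A' labels={instance=foo} value=10 ]
}
```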
@@ -261,3 +263,9 @@ func mergeLabels(a, b data.Labels) data.Labels {
}
return newLbs
}

func (c *cache) deleteEntry(orgID int64, alertRuleUID, cacheID string) {
c.mtxStates.Lock()
defer c.mtxStates.Unlock()
delete(c.states[orgID][alertRuleUID], cacheID)
}

@@ -1,9 +1,13 @@
package state

import (
"errors"
"testing"

"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/services/ngalert/eval"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
ptr "github.com/xorcare/pointer"
)

@@ -32,3 +36,65 @@ func TestTemplateCaptureValueStringer(t *testing.T) {
})
}
}

func TestExpandTemplate(t *testing.T) {
cases := []struct {
name string
text string
alertInstance eval.Result
labels data.Labels
expected string
expectedError error
}{{
name: "instance labels are expanded into $labels",
text: "{{ $labels.instance }} is down",
labels: data.Labels{"instance": "foo"},
expected: "foo is down",
}, {
name: "missing instance label returns error",
text: "{{ $labels.instance }} is down",
labels: data.Labels{},
expectedError: errors.New("error executing template __alert_test: template: __alert_test:1:86: executing \"__alert_test\" at <$labels.instance>: map has no entry for key \"instance\""),
}, {
name: "values are expanded into $values",
text: "{{ $values.A.Labels.instance }} has value {{ $values.A }}",
alertInstance: eval.Result{
Values: map[string]eval.NumberValueCapture{
"A": {
Var: "A",
Labels: data.Labels{"instance": "foo"},
Value: ptr.Float64(10),
},
},
},
expected: "foo has value 10",
}, {
name: "missing label in $values returns error",
text: "{{ $values.A.Labels.instance }} has value {{ $values.A }}",
alertInstance: eval.Result{
Values: map[string]eval.NumberValueCapture{
"A": {
Var: "A",
Labels: data.Labels{},
Value: ptr.Float64(10),
},
},
},
expectedError: errors.New("error executing template __alert_test: template: __alert_test:1:86: executing \"__alert_test\" at <$values.A.Labels.instance>: map has no entry for key \"instance\""),
}, {
name: "value string is expanded into $value",
text: "{{ $value }}",
alertInstance: eval.Result{
EvaluationString: "[ var='A' labels={instance=foo} value=10 ]",
},
expected: "[ var='A' labels={instance=foo} value=10 ]",
}}

for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
v, err := expandTemplate("test", c.text, c.labels, c.alertInstance)
require.Equal(t, c.expectedError, err)
require.Equal(t, c.expected, v)
})
}
}

@@ -137,11 +137,13 @@ func (st *Manager) RemoveByRuleUID(orgID int64, ruleUID string) {
func (st *Manager) ProcessEvalResults(alertRule *ngModels.AlertRule, results eval.Results) []*State {
st.log.Debug("state manager processing evaluation results", "uid", alertRule.UID, "resultCount", len(results))
var states []*State
processedResults := make(map[string]*State, len(results))
for _, result := range results {
s := st.setNextState(alertRule, result)
states = append(states, s)
processedResults[s.CacheId] = s
}
st.log.Debug("returning changed states to scheduler", "count", len(states))
st.staleResultsHandler(alertRule, processedResults)
return states
}

@@ -265,3 +267,27 @@ func (st *Manager) createAlertAnnotation(new eval.State, alertRule *ngModels.Ale
return
}
}

func (st *Manager) staleResultsHandler(alertRule *ngModels.AlertRule, states map[string]*State) {
allStates := st.GetStatesForRuleUID(alertRule.OrgID, alertRule.UID)
for _, s := range allStates {
_, ok := states[s.CacheId]
if !ok && isItStale(s.LastEvaluationTime, alertRule.IntervalSeconds) {
st.log.Debug("removing stale state entry", "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID, "cacheID", s.CacheId)
st.cache.deleteEntry(s.OrgID, s.AlertRuleUID, s.CacheId)
ilbs := ngModels.InstanceLabels(s.Labels)
_, labelsHash, err := ilbs.StringAndHash()
if err != nil {
st.log.Error("unable to get labelsHash", "error", err.Error(), "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID)
}

if err = st.instanceStore.DeleteAlertInstance(s.OrgID, s.AlertRuleUID, labelsHash); err != nil {
st.log.Error("unable to delete stale instance from database", "error", err.Error(), "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID, "cacheID", s.CacheId)
}
}
}
}

func isItStale(lastEval time.Time, intervalSeconds int64) bool {
return lastEval.Add(2 * time.Duration(intervalSeconds) * time.Second).Before(time.Now())
}
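A cached state entry is treated as stale when it did not appear in the latest evaluation and its last evaluation is more than two full rule intervals in the past. Here is a sketch of that check with concrete numbers, assuming a 600-second interval.

```go
package main

import (
	"fmt"
	"time"
)

// isItStale is the same two-interval cutoff used by staleResultsHandler above.
func isItStale(lastEval time.Time, intervalSeconds int64) bool {
	return lastEval.Add(2 * time.Duration(intervalSeconds) * time.Second).Before(time.Now())
}

func main() {
	interval := int64(600) // rule evaluated every 10 minutes
	fmt.Println(isItStale(time.Now().Add(-15*time.Minute), interval)) // false: within 2 intervals
	fmt.Println(isItStale(time.Now().Add(-25*time.Minute), interval)) // true: older than 20 minutes
}
```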

@@ -4,6 +4,9 @@ import (
"testing"
"time"

"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/services/ngalert/tests"

"github.com/stretchr/testify/require"

"github.com/grafana/grafana/pkg/services/ngalert/metrics"
@@ -864,3 +867,107 @@ func TestProcessEvalResults(t *testing.T) {
})
}
}

func TestStaleResultsHandler(t *testing.T) {
evaluationTime, err := time.Parse("2006-01-02", "2021-03-25")
if err != nil {
t.Fatalf("error parsing date format: %s", err.Error())
}

dbstore := tests.SetupTestEnv(t, 1)

rule := tests.CreateTestAlertRule(t, dbstore, 600)

saveCmd1 := &models.SaveAlertInstanceCommand{
RuleOrgID: rule.OrgID,
RuleUID: rule.UID,
Labels: models.InstanceLabels{"test1": "testValue1"},
State: models.InstanceStateNormal,
LastEvalTime: evaluationTime,
CurrentStateSince: evaluationTime.Add(-1 * time.Minute),
CurrentStateEnd: evaluationTime.Add(1 * time.Minute),
}

_ = dbstore.SaveAlertInstance(saveCmd1)

saveCmd2 := &models.SaveAlertInstanceCommand{
RuleOrgID: rule.OrgID,
RuleUID: rule.UID,
Labels: models.InstanceLabels{"test2": "testValue2"},
State: models.InstanceStateFiring,
LastEvalTime: evaluationTime,
CurrentStateSince: evaluationTime.Add(-1 * time.Minute),
CurrentStateEnd: evaluationTime.Add(1 * time.Minute),
}
_ = dbstore.SaveAlertInstance(saveCmd2)

t.Cleanup(registry.ClearOverrides)

testCases := []struct {
desc string
evalResults []eval.Results
expectedStates map[string]*state.State
startingStateCount int
finalStateCount int
}{
{
desc: "stale cache entries are removed",
evalResults: []eval.Results{
{
eval.Result{
Instance: data.Labels{"test1": "testValue1"},
State: eval.Normal,
EvaluatedAt: evaluationTime.Add(3 * time.Minute),
},
},
},
expectedStates: map[string]*state.State{
`[["__alert_rule_namespace_uid__","namespace"],["__alert_rule_uid__","` + rule.UID + `"],["alertname","` + rule.Title + `"],["test1","testValue1"]]`: {
AlertRuleUID: rule.UID,
OrgID: 1,
CacheId: `[["__alert_rule_namespace_uid__","namespace"],["__alert_rule_uid__","` + rule.UID + `"],["alertname","` + rule.Title + `"],["test1","testValue1"]]`,
Labels: data.Labels{
"__alert_rule_namespace_uid__": "namespace",
"__alert_rule_uid__": rule.UID,
"alertname": rule.Title,
"test1": "testValue1",
},
State: eval.Normal,
Results: []state.Evaluation{
{
EvaluationTime: evaluationTime.Add(3 * time.Minute),
EvaluationState: eval.Normal,
Values: make(map[string]state.EvaluationValue),
},
},
LastEvaluationTime: evaluationTime.Add(3 * time.Minute),
EvaluationDuration: 0,
Annotations: map[string]string{"testAnnoKey": "testAnnoValue"},
},
},
startingStateCount: 2,
finalStateCount: 1,
},
}

for _, tc := range testCases {
st := state.NewManager(log.New("test_stale_results_handler"), nilMetrics, dbstore, dbstore)
st.Warm()
existingStatesForRule := st.GetStatesForRuleUID(rule.OrgID, rule.UID)

// We have loaded the expected number of entries from the db
assert.Equal(t, tc.startingStateCount, len(existingStatesForRule))
for _, res := range tc.evalResults {
st.ProcessEvalResults(rule, res)
for _, s := range tc.expectedStates {
cachedState, err := st.Get(s.OrgID, s.AlertRuleUID, s.CacheId)
require.NoError(t, err)
assert.Equal(t, s, cachedState)
}
}
existingStatesForRule = st.GetStatesForRuleUID(rule.OrgID, rule.UID)

// The expected number of state entries remains after results are processed
assert.Equal(t, tc.finalStateCount, len(existingStatesForRule))
}
}

@@ -14,6 +14,7 @@ type InstanceStore interface {
ListAlertInstances(cmd *models.ListAlertInstancesQuery) error
SaveAlertInstance(cmd *models.SaveAlertInstanceCommand) error
FetchOrgIds() ([]int64, error)
DeleteAlertInstance(orgID int64, ruleUID, labelsHash string) error
}

// GetAlertInstance is a handler for retrieving an alert instance based on OrgId, AlertDefinitionID, and
|
||||
@@ -142,3 +143,13 @@ func (st DBstore) FetchOrgIds() ([]int64, error) {
|
||||
|
||||
return orgIds, err
|
||||
}
|
||||
|
||||
func (st DBstore) DeleteAlertInstance(orgID int64, ruleUID, labelsHash string) error {
|
||||
return st.SQLStore.WithTransactionalDbSession(context.Background(), func(sess *sqlstore.DBSession) error {
|
||||
_, err := sess.Exec("DELETE FROM alert_instance WHERE rule_org_id = ? AND rule_uid = ? AND labels_hash = ?", orgID, ruleUID, labelsHash)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
@@ -476,16 +476,27 @@ func deleteDashboard(cmd *models.DeleteDashboardCommand, sess *DBSession) error
|
||||
}
|
||||
}
|
||||
|
||||
// clean ngalert tables
|
||||
ngalertDeletes := []string{
|
||||
"DELETE FROM alert_rule WHERE namespace_uid = (SELECT uid FROM dashboard WHERE id = ?)",
|
||||
"DELETE FROM alert_rule_version WHERE rule_namespace_uid = (SELECT uid FROM dashboard WHERE id = ?)",
|
||||
var existingRuleID int64
|
||||
exists, err := sess.Table("alert_rule").Where("namespace_uid = (SELECT uid FROM dashboard WHERE id = ?)", dashboard.Id).Cols("id").Get(&existingRuleID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if exists {
|
||||
if !cmd.ForceDeleteFolderRules {
|
||||
return fmt.Errorf("folder cannot be deleted: %w", models.ErrFolderContainsAlertRules)
|
||||
}
|
||||
|
||||
for _, sql := range ngalertDeletes {
|
||||
_, err := sess.Exec(sql, dashboard.Id)
|
||||
if err != nil {
|
||||
return err
|
||||
// Delete all rules under this folder.
|
||||
deleteNGAlertsByFolder := []string{
|
||||
"DELETE FROM alert_rule WHERE namespace_uid = (SELECT uid FROM dashboard WHERE id = ?)",
|
||||
"DELETE FROM alert_rule_version WHERE rule_namespace_uid = (SELECT uid FROM dashboard WHERE id = ?)",
|
||||
}
|
||||
|
||||
for _, sql := range deleteNGAlertsByFolder {
|
||||
_, err := sess.Exec(sql, dashboard.Id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,8 @@ package sqlstore

import (
    "context"
    "encoding/json"
    "errors"
    "fmt"
    "testing"
    "time"
@@ -29,6 +31,7 @@ func TestDashboardDataAccess(t *testing.T) {
        savedDash := insertTestDashboard(t, sqlStore, "test dash 23", 1, savedFolder.Id, false, "prod", "webapp")
        insertTestDashboard(t, sqlStore, "test dash 45", 1, savedFolder.Id, false, "prod")
        savedDash2 := insertTestDashboard(t, sqlStore, "test dash 67", 1, 0, false, "prod")
        insertTestRule(t, sqlStore, savedFolder.OrgId, savedFolder.Uid)

        Convey("Should return dashboard model", func() {
            So(savedDash.Title, ShouldEqual, "test dash 23")
@@ -204,8 +207,14 @@ func TestDashboardDataAccess(t *testing.T) {
            So(err, ShouldBeNil)
        })

        Convey("Should be able to delete a dashboard folder and its children", func() {
            deleteCmd := &models.DeleteDashboardCommand{Id: savedFolder.Id}
        Convey("Should not be able to delete a dashboard if force delete rules is disabled", func() {
            deleteCmd := &models.DeleteDashboardCommand{Id: savedFolder.Id, ForceDeleteFolderRules: false}
            err := DeleteDashboard(deleteCmd)
            So(errors.Is(err, models.ErrFolderContainsAlertRules), ShouldBeTrue)
        })

        Convey("Should be able to delete a dashboard folder and its children if force delete rules is enabled", func() {
            deleteCmd := &models.DeleteDashboardCommand{Id: savedFolder.Id, ForceDeleteFolderRules: true}
            err := DeleteDashboard(deleteCmd)
            So(err, ShouldBeNil)

@@ -219,6 +228,20 @@ func TestDashboardDataAccess(t *testing.T) {
                So(err, ShouldBeNil)

                So(len(query.Result), ShouldEqual, 0)

                sqlStore.WithDbSession(context.Background(), func(sess *DBSession) error {
                    var existingRuleID int64
                    exists, err := sess.Table("alert_rule").Where("namespace_uid = (SELECT uid FROM dashboard WHERE id = ?)", savedFolder.Id).Cols("id").Get(&existingRuleID)
                    require.NoError(t, err)
                    So(exists, ShouldBeFalse)

                    var existingRuleVersionID int64
                    exists, err = sess.Table("alert_rule_version").Where("rule_namespace_uid = (SELECT uid FROM dashboard WHERE id = ?)", savedFolder.Id).Cols("id").Get(&existingRuleVersionID)
                    require.NoError(t, err)
                    So(exists, ShouldBeFalse)

                    return nil
                })
            })

        Convey("Should return error if no dashboard is found for update when dashboard id is greater than zero", func() {
@@ -460,6 +483,84 @@ func insertTestDashboard(t *testing.T, sqlStore *SQLStore, title string, orgId i
    return dash
}

func insertTestRule(t *testing.T, sqlStore *SQLStore, folderOrgID int64, folderUID string) {
    sqlStore.WithDbSession(context.Background(), func(sess *DBSession) error {

        type alertQuery struct {
            RefID         string
            DatasourceUID string
            Model         json.RawMessage
        }

        type alertRule struct {
            ID           int64 `xorm:"pk autoincr 'id'"`
            OrgID        int64 `xorm:"org_id"`
            Title        string
            Updated      time.Time
            UID          string `xorm:"uid"`
            NamespaceUID string `xorm:"namespace_uid"`
            RuleGroup    string
            Condition    string
            Data         []alertQuery
        }

        rule := alertRule{
            OrgID:        folderOrgID,
            NamespaceUID: folderUID,
            UID:          "rule",
            RuleGroup:    "rulegroup",
            Updated:      time.Now(),
            Condition:    "A",
            Data: []alertQuery{
                {
                    RefID:         "A",
                    DatasourceUID: "-100",
                    Model: json.RawMessage(`{
                        "type": "math",
                        "expression": "2 + 3 > 1"
                        }`),
                },
            },
        }
        _, err := sess.Insert(&rule)
        require.NoError(t, err)

        type alertRuleVersion struct {
            ID               int64  `xorm:"pk autoincr 'id'"`
            RuleOrgID        int64  `xorm:"rule_org_id"`
            RuleUID          string `xorm:"rule_uid"`
            RuleNamespaceUID string `xorm:"rule_namespace_uid"`
            RuleGroup        string
            ParentVersion    int64
            RestoredFrom     int64
            Version          int64
            Created          time.Time
            Title            string
            Condition        string
            Data             []alertQuery
            IntervalSeconds  int64
        }

        ruleVersion := alertRuleVersion{
            RuleOrgID:        rule.OrgID,
            RuleUID:          rule.UID,
            RuleNamespaceUID: rule.NamespaceUID,
            RuleGroup:        rule.RuleGroup,
            Created:          rule.Updated,
            Condition:        rule.Condition,
            Data:             rule.Data,
            ParentVersion:    0,
            RestoredFrom:     0,
            Version:          1,
            IntervalSeconds:  60,
        }
        _, err = sess.Insert(&ruleVersion)
        require.NoError(t, err)

        return err
    })
}

func insertTestDashboardForPlugin(t *testing.T, sqlStore *SQLStore, title string, orgId int64,
    folderId int64, isFolder bool, pluginId string) *models.Dashboard {
    t.Helper()

@@ -54,6 +54,16 @@ func TestLoadingSettings(t *testing.T) {
            }
        })

        Convey("sample.ini should load successfully", func() {
            customInitPath := CustomInitPath
            CustomInitPath = "conf/sample.ini"
            cfg := NewCfg()
            err := cfg.Load(&CommandLineArgs{HomePath: "../../"})
            So(err, ShouldBeNil)
            // Restore CustomInitPath to avoid side effects.
            CustomInitPath = customInitPath
        })

        Convey("Should be able to override via environment variables", func() {
            err := os.Setenv("GF_SECURITY_ADMIN_USER", "superduper")
            require.NoError(t, err)

@@ -748,7 +748,7 @@ func TestDeleteFolderWithRules(t *testing.T) {
        assert.JSONEq(t, expectedGetRulesResponseBody, string(b))
    }

    // Next, the editor can delete the folder.
    // Next, the editor cannot delete the folder because it contains Grafana 8 alerts.
    {
        u := fmt.Sprintf("http://editor:editor@%s/api/folders/%s", grafanaListedAddr, namespaceUID)
        req, err := http.NewRequest(http.MethodDelete, u, nil)
@@ -762,6 +762,24 @@ func TestDeleteFolderWithRules(t *testing.T) {
        })
        b, err := ioutil.ReadAll(resp.Body)
        require.NoError(t, err)
        require.Equal(t, http.StatusBadRequest, resp.StatusCode)
        require.JSONEq(t, `{"message":"folder cannot be deleted: folder contains alert rules"}`, string(b))
    }

    // Next, the editor can delete the folder if forceDeleteRules is true.
    {
        u := fmt.Sprintf("http://editor:editor@%s/api/folders/%s?forceDeleteRules=true", grafanaListedAddr, namespaceUID)
        req, err := http.NewRequest(http.MethodDelete, u, nil)
        require.NoError(t, err)
        client := &http.Client{}
        resp, err := client.Do(req)
        require.NoError(t, err)
        t.Cleanup(func() {
            err := resp.Body.Close()
            require.NoError(t, err)
        })
        b, err := ioutil.ReadAll(resp.Body)
        require.NoError(t, err)
        require.Equal(t, 200, resp.StatusCode)
        require.JSONEq(t, `{"id":1,"message":"Folder default deleted","title":"default"}`, string(b))
    }

pkg/tsdb/azuremonitor/azcredentials/builder.go (new file, 83 lines)
@@ -0,0 +1,83 @@
package azcredentials

import (
    "fmt"
)

func FromDatasourceData(data map[string]interface{}, secureData map[string]string) (AzureCredentials, error) {
    if credentialsObj, err := getMapOptional(data, "azureCredentials"); err != nil {
        return nil, err
    } else if credentialsObj == nil {
        return nil, nil
    } else {
        return getFromCredentialsObject(credentialsObj, secureData)
    }
}

func getFromCredentialsObject(credentialsObj map[string]interface{}, secureData map[string]string) (AzureCredentials, error) {
    authType, err := getStringValue(credentialsObj, "authType")
    if err != nil {
        return nil, err
    }

    switch authType {
    case AzureAuthManagedIdentity:
        credentials := &AzureManagedIdentityCredentials{}
        return credentials, nil

    case AzureAuthClientSecret:
        cloud, err := getStringValue(credentialsObj, "azureCloud")
        if err != nil {
            return nil, err
        }
        tenantId, err := getStringValue(credentialsObj, "tenantId")
        if err != nil {
            return nil, err
        }
        clientId, err := getStringValue(credentialsObj, "clientId")
        if err != nil {
            return nil, err
        }
        clientSecret := secureData["azureClientSecret"]

        credentials := &AzureClientSecretCredentials{
            AzureCloud:   cloud,
            TenantId:     tenantId,
            ClientId:     clientId,
            ClientSecret: clientSecret,
        }
        return credentials, nil

    default:
        err := fmt.Errorf("the authentication type '%s' is not supported", authType)
        return nil, err
    }
}

func getMapOptional(obj map[string]interface{}, key string) (map[string]interface{}, error) {
    if untypedValue, ok := obj[key]; ok {
        if value, ok := untypedValue.(map[string]interface{}); ok {
            return value, nil
        } else {
            err := fmt.Errorf("the field '%s' should be an object", key)
            return nil, err
        }
    } else {
        // The value is optional, so a missing key is not an error.
        return nil, nil
    }
}

func getStringValue(obj map[string]interface{}, key string) (string, error) {
    if untypedValue, ok := obj[key]; ok {
        if value, ok := untypedValue.(string); ok {
            return value, nil
        } else {
            err := fmt.Errorf("the field '%s' should be a string", key)
            return "", err
        }
    } else {
        err := fmt.Errorf("the field '%s' should be set", key)
        return "", err
    }
}
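A rough usage sketch for the new builder. Assumptions beyond the diff: the import path follows from the file path shown above and the github.com/grafana/grafana module path, the AzureAuth* constants convert cleanly to string, and the placeholder values ("<azure-cloud>", "<tenant-id>", and so on) are illustrative only; FromDatasourceData, the key names, and the credential types all come from builder.go itself.

    package example

    import (
        "fmt"
        "log"

        "github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
    )

    func example() {
        // jsonData side of a datasource: an optional "azureCredentials" object
        // with an "authType" discriminator, exactly the keys the builder reads.
        data := map[string]interface{}{
            "azureCredentials": map[string]interface{}{
                "authType":   string(azcredentials.AzureAuthClientSecret),
                "azureCloud": "<azure-cloud>",
                "tenantId":   "<tenant-id>",
                "clientId":   "<client-id>",
            },
        }
        // secureJsonData side: the client secret is looked up by this key.
        secureData := map[string]string{
            "azureClientSecret": "<client-secret>",
        }

        creds, err := azcredentials.FromDatasourceData(data, secureData)
        if err != nil {
            log.Fatal(err)
        }

        switch c := creds.(type) {
        case *azcredentials.AzureClientSecretCredentials:
            fmt.Println("client secret credentials for tenant", c.TenantId)
        case *azcredentials.AzureManagedIdentityCredentials:
            fmt.Println("managed identity credentials")
        case nil:
            fmt.Println("no azureCredentials object present")
        }
    }
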
@@ -11,13 +11,17 @@ const authenticationMiddlewareName = "AzureAuthentication"

func AuthMiddleware(tokenProvider AzureTokenProvider, scopes []string) httpclient.Middleware {
    return httpclient.NamedMiddlewareFunc(authenticationMiddlewareName, func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
        return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
            token, err := tokenProvider.GetAccessToken(req.Context(), scopes)
            if err != nil {
                return nil, fmt.Errorf("failed to retrieve Azure access token: %w", err)
            }
            req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
            return next.RoundTrip(req)
        })
        return ApplyAuth(tokenProvider, scopes, next)
    })
}

func ApplyAuth(tokenProvider AzureTokenProvider, scopes []string, next http.RoundTripper) http.RoundTripper {
    return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
        token, err := tokenProvider.GetAccessToken(req.Context(), scopes)
        if err != nil {
            return nil, fmt.Errorf("failed to retrieve Azure access token: %w", err)
        }
        req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
        return next.RoundTrip(req)
    })
}

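A minimal sketch of what this refactor enables: ApplyAuth can decorate any http.RoundTripper directly, not only the httpclient middleware chain used by AuthMiddleware. It is written as if it lived next to ApplyAuth; the package clause is assumed (the hunk does not show it), and staticTokenProvider is a hypothetical stub that only assumes the GetAccessToken(ctx, scopes) (string, error) shape implied by the calls above.

    // Assumed package name; not shown in the hunk above.
    package azhttpclient

    import (
        "context"
        "net/http"
    )

    // staticTokenProvider is a hypothetical stub satisfying the usage of
    // AzureTokenProvider seen in ApplyAuth: it just returns a fixed token.
    type staticTokenProvider struct {
        token string
    }

    func (p staticTokenProvider) GetAccessToken(ctx context.Context, scopes []string) (string, error) {
        return p.token, nil
    }

    // newAuthedClient wraps a plain transport with the bearer-token round
    // tripper, bypassing the middleware chain entirely.
    func newAuthedClient(scopes []string) *http.Client {
        rt := ApplyAuth(staticTokenProvider{token: "dummy-token"}, scopes, http.DefaultTransport)
        return &http.Client{Transport: rt}
    }
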
@@ -90,6 +90,7 @@ var metricsMap = map[string][]string{
"AWS/Glue": {"glue.driver.BlockManager.disk.diskSpaceUsed_MB", "glue.driver.ExecutorAllocationManager.executors.numberAllExecutors", "glue.driver.ExecutorAllocationManager.executors.numberMaxNeededExecutors", "glue.driver.aggregate.bytesRead", "glue.driver.aggregate.elapsedTime", "glue.driver.aggregate.numCompletedStages", "glue.driver.aggregate.numCompletedTasks", "glue.driver.aggregate.numFailedTasks", "glue.driver.aggregate.numKilledTasks", "glue.driver.aggregate.recordsRead", "glue.driver.aggregate.shuffleBytesWritten", "glue.driver.aggregate.shuffleLocalBytesRead", "glue.driver.jvm.heap.usage glue.executorId.jvm.heap.usage glue.ALL.jvm.heap.usage", "glue.driver.jvm.heap.used glue.executorId.jvm.heap.used glue.ALL.jvm.heap.used", "glue.driver.s3.filesystem.read_bytes glue.executorId.s3.filesystem.read_bytes glue.ALL.s3.filesystem.read_bytes", "glue.driver.s3.filesystem.write_bytes glue.executorId.s3.filesystem.write_bytes glue.ALL.s3.filesystem.write_bytes", "glue.driver.system.cpuSystemLoad glue.executorId.system.cpuSystemLoad glue.ALL.system.cpuSystemLoad"},
"AWS/GroundStation": {"BitErrorRate", "BlockErrorRate", "ReceivedPower", "Es/N0"},
"AWS/Inspector": {"TotalAssessmentRunFindings", "TotalAssessmentRuns", "TotalHealthyAgents", "TotalMatchingAgents"},
"AWS/IVS": {"ConcurrentViews", "ConcurrentStreams", "LiveDeliveredTime", "LiveInputTime", "RecordedTime"},
"AWS/IoT": {"CanceledJobExecutionCount", "CanceledJobExecutionTotalCount", "ClientError", "Connect.AuthError", "Connect.ClientError", "Connect.ServerError", "Connect.Success", "Connect.Throttle", "DeleteThingShadow.Accepted", "FailedJobExecutionCount", "FailedJobExecutionTotalCount", "Failure", "GetThingShadow.Accepted", "InProgressJobExecutionCount", "InProgressJobExecutionTotalCount", "NonCompliantResources", "NumLogBatchesFailedToPublishThrottled", "NumLogEventsFailedToPublishThrottled", "ParseError", "Ping.Success", "PublishIn.AuthError", "PublishIn.ClientError", "PublishIn.ServerError", "PublishIn.Success", "PublishIn.Throttle", "PublishOut.AuthError", "PublishOut.ClientError", "PublishOut.Success", "QueuedJobExecutionCount", "QueuedJobExecutionTotalCount", "RejectedJobExecutionCount", "RejectedJobExecutionTotalCount", "RemovedJobExecutionCount", "RemovedJobExecutionTotalCount", "ResourcesEvaluated", "RuleMessageThrottled", "RuleNotFound", "RulesExecuted", "ServerError", "Subscribe.AuthError", "Subscribe.ClientError", "Subscribe.ServerError", "Subscribe.Success", "Subscribe.Throttle", "SuccededJobExecutionCount", "SuccededJobExecutionTotalCount", "Success", "TopicMatch", "Unsubscribe.ClientError", "Unsubscribe.ServerError", "Unsubscribe.Success", "Unsubscribe.Throttle", "UpdateThingShadow.Accepted", "Violations", "ViolationsCleared", "ViolationsInvalidated"},
"AWS/IoTAnalytics": {"ActionExecution", "ActivityExecutionError", "IncomingMessages"},
"AWS/KMS": {"SecondsUntilKeyMaterialExpiration"},
@@ -100,6 +101,7 @@ var metricsMap = map[string][]string{
"AWS/Lambda": {"ConcurrentExecutions", "DeadLetterErrors", "Duration", "Errors", "Invocations", "IteratorAge", "Throttles", "UnreservedConcurrentExecutions"},
"AWS/Lex": {"BotChannelAuthErrors", "BotChannelConfigurationErrors", "BotChannelInboundThrottledEvents", "BotChannelOutboundThrottledEvents", "BotChannelRequestCount", "BotChannelResponseCardErrors", "BotChannelSystemErrors", "MissedUtteranceCount", "RuntimeInvalidLambdaResponses", "RuntimeLambdaErrors", "RuntimePollyErrors", "RuntimeRequestCount", "RuntimeSucessfulRequestLatency", "RuntimeSystemErrors", "RuntimeThrottledEvents", "RuntimeUserErrors"},
"AWS/Logs": {"DeliveryErrors", "DeliveryThrottling", "ForwardedBytes", "ForwardedLogEvents", "IncomingBytes", "IncomingLogEvents"},
"AWS/LookoutMetrics": {"ExecutionsStarted", "ExecutionsSucceeded", "ExecutionsFailed", "Delivered", "Undelivered"},
"AWS/ML": {"PredictCount", "PredictFailureCount"},
"AWS/MediaConnect": {"ARQRecovered", "ARQRequests", "BitRate", "CATError", "CRCError", "Connected", "ConnectedOutputs", "ContinuityCounter", "Disconnections", "DroppedPackets", "FECPackets", "FECRecovered", "NotRecoveredPackets", "OutputConnected", "OutputDisconnections", "OverflowPackets", "PATError", "PCRAccuracyError", "PCRError", "PIDError", "PMTError", "PTSError", "PacketLossPercent", "RecoveredPackets", "RoundTripTime", "SourceARQRecovered", "SourceARQRequests", "SourceBitRate", "SourceCATError", "SourceCRCError", "SourceConnected", "SourceContinuityCounter", "SourceDisconnections", "SourceDroppedPackets", "SourceFECPackets", "SourceFECRecovered", "SourceNotRecoveredPackets", "SourceOverflowPackets", "SourcePATError", "SourcePCRAccuracyError", "SourcePCRError", "SourcePIDError", "SourcePMTError", "SourcePTSError", "SourcePacketLossPercent", "SourceRecoveredPackets", "SourceRoundTripTime", "SourceTSByteError", "SourceTSSyncLoss", "SourceTotalPackets", "SourceTransportError", "TSByteError", "TSSyncLoss", "TotalPackets", "TransportError"},
"AWS/MediaConvert": {"AudioOutputSeconds", "Errors", "HDOutputSeconds", "JobsCompletedCount", "JobsErroredCount", "SDOutputSeconds", "StandbyTime", "TranscodingTime", "UHDOutputSeconds"},
@@ -192,6 +194,7 @@ var dimensionsMap = map[string][]string{
"AWS/Glue": {"JobName", "JobRunId", "Type"},
"AWS/GroundStation": {"Channel", "Polarization", "SatelliteId"},
"AWS/Inspector": {},
"AWS/IVS": {"Channel", "ViewerCountryCode"},
"AWS/IoT": {"ActionType", "BehaviorName", "CheckName", "JobId", "Protocol", "RuleName", "ScheduledAuditName", "SecurityProfileName"},
"AWS/IoTAnalytics": {"ActionType", "ChannelName", "DatasetName", "DatastoreName", "PipelineActivityName", "PipelineActivityType", "PipelineName"},
"AWS/KMS": {"KeyId"},
@@ -202,6 +205,7 @@ var dimensionsMap = map[string][]string{
"AWS/Lambda": {"Alias", "ExecutedVersion", "FunctionName", "Resource"},
"AWS/Lex": {"BotAlias", "BotChannelName", "BotName", "BotVersion", "InputMode", "Operation", "Source"},
"AWS/Logs": {"DestinationType", "FilterName", "LogGroupName"},
"AWS/LookoutMetrics": {"AlertArn", "AnomalyDetectorArn"},
"AWS/ML": {"MLModelId", "RequestMode"},
"AWS/MediaConnect": {"AvailabilityZone", "FlowARN", "SourceARN", "OutputARN"},
"AWS/MediaConvert": {"Job", "Operation", "Queue"},
Some files were not shown because too many files have changed in this diff.