Mirror of https://github.com/grafana/grafana.git (synced 2025-12-20 16:54:59 +08:00)

Compare commits: docs/add-d...v7.0.0-bet

100 Commits
08cd127361, 20c6e50434, b6a318db93, 56622c2ba1, dcdedaca82, 1856e16b3e, 395cc26019, b1277a4869, 92fa9a996d, 127e4d6490,
7ae62d714c, 0921ba7609, c21cb46c1d, bf8f3c03b8, 9cf52bd20f, 803b089e5b, e2996f49a2, f42a2d9528, 19db500ca9, f21a9fff0a,
3d0d38fd64, f214005fe3, 3449c1d344, dc9e69be90, a407efcaab, 74dff04996, 21db57db7b, 951bd7130a, d481538c77, 154f6dccc9,
b307ff0edc, 2bce7f56fa, 546e47c59b, 78bb177608, 73c04254e5, 3f228c082a, ef72c489de, e5902b0f22, a0bf16824e, 8a18531b06,
c23f4ac103, b6f1501a82, a8ef363665, c466978366, 6284c96b07, 57cccec020, 9419b96391, da60079615, 3fe7b4f383, c1d632dcc9,
27a6cd7093, d8e3b7ddb3, 055096e047, 2490a55b73, 4e435c2917, 16e7d4563c, f51c8dcd56, e086eca92d, 63098b3452, d2084175c3,
7be6603e54, b12a2914c1, 3767325016, d5a370fba2, 8bd4c1ecd3, f7269c43d1, cbed026c4d, a64ae3bad1, 413cc7c5a8, 11a4a8c40f,
cdb3048f05, 30dc6bdd01, b60fff2174, b6f7be0bae, 92138de21b, 1df9921666, f9a8469c74, 69f10c97c3, 350c2423a1, 8b860a6114,
e5cc96a108, ab94e17d97, e428ac77a7, 8a68f6c59a, a0d2add655, bd7cf1edd0, 37042811c6, 4062817c7f, b4e2276d48, b09c75afab,
97c6efbc0e, e94405a786, 56e76f9e40, 134d43978d, f961766d08, 331b2abfc8, 413aaf26d7, b72a0c81b9, 37b9ad2efe, 29a3641147
@@ -2,6 +2,10 @@ version: 2.1
aliases:
# Workflow filters
# filter-all triggers for all branches and version tags
- &filter-all
tags:
only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
- &filter-only-release
branches:
only: chore/test-release-pipeline

@@ -50,7 +54,7 @@ commands:
- run:
name: "Install Grafana build pipeline tool"
command: |
VERSION=0.2.12
VERSION=0.3.1
curl -fLO https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v${VERSION}/grabpl
chmod +x grabpl
mv grabpl /tmp

@@ -188,7 +192,13 @@ jobs:
- run:
name: Build internal Grafana plug-ins
command: |
/tmp/grabpl build-plugins --jobs 2 --edition << parameters.edition >>
if [[ -n "$CIRCLE_PR_NUMBER" ]]; then
# This is a forked PR, so don't sign as it requires an API secret
/tmp/grabpl build-plugins --jobs 2 --edition << parameters.edition >>
else
export GRAFANA_API_KEY=$GRAFANA_COM_API_KEY
/tmp/grabpl build-plugins --jobs 2 --edition << parameters.edition >> --sign --signing-admin
fi
- run:
name: Move artifacts
command: |

@@ -659,6 +669,9 @@ jobs:
name: Start grafana-server
command: ./e2e/start-server
background: true
- run:
name: "Wait for Grafana to start"
command: './e2e/wait-for-grafana'
- run:
name: Run end-to-end tests
command: ./e2e/run-suite

@@ -967,7 +980,6 @@ jobs:
workflows:
build-pipeline:
jobs:
# No filters, meaning this job runs for all branches
- build-backend:
filters: *filter-master-or-release
edition: oss

@@ -1009,7 +1021,7 @@ workflows:
- test-backend
- test-frontend
- build-backend:
# No filters, meaning this job runs for all branches
filters: *filter-all
edition: oss
variant: osx64
name: build-oss-backend-osx64

@@ -1017,7 +1029,7 @@ workflows:
- test-backend
- test-frontend
- build-backend:
# No filters, meaning this job runs for all branches
filters: *filter-all
edition: oss
variant: win64
name: build-oss-backend-win64

@@ -1025,7 +1037,7 @@ workflows:
- test-backend
- test-frontend
- build-backend:
# No filters, meaning this job runs for all branches
filters: *filter-all
edition: oss
variant: linux-x64
name: build-oss-backend-linux-x64

@@ -1033,7 +1045,7 @@ workflows:
- test-backend
- test-frontend
- build-backend:
# No filters, meaning this job runs for all branches
filters: *filter-all
edition: oss
variant: linux-x64-musl
name: build-oss-backend-linux-x64-musl

@@ -1041,14 +1053,14 @@ workflows:
- test-backend
- test-frontend
- build-frontend:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-oss-frontend
edition: oss
requires:
- test-backend
- test-frontend
- build-plugins:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-oss-plugins
edition: oss
requires:

@@ -1095,7 +1107,7 @@ workflows:
- test-backend
- test-frontend
- build-backend:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-enterprise-backend-osx64
edition: enterprise
variant: osx64

@@ -1103,7 +1115,7 @@ workflows:
- test-backend
- test-frontend
- build-backend:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-enterprise-backend-win64
edition: enterprise
variant: win64

@@ -1111,7 +1123,7 @@ workflows:
- test-backend
- test-frontend
- build-backend:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-enterprise-backend-linux-x64
edition: enterprise
variant: linux-x64

@@ -1119,7 +1131,7 @@ workflows:
- test-backend
- test-frontend
- build-backend:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-enterprise-backend-linux-x64-musl
edition: enterprise
variant: linux-x64-musl

@@ -1127,14 +1139,14 @@ workflows:
- test-backend
- test-frontend
- build-frontend:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-enterprise-frontend
edition: enterprise
requires:
- test-backend
- test-frontend
- build-plugins:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-enterprise-plugins
edition: enterprise
requires:

@@ -1142,32 +1154,32 @@ workflows:
- test-frontend
- build-release-publisher:
filters: *filter-master-or-release
# No filters, meaning this job runs for all branches
- codespell
# No filters, meaning this job runs for all branches
- lint-go
# No filters, meaning this job runs for all branches
- shellcheck
# No filters, meaning this job runs for all branches
- codespell:
filters: *filter-all
- lint-go:
filters: *filter-all
- shellcheck:
filters: *filter-all
- test-backend:
filters: *filter-all
requires:
- lint-go
# No filters, meaning this job runs for all branches
- test-frontend
# No filters, meaning this job runs for all branches
- test-frontend:
filters: *filter-all
- mysql-integration-test:
filters: *filter-all
requires:
- lint-go
- test-backend
- test-frontend
- postgres-integration-test:
# No filters, meaning this job runs for all branches
filters: *filter-all
requires:
- lint-go
- test-backend
- test-frontend
- package-oss:
# No filters, meaning this job runs for all branches
filters: *filter-all
requires:
- build-oss-backend-armv6
- build-oss-backend-armv7

@@ -1185,7 +1197,7 @@ workflows:
- shellcheck
- build-oss-plugins
- package-enterprise:
# No filters, meaning this job runs for all branches
filters: *filter-all
requires:
- build-enterprise-backend-armv6
- build-enterprise-backend-armv7

@@ -1203,11 +1215,11 @@ workflows:
- shellcheck
- build-enterprise-plugins
- build-oss-windows-installer:
# No filters, meaning this job runs for all branches
filters: *filter-all
requires:
- package-oss
- build-enterprise-windows-installer:
# No filters, meaning this job runs for all branches
filters: *filter-all
requires:
- package-enterprise
- release-next-packages:

@@ -1243,11 +1255,12 @@ workflows:
- postgres-integration-test
- build-release-publisher
- publish-storybook:
filters: *filter-all
requires:
- test-backend
- test-frontend
- build-docker-images:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-oss-docker-images
edition: oss
ubuntu: false

@@ -1258,7 +1271,7 @@ workflows:
- package-oss
- build-oss-windows-installer
- build-docker-images:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-oss-ubuntu-docker-images
edition: oss
ubuntu: true

@@ -1269,7 +1282,7 @@ workflows:
- package-oss
- build-oss-windows-installer
- build-docker-images:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-enterprise-docker-images
edition: enterprise
ubuntu: false

@@ -1280,7 +1293,7 @@ workflows:
- package-enterprise
- build-enterprise-windows-installer
- build-docker-images:
# No filters, meaning this job runs for all branches
filters: *filter-all
name: build-enterprise-ubuntu-docker-images
edition: enterprise
ubuntu: true

@@ -1291,7 +1304,7 @@ workflows:
- package-enterprise
- build-enterprise-windows-installer
- end-to-end-tests:
# No filters, meaning this job runs for all branches
filters: *filter-all
requires:
- package-oss
- build-docs-website:
94 CHANGELOG.md
@@ -1,4 +1,4 @@
# 7.0.0 (unreleased)
# 7.0.0-beta1 (2020-04-28)

## Breaking changes

@@ -6,6 +6,98 @@
- **Docker**: Our Ubuntu based images have been upgraded to Ubuntu [20.04 LTS](https://releases.ubuntu.com/20.04/).
- **Dashboard**: A global minimum dashboard refresh interval is now enforced and defaults to 5 seconds.

### Features / Enhancements
* **@grafana/ui**: Create Icon component and replace icons. [#23402](https://github.com/grafana/grafana/pull/23402), [@ivanahuckova](https://github.com/ivanahuckova)
* **@grafana/ui**: Create slider component. [#22275](https://github.com/grafana/grafana/pull/22275), [@ivanahuckova](https://github.com/ivanahuckova)
* **@grafana/ui**: Remove ColorPallete component. [#23592](https://github.com/grafana/grafana/pull/23592), [@ivanahuckova](https://github.com/ivanahuckova)
* **AWS IAM**: Support for AWS EKS ServiceAccount roles. [#21594](https://github.com/grafana/grafana/pull/21594), [@patstrom](https://github.com/patstrom)
* **Add ServerlessDatabaseCapacity to AWS/RDS metrics**. [#23635](https://github.com/grafana/grafana/pull/23635), [@jackstevenson](https://github.com/jackstevenson)
* **AlertNotifications**: Translate notifications IDs to UIDs in Rule builder. [#19882](https://github.com/grafana/grafana/pull/19882), [@aSapien](https://github.com/aSapien)
* **Alerting**: Adds support for basic auth in Alertmanager notifier. [#23231](https://github.com/grafana/grafana/pull/23231), [@melchiormoulin](https://github.com/melchiormoulin)
* **Alerting**: Enable Alert rule tags to override PagerDuty Severity setting. [#22736](https://github.com/grafana/grafana/pull/22736), [@AndrewBurian](https://github.com/AndrewBurian)
* **Alerting**: Handle image renderer unavailable when edit notifiers. [#23711](https://github.com/grafana/grafana/pull/23711), [@marefr](https://github.com/marefr)
* **Alerting**: Upload error image when image renderer unavailable. [#23713](https://github.com/grafana/grafana/pull/23713), [@marefr](https://github.com/marefr)
* **Allow API to assign new user to a specific organization**. [#21775](https://github.com/grafana/grafana/pull/21775), [@Sytten](https://github.com/Sytten)
* **Auth**: update cookies' path value to reduce requests and avoid query param truncation. [#22265](https://github.com/grafana/grafana/pull/22265), [@consideRatio](https://github.com/consideRatio)
* **Build**: Remove usage of Go vendoring. [#23796](https://github.com/grafana/grafana/pull/23796), [@kylebrandt](https://github.com/kylebrandt)
* **Build**: Upgrade to Go 1.14. [#23371](https://github.com/grafana/grafana/pull/23371), [@aknuds1](https://github.com/aknuds1)
* **CloudWatch**: Added AWS Chatbot metrics and dimensions. [#23516](https://github.com/grafana/grafana/pull/23516), [@ilyastoli](https://github.com/ilyastoli)
* **CloudWatch**: Added Cassandra namespace. [#23299](https://github.com/grafana/grafana/pull/23299), [@vikkyomkar](https://github.com/vikkyomkar)
* **CloudWatch**: Added missing Cassandra metrics. [#23467](https://github.com/grafana/grafana/pull/23467), [@ilyastoli](https://github.com/ilyastoli)
* **CloudWatch**: Prefer webIdentity over EC2 role. [#23452](https://github.com/grafana/grafana/pull/23452), [@dnascimento](https://github.com/dnascimento)
* **CloudWatch**: Prefer webIdentity over EC2 role also when assuming a role. [#23807](https://github.com/grafana/grafana/pull/23807), [@bruecktech](https://github.com/bruecktech)
* **Dashboard**: Add failsafe for slug generation. [#23709](https://github.com/grafana/grafana/pull/23709), [@sakjur](https://github.com/sakjur)
* **Dashboard**: Enforce minimum dashboard refresh interval to 5 seconds per default. [#23929](https://github.com/grafana/grafana/pull/23929), [@marefr](https://github.com/marefr)
* **Dashboard**: Handle no renderer available in panel share dialog. [#23856](https://github.com/grafana/grafana/pull/23856), [@marefr](https://github.com/marefr)
* **Data source**: Max data points now used in interval calculation for all data sources. [#23915](https://github.com/grafana/grafana/pull/23915), [@torkelo](https://github.com/torkelo)
* **Database**: Update the xorm dependency to v0.8.1. [#22376](https://github.com/grafana/grafana/pull/22376), [@novalagung](https://github.com/novalagung)
* **Datasource/Cloudwatch**: Adds support for Cloudwatch Logs. [#23566](https://github.com/grafana/grafana/pull/23566), [@kaydelaney](https://github.com/kaydelaney)
* **DateTime**: adding support to select preferred timezone for presentation of date and time values.. [#23586](https://github.com/grafana/grafana/pull/23586), [@mckn](https://github.com/mckn)
* **Docker**: Upgrade to Ubuntu 20.04 in Dockerfiles. [#23852](https://github.com/grafana/grafana/pull/23852), [@aknuds1](https://github.com/aknuds1)
* **Docs**: adding API reference documentation support for the packages libraries.. [#21931](https://github.com/grafana/grafana/pull/21931), [@mckn](https://github.com/mckn)
* **Explore**: Add trace UI to show traces from tracing datasources. [#23047](https://github.com/grafana/grafana/pull/23047), [@aocenas](https://github.com/aocenas)
* **FieldOverrides**: Support data links via field overrides. [#23590](https://github.com/grafana/grafana/pull/23590), [@dprokop](https://github.com/dprokop)
* **Grafana Toolkit**: Adds template for backend datasource. [#23864](https://github.com/grafana/grafana/pull/23864), [@bergquist](https://github.com/bergquist)
* **Graphite**: Rollup indicator and custom meta data inspector . [#22738](https://github.com/grafana/grafana/pull/22738), [@torkelo](https://github.com/torkelo)
* **Image Rendering**: New setting to control render request concurrency. [#23950](https://github.com/grafana/grafana/pull/23950), [@marefr](https://github.com/marefr)
* **Image Rendering**: Remove PhantomJS support. [#23460](https://github.com/grafana/grafana/pull/23460), [@marefr](https://github.com/marefr)
* **Logs**: Derived fields link design. [#23695](https://github.com/grafana/grafana/pull/23695), [@aocenas](https://github.com/aocenas)
* **Migration**: Add old Input to legacy namespace. [#23286](https://github.com/grafana/grafana/pull/23286), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Admin org edit page. [#23866](https://github.com/grafana/grafana/pull/23866), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Change password. [#23623](https://github.com/grafana/grafana/pull/23623), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Data/Panel link editor. [#23778](https://github.com/grafana/grafana/pull/23778), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Final components to LegacyForms. [#23707](https://github.com/grafana/grafana/pull/23707), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Layout Selector. [#23790](https://github.com/grafana/grafana/pull/23790), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Move Switch from Forms namespace. [#23386](https://github.com/grafana/grafana/pull/23386), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Move last components from Forms namespace. [#23556](https://github.com/grafana/grafana/pull/23556), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Move old Switch to legacy namespace. [#23357](https://github.com/grafana/grafana/pull/23357), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Org users page. [#23372](https://github.com/grafana/grafana/pull/23372), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Orgs list. [#23821](https://github.com/grafana/grafana/pull/23821), [@tskarhed](https://github.com/tskarhed)
* **Migration**: Teams and alert list. [#23810](https://github.com/grafana/grafana/pull/23810), [@tskarhed](https://github.com/tskarhed)
* **Migration**: TextArea from Forms namespace. [#23436](https://github.com/grafana/grafana/pull/23436), [@tskarhed](https://github.com/tskarhed)
* **OAuth **: Introduce new setting for configuring max age of OAuth state cookie . [#23195](https://github.com/grafana/grafana/pull/23195), [@rtrompier](https://github.com/rtrompier)
* **OAuth**: Adds Okta provider. [#22972](https://github.com/grafana/grafana/pull/22972), [@alexanderzobnin](https://github.com/alexanderzobnin)
* **Packages**: Bundle plugins. [#23787](https://github.com/grafana/grafana/pull/23787), [@aknuds1](https://github.com/aknuds1)
* **PanelPlugin**: add deprecation notice to setEditor method. [#23895](https://github.com/grafana/grafana/pull/23895), [@dprokop](https://github.com/dprokop)
* **Plugins**: Adds support for URL params in plugin routes. [#23503](https://github.com/grafana/grafana/pull/23503), [@daniellee](https://github.com/daniellee)
* **Plugins**: Hide plugins page from viewers, and limit /api/plugins to only core plugins when called by viewer role . [#21901](https://github.com/grafana/grafana/pull/21901), [@dprokop](https://github.com/dprokop)
* **Postgres**: Add SSL support for datasource. [#21341](https://github.com/grafana/grafana/pull/21341), [@ryankurte](https://github.com/ryankurte)
* **Prometheus**: Render missing labels in legend formats as an empty string. [#22355](https://github.com/grafana/grafana/pull/22355), [@Hixon10](https://github.com/Hixon10)
* **Provisioning**: Allows specifying uid for datasource and use that in derived fields. [#23585](https://github.com/grafana/grafana/pull/23585), [@aocenas](https://github.com/aocenas)
* **Provisioning**: Validate that dashboard providers have unique names. [#22898](https://github.com/grafana/grafana/pull/22898), [@youshy](https://github.com/youshy)
* **Rendering**: Add metrics. [#23827](https://github.com/grafana/grafana/pull/23827), [@alexanderzobnin](https://github.com/alexanderzobnin)
* **Search**: Replace search implementation. [#23855](https://github.com/grafana/grafana/pull/23855), [@sakjur](https://github.com/sakjur)
* **Stackdriver**: Support for SLO queries. [#22917](https://github.com/grafana/grafana/pull/22917), [@sunker](https://github.com/sunker)
* **Table**: Improvements to column resizing, style and alignment . [#23663](https://github.com/grafana/grafana/pull/23663), [@torkelo](https://github.com/torkelo)
* **Table**: upgrades react-table to 7.0.0 and typings. [#23247](https://github.com/grafana/grafana/pull/23247), [@hugohaggmark](https://github.com/hugohaggmark)
* **TablePanel**: Handle column overflow and horizontal scrolling in table panel. [#4157](https://github.com/grafana/grafana/pull/4157), [@jerryxguo](https://github.com/jerryxguo)
* **Tracing**: Zipkin datasource. [#23829](https://github.com/grafana/grafana/pull/23829), [@aocenas](https://github.com/aocenas)
* **Transformations**: Improve UI and add some love to filter by name. [#23751](https://github.com/grafana/grafana/pull/23751), [@dprokop](https://github.com/dprokop)
* **Transformers**: adds labels as columns transformer. [#23703](https://github.com/grafana/grafana/pull/23703), [@hugohaggmark](https://github.com/hugohaggmark)
* **Transformers**: calculate a new field based on the row values. [#23675](https://github.com/grafana/grafana/pull/23675), [@ryantxu](https://github.com/ryantxu)
* **Units**: add (IEC) and (Metric) to bits and bytes. [#23175](https://github.com/grafana/grafana/pull/23175), [@flopp999](https://github.com/flopp999)
* **Usagestats**: Add usage stats about what type of datasource is used in alerting. . [#23125](https://github.com/grafana/grafana/pull/23125), [@bergquist](https://github.com/bergquist)
* **Variable**: Support more variable formatting.. [#21622](https://github.com/grafana/grafana/pull/21622), [@xiaobeiyang](https://github.com/xiaobeiyang)
* **azuremonitor**: add alerting support - port azure log analytics to the backend. [#23839](https://github.com/grafana/grafana/pull/23839), [@daniellee](https://github.com/daniellee)
* **delete old dashboard versions in multiple batches**. [#23348](https://github.com/grafana/grafana/pull/23348), [@DanCech](https://github.com/DanCech)
* **grafana/data**: PanelTypeChangedHandler API update to use PanelModel instead of panel options object [BREAKING]. [#22754](https://github.com/grafana/grafana/pull/22754), [@dprokop](https://github.com/dprokop)
* **grafana/ui**: Add basic horizontal and vertical layout components. [#22303](https://github.com/grafana/grafana/pull/22303), [@dprokop](https://github.com/dprokop)

### Bug Fixes
* **@grafana/ui**: Fix time range when only partial datetime is provided. [#23122](https://github.com/grafana/grafana/pull/23122), [@ivanahuckova](https://github.com/ivanahuckova)
* **Alerting**: Only include image in notifier when enabled. [#23194](https://github.com/grafana/grafana/pull/23194), [@marefr](https://github.com/marefr)
* **Alertmanager**: Basic auth should not be required.. [#23691](https://github.com/grafana/grafana/pull/23691), [@bergquist](https://github.com/bergquist)
* **Dashboard**: Handle min refresh interval when importing dashboard. [#23959](https://github.com/grafana/grafana/pull/23959), [@marefr](https://github.com/marefr)
* **DataSourceProxy**: Handle URL parsing error. [#23731](https://github.com/grafana/grafana/pull/23731), [@aknuds1](https://github.com/aknuds1)
* **Fix**: Shows organization popup in alphabetical order. [#22259](https://github.com/grafana/grafana/pull/22259), [@vikkyomkar](https://github.com/vikkyomkar)
* **Image Rendering**: Make it work using serve_from_sub_path configured. [#23706](https://github.com/grafana/grafana/pull/23706), [@marefr](https://github.com/marefr)
* **Image rendering**: Fix missing icon on plugins list. [#23958](https://github.com/grafana/grafana/pull/23958), [@marefr](https://github.com/marefr)
* **Logs**: Fix error when non-string log level supplied. [#23654](https://github.com/grafana/grafana/pull/23654), [@ivanahuckova](https://github.com/ivanahuckova)
* **Rich history**: Fix create url and run query for various datasources. [#23627](https://github.com/grafana/grafana/pull/23627), [@ivanahuckova](https://github.com/ivanahuckova)
* **Security**: Fix XSS vulnerability in table panel. [#23816](https://github.com/grafana/grafana/pull/23816), [@torkelo](https://github.com/torkelo)
* **Templating**: Fix global variable "__org.id". [#23362](https://github.com/grafana/grafana/pull/23362), [@vikkyomkar](https://github.com/vikkyomkar)
* **azuremonitor**: fix for app insights azure china plugin route. [#23877](https://github.com/grafana/grafana/pull/23877), [@daniellee](https://github.com/daniellee)

# 6.7.3 (2020-04-23)

### Bug Fixes
@@ -695,6 +695,8 @@ disable_sanitize_html = false
[plugins]
enable_alpha = false
app_tls_skip_verify_insecure = false
# Enter a comma-separated list of plugin identifiers to identify plugins that are allowed to be loaded even if they lack a valid signature.
allow_loading_unsigned_plugins =

#################################### Grafana Image Renderer Plugin ##########################
[plugin.grafana-image-renderer]
@@ -684,6 +684,8 @@
[plugins]
;enable_alpha = false
;app_tls_skip_verify_insecure = false
# Enter a comma-separated list of plugin identifiers to identify plugins that are allowed to be loaded even if they lack a valid signature.
;allow_loading_unsigned_plugins =

#################################### Grafana Image Renderer Plugin ##########################
[plugin.grafana-image-renderer]
@@ -97,29 +97,35 @@ go test -v ./pkg/...

### Run end-to-end tests

The end to end tests in Grafana use [Cypress](https://www.cypress.io/) to run automated scripts in a headless Chromium browser. Read more about our [e2e framework](/contribute/style-guides/e2e.md).

To run the tests:

```
yarn e2e-tests
yarn e2e
```

By default, the end-to-end tests assume Grafana is available on `localhost:3000`. To use a specific URL, set the `BASE_URL` environment variable:
By default, the end-to-end tests starts a Grafana instance listening on `localhost:3001`. To use a specific URL, set the `BASE_URL` environment variable:

```
BASE_URL=http://localhost:3333 yarn e2e-tests
BASE_URL=http://localhost:3333 yarn e2e
```

To follow the tests in the browser while they're running, use the `yarn e2e-tests:debug` instead.
To follow the tests in the browser while they're running, use the `yarn e2e:debug`.

```
yarn e2e-tests:debug
yarn e2e:debug
```

If you want to pick a test first, use the `yarn e2e:dev`, to pick a test and follow the test in the browser while it runs.

```
yarn e2e:dev
```

## Configure Grafana for development

The default configuration, `grafana.ini`, is located in the `conf` directory.

To override the default configuration, create a `custom.ini` file in the `conf` directory. You only need to add the options you wish to override.

@@ -198,14 +204,14 @@ The number of files needed may be different on your environment. To determine th
find ./conf ./pkg ./public/views | wc -l
```

Another alternative is to limit the files being watched. The directories that are watched for changes are listed in the `.bra.toml` file in the root directory.

To retain your `ulimit` configuration, i.e. so it will be remembered for future sessions, you need to commit it to your command line shell initialization file. Which file this will be depends on the shell you are using, here are some examples:

* zsh -> ~/.zshrc
* bash -> ~/.bashrc

Commit your ulimit configuration to your shell initialization file as follows ($LIMIT being your chosen limit and $INIT_FILE being the initialization file for your shell):

```
echo ulimit -S -n $LIMIT >> $INIT_FILE
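
For example, with a chosen limit of 4096 (an arbitrary value) and zsh as the shell, that would be:

```bash
# Persist a higher soft open-file limit for future zsh sessions
echo "ulimit -S -n 4096" >> ~/.zshrc
```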
@@ -5,15 +5,22 @@ Grafana Labs uses a minimal home grown solution built on top of Cypress for our

## Commands

- `yarn e2e` Creates an isolated grafana-server home under `<repo-root>/e2e/tmp` with provisioned data sources and dashboards. This
copies locally build binary and frontned assets from your repo root so you need to have a built backend and frontend
copies locally build binary and frontend assets from your repo root so you need to have a built backend and frontend
for this to run locally. The server starts on port 3001 so it does not conflict with your normal dev server.
- `yarn e2e:debug` Same as above but runs the tests in chrome and does not shutdown after completion.
- `yarn e2e:dev` Same as above but does not run any tests on startup. It lets you pick a test first.

If you already have a Grafana instance running, you can provide a specific URL by setting the `BASE_URL` environment variable:

```
BASE_URL=http://172.0.10.2:3333 yarn e2e
```

The above commands use some utils scripts under `<repo-root>/e2e` that can also be used for more control.

- `./e2e/start-server` This creates a fresh new grafana server working dir, setup's config and starts the server. It
will also kill any previously started server that is still running using pid file at `<repo-root>/e2e/tmp/pid`.
- `./e2e/wait-for-grafana` waits for `$HOST` and `$PORT` to be available. Per default localhost and 3001.
- `./e2e/run-suite <debug|dev|noarg>` Starts cypress in different modes.
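
Putting those utility scripts together, a typical manual run from the repo root looks roughly like this (a sketch; a built backend and frontend are assumed to already be in place):

```bash
# Start an isolated grafana-server in the background, wait until it answers, then run the suite
./e2e/start-server &
./e2e/wait-for-grafana
./e2e/run-suite debug
```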

## Test Suites
@@ -1,6 +1,6 @@
.PHONY: docs docs-test

IMAGE = grafana/docs-base@sha256:63758b74e3990ab61e274f5e98da092d5c38378829dad0488aa97c59f0144f34
IMAGE = grafana/docs-base:latest

docs:
docker run -v $(shell pwd)/sources:/hugo/content/docs/grafana/latest -p 3002:3002 --rm -it $(IMAGE) /bin/bash -c 'make server'
@@ -1,7 +1,7 @@
+++
title = "Grafana Enterprise"
description = "Grafana Enterprise overview"
keywords = ["grafana", "documentation", "datasource", "permissions", "ldap", "licensing", "enterprise"]
keywords = ["grafana", "documentation", "datasource", "permissions", "ldap", "licensing", "enterprise", "insights", "reporting"]
type = "docs"
[menu.docs]
name = "Grafana Enterprise"

@@ -58,6 +58,10 @@ Supported auth providers:

[White labeling]({{< relref "white-labeling.md" >}}) allows you to replace the Grafana brand and logo with your own corporate brand and logo. You can also change footer links to point to your custom resources.

## Usage insights

[Usage insights]({{< relref "usage-insights.md" >}}) allow you to understand how your Grafana instance is used. You can see who is looking at a dashboard, how often a dashboard is seen, and which dashboards are prone to errors. You'll also be able to discover what are the least and the most used dashboards.

## Enterprise plugins

With a Grafana Enterprise license, you get access to premium plugins, including:
57 docs/sources/enterprise/usage-insights.md Normal file
@@ -0,0 +1,57 @@
+++
title = "Usage-insights"
description = "Usage-insights"
keywords = ["grafana", "usage-insights", "enterprise"]
aliases = ["/docs/grafana/latest/enterprise/usage-insights/"]
type = "docs"
[menu.docs]
name = "Usage-insights"
parent = "enterprise"
weight = 700
+++

# Usage insights

Usage insights allows you to have a better understanding of how your Grafana instance is used. The collected data are the number of:

- Dashboard views (aggregated and per user)
- Data source errors
- Data source queries

> Only available in Grafana Enterprise v7.0+.

## Presence indicator

The presence indicator is visible to all signed-in users on all dashboards. It shows the avatars of users who interacted with the dashboard recently (last 10 minutes by default). You can see the user's name by hovering your cursor over the user's avatar. The avatars come from [Gravatar](https://gravatar.com) based on the user's email.

When more users are active on a dashboard than can fit in the presence indicator section, click on the `+X` icon that opens [dashboard insights]({{< relref "#dashboard-insights" >}}) to see more details about recent user activity.

{{< docs-imagebox img="/img/docs/enterprise/presence_indicators.png" max-width="400px" class="docs-image--no-shadow" >}}

You can choose your own definition of "recent" by setting it in the [configuration]({{< relref "../installation/configuration.md">}}) file.

```ini
[analytics.views]
# Set age for recent active users
recent_users_age = 10m
```

## Dashboard insights

You can see dashboard usage information by clicking on the `Dashboard insights` button in the top bar.

{{< docs-imagebox img="/img/docs/enterprise/dashboard_insights_button.png" max-width="400px" class="docs-image--no-shadow" >}}

It shows two kinds of information:

- **Stats:** Shows the daily query count and error count for the last 30 days.
- **Users & activity:** Shows the daily view count for the last 30 days; last activities on the dashboard and recent users (with a limit of 20).

{{< docs-imagebox img="/img/docs/enterprise/dashboard_insights_stats.png" max-width="400px" class="docs-image--no-shadow" >}}{{< docs-imagebox img="/img/docs/enterprise/dashboard_insights_users.png" max-width="400px" class="docs-image--no-shadow" >}}

## Improved dashboard search

In the search view, you can sort dashboards using these insights data. It helps you find unused or broken dashboards or discover most viewed ones.

{{< docs-imagebox img="/img/docs/enterprise/improved_search.png" max-width="650px" class="docs-image--no-shadow" >}}
@@ -189,14 +189,15 @@ datasources:
jsonData:
maxLines: 1000
derivedFields:
# Field with internal link pointing to datasource in Grafana
# Field with internal link pointing to data source in Grafana.
# Right now, Grafana supports only Jaeger and Zipkin data sources as link targets.
- datasourceUid: my_jaeger_uid
matcherRegex: "traceID=(\\w+)"
name: TraceID
# url will be interpreted as query for the datasource
url: "$${__value.raw}"

# Field with external link
# Field with external link.
- matcherRegex: "traceID=(\\w+)"
name: TraceID
url: "http://localhost:16686/trace/$${__value.raw}"
@@ -51,8 +51,6 @@ You can close the newly created query by clicking on the Close Split button.

## Query history

> BETA: Query history is a beta feature.

Query history is a list of queries that you have used in Explore. The history is local to your browser and is not shared with others. To open and interact with your history, click the **Query history** button in Explore.

### View query history
@@ -840,6 +840,10 @@ is false. This settings was introduced in Grafana v6.0.

Set to true if you want to test alpha plugins that are not yet ready for general usage.

### allow_loading_unsigned_plugins

Enter a comma-separated list of plugin identifiers to identify plugins that are allowed to be loaded even if they lack a valid signature.
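
As a sketch of what such a list could look like: the two plugin IDs below are hypothetical, and the environment-variable form assumes Grafana's usual `GF_<SECTION>_<KEY>` override convention rather than editing `custom.ini` directly.

```bash
# Allow two (hypothetical) unsigned plugins to load; equivalent to setting
# allow_loading_unsigned_plugins = my-org-custom-panel,my-org-custom-datasource under [plugins]
export GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS="my-org-custom-panel,my-org-custom-datasource"
```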

## [feature_toggles]
### enable
@@ -249,24 +249,26 @@
- name: Grafana Enterprise
link: /enterprise/
children:
- name: Overview
link: /enterprise/
- name: Data source permissions
link: /enterprise/datasource_permissions/
- name: Enhanced LDAP
link: /enterprise/enhanced_ldap/
- name: Reporting
link: /enterprise/reporting/
- name: SAML authentication
link: /enterprise/saml/
- name: Team sync
link: /enterprise/team-sync/
- name: White labeling
link: /enterprise/white-labeling/
- name: License expiration
link: /enterprise/license-expiration/
- name: Export dashboard as PDF
link: /enterprise/export-pdf/
- name: Overview
link: /enterprise/
- name: Data source permissions
link: /enterprise/datasource_permissions/
- name: Enhanced LDAP
link: /enterprise/enhanced_ldap/
- name: Reporting
link: /enterprise/reporting/
- name: Export dashboard as PDF
link: /enterprise/export-pdf/
- name: SAML authentication
link: /enterprise/saml/
- name: Team sync
link: /enterprise/team-sync/
- name: White labeling
link: /enterprise/white-labeling/
- name: Usage insights
link: /enterprise/usage-insights/
- name: License expiration
link: /enterprise/license-expiration/
- name: Plugins
link: /plugins/
children:
75 docs/sources/panels/thresholds.md Normal file
@@ -0,0 +1,75 @@
title = "Thresholds"
type = "docs"
[menu.docs]
identifier = "thresholds"
parent = "panels"
weight = 300
+++

# Thresholds

Thresholds set the color of either the value text or the background depending on conditions that you define.

You can define thresholds one of two ways:
* **Absolute** thresholds are defined based on a number. For example, 80 on a scale of 1 to 150.
* **Percentage** thresholds are defined relative to minimum and maximums. For example, 80 percent.

You can apply thresholds to the following visualizations:
* Stat
* Gauge
* Bar gauge
* Table
* Graph

## Default thresholds

On visualizations that support it, Grafana sets default threshold values of:
* 80 = red
* Base = green
* Mode = Absolute

The **Base** value represents minus infinity. It is generally the “good” color.

## Add a threshold

You can add as many thresholds to a panel as you want. Grafana automatically sorts thresholds from highest value to lowest.

> **Note:** These instructions apply only to the Stat, Gauge, Bar gauge, and Table visualizations.

1. Navigate to the panel you want to add a threshold to.
1. Click the **Field** tab.
1. Click **Add threshold**.
1. Grafana adds a threshold with suggested numerical and color values.
1. Accept the recommendations or edit the new threshold.
* **Edit color:** Click the color dot you wish to change and then select a new color.
* **Edit number:** Click the number you wish to change and then enter a new number.
* **Thresholds mode -** Click the mode to change it for all thresholds on this panel.
1. Click **Save** to save the changes in the dashboard.

## Add a threshold to a Graph panel

In the Graph panel visualization, thresholds allow you to add arbitrary lines or sections to the graph to make it easier to see when the graph crosses a particular threshold.

1. Navigate to the graph panel you want to add a threshold to.
1. On the Panel tab, click **Thresholds**.
1. Click **Add threshold**.
1. Fill in as many fields as you want. Only the **T1** fields are required.
* **T1 -** Both values are required to display a threshold.
* **lt** or **gt** - Select **lt** for less than or **gt** for greater than to indicate what the threshold applies to.
* **Value -** Enter a threshold value. Grafana draws a threshold line along the Y-axis at that value.
* **Color -** Choose a condition that corresponds to a color, or define your own color.
* **custom -** You define the fill color and line color.
* **critical -** Fill and line color are red.
* **warning -** Fill and line color are yellow.
* **ok -** Fill and line color are green.
* **Fill -** Controls whether the threshold fill is displayed.
* **Line -** Controls whether the threshold line is displayed.
* **Y-Axis -** Choose **left** or **right**.
1. Click **Save** to save the changes in the dashboard.

## Delete a threshold

1. Navigate to the panel you want to add a threshold to.
1. Click the **Field** tab. (Or **Panel** tab for a graph panel.)
1. Click the trash can icon next to the threshold you want to remove.
1. Click **Save** to save the changes in the dashboard.
@@ -2,15 +2,12 @@

. e2e/variables

echo -e "Waiting for grafana-server to finish starting"

timeout 60 bash -c 'until nc -z $0 $1; do sleep 1; done' localhost $PORT

echo -e "Starting Cypress scenarios"

CMD="start"
PARAMS=""
SLOWMO=0
URL=${BASE_URL:-"http://$DEFAULT_HOST:$DEFAULT_PORT"}

if [ "$1" == "debug" ]; then
echo -e "Debug mode"

@@ -25,6 +22,6 @@ fi

cd packages/grafana-e2e

yarn $CMD --env BASE_URL=http://localhost:$PORT,CIRCLE_SHA1=$CIRCLE_SHA1,SLOWMO=$SLOWMO \
yarn $CMD --env BASE_URL=$URL,CIRCLE_SHA1=$CIRCLE_SHA1,SLOWMO=$SLOWMO \
--config integrationFolder=../../e2e/suite1/specs,screenshotsFolder=../../e2e/suite1/screenshots,videosFolder=../../e2e/suite1/videos,fileServerFolder=./cypress,viewportWidth=1920,viewportHeight=1080,trashAssetsBeforeRuns=false \
$PARAMS
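
A quick usage sketch for the script above, pointing the suite at an already running Grafana rather than the default local port (the URL is an example):

```bash
# Run the Cypress suite in debug mode against an existing Grafana instance
BASE_URL=http://localhost:3000 ./e2e/run-suite debug
```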
@@ -2,7 +2,12 @@

. e2e/variables

# Start it in the background
./e2e/start-server 2>&1 > e2e/server.log &
if [ "$BASE_URL" != "" ]; then
echo -e "BASE_URL set, skipping starting server"
else
# Start it in the background
./e2e/start-server 2>&1 > e2e/server.log &
./e2e/wait-for-grafana
fi

./e2e/run-suite "$@"
@@ -3,6 +3,8 @@ set -eo pipefail

. e2e/variables

PORT=${PORT:-$DEFAULT_PORT}

./e2e/kill-server

mkdir $RUNDIR
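
Since the script only falls back to `$DEFAULT_PORT` when `PORT` is unset, the port can be overridden at invocation time (3002 is an arbitrary example):

```bash
# Start the isolated e2e grafana-server on a non-default port
PORT=3002 ./e2e/start-server
```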
@@ -31,7 +31,7 @@ e2e.scenario({
.should('be.visible')
.click();

e2e.components.Drawer.General.title(PANEL_UNDER_TEST)
e2e.components.Drawer.General.title(`Inspect: ${PANEL_UNDER_TEST}`)
.should('be.visible')
.within(() => {
e2e.components.Tab.title('Query').should('be.visible');

@@ -44,7 +44,7 @@ e2e.scenario({
});

const expectDrawerTabsAndContent = () => {
e2e.components.Drawer.General.title(PANEL_UNDER_TEST)
e2e.components.Drawer.General.title(`Inspect: ${PANEL_UNDER_TEST}`)
.should('be.visible')
.within(() => {
e2e.components.Tab.title('Data').should('be.visible');

@@ -87,7 +87,7 @@ const expectDrawerTabsAndContent = () => {
const expectDrawerClose = () => {
// close using close button
e2e.components.Drawer.General.close().click();
e2e.components.Drawer.General.title(PANEL_UNDER_TEST).should('not.be.visible');
e2e.components.Drawer.General.title(`Inspect: ${PANEL_UNDER_TEST}`).should('not.be.visible');
};

const expectDrawerExpandAndContract = (viewPortWidth: number) => {
146 e2e/suite1/specs/panelEdit_queries.spec.ts Normal file
@@ -0,0 +1,146 @@
import { e2e } from '@grafana/e2e';
import { expect } from '../../../public/test/lib/common';

const PANEL_UNDER_TEST = 'Random walk series';

e2e.scenario({
describeName: 'Panel edit tests - queries',
itName: 'Testes various Panel edit queries scenarios',
addScenarioDataSource: false,
addScenarioDashBoard: false,
skipScenario: false,
scenario: () => {
e2e.flows.openDashboard('5SdHCadmz');

e2e.flows.openPanelMenuItem(e2e.flows.PanelMenuItems.Edit, PANEL_UNDER_TEST);

// New panel editor opens when navigating from Panel menu
e2e.components.PanelEditor.General.content().should('be.visible');

// Queries tab is rendered and open by default
e2e.components.PanelEditor.DataPane.content().should('be.visible');

// We expect row with refId A to exist and be visible
e2e.components.QueryEditorRows.rows().within(rows => {
expect(rows.length).equals(1);
});

// Add query button should be visible and clicking on it should create a new row
e2e.components.QueryTab.addQuery()
.scrollIntoView()
.should('be.visible')
.click();

// We expect row with refId A and B to exist and be visible
e2e.components.QueryEditorRows.rows().within(rows => {
expect(rows.length).equals(2);
});

// Remove refId A
e2e.components.QueryEditorRow.actionButton('Remove query')
.eq(0)
.should('be.visible')
.click();

// We expect row with refId B to exist and be visible
e2e.components.QueryEditorRows.rows().within(rows => {
expect(rows.length).equals(1);
});

// Duplicate refId B
e2e.components.QueryEditorRow.actionButton('Duplicate query')
.eq(0)
.should('be.visible')
.click();

// We expect row with refId Band and A to exist and be visible
e2e.components.QueryEditorRows.rows().within(rows => {
expect(rows.length).equals(2);
});

// Change to CSV Metric Values scenario for A
e2e.components.DataSource.TestData.QueryTab.scenarioSelect()
.eq(1)
.select('CSV Metric Values');

// Change order or query rows
// Check the order of the rows before
e2e.components.QueryEditorRows.rows()
.eq(0)
.within(() => {
e2e.components.QueryEditorRow.title('B').should('be.visible');
});

e2e.components.QueryEditorRows.rows()
.eq(1)
.within(() => {
e2e.components.QueryEditorRow.title('A').should('be.visible');
});

// Change so A is first
e2e.components.QueryEditorRow.actionButton('Move query up')
.eq(1)
.click();

// Check the order of the rows after change
e2e.components.QueryEditorRows.rows()
.eq(0)
.within(() => {
e2e.components.QueryEditorRow.title('A').should('be.visible');
});

e2e.components.QueryEditorRows.rows()
.eq(1)
.within(() => {
e2e.components.QueryEditorRow.title('B').should('be.visible');
});

// Disable / enable row
expectInspectorResultAndClose(keys => {
const length = keys.length;
expect(keys[length - 2].innerText).equals('A:');
expect(keys[length - 1].innerText).equals('B:');
});

// Disable row with refId B
e2e.components.QueryEditorRow.actionButton('Disable/enable query')
.eq(1)
.should('be.visible')
.click();

expectInspectorResultAndClose(keys => {
const length = keys.length;
expect(keys[length - 1].innerText).equals('A:');
});

// Enable row with refId B
e2e.components.QueryEditorRow.actionButton('Disable/enable query')
.eq(1)
.should('be.visible')
.click();

expectInspectorResultAndClose(keys => {
const length = keys.length;
expect(keys[length - 2].innerText).equals('A:');
expect(keys[length - 1].innerText).equals('B:');
});
},
});

const expectInspectorResultAndClose = (expectCallBack: (keys: any[]) => void) => {
e2e.components.QueryTab.queryInspectorButton()
.should('be.visible')
.click();

e2e.components.PanelInspector.Query.refreshButton()
.should('be.visible')
.click();

e2e.components.PanelInspector.Query.jsonObjectKeys()
.should('be.visible')
.within((keys: any) => expectCallBack(keys));

e2e.components.Drawer.General.close()
.should('be.visible')
.click();
};
@@ -1,74 +1,171 @@
|
||||
import { e2e } from '@grafana/e2e';
|
||||
|
||||
// This test should really be broken into several smaller tests
|
||||
e2e.scenario({
|
||||
describeName: 'Variables',
|
||||
itName: 'Query Variables CRUD',
|
||||
addScenarioDataSource: true,
|
||||
addScenarioDashBoard: true,
|
||||
skipScenario: false,
|
||||
scenario: () => {
|
||||
e2e.getScenarioContext().then(({ lastAddedDashboardUid }: any) => {
|
||||
// skipped scenario helper because of some perf issue upgrading cypress to 4.5.0 and splitted the whole test into smaller
|
||||
// several it functions. Very important to keep the order of these it functions because they have dependency in the order
|
||||
// https://github.com/cypress-io/cypress/issues/5987
|
||||
// https://github.com/cypress-io/cypress/issues/6023#issuecomment-574031655
|
||||
describe('Variables', () => {
|
||||
let lastUid = '';
|
||||
let lastData = '';
|
||||
let variables: VariablesData[] = [
|
||||
{ name: 'query1', query: '*', label: 'query1-label', options: ['All', 'A', 'B', 'C'], selectedOption: 'A' },
|
||||
{
|
||||
name: 'query2',
|
||||
query: '$query1.*',
|
||||
label: 'query2-label',
|
||||
options: ['All', 'AA', 'AB', 'AC'],
|
||||
selectedOption: 'AA',
|
||||
},
|
||||
{
|
||||
name: 'query3',
|
||||
query: '$query1.$query2.*',
|
||||
label: 'query3-label',
|
||||
options: ['All', 'AAA', 'AAB', 'AAC'],
|
||||
selectedOption: 'AAA',
|
||||
},
|
||||
];
|
||||
|
||||
beforeEach(() => {
|
||||
e2e.flows.login('admin', 'admin');
|
||||
if (!lastUid || !lastData) {
|
||||
e2e.flows.addDataSource();
|
||||
e2e.flows.addDashboard();
|
||||
} else {
|
||||
e2e.setScenarioContext({ lastAddedDataSource: lastData, lastAddedDashboardUid: lastUid });
|
||||
}
|
||||
|
||||
e2e.getScenarioContext().then(({ lastAddedDashboardUid, lastAddedDataSource }: any) => {
|
||||
e2e.flows.openDashboard(lastAddedDashboardUid);
|
||||
lastUid = lastAddedDashboardUid;
|
||||
lastData = lastAddedDataSource;
|
||||
});
|
||||
});
|
||||
|
||||
it(`asserts defaults`, () => {
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings').click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables').click();
|
||||
e2e.pages.Dashboard.Settings.Variables.List.addVariableCTA().click();
|
||||
|
||||
assertDefaultsForNewVariable();
|
||||
});
|
||||
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('General').click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables').click();
|
||||
e2e.pages.Dashboard.Settings.Variables.List.addVariableCTA().click();
|
||||
variables.forEach((variable, index) => {
|
||||
it(`creates variable ${variable.name}`, () => {
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings').click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables').click();
|
||||
|
||||
let queryVariables: QueryVariableData[] = [
|
||||
{
|
||||
name: 'query1',
|
||||
query: '*',
|
||||
label: 'query1-label',
|
||||
options: ['All', 'A', 'B', 'C'],
|
||||
selectedOption: 'A',
|
||||
},
|
||||
{
|
||||
name: 'query2',
|
||||
query: '$query1.*',
|
||||
label: 'query2-label',
|
||||
options: ['All', 'AA', 'AB', 'AC'],
|
||||
selectedOption: 'AA',
|
||||
},
|
||||
{
|
||||
name: 'query3',
|
||||
query: '$query1.$query2.*',
|
||||
label: 'query3-label',
|
||||
options: ['All', 'AAA', 'AAB', 'AAC'],
|
||||
selectedOption: 'AAA',
|
||||
},
|
||||
];
|
||||
if (index === 0) {
|
||||
e2e.pages.Dashboard.Settings.Variables.List.addVariableCTA().click();
|
||||
} else {
|
||||
e2e.pages.Dashboard.Settings.Variables.List.newButton().click();
|
||||
}
|
||||
|
||||
assertAdding3dependantQueryVariablesScenario(queryVariables);
|
||||
const { name, label, query, options, selectedOption } = variable;
|
||||
e2e.getScenarioContext().then(({ lastAddedDataSource }: any) => {
|
||||
createQueryVariable({
|
||||
dataSourceName: lastAddedDataSource,
|
||||
name,
|
||||
label,
|
||||
query,
|
||||
options,
|
||||
selectedOption,
|
||||
});
|
||||
});
|
||||
|
||||
// assert select updates
|
||||
assertSelects(queryVariables);
|
||||
e2e.pages.Dashboard.Settings.General.saveDashBoard()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.SaveDashboardModal.save()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.flows.assertSuccessNotification();
|
||||
|
||||
// assert that duplicate works
|
||||
queryVariables = assertDuplicateItem(queryVariables);
|
||||
e2e.components.BackButton.backArrow()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
});
|
||||
});
|
||||
|
||||
// assert that delete works
|
||||
queryVariables = assertDeleteItem(queryVariables);
|
||||
it(`asserts submenus`, () => {
|
||||
assertVariableLabelsAndComponents(variables);
|
||||
});
|
||||
|
||||
// assert that update works
|
||||
queryVariables = assertUpdateItem(queryVariables);
|
||||
it(`asserts variable table`, () => {
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
|
||||
// assert that move down works
|
||||
queryVariables = assertMoveDownItem(queryVariables);
|
||||
assertVariableTable(variables);
|
||||
});
|
||||
|
||||
// assert that move up works
|
||||
assertMoveUpItem(queryVariables);
|
||||
},
|
||||
it(`asserts variable selects`, () => {
|
||||
assertSelects(variables);
|
||||
});
|
||||
|
||||
it(`asserts duplicate variable`, () => {
|
||||
// mutates variables
|
||||
variables = assertDuplicateItem(variables);
|
||||
e2e.flows.saveDashboard();
|
||||
});
|
||||
|
||||
it(`asserts delete variable`, () => {
|
||||
// mutates variables
|
||||
variables = assertDeleteItem(variables);
|
||||
e2e.flows.saveDashboard();
|
||||
});
|
||||
|
||||
it(`asserts update variable`, () => {
|
||||
// mutates variables
|
||||
variables = assertUpdateItem(variables);
|
||||
e2e.components.BackButton.backArrow()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.flows.saveDashboard();
|
||||
});
|
||||
|
||||
it(`asserts move variable down`, () => {
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
|
||||
// mutates variables
|
||||
variables = assertMoveDownItem(variables);
|
||||
e2e.flows.saveDashboard();
|
||||
});
|
||||
|
||||
it(`asserts move variable up`, () => {
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
|
||||
// mutates variables
|
||||
assertMoveUpItem(variables);
|
||||
});
|
||||
});
|
||||
|
||||
interface VariablesData {
|
||||
name: string;
|
||||
query: string;
|
||||
label: string;
|
||||
options: string[];
|
||||
selectedOption: string;
|
||||
}
|
||||
|
||||
interface CreateQueryVariableArguments extends VariablesData {
|
||||
dataSourceName: string;
|
||||
}
|
||||
|
||||
const assertDefaultsForNewVariable = () => {
|
||||
logSection('Asserting defaults for new variable');
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.generalNameInput().within(input => {
|
||||
expect(input.attr('placeholder')).equals('name');
|
||||
expect(input.val()).equals('');
|
||||
@@ -88,18 +185,11 @@ const assertDefaultsForNewVariable = () => {
|
||||
.should('have.text', '');
|
||||
});
|
||||
|
||||
e2e()
|
||||
.window()
|
||||
.then((win: any) => {
|
||||
const chainer = 'have.text';
|
||||
const value = '';
|
||||
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.QueryVariable.queryOptionsDataSourceSelect().within(select => {
|
||||
e2e()
|
||||
.get('option:selected')
|
||||
.should(chainer, value);
|
||||
});
|
||||
});
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.QueryVariable.queryOptionsDataSourceSelect().within(select => {
|
||||
e2e()
|
||||
.get('option:selected')
|
||||
.should('have.text', '');
|
||||
});
|
||||
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.QueryVariable.queryOptionsQueryInput().should('not.exist');
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.QueryVariable.queryOptionsRefreshSelect().within(select => {
|
||||
@@ -133,32 +223,16 @@ const assertDefaultsForNewVariable = () => {
|
||||
});
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.previewOfValuesOption().should('not.exist');
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.selectionOptionsCustomAllInput().should('not.exist');
|
||||
logSection('Asserting defaults for new variable, OK!');
|
||||
};
|
||||
|
||||
interface CreateQueryVariableArguments extends QueryVariableData {
|
||||
dataSourceName: string;
|
||||
}
|
||||
|
||||
const createQueryVariable = ({ name, label, dataSourceName, query }: CreateQueryVariableArguments) => {
|
||||
logSection('Creating a Query Variable with', { name, label, dataSourceName, query });
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.generalNameInput().should('be.visible');
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.generalNameInput().type(name);
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.generalLabelInput().type(label);
|
||||
e2e()
|
||||
.window()
|
||||
.then((win: any) => {
|
||||
const text = `${dataSourceName}`;
|
||||
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.QueryVariable.queryOptionsDataSourceSelect()
|
||||
.select(text)
|
||||
.blur();
|
||||
});
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.QueryVariable.queryOptionsDataSourceSelect()
|
||||
.select(`${dataSourceName}`)
|
||||
.blur();
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.QueryVariable.queryOptionsQueryInput()
|
||||
.within(input => {
|
||||
expect(input.attr('placeholder')).equals('metric name or tags query');
|
||||
expect(input.val()).equals('');
|
||||
})
|
||||
.type(query)
|
||||
.blur();
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.previewOfValuesOption().should('exist');
|
||||
@@ -181,10 +255,34 @@ const createQueryVariable = ({ name, label, dataSourceName, query }: CreateQuery
|
||||
expect(input.val()).equals('');
|
||||
});
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.addButton().click();
|
||||
logSection('Creating a Query Variable with required, OK!');
|
||||
};
|
||||
|
||||
const assertVariableTableRow = ({ name, query }: QueryVariableData, index: number, length: number) => {
|
||||
const assertVariableLabelAndComponent = ({ label, options, selectedOption }: VariablesData) => {
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels(label).should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts(selectedOption)
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownDropDown().should('be.visible');
|
||||
for (let optionIndex = 0; optionIndex < options.length; optionIndex++) {
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts(options[optionIndex]).should('be.visible');
|
||||
}
|
||||
};
|
||||
|
||||
const assertVariableLabelsAndComponents = (args: VariablesData[]) => {
|
||||
e2e.pages.Dashboard.SubMenu.submenuItem().should('have.length', args.length);
|
||||
for (let index = 0; index < args.length; index++) {
|
||||
e2e.pages.Dashboard.SubMenu.submenuItem()
|
||||
.eq(index)
|
||||
.within(() => {
|
||||
e2e()
|
||||
.get('label')
|
||||
.contains(args[index].name);
|
||||
});
|
||||
assertVariableLabelAndComponent(args[index]);
|
||||
}
|
||||
};
|
||||
|
||||
const assertVariableTableRow = ({ name, query }: VariablesData, index: number, length: number) => {
|
||||
e2e.pages.Dashboard.Settings.Variables.List.tableRowNameFields(name)
|
||||
.should('exist')
|
||||
.contains(name);
|
||||
@@ -201,8 +299,7 @@ const assertVariableTableRow = ({ name, query }: QueryVariableData, index: numbe
|
||||
e2e.pages.Dashboard.Settings.Variables.List.tableRowRemoveButtons(name).should('exist');
|
||||
};
|
||||
|
||||
const assertVariableTable = (args: QueryVariableData[]) => {
|
||||
logSection('Asserting variable table with', args);
|
||||
const assertVariableTable = (args: VariablesData[]) => {
|
||||
e2e.pages.Dashboard.Settings.Variables.List.table()
|
||||
.should('be.visible')
|
||||
.within(() => {
|
||||
@@ -214,90 +311,197 @@ const assertVariableTable = (args: QueryVariableData[]) => {
|
||||
for (let index = 0; index < args.length; index++) {
|
||||
assertVariableTableRow(args[index], index, args.length);
|
||||
}
|
||||
|
||||
logSection('Asserting variable table, Ok');
|
||||
};
|
||||
|
||||
const assertVariableLabelAndComponent = ({ label, options, selectedOption }: QueryVariableData) => {
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels(label).should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts(selectedOption)
|
||||
const assertSelects = (variables: VariablesData[]) => {
|
||||
// Values in submenus should be
|
||||
// query1: [A] query2: [AA] query3: [AAA]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('A')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownDropDown().should('be.visible');
|
||||
for (let optionIndex = 0; optionIndex < options.length; optionIndex++) {
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts(options[optionIndex]).should('be.visible');
|
||||
}
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('A')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('B')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [All] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.should('be.visible')
|
||||
.should('have.length', 2);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.eq(0)
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BB] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.eq(0)
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBB')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All').should('have.length', 0);
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BB] query3: [BBB]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BB')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BB + BC')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BB + BC] query3: [BBB]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BBB')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BCA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BCB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BCC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BCC')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BBB + BCC')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BB + BC] query3: [BBB + BCC]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BB + BC')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BA')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BA')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BA] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('B')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('A')
|
||||
.should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('B')
|
||||
.should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('C')
|
||||
.should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('A')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('B')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('A')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.should('be.visible')
|
||||
.should('have.length', 2);
|
||||
// Values in submenus should be
|
||||
// query1: [A] query2: [All] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.eq(0)
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('AA')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('A')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('AA')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
// Values in submenus should be
|
||||
// query1: [A] query2: [AA] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.eq(0)
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('AAA')
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('A')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('AA')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('AAA')
|
||||
.should('be.visible')
|
||||
.should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All').should('have.length', 0);
|
||||
};
|
||||
|
||||
const assertVariableLabelsAndComponents = (args: QueryVariableData[]) => {
|
||||
logSection('Asserting variable components and labels');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItem().should('have.length', args.length);
|
||||
for (let index = 0; index < args.length; index++) {
|
||||
e2e.pages.Dashboard.SubMenu.submenuItem()
|
||||
.eq(index)
|
||||
.within(() => {
|
||||
e2e()
|
||||
.get('label')
|
||||
.contains(args[index].name);
|
||||
});
|
||||
assertVariableLabelAndComponent(args[index]);
|
||||
}
|
||||
logSection('Asserting variable components and labels, Ok');
|
||||
};
|
||||
|
||||
const assertAdding3dependantQueryVariablesScenario = (queryVariables: QueryVariableData[]) => {
|
||||
// This creates 3 variables where 2 depends on 1 and 3 depends on 2 and for each added variable
|
||||
// we assert that the variable looks ok in the variable list and that it looks ok in the submenu in dashboard
|
||||
for (let queryVariableIndex = 0; queryVariableIndex < queryVariables.length; queryVariableIndex++) {
|
||||
const { name, label, query, options, selectedOption } = queryVariables[queryVariableIndex];
|
||||
const asserts = queryVariables.slice(0, queryVariableIndex + 1);
|
||||
e2e.getScenarioContext().then(({ lastAddedDataSource }: any) => {
|
||||
createQueryVariable({
|
||||
dataSourceName: lastAddedDataSource,
|
||||
name,
|
||||
label,
|
||||
query,
|
||||
options,
|
||||
selectedOption,
|
||||
});
|
||||
});
|
||||
|
||||
assertVariableTable(asserts);
|
||||
|
||||
e2e.pages.Dashboard.Settings.General.saveDashBoard().click();
|
||||
e2e.pages.SaveDashboardModal.save().click();
|
||||
e2e.flows.assertSuccessNotification();
|
||||
|
||||
e2e.components.BackButton.backArrow().click();
|
||||
|
||||
assertVariableLabelsAndComponents(asserts);
|
||||
|
||||
if (queryVariableIndex < queryVariables.length - 1) {
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings').click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables').click();
|
||||
e2e.pages.Dashboard.Settings.Variables.List.newButton().click();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
interface QueryVariableData {
|
||||
name: string;
|
||||
query: string;
|
||||
label: string;
|
||||
options: string[];
|
||||
selectedOption: string;
|
||||
}
|
||||
|
||||
const logSection = (message: string, args?: any) => {
|
||||
e2e().logToConsole('');
|
||||
e2e().logToConsole(message, args);
|
||||
e2e().logToConsole('===============================================================================');
|
||||
};
|
||||
|
||||
const assertDuplicateItem = (queryVariables: QueryVariableData[]) => {
|
||||
logSection('Asserting variable duplicate');
|
||||
|
||||
const itemToDuplicate = queryVariables[1];
|
||||
const assertDuplicateItem = (variables: VariablesData[]) => {
|
||||
const itemToDuplicate = variables[1];
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings').click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables').click();
|
||||
e2e.pages.Dashboard.Settings.Variables.List.tableRowDuplicateButtons(itemToDuplicate.name)
|
||||
@@ -308,10 +512,10 @@ const assertDuplicateItem = (queryVariables: QueryVariableData[]) => {
|
||||
.within(() => {
|
||||
e2e()
|
||||
.get('tbody > tr')
|
||||
.should('have.length', queryVariables.length + 1);
|
||||
.should('have.length', variables.length + 1);
|
||||
});
|
||||
const newItem = { ...itemToDuplicate, name: `copy_of_${itemToDuplicate.name}` };
|
||||
assertVariableTableRow(newItem, queryVariables.length - 1, queryVariables.length);
|
||||
assertVariableTableRow(newItem, variables.length - 1, variables.length);
|
||||
e2e.pages.Dashboard.Settings.Variables.List.tableRowNameFields(newItem.name).click();
|
||||
|
||||
newItem.label = `copy_of_${itemToDuplicate.label}`;
|
||||
@@ -323,7 +527,9 @@ const assertDuplicateItem = (queryVariables: QueryVariableData[]) => {
|
||||
e2e.pages.SaveDashboardModal.save().click();
|
||||
e2e.flows.assertSuccessNotification();
|
||||
|
||||
e2e.components.BackButton.backArrow().click();
|
||||
e2e.components.BackButton.backArrow()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels(newItem.label).should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts(newItem.selectedOption)
|
||||
@@ -335,14 +541,11 @@ const assertDuplicateItem = (queryVariables: QueryVariableData[]) => {
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts(newItem.options[optionIndex]).should('be.visible');
|
||||
}
|
||||
|
||||
logSection('Asserting variable duplicate, OK!');
|
||||
return [...queryVariables, newItem];
|
||||
return [...variables, newItem];
|
||||
};
|
||||
|
||||
const assertDeleteItem = (queryVariables: QueryVariableData[]) => {
|
||||
logSection('Asserting variable delete');
|
||||
|
||||
const itemToDelete = queryVariables[1];
|
||||
const assertDeleteItem = (variables: VariablesData[]) => {
|
||||
const itemToDelete = variables[1];
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings').click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables').click();
|
||||
|
||||
@@ -352,26 +555,26 @@ const assertDeleteItem = (queryVariables: QueryVariableData[]) => {
|
||||
.within(() => {
|
||||
e2e()
|
||||
.get('tbody > tr')
|
||||
.should('have.length', queryVariables.length - 1);
|
||||
.should('have.length', variables.length - 1);
|
||||
});
|
||||
|
||||
e2e.pages.Dashboard.Settings.General.saveDashBoard().click();
|
||||
e2e.pages.SaveDashboardModal.save().click();
|
||||
e2e.flows.assertSuccessNotification();
|
||||
|
||||
e2e.components.BackButton.backArrow().click();
|
||||
e2e.components.BackButton.backArrow()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels(itemToDelete.label).should('not.exist');
|
||||
|
||||
logSection('Asserting variable delete, OK!');
|
||||
|
||||
return queryVariables.filter(item => item.name !== itemToDelete.name);
|
||||
return variables.filter(item => item.name !== itemToDelete.name);
|
||||
};
|
||||
|
||||
const assertUpdateItem = (data: QueryVariableData[]) => {
|
||||
const queryVariables = [...data];
|
||||
const assertUpdateItem = (data: VariablesData[]) => {
|
||||
const variables = [...data];
|
||||
// updates an item to a constant variable instead
|
||||
const itemToUpdate = queryVariables[1];
|
||||
const itemToUpdate = variables[1];
|
||||
let updatedItem = {
|
||||
...itemToUpdate,
|
||||
name: `update_of_${itemToUpdate.name}`,
|
||||
@@ -381,8 +584,7 @@ const assertUpdateItem = (data: QueryVariableData[]) => {
|
||||
selectedOption: 'undefined',
|
||||
};
|
||||
|
||||
logSection('Asserting variable update');
|
||||
queryVariables[1] = updatedItem;
|
||||
variables[1] = updatedItem;
|
||||
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings').click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables').click();
|
||||
@@ -406,33 +608,29 @@ const assertUpdateItem = (data: QueryVariableData[]) => {
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.generalHideSelect().select('');
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.ConstantVariable.constantOptionsQueryInput().type(updatedItem.query);
|
||||
|
||||
e2e.components.BackButton.backArrow().click();
|
||||
e2e.components.BackButton.backArrow()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
|
||||
e2e()
|
||||
.window()
|
||||
.then((win: any) => {
|
||||
queryVariables[1].selectedOption = 'A constant';
|
||||
assertVariableLabelAndComponent(queryVariables[1]);
|
||||
});
|
||||
variables[1].selectedOption = 'A constant';
|
||||
assertVariableLabelAndComponent(variables[1]);
|
||||
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings').click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables').click();
|
||||
|
||||
assertVariableTableRow(queryVariables[1], 1, queryVariables.length);
|
||||
assertVariableTableRow(variables[1], 1, variables.length);
|
||||
|
||||
queryVariables[1].selectedOption = 'A constant';
|
||||
variables[1].selectedOption = 'A constant';
|
||||
|
||||
logSection('Asserting variable update, OK!');
|
||||
return queryVariables;
|
||||
return variables;
|
||||
};
|
||||
|
||||
const assertMoveDownItem = (data: QueryVariableData[]) => {
|
||||
logSection('Asserting variable move down');
|
||||
const queryVariables = [...data];
|
||||
e2e.pages.Dashboard.Settings.Variables.List.tableRowArrowDownButtons(queryVariables[0].name).click();
|
||||
const temp = { ...queryVariables[0] };
|
||||
queryVariables[0] = { ...queryVariables[1] };
|
||||
queryVariables[1] = temp;
|
||||
const assertMoveDownItem = (data: VariablesData[]) => {
|
||||
const variables = [...data];
|
||||
e2e.pages.Dashboard.Settings.Variables.List.tableRowArrowDownButtons(variables[0].name).click();
|
||||
const temp = { ...variables[0] };
|
||||
variables[0] = { ...variables[1] };
|
||||
variables[1] = temp;
|
||||
e2e.pages.Dashboard.Settings.Variables.List.table().within(() => {
|
||||
e2e()
|
||||
.get('tbody > tr')
|
||||
@@ -441,11 +639,11 @@ const assertMoveDownItem = (data: QueryVariableData[]) => {
|
||||
e2e()
|
||||
.get('td')
|
||||
.eq(0)
|
||||
.contains(queryVariables[0].name);
|
||||
.contains(variables[0].name);
|
||||
e2e()
|
||||
.get('td')
|
||||
.eq(1)
|
||||
.contains(queryVariables[0].query);
|
||||
.contains(variables[0].query);
|
||||
});
|
||||
e2e()
|
||||
.get('tbody > tr')
|
||||
@@ -454,130 +652,29 @@ const assertMoveDownItem = (data: QueryVariableData[]) => {
|
||||
e2e()
|
||||
.get('td')
|
||||
.eq(0)
|
||||
.contains(queryVariables[1].name);
|
||||
.contains(variables[1].name);
|
||||
e2e()
|
||||
.get('td')
|
||||
.eq(1)
|
||||
.contains(queryVariables[1].query);
|
||||
.contains(variables[1].query);
|
||||
});
|
||||
});
|
||||
|
||||
e2e.components.BackButton.backArrow().click();
|
||||
e2e.components.BackButton.backArrow()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
|
||||
assertVariableLabelsAndComponents(queryVariables);
|
||||
assertVariableLabelsAndComponents(variables);
|
||||
|
||||
logSection('Asserting variable move down, OK!');
|
||||
|
||||
return queryVariables;
|
||||
return variables;
|
||||
};
|
||||
|
||||
const assertSelects = (queryVariables: QueryVariableData[]) => {
|
||||
// Values in submenus should be
|
||||
// query1: [A] query2: [AA] query3: [AAA]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('A').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('A').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('B').click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [All] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All').should('have.length', 2);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.eq(0)
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB').click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BB] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All').should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.eq(0)
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBB').click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All').should('have.length', 0);
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BB] query3: [BBB]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BB').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC').click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BB + BC').should('have.length', 1);
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BB + BC] query3: [BBB]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BBB').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BBC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BCA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BCB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BCC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BCC').click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BBB + BCC').should('have.length', 1);
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BB + BC] query3: [BBB + BCC]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BB + BC').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BA').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BA').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BB').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('BC').click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('BA').should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All').should('have.length', 1);
|
||||
// Values in submenus should be
|
||||
// query1: [B] query2: [BA] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('B').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('A').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('B').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('C').should('be.visible');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('A').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('B').click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('A').should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All').should('have.length', 2);
|
||||
// Values in submenus should be
|
||||
// query1: [A] query2: [All] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.eq(0)
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('AA').click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('A').should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('AA').should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All').should('have.length', 1);
|
||||
// Values in submenus should be
|
||||
// query1: [A] query2: [AA] query3: [All]
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All')
|
||||
.eq(0)
|
||||
.click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('AAA').click();
|
||||
e2e.pages.Dashboard.Toolbar.navBar().click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('A').should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('AA').should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('AAA').should('have.length', 1);
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownValueLinkTexts('All').should('have.length', 0);
|
||||
};
|
||||
|
||||
const assertMoveUpItem = (data: QueryVariableData[]) => {
|
||||
logSection('Asserting variable move up');
|
||||
const queryVariables = [...data];
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings').click();
|
||||
e2e.pages.Dashboard.Settings.General.sectionItems('Variables').click();
|
||||
|
||||
e2e.pages.Dashboard.Settings.Variables.List.tableRowArrowUpButtons(queryVariables[1].name).click();
|
||||
const temp = { ...queryVariables[0] };
|
||||
queryVariables[0] = { ...queryVariables[1] };
|
||||
queryVariables[1] = temp;
|
||||
const assertMoveUpItem = (data: VariablesData[]) => {
|
||||
const variables = [...data];
|
||||
e2e.pages.Dashboard.Settings.Variables.List.tableRowArrowUpButtons(variables[1].name).click();
|
||||
const temp = { ...variables[0] };
|
||||
variables[0] = { ...variables[1] };
|
||||
variables[1] = temp;
|
||||
e2e.pages.Dashboard.Settings.Variables.List.table().within(() => {
|
||||
e2e()
|
||||
.get('tbody > tr')
|
||||
@@ -586,11 +683,11 @@ const assertMoveUpItem = (data: QueryVariableData[]) => {
|
||||
e2e()
|
||||
.get('td')
|
||||
.eq(0)
|
||||
.contains(queryVariables[0].name);
|
||||
.contains(variables[0].name);
|
||||
e2e()
|
||||
.get('td')
|
||||
.eq(1)
|
||||
.contains(queryVariables[0].query);
|
||||
.contains(variables[0].query);
|
||||
});
|
||||
e2e()
|
||||
.get('tbody > tr')
|
||||
@@ -599,19 +696,19 @@ const assertMoveUpItem = (data: QueryVariableData[]) => {
|
||||
e2e()
|
||||
.get('td')
|
||||
.eq(0)
|
||||
.contains(queryVariables[1].name);
|
||||
.contains(variables[1].name);
|
||||
e2e()
|
||||
.get('td')
|
||||
.eq(1)
|
||||
.contains(queryVariables[1].query);
|
||||
.contains(variables[1].query);
|
||||
});
|
||||
});
|
||||
|
||||
e2e.components.BackButton.backArrow().click();
|
||||
e2e.components.BackButton.backArrow()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
|
||||
assertVariableLabelsAndComponents(queryVariables);
|
||||
assertVariableLabelsAndComponents(variables);
|
||||
|
||||
logSection('Asserting variable move up, OK!');
|
||||
|
||||
return queryVariables;
|
||||
return variables;
|
||||
};
|
||||
|
||||
e2e/suite1/specs/select-focus.spec.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
import { e2e } from '@grafana/e2e';
|
||||
|
||||
e2e.scenario({
|
||||
describeName: 'Select focus/unfocus tests',
|
||||
itName: 'Tests select focus/unfocus scenarios',
|
||||
addScenarioDataSource: false,
|
||||
addScenarioDashBoard: false,
|
||||
skipScenario: false,
|
||||
scenario: () => {
|
||||
e2e.flows.openDashboard('5SdHCadmz');
|
||||
e2e.pages.Dashboard.Toolbar.toolbarItems('Dashboard settings').click();
|
||||
|
||||
e2e.components.FolderPicker.container()
|
||||
.should('be.visible')
|
||||
.within(() => {
|
||||
e2e.components.Select.input()
|
||||
.should('be.visible')
|
||||
.click();
|
||||
|
||||
e2e.components.Select.option()
|
||||
.should('be.visible')
|
||||
.first()
|
||||
.click();
|
||||
|
||||
e2e.components.Select.input()
|
||||
.should('be.visible')
|
||||
.should('have.focus');
|
||||
});
|
||||
|
||||
e2e.pages.Dashboard.Settings.General.title().click();
|
||||
|
||||
e2e.components.FolderPicker.container()
|
||||
.should('be.visible')
|
||||
.within(() => {
|
||||
e2e.components.Select.input()
|
||||
.should('be.visible')
|
||||
.should('not.have.focus');
|
||||
});
|
||||
},
|
||||
});
|
||||
@@ -4,4 +4,5 @@ RUNDIR=e2e/tmp
|
||||
PIDFILE=$RUNDIR/pid
|
||||
PACKAGE_FILE=dist/grafana-*linux-amd64.tar.gz
|
||||
PROV_DIR=$RUNDIR/conf/provisioning
|
||||
PORT=3001
|
||||
DEFAULT_HOST=localhost
|
||||
DEFAULT_PORT=3001
|
||||
|
||||
e2e/wait-for-grafana (new executable file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
. e2e/variables
|
||||
|
||||
HOST=${HOST:-$DEFAULT_HOST}
|
||||
PORT=${PORT:-$DEFAULT_PORT}
|
||||
|
||||
echo -e "Waiting for grafana-server to finish starting, host=$HOST, port=$PORT"
|
||||
|
||||
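# Probe the port with nc once per second; the timeout wrapper gives up after 60 seconds if the server never starts listening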
timeout 60 bash -c 'until nc -z $0 $1; do sleep 1; done' $HOST $PORT
|
||||
go.mod (2 lines changed)
@@ -30,7 +30,7 @@ require (
|
||||
github.com/gorilla/websocket v1.4.1
|
||||
github.com/gosimple/slug v1.4.2
|
||||
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.57.0
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.60.0
|
||||
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd
|
||||
github.com/hashicorp/go-plugin v1.2.2
|
||||
github.com/hashicorp/go-version v1.1.0
|
||||
|
||||
go.sum (6 lines changed)
@@ -135,8 +135,8 @@ github.com/gosimple/slug v1.4.2 h1:jDmprx3q/9Lfk4FkGZtvzDQ9Cj9eAmsjzeQGp24PeiQ=
|
||||
github.com/gosimple/slug v1.4.2/go.mod h1:ER78kgg1Mv0NQGlXiDe57DpCyfbNywXXZ9mIorhxAf0=
|
||||
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4 h1:SPdxCL9BChFTlyi0Khv64vdCW4TMna8+sxL7+Chx+Ag=
|
||||
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4/go.mod h1:nc0XxBzjeGcrMltCDw269LoWF9S8ibhgxolCdA1R8To=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.57.0 h1:eJnAjUh50uGimDCyK9gA2tzFzMntL6j3vWiSfTNj10g=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.57.0/go.mod h1:cJqtJv4uuWOB3SAUToTMa8yQoNxEtF07CabXHwsr5fc=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.60.0 h1:9IhNRdoaRi2RQflmNQHv2VuqXRGigpLacoYHHXZ+XBU=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.60.0/go.mod h1:V3mq3GjPrIOc4K9jOm3mP5scJQgx9NJaYQ3cUlxm8vE=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0 h1:0IKlLyQ3Hs9nDaiK5cSHAGmcQEIC8l2Ts1u6x5Dfrqg=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0/go.mod h1:mJzapYve32yjrKlk9GbyCZHuPgZsrbyIbyKhSzOpg6s=
|
||||
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=
|
||||
@@ -212,6 +212,8 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0j
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
||||
github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77 h1:7GoSOOW2jpsfkntVKaS2rAr1TJqfcxotyaUcuxoZSzg=
|
||||
github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
|
||||
github.com/mitchellh/reflectwalk v1.0.1 h1:FVzMWA5RllMAKIdUSC8mdWo3XtwoecrH79BY70sEEpE=
|
||||
github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
|
||||
@@ -14,4 +14,7 @@ module.exports = {
|
||||
setupFiles: ['jest-canvas-mock', './public/test/jest-shim.ts', './public/test/jest-setup.ts'],
|
||||
snapshotSerializers: ['enzyme-to-json/serializer'],
|
||||
globals: { 'ts-jest': { isolatedModules: true } },
|
||||
moduleNameMapper: {
|
||||
'\\.svg': '<rootDir>/public/test/mocks/svg.ts',
|
||||
},
|
||||
};
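The mapped mock module itself is not included in this diff; a minimal stand-in (a sketch only, not the actual file) just has to give Jest an importable value in place of the real .svg asset:

// public/test/mocks/svg.ts - hypothetical minimal mock
export default 'svg';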
|
||||
|
||||
@@ -2,5 +2,5 @@
|
||||
"npmClient": "yarn",
|
||||
"useWorkspaces": true,
|
||||
"packages": ["packages/*"],
|
||||
"version": "7.0.0-pre.0"
|
||||
"version": "7.0.0-beta.2"
|
||||
}
|
||||
|
||||
@@ -3,14 +3,14 @@
|
||||
"license": "Apache-2.0",
|
||||
"private": true,
|
||||
"name": "grafana",
|
||||
"version": "7.0.0-pre",
|
||||
"version": "7.0.0-beta2",
|
||||
"repository": "github:grafana/grafana",
|
||||
"scripts": {
|
||||
"api-tests": "jest --notify --watch --config=devenv/e2e-api-tests/jest.js",
|
||||
"build": "grunt build",
|
||||
"dev": "webpack --progress --colors --config scripts/webpack/webpack.dev.js",
|
||||
"e2e": "./e2e/start-and-run-suite",
|
||||
"e3e:debug": "./e2e/start-and-run-suite debug",
|
||||
"e2e:debug": "./e2e/start-and-run-suite debug",
|
||||
"e2e:dev": "./e2e/start-and-run-suite dev",
|
||||
"jest": "jest --notify --watch",
|
||||
"jest-ci": "mkdir -p reports/junit && export JEST_JUNIT_OUTPUT_DIR=reports/junit && jest --ci --reporters=default --reporters=jest-junit --maxWorkers 2",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/data",
|
||||
"version": "7.0.0-pre.0",
|
||||
"version": "7.0.0-beta.2",
|
||||
"description": "Grafana Data Library",
|
||||
"keywords": [
|
||||
"typescript"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { ArrayDataFrame } from './ArrayDataFrame';
|
||||
import { toDataFrameDTO } from './processDataFrame';
|
||||
import { FieldType } from '../types';
|
||||
import { FieldType, DataFrame } from '../types';
|
||||
|
||||
describe('Array DataFrame', () => {
|
||||
const input = [
|
||||
@@ -92,4 +92,14 @@ describe('Array DataFrame', () => {
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('Survives ES6 operations', () => {
|
||||
const copy: DataFrame = {
|
||||
...frame,
|
||||
name: 'hello',
|
||||
};
|
||||
expect(copy.fields).toEqual(frame.fields);
|
||||
expect(copy.length).toEqual(frame.length);
|
||||
expect(copy.length).toEqual(input.length);
|
||||
});
|
||||
});
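The 'Survives ES6 operations' test above is what motivates the ArrayDataFrame change in the next hunk: object spread copies only own enumerable properties, so fields and length defined as prototype getters are silently dropped by { ...frame }. A minimal TypeScript sketch of the difference (illustrative only, not part of this diff):

// Getters live on the class prototype, so spread drops them entirely.
class WithGetter {
  private items = [1, 2, 3];
  get length() {
    return this.items.length;
  }
}

// Instance properties are own and enumerable, so spread keeps them.
class WithProperty {
  items = [1, 2, 3];
  length = this.items.length;
}

const a = { ...new WithGetter() }; // a.length === undefined
const b = { ...new WithProperty() }; // b.length === 3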
|
||||
|
||||
@@ -40,14 +40,16 @@ export class ArrayDataFrame<T = any> extends FunctionalVector<T> implements Data
|
||||
refId?: string;
|
||||
meta?: QueryResultMeta;
|
||||
|
||||
private theFields: Field[] = [];
|
||||
fields: Field[] = [];
|
||||
length = 0;
|
||||
|
||||
constructor(private source: T[], names?: string[]) {
|
||||
super();
|
||||
|
||||
this.length = source.length;
|
||||
const first: any = source.length ? source[0] : {};
|
||||
if (names) {
|
||||
this.theFields = names.map(name => {
|
||||
this.fields = names.map(name => {
|
||||
return {
|
||||
name,
|
||||
type: guessFieldTypeFromNameAndValue(name, first[name]),
|
||||
@@ -64,7 +66,7 @@ export class ArrayDataFrame<T = any> extends FunctionalVector<T> implements Data
|
||||
* Add a field for each property in the object. This will guess the type
|
||||
*/
|
||||
setFieldsFromObject(obj: any) {
|
||||
this.theFields = Object.keys(obj).map(name => {
|
||||
this.fields = Object.keys(obj).map(name => {
|
||||
return {
|
||||
name,
|
||||
type: guessFieldTypeFromNameAndValue(name, obj[name]),
|
||||
@@ -94,15 +96,6 @@ export class ArrayDataFrame<T = any> extends FunctionalVector<T> implements Data
|
||||
return field;
|
||||
}
|
||||
|
||||
get fields(): Field[] {
|
||||
return this.theFields;
|
||||
}
|
||||
|
||||
// Defined for Vector interface
|
||||
get length() {
|
||||
return this.source.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an object with a property for each field in the DataFrame
|
||||
*/
|
||||
|
||||
@@ -1,51 +1,11 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
import { resultsToDataFrames, grafanaDataFrameToArrowTable, arrowTableToDataFrame } from './ArrowDataFrame';
|
||||
import { grafanaDataFrameToArrowTable, arrowTableToDataFrame } from './ArrowDataFrame';
|
||||
import { toDataFrameDTO, toDataFrame } from './processDataFrame';
|
||||
import { FieldType } from '../types';
|
||||
import { Table } from 'apache-arrow';
|
||||
|
||||
/* eslint-disable */
|
||||
const resp = {
|
||||
results: {
|
||||
'': {
|
||||
refId: '',
|
||||
dataframes: [
|
||||
'QVJST1cxAACsAQAAEAAAAAAACgAOAAwACwAEAAoAAAAUAAAAAAAAAQMACgAMAAAACAAEAAoAAAAIAAAAUAAAAAIAAAAoAAAABAAAAOD+//8IAAAADAAAAAIAAABHQwAABQAAAHJlZklkAAAAAP///wgAAAAMAAAAAAAAAAAAAAAEAAAAbmFtZQAAAAACAAAAlAAAAAQAAACG////FAAAAGAAAABgAAAAAAADAWAAAAACAAAALAAAAAQAAABQ////CAAAABAAAAAGAAAAbnVtYmVyAAAEAAAAdHlwZQAAAAB0////CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAAAAAABm////AAACAAAAAAAAABIAGAAUABMAEgAMAAAACAAEABIAAAAUAAAAbAAAAHQAAAAAAAoBdAAAAAIAAAA0AAAABAAAANz///8IAAAAEAAAAAQAAAB0aW1lAAAAAAQAAAB0eXBlAAAAAAgADAAIAAQACAAAAAgAAAAQAAAABAAAAFRpbWUAAAAABAAAAG5hbWUAAAAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAVGltZQAAAAC8AAAAFAAAAAAAAAAMABYAFAATAAwABAAMAAAA0AAAAAAAAAAUAAAAAAAAAwMACgAYAAwACAAEAAoAAAAUAAAAWAAAAA0AAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABoAAAAAAAAAGgAAAAAAAAAAAAAAAAAAABoAAAAAAAAAGgAAAAAAAAAAAAAAAIAAAANAAAAAAAAAAAAAAAAAAAADQAAAAAAAAAAAAAAAAAAAAAAAAAAFp00e2XHFQAIo158ZccVAPqoiH1lxxUA7K6yfmXHFQDetNx/ZccVANC6BoFlxxUAwsAwgmXHFQC0xlqDZccVAKbMhIRlxxUAmNKuhWXHFQCK2NiGZccVAHzeAohlxxUAbuQsiWXHFQAAAAAAAAhAAAAAAAAACEAAAAAAAAAIQAAAAAAAABRAAAAAAAAAFEAAAAAAAAAUQAAAAAAAAAhAAAAAAAAACEAAAAAAAAAIQAAAAAAAABRAAAAAAAAAFEAAAAAAAAAUQAAAAAAAAAhAEAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADgAAAAAAAMAAQAAALgBAAAAAAAAwAAAAAAAAADQAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAABQAAAAAgAAACgAAAAEAAAA4P7//wgAAAAMAAAAAgAAAEdDAAAFAAAAcmVmSWQAAAAA////CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAAIAAACUAAAABAAAAIb///8UAAAAYAAAAGAAAAAAAAMBYAAAAAIAAAAsAAAABAAAAFD///8IAAAAEAAAAAYAAABudW1iZXIAAAQAAAB0eXBlAAAAAHT///8IAAAADAAAAAAAAAAAAAAABAAAAG5hbWUAAAAAAAAAAGb///8AAAIAAAAAAAAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABsAAAAdAAAAAAACgF0AAAAAgAAADQAAAAEAAAA3P///wgAAAAQAAAABAAAAHRpbWUAAAAABAAAAHR5cGUAAAAACAAMAAgABAAIAAAACAAAABAAAAAEAAAAVGltZQAAAAAEAAAAbmFtZQAAAAAAAAAAAAAGAAgABgAGAAAAAAADAAQAAABUaW1lAAAAANgBAABBUlJPVzE=',
|
||||
'QVJST1cxAAC8AQAAEAAAAAAACgAOAAwACwAEAAoAAAAUAAAAAAAAAQMACgAMAAAACAAEAAoAAAAIAAAAUAAAAAIAAAAoAAAABAAAAND+//8IAAAADAAAAAIAAABHQgAABQAAAHJlZklkAAAA8P7//wgAAAAMAAAAAAAAAAAAAAAEAAAAbmFtZQAAAAACAAAApAAAAAQAAAB2////FAAAAGgAAABoAAAAAAADAWgAAAACAAAALAAAAAQAAABA////CAAAABAAAAAGAAAAbnVtYmVyAAAEAAAAdHlwZQAAAABk////CAAAABQAAAAJAAAAR0Itc2VyaWVzAAAABAAAAG5hbWUAAAAAAAAAAF7///8AAAIACQAAAEdCLXNlcmllcwASABgAFAATABIADAAAAAgABAASAAAAFAAAAGwAAAB0AAAAAAAKAXQAAAACAAAANAAAAAQAAADc////CAAAABAAAAAEAAAAdGltZQAAAAAEAAAAdHlwZQAAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAQAAABUaW1lAAAAAAQAAABuYW1lAAAAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAFRpbWUAAAAAvAAAABQAAAAAAAAADAAWABQAEwAMAAQADAAAANAAAAAAAAAAFAAAAAAAAAMDAAoAGAAMAAgABAAKAAAAFAAAAFgAAAANAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAAAAAAAAABoAAAAAAAAAAAAAAAAAAAAaAAAAAAAAABoAAAAAAAAAAAAAAACAAAADQAAAAAAAAAAAAAAAAAAAA0AAAAAAAAAAAAAAAAAAAAAAAAAABadNHtlxxUACKNefGXHFQD6qIh9ZccVAOyusn5lxxUA3rTcf2XHFQDQugaBZccVAMLAMIJlxxUAtMZag2XHFQCmzISEZccVAJjSroVlxxUAitjYhmXHFQB83gKIZccVAG7kLIllxxUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAABAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAABAAAAAAAAAAEAAAAAAAAAAABAAAAAMABQAEgAMAAgABAAMAAAAEAAAACwAAAA4AAAAAAADAAEAAADIAQAAAAAAAMAAAAAAAAAA0AAAAAAAAAAAAAAAAAAAAAAACgAMAAAACAAEAAoAAAAIAAAAUAAAAAIAAAAoAAAABAAAAND+//8IAAAADAAAAAIAAABHQgAABQAAAHJlZklkAAAA8P7//wgAAAAMAAAAAAAAAAAAAAAEAAAAbmFtZQAAAAACAAAApAAAAAQAAAB2////FAAAAGgAAABoAAAAAAADAWgAAAACAAAALAAAAAQAAABA////CAAAABAAAAAGAAAAbnVtYmVyAAAEAAAAdHlwZQAAAABk////CAAAABQAAAAJAAAAR0Itc2VyaWVzAAAABAAAAG5hbWUAAAAAAAAAAF7///8AAAIACQAAAEdCLXNlcmllcwASABgAFAATABIADAAAAAgABAASAAAAFAAAAGwAAAB0AAAAAAAKAXQAAAACAAAANAAAAAQAAADc////CAAAABAAAAAEAAAAdGltZQAAAAAEAAAAdHlwZQAAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAQAAABUaW1lAAAAAAQAAABuYW1lAAAAAAAAAAAAAAYACAAGAAYAAAAAAAMABAAAAFRpbWUAAAAA6AEAAEFSUk9XMQ==',
|
||||
],
|
||||
series: [] as any[],
|
||||
tables: null as any,
|
||||
frames: null as any,
|
||||
},
|
||||
},
|
||||
};
|
||||
/* eslint-enable */
|
||||
|
||||
describe('GEL Utils', () => {
|
||||
test('should parse output with dataframe', () => {
|
||||
const frames = resultsToDataFrames(resp);
|
||||
for (const frame of frames) {
|
||||
console.log('Frame', frame.refId);
|
||||
for (const field of frame.fields) {
|
||||
console.log(' > ', field.name, field.labels);
|
||||
console.log(' (values)= ', field.values.toArray());
|
||||
}
|
||||
}
|
||||
|
||||
const norm = frames.map(f => toDataFrameDTO(f));
|
||||
expect(norm).toMatchSnapshot();
|
||||
});
|
||||
|
||||
test('processEmptyResults', () => {
|
||||
const frames = resultsToDataFrames({
|
||||
results: { '': { refId: '', meta: null, series: null, tables: null, dataframes: null } },
|
||||
});
|
||||
expect(frames.length).toEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Read/Write arrow Table to DataFrame', () => {
|
||||
test('should parse output with dataframe', () => {
|
||||
const frame = toDataFrame({
|
||||
|
||||
@@ -161,20 +161,3 @@ export function grafanaDataFrameToArrowTable(data: DataFrame): Table {
|
||||
}
|
||||
return table;
|
||||
}
|
||||
|
||||
export function resultsToDataFrames(rsp: any): DataFrame[] {
|
||||
if (rsp === undefined || rsp.results === undefined) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const results = rsp.results as Array<{ dataframes: string[] }>;
|
||||
const frames: DataFrame[] = Object.values(results).flatMap(res => {
|
||||
if (!res.dataframes) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return res.dataframes.map((b: string) => arrowTableToDataFrame(base64StringToArrowTable(b)));
|
||||
});
|
||||
|
||||
return frames;
|
||||
}
|
||||
|
||||
@@ -1,108 +1,5 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`GEL Utils should parse output with dataframe 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"fields": Array [
|
||||
Object {
|
||||
"config": Object {},
|
||||
"labels": undefined,
|
||||
"name": "Time",
|
||||
"type": "time",
|
||||
"values": Array [
|
||||
1569334575000,
|
||||
1569334580000,
|
||||
1569334585000,
|
||||
1569334590000,
|
||||
1569334595000,
|
||||
1569334600000,
|
||||
1569334605000,
|
||||
1569334610000,
|
||||
1569334615000,
|
||||
1569334620000,
|
||||
1569334625000,
|
||||
1569334630000,
|
||||
1569334635000,
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"config": Object {},
|
||||
"labels": undefined,
|
||||
"name": "",
|
||||
"type": "number",
|
||||
"values": Array [
|
||||
3,
|
||||
3,
|
||||
3,
|
||||
5,
|
||||
5,
|
||||
5,
|
||||
3,
|
||||
3,
|
||||
3,
|
||||
5,
|
||||
5,
|
||||
5,
|
||||
3,
|
||||
],
|
||||
},
|
||||
],
|
||||
"meta": undefined,
|
||||
"name": undefined,
|
||||
"refId": "GC",
|
||||
},
|
||||
Object {
|
||||
"fields": Array [
|
||||
Object {
|
||||
"config": Object {},
|
||||
"labels": undefined,
|
||||
"name": "Time",
|
||||
"type": "time",
|
||||
"values": Array [
|
||||
1569334575000,
|
||||
1569334580000,
|
||||
1569334585000,
|
||||
1569334590000,
|
||||
1569334595000,
|
||||
1569334600000,
|
||||
1569334605000,
|
||||
1569334610000,
|
||||
1569334615000,
|
||||
1569334620000,
|
||||
1569334625000,
|
||||
1569334630000,
|
||||
1569334635000,
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"config": Object {},
|
||||
"labels": undefined,
|
||||
"name": "GB-series",
|
||||
"type": "number",
|
||||
"values": Array [
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
2,
|
||||
2,
|
||||
2,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
2,
|
||||
2,
|
||||
2,
|
||||
0,
|
||||
],
|
||||
},
|
||||
],
|
||||
"meta": undefined,
|
||||
"name": undefined,
|
||||
"refId": "GB",
|
||||
},
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`Read/Write arrow Table to DataFrame should read all types 1`] = `
|
||||
Object {
|
||||
"fields": Array [
|
||||
|
||||
@@ -22,7 +22,8 @@ describe('toDataFrame', () => {
|
||||
],
|
||||
};
|
||||
let series = toDataFrame(input1);
|
||||
expect(series.fields[1].name).toBe(input1.target);
|
||||
expect(series.name).toBe(input1.target);
|
||||
expect(series.fields[1].name).toBe('Value');
|
||||
|
||||
const v0 = series.fields[0].values;
|
||||
const v1 = series.fields[1].values;
|
||||
@@ -182,6 +183,24 @@ describe('SerisData backwards compatibility', () => {
|
||||
expect(roundtrip.target).toBe(timeseries.target);
|
||||
});
|
||||
|
||||
it('can convert TimeSeries to series and back again with tags should render name with tags', () => {
|
||||
const timeseries = {
|
||||
target: 'Series A',
|
||||
tags: { server: 'ServerA', job: 'app' },
|
||||
datapoints: [
|
||||
[100, 1],
|
||||
[200, 2],
|
||||
],
|
||||
};
|
||||
const series = toDataFrame(timeseries);
|
||||
expect(isDataFrame(timeseries)).toBeFalsy();
|
||||
expect(isDataFrame(series)).toBeTruthy();
|
||||
|
||||
const roundtrip = toLegacyResponseData(series) as TimeSeries;
|
||||
expect(isDataFrame(roundtrip)).toBeFalsy();
|
||||
expect(roundtrip.target).toBe('{job="app", server="ServerA"}');
|
||||
});
|
||||
|
||||
it('can convert empty table to DataFrame then back to legacy', () => {
|
||||
const table = {
|
||||
columns: [],
|
||||
|
||||
@@ -14,11 +14,14 @@ import {
|
||||
TimeSeriesValue,
|
||||
FieldDTO,
|
||||
DataFrameDTO,
|
||||
TIME_SERIES_FIELD_NAME,
|
||||
} from '../types/index';
|
||||
import { isDateTime } from '../datetime/moment_wrapper';
|
||||
import { ArrayVector } from '../vector/ArrayVector';
|
||||
import { MutableDataFrame } from './MutableDataFrame';
|
||||
import { SortedVector } from '../vector/SortedVector';
|
||||
import { ArrayDataFrame } from './ArrayDataFrame';
|
||||
import { getFieldTitle } from '../field/fieldState';
|
||||
|
||||
function convertTableToDataFrame(table: TableData): DataFrame {
|
||||
const fields = table.columns.map(c => {
|
||||
@@ -60,6 +63,7 @@ function convertTableToDataFrame(table: TableData): DataFrame {
|
||||
function convertTimeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
|
||||
const times: number[] = [];
|
||||
const values: TimeSeriesValue[] = [];
|
||||
|
||||
for (const point of timeSeries.datapoints) {
|
||||
values.push(point[0]);
|
||||
times.push(point[1] as number);
|
||||
@@ -73,7 +77,7 @@ function convertTimeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
|
||||
values: new ArrayVector<number>(times),
|
||||
},
|
||||
{
|
||||
name: timeSeries.target || 'Value',
|
||||
name: TIME_SERIES_FIELD_NAME,
|
||||
type: FieldType.number,
|
||||
config: {
|
||||
unit: timeSeries.unit,
|
||||
@@ -83,6 +87,10 @@ function convertTimeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
|
||||
},
|
||||
];
|
||||
|
||||
if (timeSeries.title) {
|
||||
(fields[1].config as FieldConfig).title = timeSeries.title;
|
||||
}
|
||||
|
||||
  return {
    name: timeSeries.target,
    refId: timeSeries.refId,
@@ -110,7 +118,7 @@ function convertGraphSeriesToDataFrame(graphSeries: GraphSeriesXY): DataFrame {
    name: graphSeries.label,
    fields: [
      {
        name: graphSeries.label || 'Value',
        name: graphSeries.label || TIME_SERIES_FIELD_NAME,
        type: FieldType.number,
        config: {},
        values: x,
@@ -262,7 +270,10 @@ export const isTableData = (data: any): data is DataFrame => data && data.hasOwn

export const isDataFrame = (data: any): data is DataFrame => data && data.hasOwnProperty('fields');

export const toDataFrame = (data: any): DataFrame => {
/**
 * Inspect any object and return the results as a DataFrame
 */
export function toDataFrame(data: any): DataFrame {
  if ('fields' in data) {
    // DataFrameDTO does not have length
    if ('length' in data) {
@@ -290,9 +301,13 @@ export const toDataFrame = (data: any): DataFrame => {
    return convertTableToDataFrame(data);
  }

  if (Array.isArray(data)) {
    return new ArrayDataFrame(data);
  }

  console.warn('Can not convert', data);
  throw new Error('Unsupported data format');
};
}

export const toLegacyResponseData = (frame: DataFrame): TimeSeries | TableData => {
  const { fields } = frame;
@@ -304,18 +319,20 @@ export const toLegacyResponseData = (frame: DataFrame): TimeSeries | TableData =
    const { timeField, timeIndex } = getTimeField(frame);
    if (timeField) {
      const valueIndex = timeIndex === 0 ? 1 : 0;
      const valueField = fields[valueIndex];
      const timeField = fields[timeIndex!];

      // Make sure it is [value,time]
      for (let i = 0; i < rowCount; i++) {
        rows.push([
          fields[valueIndex].values.get(i), // value
          fields[timeIndex!].values.get(i), // time
          valueField.values.get(i), // value
          timeField.values.get(i), // time
        ]);
      }

      return {
        alias: fields[valueIndex].name || frame.name,
        target: fields[valueIndex].name || frame.name,
        alias: frame.name,
        target: getFieldTitle(valueField, frame),
        datapoints: rows,
        unit: fields[0].config ? fields[0].config.unit : undefined,
        refId: frame.refId,
@@ -424,18 +441,6 @@ export function reverseDataFrame(data: DataFrame): DataFrame {
  };
}

export const getTimeField = (series: DataFrame): { timeField?: Field; timeIndex?: number } => {
  for (let i = 0; i < series.fields.length; i++) {
    if (series.fields[i].type === FieldType.time) {
      return {
        timeField: series.fields[i],
        timeIndex: i,
      };
    }
  }
  return {};
};

/**
 * Wrapper to get an array from each field value
 */
@@ -479,3 +484,15 @@ export function toDataFrameDTO(data: DataFrame): DataFrameDTO {
    name: data.name,
  };
}

export const getTimeField = (series: DataFrame): { timeField?: Field; timeIndex?: number } => {
  for (let i = 0; i < series.fields.length; i++) {
    if (series.fields[i].type === FieldType.time) {
      return {
        timeField: series.fields[i],
        timeIndex: i,
      };
    }
  }
  return {};
};

@@ -7,6 +7,7 @@ import {
  DataFrame,
  DisplayValue,
  DisplayValueAlignmentFactors,
  Field,
  FieldConfig,
  FieldConfigSource,
  FieldType,
@@ -36,35 +37,17 @@ export interface ReduceDataOptions {
// TODO: use built in variables, same as for data links?
export const VAR_SERIES_NAME = '__series.name';
export const VAR_FIELD_NAME = '__field.name';
export const VAR_FIELD_LABELS = '__field.labels';
export const VAR_CALC = '__calc';
export const VAR_CELL_PREFIX = '__cell_'; // consistent with existing table templates

function getTitleTemplate(title: string | undefined, stats: string[], data?: DataFrame[]): string {
  // If the title exists, use it as a template variable
  if (title) {
    return title;
  }
  if (!data || !data.length) {
    return 'No Data';
  }

  let fieldCount = 0;
  for (const field of data[0].fields) {
    if (field.type === FieldType.number) {
      fieldCount++;
    }
  }

function getTitleTemplate(stats: string[]): string {
  const parts: string[] = [];
  if (stats.length > 1) {
    parts.push('${' + VAR_CALC + '}');
  }
  if (data.length > 1) {
    parts.push('${' + VAR_SERIES_NAME + '}');
  }
  if (fieldCount > 1 || !parts.length) {
    parts.push('${' + VAR_FIELD_NAME + '}');
  }

  parts.push('${' + VAR_FIELD_NAME + '}');

  return parts.join(' ');
}
@@ -80,6 +63,7 @@ export interface FieldDisplay {
|
||||
colIndex?: number; // The field column index
|
||||
rowIndex?: number; // only filled in when the value is from a row (ie, not a reduction)
|
||||
getLinks?: () => LinkModel[];
|
||||
hasLinks: boolean;
|
||||
}
|
||||
|
||||
export interface GetFieldDisplayValuesOptions {
|
||||
@@ -106,8 +90,8 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
const data = options.data;
|
||||
let hitLimit = false;
|
||||
const limit = reduceOptions.limit ? reduceOptions.limit : DEFAULT_FIELD_DISPLAY_VALUES_LIMIT;
|
||||
const defaultTitle = getTitleTemplate(fieldConfig.defaults.title, calcs, data);
|
||||
const scopedVars: ScopedVars = {};
|
||||
const defaultTitle = getTitleTemplate(calcs);
|
||||
|
||||
for (let s = 0; s < data.length && !hitLimit; s++) {
|
||||
const series = data[s]; // Name is already set
|
||||
@@ -118,11 +102,14 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
for (let i = 0; i < series.fields.length && !hitLimit; i++) {
|
||||
const field = series.fields[i];
|
||||
const fieldLinksSupplier = field.getLinks;
|
||||
// Show all number fields
|
||||
|
||||
// To filter out time field, need an option for this
|
||||
if (field.type !== FieldType.number) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const config = field.config; // already set by the prepare task
|
||||
const title = field.config.title ?? defaultTitle;
|
||||
|
||||
const display =
|
||||
field.display ??
|
||||
@@ -132,7 +119,6 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
timeZone,
|
||||
});
|
||||
|
||||
const title = config.title ? config.title : defaultTitle;
|
||||
// Show all rows
|
||||
if (reduceOptions.values) {
|
||||
const usesCellValues = title.indexOf(VAR_CELL_PREFIX) >= 0;
|
||||
@@ -149,9 +135,10 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const displayValue = display(field.values.get(j));
|
||||
displayValue.title = replaceVariables(title, {
|
||||
...field.config.scopedVars, // series and field scoped vars
|
||||
...field.state?.scopedVars, // series and field scoped vars
|
||||
...scopedVars,
|
||||
});
|
||||
|
||||
@@ -168,6 +155,7 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
valueRowIndex: j,
|
||||
})
|
||||
: () => [],
|
||||
hasLinks: hasLinks(field),
|
||||
});
|
||||
|
||||
if (values.length >= limit) {
|
||||
@@ -194,9 +182,10 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
scopedVars[VAR_CALC] = { value: calc, text: calc };
|
||||
const displayValue = display(results[calc]);
|
||||
displayValue.title = replaceVariables(title, {
|
||||
...field.config.scopedVars, // series and field scoped vars
|
||||
...field.state?.scopedVars, // series and field scoped vars
|
||||
...scopedVars,
|
||||
});
|
||||
|
||||
values.push({
|
||||
name: calc,
|
||||
field: config,
|
||||
@@ -210,6 +199,7 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
calculatedValue: displayValue,
|
||||
})
|
||||
: () => [],
|
||||
hasLinks: hasLinks(field),
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -227,6 +217,10 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
return values;
|
||||
};
|
||||
|
||||
export function hasLinks(field: Field): boolean {
|
||||
return field.config?.links?.length ? field.config.links.length > 0 : false;
|
||||
}
|
||||
|
||||
export function getDisplayValueAlignmentFactors(values: FieldDisplay[]): DisplayValueAlignmentFactors {
|
||||
const info: DisplayValueAlignmentFactors = {
|
||||
title: '',
|
||||
@@ -287,6 +281,7 @@ function createNoValuesFieldDisplay(options: GetFieldDisplayValuesOptions): Fiel
|
||||
numeric: 0,
|
||||
color: display.color,
|
||||
},
|
||||
hasLinks: false,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ import { Registry } from '../utils';
|
||||
import { mockStandardProperties } from '../utils/tests/mockStandardProperties';
|
||||
import { FieldMatcherID } from '../transformations';
|
||||
import { FieldConfigOptionsRegistry } from './FieldConfigOptionsRegistry';
|
||||
import { getFieldTitle } from './fieldState';
|
||||
|
||||
const property1 = {
|
||||
id: 'custom.property1', // Match field properties
|
||||
@@ -111,12 +112,14 @@ describe('applyFieldOverrides', () => {
|
||||
fieldConfigRegistry: new FieldConfigOptionsRegistry(),
|
||||
});
|
||||
|
||||
expect(withOverrides[0].fields[0].config.scopedVars).toMatchInlineSnapshot(`
|
||||
expect(withOverrides[0].fields[0].state!.scopedVars).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"__field": Object {
|
||||
"text": "Field",
|
||||
"value": Object {
|
||||
"name": "message",
|
||||
"label": undefined,
|
||||
"labels": "",
|
||||
"name": "A message",
|
||||
},
|
||||
},
|
||||
"__series": Object {
|
||||
@@ -128,12 +131,14 @@ describe('applyFieldOverrides', () => {
|
||||
}
|
||||
`);
|
||||
|
||||
expect(withOverrides[1].fields[0].config.scopedVars).toMatchInlineSnapshot(`
|
||||
expect(withOverrides[1].fields[0].state!.scopedVars).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"__field": Object {
|
||||
"text": "Field",
|
||||
"value": Object {
|
||||
"name": "info",
|
||||
"label": undefined,
|
||||
"labels": "",
|
||||
"name": "B info",
|
||||
},
|
||||
},
|
||||
"__series": Object {
|
||||
@@ -152,16 +157,19 @@ describe('applyFieldOverrides', () => {
|
||||
min: 0,
|
||||
max: 100,
|
||||
};
|
||||
|
||||
const f1 = {
|
||||
unit: 'ms',
|
||||
dateFormat: '', // should be ignored
|
||||
max: parseFloat('NOPE'), // should be ignored
|
||||
min: null, // should alo be ignored!
|
||||
title: 'newTitle',
|
||||
};
|
||||
|
||||
const f: DataFrame = toDataFrame({
|
||||
fields: [{ type: FieldType.number, name: 'x', config: field, values: [] }],
|
||||
});
|
||||
|
||||
const processed = applyFieldOverrides({
|
||||
data: [f],
|
||||
fieldConfig: {
|
||||
@@ -172,11 +180,13 @@ describe('applyFieldOverrides', () => {
|
||||
replaceVariables: v => v,
|
||||
theme: {} as GrafanaTheme,
|
||||
})[0];
|
||||
const out = processed.fields[0].config;
|
||||
|
||||
expect(out.min).toEqual(0);
|
||||
expect(out.max).toEqual(100);
|
||||
expect(out.unit).toEqual('ms');
|
||||
const outField = processed.fields[0];
|
||||
|
||||
expect(outField.config.min).toEqual(0);
|
||||
expect(outField.config.max).toEqual(100);
|
||||
expect(outField.config.unit).toEqual('ms');
|
||||
expect(getFieldTitle(outField, f)).toEqual('newTitle');
|
||||
});
|
||||
|
||||
it('will apply field overrides', () => {
|
||||
@@ -300,10 +310,8 @@ describe('setDynamicConfigValue', () => {
|
||||
it('applies dynamic config values', () => {
|
||||
const config = {
|
||||
title: 'test',
|
||||
// custom: {
|
||||
// property1: 1,
|
||||
// },
|
||||
};
|
||||
|
||||
setDynamicConfigValue(
|
||||
config,
|
||||
{
|
||||
|
||||
@@ -24,12 +24,15 @@ import set from 'lodash/set';
|
||||
import unset from 'lodash/unset';
|
||||
import get from 'lodash/get';
|
||||
import { getDisplayProcessor } from './displayProcessor';
|
||||
import { getTimeField, guessFieldTypeForField } from '../dataframe';
|
||||
import { guessFieldTypeForField } from '../dataframe';
|
||||
import { standardFieldConfigEditorRegistry } from './standardFieldConfigEditorRegistry';
|
||||
import { FieldConfigOptionsRegistry } from './FieldConfigOptionsRegistry';
|
||||
import { DataLinkBuiltInVars, locationUtil } from '../utils';
|
||||
import { formattedValueToString } from '../valueFormats';
|
||||
import { getFieldDisplayValuesProxy } from './getFieldDisplayValuesProxy';
|
||||
import { formatLabels } from '../utils/labels';
|
||||
import { getFrameDisplayTitle, getFieldTitle } from './fieldState';
|
||||
import { getTimeField } from '../dataframe/processDataFrame';
|
||||
|
||||
interface OverrideProps {
|
||||
match: FieldMatcher;
|
||||
@@ -46,6 +49,7 @@ export function findNumericFieldMinMax(data: DataFrame[]): GlobalMinMax {
|
||||
let max = Number.MIN_VALUE;
|
||||
|
||||
const reducers = [ReducerID.min, ReducerID.max];
|
||||
|
||||
for (const frame of data) {
|
||||
for (const field of frame.fields) {
|
||||
if (field.type === FieldType.number) {
|
||||
@@ -95,25 +99,31 @@ export function applyFieldOverrides(options: ApplyFieldOverrideOptions): DataFra
|
||||
}
|
||||
|
||||
return options.data.map((frame, index) => {
|
||||
let name = frame.name;
|
||||
if (!name) {
|
||||
name = `Series[${index}]`;
|
||||
}
|
||||
|
||||
const scopedVars: ScopedVars = {
|
||||
__series: { text: 'Series', value: { name } },
|
||||
__series: { text: 'Series', value: { name: getFrameDisplayTitle(frame, index) } }, // might be missing
|
||||
};
|
||||
|
||||
const fields: Field[] = frame.fields.map((field, fieldIndex) => {
|
||||
const fields: Field[] = frame.fields.map(field => {
|
||||
// Config is mutable within this scope
|
||||
let fieldName = field.name;
|
||||
if (!fieldName) {
|
||||
fieldName = `Field[${fieldIndex}]`;
|
||||
}
|
||||
const fieldScopedVars = { ...scopedVars };
|
||||
fieldScopedVars['__field'] = { text: 'Field', value: { name: fieldName } };
|
||||
const title = getFieldTitle(field, frame, options.data);
|
||||
|
||||
const config: FieldConfig = { ...field.config, scopedVars: fieldScopedVars } || {};
|
||||
fieldScopedVars['__field'] = {
|
||||
text: 'Field',
|
||||
value: {
|
||||
name: title, // Generally appropriate (may include the series name if useful)
|
||||
labels: formatLabels(field.labels!),
|
||||
label: field.labels,
|
||||
},
|
||||
};
|
||||
|
||||
field.state = {
|
||||
...field.state,
|
||||
title: title,
|
||||
scopedVars: fieldScopedVars,
|
||||
};
|
||||
|
||||
const config: FieldConfig = { ...field.config };
|
||||
const context = {
|
||||
field,
|
||||
data: options.data!,
|
||||
@@ -182,6 +192,10 @@ export function applyFieldOverrides(options: ApplyFieldOverrideOptions): DataFra
|
||||
...field,
|
||||
config,
|
||||
type,
|
||||
state: {
|
||||
...field.state,
|
||||
title: null,
|
||||
},
|
||||
};
|
||||
|
||||
// and set the display processor using it
|
||||
@@ -203,7 +217,6 @@ export function applyFieldOverrides(options: ApplyFieldOverrideOptions): DataFra
|
||||
return {
|
||||
...frame,
|
||||
fields,
|
||||
name,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
packages/grafana-data/src/field/fieldState.test.ts (new file, 136 lines)
@@ -0,0 +1,136 @@
|
||||
import { DataFrame, TIME_SERIES_FIELD_NAME, FieldType } from '../types';
|
||||
import { getFieldTitle } from './fieldState';
|
||||
import { toDataFrame } from '../dataframe';
|
||||
|
||||
interface TitleScenario {
|
||||
frames: DataFrame[];
|
||||
frameIndex?: number; // assume 0
|
||||
fieldIndex?: number; // assume 0
|
||||
}
|
||||
|
||||
function checkScenario(scenario: TitleScenario): string {
|
||||
const frame = scenario.frames[scenario.frameIndex ?? 0];
|
||||
const field = frame.fields[scenario.fieldIndex ?? 0];
|
||||
return getFieldTitle(field, frame, scenario.frames);
|
||||
}
|
||||
|
||||
describe('Check field state calculations (title and id)', () => {
|
||||
it('should use field name if no frame name', () => {
|
||||
const title = checkScenario({
|
||||
frames: [
|
||||
toDataFrame({
|
||||
fields: [{ name: 'Field 1' }],
|
||||
}),
|
||||
],
|
||||
});
|
||||
expect(title).toEqual('Field 1');
|
||||
});
|
||||
|
||||
it('should use only field name if only one series', () => {
|
||||
const title = checkScenario({
|
||||
frames: [
|
||||
toDataFrame({
|
||||
name: 'Series A',
|
||||
fields: [{ name: 'Field 1' }],
|
||||
}),
|
||||
],
|
||||
});
|
||||
expect(title).toEqual('Field 1');
|
||||
});
|
||||
|
||||
it('should use frame name and field name if more than one frame', () => {
|
||||
const title = checkScenario({
|
||||
frames: [
|
||||
toDataFrame({
|
||||
name: 'Series A',
|
||||
fields: [{ name: 'Field 1' }],
|
||||
}),
|
||||
toDataFrame({
|
||||
name: 'Series B',
|
||||
fields: [{ name: 'Field 1' }],
|
||||
}),
|
||||
],
|
||||
});
|
||||
expect(title).toEqual('Series A Field 1');
|
||||
});
|
||||
|
||||
it('should only use label value if only one label', () => {
|
||||
const title = checkScenario({
|
||||
frames: [
|
||||
toDataFrame({
|
||||
fields: [{ name: 'Value', labels: { server: 'Server A' } }],
|
||||
}),
|
||||
],
|
||||
});
|
||||
expect(title).toEqual('Server A');
|
||||
});
|
||||
|
||||
it('should use label value only if all series have same name', () => {
|
||||
const title = checkScenario({
|
||||
frames: [
|
||||
toDataFrame({
|
||||
name: 'cpu',
|
||||
fields: [{ name: 'Value', labels: { server: 'Server A' } }],
|
||||
}),
|
||||
toDataFrame({
|
||||
name: 'cpu',
|
||||
fields: [{ name: 'Value', labels: { server: 'Server A' } }],
|
||||
}),
|
||||
],
|
||||
});
|
||||
expect(title).toEqual('Server A');
|
||||
});
|
||||
|
||||
it('should use label name and value if more than one label', () => {
|
||||
const title = checkScenario({
|
||||
frames: [
|
||||
toDataFrame({
|
||||
fields: [{ name: 'Value', labels: { server: 'Server A', mode: 'B' } }],
|
||||
}),
|
||||
],
|
||||
});
|
||||
expect(title).toEqual('{mode="B", server="Server A"}');
|
||||
});
|
||||
|
||||
it('should use field name even when it is TIME_SERIES_FIELD_NAME if there are no labels', () => {
|
||||
const title = checkScenario({
|
||||
frames: [
|
||||
toDataFrame({
|
||||
fields: [{ name: TIME_SERIES_FIELD_NAME, labels: {} }],
|
||||
}),
|
||||
],
|
||||
});
|
||||
expect(title).toEqual('Value');
|
||||
});
|
||||
|
||||
it('should use series name when field name is TIME_SERIES_FIELD_NAME and there are no labels ', () => {
|
||||
const title = checkScenario({
|
||||
frames: [
|
||||
toDataFrame({
|
||||
name: 'Series A',
|
||||
fields: [{ name: TIME_SERIES_FIELD_NAME, labels: {} }],
|
||||
}),
|
||||
],
|
||||
});
|
||||
expect(title).toEqual('Series A');
|
||||
});
|
||||
|
||||
it('should render loki frames', () => {
|
||||
const title = checkScenario({
|
||||
frames: [
|
||||
toDataFrame({
|
||||
refId: 'A',
|
||||
fields: [
|
||||
{ name: 'time', type: FieldType.time },
|
||||
{
|
||||
name: 'line',
|
||||
labels: { host: 'ec2-13-53-116-156.eu-north-1.compute.amazonaws.com', region: 'eu-north1' },
|
||||
},
|
||||
],
|
||||
}),
|
||||
],
|
||||
fieldIndex: 1,
|
||||
});
|
||||
expect(title).toEqual('line {host="ec2-13-53-116-156.eu-north-1.compute.amazonaws.com", region="eu-north1"}');
|
||||
});
|
||||
});
|
||||
packages/grafana-data/src/field/fieldState.ts (new file, 153 lines)
@@ -0,0 +1,153 @@
|
||||
import { DataFrame, Field, TIME_SERIES_FIELD_NAME, FieldType } from '../types';
|
||||
import { formatLabels } from '../utils/labels';
|
||||
|
||||
/**
|
||||
* Get an appropriate display title
|
||||
*/
|
||||
export function getFrameDisplayTitle(frame: DataFrame, index?: number) {
|
||||
if (frame.name) {
|
||||
return frame.name;
|
||||
}
|
||||
|
||||
// Single field with tags
|
||||
const valuesWithLabels = frame.fields.filter(f => f.labels !== undefined);
|
||||
if (valuesWithLabels.length === 1) {
|
||||
return formatLabels(valuesWithLabels[0].labels!);
|
||||
}
|
||||
|
||||
// list all the
|
||||
if (index === undefined) {
|
||||
return frame.fields
|
||||
.filter(f => f.type !== FieldType.time)
|
||||
.map(f => getFieldTitle(f, frame))
|
||||
.join(', ');
|
||||
}
|
||||
|
||||
if (frame.refId) {
|
||||
return `Series (${frame.refId})`;
|
||||
}
|
||||
|
||||
return `Series (${index})`;
|
||||
}
|
||||
|
||||
export function getFieldTitle(field: Field, frame?: DataFrame, allFrames?: DataFrame[]): string {
|
||||
const existingTitle = field.state?.title;
|
||||
|
||||
if (existingTitle) {
|
||||
return existingTitle;
|
||||
}
|
||||
|
||||
const title = calculateFieldTitle(field, frame, allFrames);
|
||||
field.state = {
|
||||
...field.state,
|
||||
title,
|
||||
};
|
||||
|
||||
return title;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an appropriate display title. If the 'title' is set, use that
|
||||
*/
|
||||
function calculateFieldTitle(field: Field, frame?: DataFrame, allFrames?: DataFrame[]): string {
|
||||
const hasConfigTitle = field.config?.title && field.config?.title.length;
|
||||
|
||||
let title = hasConfigTitle ? field.config!.title! : field.name;
|
||||
|
||||
if (hasConfigTitle) {
|
||||
return title;
|
||||
}
|
||||
|
||||
// This is an ugly exception for time field
|
||||
// For time series we should normally treat time field with same name
|
||||
// But in case it has a join source we should handle it as normal field
|
||||
if (field.type === FieldType.time && !field.labels) {
|
||||
return title ?? 'Time';
|
||||
}
|
||||
|
||||
let parts: string[] = [];
|
||||
let frameNamesDiffer = false;
|
||||
|
||||
if (allFrames && allFrames.length > 1) {
|
||||
for (let i = 1; i < allFrames.length; i++) {
|
||||
const frame = allFrames[i];
|
||||
if (frame.name !== allFrames[i - 1].name) {
|
||||
frameNamesDiffer = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let frameNameAdded = false;
|
||||
let labelsAdded = false;
|
||||
|
||||
if (frameNamesDiffer && frame?.name) {
|
||||
parts.push(frame.name);
|
||||
frameNameAdded = true;
|
||||
}
|
||||
|
||||
if (field.name && field.name !== TIME_SERIES_FIELD_NAME) {
|
||||
parts.push(field.name);
|
||||
}
|
||||
|
||||
if (field.labels && frame) {
|
||||
let singleLabelName = getSingleLabelName(allFrames ?? [frame]);
|
||||
|
||||
if (!singleLabelName) {
|
||||
let allLabels = formatLabels(field.labels);
|
||||
if (allLabels) {
|
||||
parts.push(allLabels);
|
||||
labelsAdded = true;
|
||||
}
|
||||
} else if (field.labels[singleLabelName]) {
|
||||
parts.push(field.labels[singleLabelName]);
|
||||
labelsAdded = true;
|
||||
}
|
||||
}
|
||||
|
||||
// if we have not added frame name and no labels, and field name = Value, we should add frame name
|
||||
if (frame && !frameNameAdded && !labelsAdded && field.name === TIME_SERIES_FIELD_NAME) {
|
||||
if (frame.name && frame.name.length > 0) {
|
||||
parts.push(frame.name);
|
||||
frameNameAdded = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (parts.length) {
|
||||
title = parts.join(' ');
|
||||
} else if (field.name) {
|
||||
title = field.name;
|
||||
} else {
|
||||
title = TIME_SERIES_FIELD_NAME;
|
||||
}
|
||||
|
||||
return title;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks all data frames and return name of label if there is only one label name in all frames
|
||||
*/
|
||||
function getSingleLabelName(frames: DataFrame[]): string | null {
|
||||
let singleName: string | null = null;
|
||||
|
||||
for (let i = 0; i < frames.length; i++) {
|
||||
const frame = frames[i];
|
||||
|
||||
for (const field of frame.fields) {
|
||||
if (!field.labels) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// yes this should be in!
|
||||
for (const labelKey in field.labels) {
|
||||
if (singleName === null) {
|
||||
singleName = labelKey;
|
||||
} else if (labelKey !== singleName) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return singleName;
|
||||
}
|
||||
@@ -7,3 +7,4 @@ export { FieldConfigOptionsRegistry } from './FieldConfigOptionsRegistry';
|
||||
|
||||
export { applyFieldOverrides, validateFieldConfig } from './fieldOverrides';
|
||||
export { getFieldDisplayValuesProxy } from './getFieldDisplayValuesProxy';
|
||||
export { getFieldTitle, getFrameDisplayTitle } from './fieldState';
|
||||
|
||||
@@ -73,7 +73,7 @@ export const stringOverrideProcessor = (
|
||||
return value;
|
||||
}
|
||||
if (settings && settings.expandTemplateVars && context.replaceVariables) {
|
||||
return context.replaceVariables(value, context.field!.config.scopedVars);
|
||||
return context.replaceVariables(value, context.field!.state!.scopedVars);
|
||||
}
|
||||
return `${value}`;
|
||||
};
|
||||
|
||||
@@ -314,7 +314,7 @@ export class PanelPlugin<TOptions = any, TFieldConfigOptions extends object = an
|
||||
|
||||
for (const customProp of builder.getRegistry().list()) {
|
||||
customProp.isCustom = true;
|
||||
customProp.category = ['Custom field options'].concat(customProp.category || []);
|
||||
customProp.category = ['Custom options'].concat(customProp.category || []);
|
||||
// need to do something to make the custom items not conflict with standard ones
|
||||
// problem is id (registry index) is used as property path
|
||||
// so sort of need a property path on the FieldPropertyEditorItem
|
||||
|
||||
@@ -66,7 +66,7 @@ describe('Stats Calculators', () => {
|
||||
});
|
||||
|
||||
it('should support a single stat also', () => {
|
||||
basicTable.fields[0].calcs = undefined; // clear the cache
|
||||
basicTable.fields[0].state = undefined; // clear the cache
|
||||
const stats = reduceField({
|
||||
field: basicTable.fields[0],
|
||||
reducers: ['first'],
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
// Libraries
|
||||
import isNumber from 'lodash/isNumber';
|
||||
|
||||
import { NullValueMode, Field } from '../types/index';
|
||||
import { NullValueMode, Field, FieldState, FieldCalcs } from '../types/index';
|
||||
import { Registry, RegistryItem } from '../utils/Registry';
|
||||
|
||||
export enum ReducerID {
|
||||
@@ -28,10 +28,6 @@ export enum ReducerID {
|
||||
allIsNull = 'allIsNull',
|
||||
}
|
||||
|
||||
export interface FieldCalcs {
|
||||
[key: string]: any;
|
||||
}
|
||||
|
||||
// Internal function
|
||||
type FieldReducer = (field: Field, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;
|
||||
|
||||
@@ -57,20 +53,23 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
|
||||
return {};
|
||||
}
|
||||
|
||||
if (field.calcs) {
|
||||
if (field.state?.calcs) {
|
||||
// Find the values we need to calculate
|
||||
const missing: string[] = [];
|
||||
for (const s of reducers) {
|
||||
if (!field.calcs.hasOwnProperty(s)) {
|
||||
if (!field.state.calcs.hasOwnProperty(s)) {
|
||||
missing.push(s);
|
||||
}
|
||||
}
|
||||
if (missing.length < 1) {
|
||||
return {
|
||||
...field.calcs,
|
||||
...field.state.calcs,
|
||||
};
|
||||
}
|
||||
}
|
||||
if (!field.state) {
|
||||
field.state = {} as FieldState;
|
||||
}
|
||||
|
||||
const queue = fieldReducers.list(reducers);
|
||||
|
||||
@@ -78,11 +77,11 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
|
||||
// This lets the concrete implementations assume at least one row
|
||||
const data = field.values;
|
||||
if (data.length < 1) {
|
||||
const calcs = { ...field.calcs } as FieldCalcs;
|
||||
const calcs = { ...field.state.calcs } as FieldCalcs;
|
||||
for (const reducer of queue) {
|
||||
calcs[reducer.id] = reducer.emptyInputResult !== null ? reducer.emptyInputResult : null;
|
||||
}
|
||||
return (field.calcs = calcs);
|
||||
return (field.state.calcs = calcs);
|
||||
}
|
||||
|
||||
const { nullValueMode } = field.config;
|
||||
@@ -92,8 +91,8 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
|
||||
// Avoid calculating all the standard stats if possible
|
||||
if (queue.length === 1 && queue[0].reduce) {
|
||||
const values = queue[0].reduce(field, ignoreNulls, nullAsZero);
|
||||
field.calcs = {
|
||||
...field.calcs,
|
||||
field.state.calcs = {
|
||||
...field.state.calcs,
|
||||
...values,
|
||||
};
|
||||
return values;
|
||||
@@ -111,11 +110,10 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
|
||||
}
|
||||
}
|
||||
|
||||
field.calcs = {
|
||||
...field.calcs,
|
||||
field.state.calcs = {
|
||||
...field.state.calcs,
|
||||
...values,
|
||||
};
|
||||
|
||||
return values;
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import { Field, DataFrame } from '../../types/dataFrame';
|
||||
import { FieldMatcherID, FrameMatcherID } from './ids';
|
||||
import { FieldMatcherInfo, FrameMatcherInfo } from '../../types/transformations';
|
||||
import { stringToJsRegex } from '../../text/string';
|
||||
import { getFieldTitle } from '../../field/fieldState';
|
||||
|
||||
// General Field matcher
|
||||
const fieldNameMacher: FieldMatcherInfo<string> = {
|
||||
@@ -18,7 +19,7 @@ const fieldNameMacher: FieldMatcherInfo<string> = {
|
||||
console.error(e);
|
||||
}
|
||||
return (field: Field) => {
|
||||
return regex.test(field.name);
|
||||
return regex.test(getFieldTitle(field) ?? '');
|
||||
};
|
||||
},
|
||||
|
||||
|
||||
@@ -4,19 +4,27 @@ import { FieldType } from '../../types/dataFrame';
|
||||
import { ReducerID } from '../fieldReducer';
|
||||
import { mockTransformationsRegistry } from '../../utils/tests/mockTransformationsRegistry';
|
||||
import { transformDataFrame } from '../transformDataFrame';
|
||||
import { calculateFieldTransformer } from './calculateField';
|
||||
import { calculateFieldTransformer, CalculateFieldMode } from './calculateField';
|
||||
import { DataFrameView } from '../../dataframe';
|
||||
import { BinaryOperationID } from '../../utils';
|
||||
|
||||
const seriesToTestWith = toDataFrame({
|
||||
const seriesA = toDataFrame({
|
||||
fields: [
|
||||
{ name: 'A', type: FieldType.time, values: [1000, 2000] },
|
||||
{ name: 'B', type: FieldType.number, values: [1, 100] },
|
||||
{ name: 'C', type: FieldType.number, values: [2, 200] },
|
||||
{ name: 'TheTime', type: FieldType.time, values: [1000, 2000] },
|
||||
{ name: 'A', type: FieldType.number, values: [1, 100] },
|
||||
],
|
||||
});
|
||||
|
||||
const seriesBC = toDataFrame({
|
||||
fields: [
|
||||
{ name: 'TheTime', type: FieldType.time, values: [1000, 2000] },
|
||||
{ name: 'B', type: FieldType.number, values: [2, 200] },
|
||||
{ name: 'C', type: FieldType.number, values: [3, 300] },
|
||||
{ name: 'D', type: FieldType.string, values: ['first', 'second'] },
|
||||
],
|
||||
});
|
||||
|
||||
describe('calculateField transformer', () => {
|
||||
describe('calculateField transformer w/ timeseries', () => {
|
||||
beforeAll(() => {
|
||||
mockTransformationsRegistry([calculateFieldTransformer]);
|
||||
});
|
||||
@@ -25,28 +33,30 @@ describe('calculateField transformer', () => {
|
||||
const cfg = {
|
||||
id: DataTransformerID.calculateField,
|
||||
options: {
|
||||
// defautls to sum
|
||||
// defaults to `sum` ReduceRow
|
||||
alias: 'The Total',
|
||||
},
|
||||
};
|
||||
|
||||
const filtered = transformDataFrame([cfg], [seriesToTestWith])[0];
|
||||
const filtered = transformDataFrame([cfg], [seriesA, seriesBC])[0];
|
||||
const rows = new DataFrameView(filtered).toArray();
|
||||
expect(rows).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"A": 1000,
|
||||
"B": 1,
|
||||
"C": 2,
|
||||
"A": 1,
|
||||
"B": 2,
|
||||
"C": 3,
|
||||
"D": "first",
|
||||
"The Total": 3,
|
||||
"The Total": 6,
|
||||
"TheTime": 1000,
|
||||
},
|
||||
Object {
|
||||
"A": 2000,
|
||||
"B": 100,
|
||||
"C": 200,
|
||||
"A": 100,
|
||||
"B": 200,
|
||||
"C": 300,
|
||||
"D": "second",
|
||||
"The Total": 300,
|
||||
"The Total": 600,
|
||||
"TheTime": 2000,
|
||||
},
|
||||
]
|
||||
`);
|
||||
@@ -56,20 +66,25 @@ describe('calculateField transformer', () => {
|
||||
const cfg = {
|
||||
id: DataTransformerID.calculateField,
|
||||
options: {
|
||||
reducer: ReducerID.mean,
|
||||
mode: CalculateFieldMode.ReduceRow,
|
||||
reduce: {
|
||||
reducer: ReducerID.mean,
|
||||
},
|
||||
replaceFields: true,
|
||||
},
|
||||
};
|
||||
|
||||
const filtered = transformDataFrame([cfg], [seriesToTestWith])[0];
|
||||
const filtered = transformDataFrame([cfg], [seriesA, seriesBC])[0];
|
||||
const rows = new DataFrameView(filtered).toArray();
|
||||
expect(rows).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"Mean": 1.5,
|
||||
"Mean": 2,
|
||||
"TheTime": 1000,
|
||||
},
|
||||
Object {
|
||||
"Mean": 150,
|
||||
"Mean": 200,
|
||||
"TheTime": 2000,
|
||||
},
|
||||
]
|
||||
`);
|
||||
@@ -79,21 +94,86 @@ describe('calculateField transformer', () => {
|
||||
const cfg = {
|
||||
id: DataTransformerID.calculateField,
|
||||
options: {
|
||||
reducer: ReducerID.mean,
|
||||
mode: CalculateFieldMode.ReduceRow,
|
||||
reduce: {
|
||||
include: 'B',
|
||||
reducer: ReducerID.mean,
|
||||
},
|
||||
replaceFields: true,
|
||||
include: 'B',
|
||||
},
|
||||
};
|
||||
|
||||
const filtered = transformDataFrame([cfg], [seriesToTestWith])[0];
|
||||
const filtered = transformDataFrame([cfg], [seriesBC])[0];
|
||||
const rows = new DataFrameView(filtered).toArray();
|
||||
expect(rows).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"Mean": 1,
|
||||
"Mean": 2,
|
||||
"TheTime": 1000,
|
||||
},
|
||||
Object {
|
||||
"Mean": 100,
|
||||
"Mean": 200,
|
||||
"TheTime": 2000,
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('binary math', () => {
|
||||
const cfg = {
|
||||
id: DataTransformerID.calculateField,
|
||||
options: {
|
||||
mode: CalculateFieldMode.BinaryOperation,
|
||||
binary: {
|
||||
left: 'B',
|
||||
operation: BinaryOperationID.Add,
|
||||
right: 'C',
|
||||
},
|
||||
replaceFields: true,
|
||||
},
|
||||
};
|
||||
|
||||
const filtered = transformDataFrame([cfg], [seriesBC])[0];
|
||||
const rows = new DataFrameView(filtered).toArray();
|
||||
expect(rows).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"B + C": 5,
|
||||
"TheTime": 1000,
|
||||
},
|
||||
Object {
|
||||
"B + C": 500,
|
||||
"TheTime": 2000,
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('field + static number', () => {
|
||||
const cfg = {
|
||||
id: DataTransformerID.calculateField,
|
||||
options: {
|
||||
mode: CalculateFieldMode.BinaryOperation,
|
||||
binary: {
|
||||
left: 'B',
|
||||
operation: BinaryOperationID.Add,
|
||||
right: '2',
|
||||
},
|
||||
replaceFields: true,
|
||||
},
|
||||
};
|
||||
|
||||
const filtered = transformDataFrame([cfg], [seriesBC])[0];
|
||||
const rows = new DataFrameView(filtered).toArray();
|
||||
expect(rows).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"B + 2": 4,
|
||||
"TheTime": 1000,
|
||||
},
|
||||
Object {
|
||||
"B + 2": 202,
|
||||
"TheTime": 2000,
|
||||
},
|
||||
]
|
||||
`);
|
||||
|
||||
@@ -4,79 +4,247 @@ import { ReducerID, fieldReducers } from '../fieldReducer';
|
||||
import { getFieldMatcher } from '../matchers';
|
||||
import { FieldMatcherID } from '../matchers/ids';
|
||||
import { RowVector } from '../../vector/RowVector';
|
||||
import { ArrayVector } from '../../vector';
|
||||
import { ArrayVector, BinaryOperationVector, ConstantVector } from '../../vector';
|
||||
import { doStandardCalcs } from '../fieldReducer';
|
||||
import { seriesToColumnsTransformer } from './seriesToColumns';
|
||||
import { getTimeField } from '../../dataframe/processDataFrame';
|
||||
import defaults from 'lodash/defaults';
|
||||
import { BinaryOperationID, binaryOperators } from '../../utils/binaryOperators';
|
||||
|
||||
export interface CalculateFieldTransformerOptions {
|
||||
reducer: ReducerID;
|
||||
export enum CalculateFieldMode {
|
||||
ReduceRow = 'reduceRow',
|
||||
BinaryOperation = 'binary',
|
||||
}
|
||||
|
||||
interface ReduceOptions {
|
||||
include?: string; // Assume all fields
|
||||
alias?: string; // The output field name
|
||||
replaceFields?: boolean;
|
||||
reducer: ReducerID;
|
||||
nullValueMode?: NullValueMode;
|
||||
}
|
||||
|
||||
interface BinaryOptions {
|
||||
left: string;
|
||||
operator: BinaryOperationID;
|
||||
right: string;
|
||||
}
|
||||
|
||||
const defaultReduceOptions: ReduceOptions = {
|
||||
reducer: ReducerID.sum,
|
||||
};
|
||||
|
||||
const defaultBinaryOptions: BinaryOptions = {
|
||||
left: '',
|
||||
operator: BinaryOperationID.Add,
|
||||
right: '',
|
||||
};
|
||||
|
||||
export interface CalculateFieldTransformerOptions {
|
||||
// True/False or auto
|
||||
timeSeries?: boolean;
|
||||
mode: CalculateFieldMode; // defaults to 'reduce'
|
||||
|
||||
// Only one should be filled
|
||||
reduce?: ReduceOptions;
|
||||
binary?: BinaryOptions;
|
||||
|
||||
// Remove other fields
|
||||
replaceFields?: boolean;
|
||||
|
||||
// Output field properties
|
||||
alias?: string; // The output field name
|
||||
// TODO: config?: FieldConfig; or maybe field overrides? since the UI exists
|
||||
}
|
||||
|
||||
type ValuesCreator = (data: DataFrame) => Vector;
|
||||
|
||||
export const calculateFieldTransformer: DataTransformerInfo<CalculateFieldTransformerOptions> = {
|
||||
id: DataTransformerID.calculateField,
|
||||
name: 'Add field from calculation',
|
||||
description: 'Use the row values to calculate a new field',
|
||||
defaultOptions: {
|
||||
reducer: ReducerID.sum,
|
||||
mode: CalculateFieldMode.ReduceRow,
|
||||
reduce: {
|
||||
reducer: ReducerID.sum,
|
||||
},
|
||||
},
|
||||
transformer: options => (data: DataFrame[]) => {
|
||||
let matcher = getFieldMatcher({
|
||||
id: FieldMatcherID.numeric,
|
||||
});
|
||||
if (options.include && options.include.length) {
|
||||
matcher = getFieldMatcher({
|
||||
id: FieldMatcherID.byName,
|
||||
options: options.include,
|
||||
});
|
||||
// Assume timeseries should first be joined by time
|
||||
const timeFieldName = findConsistentTimeFieldName(data);
|
||||
|
||||
if (data.length > 1 && timeFieldName && options.timeSeries !== false) {
|
||||
data = seriesToColumnsTransformer.transformer({
|
||||
byField: timeFieldName,
|
||||
})(data);
|
||||
}
|
||||
|
||||
const info = fieldReducers.get(options.reducer);
|
||||
if (!info) {
|
||||
throw new Error(`Unknown reducer: ${options.reducer}`);
|
||||
const mode = options.mode ?? CalculateFieldMode.ReduceRow;
|
||||
let creator: ValuesCreator | undefined = undefined;
|
||||
|
||||
if (mode === CalculateFieldMode.ReduceRow) {
|
||||
creator = getReduceRowCreator(defaults(options.reduce, defaultReduceOptions));
|
||||
} else if (mode === CalculateFieldMode.BinaryOperation) {
|
||||
creator = getBinaryCreator(defaults(options.binary, defaultBinaryOptions));
|
||||
}
|
||||
|
||||
// Nothing configured
|
||||
if (!creator) {
|
||||
return data;
|
||||
}
|
||||
const reducer = info.reduce ?? doStandardCalcs;
|
||||
const ignoreNulls = options.nullValueMode === NullValueMode.Ignore;
|
||||
const nullAsZero = options.nullValueMode === NullValueMode.AsZero;
|
||||
|
||||
return data.map(frame => {
|
||||
// Find the columns that should be examined
|
||||
const columns: Vector[] = [];
|
||||
frame.fields.forEach(field => {
|
||||
if (matcher(field)) {
|
||||
columns.push(field.values);
|
||||
}
|
||||
});
|
||||
|
||||
// Prepare a "fake" field for the row
|
||||
const iter = new RowVector(columns);
|
||||
const row: Field = {
|
||||
name: 'temp',
|
||||
values: iter,
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
};
|
||||
const vals: number[] = [];
|
||||
for (let i = 0; i < frame.length; i++) {
|
||||
iter.rowIndex = i;
|
||||
row.calcs = undefined; // bust the cache (just in case)
|
||||
const val = reducer(row, ignoreNulls, nullAsZero)[options.reducer];
|
||||
vals.push(val);
|
||||
// delegate field creation to the specific function
|
||||
const values = creator!(frame);
|
||||
if (!values) {
|
||||
return frame;
|
||||
}
|
||||
|
||||
const field = {
|
||||
name: options.alias || info.name,
|
||||
name: getResultFieldNameForCalculateFieldTransformerOptions(options),
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
values: new ArrayVector(vals),
|
||||
values,
|
||||
};
|
||||
let fields: Field[] = [];
|
||||
|
||||
// Replace all fields with the single field
|
||||
if (options.replaceFields) {
|
||||
const { timeField } = getTimeField(frame);
|
||||
if (timeField && options.timeSeries !== false) {
|
||||
fields = [timeField, field];
|
||||
} else {
|
||||
fields = [field];
|
||||
}
|
||||
} else {
|
||||
fields = [...frame.fields, field];
|
||||
}
|
||||
return {
|
||||
...frame,
|
||||
fields: options.replaceFields ? [field] : [...frame.fields, field],
|
||||
fields,
|
||||
};
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
function getReduceRowCreator(options: ReduceOptions): ValuesCreator {
|
||||
let matcher = getFieldMatcher({
|
||||
id: FieldMatcherID.numeric,
|
||||
});
|
||||
|
||||
if (options.include && options.include.length) {
|
||||
matcher = getFieldMatcher({
|
||||
id: FieldMatcherID.byName,
|
||||
options: options.include,
|
||||
});
|
||||
}
|
||||
|
||||
const info = fieldReducers.get(options.reducer);
|
||||
|
||||
if (!info) {
|
||||
throw new Error(`Unknown reducer: ${options.reducer}`);
|
||||
}
|
||||
|
||||
const reducer = info.reduce ?? doStandardCalcs;
|
||||
const ignoreNulls = options.nullValueMode === NullValueMode.Ignore;
|
||||
const nullAsZero = options.nullValueMode === NullValueMode.AsZero;
|
||||
|
||||
return (frame: DataFrame) => {
|
||||
// Find the columns that should be examined
|
||||
const columns: Vector[] = [];
|
||||
for (const field of frame.fields) {
|
||||
if (matcher(field)) {
|
||||
columns.push(field.values);
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare a "fake" field for the row
|
||||
const iter = new RowVector(columns);
|
||||
const row: Field = {
|
||||
name: 'temp',
|
||||
values: iter,
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
};
|
||||
const vals: number[] = [];
|
||||
|
||||
for (let i = 0; i < frame.length; i++) {
|
||||
iter.rowIndex = i;
|
||||
const val = reducer(row, ignoreNulls, nullAsZero)[options.reducer];
|
||||
vals.push(val);
|
||||
}
|
||||
|
||||
return new ArrayVector(vals);
|
||||
};
|
||||
}
|
||||
|
||||
function findFieldValuesWithNameOrConstant(frame: DataFrame, name: string): Vector | undefined {
|
||||
if (!name) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
for (const f of frame.fields) {
|
||||
if (f.name === name) {
|
||||
return f.values;
|
||||
}
|
||||
}
|
||||
|
||||
const v = parseFloat(name);
|
||||
if (!isNaN(v)) {
|
||||
return new ConstantVector(v, frame.length);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function getBinaryCreator(options: BinaryOptions): ValuesCreator {
|
||||
const operator = binaryOperators.getIfExists(options.operator);
|
||||
|
||||
return (frame: DataFrame) => {
|
||||
const left = findFieldValuesWithNameOrConstant(frame, options.left);
|
||||
const right = findFieldValuesWithNameOrConstant(frame, options.right);
|
||||
if (!left || !right || !operator) {
|
||||
return (undefined as unknown) as Vector;
|
||||
}
|
||||
|
||||
return new BinaryOperationVector(left, right, operator.operation);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the name for the time field used in all frames (if one exists)
|
||||
*/
|
||||
function findConsistentTimeFieldName(data: DataFrame[]): string | undefined {
|
||||
let name: string | undefined = undefined;
|
||||
for (const frame of data) {
|
||||
const { timeField } = getTimeField(frame);
|
||||
if (!timeField) {
|
||||
return undefined; // Not timeseries
|
||||
}
|
||||
if (!name) {
|
||||
name = timeField.name;
|
||||
} else if (name !== timeField.name) {
|
||||
// Second frame has a different time column?!
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
export function getResultFieldNameForCalculateFieldTransformerOptions(options: CalculateFieldTransformerOptions) {
|
||||
if (options.alias?.length) {
|
||||
return options.alias;
|
||||
}
|
||||
|
||||
if (options.mode === CalculateFieldMode.BinaryOperation) {
|
||||
const { binary } = options;
|
||||
return `${binary?.left ?? ''} ${binary?.operator ?? ''} ${binary?.right ?? ''}`;
|
||||
}
|
||||
|
||||
if (options.mode === CalculateFieldMode.ReduceRow) {
|
||||
const r = fieldReducers.getIfExists(options.reduce?.reducer);
|
||||
if (r) {
|
||||
return r.name;
|
||||
}
|
||||
}
|
||||
|
||||
return 'math';
|
||||
}
|
||||
|
||||
@@ -43,38 +43,6 @@ describe('Labels as Columns', () => {
|
||||
expect(result[0].fields).toEqual(expected);
|
||||
});
|
||||
|
||||
it('data frames where frame name is same as value field name replace field name with name Value', () => {
|
||||
const cfg: DataTransformerConfig<LabelsToFieldsOptions> = {
|
||||
id: DataTransformerID.labelsToFields,
|
||||
options: {},
|
||||
};
|
||||
|
||||
const oneValueOneLabelA = toDataFrame({
|
||||
name: 'A',
|
||||
fields: [
|
||||
{ name: 'time', type: FieldType.time, values: [1000] },
|
||||
{ name: 'A', type: FieldType.number, values: [1], labels: { location: 'inside' } },
|
||||
],
|
||||
});
|
||||
|
||||
const oneValueOneLabelB = toDataFrame({
|
||||
name: 'B',
|
||||
fields: [
|
||||
{ name: 'time', type: FieldType.time, values: [2000] },
|
||||
{ name: 'B', type: FieldType.number, values: [-1], labels: { location: 'outside' } },
|
||||
],
|
||||
});
|
||||
|
||||
const result = transformDataFrame([cfg], [oneValueOneLabelA, oneValueOneLabelB]);
|
||||
const expected: Field[] = [
|
||||
{ name: 'time', type: FieldType.time, values: new ArrayVector([1000, 2000]), config: {} },
|
||||
{ name: 'location', type: FieldType.string, values: new ArrayVector(['inside', 'outside']), config: {} },
|
||||
{ name: 'Value', type: FieldType.number, values: new ArrayVector([1, -1]), config: {} },
|
||||
];
|
||||
|
||||
expect(result[0].fields).toEqual(expected);
|
||||
});
|
||||
|
||||
it('data frame with 2 values and 1 label', () => {
|
||||
const cfg: DataTransformerConfig<LabelsToFieldsOptions> = {
|
||||
id: DataTransformerID.labelsToFields,
|
||||
|
||||
@@ -73,21 +73,12 @@ function getFramesWithOnlyValueFields(data: DataFrame[]): DataFrame[] {
|
||||
|
||||
for (let i = 0; i < series.fields.length; i++) {
|
||||
const field = series.fields[i];
|
||||
|
||||
if (field.type !== FieldType.number) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// When we transform a time series to DataFrame we put series name in field name.
|
||||
// This casues problems for this transformer that want all time series values in a Value column
|
||||
// So here we change field names that have same name as DataFrame to just Value
|
||||
if (field.name === series.name) {
|
||||
fields.push({
|
||||
...field,
|
||||
name: 'Value',
|
||||
});
|
||||
} else {
|
||||
fields.push(field);
|
||||
}
|
||||
fields.push(field);
|
||||
}
|
||||
|
||||
if (!fields.length) {
|
||||
|
||||
@@ -47,13 +47,23 @@ describe('OrganizeFields Transformer', () => {
|
||||
expect(organized.fields).toEqual([
|
||||
{
|
||||
config: {},
|
||||
labels: undefined,
|
||||
name: 'temperature',
|
||||
state: {
|
||||
title: 'temperature',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([10.3, 10.4, 10.5, 10.6]),
|
||||
},
|
||||
{
|
||||
config: {},
|
||||
name: 'renamed_humidity',
|
||||
config: {
|
||||
title: 'renamed_humidity',
|
||||
},
|
||||
labels: undefined,
|
||||
name: 'humidity',
|
||||
state: {
|
||||
title: 'renamed_humidity',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([10000.3, 10000.4, 10000.5, 10000.6]),
|
||||
},
|
||||
@@ -93,14 +103,24 @@ describe('OrganizeFields Transformer', () => {
|
||||
|
||||
expect(organized.fields).toEqual([
|
||||
{
|
||||
config: {},
|
||||
name: 'renamed_time',
|
||||
labels: undefined,
|
||||
config: {
|
||||
title: 'renamed_time',
|
||||
},
|
||||
name: 'time',
|
||||
state: {
|
||||
title: 'renamed_time',
|
||||
},
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([3000, 4000, 5000, 6000]),
|
||||
},
|
||||
{
|
||||
config: {},
|
||||
labels: undefined,
|
||||
name: 'pressure',
|
||||
state: {
|
||||
title: 'pressure',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([10.3, 10.4, 10.5, 10.6]),
|
||||
},
|
||||
|
||||
@@ -18,7 +18,7 @@ export interface ReduceTransformerOptions {
|
||||
export const reduceTransformer: DataTransformerInfo<ReduceTransformerOptions> = {
|
||||
id: DataTransformerID.reduce,
|
||||
name: 'Reduce',
|
||||
description: 'Reduce all rows to a single row and concatenate all results',
|
||||
description: 'Reduce all rows or data points to a single value using a function like max, min, mean or last',
|
||||
defaultOptions: {
|
||||
reducers: [ReducerID.max],
|
||||
},
|
||||
|
||||
@@ -40,20 +40,38 @@ describe('Rename Transformer', () => {
|
||||
|
||||
expect(renamed.fields).toEqual([
|
||||
{
|
||||
config: {},
|
||||
name: 'Total time',
|
||||
config: {
|
||||
title: 'Total time',
|
||||
},
|
||||
labels: undefined,
|
||||
name: 'time',
|
||||
state: {
|
||||
title: 'Total time',
|
||||
},
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([3000, 4000, 5000, 6000]),
|
||||
},
|
||||
{
|
||||
config: {},
|
||||
name: 'how cold is it?',
|
||||
config: {
|
||||
title: 'how cold is it?',
|
||||
},
|
||||
labels: undefined,
|
||||
name: 'temperature',
|
||||
state: {
|
||||
title: 'how cold is it?',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([10.3, 10.4, 10.5, 10.6]),
|
||||
},
|
||||
{
|
||||
config: {},
|
||||
name: 'Moistiness',
|
||||
config: {
|
||||
title: 'Moistiness',
|
||||
},
|
||||
name: 'humidity',
|
||||
labels: undefined,
|
||||
state: {
|
||||
title: 'Moistiness',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([10000.3, 10000.4, 10000.5, 10000.6]),
|
||||
},
|
||||
@@ -87,20 +105,36 @@ describe('Rename Transformer', () => {
|
||||
|
||||
expect(renamed.fields).toEqual([
|
||||
{
|
||||
config: {},
|
||||
name: 'ttl',
|
||||
config: {
|
||||
title: 'ttl',
|
||||
},
|
||||
name: 'time',
|
||||
labels: undefined,
|
||||
state: {
|
||||
title: 'ttl',
|
||||
},
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([3000, 4000, 5000, 6000]),
|
||||
},
|
||||
{
|
||||
config: {},
|
||||
labels: undefined,
|
||||
name: 'pressure',
|
||||
state: {
|
||||
title: 'pressure',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([10.3, 10.4, 10.5, 10.6]),
|
||||
},
|
||||
{
|
||||
config: {},
|
||||
name: 'hum',
|
||||
config: {
|
||||
title: 'hum',
|
||||
},
|
||||
labels: undefined,
|
||||
name: 'humidity',
|
||||
state: {
|
||||
title: 'hum',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([10000.3, 10000.4, 10000.5, 10000.6]),
|
||||
},
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { DataTransformerID } from './ids';
|
||||
import { DataTransformerInfo } from '../../types/transformations';
|
||||
import { DataFrame, Field } from '../..';
|
||||
import { DataFrame, Field } from '../../types/dataFrame';
|
||||
import { getFieldTitle } from '../../field/fieldState';
|
||||
|
||||
export interface RenameFieldsTransformerOptions {
|
||||
renameByName: Record<string, string>;
|
||||
@@ -28,19 +29,20 @@ export const renameFieldsTransformer: DataTransformerInfo<RenameFieldsTransforme
|
||||
|
||||
return data.map(frame => ({
|
||||
...frame,
|
||||
fields: renamer(frame.fields),
|
||||
fields: renamer(frame),
|
||||
}));
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
const createRenamer = (renameByName: Record<string, string>) => (fields: Field[]): Field[] => {
|
||||
const createRenamer = (renameByName: Record<string, string>) => (frame: DataFrame): Field[] => {
|
||||
if (!renameByName || Object.keys(renameByName).length === 0) {
|
||||
return fields;
|
||||
return frame.fields;
|
||||
}
|
||||
|
||||
return fields.map(field => {
|
||||
const renameTo = renameByName[field.name];
|
||||
return frame.fields.map(field => {
|
||||
const title = getFieldTitle(field, frame);
|
||||
const renameTo = renameByName[title];
|
||||
|
||||
if (typeof renameTo !== 'string' || renameTo.length === 0) {
|
||||
return field;
|
||||
@@ -48,7 +50,14 @@ const createRenamer = (renameByName: Record<string, string>) => (fields: Field[]
|
||||
|
||||
return {
|
||||
...field,
|
||||
name: renameTo,
|
||||
config: {
|
||||
...field.config,
|
||||
title: renameTo,
|
||||
},
|
||||
state: {
|
||||
...field.state,
|
||||
title: renameTo,
|
||||
},
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
@@ -14,6 +14,7 @@ describe('SeriesToColumns Transformer', () => {
|
||||
beforeAll(() => {
|
||||
mockTransformationsRegistry([seriesToColumnsTransformer]);
|
||||
});
|
||||
|
||||
const everySecondSeries = toDataFrame({
|
||||
name: 'even',
|
||||
fields: [
|
||||
@@ -44,38 +45,53 @@ describe('SeriesToColumns Transformer', () => {
|
||||
expect(filtered.fields).toEqual([
|
||||
{
|
||||
name: 'time',
|
||||
state: {
|
||||
title: 'time',
|
||||
},
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([1000, 3000, 4000, 5000, 6000, 7000]),
|
||||
config: {},
|
||||
labels: { origin: 'even,odd' },
|
||||
labels: undefined,
|
||||
},
|
||||
{
|
||||
name: 'temperature {even}',
|
||||
name: 'temperature',
|
||||
state: {
|
||||
title: 'temperature even',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([null, 10.3, 10.4, 10.5, 10.6, null]),
|
||||
config: {},
|
||||
labels: { origin: 'even' },
|
||||
labels: { name: 'even' },
|
||||
},
|
||||
{
|
||||
name: 'humidity {even}',
|
||||
name: 'humidity',
|
||||
state: {
|
||||
title: 'humidity even',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([null, 10000.3, 10000.4, 10000.5, 10000.6, null]),
|
||||
config: {},
|
||||
labels: { origin: 'even' },
|
||||
labels: { name: 'even' },
|
||||
},
|
||||
{
|
||||
name: 'temperature {odd}',
|
||||
name: 'temperature',
|
||||
state: {
|
||||
title: 'temperature odd',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([11.1, 11.3, null, 11.5, null, 11.7]),
|
||||
config: {},
|
||||
labels: { origin: 'odd' },
|
||||
labels: { name: 'odd' },
|
||||
},
|
||||
{
|
||||
name: 'humidity {odd}',
|
||||
name: 'humidity',
|
||||
state: {
|
||||
title: 'humidity odd',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([11000.1, 11000.3, null, 11000.5, null, 11000.7]),
|
||||
config: {},
|
||||
labels: { origin: 'odd' },
|
||||
labels: { name: 'odd' },
|
||||
},
|
||||
]);
|
||||
});
|
||||
@@ -92,38 +108,53 @@ describe('SeriesToColumns Transformer', () => {
|
||||
expect(filtered.fields).toEqual([
|
||||
{
|
||||
name: 'temperature',
|
||||
state: {
|
||||
title: 'temperature',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([10.3, 10.4, 10.5, 10.6, 11.1, 11.3, 11.5, 11.7]),
|
||||
config: {},
|
||||
labels: { origin: 'even,odd' },
|
||||
labels: undefined,
|
||||
},
|
||||
{
|
||||
name: 'time {even}',
|
||||
name: 'time',
|
||||
state: {
|
||||
title: 'time even',
|
||||
},
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([3000, 4000, 5000, 6000, null, null, null, null]),
|
||||
config: {},
|
||||
labels: { origin: 'even' },
|
||||
labels: { name: 'even' },
|
||||
},
|
||||
{
|
||||
name: 'humidity {even}',
|
||||
name: 'humidity',
|
||||
state: {
|
||||
title: 'humidity even',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([10000.3, 10000.4, 10000.5, 10000.6, null, null, null, null]),
|
||||
config: {},
|
||||
labels: { origin: 'even' },
|
||||
labels: { name: 'even' },
|
||||
},
|
||||
{
|
||||
name: 'time {odd}',
|
||||
name: 'time',
|
||||
state: {
|
||||
title: 'time odd',
|
||||
},
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([null, null, null, null, 1000, 3000, 5000, 7000]),
|
||||
config: {},
|
||||
labels: { origin: 'odd' },
|
||||
labels: { name: 'odd' },
|
||||
},
|
||||
{
|
||||
name: 'humidity {odd}',
|
||||
name: 'humidity',
|
||||
state: {
|
||||
title: 'humidity odd',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([null, null, null, null, 11000.1, 11000.3, 11000.5, 11000.7]),
|
||||
config: {},
|
||||
labels: { origin: 'odd' },
|
||||
labels: { name: 'odd' },
|
||||
},
|
||||
]);
|
||||
});
|
||||
@@ -144,38 +175,53 @@ describe('SeriesToColumns Transformer', () => {
|
||||
expect(filtered.fields).toEqual([
|
||||
{
|
||||
name: 'time',
|
||||
state: {
|
||||
title: 'time',
|
||||
},
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([1000, 3000, 4000, 5000, 6000, 7000]),
|
||||
config: {},
|
||||
labels: { origin: 'even,odd' },
|
||||
labels: undefined,
|
||||
},
|
||||
{
|
||||
name: 'temperature {even}',
|
||||
name: 'temperature',
|
||||
state: {
|
||||
title: 'temperature even',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([null, 10.3, 10.4, 10.5, 10.6, null]),
|
||||
config: {},
|
||||
labels: { origin: 'even' },
|
||||
labels: { name: 'even' },
|
||||
},
|
||||
{
|
||||
name: 'humidity {even}',
|
||||
name: 'humidity',
|
||||
state: {
|
||||
title: 'humidity even',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([null, 10000.3, 10000.4, 10000.5, 10000.6, null]),
|
||||
config: {},
|
||||
labels: { origin: 'even' },
|
||||
labels: { name: 'even' },
|
||||
},
|
||||
{
|
||||
name: 'temperature {odd}',
|
||||
name: 'temperature',
|
||||
state: {
|
||||
title: 'temperature odd',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([11.1, 11.3, null, 11.5, null, 11.7]),
|
||||
config: {},
|
||||
labels: { origin: 'odd' },
|
||||
labels: { name: 'odd' },
|
||||
},
|
||||
{
|
||||
name: 'humidity {odd}',
|
||||
name: 'humidity',
|
||||
state: {
|
||||
title: 'humidity odd',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([11000.1, 11000.3, null, 11000.5, null, 11000.7]),
|
||||
config: {},
|
||||
labels: { origin: 'odd' },
|
||||
labels: { name: 'odd' },
|
||||
},
|
||||
]);
|
||||
});
|
||||
@@ -209,24 +255,33 @@ describe('SeriesToColumns Transformer', () => {
|
||||
const expected: Field[] = [
|
||||
{
|
||||
name: 'time',
|
||||
state: {
|
||||
title: 'time',
|
||||
},
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([1000, 2000, 3000, 4000]),
|
||||
config: {},
|
||||
labels: { origin: 'temperature,B' },
|
||||
labels: undefined,
|
||||
},
|
||||
{
|
||||
name: 'temperature',
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([1, 3, 5, 7]),
|
||||
config: {},
|
||||
labels: { origin: 'temperature' },
|
||||
state: {
|
||||
title: 'temperature temperature',
|
||||
},
|
||||
labels: { name: 'temperature' },
|
||||
},
|
||||
{
|
||||
name: 'temperature {B}',
|
||||
name: 'temperature',
|
||||
state: {
|
||||
title: 'temperature B',
|
||||
},
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([2, 4, 6, 8]),
|
||||
config: {},
|
||||
labels: { origin: 'B' },
|
||||
labels: { name: 'B' },
|
||||
},
|
||||
];
|
||||
|
||||
|
||||
@@ -1,70 +1,81 @@
|
||||
import { DataFrame, DataTransformerInfo } from '../../types';
|
||||
import { DataFrame, DataTransformerInfo, Field } from '../../types';
|
||||
import { DataTransformerID } from './ids';
|
||||
import { MutableDataFrame } from '../../dataframe';
|
||||
import { filterFieldsByNameTransformer } from './filterByName';
|
||||
import { ArrayVector } from '../../vector';
|
||||
import { getFieldTitle } from '../../field/fieldState';
|
||||
|
||||
export interface SeriesToColumnsOptions {
|
||||
byField?: string;
|
||||
}
|
||||
|
||||
const DEFAULT_KEY_FIELD = 'Time';
|
||||
|
||||
export const seriesToColumnsTransformer: DataTransformerInfo<SeriesToColumnsOptions> = {
|
||||
id: DataTransformerID.seriesToColumns,
|
||||
name: 'Series as columns',
|
||||
description: 'Groups series by field and returns values as columns',
|
||||
defaultOptions: {
|
||||
byField: 'Time',
|
||||
byField: DEFAULT_KEY_FIELD,
|
||||
},
|
||||
transformer: options => (data: DataFrame[]) => {
|
||||
const optionsArray = options.byField ? [options.byField] : [];
|
||||
// not sure if I should use filterFieldsByNameTransformer to get the key field
|
||||
const keyDataFrames = filterFieldsByNameTransformer.transformer({
|
||||
include: optionsArray,
|
||||
})(data);
|
||||
if (!keyDataFrames.length) {
|
||||
// for now we only parse data frames with 2 fields
|
||||
return data;
|
||||
}
|
||||
const keyFieldMatch = options.byField || DEFAULT_KEY_FIELD;
|
||||
const allFields: FieldsToProcess[] = [];
|
||||
|
||||
// not sure if I should use filterFieldsByNameTransformer to get the other fields
|
||||
const otherDataFrames = filterFieldsByNameTransformer.transformer({
|
||||
exclude: optionsArray,
|
||||
})(data);
|
||||
if (!otherDataFrames.length) {
|
||||
// for now we only parse data frames with 2 fields
|
||||
return data;
|
||||
}
|
||||
for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
|
||||
const frame = data[frameIndex];
|
||||
const keyField = findKeyField(frame, keyFieldMatch);
|
||||
|
||||
const processed = new MutableDataFrame();
|
||||
const origins: string[] = [];
|
||||
for (let frameIndex = 0; frameIndex < keyDataFrames.length; frameIndex++) {
|
||||
const frame = keyDataFrames[frameIndex];
|
||||
const origin = getOrigin(frame, frameIndex);
|
||||
origins.push(origin);
|
||||
}
|
||||
if (!keyField) {
|
||||
return data;
|
||||
}
|
||||
|
||||
processed.addField({
|
||||
...keyDataFrames[0].fields[0],
|
||||
values: new ArrayVector([]),
|
||||
labels: { origin: origins.join(',') },
|
||||
});
|
||||
|
||||
for (let frameIndex = 0; frameIndex < otherDataFrames.length; frameIndex++) {
|
||||
const frame = otherDataFrames[frameIndex];
|
||||
for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) {
|
||||
const field = frame.fields[fieldIndex];
|
||||
const origin = getOrigin(frame, frameIndex);
|
||||
const name = getColumnName(otherDataFrames, frameIndex, fieldIndex, false);
|
||||
if (processed.fields.find(field => field.name === name)) {
|
||||
const sourceField = frame.fields[fieldIndex];
|
||||
|
||||
if (sourceField === keyField) {
|
||||
continue;
|
||||
}
|
||||
processed.addField({ ...field, name, values: new ArrayVector([]), labels: { origin } });
|
||||
|
||||
let labels = sourceField.labels ?? {};
|
||||
|
||||
if (frame.name) {
|
||||
labels = { ...labels, name: frame.name };
|
||||
}
|
||||
|
||||
allFields.push({
|
||||
keyField,
|
||||
sourceField,
|
||||
newField: {
|
||||
...sourceField,
|
||||
state: null,
|
||||
values: new ArrayVector([]),
|
||||
labels,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// if no key fields or more than one value field
|
||||
if (allFields.length <= 1) {
|
||||
return data;
|
||||
}
|
||||
|
||||
const resultFrame = new MutableDataFrame();
|
||||
|
||||
resultFrame.addField({
|
||||
...allFields[0].keyField,
|
||||
values: new ArrayVector([]),
|
||||
});
|
||||
|
||||
for (const item of allFields) {
|
||||
resultFrame.addField(item.newField);
|
||||
}
|
||||
|
||||
const keyFieldTitle = getFieldTitle(resultFrame.fields[0], resultFrame);
|
||||
const byKeyField: { [key: string]: { [key: string]: any } } = {};
|
||||
// this loop creates a dictionary object that groups the key fields values
|
||||
/*
|
||||
|
||||
/*
|
||||
this loop creates a dictionary object that groups the key fields values
|
||||
{
|
||||
"key field first value as string" : {
|
||||
"key field name": key field first value,
|
||||
@@ -77,26 +88,20 @@ export const seriesToColumnsTransformer: DataTransformerInfo<SeriesToColumnsOpti
|
||||
"other series n name": other series n value
|
||||
}
|
||||
}
|
||||
*/
|
||||
for (let seriesIndex = 0; seriesIndex < keyDataFrames.length; seriesIndex++) {
|
||||
const keyDataFrame = keyDataFrames[seriesIndex];
|
||||
const keyField = keyDataFrame.fields[0];
|
||||
const keyColumnName = getColumnName(keyDataFrames, seriesIndex, 0, true);
|
||||
const keyValues = keyField.values;
|
||||
for (let valueIndex = 0; valueIndex < keyValues.length; valueIndex++) {
|
||||
const keyValue = keyValues.get(valueIndex);
|
||||
const keyValueAsString = keyValue.toString();
|
||||
if (!byKeyField[keyValueAsString]) {
|
||||
byKeyField[keyValueAsString] = { [keyColumnName]: keyValue };
|
||||
}
|
||||
const otherDataFrame = otherDataFrames[seriesIndex];
|
||||
for (let otherIndex = 0; otherIndex < otherDataFrame.fields.length; otherIndex++) {
|
||||
const otherColumnName = getColumnName(otherDataFrames, seriesIndex, otherIndex, false);
|
||||
const otherField = otherDataFrame.fields[otherIndex];
|
||||
const otherValue = otherField.values.get(valueIndex);
|
||||
if (!byKeyField[keyValueAsString][otherColumnName]) {
|
||||
byKeyField[keyValueAsString] = { ...byKeyField[keyValueAsString], [otherColumnName]: otherValue };
|
||||
}
|
||||
*/
|
||||
|
||||
for (let fieldIndex = 0; fieldIndex < allFields.length; fieldIndex++) {
|
||||
const { sourceField, keyField, newField } = allFields[fieldIndex];
|
||||
const newFieldTitle = getFieldTitle(newField, resultFrame);
|
||||
|
||||
for (let valueIndex = 0; valueIndex < sourceField.values.length; valueIndex++) {
|
||||
const value = sourceField.values.get(valueIndex);
|
||||
const keyValue = keyField.values.get(valueIndex);
|
||||
|
||||
if (!byKeyField[keyValue]) {
|
||||
byKeyField[keyValue] = { [newFieldTitle]: value, [keyFieldTitle]: keyValue };
|
||||
} else {
|
||||
byKeyField[keyValue][newFieldTitle] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -104,27 +109,33 @@ export const seriesToColumnsTransformer: DataTransformerInfo<SeriesToColumnsOpti
|
||||
const keyValueStrings = Object.keys(byKeyField);
|
||||
for (let rowIndex = 0; rowIndex < keyValueStrings.length; rowIndex++) {
|
||||
const keyValueAsString = keyValueStrings[rowIndex];
|
||||
for (let fieldIndex = 0; fieldIndex < processed.fields.length; fieldIndex++) {
|
||||
const field = processed.fields[fieldIndex];
|
||||
const value = byKeyField[keyValueAsString][field.name] ?? null;
|
||||
|
||||
for (let fieldIndex = 0; fieldIndex < resultFrame.fields.length; fieldIndex++) {
|
||||
const field = resultFrame.fields[fieldIndex];
|
||||
const otherColumnName = getFieldTitle(field, resultFrame);
|
||||
const value = byKeyField[keyValueAsString][otherColumnName] ?? null;
|
||||
field.values.add(value);
|
||||
}
|
||||
}
|
||||
|
||||
return [processed];
|
||||
return [resultFrame];
|
||||
},
|
||||
};
|
||||
|
||||
const getColumnName = (frames: DataFrame[], frameIndex: number, fieldIndex: number, isKeyField = false) => {
|
||||
const frame = frames[frameIndex];
|
||||
const field = frame.fields[fieldIndex];
|
||||
const frameName = frame.name || `${frameIndex}`;
|
||||
const fieldName = field.name;
|
||||
const seriesName = isKeyField ? fieldName : fieldName === frameName ? fieldName : `${fieldName} {${frameName}}`;
|
||||
function findKeyField(frame: DataFrame, matchTitle: string): Field | null {
|
||||
for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) {
|
||||
const field = frame.fields[fieldIndex];
|
||||
|
||||
return seriesName;
|
||||
};
|
||||
if (matchTitle === getFieldTitle(field)) {
|
||||
return field;
|
||||
}
|
||||
}
|
||||
|
||||
const getOrigin = (frame: DataFrame, index: number) => {
|
||||
return frame.name || `${index}`;
|
||||
};
|
||||
return null;
|
||||
}
|
||||
|
||||
interface FieldsToProcess {
|
||||
newField: Field;
|
||||
sourceField: Field;
|
||||
keyField: Field;
|
||||
}
|
||||
|
||||
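For reference, a minimal usage sketch of the reworked transformer: two frames that share a time key field are joined into one frame, and each value field keeps a { name: <frame name> } label. The toDataFrame helper and the relative import paths are assumptions for illustration, not part of the change above.

import { FieldType } from '../../types';
import { toDataFrame } from '../../dataframe';
import { seriesToColumnsTransformer } from './seriesToColumns';

// Two single-series frames keyed by the same time field
const even = toDataFrame({
  name: 'even',
  fields: [
    { name: 'time', type: FieldType.time, values: [1000, 2000] },
    { name: 'temperature', type: FieldType.number, values: [10.3, 10.4] },
  ],
});
const odd = toDataFrame({
  name: 'odd',
  fields: [
    { name: 'time', type: FieldType.time, values: [1000, 2000] },
    { name: 'temperature', type: FieldType.number, values: [11.1, 11.3] },
  ],
});

// Join both frames on the time field, as the tests above do
const [joined] = seriesToColumnsTransformer.transformer({ byField: 'time' })([even, odd]);
// joined.fields -> time, temperature (labels: { name: 'even' }), temperature (labels: { name: 'odd' })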
@@ -74,3 +74,12 @@ export class AppPlugin<T = KeyValue> extends GrafanaPlugin<AppPluginMeta<T>> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines life cycle of a feature
|
||||
* @internal
|
||||
*/
|
||||
export enum FeatureState {
|
||||
alpha = 'alpha',
|
||||
beta = 'beta',
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@ export enum LoadingState {
|
||||
Error = 'Error',
|
||||
}
|
||||
|
||||
type PreferredVisualisationType = 'graph' | 'table';
|
||||
|
||||
export interface QueryResultMeta {
|
||||
/** DatasSource Specific Values */
|
||||
custom?: Record<string, any>;
|
||||
@@ -28,6 +30,9 @@ export interface QueryResultMeta {
|
||||
/** Used to track transformation ids that where part of the processing */
|
||||
transformations?: string[];
|
||||
|
||||
/** Currently used to show results in Explore only in preferred visualisation option */
|
||||
preferredVisualisationType?: PreferredVisualisationType;
|
||||
|
||||
/**
|
||||
* Legacy data source specific, should be moved to custom
|
||||
* */
|
||||
@@ -106,6 +111,10 @@ export type TimeSeriesPoints = TimeSeriesValue[][];
|
||||
|
||||
export interface TimeSeries extends QueryResultBase {
|
||||
target: string;
|
||||
/**
|
||||
* If name is manually configured via an alias / legend pattern
|
||||
*/
|
||||
title?: string;
|
||||
datapoints: TimeSeriesPoints;
|
||||
unit?: string;
|
||||
tags?: Labels;
|
||||
|
||||
@@ -4,7 +4,6 @@ import { QueryResultBase, Labels, NullValueMode } from './data';
|
||||
import { DisplayProcessor, DisplayValue } from './displayValue';
|
||||
import { DataLink, LinkModel } from './dataLink';
|
||||
import { Vector } from './vector';
|
||||
import { FieldCalcs } from '../transformations/fieldReducer';
|
||||
import { FieldColor } from './fieldColor';
|
||||
import { ScopedVars } from './ScopedVars';
|
||||
|
||||
@@ -53,8 +52,6 @@ export interface FieldConfig<TOptions extends object = any> {
|
||||
|
||||
// Panel Specific Values
|
||||
custom?: TOptions;
|
||||
|
||||
scopedVars?: ScopedVars;
|
||||
}
|
||||
|
||||
export interface ValueLinkConfig {
|
||||
@@ -85,9 +82,9 @@ export interface Field<T = any, V = Vector<T>> {
|
||||
labels?: Labels;
|
||||
|
||||
/**
|
||||
* Cache of reduced values
|
||||
* Cached values with appropriate display and id values
|
||||
*/
|
||||
calcs?: FieldCalcs;
|
||||
state?: FieldState | null;
|
||||
|
||||
/**
|
||||
* Convert text to the field value
|
||||
@@ -105,6 +102,23 @@ export interface Field<T = any, V = Vector<T>> {
|
||||
getLinks?: (config: ValueLinkConfig) => Array<LinkModel<Field>>;
|
||||
}
|
||||
|
||||
export interface FieldState {
|
||||
/**
|
||||
* An appropriate name for the field (does not include frame info)
|
||||
*/
|
||||
title?: string | null;
|
||||
|
||||
/**
|
||||
* Cache of reduced values
|
||||
*/
|
||||
calcs?: FieldCalcs;
|
||||
|
||||
/**
|
||||
* Appropriate values for templating
|
||||
*/
|
||||
scopedVars?: ScopedVars;
|
||||
}
|
||||
|
||||
export interface DataFrame extends QueryResultBase {
|
||||
name?: string;
|
||||
fields: Field[]; // All fields of equal length
|
||||
@@ -131,3 +145,7 @@ export interface DataFrameDTO extends QueryResultBase {
|
||||
name?: string;
|
||||
fields: Array<FieldDTO | Field>;
|
||||
}
|
||||
|
||||
export interface FieldCalcs extends Record<string, any> {}
|
||||
|
||||
export const TIME_SERIES_FIELD_NAME = 'Value';
|
||||
|
||||
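To show where the new FieldState lands, here is a hedged example of a field carrying the runtime-computed state. All values are made up for illustration; Grafana normally fills in state itself, and the relative import paths are assumptions.

import { Field, FieldType } from './dataFrame';
import { ArrayVector } from '../vector';

const field: Field<number> = {
  name: 'temperature',
  type: FieldType.number,
  config: {},
  values: new ArrayVector([10.3, 10.4]),
  labels: { name: 'even' },
  // Computed at runtime: display title without frame info, cached calcs, template vars
  state: {
    title: 'temperature even',
    calcs: { mean: 10.35 },
  },
};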
@@ -8,6 +8,7 @@ import { DataFrame } from './dataFrame';
|
||||
*/
|
||||
export enum LogLevel {
|
||||
emerg = 'critical',
|
||||
fatal = 'critical',
|
||||
alert = 'critical',
|
||||
crit = 'critical',
|
||||
critical = 'critical',
|
||||
@@ -17,6 +18,7 @@ export enum LogLevel {
|
||||
eror = 'error',
|
||||
error = 'error',
|
||||
info = 'info',
|
||||
information = 'info',
|
||||
notice = 'info',
|
||||
dbug = 'debug',
|
||||
debug = 'debug',
|
||||
|
||||
39
packages/grafana-data/src/utils/binaryOperators.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { RegistryItem, Registry } from './Registry';
|
||||
|
||||
export enum BinaryOperationID {
|
||||
Add = '+',
|
||||
Subtract = '-',
|
||||
Divide = '/',
|
||||
Multiply = '*',
|
||||
}
|
||||
|
||||
export type BinaryOperation = (left: number, right: number) => number;
|
||||
|
||||
interface BinaryOperatorInfo extends RegistryItem {
|
||||
operation: BinaryOperation;
|
||||
}
|
||||
|
||||
export const binaryOperators = new Registry<BinaryOperatorInfo>(() => {
|
||||
return [
|
||||
{
|
||||
id: BinaryOperationID.Add,
|
||||
name: 'Add',
|
||||
operation: (a: number, b: number) => a + b,
|
||||
},
|
||||
{
|
||||
id: BinaryOperationID.Subtract,
|
||||
name: 'Subtract',
|
||||
operation: (a: number, b: number) => a - b,
|
||||
},
|
||||
{
|
||||
id: BinaryOperationID.Multiply,
|
||||
name: 'Multiply',
|
||||
operation: (a: number, b: number) => a * b,
|
||||
},
|
||||
{
|
||||
id: BinaryOperationID.Divide,
|
||||
name: 'Divide',
|
||||
operation: (a: number, b: number) => a / b,
|
||||
},
|
||||
];
|
||||
});
|
||||
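A short sketch of how the registry is meant to be consumed; it mirrors the updated ScaledVector test further down. The relative import path is an assumption.

import { binaryOperators, BinaryOperationID } from './binaryOperators';

// Look the operation up by its id and apply it pairwise
const multiply = binaryOperators.get(BinaryOperationID.Multiply).operation;
const left = [1, 2, 3];
const right = [10, 20, 30];
const product = left.map((value, i) => multiply(value, right[i])); // [10, 40, 90]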
@@ -17,7 +17,7 @@ describe('read csv', () => {
|
||||
const rows = 4;
|
||||
expect(series.length).toBe(rows);
|
||||
|
||||
// Make sure everythign it padded properly
|
||||
// Make sure everything is padded properly
|
||||
for (const field of series.fields) {
|
||||
expect(field.values.length).toBe(rows);
|
||||
}
|
||||
|
||||
9
packages/grafana-data/src/utils/docs.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
/**
|
||||
* Enumeration of documentation topics
|
||||
* @internal
|
||||
*/
|
||||
export enum DocsId {
|
||||
Transformations,
|
||||
FieldConfig,
|
||||
FieldConfigOverrides,
|
||||
}
|
||||
@@ -8,6 +8,7 @@ export * from './labels';
|
||||
export * from './object';
|
||||
export * from './namedColorsPalette';
|
||||
export * from './series';
|
||||
export * from './binaryOperators';
|
||||
export { PanelOptionsEditorBuilder, FieldConfigEditorBuilder } from './OptionsUIBuilders';
|
||||
|
||||
export { getMappedValue } from './valueMappings';
|
||||
@@ -15,3 +16,4 @@ export { getFlotPairs, getFlotPairsConstant } from './flotPairs';
|
||||
export { locationUtil } from './location';
|
||||
export { urlUtil, UrlQueryMap, UrlQueryValue } from './url';
|
||||
export { DataLinkBuiltInVars } from './dataLinks';
|
||||
export { DocsId } from './docs';
|
||||
|
||||
@@ -15,7 +15,7 @@ describe('getLoglevel()', () => {
|
||||
});
|
||||
|
||||
it('returns no log level on when level is part of a word', () => {
|
||||
expect(getLogLevel('this is information')).toBe(LogLevel.unknown);
|
||||
expect(getLogLevel('who warns us')).toBe(LogLevel.unknown);
|
||||
});
|
||||
|
||||
it('returns same log level for long and short version', () => {
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
import { ArrayVector } from './ArrayVector';
|
||||
import { ScaledVector } from './ScaledVector';
|
||||
import { BinaryOperationVector } from './BinaryOperationVector';
|
||||
import { ConstantVector } from './ConstantVector';
|
||||
import { binaryOperators, BinaryOperationID } from '../utils/binaryOperators';
|
||||
|
||||
describe('ScaledVector', () => {
|
||||
it('should support multiply operations', () => {
|
||||
const source = new ArrayVector([1, 2, 3, 4]);
|
||||
const scale = 2.456;
|
||||
const v = new ScaledVector(source, scale);
|
||||
const operation = binaryOperators.get(BinaryOperationID.Multiply).operation;
|
||||
const v = new BinaryOperationVector(source, new ConstantVector(scale, source.length), operation);
|
||||
expect(v.length).toEqual(source.length);
|
||||
// expect(v.push(10)).toEqual(source.length); // not implemented
|
||||
for (let i = 0; i < 10; i++) {
|
||||
23
packages/grafana-data/src/vector/BinaryOperationVector.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { Vector } from '../types/vector';
|
||||
import { vectorToArray } from './vectorToArray';
|
||||
import { BinaryOperation } from '../utils/binaryOperators';
|
||||
|
||||
export class BinaryOperationVector implements Vector<number> {
|
||||
constructor(private left: Vector<number>, private right: Vector<number>, private operation: BinaryOperation) {}
|
||||
|
||||
get length(): number {
|
||||
return this.left.length;
|
||||
}
|
||||
|
||||
get(index: number): number {
|
||||
return this.operation(this.left.get(index), this.right.get(index));
|
||||
}
|
||||
|
||||
toArray(): number[] {
|
||||
return vectorToArray(this);
|
||||
}
|
||||
|
||||
toJSON(): number[] {
|
||||
return vectorToArray(this);
|
||||
}
|
||||
}
|
||||
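The new vector evaluates lazily, so pairing it with ConstantVector reproduces what ScaledVector used to do without materializing any values. A sketch, with imports assumed from the surrounding package layout:

import { ArrayVector } from './ArrayVector';
import { ConstantVector } from './ConstantVector';
import { BinaryOperationVector } from './BinaryOperationVector';
import { binaryOperators, BinaryOperationID } from '../utils/binaryOperators';

const source = new ArrayVector([1, 2, 3, 4]);
const scale = new ConstantVector(2.456, source.length);
const operation = binaryOperators.get(BinaryOperationID.Multiply).operation;

// Values are computed on get(); nothing is copied up front
const scaled = new BinaryOperationVector(source, scale, operation);
scaled.get(1); // 4.912
scaled.toArray(); // [2.456, 4.912, 7.368, 9.824]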
@@ -1,22 +0,0 @@
|
||||
import { Vector } from '../types/vector';
|
||||
import { vectorToArray } from './vectorToArray';
|
||||
|
||||
export class ScaledVector implements Vector<number> {
|
||||
constructor(private source: Vector<number>, private scale: number) {}
|
||||
|
||||
get length(): number {
|
||||
return this.source.length;
|
||||
}
|
||||
|
||||
get(index: number): number {
|
||||
return this.source.get(index) * this.scale;
|
||||
}
|
||||
|
||||
toArray(): number[] {
|
||||
return vectorToArray(this);
|
||||
}
|
||||
|
||||
toJSON(): number[] {
|
||||
return vectorToArray(this);
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,7 @@ export * from './AppendedVectors';
|
||||
export * from './ArrayVector';
|
||||
export * from './CircularVector';
|
||||
export * from './ConstantVector';
|
||||
export * from './ScaledVector';
|
||||
export * from './BinaryOperationVector';
|
||||
export * from './SortedVector';
|
||||
|
||||
export { vectorator } from './FunctionalVector';
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/e2e-selectors",
|
||||
"version": "7.0.0-pre.0",
|
||||
"version": "7.0.0-beta.2",
|
||||
"description": "Grafana End-to-End Test Selectors Library",
|
||||
"keywords": [
|
||||
"cli",
|
||||
@@ -25,9 +25,9 @@
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "13.7.7",
|
||||
"@rollup/plugin-commonjs": "11.0.2",
|
||||
"@rollup/plugin-node-resolve": "7.1.1",
|
||||
"@types/node": "13.7.7",
|
||||
"@types/rollup-plugin-visualizer": "2.6.0",
|
||||
"@types/systemjs": "^0.20.6",
|
||||
"pretty-format": "25.1.0",
|
||||
|
||||
@@ -67,6 +67,8 @@ export const Components = {
|
||||
},
|
||||
Query: {
|
||||
content: 'Panel inspector Query content',
|
||||
refreshButton: 'Panel inspector Query refresh button',
|
||||
jsonObjectKeys: () => '.json-formatter-key',
|
||||
},
|
||||
},
|
||||
Tab: {
|
||||
@@ -76,6 +78,14 @@ export const Components = {
|
||||
QueryTab: {
|
||||
content: 'Query editor tab content',
|
||||
queryInspectorButton: 'Query inspector button',
|
||||
addQuery: 'Query editor add query button',
|
||||
},
|
||||
QueryEditorRows: {
|
||||
rows: 'Query editor row',
|
||||
},
|
||||
QueryEditorRow: {
|
||||
actionButton: (title: string) => `${title} query operation action`,
|
||||
title: (refId: string) => `Query editor row title ${refId}`,
|
||||
},
|
||||
AlertTab: {
|
||||
content: 'Alert editor tab content',
|
||||
@@ -98,6 +108,7 @@ export const Components = {
|
||||
},
|
||||
Select: {
|
||||
option: 'Select option',
|
||||
input: () => 'input[id*="react-select-"]',
|
||||
},
|
||||
FieldConfigEditor: {
|
||||
content: 'Field config editor content',
|
||||
@@ -105,4 +116,7 @@ export const Components = {
|
||||
OverridesConfigEditor: {
|
||||
content: 'Field overrides editor content',
|
||||
},
|
||||
FolderPicker: {
|
||||
container: 'Folder picker select container',
|
||||
},
|
||||
};
|
||||
|
||||
@@ -49,6 +49,7 @@ export const Pages = {
|
||||
sectionItems: (item: string) => `Dashboard settings section item ${item}`,
|
||||
saveDashBoard: 'Dashboard settings aside actions Save button',
|
||||
saveAsDashBoard: 'Dashboard settings aside actions Save As button',
|
||||
title: 'Dashboard settings page title',
|
||||
},
|
||||
Variables: {
|
||||
List: {
|
||||
|
||||
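A hedged sketch of how the added selectors are picked up from a Cypress spec through @grafana/e2e. The concrete flow and the 'Disable/enable query' title are illustrative, not taken from the repo.

import { e2e } from '@grafana/e2e';

// Selectors become chainable getters, including the parameterized QueryEditorRow action button
e2e.components.QueryTab.addQuery().click();
e2e.components.QueryEditorRow.actionButton('Disable/enable query').click();
e2e.components.FolderPicker.container().should('be.visible');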
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/e2e",
|
||||
"version": "7.0.0-pre.0",
|
||||
"version": "7.0.0-beta.2",
|
||||
"description": "Grafana End-to-End Test Library",
|
||||
"keywords": [
|
||||
"cli",
|
||||
@@ -26,11 +26,10 @@
|
||||
"docsExtract": "mkdir -p ../../reports/docs && api-extractor run 2>&1 | tee ../../reports/docs/$(basename $(pwd)).log",
|
||||
"lint": "eslint cypress/ src/ --ext=.js,.ts,.tsx",
|
||||
"open": "cypress open",
|
||||
"start": "cypress run",
|
||||
"start": "cypress run --headless --browser chrome",
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@cypress/webpack-preprocessor": "4.1.3",
|
||||
"@rollup/plugin-commonjs": "11.0.2",
|
||||
"@rollup/plugin-node-resolve": "7.1.1",
|
||||
"@types/node": "13.7.7",
|
||||
@@ -40,17 +39,16 @@
|
||||
"rollup-plugin-sourcemaps": "0.5.0",
|
||||
"rollup-plugin-terser": "5.3.0",
|
||||
"rollup-plugin-typescript2": "0.26.0",
|
||||
"rollup-plugin-visualizer": "3.3.1",
|
||||
"ts-loader": "6.2.1",
|
||||
"ts-node": "8.8.1"
|
||||
"rollup-plugin-visualizer": "3.3.1"
|
||||
},
|
||||
"types": "src/index.ts",
|
||||
"dependencies": {
|
||||
"@cypress/webpack-preprocessor": "4.1.3",
|
||||
"@grafana/e2e-selectors": "7.0.0-beta.2",
|
||||
"@grafana/tsconfig": "^1.0.0-rc1",
|
||||
"@grafana/e2e-selectors": "7.0.0-pre.0",
|
||||
"blink-diff": "1.0.13",
|
||||
"commander": "5.0.0",
|
||||
"cypress": "3.7.0",
|
||||
"cypress": "4.5.0",
|
||||
"execa": "4.0.0",
|
||||
"ts-loader": "6.2.1",
|
||||
"typescript": "3.7.5",
|
||||
|
||||
@@ -21,7 +21,7 @@ export const addPanel = (config?: Partial<AddPanelConfig>) => {
|
||||
e2e()
|
||||
.get('.ds-picker')
|
||||
.click()
|
||||
.contains('.gf-form-select-box__desc-option', dataSourceName)
|
||||
.contains('[id^="react-select-"][id*="-option-"]', dataSourceName)
|
||||
.click();
|
||||
queriesForm();
|
||||
});
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/runtime",
|
||||
"version": "7.0.0-pre.0",
|
||||
"version": "7.0.0-beta.2",
|
||||
"description": "Grafana Runtime Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -23,8 +23,8 @@
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grafana/data": "7.0.0-pre.0",
|
||||
"@grafana/ui": "7.0.0-pre.0",
|
||||
"@grafana/data": "7.0.0-beta.2",
|
||||
"@grafana/ui": "7.0.0-beta.2",
|
||||
"systemjs": "0.20.19",
|
||||
"systemjs-plugin-css": "0.1.37"
|
||||
},
|
||||
@@ -32,6 +32,7 @@
|
||||
"@grafana/tsconfig": "^1.0.0-rc1",
|
||||
"@rollup/plugin-commonjs": "11.0.2",
|
||||
"@rollup/plugin-node-resolve": "7.1.1",
|
||||
"@types/jest": "23.3.14",
|
||||
"@types/rollup-plugin-visualizer": "2.6.0",
|
||||
"@types/systemjs": "^0.20.6",
|
||||
"lodash": "4.17.15",
|
||||
|
||||
@@ -9,3 +9,4 @@ export * from './types';
|
||||
export { loadPluginCss, SystemJS, PluginCssOptions } from './utils/plugin';
|
||||
export { reportMetaAnalytics } from './utils/analytics';
|
||||
export { DataSourceWithBackend, HealthCheckResult, HealthStatus } from './utils/DataSourceWithBackend';
|
||||
export { toDataQueryError, toDataQueryResponse } from './utils/queryResponse';
|
||||
|
||||
@@ -4,3 +4,4 @@ export * from './dataSourceSrv';
|
||||
export * from './LocationSrv';
|
||||
export * from './EchoSrv';
|
||||
export * from './templateSrv';
|
||||
export * from './legacyAngularInjector';
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
import { auto } from 'angular';
|
||||
|
||||
let singleton: auto.IInjectorService;
|
||||
|
||||
/**
|
||||
* Used during startup by Grafana to temporarily expose the angular injector to
|
||||
* pure javascript plugins using {@link getLegacyAngularInjector}.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
export const setLegacyAngularInjector = (instance: auto.IInjectorService) => {
|
||||
singleton = instance;
|
||||
};
|
||||
|
||||
/**
|
||||
* WARNING: this function provides a temporary way for plugins to access anything in the
|
||||
* angular injector. While the migration from angular to react continues, there are a few
|
||||
* options that do not yet have good alternatives. Note that use of this function will
|
||||
* be removed in the future.
|
||||
*
|
||||
* @beta
|
||||
*/
|
||||
export const getLegacyAngularInjector = (): auto.IInjectorService => singleton;
|
||||
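A hedged example of the escape hatch from a plugin's javascript. The service name and method are illustrative only and will differ per use case.

import { getLegacyAngularInjector } from '@grafana/runtime';

// Temporary bridge while the angular -> react migration is in progress
const $injector = getLegacyAngularInjector();
const timeSrv: any = $injector.get('timeSrv'); // example angular service name
timeSrv.refreshDashboard();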
@@ -9,6 +9,7 @@ import {
|
||||
import { Observable, from } from 'rxjs';
|
||||
import { config } from '..';
|
||||
import { getBackendSrv } from '../services';
|
||||
import { toDataQueryResponse } from './queryResponse';
|
||||
|
||||
const ExpressionDatasourceID = '__expr__';
|
||||
|
||||
@@ -94,7 +95,11 @@ export class DataSourceWithBackend<
|
||||
requestId,
|
||||
})
|
||||
.then((rsp: any) => {
|
||||
return this.toDataQueryResponse(rsp?.data);
|
||||
return toDataQueryResponse(rsp);
|
||||
})
|
||||
.catch(err => {
|
||||
err.isHandled = true; // Avoid extra popup warning
|
||||
return toDataQueryResponse(err);
|
||||
});
|
||||
|
||||
return from(req);
|
||||
@@ -109,16 +114,6 @@ export class DataSourceWithBackend<
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* This makes the arrow library loading async.
|
||||
*/
|
||||
async toDataQueryResponse(rsp: any): Promise<DataQueryResponse> {
|
||||
const { resultsToDataFrames } = await import(
|
||||
/* webpackChunkName: "apache-arrow-util" */ '@grafana/data/src/dataframe/ArrowDataFrame'
|
||||
);
|
||||
return { data: resultsToDataFrames(rsp) };
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a GET request to the datasource resource path
|
||||
*/
|
||||
|
||||
165
packages/grafana-runtime/src/utils/queryResponse.test.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import { toDataFrameDTO } from '@grafana/data';
|
||||
|
||||
import { toDataQueryResponse } from './queryResponse';
|
||||
|
||||
/* eslint-disable */
|
||||
const resp = {
|
||||
data: {
|
||||
results: {
|
||||
GC: {
|
||||
dataframes: [
|
||||
'QVJST1cxAACsAQAAEAAAAAAACgAOAAwACwAEAAoAAAAUAAAAAAAAAQMACgAMAAAACAAEAAoAAAAIAAAAUAAAAAIAAAAoAAAABAAAAOD+//8IAAAADAAAAAIAAABHQwAABQAAAHJlZklkAAAAAP///wgAAAAMAAAAAAAAAAAAAAAEAAAAbmFtZQAAAAACAAAAlAAAAAQAAACG////FAAAAGAAAABgAAAAAAADAWAAAAACAAAALAAAAAQAAABQ////CAAAABAAAAAGAAAAbnVtYmVyAAAEAAAAdHlwZQAAAAB0////CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAAAAAABm////AAACAAAAAAAAABIAGAAUABMAEgAMAAAACAAEABIAAAAUAAAAbAAAAHQAAAAAAAoBdAAAAAIAAAA0AAAABAAAANz///8IAAAAEAAAAAQAAAB0aW1lAAAAAAQAAAB0eXBlAAAAAAgADAAIAAQACAAAAAgAAAAQAAAABAAAAFRpbWUAAAAABAAAAG5hbWUAAAAAAAAAAAAABgAIAAYABgAAAAAAAwAEAAAAVGltZQAAAAC8AAAAFAAAAAAAAAAMABYAFAATAAwABAAMAAAA0AAAAAAAAAAUAAAAAAAAAwMACgAYAAwACAAEAAoAAAAUAAAAWAAAAA0AAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABoAAAAAAAAAGgAAAAAAAAAAAAAAAAAAABoAAAAAAAAAGgAAAAAAAAAAAAAAAIAAAANAAAAAAAAAAAAAAAAAAAADQAAAAAAAAAAAAAAAAAAAAAAAAAAFp00e2XHFQAIo158ZccVAPqoiH1lxxUA7K6yfmXHFQDetNx/ZccVANC6BoFlxxUAwsAwgmXHFQC0xlqDZccVAKbMhIRlxxUAmNKuhWXHFQCK2NiGZccVAHzeAohlxxUAbuQsiWXHFQAAAAAAAAhAAAAAAAAACEAAAAAAAAAIQAAAAAAAABRAAAAAAAAAFEAAAAAAAAAUQAAAAAAAAAhAAAAAAAAACEAAAAAAAAAIQAAAAAAAABRAAAAAAAAAFEAAAAAAAAAUQAAAAAAAAAhAEAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADgAAAAAAAMAAQAAALgBAAAAAAAAwAAAAAAAAADQAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAABQAAAAAgAAACgAAAAEAAAA4P7//wgAAAAMAAAAAgAAAEdDAAAFAAAAcmVmSWQAAAAA////CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAAIAAACUAAAABAAAAIb///8UAAAAYAAAAGAAAAAAAAMBYAAAAAIAAAAsAAAABAAAAFD///8IAAAAEAAAAAYAAABudW1iZXIAAAQAAAB0eXBlAAAAAHT///8IAAAADAAAAAAAAAAAAAAABAAAAG5hbWUAAAAAAAAAAGb///8AAAIAAAAAAAAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABsAAAAdAAAAAAACgF0AAAAAgAAADQAAAAEAAAA3P///wgAAAAQAAAABAAAAHRpbWUAAAAABAAAAHR5cGUAAAAACAAMAAgABAAIAAAACAAAABAAAAAEAAAAVGltZQAAAAAEAAAAbmFtZQAAAAAAAAAAAAAGAAgABgAGAAAAAAADAAQAAABUaW1lAAAAANgBAABBUlJPVzE=',
|
||||
],
|
||||
frames: null as any,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const resWithError = {
|
||||
data: {
|
||||
results: {
|
||||
A: {
|
||||
error: 'Hello Error',
|
||||
series: null,
|
||||
tables: null,
|
||||
dataframes: [
|
||||
'QVJST1cxAAD/////WAEAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEDAAoADAAAAAgABAAKAAAACAAAAJwAAAADAAAATAAAACgAAAAEAAAAPP///wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAABc////CAAAAAwAAAAAAAAAAAAAAAQAAABuYW1lAAAAAHz///8IAAAANAAAACoAAAB7Im5vdGljZXMiOlt7InNldmVyaXR5IjoyLCJ0ZXh0IjoiVGV4dCJ9XX0AAAQAAABtZXRhAAAAAAEAAAAYAAAAAAASABgAFAAAABMADAAAAAgABAASAAAAFAAAAEQAAABMAAAAAAAAA0wAAAABAAAADAAAAAgADAAIAAQACAAAAAgAAAAQAAAABwAAAG51bWJlcnMABAAAAG5hbWUAAAAAAAAAAAAABgAIAAYABgAAAAAAAgAHAAAAbnVtYmVycwAAAAAA/////4gAAAAUAAAAAAAAAAwAFgAUABMADAAEAAwAAAAQAAAAAAAAABQAAAAAAAADAwAKABgADAAIAAQACgAAABQAAAA4AAAAAgAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAEAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAA8D8AAAAAAAAIQBAAAAAMABQAEgAMAAgABAAMAAAAEAAAACwAAAA4AAAAAAADAAEAAABoAQAAAAAAAJAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAACgAMAAAACAAEAAoAAAAIAAAAnAAAAAMAAABMAAAAKAAAAAQAAAA8////CAAAAAwAAAAAAAAAAAAAAAUAAAByZWZJZAAAAFz///8IAAAADAAAAAAAAAAAAAAABAAAAG5hbWUAAAAAfP///wgAAAA0AAAAKgAAAHsibm90aWNlcyI6W3sic2V2ZXJpdHkiOjIsInRleHQiOiJUZXh0In1dfQAABAAAAG1ldGEAAAAAAQAAABgAAAAAABIAGAAUAAAAEwAMAAAACAAEABIAAAAUAAAARAAAAEwAAAAAAAADTAAAAAEAAAAMAAAACAAMAAgABAAIAAAACAAAABAAAAAHAAAAbnVtYmVycwAEAAAAbmFtZQAAAAAAAAAAAAAGAAgABgAGAAAAAAACAAcAAABudW1iZXJzAIABAABBUlJPVzE=',
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const emptyResults = {
|
||||
data: { '': { refId: '', meta: null, series: null, tables: null, dataframes: null } },
|
||||
};
|
||||
|
||||
/* eslint-enable */
|
||||
|
||||
describe('GEL Utils', () => {
|
||||
test('should parse output with dataframe', () => {
|
||||
const res = toDataQueryResponse(resp);
|
||||
const frames = res.data;
|
||||
for (const frame of frames) {
|
||||
expect(frame.refId).toEqual('GC');
|
||||
}
|
||||
|
||||
const norm = frames.map(f => toDataFrameDTO(f));
|
||||
expect(norm).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"fields": Array [
|
||||
Object {
|
||||
"config": Object {},
|
||||
"labels": undefined,
|
||||
"name": "Time",
|
||||
"type": "time",
|
||||
"values": Array [
|
||||
1569334575000,
|
||||
1569334580000,
|
||||
1569334585000,
|
||||
1569334590000,
|
||||
1569334595000,
|
||||
1569334600000,
|
||||
1569334605000,
|
||||
1569334610000,
|
||||
1569334615000,
|
||||
1569334620000,
|
||||
1569334625000,
|
||||
1569334630000,
|
||||
1569334635000,
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"config": Object {},
|
||||
"labels": undefined,
|
||||
"name": "",
|
||||
"type": "number",
|
||||
"values": Array [
|
||||
3,
|
||||
3,
|
||||
3,
|
||||
5,
|
||||
5,
|
||||
5,
|
||||
3,
|
||||
3,
|
||||
3,
|
||||
5,
|
||||
5,
|
||||
5,
|
||||
3,
|
||||
],
|
||||
},
|
||||
],
|
||||
"meta": undefined,
|
||||
"name": undefined,
|
||||
"refId": "GC",
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
test('processEmptyResults', () => {
|
||||
const frames = toDataQueryResponse(emptyResults).data;
|
||||
expect(frames.length).toEqual(0);
|
||||
});
|
||||
|
||||
test('resultWithError', () => {
|
||||
// Generated from:
|
||||
// qdr.Responses[q.GetRefID()] = backend.DataResponse{
|
||||
// Error: fmt.Errorf("an Error: %w", fmt.Errorf("another error")),
|
||||
// Frames: data.Frames{
|
||||
// {
|
||||
// Fields: data.Fields{data.NewField("numbers", nil, []float64{1, 3})},
|
||||
// Meta: &data.FrameMeta{
|
||||
// Notices: []data.Notice{
|
||||
// {
|
||||
// Severity: data.NoticeSeverityError,
|
||||
// Text: "Text",
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// }
|
||||
const res = toDataQueryResponse(resWithError);
|
||||
expect(res.error).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"message": "Hello Error",
|
||||
"refId": "A",
|
||||
}
|
||||
`);
|
||||
|
||||
const norm = res.data.map(f => toDataFrameDTO(f));
|
||||
expect(norm).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"fields": Array [
|
||||
Object {
|
||||
"config": Object {},
|
||||
"labels": undefined,
|
||||
"name": "numbers",
|
||||
"type": "number",
|
||||
"values": Array [
|
||||
1,
|
||||
3,
|
||||
],
|
||||
},
|
||||
],
|
||||
"meta": Object {
|
||||
"notices": Array [
|
||||
Object {
|
||||
"severity": 2,
|
||||
"text": "Text",
|
||||
},
|
||||
],
|
||||
},
|
||||
"name": undefined,
|
||||
"refId": "A",
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
});
|
||||
91
packages/grafana-runtime/src/utils/queryResponse.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import {
|
||||
DataQueryResponse,
|
||||
arrowTableToDataFrame,
|
||||
base64StringToArrowTable,
|
||||
KeyValue,
|
||||
LoadingState,
|
||||
DataQueryError,
|
||||
} from '@grafana/data';
|
||||
|
||||
interface DataResponse {
|
||||
error?: string;
|
||||
refId?: string;
|
||||
dataframes?: string[];
|
||||
// series: null,
|
||||
// tables: null,
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the results from `/api/ds/query`
|
||||
*/
|
||||
export function toDataQueryResponse(res: any): DataQueryResponse {
|
||||
const rsp: DataQueryResponse = { data: [], state: LoadingState.Done };
|
||||
if (res.data?.results) {
|
||||
const results: KeyValue = res.data.results;
|
||||
for (const refId of Object.keys(results)) {
|
||||
const dr = results[refId] as DataResponse;
|
||||
if (dr) {
|
||||
if (dr.error) {
|
||||
if (!rsp.error) {
|
||||
rsp.error = {
|
||||
refId,
|
||||
message: dr.error,
|
||||
};
|
||||
rsp.state = LoadingState.Error;
|
||||
}
|
||||
}
|
||||
|
||||
if (dr.dataframes) {
|
||||
for (const b64 of dr.dataframes) {
|
||||
const t = base64StringToArrowTable(b64);
|
||||
const f = arrowTableToDataFrame(t);
|
||||
if (!f.refId) {
|
||||
f.refId = refId;
|
||||
}
|
||||
rsp.data.push(f);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// When it is not an OK response, make sure the error gets added
|
||||
if (res.status && res.status !== 200) {
|
||||
if (rsp.state !== LoadingState.Error) {
|
||||
rsp.state = LoadingState.Error;
|
||||
}
|
||||
if (!rsp.error) {
|
||||
rsp.error = toDataQueryError(res);
|
||||
}
|
||||
}
|
||||
|
||||
return rsp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an object into a DataQueryError -- if this is an HTTP response,
|
||||
* it will put the correct values in the error fields
|
||||
*/
|
||||
export function toDataQueryError(err: any): DataQueryError {
|
||||
const error = (err || {}) as DataQueryError;
|
||||
|
||||
if (!error.message) {
|
||||
if (typeof err === 'string' || err instanceof String) {
|
||||
return { message: err } as DataQueryError;
|
||||
}
|
||||
|
||||
let message = 'Query error';
|
||||
if (error.message) {
|
||||
message = error.message;
|
||||
} else if (error.data && error.data.message) {
|
||||
message = error.data.message;
|
||||
} else if (error.data && error.data.error) {
|
||||
message = error.data.error;
|
||||
} else if (error.status) {
|
||||
message = `Query error: ${error.status} ${error.statusText}`;
|
||||
}
|
||||
error.message = message;
|
||||
}
|
||||
|
||||
return error;
|
||||
}
|
||||
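A small sketch of the intended call site: POSTing to /api/ds/query and funnelling both success and error responses through the new helpers. The request shape is abbreviated and assumed; DataSourceWithBackend above is the real consumer.

import { getBackendSrv, toDataQueryResponse, toDataQueryError } from '@grafana/runtime';

async function runQueries(queries: any[]) {
  try {
    const rsp = await getBackendSrv().datasourceRequest({
      url: '/api/ds/query',
      method: 'POST',
      data: { queries },
    });
    return toDataQueryResponse(rsp); // decodes base64 arrow frames into DataFrame[]
  } catch (err) {
    err.isHandled = true; // avoid the extra popup warning, as in DataSourceWithBackend
    return { data: [], error: toDataQueryError(err) };
  }
}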
@@ -11,5 +11,10 @@
|
||||
},
|
||||
"exclude": ["dist", "node_modules"],
|
||||
"extends": "@grafana/tsconfig",
|
||||
"include": ["src/**/*.ts*", "../../public/app/types/jquery/*.ts", "../../public/app/types/sanitize-url.d.ts"]
|
||||
"include": [
|
||||
"src/**/*.ts*",
|
||||
"../../public/app/types/jquery/*.ts",
|
||||
"../../public/app/types/sanitize-url.d.ts",
|
||||
"../../public/app/types/svg.d.ts"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
# 7.0.0-beta.1 (2020-04-28)
|
||||
|
||||
### Features / Enhancements
|
||||
* **Grafana Toolkit**: Adds template for backend datasource. [#23864](https://github.com/grafana/grafana/pull/23864), [@bergquist](https://github.com/bergquist)
|
||||
|
||||
# 6.6.0 (unreleased)
|
||||
|
||||
# 6.6.0-beta1 (2020-01-20)
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
source ./common.sh
|
||||
|
||||
output=$(docker build . | tee /dev/tty)
|
||||
hash=$(echo "$output" | tail -1 | sed -ne "s/^Successfully built \(.*\)/\1/p")
|
||||
docker tag "$hash" $DOCKER_IMAGE_NAME:latest
|
||||
docker push $DOCKER_IMAGE_NAME:latest
|
||||
if [ ${#hash} -gt 0 ]; then
|
||||
docker tag "$hash" $DOCKER_IMAGE_NAME:latest
|
||||
docker push $DOCKER_IMAGE_NAME:latest
|
||||
fi
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/bin/sh
|
||||
set -eo pipefail
|
||||
source "./deploy-common.sh"
|
||||
|
||||
# Make libgcc compatible
|
||||
@@ -27,10 +28,10 @@ get_file "https://github.com/golangci/golangci-lint/releases/download/v1.23.7/$f
|
||||
"/tmp/$filename" \
|
||||
"34df1794a2ea8e168b3c98eed3cc0f3e13ed4cba735e4e40ef141df5c41bc086"
|
||||
untar_file "/tmp/$filename"
|
||||
chmod 755 /usr/local/bin/golangci-lint
|
||||
ln -s /usr/local/golangci-lint-1.23.7-linux-amd64/golangci-lint /usr/local/bin/golangci-lint
|
||||
ln -s /usr/local/go/bin/go /usr/local/bin/go
|
||||
ln -s /usr/local/go/bin/gofmt /usr/local/bin/gofmt
|
||||
chmod 755 /usr/local/bin/golangci-lint
|
||||
|
||||
# Install dependencies
|
||||
apk add fontconfig zip jq
|
||||
@@ -38,9 +39,12 @@ apk add fontconfig zip jq
|
||||
# Install code climate
|
||||
get_file "https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64" \
|
||||
"/usr/local/bin/cc-test-reporter" \
|
||||
"38f2442892027f61a07f52c845818750261b2ba58bffb043a582495339d37c05"
|
||||
"b4138199aa755ebfe171b57cc46910b13258ace5fbc4eaa099c42607cd0bff32"
|
||||
chmod +x /usr/local/bin/cc-test-reporter
|
||||
|
||||
wget -O /usr/local/bin/grabpl "https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v0.4.4/grabpl"
|
||||
chmod +x /usr/local/bin/grabpl
|
||||
|
||||
apk add git
|
||||
# Install Mage
|
||||
mkdir -pv /tmp/mage $HOME/go/bin
|
||||
@@ -56,10 +60,6 @@ for file in $(ls $HOME/go/bin); do
|
||||
mv -v $HOME/go/bin/$file /usr/local/bin/$file
|
||||
done
|
||||
|
||||
ls -l /usr/local/bin
|
||||
revive --help
|
||||
file /usr/local/bin/revive
|
||||
|
||||
# Cleanup after yourself
|
||||
/bin/rm -rf /tmp/mage
|
||||
/bin/rm -rf $HOME/go
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
source ./common.sh
|
||||
|
||||
output=$(docker build . | tee /dev/tty)
|
||||
hash=$(echo "$output" | tail -1 | sed -ne "s/^Successfully built \(.*\)/\1/p")
|
||||
docker tag "$hash" $DOCKER_IMAGE_NAME:latest
|
||||
docker push $DOCKER_IMAGE_NAME:latest
|
||||
if [ ${#hash} -gt 0 ]; then
|
||||
docker tag "$hash" $DOCKER_IMAGE_NAME:latest
|
||||
docker push $DOCKER_IMAGE_NAME:latest
|
||||
fi
|
||||
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
source "/etc/profile"
|
||||
source "./deploy-slim.sh"
|
||||
source "./deploy-common.sh"
|
||||
@@ -11,7 +13,6 @@ wget -O - "https://nodejs.org/dist/v12.16.2/node-${NODEVER}.tar.xz" | tar Jvxf -
|
||||
pushd /tmp/node-${NODEVER}
|
||||
/bin/rm -f CHANGELOG.md README.md LICENSE
|
||||
/bin/cp -r * /usr/local
|
||||
/bin/cp -r .??* /usr/local
|
||||
popd
|
||||
/bin/rm -rf /tmp/node-${NODEVER}
|
||||
|
||||
@@ -32,17 +33,20 @@ get_file "https://github.com/golangci/golangci-lint/releases/download/v1.23.7/$f
|
||||
"/tmp/$filename" \
|
||||
"34df1794a2ea8e168b3c98eed3cc0f3e13ed4cba735e4e40ef141df5c41bc086"
|
||||
untar_file "/tmp/$filename"
|
||||
chmod 755 /usr/local/bin/golangci-lint
|
||||
ln -s /usr/local/golangci-lint-1.23.7-linux-amd64/golangci-lint /usr/local/bin/golangci-lint
|
||||
ln -s /usr/local/go/bin/go /usr/local/bin/go
|
||||
ln -s /usr/local/go/bin/gofmt /usr/local/bin/gofmt
|
||||
chmod 755 /usr/local/bin/golangci-lint
|
||||
|
||||
# Install code climate
|
||||
get_file "https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64" \
|
||||
"/usr/local/bin/cc-test-reporter" \
|
||||
"38f2442892027f61a07f52c845818750261b2ba58bffb043a582495339d37c05"
|
||||
"b4138199aa755ebfe171b57cc46910b13258ace5fbc4eaa099c42607cd0bff32"
|
||||
chmod 755 /usr/local/bin/cc-test-reporter
|
||||
|
||||
wget -O /usr/local/bin/grabpl "https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v0.4.4/grabpl"
|
||||
chmod +x /usr/local/bin/grabpl
|
||||
|
||||
# Install Mage
|
||||
mkdir -pv /tmp/mage $HOME/go/bin
|
||||
git clone https://github.com/magefile/mage.git /tmp/mage
|
||||
|
||||
@@ -24,9 +24,12 @@ apt-get update -y && apt-get install -y adduser libfontconfig1 locate && /bin/rm
|
||||
# Install code climate
|
||||
get_file "https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64" \
|
||||
"/usr/local/bin/cc-test-reporter" \
|
||||
"38f2442892027f61a07f52c845818750261b2ba58bffb043a582495339d37c05"
|
||||
"b4138199aa755ebfe171b57cc46910b13258ace5fbc4eaa099c42607cd0bff32"
|
||||
chmod +x /usr/local/bin/cc-test-reporter
|
||||
|
||||
wget -O /usr/local/bin/grabpl "https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v0.4.4/grabpl"
|
||||
chmod +x /usr/local/bin/grabpl
|
||||
|
||||
# Install Mage
|
||||
mkdir -pv /tmp/mage $HOME/go/bin
|
||||
git clone https://github.com/magefile/mage.git /tmp/mage
|
||||
@@ -40,4 +43,4 @@ mv $HOME/go/bin/mage /usr/local/bin
|
||||
sudo -u circleci ./deploy-user.sh
|
||||
|
||||
# Get the size down
|
||||
/bin/rm -rf /var/lib/apt/lists
|
||||
/bin/rm -rf /var/lib/apt/lists
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/toolkit",
|
||||
"version": "7.0.0-pre.0",
|
||||
"version": "7.0.0-beta.2",
|
||||
"description": "Grafana Toolkit",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -29,10 +29,10 @@
|
||||
"dependencies": {
|
||||
"@babel/core": "7.9.0",
|
||||
"@babel/preset-env": "7.9.0",
|
||||
"@grafana/data": "7.0.0-pre.0",
|
||||
"@grafana/data": "7.0.0-beta.2",
|
||||
"@grafana/eslint-config": "^1.0.0-rc1",
|
||||
"@grafana/tsconfig": "^1.0.0-rc1",
|
||||
"@grafana/ui": "7.0.0-pre.0",
|
||||
"@grafana/ui": "7.0.0-beta.2",
|
||||
"@types/command-exists": "^1.2.0",
|
||||
"@types/execa": "^0.9.0",
|
||||
"@types/expect-puppeteer": "3.3.1",
|
||||
|
||||
@@ -5,7 +5,6 @@ import chalk from 'chalk';
|
||||
import { startTask } from './tasks/core.start';
|
||||
import { changelogTask } from './tasks/changelog';
|
||||
import { cherryPickTask } from './tasks/cherrypick';
|
||||
import { manifestTask } from './tasks/manifest';
|
||||
import { precommitTask } from './tasks/precommit';
|
||||
import { templateTask } from './tasks/template';
|
||||
import { pluginBuildTask } from './tasks/plugin.build';
|
||||
@@ -236,14 +235,6 @@ export const run = (includeInternalScripts = false) => {
|
||||
await execTask(pluginUpdateTask)({});
|
||||
});
|
||||
|
||||
// Test the manifest creation
|
||||
program
|
||||
.command('manifest')
|
||||
.description('create a manifest file in the cwd')
|
||||
.action(async cmd => {
|
||||
await execTask(manifestTask)({ folder: process.cwd() });
|
||||
});
|
||||
|
||||
program.on('command:*', () => {
|
||||
console.error('Invalid command: %s\nSee --help for a list of available commands.', program.args.join(' '));
|
||||
process.exit(1);
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
import { getFilesForManifest, convertSha1SumsToManifest } from './manifest';
|
||||
|
||||
describe('Manifest', () => {
|
||||
it('should collect file paths', () => {
|
||||
const info = getFilesForManifest(__dirname);
|
||||
expect(info).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
"changelog.ts",
|
||||
"cherrypick.ts",
|
||||
"closeMilestone.ts",
|
||||
"component.create.ts",
|
||||
"core.start.ts",
|
||||
"manifest.test.ts",
|
||||
"manifest.ts",
|
||||
"nodeVersionChecker.ts",
|
||||
"package.build.ts",
|
||||
"plugin/bundle.managed.ts",
|
||||
"plugin/bundle.ts",
|
||||
"plugin/create.ts",
|
||||
"plugin/tests.ts",
|
||||
"plugin.build.ts",
|
||||
"plugin.ci.ts",
|
||||
"plugin.create.ts",
|
||||
"plugin.dev.ts",
|
||||
"plugin.tests.ts",
|
||||
"plugin.update.ts",
|
||||
"plugin.utils.ts",
|
||||
"precommit.ts",
|
||||
"searchTestDataSetup.ts",
|
||||
"task.ts",
|
||||
"template.ts",
|
||||
"toolkit.build.ts",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('should convert a sha1 sum to manifest structure', () => {
|
||||
const sha1output = `7df059597099bb7dcf25d2a9aedfaf4465f72d8d LICENSE
|
||||
4ebed28a02dc029719296aa847bffcea8eb5b9ff README.md
|
||||
4493f107eb175b085f020c1afea04614232dc0fd gfx_sheets_darwin_amd64
|
||||
d8b05884e3829d1389a9c0e4b79b0aba8c19ca4a gfx_sheets_linux_amd64
|
||||
88f33db20182e17c72c2823fe3bed87d8c45b0fd gfx_sheets_windows_amd64.exe
|
||||
e6d8f6704dbe85d5f032d4e8ba44ebc5d4a68c43 img/config-page.png
|
||||
63d79d0e0f9db21ea168324bd4e180d6892b9d2b img/dashboard.png
|
||||
7ea6295954b24be55b27320af2074852fb088fa1 img/graph.png
|
||||
262f2bfddb004c7ce567042e8096f9e033c9b1bd img/query-editor.png
|
||||
f134ab85caff88b59ea903c5491c6a08c221622f img/sheets.svg
|
||||
40b8c38cea260caed3cdc01d6e3c1eca483ab5c1 module.js
|
||||
3c04068eb581f73a262a2081f4adca2edbb14edf module.js.map
|
||||
bfcae42976f0feca58eed3636655bce51702d3ed plugin.json`;
|
||||
|
||||
const manifest = convertSha1SumsToManifest(sha1output);
|
||||
|
||||
expect(manifest).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"files": Object {
|
||||
"LICENSE": "7df059597099bb7dcf25d2a9aedfaf4465f72d8d",
|
||||
"README.md": "4ebed28a02dc029719296aa847bffcea8eb5b9ff",
|
||||
"gfx_sheets_darwin_amd64": "4493f107eb175b085f020c1afea04614232dc0fd",
|
||||
"gfx_sheets_linux_amd64": "d8b05884e3829d1389a9c0e4b79b0aba8c19ca4a",
|
||||
"gfx_sheets_windows_amd64.exe": "88f33db20182e17c72c2823fe3bed87d8c45b0fd",
|
||||
"img/config-page.png": "e6d8f6704dbe85d5f032d4e8ba44ebc5d4a68c43",
|
||||
"img/dashboard.png": "63d79d0e0f9db21ea168324bd4e180d6892b9d2b",
|
||||
"img/graph.png": "7ea6295954b24be55b27320af2074852fb088fa1",
|
||||
"img/query-editor.png": "262f2bfddb004c7ce567042e8096f9e033c9b1bd",
|
||||
"img/sheets.svg": "f134ab85caff88b59ea903c5491c6a08c221622f",
|
||||
"module.js": "40b8c38cea260caed3cdc01d6e3c1eca483ab5c1",
|
||||
"module.js.map": "3c04068eb581f73a262a2081f4adca2edbb14edf",
|
||||
"plugin.json": "bfcae42976f0feca58eed3636655bce51702d3ed",
|
||||
},
|
||||
"plugin": "<?>",
|
||||
"version": "<?>",
|
||||
}
|
||||
`);
|
||||
});
|
||||
});
|
||||
@@ -1,100 +0,0 @@
|
||||
import { Task, TaskRunner } from './task';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import execa from 'execa';
|
||||
import { ManifestInfo } from '../../plugins/types';
|
||||
|
||||
interface ManifestOptions {
|
||||
folder: string;
|
||||
}
|
||||
|
||||
export function getFilesForManifest(root: string, work?: string, acc?: string[]): string[] {
|
||||
if (!acc) {
|
||||
acc = [];
|
||||
}
|
||||
let abs = work ?? root;
|
||||
const files = fs.readdirSync(abs);
|
||||
files.forEach(file => {
|
||||
const f = path.join(abs, file);
|
||||
const stat = fs.statSync(f);
|
||||
if (stat.isDirectory()) {
|
||||
acc = getFilesForManifest(root, f, acc);
|
||||
} else {
|
||||
const idx = f.lastIndexOf('.');
|
||||
if (idx > 0) {
|
||||
// Don't hash images
|
||||
const suffix = f.substring(idx + 1).toLowerCase();
|
||||
if (suffix === 'png' || suffix == 'gif' || suffix === 'svg') {
|
||||
return;
|
||||
}
|
||||
}
|
||||
acc!.push(f.substring(root.length + 1).replace('\\', '/'));
|
||||
}
|
||||
});
|
||||
return acc;
|
||||
}
|
||||
|
||||
export function convertSha1SumsToManifest(sums: string): ManifestInfo {
|
||||
const files: Record<string, string> = {};
|
||||
for (const line of sums.split(/\r?\n/)) {
|
||||
const idx = line.indexOf(' ');
|
||||
if (idx > 0) {
|
||||
const hash = line.substring(0, idx).trim();
|
||||
const path = line.substring(idx + 1).trim();
|
||||
files[path] = hash;
|
||||
}
|
||||
}
|
||||
return {
|
||||
plugin: '<?>',
|
||||
version: '<?>',
|
||||
files,
|
||||
};
|
||||
}
|
||||
|
||||
const manifestRunner: TaskRunner<ManifestOptions> = async ({ folder }) => {
|
||||
const GRAFANA_API_KEY = process.env.GRAFANA_API_KEY;
|
||||
if (!GRAFANA_API_KEY) {
|
||||
console.log('Plugin signing requires a grafana API key');
|
||||
}
|
||||
const filename = 'MANIFEST.txt';
|
||||
const files = getFilesForManifest(folder).filter(f => f !== filename);
|
||||
|
||||
// Run sha1sum
|
||||
const originalDir = __dirname;
|
||||
process.chdir(folder);
|
||||
const { stdout } = await execa('sha1sum', files);
|
||||
process.chdir(originalDir);
|
||||
|
||||
// Send the manifest to grafana API
|
||||
const manifest = convertSha1SumsToManifest(stdout);
|
||||
const outputPath = path.join(folder, filename);
|
||||
|
||||
const pluginPath = path.join(folder, 'plugin.json');
|
||||
const plugin = require(pluginPath);
|
||||
const url = `https://grafana.com/api/plugins/${plugin.id}/ci/sign`;
|
||||
manifest.plugin = plugin.id;
|
||||
manifest.version = plugin.info.version;
|
||||
|
||||
console.log('Request Signature:', url, manifest);
|
||||
const axios = require('axios');
|
||||
|
||||
try {
|
||||
const info = await axios.post(url, manifest, {
|
||||
headers: { Authorization: 'Bearer ' + GRAFANA_API_KEY },
|
||||
responseType: 'arraybuffer',
|
||||
});
|
||||
if (info.status === 200) {
|
||||
console.log('OK: ', info.data);
|
||||
const buffer = new Buffer(info.data, 'binary');
|
||||
fs.writeFileSync(outputPath, buffer);
|
||||
} else {
|
||||
console.warn('Error: ', info);
|
||||
console.log('Saving the unsigned manifest');
|
||||
fs.writeFileSync(outputPath, JSON.stringify(manifest, null, 2));
|
||||
}
|
||||
} catch (err) {
|
||||
console.log('ERROR Fetching response', err);
|
||||
}
|
||||
};
|
||||
|
||||
export const manifestTask = new Task<ManifestOptions>('Build Manifest', manifestRunner);
|
||||
@@ -3,9 +3,11 @@ import execa = require('execa');
|
||||
import * as fs from 'fs';
|
||||
// @ts-ignore
|
||||
import * as path from 'path';
|
||||
import { resolve as resolvePath } from 'path';
|
||||
import chalk from 'chalk';
|
||||
import { useSpinner } from '../utils/useSpinner';
|
||||
import { Task, TaskRunner } from './task';
|
||||
import globby from 'globby';
|
||||
|
||||
let distDir: string, cwd: string;
|
||||
|
||||
@@ -68,6 +70,7 @@ const preparePackage = async (pkg: any) => {
|
||||
|
||||
const moveFiles = () => {
|
||||
const files = ['README.md', 'CHANGELOG.md', 'index.js'];
|
||||
|
||||
// @ts-ignore
|
||||
return useSpinner<void>(`Moving ${files.join(', ')} files`, async () => {
|
||||
const promises = files.map(file => {
|
||||
@@ -86,6 +89,26 @@ const moveFiles = () => {
|
||||
})();
|
||||
};
|
||||
|
||||
const moveStaticFiles = async (pkg: any, cwd: string) => {
|
||||
if (pkg.name.endsWith('/ui')) {
|
||||
const staticFiles = await globby(resolvePath(process.cwd(), 'src/**/*.+(png|svg|gif|jpg)'));
|
||||
return useSpinner<void>(`Moving static files`, async () => {
|
||||
const promises = staticFiles.map(file => {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.copyFile(file, `${cwd}/compiled/${file.replace(`${cwd}/src`, '')}`, (err: any) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
});
});
|
||||
|
||||
await Promise.all(promises);
|
||||
})();
|
||||
}
|
||||
};
|
||||
interface PackageBuildOptions {
|
||||
scope: string;
|
||||
}
|
||||
@@ -107,6 +130,7 @@ const buildTaskRunner: TaskRunner<PackageBuildOptions> = async ({ scope }) => {
|
||||
|
||||
await clean();
|
||||
await compile();
|
||||
await moveStaticFiles(pkg, cwd);
|
||||
await rollup();
|
||||
await preparePackage(pkg);
|
||||
await moveFiles();
|
||||
|
||||
@@ -19,8 +19,6 @@ import {
|
||||
} from '../../plugins/env';
|
||||
import { agregateWorkflowInfo, agregateCoverageInfo, agregateTestInfo } from '../../plugins/workflow';
|
||||
import { PluginPackageDetails, PluginBuildReport } from '../../plugins/types';
|
||||
import { manifestTask } from './manifest';
|
||||
import { execTask } from '../utils/execTask';
|
||||
import rimrafCallback from 'rimraf';
|
||||
import { promisify } from 'util';
|
||||
const rimraf = promisify(rimrafCallback);
|
||||
@@ -165,7 +163,7 @@ const packagePluginRunner: TaskRunner<PluginCIOptions> = async () => {
|
||||
|
||||
// Write a manifest.txt file in the dist folder
|
||||
try {
|
||||
await execTask(manifestTask)({ folder: distContentDir });
|
||||
await execa('grabpl', ['build-plugin-manifest', distContentDir]);
|
||||
} catch (err) {
|
||||
console.warn(`Error signing manifest: ${distContentDir}`, err);
|
||||
}
|
||||
|
||||
@@ -6,5 +6,10 @@
|
||||
},
|
||||
"exclude": ["../dist", "../node_modules"],
|
||||
"extends": "../tsconfig.json",
|
||||
"include": ["../src/**/*.ts", "../src/**/*.tsx", "../../../public/app/types/sanitize-url.d.ts"]
|
||||
"include": [
|
||||
"../src/**/*.ts",
|
||||
"../src/**/*.tsx",
|
||||
"../../../public/app/types/sanitize-url.d.ts",
|
||||
"../../../public/app/types/svg.d.ts"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,3 +1,21 @@
|
||||
# 7.0.0-beta.1 (2020-04-28)
|
||||
|
||||
### Features / Enhancements
|
||||
* **@grafana/ui**: Create Icon component and replace icons. [#23402](https://github.com/grafana/grafana/pull/23402), [@ivanahuckova](https://github.com/ivanahuckova)
|
||||
* **@grafana/ui**: Create slider component. [#22275](https://github.com/grafana/grafana/pull/22275), [@ivanahuckova](https://github.com/ivanahuckova)
|
||||
* **@grafana/ui**: Remove ColorPallete component. [#23592](https://github.com/grafana/grafana/pull/23592), [@ivanahuckova](https://github.com/ivanahuckova)
|
||||
* **Docs**: adding API reference documentation support for the packages libraries.. [#21931](https://github.com/grafana/grafana/pull/21931), [@mckn](https://github.com/mckn)
|
||||
* **Migration**: Add old Input to legacy namespace. [#23286](https://github.com/grafana/grafana/pull/23286), [@tskarhed](https://github.com/tskarhed)
|
||||
* **Migration**: Final components to LegacyForms. [#23707](https://github.com/grafana/grafana/pull/23707), [@tskarhed](https://github.com/tskarhed)
|
||||
* **Migration**: Move Switch from Forms namespace. [#23386](https://github.com/grafana/grafana/pull/23386), [@tskarhed](https://github.com/tskarhed)
|
||||
* **Migration**: Move last components from Forms namespace. [#23556](https://github.com/grafana/grafana/pull/23556), [@tskarhed](https://github.com/tskarhed)
|
||||
* **Migration**: Remove Button from Forms namespace. [#23105](https://github.com/grafana/grafana/pull/23105), [@tskarhed](https://github.com/tskarhed)
|
||||
* **Migration**: TextArea from Forms namespace. [#23436](https://github.com/grafana/grafana/pull/23436), [@tskarhed](https://github.com/tskarhed)
|
||||
* **grafana/ui**: Add basic horizontal and vertical layout components. [#22303](https://github.com/grafana/grafana/pull/22303), [@dprokop](https://github.com/dprokop)
|
||||
|
||||
### Bug Fixes
|
||||
* **@grafana/ui**: Fix time range when only partial datetime is provided. [#23122](https://github.com/grafana/grafana/pull/23122), [@ivanahuckova](https://github.com/ivanahuckova)
|
||||
|
||||
# 6.6.0-beta.1.0 (2020-01-20)
|
||||
|
||||
### Features / Enhancements
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/ui",
|
||||
"version": "7.0.0-pre.0",
|
||||
"version": "7.0.0-beta.2",
|
||||
"description": "Grafana Components Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -28,8 +28,8 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/core": "^10.0.27",
|
||||
"@grafana/data": "7.0.0-pre.0",
|
||||
"@grafana/e2e-selectors": "7.0.0-pre.0",
|
||||
"@grafana/data": "7.0.0-beta.2",
|
||||
"@grafana/e2e-selectors": "7.0.0-beta.2",
|
||||
"@grafana/slate-react": "0.22.9-grafana",
|
||||
"@grafana/tsconfig": "^1.0.0-rc1",
|
||||
"@iconscout/react-unicons": "^1.0.0",
|
||||
@@ -71,6 +71,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-commonjs": "11.0.2",
|
||||
"@rollup/plugin-image": "2.0.4",
|
||||
"@rollup/plugin-node-resolve": "7.1.1",
|
||||
"@storybook/addon-actions": "5.3.17",
|
||||
"@storybook/addon-docs": "5.3.17",
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import resolve from '@rollup/plugin-node-resolve';
|
||||
import commonjs from '@rollup/plugin-commonjs';
|
||||
import image from '@rollup/plugin-image';
|
||||
// import sourceMaps from 'rollup-plugin-sourcemaps';
|
||||
import { terser } from 'rollup-plugin-terser';
|
||||
|
||||
@@ -71,6 +72,7 @@ const buildCjsPackage = ({ env }) => {
|
||||
}),
|
||||
resolve(),
|
||||
// sourceMaps(),
|
||||
image(),
|
||||
env === 'production' && terser(),
|
||||
],
|
||||
};
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.