Mirror of https://github.com/grafana/grafana.git (synced 2026-01-10 14:07:49 +08:00)

Compare commits: 15 commits
| SHA1 |
|---|
| 461b97ee80 |
| 724731fddc |
| 905f2c3e16 |
| 807594fd65 |
| 38c288bb9a |
| 82996d6f0a |
| f9d0c6525f |
| 26d5db2b63 |
| a13b96521d |
| e7d1f1df14 |
| 42e7cd7d65 |
| 282dd029aa |
| 15fd54b21b |
| 1d9c4cbdfe |
| 315476e20a |
@@ -19,7 +19,7 @@ version: 2
|
||||
jobs:
|
||||
mysql-integration-test:
|
||||
docker:
|
||||
- image: circleci/golang:1.12.9
|
||||
- image: circleci/golang:1.12.6
|
||||
- image: circleci/mysql:5.6-ram
|
||||
environment:
|
||||
MYSQL_ROOT_PASSWORD: rootpass
|
||||
@@ -30,7 +30,7 @@ jobs:
|
||||
steps:
|
||||
- checkout
|
||||
- run: sudo apt update
|
||||
- run: sudo apt install -y default-mysql-client
|
||||
- run: sudo apt install -y mysql-client
|
||||
- run: dockerize -wait tcp://127.0.0.1:3306 -timeout 120s
|
||||
- run: cat devenv/docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass
|
||||
- run:
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
|
||||
postgres-integration-test:
|
||||
docker:
|
||||
- image: circleci/golang:1.12.9
|
||||
- image: circleci/golang:1.12.6
|
||||
- image: circleci/postgres:9.3-ram
|
||||
environment:
|
||||
POSTGRES_USER: grafanatest
|
||||
@@ -58,7 +58,7 @@ jobs:
|
||||
|
||||
cache-server-test:
|
||||
docker:
|
||||
- image: circleci/golang:1.12.9
|
||||
- image: circleci/golang:1.12.6
|
||||
- image: circleci/redis:4-alpine
|
||||
- image: memcached
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
@@ -98,34 +98,6 @@ jobs:
|
||||
path: public/e2e-test/screenShots/theOutput
|
||||
destination: output-screenshots
|
||||
|
||||
end-to-end-test-release:
|
||||
docker:
|
||||
- image: circleci/node:10-browsers
|
||||
- image: grafana/grafana-dev:$CIRCLE_TAG
|
||||
steps:
|
||||
- run: dockerize -wait tcp://127.0.0.1:3000 -timeout 120s
|
||||
- checkout
|
||||
- restore_cache:
|
||||
key: dependency-cache-{{ checksum "yarn.lock" }}
|
||||
- run:
|
||||
name: yarn install
|
||||
command: 'yarn install --pure-lockfile --no-progress'
|
||||
no_output_timeout: 5m
|
||||
- save_cache:
|
||||
key: dependency-cache-{{ checksum "yarn.lock" }}
|
||||
paths:
|
||||
- node_modules
|
||||
- run:
|
||||
name: run end-to-end tests
|
||||
command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests'
|
||||
no_output_timeout: 5m
|
||||
- store_artifacts:
|
||||
path: public/e2e-test/screenShots/theTruth
|
||||
destination: expected-screenshots
|
||||
- store_artifacts:
|
||||
path: public/e2e-test/screenShots/theOutput
|
||||
destination: output-screenshots
|
||||
|
||||
codespell:
|
||||
docker:
|
||||
- image: circleci/python
|
||||
@@ -144,7 +116,7 @@ jobs:
|
||||
|
||||
lint-go:
|
||||
docker:
|
||||
- image: circleci/golang:1.12.9
|
||||
- image: circleci/golang:1.12.6
|
||||
environment:
|
||||
# we need CGO because of go-sqlite3
|
||||
CGO_ENABLED: 1
|
||||
@@ -155,15 +127,6 @@ jobs:
|
||||
name: Lint Go
|
||||
command: 'make lint-go'
|
||||
|
||||
shellcheck:
|
||||
machine: true
|
||||
working_directory: ~/go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: ShellCheck
|
||||
command: 'make shellcheck'
|
||||
|
||||
test-frontend:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
@@ -185,7 +148,7 @@ jobs:
|
||||
|
||||
test-backend:
|
||||
docker:
|
||||
- image: circleci/golang:1.12.9
|
||||
- image: circleci/golang:1.12.6
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
@@ -195,7 +158,7 @@ jobs:
|
||||
|
||||
build-all:
|
||||
docker:
|
||||
- image: grafana/build-container:1.2.8
|
||||
- image: grafana/build-container:1.2.7
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
@@ -239,7 +202,7 @@ jobs:
|
||||
|
||||
build:
|
||||
docker:
|
||||
- image: grafana/build-container:1.2.8
|
||||
- image: grafana/build-container:1.2.7
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
@@ -265,7 +228,7 @@ jobs:
|
||||
|
||||
build-fast-backend:
|
||||
docker:
|
||||
- image: grafana/build-container:1.2.8
|
||||
- image: grafana/build-container:1.2.7
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
@@ -282,7 +245,7 @@ jobs:
|
||||
|
||||
build-fast-frontend:
|
||||
docker:
|
||||
- image: grafana/build-container:1.2.8
|
||||
- image: grafana/build-container:1.2.7
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
@@ -306,7 +269,7 @@ jobs:
|
||||
|
||||
build-fast-package:
|
||||
docker:
|
||||
- image: grafana/build-container:1.2.8
|
||||
- image: grafana/build-container:1.2.7
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
@@ -333,7 +296,7 @@ jobs:
|
||||
|
||||
build-fast-save:
|
||||
docker:
|
||||
- image: grafana/build-container:1.2.8
|
||||
- image: grafana/build-container:1.2.7
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
@@ -419,7 +382,7 @@ jobs:
|
||||
|
||||
build-enterprise:
|
||||
docker:
|
||||
- image: grafana/build-container:1.2.8
|
||||
- image: grafana/build-container:1.2.7
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
@@ -451,7 +414,7 @@ jobs:
|
||||
|
||||
build-all-enterprise:
|
||||
docker:
|
||||
- image: grafana/build-container:1.2.8
|
||||
- image: grafana/build-container:1.2.7
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
@@ -672,8 +635,6 @@ workflows:
|
||||
filters: *filter-only-master
|
||||
- lint-go:
|
||||
filters: *filter-only-master
|
||||
- shellcheck:
|
||||
filters: *filter-only-master
|
||||
- test-frontend:
|
||||
filters: *filter-only-master
|
||||
- test-backend:
|
||||
@@ -689,7 +650,6 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- build-oss-msi
|
||||
@@ -702,7 +662,6 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-master
|
||||
@@ -713,7 +672,6 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- build-all-enterprise
|
||||
@@ -725,7 +683,6 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-master
|
||||
@@ -747,8 +704,6 @@ workflows:
|
||||
filters: *filter-only-release
|
||||
- lint-go:
|
||||
filters: *filter-only-release
|
||||
- shellcheck:
|
||||
filters: *filter-only-release
|
||||
- test-frontend:
|
||||
filters: *filter-only-release
|
||||
- test-backend:
|
||||
@@ -764,7 +719,6 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- build-oss-msi
|
||||
@@ -777,7 +731,6 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-release
|
||||
@@ -789,7 +742,6 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-release
|
||||
@@ -800,14 +752,9 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-release
|
||||
- end-to-end-test-release:
|
||||
requires:
|
||||
- grafana-docker-release
|
||||
filters: *filter-only-release
|
||||
|
||||
build-branches-and-prs:
|
||||
jobs:
|
||||
@@ -824,10 +771,6 @@ workflows:
|
||||
filters: *filter-not-release-or-master
|
||||
- lint-go:
|
||||
filters: *filter-not-release-or-master
|
||||
- lint-go:
|
||||
filters: *filter-not-release-or-master
|
||||
- shellcheck:
|
||||
filters: *filter-not-release-or-master
|
||||
- test-frontend:
|
||||
filters: *filter-not-release-or-master
|
||||
- test-backend:
|
||||
@@ -845,7 +788,6 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- cache-server-test
|
||||
@@ -857,7 +799,6 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- cache-server-test
|
||||
Dockerfile

@@ -1,5 +1,5 @@
# Golang build container
FROM golang:1.12.9
FROM golang:1.12.4

WORKDIR $GOPATH/src/github.com/grafana/grafana

@@ -33,7 +33,7 @@ ENV NODE_ENV production
RUN ./node_modules/.bin/grunt build

# Final container
FROM ubuntu:18.04
FROM ubuntu:latest

LABEL maintainer="Grafana team <hello@grafana.com>"

Makefile (10 changes)

@@ -2,9 +2,8 @@

.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go gosec revive golangci-lint go-vet test-go test-js test run clean devenv devenv-down revive-alerting

GO = GO111MODULE=on go
GO_FILES ?= ./pkg/...
SH_FILES ?= $(shell find ./scripts -name *.sh)
GO := GO111MODULE=on go
GO_FILES := ./pkg/...

all: deps build

@@ -112,11 +111,6 @@ go-vet:

lint-go: go-vet golangci-lint revive revive-alerting gosec

# SC1071 is disabled so that some TCL/Expect `/usr/bin/env expect` scripts are ignored
shellcheck: $(SH_FILES)
	@docker run --rm -v "$$PWD:/mnt" koalaman/shellcheck:stable \
	$(SH_FILES) -e SC1071

run: scripts/go/bin/bra
	@scripts/go/bin/bra run

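For reference, a hedged sketch of how the targets above are typically invoked from the repository root (assumes GNU make and a local Docker daemon for the shellcheck image):

```bash
# Run the aggregated Go linters (go vet, golangci-lint, revive, gosec)
make lint-go

# Lint the shell scripts under ./scripts via the koalaman/shellcheck image
make shellcheck
```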
@@ -214,10 +214,6 @@ external_enabled = true
|
||||
external_snapshot_url = https://snapshots-origin.raintank.io
|
||||
external_snapshot_name = Publish to snapshot.raintank.io
|
||||
|
||||
# Set to true to enable this Grafana instance to act as an external snapshot server and allow unauthenticated requests for
|
||||
# creating and deleting snapshots.
|
||||
public_mode = false
|
||||
|
||||
# remove expired snapshot
|
||||
snapshot_remove_expired = true
|
||||
|
||||
@@ -383,45 +379,17 @@ send_client_credentials_via_post = false
|
||||
|
||||
#################################### SAML Auth ###########################
|
||||
[auth.saml] # Enterprise only
|
||||
# Defaults to false. If true, the feature is enabled
|
||||
enabled = false
|
||||
|
||||
# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
|
||||
certificate =
|
||||
|
||||
# Path to the public X.509 certificate. Used to sign requests to the IdP
|
||||
certificate_path =
|
||||
|
||||
# Base64-encoded private key. Used to decrypt assertions from the IdP
|
||||
private_key =
|
||||
|
||||
# Path to the private key. Used to decrypt assertions from the IdP
|
||||
private_key_path =
|
||||
|
||||
# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
certificate =
|
||||
certificate_path =
|
||||
idp_metadata =
|
||||
|
||||
# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
idp_metadata_path =
|
||||
|
||||
# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
|
||||
idp_metadata_url =
|
||||
|
||||
# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
|
||||
max_issue_delay = 90s
|
||||
|
||||
# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
|
||||
metadata_valid_duration = 48h
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's name
|
||||
assertion_attribute_name = displayName
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
|
||||
assertion_attribute_login = mail
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's email
|
||||
assertion_attribute_email = mail
|
||||
|
||||
#################################### Basic Auth ##########################
|
||||
[auth.basic]
|
||||
enabled = true
|
||||
|
||||
@@ -209,10 +209,6 @@
|
||||
;external_snapshot_url = https://snapshots-origin.raintank.io
|
||||
;external_snapshot_name = Publish to snapshot.raintank.io
|
||||
|
||||
# Set to true to enable this Grafana instance to act as an external snapshot server and allow unauthenticated requests for
|
||||
# creating and deleting snapshots.
|
||||
;public_mode = false
|
||||
|
||||
# remove expired snapshot
|
||||
;snapshot_remove_expired = true
|
||||
|
||||
@@ -338,46 +334,18 @@
|
||||
;send_client_credentials_via_post = false
|
||||
|
||||
#################################### SAML Auth ###########################
|
||||
[auth.saml] # Enterprise only
|
||||
# Defaults to false. If true, the feature is enabled.
|
||||
;[auth.saml] # Enterprise only
|
||||
;enabled = false
|
||||
|
||||
# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
|
||||
;certificate =
|
||||
|
||||
# Path to the public X.509 certificate. Used to sign requests to the IdP
|
||||
;certificate_path =
|
||||
|
||||
# Base64-encoded private key. Used to decrypt assertions from the IdP
|
||||
;private_key =
|
||||
|
||||
;# Path to the private key. Used to decrypt assertions from the IdP
|
||||
;private_key_path =
|
||||
|
||||
# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
;certificate =
|
||||
;certificate_path =
|
||||
;idp_metadata =
|
||||
|
||||
# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
;idp_metadata_path =
|
||||
|
||||
# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
|
||||
;idp_metadata_url =
|
||||
|
||||
# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds.
|
||||
;max_issue_delay = 90s
|
||||
|
||||
# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours.
|
||||
;metadata_valid_duration = 48h
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's name
|
||||
;assertion_attribute_name = displayName
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
|
||||
;assertion_attribute_login = mail
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's email
|
||||
;assertion_attribute_email = mail
|
||||
|
||||
#################################### Grafana.com Auth ####################
|
||||
[auth.grafana_com]
|
||||
;enabled = false
|
||||
|
||||
@@ -44,7 +44,7 @@
|
||||
"nullPointMode": "null",
|
||||
"options-gauge": {
|
||||
"baseColor": "#299c46",
|
||||
"decimals": 2,
|
||||
"decimals": "2",
|
||||
"maxValue": 100,
|
||||
"minValue": 0,
|
||||
"options": {
|
||||
@@ -111,7 +111,7 @@
|
||||
"nullPointMode": "null",
|
||||
"options-gauge": {
|
||||
"baseColor": "#299c46",
|
||||
"decimals": null,
|
||||
"decimals": "",
|
||||
"maxValue": 100,
|
||||
"minValue": 0,
|
||||
"options": {
|
||||
@@ -178,7 +178,7 @@
|
||||
"nullPointMode": "null",
|
||||
"options-gauge": {
|
||||
"baseColor": "#299c46",
|
||||
"decimals": null,
|
||||
"decimals": "",
|
||||
"maxValue": 100,
|
||||
"minValue": 0,
|
||||
"options": {
|
||||
|
||||
@@ -28,6 +28,38 @@ search_filter = "(cn=%s)"
|
||||
# An array of base dns to search through
|
||||
search_base_dns = ["dc=grafana,dc=org"]
|
||||
|
||||
# In POSIX LDAP schemas, which lack the memberOf attribute, a secondary query must be made for groups.
|
||||
# This is done by enabling group_search_filter below. You must also set member_of= "cn"
|
||||
# in [servers.attributes] below.
|
||||
|
||||
# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN
|
||||
# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of
|
||||
# below in such a way that the user's recursive group membership is considered.
|
||||
#
|
||||
# Nested Groups + Active Directory (AD) Example:
|
||||
#
|
||||
# AD groups store the Distinguished Names (DNs) of members, so your filter must
|
||||
# recursively search your groups for the authenticating user's DN. For example:
|
||||
#
|
||||
# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)"
|
||||
# group_search_filter_user_attribute = "distinguishedName"
|
||||
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
|
||||
#
|
||||
# [servers.attributes]
|
||||
# ...
|
||||
# member_of = "distinguishedName"
|
||||
|
||||
## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available)
|
||||
# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
|
||||
## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter.
|
||||
## Defaults to the value of username in [server.attributes]
|
||||
## Valid options are any of your values in [servers.attributes]
|
||||
## If you are using nested groups you probably want to set this and member_of in
|
||||
## [servers.attributes] to "distinguishedName"
|
||||
# group_search_filter_user_attribute = "distinguishedName"
|
||||
## An array of the base DNs to search through for groups. Typically uses ou=groups
|
||||
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
|
||||
|
||||
# Specify names of the ldap attributes your ldap uses
|
||||
[servers.attributes]
|
||||
name = "givenName"
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
# To troubleshoot and get more log info enable ldap debug logging in grafana.ini
|
||||
# [log]
|
||||
# filters = ldap:debug
|
||||
|
||||
[[servers]]
|
||||
# Ldap server host (specify multiple hosts space separated)
|
||||
host = "127.0.0.1"
|
||||
# Default port is 389 or 636 if use_ssl = true
|
||||
port = 389
|
||||
# Set to true if ldap server supports TLS
|
||||
use_ssl = false
|
||||
# Set to true if you want to connect to the LDAP server using the STARTTLS pattern (create an insecure connection, then upgrade it to a secure one with TLS)
|
||||
start_tls = false
|
||||
# set to true if you want to skip ssl cert validation
|
||||
ssl_skip_verify = false
|
||||
# set to the path to your root CA certificate or leave unset to use system defaults
|
||||
# root_ca_cert = "/path/to/certificate.crt"
|
||||
|
||||
# Search user bind dn
|
||||
bind_dn = "cn=admin,dc=grafana,dc=org"
|
||||
# Search user bind password
|
||||
# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
|
||||
bind_password = 'grafana'
|
||||
|
||||
# An array of base dns to search through
|
||||
search_base_dns = ["dc=grafana,dc=org"]
|
||||
|
||||
search_filter = "(uid=%s)"
|
||||
|
||||
group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
|
||||
group_search_filter_user_attribute = "uid"
|
||||
group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
|
||||
|
||||
[servers.attributes]
|
||||
name = "givenName"
|
||||
surname = "sn"
|
||||
username = "cn"
|
||||
member_of = "memberOf"
|
||||
email = "email"
|
||||
|
||||
# Map ldap groups to grafana org roles
|
||||
[[servers.group_mappings]]
|
||||
group_dn = "cn=posix-admins,ou=groups,dc=grafana,dc=org"
|
||||
org_role = "Admin"
|
||||
grafana_admin = true
|
||||
|
||||
# The Grafana organization database id, optional, if left out the default org (id 1) will be used
|
||||
# org_id = 1
|
||||
|
||||
[[servers.group_mappings]]
|
||||
group_dn = "cn=editors,ou=groups,dc=grafana,dc=org"
|
||||
org_role = "Editor"
|
||||
|
||||
[[servers.group_mappings]]
|
||||
# If you want to match all (or no) LDAP groups then you can use a wildcard
|
||||
group_dn = "*"
|
||||
org_role = "Viewer"
|
||||
@@ -12,7 +12,7 @@ After adding ldif files to `prepopulate`:

## Enabling LDAP in Grafana

If you want to use users/groups with `memberOf` support, copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). Then enable it in the .ini file to get Grafana to use this block:
Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). Then enable it in the .ini file to get Grafana to use this block:

```ini
[auth.ldap]
@@ -21,8 +21,6 @@ config_file = conf/ldap_dev.toml
; allow_sign_up = true
```

Otherwise perform the same steps for the `ldap_dev_posix.toml` config (see the sketch below).
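A minimal sketch of the POSIX variant, assuming the standard `enabled` flag and only the config file path changing:

```ini
[auth.ldap]
enabled = true
config_file = conf/ldap_dev_posix.toml
; allow_sign_up = true
```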

## Groups & Users

admins
@@ -40,11 +38,3 @@ editors
ldap-editors
no groups
ldap-viewer

## Groups & Users (POSIX)

admins
ldap-posix-admin
no groups
ldap-posix

@@ -78,31 +78,3 @@ objectClass: inetOrgPerson
|
||||
objectClass: organizationalPerson
|
||||
sn: ldap-torkel
|
||||
cn: ldap-torkel
|
||||
|
||||
# admin for posix group (without support for memberOf attribute)
|
||||
dn: uid=ldap-posix-admin,ou=users,dc=grafana,dc=org
|
||||
mail: ldap-posix-admin@grafana.com
|
||||
userPassword: grafana
|
||||
objectclass: top
|
||||
objectclass: posixAccount
|
||||
objectclass: inetOrgPerson
|
||||
homedirectory: /home/ldap-posix-admin
|
||||
sn: ldap-posix-admin
|
||||
cn: ldap-posix-admin
|
||||
uid: ldap-posix-admin
|
||||
uidnumber: 1
|
||||
gidnumber: 1
|
||||
|
||||
# user for posix group (without support for memberOf attribute)
|
||||
dn: uid=ldap-posix,ou=users,dc=grafana,dc=org
|
||||
mail: ldap-posix@grafana.com
|
||||
userPassword: grafana
|
||||
objectclass: top
|
||||
objectclass: posixAccount
|
||||
objectclass: inetOrgPerson
|
||||
homedirectory: /home/ldap-posix
|
||||
sn: ldap-posix
|
||||
cn: ldap-posix
|
||||
uid: ldap-posix
|
||||
uidnumber: 2
|
||||
gidnumber: 2
|
||||
|
||||
@@ -23,21 +23,3 @@ objectClass: groupOfNames
|
||||
member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
|
||||
member: cn=ldap-daniel,ou=users,dc=grafana,dc=org
|
||||
member: cn=ldap-leo,ou=users,dc=grafana,dc=org
|
||||
|
||||
# -- POSIX --
|
||||
|
||||
# posix admin group (without support for memberOf attribute)
|
||||
dn: cn=posix-admins,ou=groups,dc=grafana,dc=org
|
||||
cn: admins
|
||||
objectClass: top
|
||||
objectClass: posixGroup
|
||||
gidNumber: 1
|
||||
memberUid: ldap-posix-admin
|
||||
|
||||
# posix group (without support for memberOf attribute)
|
||||
dn: cn=posix,ou=groups,dc=grafana,dc=org
|
||||
cn: viewers
|
||||
objectClass: top
|
||||
objectClass: posixGroup
|
||||
gidNumber: 2
|
||||
memberUid: ldap-posix
|
||||
|
||||
@@ -60,9 +60,9 @@ aliases = ["/v1.1", "/guides/reference/admin", "/v3.1"]
      <h4>Provisioning</h4>
      <p>A guide to help you automate your Grafana setup & configuration.</p>
    </a>
    <a href="{{< relref "guides/whats-new-in-v6-3.md" >}}" class="nav-cards__item nav-cards__item--guide">
      <h4>What's new in v6.3</h4>
      <p>Article on all the new cool features and enhancements in v6.3</p>
    <a href="{{< relref "guides/whats-new-in-v6-2.md" >}}" class="nav-cards__item nav-cards__item--guide">
      <h4>What's new in v6.2</h4>
      <p>Article on all the new cool features and enhancements in v6.2</p>
    </a>
    <a href="{{< relref "tutorials/screencasts.md" >}}" class="nav-cards__item nav-cards__item--guide">
      <h4>Screencasts</h4>

@@ -99,18 +99,3 @@ allow_sign_up = true
allowed_organizations = github google
```
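The fragment above is the tail of a `[auth.github]` OAuth block. As a hedged sketch only (option names follow Grafana's GitHub OAuth settings; the client ID and secret are placeholders), the surrounding block typically looks something like:

```ini
[auth.github]
enabled = true
allow_sign_up = true
client_id = YOUR_GITHUB_APP_CLIENT_ID
client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
scopes = user:email,read:org
allowed_organizations = github google
```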
### Team Sync (Enterprise only)

> Only available in Grafana Enterprise v6.3+

With Team Sync you can map your GitHub org teams to teams in Grafana so that your users will automatically be added to
the correct teams.

Your GitHub teams can be referenced in two ways:

- `https://github.com/orgs/<org>/teams/<team name>`
- `@<org>/<team name>`

Example: `@grafana/developers`

[Learn more about Team Sync]({{< relref "auth/enhanced_ldap.md" >}})

@@ -126,6 +126,8 @@ group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
group_search_filter_user_attribute = "uid"
```

Also set `member_of = "dn"` in the `[servers.attributes]` section, as in the sketch below.
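A minimal sketch of how those settings fit together in the LDAP TOML file (values taken from the snippets on this page; illustrative, not a drop-in config):

```toml
[[servers]]
# ... host, bind and user search settings ...
group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
group_search_filter_user_attribute = "uid"

[servers.attributes]
member_of = "dn"
```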

### Group Mappings

In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization and role. These will be synced every time the user logs in, with LDAP being
@@ -1,178 +0,0 @@
+++
title = "SAML Authentication"
description = "Grafana SAML Authentication"
keywords = ["grafana", "saml", "documentation", "saml-auth"]
aliases = ["/auth/saml/"]
type = "docs"
[menu.docs]
name = "SAML"
parent = "authentication"
weight = 5
+++

# SAML Authentication

> SAML Authentication integration is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "enterprise" >}}).

> Only available in Grafana v6.3+

The SAML authentication integration allows your Grafana users to log in by
using an external SAML Identity Provider (IdP). To enable this, Grafana becomes
a Service Provider (SP) in the authentication flow, interacting with the IdP to
exchange user information.

## Supported SAML

The SAML single-sign-on (SSO) standard is varied and flexible. Our implementation contains the subset of features needed to provide a smooth authentication experience into Grafana.

> Should you encounter any problems with our implementation, please don't hesitate to contact us.

At the moment of writing, Grafana supports:

1. From the Service Provider (SP) to the Identity Provider (IdP)

   - `HTTP-POST` binding
   - `HTTP-Redirect` binding

2. From the Identity Provider (IdP) to the Service Provider (SP)

   - `HTTP-POST` binding

3. In terms of security, we currently support signed and encrypted Assertions. However, signed or encrypted requests are not supported.

4. In terms of initiation, only SP-initiated requests are supported. There is no support for IdP-initiated requests.

## Set up SAML Authentication

To use the SAML integration, you need to enable SAML in the [main config file]({{< relref "installation/configuration.md" >}}).

```bash
[auth.saml]
# Defaults to false. If true, the feature is enabled
enabled = true

# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
certificate =

# Path to the public X.509 certificate. Used to sign requests to the IdP
certificate_path =

# Base64-encoded private key. Used to decrypt assertions from the IdP
private_key =

# Path to the private key. Used to decrypt assertions from the IdP
private_key_path =

# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata =

# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata_path =

# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
idp_metadata_url =

# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
max_issue_delay =

# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
metadata_valid_duration =

# Friendly name or name of the attribute within the SAML assertion to use as the user's name
assertion_attribute_name = displayName

# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
assertion_attribute_login = mail

# Friendly name or name of the attribute within the SAML assertion to use as the user's email
assertion_attribute_email = mail
```

Important to note:

- like any other Grafana configuration, use of [environment variables for these options is supported]({{< relref "installation/configuration.md#using-environment-variables" >}}) (see the sketch after this list)
- only one form of each configuration option is required. Using multiple forms, e.g. both `certificate` and `certificate_path`, will result in an error
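As a hedged sketch (not part of the original page), the environment-variable form follows Grafana's `GF_<SECTION>_<KEY>` convention; the paths and URL below are placeholders:

```bash
export GF_AUTH_SAML_ENABLED=true
export GF_AUTH_SAML_CERTIFICATE_PATH=/etc/grafana/saml_cert.pem
export GF_AUTH_SAML_PRIVATE_KEY_PATH=/etc/grafana/saml_key.pem
export GF_AUTH_SAML_IDP_METADATA_URL=https://idp.example.com/saml/metadata
```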
## Grafana Configuration

A working configuration example looks like this:

```bash
[auth.saml]
enabled = true
certificate_path = "/path/to/certificate.cert"
private_key_path = "/path/to/private_key.pem"
idp_metadata_path = "/my/metadata.xml"
max_issue_delay = 90s
metadata_valid_duration = 48h
assertion_attribute_name = displayName
assertion_attribute_login = mail
assertion_attribute_email = mail
```

And here is a comprehensive list of the options:

| Setting | Required | Description | Default |
| --- | --- | --- | --- |
| `enabled` | No | Whether SAML authentication is allowed | `false` |
| `certificate` or `certificate_path` | Yes | Base64-encoded string or path for the SP X.509 certificate | |
| `private_key` or `private_key_path` | Yes | Base64-encoded string or path for the SP private key | |
| `idp_metadata` or `idp_metadata_path` or `idp_metadata_url` | Yes | Base64-encoded string, path or URL for the IdP SAML metadata XML | |
| `max_issue_delay` | No | Duration since the IdP issued a response for which the SP is allowed to process it | `90s` |
| `metadata_valid_duration` | No | Duration for how long the SP's metadata should be valid | `48h` |
| `assertion_attribute_name` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's name | `displayName` |
| `assertion_attribute_login` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's login handle | `mail` |
| `assertion_attribute_email` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's email | `mail` |

### Cert and Private Key

The SAML SSO standard uses asymmetric encryption to exchange information between the SP (Grafana) and the IdP. To perform such encryption, you need a public part and a private part. In this case, the X.509 certificate provides the public part, while the private key provides the private part.

Grafana supports two ways of specifying both the `certificate` and `private_key`. Without a suffix (e.g. `certificate=`), the configuration assumes you've supplied the base64-encoded file contents. However, if specified with the `_path` suffix (e.g. `certificate_path=`), Grafana will treat it as a file path and attempt to read the file from the file system.
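As an illustrative sketch only, a self-signed pair could be generated and base64-encoded like this, assuming OpenSSL and GNU coreutils `base64`; the file names and subject are placeholders:

```bash
# Generate a self-signed X.509 certificate and private key
openssl req -x509 -newkey rsa:2048 -nodes -days 365 \
  -keyout saml_key.pem -out saml_cert.pem -subj "/CN=grafana.example.com"

# Use the files directly via certificate_path / private_key_path, or
# base64-encode their contents for the inline certificate / private_key options
base64 -w0 saml_cert.pem
base64 -w0 saml_key.pem
```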
### IdP Metadata

Expanding on the above, we'll also need the public part from our IdP for message verification. The SAML IdP metadata XML tells us where and how we should exchange the user information.

Currently, we support three ways of specifying the IdP metadata. Without a suffix (`idp_metadata=`), Grafana assumes base64-encoded XML file contents; with the `_path` suffix it assumes a file path and attempts to read the file from the file system; and with the `_url` suffix it assumes a URL and attempts to load the metadata from the given location.

### Max Issue Delay

Prevention of SAML response replay attacks and internal clock skews between the SP (Grafana) and the IdP is covered. You can set a maximum amount of time between the IdP issuing a response and the SP (Grafana) processing it.

The configuration option is specified as a duration, e.g. `max_issue_delay = 90s` or `max_issue_delay = 1h`.

### Metadata valid duration

As an SP, our metadata is likely to expire at some point, e.g. due to a certificate rotation or change of location binding. Grafana allows you to specify for how long the metadata should be valid. Leveraging the standard's `validUntil` field, you can tell consumers until when your metadata is going to be valid. The duration is computed by adding the duration to the current time.

The configuration option is specified as a duration, e.g. `metadata_valid_duration = 48h`.

## Identity Provider (IdP) registration

For the SAML integration to work correctly, you need to make the IdP aware of the SP.

The integration provides two key endpoints as part of Grafana:

- The `/saml/metadata` endpoint, which contains the SP's metadata. You can either download and upload it manually, or make the IdP request it directly from the endpoint. Some providers name it Identifier or Entity ID.

- The `/saml/acs` endpoint, which is intended to receive the ACS (Assertion Consumer Service) callback. Some providers name it SSO URL or Reply URL.

## Assertion mapping

During the SAML SSO authentication flow, we receive the ACS (Assertion Consumer Service) callback. The callback contains all the relevant information of the user under authentication embedded in the SAML response. Grafana parses the response to create (or update) the user within its internal database.

For Grafana to map the user information, it looks at the individual attributes within the assertion. You can think of these attributes as Key/Value pairs (although they contain more information than that).

Grafana provides configuration options that let you modify which keys to look at for these values. The data we need to create the user in Grafana is Name, Login handle, and email.

An example is `assertion_attribute_name = "givenName"`, where Grafana looks within the assertion for an attribute with a friendly name or name of `givenName`. Both the friendly name (e.g. `givenName`) and the name (e.g. `urn:oid:2.5.4.42`) can be used interchangeably as the value for the configuration option.
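A minimal sketch of that mapping in the config file, using the `givenName` OID cited above (the login and email lines simply restate the defaults):

```ini
[auth.saml]
# Friendly name and URN-style name are interchangeable; urn:oid:2.5.4.42 is givenName
assertion_attribute_name = "urn:oid:2.5.4.42"
assertion_attribute_login = mail
assertion_attribute_email = mail
```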
## Troubleshooting

To troubleshoot and get more log info, enable SAML debug logging in the [main config file]({{< relref "installation/configuration.md" >}}).

```bash
[log]
filters = saml.auth:debug
```

@@ -29,10 +29,6 @@ With Grafana Enterprise you can set up synchronization between LDAP Groups and T

Datasource permissions allow you to restrict query access to only specific Teams and Users. [Learn More]({{< relref "permissions/datasource_permissions.md" >}}).

### SAML Authentication

Enables your Grafana Enterprise users to authenticate with SAML. [Learn More]({{< relref "auth/saml.md" >}}).

### Premium Plugins

With a Grafana Enterprise license you will get access to premium plugins, including:

@@ -1,146 +0,0 @@
+++
title = "What's New in Grafana v6.3"
description = "Feature & improvement highlights for Grafana v6.3"
keywords = ["grafana", "new", "documentation", "6.3"]
type = "docs"
[menu.docs]
name = "Version 6.3"
identifier = "v6.3"
parent = "whatsnew"
weight = -14
+++

# What's New in Grafana v6.3

For all details please read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md)

## Highlights

- New Explore features
  - [Loki Live Streaming]({{< relref "#loki-live-streaming" >}})
  - [Loki Context Queries]({{< relref "#loki-context-queries" >}})
  - [Elasticsearch Logs Support]({{< relref "#elasticsearch-logs-support" >}})
  - [InfluxDB Logs Support]({{< relref "#influxdb-logs-support" >}})
- [Data links]({{< relref "#data-links" >}})
- [New Time Picker]({{< relref "#new-time-picker" >}})
- [Graph Area Gradients]({{< relref "#graph-gradients" >}}) - A new graph display option!
- Grafana Enterprise
  - [LDAP Active Sync]({{< relref "#ldap-active-sync" >}})
  - [SAML Authentication]({{< relref "#saml-authentication" >}})

## Explore improvements

This release adds a ton of enhancements to Explore, both general improvements and new
data source specific features.

### Loki live streaming

For log queries using the Loki data source you can now stream logs live directly to the Explore UI.

### Loki context queries

After finding a log line through heavy use of query filters, it can be useful to
see the log lines surrounding the line you searched for. The `show context` feature
allows you to view lines before and after the line of interest.

### Elasticsearch logs support

This release adds support for searching & visualizing logs stored in Elasticsearch in the Explore mode, with a special
simplified query interface specifically designed for logs search.

{{< docs-imagebox img="/img/docs/v63/elasticsearch_explore_logs.png" max-width="600px" caption="Elasticsearch logs in Explore" >}}

Please read [Using Elasticsearch in Grafana](/features/datasources/elasticsearch/#querying-logs-beta) for more detailed information on how to get started and use it.

### InfluxDB logs support

This release adds support for searching & visualizing logs stored in InfluxDB in the Explore mode, with a special
simplified query interface specifically designed for logs search.

{{< docs-imagebox img="/img/docs/v63/influxdb_explore_logs.png" max-width="600px" caption="InfluxDB logs in Explore" >}}

Please read [Using InfluxDB in Grafana](/features/datasources/influxdb/#querying-logs-beta) for more detailed information on how to get started and use it.

## Data Links

We have simplified the UI for defining panel drilldown links (and renamed them to Panel links). We have also added a
new type of link named `Data link`. The reason to have two different types is to make it clear how they are used
and what variables you can use in the link. Panel links are only shown in the top left corner of
the panel, and you cannot reference a series name or any data field from them.

`Data links`, on the other hand, are used by the actual visualization and can reference data fields.

Example:
```url
http://my-grafana.com/d/bPCI6VSZz/other-dashboard?var-server=${__series_name}
```

You have access to these variables (see the example after this table):

Name | Description
------------ | -------------
*${__series_name}* | The name of the time series (or table)
*${__value_time}* | The time of the point you're clicking on (in millisecond epoch)
*${__url_time_range}* | Interpolates as the full time range (i.e. from=21312323412&to=21312312312)
*${__all_variables}* | Adds all current variables (and current values) to the url
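As an illustrative sketch (not from the original article), the same data link could also carry the dashboard time range along, combining variables from the table above:

```url
http://my-grafana.com/d/bPCI6VSZz/other-dashboard?var-server=${__series_name}&${__url_time_range}
```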
You can then click on a point in the graph.

{{< docs-imagebox img="/img/docs/v63/graph_datalink.png" max-width="400px" caption="Data link on a graph" >}}

For now only the Graph panel supports `Data links`, but we hope to add them to more visualizations.

## New Time Picker

The time picker has been redesigned with a simpler layout that makes accessing quick ranges easier.

{{< docs-imagebox img="/img/docs/v63/time_picker.png" max-width="400px" caption="New Time Picker" >}}

## Graph Gradients

Want more eye candy in your graphs? Then the fill gradient option might be for you! It works really well for
graphs with only a single series.

{{< docs-imagebox img="/img/docs/v63/graph_gradient_area.jpeg" max-width="800px" caption="Graph Gradient Area" >}}

It looks really nice in the light theme as well.

{{< docs-imagebox img="/img/docs/v63/graph_gradients_white.png" max-width="800px" caption="Graph Gradient Area" >}}

## Grafana Enterprise

Substantial refactoring and improvements to the external auth systems have gone into this release, making the features
listed below possible as well as laying a foundation for future enhancements.

### LDAP Active Sync

This is a new Enterprise feature that enables background syncing of user information, org roles and team memberships.
This syncing is otherwise only done at login time. With this feature you can schedule how often this user synchronization should
occur.

For example, let's say a user is removed from an LDAP group. In previous versions of Grafana an admin would have to
wait for the user to log out or the session to expire for the Grafana permissions to update, a process that can take days.

With active sync the user would be automatically removed from the corresponding team in Grafana, or even logged out and disabled if they no longer
belong to an LDAP group that gives them access to Grafana.

[Read more](/auth/enhanced_ldap/#active-ldap-synchronization)

### SAML Authentication

Built-in support for SAML is now available in Grafana Enterprise.

[See docs]({{< relref "auth/saml.md" >}})

### Team Sync for GitHub OAuth

When setting up OAuth with GitHub it's now possible to sync GitHub teams with Teams in Grafana.

[See docs]({{< relref "auth/github.md" >}})

### Team Sync for Auth Proxy

We've added support for enriching the Auth Proxy headers with Teams information, which makes it possible
to use Team Sync with Auth Proxy.

[See docs](/auth/auth-proxy/#auth-proxy-authentication)

@@ -37,11 +37,15 @@ export class ConfigCtrl {

  postUpdate() {
    if (!this.appModel.enabled) {
      return;
      return this.$q.resolve();
    }

    // TODO, whatever you want
    console.log('Post Update:', this);
    return this.appEditCtrl.importDashboards().then(() => {
      this.enabled = true;
      return {
        url: "plugins/raintank-kubernetes-app/page/clusters",
        message: "Kubernetes App enabled!"
      };
    });
  }
}
ConfigCtrl.templateUrl = 'components/config/config.html';

@@ -1,6 +1,5 @@
[
  { "version": "v6.3", "path": "/", "archived": false, "current": true },
  { "version": "v6.2", "path": "/v6.2", "archived": true },
  { "version": "v6.2", "path": "/", "archived": false, "current": true },
  { "version": "v6.1", "path": "/v6.1", "archived": true },
  { "version": "v6.0", "path": "/v6.0", "archived": true },
  { "version": "v5.4", "path": "/v5.4", "archived": true },

go.mod (4 changes)

@@ -52,7 +52,7 @@ require (
	github.com/onsi/gomega v1.5.0 // indirect
	github.com/opentracing/opentracing-go v1.1.0
	github.com/patrickmn/go-cache v2.1.0+incompatible
	github.com/pkg/errors v0.8.1 // indirect
	github.com/pkg/errors v0.8.1
	github.com/prometheus/client_golang v0.9.2
	github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90
	github.com/prometheus/common v0.2.0
@@ -64,7 +64,7 @@ require (
	github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a
	github.com/stretchr/testify v1.3.0
	github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf
	github.com/ua-parser/uap-go v0.0.0-20190826212731-daf92ba38329
	github.com/ua-parser/uap-go v0.0.0-20190303233514-1004ccd816b3
	github.com/uber-go/atomic v1.3.2 // indirect
	github.com/uber/jaeger-client-go v2.16.0+incompatible
	github.com/uber/jaeger-lib v2.0.0+incompatible // indirect

go.sum (4 changes)

@@ -202,8 +202,8 @@ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf h1:Z2X3Os7oRzpdJ75iPqWZc0HeJWFYNCvKsfpQwFpRNTA=
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf/go.mod h1:M8agBzgqHIhgj7wEn9/0hJUZcrvt9VY+Ln+S1I5Mha0=
github.com/ua-parser/uap-go v0.0.0-20190826212731-daf92ba38329 h1:VBsKFh4W1JEMz3eLCmM9zOJKZdDkP5W4b3Y4hc7SbZc=
github.com/ua-parser/uap-go v0.0.0-20190826212731-daf92ba38329/go.mod h1:OBcG9bn7sHtXgarhUEb3OfCnNsgtGnkVf41ilSZ3K3E=
github.com/ua-parser/uap-go v0.0.0-20190303233514-1004ccd816b3 h1:E7xa7Zur8hLPvw+03gAeQ9esrglfV389j2PcwhiGf/I=
github.com/ua-parser/uap-go v0.0.0-20190303233514-1004ccd816b3/go.mod h1:OBcG9bn7sHtXgarhUEb3OfCnNsgtGnkVf41ilSZ3K3E=
github.com/uber-go/atomic v1.3.2 h1:Azu9lPBWRNKzYXSIwRfgRuDuS0YKsK4NFhiQv98gkxo=
github.com/uber-go/atomic v1.3.2/go.mod h1:/Ct5t2lcmbJ4OSe/waGBoaVvVqtO0bmtfVNex1PFV8g=
github.com/uber/jaeger-client-go v2.16.0+incompatible h1:Q2Pp6v3QYiocMxomCaJuwQGFt7E53bPYqEgug/AoBtY=

@@ -2,5 +2,5 @@
  "npmClient": "yarn",
  "useWorkspaces": true,
  "packages": ["packages/*"],
  "version": "6.3.0-alpha.36"
  "version": "6.3.0-alpha.40"
}

@@ -5,7 +5,7 @@
    "company": "Grafana Labs"
  },
  "name": "grafana",
  "version": "6.3.5",
  "version": "6.3.0-pre",
  "repository": {
    "type": "git",
    "url": "http://github.com/grafana/grafana.git"
@@ -148,7 +148,7 @@
    "themes:generate": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/generateSassVariableFiles.ts",
    "packages:prepare": "lerna run clean && npm run test && lerna version --tag-version-prefix=\"packages@\" -m \"Packages: publish %s\" --no-push",
    "packages:build": "lerna run clean && lerna run build",
    "packages:publish": "lerna publish from-package --contents dist --tag-version-prefix=\"packages@\" --dist-tag next"
    "packages:publish": "lerna publish from-package --contents dist --dist-tag next --tag-version-prefix=\"packages@\""
  },
  "husky": {
    "hooks": {

|
||||
# Grafana Data Library
|
||||
|
||||
The core data components
|
||||
This package holds the root data types and functions used within Grafana.
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@grafana/data",
|
||||
"version": "6.3.0-alpha.36",
|
||||
"version": "6.3.0-alpha.39",
|
||||
"description": "Grafana Data Library",
|
||||
"keywords": [
|
||||
"typescript"
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
import { Threshold } from './threshold';
|
||||
import { ValueMapping } from './valueMapping';
|
||||
|
||||
export enum LoadingState {
|
||||
NotStarted = 'NotStarted',
|
||||
Loading = 'Loading',
|
||||
@@ -52,12 +49,6 @@ export interface Field {
|
||||
decimals?: number | null; // Significant digits (for display)
|
||||
min?: number | null;
|
||||
max?: number | null;
|
||||
|
||||
// Convert input values into a display value
|
||||
mappings?: ValueMapping[];
|
||||
|
||||
// Must be sorted by 'value', first value is always -Infinity
|
||||
thresholds?: Threshold[];
|
||||
}
|
||||
|
||||
export interface Labels {
|
||||
|
||||
@@ -2,7 +2,6 @@ export * from './data';
|
||||
export * from './dataLink';
|
||||
export * from './logs';
|
||||
export * from './navModel';
|
||||
export * from './select';
|
||||
export * from './time';
|
||||
export * from './threshold';
|
||||
export * from './utils';
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
/**
|
||||
* Used in select elements
|
||||
*/
|
||||
export interface SelectableValue<T = any> {
|
||||
label?: string;
|
||||
value?: T;
|
||||
imgUrl?: string;
|
||||
description?: string;
|
||||
[key: string]: any;
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
export interface Threshold {
|
||||
index: number;
|
||||
value: number;
|
||||
color: string;
|
||||
}
|
||||
|
||||
@@ -5,18 +5,6 @@ import { TimeZone } from '../types';
|
||||
|
||||
const units: DurationUnit[] = ['y', 'M', 'w', 'd', 'h', 'm', 's'];
|
||||
|
||||
export function isMathString(text: string | DateTime | Date): boolean {
|
||||
if (!text) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (typeof text === 'string' && (text.substring(0, 3) === 'now' || text.includes('||'))) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses different types input to a moment instance. There is a specific formatting language that can be used
|
||||
* if text arg is string. See unit tests for examples.
|
||||
|
||||
@@ -1,14 +1,6 @@
|
||||
import { fieldReducers, ReducerID, reduceField } from './fieldReducer';
|
||||
import { getFieldReducers, ReducerID, reduceField } from './index';
|
||||
|
||||
import _ from 'lodash';
|
||||
import { DataFrame } from '../types/data';
|
||||
|
||||
/**
|
||||
* Run a reducer and get back the value
|
||||
*/
|
||||
function reduce(series: DataFrame, fieldIndex: number, id: string): any {
|
||||
return reduceField({ series, fieldIndex, reducers: [id] })[id];
|
||||
}
|
||||
|
||||
describe('Stats Calculators', () => {
|
||||
const basicTable = {
|
||||
@@ -17,16 +9,29 @@ describe('Stats Calculators', () => {
|
||||
};
|
||||
|
||||
it('should load all standard stats', () => {
|
||||
for (const id of Object.keys(ReducerID)) {
|
||||
const reducer = fieldReducers.getIfExists(id);
|
||||
const found = reducer ? reducer.id : '<NOT FOUND>';
|
||||
expect(found).toEqual(id);
|
||||
}
|
||||
const names = [
|
||||
ReducerID.sum,
|
||||
ReducerID.max,
|
||||
ReducerID.min,
|
||||
ReducerID.logmin,
|
||||
ReducerID.mean,
|
||||
ReducerID.last,
|
||||
ReducerID.first,
|
||||
ReducerID.count,
|
||||
ReducerID.range,
|
||||
ReducerID.diff,
|
||||
ReducerID.step,
|
||||
ReducerID.delta,
|
||||
// ReducerID.allIsZero,
|
||||
// ReducerID.allIsNull,
|
||||
];
|
||||
const stats = getFieldReducers(names);
|
||||
expect(stats.length).toBe(names.length);
|
||||
});
|
||||
|
||||
it('should fail to load unknown stats', () => {
|
||||
const names = ['not a stat', ReducerID.max, ReducerID.min, 'also not a stat'];
|
||||
const stats = fieldReducers.list(names);
|
||||
const stats = getFieldReducers(names);
|
||||
expect(stats.length).toBe(2);
|
||||
|
||||
const found = stats.map(v => v.id);
|
||||
@@ -87,34 +92,6 @@ describe('Stats Calculators', () => {
|
||||
expect(stats.delta).toEqual(300);
|
||||
});
|
||||
|
||||
it('consistenly check allIsNull/allIsZero', () => {
|
||||
const empty = {
|
||||
fields: [{ name: 'A' }],
|
||||
rows: [],
|
||||
};
|
||||
const allNull = ({
|
||||
fields: [{ name: 'A' }],
|
||||
rows: [null, null, null, null],
|
||||
} as unknown) as DataFrame;
|
||||
const allNull2 = {
|
||||
fields: [{ name: 'A' }],
|
||||
rows: [[null], [null], [null], [null]],
|
||||
};
|
||||
const allZero = {
|
||||
fields: [{ name: 'A' }],
|
||||
rows: [[0], [0], [0], [0]],
|
||||
};
|
||||
|
||||
expect(reduce(empty, 0, ReducerID.allIsNull)).toEqual(true);
|
||||
expect(reduce(allNull, 0, ReducerID.allIsNull)).toEqual(true);
|
||||
expect(reduce(allNull2, 0, ReducerID.allIsNull)).toEqual(true);
|
||||
|
||||
expect(reduce(empty, 0, ReducerID.allIsZero)).toEqual(false);
|
||||
expect(reduce(allNull, 0, ReducerID.allIsZero)).toEqual(false);
|
||||
expect(reduce(allNull2, 0, ReducerID.allIsZero)).toEqual(false);
|
||||
expect(reduce(allZero, 0, ReducerID.allIsZero)).toEqual(true);
|
||||
});
|
||||
|
||||
it('consistent results for first/last value with null', () => {
|
||||
const info = [
|
||||
{
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
// Libraries
|
||||
import isNumber from 'lodash/isNumber';
|
||||
|
||||
import { DataFrame, NullValueMode } from '../types';
|
||||
import { Registry, RegistryItem } from './registry';
|
||||
import { DataFrame, NullValueMode } from '../types/index';
|
||||
|
||||
export enum ReducerID {
|
||||
sum = 'sum',
|
||||
@@ -35,13 +34,38 @@ export interface FieldCalcs {
|
||||
// Internal function
|
||||
type FieldReducer = (data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;
|
||||
|
||||
export interface FieldReducerInfo extends RegistryItem {
|
||||
export interface FieldReducerInfo {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
alias?: string; // optional secondary key. 'avg' vs 'mean', 'total' vs 'sum'
|
||||
|
||||
// Internal details
|
||||
emptyInputResult?: any; // typically null, but some things like 'count' & 'sum' should be zero
|
||||
standard: boolean; // The most common stats can all be calculated in a single pass
|
||||
reduce?: FieldReducer;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param ids list of stat names or null to get all of them
|
||||
*/
|
||||
export function getFieldReducers(ids?: string[]): FieldReducerInfo[] {
|
||||
if (ids === null || ids === undefined) {
|
||||
if (!hasBuiltIndex) {
|
||||
getById(ReducerID.mean);
|
||||
}
|
||||
return listOfStats;
|
||||
}
|
||||
|
||||
return ids.reduce((list, id) => {
|
||||
const stat = getById(id);
|
||||
if (stat) {
|
||||
list.push(stat);
|
||||
}
|
||||
return list;
|
||||
}, new Array<FieldReducerInfo>());
|
||||
}
|
||||
|
||||
interface ReduceFieldOptions {
|
||||
series: DataFrame;
|
||||
fieldIndex: number;
|
||||
@@ -59,7 +83,7 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
|
||||
return {};
|
||||
}
|
||||
|
||||
const queue = fieldReducers.list(reducers);
|
||||
const queue = getFieldReducers(reducers);
|
||||
|
||||
// Return early for empty series
|
||||
// This lets the concrete implementations assume at least one row
|
||||
@@ -98,107 +122,122 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
|
||||
//
|
||||
// ------------------------------------------------------------------------------
|
||||
|
||||
export const fieldReducers = new Registry<FieldReducerInfo>(() => [
|
||||
{
|
||||
id: ReducerID.lastNotNull,
|
||||
name: 'Last (not null)',
|
||||
description: 'Last non-null value',
|
||||
standard: true,
|
||||
aliasIds: ['current'],
|
||||
reduce: calculateLastNotNull,
|
||||
},
|
||||
{
|
||||
id: ReducerID.last,
|
||||
name: 'Last',
|
||||
description: 'Last Value',
|
||||
standard: true,
|
||||
reduce: calculateLast,
|
||||
},
|
||||
{ id: ReducerID.first, name: 'First', description: 'First Value', standard: true, reduce: calculateFirst },
|
||||
{
|
||||
id: ReducerID.firstNotNull,
|
||||
name: 'First (not null)',
|
||||
description: 'First non-null value',
|
||||
standard: true,
|
||||
reduce: calculateFirstNotNull,
|
||||
},
|
||||
{ id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
|
||||
{ id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
|
||||
{ id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, aliasIds: ['avg'] },
|
||||
{
|
||||
id: ReducerID.sum,
|
||||
name: 'Total',
|
||||
description: 'The sum of all values',
|
||||
emptyInputResult: 0,
|
||||
standard: true,
|
||||
aliasIds: ['total'],
|
||||
},
|
||||
{
|
||||
id: ReducerID.count,
|
||||
name: 'Count',
|
||||
description: 'Number of values in response',
|
||||
emptyInputResult: 0,
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.range,
|
||||
name: 'Range',
|
||||
description: 'Difference between minimum and maximum values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.delta,
|
||||
name: 'Delta',
|
||||
description: 'Cumulative change in value',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.step,
|
||||
name: 'Step',
|
||||
description: 'Minimum interval between values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.diff,
|
||||
name: 'Difference',
|
||||
description: 'Difference between first and last values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.logmin,
|
||||
name: 'Min (above zero)',
|
||||
description: 'Used for log min scale',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.allIsZero,
|
||||
name: 'All Zeros',
|
||||
description: 'All values are zero',
|
||||
emptyInputResult: false,
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.allIsNull,
|
||||
name: 'All Nulls',
|
||||
description: 'All values are null',
|
||||
emptyInputResult: true,
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.changeCount,
|
||||
name: 'Change Count',
|
||||
description: 'Number of times the value changes',
|
||||
standard: false,
|
||||
reduce: calculateChangeCount,
|
||||
},
|
||||
{
|
||||
id: ReducerID.distinctCount,
|
||||
name: 'Distinct Count',
|
||||
description: 'Number of distinct values',
|
||||
standard: false,
|
||||
reduce: calculateDistinctCount,
|
||||
},
|
||||
]);
|
||||
// private registry of all stats
|
||||
interface TableStatIndex {
|
||||
[id: string]: FieldReducerInfo;
|
||||
}
|
||||
|
||||
const listOfStats: FieldReducerInfo[] = [];
|
||||
const index: TableStatIndex = {};
|
||||
let hasBuiltIndex = false;
|
||||
|
||||
function getById(id: string): FieldReducerInfo | undefined {
|
||||
if (!hasBuiltIndex) {
|
||||
[
|
||||
{
|
||||
id: ReducerID.lastNotNull,
|
||||
name: 'Last (not null)',
|
||||
description: 'Last non-null value',
|
||||
standard: true,
|
||||
alias: 'current',
|
||||
reduce: calculateLastNotNull,
|
||||
},
|
||||
{
|
||||
id: ReducerID.last,
|
||||
name: 'Last',
|
||||
description: 'Last Value',
|
||||
standard: true,
|
||||
reduce: calculateLast,
|
||||
},
|
||||
{ id: ReducerID.first, name: 'First', description: 'First Value', standard: true, reduce: calculateFirst },
|
||||
{
|
||||
id: ReducerID.firstNotNull,
|
||||
name: 'First (not null)',
|
||||
description: 'First non-null value',
|
||||
standard: true,
|
||||
reduce: calculateFirstNotNull,
|
||||
},
|
||||
{ id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
|
||||
{ id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
|
||||
{ id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, alias: 'avg' },
|
||||
{
|
||||
id: ReducerID.sum,
|
||||
name: 'Total',
|
||||
description: 'The sum of all values',
|
||||
emptyInputResult: 0,
|
||||
standard: true,
|
||||
alias: 'total',
|
||||
},
|
||||
{
|
||||
id: ReducerID.count,
|
||||
name: 'Count',
|
||||
description: 'Number of values in response',
|
||||
emptyInputResult: 0,
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.range,
|
||||
name: 'Range',
|
||||
description: 'Difference between minimum and maximum values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.delta,
|
||||
name: 'Delta',
|
||||
description: 'Cumulative change in value',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.step,
|
||||
name: 'Step',
|
||||
description: 'Minimum interval between values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.diff,
|
||||
name: 'Difference',
|
||||
description: 'Difference between first and last values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.logmin,
|
||||
name: 'Min (above zero)',
|
||||
description: 'Used for log min scale',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.changeCount,
|
||||
name: 'Change Count',
|
||||
description: 'Number of times the value changes',
|
||||
standard: false,
|
||||
reduce: calculateChangeCount,
|
||||
},
|
||||
{
|
||||
id: ReducerID.distinctCount,
|
||||
name: 'Distinct Count',
|
||||
description: 'Number of distinct values',
|
||||
standard: false,
|
||||
reduce: calculateDistinctCount,
|
||||
},
|
||||
].forEach(info => {
|
||||
const { id, alias } = info;
|
||||
if (index.hasOwnProperty(id)) {
|
||||
console.warn('Duplicate Stat', id, info, index);
|
||||
}
|
||||
index[id] = info;
|
||||
if (alias) {
|
||||
if (index.hasOwnProperty(alias)) {
|
||||
console.warn('Duplicate Stat (alias)', alias, info, index);
|
||||
}
|
||||
index[alias] = info;
|
||||
}
|
||||
listOfStats.push(info);
|
||||
});
|
||||
hasBuiltIndex = true;
|
||||
}
|
||||
|
||||
return index[id];
|
||||
}
|
||||
|
||||
function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
|
||||
const calcs = {
|
||||
@@ -214,7 +253,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
|
||||
count: 0,
|
||||
nonNullCount: 0,
|
||||
allIsNull: true,
|
||||
allIsZero: true,
|
||||
allIsZero: false,
|
||||
range: null,
|
||||
diff: null,
|
||||
delta: 0,
|
||||
@@ -225,7 +264,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
|
||||
} as FieldCalcs;
|
||||
|
||||
for (let i = 0; i < data.rows.length; i++) {
|
||||
let currentValue = data.rows[i] ? data.rows[i][fieldIndex] : null;
|
||||
let currentValue = data.rows[i][fieldIndex];
|
||||
if (i === 0) {
|
||||
calcs.first = currentValue;
|
||||
}
|
||||
@@ -311,10 +350,6 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
|
||||
calcs.mean = calcs.sum! / calcs.nonNullCount;
|
||||
}
|
||||
|
||||
if (calcs.allIsNull) {
|
||||
calcs.allIsZero = false;
|
||||
}
|
||||
|
||||
if (calcs.max !== null && calcs.min !== null) {
|
||||
calcs.range = calcs.max - calcs.min;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
export * from './string';
|
||||
export * from './registry';
|
||||
export * from './markdown';
|
||||
export * from './processDataFrame';
|
||||
export * from './csv';
|
||||
|
||||
@@ -29,15 +29,6 @@ describe('toDataFrame', () => {
|
||||
expect(series.fields[0].name).toEqual('Value');
|
||||
});
|
||||
|
||||
it('assumes TimeSeries values are numbers', () => {
|
||||
const input1 = {
|
||||
target: 'time',
|
||||
datapoints: [[100, 1], [200, 2]],
|
||||
};
|
||||
const data = toDataFrame(input1);
|
||||
expect(data.fields[0].type).toBe(FieldType.number);
|
||||
});
|
||||
|
||||
it('keeps dataFrame unchanged', () => {
|
||||
const input = {
|
||||
fields: [{ text: 'A' }, { text: 'B' }, { text: 'C' }],
|
||||
|
||||
@@ -29,7 +29,6 @@ function convertTimeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
|
||||
fields: [
|
||||
{
|
||||
name: timeSeries.target || 'Value',
|
||||
type: FieldType.number,
|
||||
unit: timeSeries.unit,
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,134 +0,0 @@
|
||||
import { SelectableValue } from '../types/select';
|
||||
|
||||
export interface RegistryItem {
|
||||
id: string; // Unique Key -- saved in configs
|
||||
name: string; // Display Name, can change without breaking configs
|
||||
description: string;
|
||||
aliasIds?: string[]; // when the ID changes, we may want backwards compatibility ('current' => 'last')
|
||||
|
||||
/**
|
||||
* Some extensions should not be user selectable
|
||||
* like: 'all' and 'any' matchers;
|
||||
*/
|
||||
excludeFromPicker?: boolean;
|
||||
}
|
||||
|
||||
interface RegistrySelectInfo {
|
||||
options: Array<SelectableValue<string>>;
|
||||
current: Array<SelectableValue<string>>;
|
||||
}
|
||||
|
||||
export class Registry<T extends RegistryItem> {
|
||||
private ordered: T[] = [];
|
||||
private byId = new Map<string, T>();
|
||||
private initalized = false;
|
||||
|
||||
constructor(private init?: () => T[]) {}
|
||||
|
||||
getIfExists(id: string | undefined): T | undefined {
|
||||
if (!this.initalized) {
|
||||
if (this.init) {
|
||||
for (const ext of this.init()) {
|
||||
this.register(ext);
|
||||
}
|
||||
}
|
||||
this.sort();
|
||||
this.initalized = true;
|
||||
}
|
||||
if (id) {
|
||||
return this.byId.get(id);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
get(id: string): T {
|
||||
const v = this.getIfExists(id);
|
||||
if (!v) {
|
||||
throw new Error('Undefined: ' + id);
|
||||
}
|
||||
return v;
|
||||
}
|
||||
|
||||
selectOptions(current?: string[], filter?: (ext: T) => boolean): RegistrySelectInfo {
|
||||
if (!this.initalized) {
|
||||
this.getIfExists('xxx'); // will trigger init
|
||||
}
|
||||
|
||||
const select = {
|
||||
options: [],
|
||||
current: [],
|
||||
} as RegistrySelectInfo;
|
||||
|
||||
const currentIds: any = {};
|
||||
if (current) {
|
||||
for (const id of current) {
|
||||
currentIds[id] = true;
|
||||
}
|
||||
}
|
||||
|
||||
for (const ext of this.ordered) {
|
||||
if (ext.excludeFromPicker) {
|
||||
continue;
|
||||
}
|
||||
if (filter && !filter(ext)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const option = {
|
||||
value: ext.id,
|
||||
label: ext.name,
|
||||
description: ext.description,
|
||||
};
|
||||
|
||||
select.options.push(option);
|
||||
if (currentIds[ext.id]) {
|
||||
select.current.push(option);
|
||||
}
|
||||
}
|
||||
return select;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a list of values by ID, or all values if not specified
|
||||
*/
|
||||
list(ids?: any[]): T[] {
|
||||
if (ids) {
|
||||
const found: T[] = [];
|
||||
for (const id of ids) {
|
||||
const v = this.getIfExists(id);
|
||||
if (v) {
|
||||
found.push(v);
|
||||
}
|
||||
}
|
||||
return found;
|
||||
}
|
||||
if (!this.initalized) {
|
||||
this.getIfExists('xxx'); // will trigger init
|
||||
}
|
||||
return [...this.ordered]; // copy of everythign just in case
|
||||
}
|
||||
|
||||
register(ext: T) {
|
||||
if (this.byId.has(ext.id)) {
|
||||
throw new Error('Duplicate Key:' + ext.id);
|
||||
}
|
||||
this.byId.set(ext.id, ext);
|
||||
this.ordered.push(ext);
|
||||
|
||||
if (ext.aliasIds) {
|
||||
for (const alias of ext.aliasIds) {
|
||||
if (!this.byId.has(alias)) {
|
||||
this.byId.set(alias, ext);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.initalized) {
|
||||
this.sort();
|
||||
}
|
||||
}
|
||||
|
||||
private sort() {
|
||||
// TODO sort the list
|
||||
}
|
||||
}
|
||||
@@ -1,22 +1,23 @@
|
||||
import { Threshold } from '../types';
|
||||
|
||||
export function getActiveThreshold(value: number, thresholds: Threshold[]): Threshold {
|
||||
let active = thresholds[0];
|
||||
for (const threshold of thresholds) {
|
||||
if (value >= threshold.value) {
|
||||
active = threshold;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
export function getThresholdForValue(
|
||||
thresholds: Threshold[],
|
||||
value: number | null | string | undefined
|
||||
): Threshold | null {
|
||||
if (thresholds.length === 1) {
|
||||
return thresholds[0];
|
||||
}
|
||||
return active;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sorts the thresholds
|
||||
*/
|
||||
export function sortThresholds(thresholds: Threshold[]) {
|
||||
return thresholds.sort((t1, t2) => {
|
||||
return t1.value - t2.value;
|
||||
});
|
||||
const atThreshold = thresholds.filter(threshold => (value as number) === threshold.value)[0];
|
||||
if (atThreshold) {
|
||||
return atThreshold;
|
||||
}
|
||||
|
||||
const belowThreshold = thresholds.filter(threshold => (value as number) > threshold.value);
|
||||
if (belowThreshold.length > 0) {
|
||||
const nearestThreshold = belowThreshold.sort((t1: Threshold, t2: Threshold) => t2.value - t1.value)[0];
|
||||
return nearestThreshold;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -1,11 +1,19 @@
|
||||
{
|
||||
"extends": "../tsconfig.json",
|
||||
"extends": "../../tsconfig.json",
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "../../public/app/types/jquery/*.ts"],
|
||||
"exclude": ["dist", "node_modules"],
|
||||
"compilerOptions": {
|
||||
"rootDirs": ["."],
|
||||
"typeRoots": ["./node_modules/@types", "types"],
|
||||
"module": "esnext",
|
||||
"outDir": "compiled",
|
||||
"declaration": true,
|
||||
"declarationDir": "dist",
|
||||
"outDir": "compiled"
|
||||
"strict": true,
|
||||
"alwaysStrict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"typeRoots": ["./node_modules/@types", "types"],
|
||||
"skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
|
||||
"removeComments": false
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
# Grafana Runtime library
|
||||
|
||||
Interfaces that let you use the runtime...
|
||||
This package allows access to grafana services. It requires Grafana to be running already and the functions to be imported as externals.
|
||||
@@ -1,11 +1,9 @@
|
||||
{
|
||||
"name": "@grafana/runtime",
|
||||
"version": "6.3.0-alpha.36",
|
||||
"version": "6.3.0-alpha.39",
|
||||
"description": "Grafana Runtime Library",
|
||||
"keywords": [
|
||||
"typescript",
|
||||
"react",
|
||||
"react-component"
|
||||
"grafana"
|
||||
],
|
||||
"main": "src/index.ts",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,11 +1,19 @@
|
||||
{
|
||||
"extends": "../tsconfig.json",
|
||||
"extends": "../../tsconfig.json",
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "../../public/app/types/jquery/*.ts"],
|
||||
"exclude": ["dist", "node_modules"],
|
||||
"compilerOptions": {
|
||||
"rootDirs": ["."],
|
||||
"typeRoots": ["./node_modules/@types", "types"],
|
||||
"module": "esnext",
|
||||
"outDir": "compiled",
|
||||
"declaration": true,
|
||||
"declarationDir": "dist",
|
||||
"outDir": "compiled"
|
||||
"strict": true,
|
||||
"alwaysStrict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"typeRoots": ["./node_modules/@types", "types"],
|
||||
"skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
|
||||
"removeComments": false
|
||||
}
|
||||
}
|
||||
|
||||
@@ -84,7 +84,7 @@ Adidtionaly, you can also provide additional Jest config via package.json file.
|
||||
|
||||
|
||||
## Working with CSS & static assets
|
||||
We support pure css, SASS and CSS in JS approach (via Emotion).
|
||||
We support pure css, SASS and CSS in JS approach (via Emotion). All static assets referenced in your code (i.e. images) should be placed under `src/static` directory and referenced using relative paths.
|
||||
|
||||
1. Single css/sass file
|
||||
Create your css/sass file and import it in your plugin entry point (typically module.ts):
|
||||
@@ -101,8 +101,6 @@ If you want to provide different stylesheets for dark/light theme, create `dark.
|
||||
|
||||
TODO: add note about loadPluginCss
|
||||
|
||||
Note that static files (png, svg, json, html) are all copied to dist directory when the plugin is bundled. Relative paths to those files does not change.
|
||||
|
||||
3. Emotion
|
||||
Starting from Grafana 6.2 our suggested way of styling plugins is by using [Emotion](https://emotion.sh). It's a css-in-js library that we use internaly at Grafana. The biggest advantage of using Emotion is that you will get access to Grafana Theme variables.
|
||||
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
{
|
||||
"name": "@grafana/toolkit",
|
||||
"version": "6.3.0-alpha.36",
|
||||
"version": "6.3.0-alpha.40",
|
||||
"description": "Grafana Toolkit",
|
||||
"keywords": [
|
||||
"typescript",
|
||||
"react",
|
||||
"react-component"
|
||||
"grafana",
|
||||
"cli",
|
||||
"plugins"
|
||||
],
|
||||
"bin": {
|
||||
"grafana-toolkit": "./bin/grafana-toolkit.js"
|
||||
@@ -30,6 +30,7 @@
|
||||
"@types/node": "^12.0.4",
|
||||
"@types/react-dev-utils": "^9.0.1",
|
||||
"@types/semver": "^6.0.0",
|
||||
"@types/tmp": "^0.1.0",
|
||||
"@types/webpack": "4.4.34",
|
||||
"axios": "0.19.0",
|
||||
"babel-loader": "8.0.6",
|
||||
|
||||
@@ -13,7 +13,13 @@ import { pluginTestTask } from './tasks/plugin.tests';
|
||||
import { searchTestDataSetupTask } from './tasks/searchTestDataSetup';
|
||||
import { closeMilestoneTask } from './tasks/closeMilestone';
|
||||
import { pluginDevTask } from './tasks/plugin.dev';
|
||||
import { pluginCITask } from './tasks/plugin.ci';
|
||||
import {
|
||||
ciBuildPluginTask,
|
||||
ciBundlePluginTask,
|
||||
ciTestPluginTask,
|
||||
ciDeployPluginTask,
|
||||
ciSetupPluginTask,
|
||||
} from './tasks/plugin.ci';
|
||||
import { buildPackageTask } from './tasks/package.build';
|
||||
|
||||
export const run = (includeInternalScripts = false) => {
|
||||
@@ -141,15 +147,47 @@ export const run = (includeInternalScripts = false) => {
|
||||
});
|
||||
|
||||
program
|
||||
.command('plugin:ci')
|
||||
.option('--dryRun', "Dry run (don't post results)")
|
||||
.description('Run Plugin CI task')
|
||||
.command('plugin:ci-build')
|
||||
.option('--platform <platform>', 'For backend task, which backend to run')
|
||||
.description('Build the plugin, leaving artifacts in /dist')
|
||||
.action(async cmd => {
|
||||
await execTask(pluginCITask)({
|
||||
dryRun: cmd.dryRun,
|
||||
await execTask(ciBuildPluginTask)({
|
||||
platform: cmd.platform,
|
||||
});
|
||||
});
|
||||
|
||||
program
|
||||
.command('plugin:ci-bundle')
|
||||
.description('Create a zip artifact for the plugin')
|
||||
.action(async cmd => {
|
||||
await execTask(ciBundlePluginTask)({});
|
||||
});
|
||||
|
||||
program
|
||||
.command('plugin:ci-setup')
|
||||
.option('--installer <installer>', 'Name of installer to download and run')
|
||||
.description('Install and configure grafana')
|
||||
.action(async cmd => {
|
||||
await execTask(ciSetupPluginTask)({
|
||||
installer: cmd.installer,
|
||||
});
|
||||
});
|
||||
program
|
||||
.command('plugin:ci-test')
|
||||
.description('end-to-end test using bundle in /artifacts')
|
||||
.action(async cmd => {
|
||||
await execTask(ciTestPluginTask)({
|
||||
platform: cmd.platform,
|
||||
});
|
||||
});
|
||||
|
||||
program
|
||||
.command('plugin:ci-deploy')
|
||||
.description('Publish plugin CI results')
|
||||
.action(async cmd => {
|
||||
await execTask(ciDeployPluginTask)({});
|
||||
});
|
||||
|
||||
program.on('command:*', () => {
|
||||
console.error('Invalid command: %s\nSee --help for a list of available commands.', program.args.join(' '));
|
||||
process.exit(1);
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import axios from 'axios';
|
||||
// @ts-ignore
|
||||
import * as _ from 'lodash';
|
||||
import { Task, TaskRunner } from './task';
|
||||
|
||||
@@ -3,6 +3,7 @@ import execa = require('execa');
|
||||
import * as fs from 'fs';
|
||||
// @ts-ignore
|
||||
import * as path from 'path';
|
||||
import { changeCwdToGrafanaUi, restoreCwd, changeCwdToPackage } from '../utils/cwd';
|
||||
import chalk from 'chalk';
|
||||
import { useSpinner } from '../utils/useSpinner';
|
||||
import { Task, TaskRunner } from './task';
|
||||
|
||||
@@ -4,6 +4,7 @@ import execa = require('execa');
|
||||
import path = require('path');
|
||||
import fs = require('fs');
|
||||
import glob = require('glob');
|
||||
import util = require('util');
|
||||
import { Linter, Configuration, RuleFailure } from 'tslint';
|
||||
import * as prettier from 'prettier';
|
||||
|
||||
@@ -16,6 +17,7 @@ interface PluginBuildOptions {
|
||||
|
||||
export const bundlePlugin = useSpinner<PluginBundleOptions>('Compiling...', async options => await bundleFn(options));
|
||||
|
||||
const readFileAsync = util.promisify(fs.readFile);
|
||||
// @ts-ignore
|
||||
export const clean = useSpinner<void>('Cleaning', async () => await execa('rimraf', [`${process.cwd()}/dist`]));
|
||||
|
||||
|
||||
@@ -9,7 +9,8 @@ import path = require('path');
|
||||
import fs = require('fs');
|
||||
|
||||
export interface PluginCIOptions {
|
||||
dryRun?: boolean;
|
||||
platform?: string;
|
||||
installer?: string;
|
||||
}
|
||||
|
||||
const calcJavascriptSize = (base: string, files?: string[]): number => {
|
||||
@@ -32,22 +33,106 @@ const calcJavascriptSize = (base: string, files?: string[]): number => {
|
||||
return size;
|
||||
};
|
||||
|
||||
const pluginCIRunner: TaskRunner<PluginCIOptions> = async ({ dryRun }) => {
|
||||
const getWorkFolder = () => {
|
||||
let dir = `${process.cwd()}/work`;
|
||||
if (process.env.CIRCLE_JOB) {
|
||||
dir = path.resolve(dir, process.env.CIRCLE_JOB);
|
||||
}
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
return dir;
|
||||
};
|
||||
|
||||
const writeWorkStats = (startTime: number, workDir: string) => {
|
||||
const elapsed = Date.now() - startTime;
|
||||
const stats = {
|
||||
job: `${process.env.CIRCLE_JOB}`,
|
||||
startTime,
|
||||
buildTime: elapsed,
|
||||
endTime: Date.now(),
|
||||
};
|
||||
const f = path.resolve(workDir, 'stats.json');
|
||||
fs.writeFile(f, JSON.stringify(stats, null, 2), err => {
|
||||
if (err) {
|
||||
throw new Error('Unable to stats: ' + f);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* 1. BUILD
|
||||
*
|
||||
* when platform exists it is building backend, otherwise frontend
|
||||
*
|
||||
* Each build writes data:
|
||||
* ~/work/build_xxx/
|
||||
*
|
||||
* Anything that should be put into the final zip file should be put in:
|
||||
* ~/work/build_xxx/dist
|
||||
*/
|
||||
const buildPluginRunner: TaskRunner<PluginCIOptions> = async ({ platform }) => {
|
||||
const start = Date.now();
|
||||
const distDir = `${process.cwd()}/dist`;
|
||||
const artifactsDir = `${process.cwd()}/artifacts`;
|
||||
await execa('rimraf', [`${process.cwd()}/coverage`]);
|
||||
await execa('rimraf', [artifactsDir]);
|
||||
const workDir = getWorkFolder();
|
||||
await execa('rimraf', [workDir]);
|
||||
fs.mkdirSync(workDir);
|
||||
|
||||
// Do regular build process
|
||||
await pluginBuildRunner({ coverage: true });
|
||||
const elapsed = Date.now() - start;
|
||||
|
||||
if (!fs.existsSync(artifactsDir)) {
|
||||
fs.mkdirSync(artifactsDir);
|
||||
if (platform) {
|
||||
console.log('TODO, backend support?');
|
||||
const file = path.resolve(workDir, 'README.txt');
|
||||
fs.writeFile(workDir + '/README.txt', 'TODO... build it!', err => {
|
||||
if (err) {
|
||||
throw new Error('Unable to write: ' + file);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// Do regular build process with coverage
|
||||
await pluginBuildRunner({ coverage: true });
|
||||
}
|
||||
|
||||
// Move dist to the scoped work folder
|
||||
const distDir = path.resolve(process.cwd(), 'dist');
|
||||
if (fs.existsSync(distDir)) {
|
||||
fs.renameSync(distDir, path.resolve(workDir, 'dist'));
|
||||
}
|
||||
writeWorkStats(start, workDir);
|
||||
};
|
||||
|
||||
export const ciBuildPluginTask = new Task<PluginCIOptions>('Build Plugin', buildPluginRunner);
|
||||
|
||||
/**
|
||||
* 2. BUNDLE
|
||||
*
|
||||
* Take everything from `~/work/build_XXX/dist` and zip it into
|
||||
* artifacts
|
||||
*
|
||||
*/
|
||||
const bundlePluginRunner: TaskRunner<PluginCIOptions> = async () => {
|
||||
const start = Date.now();
|
||||
const workDir = getWorkFolder();
|
||||
|
||||
// Copy all `dist` folders to the root dist folder
|
||||
const distDir = path.resolve(process.cwd(), 'dist');
|
||||
if (!fs.existsSync(distDir)) {
|
||||
fs.mkdirSync(distDir);
|
||||
}
|
||||
fs.mkdirSync(distDir, { recursive: true });
|
||||
const dirs = fs.readdirSync(workDir);
|
||||
for (const dir of dirs) {
|
||||
if (dir.startsWith('build_')) {
|
||||
const contents = path.resolve(dir, 'dist');
|
||||
if (fs.existsSync(contents)) {
|
||||
await execa('cp', ['-rp', contents, distDir]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create an artifact
|
||||
const artifactsDir = path.resolve(process.cwd(), 'artifacts');
|
||||
if (!fs.existsSync(artifactsDir)) {
|
||||
fs.mkdirSync(artifactsDir, { recursive: true });
|
||||
}
|
||||
|
||||
// TODO? can this typed from @grafana/ui?
|
||||
const pluginInfo = getPluginJson(`${distDir}/plugin.json`);
|
||||
const zipName = pluginInfo.id + '-' + pluginInfo.info.version + '.zip';
|
||||
const zipFile = path.resolve(artifactsDir, zipName);
|
||||
@@ -55,23 +140,165 @@ const pluginCIRunner: TaskRunner<PluginCIOptions> = async ({ dryRun }) => {
|
||||
await execa('zip', ['-r', zipFile, '.']);
|
||||
restoreCwd();
|
||||
|
||||
const stats = {
|
||||
startTime: start,
|
||||
buildTime: elapsed,
|
||||
jsSize: calcJavascriptSize(distDir),
|
||||
zipSize: fs.statSync(zipFile).size,
|
||||
endTime: Date.now(),
|
||||
const zipStats = fs.statSync(zipFile);
|
||||
if (zipStats.size < 100) {
|
||||
throw new Error('Invalid zip file: ' + zipFile);
|
||||
}
|
||||
await execa('sha1sum', [zipFile, '>', zipFile + '.sha1']);
|
||||
const info = {
|
||||
name: zipName,
|
||||
size: zipStats.size,
|
||||
};
|
||||
fs.writeFile(artifactsDir + '/stats.json', JSON.stringify(stats, null, 2), err => {
|
||||
const f = path.resolve(artifactsDir, 'info.json');
|
||||
fs.writeFile(f, JSON.stringify(info, null, 2), err => {
|
||||
if (err) {
|
||||
throw new Error('Unable to write stats');
|
||||
throw new Error('Error writing artifact info: ' + f);
|
||||
}
|
||||
console.log('Stats', stats);
|
||||
});
|
||||
|
||||
if (!dryRun) {
|
||||
console.log('TODO send info to github?');
|
||||
}
|
||||
writeWorkStats(start, workDir);
|
||||
};
|
||||
|
||||
export const pluginCITask = new Task<PluginCIOptions>('Plugin CI', pluginCIRunner);
|
||||
export const ciBundlePluginTask = new Task<PluginCIOptions>('Bundle Plugin', bundlePluginRunner);
|
||||
|
||||
/**
|
||||
* 3. Setup (install grafana and setup provisioning)
|
||||
*
|
||||
* deploy the zip to a running grafana instance
|
||||
*
|
||||
*/
|
||||
const setupPluginRunner: TaskRunner<PluginCIOptions> = async ({ installer }) => {
|
||||
const start = Date.now();
|
||||
|
||||
if (!installer) {
|
||||
throw new Error('Missing installer path');
|
||||
}
|
||||
|
||||
// Download the grafana installer
|
||||
const workDir = getWorkFolder();
|
||||
const installFile = path.resolve(workDir, installer);
|
||||
if (!fs.existsSync(installFile)) {
|
||||
console.log('download', installer);
|
||||
const exe = await execa('wget', ['-O', installFile, 'https://dl.grafana.com/oss/release/' + installer]);
|
||||
console.log(exe.stdout);
|
||||
}
|
||||
|
||||
// Find the plugin zip file
|
||||
const artifactsDir = path.resolve(process.cwd(), 'artifacts');
|
||||
const artifactsInfo = require(path.resolve(artifactsDir, 'info.json'));
|
||||
const pluginZip = path.resolve(workDir, 'artifacts', artifactsInfo.name);
|
||||
if (!fs.existsSync(pluginZip)) {
|
||||
throw new Error('Missing zip file:' + pluginZip);
|
||||
}
|
||||
|
||||
// Create a grafana runtime folder
|
||||
const grafanaPluginsDir = path.resolve(require('os').homedir(), 'grafana', 'plugins');
|
||||
await execa('rimraf', [grafanaPluginsDir]);
|
||||
fs.mkdirSync(grafanaPluginsDir, { recursive: true });
|
||||
|
||||
// unzip package.zip -d /opt
|
||||
let exe = await execa('unzip', [pluginZip, '-d', grafanaPluginsDir]);
|
||||
console.log(exe.stdout);
|
||||
|
||||
// Write the custom settings
|
||||
const customIniPath = '/usr/share/grafana/conf/custom.ini';
|
||||
const customIniBody = `[paths] \n` + `plugins = ${grafanaPluginsDir}\n` + '';
|
||||
fs.writeFile(customIniPath, customIniBody, err => {
|
||||
if (err) {
|
||||
throw new Error('Unable to write: ' + customIniPath);
|
||||
}
|
||||
});
|
||||
|
||||
console.log('Install Grafana');
|
||||
exe = await execa('sudo', ['dpkg', 'i', installFile]);
|
||||
console.log(exe.stdout);
|
||||
|
||||
exe = await execa('sudo', ['grafana-server', 'start']);
|
||||
console.log(exe.stdout);
|
||||
exe = await execa('grafana-cli', ['--version']);
|
||||
|
||||
writeWorkStats(start, workDir + '_setup');
|
||||
};
|
||||
|
||||
export const ciSetupPluginTask = new Task<PluginCIOptions>('Setup Grafana', setupPluginRunner);
|
||||
|
||||
/**
|
||||
* 4. Test (end-to-end)
|
||||
*
|
||||
* deploy the zip to a running grafana instance
|
||||
*
|
||||
*/
|
||||
const testPluginRunner: TaskRunner<PluginCIOptions> = async ({ platform }) => {
|
||||
const start = Date.now();
|
||||
const workDir = getWorkFolder();
|
||||
|
||||
const args = {
|
||||
withCredentials: true,
|
||||
baseURL: process.env.GRAFANA_URL || 'http://localhost:3000/',
|
||||
responseType: 'json',
|
||||
auth: {
|
||||
username: 'admin',
|
||||
password: 'admin',
|
||||
},
|
||||
};
|
||||
|
||||
const axios = require('axios');
|
||||
const frontendSettings = await axios.get('api/frontend/settings', args);
|
||||
|
||||
console.log('Grafana Version: ' + JSON.stringify(frontendSettings.data.buildInfo, null, 2));
|
||||
|
||||
const pluginInfo = getPluginJson(`${process.cwd()}/src/plugin.json`);
|
||||
const pluginSettings = await axios.get(`api/plugins/${pluginInfo.id}/settings`, args);
|
||||
|
||||
console.log('Plugin Info: ' + JSON.stringify(pluginSettings.data, null, 2));
|
||||
|
||||
console.log('TODO puppeteer');
|
||||
|
||||
const elapsed = Date.now() - start;
|
||||
const stats = {
|
||||
job: `${process.env.CIRCLE_JOB}`,
|
||||
sha1: `${process.env.CIRCLE_SHA1}`,
|
||||
startTime: start,
|
||||
buildTime: elapsed,
|
||||
endTime: Date.now(),
|
||||
};
|
||||
|
||||
console.log('TODO Puppeteer Tests', stats);
|
||||
writeWorkStats(start, workDir);
|
||||
};
|
||||
|
||||
export const ciTestPluginTask = new Task<PluginCIOptions>('Test Plugin (e2e)', testPluginRunner);
|
||||
|
||||
/**
|
||||
* 4. Deploy
|
||||
*
|
||||
* deploy the zip to a running grafana instance
|
||||
*
|
||||
*/
|
||||
const deployPluginRunner: TaskRunner<PluginCIOptions> = async () => {
|
||||
const start = Date.now();
|
||||
|
||||
// TASK Time
|
||||
if (process.env.CIRCLE_INTERNAL_TASK_DATA) {
|
||||
const timingInfo = fs.readdirSync(`${process.env.CIRCLE_INTERNAL_TASK_DATA}`);
|
||||
if (timingInfo) {
|
||||
timingInfo.forEach(file => {
|
||||
console.log('TIMING INFO: ', file);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const elapsed = Date.now() - start;
|
||||
const stats = {
|
||||
job: `${process.env.CIRCLE_JOB}`,
|
||||
sha1: `${process.env.CIRCLE_SHA1}`,
|
||||
startTime: start,
|
||||
buildTime: elapsed,
|
||||
endTime: Date.now(),
|
||||
};
|
||||
console.log('TODO DEPLOY??', stats);
|
||||
console.log(' if PR => write a comment to github with difference ');
|
||||
console.log(' if master | vXYZ ==> upload artifacts to some repo ');
|
||||
};
|
||||
|
||||
export const ciDeployPluginTask = new Task<PluginCIOptions>('Deploy plugin', deployPluginRunner);
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import path = require('path');
|
||||
import fs = require('fs');
|
||||
import webpack = require('webpack');
|
||||
import { getWebpackConfig } from '../../../config/webpack.plugin.config';
|
||||
import formatWebpackMessages = require('react-dev-utils/formatWebpackMessages');
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import path = require('path');
|
||||
import * as jestCLI from 'jest-cli';
|
||||
import { useSpinner } from '../../utils/useSpinner';
|
||||
import { jestConfig } from '../../../config/jest.plugin.config';
|
||||
|
||||
@@ -46,6 +46,7 @@ export async function getTeam(team: any): Promise<any> {
|
||||
}
|
||||
|
||||
export async function addToTeam(team: any, user: any): Promise<any> {
|
||||
const members = await client.get(`/teams/${team.id}/members`);
|
||||
console.log(`Adding user ${user.name} to team ${team.name}`);
|
||||
await client.post(`/teams/${team.id}/members`, { userId: user.id });
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import execa = require('execa');
|
||||
import * as fs from 'fs';
|
||||
import { changeCwdToGrafanaUi, restoreCwd, changeCwdToGrafanaToolkit } from '../utils/cwd';
|
||||
import chalk from 'chalk';
|
||||
import { useSpinner } from '../utils/useSpinner';
|
||||
import { Task, TaskRunner } from './task';
|
||||
|
||||
@@ -3,7 +3,7 @@ import { getPluginJson, validatePluginJson } from './pluginValidation';
|
||||
describe('pluginValdation', () => {
|
||||
describe('plugin.json', () => {
|
||||
test('missing plugin.json file', () => {
|
||||
expect(() => getPluginJson(`${__dirname}/mocks/missing-plugin-json`)).toThrow('plugin.json file is missing!');
|
||||
expect(() => getPluginJson(`${__dirname}/mocks/missing-plugin.json`)).toThrowError();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -22,15 +22,24 @@ export const validatePluginJson = (pluginJson: any) => {
|
||||
if (!pluginJson.info.version) {
|
||||
throw new Error('Plugin info.version is missing in plugin.json');
|
||||
}
|
||||
|
||||
const types = ['panel', 'datasource', 'app'];
|
||||
const type = pluginJson.type;
|
||||
if (!types.includes(type)) {
|
||||
throw new Error('Invalid plugin type in plugin.json: ' + type);
|
||||
}
|
||||
|
||||
if (!pluginJson.id.endsWith('-' + type)) {
|
||||
throw new Error('[plugin.json] id should end with: -' + type);
|
||||
}
|
||||
};
|
||||
|
||||
export const getPluginJson = (root: string = process.cwd()): PluginJSONSchema => {
|
||||
export const getPluginJson = (path: string): PluginJSONSchema => {
|
||||
let pluginJson;
|
||||
|
||||
try {
|
||||
pluginJson = require(path.resolve(root, 'src/plugin.json'));
|
||||
pluginJson = require(path);
|
||||
} catch (e) {
|
||||
throw new Error('plugin.json file is missing!');
|
||||
throw new Error('Unable to find: ' + path);
|
||||
}
|
||||
|
||||
validatePluginJson(pluginJson);
|
||||
|
||||
@@ -7,7 +7,7 @@ const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||
const OptimizeCssAssetsPlugin = require('optimize-css-assets-webpack-plugin');
|
||||
|
||||
import * as webpack from 'webpack';
|
||||
import { getStyleLoaders, getStylesheetEntries, getFileLoaders } from './webpack/loaders';
|
||||
import { hasThemeStylesheets, getStyleLoaders, getStylesheetEntries, getFileLoaders } from './webpack/loaders';
|
||||
|
||||
interface WebpackConfigurationOptions {
|
||||
watch?: boolean;
|
||||
@@ -51,7 +51,6 @@ const getManualChunk = (id: string) => {
|
||||
};
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const getEntries = () => {
|
||||
|
||||
@@ -3,6 +3,7 @@ import { getStylesheetEntries, hasThemeStylesheets } from './loaders';
|
||||
describe('Loaders', () => {
|
||||
describe('stylesheet helpers', () => {
|
||||
const logSpy = jest.spyOn(console, 'log').mockImplementation();
|
||||
const errorSpy = jest.spyOn(console, 'error').mockImplementation();
|
||||
|
||||
afterAll(() => {
|
||||
logSpy.mockRestore();
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
import { getPluginJson } from '../utils/pluginValidation';
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||
|
||||
@@ -119,8 +122,8 @@ export const getFileLoaders = () => {
|
||||
? {
|
||||
loader: 'file-loader',
|
||||
options: {
|
||||
outputPath: '/',
|
||||
name: '[path][name].[ext]',
|
||||
outputPath: 'static',
|
||||
name: '[name].[hash:8].[ext]',
|
||||
},
|
||||
}
|
||||
: // When using single css import images are inlined as base64 URIs in the result bundle
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
{
|
||||
"extends": "../tsconfig.json",
|
||||
"include": ["src/**/*.ts"],
|
||||
"exclude": ["dist", "node_modules"],
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"rootDirs": ["."],
|
||||
"outDir": "dist/src",
|
||||
"declaration": false,
|
||||
"strict": true,
|
||||
"alwaysStrict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"typeRoots": ["./node_modules/@types"],
|
||||
"skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
|
||||
"removeComments": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["es2015", "es2017.string"]
|
||||
}
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
{
|
||||
"name": "@grafana/ui",
|
||||
"version": "6.3.0-alpha.36",
|
||||
"version": "6.3.0-alpha.39",
|
||||
"description": "Grafana Components Library",
|
||||
"keywords": [
|
||||
"typescript",
|
||||
"grafana",
|
||||
"react",
|
||||
"react-component"
|
||||
],
|
||||
|
||||
@@ -49,9 +49,9 @@ function addBarGaugeStory(name: string, overrides: Partial<Props>) {
|
||||
orientation: VizOrientation.Vertical,
|
||||
displayMode: 'basic',
|
||||
thresholds: [
|
||||
{ value: -Infinity, color: 'green' },
|
||||
{ value: threshold1Value, color: threshold1Color },
|
||||
{ value: threshold2Value, color: threshold2Color },
|
||||
{ index: 0, value: -Infinity, color: 'green' },
|
||||
{ index: 1, value: threshold1Value, color: threshold1Color },
|
||||
{ index: 1, value: threshold2Value, color: threshold2Color },
|
||||
],
|
||||
};
|
||||
|
||||
|
||||
@@ -25,7 +25,11 @@ function getProps(propOverrides?: Partial<Props>): Props {
|
||||
maxValue: 100,
|
||||
minValue: 0,
|
||||
displayMode: 'basic',
|
||||
thresholds: [{ value: -Infinity, color: 'green' }, { value: 70, color: 'orange' }, { value: 90, color: 'red' }],
|
||||
thresholds: [
|
||||
{ index: 0, value: -Infinity, color: 'green' },
|
||||
{ index: 1, value: 70, color: 'orange' },
|
||||
{ index: 2, value: 90, color: 'red' },
|
||||
],
|
||||
height: 300,
|
||||
width: 300,
|
||||
value: {
|
||||
|
||||
@@ -7,7 +7,7 @@ import { getColorFromHexRgbOrName } from '../../utils';
|
||||
|
||||
// Types
|
||||
import { DisplayValue, Themeable, VizOrientation } from '../../types';
|
||||
import { Threshold, TimeSeriesValue, getActiveThreshold } from '@grafana/data';
|
||||
import { Threshold, TimeSeriesValue, getThresholdForValue } from '@grafana/data';
|
||||
|
||||
const MIN_VALUE_HEIGHT = 18;
|
||||
const MAX_VALUE_HEIGHT = 50;
|
||||
@@ -87,14 +87,8 @@ export class BarGauge extends PureComponent<Props> {
|
||||
|
||||
getCellColor(positionValue: TimeSeriesValue): CellColors {
|
||||
const { thresholds, theme, value } = this.props;
|
||||
if (positionValue === null) {
|
||||
return {
|
||||
background: 'gray',
|
||||
border: 'gray',
|
||||
};
|
||||
}
|
||||
const activeThreshold = getThresholdForValue(thresholds, positionValue);
|
||||
|
||||
const activeThreshold = getActiveThreshold(positionValue, thresholds);
|
||||
if (activeThreshold !== null) {
|
||||
const color = getColorFromHexRgbOrName(activeThreshold.color, theme.type);
|
||||
|
||||
@@ -480,7 +474,7 @@ export function getBarGradient(props: Props, maxSize: number): string {
|
||||
export function getValueColor(props: Props): string {
|
||||
const { thresholds, theme, value } = props;
|
||||
|
||||
const activeThreshold = getActiveThreshold(value.numeric, thresholds);
|
||||
const activeThreshold = getThresholdForValue(thresholds, value.numeric);
|
||||
|
||||
if (activeThreshold !== null) {
|
||||
return getColorFromHexRgbOrName(activeThreshold.color, theme.type);
|
||||
|
||||
@@ -77,7 +77,7 @@ export class CustomScrollbar extends Component<Props> {
|
||||
{...passedProps}
|
||||
className={cx(
|
||||
css`
|
||||
visibility: ${hideTrack ? 'hidden' : 'visible'};
|
||||
visibility: ${hideTrack ? 'none' : 'visible'};
|
||||
`,
|
||||
track
|
||||
)}
|
||||
|
||||
@@ -14,7 +14,7 @@ const setup = (propOverrides?: object) => {
|
||||
minValue: 0,
|
||||
showThresholdMarkers: true,
|
||||
showThresholdLabels: false,
|
||||
thresholds: [{ value: -Infinity, color: '#7EB26D' }],
|
||||
thresholds: [{ index: 0, value: -Infinity, color: '#7EB26D' }],
|
||||
height: 300,
|
||||
width: 300,
|
||||
value: {
|
||||
@@ -48,9 +48,9 @@ describe('Get thresholds formatted', () => {
|
||||
it('should get the correct formatted values when thresholds are added', () => {
|
||||
const { instance } = setup({
|
||||
thresholds: [
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
],
|
||||
});
|
||||
|
||||
|
||||
@@ -43,12 +43,12 @@ export class Gauge extends PureComponent<Props> {
|
||||
const lastThreshold = thresholds[thresholds.length - 1];
|
||||
|
||||
return [
|
||||
...thresholds.map((threshold, index) => {
|
||||
if (index === 0) {
|
||||
...thresholds.map(threshold => {
|
||||
if (threshold.index === 0) {
|
||||
return { value: minValue, color: getColorFromHexRgbOrName(threshold.color, theme.type) };
|
||||
}
|
||||
|
||||
const previousThreshold = thresholds[index - 1];
|
||||
const previousThreshold = thresholds[threshold.index - 1];
|
||||
return { value: threshold.value, color: getColorFromHexRgbOrName(previousThreshold.color, theme.type) };
|
||||
}),
|
||||
{ value: maxValue, color: getColorFromHexRgbOrName(lastThreshold.color, theme.type) },
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { PureComponent } from 'react';
|
||||
import classNames from 'classnames';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
import { Tooltip } from '../Tooltip/Tooltip';
|
||||
import { ButtonSelect } from '../Select/ButtonSelect';
|
||||
|
||||
@@ -23,7 +23,7 @@ export class RefreshPicker extends PureComponent<Props> {
|
||||
super(props);
|
||||
}
|
||||
|
||||
intervalsToOptions = (intervals: string[] | undefined): Array<SelectableValue<string>> => {
|
||||
intervalsToOptions = (intervals: string[] | undefined): Array<SelectOptionItem<string>> => {
|
||||
const intervalsOrDefault = intervals || defaultIntervals;
|
||||
const options = intervalsOrDefault
|
||||
.filter(str => str !== '')
|
||||
@@ -37,7 +37,7 @@ export class RefreshPicker extends PureComponent<Props> {
|
||||
return options;
|
||||
};
|
||||
|
||||
onChangeSelect = (item: SelectableValue<string>) => {
|
||||
onChangeSelect = (item: SelectOptionItem<string>) => {
|
||||
const { onIntervalChanged } = this.props;
|
||||
if (onIntervalChanged) {
|
||||
// @ts-ignore
|
||||
|
||||
@@ -4,7 +4,7 @@ import { action } from '@storybook/addon-actions';
|
||||
import { withKnobs, object, text } from '@storybook/addon-knobs';
|
||||
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
|
||||
import { UseState } from '../../utils/storybook/UseState';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { SelectOptionItem } from './Select';
|
||||
import { ButtonSelect } from './ButtonSelect';
|
||||
|
||||
const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
|
||||
@@ -12,9 +12,9 @@ const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
|
||||
ButtonSelectStories.addDecorator(withCenteredStory).addDecorator(withKnobs);
|
||||
|
||||
ButtonSelectStories.add('default', () => {
|
||||
const intialState: SelectableValue<string> = { label: 'A label', value: 'A value' };
|
||||
const value = object<SelectableValue<string>>('Selected Value:', intialState);
|
||||
const options = object<Array<SelectableValue<string>>>('Options:', [
|
||||
const intialState: SelectOptionItem<string> = { label: 'A label', value: 'A value' };
|
||||
const value = object<SelectOptionItem<string>>('Selected Value:', intialState);
|
||||
const options = object<Array<SelectOptionItem<string>>>('Options:', [
|
||||
intialState,
|
||||
{ label: 'Another label', value: 'Another value' },
|
||||
]);
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import React, { PureComponent, ReactElement } from 'react';
|
||||
import Select from './Select';
|
||||
import Select, { SelectOptionItem } from './Select';
|
||||
import { PopperContent } from '../Tooltip/PopperController';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
|
||||
interface ButtonComponentProps {
|
||||
label: ReactElement | string | undefined;
|
||||
@@ -31,13 +30,13 @@ const ButtonComponent = (buttonProps: ButtonComponentProps) => (props: any) => {
|
||||
|
||||
export interface Props<T> {
|
||||
className: string | undefined;
|
||||
options: Array<SelectableValue<T>>;
|
||||
value?: SelectableValue<T>;
|
||||
options: Array<SelectOptionItem<T>>;
|
||||
value?: SelectOptionItem<T>;
|
||||
label?: ReactElement | string;
|
||||
iconClass?: string;
|
||||
components?: any;
|
||||
maxMenuHeight?: number;
|
||||
onChange: (item: SelectableValue<T>) => void;
|
||||
onChange: (item: SelectOptionItem<T>) => void;
|
||||
tooltipContent?: PopperContent<any>;
|
||||
isMenuOpen?: boolean;
|
||||
onOpenMenu?: () => void;
|
||||
@@ -46,7 +45,7 @@ export interface Props<T> {
|
||||
}
|
||||
|
||||
export class ButtonSelect<T> extends PureComponent<Props<T>> {
|
||||
onChange = (item: SelectableValue<T>) => {
|
||||
onChange = (item: SelectOptionItem<T>) => {
|
||||
const { onChange } = this.props;
|
||||
onChange(item);
|
||||
};
|
||||
|
||||
@@ -19,16 +19,23 @@ import resetSelectStyles from './resetSelectStyles';
|
||||
import { CustomScrollbar } from '../CustomScrollbar/CustomScrollbar';
|
||||
import { PopperContent } from '../Tooltip/PopperController';
|
||||
import { Tooltip } from '../Tooltip/Tooltip';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
|
||||
export interface SelectOptionItem<T> {
|
||||
label?: string;
|
||||
value?: T;
|
||||
imgUrl?: string;
|
||||
description?: string;
|
||||
[key: string]: any;
|
||||
}
|
||||
|
||||
export interface CommonProps<T> {
|
||||
defaultValue?: any;
|
||||
getOptionLabel?: (item: SelectableValue<T>) => string;
|
||||
getOptionValue?: (item: SelectableValue<T>) => string;
|
||||
onChange: (item: SelectableValue<T>) => {} | void;
|
||||
getOptionLabel?: (item: SelectOptionItem<T>) => string;
|
||||
getOptionValue?: (item: SelectOptionItem<T>) => string;
|
||||
onChange: (item: SelectOptionItem<T>) => {} | void;
|
||||
placeholder?: string;
|
||||
width?: number;
|
||||
value?: SelectableValue<T>;
|
||||
value?: SelectOptionItem<T>;
|
||||
className?: string;
|
||||
isDisabled?: boolean;
|
||||
isSearchable?: boolean;
|
||||
@@ -50,12 +57,12 @@ export interface CommonProps<T> {
|
||||
}
|
||||
|
||||
export interface SelectProps<T> extends CommonProps<T> {
|
||||
options: Array<SelectableValue<T>>;
|
||||
options: Array<SelectOptionItem<T>>;
|
||||
}
|
||||
|
||||
interface AsyncProps<T> extends CommonProps<T> {
|
||||
defaultOptions: boolean;
|
||||
loadOptions: (query: string) => Promise<Array<SelectableValue<T>>>;
|
||||
loadOptions: (query: string) => Promise<Array<SelectOptionItem<T>>>;
|
||||
loadingMessage?: () => string;
|
||||
}
|
||||
|
||||
|
||||
@@ -3,10 +3,11 @@ import { interval, Subscription, Subject, of, NEVER } from 'rxjs';
|
||||
import { tap, switchMap } from 'rxjs/operators';
|
||||
import _ from 'lodash';
|
||||
|
||||
import { stringToMs, SelectableValue } from '@grafana/data';
|
||||
import { stringToMs } from '@grafana/data';
|
||||
import { isLive } from '../RefreshPicker/RefreshPicker';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
|
||||
export function getIntervalFromString(strInterval: string): SelectableValue<number> {
|
||||
export function getIntervalFromString(strInterval: string): SelectOptionItem<number> {
|
||||
return {
|
||||
label: strInterval,
|
||||
value: stringToMs(strInterval),
|
||||
|
||||
@@ -8,10 +8,10 @@ import { StatsPicker } from '../StatsPicker/StatsPicker';
|
||||
|
||||
// Types
|
||||
import { FieldDisplayOptions, DEFAULT_FIELD_DISPLAY_VALUES_LIMIT } from '../../utils/fieldDisplay';
|
||||
import Select from '../Select/Select';
|
||||
import { Field, ReducerID, toNumberString, toIntegerOrUndefined, SelectableValue } from '@grafana/data';
|
||||
import Select, { SelectOptionItem } from '../Select/Select';
|
||||
import { Field, ReducerID, toNumberString, toIntegerOrUndefined } from '@grafana/data';
|
||||
|
||||
const showOptions: Array<SelectableValue<boolean>> = [
|
||||
const showOptions: Array<SelectOptionItem<boolean>> = [
|
||||
{
|
||||
value: true,
|
||||
label: 'All Values',
|
||||
@@ -31,7 +31,7 @@ export interface Props {
|
||||
}
|
||||
|
||||
export class FieldDisplayEditor extends PureComponent<Props> {
|
||||
onShowValuesChange = (item: SelectableValue<boolean>) => {
|
||||
onShowValuesChange = (item: SelectOptionItem<boolean>) => {
|
||||
const val = item.value === true;
|
||||
this.props.onChange({ ...this.props.value, values: val });
|
||||
};
|
||||
|
||||
@@ -7,7 +7,8 @@ import { FormLabel } from '../FormLabel/FormLabel';
|
||||
import { UnitPicker } from '../UnitPicker/UnitPicker';
|
||||
|
||||
// Types
|
||||
import { toIntegerOrUndefined, Field, SelectableValue } from '@grafana/data';
|
||||
import { toIntegerOrUndefined, Field } from '@grafana/data';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
|
||||
import { VAR_SERIES_NAME, VAR_FIELD_NAME, VAR_CALC, VAR_CELL_PREFIX } from '../../utils/fieldDisplay';
|
||||
|
||||
@@ -53,7 +54,7 @@ export const FieldPropertiesEditor: React.FC<Props> = ({ value, onChange, showMi
|
||||
[value.max, onChange]
|
||||
);
|
||||
|
||||
const onUnitChange = (unit: SelectableValue<string>) => {
|
||||
const onUnitChange = (unit: SelectOptionItem<string>) => {
|
||||
onChange({ ...value, unit: unit.value });
|
||||
};
|
||||
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
import { sharedSingleStatMigrationCheck } from './SingleStatBaseOptions';
|
||||
|
||||
describe('sharedSingleStatMigrationCheck', () => {
|
||||
it('from old valueOptions model without pluginVersion', () => {
|
||||
const panel = {
|
||||
options: {
|
||||
valueOptions: {
|
||||
unit: 'watt',
|
||||
stat: 'last',
|
||||
decimals: 5,
|
||||
},
|
||||
minValue: 10,
|
||||
maxValue: 100,
|
||||
valueMappings: [{ type: 1, value: '1', text: 'OK' }],
|
||||
thresholds: [
|
||||
{
|
||||
color: 'green',
|
||||
index: 0,
|
||||
value: null,
|
||||
},
|
||||
{
|
||||
color: 'orange',
|
||||
index: 1,
|
||||
value: 40,
|
||||
},
|
||||
{
|
||||
color: 'red',
|
||||
index: 2,
|
||||
value: 80,
|
||||
},
|
||||
],
|
||||
},
|
||||
title: 'Usage',
|
||||
type: 'bargauge',
|
||||
};
|
||||
|
||||
expect(sharedSingleStatMigrationCheck(panel as any)).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
@@ -3,7 +3,7 @@ import omit from 'lodash/omit';
|
||||
|
||||
import { VizOrientation, PanelModel } from '../../types/panel';
|
||||
import { FieldDisplayOptions } from '../../utils/fieldDisplay';
|
||||
import { fieldReducers, Threshold, sortThresholds } from '@grafana/data';
|
||||
import { Field, getFieldReducers } from '@grafana/data';
|
||||
|
||||
export interface SingleStatBaseOptions {
|
||||
fieldOptions: FieldDisplayOptions;
|
||||
@@ -25,99 +25,40 @@ export const sharedSingleStatOptionsCheck = (
|
||||
return options;
|
||||
};
|
||||
|
||||
export function sharedSingleStatMigrationCheck(panel: PanelModel<SingleStatBaseOptions>) {
|
||||
export const sharedSingleStatMigrationCheck = (panel: PanelModel<SingleStatBaseOptions>) => {
|
||||
if (!panel.options) {
|
||||
// This happens on the first load or when migrating from angular
|
||||
return {};
|
||||
}
|
||||
|
||||
const previousVersion = parseFloat(panel.pluginVersion || '6.1');
|
||||
let options = panel.options as any;
|
||||
// This migration aims to keep the most recent changes up-to-date
|
||||
// Plugins should explicitly migrate for known version changes and only use this
|
||||
// as a backup
|
||||
const old = panel.options as any;
|
||||
if (old.valueOptions) {
|
||||
const { valueOptions } = old;
|
||||
|
||||
if (previousVersion < 6.2) {
|
||||
options = migrateFromValueOptions(options);
|
||||
}
|
||||
const fieldOptions = (old.fieldOptions = {} as FieldDisplayOptions);
|
||||
fieldOptions.mappings = old.valueMappings;
|
||||
fieldOptions.thresholds = old.thresholds;
|
||||
|
||||
if (previousVersion < 6.3) {
|
||||
options = moveThresholdsAndMappingsToField(options);
|
||||
}
|
||||
const field = (fieldOptions.defaults = {} as Field);
|
||||
if (valueOptions) {
|
||||
field.unit = valueOptions.unit;
|
||||
field.decimals = valueOptions.decimals;
|
||||
|
||||
return options as SingleStatBaseOptions;
|
||||
}
|
||||
|
||||
export function moveThresholdsAndMappingsToField(old: any) {
|
||||
const { fieldOptions } = old;
|
||||
|
||||
if (!fieldOptions) {
|
||||
return old;
|
||||
}
|
||||
|
||||
const { mappings, thresholds, ...rest } = old.fieldOptions;
|
||||
|
||||
return {
|
||||
...old,
|
||||
fieldOptions: {
|
||||
...rest,
|
||||
defaults: {
|
||||
...fieldOptions.defaults,
|
||||
mappings,
|
||||
thresholds: migrateOldThresholds(thresholds),
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/*
|
||||
* Moves valueMappings and thresholds from root to new fieldOptions object
|
||||
* Renames valueOptions to to defaults and moves it under fieldOptions
|
||||
*/
|
||||
export function migrateFromValueOptions(old: any) {
|
||||
const { valueOptions } = old;
|
||||
if (!valueOptions) {
|
||||
return old;
|
||||
}
|
||||
|
||||
const fieldOptions: any = {};
|
||||
const fieldDefaults: any = {};
|
||||
|
||||
fieldOptions.mappings = old.valueMappings;
|
||||
fieldOptions.thresholds = old.thresholds;
|
||||
fieldOptions.defaults = fieldDefaults;
|
||||
|
||||
fieldDefaults.unit = valueOptions.unit;
|
||||
fieldDefaults.decimals = valueOptions.decimals;
|
||||
|
||||
// Make sure the stats have a valid name
|
||||
if (valueOptions.stat) {
|
||||
const reducer = fieldReducers.get(valueOptions.stat);
|
||||
if (reducer) {
|
||||
fieldOptions.calcs = [reducer.id];
|
||||
// Make sure the stats have a valid name
|
||||
if (valueOptions.stat) {
|
||||
fieldOptions.calcs = getFieldReducers([valueOptions.stat]).map(s => s.id);
|
||||
}
|
||||
}
|
||||
|
||||
field.min = old.minValue;
|
||||
field.max = old.maxValue;
|
||||
|
||||
// remove old props
|
||||
return omit(old, 'valueMappings', 'thresholds', 'valueOptions', 'minValue', 'maxValue');
|
||||
}
|
||||
|
||||
fieldDefaults.min = old.minValue;
|
||||
fieldDefaults.max = old.maxValue;
|
||||
|
||||
const newOptions = {
|
||||
...old,
|
||||
fieldOptions,
|
||||
};
|
||||
|
||||
return omit(newOptions, 'valueMappings', 'thresholds', 'valueOptions', 'minValue', 'maxValue');
|
||||
}
|
||||
|
||||
export function migrateOldThresholds(thresholds?: any[]): Threshold[] | undefined {
|
||||
if (!thresholds || !thresholds.length) {
|
||||
return undefined;
|
||||
}
|
||||
const copy = thresholds.map(t => {
|
||||
return {
|
||||
// Drops 'index'
|
||||
value: t.value === null ? -Infinity : t.value,
|
||||
color: t.color,
|
||||
};
|
||||
});
|
||||
sortThresholds(copy);
|
||||
copy[0].value = -Infinity;
|
||||
return copy;
|
||||
}
|
||||
return panel.options;
|
||||
};
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`sharedSingleStatMigrationCheck from old valueOptions model without pluginVersion 1`] = `
|
||||
Object {
|
||||
"fieldOptions": Object {
|
||||
"calcs": Array [
|
||||
"last",
|
||||
],
|
||||
"defaults": Object {
|
||||
"decimals": 5,
|
||||
"mappings": Array [
|
||||
Object {
|
||||
"text": "OK",
|
||||
"type": 1,
|
||||
"value": "1",
|
||||
},
|
||||
],
|
||||
"max": 100,
|
||||
"min": 10,
|
||||
"thresholds": Array [
|
||||
Object {
|
||||
"color": "green",
|
||||
"value": -Infinity,
|
||||
},
|
||||
Object {
|
||||
"color": "orange",
|
||||
"value": 40,
|
||||
},
|
||||
Object {
|
||||
"color": "red",
|
||||
"value": 80,
|
||||
},
|
||||
],
|
||||
"unit": "watt",
|
||||
},
|
||||
},
|
||||
}
|
||||
`;
|
||||
@@ -5,7 +5,8 @@ import difference from 'lodash/difference';

import { Select } from '../index';

import { fieldReducers, SelectableValue } from '@grafana/data';
import { getFieldReducers } from '@grafana/data';
import { SelectOptionItem } from '../Select/Select';

interface Props {
  placeholder?: string;
@@ -33,7 +34,7 @@ export class StatsPicker extends PureComponent<Props> {
  checkInput = () => {
    const { stats, allowMultiple, defaultStat, onChange } = this.props;

    const current = fieldReducers.list(stats);
    const current = getFieldReducers(stats);
    if (current.length !== stats.length) {
      const found = current.map(v => v.id);
      const notFound = difference(stats, found);
@@ -53,7 +54,7 @@ export class StatsPicker extends PureComponent<Props> {
    }
  };

  onSelectionChange = (item: SelectableValue<string>) => {
  onSelectionChange = (item: SelectOptionItem<string>) => {
    const { onChange } = this.props;
    if (isArray(item)) {
      onChange(item.map(v => v.value));
@@ -64,16 +65,24 @@ export class StatsPicker extends PureComponent<Props> {

  render() {
    const { width, stats, allowMultiple, defaultStat, placeholder } = this.props;
    const options = getFieldReducers().map(s => {
      return {
        value: s.id,
        label: s.name,
        description: s.description,
      };
    });

    const value: Array<SelectOptionItem<string>> = options.filter(option => stats.find(stat => option.value === stat));

    const select = fieldReducers.selectOptions(stats);
    return (
      <Select
        width={width}
        value={select.current}
        value={value}
        isClearable={!defaultStat}
        isMulti={allowMultiple}
        isSearchable={true}
        options={select.options}
        options={options}
        placeholder={placeholder}
        onChange={this.onSelectionChange}
      />

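For orientation, a hedged usage sketch of the picker edited above. The prop names are the ones destructured in the hunk; the surrounding component, export path, and values are made up:

```tsx
import React from 'react';
import { StatsPicker } from '@grafana/ui'; // export path assumed

// Lets the user pick one or more reducers (e.g. 'last', 'mean'); onChange receives the ids.
export const ReducerField = () => (
  <StatsPicker
    width={12}
    allowMultiple={true}
    stats={['last']}
    defaultStat="last"
    placeholder="Choose stat"
    onChange={(stats: string[]) => console.log('selected reducers', stats)}
  />
);
```
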
@@ -1,6 +1,6 @@
|
||||
import React, { ChangeEvent } from 'react';
|
||||
import { mount } from 'enzyme';
|
||||
import { ThresholdsEditor, Props, threshodsWithoutKey } from './ThresholdsEditor';
|
||||
import { ThresholdsEditor, Props } from './ThresholdsEditor';
|
||||
import { colors } from '../../utils';
|
||||
|
||||
const setup = (propOverrides?: Partial<Props>) => {
|
||||
@@ -20,10 +20,6 @@ const setup = (propOverrides?: Partial<Props>) => {
|
||||
};
|
||||
};
|
||||
|
||||
function getCurrentThresholds(editor: ThresholdsEditor) {
|
||||
return threshodsWithoutKey(editor.state.thresholds);
|
||||
}
|
||||
|
||||
describe('Render', () => {
|
||||
it('should render with base threshold', () => {
|
||||
const { wrapper } = setup();
|
||||
@@ -36,55 +32,60 @@ describe('Initialization', () => {
|
||||
it('should add a base threshold if missing', () => {
|
||||
const { instance } = setup();
|
||||
|
||||
expect(getCurrentThresholds(instance)).toEqual([{ value: -Infinity, color: colors[0] }]);
|
||||
expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Add threshold', () => {
|
||||
it('should not add threshold at index 0', () => {
|
||||
const { instance } = setup();
|
||||
|
||||
instance.onAddThreshold(0);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
|
||||
});
|
||||
|
||||
it('should add threshold', () => {
|
||||
const { instance } = setup();
|
||||
|
||||
instance.onAddThresholdAfter(instance.state.thresholds[0]);
|
||||
instance.onAddThreshold(1);
|
||||
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: colors[0] }, // 0
|
||||
{ value: 50, color: colors[2] }, // 1
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should add another threshold above a first', () => {
|
||||
const { instance } = setup({
|
||||
thresholds: [
|
||||
{ value: -Infinity, color: colors[0] }, // 0
|
||||
{ value: 50, color: colors[2] }, // 1
|
||||
],
|
||||
thresholds: [{ index: 0, value: -Infinity, color: colors[0] }, { index: 1, value: 50, color: colors[2] }],
|
||||
});
|
||||
|
||||
instance.onAddThresholdAfter(instance.state.thresholds[1]);
|
||||
instance.onAddThreshold(2);
|
||||
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: colors[0] }, // 0
|
||||
{ value: 50, color: colors[2] }, // 1
|
||||
{ value: 75, color: colors[3] }, // 2
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
{ index: 2, value: 75, color: colors[3] },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should add another threshold between first and second index', () => {
|
||||
const { instance } = setup({
|
||||
thresholds: [
|
||||
{ value: -Infinity, color: colors[0] },
|
||||
{ value: 50, color: colors[2] },
|
||||
{ value: 75, color: colors[3] },
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
{ index: 2, value: 75, color: colors[3] },
|
||||
],
|
||||
});
|
||||
|
||||
instance.onAddThresholdAfter(instance.state.thresholds[1]);
|
||||
instance.onAddThreshold(2);
|
||||
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: colors[0] },
|
||||
{ value: 50, color: colors[2] },
|
||||
{ value: 62.5, color: colors[4] },
|
||||
{ value: 75, color: colors[3] },
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
{ index: 2, value: 62.5, color: colors[4] },
|
||||
{ index: 3, value: 75, color: colors[3] },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -92,30 +93,30 @@ describe('Add threshold', () => {
|
||||
describe('Remove threshold', () => {
|
||||
it('should not remove threshold at index 0', () => {
|
||||
const thresholds = [
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
const { instance } = setup({ thresholds });
|
||||
|
||||
instance.onRemoveThreshold(instance.state.thresholds[0]);
|
||||
instance.onRemoveThreshold(thresholds[0]);
|
||||
|
||||
expect(getCurrentThresholds(instance)).toEqual(thresholds);
|
||||
expect(instance.state.thresholds).toEqual(thresholds);
|
||||
});
|
||||
|
||||
it('should remove threshold', () => {
|
||||
const thresholds = [
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
const { instance } = setup({ thresholds });
|
||||
|
||||
instance.onRemoveThreshold(instance.state.thresholds[1]);
|
||||
instance.onRemoveThreshold(thresholds[1]);
|
||||
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 75, color: '#6ED0E0' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -123,25 +124,25 @@ describe('Remove threshold', () => {
|
||||
describe('change threshold value', () => {
|
||||
it('should not change threshold at index 0', () => {
|
||||
const thresholds = [
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
const { instance } = setup({ thresholds });
|
||||
|
||||
const mockEvent = ({ target: { value: '12' } } as any) as ChangeEvent<HTMLInputElement>;
|
||||
|
||||
instance.onChangeThresholdValue(mockEvent, instance.state.thresholds[0]);
|
||||
instance.onChangeThresholdValue(mockEvent, thresholds[0]);
|
||||
|
||||
expect(getCurrentThresholds(instance)).toEqual(thresholds);
|
||||
expect(instance.state.thresholds).toEqual(thresholds);
|
||||
});
|
||||
|
||||
it('should update value', () => {
|
||||
const { instance } = setup();
|
||||
const thresholds = [
|
||||
{ value: -Infinity, color: '#7EB26D', key: 1 },
|
||||
{ value: 50, color: '#EAB839', key: 2 },
|
||||
{ value: 75, color: '#6ED0E0', key: 3 },
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
|
||||
instance.state = {
|
||||
@@ -152,10 +153,10 @@ describe('change threshold value', () => {
|
||||
|
||||
instance.onChangeThresholdValue(mockEvent, thresholds[1]);
|
||||
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 78, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 78, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -164,9 +165,9 @@ describe('on blur threshold value', () => {
|
||||
it('should resort rows and update indexes', () => {
|
||||
const { instance } = setup();
|
||||
const thresholds = [
|
||||
{ value: -Infinity, color: '#7EB26D', key: 1 },
|
||||
{ value: 78, color: '#EAB839', key: 2 },
|
||||
{ value: 75, color: '#6ED0E0', key: 3 },
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 78, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
|
||||
instance.setState({
|
||||
@@ -175,10 +176,10 @@ describe('on blur threshold value', () => {
|
||||
|
||||
instance.onBlur();
|
||||
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
{ value: 78, color: '#EAB839' },
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 75, color: '#6ED0E0' },
|
||||
{ index: 2, value: 78, color: '#EAB839' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import React, { PureComponent, ChangeEvent } from 'react';
|
||||
import { Threshold, sortThresholds } from '@grafana/data';
|
||||
import { Threshold } from '@grafana/data';
|
||||
import { colors } from '../../utils';
|
||||
import { ThemeContext } from '../../themes';
|
||||
import { getColorFromHexRgbOrName } from '../../utils';
|
||||
@@ -13,121 +13,115 @@ export interface Props {
|
||||
}
|
||||
|
||||
interface State {
|
||||
thresholds: ThresholdWithKey[];
|
||||
thresholds: Threshold[];
|
||||
}
|
||||
|
||||
interface ThresholdWithKey extends Threshold {
|
||||
key: number;
|
||||
}
|
||||
|
||||
let counter = 100;
|
||||
|
||||
export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
constructor(props: Props) {
|
||||
super(props);
|
||||
|
||||
const thresholds = props.thresholds
|
||||
? props.thresholds.map(t => {
|
||||
return {
|
||||
color: t.color,
|
||||
value: t.value === null ? -Infinity : t.value,
|
||||
key: counter++,
|
||||
};
|
||||
})
|
||||
: ([] as ThresholdWithKey[]);
|
||||
|
||||
let needsCallback = false;
|
||||
if (!thresholds.length) {
|
||||
thresholds.push({ value: -Infinity, color: colors[0], key: counter++ });
|
||||
needsCallback = true;
|
||||
} else {
|
||||
// First value is always base
|
||||
thresholds[0].value = -Infinity;
|
||||
}
|
||||
|
||||
// Update the state
|
||||
const addDefaultThreshold = this.props.thresholds.length === 0;
|
||||
const thresholds: Threshold[] = addDefaultThreshold
|
||||
? [{ index: 0, value: -Infinity, color: colors[0] }]
|
||||
: props.thresholds;
|
||||
this.state = { thresholds };
|
||||
|
||||
if (needsCallback) {
|
||||
if (addDefaultThreshold) {
|
||||
this.onChange();
|
||||
}
|
||||
}
|
||||
|
||||
onAddThresholdAfter = (threshold: ThresholdWithKey) => {
|
||||
onAddThreshold = (index: number) => {
|
||||
const { thresholds } = this.state;
|
||||
|
||||
const maxValue = 100;
|
||||
const minValue = 0;
|
||||
|
||||
let prev: ThresholdWithKey | undefined = undefined;
|
||||
let next: ThresholdWithKey | undefined = undefined;
|
||||
for (const t of thresholds) {
|
||||
if (prev && prev.key === threshold.key) {
|
||||
next = t;
|
||||
break;
|
||||
if (index === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const newThresholds = thresholds.map(threshold => {
|
||||
if (threshold.index >= index) {
|
||||
const index = threshold.index + 1;
|
||||
threshold = { ...threshold, index };
|
||||
}
|
||||
prev = t;
|
||||
}
|
||||
return threshold;
|
||||
});
|
||||
|
||||
const prevValue = prev && isFinite(prev.value) ? prev.value : minValue;
|
||||
const nextValue = next && isFinite(next.value) ? next.value : maxValue;
|
||||
// Setting value to a value between the previous thresholds
|
||||
const beforeThreshold = newThresholds.filter(t => t.index === index - 1 && t.index !== 0)[0];
|
||||
const afterThreshold = newThresholds.filter(t => t.index === index + 1 && t.index !== 0)[0];
|
||||
const beforeThresholdValue = beforeThreshold !== undefined ? beforeThreshold.value : minValue;
|
||||
const afterThresholdValue = afterThreshold !== undefined ? afterThreshold.value : maxValue;
|
||||
const value = afterThresholdValue - (afterThresholdValue - beforeThresholdValue) / 2;
|
||||
|
||||
const color = colors.filter(c => !thresholds.some(t => t.color === c))[1];
|
||||
const add = {
|
||||
value: prevValue + (nextValue - prevValue) / 2.0,
|
||||
color: color,
|
||||
key: counter++,
|
||||
};
|
||||
const newThresholds = [...thresholds, add];
|
||||
sortThresholds(newThresholds);
|
||||
// Set a color
|
||||
const color = colors.filter(c => !newThresholds.some(t => t.color === c))[1];
|
||||
|
||||
this.setState(
|
||||
{
|
||||
thresholds: newThresholds,
|
||||
thresholds: this.sortThresholds([
|
||||
...newThresholds,
|
||||
{
|
||||
color,
|
||||
index,
|
||||
value: value as number,
|
||||
},
|
||||
]),
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onRemoveThreshold = (threshold: ThresholdWithKey) => {
|
||||
onRemoveThreshold = (threshold: Threshold) => {
|
||||
if (threshold.index === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.setState(
|
||||
prevState => {
|
||||
const newThresholds = prevState.thresholds.map(t => {
|
||||
if (t.index > threshold.index) {
|
||||
const index = t.index - 1;
|
||||
t = { ...t, index };
|
||||
}
|
||||
return t;
|
||||
});
|
||||
|
||||
return {
|
||||
thresholds: newThresholds.filter(t => t !== threshold),
|
||||
};
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: Threshold) => {
|
||||
if (threshold.index === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { thresholds } = this.state;
|
||||
if (!thresholds.length) {
|
||||
return;
|
||||
}
|
||||
// Don't remove index 0
|
||||
if (threshold.key === thresholds[0].key) {
|
||||
return;
|
||||
}
|
||||
this.setState(
|
||||
{
|
||||
thresholds: thresholds.filter(t => t.key !== threshold.key),
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: ThresholdWithKey) => {
|
||||
const cleanValue = event.target.value.replace(/,/g, '.');
|
||||
const parsedValue = parseFloat(cleanValue);
|
||||
const value = isNaN(parsedValue) ? '' : parsedValue;
|
||||
|
||||
const thresholds = this.state.thresholds.map(t => {
|
||||
if (t.key === threshold.key) {
|
||||
const newThresholds = thresholds.map(t => {
|
||||
if (t === threshold && t.index !== 0) {
|
||||
t = { ...t, value: value as number };
|
||||
}
|
||||
|
||||
return t;
|
||||
});
|
||||
if (thresholds.length) {
|
||||
thresholds[0].value = -Infinity;
|
||||
}
|
||||
this.setState({ thresholds });
|
||||
|
||||
this.setState({ thresholds: newThresholds });
|
||||
};
|
||||
|
||||
onChangeThresholdColor = (threshold: ThresholdWithKey, color: string) => {
|
||||
onChangeThresholdColor = (threshold: Threshold, color: string) => {
|
||||
const { thresholds } = this.state;
|
||||
|
||||
const newThresholds = thresholds.map(t => {
|
||||
if (t.key === threshold.key) {
|
||||
if (t === threshold) {
|
||||
t = { ...t, color: color };
|
||||
}
|
||||
|
||||
@@ -143,22 +137,30 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
};
|
||||
|
||||
onBlur = () => {
|
||||
const thresholds = [...this.state.thresholds];
|
||||
sortThresholds(thresholds);
|
||||
this.setState(
|
||||
{
|
||||
thresholds,
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
this.setState(prevState => {
|
||||
const sortThresholds = this.sortThresholds([...prevState.thresholds]);
|
||||
let index = 0;
|
||||
sortThresholds.forEach(t => {
|
||||
t.index = index++;
|
||||
});
|
||||
|
||||
return { thresholds: sortThresholds };
|
||||
});
|
||||
|
||||
this.onChange();
|
||||
};
|
||||
|
||||
onChange = () => {
|
||||
const { thresholds } = this.state;
|
||||
this.props.onChange(threshodsWithoutKey(thresholds));
|
||||
this.props.onChange(this.state.thresholds);
|
||||
};
|
||||
|
||||
renderInput = (threshold: ThresholdWithKey) => {
|
||||
sortThresholds = (thresholds: Threshold[]) => {
|
||||
return thresholds.sort((t1, t2) => {
|
||||
return t1.value - t2.value;
|
||||
});
|
||||
};
|
||||
|
||||
renderInput = (threshold: Threshold) => {
|
||||
return (
|
||||
<div className="thresholds-row-input-inner">
|
||||
<span className="thresholds-row-input-inner-arrow" />
|
||||
@@ -173,11 +175,12 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{!isFinite(threshold.value) ? (
|
||||
{threshold.index === 0 && (
|
||||
<div className="thresholds-row-input-inner-value">
|
||||
<Input type="text" value="Base" readOnly />
|
||||
</div>
|
||||
) : (
|
||||
)}
|
||||
{threshold.index > 0 && (
|
||||
<>
|
||||
<div className="thresholds-row-input-inner-value">
|
||||
<Input
|
||||
@@ -186,6 +189,7 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
onChange={(event: ChangeEvent<HTMLInputElement>) => this.onChangeThresholdValue(event, threshold)}
|
||||
value={threshold.value}
|
||||
onBlur={this.onBlur}
|
||||
readOnly={threshold.index === 0}
|
||||
/>
|
||||
</div>
|
||||
<div className="thresholds-row-input-inner-remove" onClick={() => this.onRemoveThreshold(threshold)}>
|
||||
@@ -208,10 +212,13 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
{thresholds
|
||||
.slice(0)
|
||||
.reverse()
|
||||
.map(threshold => {
|
||||
.map((threshold, index) => {
|
||||
return (
|
||||
<div className="thresholds-row" key={`${threshold.key}`}>
|
||||
<div className="thresholds-row-add-button" onClick={() => this.onAddThresholdAfter(threshold)}>
|
||||
<div className="thresholds-row" key={`${threshold.index}-${index}`}>
|
||||
<div
|
||||
className="thresholds-row-add-button"
|
||||
onClick={() => this.onAddThreshold(threshold.index + 1)}
|
||||
>
|
||||
<i className="fa fa-plus" />
|
||||
</div>
|
||||
<div
|
||||
@@ -230,10 +237,3 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function threshodsWithoutKey(thresholds: ThresholdWithKey[]): Threshold[] {
|
||||
return thresholds.map(t => {
|
||||
const { key, ...rest } = t;
|
||||
return rest; // everything except key
|
||||
});
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ exports[`Render should render with base threshold 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"color": "#7EB26D",
|
||||
"index": 0,
|
||||
"value": -Infinity,
|
||||
},
|
||||
],
|
||||
@@ -47,7 +48,7 @@ exports[`Render should render with base threshold 1`] = `
|
||||
>
|
||||
<div
|
||||
className="thresholds-row"
|
||||
key="100"
|
||||
key="0-0"
|
||||
>
|
||||
<div
|
||||
className="thresholds-row-add-button"
|
||||
|
||||
@@ -8,13 +8,13 @@ import { TimePickerPopover } from './TimePickerPopover';
|
||||
import { ClickOutsideWrapper } from '../ClickOutsideWrapper/ClickOutsideWrapper';
|
||||
|
||||
// Utils & Services
|
||||
import { isDateTime, DateTime } from '@grafana/data';
|
||||
import { isDateTime } from '@grafana/data';
|
||||
import { rangeUtil } from '@grafana/data';
|
||||
import { rawToTimeRange } from './time';
|
||||
|
||||
// Types
|
||||
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT, SelectableValue } from '@grafana/data';
|
||||
import { isMathString } from '@grafana/data/src/utils/datemath';
|
||||
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT } from '@grafana/data';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
|
||||
export interface Props {
|
||||
value: TimeRange;
|
||||
@@ -77,7 +77,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
isCustomOpen: false,
|
||||
};
|
||||
|
||||
mapTimeOptionsToSelectableValues = (selectOptions: TimeOption[]) => {
|
||||
mapTimeOptionsToSelectOptionItems = (selectOptions: TimeOption[]) => {
|
||||
const options = selectOptions.map(timeOption => {
|
||||
return {
|
||||
label: timeOption.display,
|
||||
@@ -93,7 +93,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
return options;
|
||||
};
|
||||
|
||||
onSelectChanged = (item: SelectableValue<TimeOption>) => {
|
||||
onSelectChanged = (item: SelectOptionItem<TimeOption>) => {
|
||||
const { onChange, timeZone } = this.props;
|
||||
|
||||
if (item.value && item.value.from === 'custom') {
|
||||
@@ -122,23 +122,15 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
render() {
|
||||
const { selectOptions: selectTimeOptions, value, onMoveBackward, onMoveForward, onZoom, timeZone } = this.props;
|
||||
const { isCustomOpen } = this.state;
|
||||
const options = this.mapTimeOptionsToSelectableValues(selectTimeOptions);
|
||||
const options = this.mapTimeOptionsToSelectOptionItems(selectTimeOptions);
|
||||
const currentOption = options.find(item => isTimeOptionEqualToTimeRange(item.value, value));
|
||||
|
||||
const isUTC = timeZone === 'utc';
|
||||
|
||||
const adjustedTime = (time: DateTime) => (isUTC ? time.utc() : time.local()) || null;
|
||||
const adjustedTimeRange = {
|
||||
to: isMathString(value.raw.to) ? value.raw.to : adjustedTime(value.to),
|
||||
from: isMathString(value.raw.from) ? value.raw.from : adjustedTime(value.from),
|
||||
};
|
||||
const rangeString = rangeUtil.describeTimeRange(adjustedTimeRange);
|
||||
const rangeString = rangeUtil.describeTimeRange(value.raw);
|
||||
|
||||
const label = (
|
||||
<>
|
||||
{isCustomOpen && <span>Custom time range</span>}
|
||||
{!isCustomOpen && <span>{rangeString}</span>}
|
||||
{isUTC && <span className="time-picker-utc">UTC</span>}
|
||||
{timeZone === 'utc' && <span className="time-picker-utc">UTC</span>}
|
||||
</>
|
||||
);
|
||||
const isAbsolute = isDateTime(value.raw.to);
|
||||
@@ -156,7 +148,6 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
value={currentOption}
|
||||
label={label}
|
||||
options={options}
|
||||
maxMenuHeight={600}
|
||||
onChange={this.onSelectChanged}
|
||||
iconClass={'fa fa-clock-o fa-fw'}
|
||||
tooltipContent={<TimePickerTooltipContent timeRange={value} />}
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
|
||||
.time-picker-popover {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: space-around;
|
||||
border: 1px solid $popover-border-color;
|
||||
border-radius: $border-radius;
|
||||
@@ -30,41 +31,41 @@
|
||||
max-width: 600px;
|
||||
top: 41px;
|
||||
right: 0px;
|
||||
}
|
||||
|
||||
.time-picker-popover-body {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: space-around;
|
||||
padding: $space-md;
|
||||
padding-bottom: 0;
|
||||
}
|
||||
|
||||
.time-picker-popover-title {
|
||||
font-size: $font-size-md;
|
||||
font-weight: $font-weight-semi-bold;
|
||||
}
|
||||
|
||||
.time-picker-popover-body-custom-ranges:first-child {
|
||||
margin-right: $space-md;
|
||||
}
|
||||
|
||||
.time-picker-popover-body-custom-ranges-input {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
align-items: center;
|
||||
margin-bottom: $space-sm;
|
||||
|
||||
.time-picker-input-error {
|
||||
box-shadow: inset 0 0px 5px $red;
|
||||
.time-picker-popover-body {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: space-around;
|
||||
padding: $space-md;
|
||||
padding-bottom: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.time-picker-popover-footer {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: center;
|
||||
padding: $space-md;
|
||||
.time-picker-popover-title {
|
||||
font-size: $font-size-md;
|
||||
font-weight: $font-weight-semi-bold;
|
||||
}
|
||||
|
||||
.time-picker-popover-body-custom-ranges:first-child {
|
||||
margin-right: $space-md;
|
||||
}
|
||||
|
||||
.time-picker-popover-body-custom-ranges-input {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
align-items: center;
|
||||
margin-bottom: $space-sm;
|
||||
|
||||
.time-picker-input-error {
|
||||
box-shadow: inset 0 0px 5px $red;
|
||||
}
|
||||
}
|
||||
|
||||
.time-picker-popover-footer {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: center;
|
||||
padding: $space-md;
|
||||
}
|
||||
}
|
||||
|
||||
.time-picker-popover-header {
|
||||
|
||||
@@ -1,6 +1,6 @@
export { DeleteButton } from './DeleteButton/DeleteButton';
export { Tooltip } from './Tooltip/Tooltip';
export { PopperController, PopperContent } from './Tooltip/PopperController';
export { PopperController } from './Tooltip/PopperController';
export { Popper } from './Tooltip/Popper';
export { Portal } from './Portal/Portal';
export { CustomScrollbar } from './CustomScrollbar/CustomScrollbar';
@@ -9,7 +9,7 @@ export * from './Button/Button';
export { ButtonVariant } from './Button/AbstractButton';

// Select
export { Select, AsyncSelect } from './Select/Select';
export { Select, AsyncSelect, SelectOptionItem } from './Select/Select';
export { IndicatorsContainer } from './Select/IndicatorsContainer';
export { NoOptionsMessage } from './Select/NoOptionsMessage';
export { default as resetSelectStyles } from './Select/resetSelectStyles';

@@ -77,13 +77,6 @@ interface PluginMetaInfoLink {
  url: string;
}

export interface PluginBuildInfo {
  time?: number;
  repo?: string;
  branch?: string;
  hash?: string;
}

export interface PluginMetaInfo {
  author: {
    name: string;
@@ -95,7 +88,6 @@ export interface PluginMetaInfo {
    large: string;
    small: string;
  };
  build?: PluginBuildInfo;
  screenshots: any[];
  updated: string;
  version: string;

@@ -1,7 +1,4 @@
export const deprecationWarning = (file: string, oldName: string, newName?: string) => {
  let message = `[Deprecation warning] ${file}: ${oldName} is deprecated`;
  if (newName) {
    message += `. Use ${newName} instead`;
  }
export const deprecationWarning = (file: string, oldName: string, newName: string) => {
  const message = `[Deprecation warning] ${file}: ${oldName} is deprecated. Use ${newName} instead`;
  console.warn(message);
};

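A quick usage sketch of the helper above, using the variant with the optional `newName` parameter shown in the hunk; the file and symbol names are only examples, and the export path is assumed:

```typescript
import { deprecationWarning } from '@grafana/ui'; // export path assumed

// "[Deprecation warning] StatsPicker.tsx: stats is deprecated. Use reducers instead"
deprecationWarning('StatsPicker.tsx', 'stats', 'reducers');

// With newName omitted it only notes the deprecation:
// "[Deprecation warning] StatsPicker.tsx: stats is deprecated"
deprecationWarning('StatsPicker.tsx', 'stats');
```
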
@@ -103,7 +103,7 @@ describe('Format value', () => {
|
||||
it('should return if value isNaN', () => {
|
||||
const valueMappings: ValueMapping[] = [];
|
||||
const value = 'N/A';
|
||||
const instance = getDisplayProcessor({ field: { mappings: valueMappings } });
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings });
|
||||
|
||||
const result = instance(value);
|
||||
|
||||
@@ -114,7 +114,7 @@ describe('Format value', () => {
|
||||
const valueMappings: ValueMapping[] = [];
|
||||
const value = '6';
|
||||
|
||||
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
||||
|
||||
const result = instance(value);
|
||||
|
||||
@@ -127,7 +127,7 @@ describe('Format value', () => {
|
||||
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
|
||||
];
|
||||
const value = '10';
|
||||
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
||||
|
||||
const result = instance(value);
|
||||
|
||||
@@ -160,7 +160,7 @@ describe('Format value', () => {
|
||||
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
|
||||
];
|
||||
const value = '11';
|
||||
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
||||
|
||||
expect(instance(value).text).toEqual('1-20');
|
||||
});
|
||||
|
||||
@@ -7,13 +7,16 @@ import { getColorFromHexRgbOrName } from './namedColorsPalette';

// Types
import { DecimalInfo, DisplayValue, GrafanaTheme, GrafanaThemeType, DecimalCount } from '../types';
import { DateTime, dateTime, Threshold, getMappedValue, Field } from '@grafana/data';
import { DateTime, dateTime, Threshold, ValueMapping, getMappedValue, Field } from '@grafana/data';

export type DisplayProcessor = (value: any) => DisplayValue;

export interface DisplayValueOptions {
  field?: Partial<Field>;

  mappings?: ValueMapping[];
  thresholds?: Threshold[];

  // Alternative to empty string
  noValue?: string;

@@ -28,8 +31,7 @@ export function getDisplayProcessor(options?: DisplayValueOptions): DisplayProce
  const formatFunc = getValueFormat(field.unit || 'none');

  return (value: any) => {
    const { theme } = options;
    const { mappings, thresholds } = field;
    const { mappings, thresholds, theme } = options;
    let color;

    let text = _.toString(value);

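To make the option shape above concrete, a small hedged sketch of building a display processor. It follows the `DisplayValueOptions` interface in this hunk (top-level `mappings`/`thresholds` plus an optional `Partial<Field>`); the import path is assumed, and the exact `Threshold` shape depends on which side of this change is applied:

```typescript
import { getDisplayProcessor } from './displayValue'; // path assumed

const process = getDisplayProcessor({
  mappings: [],                                       // ValueMapping[]
  thresholds: [{ value: -Infinity, color: 'green' }], // base threshold first
  field: { decimals: 1, unit: 'none' },
});

const display = process('6');
// display.text and display.numeric are populated; color, when set, comes from the matching threshold.
```
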
@@ -1,5 +1,5 @@
|
||||
import { getFieldProperties, getFieldDisplayValues, GetFieldDisplayValuesOptions } from './fieldDisplay';
|
||||
import { FieldType, ReducerID, Threshold } from '@grafana/data';
|
||||
import { FieldType, ReducerID } from '@grafana/data';
|
||||
import { GrafanaThemeType } from '../types/theme';
|
||||
import { getTheme } from '../themes/index';
|
||||
|
||||
@@ -55,6 +55,8 @@ describe('FieldDisplay', () => {
|
||||
},
|
||||
fieldOptions: {
|
||||
calcs: [],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
@@ -66,6 +68,8 @@ describe('FieldDisplay', () => {
|
||||
...options,
|
||||
fieldOptions: {
|
||||
calcs: [ReducerID.first],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {
|
||||
title: '$__cell_0 * $__field_name * $__series_name',
|
||||
@@ -84,6 +88,8 @@ describe('FieldDisplay', () => {
|
||||
...options,
|
||||
fieldOptions: {
|
||||
calcs: [ReducerID.last],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
@@ -98,6 +104,8 @@ describe('FieldDisplay', () => {
|
||||
values: true, //
|
||||
limit: 1000,
|
||||
calcs: [],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
@@ -112,53 +120,12 @@ describe('FieldDisplay', () => {
|
||||
values: true, //
|
||||
limit: 2,
|
||||
calcs: [],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
});
|
||||
expect(display.map(v => v.display.numeric)).toEqual([1, 3]); // First 2 are from the first field
|
||||
});
|
||||
|
||||
it('should restore -Infinity value for base threshold', () => {
|
||||
const field = getFieldProperties({
|
||||
thresholds: [
|
||||
({
|
||||
color: '#73BF69',
|
||||
value: null,
|
||||
} as unknown) as Threshold,
|
||||
{
|
||||
color: '#F2495C',
|
||||
value: 50,
|
||||
},
|
||||
],
|
||||
});
|
||||
expect(field.thresholds!.length).toEqual(2);
|
||||
expect(field.thresholds![0].value).toBe(-Infinity);
|
||||
});
|
||||
|
||||
it('Should return field thresholds when there is no data', () => {
|
||||
const options: GetFieldDisplayValuesOptions = {
|
||||
data: [
|
||||
{
|
||||
name: 'No data',
|
||||
fields: [],
|
||||
rows: [],
|
||||
},
|
||||
],
|
||||
replaceVariables: (value: string) => {
|
||||
return value;
|
||||
},
|
||||
fieldOptions: {
|
||||
calcs: [],
|
||||
override: {},
|
||||
defaults: {
|
||||
thresholds: [{ color: '#F2495C', value: 50 }],
|
||||
},
|
||||
},
|
||||
theme: getTheme(GrafanaThemeType.Dark),
|
||||
};
|
||||
|
||||
const display = getFieldDisplayValues(options);
|
||||
expect(display[0].field.thresholds!.length).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,7 +4,16 @@ import toString from 'lodash/toString';
|
||||
import { DisplayValue, GrafanaTheme, InterpolateFunction, ScopedVars, GraphSeriesValue } from '../types/index';
|
||||
import { getDisplayProcessor } from './displayValue';
|
||||
import { getFlotPairs } from './flotPairs';
|
||||
import { ReducerID, reduceField, FieldType, NullValueMode, DataFrame, Field } from '@grafana/data';
|
||||
import {
|
||||
ValueMapping,
|
||||
Threshold,
|
||||
ReducerID,
|
||||
reduceField,
|
||||
FieldType,
|
||||
NullValueMode,
|
||||
DataFrame,
|
||||
Field,
|
||||
} from '@grafana/data';
|
||||
|
||||
export interface FieldDisplayOptions {
|
||||
values?: boolean; // If true show each row value
|
||||
@@ -13,6 +22,10 @@ export interface FieldDisplayOptions {
|
||||
|
||||
defaults: Partial<Field>; // Use these values unless otherwise stated
|
||||
override: Partial<Field>; // Set these values regardless of the source
|
||||
|
||||
// Could these be data driven also?
|
||||
thresholds: Threshold[];
|
||||
mappings: ValueMapping[];
|
||||
}
|
||||
|
||||
export const VAR_SERIES_NAME = '__series_name';
|
||||
@@ -114,6 +127,8 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
|
||||
const display = getDisplayProcessor({
|
||||
field,
|
||||
mappings: fieldOptions.mappings,
|
||||
thresholds: fieldOptions.thresholds,
|
||||
theme: options.theme,
|
||||
});
|
||||
|
||||
@@ -182,10 +197,7 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
|
||||
if (values.length === 0) {
|
||||
values.push({
|
||||
field: {
|
||||
...defaults,
|
||||
name: 'No Data',
|
||||
},
|
||||
field: { name: 'No Data' },
|
||||
display: {
|
||||
numeric: 0,
|
||||
text: 'No data',
|
||||
@@ -247,16 +259,10 @@ type PartialField = Partial<Field>;
|
||||
|
||||
export function getFieldProperties(...props: PartialField[]): Field {
|
||||
let field = props[0] as Field;
|
||||
|
||||
for (let i = 1; i < props.length; i++) {
|
||||
field = applyFieldProperties(field, props[i]);
|
||||
}
|
||||
|
||||
// First value is always -Infinity
|
||||
if (field.thresholds && field.thresholds.length) {
|
||||
field.thresholds[0].value = -Infinity;
|
||||
}
|
||||
|
||||
// Verify that max > min
|
||||
if (field.hasOwnProperty('min') && field.hasOwnProperty('max') && field.min! > field.max!) {
|
||||
return {
|
||||
|
||||
@@ -1,11 +1,19 @@
{
  "extends": "../tsconfig.json",
  "extends": "../../tsconfig.json",
  "include": ["src/**/*.ts", "src/**/*.tsx"],
  "exclude": ["dist", "node_modules"],
  "compilerOptions": {
    "rootDirs": [".", "stories"],
    "typeRoots": ["./node_modules/@types", "types"],
    "module": "esnext",
    "outDir": "compiled",
    "declaration": true,
    "declarationDir": "dist",
    "outDir": "compiled"
    "strict": true,
    "alwaysStrict": true,
    "noImplicitAny": true,
    "strictNullChecks": true,
    "typeRoots": ["./node_modules/@types", "types"],
    "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
    "removeComments": false
  }
}

@@ -1,13 +0,0 @@
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "module": "esnext",
    "declaration": true,
    "strict": true,
    "alwaysStrict": true,
    "noImplicitAny": true,
    "strictNullChecks": true,
    "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
    "removeComments": false
  }
}

@@ -1,4 +1,4 @@
ARG BASE_IMAGE=ubuntu:18.04
ARG BASE_IMAGE=ubuntu:latest
FROM ${BASE_IMAGE}

ARG GRAFANA_TGZ="grafana-latest.linux-x64.tar.gz"
@@ -12,7 +12,7 @@ COPY ${GRAFANA_TGZ} /tmp/grafana.tar.gz
# Change to tar xfzv to make tar print every file it extracts
RUN mkdir /tmp/grafana && tar xfz /tmp/grafana.tar.gz --strip-components=1 -C /tmp/grafana

ARG BASE_IMAGE=ubuntu:18.04
ARG BASE_IMAGE=ubuntu:latest
FROM ${BASE_IMAGE}

ARG GF_UID="472"

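For reference, this is how the build arguments above are usually overridden when invoking the build by hand; the image tag is just an example:

```bash
# Pin the base image instead of relying on the Dockerfile default
docker build \
  --build-arg BASE_IMAGE=ubuntu:18.04 \
  --build-arg GRAFANA_TGZ=grafana-latest.linux-x64.tar.gz \
  -t grafana/grafana:custom .
```
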
@@ -59,16 +59,14 @@ docker_tag_all () {
|
||||
fi
|
||||
}
|
||||
|
||||
docker_build "ubuntu:18.04" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
|
||||
docker_build "ubuntu:latest" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
|
||||
if [ $BUILD_FAST = "0" ]; then
|
||||
docker_build "arm32v7/ubuntu:18.04" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
|
||||
docker_build "arm64v8/ubuntu:18.04" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
|
||||
docker_build "arm32v7/ubuntu:latest" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
|
||||
docker_build "arm64v8/ubuntu:latest" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
|
||||
fi
|
||||
# Tag as 'latest' for official release; otherwise tag as grafana/grafana:master
|
||||
if echo "$_grafana_tag" | grep -q "^v"; then
|
||||
docker_tag_all "${_docker_repo}" "latest"
|
||||
# Create the expected tag for running the end to end tests successfully
|
||||
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_tag}"
|
||||
else
|
||||
docker_tag_all "${_docker_repo}" "master"
|
||||
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_version}"
|
||||
|
||||
@@ -38,14 +38,8 @@ if echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -vq "beta"
|
||||
echo "pushing ${_docker_repo}:latest"
|
||||
docker_push_all "${_docker_repo}" "latest"
|
||||
docker_push_all "${_docker_repo}" "${_grafana_version}"
|
||||
# Push to the grafana-dev repository with the expected tag
|
||||
# for running the end to end tests successfully
|
||||
docker push "grafana/grafana-dev:${_grafana_tag}"
|
||||
elif echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -q "beta"; then
|
||||
docker_push_all "${_docker_repo}" "${_grafana_version}"
|
||||
# Push to the grafana-dev repository with the expected tag
|
||||
# for running the end to end tests successfully
|
||||
docker push "grafana/grafana-dev:${_grafana_tag}"
|
||||
elif echo "$_grafana_tag" | grep -q "master"; then
|
||||
docker_push_all "${_docker_repo}" "master"
|
||||
docker push "grafana/grafana-dev:${_grafana_version}"
|
||||
|
||||
@@ -34,7 +34,7 @@ func AdminCreateUser(c *models.ReqContext, form dtos.AdminCreateUserForm) {
		return
	}

	metrics.MApiAdminUserCreate.Inc()
	metrics.M_Api_Admin_User_Create.Inc()

	user := cmd.Result

@@ -15,7 +15,6 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
reqEditorRole := middleware.ReqEditorRole
|
||||
reqOrgAdmin := middleware.ReqOrgAdmin
|
||||
reqCanAccessTeams := middleware.AdminOrFeatureEnabled(hs.Cfg.EditorsCanAdmin)
|
||||
reqSnapshotPublicModeOrSignedIn := middleware.SnapshotPublicModeOrSignedIn()
|
||||
redirectFromLegacyDashboardURL := middleware.RedirectFromLegacyDashboardURL()
|
||||
redirectFromLegacyDashboardSoloURL := middleware.RedirectFromLegacyDashboardSoloURL()
|
||||
quota := middleware.Quota(hs.QuotaService)
|
||||
@@ -105,6 +104,13 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
r.Get("/dashboard/snapshot/*", hs.Index)
|
||||
r.Get("/dashboard/snapshots/", reqSignedIn, hs.Index)
|
||||
|
||||
// api for dashboard snapshots
|
||||
r.Post("/api/snapshots/", bind(models.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot)
|
||||
r.Get("/api/snapshot/shared-options/", GetSharingOptions)
|
||||
r.Get("/api/snapshots/:key", GetDashboardSnapshot)
|
||||
r.Get("/api/snapshots-delete/:deleteKey", Wrap(DeleteDashboardSnapshotByDeleteKey))
|
||||
r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot))
|
||||
|
||||
// api renew session based on cookie
|
||||
r.Get("/api/login/ping", quota("session"), Wrap(hs.LoginAPIPing))
|
||||
|
||||
@@ -407,11 +413,4 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
|
||||
// streams
|
||||
//r.Post("/api/streams/push", reqSignedIn, bind(dtos.StreamMessage{}), liveConn.PushToStream)
|
||||
|
||||
// Snapshots
|
||||
r.Post("/api/snapshots/", reqSnapshotPublicModeOrSignedIn, bind(models.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot)
|
||||
r.Get("/api/snapshot/shared-options/", reqSignedIn, GetSharingOptions)
|
||||
r.Get("/api/snapshots/:key", GetDashboardSnapshot)
|
||||
r.Get("/api/snapshots-delete/:deleteKey", reqSnapshotPublicModeOrSignedIn, Wrap(DeleteDashboardSnapshotByDeleteKey))
|
||||
r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot))
|
||||
}
|
||||
|
||||
@@ -133,7 +133,7 @@ func (hs *HTTPServer) GetDashboard(c *m.ReqContext) Response {
|
||||
Meta: meta,
|
||||
}
|
||||
|
||||
c.TimeRequest(metrics.MApiDashboardGet)
|
||||
c.TimeRequest(metrics.M_Api_Dashboard_Get)
|
||||
return JSON(200, dto)
|
||||
}
|
||||
|
||||
@@ -278,11 +278,12 @@ func (hs *HTTPServer) PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand)
|
||||
inFolder := cmd.FolderId > 0
|
||||
err := dashboards.MakeUserAdmin(hs.Bus, cmd.OrgId, cmd.UserId, dashboard.Id, !inFolder)
|
||||
if err != nil {
|
||||
hs.log.Error("Could not make user admin", "dashboard", dashboard.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||
hs.log.Error("Could not make user admin", "dashboard", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||
return Error(500, "Failed to make user admin of dashboard", err)
|
||||
}
|
||||
}
|
||||
|
||||
c.TimeRequest(metrics.MApiDashboardSave)
|
||||
c.TimeRequest(metrics.M_Api_Dashboard_Save)
|
||||
return JSON(200, util.DynMap{
|
||||
"status": "success",
|
||||
"slug": dashboard.Slug,
|
||||
|
||||
@@ -97,7 +97,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
|
||||
cmd.ExternalDeleteUrl = response.DeleteUrl
|
||||
cmd.Dashboard = simplejson.New()
|
||||
|
||||
metrics.MApiDashboardSnapshotExternal.Inc()
|
||||
metrics.M_Api_Dashboard_Snapshot_External.Inc()
|
||||
} else {
|
||||
if cmd.Key == "" {
|
||||
cmd.Key = util.GetRandomString(32)
|
||||
@@ -109,7 +109,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
|
||||
|
||||
url = setting.ToAbsUrl("dashboard/snapshot/" + cmd.Key)
|
||||
|
||||
metrics.MApiDashboardSnapshotCreate.Inc()
|
||||
metrics.M_Api_Dashboard_Snapshot_Create.Inc()
|
||||
}
|
||||
|
||||
if err := bus.Dispatch(&cmd); err != nil {
|
||||
@@ -154,7 +154,7 @@ func GetDashboardSnapshot(c *m.ReqContext) {
|
||||
},
|
||||
}
|
||||
|
||||
metrics.MApiDashboardSnapshotGet.Inc()
|
||||
metrics.M_Api_Dashboard_Snapshot_Get.Inc()
|
||||
|
||||
c.Resp.Header().Set("Cache-Control", "public, max-age=3600")
|
||||
c.JSON(200, dto)
|
||||
|
||||
@@ -8,7 +8,7 @@ import (
|
||||
)
|
||||
|
||||
func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
|
||||
c.TimeRequest(metrics.MDataSourceProxyReqTimer)
|
||||
c.TimeRequest(metrics.M_DataSource_ProxyReq_Timer)
|
||||
|
||||
dsId := c.ParamsInt64(":id")
|
||||
ds, err := hs.DatasourceCache.GetDatasource(dsId, c.SignedInUser, c.SkipCache)
|
||||
|
||||
@@ -64,6 +64,7 @@ func (hs *HTTPServer) CreateFolder(c *m.ReqContext, cmd m.CreateFolderCommand) R
|
||||
if hs.Cfg.EditorsCanAdmin {
|
||||
if err := dashboards.MakeUserAdmin(hs.Bus, c.OrgId, c.SignedInUser.UserId, cmd.Result.Id, true); err != nil {
|
||||
hs.log.Error("Could not make user admin", "folder", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||
return Error(500, "Failed to make user admin of folder", err)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -269,8 +269,7 @@ func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) {
		return
	}

	promhttp.
		HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
	promhttp.HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
		ServeHTTP(ctx.Resp, ctx.Req.Request)
}

Some files were not shown because too many files have changed in this diff.