Mirror of https://github.com/grafana/grafana.git (synced 2025-12-20 19:44:55 +08:00)

Compare commits: docs/add-d ... v6.3.6 (87 commits)
Commits (SHA1):
fdd211758e, 5d6512a7a2, b467557614, 2148a9ff6e, dc6219d8e0, 67bad726f1, 3f8624bffb, 7f1db70213, b2d86c76c6, 8c168a6b83,
f02d6c7be2, 496d0323bd, f455f02318, 1e58fdaffd, c27fd346d2, 59fa8cc82e, a557646484, be2e2330f5, 84d0a71b25, e0ee72a2ff,
881c229ee3, 9d97f48374, 39f00259f3, 84022650cb, e368080dea, a02c2b21d2, 3a58974314, 5954cb7220, f24ef80e52, 917b278e45,
483246016b, 43fe057baa, f2fffadcd6, de06c1c1b8, 830da0fda0, 78fff0161a, 06d4641a8f, e232629917, 57a57932af, 62a226b1c3,
a38dcc3ac7, c39b0e246b, f8f1f506ed, 9d57a1f192, ba4a870632, ef9ec32c32, 17235e4bd1, eb82b77782, 3af8aa5c4f, 5b588af73c,
5ec6eccfac, 237e0e8631, 7a165febf3, fd7c38c62f, 00519f1105, c8740d98b0, 54030deaf6, 880f6b63a1, d8ddfa5622, 9af2ea1b53,
d98ee66bd8, d3ecc963e6, 0961ec9748, a2c7570c5c, 9cbe0abb44, 2fecdaf6b4, a5f360e50e, 99f2386bd9, bffa956068, 1b7a54c084,
8128eb6c77, 10c4d6eccc, 8a062dde35, 041c1c3cb1, aba6df2e09, 5192504748, 8a0e2ac3a4, 373d082363, 94765294a2, 1f482a5f0b,
ae04813045, 3d7c52dcd1, 81f8158010, 5de014a91d, 777311a759, 829b5d0d40, 12878409db
@@ -19,7 +19,7 @@ version: 2
jobs:
mysql-integration-test:
docker:
- image: circleci/golang:1.12.6
- image: circleci/golang:1.12.9
- image: circleci/mysql:5.6-ram
environment:
MYSQL_ROOT_PASSWORD: rootpass
@@ -30,7 +30,7 @@ jobs:
steps:
- checkout
- run: sudo apt update
- run: sudo apt install -y mysql-client
- run: sudo apt install -y default-mysql-client
- run: dockerize -wait tcp://127.0.0.1:3306 -timeout 120s
- run: cat devenv/docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass
- run:
@@ -39,7 +39,7 @@ jobs:

postgres-integration-test:
docker:
- image: circleci/golang:1.12.6
- image: circleci/golang:1.12.9
- image: circleci/postgres:9.3-ram
environment:
POSTGRES_USER: grafanatest
@@ -58,7 +58,7 @@ jobs:

cache-server-test:
docker:
- image: circleci/golang:1.12.6
- image: circleci/golang:1.12.9
- image: circleci/redis:4-alpine
- image: memcached
working_directory: /go/src/github.com/grafana/grafana
@@ -98,6 +98,34 @@ jobs:
path: public/e2e-test/screenShots/theOutput
destination: output-screenshots

end-to-end-test-release:
docker:
- image: circleci/node:10-browsers
- image: grafana/grafana-dev:$CIRCLE_TAG
steps:
- run: dockerize -wait tcp://127.0.0.1:3000 -timeout 120s
- checkout
- restore_cache:
key: dependency-cache-{{ checksum "yarn.lock" }}
- run:
name: yarn install
command: 'yarn install --pure-lockfile --no-progress'
no_output_timeout: 5m
- save_cache:
key: dependency-cache-{{ checksum "yarn.lock" }}
paths:
- node_modules
- run:
name: run end-to-end tests
command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests'
no_output_timeout: 5m
- store_artifacts:
path: public/e2e-test/screenShots/theTruth
destination: expected-screenshots
- store_artifacts:
path: public/e2e-test/screenShots/theOutput
destination: output-screenshots

codespell:
docker:
- image: circleci/python
@@ -116,7 +144,7 @@ jobs:

lint-go:
docker:
- image: circleci/golang:1.12.6
- image: circleci/golang:1.12.9
environment:
# we need CGO because of go-sqlite3
CGO_ENABLED: 1
@@ -127,6 +155,15 @@ jobs:
name: Lint Go
command: 'make lint-go'

shellcheck:
machine: true
working_directory: ~/go/src/github.com/grafana/grafana
steps:
- checkout
- run:
name: ShellCheck
command: 'make shellcheck'

test-frontend:
docker:
- image: circleci/node:10
@@ -148,7 +185,7 @@ jobs:

test-backend:
docker:
- image: circleci/golang:1.12.6
- image: circleci/golang:1.12.9
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -158,7 +195,7 @@ jobs:

build-all:
docker:
- image: grafana/build-container:1.2.7
- image: grafana/build-container:1.2.8
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -202,7 +239,7 @@ jobs:

build:
docker:
- image: grafana/build-container:1.2.7
- image: grafana/build-container:1.2.8
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -228,7 +265,7 @@ jobs:

build-fast-backend:
docker:
- image: grafana/build-container:1.2.7
- image: grafana/build-container:1.2.8
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -245,7 +282,7 @@ jobs:

build-fast-frontend:
docker:
- image: grafana/build-container:1.2.7
- image: grafana/build-container:1.2.8
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -269,7 +306,7 @@ jobs:

build-fast-package:
docker:
- image: grafana/build-container:1.2.7
- image: grafana/build-container:1.2.8
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -296,7 +333,7 @@ jobs:

build-fast-save:
docker:
- image: grafana/build-container:1.2.7
- image: grafana/build-container:1.2.8
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -382,7 +419,7 @@ jobs:

build-enterprise:
docker:
- image: grafana/build-container:1.2.7
- image: grafana/build-container:1.2.8
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -414,7 +451,7 @@ jobs:

build-all-enterprise:
docker:
- image: grafana/build-container:1.2.7
- image: grafana/build-container:1.2.8
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -635,6 +672,8 @@ workflows:
filters: *filter-only-master
- lint-go:
filters: *filter-only-master
- shellcheck:
filters: *filter-only-master
- test-frontend:
filters: *filter-only-master
- test-backend:
@@ -650,6 +689,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- build-oss-msi
@@ -662,6 +702,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-master
@@ -672,6 +713,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- build-all-enterprise
@@ -683,6 +725,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-master
@@ -704,6 +747,8 @@ workflows:
filters: *filter-only-release
- lint-go:
filters: *filter-only-release
- shellcheck:
filters: *filter-only-release
- test-frontend:
filters: *filter-only-release
- test-backend:
@@ -719,6 +764,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- build-oss-msi
@@ -731,6 +777,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-release
@@ -742,6 +789,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-release
@@ -752,9 +800,14 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-release
- end-to-end-test-release:
requires:
- grafana-docker-release
filters: *filter-only-release

build-branches-and-prs:
jobs:
@@ -771,6 +824,10 @@ workflows:
filters: *filter-not-release-or-master
- lint-go:
filters: *filter-not-release-or-master
- lint-go:
filters: *filter-not-release-or-master
- shellcheck:
filters: *filter-not-release-or-master
- test-frontend:
filters: *filter-not-release-or-master
- test-backend:
@@ -788,6 +845,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- cache-server-test
@@ -799,6 +857,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- cache-server-test
@@ -1,5 +1,5 @@
# Golang build container
FROM golang:1.12.4
FROM golang:1.12.9

WORKDIR $GOPATH/src/github.com/grafana/grafana

@@ -33,7 +33,7 @@ ENV NODE_ENV production
RUN ./node_modules/.bin/grunt build

# Final container
FROM ubuntu:latest
FROM ubuntu:18.04

LABEL maintainer="Grafana team <hello@grafana.com>"
Makefile (10 changed lines)
@@ -2,8 +2,9 @@

.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go gosec revive golangci-lint go-vet test-go test-js test run clean devenv devenv-down revive-alerting

GO := GO111MODULE=on go
GO_FILES := ./pkg/...
GO = GO111MODULE=on go
GO_FILES ?= ./pkg/...
SH_FILES ?= $(shell find ./scripts -name *.sh)

all: deps build

@@ -111,6 +112,11 @@ go-vet:

lint-go: go-vet golangci-lint revive revive-alerting gosec

# with disabled SC1071 we are ignored some TCL,Expect `/usr/bin/env expect` scripts
shellcheck: $(SH_FILES)
@docker run --rm -v "$$PWD:/mnt" koalaman/shellcheck:stable \
$(SH_FILES) -e SC1071

run: scripts/go/bin/bra
@scripts/go/bin/bra run
@@ -214,6 +214,10 @@ external_enabled = true
external_snapshot_url = https://snapshots-origin.raintank.io
external_snapshot_name = Publish to snapshot.raintank.io

# Set to true to enable this Grafana instance act as an external snapshot server and allow unauthenticated requests for
# creating and deleting snapshots.
public_mode = false

# remove expired snapshot
snapshot_remove_expired = true

@@ -379,17 +383,45 @@ send_client_credentials_via_post = false

#################################### SAML Auth ###########################
[auth.saml] # Enterprise only
# Defaults to false. If true, the feature is enabled
enabled = false
private_key =
private_key_path =

# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
certificate =

# Path to the public X.509 certificate. Used to sign requests to the IdP
certificate_path =

# Base64-encoded private key. Used to decrypt assertions from the IdP
private_key =

# Path to the private key. Used to decrypt assertions from the IdP
private_key_path =

# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata =

# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata_path =

# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
idp_metadata_url =

# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
max_issue_delay = 90s

# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
metadata_valid_duration = 48h

# Friendly name or name of the attribute within the SAML assertion to use as the user's name
assertion_attribute_name = displayName

# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
assertion_attribute_login = mail

# Friendly name or name of the attribute within the SAML assertion to use as the user's email
assertion_attribute_email = mail

#################################### Basic Auth ##########################
[auth.basic]
enabled = true
@@ -562,6 +594,8 @@ enabled = true
[metrics]
enabled = true
interval_seconds = 10
# Disable total stats (stat_totals_*) metrics to be generated
disable_total_stats = false

#If both are set, basic auth will be required for the metrics endpoint.
basic_auth_username =
@@ -209,6 +209,10 @@
;external_snapshot_url = https://snapshots-origin.raintank.io
;external_snapshot_name = Publish to snapshot.raintank.io

# Set to true to enable this Grafana instance act as an external snapshot server and allow unauthenticated requests for
# creating and deleting snapshots.
;public_mode = false

# remove expired snapshot
;snapshot_remove_expired = true

@@ -334,18 +338,46 @@
;send_client_credentials_via_post = false

#################################### SAML Auth ###########################
;[auth.saml] # Enterprise only
[auth.saml] # Enterprise only
# Defaults to false. If true, the feature is enabled.
;enabled = false
;private_key =
;private_key_path =

# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
;certificate =

# Path to the public X.509 certificate. Used to sign requests to the IdP
;certificate_path =

# Base64-encoded private key. Used to decrypt assertions from the IdP
;private_key =

;# Path to the private key. Used to decrypt assertions from the IdP
;private_key_path =

# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
;idp_metadata =

# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
;idp_metadata_path =

# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
;idp_metadata_url =

# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds.
;max_issue_delay = 90s

# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours.
;metadata_valid_duration = 48h

# Friendly name or name of the attribute within the SAML assertion to use as the user's name
;assertion_attribute_name = displayName

# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
;assertion_attribute_login = mail

# Friendly name or name of the attribute within the SAML assertion to use as the user's email
;assertion_attribute_email = mail

#################################### Grafana.com Auth ####################
[auth.grafana_com]
;enabled = false
@@ -492,6 +524,8 @@
[metrics]
# Disable / Enable internal metrics
;enabled = true
# Disable total stats (stat_totals_*) metrics to be generated
;disable_total_stats = false

# Publish interval
;interval_seconds = 10
@@ -44,7 +44,7 @@
"nullPointMode": "null",
"options-gauge": {
"baseColor": "#299c46",
"decimals": "2",
"decimals": 2,
"maxValue": 100,
"minValue": 0,
"options": {
@@ -111,7 +111,7 @@
"nullPointMode": "null",
"options-gauge": {
"baseColor": "#299c46",
"decimals": "",
"decimals": null,
"maxValue": 100,
"minValue": 0,
"options": {
@@ -178,7 +178,7 @@
"nullPointMode": "null",
"options-gauge": {
"baseColor": "#299c46",
"decimals": "",
"decimals": null,
"maxValue": 100,
"minValue": 0,
"options": {
@@ -28,38 +28,6 @@ search_filter = "(cn=%s)"
# An array of base dns to search through
search_base_dns = ["dc=grafana,dc=org"]

# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups.
# This is done by enabling group_search_filter below. You must also set member_of= "cn"
# in [servers.attributes] below.

# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN
# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of
# below in such a way that the user's recursive group membership is considered.
#
# Nested Groups + Active Directory (AD) Example:
#
# AD groups store the Distinguished Names (DNs) of members, so your filter must
# recursively search your groups for the authenticating user's DN. For example:
#
# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)"
# group_search_filter_user_attribute = "distinguishedName"
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
#
# [servers.attributes]
# ...
# member_of = "distinguishedName"

## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available)
# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter.
## Defaults to the value of username in [server.attributes]
## Valid options are any of your values in [servers.attributes]
## If you are using nested groups you probably want to set this and member_of in
## [servers.attributes] to "distinguishedName"
# group_search_filter_user_attribute = "distinguishedName"
## An array of the base DNs to search through for groups. Typically uses ou=groups
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]

# Specify names of the ldap attributes your ldap uses
[servers.attributes]
name = "givenName"
devenv/docker/blocks/openldap/ldap_posix_dev.toml (new file, 57 lines)
@@ -0,0 +1,57 @@
# To troubleshoot and get more log info enable ldap debug logging in grafana.ini
# [log]
# filters = ldap:debug

[[servers]]
# Ldap server host (specify multiple hosts space separated)
host = "127.0.0.1"
# Default port is 389 or 636 if use_ssl = true
port = 389
# Set to true if ldap server supports TLS
use_ssl = false
# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS)
start_tls = false
# set to true if you want to skip ssl cert validation
ssl_skip_verify = false
# set to the path to your root CA certificate or leave unset to use system defaults
# root_ca_cert = "/path/to/certificate.crt"

# Search user bind dn
bind_dn = "cn=admin,dc=grafana,dc=org"
# Search user bind password
# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
bind_password = 'grafana'

# An array of base dns to search through
search_base_dns = ["dc=grafana,dc=org"]

search_filter = "(uid=%s)"

group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
group_search_filter_user_attribute = "uid"
group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]

[servers.attributes]
name = "givenName"
surname = "sn"
username = "cn"
member_of = "memberOf"
email = "email"

# Map ldap groups to grafana org roles
[[servers.group_mappings]]
group_dn = "cn=posix-admins,ou=groups,dc=grafana,dc=org"
org_role = "Admin"
grafana_admin = true

# The Grafana organization database id, optional, if left out the default org (id 1) will be used
# org_id = 1

[[servers.group_mappings]]
group_dn = "cn=editors,ou=groups,dc=grafana,dc=org"
org_role = "Editor"

[[servers.group_mappings]]
# If you want to match all (or no ldap groups) then you can use wildcard
group_dn = "*"
org_role = "Viewer"
@@ -12,7 +12,7 @@ After adding ldif files to `prepopulate`:

## Enabling LDAP in Grafana

Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). To enable it in the .ini file to get Grafana to use this block:
If you want to use users/groups with `memberOf` support Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). To enable it in the .ini file to get Grafana to use this block:

```ini
[auth.ldap]
@@ -21,6 +21,8 @@ config_file = conf/ldap_dev.toml
; allow_sign_up = true
```

Otherwise perform same actions for `ldap_dev_posix.toml` config.

## Groups & Users

admins
@@ -38,3 +40,11 @@ editors
ldap-editors
no groups
ldap-viewer

## Groups & Users (POSIX)

admins
ldap-posix-admin
no groups
ldap-posix
@@ -78,3 +78,31 @@ objectClass: inetOrgPerson
objectClass: organizationalPerson
sn: ldap-torkel
cn: ldap-torkel

# admin for posix group (without support for memberOf attribute)
dn: uid=ldap-posix-admin,ou=users,dc=grafana,dc=org
mail: ldap-posix-admin@grafana.com
userPassword: grafana
objectclass: top
objectclass: posixAccount
objectclass: inetOrgPerson
homedirectory: /home/ldap-posix-admin
sn: ldap-posix-admin
cn: ldap-posix-admin
uid: ldap-posix-admin
uidnumber: 1
gidnumber: 1

# user for posix group (without support for memberOf attribute)
dn: uid=ldap-posix,ou=users,dc=grafana,dc=org
mail: ldap-posix@grafana.com
userPassword: grafana
objectclass: top
objectclass: posixAccount
objectclass: inetOrgPerson
homedirectory: /home/ldap-posix
sn: ldap-posix
cn: ldap-posix
uid: ldap-posix
uidnumber: 2
gidnumber: 2
@@ -23,3 +23,21 @@ objectClass: groupOfNames
member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
member: cn=ldap-daniel,ou=users,dc=grafana,dc=org
member: cn=ldap-leo,ou=users,dc=grafana,dc=org

# -- POSIX --

# posix admin group (without support for memberOf attribute)
dn: cn=posix-admins,ou=groups,dc=grafana,dc=org
cn: admins
objectClass: top
objectClass: posixGroup
gidNumber: 1
memberUid: ldap-posix-admin

# posix group (without support for memberOf attribute)
dn: cn=posix,ou=groups,dc=grafana,dc=org
cn: viewers
objectClass: top
objectClass: posixGroup
gidNumber: 2
memberUid: ldap-posix
@@ -126,8 +126,6 @@ group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
group_search_filter_user_attribute = "uid"
```

Also set `member_of = "dn"` in the `[servers.attributes]` section.

### Group Mappings

In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization and role. These will be synced every time the user logs in, with LDAP being
docs/sources/auth/saml.md (new file, 178 lines)
@@ -0,0 +1,178 @@
+++
title = "SAML Authentication"
description = "Grafana SAML Authentication"
keywords = ["grafana", "saml", "documentation", "saml-auth"]
aliases = ["/auth/saml/"]
type = "docs"
[menu.docs]
name = "SAML"
parent = "authentication"
weight = 5
+++

# SAML Authentication

> SAML Authentication integration is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "enterprise" >}}).

> Only available in Grafana v6.3+

The SAML authentication integration allows your Grafana users to log in by
using an external SAML Identity Provider (IdP). To enable this, Grafana becomes
a Service Provider (SP) in the authentication flow, interacting with the IdP to
exchange user information.

## Supported SAML

The SAML single-sign-on (SSO) standard is varied and flexible. Our implementation contains the subset of features needed to provide a smooth authentication experience into Grafana.

> Should you encounter any problems with our implementation, please don't hesitate to contact us.

At the moment of writing, Grafana supports:

1. From the Service Provider (SP) to the Identity Provider (IdP)

   - `HTTP-POST` binding
   - `HTTP-Redirect` binding

2. From the Identity Provider (IdP) to the Service Provider (SP)

   - `HTTP-POST` binding

3. In terms of security, we currently support signed and encrypted Assertions. However, signed or encrypted requests are not supported.

4. In terms of initiation, only SP-initiated requests are supported. There's no support for IdP-initiated request.

## Set up SAML Authentication

To use the SAML integration, you need to enable SAML in the [main config file]({{< relref "installation/configuration.md" >}}).

```bash
[auth.saml]
# Defaults to false. If true, the feature is enabled
enabled = true

# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
certificate =

# Path to the public X.509 certificate. Used to sign requests to the IdP
certificate_path =

# Base64-encoded private key. Used to decrypt assertions from the IdP
private_key =

# Path to the private key. Used to decrypt assertions from the IdP
private_key_path =

# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata =

# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata_path =

# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
idp_metadata_url =

# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
max_issue_delay =

# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
metadata_valid_duration =

# Friendly name or name of the attribute within the SAML assertion to use as the user's name
assertion_attribute_name = displayName

# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
assertion_attribute_login = mail

# Friendly name or name of the attribute within the SAML assertion to use as the user's email
assertion_attribute_email = mail
```

Important to note:

- like any other Grafana configuration, use of [environment variables for these options is supported]({{< relref "installation/configuration.md#using-environment-variables" >}})
- only one form of configuration option is required. Using multiple forms, e.g. both `certificate` and `certificate_path` will result in an error

## Grafana Configuration

An example working configuration example looks like:

```bash
[auth.saml]
enabled = true
certificate_path = "/path/to/certificate.cert"
private_key_path = "/path/to/private_key.pem"
metadata_path = "/my/metadata.xml"
max_issue_delay = 90s
metadata_valid_duration = 48h
assertion_attribute_name = displayName
assertion_attribute_login = mail
assertion_attribute_email = mail
```

And here is a comprehensive list of the options:

| Setting | Required | Description | Default |
| ------- | -------- | ----------- | ------- |
| `enabled` | No | Whenever SAML authentication is allowed | `false` |
| `certificate` or `certificate_path` | Yes | Base64-encoded string or Path for the SP X.509 certificate | |
| `private_key` or `private_key_path` | Yes | Base64-encoded string or Path for the SP private key | |
| `idp_metadata` or `idp_metadata_path` or `idp_metadata_url` | Yes | Base64-encoded string, Path or URL for the IdP SAML metadata XML | |
| `max_issue_delay` | No | Duration, since the IdP issued a response and the SP is allowed to process it | `90s` |
| `metadata_valid_duration` | No | Duration, for how long the SP's metadata should be valid | `48h` |
| `assertion_attribute_name` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's name | `displayName` |
| `assertion_attribute_login` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's login handle | `mail` |
| `assertion_attribute_email` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's email | `mail` |

### Cert and Private Key

The SAML SSO standard uses asymmetric encryption to exchange information between the SP (Grafana) and the IdP. To perform such encryption, you need a public part and a private part. In this case, the X.509 certificate provides the public part, while the private key provides the private part.

Grafana supports two ways of specifying both the `certificate` and `private_key`. Without a suffix (e.g. `certificate=`), the configuration assumes you've supplied the base64-encoded file contents. However, if specified with the `_path` suffix (e.g. `certificate_path=`) Grafana will treat it as a file path and attempt to read the file from the file system.

### IdP Metadata

Expanding on the above, we'll also need the public part from our IdP for message verification. The SAML IdP metadata XML tells us where and how we should exchange the user information.

Currently, we support three ways of specifying the IdP metadata. Without a suffix `idp_metadata=` Grafana assumes base64-encoded XML file contents, with the `_path` suffix assumes a file path and attempts to read the file from the file system and with the `_url` suffix assumes an URL and attempts to load the metadata from the given location.

### Max Issue Delay

Prevention of SAML response replay attacks and internal clock skews between the SP (Grafana), and the IdP is covered. You can set a maximum amount of time between the IdP issuing a response and the SP (Grafana) processing it.

The configuration options is specified as a duration e.g. `max_issue_delay = 90s` or `max_issue_delay = 1h`

### Metadata valid duration

As an SP, our metadata is likely to expire at some point, e.g. due to a certificate rotation or change of location binding. Grafana allows you to specify for how long the metadata should be valid. Leveraging the standard's `validUntil` field, you can tell consumers until when your metadata is going to be valid. The duration is computed by adding the duration to the current time.

The configuration option is specified as a duration e.g. `metadata_valid_duration = 48h`

## Identity Provider (IdP) registration

For the SAML integration to work correctly, you need to make the IdP aware of the SP.

The integration provides two key endpoints as part of Grafana:

- The `/saml/metadata` endpoint. Which contains the SP's metadata. You can either download and upload it manually or make the IdP request it directly from the endpoint. Some providers name it Identifier or Entity ID.

- The `/saml/acs` endpoint. Which is intended to receive the ACS (Assertion Customer Service) callback. Some providers name it SSO URL or Reply URL.

## Assertion mapping

During the SAML SSO authentication flow, we receive the ACS (Assertion Customer Service) callback. The callback contains all the relevant information of the user under authentication embedded in the SAML response. Grafana parses the response to create (or update) the user within its internal database.

For Grafana to map the user information, it looks at the individual attributes within the assertion. You can think of these attributes as Key/Value pairs (although, they contain more information than that).

Grafana provides configuration options that let you modify which keys to look at for these values. The data we need to create the user in Grafana is Name, Login handle, and email.

An example is `assertion_attribute_name = "givenName"` where Grafana looks within the assertion for an attribute with a friendly name or name of `givenName`. Both, the friendly name (e.g. `givenName`) or the name (e.g. `urn:oid:2.5.4.42`) can be used interchangeably as the value for the configuration option.

## Troubleshooting

To troubleshoot and get more log info enable saml debug logging in the [main config file]({{< relref "installation/configuration.md" >}}).

```bash
[log]
filters = saml.auth:debug
```
@@ -29,6 +29,10 @@ With Grafana Enterprise you can set up synchronization between LDAP Groups and T

Datasource permissions allow you to restrict query access to only specific Teams and Users. [Learn More]({{< relref "permissions/datasource_permissions.md" >}}).

### SAML Authentication

Enables your Grafana Enterprise users to authenticate with SAML. [Learn More]({{< relref "auth/saml.md" >}}).

### Premium Plugins

With a Grafana Enterprise license you will get access to premium plugins, including:
@@ -56,7 +56,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins
This option is now renamed (and moved to Options sub section above your queries):


Datas source selection & options & help are now above your metric queries.
Data source selection & options & help are now above your metric queries.


### Minor Changes

@@ -130,6 +130,8 @@ belonging to an LDAP group that gives them access to Grafana.

Built-in support for SAML is now available in Grafana Enterprise.

[See docs]({{< relref "auth/saml.md" >}})

### Team Sync for GitHub OAuth

When setting up OAuth with GitHub it's now possible to sync GitHub teams with Teams in Grafana.
@@ -533,6 +533,9 @@ If set configures the username to use for basic authentication on the metrics en
### basic_auth_password
If set configures the password to use for basic authentication on the metrics endpoint.

### disable_total_stats
If set to `true`, then total stats generation (`stat_totals_*` metrics) is disabled. The default is `false`.

### interval_seconds

Flush/Write interval when sending metrics to external TSDB. Defaults to 10s.
@@ -37,15 +37,11 @@ export class ConfigCtrl {

postUpdate() {
if (!this.appModel.enabled) {
return this.$q.resolve();
return;
}
return this.appEditCtrl.importDashboards().then(() => {
this.enabled = true;
return {
url: "plugins/raintank-kubernetes-app/page/clusters",
message: "Kubernetes App enabled!"
};
});

// TODO, whatever you want
console.log('Post Update:', this);
}
}
ConfigCtrl.templateUrl = 'components/config/config.html';
@@ -1,5 +1,6 @@
[
{ "version": "v6.2", "path": "/", "archived": false, "current": true },
{ "version": "v6.3", "path": "/", "archived": false, "current": true },
{ "version": "v6.2", "path": "/v6.2", "archived": true },
{ "version": "v6.1", "path": "/v6.1", "archived": true },
{ "version": "v6.0", "path": "/v6.0", "archived": true },
{ "version": "v5.4", "path": "/v5.4", "archived": true },
go.mod (4 changed lines)
@@ -52,7 +52,7 @@ require (
github.com/onsi/gomega v1.5.0 // indirect
github.com/opentracing/opentracing-go v1.1.0
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/pkg/errors v0.8.1
github.com/pkg/errors v0.8.1 // indirect
github.com/prometheus/client_golang v0.9.2
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90
github.com/prometheus/common v0.2.0
@@ -64,7 +64,7 @@ require (
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a
github.com/stretchr/testify v1.3.0
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf
github.com/ua-parser/uap-go v0.0.0-20190303233514-1004ccd816b3
github.com/ua-parser/uap-go v0.0.0-20190826212731-daf92ba38329
github.com/uber-go/atomic v1.3.2 // indirect
github.com/uber/jaeger-client-go v2.16.0+incompatible
github.com/uber/jaeger-lib v2.0.0+incompatible // indirect
go.sum (4 changed lines)
@@ -202,8 +202,8 @@ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf h1:Z2X3Os7oRzpdJ75iPqWZc0HeJWFYNCvKsfpQwFpRNTA=
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf/go.mod h1:M8agBzgqHIhgj7wEn9/0hJUZcrvt9VY+Ln+S1I5Mha0=
github.com/ua-parser/uap-go v0.0.0-20190303233514-1004ccd816b3 h1:E7xa7Zur8hLPvw+03gAeQ9esrglfV389j2PcwhiGf/I=
github.com/ua-parser/uap-go v0.0.0-20190303233514-1004ccd816b3/go.mod h1:OBcG9bn7sHtXgarhUEb3OfCnNsgtGnkVf41ilSZ3K3E=
github.com/ua-parser/uap-go v0.0.0-20190826212731-daf92ba38329 h1:VBsKFh4W1JEMz3eLCmM9zOJKZdDkP5W4b3Y4hc7SbZc=
github.com/ua-parser/uap-go v0.0.0-20190826212731-daf92ba38329/go.mod h1:OBcG9bn7sHtXgarhUEb3OfCnNsgtGnkVf41ilSZ3K3E=
github.com/uber-go/atomic v1.3.2 h1:Azu9lPBWRNKzYXSIwRfgRuDuS0YKsK4NFhiQv98gkxo=
github.com/uber-go/atomic v1.3.2/go.mod h1:/Ct5t2lcmbJ4OSe/waGBoaVvVqtO0bmtfVNex1PFV8g=
github.com/uber/jaeger-client-go v2.16.0+incompatible h1:Q2Pp6v3QYiocMxomCaJuwQGFt7E53bPYqEgug/AoBtY=
@@ -5,7 +5,7 @@
"company": "Grafana Labs"
},
"name": "grafana",
"version": "6.3.0-pre",
"version": "6.3.6",
"repository": {
"type": "git",
"url": "http://github.com/grafana/grafana.git"
@@ -1,3 +1,6 @@
import { Threshold } from './threshold';
import { ValueMapping } from './valueMapping';

export enum LoadingState {
NotStarted = 'NotStarted',
Loading = 'Loading',
@@ -49,6 +52,12 @@ export interface Field {
decimals?: number | null; // Significant digits (for display)
min?: number | null;
max?: number | null;

// Convert input values into a display value
mappings?: ValueMapping[];

// Must be sorted by 'value', first value is always -Infinity
thresholds?: Threshold[];
}

export interface Labels {
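For illustration, a hedged sketch of a Field carrying the new optional properties; the package-root imports, the full set of Field members, and the sample values are assumptions based on the surrounding diffs rather than part of the change itself:

```typescript
import { Field, FieldType, Threshold } from '@grafana/data';

// Thresholds are assumed to be sorted by 'value' and to start at -Infinity,
// as the inline comment on the interface requires.
const thresholds: Threshold[] = [
  { value: -Infinity, color: 'green' },
  { value: 70, color: 'orange' },
  { value: 90, color: 'red' },
];

// Hypothetical field definition for a percentage metric.
const cpuField: Field = {
  name: 'cpu_usage',
  type: FieldType.number,
  unit: 'percent',
  decimals: 1,
  min: 0,
  max: 100,
  mappings: [], // ValueMapping[]: converts raw input values into display values
  thresholds,
};
```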
@@ -2,6 +2,7 @@ export * from './data';
export * from './dataLink';
export * from './logs';
export * from './navModel';
export * from './select';
export * from './time';
export * from './threshold';
export * from './utils';
packages/grafana-data/src/types/select.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
/**
 * Used in select elements
 */
export interface SelectableValue<T = any> {
label?: string;
value?: T;
imgUrl?: string;
description?: string;
[key: string]: any;
}
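For illustration, a small sketch of how SelectableValue is typically filled in; the options shown are hypothetical, and the package-root re-export is assumed from the types/index.ts change above:

```typescript
import { SelectableValue } from '@grafana/data';

// Hypothetical refresh-interval picker options; the generic parameter types the value.
const refreshOptions: Array<SelectableValue<number>> = [
  { label: 'Off', value: 0, description: 'Do not refresh automatically' },
  { label: '5s', value: 5 },
  { label: '1m', value: 60 },
];

const current = refreshOptions.find(opt => opt.value === 5);
console.log(current && current.label); // '5s'
```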
@@ -1,5 +1,4 @@
export interface Threshold {
index: number;
value: number;
color: string;
}
@@ -5,6 +5,18 @@ import { TimeZone } from '../types';

const units: DurationUnit[] = ['y', 'M', 'w', 'd', 'h', 'm', 's'];

export function isMathString(text: string | DateTime | Date): boolean {
if (!text) {
return false;
}

if (typeof text === 'string' && (text.substring(0, 3) === 'now' || text.includes('||'))) {
return true;
} else {
return false;
}
}

/**
 * Parses different types input to a moment instance. There is a specific formatting language that can be used
 * if text arg is string. See unit tests for examples.
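For illustration, a few assumed calls to the new isMathString helper; the relative import path is hypothetical and only shown to make the sketch self-contained:

```typescript
import { isMathString } from './datemath'; // hypothetical path to the module above

isMathString('now-6h');          // true: the string starts with 'now'
isMathString('2019-08-28||+1d'); // true: the string contains the '||' separator
isMathString('2019-08-28');      // false: plain date string
isMathString(new Date());        // false: not a string at all
```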
@@ -1,6 +1,14 @@
import { getFieldReducers, ReducerID, reduceField } from './index';
import { fieldReducers, ReducerID, reduceField } from './fieldReducer';

import _ from 'lodash';
import { DataFrame } from '../types/data';

/**
 * Run a reducer and get back the value
 */
function reduce(series: DataFrame, fieldIndex: number, id: string): any {
return reduceField({ series, fieldIndex, reducers: [id] })[id];
}

describe('Stats Calculators', () => {
const basicTable = {
@@ -9,29 +17,16 @@ describe('Stats Calculators', () => {
};

it('should load all standard stats', () => {
const names = [
ReducerID.sum,
ReducerID.max,
ReducerID.min,
ReducerID.logmin,
ReducerID.mean,
ReducerID.last,
ReducerID.first,
ReducerID.count,
ReducerID.range,
ReducerID.diff,
ReducerID.step,
ReducerID.delta,
// ReducerID.allIsZero,
// ReducerID.allIsNull,
];
const stats = getFieldReducers(names);
expect(stats.length).toBe(names.length);
for (const id of Object.keys(ReducerID)) {
const reducer = fieldReducers.getIfExists(id);
const found = reducer ? reducer.id : '<NOT FOUND>';
expect(found).toEqual(id);
}
});

it('should fail to load unknown stats', () => {
const names = ['not a stat', ReducerID.max, ReducerID.min, 'also not a stat'];
const stats = getFieldReducers(names);
const stats = fieldReducers.list(names);
expect(stats.length).toBe(2);

const found = stats.map(v => v.id);
@@ -92,6 +87,34 @@ describe('Stats Calculators', () => {
expect(stats.delta).toEqual(300);
});

it('consistenly check allIsNull/allIsZero', () => {
const empty = {
fields: [{ name: 'A' }],
rows: [],
};
const allNull = ({
fields: [{ name: 'A' }],
rows: [null, null, null, null],
} as unknown) as DataFrame;
const allNull2 = {
fields: [{ name: 'A' }],
rows: [[null], [null], [null], [null]],
};
const allZero = {
fields: [{ name: 'A' }],
rows: [[0], [0], [0], [0]],
};

expect(reduce(empty, 0, ReducerID.allIsNull)).toEqual(true);
expect(reduce(allNull, 0, ReducerID.allIsNull)).toEqual(true);
expect(reduce(allNull2, 0, ReducerID.allIsNull)).toEqual(true);

expect(reduce(empty, 0, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allNull, 0, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allNull2, 0, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allZero, 0, ReducerID.allIsZero)).toEqual(true);
});

it('consistent results for first/last value with null', () => {
const info = [
{
@@ -1,7 +1,8 @@
// Libraries
import isNumber from 'lodash/isNumber';

import { DataFrame, NullValueMode } from '../types/index';
import { DataFrame, NullValueMode } from '../types';
import { Registry, RegistryItem } from './registry';

export enum ReducerID {
sum = 'sum',
@@ -34,38 +35,13 @@ export interface FieldCalcs {
// Internal function
type FieldReducer = (data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;

export interface FieldReducerInfo {
id: string;
name: string;
description: string;
alias?: string; // optional secondary key. 'avg' vs 'mean', 'total' vs 'sum'

export interface FieldReducerInfo extends RegistryItem {
// Internal details
emptyInputResult?: any; // typically null, but some things like 'count' & 'sum' should be zero
standard: boolean; // The most common stats can all be calculated in a single pass
reduce?: FieldReducer;
}

/**
 * @param ids list of stat names or null to get all of them
 */
export function getFieldReducers(ids?: string[]): FieldReducerInfo[] {
if (ids === null || ids === undefined) {
if (!hasBuiltIndex) {
getById(ReducerID.mean);
}
return listOfStats;
}

return ids.reduce((list, id) => {
const stat = getById(id);
if (stat) {
list.push(stat);
}
return list;
}, new Array<FieldReducerInfo>());
}

interface ReduceFieldOptions {
series: DataFrame;
fieldIndex: number;
@@ -83,7 +59,7 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
return {};
}

const queue = getFieldReducers(reducers);
const queue = fieldReducers.list(reducers);

// Return early for empty series
// This lets the concrete implementations assume at least one row
@@ -122,24 +98,13 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
//
// ------------------------------------------------------------------------------

// private registry of all stats
interface TableStatIndex {
[id: string]: FieldReducerInfo;
}

const listOfStats: FieldReducerInfo[] = [];
const index: TableStatIndex = {};
let hasBuiltIndex = false;

function getById(id: string): FieldReducerInfo | undefined {
if (!hasBuiltIndex) {
[
export const fieldReducers = new Registry<FieldReducerInfo>(() => [
{
id: ReducerID.lastNotNull,
name: 'Last (not null)',
description: 'Last non-null value',
standard: true,
alias: 'current',
aliasIds: ['current'],
reduce: calculateLastNotNull,
},
{
@@ -159,14 +124,14 @@ function getById(id: string): FieldReducerInfo | undefined {
},
{ id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
{ id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
{ id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, alias: 'avg' },
{ id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, aliasIds: ['avg'] },
{
id: ReducerID.sum,
name: 'Total',
description: 'The sum of all values',
emptyInputResult: 0,
standard: true,
alias: 'total',
aliasIds: ['total'],
},
{
id: ReducerID.count,
@@ -205,6 +170,20 @@ function getById(id: string): FieldReducerInfo | undefined {
description: 'Used for log min scale',
standard: true,
},
{
id: ReducerID.allIsZero,
name: 'All Zeros',
description: 'All values are zero',
emptyInputResult: false,
standard: true,
},
{
id: ReducerID.allIsNull,
name: 'All Nulls',
description: 'All values are null',
emptyInputResult: true,
standard: true,
},
{
id: ReducerID.changeCount,
name: 'Change Count',
@@ -219,25 +198,7 @@ function getById(id: string): FieldReducerInfo | undefined {
standard: false,
reduce: calculateDistinctCount,
},
].forEach(info => {
const { id, alias } = info;
if (index.hasOwnProperty(id)) {
console.warn('Duplicate Stat', id, info, index);
}
index[id] = info;
if (alias) {
if (index.hasOwnProperty(alias)) {
console.warn('Duplicate Stat (alias)', alias, info, index);
}
index[alias] = info;
}
listOfStats.push(info);
});
hasBuiltIndex = true;
}

return index[id];
}
]);

function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const calcs = {
@@ -253,7 +214,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
count: 0,
nonNullCount: 0,
allIsNull: true,
allIsZero: false,
allIsZero: true,
range: null,
diff: null,
delta: 0,
@@ -264,7 +225,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
} as FieldCalcs;

for (let i = 0; i < data.rows.length; i++) {
let currentValue = data.rows[i][fieldIndex];
let currentValue = data.rows[i] ? data.rows[i][fieldIndex] : null;
if (i === 0) {
calcs.first = currentValue;
}
@@ -350,6 +311,10 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
calcs.mean = calcs.sum! / calcs.nonNullCount;
}

if (calcs.allIsNull) {
calcs.allIsZero = false;
}

if (calcs.max !== null && calcs.min !== null) {
calcs.range = calcs.max - calcs.min;
}
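For illustration, a minimal sketch of how the registry-backed reducers above are consumed after this change; the relative import paths, the sample frame, and the expected values are assumptions for the sketch, mirroring the updated test file:

```typescript
import { fieldReducers, reduceField, ReducerID } from './fieldReducer';
import { DataFrame } from '../types';

// Hypothetical single-field frame with one null row.
const series = ({
  fields: [{ name: 'A' }],
  rows: [[10], [20], [null], [30]],
} as unknown) as DataFrame;

// fieldReducers.list() replaces the old getFieldReducers(); 'avg' resolves to the
// mean reducer through its aliasIds entry, and unknown ids are silently dropped.
const selected = fieldReducers.list([ReducerID.sum, 'avg', 'not-a-reducer']);
console.log(selected.map(r => r.id)); // expected: ['sum', 'mean']

// reduceField() runs the selected calculations over one field of the frame.
const calcs = reduceField({ series, fieldIndex: 0, reducers: [ReducerID.sum, ReducerID.mean] });
console.log(calcs.sum, calcs.mean); // expected: 60 and 20, since mean divides by nonNullCount (see above)
```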
@@ -1,4 +1,5 @@
export * from './string';
export * from './registry';
export * from './markdown';
export * from './processDataFrame';
export * from './csv';
@@ -29,6 +29,15 @@ describe('toDataFrame', () => {
expect(series.fields[0].name).toEqual('Value');
});

it('assumes TimeSeries values are numbers', () => {
const input1 = {
target: 'time',
datapoints: [[100, 1], [200, 2]],
};
const data = toDataFrame(input1);
expect(data.fields[0].type).toBe(FieldType.number);
});

it('keeps dataFrame unchanged', () => {
const input = {
fields: [{ text: 'A' }, { text: 'B' }, { text: 'C' }],
@@ -29,6 +29,7 @@ function convertTimeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
fields: [
{
name: timeSeries.target || 'Value',
type: FieldType.number,
unit: timeSeries.unit,
},
{
134
packages/grafana-data/src/utils/registry.ts
Normal file
134
packages/grafana-data/src/utils/registry.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
import { SelectableValue } from '../types/select';

export interface RegistryItem {
  id: string; // Unique Key -- saved in configs
  name: string; // Display Name, can change without breaking configs
  description: string;
  aliasIds?: string[]; // when the ID changes, we may want backwards compatibility ('current' => 'last')

  /**
   * Some extensions should not be user selectable
   * like: 'all' and 'any' matchers;
   */
  excludeFromPicker?: boolean;
}

interface RegistrySelectInfo {
  options: Array<SelectableValue<string>>;
  current: Array<SelectableValue<string>>;
}

export class Registry<T extends RegistryItem> {
  private ordered: T[] = [];
  private byId = new Map<string, T>();
  private initalized = false;

  constructor(private init?: () => T[]) {}

  getIfExists(id: string | undefined): T | undefined {
    if (!this.initalized) {
      if (this.init) {
        for (const ext of this.init()) {
          this.register(ext);
        }
      }
      this.sort();
      this.initalized = true;
    }
    if (id) {
      return this.byId.get(id);
    }
    return undefined;
  }

  get(id: string): T {
    const v = this.getIfExists(id);
    if (!v) {
      throw new Error('Undefined: ' + id);
    }
    return v;
  }

  selectOptions(current?: string[], filter?: (ext: T) => boolean): RegistrySelectInfo {
    if (!this.initalized) {
      this.getIfExists('xxx'); // will trigger init
    }

    const select = {
      options: [],
      current: [],
    } as RegistrySelectInfo;

    const currentIds: any = {};
    if (current) {
      for (const id of current) {
        currentIds[id] = true;
      }
    }

    for (const ext of this.ordered) {
      if (ext.excludeFromPicker) {
        continue;
      }
      if (filter && !filter(ext)) {
        continue;
      }

      const option = {
        value: ext.id,
        label: ext.name,
        description: ext.description,
      };

      select.options.push(option);
      if (currentIds[ext.id]) {
        select.current.push(option);
      }
    }
    return select;
  }

  /**
   * Return a list of values by ID, or all values if not specified
   */
  list(ids?: any[]): T[] {
    if (ids) {
      const found: T[] = [];
      for (const id of ids) {
        const v = this.getIfExists(id);
        if (v) {
          found.push(v);
        }
      }
      return found;
    }
    if (!this.initalized) {
      this.getIfExists('xxx'); // will trigger init
    }
    return [...this.ordered]; // copy of everything, just in case
  }

  register(ext: T) {
    if (this.byId.has(ext.id)) {
      throw new Error('Duplicate Key:' + ext.id);
    }
    this.byId.set(ext.id, ext);
    this.ordered.push(ext);

    if (ext.aliasIds) {
      for (const alias of ext.aliasIds) {
        if (!this.byId.has(alias)) {
          this.byId.set(alias, ext);
        }
      }
    }

    if (this.initalized) {
      this.sort();
    }
  }

  private sort() {
    // TODO sort the list
  }
}
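The registry resolves items by id (falling back through aliasIds), lazily runs the init callback on first lookup, and exposes selectOptions()/list() for pickers such as StatsPicker later in this diff. A minimal usage sketch, assuming the class is imported from this new module; the ReducerInfo items below are illustrative only and not part of the changeset:

import { Registry, RegistryItem } from './registry';

interface ReducerInfo extends RegistryItem {
  reduce: (values: number[]) => number;
}

// The init callback runs lazily on the first get/list/selectOptions call.
const reducers = new Registry<ReducerInfo>(() => [
  { id: 'last', name: 'Last', description: 'Last value', reduce: v => v[v.length - 1] },
  { id: 'mean', name: 'Mean', description: 'Average value', aliasIds: ['avg'], reduce: v => v.reduce((a, b) => a + b, 0) / v.length },
]);

reducers.get('last');             // throws 'Undefined: ...' for unknown ids
reducers.getIfExists('avg');      // resolved through aliasIds to the 'mean' item
reducers.selectOptions(['mean']); // { options: [...all items...], current: [the 'mean' option] }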
@@ -1,23 +1,22 @@
import { Threshold } from '../types';

export function getThresholdForValue(
  thresholds: Threshold[],
  value: number | null | string | undefined
): Threshold | null {
  if (thresholds.length === 1) {
    return thresholds[0];
export function getActiveThreshold(value: number, thresholds: Threshold[]): Threshold {
  let active = thresholds[0];
  for (const threshold of thresholds) {
    if (value >= threshold.value) {
      active = threshold;
    } else {
      break;
    }
  }
  return active;
}

  const atThreshold = thresholds.filter(threshold => (value as number) === threshold.value)[0];
  if (atThreshold) {
    return atThreshold;
  }

  const belowThreshold = thresholds.filter(threshold => (value as number) > threshold.value);
  if (belowThreshold.length > 0) {
    const nearestThreshold = belowThreshold.sort((t1: Threshold, t2: Threshold) => t2.value - t1.value)[0];
    return nearestThreshold;
  }

  return null;
/**
 * Sorts the thresholds
 */
export function sortThresholds(thresholds: Threshold[]) {
  return thresholds.sort((t1, t2) => {
    return t1.value - t2.value;
  });
}

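The new getActiveThreshold assumes the thresholds array is sorted ascending (which sortThresholds guarantees) and returns the last entry whose value is less than or equal to the sample, so the -Infinity base threshold always matches. A short illustration with made-up values, not taken from this diff:

const thresholds: Threshold[] = [
  { value: -Infinity, color: 'green' },
  { value: 70, color: 'orange' },
  { value: 90, color: 'red' },
];

getActiveThreshold(50, thresholds); // -> the green base threshold
getActiveThreshold(75, thresholds); // -> orange
getActiveThreshold(95, thresholds); // -> red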
@@ -49,9 +49,9 @@ function addBarGaugeStory(name: string, overrides: Partial<Props>) {
  orientation: VizOrientation.Vertical,
  displayMode: 'basic',
  thresholds: [
    { index: 0, value: -Infinity, color: 'green' },
    { index: 1, value: threshold1Value, color: threshold1Color },
    { index: 1, value: threshold2Value, color: threshold2Color },
    { value: -Infinity, color: 'green' },
    { value: threshold1Value, color: threshold1Color },
    { value: threshold2Value, color: threshold2Color },
  ],
};

@@ -25,11 +25,7 @@ function getProps(propOverrides?: Partial<Props>): Props {
  maxValue: 100,
  minValue: 0,
  displayMode: 'basic',
  thresholds: [
    { index: 0, value: -Infinity, color: 'green' },
    { index: 1, value: 70, color: 'orange' },
    { index: 2, value: 90, color: 'red' },
  ],
  thresholds: [{ value: -Infinity, color: 'green' }, { value: 70, color: 'orange' }, { value: 90, color: 'red' }],
  height: 300,
  width: 300,
  value: {

@@ -7,7 +7,7 @@ import { getColorFromHexRgbOrName } from '../../utils';

// Types
import { DisplayValue, Themeable, VizOrientation } from '../../types';
import { Threshold, TimeSeriesValue, getThresholdForValue } from '@grafana/data';
import { Threshold, TimeSeriesValue, getActiveThreshold } from '@grafana/data';

const MIN_VALUE_HEIGHT = 18;
const MAX_VALUE_HEIGHT = 50;
@@ -87,8 +87,14 @@ export class BarGauge extends PureComponent<Props> {

getCellColor(positionValue: TimeSeriesValue): CellColors {
  const { thresholds, theme, value } = this.props;
  const activeThreshold = getThresholdForValue(thresholds, positionValue);
  if (positionValue === null) {
    return {
      background: 'gray',
      border: 'gray',
    };
  }

  const activeThreshold = getActiveThreshold(positionValue, thresholds);
  if (activeThreshold !== null) {
    const color = getColorFromHexRgbOrName(activeThreshold.color, theme.type);

@@ -474,7 +480,7 @@ export function getBarGradient(props: Props, maxSize: number): string {
export function getValueColor(props: Props): string {
  const { thresholds, theme, value } = props;

  const activeThreshold = getThresholdForValue(thresholds, value.numeric);
  const activeThreshold = getActiveThreshold(value.numeric, thresholds);

  if (activeThreshold !== null) {
    return getColorFromHexRgbOrName(activeThreshold.color, theme.type);

@@ -77,7 +77,7 @@ export class CustomScrollbar extends Component<Props> {
  {...passedProps}
  className={cx(
    css`
      visibility: ${hideTrack ? 'none' : 'visible'};
      visibility: ${hideTrack ? 'hidden' : 'visible'};
    `,
    track
  )}

@@ -14,7 +14,7 @@ const setup = (propOverrides?: object) => {
  minValue: 0,
  showThresholdMarkers: true,
  showThresholdLabels: false,
  thresholds: [{ index: 0, value: -Infinity, color: '#7EB26D' }],
  thresholds: [{ value: -Infinity, color: '#7EB26D' }],
  height: 300,
  width: 300,
  value: {
@@ -48,9 +48,9 @@ describe('Get thresholds formatted', () => {
  it('should get the correct formatted values when thresholds are added', () => {
    const { instance } = setup({
      thresholds: [
        { index: 0, value: -Infinity, color: '#7EB26D' },
        { index: 1, value: 50, color: '#EAB839' },
        { index: 2, value: 75, color: '#6ED0E0' },
        { value: -Infinity, color: '#7EB26D' },
        { value: 50, color: '#EAB839' },
        { value: 75, color: '#6ED0E0' },
      ],
    });

@@ -43,12 +43,12 @@ export class Gauge extends PureComponent<Props> {
  const lastThreshold = thresholds[thresholds.length - 1];

  return [
    ...thresholds.map(threshold => {
      if (threshold.index === 0) {
    ...thresholds.map((threshold, index) => {
      if (index === 0) {
        return { value: minValue, color: getColorFromHexRgbOrName(threshold.color, theme.type) };
      }

      const previousThreshold = thresholds[threshold.index - 1];
      const previousThreshold = thresholds[index - 1];
      return { value: threshold.value, color: getColorFromHexRgbOrName(previousThreshold.color, theme.type) };
    }),
    { value: maxValue, color: getColorFromHexRgbOrName(lastThreshold.color, theme.type) },

@@ -1,6 +1,6 @@
import React, { PureComponent } from 'react';
import classNames from 'classnames';
import { SelectOptionItem } from '../Select/Select';
import { SelectableValue } from '@grafana/data';
import { Tooltip } from '../Tooltip/Tooltip';
import { ButtonSelect } from '../Select/ButtonSelect';

@@ -23,7 +23,7 @@ export class RefreshPicker extends PureComponent<Props> {
  super(props);
}

intervalsToOptions = (intervals: string[] | undefined): Array<SelectOptionItem<string>> => {
intervalsToOptions = (intervals: string[] | undefined): Array<SelectableValue<string>> => {
  const intervalsOrDefault = intervals || defaultIntervals;
  const options = intervalsOrDefault
    .filter(str => str !== '')
@@ -37,7 +37,7 @@ export class RefreshPicker extends PureComponent<Props> {
  return options;
};

onChangeSelect = (item: SelectOptionItem<string>) => {
onChangeSelect = (item: SelectableValue<string>) => {
  const { onIntervalChanged } = this.props;
  if (onIntervalChanged) {
    // @ts-ignore

@@ -4,7 +4,7 @@ import { action } from '@storybook/addon-actions';
import { withKnobs, object, text } from '@storybook/addon-knobs';
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
import { UseState } from '../../utils/storybook/UseState';
import { SelectOptionItem } from './Select';
import { SelectableValue } from '@grafana/data';
import { ButtonSelect } from './ButtonSelect';

const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
@@ -12,9 +12,9 @@ const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
ButtonSelectStories.addDecorator(withCenteredStory).addDecorator(withKnobs);

ButtonSelectStories.add('default', () => {
  const intialState: SelectOptionItem<string> = { label: 'A label', value: 'A value' };
  const value = object<SelectOptionItem<string>>('Selected Value:', intialState);
  const options = object<Array<SelectOptionItem<string>>>('Options:', [
  const intialState: SelectableValue<string> = { label: 'A label', value: 'A value' };
  const value = object<SelectableValue<string>>('Selected Value:', intialState);
  const options = object<Array<SelectableValue<string>>>('Options:', [
    intialState,
    { label: 'Another label', value: 'Another value' },
  ]);

@@ -1,6 +1,7 @@
import React, { PureComponent, ReactElement } from 'react';
import Select, { SelectOptionItem } from './Select';
import Select from './Select';
import { PopperContent } from '../Tooltip/PopperController';
import { SelectableValue } from '@grafana/data';

interface ButtonComponentProps {
  label: ReactElement | string | undefined;
@@ -30,13 +31,13 @@ const ButtonComponent = (buttonProps: ButtonComponentProps) => (props: any) => {

export interface Props<T> {
  className: string | undefined;
  options: Array<SelectOptionItem<T>>;
  value?: SelectOptionItem<T>;
  options: Array<SelectableValue<T>>;
  value?: SelectableValue<T>;
  label?: ReactElement | string;
  iconClass?: string;
  components?: any;
  maxMenuHeight?: number;
  onChange: (item: SelectOptionItem<T>) => void;
  onChange: (item: SelectableValue<T>) => void;
  tooltipContent?: PopperContent<any>;
  isMenuOpen?: boolean;
  onOpenMenu?: () => void;
@@ -45,7 +46,7 @@ export interface Props<T> {
}

export class ButtonSelect<T> extends PureComponent<Props<T>> {
  onChange = (item: SelectOptionItem<T>) => {
  onChange = (item: SelectableValue<T>) => {
    const { onChange } = this.props;
    onChange(item);
  };

@@ -19,23 +19,16 @@ import resetSelectStyles from './resetSelectStyles';
import { CustomScrollbar } from '../CustomScrollbar/CustomScrollbar';
import { PopperContent } from '../Tooltip/PopperController';
import { Tooltip } from '../Tooltip/Tooltip';

export interface SelectOptionItem<T> {
  label?: string;
  value?: T;
  imgUrl?: string;
  description?: string;
  [key: string]: any;
}
import { SelectableValue } from '@grafana/data';

export interface CommonProps<T> {
  defaultValue?: any;
  getOptionLabel?: (item: SelectOptionItem<T>) => string;
  getOptionValue?: (item: SelectOptionItem<T>) => string;
  onChange: (item: SelectOptionItem<T>) => {} | void;
  getOptionLabel?: (item: SelectableValue<T>) => string;
  getOptionValue?: (item: SelectableValue<T>) => string;
  onChange: (item: SelectableValue<T>) => {} | void;
  placeholder?: string;
  width?: number;
  value?: SelectOptionItem<T>;
  value?: SelectableValue<T>;
  className?: string;
  isDisabled?: boolean;
  isSearchable?: boolean;
@@ -57,12 +50,12 @@ export interface CommonProps<T> {
}

export interface SelectProps<T> extends CommonProps<T> {
  options: Array<SelectOptionItem<T>>;
  options: Array<SelectableValue<T>>;
}

interface AsyncProps<T> extends CommonProps<T> {
  defaultOptions: boolean;
  loadOptions: (query: string) => Promise<Array<SelectOptionItem<T>>>;
  loadOptions: (query: string) => Promise<Array<SelectableValue<T>>>;
  loadingMessage?: () => string;
}

@@ -3,11 +3,10 @@ import { interval, Subscription, Subject, of, NEVER } from 'rxjs';
import { tap, switchMap } from 'rxjs/operators';
import _ from 'lodash';

import { stringToMs } from '@grafana/data';
import { stringToMs, SelectableValue } from '@grafana/data';
import { isLive } from '../RefreshPicker/RefreshPicker';
import { SelectOptionItem } from '../Select/Select';

export function getIntervalFromString(strInterval: string): SelectOptionItem<number> {
export function getIntervalFromString(strInterval: string): SelectableValue<number> {
  return {
    label: strInterval,
    value: stringToMs(strInterval),

@@ -8,10 +8,10 @@ import { StatsPicker } from '../StatsPicker/StatsPicker';

// Types
import { FieldDisplayOptions, DEFAULT_FIELD_DISPLAY_VALUES_LIMIT } from '../../utils/fieldDisplay';
import Select, { SelectOptionItem } from '../Select/Select';
import { Field, ReducerID, toNumberString, toIntegerOrUndefined } from '@grafana/data';
import Select from '../Select/Select';
import { Field, ReducerID, toNumberString, toIntegerOrUndefined, SelectableValue } from '@grafana/data';

const showOptions: Array<SelectOptionItem<boolean>> = [
const showOptions: Array<SelectableValue<boolean>> = [
  {
    value: true,
    label: 'All Values',
@@ -31,7 +31,7 @@ export interface Props {
}

export class FieldDisplayEditor extends PureComponent<Props> {
  onShowValuesChange = (item: SelectOptionItem<boolean>) => {
  onShowValuesChange = (item: SelectableValue<boolean>) => {
    const val = item.value === true;
    this.props.onChange({ ...this.props.value, values: val });
  };

@@ -7,8 +7,7 @@ import { FormLabel } from '../FormLabel/FormLabel';
import { UnitPicker } from '../UnitPicker/UnitPicker';

// Types
import { toIntegerOrUndefined, Field } from '@grafana/data';
import { SelectOptionItem } from '../Select/Select';
import { toIntegerOrUndefined, Field, SelectableValue } from '@grafana/data';

import { VAR_SERIES_NAME, VAR_FIELD_NAME, VAR_CALC, VAR_CELL_PREFIX } from '../../utils/fieldDisplay';

@@ -54,7 +53,7 @@ export const FieldPropertiesEditor: React.FC<Props> = ({ value, onChange, showMi
  [value.max, onChange]
);

const onUnitChange = (unit: SelectOptionItem<string>) => {
const onUnitChange = (unit: SelectableValue<string>) => {
  onChange({ ...value, unit: unit.value });
};

@@ -0,0 +1,39 @@
import { sharedSingleStatMigrationCheck } from './SingleStatBaseOptions';

describe('sharedSingleStatMigrationCheck', () => {
  it('from old valueOptions model without pluginVersion', () => {
    const panel = {
      options: {
        valueOptions: {
          unit: 'watt',
          stat: 'last',
          decimals: 5,
        },
        minValue: 10,
        maxValue: 100,
        valueMappings: [{ type: 1, value: '1', text: 'OK' }],
        thresholds: [
          {
            color: 'green',
            index: 0,
            value: null,
          },
          {
            color: 'orange',
            index: 1,
            value: 40,
          },
          {
            color: 'red',
            index: 2,
            value: 80,
          },
        ],
      },
      title: 'Usage',
      type: 'bargauge',
    };

    expect(sharedSingleStatMigrationCheck(panel as any)).toMatchSnapshot();
  });
});
@@ -3,7 +3,7 @@ import omit from 'lodash/omit';

import { VizOrientation, PanelModel } from '../../types/panel';
import { FieldDisplayOptions } from '../../utils/fieldDisplay';
import { Field, getFieldReducers } from '@grafana/data';
import { fieldReducers, Threshold, sortThresholds } from '@grafana/data';

export interface SingleStatBaseOptions {
  fieldOptions: FieldDisplayOptions;
@@ -25,40 +25,99 @@ export const sharedSingleStatOptionsCheck = (
  return options;
};

export const sharedSingleStatMigrationCheck = (panel: PanelModel<SingleStatBaseOptions>) => {
export function sharedSingleStatMigrationCheck(panel: PanelModel<SingleStatBaseOptions>) {
  if (!panel.options) {
    // This happens on the first load or when migrating from angular
    return {};
  }

  // This migration aims to keep the most recent changes up-to-date
  // Plugins should explicitly migrate for known version changes and only use this
  // as a backup
  const old = panel.options as any;
  if (old.valueOptions) {
    const { valueOptions } = old;
  const previousVersion = parseFloat(panel.pluginVersion || '6.1');
  let options = panel.options as any;

  if (previousVersion < 6.2) {
    options = migrateFromValueOptions(options);
  }

  if (previousVersion < 6.3) {
    options = moveThresholdsAndMappingsToField(options);
  }

  return options as SingleStatBaseOptions;
}

export function moveThresholdsAndMappingsToField(old: any) {
  const { fieldOptions } = old;

  if (!fieldOptions) {
    return old;
  }

  const { mappings, thresholds, ...rest } = old.fieldOptions;

  return {
    ...old,
    fieldOptions: {
      ...rest,
      defaults: {
        ...fieldOptions.defaults,
        mappings,
        thresholds: migrateOldThresholds(thresholds),
      },
    },
  };
}

/*
 * Moves valueMappings and thresholds from root to new fieldOptions object
 * Renames valueOptions to defaults and moves it under fieldOptions
 */
export function migrateFromValueOptions(old: any) {
  const { valueOptions } = old;
  if (!valueOptions) {
    return old;
  }

  const fieldOptions: any = {};
  const fieldDefaults: any = {};

  const fieldOptions = (old.fieldOptions = {} as FieldDisplayOptions);
  fieldOptions.mappings = old.valueMappings;
  fieldOptions.thresholds = old.thresholds;
  fieldOptions.defaults = fieldDefaults;

  const field = (fieldOptions.defaults = {} as Field);
  if (valueOptions) {
    field.unit = valueOptions.unit;
    field.decimals = valueOptions.decimals;
  fieldDefaults.unit = valueOptions.unit;
  fieldDefaults.decimals = valueOptions.decimals;

  // Make sure the stats have a valid name
  if (valueOptions.stat) {
    fieldOptions.calcs = getFieldReducers([valueOptions.stat]).map(s => s.id);
    const reducer = fieldReducers.get(valueOptions.stat);
    if (reducer) {
      fieldOptions.calcs = [reducer.id];
    }
  }

  field.min = old.minValue;
  field.max = old.maxValue;
  fieldDefaults.min = old.minValue;
  fieldDefaults.max = old.maxValue;

  // remove old props
  return omit(old, 'valueMappings', 'thresholds', 'valueOptions', 'minValue', 'maxValue');
}

  return panel.options;
  const newOptions = {
    ...old,
    fieldOptions,
  };

  return omit(newOptions, 'valueMappings', 'thresholds', 'valueOptions', 'minValue', 'maxValue');
}

export function migrateOldThresholds(thresholds?: any[]): Threshold[] | undefined {
  if (!thresholds || !thresholds.length) {
    return undefined;
  }
  const copy = thresholds.map(t => {
    return {
      // Drops 'index'
      value: t.value === null ? -Infinity : t.value,
      color: t.color,
    };
  });
  sortThresholds(copy);
  copy[0].value = -Infinity;
  return copy;
}

@@ -0,0 +1,38 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`sharedSingleStatMigrationCheck from old valueOptions model without pluginVersion 1`] = `
|
||||
Object {
|
||||
"fieldOptions": Object {
|
||||
"calcs": Array [
|
||||
"last",
|
||||
],
|
||||
"defaults": Object {
|
||||
"decimals": 5,
|
||||
"mappings": Array [
|
||||
Object {
|
||||
"text": "OK",
|
||||
"type": 1,
|
||||
"value": "1",
|
||||
},
|
||||
],
|
||||
"max": 100,
|
||||
"min": 10,
|
||||
"thresholds": Array [
|
||||
Object {
|
||||
"color": "green",
|
||||
"value": -Infinity,
|
||||
},
|
||||
Object {
|
||||
"color": "orange",
|
||||
"value": 40,
|
||||
},
|
||||
Object {
|
||||
"color": "red",
|
||||
"value": 80,
|
||||
},
|
||||
],
|
||||
"unit": "watt",
|
||||
},
|
||||
},
|
||||
}
|
||||
`;
|
||||
@@ -5,8 +5,7 @@ import difference from 'lodash/difference';
|
||||
|
||||
import { Select } from '../index';
|
||||
|
||||
import { getFieldReducers } from '@grafana/data';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
import { fieldReducers, SelectableValue } from '@grafana/data';
|
||||
|
||||
interface Props {
|
||||
placeholder?: string;
|
||||
@@ -34,7 +33,7 @@ export class StatsPicker extends PureComponent<Props> {
|
||||
checkInput = () => {
|
||||
const { stats, allowMultiple, defaultStat, onChange } = this.props;
|
||||
|
||||
const current = getFieldReducers(stats);
|
||||
const current = fieldReducers.list(stats);
|
||||
if (current.length !== stats.length) {
|
||||
const found = current.map(v => v.id);
|
||||
const notFound = difference(stats, found);
|
||||
@@ -54,7 +53,7 @@ export class StatsPicker extends PureComponent<Props> {
|
||||
}
|
||||
};
|
||||
|
||||
onSelectionChange = (item: SelectOptionItem<string>) => {
|
||||
onSelectionChange = (item: SelectableValue<string>) => {
|
||||
const { onChange } = this.props;
|
||||
if (isArray(item)) {
|
||||
onChange(item.map(v => v.value));
|
||||
@@ -65,24 +64,16 @@ export class StatsPicker extends PureComponent<Props> {
|
||||
|
||||
render() {
|
||||
const { width, stats, allowMultiple, defaultStat, placeholder } = this.props;
|
||||
const options = getFieldReducers().map(s => {
|
||||
return {
|
||||
value: s.id,
|
||||
label: s.name,
|
||||
description: s.description,
|
||||
};
|
||||
});
|
||||
|
||||
const value: Array<SelectOptionItem<string>> = options.filter(option => stats.find(stat => option.value === stat));
|
||||
|
||||
const select = fieldReducers.selectOptions(stats);
|
||||
return (
|
||||
<Select
|
||||
width={width}
|
||||
value={value}
|
||||
value={select.current}
|
||||
isClearable={!defaultStat}
|
||||
isMulti={allowMultiple}
|
||||
isSearchable={true}
|
||||
options={options}
|
||||
options={select.options}
|
||||
placeholder={placeholder}
|
||||
onChange={this.onSelectionChange}
|
||||
/>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { ChangeEvent } from 'react';
|
||||
import { mount } from 'enzyme';
|
||||
import { ThresholdsEditor, Props } from './ThresholdsEditor';
|
||||
import { ThresholdsEditor, Props, threshodsWithoutKey } from './ThresholdsEditor';
|
||||
import { colors } from '../../utils';
|
||||
|
||||
const setup = (propOverrides?: Partial<Props>) => {
|
||||
@@ -20,6 +20,10 @@ const setup = (propOverrides?: Partial<Props>) => {
|
||||
};
|
||||
};
|
||||
|
||||
function getCurrentThresholds(editor: ThresholdsEditor) {
|
||||
return threshodsWithoutKey(editor.state.thresholds);
|
||||
}
|
||||
|
||||
describe('Render', () => {
|
||||
it('should render with base threshold', () => {
|
||||
const { wrapper } = setup();
|
||||
@@ -32,60 +36,55 @@ describe('Initialization', () => {
|
||||
it('should add a base threshold if missing', () => {
|
||||
const { instance } = setup();
|
||||
|
||||
expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
|
||||
expect(getCurrentThresholds(instance)).toEqual([{ value: -Infinity, color: colors[0] }]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Add threshold', () => {
|
||||
it('should not add threshold at index 0', () => {
|
||||
const { instance } = setup();
|
||||
|
||||
instance.onAddThreshold(0);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
|
||||
});
|
||||
|
||||
it('should add threshold', () => {
|
||||
const { instance } = setup();
|
||||
|
||||
instance.onAddThreshold(1);
|
||||
instance.onAddThresholdAfter(instance.state.thresholds[0]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: colors[0] }, // 0
|
||||
{ value: 50, color: colors[2] }, // 1
|
||||
]);
|
||||
});
|
||||
|
||||
it('should add another threshold above a first', () => {
|
||||
const { instance } = setup({
|
||||
thresholds: [{ index: 0, value: -Infinity, color: colors[0] }, { index: 1, value: 50, color: colors[2] }],
|
||||
thresholds: [
|
||||
{ value: -Infinity, color: colors[0] }, // 0
|
||||
{ value: 50, color: colors[2] }, // 1
|
||||
],
|
||||
});
|
||||
|
||||
instance.onAddThreshold(2);
|
||||
instance.onAddThresholdAfter(instance.state.thresholds[1]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
{ index: 2, value: 75, color: colors[3] },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: colors[0] }, // 0
|
||||
{ value: 50, color: colors[2] }, // 1
|
||||
{ value: 75, color: colors[3] }, // 2
|
||||
]);
|
||||
});
|
||||
|
||||
it('should add another threshold between first and second index', () => {
|
||||
const { instance } = setup({
|
||||
thresholds: [
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
{ index: 2, value: 75, color: colors[3] },
|
||||
{ value: -Infinity, color: colors[0] },
|
||||
{ value: 50, color: colors[2] },
|
||||
{ value: 75, color: colors[3] },
|
||||
],
|
||||
});
|
||||
|
||||
instance.onAddThreshold(2);
|
||||
instance.onAddThresholdAfter(instance.state.thresholds[1]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
{ index: 2, value: 62.5, color: colors[4] },
|
||||
{ index: 3, value: 75, color: colors[3] },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: colors[0] },
|
||||
{ value: 50, color: colors[2] },
|
||||
{ value: 62.5, color: colors[4] },
|
||||
{ value: 75, color: colors[3] },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -93,30 +92,30 @@ describe('Add threshold', () => {
|
||||
describe('Remove threshold', () => {
|
||||
it('should not remove threshold at index 0', () => {
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
const { instance } = setup({ thresholds });
|
||||
|
||||
instance.onRemoveThreshold(thresholds[0]);
|
||||
instance.onRemoveThreshold(instance.state.thresholds[0]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual(thresholds);
|
||||
expect(getCurrentThresholds(instance)).toEqual(thresholds);
|
||||
});
|
||||
|
||||
it('should remove threshold', () => {
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
const { instance } = setup({ thresholds });
|
||||
|
||||
instance.onRemoveThreshold(thresholds[1]);
|
||||
instance.onRemoveThreshold(instance.state.thresholds[1]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 75, color: '#6ED0E0' },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -124,25 +123,25 @@ describe('Remove threshold', () => {
|
||||
describe('change threshold value', () => {
|
||||
it('should not change threshold at index 0', () => {
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
const { instance } = setup({ thresholds });
|
||||
|
||||
const mockEvent = ({ target: { value: '12' } } as any) as ChangeEvent<HTMLInputElement>;
|
||||
|
||||
instance.onChangeThresholdValue(mockEvent, thresholds[0]);
|
||||
instance.onChangeThresholdValue(mockEvent, instance.state.thresholds[0]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual(thresholds);
|
||||
expect(getCurrentThresholds(instance)).toEqual(thresholds);
|
||||
});
|
||||
|
||||
it('should update value', () => {
|
||||
const { instance } = setup();
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D', key: 1 },
|
||||
{ value: 50, color: '#EAB839', key: 2 },
|
||||
{ value: 75, color: '#6ED0E0', key: 3 },
|
||||
];
|
||||
|
||||
instance.state = {
|
||||
@@ -153,10 +152,10 @@ describe('change threshold value', () => {
|
||||
|
||||
instance.onChangeThresholdValue(mockEvent, thresholds[1]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 78, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 78, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -165,9 +164,9 @@ describe('on blur threshold value', () => {
|
||||
it('should resort rows and update indexes', () => {
|
||||
const { instance } = setup();
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 78, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D', key: 1 },
|
||||
{ value: 78, color: '#EAB839', key: 2 },
|
||||
{ value: 75, color: '#6ED0E0', key: 3 },
|
||||
];
|
||||
|
||||
instance.setState({
|
||||
@@ -176,10 +175,10 @@ describe('on blur threshold value', () => {
|
||||
|
||||
instance.onBlur();
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 75, color: '#6ED0E0' },
|
||||
{ index: 2, value: 78, color: '#EAB839' },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
{ value: 78, color: '#EAB839' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import React, { PureComponent, ChangeEvent } from 'react';
|
||||
import { Threshold } from '@grafana/data';
|
||||
import { Threshold, sortThresholds } from '@grafana/data';
|
||||
import { colors } from '../../utils';
|
||||
import { ThemeContext } from '../../themes';
|
||||
import { getColorFromHexRgbOrName } from '../../utils';
|
||||
@@ -13,115 +13,121 @@ export interface Props {
|
||||
}
|
||||
|
||||
interface State {
|
||||
thresholds: Threshold[];
|
||||
thresholds: ThresholdWithKey[];
|
||||
}
|
||||
|
||||
interface ThresholdWithKey extends Threshold {
|
||||
key: number;
|
||||
}
|
||||
|
||||
let counter = 100;
|
||||
|
||||
export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
constructor(props: Props) {
|
||||
super(props);
|
||||
|
||||
const addDefaultThreshold = this.props.thresholds.length === 0;
|
||||
const thresholds: Threshold[] = addDefaultThreshold
|
||||
? [{ index: 0, value: -Infinity, color: colors[0] }]
|
||||
: props.thresholds;
|
||||
const thresholds = props.thresholds
|
||||
? props.thresholds.map(t => {
|
||||
return {
|
||||
color: t.color,
|
||||
value: t.value === null ? -Infinity : t.value,
|
||||
key: counter++,
|
||||
};
|
||||
})
|
||||
: ([] as ThresholdWithKey[]);
|
||||
|
||||
let needsCallback = false;
|
||||
if (!thresholds.length) {
|
||||
thresholds.push({ value: -Infinity, color: colors[0], key: counter++ });
|
||||
needsCallback = true;
|
||||
} else {
|
||||
// First value is always base
|
||||
thresholds[0].value = -Infinity;
|
||||
}
|
||||
|
||||
// Update the state
|
||||
this.state = { thresholds };
|
||||
|
||||
if (addDefaultThreshold) {
|
||||
if (needsCallback) {
|
||||
this.onChange();
|
||||
}
|
||||
}
|
||||
|
||||
onAddThreshold = (index: number) => {
|
||||
onAddThresholdAfter = (threshold: ThresholdWithKey) => {
|
||||
const { thresholds } = this.state;
|
||||
|
||||
const maxValue = 100;
|
||||
const minValue = 0;
|
||||
|
||||
if (index === 0) {
|
||||
return;
|
||||
let prev: ThresholdWithKey | undefined = undefined;
|
||||
let next: ThresholdWithKey | undefined = undefined;
|
||||
for (const t of thresholds) {
|
||||
if (prev && prev.key === threshold.key) {
|
||||
next = t;
|
||||
break;
|
||||
}
|
||||
prev = t;
|
||||
}
|
||||
|
||||
const newThresholds = thresholds.map(threshold => {
|
||||
if (threshold.index >= index) {
|
||||
const index = threshold.index + 1;
|
||||
threshold = { ...threshold, index };
|
||||
}
|
||||
return threshold;
|
||||
});
|
||||
const prevValue = prev && isFinite(prev.value) ? prev.value : minValue;
|
||||
const nextValue = next && isFinite(next.value) ? next.value : maxValue;
|
||||
|
||||
// Setting value to a value between the previous thresholds
|
||||
const beforeThreshold = newThresholds.filter(t => t.index === index - 1 && t.index !== 0)[0];
|
||||
const afterThreshold = newThresholds.filter(t => t.index === index + 1 && t.index !== 0)[0];
|
||||
const beforeThresholdValue = beforeThreshold !== undefined ? beforeThreshold.value : minValue;
|
||||
const afterThresholdValue = afterThreshold !== undefined ? afterThreshold.value : maxValue;
|
||||
const value = afterThresholdValue - (afterThresholdValue - beforeThresholdValue) / 2;
|
||||
|
||||
// Set a color
|
||||
const color = colors.filter(c => !newThresholds.some(t => t.color === c))[1];
|
||||
const color = colors.filter(c => !thresholds.some(t => t.color === c))[1];
|
||||
const add = {
|
||||
value: prevValue + (nextValue - prevValue) / 2.0,
|
||||
color: color,
|
||||
key: counter++,
|
||||
};
|
||||
const newThresholds = [...thresholds, add];
|
||||
sortThresholds(newThresholds);
|
||||
|
||||
this.setState(
|
||||
{
|
||||
thresholds: this.sortThresholds([
|
||||
...newThresholds,
|
||||
{
|
||||
color,
|
||||
index,
|
||||
value: value as number,
|
||||
},
|
||||
]),
|
||||
thresholds: newThresholds,
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onRemoveThreshold = (threshold: Threshold) => {
|
||||
if (threshold.index === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.setState(
|
||||
prevState => {
|
||||
const newThresholds = prevState.thresholds.map(t => {
|
||||
if (t.index > threshold.index) {
|
||||
const index = t.index - 1;
|
||||
t = { ...t, index };
|
||||
}
|
||||
return t;
|
||||
});
|
||||
|
||||
return {
|
||||
thresholds: newThresholds.filter(t => t !== threshold),
|
||||
};
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: Threshold) => {
|
||||
if (threshold.index === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
onRemoveThreshold = (threshold: ThresholdWithKey) => {
|
||||
const { thresholds } = this.state;
|
||||
if (!thresholds.length) {
|
||||
return;
|
||||
}
|
||||
// Don't remove index 0
|
||||
if (threshold.key === thresholds[0].key) {
|
||||
return;
|
||||
}
|
||||
this.setState(
|
||||
{
|
||||
thresholds: thresholds.filter(t => t.key !== threshold.key),
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: ThresholdWithKey) => {
|
||||
const cleanValue = event.target.value.replace(/,/g, '.');
|
||||
const parsedValue = parseFloat(cleanValue);
|
||||
const value = isNaN(parsedValue) ? '' : parsedValue;
|
||||
|
||||
const newThresholds = thresholds.map(t => {
|
||||
if (t === threshold && t.index !== 0) {
|
||||
const thresholds = this.state.thresholds.map(t => {
|
||||
if (t.key === threshold.key) {
|
||||
t = { ...t, value: value as number };
|
||||
}
|
||||
|
||||
return t;
|
||||
});
|
||||
|
||||
this.setState({ thresholds: newThresholds });
|
||||
if (thresholds.length) {
|
||||
thresholds[0].value = -Infinity;
|
||||
}
|
||||
this.setState({ thresholds });
|
||||
};
|
||||
|
||||
onChangeThresholdColor = (threshold: Threshold, color: string) => {
|
||||
onChangeThresholdColor = (threshold: ThresholdWithKey, color: string) => {
|
||||
const { thresholds } = this.state;
|
||||
|
||||
const newThresholds = thresholds.map(t => {
|
||||
if (t === threshold) {
|
||||
if (t.key === threshold.key) {
|
||||
t = { ...t, color: color };
|
||||
}
|
||||
|
||||
@@ -137,30 +143,22 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
};
|
||||
|
||||
onBlur = () => {
|
||||
this.setState(prevState => {
|
||||
const sortThresholds = this.sortThresholds([...prevState.thresholds]);
|
||||
let index = 0;
|
||||
sortThresholds.forEach(t => {
|
||||
t.index = index++;
|
||||
});
|
||||
|
||||
return { thresholds: sortThresholds };
|
||||
});
|
||||
|
||||
this.onChange();
|
||||
const thresholds = [...this.state.thresholds];
|
||||
sortThresholds(thresholds);
|
||||
this.setState(
|
||||
{
|
||||
thresholds,
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onChange = () => {
|
||||
this.props.onChange(this.state.thresholds);
|
||||
const { thresholds } = this.state;
|
||||
this.props.onChange(threshodsWithoutKey(thresholds));
|
||||
};
|
||||
|
||||
sortThresholds = (thresholds: Threshold[]) => {
|
||||
return thresholds.sort((t1, t2) => {
|
||||
return t1.value - t2.value;
|
||||
});
|
||||
};
|
||||
|
||||
renderInput = (threshold: Threshold) => {
|
||||
renderInput = (threshold: ThresholdWithKey) => {
|
||||
return (
|
||||
<div className="thresholds-row-input-inner">
|
||||
<span className="thresholds-row-input-inner-arrow" />
|
||||
@@ -175,12 +173,11 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{threshold.index === 0 && (
|
||||
{!isFinite(threshold.value) ? (
|
||||
<div className="thresholds-row-input-inner-value">
|
||||
<Input type="text" value="Base" readOnly />
|
||||
</div>
|
||||
)}
|
||||
{threshold.index > 0 && (
|
||||
) : (
|
||||
<>
|
||||
<div className="thresholds-row-input-inner-value">
|
||||
<Input
|
||||
@@ -189,7 +186,6 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
onChange={(event: ChangeEvent<HTMLInputElement>) => this.onChangeThresholdValue(event, threshold)}
|
||||
value={threshold.value}
|
||||
onBlur={this.onBlur}
|
||||
readOnly={threshold.index === 0}
|
||||
/>
|
||||
</div>
|
||||
<div className="thresholds-row-input-inner-remove" onClick={() => this.onRemoveThreshold(threshold)}>
|
||||
@@ -212,13 +208,10 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
{thresholds
|
||||
.slice(0)
|
||||
.reverse()
|
||||
.map((threshold, index) => {
|
||||
.map(threshold => {
|
||||
return (
|
||||
<div className="thresholds-row" key={`${threshold.index}-${index}`}>
|
||||
<div
|
||||
className="thresholds-row-add-button"
|
||||
onClick={() => this.onAddThreshold(threshold.index + 1)}
|
||||
>
|
||||
<div className="thresholds-row" key={`${threshold.key}`}>
|
||||
<div className="thresholds-row-add-button" onClick={() => this.onAddThresholdAfter(threshold)}>
|
||||
<i className="fa fa-plus" />
|
||||
</div>
|
||||
<div
|
||||
@@ -237,3 +230,10 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function threshodsWithoutKey(thresholds: ThresholdWithKey[]): Threshold[] {
|
||||
return thresholds.map(t => {
|
||||
const { key, ...rest } = t;
|
||||
return rest; // everything except key
|
||||
});
|
||||
}
|
||||
|
||||
@@ -9,7 +9,6 @@ exports[`Render should render with base threshold 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"color": "#7EB26D",
|
||||
"index": 0,
|
||||
"value": -Infinity,
|
||||
},
|
||||
],
|
||||
@@ -48,7 +47,7 @@ exports[`Render should render with base threshold 1`] = `
|
||||
>
|
||||
<div
|
||||
className="thresholds-row"
|
||||
key="0-0"
|
||||
key="100"
|
||||
>
|
||||
<div
|
||||
className="thresholds-row-add-button"
|
||||
|
||||
@@ -8,13 +8,13 @@ import { TimePickerPopover } from './TimePickerPopover';
|
||||
import { ClickOutsideWrapper } from '../ClickOutsideWrapper/ClickOutsideWrapper';
|
||||
|
||||
// Utils & Services
|
||||
import { isDateTime } from '@grafana/data';
|
||||
import { isDateTime, DateTime } from '@grafana/data';
|
||||
import { rangeUtil } from '@grafana/data';
|
||||
import { rawToTimeRange } from './time';
|
||||
|
||||
// Types
|
||||
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT } from '@grafana/data';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT, SelectableValue } from '@grafana/data';
|
||||
import { isMathString } from '@grafana/data/src/utils/datemath';
|
||||
|
||||
export interface Props {
|
||||
value: TimeRange;
|
||||
@@ -77,7 +77,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
isCustomOpen: false,
|
||||
};
|
||||
|
||||
mapTimeOptionsToSelectOptionItems = (selectOptions: TimeOption[]) => {
|
||||
mapTimeOptionsToSelectableValues = (selectOptions: TimeOption[]) => {
|
||||
const options = selectOptions.map(timeOption => {
|
||||
return {
|
||||
label: timeOption.display,
|
||||
@@ -93,7 +93,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
return options;
|
||||
};
|
||||
|
||||
onSelectChanged = (item: SelectOptionItem<TimeOption>) => {
|
||||
onSelectChanged = (item: SelectableValue<TimeOption>) => {
|
||||
const { onChange, timeZone } = this.props;
|
||||
|
||||
if (item.value && item.value.from === 'custom') {
|
||||
@@ -122,15 +122,23 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
render() {
|
||||
const { selectOptions: selectTimeOptions, value, onMoveBackward, onMoveForward, onZoom, timeZone } = this.props;
|
||||
const { isCustomOpen } = this.state;
|
||||
const options = this.mapTimeOptionsToSelectOptionItems(selectTimeOptions);
|
||||
const options = this.mapTimeOptionsToSelectableValues(selectTimeOptions);
|
||||
const currentOption = options.find(item => isTimeOptionEqualToTimeRange(item.value, value));
|
||||
const rangeString = rangeUtil.describeTimeRange(value.raw);
|
||||
|
||||
const isUTC = timeZone === 'utc';
|
||||
|
||||
const adjustedTime = (time: DateTime) => (isUTC ? time.utc() : time.local()) || null;
|
||||
const adjustedTimeRange = {
|
||||
to: isMathString(value.raw.to) ? value.raw.to : adjustedTime(value.to),
|
||||
from: isMathString(value.raw.from) ? value.raw.from : adjustedTime(value.from),
|
||||
};
|
||||
const rangeString = rangeUtil.describeTimeRange(adjustedTimeRange);
|
||||
|
||||
const label = (
|
||||
<>
|
||||
{isCustomOpen && <span>Custom time range</span>}
|
||||
{!isCustomOpen && <span>{rangeString}</span>}
|
||||
{timeZone === 'utc' && <span className="time-picker-utc">UTC</span>}
|
||||
{isUTC && <span className="time-picker-utc">UTC</span>}
|
||||
</>
|
||||
);
|
||||
const isAbsolute = isDateTime(value.raw.to);
|
||||
@@ -148,6 +156,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
value={currentOption}
|
||||
label={label}
|
||||
options={options}
|
||||
maxMenuHeight={600}
|
||||
onChange={this.onSelectChanged}
|
||||
iconClass={'fa fa-clock-o fa-fw'}
|
||||
tooltipContent={<TimePickerTooltipContent timeRange={value} />}
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
|
||||
.time-picker-popover {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: space-around;
|
||||
border: 1px solid $popover-border-color;
|
||||
border-radius: $border-radius;
|
||||
@@ -31,6 +30,7 @@
|
||||
max-width: 600px;
|
||||
top: 41px;
|
||||
right: 0px;
|
||||
}
|
||||
|
||||
.time-picker-popover-body {
|
||||
display: flex;
|
||||
@@ -66,7 +66,6 @@
|
||||
justify-content: center;
|
||||
padding: $space-md;
|
||||
}
|
||||
}
|
||||
|
||||
.time-picker-popover-header {
|
||||
background: $popover-header-bg;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
export { DeleteButton } from './DeleteButton/DeleteButton';
|
||||
export { Tooltip } from './Tooltip/Tooltip';
|
||||
export { PopperController } from './Tooltip/PopperController';
|
||||
export { PopperController, PopperContent } from './Tooltip/PopperController';
|
||||
export { Popper } from './Tooltip/Popper';
|
||||
export { Portal } from './Portal/Portal';
|
||||
export { CustomScrollbar } from './CustomScrollbar/CustomScrollbar';
|
||||
@@ -9,7 +9,7 @@ export * from './Button/Button';
|
||||
export { ButtonVariant } from './Button/AbstractButton';
|
||||
|
||||
// Select
|
||||
export { Select, AsyncSelect, SelectOptionItem } from './Select/Select';
|
||||
export { Select, AsyncSelect } from './Select/Select';
|
||||
export { IndicatorsContainer } from './Select/IndicatorsContainer';
|
||||
export { NoOptionsMessage } from './Select/NoOptionsMessage';
|
||||
export { default as resetSelectStyles } from './Select/resetSelectStyles';
|
||||
|
||||
@@ -77,6 +77,13 @@ interface PluginMetaInfoLink {
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface PluginBuildInfo {
|
||||
time?: number;
|
||||
repo?: string;
|
||||
branch?: string;
|
||||
hash?: string;
|
||||
}
|
||||
|
||||
export interface PluginMetaInfo {
|
||||
author: {
|
||||
name: string;
|
||||
@@ -88,6 +95,7 @@ export interface PluginMetaInfo {
|
||||
large: string;
|
||||
small: string;
|
||||
};
|
||||
build?: PluginBuildInfo;
|
||||
screenshots: any[];
|
||||
updated: string;
|
||||
version: string;
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
export const deprecationWarning = (file: string, oldName: string, newName: string) => {
|
||||
const message = `[Deprecation warning] ${file}: ${oldName} is deprecated. Use ${newName} instead`;
|
||||
export const deprecationWarning = (file: string, oldName: string, newName?: string) => {
|
||||
let message = `[Deprecation warning] ${file}: ${oldName} is deprecated`;
|
||||
if (newName) {
|
||||
message += `. Use ${newName} instead`;
|
||||
}
|
||||
console.warn(message);
|
||||
};
|
||||
|
||||
@@ -103,7 +103,7 @@ describe('Format value', () => {
|
||||
it('should return if value isNaN', () => {
|
||||
const valueMappings: ValueMapping[] = [];
|
||||
const value = 'N/A';
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings });
|
||||
const instance = getDisplayProcessor({ field: { mappings: valueMappings } });
|
||||
|
||||
const result = instance(value);
|
||||
|
||||
@@ -114,7 +114,7 @@ describe('Format value', () => {
|
||||
const valueMappings: ValueMapping[] = [];
|
||||
const value = '6';
|
||||
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
||||
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||
|
||||
const result = instance(value);
|
||||
|
||||
@@ -127,7 +127,7 @@ describe('Format value', () => {
|
||||
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
|
||||
];
|
||||
const value = '10';
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
||||
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||
|
||||
const result = instance(value);
|
||||
|
||||
@@ -160,7 +160,7 @@ describe('Format value', () => {
|
||||
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
|
||||
];
|
||||
const value = '11';
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
||||
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||
|
||||
expect(instance(value).text).toEqual('1-20');
|
||||
});
|
||||
|
||||
@@ -7,16 +7,13 @@ import { getColorFromHexRgbOrName } from './namedColorsPalette';
|
||||
|
||||
// Types
|
||||
import { DecimalInfo, DisplayValue, GrafanaTheme, GrafanaThemeType, DecimalCount } from '../types';
|
||||
import { DateTime, dateTime, Threshold, ValueMapping, getMappedValue, Field } from '@grafana/data';
|
||||
import { DateTime, dateTime, Threshold, getMappedValue, Field } from '@grafana/data';
|
||||
|
||||
export type DisplayProcessor = (value: any) => DisplayValue;
|
||||
|
||||
export interface DisplayValueOptions {
|
||||
field?: Partial<Field>;
|
||||
|
||||
mappings?: ValueMapping[];
|
||||
thresholds?: Threshold[];
|
||||
|
||||
// Alternative to empty string
|
||||
noValue?: string;
|
||||
|
||||
@@ -31,7 +28,8 @@ export function getDisplayProcessor(options?: DisplayValueOptions): DisplayProce
|
||||
const formatFunc = getValueFormat(field.unit || 'none');
|
||||
|
||||
return (value: any) => {
|
||||
const { mappings, thresholds, theme } = options;
|
||||
const { theme } = options;
|
||||
const { mappings, thresholds } = field;
|
||||
let color;
|
||||
|
||||
let text = _.toString(value);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { getFieldProperties, getFieldDisplayValues, GetFieldDisplayValuesOptions } from './fieldDisplay';
|
||||
import { FieldType, ReducerID } from '@grafana/data';
|
||||
import { FieldType, ReducerID, Threshold } from '@grafana/data';
|
||||
import { GrafanaThemeType } from '../types/theme';
|
||||
import { getTheme } from '../themes/index';
|
||||
|
||||
@@ -55,8 +55,6 @@ describe('FieldDisplay', () => {
|
||||
},
|
||||
fieldOptions: {
|
||||
calcs: [],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
@@ -68,8 +66,6 @@ describe('FieldDisplay', () => {
|
||||
...options,
|
||||
fieldOptions: {
|
||||
calcs: [ReducerID.first],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {
|
||||
title: '$__cell_0 * $__field_name * $__series_name',
|
||||
@@ -88,8 +84,6 @@ describe('FieldDisplay', () => {
|
||||
...options,
|
||||
fieldOptions: {
|
||||
calcs: [ReducerID.last],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
@@ -104,8 +98,6 @@ describe('FieldDisplay', () => {
|
||||
values: true, //
|
||||
limit: 1000,
|
||||
calcs: [],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
@@ -120,12 +112,53 @@ describe('FieldDisplay', () => {
|
||||
values: true, //
|
||||
limit: 2,
|
||||
calcs: [],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
});
|
||||
expect(display.map(v => v.display.numeric)).toEqual([1, 3]); // First 2 are from the first field
|
||||
});
|
||||
|
||||
it('should restore -Infinity value for base threshold', () => {
|
||||
const field = getFieldProperties({
|
||||
thresholds: [
|
||||
({
|
||||
color: '#73BF69',
|
||||
value: null,
|
||||
} as unknown) as Threshold,
|
||||
{
|
||||
color: '#F2495C',
|
||||
value: 50,
|
||||
},
|
||||
],
|
||||
});
|
||||
expect(field.thresholds!.length).toEqual(2);
|
||||
expect(field.thresholds![0].value).toBe(-Infinity);
|
||||
});
|
||||
|
||||
it('Should return field thresholds when there is no data', () => {
|
||||
const options: GetFieldDisplayValuesOptions = {
|
||||
data: [
|
||||
{
|
||||
name: 'No data',
|
||||
fields: [],
|
||||
rows: [],
|
||||
},
|
||||
],
|
||||
replaceVariables: (value: string) => {
|
||||
return value;
|
||||
},
|
||||
fieldOptions: {
|
||||
calcs: [],
|
||||
override: {},
|
||||
defaults: {
|
||||
thresholds: [{ color: '#F2495C', value: 50 }],
|
||||
},
|
||||
},
|
||||
theme: getTheme(GrafanaThemeType.Dark),
|
||||
};
|
||||
|
||||
const display = getFieldDisplayValues(options);
|
||||
expect(display[0].field.thresholds!.length).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,16 +4,7 @@ import toString from 'lodash/toString';
|
||||
import { DisplayValue, GrafanaTheme, InterpolateFunction, ScopedVars, GraphSeriesValue } from '../types/index';
|
||||
import { getDisplayProcessor } from './displayValue';
|
||||
import { getFlotPairs } from './flotPairs';
|
||||
import {
|
||||
ValueMapping,
|
||||
Threshold,
|
||||
ReducerID,
|
||||
reduceField,
|
||||
FieldType,
|
||||
NullValueMode,
|
||||
DataFrame,
|
||||
Field,
|
||||
} from '@grafana/data';
|
||||
import { ReducerID, reduceField, FieldType, NullValueMode, DataFrame, Field } from '@grafana/data';
|
||||
|
||||
export interface FieldDisplayOptions {
|
||||
values?: boolean; // If true show each row value
|
||||
@@ -22,10 +13,6 @@ export interface FieldDisplayOptions {
|
||||
|
||||
defaults: Partial<Field>; // Use these values unless otherwise stated
|
||||
override: Partial<Field>; // Set these values regardless of the source
|
||||
|
||||
// Could these be data driven also?
|
||||
thresholds: Threshold[];
|
||||
mappings: ValueMapping[];
|
||||
}
|
||||
|
||||
export const VAR_SERIES_NAME = '__series_name';
|
||||
@@ -127,8 +114,6 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
|
||||
const display = getDisplayProcessor({
|
||||
field,
|
||||
mappings: fieldOptions.mappings,
|
||||
thresholds: fieldOptions.thresholds,
|
||||
theme: options.theme,
|
||||
});
|
||||
|
||||
@@ -197,7 +182,10 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
|
||||
if (values.length === 0) {
|
||||
values.push({
|
||||
field: { name: 'No Data' },
|
||||
field: {
|
||||
...defaults,
|
||||
name: 'No Data',
|
||||
},
|
||||
display: {
|
||||
numeric: 0,
|
||||
text: 'No data',
|
||||
@@ -259,10 +247,16 @@ type PartialField = Partial<Field>;
|
||||
|
||||
export function getFieldProperties(...props: PartialField[]): Field {
|
||||
let field = props[0] as Field;
|
||||
|
||||
for (let i = 1; i < props.length; i++) {
|
||||
field = applyFieldProperties(field, props[i]);
|
||||
}
|
||||
|
||||
// First value is always -Infinity
|
||||
if (field.thresholds && field.thresholds.length) {
|
||||
field.thresholds[0].value = -Infinity;
|
||||
}
|
||||
|
||||
// Verify that max > min
|
||||
if (field.hasOwnProperty('min') && field.hasOwnProperty('max') && field.min! > field.max!) {
|
||||
return {
|
||||
|
||||
@@ -1,4 +1,4 @@
ARG BASE_IMAGE=ubuntu:latest
ARG BASE_IMAGE=ubuntu:18.04
FROM ${BASE_IMAGE}

ARG GRAFANA_TGZ="grafana-latest.linux-x64.tar.gz"
@@ -12,7 +12,7 @@ COPY ${GRAFANA_TGZ} /tmp/grafana.tar.gz
# Change to tar xfzv to make tar print every file it extracts
RUN mkdir /tmp/grafana && tar xfz /tmp/grafana.tar.gz --strip-components=1 -C /tmp/grafana

ARG BASE_IMAGE=ubuntu:latest
ARG BASE_IMAGE=ubuntu:18.04
FROM ${BASE_IMAGE}

ARG GF_UID="472"

@@ -59,14 +59,16 @@ docker_tag_all () {
  fi
}

docker_build "ubuntu:latest" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
docker_build "ubuntu:18.04" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
if [ $BUILD_FAST = "0" ]; then
  docker_build "arm32v7/ubuntu:latest" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
  docker_build "arm64v8/ubuntu:latest" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
  docker_build "arm32v7/ubuntu:18.04" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
  docker_build "arm64v8/ubuntu:18.04" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
fi
# Tag as 'latest' for official release; otherwise tag as grafana/grafana:master
if echo "$_grafana_tag" | grep -q "^v"; then
  docker_tag_all "${_docker_repo}" "latest"
  # Create the expected tag for running the end to end tests successfully
  docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_tag}"
else
  docker_tag_all "${_docker_repo}" "master"
  docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_version}"

@@ -38,8 +38,14 @@ if echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -vq "beta"
  echo "pushing ${_docker_repo}:latest"
  docker_push_all "${_docker_repo}" "latest"
  docker_push_all "${_docker_repo}" "${_grafana_version}"
  # Push to the grafana-dev repository with the expected tag
  # for running the end to end tests successfully
  docker push "grafana/grafana-dev:${_grafana_tag}"
elif echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -q "beta"; then
  docker_push_all "${_docker_repo}" "${_grafana_version}"
  # Push to the grafana-dev repository with the expected tag
  # for running the end to end tests successfully
  docker push "grafana/grafana-dev:${_grafana_tag}"
elif echo "$_grafana_tag" | grep -q "master"; then
  docker_push_all "${_docker_repo}" "master"
  docker push "grafana/grafana-dev:${_grafana_version}"

@@ -34,7 +34,7 @@ func AdminCreateUser(c *models.ReqContext, form dtos.AdminCreateUserForm) {
|
||||
return
|
||||
}
|
||||
|
||||
metrics.M_Api_Admin_User_Create.Inc()
|
||||
metrics.MApiAdminUserCreate.Inc()
|
||||
|
||||
user := cmd.Result
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
reqEditorRole := middleware.ReqEditorRole
|
||||
reqOrgAdmin := middleware.ReqOrgAdmin
|
||||
reqCanAccessTeams := middleware.AdminOrFeatureEnabled(hs.Cfg.EditorsCanAdmin)
|
||||
reqSnapshotPublicModeOrSignedIn := middleware.SnapshotPublicModeOrSignedIn()
|
||||
redirectFromLegacyDashboardURL := middleware.RedirectFromLegacyDashboardURL()
|
||||
redirectFromLegacyDashboardSoloURL := middleware.RedirectFromLegacyDashboardSoloURL()
|
||||
quota := middleware.Quota(hs.QuotaService)
|
||||
@@ -104,13 +105,6 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
r.Get("/dashboard/snapshot/*", hs.Index)
|
||||
r.Get("/dashboard/snapshots/", reqSignedIn, hs.Index)
|
||||
|
||||
// api for dashboard snapshots
|
||||
r.Post("/api/snapshots/", bind(models.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot)
|
||||
r.Get("/api/snapshot/shared-options/", GetSharingOptions)
|
||||
r.Get("/api/snapshots/:key", GetDashboardSnapshot)
|
||||
r.Get("/api/snapshots-delete/:deleteKey", Wrap(DeleteDashboardSnapshotByDeleteKey))
|
||||
r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot))
|
||||
|
||||
// api renew session based on cookie
|
||||
r.Get("/api/login/ping", quota("session"), Wrap(hs.LoginAPIPing))
|
||||
|
||||
@@ -413,4 +407,11 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
|
||||
// streams
|
||||
//r.Post("/api/streams/push", reqSignedIn, bind(dtos.StreamMessage{}), liveConn.PushToStream)
|
||||
|
||||
// Snapshots
|
||||
r.Post("/api/snapshots/", reqSnapshotPublicModeOrSignedIn, bind(models.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot)
|
||||
r.Get("/api/snapshot/shared-options/", reqSignedIn, GetSharingOptions)
|
||||
r.Get("/api/snapshots/:key", GetDashboardSnapshot)
|
||||
r.Get("/api/snapshots-delete/:deleteKey", reqSnapshotPublicModeOrSignedIn, Wrap(DeleteDashboardSnapshotByDeleteKey))
|
||||
r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot))
|
||||
}
|
||||
|
||||
@@ -133,7 +133,7 @@ func (hs *HTTPServer) GetDashboard(c *m.ReqContext) Response {
|
||||
Meta: meta,
|
||||
}
|
||||
|
||||
c.TimeRequest(metrics.M_Api_Dashboard_Get)
|
||||
c.TimeRequest(metrics.MApiDashboardGet)
|
||||
return JSON(200, dto)
|
||||
}
|
||||
|
||||
@@ -278,12 +278,11 @@ func (hs *HTTPServer) PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand)
|
||||
inFolder := cmd.FolderId > 0
|
||||
err := dashboards.MakeUserAdmin(hs.Bus, cmd.OrgId, cmd.UserId, dashboard.Id, !inFolder)
|
||||
if err != nil {
|
||||
hs.log.Error("Could not make user admin", "dashboard", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||
return Error(500, "Failed to make user admin of dashboard", err)
|
||||
hs.log.Error("Could not make user admin", "dashboard", dashboard.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
c.TimeRequest(metrics.M_Api_Dashboard_Save)
|
||||
c.TimeRequest(metrics.MApiDashboardSave)
|
||||
return JSON(200, util.DynMap{
|
||||
"status": "success",
|
||||
"slug": dashboard.Slug,
|
||||
|
||||
@@ -97,7 +97,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
|
||||
cmd.ExternalDeleteUrl = response.DeleteUrl
|
||||
cmd.Dashboard = simplejson.New()
|
||||
|
||||
metrics.M_Api_Dashboard_Snapshot_External.Inc()
|
||||
metrics.MApiDashboardSnapshotExternal.Inc()
|
||||
} else {
|
||||
if cmd.Key == "" {
|
||||
cmd.Key = util.GetRandomString(32)
|
||||
@@ -109,7 +109,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
|
||||
|
||||
url = setting.ToAbsUrl("dashboard/snapshot/" + cmd.Key)
|
||||
|
||||
metrics.M_Api_Dashboard_Snapshot_Create.Inc()
|
||||
metrics.MApiDashboardSnapshotCreate.Inc()
|
||||
}
|
||||
|
||||
if err := bus.Dispatch(&cmd); err != nil {
|
||||
@@ -154,7 +154,7 @@ func GetDashboardSnapshot(c *m.ReqContext) {
|
||||
},
|
||||
}
|
||||
|
||||
metrics.M_Api_Dashboard_Snapshot_Get.Inc()
|
||||
metrics.MApiDashboardSnapshotGet.Inc()
|
||||
|
||||
c.Resp.Header().Set("Cache-Control", "public, max-age=3600")
|
||||
c.JSON(200, dto)
|
||||
|
||||
@@ -8,7 +8,7 @@ import (
|
||||
)
|
||||
|
||||
func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
|
||||
c.TimeRequest(metrics.M_DataSource_ProxyReq_Timer)
|
||||
c.TimeRequest(metrics.MDataSourceProxyReqTimer)
|
||||
|
||||
dsId := c.ParamsInt64(":id")
|
||||
ds, err := hs.DatasourceCache.GetDatasource(dsId, c.SignedInUser, c.SkipCache)
|
||||
|
||||
@@ -64,7 +64,6 @@ func (hs *HTTPServer) CreateFolder(c *m.ReqContext, cmd m.CreateFolderCommand) R
|
||||
if hs.Cfg.EditorsCanAdmin {
|
||||
if err := dashboards.MakeUserAdmin(hs.Bus, c.OrgId, c.SignedInUser.UserId, cmd.Result.Id, true); err != nil {
|
||||
hs.log.Error("Could not make user admin", "folder", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||
return Error(500, "Failed to make user admin of folder", err)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -269,7 +269,8 @@ func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
promhttp.HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
|
||||
promhttp.
|
||||
HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
|
||||
ServeHTTP(ctx.Resp, ctx.Req.Request)
|
||||
}
|
||||
|
||||
|
||||
@@ -242,73 +242,68 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
|
||||
}
|
||||
}
|
||||
|
||||
if c.IsGrafanaAdmin || c.OrgRole == m.ROLE_ADMIN {
|
||||
cfgNode := &dtos.NavLink{
|
||||
Id: "cfg",
|
||||
Text: "Configuration",
|
||||
SubTitle: "Organization: " + c.OrgName,
|
||||
Icon: "gicon gicon-cog",
|
||||
Url: setting.AppSubUrl + "/datasources",
|
||||
Children: []*dtos.NavLink{
|
||||
{
|
||||
configNodes := []*dtos.NavLink{}
|
||||
|
||||
if c.OrgRole == m.ROLE_ADMIN {
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Data Sources",
|
||||
Icon: "gicon gicon-datasources",
|
||||
Description: "Add and configure data sources",
|
||||
Id: "datasources",
|
||||
Url: setting.AppSubUrl + "/datasources",
|
||||
},
|
||||
{
|
||||
})
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Users",
|
||||
Id: "users",
|
||||
Description: "Manage org members",
|
||||
Icon: "gicon gicon-user",
|
||||
Url: setting.AppSubUrl + "/org/users",
|
||||
},
|
||||
{
|
||||
})
|
||||
}
|
||||
|
||||
if c.OrgRole == m.ROLE_ADMIN || hs.Cfg.EditorsCanAdmin {
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Teams",
|
||||
Id: "teams",
|
||||
Description: "Manage org groups",
|
||||
Icon: "gicon gicon-team",
|
||||
Url: setting.AppSubUrl + "/org/teams",
|
||||
},
|
||||
{
|
||||
})
|
||||
}
|
||||
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Plugins",
|
||||
Id: "plugins",
|
||||
Description: "View and configure plugins",
|
||||
Icon: "gicon gicon-plugins",
|
||||
Url: setting.AppSubUrl + "/plugins",
|
||||
},
|
||||
{
|
||||
})
|
||||
|
||||
if c.OrgRole == m.ROLE_ADMIN {
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Preferences",
|
||||
Id: "org-settings",
|
||||
Description: "Organization preferences",
|
||||
Icon: "gicon gicon-preferences",
|
||||
Url: setting.AppSubUrl + "/org",
|
||||
},
|
||||
|
||||
{
|
||||
})
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "API Keys",
|
||||
Id: "apikeys",
|
||||
Description: "Create & manage API keys",
|
||||
Icon: "gicon gicon-apikeys",
|
||||
Url: setting.AppSubUrl + "/org/apikeys",
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
if c.OrgRole != m.ROLE_ADMIN {
|
||||
cfgNode = &dtos.NavLink{
|
||||
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
||||
Id: "cfg",
|
||||
Text: "Configuration",
|
||||
SubTitle: "Organization: " + c.OrgName,
|
||||
Icon: "gicon gicon-cog",
|
||||
Url: setting.AppSubUrl + "/admin/users",
|
||||
Children: make([]*dtos.NavLink, 0),
|
||||
}
|
||||
}
|
||||
|
||||
data.NavTree = append(data.NavTree, cfgNode)
|
||||
}
|
||||
Url: configNodes[0].Url,
|
||||
Children: configNodes,
|
||||
})
|
||||
|
||||
if c.IsGrafanaAdmin {
|
||||
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
||||
@@ -327,27 +322,6 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
|
||||
})
|
||||
}
|
||||
|
||||
if (c.OrgRole == m.ROLE_EDITOR || c.OrgRole == m.ROLE_VIEWER) && hs.Cfg.EditorsCanAdmin {
|
||||
cfgNode := &dtos.NavLink{
|
||||
Id: "cfg",
|
||||
Text: "Configuration",
|
||||
SubTitle: "Organization: " + c.OrgName,
|
||||
Icon: "gicon gicon-cog",
|
||||
Url: setting.AppSubUrl + "/org/teams",
|
||||
Children: []*dtos.NavLink{
|
||||
{
|
||||
Text: "Teams",
|
||||
Id: "teams",
|
||||
Description: "Manage org groups",
|
||||
Icon: "gicon gicon-team",
|
||||
Url: setting.AppSubUrl + "/org/teams",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
data.NavTree = append(data.NavTree, cfgNode)
|
||||
}
|
||||
|
||||
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
||||
Text: "Help",
|
||||
SubTitle: fmt.Sprintf(`%s v%s (%s)`, setting.ApplicationName, setting.BuildVersion, setting.BuildCommit),
|
||||
|
||||
@@ -44,7 +44,7 @@ func (hs *HTTPServer) LoginView(c *models.ReqContext) {
|
||||
viewData.Settings["loginHint"] = setting.LoginHint
|
||||
viewData.Settings["passwordHint"] = setting.PasswordHint
|
||||
viewData.Settings["disableLoginForm"] = setting.DisableLoginForm
|
||||
viewData.Settings["samlEnabled"] = hs.Cfg.SAMLEnabled
|
||||
viewData.Settings["samlEnabled"] = setting.IsEnterprise && hs.Cfg.SAMLEnabled
|
||||
|
||||
if loginError, ok := tryGetEncryptedCookie(c, LoginErrorCookieName); ok {
|
||||
//this cookie is only set whenever an OAuth login fails
|
||||
@@ -81,7 +81,7 @@ func tryOAuthAutoLogin(c *models.ReqContext) bool {
|
||||
}
|
||||
oauthInfos := setting.OAuthService.OAuthInfos
|
||||
if len(oauthInfos) != 1 {
|
||||
log.Warn("Skipping OAuth auto login because multiple OAuth providers are configured.")
|
||||
log.Warn("Skipping OAuth auto login because multiple OAuth providers are configured")
|
||||
return false
|
||||
}
|
||||
for key := range setting.OAuthService.OAuthInfos {
|
||||
@@ -114,12 +114,16 @@ func (hs *HTTPServer) LoginPost(c *models.ReqContext, cmd dtos.LoginCommand) Res
|
||||
}
|
||||
|
||||
if err := bus.Dispatch(authQuery); err != nil {
|
||||
e401 := Error(401, "Invalid username or password", err)
|
||||
if err == login.ErrInvalidCredentials || err == login.ErrTooManyLoginAttempts {
|
||||
return Error(401, "Invalid username or password", err)
|
||||
return e401
|
||||
}
|
||||
|
||||
// Do not expose disabled status,
|
||||
// just show incorrect user credentials error (see #17947)
|
||||
if err == login.ErrUserDisabled {
|
||||
return Error(401, "User is disabled", err)
|
||||
hs.log.Warn("User is disabled", "user", cmd.User)
|
||||
return e401
|
||||
}
|
||||
|
||||
return Error(500, "Error while trying to authenticate user", err)
|
||||
@@ -138,7 +142,7 @@ func (hs *HTTPServer) LoginPost(c *models.ReqContext, cmd dtos.LoginCommand) Res
|
||||
c.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/")
|
||||
}
|
||||
|
||||
metrics.M_Api_Login_Post.Inc()
|
||||
metrics.MApiLoginPost.Inc()
|
||||
return JSON(200, result)
|
||||
}
|
||||
|
||||
@@ -195,15 +199,18 @@ func (hs *HTTPServer) trySetEncryptedCookie(ctx *models.ReqContext, cookieName s
|
||||
return err
|
||||
}
|
||||
|
||||
http.SetCookie(ctx.Resp, &http.Cookie{
|
||||
cookie := http.Cookie{
|
||||
Name: cookieName,
|
||||
MaxAge: 60,
|
||||
Value: hex.EncodeToString(encryptedError),
|
||||
HttpOnly: true,
|
||||
Path: setting.AppSubUrl + "/",
|
||||
Secure: hs.Cfg.CookieSecure,
|
||||
SameSite: hs.Cfg.CookieSameSite,
|
||||
})
|
||||
}
|
||||
if hs.Cfg.CookieSameSite != http.SameSiteDefaultMode {
|
||||
cookie.SameSite = hs.Cfg.CookieSameSite
|
||||
}
|
||||
http.SetCookie(ctx.Resp, &cookie)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -60,7 +60,7 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
|
||||
if code == "" {
|
||||
state := GenStateString()
|
||||
hashedState := hashStatecode(state, setting.OAuthService.OAuthInfos[name].ClientSecret)
|
||||
hs.writeCookie(ctx.Resp, OauthStateCookieName, hashedState, 60)
|
||||
hs.writeCookie(ctx.Resp, OauthStateCookieName, hashedState, 60, hs.Cfg.CookieSameSite)
|
||||
if setting.OAuthService.OAuthInfos[name].HostedDomain == "" {
|
||||
ctx.Redirect(connect.AuthCodeURL(state, oauth2.AccessTypeOnline))
|
||||
} else {
|
||||
@@ -73,7 +73,7 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
|
||||
|
||||
// delete cookie
|
||||
ctx.Resp.Header().Del("Set-Cookie")
|
||||
hs.deleteCookie(ctx.Resp, OauthStateCookieName)
|
||||
hs.deleteCookie(ctx.Resp, OauthStateCookieName, hs.Cfg.CookieSameSite)
|
||||
|
||||
if cookieState == "" {
|
||||
ctx.Handle(500, "login.OAuthLogin(missing saved state)", nil)
|
||||
@@ -191,15 +191,18 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
|
||||
return
|
||||
}
|
||||
|
||||
// Do not expose disabled status,
|
||||
// just show incorrect user credentials error (see #17947)
|
||||
if cmd.Result.IsDisabled {
|
||||
hs.redirectWithError(ctx, login.ErrUserDisabled)
|
||||
oauthLogger.Warn("User is disabled", "user", cmd.Result.Login)
|
||||
hs.redirectWithError(ctx, login.ErrInvalidCredentials)
|
||||
return
|
||||
}
|
||||
|
||||
// login
|
||||
hs.loginUserWithUser(cmd.Result, ctx)
|
||||
|
||||
metrics.M_Api_Login_OAuth.Inc()
|
||||
metrics.MApiLoginOAuth.Inc()
|
||||
|
||||
if redirectTo, _ := url.QueryUnescape(ctx.GetCookie("redirect_to")); len(redirectTo) > 0 {
|
||||
ctx.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/")
|
||||
@@ -210,20 +213,23 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
|
||||
ctx.Redirect(setting.AppSubUrl + "/")
|
||||
}
|
||||
|
||||
func (hs *HTTPServer) deleteCookie(w http.ResponseWriter, name string) {
    hs.writeCookie(w, name, "", -1)
func (hs *HTTPServer) deleteCookie(w http.ResponseWriter, name string, sameSite http.SameSite) {
    hs.writeCookie(w, name, "", -1, sameSite)
}

func (hs *HTTPServer) writeCookie(w http.ResponseWriter, name string, value string, maxAge int) {
    http.SetCookie(w, &http.Cookie{
func (hs *HTTPServer) writeCookie(w http.ResponseWriter, name string, value string, maxAge int, sameSite http.SameSite) {
    cookie := http.Cookie{
        Name:     name,
        MaxAge:   maxAge,
        Value:    value,
        HttpOnly: true,
        Path:     setting.AppSubUrl + "/",
        Secure:   hs.Cfg.CookieSecure,
        SameSite: hs.Cfg.CookieSameSite,
    })
}
    if sameSite != http.SameSiteDefaultMode {
        cookie.SameSite = sameSite
    }
    http.SetCookie(w, &cookie)
}
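The conditional above only copies the configured SameSite mode onto the cookie when it is not the default. A minimal standalone sketch of the observable difference (my reading of the change; the exact serialization of an explicit default mode depends on the Go release in use, so treat the bare `SameSite` attribute as an assumption):

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	rec := httptest.NewRecorder()

	// Explicitly assigning SameSiteDefaultMode can serialize as a bare
	// "SameSite" attribute on the Go versions of this era, whereas leaving
	// the field untouched emits no SameSite attribute at all.
	http.SetCookie(rec, &http.Cookie{Name: "explicit_default", Value: "1", SameSite: http.SameSiteDefaultMode})
	http.SetCookie(rec, &http.Cookie{Name: "unset", Value: "1"})
	http.SetCookie(rec, &http.Cookie{Name: "lax", Value: "1", SameSite: http.SameSiteLaxMode})

	for _, c := range rec.Header()["Set-Cookie"] {
		fmt.Println(c)
	}
}
```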
func hashStatecode(code, seed string) string {
|
||||
|
||||
@@ -88,7 +88,7 @@ func CreateOrg(c *m.ReqContext, cmd m.CreateOrgCommand) Response {
|
||||
return Error(500, "Failed to create organization", err)
|
||||
}
|
||||
|
||||
metrics.M_Api_Org_Create.Inc()
|
||||
metrics.MApiOrgCreate.Inc()
|
||||
|
||||
return JSON(200, &util.DynMap{
|
||||
"orgId": cmd.Result.Id,
|
||||
|
||||
@@ -188,8 +188,8 @@ func (hs *HTTPServer) CompleteInvite(c *m.ReqContext, completeInvite dtos.Comple
|
||||
|
||||
hs.loginUserWithUser(user, c)
|
||||
|
||||
metrics.M_Api_User_SignUpCompleted.Inc()
|
||||
metrics.M_Api_User_SignUpInvite.Inc()
|
||||
metrics.MApiUserSignUpCompleted.Inc()
|
||||
metrics.MApiUserSignUpInvite.Inc()
|
||||
|
||||
return Success("User created and logged in")
|
||||
}
|
||||
|
||||
@@ -61,6 +61,6 @@ func Search(c *m.ReqContext) Response {
|
||||
return Error(500, "Search failed", err)
|
||||
}
|
||||
|
||||
c.TimeRequest(metrics.M_Api_Dashboard_Search)
|
||||
c.TimeRequest(metrics.MApiDashboardSearch)
|
||||
return JSON(200, searchQuery.Result)
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ func SignUp(c *m.ReqContext, form dtos.SignUpForm) Response {
|
||||
Code: cmd.Code,
|
||||
})
|
||||
|
||||
metrics.M_Api_User_SignUpStarted.Inc()
|
||||
metrics.MApiUserSignUpStarted.Inc()
|
||||
|
||||
return JSON(200, util.DynMap{"status": "SignUpCreated"})
|
||||
}
|
||||
@@ -110,7 +110,7 @@ func (hs *HTTPServer) SignUpStep2(c *m.ReqContext, form dtos.SignUpStep2Form) Re
|
||||
}
|
||||
|
||||
hs.loginUserWithUser(user, c)
|
||||
metrics.M_Api_User_SignUpCompleted.Inc()
|
||||
metrics.MApiUserSignUpCompleted.Inc()
|
||||
|
||||
return JSON(200, apiResponse)
|
||||
}
|
||||
|
||||
@@ -335,6 +335,8 @@ func GetAuthProviderLabel(authModule string) string {
|
||||
return "GitLab"
|
||||
case "oauth_grafana_com", "oauth_grafananet":
|
||||
return "grafana.com"
|
||||
case "auth.saml":
|
||||
return "SAML"
|
||||
case "ldap", "":
|
||||
return "LDAP"
|
||||
default:
|
||||
|
||||
@@ -6,37 +6,53 @@ import (
|
||||
"path/filepath"
|
||||
|
||||
"github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
func GetGrafanaPluginDir(currentOS string) string {
|
||||
if isDevEnvironment() {
|
||||
return "../data/plugins"
|
||||
if rootPath, ok := tryGetRootForDevEnvironment(); ok {
|
||||
return filepath.Join(rootPath, "data/plugins")
|
||||
}
|
||||
|
||||
return returnOsDefault(currentOS)
|
||||
}
|
||||
|
||||
func isDevEnvironment() bool {
|
||||
// if ../conf/defaults.ini exists, grafana is not installed as package
|
||||
// that its in development environment.
|
||||
// getGrafanaRoot tries to find the root of the Grafana repo when developing Grafana, i.e. the source checkout root.
// It is not perfect: it only looks at the binary's path and guesses from that, so outside a dev environment it
// returns a bogus path.
|
||||
func getGrafanaRoot() (string, error) {
|
||||
ex, err := os.Executable()
|
||||
if err != nil {
|
||||
logger.Error("Could not get executable path. Assuming non dev environment.")
|
||||
return false
|
||||
return "", xerrors.New("Failed to get executable path")
|
||||
}
|
||||
exPath := filepath.Dir(ex)
|
||||
_, last := path.Split(exPath)
|
||||
if last == "bin" {
|
||||
// In dev env the executable for current platform is created in 'bin/' dir
|
||||
defaultsPath := filepath.Join(exPath, "../conf/defaults.ini")
|
||||
_, err = os.Stat(defaultsPath)
|
||||
return err == nil
|
||||
return filepath.Join(exPath, ".."), nil
|
||||
}
|
||||
|
||||
// But at the same time there are per platform directories that contain the binaries and can also be used.
|
||||
defaultsPath := filepath.Join(exPath, "../../conf/defaults.ini")
|
||||
_, err = os.Stat(defaultsPath)
|
||||
return err == nil
|
||||
return filepath.Join(exPath, "../.."), nil
|
||||
}
|
||||
|
||||
// tryGetRootForDevEnvironment returns the root path if we are in a dev environment. It checks whether the devenv
// directory exists at that root, which should only be the case in dev. The second return value is false if we are
// not in dev or if it wasn't possible to determine it.
|
||||
func tryGetRootForDevEnvironment() (string, bool) {
|
||||
rootPath, err := getGrafanaRoot()
|
||||
if err != nil {
|
||||
logger.Error("Could not get executable path. Assuming non dev environment.", err)
|
||||
return "", false
|
||||
}
|
||||
|
||||
devenvPath := filepath.Join(rootPath, "devenv")
|
||||
|
||||
_, err = os.Stat(devenvPath)
|
||||
if err != nil {
|
||||
return "", false
|
||||
}
|
||||
|
||||
return rootPath, true
|
||||
}
|
||||
|
||||
func returnOsDefault(currentOs string) string {
|
||||
|
||||
@@ -3,103 +3,180 @@ package metrics
|
||||
import (
|
||||
"runtime"
|
||||
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
|
||||
const exporterName = "grafana"
|
||||
|
||||
var (
|
||||
M_Instance_Start prometheus.Counter
|
||||
M_Page_Status *prometheus.CounterVec
|
||||
M_Api_Status *prometheus.CounterVec
|
||||
M_Proxy_Status *prometheus.CounterVec
|
||||
M_Http_Request_Total *prometheus.CounterVec
|
||||
M_Http_Request_Summary *prometheus.SummaryVec
|
||||
// MInstanceStart is a metric counter for started instances
|
||||
MInstanceStart prometheus.Counter
|
||||
|
||||
M_Api_User_SignUpStarted prometheus.Counter
|
||||
M_Api_User_SignUpCompleted prometheus.Counter
|
||||
M_Api_User_SignUpInvite prometheus.Counter
|
||||
M_Api_Dashboard_Save prometheus.Summary
|
||||
M_Api_Dashboard_Get prometheus.Summary
|
||||
M_Api_Dashboard_Search prometheus.Summary
|
||||
M_Api_Admin_User_Create prometheus.Counter
|
||||
M_Api_Login_Post prometheus.Counter
|
||||
M_Api_Login_OAuth prometheus.Counter
|
||||
M_Api_Org_Create prometheus.Counter
|
||||
// MPageStatus is a metric page http response status
|
||||
MPageStatus *prometheus.CounterVec
|
||||
|
||||
M_Api_Dashboard_Snapshot_Create prometheus.Counter
|
||||
M_Api_Dashboard_Snapshot_External prometheus.Counter
|
||||
M_Api_Dashboard_Snapshot_Get prometheus.Counter
|
||||
M_Api_Dashboard_Insert prometheus.Counter
|
||||
M_Alerting_Result_State *prometheus.CounterVec
|
||||
M_Alerting_Notification_Sent *prometheus.CounterVec
|
||||
M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter
|
||||
M_Aws_CloudWatch_ListMetrics prometheus.Counter
|
||||
M_Aws_CloudWatch_GetMetricData prometheus.Counter
|
||||
M_DB_DataSource_QueryById prometheus.Counter
|
||||
// MApiStatus is a metric api http response status
|
||||
MApiStatus *prometheus.CounterVec
|
||||
|
||||
// MProxyStatus is a metric proxy http response status
|
||||
MProxyStatus *prometheus.CounterVec
|
||||
|
||||
// MHttpRequestTotal is a metric http request counter
|
||||
MHttpRequestTotal *prometheus.CounterVec
|
||||
|
||||
// MHttpRequestSummary is a metric http request summary
|
||||
MHttpRequestSummary *prometheus.SummaryVec
|
||||
|
||||
// MApiUserSignUpStarted is a metric amount of users who started the signup flow
|
||||
MApiUserSignUpStarted prometheus.Counter
|
||||
|
||||
// MApiUserSignUpCompleted is a metric amount of users who completed the signup flow
|
||||
MApiUserSignUpCompleted prometheus.Counter
|
||||
|
||||
// MApiUserSignUpInvite is a metric amount of users who have been invited
|
||||
MApiUserSignUpInvite prometheus.Counter
|
||||
|
||||
// MApiDashboardSave is a metric summary for dashboard save duration
|
||||
MApiDashboardSave prometheus.Summary
|
||||
|
||||
// MApiDashboardGet is a metric summary for dashboard get duration
|
||||
MApiDashboardGet prometheus.Summary
|
||||
|
||||
// MApiDashboardSearch is a metric summary for dashboard search duration
|
||||
MApiDashboardSearch prometheus.Summary
|
||||
|
||||
// MApiAdminUserCreate is a metric api admin user created counter
|
||||
MApiAdminUserCreate prometheus.Counter
|
||||
|
||||
// MApiLoginPost is a metric api login post counter
|
||||
MApiLoginPost prometheus.Counter
|
||||
|
||||
// MApiLoginOAuth is a metric api login oauth counter
|
||||
MApiLoginOAuth prometheus.Counter
|
||||
|
||||
// MApiLoginSAML is a metric api login SAML counter
|
||||
MApiLoginSAML prometheus.Counter
|
||||
|
||||
// MApiOrgCreate is a metric api org created counter
|
||||
MApiOrgCreate prometheus.Counter
|
||||
|
||||
// MApiDashboardSnapshotCreate is a metric dashboard snapshots created
|
||||
MApiDashboardSnapshotCreate prometheus.Counter
|
||||
|
||||
// MApiDashboardSnapshotExternal is a metric external dashboard snapshots created
|
||||
MApiDashboardSnapshotExternal prometheus.Counter
|
||||
|
||||
// MApiDashboardSnapshotGet is a metric loaded dashboards
|
||||
MApiDashboardSnapshotGet prometheus.Counter
|
||||
|
||||
// MApiDashboardInsert is a metric dashboards inserted
|
||||
MApiDashboardInsert prometheus.Counter
|
||||
|
||||
// MAlertingResultState is a metric alert execution result counter
|
||||
MAlertingResultState *prometheus.CounterVec
|
||||
|
||||
// MAlertingNotificationSent is a metric counter for how many alert notifications have been sent
|
||||
MAlertingNotificationSent *prometheus.CounterVec
|
||||
|
||||
// MAwsCloudWatchGetMetricStatistics is a metric counter for getting metric statistics from aws
|
||||
MAwsCloudWatchGetMetricStatistics prometheus.Counter
|
||||
|
||||
// MAwsCloudWatchListMetrics is a metric counter for getting list of metrics from aws
|
||||
MAwsCloudWatchListMetrics prometheus.Counter
|
||||
|
||||
// MAwsCloudWatchGetMetricData is a metric counter for getting metric data time series from aws
|
||||
MAwsCloudWatchGetMetricData prometheus.Counter
|
||||
|
||||
// MDBDataSourceQueryByID is a metric counter for getting datasource by id
|
||||
MDBDataSourceQueryByID prometheus.Counter
|
||||
|
||||
// LDAPUsersSyncExecutionTime is a metric summary for LDAP users sync execution duration
|
||||
LDAPUsersSyncExecutionTime prometheus.Summary
|
||||
)
|
||||
|
||||
// Timers
|
||||
M_DataSource_ProxyReq_Timer prometheus.Summary
|
||||
M_Alerting_Execution_Time prometheus.Summary
|
||||
var (
|
||||
// MDataSourceProxyReqTimer is a metric summary for dataproxy request duration
|
||||
MDataSourceProxyReqTimer prometheus.Summary
|
||||
|
||||
// MAlertingExecutionTime is a metric summary of alert execution duration
|
||||
MAlertingExecutionTime prometheus.Summary
|
||||
)
|
||||
|
||||
// StatTotals
|
||||
var (
|
||||
M_Alerting_Active_Alerts prometheus.Gauge
|
||||
M_StatTotal_Dashboards prometheus.Gauge
|
||||
M_StatTotal_Users prometheus.Gauge
|
||||
M_StatActive_Users prometheus.Gauge
|
||||
M_StatTotal_Orgs prometheus.Gauge
|
||||
M_StatTotal_Playlists prometheus.Gauge
|
||||
// MAlertingActiveAlerts is a metric amount of active alerts
|
||||
MAlertingActiveAlerts prometheus.Gauge
|
||||
|
||||
// MStatTotalDashboards is a metric total amount of dashboards
|
||||
MStatTotalDashboards prometheus.Gauge
|
||||
|
||||
// MStatTotalUsers is a metric total amount of users
|
||||
MStatTotalUsers prometheus.Gauge
|
||||
|
||||
// MStatActiveUsers is a metric number of active users
|
||||
MStatActiveUsers prometheus.Gauge
|
||||
|
||||
// MStatTotalOrgs is a metric total amount of orgs
|
||||
MStatTotalOrgs prometheus.Gauge
|
||||
|
||||
// MStatTotalPlaylists is a metric total amount of playlists
|
||||
MStatTotalPlaylists prometheus.Gauge
|
||||
|
||||
// StatsTotalViewers is a metric total amount of viewers
|
||||
StatsTotalViewers prometheus.Gauge
|
||||
|
||||
// StatsTotalEditors is a metric total amount of editors
|
||||
StatsTotalEditors prometheus.Gauge
|
||||
|
||||
// StatsTotalAdmins is a metric total amount of admins
|
||||
StatsTotalAdmins prometheus.Gauge
|
||||
|
||||
// StatsTotalActiveViewers is a metric total amount of viewers
|
||||
StatsTotalActiveViewers prometheus.Gauge
|
||||
|
||||
// StatsTotalActiveEditors is a metric total amount of active editors
|
||||
StatsTotalActiveEditors prometheus.Gauge
|
||||
|
||||
// StatsTotalActiveAdmins is a metric total amount of active admins
|
||||
StatsTotalActiveAdmins prometheus.Gauge
|
||||
|
||||
// M_Grafana_Version is a gauge that contains build info about this binary
|
||||
//
|
||||
// Deprecated: use M_Grafana_Build_Version instead.
|
||||
M_Grafana_Version *prometheus.GaugeVec
|
||||
|
||||
// grafanaBuildVersion is a gauge that contains build info about this binary
|
||||
// grafanaBuildVersion is a metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built
|
||||
grafanaBuildVersion *prometheus.GaugeVec
|
||||
)
|
||||
|
||||
func init() {
|
||||
M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{
|
||||
httpStatusCodes := []string{"200", "404", "500", "unknown"}
|
||||
MInstanceStart = prometheus.NewCounter(prometheus.CounterOpts{
|
||||
Name: "instance_start_total",
|
||||
Help: "counter for started instances",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
httpStatusCodes := []string{"200", "404", "500", "unknown"}
|
||||
M_Page_Status = newCounterVecStartingAtZero(
|
||||
MPageStatus = newCounterVecStartingAtZero(
|
||||
prometheus.CounterOpts{
|
||||
Name: "page_response_status_total",
|
||||
Help: "page http response status",
|
||||
Namespace: exporterName,
|
||||
}, []string{"code"}, httpStatusCodes...)
|
||||
|
||||
M_Api_Status = newCounterVecStartingAtZero(
|
||||
MApiStatus = newCounterVecStartingAtZero(
|
||||
prometheus.CounterOpts{
|
||||
Name: "api_response_status_total",
|
||||
Help: "api http response status",
|
||||
Namespace: exporterName,
|
||||
}, []string{"code"}, httpStatusCodes...)
|
||||
|
||||
M_Proxy_Status = newCounterVecStartingAtZero(
|
||||
MProxyStatus = newCounterVecStartingAtZero(
|
||||
prometheus.CounterOpts{
|
||||
Name: "proxy_response_status_total",
|
||||
Help: "proxy http response status",
|
||||
Namespace: exporterName,
|
||||
}, []string{"code"}, httpStatusCodes...)
|
||||
|
||||
M_Http_Request_Total = prometheus.NewCounterVec(
|
||||
MHttpRequestTotal = prometheus.NewCounterVec(
|
||||
prometheus.CounterOpts{
|
||||
Name: "http_request_total",
|
||||
Help: "http request counter",
|
||||
@@ -107,7 +184,7 @@ func init() {
|
||||
[]string{"handler", "statuscode", "method"},
|
||||
)
|
||||
|
||||
M_Http_Request_Summary = prometheus.NewSummaryVec(
|
||||
MHttpRequestSummary = prometheus.NewSummaryVec(
|
||||
prometheus.SummaryOpts{
|
||||
Name: "http_request_duration_milliseconds",
|
||||
Help: "http request summary",
|
||||
@@ -115,169 +192,181 @@ func init() {
|
||||
[]string{"handler", "statuscode", "method"},
|
||||
)
|
||||
|
||||
M_Api_User_SignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiUserSignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_user_signup_started_total",
|
||||
Help: "amount of users who started the signup flow",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_User_SignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiUserSignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_user_signup_completed_total",
|
||||
Help: "amount of users who completed the signup flow",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_User_SignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiUserSignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_user_signup_invite_total",
|
||||
Help: "amount of users who have been invited",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
MApiDashboardSave = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "api_dashboard_save_milliseconds",
|
||||
Help: "summary for dashboard save duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
MApiDashboardGet = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "api_dashboard_get_milliseconds",
|
||||
Help: "summary for dashboard get duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
MApiDashboardSearch = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "api_dashboard_search_milliseconds",
|
||||
Help: "summary for dashboard search duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Admin_User_Create = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiAdminUserCreate = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_admin_user_created_total",
|
||||
Help: "api admin user created counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Login_Post = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiLoginPost = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_login_post_total",
|
||||
Help: "api login post counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Login_OAuth = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiLoginOAuth = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_login_oauth_total",
|
||||
Help: "api login oauth counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Org_Create = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiLoginSAML = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_login_saml_total",
|
||||
Help: "api login saml counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
MApiOrgCreate = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_org_create_total",
|
||||
Help: "api org created counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Snapshot_Create = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiDashboardSnapshotCreate = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_dashboard_snapshot_create_total",
|
||||
Help: "dashboard snapshots created",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Snapshot_External = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiDashboardSnapshotExternal = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_dashboard_snapshot_external_total",
|
||||
Help: "external dashboard snapshots created",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Snapshot_Get = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiDashboardSnapshotGet = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_dashboard_snapshot_get_total",
|
||||
Help: "loaded dashboards",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Insert = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiDashboardInsert = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_models_dashboard_insert_total",
|
||||
Help: "dashboards inserted ",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{
|
||||
MAlertingResultState = prometheus.NewCounterVec(prometheus.CounterOpts{
|
||||
Name: "alerting_result_total",
|
||||
Help: "alert execution result counter",
|
||||
Namespace: exporterName,
|
||||
}, []string{"state"})
|
||||
|
||||
M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{
|
||||
MAlertingNotificationSent = prometheus.NewCounterVec(prometheus.CounterOpts{
|
||||
Name: "alerting_notification_sent_total",
|
||||
Help: "counter for how many alert notifications been sent",
|
||||
Namespace: exporterName,
|
||||
}, []string{"type"})
|
||||
|
||||
M_Aws_CloudWatch_GetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MAwsCloudWatchGetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "aws_cloudwatch_get_metric_statistics_total",
|
||||
Help: "counter for getting metric statistics from aws",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Aws_CloudWatch_ListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MAwsCloudWatchListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "aws_cloudwatch_list_metrics_total",
|
||||
Help: "counter for getting list of metrics from aws",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Aws_CloudWatch_GetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MAwsCloudWatchGetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "aws_cloudwatch_get_metric_data_total",
|
||||
Help: "counter for getting metric data time series from aws",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_DB_DataSource_QueryById = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MDBDataSourceQueryByID = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "db_datasource_query_by_id_total",
|
||||
Help: "counter for getting datasource by id",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
LDAPUsersSyncExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "ldap_users_sync_execution_time",
|
||||
Help: "summary for LDAP users sync execution duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
MDataSourceProxyReqTimer = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "api_dataproxy_request_all_milliseconds",
|
||||
Help: "summary for dataproxy request duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
MAlertingExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "alerting_execution_time_milliseconds",
|
||||
Help: "summary of alert exeuction duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MAlertingActiveAlerts = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "alerting_active_alerts",
|
||||
Help: "amount of active alerts",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatTotalDashboards = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_totals_dashboard",
|
||||
Help: "total amount of dashboards",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatTotalUsers = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_total_users",
|
||||
Help: "total amount of users",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatActive_Users = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatActiveUsers = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_active_users",
|
||||
Help: "number of active users",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatTotalOrgs = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_total_orgs",
|
||||
Help: "total amount of orgs",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatTotalPlaylists = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_total_playlists",
|
||||
Help: "total amount of playlists",
|
||||
Namespace: exporterName,
|
||||
@@ -319,78 +408,69 @@ func init() {
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Grafana_Version = prometheus.NewGaugeVec(prometheus.GaugeOpts{
|
||||
Name: "info",
|
||||
Help: "Information about the Grafana. This metric is deprecated. please use `grafana_build_info`",
|
||||
Namespace: exporterName,
|
||||
}, []string{"version"})
|
||||
|
||||
grafanaBuildVersion = prometheus.NewGaugeVec(prometheus.GaugeOpts{
|
||||
Name: "build_info",
|
||||
Help: "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built.",
|
||||
Help: "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built",
|
||||
Namespace: exporterName,
|
||||
}, []string{"version", "revision", "branch", "goversion", "edition"})
|
||||
}
|
||||
|
||||
// SetBuildInformation sets the build information for this binary
func SetBuildInformation(version, revision, branch string) {
    // We export this info twice for backwards compatibility.
    // Once this has been released for some time we should be able to remove `M_Grafana_Version`.
    // The reason we added a new one is that it is common practice in the Prometheus community
    // to name this metric `*_build_info`, so it is easy to aggregate across all programs.
    edition := "oss"
    if setting.IsEnterprise {
        edition = "enterprise"
    }

    M_Grafana_Version.WithLabelValues(version).Set(1)
    grafanaBuildVersion.WithLabelValues(version, revision, branch, runtime.Version(), edition).Set(1)
}
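As a usage illustration only (the call site is not part of this diff; the import path and the `BuildBranch` field are assumptions), server startup code would publish the build labels once, roughly like this:

```go
package main

import (
	"github.com/grafana/grafana/pkg/infra/metrics"
	"github.com/grafana/grafana/pkg/setting"
)

func main() {
	// Hypothetical wiring: publishes both the legacy grafana_info gauge and
	// the Prometheus-convention grafana_build_info gauge described above.
	metrics.SetBuildInformation(setting.BuildVersion, setting.BuildCommit, setting.BuildBranch)
}
```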
func initMetricVars() {
|
||||
prometheus.MustRegister(
|
||||
M_Instance_Start,
|
||||
M_Page_Status,
|
||||
M_Api_Status,
|
||||
M_Proxy_Status,
|
||||
M_Http_Request_Total,
|
||||
M_Http_Request_Summary,
|
||||
M_Api_User_SignUpStarted,
|
||||
M_Api_User_SignUpCompleted,
|
||||
M_Api_User_SignUpInvite,
|
||||
M_Api_Dashboard_Save,
|
||||
M_Api_Dashboard_Get,
|
||||
M_Api_Dashboard_Search,
|
||||
M_DataSource_ProxyReq_Timer,
|
||||
M_Alerting_Execution_Time,
|
||||
M_Api_Admin_User_Create,
|
||||
M_Api_Login_Post,
|
||||
M_Api_Login_OAuth,
|
||||
M_Api_Org_Create,
|
||||
M_Api_Dashboard_Snapshot_Create,
|
||||
M_Api_Dashboard_Snapshot_External,
|
||||
M_Api_Dashboard_Snapshot_Get,
|
||||
M_Api_Dashboard_Insert,
|
||||
M_Alerting_Result_State,
|
||||
M_Alerting_Notification_Sent,
|
||||
M_Aws_CloudWatch_GetMetricStatistics,
|
||||
M_Aws_CloudWatch_ListMetrics,
|
||||
M_Aws_CloudWatch_GetMetricData,
|
||||
M_DB_DataSource_QueryById,
|
||||
M_Alerting_Active_Alerts,
|
||||
M_StatTotal_Dashboards,
|
||||
M_StatTotal_Users,
|
||||
M_StatActive_Users,
|
||||
M_StatTotal_Orgs,
|
||||
M_StatTotal_Playlists,
|
||||
M_Grafana_Version,
|
||||
MInstanceStart,
|
||||
MPageStatus,
|
||||
MApiStatus,
|
||||
MProxyStatus,
|
||||
MHttpRequestTotal,
|
||||
MHttpRequestSummary,
|
||||
MApiUserSignUpStarted,
|
||||
MApiUserSignUpCompleted,
|
||||
MApiUserSignUpInvite,
|
||||
MApiDashboardSave,
|
||||
MApiDashboardGet,
|
||||
MApiDashboardSearch,
|
||||
MDataSourceProxyReqTimer,
|
||||
MAlertingExecutionTime,
|
||||
MApiAdminUserCreate,
|
||||
MApiLoginPost,
|
||||
MApiLoginOAuth,
|
||||
MApiLoginSAML,
|
||||
MApiOrgCreate,
|
||||
MApiDashboardSnapshotCreate,
|
||||
MApiDashboardSnapshotExternal,
|
||||
MApiDashboardSnapshotGet,
|
||||
MApiDashboardInsert,
|
||||
MAlertingResultState,
|
||||
MAlertingNotificationSent,
|
||||
MAwsCloudWatchGetMetricStatistics,
|
||||
MAwsCloudWatchListMetrics,
|
||||
MAwsCloudWatchGetMetricData,
|
||||
MDBDataSourceQueryByID,
|
||||
LDAPUsersSyncExecutionTime,
|
||||
MAlertingActiveAlerts,
|
||||
MStatTotalDashboards,
|
||||
MStatTotalUsers,
|
||||
MStatActiveUsers,
|
||||
MStatTotalOrgs,
|
||||
MStatTotalPlaylists,
|
||||
        StatsTotalViewers,
        StatsTotalEditors,
        StatsTotalAdmins,
        StatsTotalActiveViewers,
        StatsTotalActiveEditors,
        StatsTotalActiveAdmins,
        grafanaBuildVersion)
        grafanaBuildVersion,
    )

}
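The init code in this file registers counters through `newCounterStartingAtZero` / `newCounterVecStartingAtZero` helpers that are not shown in this diff. A minimal sketch of what the vector variant could look like, purely as an assumption about its intent (pre-touching the expected label values so each series is exported at 0 instead of being absent until its first increment):

```go
package metricutil

import "github.com/prometheus/client_golang/prometheus"

// newCounterVecStartingAtZero is a sketch, not the real helper: it creates a
// CounterVec and initializes the given label values so each series starts at 0.
func newCounterVecStartingAtZero(opts prometheus.CounterOpts, labels []string, labelValues ...string) *prometheus.CounterVec {
	counter := prometheus.NewCounterVec(opts, labels)
	for _, v := range labelValues {
		counter.WithLabelValues(v).Add(0)
	}
	return counter
}
```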
@@ -46,7 +46,7 @@ func (im *InternalMetricsService) Run(ctx context.Context) error {
|
||||
}
|
||||
}
|
||||
|
||||
M_Instance_Start.Inc()
|
||||
MInstanceStart.Inc()
|
||||
|
||||
<-ctx.Done()
|
||||
return ctx.Err()
|
||||
|
||||
@@ -22,8 +22,12 @@ func parseRedisConnStr(connStr string) (*redis.Options, error) {
    keyValueCSV := strings.Split(connStr, ",")
    options := &redis.Options{Network: "tcp"}
    for _, rawKeyValue := range keyValueCSV {
        keyValueTuple := strings.Split(rawKeyValue, "=")
        keyValueTuple := strings.SplitN(rawKeyValue, "=", 2)
        if len(keyValueTuple) != 2 {
            if strings.HasPrefix(rawKeyValue, "password") {
                // don't log the password
                rawKeyValue = "password******"
            }
            return nil, fmt.Errorf("incorrect redis connection string format detected for '%v', format is key=value,key=value", rawKeyValue)
        }
        connKey := keyValueTuple[0]
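The switch from `Split` to `SplitN` above matters because a value (typically the password) may itself contain `=`; splitting only on the first `=` keeps the rest of the value intact. A self-contained sketch with a made-up connection fragment:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical fragment of a connection string; "p=assword" contains '='.
	raw := "password=p=assword"

	// strings.Split breaks the value apart, so the pair would be rejected as malformed.
	fmt.Println(strings.Split(raw, "=")) // [password p assword]

	// strings.SplitN with n=2 splits only on the first '='.
	kv := strings.SplitN(raw, "=", 2)
	fmt.Println(kv[0], "=>", kv[1]) // password => p=assword
}
```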
@@ -155,17 +155,21 @@ func (uss *UsageStatsService) sendUsageStats(oauthProviders map[string]bool) {
|
||||
}
|
||||
|
||||
func (uss *UsageStatsService) updateTotalStats() {
|
||||
if !uss.Cfg.MetricsEndpointEnabled || uss.Cfg.MetricsEndpointDisableTotalStats {
|
||||
return
|
||||
}
|
||||
|
||||
statsQuery := models.GetSystemStatsQuery{}
|
||||
if err := uss.Bus.Dispatch(&statsQuery); err != nil {
|
||||
metricsLogger.Error("Failed to get system stats", "error", err)
|
||||
return
|
||||
}
|
||||
|
||||
metrics.M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards))
|
||||
metrics.M_StatTotal_Users.Set(float64(statsQuery.Result.Users))
|
||||
metrics.M_StatActive_Users.Set(float64(statsQuery.Result.ActiveUsers))
|
||||
metrics.M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists))
|
||||
metrics.M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs))
|
||||
metrics.MStatTotalDashboards.Set(float64(statsQuery.Result.Dashboards))
|
||||
metrics.MStatTotalUsers.Set(float64(statsQuery.Result.Users))
|
||||
metrics.MStatActiveUsers.Set(float64(statsQuery.Result.ActiveUsers))
|
||||
metrics.MStatTotalPlaylists.Set(float64(statsQuery.Result.Playlists))
|
||||
metrics.MStatTotalOrgs.Set(float64(statsQuery.Result.Orgs))
|
||||
metrics.StatsTotalViewers.Set(float64(statsQuery.Result.Viewers))
|
||||
metrics.StatsTotalActiveViewers.Set(float64(statsQuery.Result.ActiveViewers))
|
||||
metrics.StatsTotalEditors.Set(float64(statsQuery.Result.Editors))
|
||||
|
||||
@@ -264,6 +264,49 @@ func TestMetrics(t *testing.T) {
|
||||
ts.Close()
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Test update total stats", t, func() {
|
||||
uss := &UsageStatsService{
|
||||
Bus: bus.New(),
|
||||
Cfg: setting.NewCfg(),
|
||||
}
|
||||
uss.Cfg.MetricsEndpointEnabled = true
|
||||
uss.Cfg.MetricsEndpointDisableTotalStats = false
|
||||
getSystemStatsWasCalled := false
|
||||
uss.Bus.AddHandler(func(query *models.GetSystemStatsQuery) error {
|
||||
query.Result = &models.SystemStats{}
|
||||
getSystemStatsWasCalled = true
|
||||
return nil
|
||||
})
|
||||
|
||||
Convey("should not update stats when metrics is disabled and total stats is disabled", func() {
|
||||
uss.Cfg.MetricsEndpointEnabled = false
|
||||
uss.Cfg.MetricsEndpointDisableTotalStats = true
|
||||
uss.updateTotalStats()
|
||||
So(getSystemStatsWasCalled, ShouldBeFalse)
|
||||
})
|
||||
|
||||
Convey("should not update stats when metrics is disabled and total stats enabled", func() {
|
||||
uss.Cfg.MetricsEndpointEnabled = false
|
||||
uss.Cfg.MetricsEndpointDisableTotalStats = false
|
||||
uss.updateTotalStats()
|
||||
So(getSystemStatsWasCalled, ShouldBeFalse)
|
||||
})
|
||||
|
||||
Convey("should not update stats when metrics is enabled and total stats disabled", func() {
|
||||
uss.Cfg.MetricsEndpointEnabled = true
|
||||
uss.Cfg.MetricsEndpointDisableTotalStats = true
|
||||
uss.updateTotalStats()
|
||||
So(getSystemStatsWasCalled, ShouldBeFalse)
|
||||
})
|
||||
|
||||
Convey("should update stats when metrics is enabled and total stats enabled", func() {
|
||||
uss.Cfg.MetricsEndpointEnabled = true
|
||||
uss.Cfg.MetricsEndpointDisableTotalStats = false
|
||||
uss.updateTotalStats()
|
||||
So(getSystemStatsWasCalled, ShouldBeTrue)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func waitTimeout(wg *sync.WaitGroup, timeout time.Duration) bool {
|
||||
|
||||
@@ -103,3 +103,16 @@ func AdminOrFeatureEnabled(enabled bool) macaron.Handler {
        }
    }
}

func SnapshotPublicModeOrSignedIn() macaron.Handler {
    return func(c *m.ReqContext) {
        if setting.SnapshotPublicMode {
            return
        }

        _, err := c.Invoke(ReqSignedIn)
        if err != nil {
            c.JsonApiErr(500, "Failed to invoke required signed in middleware", err)
        }
    }
}
@@ -1,6 +1,7 @@
|
||||
package authproxy
|
||||
|
||||
import (
|
||||
"encoding/base32"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/mail"
|
||||
@@ -32,6 +33,9 @@ var isLDAPEnabled = ldap.IsEnabled
|
||||
// newLDAP creates multiple LDAP instance
|
||||
var newLDAP = multildap.New
|
||||
|
||||
// supportedHeaderFields lists the supported header configuration fields
|
||||
var supportedHeaderFields = []string{"Name", "Email", "Login", "Groups"}
|
||||
|
||||
// AuthProxy struct
|
||||
type AuthProxy struct {
|
||||
store *remotecache.RemoteCache
|
||||
@@ -142,9 +146,18 @@ func (auth *AuthProxy) IsAllowedIP() (bool, *Error) {
    return false, newError("Proxy authentication required", err)
}

// getKey forms a key for the cache
// getKey forms a key for the cache based on the headers received as part of the authentication flow.
// Our configuration supports multiple headers: the main header contains the email or username, and the
// additional ones let us specify extra attributes such as Name, Email or Groups.
func (auth *AuthProxy) getKey() string {
    return fmt.Sprintf(CachePrefix, auth.header)
    key := strings.TrimSpace(auth.header) // start the key with the main header

    auth.headersIterator(func(_, header string) {
        key = strings.Join([]string{key, header}, "-") // compose the key with any additional headers
    })

    hashedKey := base32.StdEncoding.EncodeToString([]byte(key))
    return fmt.Sprintf(CachePrefix, hashedKey)
}
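As an illustration of the key composition described above (assuming the `auth-proxy-sync-ttl:%s` cache prefix that the auth proxy tests later in this diff expect), a standalone sketch reproducing the key for a main header plus one Groups header:

```go
package main

import (
	"encoding/base32"
	"fmt"
	"strings"
)

func main() {
	// Main header value followed by any additional header values, joined with "-".
	parts := []string{"markelog", "grafana-core-team"}
	key := strings.Join(parts, "-")

	// The composed key is base32-encoded before being used as the cache key.
	encoded := base32.StdEncoding.EncodeToString([]byte(key))
	fmt.Printf("auth-proxy-sync-ttl:%s\n", encoded)
	// Output: auth-proxy-sync-ttl:NVQXE23FNRXWOLLHOJQWMYLOMEWWG33SMUWXIZLBNU======
}
```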
// Login logs in user id with whatever means possible
|
||||
@@ -232,40 +245,36 @@ func (auth *AuthProxy) LoginViaHeader() (int64, error) {
|
||||
AuthId: auth.header,
|
||||
}
|
||||
|
||||
if auth.headerType == "username" {
|
||||
switch auth.headerType {
|
||||
case "username":
|
||||
extUser.Login = auth.header
|
||||
|
||||
// only set Email if it can be parsed as an email address
|
||||
emailAddr, emailErr := mail.ParseAddress(auth.header)
|
||||
emailAddr, emailErr := mail.ParseAddress(auth.header) // only set Email if it can be parsed as an email address
|
||||
if emailErr == nil {
|
||||
extUser.Email = emailAddr.Address
|
||||
}
|
||||
} else if auth.headerType == "email" {
|
||||
case "email":
|
||||
extUser.Email = auth.header
|
||||
extUser.Login = auth.header
|
||||
} else {
|
||||
default:
|
||||
return 0, newError("Auth proxy header property invalid", nil)
|
||||
|
||||
}
|
||||
|
||||
for _, field := range []string{"Name", "Email", "Login", "Groups"} {
|
||||
if auth.headers[field] == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
if val := auth.ctx.Req.Header.Get(auth.headers[field]); val != "" {
|
||||
auth.headersIterator(func(field string, header string) {
|
||||
if field == "Groups" {
|
||||
extUser.Groups = util.SplitString(val)
|
||||
extUser.Groups = util.SplitString(header)
|
||||
} else {
|
||||
reflect.ValueOf(extUser).Elem().FieldByName(field).SetString(val)
|
||||
}
|
||||
}
|
||||
reflect.ValueOf(extUser).Elem().FieldByName(field).SetString(header)
|
||||
}
|
||||
})
|
||||
|
||||
upsert := &models.UpsertUserCommand{
|
||||
ReqContext: auth.ctx,
|
||||
SignupAllowed: setting.AuthProxyAutoSignUp,
|
||||
ExternalUser: extUser,
|
||||
}
|
||||
|
||||
err := bus.Dispatch(upsert)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
@@ -274,6 +283,21 @@ func (auth *AuthProxy) LoginViaHeader() (int64, error) {
|
||||
return upsert.Result.Id, nil
|
||||
}
|
||||
|
||||
// headersIterator iterates over all non-empty supported additional headers
|
||||
func (auth *AuthProxy) headersIterator(fn func(field string, header string)) {
|
||||
for _, field := range supportedHeaderFields {
|
||||
h := auth.headers[field]
|
||||
|
||||
if h == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
if value := auth.ctx.Req.Header.Get(h); value != "" {
|
||||
fn(field, strings.TrimSpace(value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GetSignedUser get full signed user info
|
||||
func (auth *AuthProxy) GetSignedUser(userID int64) (*models.SignedInUser, *Error) {
|
||||
query := &models.GetSignedInUserQuery{
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
package authproxy
|
||||
|
||||
import (
|
||||
"encoding/base32"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
"gopkg.in/macaron.v1"
|
||||
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/infra/remotecache"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/services/ldap"
|
||||
"github.com/grafana/grafana/pkg/services/multildap"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
"gopkg.in/macaron.v1"
|
||||
)
|
||||
|
||||
type TestMultiLDAP struct {
|
||||
@@ -45,13 +45,8 @@ func (stub *TestMultiLDAP) User(login string) (
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func TestMiddlewareContext(t *testing.T) {
|
||||
Convey("auth_proxy helper", t, func() {
|
||||
req, _ := http.NewRequest("POST", "http://example.com", nil)
|
||||
setting.AuthProxyHeaderName = "X-Killa"
|
||||
name := "markelog"
|
||||
|
||||
req.Header.Add(setting.AuthProxyHeaderName, name)
|
||||
func prepareMiddleware(t *testing.T, req *http.Request, store *remotecache.RemoteCache) *AuthProxy {
|
||||
t.Helper()
|
||||
|
||||
ctx := &models.ReqContext{
|
||||
Context: &macaron.Context{
|
||||
@@ -61,23 +56,61 @@ func TestMiddlewareContext(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
Convey("logs in user from the cache", func() {
|
||||
store := remotecache.NewFakeStore(t)
|
||||
key := fmt.Sprintf(CachePrefix, name)
|
||||
store.Set(key, int64(33), 0)
|
||||
|
||||
auth := New(&Options{
|
||||
Store: store,
|
||||
Ctx: ctx,
|
||||
OrgID: 4,
|
||||
})
|
||||
|
||||
return auth
|
||||
}
|
||||
|
||||
func TestMiddlewareContext(t *testing.T) {
|
||||
Convey("auth_proxy helper", t, func() {
|
||||
req, _ := http.NewRequest("POST", "http://example.com", nil)
|
||||
setting.AuthProxyHeaderName = "X-Killa"
|
||||
store := remotecache.NewFakeStore(t)
|
||||
|
||||
name := "markelog"
|
||||
req.Header.Add(setting.AuthProxyHeaderName, name)
|
||||
|
||||
Convey("when the cache only contains the main header", func() {
|
||||
|
||||
Convey("with a simple cache key", func() {
|
||||
// Set cache key
|
||||
key := fmt.Sprintf(CachePrefix, base32.StdEncoding.EncodeToString([]byte(name)))
|
||||
store.Set(key, int64(33), 0)
|
||||
|
||||
// Set up the middleware
|
||||
auth := prepareMiddleware(t, req, store)
|
||||
id, err := auth.Login()
|
||||
|
||||
So(auth.getKey(), ShouldEqual, "auth-proxy-sync-ttl:NVQXE23FNRXWO===")
|
||||
So(err, ShouldBeNil)
|
||||
So(id, ShouldEqual, 33)
|
||||
})
|
||||
|
||||
Convey("when the cache key contains additional headers", func() {
|
||||
setting.AuthProxyHeaders = map[string]string{"Groups": "X-WEBAUTH-GROUPS"}
|
||||
group := "grafana-core-team"
|
||||
req.Header.Add("X-WEBAUTH-GROUPS", group)
|
||||
|
||||
key := fmt.Sprintf(CachePrefix, base32.StdEncoding.EncodeToString([]byte(name+"-"+group)))
|
||||
store.Set(key, int64(33), 0)
|
||||
|
||||
auth := prepareMiddleware(t, req, store)
|
||||
|
||||
id, err := auth.Login()
|
||||
|
||||
So(auth.getKey(), ShouldEqual, "auth-proxy-sync-ttl:NVQXE23FNRXWOLLHOJQWMYLOMEWWG33SMUWXIZLBNU======")
|
||||
So(err, ShouldBeNil)
|
||||
So(id, ShouldEqual, 33)
|
||||
})
|
||||
|
||||
Convey("when the does not exist", func() {
|
||||
})
|
||||
})
|
||||
|
||||
Convey("LDAP", func() {
|
||||
Convey("logs in via LDAP", func() {
|
||||
bus.AddHandler("test", func(cmd *models.UpsertUserCommand) error {
|
||||
@@ -119,13 +152,9 @@ func TestMiddlewareContext(t *testing.T) {
|
||||
|
||||
store := remotecache.NewFakeStore(t)
|
||||
|
||||
server := New(&Options{
|
||||
Store: store,
|
||||
Ctx: ctx,
|
||||
OrgID: 4,
|
||||
})
|
||||
auth := prepareMiddleware(t, req, store)
|
||||
|
||||
id, err := server.Login()
|
||||
id, err := auth.Login()
|
||||
|
||||
So(err, ShouldBeNil)
|
||||
So(id, ShouldEqual, 42)
|
||||
@@ -149,11 +178,7 @@ func TestMiddlewareContext(t *testing.T) {
|
||||
|
||||
store := remotecache.NewFakeStore(t)
|
||||
|
||||
auth := New(&Options{
|
||||
Store: store,
|
||||
Ctx: ctx,
|
||||
OrgID: 4,
|
||||
})
|
||||
auth := prepareMiddleware(t, req, store)
|
||||
|
||||
stub := &TestMultiLDAP{
|
||||
ID: 42,
|
||||
@@ -170,7 +195,6 @@ func TestMiddlewareContext(t *testing.T) {
|
||||
So(id, ShouldNotEqual, 42)
|
||||
So(stub.loginCalled, ShouldEqual, false)
|
||||
})
|
||||
|
||||
})
|
||||
})
|
||||
}
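
The two getKey assertions above follow directly from how the tests build the cache entry: the proxy header value, joined with "-" to any extra header values, is base32-encoded and formatted into the auth-proxy-sync-ttl:%s prefix. A standalone sketch that reproduces both expected keys (the cachePrefix constant below mirrors the format implied by the assertions; it is not imported from the Grafana package):

package main

import (
	"encoding/base32"
	"fmt"
	"strings"
)

// cachePrefix mirrors the format implied by the getKey assertions above.
const cachePrefix = "auth-proxy-sync-ttl:%s"

// cacheKey joins the header values with "-" and base32-encodes the result,
// matching the key construction used in the tests.
func cacheKey(values ...string) string {
	joined := strings.Join(values, "-")
	return fmt.Sprintf(cachePrefix, base32.StdEncoding.EncodeToString([]byte(joined)))
}

func main() {
	fmt.Println(cacheKey("markelog"))
	// auth-proxy-sync-ttl:NVQXE23FNRXWO===
	fmt.Println(cacheKey("markelog", "grafana-core-team"))
	// auth-proxy-sync-ttl:NVQXE23FNRXWOLLHOJQWMYLOMEWWG33SMUWXIZLBNU======
}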

@@ -3,6 +3,8 @@ package middleware
import (
	"testing"

	"github.com/grafana/grafana/pkg/setting"

	. "github.com/smartystreets/goconvey/convey"
)

@@ -31,5 +33,19 @@ func TestMiddlewareAuth(t *testing.T) {
			})
		})

		Convey("snapshot public mode or signed in", func() {
			middlewareScenario(t, "Snapshot public mode disabled and unauthenticated request should return 401", func(sc *scenarioContext) {
				sc.m.Get("/api/snapshot", SnapshotPublicModeOrSignedIn(), sc.defaultHandler)
				sc.fakeReq("GET", "/api/snapshot").exec()
				So(sc.resp.Code, ShouldEqual, 401)
			})

			middlewareScenario(t, "Snapshot public mode enabled and unauthenticated request should return 200", func(sc *scenarioContext) {
				setting.SnapshotPublicMode = true
				sc.m.Get("/api/snapshot", SnapshotPublicModeOrSignedIn(), sc.defaultHandler)
				sc.fakeReq("GET", "/api/snapshot").exec()
				So(sc.resp.Code, ShouldEqual, 200)
			})
		})
	})
}
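
The SnapshotPublicModeOrSignedIn handler itself is not part of this diff. Purely as a sketch of the behaviour the two scenarios assert (401 when snapshot public mode is off and the request is unauthenticated, pass-through otherwise), using the macaron-style handler and ReqContext helpers seen elsewhere in this package, it could look roughly like the following; the function name and body are illustrative, not the actual implementation:

package middleware

import (
	"gopkg.in/macaron.v1"

	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/setting"
)

// snapshotPublicModeOrSignedInSketch is a hypothetical handler shape that
// would satisfy the two scenarios above; it is not the Grafana implementation.
func snapshotPublicModeOrSignedInSketch() macaron.Handler {
	return func(c *models.ReqContext) {
		if setting.SnapshotPublicMode {
			return // public snapshot access allowed without authentication
		}
		if !c.IsSignedIn {
			c.JsonApiErr(401, "Unauthorized", nil)
		}
	}
}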

@@ -21,8 +21,16 @@ import (

var getTime = time.Now

const (
	errStringInvalidUsernamePassword = "Invalid username or password"
	errStringInvalidAPIKey           = "Invalid API key"
)

var (
	ReqGrafanaAdmin = Auth(&AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
	ReqGrafanaAdmin = Auth(&AuthOptions{
		ReqSignedIn:     true,
		ReqGrafanaAdmin: true,
	})
	ReqSignedIn   = Auth(&AuthOptions{ReqSignedIn: true})
	ReqEditorRole = RoleAuth(models.ROLE_EDITOR, models.ROLE_ADMIN)
	ReqOrgAdmin   = RoleAuth(models.ROLE_ADMIN)

@@ -106,14 +114,14 @@ func initContextWithApiKey(ctx *models.ReqContext) bool {
	// base64 decode key
	decoded, err := apikeygen.Decode(keyString)
	if err != nil {
		ctx.JsonApiErr(401, "Invalid API key", err)
		ctx.JsonApiErr(401, errStringInvalidAPIKey, err)
		return true
	}

	// fetch key
	keyQuery := models.GetApiKeyByNameQuery{KeyName: decoded.Name, OrgId: decoded.OrgId}
	if err := bus.Dispatch(&keyQuery); err != nil {
		ctx.JsonApiErr(401, "Invalid API key", err)
		ctx.JsonApiErr(401, errStringInvalidAPIKey, err)
		return true
	}

@@ -121,7 +129,7 @@ func initContextWithApiKey(ctx *models.ReqContext) bool {

	// validate api key
	if !apikeygen.IsValid(decoded, apikey.Key) {
		ctx.JsonApiErr(401, "Invalid API key", err)
		ctx.JsonApiErr(401, errStringInvalidAPIKey, err)
		return true
	}

@@ -140,7 +148,6 @@ func initContextWithApiKey(ctx *models.ReqContext) bool {
}

func initContextWithBasicAuth(ctx *models.ReqContext, orgId int64) bool {

	if !setting.BasicAuthEnabled {
		return false
	}

@@ -158,21 +165,39 @@ func initContextWithBasicAuth(ctx *models.ReqContext, orgId int64) bool {

	loginQuery := models.GetUserByLoginQuery{LoginOrEmail: username}
	if err := bus.Dispatch(&loginQuery); err != nil {
		ctx.JsonApiErr(401, "Basic auth failed", err)
		ctx.Logger.Debug(
			"Failed to look up the username",
			"username", username,
		)
		ctx.JsonApiErr(401, errStringInvalidUsernamePassword, err)

		return true
	}

	user := loginQuery.Result

	loginUserQuery := models.LoginUserQuery{Username: username, Password: password, User: user}
	loginUserQuery := models.LoginUserQuery{
		Username: username,
		Password: password,
		User:     user,
	}
	if err := bus.Dispatch(&loginUserQuery); err != nil {
		ctx.JsonApiErr(401, "Invalid username or password", err)
		ctx.Logger.Debug(
			"Failed to authorize the user",
			"username", username,
		)

		ctx.JsonApiErr(401, errStringInvalidUsernamePassword, err)
		return true
	}

	query := models.GetSignedInUserQuery{UserId: user.Id, OrgId: orgId}
	if err := bus.Dispatch(&query); err != nil {
		ctx.JsonApiErr(401, "Authentication error", err)
		ctx.Logger.Error(
			"Failed at user signed in",
			"id", user.Id,
			"org", orgId,
		)
		ctx.JsonApiErr(401, errStringInvalidUsernamePassword, err)
		return true
	}

@@ -193,14 +218,14 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.

	token, err := authTokenService.LookupToken(ctx.Req.Context(), rawToken)
	if err != nil {
		ctx.Logger.Error("failed to look up user based on cookie", "error", err)
		ctx.Logger.Error("Failed to look up user based on cookie", "error", err)
		WriteSessionCookie(ctx, "", -1)
		return false
	}

	query := models.GetSignedInUserQuery{UserId: token.UserId, OrgId: orgID}
	if err := bus.Dispatch(&query); err != nil {
		ctx.Logger.Error("failed to get user with id", "userId", token.UserId, "error", err)
		ctx.Logger.Error("Failed to get user with id", "userId", token.UserId, "error", err)
		return false
	}

@@ -210,7 +235,7 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.

	rotated, err := authTokenService.TryRotateToken(ctx.Req.Context(), token, ctx.RemoteAddr(), ctx.Req.UserAgent())
	if err != nil {
		ctx.Logger.Error("failed to rotate token", "error", err)
		ctx.Logger.Error("Failed to rotate token", "error", err)
		return true
	}

@@ -223,7 +248,7 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.

func WriteSessionCookie(ctx *models.ReqContext, value string, maxLifetimeDays int) {
	if setting.Env == setting.DEV {
		ctx.Logger.Info("new token", "unhashed token", value)
		ctx.Logger.Info("New token", "unhashed token", value)
	}

	var maxAge int

@@ -242,7 +267,9 @@ func WriteSessionCookie(ctx *models.ReqContext, value string, maxLifetimeDays in
		Path:     setting.AppSubUrl + "/",
		Secure:   setting.CookieSecure,
		MaxAge:   maxAge,
		SameSite: setting.CookieSameSite,
	}
	if setting.CookieSameSite != http.SameSiteDefaultMode {
		cookie.SameSite = setting.CookieSameSite
	}

	http.SetCookie(ctx.Resp, &cookie)
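
WriteSessionCookie (and the matching test below) now only assigns SameSite when the configured policy is not SameSiteDefaultMode. The likely reason is how net/http serialized the attribute in the Go releases current at the time: an explicit SameSiteDefaultMode produced a bare "SameSite" attribute, while leaving the field at its zero value omitted it entirely; later Go releases changed this. A small illustration:

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Behaviour of the Go toolchains of this era: an explicit
	// SameSiteDefaultMode is serialized as a bare "SameSite" attribute,
	// while the zero value is omitted from the Set-Cookie header.
	explicit := http.Cookie{Name: "grafana_session", Value: "x", SameSite: http.SameSiteDefaultMode}
	unset := http.Cookie{Name: "grafana_session", Value: "x"}

	fmt.Println(explicit.String()) // grafana_session=x; SameSite
	fmt.Println(unset.String())    // grafana_session=x
}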

@@ -2,6 +2,7 @@ package middleware

import (
	"context"
	"encoding/base32"
	"encoding/json"
	"fmt"
	"net/http"

@@ -11,6 +12,7 @@ import (
	"time"

	. "github.com/smartystreets/goconvey/convey"
	"github.com/stretchr/testify/assert"
	"gopkg.in/macaron.v1"

	"github.com/grafana/grafana/pkg/api/dtos"

@@ -21,7 +23,6 @@ import (
	"github.com/grafana/grafana/pkg/services/login"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/util"
	"github.com/stretchr/testify/assert"
)

const errorTemplate = "error-template"

@@ -305,6 +306,13 @@ func TestMiddlewareContext(t *testing.T) {
		maxAgeHours := (time.Duration(setting.LoginMaxLifetimeDays) * 24 * time.Hour)
		maxAge := (maxAgeHours + time.Hour).Seconds()

		sameSitePolicies := []http.SameSite{
			http.SameSiteDefaultMode,
			http.SameSiteLaxMode,
			http.SameSiteStrictMode,
		}
		for _, sameSitePolicy := range sameSitePolicies {
			setting.CookieSameSite = sameSitePolicy
			expectedCookie := &http.Cookie{
				Name:  setting.LoginCookieName,
				Value: "rotated",

@@ -312,21 +320,24 @@ func TestMiddlewareContext(t *testing.T) {
				HttpOnly: true,
				MaxAge:   int(maxAge),
				Secure:   setting.CookieSecure,
				SameSite: setting.CookieSameSite,
			}
			if sameSitePolicy != http.SameSiteDefaultMode {
				expectedCookie.SameSite = sameSitePolicy
			}

			sc.fakeReq("GET", "/").exec()

			Convey("should init context with user info", func() {
			Convey(fmt.Sprintf("Should init context with user info and setting.SameSite=%v", sameSitePolicy), func() {
				So(sc.context.IsSignedIn, ShouldBeTrue)
				So(sc.context.UserId, ShouldEqual, 12)
				So(sc.context.UserToken.UserId, ShouldEqual, 12)
				So(sc.context.UserToken.UnhashedToken, ShouldEqual, "rotated")
			})

			Convey("should set cookie", func() {
			Convey(fmt.Sprintf("Should set cookie with setting.SameSite=%v", sameSitePolicy), func() {
				So(sc.resp.Header().Get("Set-Cookie"), ShouldEqual, expectedCookie.String())
			})
		}
	})

	middlewareScenario(t, "Invalid/expired auth token in cookie", func(sc *scenarioContext) {

@@ -377,7 +388,9 @@ func TestMiddlewareContext(t *testing.T) {
	setting.LDAPEnabled = true
	setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
	setting.AuthProxyHeaderProperty = "username"
	setting.AuthProxyHeaders = map[string]string{"Groups": "X-WEBAUTH-GROUPS"}
	name := "markelog"
	group := "grafana-core-team"

	middlewareScenario(t, "should not sync the user if it's in the cache", func(sc *scenarioContext) {
		bus.AddHandler("test", func(query *models.GetSignedInUserQuery) error {

@@ -385,11 +398,12 @@ func TestMiddlewareContext(t *testing.T) {
			return nil
		})

		key := fmt.Sprintf(cachePrefix, name)
		key := fmt.Sprintf(cachePrefix, base32.StdEncoding.EncodeToString([]byte(name+"-"+group)))
		sc.remoteCacheService.Set(key, int64(33), 0)
		sc.fakeReq("GET", "/")

		sc.req.Header.Add(setting.AuthProxyHeaderName, name)
		sc.req.Header.Add("X-WEBAUTH-GROUPS", group)
		sc.exec()

		Convey("Should init user via cache", func() {

@@ -20,9 +20,9 @@ func RequestMetrics(handler string) macaron.Handler {

		code := sanitizeCode(status)
		method := sanitizeMethod(req.Method)
		metrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc()
		metrics.MHttpRequestTotal.WithLabelValues(handler, code, method).Inc()
		duration := time.Since(now).Nanoseconds() / int64(time.Millisecond)
		metrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(float64(duration))
		metrics.MHttpRequestSummary.WithLabelValues(handler, code, method).Observe(float64(duration))

		if strings.HasPrefix(req.RequestURI, "/api/datasources/proxy") {
			countProxyRequests(status)

@@ -37,39 +37,39 @@ func RequestMetrics(handler string) macaron.Handler {
func countApiRequests(status int) {
	switch status {
	case 200:
		metrics.M_Api_Status.WithLabelValues("200").Inc()
		metrics.MApiStatus.WithLabelValues("200").Inc()
	case 404:
		metrics.M_Api_Status.WithLabelValues("404").Inc()
		metrics.MApiStatus.WithLabelValues("404").Inc()
	case 500:
		metrics.M_Api_Status.WithLabelValues("500").Inc()
		metrics.MApiStatus.WithLabelValues("500").Inc()
	default:
		metrics.M_Api_Status.WithLabelValues("unknown").Inc()
		metrics.MApiStatus.WithLabelValues("unknown").Inc()
	}
}

func countPageRequests(status int) {
	switch status {
	case 200:
		metrics.M_Page_Status.WithLabelValues("200").Inc()
		metrics.MPageStatus.WithLabelValues("200").Inc()
	case 404:
		metrics.M_Page_Status.WithLabelValues("404").Inc()
		metrics.MPageStatus.WithLabelValues("404").Inc()
	case 500:
		metrics.M_Page_Status.WithLabelValues("500").Inc()
		metrics.MPageStatus.WithLabelValues("500").Inc()
	default:
		metrics.M_Page_Status.WithLabelValues("unknown").Inc()
		metrics.MPageStatus.WithLabelValues("unknown").Inc()
	}
}

func countProxyRequests(status int) {
	switch status {
	case 200:
		metrics.M_Proxy_Status.WithLabelValues("200").Inc()
		metrics.MProxyStatus.WithLabelValues("200").Inc()
	case 404:
		metrics.M_Proxy_Status.WithLabelValues("400").Inc()
		metrics.MProxyStatus.WithLabelValues("400").Inc()
	case 500:
		metrics.M_Proxy_Status.WithLabelValues("500").Inc()
		metrics.MProxyStatus.WithLabelValues("500").Inc()
	default:
		metrics.M_Proxy_Status.WithLabelValues("unknown").Inc()
		metrics.MProxyStatus.WithLabelValues("unknown").Inc()
	}
}
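
The renamed identifiers above (MHttpRequestTotal, MApiStatus, and so on) are Prometheus collectors defined in Grafana's metrics package; only their names change in this diff. As a generic illustration of the pattern, and with the namespace, metric name, and help text below being assumptions rather than Grafana's actual registration, a labelled counter is declared and incremented like this with client_golang:

package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

// Illustrative only; the real collectors live in Grafana's metrics package.
var apiStatus = prometheus.NewCounterVec(prometheus.CounterOpts{
	Namespace: "grafana",
	Name:      "api_response_status_total",
	Help:      "API response status codes.",
}, []string{"code"})

func main() {
	prometheus.MustRegister(apiStatus)

	// Same shape as the MApiStatus.WithLabelValues("200").Inc() calls above.
	apiStatus.WithLabelValues("200").Inc()
	apiStatus.WithLabelValues("404").Inc()

	// Expose the counters for scraping.
	http.Handle("/metrics", promhttp.Handler())
	_ = http.ListenAndServe(":2112", nil)
}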

@@ -62,6 +62,7 @@ type CreateUserCommand struct {
	Password       string
	EmailVerified  bool
	IsAdmin        bool
	IsDisabled     bool
	SkipOrgSetup   bool
	DefaultOrgRole string

@@ -146,6 +147,8 @@ type SearchUsersQuery struct {
	Limit      int
	AuthModule string

	IsDisabled *bool

	Result SearchUserQueryResult
}

@@ -112,11 +112,19 @@ type PluginDependencyItem struct {
	Version string `json:"version"`
}

type PluginBuildInfo struct {
	Time   int64  `json:"time,omitempty"`
	Repo   string `json:"repo,omitempty"`
	Branch string `json:"branch,omitempty"`
	Hash   string `json:"hash,omitempty"`
}

type PluginInfo struct {
	Author      PluginInfoLink      `json:"author"`
	Description string              `json:"description"`
	Links       []PluginInfoLink    `json:"links"`
	Logos       PluginLogos         `json:"logos"`
	Build       PluginBuildInfo     `json:"build"`
	Screenshots []PluginScreenshots `json:"screenshots"`
	Version     string              `json:"version"`
	Updated     string              `json:"updated"`
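
The new PluginBuildInfo struct and the Build field on PluginInfo add an optional build section to the plugin metadata that Grafana decodes from plugin.json. A minimal decoding sketch; the JSON values below are made up for illustration and do not come from a real plugin:

package main

import (
	"encoding/json"
	"fmt"
)

type PluginBuildInfo struct {
	Time   int64  `json:"time,omitempty"`
	Repo   string `json:"repo,omitempty"`
	Branch string `json:"branch,omitempty"`
	Hash   string `json:"hash,omitempty"`
}

func main() {
	// Hypothetical build block; field names follow the struct tags above.
	raw := []byte(`{"time": 1566481530000, "repo": "https://github.com/org/example-plugin", "branch": "master", "hash": "abc1234"}`)

	var build PluginBuildInfo
	if err := json.Unmarshal(raw, &build); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", build)
}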

@@ -70,5 +70,5 @@ func (e *DefaultEvalHandler) Eval(context *EvalContext) {
	context.EndTime = time.Now()

	elapsedTime := context.EndTime.Sub(context.StartTime).Nanoseconds() / int64(time.Millisecond)
	metrics.M_Alerting_Execution_Time.Observe(float64(elapsedTime))
	metrics.MAlertingExecutionTime.Observe(float64(elapsedTime))
}
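
The elapsed-time expression used here, and in RequestMetrics above, relies on time.Duration being an int64 nanosecond count, so dividing by int64(time.Millisecond) yields whole milliseconds; the Duration.Milliseconds helper only arrived in later Go releases than the 1.12.x toolchain in use here. For example:

package main

import (
	"fmt"
	"time"
)

func main() {
	start := time.Now()
	time.Sleep(25 * time.Millisecond)

	// Same conversion as in DefaultEvalHandler.Eval and RequestMetrics above.
	elapsedMs := time.Since(start).Nanoseconds() / int64(time.Millisecond)
	fmt.Println(elapsedMs, "ms") // roughly 25 ms
}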

@@ -57,7 +57,7 @@ func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, no
	notifier := notifierState.notifier

	n.log.Debug("Sending notification", "type", notifier.GetType(), "uid", notifier.GetNotifierUID(), "isDefault", notifier.GetIsDefault())
	metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc()
	metrics.MAlertingNotificationSent.WithLabelValues(notifier.GetType()).Inc()

	err := notifier.Notify(evalContext)

@@ -43,6 +43,6 @@ func (arr *defaultRuleReader) fetch() []*Rule {
		}
	}

	metrics.M_Alerting_Active_Alerts.Set(float64(len(res)))
	metrics.MAlertingActiveAlerts.Set(float64(len(res)))
	return res
}

@@ -44,7 +44,7 @@ func (handler *defaultResultHandler) handle(evalContext *EvalContext) error {
		annotationData.Set("noData", true)
	}

	metrics.M_Alerting_Result_State.WithLabelValues(string(evalContext.Rule.State)).Inc()
	metrics.MAlertingResultState.WithLabelValues(string(evalContext.Rule.State)).Inc()
	if evalContext.shouldUpdateAlertState() {
		handler.log.Info("New state change", "alertId", evalContext.Rule.ID, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState)
Some files were not shown because too many files have changed in this diff.