Mirror of https://github.com/grafana/grafana.git (synced 2025-12-20 19:44:55 +08:00)

Compare commits: docs/add-d... → v6.3.6 (87 commits)
Commits (SHA1):

fdd211758e, 5d6512a7a2, b467557614, 2148a9ff6e, dc6219d8e0, 67bad726f1, 3f8624bffb, 7f1db70213, b2d86c76c6, 8c168a6b83,
f02d6c7be2, 496d0323bd, f455f02318, 1e58fdaffd, c27fd346d2, 59fa8cc82e, a557646484, be2e2330f5, 84d0a71b25, e0ee72a2ff,
881c229ee3, 9d97f48374, 39f00259f3, 84022650cb, e368080dea, a02c2b21d2, 3a58974314, 5954cb7220, f24ef80e52, 917b278e45,
483246016b, 43fe057baa, f2fffadcd6, de06c1c1b8, 830da0fda0, 78fff0161a, 06d4641a8f, e232629917, 57a57932af, 62a226b1c3,
a38dcc3ac7, c39b0e246b, f8f1f506ed, 9d57a1f192, ba4a870632, ef9ec32c32, 17235e4bd1, eb82b77782, 3af8aa5c4f, 5b588af73c,
5ec6eccfac, 237e0e8631, 7a165febf3, fd7c38c62f, 00519f1105, c8740d98b0, 54030deaf6, 880f6b63a1, d8ddfa5622, 9af2ea1b53,
d98ee66bd8, d3ecc963e6, 0961ec9748, a2c7570c5c, 9cbe0abb44, 2fecdaf6b4, a5f360e50e, 99f2386bd9, bffa956068, 1b7a54c084,
8128eb6c77, 10c4d6eccc, 8a062dde35, 041c1c3cb1, aba6df2e09, 5192504748, 8a0e2ac3a4, 373d082363, 94765294a2, 1f482a5f0b,
ae04813045, 3d7c52dcd1, 81f8158010, 5de014a91d, 777311a759, 829b5d0d40, 12878409db
@@ -19,7 +19,7 @@ version: 2
 jobs:
   mysql-integration-test:
     docker:
-      - image: circleci/golang:1.12.6
+      - image: circleci/golang:1.12.9
       - image: circleci/mysql:5.6-ram
         environment:
          MYSQL_ROOT_PASSWORD: rootpass
@@ -30,7 +30,7 @@ jobs:
     steps:
       - checkout
       - run: sudo apt update
-      - run: sudo apt install -y mysql-client
+      - run: sudo apt install -y default-mysql-client
       - run: dockerize -wait tcp://127.0.0.1:3306 -timeout 120s
       - run: cat devenv/docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass
       - run:
@@ -39,7 +39,7 @@ jobs:

   postgres-integration-test:
     docker:
-      - image: circleci/golang:1.12.6
+      - image: circleci/golang:1.12.9
       - image: circleci/postgres:9.3-ram
         environment:
          POSTGRES_USER: grafanatest
@@ -58,7 +58,7 @@ jobs:

   cache-server-test:
     docker:
-      - image: circleci/golang:1.12.6
+      - image: circleci/golang:1.12.9
       - image: circleci/redis:4-alpine
       - image: memcached
     working_directory: /go/src/github.com/grafana/grafana
@@ -98,6 +98,34 @@ jobs:
           path: public/e2e-test/screenShots/theOutput
           destination: output-screenshots

+  end-to-end-test-release:
+    docker:
+      - image: circleci/node:10-browsers
+      - image: grafana/grafana-dev:$CIRCLE_TAG
+    steps:
+      - run: dockerize -wait tcp://127.0.0.1:3000 -timeout 120s
+      - checkout
+      - restore_cache:
+          key: dependency-cache-{{ checksum "yarn.lock" }}
+      - run:
+          name: yarn install
+          command: 'yarn install --pure-lockfile --no-progress'
+          no_output_timeout: 5m
+      - save_cache:
+          key: dependency-cache-{{ checksum "yarn.lock" }}
+          paths:
+            - node_modules
+      - run:
+          name: run end-to-end tests
+          command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests'
+          no_output_timeout: 5m
+      - store_artifacts:
+          path: public/e2e-test/screenShots/theTruth
+          destination: expected-screenshots
+      - store_artifacts:
+          path: public/e2e-test/screenShots/theOutput
+          destination: output-screenshots
+
   codespell:
     docker:
       - image: circleci/python
@@ -116,7 +144,7 @@ jobs:

   lint-go:
     docker:
-      - image: circleci/golang:1.12.6
+      - image: circleci/golang:1.12.9
     environment:
       # we need CGO because of go-sqlite3
       CGO_ENABLED: 1
@@ -127,6 +155,15 @@ jobs:
           name: Lint Go
           command: 'make lint-go'

+  shellcheck:
+    machine: true
+    working_directory: ~/go/src/github.com/grafana/grafana
+    steps:
+      - checkout
+      - run:
+          name: ShellCheck
+          command: 'make shellcheck'
+
   test-frontend:
     docker:
       - image: circleci/node:10
@@ -148,7 +185,7 @@ jobs:

   test-backend:
     docker:
-      - image: circleci/golang:1.12.6
+      - image: circleci/golang:1.12.9
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -158,7 +195,7 @@ jobs:

   build-all:
     docker:
-      - image: grafana/build-container:1.2.7
+      - image: grafana/build-container:1.2.8
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -202,7 +239,7 @@ jobs:

   build:
     docker:
-      - image: grafana/build-container:1.2.7
+      - image: grafana/build-container:1.2.8
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -228,7 +265,7 @@ jobs:

   build-fast-backend:
     docker:
-      - image: grafana/build-container:1.2.7
+      - image: grafana/build-container:1.2.8
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -245,7 +282,7 @@ jobs:

   build-fast-frontend:
     docker:
-      - image: grafana/build-container:1.2.7
+      - image: grafana/build-container:1.2.8
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -269,7 +306,7 @@ jobs:

   build-fast-package:
     docker:
-      - image: grafana/build-container:1.2.7
+      - image: grafana/build-container:1.2.8
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -296,7 +333,7 @@ jobs:

   build-fast-save:
     docker:
-      - image: grafana/build-container:1.2.7
+      - image: grafana/build-container:1.2.8
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -382,7 +419,7 @@ jobs:

   build-enterprise:
     docker:
-      - image: grafana/build-container:1.2.7
+      - image: grafana/build-container:1.2.8
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -414,7 +451,7 @@ jobs:

   build-all-enterprise:
     docker:
-      - image: grafana/build-container:1.2.7
+      - image: grafana/build-container:1.2.8
     working_directory: /go/src/github.com/grafana/grafana
     steps:
       - checkout
@@ -635,6 +672,8 @@ workflows:
           filters: *filter-only-master
       - lint-go:
           filters: *filter-only-master
+      - shellcheck:
+          filters: *filter-only-master
       - test-frontend:
           filters: *filter-only-master
       - test-backend:
@@ -650,6 +689,7 @@ workflows:
             - test-frontend
             - codespell
             - lint-go
+            - shellcheck
             - mysql-integration-test
             - postgres-integration-test
             - build-oss-msi
@@ -662,6 +702,7 @@ workflows:
             - test-frontend
             - codespell
             - lint-go
+            - shellcheck
             - mysql-integration-test
             - postgres-integration-test
           filters: *filter-only-master
@@ -672,6 +713,7 @@ workflows:
             - test-frontend
             - codespell
             - lint-go
+            - shellcheck
             - mysql-integration-test
             - postgres-integration-test
             - build-all-enterprise
@@ -683,6 +725,7 @@ workflows:
             - test-frontend
             - codespell
             - lint-go
+            - shellcheck
             - mysql-integration-test
             - postgres-integration-test
           filters: *filter-only-master
@@ -704,6 +747,8 @@ workflows:
           filters: *filter-only-release
       - lint-go:
           filters: *filter-only-release
+      - shellcheck:
+          filters: *filter-only-release
       - test-frontend:
           filters: *filter-only-release
       - test-backend:
@@ -719,6 +764,7 @@ workflows:
             - test-frontend
             - codespell
             - lint-go
+            - shellcheck
             - mysql-integration-test
             - postgres-integration-test
             - build-oss-msi
@@ -731,6 +777,7 @@ workflows:
             - test-frontend
             - codespell
             - lint-go
+            - shellcheck
             - mysql-integration-test
             - postgres-integration-test
           filters: *filter-only-release
@@ -742,6 +789,7 @@ workflows:
            - test-frontend
            - codespell
            - lint-go
+           - shellcheck
            - mysql-integration-test
            - postgres-integration-test
           filters: *filter-only-release
@@ -752,9 +800,14 @@ workflows:
            - test-frontend
            - codespell
            - lint-go
+           - shellcheck
            - mysql-integration-test
            - postgres-integration-test
           filters: *filter-only-release
+      - end-to-end-test-release:
+          requires:
+            - grafana-docker-release
+          filters: *filter-only-release
+
   build-branches-and-prs:
     jobs:
@@ -771,6 +824,10 @@ workflows:
           filters: *filter-not-release-or-master
       - lint-go:
           filters: *filter-not-release-or-master
+      - lint-go:
+          filters: *filter-not-release-or-master
+      - shellcheck:
+          filters: *filter-not-release-or-master
       - test-frontend:
           filters: *filter-not-release-or-master
       - test-backend:
@@ -788,6 +845,7 @@ workflows:
            - test-frontend
            - codespell
            - lint-go
+           - shellcheck
            - mysql-integration-test
            - postgres-integration-test
            - cache-server-test
@@ -799,6 +857,7 @@ workflows:
            - test-frontend
            - codespell
            - lint-go
+           - shellcheck
            - mysql-integration-test
            - postgres-integration-test
            - cache-server-test
@@ -1,5 +1,5 @@
 # Golang build container
-FROM golang:1.12.4
+FROM golang:1.12.9

 WORKDIR $GOPATH/src/github.com/grafana/grafana

@@ -33,7 +33,7 @@ ENV NODE_ENV production
 RUN ./node_modules/.bin/grunt build

 # Final container
-FROM ubuntu:latest
+FROM ubuntu:18.04

 LABEL maintainer="Grafana team <hello@grafana.com>"

Makefile (10 lines changed)
@@ -2,8 +2,9 @@

 .PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go gosec revive golangci-lint go-vet test-go test-js test run clean devenv devenv-down revive-alerting

-GO := GO111MODULE=on go
-GO_FILES := ./pkg/...
+GO = GO111MODULE=on go
+GO_FILES ?= ./pkg/...
+SH_FILES ?= $(shell find ./scripts -name *.sh)

 all: deps build

@@ -111,6 +112,11 @@ go-vet:

 lint-go: go-vet golangci-lint revive revive-alerting gosec

+# with disabled SC1071 we are ignored some TCL,Expect `/usr/bin/env expect` scripts
+shellcheck: $(SH_FILES)
+	@docker run --rm -v "$$PWD:/mnt" koalaman/shellcheck:stable \
+	$(SH_FILES) -e SC1071
+
 run: scripts/go/bin/bra
 	@scripts/go/bin/bra run

@@ -214,6 +214,10 @@ external_enabled = true
 external_snapshot_url = https://snapshots-origin.raintank.io
 external_snapshot_name = Publish to snapshot.raintank.io

+# Set to true to enable this Grafana instance act as an external snapshot server and allow unauthenticated requests for
+# creating and deleting snapshots.
+public_mode = false
+
 # remove expired snapshot
 snapshot_remove_expired = true

@@ -379,17 +383,45 @@ send_client_credentials_via_post = false

 #################################### SAML Auth ###########################
 [auth.saml] # Enterprise only
+# Defaults to false. If true, the feature is enabled
 enabled = false
-private_key =
-private_key_path =
+
+# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
 certificate =
+
+# Path to the public X.509 certificate. Used to sign requests to the IdP
 certificate_path =
+
+# Base64-encoded private key. Used to decrypt assertions from the IdP
+private_key =
+
+# Path to the private key. Used to decrypt assertions from the IdP
+private_key_path =
+
+# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
 idp_metadata =
+
+# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
 idp_metadata_path =
+
+# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
 idp_metadata_url =
+
+# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
 max_issue_delay = 90s
+
+# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
 metadata_valid_duration = 48h
+
+# Friendly name or name of the attribute within the SAML assertion to use as the user's name
+assertion_attribute_name = displayName
+
+# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
+assertion_attribute_login = mail
+
+# Friendly name or name of the attribute within the SAML assertion to use as the user's email
+assertion_attribute_email = mail
+
 #################################### Basic Auth ##########################
 [auth.basic]
 enabled = true
@@ -560,8 +592,10 @@ enabled = true
 #################################### Internal Grafana Metrics ############
 # Metrics available at HTTP API Url /metrics
 [metrics]
 enabled = true
 interval_seconds = 10
+# Disable total stats (stat_totals_*) metrics to be generated
+disable_total_stats = false

 #If both are set, basic auth will be required for the metrics endpoint.
 basic_auth_username =
@@ -209,6 +209,10 @@
 ;external_snapshot_url = https://snapshots-origin.raintank.io
 ;external_snapshot_name = Publish to snapshot.raintank.io

+# Set to true to enable this Grafana instance act as an external snapshot server and allow unauthenticated requests for
+# creating and deleting snapshots.
+;public_mode = false
+
 # remove expired snapshot
 ;snapshot_remove_expired = true

@@ -334,18 +338,46 @@
 ;send_client_credentials_via_post = false

 #################################### SAML Auth ###########################
-;[auth.saml] # Enterprise only
+[auth.saml] # Enterprise only
+# Defaults to false. If true, the feature is enabled.
 ;enabled = false
-;private_key =
-;private_key_path =
+
+# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
 ;certificate =
+
+# Path to the public X.509 certificate. Used to sign requests to the IdP
 ;certificate_path =
+
+# Base64-encoded private key. Used to decrypt assertions from the IdP
+;private_key =
+
+;# Path to the private key. Used to decrypt assertions from the IdP
+;private_key_path =
+
+# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
 ;idp_metadata =
+
+# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
 ;idp_metadata_path =
+
+# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
 ;idp_metadata_url =
+
+# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds.
 ;max_issue_delay = 90s
+
+# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours.
 ;metadata_valid_duration = 48h
+
+# Friendly name or name of the attribute within the SAML assertion to use as the user's name
+;assertion_attribute_name = displayName
+
+# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
+;assertion_attribute_login = mail
+
+# Friendly name or name of the attribute within the SAML assertion to use as the user's email
+;assertion_attribute_email = mail
+
 #################################### Grafana.com Auth ####################
 [auth.grafana_com]
 ;enabled = false
@@ -492,6 +524,8 @@
 [metrics]
 # Disable / Enable internal metrics
 ;enabled = true
+# Disable total stats (stat_totals_*) metrics to be generated
+;disable_total_stats = false

 # Publish interval
 ;interval_seconds = 10
@@ -44,7 +44,7 @@
       "nullPointMode": "null",
       "options-gauge": {
         "baseColor": "#299c46",
-        "decimals": "2",
+        "decimals": 2,
         "maxValue": 100,
         "minValue": 0,
         "options": {
@@ -111,7 +111,7 @@
       "nullPointMode": "null",
       "options-gauge": {
         "baseColor": "#299c46",
-        "decimals": "",
+        "decimals": null,
         "maxValue": 100,
         "minValue": 0,
         "options": {
@@ -178,7 +178,7 @@
       "nullPointMode": "null",
       "options-gauge": {
         "baseColor": "#299c46",
-        "decimals": "",
+        "decimals": null,
         "maxValue": 100,
         "minValue": 0,
         "options": {
@@ -28,38 +28,6 @@ search_filter = "(cn=%s)"
 # An array of base dns to search through
 search_base_dns = ["dc=grafana,dc=org"]

-# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups.
-# This is done by enabling group_search_filter below. You must also set member_of= "cn"
-# in [servers.attributes] below.
-
-# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN
-# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of
-# below in such a way that the user's recursive group membership is considered.
-#
-# Nested Groups + Active Directory (AD) Example:
-#
-# AD groups store the Distinguished Names (DNs) of members, so your filter must
-# recursively search your groups for the authenticating user's DN. For example:
-#
-# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)"
-# group_search_filter_user_attribute = "distinguishedName"
-# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
-#
-# [servers.attributes]
-# ...
-# member_of = "distinguishedName"
-
-## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available)
-# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
-## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter.
-## Defaults to the value of username in [server.attributes]
-## Valid options are any of your values in [servers.attributes]
-## If you are using nested groups you probably want to set this and member_of in
-## [servers.attributes] to "distinguishedName"
-# group_search_filter_user_attribute = "distinguishedName"
-## An array of the base DNs to search through for groups. Typically uses ou=groups
-# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
-
 # Specify names of the ldap attributes your ldap uses
 [servers.attributes]
 name = "givenName"
devenv/docker/blocks/openldap/ldap_posix_dev.toml (new file, 57 lines):

# To troubleshoot and get more log info enable ldap debug logging in grafana.ini
# [log]
# filters = ldap:debug

[[servers]]
# Ldap server host (specify multiple hosts space separated)
host = "127.0.0.1"
# Default port is 389 or 636 if use_ssl = true
port = 389
# Set to true if ldap server supports TLS
use_ssl = false
# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS)
start_tls = false
# set to true if you want to skip ssl cert validation
ssl_skip_verify = false
# set to the path to your root CA certificate or leave unset to use system defaults
# root_ca_cert = "/path/to/certificate.crt"

# Search user bind dn
bind_dn = "cn=admin,dc=grafana,dc=org"
# Search user bind password
# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
bind_password = 'grafana'

# An array of base dns to search through
search_base_dns = ["dc=grafana,dc=org"]

search_filter = "(uid=%s)"

group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
group_search_filter_user_attribute = "uid"
group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]

[servers.attributes]
name = "givenName"
surname = "sn"
username = "cn"
member_of = "memberOf"
email = "email"

# Map ldap groups to grafana org roles
[[servers.group_mappings]]
group_dn = "cn=posix-admins,ou=groups,dc=grafana,dc=org"
org_role = "Admin"
grafana_admin = true

# The Grafana organization database id, optional, if left out the default org (id 1) will be used
# org_id = 1

[[servers.group_mappings]]
group_dn = "cn=editors,ou=groups,dc=grafana,dc=org"
org_role = "Editor"

[[servers.group_mappings]]
# If you want to match all (or no ldap groups) then you can use wildcard
group_dn = "*"
org_role = "Viewer"
@@ -12,7 +12,7 @@ After adding ldif files to `prepopulate`:

 ## Enabling LDAP in Grafana

-Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). To enable it in the .ini file to get Grafana to use this block:
+If you want to use users/groups with `memberOf` support Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). To enable it in the .ini file to get Grafana to use this block:

 ```ini
 [auth.ldap]
@@ -21,6 +21,8 @@ config_file = conf/ldap_dev.toml
 ; allow_sign_up = true
 ```

+Otherwise perform same actions for `ldap_dev_posix.toml` config.
+
 ## Groups & Users

 admins
@@ -38,3 +40,11 @@ editors
 ldap-editors
 no groups
 ldap-viewer
+
+
+## Groups & Users (POSIX)
+
+admins
+ldap-posix-admin
+no groups
+ldap-posix
|
|||||||
objectClass: organizationalPerson
|
objectClass: organizationalPerson
|
||||||
sn: ldap-torkel
|
sn: ldap-torkel
|
||||||
cn: ldap-torkel
|
cn: ldap-torkel
|
||||||
|
|
||||||
|
# admin for posix group (without support for memberOf attribute)
|
||||||
|
dn: uid=ldap-posix-admin,ou=users,dc=grafana,dc=org
|
||||||
|
mail: ldap-posix-admin@grafana.com
|
||||||
|
userPassword: grafana
|
||||||
|
objectclass: top
|
||||||
|
objectclass: posixAccount
|
||||||
|
objectclass: inetOrgPerson
|
||||||
|
homedirectory: /home/ldap-posix-admin
|
||||||
|
sn: ldap-posix-admin
|
||||||
|
cn: ldap-posix-admin
|
||||||
|
uid: ldap-posix-admin
|
||||||
|
uidnumber: 1
|
||||||
|
gidnumber: 1
|
||||||
|
|
||||||
|
# user for posix group (without support for memberOf attribute)
|
||||||
|
dn: uid=ldap-posix,ou=users,dc=grafana,dc=org
|
||||||
|
mail: ldap-posix@grafana.com
|
||||||
|
userPassword: grafana
|
||||||
|
objectclass: top
|
||||||
|
objectclass: posixAccount
|
||||||
|
objectclass: inetOrgPerson
|
||||||
|
homedirectory: /home/ldap-posix
|
||||||
|
sn: ldap-posix
|
||||||
|
cn: ldap-posix
|
||||||
|
uid: ldap-posix
|
||||||
|
uidnumber: 2
|
||||||
|
gidnumber: 2
|
||||||
|
|||||||
@@ -23,3 +23,21 @@ objectClass: groupOfNames
|
|||||||
member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
|
member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
|
||||||
member: cn=ldap-daniel,ou=users,dc=grafana,dc=org
|
member: cn=ldap-daniel,ou=users,dc=grafana,dc=org
|
||||||
member: cn=ldap-leo,ou=users,dc=grafana,dc=org
|
member: cn=ldap-leo,ou=users,dc=grafana,dc=org
|
||||||
|
|
||||||
|
# -- POSIX --
|
||||||
|
|
||||||
|
# posix admin group (without support for memberOf attribute)
|
||||||
|
dn: cn=posix-admins,ou=groups,dc=grafana,dc=org
|
||||||
|
cn: admins
|
||||||
|
objectClass: top
|
||||||
|
objectClass: posixGroup
|
||||||
|
gidNumber: 1
|
||||||
|
memberUid: ldap-posix-admin
|
||||||
|
|
||||||
|
# posix group (without support for memberOf attribute)
|
||||||
|
dn: cn=posix,ou=groups,dc=grafana,dc=org
|
||||||
|
cn: viewers
|
||||||
|
objectClass: top
|
||||||
|
objectClass: posixGroup
|
||||||
|
gidNumber: 2
|
||||||
|
memberUid: ldap-posix
|
||||||
|
|||||||
@@ -126,8 +126,6 @@ group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
 group_search_filter_user_attribute = "uid"
 ```

-Also set `member_of = "dn"` in the `[servers.attributes]` section.
-
 ### Group Mappings

 In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization and role. These will be synced every time the user logs in, with LDAP being
docs/sources/auth/saml.md (new file, 178 lines):

+++
title = "SAML Authentication"
description = "Grafana SAML Authentication"
keywords = ["grafana", "saml", "documentation", "saml-auth"]
aliases = ["/auth/saml/"]
type = "docs"
[menu.docs]
name = "SAML"
parent = "authentication"
weight = 5
+++

# SAML Authentication

> SAML Authentication integration is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "enterprise" >}}).

> Only available in Grafana v6.3+

The SAML authentication integration allows your Grafana users to log in by
using an external SAML Identity Provider (IdP). To enable this, Grafana becomes
a Service Provider (SP) in the authentication flow, interacting with the IdP to
exchange user information.

## Supported SAML

The SAML single-sign-on (SSO) standard is varied and flexible. Our implementation contains the subset of features needed to provide a smooth authentication experience into Grafana.

> Should you encounter any problems with our implementation, please don't hesitate to contact us.

At the moment of writing, Grafana supports:

1. From the Service Provider (SP) to the Identity Provider (IdP)

  - `HTTP-POST` binding
  - `HTTP-Redirect` binding

2. From the Identity Provider (IdP) to the Service Provider (SP)

  - `HTTP-POST` binding

3. In terms of security, we currently support signed and encrypted Assertions. However, signed or encrypted requests are not supported.

4. In terms of initiation, only SP-initiated requests are supported. There's no support for IdP-initiated request.

## Set up SAML Authentication

To use the SAML integration, you need to enable SAML in the [main config file]({{< relref "installation/configuration.md" >}}).

```bash
[auth.saml]
# Defaults to false. If true, the feature is enabled
enabled = true

# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
certificate =

# Path to the public X.509 certificate. Used to sign requests to the IdP
certificate_path =

# Base64-encoded private key. Used to decrypt assertions from the IdP
private_key =

# Path to the private key. Used to decrypt assertions from the IdP
private_key_path =

# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata =

# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata_path =

# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
idp_metadata_url =

# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
max_issue_delay =

# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
metadata_valid_duration =

# Friendly name or name of the attribute within the SAML assertion to use as the user's name
assertion_attribute_name = displayName

# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
assertion_attribute_login = mail

# Friendly name or name of the attribute within the SAML assertion to use as the user's email
assertion_attribute_email = mail
```

Important to note:

- like any other Grafana configuration, use of [environment variables for these options is supported]({{< relref "installation/configuration.md#using-environment-variables" >}})
- only one form of configuration option is required. Using multiple forms, e.g. both `certificate` and `certificate_path` will result in an error

## Grafana Configuration

An example working configuration example looks like:

```bash
[auth.saml]
enabled = true
certificate_path = "/path/to/certificate.cert"
private_key_path = "/path/to/private_key.pem"
metadata_path = "/my/metadata.xml"
max_issue_delay = 90s
metadata_valid_duration = 48h
assertion_attribute_name = displayName
assertion_attribute_login = mail
assertion_attribute_email = mail
```

And here is a comprehensive list of the options:

| Setting | Required | Description | Default |
| --- | --- | --- | --- |
| `enabled` | No | Whenever SAML authentication is allowed | `false` |
| `certificate` or `certificate_path` | Yes | Base64-encoded string or Path for the SP X.509 certificate | |
| `private_key` or `private_key_path` | Yes | Base64-encoded string or Path for the SP private key | |
| `idp_metadata` or `idp_metadata_path` or `idp_metadata_url` | Yes | Base64-encoded string, Path or URL for the IdP SAML metadata XML | |
| `max_issue_delay` | No | Duration, since the IdP issued a response and the SP is allowed to process it | `90s` |
| `metadata_valid_duration` | No | Duration, for how long the SP's metadata should be valid | `48h` |
| `assertion_attribute_name` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's name | `displayName` |
| `assertion_attribute_login` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's login handle | `mail` |
| `assertion_attribute_email` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's email | `mail` |

### Cert and Private Key

The SAML SSO standard uses asymmetric encryption to exchange information between the SP (Grafana) and the IdP. To perform such encryption, you need a public part and a private part. In this case, the X.509 certificate provides the public part, while the private key provides the private part.

Grafana supports two ways of specifying both the `certificate` and `private_key`. Without a suffix (e.g. `certificate=`), the configuration assumes you've supplied the base64-encoded file contents. However, if specified with the `_path` suffix (e.g. `certificate_path=`) Grafana will treat it as a file path and attempt to read the file from the file system.

### IdP Metadata

Expanding on the above, we'll also need the public part from our IdP for message verification. The SAML IdP metadata XML tells us where and how we should exchange the user information.

Currently, we support three ways of specifying the IdP metadata. Without a suffix `idp_metadata=` Grafana assumes base64-encoded XML file contents, with the `_path` suffix assumes a file path and attempts to read the file from the file system and with the `_url` suffix assumes an URL and attempts to load the metadata from the given location.

### Max Issue Delay

Prevention of SAML response replay attacks and internal clock skews between the SP (Grafana), and the IdP is covered. You can set a maximum amount of time between the IdP issuing a response and the SP (Grafana) processing it.

The configuration options is specified as a duration e.g. `max_issue_delay = 90s` or `max_issue_delay = 1h`

### Metadata valid duration

As an SP, our metadata is likely to expire at some point, e.g. due to a certificate rotation or change of location binding. Grafana allows you to specify for how long the metadata should be valid. Leveraging the standard's `validUntil` field, you can tell consumers until when your metadata is going to be valid. The duration is computed by adding the duration to the current time.

The configuration option is specified as a duration e.g. `metadata_valid_duration = 48h`

## Identity Provider (IdP) registration

For the SAML integration to work correctly, you need to make the IdP aware of the SP.

The integration provides two key endpoints as part of Grafana:

- The `/saml/metadata` endpoint. Which contains the SP's metadata. You can either download and upload it manually or make the IdP request it directly from the endpoint. Some providers name it Identifier or Entity ID.

- The `/saml/acs` endpoint. Which is intended to receive the ACS (Assertion Customer Service) callback. Some providers name it SSO URL or Reply URL.

## Assertion mapping

During the SAML SSO authentication flow, we receive the ACS (Assertion Customer Service) callback. The callback contains all the relevant information of the user under authentication embedded in the SAML response. Grafana parses the response to create (or update) the user within its internal database.

For Grafana to map the user information, it looks at the individual attributes within the assertion. You can think of these attributes as Key/Value pairs (although, they contain more information than that).

Grafana provides configuration options that let you modify which keys to look at for these values. The data we need to create the user in Grafana is Name, Login handle, and email.

An example is `assertion_attribute_name = "givenName"` where Grafana looks within the assertion for an attribute with a friendly name or name of `givenName`. Both, the friendly name (e.g. `givenName`) or the name (e.g. `urn:oid:2.5.4.42`) can be used interchangeably as the value for the configuration option.

## Troubleshooting

To troubleshoot and get more log info enable saml debug logging in the [main config file]({{< relref "installation/configuration.md" >}}).

```bash
[log]
filters = saml.auth:debug
```
@@ -29,6 +29,10 @@ With Grafana Enterprise you can set up synchronization between LDAP Groups and T

 Datasource permissions allow you to restrict query access to only specific Teams and Users. [Learn More]({{< relref "permissions/datasource_permissions.md" >}}).

+### SAML Authentication
+
+Enables your Grafana Enterprise users to authenticate with SAML. [Learn More]({{< relref "auth/saml.md" >}}).
+
 ### Premium Plugins

 With a Grafana Enterprise license you will get access to premium plugins, including:
|||||||
@@ -56,7 +56,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins
|
|||||||
This option is now renamed (and moved to Options sub section above your queries):
|
This option is now renamed (and moved to Options sub section above your queries):
|
||||||

|

|
||||||
|
|
||||||
Datas source selection & options & help are now above your metric queries.
|
Data source selection & options & help are now above your metric queries.
|
||||||

|

|
||||||
|
|
||||||
### Minor Changes
|
### Minor Changes
|
||||||
|
|||||||
@@ -130,6 +130,8 @@ belonging to an LDAP group that gives them access to Grafana.
|
|||||||
|
|
||||||
Built-in support for SAML is now available in Grafana Enterprise.
|
Built-in support for SAML is now available in Grafana Enterprise.
|
||||||
|
|
||||||
|
[See docs]({{< relref "auth/saml.md" >}})
|
||||||
|
|
||||||
### Team Sync for GitHub OAuth
|
### Team Sync for GitHub OAuth
|
||||||
|
|
||||||
When setting up OAuth with GitHub it's now possible to sync GitHub teams with Teams in Grafana.
|
When setting up OAuth with GitHub it's now possible to sync GitHub teams with Teams in Grafana.
|
||||||
|
|||||||
@@ -533,6 +533,9 @@ If set configures the username to use for basic authentication on the metrics en
|
|||||||
### basic_auth_password
|
### basic_auth_password
|
||||||
If set configures the password to use for basic authentication on the metrics endpoint.
|
If set configures the password to use for basic authentication on the metrics endpoint.
|
||||||
|
|
||||||
|
### disable_total_stats
|
||||||
|
If set to `true`, then total stats generation (`stat_totals_*` metrics) is disabled. The default is `false`.
|
||||||
|
|
||||||
### interval_seconds
|
### interval_seconds
|
||||||
|
|
||||||
Flush/Write interval when sending metrics to external TSDB. Defaults to 10s.
|
Flush/Write interval when sending metrics to external TSDB. Defaults to 10s.
|
||||||
|
|||||||
@@ -37,15 +37,11 @@ export class ConfigCtrl {
|
|||||||
|
|
||||||
postUpdate() {
|
postUpdate() {
|
||||||
if (!this.appModel.enabled) {
|
if (!this.appModel.enabled) {
|
||||||
return this.$q.resolve();
|
return;
|
||||||
}
|
}
|
||||||
return this.appEditCtrl.importDashboards().then(() => {
|
|
||||||
this.enabled = true;
|
// TODO, whatever you want
|
||||||
return {
|
console.log('Post Update:', this);
|
||||||
url: "plugins/raintank-kubernetes-app/page/clusters",
|
|
||||||
message: "Kubernetes App enabled!"
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ConfigCtrl.templateUrl = 'components/config/config.html';
|
ConfigCtrl.templateUrl = 'components/config/config.html';
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
 [
-  { "version": "v6.2", "path": "/", "archived": false, "current": true },
+  { "version": "v6.3", "path": "/", "archived": false, "current": true },
+  { "version": "v6.2", "path": "/v6.2", "archived": true },
   { "version": "v6.1", "path": "/v6.1", "archived": true },
   { "version": "v6.0", "path": "/v6.0", "archived": true },
   { "version": "v5.4", "path": "/v5.4", "archived": true },
go.mod (4 lines changed)
@@ -52,7 +52,7 @@ require (
 	github.com/onsi/gomega v1.5.0 // indirect
 	github.com/opentracing/opentracing-go v1.1.0
 	github.com/patrickmn/go-cache v2.1.0+incompatible
-	github.com/pkg/errors v0.8.1
+	github.com/pkg/errors v0.8.1 // indirect
 	github.com/prometheus/client_golang v0.9.2
 	github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90
 	github.com/prometheus/common v0.2.0
@@ -64,7 +64,7 @@ require (
 	github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a
 	github.com/stretchr/testify v1.3.0
 	github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf
-	github.com/ua-parser/uap-go v0.0.0-20190303233514-1004ccd816b3
+	github.com/ua-parser/uap-go v0.0.0-20190826212731-daf92ba38329
 	github.com/uber-go/atomic v1.3.2 // indirect
 	github.com/uber/jaeger-client-go v2.16.0+incompatible
 	github.com/uber/jaeger-lib v2.0.0+incompatible // indirect
|||||||
4
go.sum
4
go.sum
@@ -202,8 +202,8 @@ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0
|
|||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf h1:Z2X3Os7oRzpdJ75iPqWZc0HeJWFYNCvKsfpQwFpRNTA=
|
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf h1:Z2X3Os7oRzpdJ75iPqWZc0HeJWFYNCvKsfpQwFpRNTA=
|
||||||
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf/go.mod h1:M8agBzgqHIhgj7wEn9/0hJUZcrvt9VY+Ln+S1I5Mha0=
|
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf/go.mod h1:M8agBzgqHIhgj7wEn9/0hJUZcrvt9VY+Ln+S1I5Mha0=
|
||||||
github.com/ua-parser/uap-go v0.0.0-20190303233514-1004ccd816b3 h1:E7xa7Zur8hLPvw+03gAeQ9esrglfV389j2PcwhiGf/I=
|
github.com/ua-parser/uap-go v0.0.0-20190826212731-daf92ba38329 h1:VBsKFh4W1JEMz3eLCmM9zOJKZdDkP5W4b3Y4hc7SbZc=
|
||||||
github.com/ua-parser/uap-go v0.0.0-20190303233514-1004ccd816b3/go.mod h1:OBcG9bn7sHtXgarhUEb3OfCnNsgtGnkVf41ilSZ3K3E=
|
github.com/ua-parser/uap-go v0.0.0-20190826212731-daf92ba38329/go.mod h1:OBcG9bn7sHtXgarhUEb3OfCnNsgtGnkVf41ilSZ3K3E=
|
||||||
github.com/uber-go/atomic v1.3.2 h1:Azu9lPBWRNKzYXSIwRfgRuDuS0YKsK4NFhiQv98gkxo=
|
github.com/uber-go/atomic v1.3.2 h1:Azu9lPBWRNKzYXSIwRfgRuDuS0YKsK4NFhiQv98gkxo=
|
||||||
github.com/uber-go/atomic v1.3.2/go.mod h1:/Ct5t2lcmbJ4OSe/waGBoaVvVqtO0bmtfVNex1PFV8g=
|
github.com/uber-go/atomic v1.3.2/go.mod h1:/Ct5t2lcmbJ4OSe/waGBoaVvVqtO0bmtfVNex1PFV8g=
|
||||||
github.com/uber/jaeger-client-go v2.16.0+incompatible h1:Q2Pp6v3QYiocMxomCaJuwQGFt7E53bPYqEgug/AoBtY=
|
github.com/uber/jaeger-client-go v2.16.0+incompatible h1:Q2Pp6v3QYiocMxomCaJuwQGFt7E53bPYqEgug/AoBtY=
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
"company": "Grafana Labs"
|
"company": "Grafana Labs"
|
||||||
},
|
},
|
||||||
"name": "grafana",
|
"name": "grafana",
|
||||||
"version": "6.3.0-pre",
|
"version": "6.3.6",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "http://github.com/grafana/grafana.git"
|
"url": "http://github.com/grafana/grafana.git"
|
||||||
|
|||||||
@@ -1,3 +1,6 @@
+import { Threshold } from './threshold';
+import { ValueMapping } from './valueMapping';
+
 export enum LoadingState {
   NotStarted = 'NotStarted',
   Loading = 'Loading',
@@ -49,6 +52,12 @@ export interface Field {
   decimals?: number | null; // Significant digits (for display)
   min?: number | null;
   max?: number | null;
+
+  // Convert input values into a display value
+  mappings?: ValueMapping[];
+
+  // Must be sorted by 'value', first value is always -Infinity
+  thresholds?: Threshold[];
 }

 export interface Labels {
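A minimal sketch of what the extended `Field` shape above enables: a field can now carry display metadata (value mappings and sorted thresholds) alongside its numeric limits. The `ValueMapping` shape is not shown in this diff, so the one used here is illustrative only, and the interfaces are local stand-ins rather than the actual package exports:

```typescript
// Local stand-ins mirroring the diff; not the actual @grafana/data exports.
interface Threshold {
  value: number;
  color: string;
}

interface ValueMapping {
  // Hypothetical minimal shape, for illustration only.
  value: string;
  text: string;
}

interface Field {
  name: string;
  decimals?: number | null;
  min?: number | null;
  max?: number | null;
  mappings?: ValueMapping[]; // convert input values into a display value
  thresholds?: Threshold[]; // sorted by 'value', first value is always -Infinity
}

// Example field: CPU usage shown with one decimal, a "N/A" mapping, and a red threshold at 80.
const cpuField: Field = {
  name: 'cpu',
  decimals: 1,
  min: 0,
  max: 100,
  mappings: [{ value: '-1', text: 'N/A' }],
  thresholds: [
    { value: -Infinity, color: 'green' },
    { value: 80, color: 'red' },
  ],
};

console.log(cpuField.thresholds?.length); // 2
```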
@@ -2,6 +2,7 @@ export * from './data';
 export * from './dataLink';
 export * from './logs';
 export * from './navModel';
+export * from './select';
 export * from './time';
 export * from './threshold';
 export * from './utils';
packages/grafana-data/src/types/select.ts (new file, 10 lines):

/**
 * Used in select elements
 */
export interface SelectableValue<T = any> {
  label?: string;
  value?: T;
  imgUrl?: string;
  description?: string;
  [key: string]: any;
}
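The new `SelectableValue` type is generic over the option's `value`, so the same shape can back string, number, or object options. A small usage sketch; the `findOption` helper below is just for illustration and is not part of the package:

```typescript
interface SelectableValue<T = any> {
  label?: string;
  value?: T;
  imgUrl?: string;
  description?: string;
  [key: string]: any;
}

// Options for a hypothetical refresh-interval picker.
const intervalOptions: Array<SelectableValue<number>> = [
  { label: '10s', value: 10, description: 'Refresh every 10 seconds' },
  { label: '1m', value: 60 },
  { label: '5m', value: 300 },
];

// Look up the option matching a stored value, falling back to the first one.
function findOption<T>(options: Array<SelectableValue<T>>, value?: T): SelectableValue<T> {
  return options.find(o => o.value === value) ?? options[0];
}

console.log(findOption(intervalOptions, 60).label); // '1m'
```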
@@ -1,5 +1,4 @@
 export interface Threshold {
-  index: number;
   value: number;
   color: string;
 }
@@ -5,6 +5,18 @@ import { TimeZone } from '../types';
 
 const units: DurationUnit[] = ['y', 'M', 'w', 'd', 'h', 'm', 's'];
 
+export function isMathString(text: string | DateTime | Date): boolean {
+  if (!text) {
+    return false;
+  }
+
+  if (typeof text === 'string' && (text.substring(0, 3) === 'now' || text.includes('||'))) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
 /**
  * Parses different types input to a moment instance. There is a specific formatting language that can be used
  * if text arg is string. See unit tests for examples.

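A quick illustrative sketch of the isMathString helper added above (a usage sketch only, not part of the diff; it assumes the helper is re-exported from the package root like the other date/time utilities):

import { isMathString } from '@grafana/data';

isMathString('now-6h');               // true: relative-time strings start with 'now'
isMathString('2019-08-14T10:00:00Z'); // false: a plain timestamp
isMathString('');                     // false: empty input is rejected up front
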
@@ -1,6 +1,14 @@
-import { getFieldReducers, ReducerID, reduceField } from './index';
+import { fieldReducers, ReducerID, reduceField } from './fieldReducer';
 
 import _ from 'lodash';
+import { DataFrame } from '../types/data';
 
+/**
+ * Run a reducer and get back the value
+ */
+function reduce(series: DataFrame, fieldIndex: number, id: string): any {
+  return reduceField({ series, fieldIndex, reducers: [id] })[id];
+}
+
 describe('Stats Calculators', () => {
   const basicTable = {
@@ -9,29 +17,16 @@ describe('Stats Calculators', () => {
   };
 
   it('should load all standard stats', () => {
-    const names = [
-      ReducerID.sum,
-      ReducerID.max,
-      ReducerID.min,
-      ReducerID.logmin,
-      ReducerID.mean,
-      ReducerID.last,
-      ReducerID.first,
-      ReducerID.count,
-      ReducerID.range,
-      ReducerID.diff,
-      ReducerID.step,
-      ReducerID.delta,
-      // ReducerID.allIsZero,
-      // ReducerID.allIsNull,
-    ];
-    const stats = getFieldReducers(names);
-    expect(stats.length).toBe(names.length);
+    for (const id of Object.keys(ReducerID)) {
+      const reducer = fieldReducers.getIfExists(id);
+      const found = reducer ? reducer.id : '<NOT FOUND>';
+      expect(found).toEqual(id);
+    }
   });
 
   it('should fail to load unknown stats', () => {
     const names = ['not a stat', ReducerID.max, ReducerID.min, 'also not a stat'];
-    const stats = getFieldReducers(names);
+    const stats = fieldReducers.list(names);
     expect(stats.length).toBe(2);
 
     const found = stats.map(v => v.id);
@@ -92,6 +87,34 @@ describe('Stats Calculators', () => {
     expect(stats.delta).toEqual(300);
   });
 
+  it('consistenly check allIsNull/allIsZero', () => {
+    const empty = {
+      fields: [{ name: 'A' }],
+      rows: [],
+    };
+    const allNull = ({
+      fields: [{ name: 'A' }],
+      rows: [null, null, null, null],
+    } as unknown) as DataFrame;
+    const allNull2 = {
+      fields: [{ name: 'A' }],
+      rows: [[null], [null], [null], [null]],
+    };
+    const allZero = {
+      fields: [{ name: 'A' }],
+      rows: [[0], [0], [0], [0]],
+    };
+
+    expect(reduce(empty, 0, ReducerID.allIsNull)).toEqual(true);
+    expect(reduce(allNull, 0, ReducerID.allIsNull)).toEqual(true);
+    expect(reduce(allNull2, 0, ReducerID.allIsNull)).toEqual(true);
+
+    expect(reduce(empty, 0, ReducerID.allIsZero)).toEqual(false);
+    expect(reduce(allNull, 0, ReducerID.allIsZero)).toEqual(false);
+    expect(reduce(allNull2, 0, ReducerID.allIsZero)).toEqual(false);
+    expect(reduce(allZero, 0, ReducerID.allIsZero)).toEqual(true);
+  });
+
   it('consistent results for first/last value with null', () => {
     const info = [
       {

@@ -1,7 +1,8 @@
 // Libraries
 import isNumber from 'lodash/isNumber';
 
-import { DataFrame, NullValueMode } from '../types/index';
+import { DataFrame, NullValueMode } from '../types';
+import { Registry, RegistryItem } from './registry';
 
 export enum ReducerID {
   sum = 'sum',
@@ -34,38 +35,13 @@ export interface FieldCalcs {
 // Internal function
 type FieldReducer = (data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;
 
-export interface FieldReducerInfo {
-  id: string;
-  name: string;
-  description: string;
-  alias?: string; // optional secondary key. 'avg' vs 'mean', 'total' vs 'sum'
-
+export interface FieldReducerInfo extends RegistryItem {
   // Internal details
   emptyInputResult?: any; // typically null, but some things like 'count' & 'sum' should be zero
   standard: boolean; // The most common stats can all be calculated in a single pass
   reduce?: FieldReducer;
 }
 
-/**
- * @param ids list of stat names or null to get all of them
- */
-export function getFieldReducers(ids?: string[]): FieldReducerInfo[] {
-  if (ids === null || ids === undefined) {
-    if (!hasBuiltIndex) {
-      getById(ReducerID.mean);
-    }
-    return listOfStats;
-  }
-
-  return ids.reduce((list, id) => {
-    const stat = getById(id);
-    if (stat) {
-      list.push(stat);
-    }
-    return list;
-  }, new Array<FieldReducerInfo>());
-}
-
 interface ReduceFieldOptions {
   series: DataFrame;
   fieldIndex: number;
@@ -83,7 +59,7 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
     return {};
   }
 
-  const queue = getFieldReducers(reducers);
+  const queue = fieldReducers.list(reducers);
 
   // Return early for empty series
   // This lets the concrete implementations assume at least one row
@@ -122,122 +98,107 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
 //
 // ------------------------------------------------------------------------------
 
-// private registry of all stats
-interface TableStatIndex {
-  [id: string]: FieldReducerInfo;
-}
-
-const listOfStats: FieldReducerInfo[] = [];
-const index: TableStatIndex = {};
-let hasBuiltIndex = false;
-
-function getById(id: string): FieldReducerInfo | undefined {
-  if (!hasBuiltIndex) {
-    [
-      {
-        id: ReducerID.lastNotNull,
-        name: 'Last (not null)',
-        description: 'Last non-null value',
-        standard: true,
-        alias: 'current',
-        reduce: calculateLastNotNull,
-      },
-      {
-        id: ReducerID.last,
-        name: 'Last',
-        description: 'Last Value',
-        standard: true,
-        reduce: calculateLast,
-      },
-      { id: ReducerID.first, name: 'First', description: 'First Value', standard: true, reduce: calculateFirst },
-      {
-        id: ReducerID.firstNotNull,
-        name: 'First (not null)',
-        description: 'First non-null value',
-        standard: true,
-        reduce: calculateFirstNotNull,
-      },
-      { id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
-      { id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
-      { id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, alias: 'avg' },
-      {
-        id: ReducerID.sum,
-        name: 'Total',
-        description: 'The sum of all values',
-        emptyInputResult: 0,
-        standard: true,
-        alias: 'total',
-      },
-      {
-        id: ReducerID.count,
-        name: 'Count',
-        description: 'Number of values in response',
-        emptyInputResult: 0,
-        standard: true,
-      },
-      {
-        id: ReducerID.range,
-        name: 'Range',
-        description: 'Difference between minimum and maximum values',
-        standard: true,
-      },
-      {
-        id: ReducerID.delta,
-        name: 'Delta',
-        description: 'Cumulative change in value',
-        standard: true,
-      },
-      {
-        id: ReducerID.step,
-        name: 'Step',
-        description: 'Minimum interval between values',
-        standard: true,
-      },
-      {
-        id: ReducerID.diff,
-        name: 'Difference',
-        description: 'Difference between first and last values',
-        standard: true,
-      },
-      {
-        id: ReducerID.logmin,
-        name: 'Min (above zero)',
-        description: 'Used for log min scale',
-        standard: true,
-      },
-      {
-        id: ReducerID.changeCount,
-        name: 'Change Count',
-        description: 'Number of times the value changes',
-        standard: false,
-        reduce: calculateChangeCount,
-      },
-      {
-        id: ReducerID.distinctCount,
-        name: 'Distinct Count',
-        description: 'Number of distinct values',
-        standard: false,
-        reduce: calculateDistinctCount,
-      },
-    ].forEach(info => {
-      const { id, alias } = info;
-      if (index.hasOwnProperty(id)) {
-        console.warn('Duplicate Stat', id, info, index);
-      }
-      index[id] = info;
-      if (alias) {
-        if (index.hasOwnProperty(alias)) {
-          console.warn('Duplicate Stat (alias)', alias, info, index);
-        }
-        index[alias] = info;
-      }
-      listOfStats.push(info);
-    });
-    hasBuiltIndex = true;
-  }
-
-  return index[id];
-}
+export const fieldReducers = new Registry<FieldReducerInfo>(() => [
+  {
+    id: ReducerID.lastNotNull,
+    name: 'Last (not null)',
+    description: 'Last non-null value',
+    standard: true,
+    aliasIds: ['current'],
+    reduce: calculateLastNotNull,
+  },
+  {
+    id: ReducerID.last,
+    name: 'Last',
+    description: 'Last Value',
+    standard: true,
+    reduce: calculateLast,
+  },
+  { id: ReducerID.first, name: 'First', description: 'First Value', standard: true, reduce: calculateFirst },
+  {
+    id: ReducerID.firstNotNull,
+    name: 'First (not null)',
+    description: 'First non-null value',
+    standard: true,
+    reduce: calculateFirstNotNull,
+  },
+  { id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
+  { id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
+  { id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, aliasIds: ['avg'] },
+  {
+    id: ReducerID.sum,
+    name: 'Total',
+    description: 'The sum of all values',
+    emptyInputResult: 0,
+    standard: true,
+    aliasIds: ['total'],
+  },
+  {
+    id: ReducerID.count,
+    name: 'Count',
+    description: 'Number of values in response',
+    emptyInputResult: 0,
+    standard: true,
+  },
+  {
+    id: ReducerID.range,
+    name: 'Range',
+    description: 'Difference between minimum and maximum values',
+    standard: true,
+  },
+  {
+    id: ReducerID.delta,
+    name: 'Delta',
+    description: 'Cumulative change in value',
+    standard: true,
+  },
+  {
+    id: ReducerID.step,
+    name: 'Step',
+    description: 'Minimum interval between values',
+    standard: true,
+  },
+  {
+    id: ReducerID.diff,
+    name: 'Difference',
+    description: 'Difference between first and last values',
+    standard: true,
+  },
+  {
+    id: ReducerID.logmin,
+    name: 'Min (above zero)',
+    description: 'Used for log min scale',
+    standard: true,
+  },
+  {
+    id: ReducerID.allIsZero,
+    name: 'All Zeros',
+    description: 'All values are zero',
+    emptyInputResult: false,
+    standard: true,
+  },
+  {
+    id: ReducerID.allIsNull,
+    name: 'All Nulls',
+    description: 'All values are null',
+    emptyInputResult: true,
+    standard: true,
+  },
+  {
+    id: ReducerID.changeCount,
+    name: 'Change Count',
+    description: 'Number of times the value changes',
+    standard: false,
+    reduce: calculateChangeCount,
+  },
+  {
+    id: ReducerID.distinctCount,
+    name: 'Distinct Count',
+    description: 'Number of distinct values',
+    standard: false,
+    reduce: calculateDistinctCount,
+  },
+]);
 
 function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
   const calcs = {
@@ -253,7 +214,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
     count: 0,
     nonNullCount: 0,
     allIsNull: true,
-    allIsZero: false,
+    allIsZero: true,
     range: null,
     diff: null,
     delta: 0,
@@ -264,7 +225,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
   } as FieldCalcs;
 
   for (let i = 0; i < data.rows.length; i++) {
-    let currentValue = data.rows[i][fieldIndex];
+    let currentValue = data.rows[i] ? data.rows[i][fieldIndex] : null;
     if (i === 0) {
       calcs.first = currentValue;
     }
@@ -350,6 +311,10 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
     calcs.mean = calcs.sum! / calcs.nonNullCount;
   }
 
+  if (calcs.allIsNull) {
+    calcs.allIsZero = false;
+  }
+
   if (calcs.max !== null && calcs.min !== null) {
     calcs.range = calcs.max - calcs.min;
   }

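A minimal usage sketch of the registry-based reducer API introduced above, mirroring the imports used in the test file in this diff; the DataFrame literal is an assumed minimal shape, not real data:

import { fieldReducers, reduceField, ReducerID } from './fieldReducer';

// Look up reducer metadata by id; aliases such as 'avg' resolve to the same entry
const mean = fieldReducers.get(ReducerID.mean);
const known = fieldReducers.list([ReducerID.min, ReducerID.max, 'not-a-stat']); // unknown ids are dropped

// Run selected reducers over one field of a frame
const series: any = { fields: [{ name: 'A' }], rows: [[1], [2], [3]] };
const calcs = reduceField({ series, fieldIndex: 0, reducers: [ReducerID.mean, ReducerID.sum] });
// calcs.mean === 2 and calcs.sum === 6 for the rows above
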
@@ -1,4 +1,5 @@
 export * from './string';
+export * from './registry';
 export * from './markdown';
 export * from './processDataFrame';
 export * from './csv';

@@ -29,6 +29,15 @@ describe('toDataFrame', () => {
     expect(series.fields[0].name).toEqual('Value');
   });
 
+  it('assumes TimeSeries values are numbers', () => {
+    const input1 = {
+      target: 'time',
+      datapoints: [[100, 1], [200, 2]],
+    };
+    const data = toDataFrame(input1);
+    expect(data.fields[0].type).toBe(FieldType.number);
+  });
+
   it('keeps dataFrame unchanged', () => {
     const input = {
       fields: [{ text: 'A' }, { text: 'B' }, { text: 'C' }],

@@ -29,6 +29,7 @@ function convertTimeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
     fields: [
       {
         name: timeSeries.target || 'Value',
+        type: FieldType.number,
         unit: timeSeries.unit,
       },
       {

packages/grafana-data/src/utils/registry.ts (new file)
@@ -0,0 +1,134 @@
+import { SelectableValue } from '../types/select';
+
+export interface RegistryItem {
+  id: string; // Unique Key -- saved in configs
+  name: string; // Display Name, can change without breaking configs
+  description: string;
+  aliasIds?: string[]; // when the ID changes, we may want backwards compatibility ('current' => 'last')
+
+  /**
+   * Some extensions should not be user selectable
+   * like: 'all' and 'any' matchers;
+   */
+  excludeFromPicker?: boolean;
+}
+
+interface RegistrySelectInfo {
+  options: Array<SelectableValue<string>>;
+  current: Array<SelectableValue<string>>;
+}
+
+export class Registry<T extends RegistryItem> {
+  private ordered: T[] = [];
+  private byId = new Map<string, T>();
+  private initalized = false;
+
+  constructor(private init?: () => T[]) {}
+
+  getIfExists(id: string | undefined): T | undefined {
+    if (!this.initalized) {
+      if (this.init) {
+        for (const ext of this.init()) {
+          this.register(ext);
+        }
+      }
+      this.sort();
+      this.initalized = true;
+    }
+    if (id) {
+      return this.byId.get(id);
+    }
+    return undefined;
+  }
+
+  get(id: string): T {
+    const v = this.getIfExists(id);
+    if (!v) {
+      throw new Error('Undefined: ' + id);
+    }
+    return v;
+  }
+
+  selectOptions(current?: string[], filter?: (ext: T) => boolean): RegistrySelectInfo {
+    if (!this.initalized) {
+      this.getIfExists('xxx'); // will trigger init
+    }
+
+    const select = {
+      options: [],
+      current: [],
+    } as RegistrySelectInfo;
+
+    const currentIds: any = {};
+    if (current) {
+      for (const id of current) {
+        currentIds[id] = true;
+      }
+    }
+
+    for (const ext of this.ordered) {
+      if (ext.excludeFromPicker) {
+        continue;
+      }
+      if (filter && !filter(ext)) {
+        continue;
+      }
+
+      const option = {
+        value: ext.id,
+        label: ext.name,
+        description: ext.description,
+      };
+
+      select.options.push(option);
+      if (currentIds[ext.id]) {
+        select.current.push(option);
+      }
+    }
+    return select;
+  }
+
+  /**
+   * Return a list of values by ID, or all values if not specified
+   */
+  list(ids?: any[]): T[] {
+    if (ids) {
+      const found: T[] = [];
+      for (const id of ids) {
+        const v = this.getIfExists(id);
+        if (v) {
+          found.push(v);
+        }
+      }
+      return found;
+    }
+    if (!this.initalized) {
+      this.getIfExists('xxx'); // will trigger init
+    }
+    return [...this.ordered]; // copy of everythign just in case
+  }
+
+  register(ext: T) {
+    if (this.byId.has(ext.id)) {
+      throw new Error('Duplicate Key:' + ext.id);
+    }
+    this.byId.set(ext.id, ext);
+    this.ordered.push(ext);
+
+    if (ext.aliasIds) {
+      for (const alias of ext.aliasIds) {
+        if (!this.byId.has(alias)) {
+          this.byId.set(alias, ext);
+        }
+      }
+    }
+
+    if (this.initalized) {
+      this.sort();
+    }
+  }
+
+  private sort() {
+    // TODO sort the list
+  }
+}

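A hypothetical example of how the new Registry class can back other extension points; the NumberFormatter type and its entries are invented for illustration and are not part of this change:

import { Registry, RegistryItem } from './registry';

interface NumberFormatter extends RegistryItem {
  format: (value: number) => string; // hypothetical extension-specific field
}

const formatters = new Registry<NumberFormatter>(() => [
  { id: 'fixed', name: 'Fixed', description: 'Two decimals', format: v => v.toFixed(2) },
  { id: 'percent', name: 'Percent', description: 'Percentage', aliasIds: ['pct'], format: v => `${v * 100}%` },
]);

formatters.get('fixed').format(3.14159); // '3.14'
formatters.getIfExists('pct');           // alias ids resolve to the 'percent' entry
formatters.list(['fixed', 'missing']);   // only known ids are returned
formatters.selectOptions(['percent']);   // { options: [...all], current: [the percent option] }
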
@@ -1,23 +1,22 @@
 import { Threshold } from '../types';
 
-export function getThresholdForValue(
-  thresholds: Threshold[],
-  value: number | null | string | undefined
-): Threshold | null {
-  if (thresholds.length === 1) {
-    return thresholds[0];
-  }
-
-  const atThreshold = thresholds.filter(threshold => (value as number) === threshold.value)[0];
-  if (atThreshold) {
-    return atThreshold;
-  }
-
-  const belowThreshold = thresholds.filter(threshold => (value as number) > threshold.value);
-  if (belowThreshold.length > 0) {
-    const nearestThreshold = belowThreshold.sort((t1: Threshold, t2: Threshold) => t2.value - t1.value)[0];
-    return nearestThreshold;
-  }
-
-  return null;
-}
+export function getActiveThreshold(value: number, thresholds: Threshold[]): Threshold {
+  let active = thresholds[0];
+  for (const threshold of thresholds) {
+    if (value >= threshold.value) {
+      active = threshold;
+    } else {
+      break;
+    }
+  }
+  return active;
+}
+
+/**
+ * Sorts the thresholds
+ */
+export function sortThresholds(thresholds: Threshold[]) {
+  return thresholds.sort((t1, t2) => {
+    return t1.value - t2.value;
+  });
+}

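A worked sketch of the new threshold helpers, assuming the Threshold shape from this diff ({ value, color }); the colors and values are arbitrary:

import { getActiveThreshold, sortThresholds } from '@grafana/data';

const thresholds = sortThresholds([
  { value: 90, color: 'red' },
  { value: -Infinity, color: 'green' },
  { value: 70, color: 'orange' },
]);
// sorted ascending: green (-Infinity), orange (70), red (90)

getActiveThreshold(50, thresholds); // green: the loop stops at the first threshold above the value
getActiveThreshold(75, thresholds); // orange
getActiveThreshold(95, thresholds); // red
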
@@ -49,9 +49,9 @@ function addBarGaugeStory(name: string, overrides: Partial<Props>) {
     orientation: VizOrientation.Vertical,
     displayMode: 'basic',
     thresholds: [
-      { index: 0, value: -Infinity, color: 'green' },
-      { index: 1, value: threshold1Value, color: threshold1Color },
-      { index: 1, value: threshold2Value, color: threshold2Color },
+      { value: -Infinity, color: 'green' },
+      { value: threshold1Value, color: threshold1Color },
+      { value: threshold2Value, color: threshold2Color },
     ],
   };
 

@@ -25,11 +25,7 @@ function getProps(propOverrides?: Partial<Props>): Props {
     maxValue: 100,
     minValue: 0,
     displayMode: 'basic',
-    thresholds: [
-      { index: 0, value: -Infinity, color: 'green' },
-      { index: 1, value: 70, color: 'orange' },
-      { index: 2, value: 90, color: 'red' },
-    ],
+    thresholds: [{ value: -Infinity, color: 'green' }, { value: 70, color: 'orange' }, { value: 90, color: 'red' }],
     height: 300,
     width: 300,
     value: {

@@ -7,7 +7,7 @@ import { getColorFromHexRgbOrName } from '../../utils';
 
 // Types
 import { DisplayValue, Themeable, VizOrientation } from '../../types';
-import { Threshold, TimeSeriesValue, getThresholdForValue } from '@grafana/data';
+import { Threshold, TimeSeriesValue, getActiveThreshold } from '@grafana/data';
 
 const MIN_VALUE_HEIGHT = 18;
 const MAX_VALUE_HEIGHT = 50;
@@ -87,8 +87,14 @@ export class BarGauge extends PureComponent<Props> {
 
   getCellColor(positionValue: TimeSeriesValue): CellColors {
     const { thresholds, theme, value } = this.props;
-    const activeThreshold = getThresholdForValue(thresholds, positionValue);
+    if (positionValue === null) {
+      return {
+        background: 'gray',
+        border: 'gray',
+      };
+    }
+
+    const activeThreshold = getActiveThreshold(positionValue, thresholds);
     if (activeThreshold !== null) {
       const color = getColorFromHexRgbOrName(activeThreshold.color, theme.type);
 
@@ -474,7 +480,7 @@ export function getBarGradient(props: Props, maxSize: number): string {
 export function getValueColor(props: Props): string {
   const { thresholds, theme, value } = props;
 
-  const activeThreshold = getThresholdForValue(thresholds, value.numeric);
+  const activeThreshold = getActiveThreshold(value.numeric, thresholds);
 
   if (activeThreshold !== null) {
     return getColorFromHexRgbOrName(activeThreshold.color, theme.type);

@@ -77,7 +77,7 @@ export class CustomScrollbar extends Component<Props> {
             {...passedProps}
             className={cx(
               css`
-                visibility: ${hideTrack ? 'none' : 'visible'};
+                visibility: ${hideTrack ? 'hidden' : 'visible'};
               `,
               track
             )}

@@ -14,7 +14,7 @@ const setup = (propOverrides?: object) => {
     minValue: 0,
     showThresholdMarkers: true,
     showThresholdLabels: false,
-    thresholds: [{ index: 0, value: -Infinity, color: '#7EB26D' }],
+    thresholds: [{ value: -Infinity, color: '#7EB26D' }],
     height: 300,
     width: 300,
     value: {
@@ -48,9 +48,9 @@ describe('Get thresholds formatted', () => {
   it('should get the correct formatted values when thresholds are added', () => {
     const { instance } = setup({
       thresholds: [
-        { index: 0, value: -Infinity, color: '#7EB26D' },
-        { index: 1, value: 50, color: '#EAB839' },
-        { index: 2, value: 75, color: '#6ED0E0' },
+        { value: -Infinity, color: '#7EB26D' },
+        { value: 50, color: '#EAB839' },
+        { value: 75, color: '#6ED0E0' },
       ],
     });
 

@@ -43,12 +43,12 @@ export class Gauge extends PureComponent<Props> {
     const lastThreshold = thresholds[thresholds.length - 1];
 
     return [
-      ...thresholds.map(threshold => {
-        if (threshold.index === 0) {
+      ...thresholds.map((threshold, index) => {
+        if (index === 0) {
           return { value: minValue, color: getColorFromHexRgbOrName(threshold.color, theme.type) };
         }
 
-        const previousThreshold = thresholds[threshold.index - 1];
+        const previousThreshold = thresholds[index - 1];
         return { value: threshold.value, color: getColorFromHexRgbOrName(previousThreshold.color, theme.type) };
       }),
       { value: maxValue, color: getColorFromHexRgbOrName(lastThreshold.color, theme.type) },

@@ -1,6 +1,6 @@
 import React, { PureComponent } from 'react';
 import classNames from 'classnames';
-import { SelectOptionItem } from '../Select/Select';
+import { SelectableValue } from '@grafana/data';
 import { Tooltip } from '../Tooltip/Tooltip';
 import { ButtonSelect } from '../Select/ButtonSelect';
 
@@ -23,7 +23,7 @@ export class RefreshPicker extends PureComponent<Props> {
     super(props);
   }
 
-  intervalsToOptions = (intervals: string[] | undefined): Array<SelectOptionItem<string>> => {
+  intervalsToOptions = (intervals: string[] | undefined): Array<SelectableValue<string>> => {
     const intervalsOrDefault = intervals || defaultIntervals;
     const options = intervalsOrDefault
       .filter(str => str !== '')
@@ -37,7 +37,7 @@ export class RefreshPicker extends PureComponent<Props> {
     return options;
   };
 
-  onChangeSelect = (item: SelectOptionItem<string>) => {
+  onChangeSelect = (item: SelectableValue<string>) => {
     const { onIntervalChanged } = this.props;
     if (onIntervalChanged) {
       // @ts-ignore

@@ -4,7 +4,7 @@ import { action } from '@storybook/addon-actions';
 import { withKnobs, object, text } from '@storybook/addon-knobs';
 import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
 import { UseState } from '../../utils/storybook/UseState';
-import { SelectOptionItem } from './Select';
+import { SelectableValue } from '@grafana/data';
 import { ButtonSelect } from './ButtonSelect';
 
 const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
@@ -12,9 +12,9 @@ const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
 ButtonSelectStories.addDecorator(withCenteredStory).addDecorator(withKnobs);
 
 ButtonSelectStories.add('default', () => {
-  const intialState: SelectOptionItem<string> = { label: 'A label', value: 'A value' };
-  const value = object<SelectOptionItem<string>>('Selected Value:', intialState);
-  const options = object<Array<SelectOptionItem<string>>>('Options:', [
+  const intialState: SelectableValue<string> = { label: 'A label', value: 'A value' };
+  const value = object<SelectableValue<string>>('Selected Value:', intialState);
+  const options = object<Array<SelectableValue<string>>>('Options:', [
     intialState,
     { label: 'Another label', value: 'Another value' },
   ]);

@@ -1,6 +1,7 @@
 import React, { PureComponent, ReactElement } from 'react';
-import Select, { SelectOptionItem } from './Select';
+import Select from './Select';
 import { PopperContent } from '../Tooltip/PopperController';
+import { SelectableValue } from '@grafana/data';
 
 interface ButtonComponentProps {
   label: ReactElement | string | undefined;
@@ -30,13 +31,13 @@ const ButtonComponent = (buttonProps: ButtonComponentProps) => (props: any) => {
 
 export interface Props<T> {
   className: string | undefined;
-  options: Array<SelectOptionItem<T>>;
-  value?: SelectOptionItem<T>;
+  options: Array<SelectableValue<T>>;
+  value?: SelectableValue<T>;
   label?: ReactElement | string;
   iconClass?: string;
   components?: any;
   maxMenuHeight?: number;
-  onChange: (item: SelectOptionItem<T>) => void;
+  onChange: (item: SelectableValue<T>) => void;
   tooltipContent?: PopperContent<any>;
   isMenuOpen?: boolean;
   onOpenMenu?: () => void;
@@ -45,7 +46,7 @@ export interface Props<T> {
 }
 
 export class ButtonSelect<T> extends PureComponent<Props<T>> {
-  onChange = (item: SelectOptionItem<T>) => {
+  onChange = (item: SelectableValue<T>) => {
     const { onChange } = this.props;
     onChange(item);
   };

@@ -19,23 +19,16 @@ import resetSelectStyles from './resetSelectStyles';
 import { CustomScrollbar } from '../CustomScrollbar/CustomScrollbar';
 import { PopperContent } from '../Tooltip/PopperController';
 import { Tooltip } from '../Tooltip/Tooltip';
+import { SelectableValue } from '@grafana/data';
 
-export interface SelectOptionItem<T> {
-  label?: string;
-  value?: T;
-  imgUrl?: string;
-  description?: string;
-  [key: string]: any;
-}
-
 export interface CommonProps<T> {
   defaultValue?: any;
-  getOptionLabel?: (item: SelectOptionItem<T>) => string;
-  getOptionValue?: (item: SelectOptionItem<T>) => string;
-  onChange: (item: SelectOptionItem<T>) => {} | void;
+  getOptionLabel?: (item: SelectableValue<T>) => string;
+  getOptionValue?: (item: SelectableValue<T>) => string;
+  onChange: (item: SelectableValue<T>) => {} | void;
   placeholder?: string;
   width?: number;
-  value?: SelectOptionItem<T>;
+  value?: SelectableValue<T>;
   className?: string;
   isDisabled?: boolean;
   isSearchable?: boolean;
@@ -57,12 +50,12 @@ export interface CommonProps<T> {
 }
 
 export interface SelectProps<T> extends CommonProps<T> {
-  options: Array<SelectOptionItem<T>>;
+  options: Array<SelectableValue<T>>;
 }
 
 interface AsyncProps<T> extends CommonProps<T> {
   defaultOptions: boolean;
-  loadOptions: (query: string) => Promise<Array<SelectOptionItem<T>>>;
+  loadOptions: (query: string) => Promise<Array<SelectableValue<T>>>;
   loadingMessage?: () => string;
 }
 

@@ -3,11 +3,10 @@ import { interval, Subscription, Subject, of, NEVER } from 'rxjs';
 import { tap, switchMap } from 'rxjs/operators';
 import _ from 'lodash';
 
-import { stringToMs } from '@grafana/data';
+import { stringToMs, SelectableValue } from '@grafana/data';
 import { isLive } from '../RefreshPicker/RefreshPicker';
-import { SelectOptionItem } from '../Select/Select';
 
-export function getIntervalFromString(strInterval: string): SelectOptionItem<number> {
+export function getIntervalFromString(strInterval: string): SelectableValue<number> {
   return {
     label: strInterval,
     value: stringToMs(strInterval),

@@ -8,10 +8,10 @@ import { StatsPicker } from '../StatsPicker/StatsPicker';
 
 // Types
 import { FieldDisplayOptions, DEFAULT_FIELD_DISPLAY_VALUES_LIMIT } from '../../utils/fieldDisplay';
-import Select, { SelectOptionItem } from '../Select/Select';
-import { Field, ReducerID, toNumberString, toIntegerOrUndefined } from '@grafana/data';
+import Select from '../Select/Select';
+import { Field, ReducerID, toNumberString, toIntegerOrUndefined, SelectableValue } from '@grafana/data';
 
-const showOptions: Array<SelectOptionItem<boolean>> = [
+const showOptions: Array<SelectableValue<boolean>> = [
   {
     value: true,
     label: 'All Values',
@@ -31,7 +31,7 @@ export interface Props {
 }
 
 export class FieldDisplayEditor extends PureComponent<Props> {
-  onShowValuesChange = (item: SelectOptionItem<boolean>) => {
+  onShowValuesChange = (item: SelectableValue<boolean>) => {
     const val = item.value === true;
     this.props.onChange({ ...this.props.value, values: val });
   };

@@ -7,8 +7,7 @@ import { FormLabel } from '../FormLabel/FormLabel';
 import { UnitPicker } from '../UnitPicker/UnitPicker';
 
 // Types
-import { toIntegerOrUndefined, Field } from '@grafana/data';
-import { SelectOptionItem } from '../Select/Select';
+import { toIntegerOrUndefined, Field, SelectableValue } from '@grafana/data';
 
 import { VAR_SERIES_NAME, VAR_FIELD_NAME, VAR_CALC, VAR_CELL_PREFIX } from '../../utils/fieldDisplay';
 
@@ -54,7 +53,7 @@ export const FieldPropertiesEditor: React.FC<Props> = ({ value, onChange, showMi
     [value.max, onChange]
   );
 
-  const onUnitChange = (unit: SelectOptionItem<string>) => {
+  const onUnitChange = (unit: SelectableValue<string>) => {
     onChange({ ...value, unit: unit.value });
   };
 

@@ -0,0 +1,39 @@
+import { sharedSingleStatMigrationCheck } from './SingleStatBaseOptions';
+
+describe('sharedSingleStatMigrationCheck', () => {
+  it('from old valueOptions model without pluginVersion', () => {
+    const panel = {
+      options: {
+        valueOptions: {
+          unit: 'watt',
+          stat: 'last',
+          decimals: 5,
+        },
+        minValue: 10,
+        maxValue: 100,
+        valueMappings: [{ type: 1, value: '1', text: 'OK' }],
+        thresholds: [
+          {
+            color: 'green',
+            index: 0,
+            value: null,
+          },
+          {
+            color: 'orange',
+            index: 1,
+            value: 40,
+          },
+          {
+            color: 'red',
+            index: 2,
+            value: 80,
+          },
+        ],
+      },
+      title: 'Usage',
+      type: 'bargauge',
+    };
+
+    expect(sharedSingleStatMigrationCheck(panel as any)).toMatchSnapshot();
+  });
+});

@@ -3,7 +3,7 @@ import omit from 'lodash/omit';
 
 import { VizOrientation, PanelModel } from '../../types/panel';
 import { FieldDisplayOptions } from '../../utils/fieldDisplay';
-import { Field, getFieldReducers } from '@grafana/data';
+import { fieldReducers, Threshold, sortThresholds } from '@grafana/data';
 
 export interface SingleStatBaseOptions {
   fieldOptions: FieldDisplayOptions;
@@ -25,40 +25,99 @@ export const sharedSingleStatOptionsCheck = (
   return options;
 };
 
-export const sharedSingleStatMigrationCheck = (panel: PanelModel<SingleStatBaseOptions>) => {
+export function sharedSingleStatMigrationCheck(panel: PanelModel<SingleStatBaseOptions>) {
   if (!panel.options) {
     // This happens on the first load or when migrating from angular
     return {};
   }
 
-  // This migration aims to keep the most recent changes up-to-date
-  // Plugins should explicitly migrate for known version changes and only use this
-  // as a backup
-  const old = panel.options as any;
-  if (old.valueOptions) {
-    const { valueOptions } = old;
-
-    const fieldOptions = (old.fieldOptions = {} as FieldDisplayOptions);
-    fieldOptions.mappings = old.valueMappings;
-    fieldOptions.thresholds = old.thresholds;
-
-    const field = (fieldOptions.defaults = {} as Field);
-    if (valueOptions) {
-      field.unit = valueOptions.unit;
-      field.decimals = valueOptions.decimals;
-
-      // Make sure the stats have a valid name
-      if (valueOptions.stat) {
-        fieldOptions.calcs = getFieldReducers([valueOptions.stat]).map(s => s.id);
-      }
-    }
-
-    field.min = old.minValue;
-    field.max = old.maxValue;
-
-    // remove old props
-    return omit(old, 'valueMappings', 'thresholds', 'valueOptions', 'minValue', 'maxValue');
-  }
-
-  return panel.options;
-};
+  const previousVersion = parseFloat(panel.pluginVersion || '6.1');
+  let options = panel.options as any;
+
+  if (previousVersion < 6.2) {
+    options = migrateFromValueOptions(options);
+  }
+
+  if (previousVersion < 6.3) {
+    options = moveThresholdsAndMappingsToField(options);
+  }
+
+  return options as SingleStatBaseOptions;
+}
+
+export function moveThresholdsAndMappingsToField(old: any) {
+  const { fieldOptions } = old;
+
+  if (!fieldOptions) {
+    return old;
+  }
+
+  const { mappings, thresholds, ...rest } = old.fieldOptions;
+
+  return {
+    ...old,
+    fieldOptions: {
+      ...rest,
+      defaults: {
+        ...fieldOptions.defaults,
+        mappings,
+        thresholds: migrateOldThresholds(thresholds),
+      },
+    },
+  };
+}
+
+/*
+ * Moves valueMappings and thresholds from root to new fieldOptions object
+ * Renames valueOptions to to defaults and moves it under fieldOptions
+ */
+export function migrateFromValueOptions(old: any) {
+  const { valueOptions } = old;
+  if (!valueOptions) {
+    return old;
+  }
+
+  const fieldOptions: any = {};
+  const fieldDefaults: any = {};
+
+  fieldOptions.mappings = old.valueMappings;
+  fieldOptions.thresholds = old.thresholds;
+  fieldOptions.defaults = fieldDefaults;
+
+  fieldDefaults.unit = valueOptions.unit;
+  fieldDefaults.decimals = valueOptions.decimals;
+
+  // Make sure the stats have a valid name
+  if (valueOptions.stat) {
+    const reducer = fieldReducers.get(valueOptions.stat);
+    if (reducer) {
+      fieldOptions.calcs = [reducer.id];
+    }
+  }
+
+  fieldDefaults.min = old.minValue;
+  fieldDefaults.max = old.maxValue;
+
+  const newOptions = {
+    ...old,
+    fieldOptions,
+  };
+
+  return omit(newOptions, 'valueMappings', 'thresholds', 'valueOptions', 'minValue', 'maxValue');
+}
+
+export function migrateOldThresholds(thresholds?: any[]): Threshold[] | undefined {
+  if (!thresholds || !thresholds.length) {
+    return undefined;
+  }
+  const copy = thresholds.map(t => {
+    return {
+      // Drops 'index'
+      value: t.value === null ? -Infinity : t.value,
+      color: t.color,
+    };
+  });
+  sortThresholds(copy);
+  copy[0].value = -Infinity;
+  return copy;
+}

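A small sketch of what the threshold migration above does to a pre-6.3 model; the input mirrors the test fixture in this diff and the import path assumes the same file as the test:

import { migrateOldThresholds } from './SingleStatBaseOptions';

// Older panels stored an index per step and used null for the base threshold
const old = [
  { index: 0, value: null, color: 'green' },
  { index: 1, value: 40, color: 'orange' },
  { index: 2, value: 80, color: 'red' },
];

migrateOldThresholds(old);
// => [
//      { value: -Infinity, color: 'green' },
//      { value: 40, color: 'orange' },
//      { value: 80, color: 'red' },
//    ]
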
@@ -0,0 +1,38 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`sharedSingleStatMigrationCheck from old valueOptions model without pluginVersion 1`] = `
+Object {
+  "fieldOptions": Object {
+    "calcs": Array [
+      "last",
+    ],
+    "defaults": Object {
+      "decimals": 5,
+      "mappings": Array [
+        Object {
+          "text": "OK",
+          "type": 1,
+          "value": "1",
+        },
+      ],
+      "max": 100,
+      "min": 10,
+      "thresholds": Array [
+        Object {
+          "color": "green",
+          "value": -Infinity,
+        },
+        Object {
+          "color": "orange",
+          "value": 40,
+        },
+        Object {
+          "color": "red",
+          "value": 80,
+        },
+      ],
+      "unit": "watt",
+    },
+  },
+}
+`;

@@ -5,8 +5,7 @@ import difference from 'lodash/difference';
 
 import { Select } from '../index';
 
-import { getFieldReducers } from '@grafana/data';
-import { SelectOptionItem } from '../Select/Select';
+import { fieldReducers, SelectableValue } from '@grafana/data';
 
 interface Props {
   placeholder?: string;
@@ -34,7 +33,7 @@ export class StatsPicker extends PureComponent<Props> {
   checkInput = () => {
     const { stats, allowMultiple, defaultStat, onChange } = this.props;
 
-    const current = getFieldReducers(stats);
+    const current = fieldReducers.list(stats);
     if (current.length !== stats.length) {
       const found = current.map(v => v.id);
       const notFound = difference(stats, found);
@@ -54,7 +53,7 @@ export class StatsPicker extends PureComponent<Props> {
     }
   };
 
-  onSelectionChange = (item: SelectOptionItem<string>) => {
+  onSelectionChange = (item: SelectableValue<string>) => {
     const { onChange } = this.props;
     if (isArray(item)) {
       onChange(item.map(v => v.value));
@@ -65,24 +64,16 @@ export class StatsPicker extends PureComponent<Props> {
 
   render() {
     const { width, stats, allowMultiple, defaultStat, placeholder } = this.props;
-    const options = getFieldReducers().map(s => {
-      return {
-        value: s.id,
-        label: s.name,
-        description: s.description,
-      };
-    });
-
-    const value: Array<SelectOptionItem<string>> = options.filter(option => stats.find(stat => option.value === stat));
-
+    const select = fieldReducers.selectOptions(stats);
     return (
       <Select
         width={width}
-        value={value}
+        value={select.current}
         isClearable={!defaultStat}
         isMulti={allowMultiple}
         isSearchable={true}
-        options={options}
+        options={select.options}
         placeholder={placeholder}
         onChange={this.onSelectionChange}
       />

@@ -1,6 +1,6 @@
 import React, { ChangeEvent } from 'react';
 import { mount } from 'enzyme';
-import { ThresholdsEditor, Props } from './ThresholdsEditor';
+import { ThresholdsEditor, Props, threshodsWithoutKey } from './ThresholdsEditor';
 import { colors } from '../../utils';
 
 const setup = (propOverrides?: Partial<Props>) => {
@@ -20,6 +20,10 @@ const setup = (propOverrides?: Partial<Props>) => {
   };
 };
 
+function getCurrentThresholds(editor: ThresholdsEditor) {
+  return threshodsWithoutKey(editor.state.thresholds);
+}
+
 describe('Render', () => {
   it('should render with base threshold', () => {
     const { wrapper } = setup();
@@ -32,60 +36,55 @@ describe('Initialization', () => {
   it('should add a base threshold if missing', () => {
     const { instance } = setup();
 
-    expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
+    expect(getCurrentThresholds(instance)).toEqual([{ value: -Infinity, color: colors[0] }]);
   });
 });
 
 describe('Add threshold', () => {
-  it('should not add threshold at index 0', () => {
-    const { instance } = setup();
-
-    instance.onAddThreshold(0);
-
-    expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
-  });
-
   it('should add threshold', () => {
     const { instance } = setup();
 
-    instance.onAddThreshold(1);
+    instance.onAddThresholdAfter(instance.state.thresholds[0]);
 
-    expect(instance.state.thresholds).toEqual([
-      { index: 0, value: -Infinity, color: colors[0] },
-      { index: 1, value: 50, color: colors[2] },
+    expect(getCurrentThresholds(instance)).toEqual([
+      { value: -Infinity, color: colors[0] }, // 0
+      { value: 50, color: colors[2] }, // 1
    ]);
   });
 
   it('should add another threshold above a first', () => {
     const { instance } = setup({
-      thresholds: [{ index: 0, value: -Infinity, color: colors[0] }, { index: 1, value: 50, color: colors[2] }],
+      thresholds: [
+        { value: -Infinity, color: colors[0] }, // 0
+        { value: 50, color: colors[2] }, // 1
+      ],
     });
 
-    instance.onAddThreshold(2);
+    instance.onAddThresholdAfter(instance.state.thresholds[1]);
 
-    expect(instance.state.thresholds).toEqual([
-      { index: 0, value: -Infinity, color: colors[0] },
-      { index: 1, value: 50, color: colors[2] },
-      { index: 2, value: 75, color: colors[3] },
+    expect(getCurrentThresholds(instance)).toEqual([
+      { value: -Infinity, color: colors[0] }, // 0
+      { value: 50, color: colors[2] }, // 1
+      { value: 75, color: colors[3] }, // 2
     ]);
   });
 
   it('should add another threshold between first and second index', () => {
     const { instance } = setup({
       thresholds: [
-        { index: 0, value: -Infinity, color: colors[0] },
-        { index: 1, value: 50, color: colors[2] },
-        { index: 2, value: 75, color: colors[3] },
+        { value: -Infinity, color: colors[0] },
+        { value: 50, color: colors[2] },
+        { value: 75, color: colors[3] },
       ],
     });
 
-    instance.onAddThreshold(2);
+    instance.onAddThresholdAfter(instance.state.thresholds[1]);
 
-    expect(instance.state.thresholds).toEqual([
-      { index: 0, value: -Infinity, color: colors[0] },
-      { index: 1, value: 50, color: colors[2] },
-      { index: 2, value: 62.5, color: colors[4] },
-      { index: 3, value: 75, color: colors[3] },
+    expect(getCurrentThresholds(instance)).toEqual([
+      { value: -Infinity, color: colors[0] },
+      { value: 50, color: colors[2] },
+      { value: 62.5, color: colors[4] },
+      { value: 75, color: colors[3] },
     ]);
   });
 });
@@ -93,30 +92,30 @@ describe('Add threshold', () => {
 describe('Remove threshold', () => {
   it('should not remove threshold at index 0', () => {
     const thresholds = [
-      { index: 0, value: -Infinity, color: '#7EB26D' },
-      { index: 1, value: 50, color: '#EAB839' },
-      { index: 2, value: 75, color: '#6ED0E0' },
+      { value: -Infinity, color: '#7EB26D' },
+      { value: 50, color: '#EAB839' },
+      { value: 75, color: '#6ED0E0' },
     ];
     const { instance } = setup({ thresholds });
 
-    instance.onRemoveThreshold(thresholds[0]);
+    instance.onRemoveThreshold(instance.state.thresholds[0]);
 
-    expect(instance.state.thresholds).toEqual(thresholds);
+    expect(getCurrentThresholds(instance)).toEqual(thresholds);
   });
 
   it('should remove threshold', () => {
     const thresholds = [
-      { index: 0, value: -Infinity, color: '#7EB26D' },
-      { index: 1, value: 50, color: '#EAB839' },
-      { index: 2, value: 75, color: '#6ED0E0' },
+      { value: -Infinity, color: '#7EB26D' },
+      { value: 50, color: '#EAB839' },
+      { value: 75, color: '#6ED0E0' },
    ];
     const { instance } = setup({ thresholds });
 
-    instance.onRemoveThreshold(thresholds[1]);
+    instance.onRemoveThreshold(instance.state.thresholds[1]);
 
-    expect(instance.state.thresholds).toEqual([
-      { index: 0, value: -Infinity, color: '#7EB26D' },
-      { index: 1, value: 75, color: '#6ED0E0' },
+    expect(getCurrentThresholds(instance)).toEqual([
+      { value: -Infinity, color: '#7EB26D' },
+      { value: 75, color: '#6ED0E0' },
     ]);
   });
 });
@@ -124,25 +123,25 @@ describe('Remove threshold', () => {
 describe('change threshold value', () => {
   it('should not change threshold at index 0', () => {
     const thresholds = [
-      { index: 0, value: -Infinity, color: '#7EB26D' },
-      { index: 1, value: 50, color: '#EAB839' },
-      { index: 2, value: 75, color: '#6ED0E0' },
+      { value: -Infinity, color: '#7EB26D' },
+      { value: 50, color: '#EAB839' },
+      { value: 75, color: '#6ED0E0' },
     ];
     const { instance } = setup({ thresholds });
 
     const mockEvent = ({ target: { value: '12' } } as any) as ChangeEvent<HTMLInputElement>;
 
-    instance.onChangeThresholdValue(mockEvent, thresholds[0]);
+    instance.onChangeThresholdValue(mockEvent, instance.state.thresholds[0]);
 
-    expect(instance.state.thresholds).toEqual(thresholds);
+    expect(getCurrentThresholds(instance)).toEqual(thresholds);
   });
 
   it('should update value', () => {
     const { instance } = setup();
     const thresholds = [
-      { index: 0, value: -Infinity, color: '#7EB26D' },
-      { index: 1, value: 50, color: '#EAB839' },
-      { index: 2, value: 75, color: '#6ED0E0' },
+      { value: -Infinity, color: '#7EB26D', key: 1 },
+      { value: 50, color: '#EAB839', key: 2 },
+      { value: 75, color: '#6ED0E0', key: 3 },
     ];
 
     instance.state = {
@@ -153,10 +152,10 @@ describe('change threshold value', () => {
 
     instance.onChangeThresholdValue(mockEvent, thresholds[1]);
 
-    expect(instance.state.thresholds).toEqual([
-      { index: 0, value: -Infinity, color: '#7EB26D' },
-      { index: 1, value: 78, color: '#EAB839' },
-      { index: 2, value: 75, color: '#6ED0E0' },
+    expect(getCurrentThresholds(instance)).toEqual([
+      { value: -Infinity, color: '#7EB26D' },
+      { value: 78, color: '#EAB839' },
+      { value: 75, color: '#6ED0E0' },
     ]);
   });
 });
@@ -165,9 +164,9 @@ describe('on blur threshold value', () => {
   it('should resort rows and update indexes', () => {
     const { instance } = setup();
     const thresholds = [
-      { index: 0, value: -Infinity, color: '#7EB26D' },
-      { index: 1, value: 78, color: '#EAB839' },
-      { index: 2, value: 75, color: '#6ED0E0' },
+      { value: -Infinity, color: '#7EB26D', key: 1 },
+      { value: 78, color: '#EAB839', key: 2 },
+      { value: 75, color: '#6ED0E0', key: 3 },
     ];
 
     instance.setState({
@@ -176,10 +175,10 @@ describe('on blur threshold value', () => {
 
     instance.onBlur();
 
-    expect(instance.state.thresholds).toEqual([
-      { index: 0, value: -Infinity, color: '#7EB26D' },
-      { index: 1, value: 75, color: '#6ED0E0' },
-      { index: 2, value: 78, color: '#EAB839' },
+    expect(getCurrentThresholds(instance)).toEqual([
+      { value: -Infinity, color: '#7EB26D' },
+      { value: 75, color: '#6ED0E0' },
+      { value: 78, color: '#EAB839' },
     ]);
]);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import React, { PureComponent, ChangeEvent } from 'react';
|
import React, { PureComponent, ChangeEvent } from 'react';
|
||||||
import { Threshold } from '@grafana/data';
|
import { Threshold, sortThresholds } from '@grafana/data';
|
||||||
import { colors } from '../../utils';
|
import { colors } from '../../utils';
|
||||||
import { ThemeContext } from '../../themes';
|
import { ThemeContext } from '../../themes';
|
||||||
import { getColorFromHexRgbOrName } from '../../utils';
|
import { getColorFromHexRgbOrName } from '../../utils';
|
||||||
@@ -13,115 +13,121 @@ export interface Props {
|
|||||||
}
|
}
|
||||||
|
|
||||||
interface State {
|
interface State {
|
||||||
thresholds: Threshold[];
|
thresholds: ThresholdWithKey[];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface ThresholdWithKey extends Threshold {
|
||||||
|
key: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
let counter = 100;
|
||||||
|
|
||||||
export class ThresholdsEditor extends PureComponent<Props, State> {
|
export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||||
constructor(props: Props) {
|
constructor(props: Props) {
|
||||||
super(props);
|
super(props);
|
||||||
|
|
||||||
const addDefaultThreshold = this.props.thresholds.length === 0;
|
const thresholds = props.thresholds
|
||||||
const thresholds: Threshold[] = addDefaultThreshold
|
? props.thresholds.map(t => {
|
||||||
? [{ index: 0, value: -Infinity, color: colors[0] }]
|
return {
|
||||||
: props.thresholds;
|
color: t.color,
|
||||||
|
value: t.value === null ? -Infinity : t.value,
|
||||||
|
key: counter++,
|
||||||
|
};
|
||||||
|
})
|
||||||
|
: ([] as ThresholdWithKey[]);
|
||||||
|
|
||||||
|
let needsCallback = false;
|
||||||
|
if (!thresholds.length) {
|
||||||
|
thresholds.push({ value: -Infinity, color: colors[0], key: counter++ });
|
||||||
|
needsCallback = true;
|
||||||
|
} else {
|
||||||
|
// First value is always base
|
||||||
|
thresholds[0].value = -Infinity;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update the state
|
||||||
this.state = { thresholds };
|
this.state = { thresholds };
|
||||||
|
|
||||||
if (addDefaultThreshold) {
|
if (needsCallback) {
|
||||||
this.onChange();
|
this.onChange();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
onAddThreshold = (index: number) => {
|
onAddThresholdAfter = (threshold: ThresholdWithKey) => {
|
||||||
const { thresholds } = this.state;
|
const { thresholds } = this.state;
|
||||||
|
|
||||||
const maxValue = 100;
|
const maxValue = 100;
|
||||||
const minValue = 0;
|
const minValue = 0;
|
||||||
|
|
||||||
if (index === 0) {
|
let prev: ThresholdWithKey | undefined = undefined;
|
||||||
return;
|
let next: ThresholdWithKey | undefined = undefined;
|
||||||
|
for (const t of thresholds) {
|
||||||
|
if (prev && prev.key === threshold.key) {
|
||||||
|
next = t;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
prev = t;
|
||||||
}
|
}
|
||||||
|
|
||||||
const newThresholds = thresholds.map(threshold => {
|
const prevValue = prev && isFinite(prev.value) ? prev.value : minValue;
|
||||||
if (threshold.index >= index) {
|
const nextValue = next && isFinite(next.value) ? next.value : maxValue;
|
||||||
const index = threshold.index + 1;
|
|
||||||
threshold = { ...threshold, index };
|
|
||||||
}
|
|
||||||
return threshold;
|
|
||||||
});
|
|
||||||
|
|
||||||
// Setting value to a value between the previous thresholds
|
const color = colors.filter(c => !thresholds.some(t => t.color === c))[1];
|
||||||
const beforeThreshold = newThresholds.filter(t => t.index === index - 1 && t.index !== 0)[0];
|
const add = {
|
||||||
const afterThreshold = newThresholds.filter(t => t.index === index + 1 && t.index !== 0)[0];
|
value: prevValue + (nextValue - prevValue) / 2.0,
|
||||||
const beforeThresholdValue = beforeThreshold !== undefined ? beforeThreshold.value : minValue;
|
color: color,
|
||||||
const afterThresholdValue = afterThreshold !== undefined ? afterThreshold.value : maxValue;
|
key: counter++,
|
||||||
const value = afterThresholdValue - (afterThresholdValue - beforeThresholdValue) / 2;
|
};
|
||||||
|
const newThresholds = [...thresholds, add];
|
||||||
// Set a color
|
sortThresholds(newThresholds);
|
||||||
const color = colors.filter(c => !newThresholds.some(t => t.color === c))[1];
|
|
||||||
|
|
||||||
this.setState(
|
this.setState(
|
||||||
{
|
{
|
||||||
thresholds: this.sortThresholds([
|
thresholds: newThresholds,
|
||||||
...newThresholds,
|
|
||||||
{
|
|
||||||
color,
|
|
||||||
index,
|
|
||||||
value: value as number,
|
|
||||||
},
|
|
||||||
]),
|
|
||||||
},
|
},
|
||||||
() => this.onChange()
|
() => this.onChange()
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
onRemoveThreshold = (threshold: Threshold) => {
|
onRemoveThreshold = (threshold: ThresholdWithKey) => {
|
||||||
if (threshold.index === 0) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
this.setState(
|
|
||||||
prevState => {
|
|
||||||
const newThresholds = prevState.thresholds.map(t => {
|
|
||||||
if (t.index > threshold.index) {
|
|
||||||
const index = t.index - 1;
|
|
||||||
t = { ...t, index };
|
|
||||||
}
|
|
||||||
return t;
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
thresholds: newThresholds.filter(t => t !== threshold),
|
|
||||||
};
|
|
||||||
},
|
|
||||||
() => this.onChange()
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: Threshold) => {
|
|
||||||
if (threshold.index === 0) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const { thresholds } = this.state;
|
const { thresholds } = this.state;
|
||||||
|
if (!thresholds.length) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Don't remove index 0
|
||||||
|
if (threshold.key === thresholds[0].key) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this.setState(
|
||||||
|
{
|
||||||
|
thresholds: thresholds.filter(t => t.key !== threshold.key),
|
||||||
|
},
|
||||||
|
() => this.onChange()
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: ThresholdWithKey) => {
|
||||||
const cleanValue = event.target.value.replace(/,/g, '.');
|
const cleanValue = event.target.value.replace(/,/g, '.');
|
||||||
const parsedValue = parseFloat(cleanValue);
|
const parsedValue = parseFloat(cleanValue);
|
||||||
const value = isNaN(parsedValue) ? '' : parsedValue;
|
const value = isNaN(parsedValue) ? '' : parsedValue;
|
||||||
|
|
||||||
const newThresholds = thresholds.map(t => {
|
const thresholds = this.state.thresholds.map(t => {
|
||||||
if (t === threshold && t.index !== 0) {
|
if (t.key === threshold.key) {
|
||||||
t = { ...t, value: value as number };
|
t = { ...t, value: value as number };
|
||||||
}
|
}
|
||||||
|
|
||||||
return t;
|
return t;
|
||||||
});
|
});
|
||||||
|
if (thresholds.length) {
|
||||||
this.setState({ thresholds: newThresholds });
|
thresholds[0].value = -Infinity;
|
||||||
|
}
|
||||||
|
this.setState({ thresholds });
|
||||||
};
|
};
|
||||||
|
|
||||||
onChangeThresholdColor = (threshold: Threshold, color: string) => {
|
onChangeThresholdColor = (threshold: ThresholdWithKey, color: string) => {
|
||||||
const { thresholds } = this.state;
|
const { thresholds } = this.state;
|
||||||
|
|
||||||
const newThresholds = thresholds.map(t => {
|
const newThresholds = thresholds.map(t => {
|
||||||
if (t === threshold) {
|
if (t.key === threshold.key) {
|
||||||
t = { ...t, color: color };
|
t = { ...t, color: color };
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -137,30 +143,22 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
|||||||
};
|
};
|
||||||
|
|
||||||
onBlur = () => {
|
onBlur = () => {
|
||||||
this.setState(prevState => {
|
const thresholds = [...this.state.thresholds];
|
||||||
const sortThresholds = this.sortThresholds([...prevState.thresholds]);
|
sortThresholds(thresholds);
|
||||||
let index = 0;
|
this.setState(
|
||||||
sortThresholds.forEach(t => {
|
{
|
||||||
t.index = index++;
|
thresholds,
|
||||||
});
|
},
|
||||||
|
() => this.onChange()
|
||||||
return { thresholds: sortThresholds };
|
);
|
||||||
});
|
|
||||||
|
|
||||||
this.onChange();
|
|
||||||
};
|
};
|
||||||
|
|
||||||
onChange = () => {
|
onChange = () => {
|
||||||
this.props.onChange(this.state.thresholds);
|
const { thresholds } = this.state;
|
||||||
|
this.props.onChange(threshodsWithoutKey(thresholds));
|
||||||
};
|
};
|
||||||
|
|
||||||
sortThresholds = (thresholds: Threshold[]) => {
|
renderInput = (threshold: ThresholdWithKey) => {
|
||||||
return thresholds.sort((t1, t2) => {
|
|
||||||
return t1.value - t2.value;
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
renderInput = (threshold: Threshold) => {
|
|
||||||
return (
|
return (
|
||||||
<div className="thresholds-row-input-inner">
|
<div className="thresholds-row-input-inner">
|
||||||
<span className="thresholds-row-input-inner-arrow" />
|
<span className="thresholds-row-input-inner-arrow" />
|
||||||
@@ -175,12 +173,11 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
{threshold.index === 0 && (
|
{!isFinite(threshold.value) ? (
|
||||||
<div className="thresholds-row-input-inner-value">
|
<div className="thresholds-row-input-inner-value">
|
||||||
<Input type="text" value="Base" readOnly />
|
<Input type="text" value="Base" readOnly />
|
||||||
</div>
|
</div>
|
||||||
)}
|
) : (
|
||||||
{threshold.index > 0 && (
|
|
||||||
<>
|
<>
|
||||||
<div className="thresholds-row-input-inner-value">
|
<div className="thresholds-row-input-inner-value">
|
||||||
<Input
|
<Input
|
||||||
@@ -189,7 +186,6 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
|||||||
onChange={(event: ChangeEvent<HTMLInputElement>) => this.onChangeThresholdValue(event, threshold)}
|
onChange={(event: ChangeEvent<HTMLInputElement>) => this.onChangeThresholdValue(event, threshold)}
|
||||||
value={threshold.value}
|
value={threshold.value}
|
||||||
onBlur={this.onBlur}
|
onBlur={this.onBlur}
|
||||||
readOnly={threshold.index === 0}
|
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
<div className="thresholds-row-input-inner-remove" onClick={() => this.onRemoveThreshold(threshold)}>
|
<div className="thresholds-row-input-inner-remove" onClick={() => this.onRemoveThreshold(threshold)}>
|
||||||
@@ -212,13 +208,10 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
|||||||
{thresholds
|
{thresholds
|
||||||
.slice(0)
|
.slice(0)
|
||||||
.reverse()
|
.reverse()
|
||||||
.map((threshold, index) => {
|
.map(threshold => {
|
||||||
return (
|
return (
|
||||||
<div className="thresholds-row" key={`${threshold.index}-${index}`}>
|
<div className="thresholds-row" key={`${threshold.key}`}>
|
||||||
<div
|
<div className="thresholds-row-add-button" onClick={() => this.onAddThresholdAfter(threshold)}>
|
||||||
className="thresholds-row-add-button"
|
|
||||||
onClick={() => this.onAddThreshold(threshold.index + 1)}
|
|
||||||
>
|
|
||||||
<i className="fa fa-plus" />
|
<i className="fa fa-plus" />
|
||||||
</div>
|
</div>
|
||||||
<div
|
<div
|
||||||
@@ -237,3 +230,10 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function threshodsWithoutKey(thresholds: ThresholdWithKey[]): Threshold[] {
|
||||||
|
return thresholds.map(t => {
|
||||||
|
const { key, ...rest } = t;
|
||||||
|
return rest; // everything except key
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ exports[`Render should render with base threshold 1`] = `
|
|||||||
Array [
|
Array [
|
||||||
Object {
|
Object {
|
||||||
"color": "#7EB26D",
|
"color": "#7EB26D",
|
||||||
"index": 0,
|
|
||||||
"value": -Infinity,
|
"value": -Infinity,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
@@ -48,7 +47,7 @@ exports[`Render should render with base threshold 1`] = `
|
|||||||
>
|
>
|
||||||
<div
|
<div
|
||||||
className="thresholds-row"
|
className="thresholds-row"
|
||||||
key="0-0"
|
key="100"
|
||||||
>
|
>
|
||||||
<div
|
<div
|
||||||
className="thresholds-row-add-button"
|
className="thresholds-row-add-button"
|
||||||
|
|||||||
@@ -8,13 +8,13 @@ import { TimePickerPopover } from './TimePickerPopover';
|
|||||||
import { ClickOutsideWrapper } from '../ClickOutsideWrapper/ClickOutsideWrapper';
|
import { ClickOutsideWrapper } from '../ClickOutsideWrapper/ClickOutsideWrapper';
|
||||||
|
|
||||||
// Utils & Services
|
// Utils & Services
|
||||||
import { isDateTime } from '@grafana/data';
|
import { isDateTime, DateTime } from '@grafana/data';
|
||||||
import { rangeUtil } from '@grafana/data';
|
import { rangeUtil } from '@grafana/data';
|
||||||
import { rawToTimeRange } from './time';
|
import { rawToTimeRange } from './time';
|
||||||
|
|
||||||
// Types
|
// Types
|
||||||
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT } from '@grafana/data';
|
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT, SelectableValue } from '@grafana/data';
|
||||||
import { SelectOptionItem } from '../Select/Select';
|
import { isMathString } from '@grafana/data/src/utils/datemath';
|
||||||
|
|
||||||
export interface Props {
|
export interface Props {
|
||||||
value: TimeRange;
|
value: TimeRange;
|
||||||
@@ -77,7 +77,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
|||||||
isCustomOpen: false,
|
isCustomOpen: false,
|
||||||
};
|
};
|
||||||
|
|
||||||
mapTimeOptionsToSelectOptionItems = (selectOptions: TimeOption[]) => {
|
mapTimeOptionsToSelectableValues = (selectOptions: TimeOption[]) => {
|
||||||
const options = selectOptions.map(timeOption => {
|
const options = selectOptions.map(timeOption => {
|
||||||
return {
|
return {
|
||||||
label: timeOption.display,
|
label: timeOption.display,
|
||||||
@@ -93,7 +93,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
|||||||
return options;
|
return options;
|
||||||
};
|
};
|
||||||
|
|
||||||
onSelectChanged = (item: SelectOptionItem<TimeOption>) => {
|
onSelectChanged = (item: SelectableValue<TimeOption>) => {
|
||||||
const { onChange, timeZone } = this.props;
|
const { onChange, timeZone } = this.props;
|
||||||
|
|
||||||
if (item.value && item.value.from === 'custom') {
|
if (item.value && item.value.from === 'custom') {
|
||||||
@@ -122,15 +122,23 @@ export class TimePicker extends PureComponent<Props, State> {
|
|||||||
render() {
|
render() {
|
||||||
const { selectOptions: selectTimeOptions, value, onMoveBackward, onMoveForward, onZoom, timeZone } = this.props;
|
const { selectOptions: selectTimeOptions, value, onMoveBackward, onMoveForward, onZoom, timeZone } = this.props;
|
||||||
const { isCustomOpen } = this.state;
|
const { isCustomOpen } = this.state;
|
||||||
const options = this.mapTimeOptionsToSelectOptionItems(selectTimeOptions);
|
const options = this.mapTimeOptionsToSelectableValues(selectTimeOptions);
|
||||||
const currentOption = options.find(item => isTimeOptionEqualToTimeRange(item.value, value));
|
const currentOption = options.find(item => isTimeOptionEqualToTimeRange(item.value, value));
|
||||||
const rangeString = rangeUtil.describeTimeRange(value.raw);
|
|
||||||
|
const isUTC = timeZone === 'utc';
|
||||||
|
|
||||||
|
const adjustedTime = (time: DateTime) => (isUTC ? time.utc() : time.local()) || null;
|
||||||
|
const adjustedTimeRange = {
|
||||||
|
to: isMathString(value.raw.to) ? value.raw.to : adjustedTime(value.to),
|
||||||
|
from: isMathString(value.raw.from) ? value.raw.from : adjustedTime(value.from),
|
||||||
|
};
|
||||||
|
const rangeString = rangeUtil.describeTimeRange(adjustedTimeRange);
|
||||||
|
|
||||||
const label = (
|
const label = (
|
||||||
<>
|
<>
|
||||||
{isCustomOpen && <span>Custom time range</span>}
|
{isCustomOpen && <span>Custom time range</span>}
|
||||||
{!isCustomOpen && <span>{rangeString}</span>}
|
{!isCustomOpen && <span>{rangeString}</span>}
|
||||||
{timeZone === 'utc' && <span className="time-picker-utc">UTC</span>}
|
{isUTC && <span className="time-picker-utc">UTC</span>}
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
const isAbsolute = isDateTime(value.raw.to);
|
const isAbsolute = isDateTime(value.raw.to);
|
||||||
@@ -148,6 +156,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
|||||||
value={currentOption}
|
value={currentOption}
|
||||||
label={label}
|
label={label}
|
||||||
options={options}
|
options={options}
|
||||||
|
maxMenuHeight={600}
|
||||||
onChange={this.onSelectChanged}
|
onChange={this.onSelectChanged}
|
||||||
iconClass={'fa fa-clock-o fa-fw'}
|
iconClass={'fa fa-clock-o fa-fw'}
|
||||||
tooltipContent={<TimePickerTooltipContent timeRange={value} />}
|
tooltipContent={<TimePickerTooltipContent timeRange={value} />}
|
||||||
|
|||||||
@@ -18,7 +18,6 @@
|
|||||||
|
|
||||||
.time-picker-popover {
|
.time-picker-popover {
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-flow: row nowrap;
|
|
||||||
justify-content: space-around;
|
justify-content: space-around;
|
||||||
border: 1px solid $popover-border-color;
|
border: 1px solid $popover-border-color;
|
||||||
border-radius: $border-radius;
|
border-radius: $border-radius;
|
||||||
@@ -31,41 +30,41 @@
|
|||||||
max-width: 600px;
|
max-width: 600px;
|
||||||
top: 41px;
|
top: 41px;
|
||||||
right: 0px;
|
right: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
.time-picker-popover-body {
|
.time-picker-popover-body {
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-flow: row nowrap;
|
flex-flow: row nowrap;
|
||||||
justify-content: space-around;
|
justify-content: space-around;
|
||||||
padding: $space-md;
|
padding: $space-md;
|
||||||
padding-bottom: 0;
|
padding-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.time-picker-popover-title {
|
||||||
|
font-size: $font-size-md;
|
||||||
|
font-weight: $font-weight-semi-bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.time-picker-popover-body-custom-ranges:first-child {
|
||||||
|
margin-right: $space-md;
|
||||||
|
}
|
||||||
|
|
||||||
|
.time-picker-popover-body-custom-ranges-input {
|
||||||
|
display: flex;
|
||||||
|
flex-flow: row nowrap;
|
||||||
|
align-items: center;
|
||||||
|
margin-bottom: $space-sm;
|
||||||
|
|
||||||
|
.time-picker-input-error {
|
||||||
|
box-shadow: inset 0 0px 5px $red;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
.time-picker-popover-title {
|
.time-picker-popover-footer {
|
||||||
font-size: $font-size-md;
|
display: flex;
|
||||||
font-weight: $font-weight-semi-bold;
|
flex-flow: row nowrap;
|
||||||
}
|
justify-content: center;
|
||||||
|
padding: $space-md;
|
||||||
.time-picker-popover-body-custom-ranges:first-child {
|
|
||||||
margin-right: $space-md;
|
|
||||||
}
|
|
||||||
|
|
||||||
.time-picker-popover-body-custom-ranges-input {
|
|
||||||
display: flex;
|
|
||||||
flex-flow: row nowrap;
|
|
||||||
align-items: center;
|
|
||||||
margin-bottom: $space-sm;
|
|
||||||
|
|
||||||
.time-picker-input-error {
|
|
||||||
box-shadow: inset 0 0px 5px $red;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.time-picker-popover-footer {
|
|
||||||
display: flex;
|
|
||||||
flex-flow: row nowrap;
|
|
||||||
justify-content: center;
|
|
||||||
padding: $space-md;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.time-picker-popover-header {
|
.time-picker-popover-header {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
export { DeleteButton } from './DeleteButton/DeleteButton';
|
export { DeleteButton } from './DeleteButton/DeleteButton';
|
||||||
export { Tooltip } from './Tooltip/Tooltip';
|
export { Tooltip } from './Tooltip/Tooltip';
|
||||||
export { PopperController } from './Tooltip/PopperController';
|
export { PopperController, PopperContent } from './Tooltip/PopperController';
|
||||||
export { Popper } from './Tooltip/Popper';
|
export { Popper } from './Tooltip/Popper';
|
||||||
export { Portal } from './Portal/Portal';
|
export { Portal } from './Portal/Portal';
|
||||||
export { CustomScrollbar } from './CustomScrollbar/CustomScrollbar';
|
export { CustomScrollbar } from './CustomScrollbar/CustomScrollbar';
|
||||||
@@ -9,7 +9,7 @@ export * from './Button/Button';
|
|||||||
export { ButtonVariant } from './Button/AbstractButton';
|
export { ButtonVariant } from './Button/AbstractButton';
|
||||||
|
|
||||||
// Select
|
// Select
|
||||||
export { Select, AsyncSelect, SelectOptionItem } from './Select/Select';
|
export { Select, AsyncSelect } from './Select/Select';
|
||||||
export { IndicatorsContainer } from './Select/IndicatorsContainer';
|
export { IndicatorsContainer } from './Select/IndicatorsContainer';
|
||||||
export { NoOptionsMessage } from './Select/NoOptionsMessage';
|
export { NoOptionsMessage } from './Select/NoOptionsMessage';
|
||||||
export { default as resetSelectStyles } from './Select/resetSelectStyles';
|
export { default as resetSelectStyles } from './Select/resetSelectStyles';
|
||||||
|
|||||||
@@ -77,6 +77,13 @@ interface PluginMetaInfoLink {
|
|||||||
url: string;
|
url: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface PluginBuildInfo {
|
||||||
|
time?: number;
|
||||||
|
repo?: string;
|
||||||
|
branch?: string;
|
||||||
|
hash?: string;
|
||||||
|
}
|
||||||
|
|
||||||
export interface PluginMetaInfo {
|
export interface PluginMetaInfo {
|
||||||
author: {
|
author: {
|
||||||
name: string;
|
name: string;
|
||||||
@@ -88,6 +95,7 @@ export interface PluginMetaInfo {
|
|||||||
large: string;
|
large: string;
|
||||||
small: string;
|
small: string;
|
||||||
};
|
};
|
||||||
|
build?: PluginBuildInfo;
|
||||||
screenshots: any[];
|
screenshots: any[];
|
||||||
updated: string;
|
updated: string;
|
||||||
version: string;
|
version: string;
|
||||||
|
|||||||
@@ -1,4 +1,7 @@
|
|||||||
export const deprecationWarning = (file: string, oldName: string, newName: string) => {
|
export const deprecationWarning = (file: string, oldName: string, newName?: string) => {
|
||||||
const message = `[Deprecation warning] ${file}: ${oldName} is deprecated. Use ${newName} instead`;
|
let message = `[Deprecation warning] ${file}: ${oldName} is deprecated`;
|
||||||
|
if (newName) {
|
||||||
|
message += `. Use ${newName} instead`;
|
||||||
|
}
|
||||||
console.warn(message);
|
console.warn(message);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -103,7 +103,7 @@ describe('Format value', () => {
|
|||||||
it('should return if value isNaN', () => {
|
it('should return if value isNaN', () => {
|
||||||
const valueMappings: ValueMapping[] = [];
|
const valueMappings: ValueMapping[] = [];
|
||||||
const value = 'N/A';
|
const value = 'N/A';
|
||||||
const instance = getDisplayProcessor({ mappings: valueMappings });
|
const instance = getDisplayProcessor({ field: { mappings: valueMappings } });
|
||||||
|
|
||||||
const result = instance(value);
|
const result = instance(value);
|
||||||
|
|
||||||
@@ -114,7 +114,7 @@ describe('Format value', () => {
|
|||||||
const valueMappings: ValueMapping[] = [];
|
const valueMappings: ValueMapping[] = [];
|
||||||
const value = '6';
|
const value = '6';
|
||||||
|
|
||||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||||
|
|
||||||
const result = instance(value);
|
const result = instance(value);
|
||||||
|
|
||||||
@@ -127,7 +127,7 @@ describe('Format value', () => {
|
|||||||
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
|
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
|
||||||
];
|
];
|
||||||
const value = '10';
|
const value = '10';
|
||||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||||
|
|
||||||
const result = instance(value);
|
const result = instance(value);
|
||||||
|
|
||||||
@@ -160,7 +160,7 @@ describe('Format value', () => {
|
|||||||
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
|
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
|
||||||
];
|
];
|
||||||
const value = '11';
|
const value = '11';
|
||||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||||
|
|
||||||
expect(instance(value).text).toEqual('1-20');
|
expect(instance(value).text).toEqual('1-20');
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -7,16 +7,13 @@ import { getColorFromHexRgbOrName } from './namedColorsPalette';
|
|||||||
|
|
||||||
// Types
|
// Types
|
||||||
import { DecimalInfo, DisplayValue, GrafanaTheme, GrafanaThemeType, DecimalCount } from '../types';
|
import { DecimalInfo, DisplayValue, GrafanaTheme, GrafanaThemeType, DecimalCount } from '../types';
|
||||||
import { DateTime, dateTime, Threshold, ValueMapping, getMappedValue, Field } from '@grafana/data';
|
import { DateTime, dateTime, Threshold, getMappedValue, Field } from '@grafana/data';
|
||||||
|
|
||||||
export type DisplayProcessor = (value: any) => DisplayValue;
|
export type DisplayProcessor = (value: any) => DisplayValue;
|
||||||
|
|
||||||
export interface DisplayValueOptions {
|
export interface DisplayValueOptions {
|
||||||
field?: Partial<Field>;
|
field?: Partial<Field>;
|
||||||
|
|
||||||
mappings?: ValueMapping[];
|
|
||||||
thresholds?: Threshold[];
|
|
||||||
|
|
||||||
// Alternative to empty string
|
// Alternative to empty string
|
||||||
noValue?: string;
|
noValue?: string;
|
||||||
|
|
||||||
@@ -31,7 +28,8 @@ export function getDisplayProcessor(options?: DisplayValueOptions): DisplayProce
|
|||||||
const formatFunc = getValueFormat(field.unit || 'none');
|
const formatFunc = getValueFormat(field.unit || 'none');
|
||||||
|
|
||||||
return (value: any) => {
|
return (value: any) => {
|
||||||
const { mappings, thresholds, theme } = options;
|
const { theme } = options;
|
||||||
|
const { mappings, thresholds } = field;
|
||||||
let color;
|
let color;
|
||||||
|
|
||||||
let text = _.toString(value);
|
let text = _.toString(value);
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { getFieldProperties, getFieldDisplayValues, GetFieldDisplayValuesOptions } from './fieldDisplay';
|
import { getFieldProperties, getFieldDisplayValues, GetFieldDisplayValuesOptions } from './fieldDisplay';
|
||||||
import { FieldType, ReducerID } from '@grafana/data';
|
import { FieldType, ReducerID, Threshold } from '@grafana/data';
|
||||||
import { GrafanaThemeType } from '../types/theme';
|
import { GrafanaThemeType } from '../types/theme';
|
||||||
import { getTheme } from '../themes/index';
|
import { getTheme } from '../themes/index';
|
||||||
|
|
||||||
@@ -55,8 +55,6 @@ describe('FieldDisplay', () => {
|
|||||||
},
|
},
|
||||||
fieldOptions: {
|
fieldOptions: {
|
||||||
calcs: [],
|
calcs: [],
|
||||||
mappings: [],
|
|
||||||
thresholds: [],
|
|
||||||
override: {},
|
override: {},
|
||||||
defaults: {},
|
defaults: {},
|
||||||
},
|
},
|
||||||
@@ -68,8 +66,6 @@ describe('FieldDisplay', () => {
|
|||||||
...options,
|
...options,
|
||||||
fieldOptions: {
|
fieldOptions: {
|
||||||
calcs: [ReducerID.first],
|
calcs: [ReducerID.first],
|
||||||
mappings: [],
|
|
||||||
thresholds: [],
|
|
||||||
override: {},
|
override: {},
|
||||||
defaults: {
|
defaults: {
|
||||||
title: '$__cell_0 * $__field_name * $__series_name',
|
title: '$__cell_0 * $__field_name * $__series_name',
|
||||||
@@ -88,8 +84,6 @@ describe('FieldDisplay', () => {
|
|||||||
...options,
|
...options,
|
||||||
fieldOptions: {
|
fieldOptions: {
|
||||||
calcs: [ReducerID.last],
|
calcs: [ReducerID.last],
|
||||||
mappings: [],
|
|
||||||
thresholds: [],
|
|
||||||
override: {},
|
override: {},
|
||||||
defaults: {},
|
defaults: {},
|
||||||
},
|
},
|
||||||
@@ -104,8 +98,6 @@ describe('FieldDisplay', () => {
|
|||||||
values: true, //
|
values: true, //
|
||||||
limit: 1000,
|
limit: 1000,
|
||||||
calcs: [],
|
calcs: [],
|
||||||
mappings: [],
|
|
||||||
thresholds: [],
|
|
||||||
override: {},
|
override: {},
|
||||||
defaults: {},
|
defaults: {},
|
||||||
},
|
},
|
||||||
@@ -120,12 +112,53 @@ describe('FieldDisplay', () => {
|
|||||||
values: true, //
|
values: true, //
|
||||||
limit: 2,
|
limit: 2,
|
||||||
calcs: [],
|
calcs: [],
|
||||||
mappings: [],
|
|
||||||
thresholds: [],
|
|
||||||
override: {},
|
override: {},
|
||||||
defaults: {},
|
defaults: {},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
expect(display.map(v => v.display.numeric)).toEqual([1, 3]); // First 2 are from the first field
|
expect(display.map(v => v.display.numeric)).toEqual([1, 3]); // First 2 are from the first field
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should restore -Infinity value for base threshold', () => {
|
||||||
|
const field = getFieldProperties({
|
||||||
|
thresholds: [
|
||||||
|
({
|
||||||
|
color: '#73BF69',
|
||||||
|
value: null,
|
||||||
|
} as unknown) as Threshold,
|
||||||
|
{
|
||||||
|
color: '#F2495C',
|
||||||
|
value: 50,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
expect(field.thresholds!.length).toEqual(2);
|
||||||
|
expect(field.thresholds![0].value).toBe(-Infinity);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return field thresholds when there is no data', () => {
|
||||||
|
const options: GetFieldDisplayValuesOptions = {
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
name: 'No data',
|
||||||
|
fields: [],
|
||||||
|
rows: [],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
replaceVariables: (value: string) => {
|
||||||
|
return value;
|
||||||
|
},
|
||||||
|
fieldOptions: {
|
||||||
|
calcs: [],
|
||||||
|
override: {},
|
||||||
|
defaults: {
|
||||||
|
thresholds: [{ color: '#F2495C', value: 50 }],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
theme: getTheme(GrafanaThemeType.Dark),
|
||||||
|
};
|
||||||
|
|
||||||
|
const display = getFieldDisplayValues(options);
|
||||||
|
expect(display[0].field.thresholds!.length).toEqual(1);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -4,16 +4,7 @@ import toString from 'lodash/toString';
|
|||||||
import { DisplayValue, GrafanaTheme, InterpolateFunction, ScopedVars, GraphSeriesValue } from '../types/index';
|
import { DisplayValue, GrafanaTheme, InterpolateFunction, ScopedVars, GraphSeriesValue } from '../types/index';
|
||||||
import { getDisplayProcessor } from './displayValue';
|
import { getDisplayProcessor } from './displayValue';
|
||||||
import { getFlotPairs } from './flotPairs';
|
import { getFlotPairs } from './flotPairs';
|
||||||
import {
|
import { ReducerID, reduceField, FieldType, NullValueMode, DataFrame, Field } from '@grafana/data';
|
||||||
ValueMapping,
|
|
||||||
Threshold,
|
|
||||||
ReducerID,
|
|
||||||
reduceField,
|
|
||||||
FieldType,
|
|
||||||
NullValueMode,
|
|
||||||
DataFrame,
|
|
||||||
Field,
|
|
||||||
} from '@grafana/data';
|
|
||||||
|
|
||||||
export interface FieldDisplayOptions {
|
export interface FieldDisplayOptions {
|
||||||
values?: boolean; // If true show each row value
|
values?: boolean; // If true show each row value
|
||||||
@@ -22,10 +13,6 @@ export interface FieldDisplayOptions {
|
|||||||
|
|
||||||
defaults: Partial<Field>; // Use these values unless otherwise stated
|
defaults: Partial<Field>; // Use these values unless otherwise stated
|
||||||
override: Partial<Field>; // Set these values regardless of the source
|
override: Partial<Field>; // Set these values regardless of the source
|
||||||
|
|
||||||
// Could these be data driven also?
|
|
||||||
thresholds: Threshold[];
|
|
||||||
mappings: ValueMapping[];
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export const VAR_SERIES_NAME = '__series_name';
|
export const VAR_SERIES_NAME = '__series_name';
|
||||||
@@ -127,8 +114,6 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
|||||||
|
|
||||||
const display = getDisplayProcessor({
|
const display = getDisplayProcessor({
|
||||||
field,
|
field,
|
||||||
mappings: fieldOptions.mappings,
|
|
||||||
thresholds: fieldOptions.thresholds,
|
|
||||||
theme: options.theme,
|
theme: options.theme,
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -197,7 +182,10 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
|||||||
|
|
||||||
if (values.length === 0) {
|
if (values.length === 0) {
|
||||||
values.push({
|
values.push({
|
||||||
field: { name: 'No Data' },
|
field: {
|
||||||
|
...defaults,
|
||||||
|
name: 'No Data',
|
||||||
|
},
|
||||||
display: {
|
display: {
|
||||||
numeric: 0,
|
numeric: 0,
|
||||||
text: 'No data',
|
text: 'No data',
|
||||||
@@ -259,10 +247,16 @@ type PartialField = Partial<Field>;
|
|||||||
|
|
||||||
export function getFieldProperties(...props: PartialField[]): Field {
|
export function getFieldProperties(...props: PartialField[]): Field {
|
||||||
let field = props[0] as Field;
|
let field = props[0] as Field;
|
||||||
|
|
||||||
for (let i = 1; i < props.length; i++) {
|
for (let i = 1; i < props.length; i++) {
|
||||||
field = applyFieldProperties(field, props[i]);
|
field = applyFieldProperties(field, props[i]);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// First value is always -Infinity
|
||||||
|
if (field.thresholds && field.thresholds.length) {
|
||||||
|
field.thresholds[0].value = -Infinity;
|
||||||
|
}
|
||||||
|
|
||||||
// Verify that max > min
|
// Verify that max > min
|
||||||
if (field.hasOwnProperty('min') && field.hasOwnProperty('max') && field.min! > field.max!) {
|
if (field.hasOwnProperty('min') && field.hasOwnProperty('max') && field.min! > field.max!) {
|
||||||
return {
|
return {
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
ARG BASE_IMAGE=ubuntu:latest
|
ARG BASE_IMAGE=ubuntu:18.04
|
||||||
FROM ${BASE_IMAGE}
|
FROM ${BASE_IMAGE}
|
||||||
|
|
||||||
ARG GRAFANA_TGZ="grafana-latest.linux-x64.tar.gz"
|
ARG GRAFANA_TGZ="grafana-latest.linux-x64.tar.gz"
|
||||||
@@ -12,7 +12,7 @@ COPY ${GRAFANA_TGZ} /tmp/grafana.tar.gz
|
|||||||
# Change to tar xfzv to make tar print every file it extracts
|
# Change to tar xfzv to make tar print every file it extracts
|
||||||
RUN mkdir /tmp/grafana && tar xfz /tmp/grafana.tar.gz --strip-components=1 -C /tmp/grafana
|
RUN mkdir /tmp/grafana && tar xfz /tmp/grafana.tar.gz --strip-components=1 -C /tmp/grafana
|
||||||
|
|
||||||
ARG BASE_IMAGE=ubuntu:latest
|
ARG BASE_IMAGE=ubuntu:18.04
|
||||||
FROM ${BASE_IMAGE}
|
FROM ${BASE_IMAGE}
|
||||||
|
|
||||||
ARG GF_UID="472"
|
ARG GF_UID="472"
|
||||||
|
|||||||
@@ -59,14 +59,16 @@ docker_tag_all () {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
docker_build "ubuntu:latest" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
|
docker_build "ubuntu:18.04" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
|
||||||
if [ $BUILD_FAST = "0" ]; then
|
if [ $BUILD_FAST = "0" ]; then
|
||||||
docker_build "arm32v7/ubuntu:latest" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
|
docker_build "arm32v7/ubuntu:18.04" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
|
||||||
docker_build "arm64v8/ubuntu:latest" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
|
docker_build "arm64v8/ubuntu:18.04" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
|
||||||
fi
|
fi
|
||||||
# Tag as 'latest' for official release; otherwise tag as grafana/grafana:master
|
# Tag as 'latest' for official release; otherwise tag as grafana/grafana:master
|
||||||
if echo "$_grafana_tag" | grep -q "^v"; then
|
if echo "$_grafana_tag" | grep -q "^v"; then
|
||||||
docker_tag_all "${_docker_repo}" "latest"
|
docker_tag_all "${_docker_repo}" "latest"
|
||||||
|
# Create the expected tag for running the end to end tests successfully
|
||||||
|
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_tag}"
|
||||||
else
|
else
|
||||||
docker_tag_all "${_docker_repo}" "master"
|
docker_tag_all "${_docker_repo}" "master"
|
||||||
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_version}"
|
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_version}"
|
||||||
|
|||||||
@@ -38,8 +38,14 @@ if echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -vq "beta"
|
|||||||
echo "pushing ${_docker_repo}:latest"
|
echo "pushing ${_docker_repo}:latest"
|
||||||
docker_push_all "${_docker_repo}" "latest"
|
docker_push_all "${_docker_repo}" "latest"
|
||||||
docker_push_all "${_docker_repo}" "${_grafana_version}"
|
docker_push_all "${_docker_repo}" "${_grafana_version}"
|
||||||
|
# Push to the grafana-dev repository with the expected tag
|
||||||
|
# for running the end to end tests successfully
|
||||||
|
docker push "grafana/grafana-dev:${_grafana_tag}"
|
||||||
elif echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -q "beta"; then
|
elif echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -q "beta"; then
|
||||||
docker_push_all "${_docker_repo}" "${_grafana_version}"
|
docker_push_all "${_docker_repo}" "${_grafana_version}"
|
||||||
|
# Push to the grafana-dev repository with the expected tag
|
||||||
|
# for running the end to end tests successfully
|
||||||
|
docker push "grafana/grafana-dev:${_grafana_tag}"
|
||||||
elif echo "$_grafana_tag" | grep -q "master"; then
|
elif echo "$_grafana_tag" | grep -q "master"; then
|
||||||
docker_push_all "${_docker_repo}" "master"
|
docker_push_all "${_docker_repo}" "master"
|
||||||
docker push "grafana/grafana-dev:${_grafana_version}"
|
docker push "grafana/grafana-dev:${_grafana_version}"
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ func AdminCreateUser(c *models.ReqContext, form dtos.AdminCreateUserForm) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
metrics.M_Api_Admin_User_Create.Inc()
|
metrics.MApiAdminUserCreate.Inc()
|
||||||
|
|
||||||
user := cmd.Result
|
user := cmd.Result
|
||||||
|
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
reqEditorRole := middleware.ReqEditorRole
|
reqEditorRole := middleware.ReqEditorRole
|
||||||
reqOrgAdmin := middleware.ReqOrgAdmin
|
reqOrgAdmin := middleware.ReqOrgAdmin
|
||||||
reqCanAccessTeams := middleware.AdminOrFeatureEnabled(hs.Cfg.EditorsCanAdmin)
|
reqCanAccessTeams := middleware.AdminOrFeatureEnabled(hs.Cfg.EditorsCanAdmin)
|
||||||
|
reqSnapshotPublicModeOrSignedIn := middleware.SnapshotPublicModeOrSignedIn()
|
||||||
redirectFromLegacyDashboardURL := middleware.RedirectFromLegacyDashboardURL()
|
redirectFromLegacyDashboardURL := middleware.RedirectFromLegacyDashboardURL()
|
||||||
redirectFromLegacyDashboardSoloURL := middleware.RedirectFromLegacyDashboardSoloURL()
|
redirectFromLegacyDashboardSoloURL := middleware.RedirectFromLegacyDashboardSoloURL()
|
||||||
quota := middleware.Quota(hs.QuotaService)
|
quota := middleware.Quota(hs.QuotaService)
|
||||||
@@ -104,13 +105,6 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
r.Get("/dashboard/snapshot/*", hs.Index)
|
r.Get("/dashboard/snapshot/*", hs.Index)
|
||||||
r.Get("/dashboard/snapshots/", reqSignedIn, hs.Index)
|
r.Get("/dashboard/snapshots/", reqSignedIn, hs.Index)
|
||||||
|
|
||||||
// api for dashboard snapshots
|
|
||||||
r.Post("/api/snapshots/", bind(models.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot)
|
|
||||||
r.Get("/api/snapshot/shared-options/", GetSharingOptions)
|
|
||||||
r.Get("/api/snapshots/:key", GetDashboardSnapshot)
|
|
||||||
r.Get("/api/snapshots-delete/:deleteKey", Wrap(DeleteDashboardSnapshotByDeleteKey))
|
|
||||||
r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot))
|
|
||||||
|
|
||||||
// api renew session based on cookie
|
// api renew session based on cookie
|
||||||
r.Get("/api/login/ping", quota("session"), Wrap(hs.LoginAPIPing))
|
r.Get("/api/login/ping", quota("session"), Wrap(hs.LoginAPIPing))
|
||||||
|
|
||||||
@@ -413,4 +407,11 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
|
|
||||||
// streams
|
// streams
|
||||||
//r.Post("/api/streams/push", reqSignedIn, bind(dtos.StreamMessage{}), liveConn.PushToStream)
|
//r.Post("/api/streams/push", reqSignedIn, bind(dtos.StreamMessage{}), liveConn.PushToStream)
|
||||||
|
|
||||||
|
// Snapshots
|
||||||
|
r.Post("/api/snapshots/", reqSnapshotPublicModeOrSignedIn, bind(models.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot)
|
||||||
|
r.Get("/api/snapshot/shared-options/", reqSignedIn, GetSharingOptions)
|
||||||
|
r.Get("/api/snapshots/:key", GetDashboardSnapshot)
|
||||||
|
r.Get("/api/snapshots-delete/:deleteKey", reqSnapshotPublicModeOrSignedIn, Wrap(DeleteDashboardSnapshotByDeleteKey))
|
||||||
|
r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -133,7 +133,7 @@ func (hs *HTTPServer) GetDashboard(c *m.ReqContext) Response {
|
|||||||
Meta: meta,
|
Meta: meta,
|
||||||
}
|
}
|
||||||
|
|
||||||
c.TimeRequest(metrics.M_Api_Dashboard_Get)
|
c.TimeRequest(metrics.MApiDashboardGet)
|
||||||
return JSON(200, dto)
|
return JSON(200, dto)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -278,12 +278,11 @@ func (hs *HTTPServer) PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand)
|
|||||||
inFolder := cmd.FolderId > 0
|
inFolder := cmd.FolderId > 0
|
||||||
err := dashboards.MakeUserAdmin(hs.Bus, cmd.OrgId, cmd.UserId, dashboard.Id, !inFolder)
|
err := dashboards.MakeUserAdmin(hs.Bus, cmd.OrgId, cmd.UserId, dashboard.Id, !inFolder)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
hs.log.Error("Could not make user admin", "dashboard", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
|
hs.log.Error("Could not make user admin", "dashboard", dashboard.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||||
return Error(500, "Failed to make user admin of dashboard", err)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
c.TimeRequest(metrics.M_Api_Dashboard_Save)
|
c.TimeRequest(metrics.MApiDashboardSave)
|
||||||
return JSON(200, util.DynMap{
|
return JSON(200, util.DynMap{
|
||||||
"status": "success",
|
"status": "success",
|
||||||
"slug": dashboard.Slug,
|
"slug": dashboard.Slug,
|
||||||
|
|||||||
@@ -97,7 +97,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
|
|||||||
cmd.ExternalDeleteUrl = response.DeleteUrl
|
cmd.ExternalDeleteUrl = response.DeleteUrl
|
||||||
cmd.Dashboard = simplejson.New()
|
cmd.Dashboard = simplejson.New()
|
||||||
|
|
||||||
metrics.M_Api_Dashboard_Snapshot_External.Inc()
|
metrics.MApiDashboardSnapshotExternal.Inc()
|
||||||
} else {
|
} else {
|
||||||
if cmd.Key == "" {
|
if cmd.Key == "" {
|
||||||
cmd.Key = util.GetRandomString(32)
|
cmd.Key = util.GetRandomString(32)
|
||||||
@@ -109,7 +109,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
|
|||||||
|
|
||||||
url = setting.ToAbsUrl("dashboard/snapshot/" + cmd.Key)
|
url = setting.ToAbsUrl("dashboard/snapshot/" + cmd.Key)
|
||||||
|
|
||||||
metrics.M_Api_Dashboard_Snapshot_Create.Inc()
|
metrics.MApiDashboardSnapshotCreate.Inc()
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := bus.Dispatch(&cmd); err != nil {
|
if err := bus.Dispatch(&cmd); err != nil {
|
||||||
@@ -154,7 +154,7 @@ func GetDashboardSnapshot(c *m.ReqContext) {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
metrics.M_Api_Dashboard_Snapshot_Get.Inc()
|
metrics.MApiDashboardSnapshotGet.Inc()
|
||||||
|
|
||||||
c.Resp.Header().Set("Cache-Control", "public, max-age=3600")
|
c.Resp.Header().Set("Cache-Control", "public, max-age=3600")
|
||||||
c.JSON(200, dto)
|
c.JSON(200, dto)
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
|
func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
|
||||||
c.TimeRequest(metrics.M_DataSource_ProxyReq_Timer)
|
c.TimeRequest(metrics.MDataSourceProxyReqTimer)
|
||||||
|
|
||||||
dsId := c.ParamsInt64(":id")
|
dsId := c.ParamsInt64(":id")
|
||||||
ds, err := hs.DatasourceCache.GetDatasource(dsId, c.SignedInUser, c.SkipCache)
|
ds, err := hs.DatasourceCache.GetDatasource(dsId, c.SignedInUser, c.SkipCache)
|
||||||
|
|||||||
@@ -64,7 +64,6 @@ func (hs *HTTPServer) CreateFolder(c *m.ReqContext, cmd m.CreateFolderCommand) R
|
|||||||
if hs.Cfg.EditorsCanAdmin {
|
if hs.Cfg.EditorsCanAdmin {
|
||||||
if err := dashboards.MakeUserAdmin(hs.Bus, c.OrgId, c.SignedInUser.UserId, cmd.Result.Id, true); err != nil {
|
if err := dashboards.MakeUserAdmin(hs.Bus, c.OrgId, c.SignedInUser.UserId, cmd.Result.Id, true); err != nil {
|
||||||
hs.log.Error("Could not make user admin", "folder", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
|
hs.log.Error("Could not make user admin", "folder", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||||
return Error(500, "Failed to make user admin of folder", err)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -269,7 +269,8 @@ func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
promhttp.HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
|
promhttp.
|
||||||
|
HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
|
||||||
ServeHTTP(ctx.Resp, ctx.Req.Request)
|
ServeHTTP(ctx.Resp, ctx.Req.Request)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
146
pkg/api/index.go
146
pkg/api/index.go
@@ -242,74 +242,69 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if c.IsGrafanaAdmin || c.OrgRole == m.ROLE_ADMIN {
|
configNodes := []*dtos.NavLink{}
|
||||||
cfgNode := &dtos.NavLink{
|
|
||||||
Id: "cfg",
|
|
||||||
Text: "Configuration",
|
|
||||||
SubTitle: "Organization: " + c.OrgName,
|
|
||||||
Icon: "gicon gicon-cog",
|
|
||||||
Url: setting.AppSubUrl + "/datasources",
|
|
||||||
Children: []*dtos.NavLink{
|
|
||||||
{
|
|
||||||
Text: "Data Sources",
|
|
||||||
Icon: "gicon gicon-datasources",
|
|
||||||
Description: "Add and configure data sources",
|
|
||||||
Id: "datasources",
|
|
||||||
Url: setting.AppSubUrl + "/datasources",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Text: "Users",
|
|
||||||
Id: "users",
|
|
||||||
Description: "Manage org members",
|
|
||||||
Icon: "gicon gicon-user",
|
|
||||||
Url: setting.AppSubUrl + "/org/users",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Text: "Teams",
|
|
||||||
Id: "teams",
|
|
||||||
Description: "Manage org groups",
|
|
||||||
Icon: "gicon gicon-team",
|
|
||||||
Url: setting.AppSubUrl + "/org/teams",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Text: "Plugins",
|
|
||||||
Id: "plugins",
|
|
||||||
Description: "View and configure plugins",
|
|
||||||
Icon: "gicon gicon-plugins",
|
|
||||||
Url: setting.AppSubUrl + "/plugins",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Text: "Preferences",
|
|
||||||
Id: "org-settings",
|
|
||||||
Description: "Organization preferences",
|
|
||||||
Icon: "gicon gicon-preferences",
|
|
||||||
Url: setting.AppSubUrl + "/org",
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
if c.OrgRole == m.ROLE_ADMIN {
|
||||||
Text: "API Keys",
|
configNodes = append(configNodes, &dtos.NavLink{
|
||||||
Id: "apikeys",
|
Text: "Data Sources",
|
||||||
Description: "Create & manage API keys",
|
Icon: "gicon gicon-datasources",
|
||||||
Icon: "gicon gicon-apikeys",
|
Description: "Add and configure data sources",
|
||||||
Url: setting.AppSubUrl + "/org/apikeys",
|
Id: "datasources",
|
||||||
},
|
Url: setting.AppSubUrl + "/datasources",
|
||||||
},
|
})
|
||||||
}
|
configNodes = append(configNodes, &dtos.NavLink{
|
||||||
|
Text: "Users",
|
||||||
if c.OrgRole != m.ROLE_ADMIN {
|
Id: "users",
|
||||||
cfgNode = &dtos.NavLink{
|
Description: "Manage org members",
|
||||||
Id: "cfg",
|
Icon: "gicon gicon-user",
|
||||||
Text: "Configuration",
|
Url: setting.AppSubUrl + "/org/users",
|
||||||
SubTitle: "Organization: " + c.OrgName,
|
})
|
||||||
Icon: "gicon gicon-cog",
|
|
||||||
Url: setting.AppSubUrl + "/admin/users",
|
|
||||||
Children: make([]*dtos.NavLink, 0),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
data.NavTree = append(data.NavTree, cfgNode)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if c.OrgRole == m.ROLE_ADMIN || hs.Cfg.EditorsCanAdmin {
|
||||||
|
configNodes = append(configNodes, &dtos.NavLink{
|
||||||
|
Text: "Teams",
|
||||||
|
Id: "teams",
|
||||||
|
Description: "Manage org groups",
|
||||||
|
Icon: "gicon gicon-team",
|
||||||
|
Url: setting.AppSubUrl + "/org/teams",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
configNodes = append(configNodes, &dtos.NavLink{
|
||||||
|
Text: "Plugins",
|
||||||
|
Id: "plugins",
|
||||||
|
Description: "View and configure plugins",
|
||||||
|
Icon: "gicon gicon-plugins",
|
||||||
|
Url: setting.AppSubUrl + "/plugins",
|
||||||
|
})
|
||||||
|
|
||||||
|
if c.OrgRole == m.ROLE_ADMIN {
|
||||||
|
configNodes = append(configNodes, &dtos.NavLink{
|
||||||
|
Text: "Preferences",
|
||||||
|
Id: "org-settings",
|
||||||
|
Description: "Organization preferences",
|
||||||
|
Icon: "gicon gicon-preferences",
|
||||||
|
Url: setting.AppSubUrl + "/org",
|
||||||
|
})
|
||||||
|
configNodes = append(configNodes, &dtos.NavLink{
|
||||||
|
Text: "API Keys",
|
||||||
|
Id: "apikeys",
|
||||||
|
Description: "Create & manage API keys",
|
||||||
|
Icon: "gicon gicon-apikeys",
|
||||||
|
Url: setting.AppSubUrl + "/org/apikeys",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
||||||
|
Id: "cfg",
|
||||||
|
Text: "Configuration",
|
||||||
|
SubTitle: "Organization: " + c.OrgName,
|
||||||
|
Icon: "gicon gicon-cog",
|
||||||
|
Url: configNodes[0].Url,
|
||||||
|
Children: configNodes,
|
||||||
|
})
|
||||||
|
|
||||||
if c.IsGrafanaAdmin {
|
if c.IsGrafanaAdmin {
|
||||||
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
||||||
Text: "Server Admin",
|
Text: "Server Admin",
|
||||||
@@ -327,27 +322,6 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
if (c.OrgRole == m.ROLE_EDITOR || c.OrgRole == m.ROLE_VIEWER) && hs.Cfg.EditorsCanAdmin {
|
|
||||||
cfgNode := &dtos.NavLink{
|
|
||||||
Id: "cfg",
|
|
||||||
Text: "Configuration",
|
|
||||||
SubTitle: "Organization: " + c.OrgName,
|
|
||||||
Icon: "gicon gicon-cog",
|
|
||||||
Url: setting.AppSubUrl + "/org/teams",
|
|
||||||
Children: []*dtos.NavLink{
|
|
||||||
{
|
|
||||||
Text: "Teams",
|
|
||||||
Id: "teams",
|
|
||||||
Description: "Manage org groups",
|
|
||||||
Icon: "gicon gicon-team",
|
|
||||||
Url: setting.AppSubUrl + "/org/teams",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
data.NavTree = append(data.NavTree, cfgNode)
|
|
||||||
}
|
|
||||||
|
|
||||||
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
||||||
Text: "Help",
|
Text: "Help",
|
||||||
SubTitle: fmt.Sprintf(`%s v%s (%s)`, setting.ApplicationName, setting.BuildVersion, setting.BuildCommit),
|
SubTitle: fmt.Sprintf(`%s v%s (%s)`, setting.ApplicationName, setting.BuildVersion, setting.BuildCommit),
|
||||||
|
|||||||
@@ -44,7 +44,7 @@ func (hs *HTTPServer) LoginView(c *models.ReqContext) {
|
|||||||
viewData.Settings["loginHint"] = setting.LoginHint
|
viewData.Settings["loginHint"] = setting.LoginHint
|
||||||
viewData.Settings["passwordHint"] = setting.PasswordHint
|
viewData.Settings["passwordHint"] = setting.PasswordHint
|
||||||
viewData.Settings["disableLoginForm"] = setting.DisableLoginForm
|
viewData.Settings["disableLoginForm"] = setting.DisableLoginForm
|
||||||
viewData.Settings["samlEnabled"] = hs.Cfg.SAMLEnabled
|
viewData.Settings["samlEnabled"] = setting.IsEnterprise && hs.Cfg.SAMLEnabled
|
||||||
|
|
||||||
if loginError, ok := tryGetEncryptedCookie(c, LoginErrorCookieName); ok {
|
if loginError, ok := tryGetEncryptedCookie(c, LoginErrorCookieName); ok {
|
||||||
//this cookie is only set whenever an OAuth login fails
|
//this cookie is only set whenever an OAuth login fails
|
||||||
@@ -81,7 +81,7 @@ func tryOAuthAutoLogin(c *models.ReqContext) bool {
|
|||||||
}
|
}
|
||||||
oauthInfos := setting.OAuthService.OAuthInfos
|
oauthInfos := setting.OAuthService.OAuthInfos
|
||||||
if len(oauthInfos) != 1 {
|
if len(oauthInfos) != 1 {
|
||||||
log.Warn("Skipping OAuth auto login because multiple OAuth providers are configured.")
|
log.Warn("Skipping OAuth auto login because multiple OAuth providers are configured")
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
for key := range setting.OAuthService.OAuthInfos {
|
for key := range setting.OAuthService.OAuthInfos {
|
||||||
@@ -114,12 +114,16 @@ func (hs *HTTPServer) LoginPost(c *models.ReqContext, cmd dtos.LoginCommand) Res
     }
 
     if err := bus.Dispatch(authQuery); err != nil {
+        e401 := Error(401, "Invalid username or password", err)
         if err == login.ErrInvalidCredentials || err == login.ErrTooManyLoginAttempts {
-            return Error(401, "Invalid username or password", err)
+            return e401
         }
 
+        // Do not expose disabled status,
+        // just show incorrect user credentials error (see #17947)
         if err == login.ErrUserDisabled {
-            return Error(401, "User is disabled", err)
+            hs.log.Warn("User is disabled", "user", cmd.User)
+            return e401
         }
 
         return Error(500, "Error while trying to authenticate user", err)
@@ -138,7 +142,7 @@ func (hs *HTTPServer) LoginPost(c *models.ReqContext, cmd dtos.LoginCommand) Res
         c.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/")
     }
 
-    metrics.M_Api_Login_Post.Inc()
+    metrics.MApiLoginPost.Inc()
     return JSON(200, result)
 }
 
@@ -195,15 +199,18 @@ func (hs *HTTPServer) trySetEncryptedCookie(ctx *models.ReqContext, cookieName s
         return err
     }
 
-    http.SetCookie(ctx.Resp, &http.Cookie{
+    cookie := http.Cookie{
         Name:     cookieName,
         MaxAge:   60,
         Value:    hex.EncodeToString(encryptedError),
         HttpOnly: true,
         Path:     setting.AppSubUrl + "/",
         Secure:   hs.Cfg.CookieSecure,
-        SameSite: hs.Cfg.CookieSameSite,
-    })
+    }
+    if hs.Cfg.CookieSameSite != http.SameSiteDefaultMode {
+        cookie.SameSite = hs.Cfg.CookieSameSite
+    }
+    http.SetCookie(ctx.Resp, &cookie)
 
     return nil
 }
@@ -60,7 +60,7 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
     if code == "" {
         state := GenStateString()
         hashedState := hashStatecode(state, setting.OAuthService.OAuthInfos[name].ClientSecret)
-        hs.writeCookie(ctx.Resp, OauthStateCookieName, hashedState, 60)
+        hs.writeCookie(ctx.Resp, OauthStateCookieName, hashedState, 60, hs.Cfg.CookieSameSite)
         if setting.OAuthService.OAuthInfos[name].HostedDomain == "" {
             ctx.Redirect(connect.AuthCodeURL(state, oauth2.AccessTypeOnline))
         } else {
@@ -73,7 +73,7 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
 
     // delete cookie
     ctx.Resp.Header().Del("Set-Cookie")
-    hs.deleteCookie(ctx.Resp, OauthStateCookieName)
+    hs.deleteCookie(ctx.Resp, OauthStateCookieName, hs.Cfg.CookieSameSite)
 
     if cookieState == "" {
         ctx.Handle(500, "login.OAuthLogin(missing saved state)", nil)
@@ -191,15 +191,18 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
         return
     }
 
+    // Do not expose disabled status,
+    // just show incorrect user credentials error (see #17947)
     if cmd.Result.IsDisabled {
-        hs.redirectWithError(ctx, login.ErrUserDisabled)
+        oauthLogger.Warn("User is disabled", "user", cmd.Result.Login)
+        hs.redirectWithError(ctx, login.ErrInvalidCredentials)
         return
     }
 
     // login
     hs.loginUserWithUser(cmd.Result, ctx)
 
-    metrics.M_Api_Login_OAuth.Inc()
+    metrics.MApiLoginOAuth.Inc()
 
     if redirectTo, _ := url.QueryUnescape(ctx.GetCookie("redirect_to")); len(redirectTo) > 0 {
         ctx.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/")
@@ -210,20 +213,23 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
     ctx.Redirect(setting.AppSubUrl + "/")
 }
 
-func (hs *HTTPServer) deleteCookie(w http.ResponseWriter, name string) {
-    hs.writeCookie(w, name, "", -1)
+func (hs *HTTPServer) deleteCookie(w http.ResponseWriter, name string, sameSite http.SameSite) {
+    hs.writeCookie(w, name, "", -1, sameSite)
 }
 
-func (hs *HTTPServer) writeCookie(w http.ResponseWriter, name string, value string, maxAge int) {
-    http.SetCookie(w, &http.Cookie{
+func (hs *HTTPServer) writeCookie(w http.ResponseWriter, name string, value string, maxAge int, sameSite http.SameSite) {
+    cookie := http.Cookie{
         Name:     name,
         MaxAge:   maxAge,
         Value:    value,
         HttpOnly: true,
         Path:     setting.AppSubUrl + "/",
         Secure:   hs.Cfg.CookieSecure,
-        SameSite: hs.Cfg.CookieSameSite,
-    })
+    }
+    if sameSite != http.SameSiteDefaultMode {
+        cookie.SameSite = sameSite
+    }
+    http.SetCookie(w, &cookie)
 }
 
 func hashStatecode(code, seed string) string {
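The cookie changes above all follow the same pattern: build the http.Cookie first and only assign SameSite when the configured value is something other than http.SameSiteDefaultMode, since the Go releases contemporary with this change serialize an explicit default mode as a bare "SameSite" attribute that some browsers treat as invalid. A minimal, self-contained sketch of that pattern (simplified path, no Secure flag, not the Grafana helper itself):

package main

import (
    "fmt"
    "net/http"
    "net/http/httptest"
)

// writeCookie mirrors the pattern from the diff above: the SameSite attribute is
// only copied onto the cookie when it is not the default mode, so no SameSite
// attribute at all is emitted for the default.
func writeCookie(w http.ResponseWriter, name, value string, maxAge int, sameSite http.SameSite) {
    cookie := http.Cookie{
        Name:     name,
        Value:    value,
        MaxAge:   maxAge,
        HttpOnly: true,
        Path:     "/",
    }
    if sameSite != http.SameSiteDefaultMode {
        cookie.SameSite = sameSite
    }
    http.SetCookie(w, &cookie)
}

func main() {
    rec := httptest.NewRecorder()
    writeCookie(rec, "oauth_state", "abc123", 60, http.SameSiteLaxMode)
    writeCookie(rec, "redirect_to", "/d/abc", 60, http.SameSiteDefaultMode)
    // Only the first Set-Cookie header carries a SameSite attribute.
    for _, h := range rec.Header()["Set-Cookie"] {
        fmt.Println(h)
    }
}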
@@ -88,7 +88,7 @@ func CreateOrg(c *m.ReqContext, cmd m.CreateOrgCommand) Response {
         return Error(500, "Failed to create organization", err)
     }
 
-    metrics.M_Api_Org_Create.Inc()
+    metrics.MApiOrgCreate.Inc()
 
     return JSON(200, &util.DynMap{
         "orgId": cmd.Result.Id,
@@ -188,8 +188,8 @@ func (hs *HTTPServer) CompleteInvite(c *m.ReqContext, completeInvite dtos.Comple
 
     hs.loginUserWithUser(user, c)
 
-    metrics.M_Api_User_SignUpCompleted.Inc()
-    metrics.M_Api_User_SignUpInvite.Inc()
+    metrics.MApiUserSignUpCompleted.Inc()
+    metrics.MApiUserSignUpInvite.Inc()
 
     return Success("User created and logged in")
 }
@@ -61,6 +61,6 @@ func Search(c *m.ReqContext) Response {
         return Error(500, "Search failed", err)
     }
 
-    c.TimeRequest(metrics.M_Api_Dashboard_Search)
+    c.TimeRequest(metrics.MApiDashboardSearch)
     return JSON(200, searchQuery.Result)
 }
@@ -46,7 +46,7 @@ func SignUp(c *m.ReqContext, form dtos.SignUpForm) Response {
         Code:  cmd.Code,
     })
 
-    metrics.M_Api_User_SignUpStarted.Inc()
+    metrics.MApiUserSignUpStarted.Inc()
 
     return JSON(200, util.DynMap{"status": "SignUpCreated"})
 }
@@ -110,7 +110,7 @@ func (hs *HTTPServer) SignUpStep2(c *m.ReqContext, form dtos.SignUpStep2Form) Re
     }
 
     hs.loginUserWithUser(user, c)
-    metrics.M_Api_User_SignUpCompleted.Inc()
+    metrics.MApiUserSignUpCompleted.Inc()
 
     return JSON(200, apiResponse)
 }
@@ -335,6 +335,8 @@ func GetAuthProviderLabel(authModule string) string {
         return "GitLab"
     case "oauth_grafana_com", "oauth_grafananet":
         return "grafana.com"
+    case "auth.saml":
+        return "SAML"
     case "ldap", "":
         return "LDAP"
     default:
@@ -6,37 +6,53 @@ import (
     "path/filepath"
 
     "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
+    "golang.org/x/xerrors"
 )
 
 func GetGrafanaPluginDir(currentOS string) string {
-    if isDevEnvironment() {
-        return "../data/plugins"
+    if rootPath, ok := tryGetRootForDevEnvironment(); ok {
+        return filepath.Join(rootPath, "data/plugins")
     }
 
     return returnOsDefault(currentOS)
 }
 
-func isDevEnvironment() bool {
-    // if ../conf/defaults.ini exists, grafana is not installed as package
-    // that its in development environment.
+// getGrafanaRoot tries to get root of directory when developing grafana ie repo root. It is not perfect it just
+// checks what is the binary path and tries to guess based on that but if it is not running in dev env you get a bogus
+// path back.
+func getGrafanaRoot() (string, error) {
     ex, err := os.Executable()
     if err != nil {
-        logger.Error("Could not get executable path. Assuming non dev environment.")
-        return false
+        return "", xerrors.New("Failed to get executable path")
     }
     exPath := filepath.Dir(ex)
     _, last := path.Split(exPath)
     if last == "bin" {
         // In dev env the executable for current platform is created in 'bin/' dir
-        defaultsPath := filepath.Join(exPath, "../conf/defaults.ini")
-        _, err = os.Stat(defaultsPath)
-        return err == nil
+        return filepath.Join(exPath, ".."), nil
     }
 
     // But at the same time there are per platform directories that contain the binaries and can also be used.
-    defaultsPath := filepath.Join(exPath, "../../conf/defaults.ini")
-    _, err = os.Stat(defaultsPath)
-    return err == nil
+    return filepath.Join(exPath, "../.."), nil
+}
+
+// tryGetRootForDevEnvironment returns root path if we are in dev environment. It checks if conf/defaults.ini exists
+// which should only exist in dev. Second param is false if we are not in dev or if it wasn't possible to determine it.
+func tryGetRootForDevEnvironment() (string, bool) {
+    rootPath, err := getGrafanaRoot()
+    if err != nil {
+        logger.Error("Could not get executable path. Assuming non dev environment.", err)
+        return "", false
+    }
+
+    devenvPath := filepath.Join(rootPath, "devenv")
+
+    _, err = os.Stat(devenvPath)
+    if err != nil {
+        return "", false
+    }
+
+    return rootPath, true
 }
 
 func returnOsDefault(currentOs string) string {
@@ -3,103 +3,180 @@ package metrics
 import (
     "runtime"
 
-    "github.com/grafana/grafana/pkg/setting"
-
     "github.com/prometheus/client_golang/prometheus"
+
+    "github.com/grafana/grafana/pkg/setting"
 )
 
 const exporterName = "grafana"
 
 var (
-    M_Instance_Start       prometheus.Counter
-    M_Page_Status          *prometheus.CounterVec
-    M_Api_Status           *prometheus.CounterVec
-    M_Proxy_Status         *prometheus.CounterVec
-    M_Http_Request_Total   *prometheus.CounterVec
-    M_Http_Request_Summary *prometheus.SummaryVec
+    // MInstanceStart is a metric counter for started instances
+    MInstanceStart prometheus.Counter
 
-    M_Api_User_SignUpStarted   prometheus.Counter
-    M_Api_User_SignUpCompleted prometheus.Counter
-    M_Api_User_SignUpInvite    prometheus.Counter
-    M_Api_Dashboard_Save       prometheus.Summary
-    M_Api_Dashboard_Get        prometheus.Summary
-    M_Api_Dashboard_Search     prometheus.Summary
-    M_Api_Admin_User_Create    prometheus.Counter
-    M_Api_Login_Post           prometheus.Counter
-    M_Api_Login_OAuth          prometheus.Counter
-    M_Api_Org_Create           prometheus.Counter
+    // MPageStatus is a metric page http response status
+    MPageStatus *prometheus.CounterVec
 
-    M_Api_Dashboard_Snapshot_Create      prometheus.Counter
-    M_Api_Dashboard_Snapshot_External    prometheus.Counter
-    M_Api_Dashboard_Snapshot_Get         prometheus.Counter
-    M_Api_Dashboard_Insert               prometheus.Counter
-    M_Alerting_Result_State              *prometheus.CounterVec
-    M_Alerting_Notification_Sent         *prometheus.CounterVec
-    M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter
-    M_Aws_CloudWatch_ListMetrics         prometheus.Counter
-    M_Aws_CloudWatch_GetMetricData       prometheus.Counter
-    M_DB_DataSource_QueryById            prometheus.Counter
+    // MApiStatus is a metric api http response status
+    MApiStatus *prometheus.CounterVec
 
-    // Timers
-    M_DataSource_ProxyReq_Timer prometheus.Summary
-    M_Alerting_Execution_Time   prometheus.Summary
+    // MProxyStatus is a metric proxy http response status
+    MProxyStatus *prometheus.CounterVec
+
+    // MHttpRequestTotal is a metric http request counter
+    MHttpRequestTotal *prometheus.CounterVec
+
+    // MHttpRequestSummary is a metric http request summary
+    MHttpRequestSummary *prometheus.SummaryVec
+
+    // MApiUserSignUpStarted is a metric amount of users who started the signup flow
+    MApiUserSignUpStarted prometheus.Counter
+
+    // MApiUserSignUpCompleted is a metric amount of users who completed the signup flow
+    MApiUserSignUpCompleted prometheus.Counter
+
+    // MApiUserSignUpInvite is a metric amount of users who have been invited
+    MApiUserSignUpInvite prometheus.Counter
+
+    // MApiDashboardSave is a metric summary for dashboard save duration
+    MApiDashboardSave prometheus.Summary
+
+    // MApiDashboardGet is a metric summary for dashboard get duration
+    MApiDashboardGet prometheus.Summary
+
+    // MApiDashboardSearch is a metric summary for dashboard search duration
+    MApiDashboardSearch prometheus.Summary
+
+    // MApiAdminUserCreate is a metric api admin user created counter
+    MApiAdminUserCreate prometheus.Counter
+
+    // MApiLoginPost is a metric api login post counter
+    MApiLoginPost prometheus.Counter
+
+    // MApiLoginOAuth is a metric api login oauth counter
+    MApiLoginOAuth prometheus.Counter
+
+    // MApiLoginSAML is a metric api login SAML counter
+    MApiLoginSAML prometheus.Counter
+
+    // MApiOrgCreate is a metric api org created counter
+    MApiOrgCreate prometheus.Counter
+
+    // MApiDashboardSnapshotCreate is a metric dashboard snapshots created
+    MApiDashboardSnapshotCreate prometheus.Counter
+
+    // MApiDashboardSnapshotExternal is a metric external dashboard snapshots created
+    MApiDashboardSnapshotExternal prometheus.Counter
+
+    // MApiDashboardSnapshotGet is a metric loaded dashboards
+    MApiDashboardSnapshotGet prometheus.Counter
+
+    // MApiDashboardInsert is a metric dashboards inserted
+    MApiDashboardInsert prometheus.Counter
+
+    // MAlertingResultState is a metric alert execution result counter
+    MAlertingResultState *prometheus.CounterVec
+
+    // MAlertingNotificationSent is a metric counter for how many alert notifications been sent
+    MAlertingNotificationSent *prometheus.CounterVec
+
+    // MAwsCloudWatchGetMetricStatistics is a metric counter for getting metric statistics from aws
+    MAwsCloudWatchGetMetricStatistics prometheus.Counter
+
+    // MAwsCloudWatchListMetrics is a metric counter for getting list of metrics from aws
+    MAwsCloudWatchListMetrics prometheus.Counter
+
+    // MAwsCloudWatchGetMetricData is a metric counter for getting metric data time series from aws
+    MAwsCloudWatchGetMetricData prometheus.Counter
+
+    // MDBDataSourceQueryByID is a metric counter for getting datasource by id
+    MDBDataSourceQueryByID prometheus.Counter
+
+    // LDAPUsersSyncExecutionTime is a metric summary for LDAP users sync execution duration
+    LDAPUsersSyncExecutionTime prometheus.Summary
+)
+
+// Timers
+var (
+    // MDataSourceProxyReqTimer is a metric summary for dataproxy request duration
+    MDataSourceProxyReqTimer prometheus.Summary
+
+    // MAlertingExecutionTime is a metric summary of alert exeuction duration
+    MAlertingExecutionTime prometheus.Summary
 )
 
 // StatTotals
 var (
-    M_Alerting_Active_Alerts prometheus.Gauge
-    M_StatTotal_Dashboards   prometheus.Gauge
-    M_StatTotal_Users        prometheus.Gauge
-    M_StatActive_Users       prometheus.Gauge
-    M_StatTotal_Orgs         prometheus.Gauge
-    M_StatTotal_Playlists    prometheus.Gauge
+    // MAlertingActiveAlerts is a metric amount of active alerts
+    MAlertingActiveAlerts prometheus.Gauge
 
-    StatsTotalViewers prometheus.Gauge
-    StatsTotalEditors prometheus.Gauge
-    StatsTotalAdmins  prometheus.Gauge
+    // MStatTotalDashboards is a metric total amount of dashboards
+    MStatTotalDashboards prometheus.Gauge
+
+    // MStatTotalUsers is a metric total amount of users
+    MStatTotalUsers prometheus.Gauge
+
+    // MStatActiveUsers is a metric number of active users
+    MStatActiveUsers prometheus.Gauge
+
+    // MStatTotalOrgs is a metric total amount of orgs
+    MStatTotalOrgs prometheus.Gauge
+
+    // MStatTotalPlaylists is a metric total amount of playlists
+    MStatTotalPlaylists prometheus.Gauge
+
+    // StatsTotalViewers is a metric total amount of viewers
+    StatsTotalViewers prometheus.Gauge
+
+    // StatsTotalEditors is a metric total amount of editors
+    StatsTotalEditors prometheus.Gauge
+
+    // StatsTotalAdmins is a metric total amount of admins
+    StatsTotalAdmins prometheus.Gauge
+
+    // StatsTotalActiveViewers is a metric total amount of viewers
     StatsTotalActiveViewers prometheus.Gauge
+
+    // StatsTotalActiveEditors is a metric total amount of active editors
     StatsTotalActiveEditors prometheus.Gauge
-    StatsTotalActiveAdmins  prometheus.Gauge
 
-    // M_Grafana_Version is a gauge that contains build info about this binary
-    //
-    // Deprecated: use M_Grafana_Build_Version instead.
-    M_Grafana_Version *prometheus.GaugeVec
+    // StatsTotalActiveAdmins is a metric total amount of active admins
+    StatsTotalActiveAdmins prometheus.Gauge
 
-    // grafanaBuildVersion is a gauge that contains build info about this binary
+    // grafanaBuildVersion is a metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built
     grafanaBuildVersion *prometheus.GaugeVec
 )
 
 func init() {
-    M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{
+    httpStatusCodes := []string{"200", "404", "500", "unknown"}
+    MInstanceStart = prometheus.NewCounter(prometheus.CounterOpts{
         Name:      "instance_start_total",
         Help:      "counter for started instances",
         Namespace: exporterName,
     })
 
-    httpStatusCodes := []string{"200", "404", "500", "unknown"}
-    M_Page_Status = newCounterVecStartingAtZero(
+    MPageStatus = newCounterVecStartingAtZero(
         prometheus.CounterOpts{
             Name:      "page_response_status_total",
             Help:      "page http response status",
             Namespace: exporterName,
         }, []string{"code"}, httpStatusCodes...)
 
-    M_Api_Status = newCounterVecStartingAtZero(
+    MApiStatus = newCounterVecStartingAtZero(
         prometheus.CounterOpts{
             Name:      "api_response_status_total",
             Help:      "api http response status",
             Namespace: exporterName,
         }, []string{"code"}, httpStatusCodes...)
 
-    M_Proxy_Status = newCounterVecStartingAtZero(
+    MProxyStatus = newCounterVecStartingAtZero(
         prometheus.CounterOpts{
             Name:      "proxy_response_status_total",
             Help:      "proxy http response status",
             Namespace: exporterName,
         }, []string{"code"}, httpStatusCodes...)
 
-    M_Http_Request_Total = prometheus.NewCounterVec(
+    MHttpRequestTotal = prometheus.NewCounterVec(
         prometheus.CounterOpts{
             Name: "http_request_total",
             Help: "http request counter",
@@ -107,7 +184,7 @@ func init() {
         []string{"handler", "statuscode", "method"},
     )
 
-    M_Http_Request_Summary = prometheus.NewSummaryVec(
+    MHttpRequestSummary = prometheus.NewSummaryVec(
         prometheus.SummaryOpts{
             Name: "http_request_duration_milliseconds",
             Help: "http request summary",
@@ -115,169 +192,181 @@ func init() {
         []string{"handler", "statuscode", "method"},
     )
 
-    M_Api_User_SignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiUserSignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_user_signup_started_total",
         Help:      "amount of users who started the signup flow",
         Namespace: exporterName,
     })
 
-    M_Api_User_SignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiUserSignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_user_signup_completed_total",
         Help:      "amount of users who completed the signup flow",
         Namespace: exporterName,
     })
 
-    M_Api_User_SignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiUserSignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_user_signup_invite_total",
         Help:      "amount of users who have been invited",
         Namespace: exporterName,
     })
 
-    M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{
+    MApiDashboardSave = prometheus.NewSummary(prometheus.SummaryOpts{
         Name:      "api_dashboard_save_milliseconds",
         Help:      "summary for dashboard save duration",
         Namespace: exporterName,
     })
 
-    M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{
+    MApiDashboardGet = prometheus.NewSummary(prometheus.SummaryOpts{
         Name:      "api_dashboard_get_milliseconds",
         Help:      "summary for dashboard get duration",
         Namespace: exporterName,
     })
 
-    M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{
+    MApiDashboardSearch = prometheus.NewSummary(prometheus.SummaryOpts{
         Name:      "api_dashboard_search_milliseconds",
         Help:      "summary for dashboard search duration",
         Namespace: exporterName,
     })
 
-    M_Api_Admin_User_Create = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiAdminUserCreate = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_admin_user_created_total",
         Help:      "api admin user created counter",
         Namespace: exporterName,
     })
 
-    M_Api_Login_Post = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiLoginPost = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_login_post_total",
         Help:      "api login post counter",
         Namespace: exporterName,
     })
 
-    M_Api_Login_OAuth = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiLoginOAuth = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_login_oauth_total",
         Help:      "api login oauth counter",
         Namespace: exporterName,
     })
 
-    M_Api_Org_Create = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiLoginSAML = newCounterStartingAtZero(prometheus.CounterOpts{
+        Name:      "api_login_saml_total",
+        Help:      "api login saml counter",
+        Namespace: exporterName,
+    })
+
+    MApiOrgCreate = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_org_create_total",
         Help:      "api org created counter",
         Namespace: exporterName,
     })
 
-    M_Api_Dashboard_Snapshot_Create = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiDashboardSnapshotCreate = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_dashboard_snapshot_create_total",
         Help:      "dashboard snapshots created",
         Namespace: exporterName,
     })
 
-    M_Api_Dashboard_Snapshot_External = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiDashboardSnapshotExternal = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_dashboard_snapshot_external_total",
         Help:      "external dashboard snapshots created",
         Namespace: exporterName,
     })
 
-    M_Api_Dashboard_Snapshot_Get = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiDashboardSnapshotGet = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_dashboard_snapshot_get_total",
         Help:      "loaded dashboards",
         Namespace: exporterName,
     })
 
-    M_Api_Dashboard_Insert = newCounterStartingAtZero(prometheus.CounterOpts{
+    MApiDashboardInsert = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "api_models_dashboard_insert_total",
         Help:      "dashboards inserted ",
         Namespace: exporterName,
     })
 
-    M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{
+    MAlertingResultState = prometheus.NewCounterVec(prometheus.CounterOpts{
         Name:      "alerting_result_total",
         Help:      "alert execution result counter",
         Namespace: exporterName,
     }, []string{"state"})
 
-    M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{
+    MAlertingNotificationSent = prometheus.NewCounterVec(prometheus.CounterOpts{
         Name:      "alerting_notification_sent_total",
         Help:      "counter for how many alert notifications been sent",
         Namespace: exporterName,
     }, []string{"type"})
 
-    M_Aws_CloudWatch_GetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{
+    MAwsCloudWatchGetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "aws_cloudwatch_get_metric_statistics_total",
         Help:      "counter for getting metric statistics from aws",
         Namespace: exporterName,
     })
 
-    M_Aws_CloudWatch_ListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{
+    MAwsCloudWatchListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "aws_cloudwatch_list_metrics_total",
         Help:      "counter for getting list of metrics from aws",
         Namespace: exporterName,
     })
 
-    M_Aws_CloudWatch_GetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{
+    MAwsCloudWatchGetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "aws_cloudwatch_get_metric_data_total",
         Help:      "counter for getting metric data time series from aws",
         Namespace: exporterName,
     })
 
-    M_DB_DataSource_QueryById = newCounterStartingAtZero(prometheus.CounterOpts{
+    MDBDataSourceQueryByID = newCounterStartingAtZero(prometheus.CounterOpts{
         Name:      "db_datasource_query_by_id_total",
         Help:      "counter for getting datasource by id",
         Namespace: exporterName,
     })
 
-    M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{
+    LDAPUsersSyncExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
+        Name:      "ldap_users_sync_execution_time",
+        Help:      "summary for LDAP users sync execution duration",
+        Namespace: exporterName,
+    })
+
+    MDataSourceProxyReqTimer = prometheus.NewSummary(prometheus.SummaryOpts{
         Name:      "api_dataproxy_request_all_milliseconds",
         Help:      "summary for dataproxy request duration",
         Namespace: exporterName,
     })
 
-    M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{
+    MAlertingExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
         Name:      "alerting_execution_time_milliseconds",
         Help:      "summary of alert exeuction duration",
         Namespace: exporterName,
     })
 
-    M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{
+    MAlertingActiveAlerts = prometheus.NewGauge(prometheus.GaugeOpts{
         Name:      "alerting_active_alerts",
         Help:      "amount of active alerts",
         Namespace: exporterName,
     })
 
-    M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{
+    MStatTotalDashboards = prometheus.NewGauge(prometheus.GaugeOpts{
        Name:      "stat_totals_dashboard",
        Help:      "total amount of dashboards",
        Namespace: exporterName,
     })
 
-    M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{
+    MStatTotalUsers = prometheus.NewGauge(prometheus.GaugeOpts{
        Name:      "stat_total_users",
        Help:      "total amount of users",
        Namespace: exporterName,
     })
 
-    M_StatActive_Users = prometheus.NewGauge(prometheus.GaugeOpts{
+    MStatActiveUsers = prometheus.NewGauge(prometheus.GaugeOpts{
        Name:      "stat_active_users",
        Help:      "number of active users",
        Namespace: exporterName,
     })
 
-    M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{
+    MStatTotalOrgs = prometheus.NewGauge(prometheus.GaugeOpts{
        Name:      "stat_total_orgs",
        Help:      "total amount of orgs",
        Namespace: exporterName,
     })
 
-    M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{
+    MStatTotalPlaylists = prometheus.NewGauge(prometheus.GaugeOpts{
        Name:      "stat_total_playlists",
        Help:      "total amount of playlists",
        Namespace: exporterName,
@@ -319,78 +408,69 @@ func init() {
         Namespace: exporterName,
     })
 
-    M_Grafana_Version = prometheus.NewGaugeVec(prometheus.GaugeOpts{
-        Name:      "info",
-        Help:      "Information about the Grafana. This metric is deprecated. please use `grafana_build_info`",
-        Namespace: exporterName,
-    }, []string{"version"})
-
     grafanaBuildVersion = prometheus.NewGaugeVec(prometheus.GaugeOpts{
         Name:      "build_info",
-        Help:      "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built.",
+        Help:      "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built",
         Namespace: exporterName,
     }, []string{"version", "revision", "branch", "goversion", "edition"})
 }
 
 // SetBuildInformation sets the build information for this binary
 func SetBuildInformation(version, revision, branch string) {
-    // We export this info twice for backwards compatibility.
-    // Once this have been released for some time we should be able to remote `M_Grafana_Version`
-    // The reason we added a new one is that its common practice in the prometheus community
-    // to name this metric `*_build_info` so its easy to do aggregation on all programs.
     edition := "oss"
     if setting.IsEnterprise {
         edition = "enterprise"
     }
 
-    M_Grafana_Version.WithLabelValues(version).Set(1)
     grafanaBuildVersion.WithLabelValues(version, revision, branch, runtime.Version(), edition).Set(1)
 }
 
 func initMetricVars() {
     prometheus.MustRegister(
-        M_Instance_Start,
-        M_Page_Status,
-        M_Api_Status,
-        M_Proxy_Status,
-        M_Http_Request_Total,
-        M_Http_Request_Summary,
-        M_Api_User_SignUpStarted,
-        M_Api_User_SignUpCompleted,
-        M_Api_User_SignUpInvite,
-        M_Api_Dashboard_Save,
-        M_Api_Dashboard_Get,
-        M_Api_Dashboard_Search,
-        M_DataSource_ProxyReq_Timer,
-        M_Alerting_Execution_Time,
-        M_Api_Admin_User_Create,
-        M_Api_Login_Post,
-        M_Api_Login_OAuth,
-        M_Api_Org_Create,
-        M_Api_Dashboard_Snapshot_Create,
-        M_Api_Dashboard_Snapshot_External,
-        M_Api_Dashboard_Snapshot_Get,
-        M_Api_Dashboard_Insert,
-        M_Alerting_Result_State,
-        M_Alerting_Notification_Sent,
-        M_Aws_CloudWatch_GetMetricStatistics,
-        M_Aws_CloudWatch_ListMetrics,
-        M_Aws_CloudWatch_GetMetricData,
-        M_DB_DataSource_QueryById,
-        M_Alerting_Active_Alerts,
-        M_StatTotal_Dashboards,
-        M_StatTotal_Users,
-        M_StatActive_Users,
-        M_StatTotal_Orgs,
-        M_StatTotal_Playlists,
-        M_Grafana_Version,
+        MInstanceStart,
+        MPageStatus,
+        MApiStatus,
+        MProxyStatus,
+        MHttpRequestTotal,
+        MHttpRequestSummary,
+        MApiUserSignUpStarted,
+        MApiUserSignUpCompleted,
+        MApiUserSignUpInvite,
+        MApiDashboardSave,
+        MApiDashboardGet,
+        MApiDashboardSearch,
+        MDataSourceProxyReqTimer,
+        MAlertingExecutionTime,
+        MApiAdminUserCreate,
+        MApiLoginPost,
+        MApiLoginOAuth,
+        MApiLoginSAML,
+        MApiOrgCreate,
+        MApiDashboardSnapshotCreate,
+        MApiDashboardSnapshotExternal,
+        MApiDashboardSnapshotGet,
+        MApiDashboardInsert,
+        MAlertingResultState,
+        MAlertingNotificationSent,
+        MAwsCloudWatchGetMetricStatistics,
+        MAwsCloudWatchListMetrics,
+        MAwsCloudWatchGetMetricData,
+        MDBDataSourceQueryByID,
+        LDAPUsersSyncExecutionTime,
+        MAlertingActiveAlerts,
+        MStatTotalDashboards,
+        MStatTotalUsers,
+        MStatActiveUsers,
+        MStatTotalOrgs,
+        MStatTotalPlaylists,
         StatsTotalViewers,
         StatsTotalEditors,
         StatsTotalAdmins,
         StatsTotalActiveViewers,
         StatsTotalActiveEditors,
         StatsTotalActiveAdmins,
-        grafanaBuildVersion)
+        grafanaBuildVersion,
+    )
 
 }
 
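Most of the metrics churn above is a mechanical rename from underscore-separated identifiers (M_Api_Login_Post) to Go-style MixedCaps (MApiLoginPost), plus two new collectors (MApiLoginSAML, LDAPUsersSyncExecutionTime) and the removal of the deprecated grafana_info gauge; the metric names exposed to Prometheus are unchanged. A minimal sketch of the register-and-increment pattern these variables follow, using the upstream client library directly rather than Grafana's metrics package:

package main

import (
    "fmt"

    "github.com/prometheus/client_golang/prometheus"
)

// An exported, MixedCaps Go identifier, while the metric exposed to Prometheus
// keeps its snake_case name and namespace.
var MApiLoginPost = prometheus.NewCounter(prometheus.CounterOpts{
    Namespace: "grafana",
    Name:      "api_login_post_total",
    Help:      "api login post counter",
})

func main() {
    registry := prometheus.NewRegistry()
    registry.MustRegister(MApiLoginPost)

    MApiLoginPost.Inc() // the call sites above now read metrics.MApiLoginPost.Inc()

    families, _ := registry.Gather()
    for _, f := range families {
        fmt.Println(f.GetName()) // grafana_api_login_post_total
    }
}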
@@ -46,7 +46,7 @@ func (im *InternalMetricsService) Run(ctx context.Context) error {
         }
     }
 
-    M_Instance_Start.Inc()
+    MInstanceStart.Inc()
 
     <-ctx.Done()
     return ctx.Err()
@@ -22,8 +22,12 @@ func parseRedisConnStr(connStr string) (*redis.Options, error) {
     keyValueCSV := strings.Split(connStr, ",")
     options := &redis.Options{Network: "tcp"}
     for _, rawKeyValue := range keyValueCSV {
-        keyValueTuple := strings.Split(rawKeyValue, "=")
+        keyValueTuple := strings.SplitN(rawKeyValue, "=", 2)
         if len(keyValueTuple) != 2 {
+            if strings.HasPrefix(rawKeyValue, "password") {
+                // don't log the password
+                rawKeyValue = "password******"
+            }
             return nil, fmt.Errorf("incorrect redis connection string format detected for '%v', format is key=value,key=value", rawKeyValue)
         }
         connKey := keyValueTuple[0]
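The switch from strings.Split to strings.SplitN with a limit of 2 matters for values that themselves contain "=" (Redis passwords commonly do): SplitN keeps everything after the first "=" in the value instead of rejecting the connection string, and the added masking keeps the password out of the error message. A small standalone illustration of the difference:

package main

import (
    "fmt"
    "strings"
)

func main() {
    raw := "password=p@ss=w0rd"

    // strings.Split breaks the value apart at every '=', so this fragment would
    // previously have been rejected as malformed.
    fmt.Println(len(strings.Split(raw, "=")))   // 3

    // strings.SplitN with a limit of 2 keeps the '=' inside the value.
    fmt.Println(strings.SplitN(raw, "=", 2)[1]) // p@ss=w0rd
}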
@@ -155,17 +155,21 @@ func (uss *UsageStatsService) sendUsageStats(oauthProviders map[string]bool) {
 }
 
 func (uss *UsageStatsService) updateTotalStats() {
+    if !uss.Cfg.MetricsEndpointEnabled || uss.Cfg.MetricsEndpointDisableTotalStats {
+        return
+    }
+
     statsQuery := models.GetSystemStatsQuery{}
     if err := uss.Bus.Dispatch(&statsQuery); err != nil {
         metricsLogger.Error("Failed to get system stats", "error", err)
         return
     }
 
-    metrics.M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards))
-    metrics.M_StatTotal_Users.Set(float64(statsQuery.Result.Users))
-    metrics.M_StatActive_Users.Set(float64(statsQuery.Result.ActiveUsers))
-    metrics.M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists))
-    metrics.M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs))
+    metrics.MStatTotalDashboards.Set(float64(statsQuery.Result.Dashboards))
+    metrics.MStatTotalUsers.Set(float64(statsQuery.Result.Users))
+    metrics.MStatActiveUsers.Set(float64(statsQuery.Result.ActiveUsers))
+    metrics.MStatTotalPlaylists.Set(float64(statsQuery.Result.Playlists))
+    metrics.MStatTotalOrgs.Set(float64(statsQuery.Result.Orgs))
     metrics.StatsTotalViewers.Set(float64(statsQuery.Result.Viewers))
     metrics.StatsTotalActiveViewers.Set(float64(statsQuery.Result.ActiveViewers))
     metrics.StatsTotalEditors.Set(float64(statsQuery.Result.Editors))
@@ -264,6 +264,49 @@ func TestMetrics(t *testing.T) {
             ts.Close()
         })
     })
+
+    Convey("Test update total stats", t, func() {
+        uss := &UsageStatsService{
+            Bus: bus.New(),
+            Cfg: setting.NewCfg(),
+        }
+        uss.Cfg.MetricsEndpointEnabled = true
+        uss.Cfg.MetricsEndpointDisableTotalStats = false
+        getSystemStatsWasCalled := false
+        uss.Bus.AddHandler(func(query *models.GetSystemStatsQuery) error {
+            query.Result = &models.SystemStats{}
+            getSystemStatsWasCalled = true
+            return nil
+        })
+
+        Convey("should not update stats when metrics is disabled and total stats is disabled", func() {
+            uss.Cfg.MetricsEndpointEnabled = false
+            uss.Cfg.MetricsEndpointDisableTotalStats = true
+            uss.updateTotalStats()
+            So(getSystemStatsWasCalled, ShouldBeFalse)
+        })
+
+        Convey("should not update stats when metrics is disabled and total stats enabled", func() {
+            uss.Cfg.MetricsEndpointEnabled = false
+            uss.Cfg.MetricsEndpointDisableTotalStats = false
+            uss.updateTotalStats()
+            So(getSystemStatsWasCalled, ShouldBeFalse)
+        })
+
+        Convey("should not update stats when metrics is enabled and total stats disabled", func() {
+            uss.Cfg.MetricsEndpointEnabled = true
+            uss.Cfg.MetricsEndpointDisableTotalStats = true
+            uss.updateTotalStats()
+            So(getSystemStatsWasCalled, ShouldBeFalse)
+        })
+
+        Convey("should update stats when metrics is enabled and total stats enabled", func() {
+            uss.Cfg.MetricsEndpointEnabled = true
+            uss.Cfg.MetricsEndpointDisableTotalStats = false
+            uss.updateTotalStats()
+            So(getSystemStatsWasCalled, ShouldBeTrue)
+        })
+    })
 }
 
 func waitTimeout(wg *sync.WaitGroup, timeout time.Duration) bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func SnapshotPublicModeOrSignedIn() macaron.Handler {
|
||||||
|
return func(c *m.ReqContext) {
|
||||||
|
if setting.SnapshotPublicMode {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := c.Invoke(ReqSignedIn)
|
||||||
|
if err != nil {
|
||||||
|
c.JsonApiErr(500, "Failed to invoke required signed in middleware", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
 package authproxy
 
 import (
+    "encoding/base32"
     "fmt"
     "net"
     "net/mail"
@@ -32,6 +33,9 @@ var isLDAPEnabled = ldap.IsEnabled
 // newLDAP creates multiple LDAP instance
 var newLDAP = multildap.New
 
+// supportedHeaders states the supported headers configuration fields
+var supportedHeaderFields = []string{"Name", "Email", "Login", "Groups"}
+
 // AuthProxy struct
 type AuthProxy struct {
     store *remotecache.RemoteCache
@@ -142,9 +146,18 @@ func (auth *AuthProxy) IsAllowedIP() (bool, *Error) {
     return false, newError("Proxy authentication required", err)
 }
 
-// getKey forms a key for the cache
+// getKey forms a key for the cache based on the headers received as part of the authentication flow.
+// Our configuration supports multiple headers. The main header contains the email or username.
+// And the additional ones that allow us to specify extra attributes: Name, Email or Groups.
 func (auth *AuthProxy) getKey() string {
-    return fmt.Sprintf(CachePrefix, auth.header)
+    key := strings.TrimSpace(auth.header) // start the key with the main header
+
+    auth.headersIterator(func(_, header string) {
+        key = strings.Join([]string{key, header}, "-") // compose the key with any additional headers
+    })
+
+    hashedKey := base32.StdEncoding.EncodeToString([]byte(key))
+    return fmt.Sprintf(CachePrefix, hashedKey)
 }
 
 // Login logs in user id with whatever means possible
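getKey now folds any configured extra header values into the cache key and base32-encodes the result before applying the cache prefix, so keys stay printable regardless of what the headers contain. A standalone sketch of the same construction, assuming the "auth-proxy-sync-ttl:%s" prefix that the tests further down use; it reproduces the expected value for the "markelog" test case:

package main

import (
    "encoding/base32"
    "fmt"
    "strings"
)

// cachePrefix mirrors the CachePrefix format visible in the auth proxy tests below.
const cachePrefix = "auth-proxy-sync-ttl:%s"

// cacheKey composes the key the way getKey does: main header first, then any
// non-empty additional header values joined with "-", then base32-encoded.
func cacheKey(mainHeader string, extraHeaders ...string) string {
    key := strings.TrimSpace(mainHeader)
    for _, h := range extraHeaders {
        if h = strings.TrimSpace(h); h != "" {
            key = strings.Join([]string{key, h}, "-")
        }
    }
    return fmt.Sprintf(cachePrefix, base32.StdEncoding.EncodeToString([]byte(key)))
}

func main() {
    fmt.Println(cacheKey("markelog"))                      // auth-proxy-sync-ttl:NVQXE23FNRXWO===
    fmt.Println(cacheKey("markelog", "grafana-core-team")) // matches the second test case below
}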
@@ -232,40 +245,36 @@ func (auth *AuthProxy) LoginViaHeader() (int64, error) {
         AuthId:     auth.header,
     }
 
-    if auth.headerType == "username" {
+    switch auth.headerType {
+    case "username":
         extUser.Login = auth.header
 
-        // only set Email if it can be parsed as an email address
-        emailAddr, emailErr := mail.ParseAddress(auth.header)
+        emailAddr, emailErr := mail.ParseAddress(auth.header) // only set Email if it can be parsed as an email address
         if emailErr == nil {
             extUser.Email = emailAddr.Address
         }
-    } else if auth.headerType == "email" {
+    case "email":
         extUser.Email = auth.header
         extUser.Login = auth.header
-    } else {
+    default:
         return 0, newError("Auth proxy header property invalid", nil)
+
     }
-    for _, field := range []string{"Name", "Email", "Login", "Groups"} {
-        if auth.headers[field] == "" {
-            continue
-        }
-
-        if val := auth.ctx.Req.Header.Get(auth.headers[field]); val != "" {
-            if field == "Groups" {
-                extUser.Groups = util.SplitString(val)
-            } else {
-                reflect.ValueOf(extUser).Elem().FieldByName(field).SetString(val)
-            }
-        }
-    }
+
+    auth.headersIterator(func(field string, header string) {
+        if field == "Groups" {
+            extUser.Groups = util.SplitString(header)
+        } else {
+            reflect.ValueOf(extUser).Elem().FieldByName(field).SetString(header)
+        }
+    })
 
     upsert := &models.UpsertUserCommand{
         ReqContext:    auth.ctx,
         SignupAllowed: setting.AuthProxyAutoSignUp,
         ExternalUser:  extUser,
     }
 
     err := bus.Dispatch(upsert)
     if err != nil {
         return 0, err
@@ -274,6 +283,21 @@ func (auth *AuthProxy) LoginViaHeader() (int64, error) {
     return upsert.Result.Id, nil
 }
 
+// headersIterator iterates over all non-empty supported additional headers
+func (auth *AuthProxy) headersIterator(fn func(field string, header string)) {
+    for _, field := range supportedHeaderFields {
+        h := auth.headers[field]
+
+        if h == "" {
+            continue
+        }
+
+        if value := auth.ctx.Req.Header.Get(h); value != "" {
+            fn(field, strings.TrimSpace(value))
+        }
+    }
+}
+
 // GetSignedUser get full signed user info
 func (auth *AuthProxy) GetSignedUser(userID int64) (*models.SignedInUser, *Error) {
     query := &models.GetSignedInUserQuery{
@@ -1,20 +1,20 @@
 package authproxy
 
 import (
+    "encoding/base32"
     "errors"
     "fmt"
     "net/http"
     "testing"
 
-    . "github.com/smartystreets/goconvey/convey"
-    "gopkg.in/macaron.v1"
-
     "github.com/grafana/grafana/pkg/bus"
     "github.com/grafana/grafana/pkg/infra/remotecache"
     "github.com/grafana/grafana/pkg/models"
     "github.com/grafana/grafana/pkg/services/ldap"
     "github.com/grafana/grafana/pkg/services/multildap"
     "github.com/grafana/grafana/pkg/setting"
+    . "github.com/smartystreets/goconvey/convey"
+    "gopkg.in/macaron.v1"
 )
 
 type TestMultiLDAP struct {
@@ -45,37 +45,70 @@ func (stub *TestMultiLDAP) User(login string) (
     return result, nil
 }
 
+func prepareMiddleware(t *testing.T, req *http.Request, store *remotecache.RemoteCache) *AuthProxy {
+    t.Helper()
+
+    ctx := &models.ReqContext{
+        Context: &macaron.Context{
+            Req: macaron.Request{
+                Request: req,
+            },
+        },
+    }
+
+    auth := New(&Options{
+        Store: store,
+        Ctx:   ctx,
+        OrgID: 4,
+    })
+
+    return auth
+}
+
 func TestMiddlewareContext(t *testing.T) {
     Convey("auth_proxy helper", t, func() {
         req, _ := http.NewRequest("POST", "http://example.com", nil)
         setting.AuthProxyHeaderName = "X-Killa"
-        name := "markelog"
+        store := remotecache.NewFakeStore(t)
+
+        name := "markelog"
         req.Header.Add(setting.AuthProxyHeaderName, name)
 
-        ctx := &models.ReqContext{
-            Context: &macaron.Context{
-                Req: macaron.Request{
-                    Request: req,
-                },
-            },
-        }
-
-        Convey("logs in user from the cache", func() {
-            store := remotecache.NewFakeStore(t)
-            key := fmt.Sprintf(CachePrefix, name)
-            store.Set(key, int64(33), 0)
-
-            auth := New(&Options{
-                Store: store,
-                Ctx:   ctx,
-                OrgID: 4,
-            })
-
-            id, err := auth.Login()
-
-            So(err, ShouldBeNil)
-            So(id, ShouldEqual, 33)
+        Convey("when the cache only contains the main header", func() {
+            Convey("with a simple cache key", func() {
+                // Set cache key
+                key := fmt.Sprintf(CachePrefix, base32.StdEncoding.EncodeToString([]byte(name)))
+                store.Set(key, int64(33), 0)
+
+                // Set up the middleware
+                auth := prepareMiddleware(t, req, store)
+                id, err := auth.Login()
+
+                So(auth.getKey(), ShouldEqual, "auth-proxy-sync-ttl:NVQXE23FNRXWO===")
+                So(err, ShouldBeNil)
+                So(id, ShouldEqual, 33)
+            })
+
+            Convey("when the cache key contains additional headers", func() {
+                setting.AuthProxyHeaders = map[string]string{"Groups": "X-WEBAUTH-GROUPS"}
+                group := "grafana-core-team"
+                req.Header.Add("X-WEBAUTH-GROUPS", group)
+
+                key := fmt.Sprintf(CachePrefix, base32.StdEncoding.EncodeToString([]byte(name+"-"+group)))
+                store.Set(key, int64(33), 0)
+
+                auth := prepareMiddleware(t, req, store)
+
+                id, err := auth.Login()
+
+                So(auth.getKey(), ShouldEqual, "auth-proxy-sync-ttl:NVQXE23FNRXWOLLHOJQWMYLOMEWWG33SMUWXIZLBNU======")
+                So(err, ShouldBeNil)
+                So(id, ShouldEqual, 33)
+            })
+
+            Convey("when the does not exist", func() {
+            })
         })
 
         Convey("LDAP", func() {
@@ -119,13 +152,9 @@ func TestMiddlewareContext(t *testing.T) {
|
|||||||
|
|
||||||
store := remotecache.NewFakeStore(t)
|
store := remotecache.NewFakeStore(t)
|
||||||
|
|
||||||
server := New(&Options{
|
auth := prepareMiddleware(t, req, store)
|
||||||
Store: store,
|
|
||||||
Ctx: ctx,
|
|
||||||
OrgID: 4,
|
|
||||||
})
|
|
||||||
|
|
||||||
id, err := server.Login()
|
id, err := auth.Login()
|
||||||
|
|
||||||
So(err, ShouldBeNil)
|
So(err, ShouldBeNil)
|
||||||
So(id, ShouldEqual, 42)
|
So(id, ShouldEqual, 42)
|
||||||
@@ -149,11 +178,7 @@ func TestMiddlewareContext(t *testing.T) {
|
|||||||
|
|
||||||
store := remotecache.NewFakeStore(t)
|
store := remotecache.NewFakeStore(t)
|
||||||
|
|
||||||
auth := New(&Options{
|
auth := prepareMiddleware(t, req, store)
|
||||||
Store: store,
|
|
||||||
Ctx: ctx,
|
|
||||||
OrgID: 4,
|
|
||||||
})
|
|
||||||
|
|
||||||
stub := &TestMultiLDAP{
|
stub := &TestMultiLDAP{
|
||||||
ID: 42,
|
ID: 42,
|
||||||
@@ -170,7 +195,6 @@ func TestMiddlewareContext(t *testing.T) {
|
|||||||
So(id, ShouldNotEqual, 42)
|
So(id, ShouldNotEqual, 42)
|
||||||
So(stub.loginCalled, ShouldEqual, false)
|
So(stub.loginCalled, ShouldEqual, false)
|
||||||
})
|
})
|
||||||
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
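Note: the two getKey() assertions above can be reproduced outside the test. A minimal sketch, assuming CachePrefix expands to "auth-proxy-sync-ttl:%s" (which is what the asserted strings imply); the header value and group name are the ones used in the hunk:

```go
package main

import (
    "encoding/base32"
    "fmt"
)

func main() {
    name, group := "markelog", "grafana-core-team"

    // Simple key: only the main auth-proxy header value is encoded.
    fmt.Println("auth-proxy-sync-ttl:" + base32.StdEncoding.EncodeToString([]byte(name)))
    // auth-proxy-sync-ttl:NVQXE23FNRXWO===

    // Extended key: additional header values are joined to the name with "-" before encoding.
    fmt.Println("auth-proxy-sync-ttl:" + base32.StdEncoding.EncodeToString([]byte(name+"-"+group)))
    // auth-proxy-sync-ttl:NVQXE23FNRXWOLLHOJQWMYLOMEWWG33SMUWXIZLBNU======
}
```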
@@ -3,6 +3,8 @@ package middleware
 import (
     "testing"
 
+    "github.com/grafana/grafana/pkg/setting"
+
     . "github.com/smartystreets/goconvey/convey"
 )
 
@@ -31,5 +33,19 @@ func TestMiddlewareAuth(t *testing.T) {
            })
        })
 
+        Convey("snapshot public mode or signed in", func() {
+            middlewareScenario(t, "Snapshot public mode disabled and unauthenticated request should return 401", func(sc *scenarioContext) {
+                sc.m.Get("/api/snapshot", SnapshotPublicModeOrSignedIn(), sc.defaultHandler)
+                sc.fakeReq("GET", "/api/snapshot").exec()
+                So(sc.resp.Code, ShouldEqual, 401)
+            })
+
+            middlewareScenario(t, "Snapshot public mode enabled and unauthenticated request should return 200", func(sc *scenarioContext) {
+                setting.SnapshotPublicMode = true
+                sc.m.Get("/api/snapshot", SnapshotPublicModeOrSignedIn(), sc.defaultHandler)
+                sc.fakeReq("GET", "/api/snapshot").exec()
+                So(sc.resp.Code, ShouldEqual, 200)
+            })
+        })
    })
}
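Note: the two snapshot scenarios only pin down observable behaviour: with setting.SnapshotPublicMode off, an anonymous request gets 401; with it on, the request passes through. A rough sketch of a guard with that behaviour, not the actual SnapshotPublicModeOrSignedIn implementation:

```go
package middleware

import (
    "gopkg.in/macaron.v1"

    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/setting"
)

// snapshotGuardSketch restates the behaviour asserted by the scenarios above;
// the real handler in Grafana may differ in detail.
func snapshotGuardSketch() macaron.Handler {
    return func(c *models.ReqContext) {
        if setting.SnapshotPublicMode {
            // Public snapshot mode: let unauthenticated requests through (the 200 case).
            return
        }
        if !c.IsSignedIn {
            // Otherwise an unauthenticated request is rejected (the 401 case).
            c.JsonApiErr(401, "Unauthorized", nil)
        }
    }
}
```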
@@ -21,11 +21,19 @@ import (
 
 var getTime = time.Now
 
+const (
+    errStringInvalidUsernamePassword = "Invalid username or password"
+    errStringInvalidAPIKey           = "Invalid API key"
+)
+
 var (
-    ReqGrafanaAdmin = Auth(&AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
-    ReqSignedIn     = Auth(&AuthOptions{ReqSignedIn: true})
-    ReqEditorRole   = RoleAuth(models.ROLE_EDITOR, models.ROLE_ADMIN)
-    ReqOrgAdmin     = RoleAuth(models.ROLE_ADMIN)
+    ReqGrafanaAdmin = Auth(&AuthOptions{
+        ReqSignedIn:     true,
+        ReqGrafanaAdmin: true,
+    })
+    ReqSignedIn   = Auth(&AuthOptions{ReqSignedIn: true})
+    ReqEditorRole = RoleAuth(models.ROLE_EDITOR, models.ROLE_ADMIN)
+    ReqOrgAdmin   = RoleAuth(models.ROLE_ADMIN)
 )
 
 func GetContextHandler(
@@ -106,14 +114,14 @@ func initContextWithApiKey(ctx *models.ReqContext) bool {
     // base64 decode key
     decoded, err := apikeygen.Decode(keyString)
     if err != nil {
-        ctx.JsonApiErr(401, "Invalid API key", err)
+        ctx.JsonApiErr(401, errStringInvalidAPIKey, err)
         return true
     }
 
     // fetch key
     keyQuery := models.GetApiKeyByNameQuery{KeyName: decoded.Name, OrgId: decoded.OrgId}
     if err := bus.Dispatch(&keyQuery); err != nil {
-        ctx.JsonApiErr(401, "Invalid API key", err)
+        ctx.JsonApiErr(401, errStringInvalidAPIKey, err)
         return true
     }
 
@@ -121,7 +129,7 @@ func initContextWithApiKey(ctx *models.ReqContext) bool {
 
     // validate api key
     if !apikeygen.IsValid(decoded, apikey.Key) {
-        ctx.JsonApiErr(401, "Invalid API key", err)
+        ctx.JsonApiErr(401, errStringInvalidAPIKey, err)
         return true
     }
 
@@ -140,7 +148,6 @@ func initContextWithApiKey(ctx *models.ReqContext) bool {
 }
 
 func initContextWithBasicAuth(ctx *models.ReqContext, orgId int64) bool {
-
     if !setting.BasicAuthEnabled {
         return false
     }
@@ -158,21 +165,39 @@ func initContextWithBasicAuth(ctx *models.ReqContext, orgId int64) bool {
 
     loginQuery := models.GetUserByLoginQuery{LoginOrEmail: username}
     if err := bus.Dispatch(&loginQuery); err != nil {
-        ctx.JsonApiErr(401, "Basic auth failed", err)
+        ctx.Logger.Debug(
+            "Failed to look up the username",
+            "username", username,
+        )
+        ctx.JsonApiErr(401, errStringInvalidUsernamePassword, err)
+
         return true
     }
 
     user := loginQuery.Result
-    loginUserQuery := models.LoginUserQuery{Username: username, Password: password, User: user}
+    loginUserQuery := models.LoginUserQuery{
+        Username: username,
+        Password: password,
+        User:     user,
+    }
     if err := bus.Dispatch(&loginUserQuery); err != nil {
-        ctx.JsonApiErr(401, "Invalid username or password", err)
+        ctx.Logger.Debug(
+            "Failed to authorize the user",
+            "username", username,
+        )
+
+        ctx.JsonApiErr(401, errStringInvalidUsernamePassword, err)
         return true
     }
 
     query := models.GetSignedInUserQuery{UserId: user.Id, OrgId: orgId}
     if err := bus.Dispatch(&query); err != nil {
-        ctx.JsonApiErr(401, "Authentication error", err)
+        ctx.Logger.Error(
+            "Failed at user signed in",
+            "id", user.Id,
+            "org", orgId,
+        )
+        ctx.JsonApiErr(401, errStringInvalidUsernamePassword, err)
         return true
     }
 
@@ -193,14 +218,14 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.
 
     token, err := authTokenService.LookupToken(ctx.Req.Context(), rawToken)
     if err != nil {
-        ctx.Logger.Error("failed to look up user based on cookie", "error", err)
+        ctx.Logger.Error("Failed to look up user based on cookie", "error", err)
         WriteSessionCookie(ctx, "", -1)
         return false
     }
 
     query := models.GetSignedInUserQuery{UserId: token.UserId, OrgId: orgID}
     if err := bus.Dispatch(&query); err != nil {
-        ctx.Logger.Error("failed to get user with id", "userId", token.UserId, "error", err)
+        ctx.Logger.Error("Failed to get user with id", "userId", token.UserId, "error", err)
         return false
     }
 
@@ -210,7 +235,7 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.
 
     rotated, err := authTokenService.TryRotateToken(ctx.Req.Context(), token, ctx.RemoteAddr(), ctx.Req.UserAgent())
     if err != nil {
-        ctx.Logger.Error("failed to rotate token", "error", err)
+        ctx.Logger.Error("Failed to rotate token", "error", err)
         return true
     }
 
@@ -223,7 +248,7 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.
 
 func WriteSessionCookie(ctx *models.ReqContext, value string, maxLifetimeDays int) {
     if setting.Env == setting.DEV {
-        ctx.Logger.Info("new token", "unhashed token", value)
+        ctx.Logger.Info("New token", "unhashed token", value)
     }
 
     var maxAge int
@@ -242,7 +267,9 @@ func WriteSessionCookie(ctx *models.ReqContext, value string, maxLifetimeDays in
         Path:     setting.AppSubUrl + "/",
         Secure:   setting.CookieSecure,
         MaxAge:   maxAge,
-        SameSite: setting.CookieSameSite,
+    }
+
+    if setting.CookieSameSite != http.SameSiteDefaultMode {
+        cookie.SameSite = setting.CookieSameSite
     }
 
     http.SetCookie(ctx.Resp, &cookie)
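Note: the new guard in WriteSessionCookie exists because of how net/http serialized SameSite in the Go 1.12/1.13 era: leaving Cookie.SameSite at its zero value omits the attribute entirely, while explicitly assigning http.SameSiteDefaultMode emits a bare "SameSite" with no value. A small standalone illustration (not Grafana code):

```go
package main

import (
    "fmt"
    "net/http"
)

func main() {
    c := http.Cookie{Name: "grafana_session", Value: "x", Path: "/"}
    fmt.Println(c.String()) // no SameSite attribute at all

    c.SameSite = http.SameSiteDefaultMode
    fmt.Println(c.String()) // adds a bare "; SameSite" (pre-Go 1.16 behaviour)

    c.SameSite = http.SameSiteLaxMode
    fmt.Println(c.String()) // adds "; SameSite=Lax"
}
```

That is also why the matching test below loops over policies and only sets expectedCookie.SameSite for non-default modes.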
@@ -2,6 +2,7 @@ package middleware
 
 import (
     "context"
+    "encoding/base32"
     "encoding/json"
     "fmt"
     "net/http"
@@ -11,6 +12,7 @@ import (
     "time"
 
     . "github.com/smartystreets/goconvey/convey"
+    "github.com/stretchr/testify/assert"
     "gopkg.in/macaron.v1"
 
     "github.com/grafana/grafana/pkg/api/dtos"
@@ -21,7 +23,6 @@ import (
     "github.com/grafana/grafana/pkg/services/login"
     "github.com/grafana/grafana/pkg/setting"
     "github.com/grafana/grafana/pkg/util"
-    "github.com/stretchr/testify/assert"
 )
 
 const errorTemplate = "error-template"
@@ -305,28 +306,38 @@ func TestMiddlewareContext(t *testing.T) {
             maxAgeHours := (time.Duration(setting.LoginMaxLifetimeDays) * 24 * time.Hour)
             maxAge := (maxAgeHours + time.Hour).Seconds()
 
-            expectedCookie := &http.Cookie{
-                Name:     setting.LoginCookieName,
-                Value:    "rotated",
-                Path:     setting.AppSubUrl + "/",
-                HttpOnly: true,
-                MaxAge:   int(maxAge),
-                Secure:   setting.CookieSecure,
-                SameSite: setting.CookieSameSite,
+            sameSitePolicies := []http.SameSite{
+                http.SameSiteDefaultMode,
+                http.SameSiteLaxMode,
+                http.SameSiteStrictMode,
             }
+            for _, sameSitePolicy := range sameSitePolicies {
+                setting.CookieSameSite = sameSitePolicy
+                expectedCookie := &http.Cookie{
+                    Name:     setting.LoginCookieName,
+                    Value:    "rotated",
+                    Path:     setting.AppSubUrl + "/",
+                    HttpOnly: true,
+                    MaxAge:   int(maxAge),
+                    Secure:   setting.CookieSecure,
+                }
+                if sameSitePolicy != http.SameSiteDefaultMode {
+                    expectedCookie.SameSite = sameSitePolicy
+                }
+
                 sc.fakeReq("GET", "/").exec()
 
-            Convey("should init context with user info", func() {
+                Convey(fmt.Sprintf("Should init context with user info and setting.SameSite=%v", sameSitePolicy), func() {
                     So(sc.context.IsSignedIn, ShouldBeTrue)
                     So(sc.context.UserId, ShouldEqual, 12)
                     So(sc.context.UserToken.UserId, ShouldEqual, 12)
                     So(sc.context.UserToken.UnhashedToken, ShouldEqual, "rotated")
                 })
 
-            Convey("should set cookie", func() {
+                Convey(fmt.Sprintf("Should set cookie with setting.SameSite=%v", sameSitePolicy), func() {
                     So(sc.resp.Header().Get("Set-Cookie"), ShouldEqual, expectedCookie.String())
                 })
+            }
         })
 
         middlewareScenario(t, "Invalid/expired auth token in cookie", func(sc *scenarioContext) {
@@ -377,7 +388,9 @@ func TestMiddlewareContext(t *testing.T) {
         setting.LDAPEnabled = true
         setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
         setting.AuthProxyHeaderProperty = "username"
+        setting.AuthProxyHeaders = map[string]string{"Groups": "X-WEBAUTH-GROUPS"}
         name := "markelog"
+        group := "grafana-core-team"
 
         middlewareScenario(t, "should not sync the user if it's in the cache", func(sc *scenarioContext) {
             bus.AddHandler("test", func(query *models.GetSignedInUserQuery) error {
@@ -385,11 +398,12 @@ func TestMiddlewareContext(t *testing.T) {
                 return nil
             })
 
-            key := fmt.Sprintf(cachePrefix, name)
+            key := fmt.Sprintf(cachePrefix, base32.StdEncoding.EncodeToString([]byte(name+"-"+group)))
             sc.remoteCacheService.Set(key, int64(33), 0)
             sc.fakeReq("GET", "/")
 
             sc.req.Header.Add(setting.AuthProxyHeaderName, name)
+            sc.req.Header.Add("X-WEBAUTH-GROUPS", group)
             sc.exec()
 
             Convey("Should init user via cache", func() {

@@ -20,9 +20,9 @@ func RequestMetrics(handler string) macaron.Handler {
 
         code := sanitizeCode(status)
         method := sanitizeMethod(req.Method)
-        metrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc()
+        metrics.MHttpRequestTotal.WithLabelValues(handler, code, method).Inc()
         duration := time.Since(now).Nanoseconds() / int64(time.Millisecond)
-        metrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(float64(duration))
+        metrics.MHttpRequestSummary.WithLabelValues(handler, code, method).Observe(float64(duration))
 
         if strings.HasPrefix(req.RequestURI, "/api/datasources/proxy") {
             countProxyRequests(status)
@@ -37,39 +37,39 @@ func RequestMetrics(handler string) macaron.Handler {
 func countApiRequests(status int) {
     switch status {
     case 200:
-        metrics.M_Api_Status.WithLabelValues("200").Inc()
+        metrics.MApiStatus.WithLabelValues("200").Inc()
     case 404:
-        metrics.M_Api_Status.WithLabelValues("404").Inc()
+        metrics.MApiStatus.WithLabelValues("404").Inc()
     case 500:
-        metrics.M_Api_Status.WithLabelValues("500").Inc()
+        metrics.MApiStatus.WithLabelValues("500").Inc()
     default:
-        metrics.M_Api_Status.WithLabelValues("unknown").Inc()
+        metrics.MApiStatus.WithLabelValues("unknown").Inc()
     }
 }
 
 func countPageRequests(status int) {
     switch status {
     case 200:
-        metrics.M_Page_Status.WithLabelValues("200").Inc()
+        metrics.MPageStatus.WithLabelValues("200").Inc()
     case 404:
-        metrics.M_Page_Status.WithLabelValues("404").Inc()
+        metrics.MPageStatus.WithLabelValues("404").Inc()
     case 500:
-        metrics.M_Page_Status.WithLabelValues("500").Inc()
+        metrics.MPageStatus.WithLabelValues("500").Inc()
     default:
-        metrics.M_Page_Status.WithLabelValues("unknown").Inc()
+        metrics.MPageStatus.WithLabelValues("unknown").Inc()
     }
 }
 
 func countProxyRequests(status int) {
     switch status {
     case 200:
-        metrics.M_Proxy_Status.WithLabelValues("200").Inc()
+        metrics.MProxyStatus.WithLabelValues("200").Inc()
     case 404:
-        metrics.M_Proxy_Status.WithLabelValues("400").Inc()
+        metrics.MProxyStatus.WithLabelValues("400").Inc()
     case 500:
-        metrics.M_Proxy_Status.WithLabelValues("500").Inc()
+        metrics.MProxyStatus.WithLabelValues("500").Inc()
     default:
-        metrics.M_Proxy_Status.WithLabelValues("unknown").Inc()
+        metrics.MProxyStatus.WithLabelValues("unknown").Inc()
     }
 }
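Note: the metric renames (M_Http_Request_Total to MHttpRequestTotal and so on) do not change how the collectors are used: WithLabelValues takes the label values positionally, in the order the labels were declared. A hedged sketch with an assumed declaration; Grafana's real metric names, help strings and label names live in its metrics package and may differ:

```go
package main

import "github.com/prometheus/client_golang/prometheus"

// Assumed declaration, for illustration only.
var mHTTPRequestTotal = prometheus.NewCounterVec(
    prometheus.CounterOpts{
        Name: "http_request_total",
        Help: "HTTP request counter",
    },
    []string{"handler", "statuscode", "method"},
)

func main() {
    prometheus.MustRegister(mHTTPRequestTotal)

    // Mirrors the call shape in the hunk above: values must line up with the
    // declared label order (handler, status code, method).
    mHTTPRequestTotal.WithLabelValues("dashboard", "200", "get").Inc()
}
```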
@@ -62,6 +62,7 @@ type CreateUserCommand struct {
     Password string
     EmailVerified bool
     IsAdmin bool
+    IsDisabled bool
     SkipOrgSetup bool
     DefaultOrgRole string
 
@@ -146,6 +147,8 @@ type SearchUsersQuery struct {
     Limit int
     AuthModule string
 
+    IsDisabled *bool
+
     Result SearchUserQueryResult
 }
@@ -112,11 +112,19 @@ type PluginDependencyItem struct {
     Version string `json:"version"`
 }
 
+type PluginBuildInfo struct {
+    Time int64 `json:"time,omitempty"`
+    Repo string `json:"repo,omitempty"`
+    Branch string `json:"branch,omitempty"`
+    Hash string `json:"hash,omitempty"`
+}
+
 type PluginInfo struct {
     Author PluginInfoLink `json:"author"`
     Description string `json:"description"`
     Links []PluginInfoLink `json:"links"`
     Logos PluginLogos `json:"logos"`
+    Build PluginBuildInfo `json:"build"`
     Screenshots []PluginScreenshots `json:"screenshots"`
     Version string `json:"version"`
     Updated string `json:"updated"`
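Note: every field of the new PluginBuildInfo struct carries omitempty, so plugins that ship no build metadata serialize an empty object and round-trip cleanly. A self-contained sketch; the sample values are invented for illustration:

```go
package main

import (
    "encoding/json"
    "fmt"
)

// Copied from the hunk above.
type PluginBuildInfo struct {
    Time   int64  `json:"time,omitempty"`
    Repo   string `json:"repo,omitempty"`
    Branch string `json:"branch,omitempty"`
    Hash   string `json:"hash,omitempty"`
}

func main() {
    in := []byte(`{"time": 1565000000000, "repo": "https://github.com/grafana/grafana", "branch": "master", "hash": "abc1234"}`)

    var b PluginBuildInfo
    if err := json.Unmarshal(in, &b); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", b)

    // With omitempty on every field, a zero-valued build section marshals to "{}".
    empty, _ := json.Marshal(PluginBuildInfo{})
    fmt.Println(string(empty))
}
```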
@@ -70,5 +70,5 @@ func (e *DefaultEvalHandler) Eval(context *EvalContext) {
     context.EndTime = time.Now()
 
     elapsedTime := context.EndTime.Sub(context.StartTime).Nanoseconds() / int64(time.Millisecond)
-    metrics.M_Alerting_Execution_Time.Observe(float64(elapsedTime))
+    metrics.MAlertingExecutionTime.Observe(float64(elapsedTime))
 }
@@ -57,7 +57,7 @@ func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, no
     notifier := notifierState.notifier
 
     n.log.Debug("Sending notification", "type", notifier.GetType(), "uid", notifier.GetNotifierUID(), "isDefault", notifier.GetIsDefault())
-    metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc()
+    metrics.MAlertingNotificationSent.WithLabelValues(notifier.GetType()).Inc()
 
     err := notifier.Notify(evalContext)
 
@@ -43,6 +43,6 @@ func (arr *defaultRuleReader) fetch() []*Rule {
         }
     }
 
-    metrics.M_Alerting_Active_Alerts.Set(float64(len(res)))
+    metrics.MAlertingActiveAlerts.Set(float64(len(res)))
     return res
 }
@@ -44,7 +44,7 @@ func (handler *defaultResultHandler) handle(evalContext *EvalContext) error {
         annotationData.Set("noData", true)
     }
 
-    metrics.M_Alerting_Result_State.WithLabelValues(string(evalContext.Rule.State)).Inc()
+    metrics.MAlertingResultState.WithLabelValues(string(evalContext.Rule.State)).Inc()
     if evalContext.shouldUpdateAlertState() {
         handler.log.Info("New state change", "alertId", evalContext.Rule.ID, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState)
 
Some files were not shown because too many files have changed in this diff.