Compare commits

...

43 Commits

Author SHA1 Message Date
Leonard Gram
ba4a870632 release 6.3.0-beta3 2019-08-02 11:47:05 +02:00
kay delaney
ef9ec32c32 QueryEditors: Fixes flakey text edit mode toggle (#18335)
Closes #18037

(cherry picked from commit 9b5890cc62)
2019-08-02 11:47:05 +02:00
Oleg Gaidarenko
17235e4bd1 Auth: consistently return same basic auth errors (#18310)
* Auth: consistently return same basic auth errors

* Put repeated errors in consts and return only those consts as error strings

* Add tests for errors basic auth cases and moves tests to separate test-case.
  Also names test cases consistently

* Add more error logs and makes their messages consistent

* A bit of code style

* Add additional test helper

* Auth: do not expose even incorrect password

* Auth: address review comments

Use `Debug` for the cases when it's a user error

(cherry picked from commit 82661b9f69)
2019-08-02 11:47:05 +02:00
Leonard Gram
eb82b77782 cli: fix for recognizing when in dev mode. (#18334)
(cherry picked from commit c675449aa2)
2019-08-02 11:47:05 +02:00
Sofia Papagiannaki
3af8aa5c4f Fix failing end to end tests job for release (#18323)
Create and push the expected tag to the grafana-dev repository
and use it instead for running the end-to-end tests
for the release.

(cherry picked from commit c9f7e3059f)
2019-08-02 11:47:05 +02:00
Sofia Papagiannaki
5b588af73c Fix OAuth error due to SameSite cookie policy (#18332)
The `oauth_state` cookie used to be created with the SameSite value set
according to the `cookie_samesite` configuration.
However, due to a Safari bug, SameSite=None or an invalid SameSite value is
treated as Strict, which results in "missing saved state" OAuth login failures
because the cookie is not sent with the redirect requests to the OAuth
provider.
This commit always creates the `oauth_state` cookie with SameSite=Lax
to compensate for this.

(cherry picked from commit 69b7b8bb46)
2019-08-02 11:47:05 +02:00
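For illustration only, a hedged sketch of what a `Set-Cookie` value pinned to `SameSite=Lax` looks like; the function name and max age are invented, and the actual fix lives in Grafana's Go backend, not in TypeScript:

```typescript
// Illustrative sketch only (not the Grafana implementation): build a
// Set-Cookie value for the oauth_state cookie with SameSite pinned to Lax,
// regardless of any configured cookie_samesite value.
function oauthStateSetCookie(state: string, maxAgeSeconds = 600): string {
  return [
    `oauth_state=${encodeURIComponent(state)}`,
    `Max-Age=${maxAgeSeconds}`,
    'Path=/',
    'HttpOnly',
    'Secure',
    'SameSite=Lax', // always Lax, so Safari still sends the cookie on the redirect back from the IdP
  ].join('; ');
}
```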
kay delaney
5ec6eccfac [Shortcuts] Fixes shortcuts for moving time range backwards and forwards (#18305)
Closes #18159

(cherry picked from commit bcf28cb7a2)
2019-08-02 11:47:05 +02:00
Oleg Gaidarenko
237e0e8631 Build: fix use of env vars in parentheses execs (#18249)
* Build: fix use of env vars in parentheses execs

* Build: disable shellcheck rule SC2086 for a file

Follow up for 4b16cd6cc8

(cherry picked from commit f9a6414b92)
2019-08-02 11:47:05 +02:00
Oleg Gaidarenko
7a165febf3 Build: correct verify script (#18236)
Follow-up for 4b16cd6cc8

(cherry picked from commit ba46cf40e4)
2019-08-02 11:47:05 +02:00
Oleg Gaidarenko
fd7c38c62f Build: Introduce shellcheck (#18081)
* Build: introduce shellcheck

Fixes #16198

(cherry picked from commit 4b16cd6cc8)
2019-08-02 11:47:05 +02:00
Oleg Gaidarenko
00519f1105 Build: change definition of the vars in makefile (#18151)
Prompted by the @slim-bean talk (thanks!). With that definition the
`GO_FILES` var can be changed dynamically, which will be very helpful
in the grafana enterprise repo.

(cherry picked from commit 8202fa2fde)
2019-08-02 11:47:05 +02:00
Leonard Gram
c8740d98b0 release 6.3.0-beta2 2019-07-26 15:56:18 +02:00
gotjosh
54030deaf6 SAML: Only show SAML login button on Enterprise version (#18270)
SAML Authentication is an Enterprise only feature. This change enables
the SAML login button to only be shown on enterprise.

(cherry picked from commit 2f240a11ce)
2019-07-26 15:56:18 +02:00
Andrej Ocenas
880f6b63a1 Permissions: Show plugins in nav for non admin users but hide plugin configuration (#18234)
Allow non-admins to see the plugins list, but only with the readme. Any config tabs are hidden from the plugin page, and the plugin panel does not show action buttons (like Enable) for non-admins.

(cherry picked from commit 7f1214ac46)
2019-07-26 15:56:18 +02:00
Sofia Papagiannaki
d8ddfa5622 Run End-to-End tests for release builds (#18211)
(cherry picked from commit 13c41b781a)
2019-07-26 15:56:18 +02:00
Torkel Ödegaard
9af2ea1b53 DataLinks: Fixed interpolation of series name, fixes #18250 (#18251)
(cherry picked from commit 43543d5682)
2019-07-26 15:56:18 +02:00
kay delaney
d98ee66bd8 Timerange: Fixes a bug where custom time ranges didn't respect UTC (#18248)
Closes #18170
Closes #18178

(cherry picked from commit c01bbf2058)
2019-07-26 15:56:18 +02:00
Ryan McKinley
d3ecc963e6 Registry: add a reusable function registry (#17047)
(cherry picked from commit c194ae1ba5)
2019-07-26 15:56:18 +02:00
Ryan McKinley
0961ec9748 FieldDisplay: move threshold and mapping to Field (#17043)
(cherry picked from commit 14caa6a068)
2019-07-26 15:56:18 +02:00
Torkel Ödegaard
a2c7570c5c TimePicker: Increase max height of quick range dropdown (#18247)
(cherry picked from commit 996567ac42)
2019-07-26 15:56:18 +02:00
Torkel Ödegaard
9cbe0abb44 TimePicker: Fixed css issue caused by CSS Optimizer (#18244)
Fixes #18158

(cherry picked from commit 3145a1dd29)
2019-07-26 15:56:18 +02:00
Oleg Gaidarenko
2fecdaf6b4 LDAP: improve POSIX support (#18235)
* LDAP: improve POSIX support

* Correctly obtain DN attributes result

* Allow more flexibility with comparison mapping between POSIX group & user

* Add devenv for POSIX LDAP server

* Correct the docs

Fixes #18140

(cherry picked from commit 1e5fc76601)
2019-07-26 15:56:18 +02:00
Hassan Farid
a5f360e50e Replaced ubuntu:latest with ubuntu:18.04; specific image version to make grafana build images consistent (#18224)
(cherry picked from commit 711f62c813)
2019-07-26 15:56:18 +02:00
Markus Blaschke
99f2386bd9 remote_cache: Fix redis connstr parsing (#18204)
* Fix redis connstr parsing

* Don’t log the password

(cherry picked from commit 31547597d3)
2019-07-26 15:56:18 +02:00
Oleg Gaidarenko
bffa956068 Auth: do not expose disabled user disabled status (#18229)
Fixes #17947

(cherry picked from commit d16fd58bdb)
2019-07-26 15:56:18 +02:00
gotjosh
1b7a54c084 Docs: Update documentation with new SAML features (#18163)
* Update defaults.ini and sample.ini with the SAML assertion mapping
fields

* Document Grafana's ability to map ACS attributes while a Grafana user is created

(cherry picked from commit 87a794fe0a)
2019-07-26 15:56:18 +02:00
David
8128eb6c77 Graph: fix time label description for datalink suggestions (#18214)
(cherry picked from commit 9cd069f2c5)
2019-07-26 15:56:18 +02:00
Leonard Gram
10c4d6eccc PhantomJS: Fixes rendering on Debian Buster (#18162)
Closes #17588

(cherry picked from commit 2014549fa5)
2019-07-26 15:56:18 +02:00
Alexander Zobnin
8a062dde35 SAML: add auth provider label (#18197)
(cherry picked from commit ccf11fb7a2)
2019-07-26 15:56:18 +02:00
Ryan McKinley
041c1c3cb1 Plugins: fix previous commit, output "build" property in json
(cherry picked from commit 0ca1e90b2e)
2019-07-26 15:56:18 +02:00
Alexander Zobnin
aba6df2e09 SAML: add metrics (#18194)
(cherry picked from commit 214195441f)
2019-07-26 15:56:18 +02:00
Oleg Gaidarenko
5192504748 Metrics: use consistent naming for exported variables (#18134)
* Metrics: remove unused metrics

Metric `M_Grafana_Version` is not used anywhere, nor the mentioned
`M_Grafana_Build_Version`. Seems to be an artefact?

* Metrics: make the naming consistent

* Metrics: add comments to exported vars

* Metrics: use proper naming

Fixes #18110

(cherry picked from commit 75fa1f0207)
2019-07-26 15:56:18 +02:00
Ryan McKinley
8a0e2ac3a4 Plugins: add build info to plugin metadata (#18164)
(cherry picked from commit 3e0625ff3d)
2019-07-26 15:56:18 +02:00
Alexander Zobnin
373d082363 Docs: SAML idp_metadata_url option (#18181)
(cherry picked from commit e47546d529)
2019-07-26 15:56:18 +02:00
gotjosh
94765294a2 Docs: SAML (#18069)
* docs: Link to SAML docs and document configuration options

- Document configuration options `defaults.ini` and `sample.ini`
- Add the SAML documentation
- Link to the SAML documentation from "what's new in 6.3"

(cherry picked from commit d006f7c916)
2019-07-26 15:56:18 +02:00
Oleg Gaidarenko
1f482a5f0b SQLStore: use bool pointer instead of string (#18111)
(cherry picked from commit d9f01cb822)
2019-07-26 15:56:18 +02:00
Oleg Gaidarenko
ae04813045 SQLStore: allow to look for is_disabled flag (#18032)
* Add support for `is_disabled` to `CreateUser()`

* Add support for `is_disabled` to `SearchUsers()`
  Had to add it as a `string` type, not as `bool`, since if that property
  is omitted we would have to add it to the SQL request, which might be dangerous

* Restructure destructive tests and add more

(cherry picked from commit 8e0f091f14)
2019-07-26 15:56:18 +02:00
Oleg Gaidarenko
3d7c52dcd1 Metrics: add LDAP active sync summary metric (#18079)
(cherry picked from commit 09eb9a45ce)
2019-07-26 15:56:18 +02:00
Sofia Papagiannaki
81f8158010 API: Minor fix for nil pointer when trying to log error during creating new dashboard via the API (#18003)
* Minor fix for nil pointer when trying to log error

* Do not return error if a dashboard is created

Only log the failures

* Do not return error if the folder is created

Only log the failures

(cherry picked from commit f2ad3242be)
2019-07-26 15:56:18 +02:00
kay delaney
5de014a91d Explore/Loki: Display live tailed logs in correct order (#18031)
Closes #18027

(cherry picked from commit bf7fb67f73)
2019-07-26 15:56:18 +02:00
gotjosh
777311a759 Docs: Archive 6.2 and make 6.3 current version (#18257) 2019-07-24 19:30:03 +01:00
Marcus Efraimsson
829b5d0d40 Build: Update links to release notes and what's new article for v6.3.x 2019-07-11 16:34:05 +02:00
Sofia Papagiannaki
12878409db release 6.3.0-beta1 2019-07-10 15:58:07 +03:00
167 changed files with 2384 additions and 1298 deletions

View File

@@ -98,6 +98,34 @@ jobs:
path: public/e2e-test/screenShots/theOutput
destination: output-screenshots
end-to-end-test-release:
docker:
- image: circleci/node:10-browsers
- image: grafana/grafana-dev:$CIRCLE_TAG
steps:
- run: dockerize -wait tcp://127.0.0.1:3000 -timeout 120s
- checkout
- restore_cache:
key: dependency-cache-{{ checksum "yarn.lock" }}
- run:
name: yarn install
command: 'yarn install --pure-lockfile --no-progress'
no_output_timeout: 5m
- save_cache:
key: dependency-cache-{{ checksum "yarn.lock" }}
paths:
- node_modules
- run:
name: run end-to-end tests
command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests'
no_output_timeout: 5m
- store_artifacts:
path: public/e2e-test/screenShots/theTruth
destination: expected-screenshots
- store_artifacts:
path: public/e2e-test/screenShots/theOutput
destination: output-screenshots
codespell:
docker:
- image: circleci/python
@@ -127,6 +155,15 @@ jobs:
name: Lint Go
command: 'make lint-go'
shellcheck:
machine: true
working_directory: ~/go/src/github.com/grafana/grafana
steps:
- checkout
- run:
name: ShellCheck
command: 'make shellcheck'
test-frontend:
docker:
- image: circleci/node:10
@@ -635,6 +672,8 @@ workflows:
filters: *filter-only-master
- lint-go:
filters: *filter-only-master
- shellcheck:
filters: *filter-only-master
- test-frontend:
filters: *filter-only-master
- test-backend:
@@ -650,6 +689,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- build-oss-msi
@@ -662,6 +702,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-master
@@ -672,6 +713,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- build-all-enterprise
@@ -683,6 +725,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-master
@@ -704,6 +747,8 @@ workflows:
filters: *filter-only-release
- lint-go:
filters: *filter-only-release
- shellcheck:
filters: *filter-only-release
- test-frontend:
filters: *filter-only-release
- test-backend:
@@ -719,6 +764,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- build-oss-msi
@@ -731,6 +777,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-release
@@ -742,6 +789,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-release
@@ -752,9 +800,14 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-release
- end-to-end-test-release:
requires:
- grafana-docker-release
filters: *filter-only-release
build-branches-and-prs:
jobs:
@@ -771,6 +824,10 @@ workflows:
filters: *filter-not-release-or-master
- lint-go:
filters: *filter-not-release-or-master
- lint-go:
filters: *filter-not-release-or-master
- shellcheck:
filters: *filter-not-release-or-master
- test-frontend:
filters: *filter-not-release-or-master
- test-backend:
@@ -788,6 +845,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- cache-server-test
@@ -799,6 +857,7 @@ workflows:
- test-frontend
- codespell
- lint-go
- shellcheck
- mysql-integration-test
- postgres-integration-test
- cache-server-test

View File

@@ -33,7 +33,7 @@ ENV NODE_ENV production
RUN ./node_modules/.bin/grunt build
# Final container
FROM ubuntu:latest
FROM ubuntu:18.04
LABEL maintainer="Grafana team <hello@grafana.com>"

View File

@@ -2,8 +2,9 @@
.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go gosec revive golangci-lint go-vet test-go test-js test run clean devenv devenv-down revive-alerting
GO := GO111MODULE=on go
GO_FILES := ./pkg/...
GO = GO111MODULE=on go
GO_FILES ?= ./pkg/...
SH_FILES ?= $(shell find ./scripts -name *.sh)
all: deps build
@@ -111,6 +112,11 @@ go-vet:
lint-go: go-vet golangci-lint revive revive-alerting gosec
# with SC1071 disabled we ignore some TCL/Expect `/usr/bin/env expect` scripts
shellcheck: $(SH_FILES)
@docker run --rm -v "$$PWD:/mnt" koalaman/shellcheck:stable \
$(SH_FILES) -e SC1071
run: scripts/go/bin/bra
@scripts/go/bin/bra run

View File

@@ -379,17 +379,45 @@ send_client_credentials_via_post = false
#################################### SAML Auth ###########################
[auth.saml] # Enterprise only
# Defaults to false. If true, the feature is enabled
enabled = false
private_key =
private_key_path =
# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
certificate =
# Path to the public X.509 certificate. Used to sign requests to the IdP
certificate_path =
# Base64-encoded private key. Used to decrypt assertions from the IdP
private_key =
# Path to the private key. Used to decrypt assertions from the IdP
private_key_path =
# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata =
# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata_path =
# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
idp_metadata_url =
# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
max_issue_delay = 90s
# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
metadata_valid_duration = 48h
# Friendly name or name of the attribute within the SAML assertion to use as the user's name
assertion_attribute_name = displayName
# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
assertion_attribute_login = mail
# Friendly name or name of the attribute within the SAML assertion to use as the user's email
assertion_attribute_email = mail
#################################### Basic Auth ##########################
[auth.basic]
enabled = true

View File

@@ -334,18 +334,46 @@
;send_client_credentials_via_post = false
#################################### SAML Auth ###########################
;[auth.saml] # Enterprise only
[auth.saml] # Enterprise only
# Defaults to false. If true, the feature is enabled.
;enabled = false
;private_key =
;private_key_path =
# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
;certificate =
# Path to the public X.509 certificate. Used to sign requests to the IdP
;certificate_path =
# Base64-encoded private key. Used to decrypt assertions from the IdP
;private_key =
;# Path to the private key. Used to decrypt assertions from the IdP
;private_key_path =
# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
;idp_metadata =
# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
;idp_metadata_path =
# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
;idp_metadata_url =
# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds.
;max_issue_delay = 90s
# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours.
;metadata_valid_duration = 48h
# Friendly name or name of the attribute within the SAML assertion to use as the user's name
;assertion_attribute_name = displayName
# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
;assertion_attribute_login = mail
# Friendly name or name of the attribute within the SAML assertion to use as the user's email
;assertion_attribute_email = mail
#################################### Grafana.com Auth ####################
[auth.grafana_com]
;enabled = false

View File

@@ -28,38 +28,6 @@ search_filter = "(cn=%s)"
# An array of base dns to search through
search_base_dns = ["dc=grafana,dc=org"]
# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups.
# This is done by enabling group_search_filter below. You must also set member_of= "cn"
# in [servers.attributes] below.
# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN
# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of
# below in such a way that the user's recursive group membership is considered.
#
# Nested Groups + Active Directory (AD) Example:
#
# AD groups store the Distinguished Names (DNs) of members, so your filter must
# recursively search your groups for the authenticating user's DN. For example:
#
# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)"
# group_search_filter_user_attribute = "distinguishedName"
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
#
# [servers.attributes]
# ...
# member_of = "distinguishedName"
## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available)
# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter.
## Defaults to the value of username in [server.attributes]
## Valid options are any of your values in [servers.attributes]
## If you are using nested groups you probably want to set this and member_of in
## [servers.attributes] to "distinguishedName"
# group_search_filter_user_attribute = "distinguishedName"
## An array of the base DNs to search through for groups. Typically uses ou=groups
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
# Specify names of the ldap attributes your ldap uses
[servers.attributes]
name = "givenName"

View File

@@ -0,0 +1,57 @@
# To troubleshoot and get more log info enable ldap debug logging in grafana.ini
# [log]
# filters = ldap:debug
[[servers]]
# Ldap server host (specify multiple hosts space separated)
host = "127.0.0.1"
# Default port is 389 or 636 if use_ssl = true
port = 389
# Set to true if ldap server supports TLS
use_ssl = false
# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS)
start_tls = false
# set to true if you want to skip ssl cert validation
ssl_skip_verify = false
# set to the path to your root CA certificate or leave unset to use system defaults
# root_ca_cert = "/path/to/certificate.crt"
# Search user bind dn
bind_dn = "cn=admin,dc=grafana,dc=org"
# Search user bind password
# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
bind_password = 'grafana'
# An array of base dns to search through
search_base_dns = ["dc=grafana,dc=org"]
search_filter = "(uid=%s)"
group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
group_search_filter_user_attribute = "uid"
group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
[servers.attributes]
name = "givenName"
surname = "sn"
username = "cn"
member_of = "memberOf"
email = "email"
# Map ldap groups to grafana org roles
[[servers.group_mappings]]
group_dn = "cn=posix-admins,ou=groups,dc=grafana,dc=org"
org_role = "Admin"
grafana_admin = true
# The Grafana organization database id, optional, if left out the default org (id 1) will be used
# org_id = 1
[[servers.group_mappings]]
group_dn = "cn=editors,ou=groups,dc=grafana,dc=org"
org_role = "Editor"
[[servers.group_mappings]]
# If you want to match all (or no ldap groups) then you can use wildcard
group_dn = "*"
org_role = "Viewer"

View File

@@ -12,7 +12,7 @@ After adding ldif files to `prepopulate`:
## Enabling LDAP in Grafana
Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). To enable it in the .ini file to get Grafana to use this block:
If you want to use users/groups with `memberOf` support, copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). To enable it in the .ini file to get Grafana to use this block:
```ini
[auth.ldap]
@@ -21,6 +21,8 @@ config_file = conf/ldap_dev.toml
; allow_sign_up = true
```
Otherwise perform same actions for `ldap_dev_posix.toml` config.
## Groups & Users
admins
@@ -38,3 +40,11 @@ editors
ldap-editors
no groups
ldap-viewer
## Groups & Users (POSIX)
admins
ldap-posix-admin
no groups
ldap-posix

View File

@@ -78,3 +78,31 @@ objectClass: inetOrgPerson
objectClass: organizationalPerson
sn: ldap-torkel
cn: ldap-torkel
# admin for posix group (without support for memberOf attribute)
dn: uid=ldap-posix-admin,ou=users,dc=grafana,dc=org
mail: ldap-posix-admin@grafana.com
userPassword: grafana
objectclass: top
objectclass: posixAccount
objectclass: inetOrgPerson
homedirectory: /home/ldap-posix-admin
sn: ldap-posix-admin
cn: ldap-posix-admin
uid: ldap-posix-admin
uidnumber: 1
gidnumber: 1
# user for posix group (without support for memberOf attribute)
dn: uid=ldap-posix,ou=users,dc=grafana,dc=org
mail: ldap-posix@grafana.com
userPassword: grafana
objectclass: top
objectclass: posixAccount
objectclass: inetOrgPerson
homedirectory: /home/ldap-posix
sn: ldap-posix
cn: ldap-posix
uid: ldap-posix
uidnumber: 2
gidnumber: 2

View File

@@ -23,3 +23,21 @@ objectClass: groupOfNames
member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
member: cn=ldap-daniel,ou=users,dc=grafana,dc=org
member: cn=ldap-leo,ou=users,dc=grafana,dc=org
# -- POSIX --
# posix admin group (without support for memberOf attribute)
dn: cn=posix-admins,ou=groups,dc=grafana,dc=org
cn: admins
objectClass: top
objectClass: posixGroup
gidNumber: 1
memberUid: ldap-posix-admin
# posix group (without support for memberOf attribute)
dn: cn=posix,ou=groups,dc=grafana,dc=org
cn: viewers
objectClass: top
objectClass: posixGroup
gidNumber: 2
memberUid: ldap-posix

View File

@@ -126,8 +126,6 @@ group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
group_search_filter_user_attribute = "uid"
```
Also set `member_of = "dn"` in the `[servers.attributes]` section.
### Group Mappings
In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization and role. These will be synced every time the user logs in, with LDAP being

docs/sources/auth/saml.md (new file, 178 lines)
View File

@@ -0,0 +1,178 @@
+++
title = "SAML Authentication"
description = "Grafana SAML Authentication"
keywords = ["grafana", "saml", "documentation", "saml-auth"]
aliases = ["/auth/saml/"]
type = "docs"
[menu.docs]
name = "SAML"
parent = "authentication"
weight = 5
+++
# SAML Authentication
> SAML Authentication integration is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "enterprise" >}}).
> Only available in Grafana v6.3+
The SAML authentication integration allows your Grafana users to log in by
using an external SAML Identity Provider (IdP). To enable this, Grafana becomes
a Service Provider (SP) in the authentication flow, interacting with the IdP to
exchange user information.
## Supported SAML
The SAML single-sign-on (SSO) standard is varied and flexible. Our implementation contains the subset of features needed to provide a smooth authentication experience into Grafana.
> Should you encounter any problems with our implementation, please don't hesitate to contact us.
At the moment of writing, Grafana supports:
1. From the Service Provider (SP) to the Identity Provider (IdP)
- `HTTP-POST` binding
- `HTTP-Redirect` binding
2. From the Identity Provider (IdP) to the Service Provider (SP)
- `HTTP-POST` binding
3. In terms of security, we currently support signed and encrypted Assertions. However, signed or encrypted requests are not supported.
4. In terms of initiation, only SP-initiated requests are supported. There's no support for IdP-initiated requests.
## Set up SAML Authentication
To use the SAML integration, you need to enable SAML in the [main config file]({{< relref "installation/configuration.md" >}}).
```bash
[auth.saml]
# Defaults to false. If true, the feature is enabled
enabled = true
# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
certificate =
# Path to the public X.509 certificate. Used to sign requests to the IdP
certificate_path =
# Base64-encoded private key. Used to decrypt assertions from the IdP
private_key =
# Path to the private key. Used to decrypt assertions from the IdP
private_key_path =
# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata =
# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
idp_metadata_path =
# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
idp_metadata_url =
# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
max_issue_delay =
# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
metadata_valid_duration =
# Friendly name or name of the attribute within the SAML assertion to use as the user's name
assertion_attribute_name = displayName
# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
assertion_attribute_login = mail
# Friendly name or name of the attribute within the SAML assertion to use as the user's email
assertion_attribute_email = mail
```
Important to note:
- like any other Grafana configuration, use of [environment variables for these options is supported]({{< relref "installation/configuration.md#using-environment-variables" >}})
- only one form of configuration option is required. Using multiple forms, e.g. both `certificate` and `certificate_path` will result in an error
## Grafana Configuration
A working configuration example looks like this:
```bash
[auth.saml]
enabled = true
certificate_path = "/path/to/certificate.cert"
private_key_path = "/path/to/private_key.pem"
idp_metadata_path = "/my/metadata.xml"
max_issue_delay = 90s
metadata_valid_duration = 48h
assertion_attribute_name = displayName
assertion_attribute_login = mail
assertion_attribute_email = mail
```
And here is a comprehensive list of the options:
| Setting | Required | Description | Default |
| ----------------------------------------------------------- | -------- | -------------------------------------------------------------------------------------------------- | ------------- |
| `enabled` | No | Whether SAML authentication is allowed | `false` |
| `certificate` or `certificate_path` | Yes | Base64-encoded string or Path for the SP X.509 certificate | |
| `private_key` or `private_key_path` | Yes | Base64-encoded string or Path for the SP private key | |
| `idp_metadata` or `idp_metadata_path` or `idp_metadata_url` | Yes | Base64-encoded string, Path or URL for the IdP SAML metadata XML | |
| `max_issue_delay` | No | Duration, since the IdP issued a response and the SP is allowed to process it | `90s` |
| `metadata_valid_duration` | No | Duration, for how long the SP's metadata should be valid | `48h` |
| `assertion_attribute_name` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's name | `displayName` |
| `assertion_attribute_login` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's login handle | `mail` |
| `assertion_attribute_email` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's email | `mail` |
### Cert and Private Key
The SAML SSO standard uses asymmetric encryption to exchange information between the SP (Grafana) and the IdP. To perform such encryption, you need a public part and a private part. In this case, the X.509 certificate provides the public part, while the private key provides the private part.
Grafana supports two ways of specifying both the `certificate` and `private_key`. Without a suffix (e.g. `certificate=`), the configuration assumes you've supplied the base64-encoded file contents. However, if specified with the `_path` suffix (e.g. `certificate_path=`) Grafana will treat it as a file path and attempt to read the file from the file system.
### IdP Metadata
Expanding on the above, we'll also need the public part from our IdP for message verification. The SAML IdP metadata XML tells us where and how we should exchange the user information.
Currently, we support three ways of specifying the IdP metadata. Without a suffix, `idp_metadata=`, Grafana assumes base64-encoded XML file contents; with the `_path` suffix it assumes a file path and attempts to read the file from the file system; and with the `_url` suffix it assumes a URL and attempts to load the metadata from the given location.
### Max Issue Delay
To protect against SAML response replay attacks and internal clock skew between the SP (Grafana) and the IdP, you can set a maximum amount of time between the IdP issuing a response and the SP (Grafana) processing it.
The configuration option is specified as a duration, e.g. `max_issue_delay = 90s` or `max_issue_delay = 1h`.
### Metadata valid duration
As an SP, our metadata is likely to expire at some point, e.g. due to a certificate rotation or change of location binding. Grafana allows you to specify for how long the metadata should be valid. Leveraging the standard's `validUntil` field, you can tell consumers until when your metadata is going to be valid. The duration is computed by adding the duration to the current time.
The configuration option is specified as a duration e.g. `metadata_valid_duration = 48h`
## Identity Provider (IdP) registration
For the SAML integration to work correctly, you need to make the IdP aware of the SP.
The integration provides two key endpoints as part of Grafana:
- The `/saml/metadata` endpoint, which contains the SP's metadata. You can either download and upload it manually, or make the IdP request it directly from the endpoint. Some providers name it Identifier or Entity ID.
- The `/saml/acs` endpoint, which is intended to receive the ACS (Assertion Consumer Service) callback. Some providers name it SSO URL or Reply URL.
## Assertion mapping
During the SAML SSO authentication flow, we receive the ACS (Assertion Consumer Service) callback. The callback contains all the relevant information of the user under authentication embedded in the SAML response. Grafana parses the response to create (or update) the user within its internal database.
For Grafana to map the user information, it looks at the individual attributes within the assertion. You can think of these attributes as Key/Value pairs (although, they contain more information than that).
Grafana provides configuration options that let you modify which keys to look at for these values. The data we need to create the user in Grafana is Name, Login handle, and email.
An example is `assertion_attribute_name = "givenName"`, where Grafana looks within the assertion for an attribute with a friendly name or name of `givenName`. Both the friendly name (e.g. `givenName`) and the name (e.g. `urn:oid:2.5.4.42`) can be used interchangeably as the value for the configuration option.
## Troubleshooting
To troubleshoot and get more log info enable saml debug logging in the [main config file]({{< relref "installation/configuration.md" >}}).
```bash
[log]
filters = saml.auth:debug
```

View File

@@ -29,6 +29,10 @@ With Grafana Enterprise you can set up synchronization between LDAP Groups and T
Datasource permissions allow you to restrict query access to only specific Teams and Users. [Learn More]({{< relref "permissions/datasource_permissions.md" >}}).
### SAML Authentication
Enables your Grafana Enterprise users to authenticate with SAML. [Learn More]({{< relref "auth/saml.md" >}}).
### Premium Plugins
With a Grafana Enterprise license you will get access to premium plugins, including:

View File

@@ -130,6 +130,8 @@ belonging to an LDAP group that gives them access to Grafana.
Built-in support for SAML is now available in Grafana Enterprise.
[See docs]({{< relref "auth/saml.md" >}})
### Team Sync for GitHub OAuth
When setting up OAuth with GitHub it's now possible to sync GitHub teams with Teams in Grafana.

View File

@@ -1,5 +1,6 @@
[
{ "version": "v6.2", "path": "/", "archived": false, "current": true },
{ "version": "v6.3", "path": "/", "archived": false, "current": true },
{ "version": "v6.2", "path": "/v6.2", "archived": true },
{ "version": "v6.1", "path": "/v6.1", "archived": true },
{ "version": "v6.0", "path": "/v6.0", "archived": true },
{ "version": "v5.4", "path": "/v5.4", "archived": true },

View File

@@ -5,7 +5,7 @@
"company": "Grafana Labs"
},
"name": "grafana",
"version": "6.3.0-pre",
"version": "6.3.0-beta3",
"repository": {
"type": "git",
"url": "http://github.com/grafana/grafana.git"

View File

@@ -1,3 +1,6 @@
import { Threshold } from './threshold';
import { ValueMapping } from './valueMapping';
export enum LoadingState {
NotStarted = 'NotStarted',
Loading = 'Loading',
@@ -49,6 +52,12 @@ export interface Field {
decimals?: number | null; // Significant digits (for display)
min?: number | null;
max?: number | null;
// Convert input values into a display value
mappings?: ValueMapping[];
// Must be sorted by 'value', first value is always -Infinity
thresholds?: Threshold[];
}
export interface Labels {

View File

@@ -2,6 +2,7 @@ export * from './data';
export * from './dataLink';
export * from './logs';
export * from './navModel';
export * from './select';
export * from './time';
export * from './threshold';
export * from './utils';

View File

@@ -0,0 +1,10 @@
/**
* Used in select elements
*/
export interface SelectableValue<T = any> {
label?: string;
value?: T;
imgUrl?: string;
description?: string;
[key: string]: any;
}
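A minimal usage sketch of `SelectableValue`; the labels and values below are invented, and the import path mirrors how components later in this diff consume it from `@grafana/data`:

```typescript
import { SelectableValue } from '@grafana/data';

// Invented example options: refresh intervals mapped to milliseconds.
const intervalOptions: Array<SelectableValue<number>> = [
  { label: '5s', value: 5000 },
  { label: '1m', value: 60000, description: 'Refresh every minute' },
];

// Look up the currently selected option by value.
const current = intervalOptions.find(o => o.value === 5000);
```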

View File

@@ -1,5 +1,4 @@
export interface Threshold {
index: number;
value: number;
color: string;
}

View File

@@ -5,6 +5,18 @@ import { TimeZone } from '../types';
const units: DurationUnit[] = ['y', 'M', 'w', 'd', 'h', 'm', 's'];
export function isMathString(text: string | DateTime | Date): boolean {
if (!text) {
return false;
}
if (typeof text === 'string' && (text.substring(0, 3) === 'now' || text.includes('||'))) {
return true;
} else {
return false;
}
}
/**
* Parses different types input to a moment instance. There is a specific formatting language that can be used
* if text arg is string. See unit tests for examples.

View File

@@ -1,6 +1,14 @@
import { getFieldReducers, ReducerID, reduceField } from './index';
import { fieldReducers, ReducerID, reduceField } from './fieldReducer';
import _ from 'lodash';
import { DataFrame } from '../types/data';
/**
* Run a reducer and get back the value
*/
function reduce(series: DataFrame, fieldIndex: number, id: string): any {
return reduceField({ series, fieldIndex, reducers: [id] })[id];
}
describe('Stats Calculators', () => {
const basicTable = {
@@ -9,29 +17,16 @@ describe('Stats Calculators', () => {
};
it('should load all standard stats', () => {
const names = [
ReducerID.sum,
ReducerID.max,
ReducerID.min,
ReducerID.logmin,
ReducerID.mean,
ReducerID.last,
ReducerID.first,
ReducerID.count,
ReducerID.range,
ReducerID.diff,
ReducerID.step,
ReducerID.delta,
// ReducerID.allIsZero,
// ReducerID.allIsNull,
];
const stats = getFieldReducers(names);
expect(stats.length).toBe(names.length);
for (const id of Object.keys(ReducerID)) {
const reducer = fieldReducers.getIfExists(id);
const found = reducer ? reducer.id : '<NOT FOUND>';
expect(found).toEqual(id);
}
});
it('should fail to load unknown stats', () => {
const names = ['not a stat', ReducerID.max, ReducerID.min, 'also not a stat'];
const stats = getFieldReducers(names);
const stats = fieldReducers.list(names);
expect(stats.length).toBe(2);
const found = stats.map(v => v.id);
@@ -92,6 +87,34 @@ describe('Stats Calculators', () => {
expect(stats.delta).toEqual(300);
});
it('consistenly check allIsNull/allIsZero', () => {
const empty = {
fields: [{ name: 'A' }],
rows: [],
};
const allNull = ({
fields: [{ name: 'A' }],
rows: [null, null, null, null],
} as unknown) as DataFrame;
const allNull2 = {
fields: [{ name: 'A' }],
rows: [[null], [null], [null], [null]],
};
const allZero = {
fields: [{ name: 'A' }],
rows: [[0], [0], [0], [0]],
};
expect(reduce(empty, 0, ReducerID.allIsNull)).toEqual(true);
expect(reduce(allNull, 0, ReducerID.allIsNull)).toEqual(true);
expect(reduce(allNull2, 0, ReducerID.allIsNull)).toEqual(true);
expect(reduce(empty, 0, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allNull, 0, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allNull2, 0, ReducerID.allIsZero)).toEqual(false);
expect(reduce(allZero, 0, ReducerID.allIsZero)).toEqual(true);
});
it('consistent results for first/last value with null', () => {
const info = [
{

View File

@@ -1,7 +1,8 @@
// Libraries
import isNumber from 'lodash/isNumber';
import { DataFrame, NullValueMode } from '../types/index';
import { DataFrame, NullValueMode } from '../types';
import { Registry, RegistryItem } from './registry';
export enum ReducerID {
sum = 'sum',
@@ -34,38 +35,13 @@ export interface FieldCalcs {
// Internal function
type FieldReducer = (data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;
export interface FieldReducerInfo {
id: string;
name: string;
description: string;
alias?: string; // optional secondary key. 'avg' vs 'mean', 'total' vs 'sum'
export interface FieldReducerInfo extends RegistryItem {
// Internal details
emptyInputResult?: any; // typically null, but some things like 'count' & 'sum' should be zero
standard: boolean; // The most common stats can all be calculated in a single pass
reduce?: FieldReducer;
}
/**
* @param ids list of stat names or null to get all of them
*/
export function getFieldReducers(ids?: string[]): FieldReducerInfo[] {
if (ids === null || ids === undefined) {
if (!hasBuiltIndex) {
getById(ReducerID.mean);
}
return listOfStats;
}
return ids.reduce((list, id) => {
const stat = getById(id);
if (stat) {
list.push(stat);
}
return list;
}, new Array<FieldReducerInfo>());
}
interface ReduceFieldOptions {
series: DataFrame;
fieldIndex: number;
@@ -83,7 +59,7 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
return {};
}
const queue = getFieldReducers(reducers);
const queue = fieldReducers.list(reducers);
// Return early for empty series
// This lets the concrete implementations assume at least one row
@@ -122,122 +98,107 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
//
// ------------------------------------------------------------------------------
// private registry of all stats
interface TableStatIndex {
[id: string]: FieldReducerInfo;
}
const listOfStats: FieldReducerInfo[] = [];
const index: TableStatIndex = {};
let hasBuiltIndex = false;
function getById(id: string): FieldReducerInfo | undefined {
if (!hasBuiltIndex) {
[
{
id: ReducerID.lastNotNull,
name: 'Last (not null)',
description: 'Last non-null value',
standard: true,
alias: 'current',
reduce: calculateLastNotNull,
},
{
id: ReducerID.last,
name: 'Last',
description: 'Last Value',
standard: true,
reduce: calculateLast,
},
{ id: ReducerID.first, name: 'First', description: 'First Value', standard: true, reduce: calculateFirst },
{
id: ReducerID.firstNotNull,
name: 'First (not null)',
description: 'First non-null value',
standard: true,
reduce: calculateFirstNotNull,
},
{ id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
{ id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
{ id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, alias: 'avg' },
{
id: ReducerID.sum,
name: 'Total',
description: 'The sum of all values',
emptyInputResult: 0,
standard: true,
alias: 'total',
},
{
id: ReducerID.count,
name: 'Count',
description: 'Number of values in response',
emptyInputResult: 0,
standard: true,
},
{
id: ReducerID.range,
name: 'Range',
description: 'Difference between minimum and maximum values',
standard: true,
},
{
id: ReducerID.delta,
name: 'Delta',
description: 'Cumulative change in value',
standard: true,
},
{
id: ReducerID.step,
name: 'Step',
description: 'Minimum interval between values',
standard: true,
},
{
id: ReducerID.diff,
name: 'Difference',
description: 'Difference between first and last values',
standard: true,
},
{
id: ReducerID.logmin,
name: 'Min (above zero)',
description: 'Used for log min scale',
standard: true,
},
{
id: ReducerID.changeCount,
name: 'Change Count',
description: 'Number of times the value changes',
standard: false,
reduce: calculateChangeCount,
},
{
id: ReducerID.distinctCount,
name: 'Distinct Count',
description: 'Number of distinct values',
standard: false,
reduce: calculateDistinctCount,
},
].forEach(info => {
const { id, alias } = info;
if (index.hasOwnProperty(id)) {
console.warn('Duplicate Stat', id, info, index);
}
index[id] = info;
if (alias) {
if (index.hasOwnProperty(alias)) {
console.warn('Duplicate Stat (alias)', alias, info, index);
}
index[alias] = info;
}
listOfStats.push(info);
});
hasBuiltIndex = true;
}
return index[id];
}
export const fieldReducers = new Registry<FieldReducerInfo>(() => [
{
id: ReducerID.lastNotNull,
name: 'Last (not null)',
description: 'Last non-null value',
standard: true,
alias: 'current',
reduce: calculateLastNotNull,
},
{
id: ReducerID.last,
name: 'Last',
description: 'Last Value',
standard: true,
reduce: calculateLast,
},
{ id: ReducerID.first, name: 'First', description: 'First Value', standard: true, reduce: calculateFirst },
{
id: ReducerID.firstNotNull,
name: 'First (not null)',
description: 'First non-null value',
standard: true,
reduce: calculateFirstNotNull,
},
{ id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
{ id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
{ id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, alias: 'avg' },
{
id: ReducerID.sum,
name: 'Total',
description: 'The sum of all values',
emptyInputResult: 0,
standard: true,
alias: 'total',
},
{
id: ReducerID.count,
name: 'Count',
description: 'Number of values in response',
emptyInputResult: 0,
standard: true,
},
{
id: ReducerID.range,
name: 'Range',
description: 'Difference between minimum and maximum values',
standard: true,
},
{
id: ReducerID.delta,
name: 'Delta',
description: 'Cumulative change in value',
standard: true,
},
{
id: ReducerID.step,
name: 'Step',
description: 'Minimum interval between values',
standard: true,
},
{
id: ReducerID.diff,
name: 'Difference',
description: 'Difference between first and last values',
standard: true,
},
{
id: ReducerID.logmin,
name: 'Min (above zero)',
description: 'Used for log min scale',
standard: true,
},
{
id: ReducerID.allIsZero,
name: 'All Zeros',
description: 'All values are zero',
emptyInputResult: false,
standard: true,
},
{
id: ReducerID.allIsNull,
name: 'All Nulls',
description: 'All values are null',
emptyInputResult: true,
standard: true,
},
{
id: ReducerID.changeCount,
name: 'Change Count',
description: 'Number of times the value changes',
standard: false,
reduce: calculateChangeCount,
},
{
id: ReducerID.distinctCount,
name: 'Distinct Count',
description: 'Number of distinct values',
standard: false,
reduce: calculateDistinctCount,
},
]);
function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const calcs = {
@@ -253,7 +214,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
count: 0,
nonNullCount: 0,
allIsNull: true,
allIsZero: false,
allIsZero: true,
range: null,
diff: null,
delta: 0,
@@ -264,7 +225,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
} as FieldCalcs;
for (let i = 0; i < data.rows.length; i++) {
let currentValue = data.rows[i][fieldIndex];
let currentValue = data.rows[i] ? data.rows[i][fieldIndex] : null;
if (i === 0) {
calcs.first = currentValue;
}
@@ -350,6 +311,10 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
calcs.mean = calcs.sum! / calcs.nonNullCount;
}
if (calcs.allIsNull) {
calcs.allIsZero = false;
}
if (calcs.max !== null && calcs.min !== null) {
calcs.range = calcs.max - calcs.min;
}
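A small sketch of calling the registry-backed reducer API above, using an invented frame and the same imports as the test file earlier in this diff:

```typescript
import { fieldReducers, ReducerID, reduceField } from './fieldReducer';
import { DataFrame } from '../types/data';

// Invented single-field frame; rows hold one cell per field, as in the tests above.
const frame = ({
  fields: [{ name: 'A' }],
  rows: [[1], [3], [null], [7]],
} as unknown) as DataFrame;

// Reducers are looked up through the registry and run in a single pass.
const calcs = reduceField({ series: frame, fieldIndex: 0, reducers: [ReducerID.mean, ReducerID.allIsNull] });
// calcs.mean and calcs.allIsNull are now populated for this field.

const meanInfo = fieldReducers.get(ReducerID.mean); // throws for unknown ids
```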

View File

@@ -1,4 +1,5 @@
export * from './string';
export * from './registry';
export * from './markdown';
export * from './processDataFrame';
export * from './csv';

View File

@@ -0,0 +1,134 @@
import { SelectableValue } from '../types/select';
export interface RegistryItem {
id: string; // Unique Key -- saved in configs
name: string; // Display Name, can change without breaking configs
description: string;
aliasIds?: string[]; // when the ID changes, we may want backwards compatibility ('current' => 'last')
/**
* Some extensions should not be user selectable
* like: 'all' and 'any' matchers;
*/
excludeFromPicker?: boolean;
}
interface RegistrySelectInfo {
options: Array<SelectableValue<string>>;
current: Array<SelectableValue<string>>;
}
export class Registry<T extends RegistryItem> {
private ordered: T[] = [];
private byId = new Map<string, T>();
private initalized = false;
constructor(private init?: () => T[]) {}
getIfExists(id: string | undefined): T | undefined {
if (!this.initalized) {
if (this.init) {
for (const ext of this.init()) {
this.register(ext);
}
}
this.sort();
this.initalized = true;
}
if (id) {
return this.byId.get(id);
}
return undefined;
}
get(id: string): T {
const v = this.getIfExists(id);
if (!v) {
throw new Error('Undefined: ' + id);
}
return v;
}
selectOptions(current?: string[], filter?: (ext: T) => boolean): RegistrySelectInfo {
if (!this.initalized) {
this.getIfExists('xxx'); // will trigger init
}
const select = {
options: [],
current: [],
} as RegistrySelectInfo;
const currentIds: any = {};
if (current) {
for (const id of current) {
currentIds[id] = true;
}
}
for (const ext of this.ordered) {
if (ext.excludeFromPicker) {
continue;
}
if (filter && !filter(ext)) {
continue;
}
const option = {
value: ext.id,
label: ext.name,
description: ext.description,
};
select.options.push(option);
if (currentIds[ext.id]) {
select.current.push(option);
}
}
return select;
}
/**
* Return a list of values by ID, or all values if not specified
*/
list(ids?: any[]): T[] {
if (ids) {
const found: T[] = [];
for (const id of ids) {
const v = this.getIfExists(id);
if (v) {
found.push(v);
}
}
return found;
}
if (!this.initalized) {
this.getIfExists('xxx'); // will trigger init
}
return [...this.ordered]; // copy of everything just in case
}
register(ext: T) {
if (this.byId.has(ext.id)) {
throw new Error('Duplicate Key:' + ext.id);
}
this.byId.set(ext.id, ext);
this.ordered.push(ext);
if (ext.aliasIds) {
for (const alias of ext.aliasIds) {
if (!this.byId.has(alias)) {
this.byId.set(alias, ext);
}
}
}
if (this.initalized) {
this.sort();
}
}
private sort() {
// TODO sort the list
}
}
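A brief sketch of using `Registry` for something other than the built-in reducers; the `ExampleItem` type and the item ids below are invented for illustration:

```typescript
import { Registry, RegistryItem } from './registry';

interface ExampleItem extends RegistryItem {
  run: (input: number) => number;
}

// Invented items, registered lazily via the init function.
const examples = new Registry<ExampleItem>(() => [
  { id: 'double', name: 'Double', description: 'Multiply by two', run: v => v * 2 },
  { id: 'negate', name: 'Negate', description: 'Flip the sign', aliasIds: ['invert'], run: v => -v },
]);

examples.get('double').run(21); // 42; get() throws for unknown ids
examples.getIfExists('invert'); // resolves to the 'negate' item via aliasIds
examples.list(['double', 'unknown']); // only known ids are returned
const { options } = examples.selectOptions(['double']); // options/current for select pickers
```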

View File

@@ -1,23 +1,22 @@
import { Threshold } from '../types';
export function getThresholdForValue(
thresholds: Threshold[],
value: number | null | string | undefined
): Threshold | null {
if (thresholds.length === 1) {
return thresholds[0];
export function getActiveThreshold(value: number, thresholds: Threshold[]): Threshold {
let active = thresholds[0];
for (const threshold of thresholds) {
if (value >= threshold.value) {
active = threshold;
} else {
break;
}
}
const atThreshold = thresholds.filter(threshold => (value as number) === threshold.value)[0];
if (atThreshold) {
return atThreshold;
}
const belowThreshold = thresholds.filter(threshold => (value as number) > threshold.value);
if (belowThreshold.length > 0) {
const nearestThreshold = belowThreshold.sort((t1: Threshold, t2: Threshold) => t2.value - t1.value)[0];
return nearestThreshold;
}
return null;
return active;
}
/**
* Sorts the thresholds
*/
export function sortThresholds(thresholds: Threshold[]) {
return thresholds.sort((t1, t2) => {
return t1.value - t2.value;
});
}
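To make the new lookup behaviour concrete, a short sketch with invented threshold values, importing `getActiveThreshold` from `@grafana/data` as the `BarGauge` diff below does:

```typescript
import { Threshold, getActiveThreshold } from '@grafana/data';

// Invented thresholds, sorted by value; the first entry always covers -Infinity.
const thresholds: Threshold[] = [
  { value: -Infinity, color: 'green' },
  { value: 70, color: 'orange' },
  { value: 90, color: 'red' },
];

getActiveThreshold(50, thresholds).color; // 'green'
getActiveThreshold(70, thresholds).color; // 'orange' (boundary values activate their own threshold)
getActiveThreshold(95, thresholds).color; // 'red'
```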

View File

@@ -49,9 +49,9 @@ function addBarGaugeStory(name: string, overrides: Partial<Props>) {
orientation: VizOrientation.Vertical,
displayMode: 'basic',
thresholds: [
{ index: 0, value: -Infinity, color: 'green' },
{ index: 1, value: threshold1Value, color: threshold1Color },
{ index: 1, value: threshold2Value, color: threshold2Color },
{ value: -Infinity, color: 'green' },
{ value: threshold1Value, color: threshold1Color },
{ value: threshold2Value, color: threshold2Color },
],
};

View File

@@ -25,11 +25,7 @@ function getProps(propOverrides?: Partial<Props>): Props {
maxValue: 100,
minValue: 0,
displayMode: 'basic',
thresholds: [
{ index: 0, value: -Infinity, color: 'green' },
{ index: 1, value: 70, color: 'orange' },
{ index: 2, value: 90, color: 'red' },
],
thresholds: [{ value: -Infinity, color: 'green' }, { value: 70, color: 'orange' }, { value: 90, color: 'red' }],
height: 300,
width: 300,
value: {

View File

@@ -7,7 +7,7 @@ import { getColorFromHexRgbOrName } from '../../utils';
// Types
import { DisplayValue, Themeable, VizOrientation } from '../../types';
import { Threshold, TimeSeriesValue, getThresholdForValue } from '@grafana/data';
import { Threshold, TimeSeriesValue, getActiveThreshold } from '@grafana/data';
const MIN_VALUE_HEIGHT = 18;
const MAX_VALUE_HEIGHT = 50;
@@ -87,8 +87,14 @@ export class BarGauge extends PureComponent<Props> {
getCellColor(positionValue: TimeSeriesValue): CellColors {
const { thresholds, theme, value } = this.props;
const activeThreshold = getThresholdForValue(thresholds, positionValue);
if (positionValue === null) {
return {
background: 'gray',
border: 'gray',
};
}
const activeThreshold = getActiveThreshold(positionValue, thresholds);
if (activeThreshold !== null) {
const color = getColorFromHexRgbOrName(activeThreshold.color, theme.type);
@@ -474,7 +480,7 @@ export function getBarGradient(props: Props, maxSize: number): string {
export function getValueColor(props: Props): string {
const { thresholds, theme, value } = props;
const activeThreshold = getThresholdForValue(thresholds, value.numeric);
const activeThreshold = getActiveThreshold(value.numeric, thresholds);
if (activeThreshold !== null) {
return getColorFromHexRgbOrName(activeThreshold.color, theme.type);

View File

@@ -14,7 +14,7 @@ const setup = (propOverrides?: object) => {
minValue: 0,
showThresholdMarkers: true,
showThresholdLabels: false,
thresholds: [{ index: 0, value: -Infinity, color: '#7EB26D' }],
thresholds: [{ value: -Infinity, color: '#7EB26D' }],
height: 300,
width: 300,
value: {
@@ -48,9 +48,9 @@ describe('Get thresholds formatted', () => {
it('should get the correct formatted values when thresholds are added', () => {
const { instance } = setup({
thresholds: [
{ index: 0, value: -Infinity, color: '#7EB26D' },
{ index: 1, value: 50, color: '#EAB839' },
{ index: 2, value: 75, color: '#6ED0E0' },
{ value: -Infinity, color: '#7EB26D' },
{ value: 50, color: '#EAB839' },
{ value: 75, color: '#6ED0E0' },
],
});

View File

@@ -43,12 +43,12 @@ export class Gauge extends PureComponent<Props> {
const lastThreshold = thresholds[thresholds.length - 1];
return [
...thresholds.map(threshold => {
if (threshold.index === 0) {
...thresholds.map((threshold, index) => {
if (index === 0) {
return { value: minValue, color: getColorFromHexRgbOrName(threshold.color, theme.type) };
}
const previousThreshold = thresholds[threshold.index - 1];
const previousThreshold = thresholds[index - 1];
return { value: threshold.value, color: getColorFromHexRgbOrName(previousThreshold.color, theme.type) };
}),
{ value: maxValue, color: getColorFromHexRgbOrName(lastThreshold.color, theme.type) },

View File

@@ -1,6 +1,6 @@
import React, { PureComponent } from 'react';
import classNames from 'classnames';
import { SelectOptionItem } from '../Select/Select';
import { SelectableValue } from '@grafana/data';
import { Tooltip } from '../Tooltip/Tooltip';
import { ButtonSelect } from '../Select/ButtonSelect';
@@ -23,7 +23,7 @@ export class RefreshPicker extends PureComponent<Props> {
super(props);
}
intervalsToOptions = (intervals: string[] | undefined): Array<SelectOptionItem<string>> => {
intervalsToOptions = (intervals: string[] | undefined): Array<SelectableValue<string>> => {
const intervalsOrDefault = intervals || defaultIntervals;
const options = intervalsOrDefault
.filter(str => str !== '')
@@ -37,7 +37,7 @@ export class RefreshPicker extends PureComponent<Props> {
return options;
};
onChangeSelect = (item: SelectOptionItem<string>) => {
onChangeSelect = (item: SelectableValue<string>) => {
const { onIntervalChanged } = this.props;
if (onIntervalChanged) {
// @ts-ignore

View File

@@ -4,7 +4,7 @@ import { action } from '@storybook/addon-actions';
import { withKnobs, object, text } from '@storybook/addon-knobs';
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
import { UseState } from '../../utils/storybook/UseState';
import { SelectOptionItem } from './Select';
import { SelectableValue } from '@grafana/data';
import { ButtonSelect } from './ButtonSelect';
const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
@@ -12,9 +12,9 @@ const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
ButtonSelectStories.addDecorator(withCenteredStory).addDecorator(withKnobs);
ButtonSelectStories.add('default', () => {
const intialState: SelectOptionItem<string> = { label: 'A label', value: 'A value' };
const value = object<SelectOptionItem<string>>('Selected Value:', intialState);
const options = object<Array<SelectOptionItem<string>>>('Options:', [
const intialState: SelectableValue<string> = { label: 'A label', value: 'A value' };
const value = object<SelectableValue<string>>('Selected Value:', intialState);
const options = object<Array<SelectableValue<string>>>('Options:', [
intialState,
{ label: 'Another label', value: 'Another value' },
]);

View File

@@ -1,6 +1,7 @@
import React, { PureComponent, ReactElement } from 'react';
import Select, { SelectOptionItem } from './Select';
import Select from './Select';
import { PopperContent } from '../Tooltip/PopperController';
import { SelectableValue } from '@grafana/data';
interface ButtonComponentProps {
label: ReactElement | string | undefined;
@@ -30,13 +31,13 @@ const ButtonComponent = (buttonProps: ButtonComponentProps) => (props: any) => {
export interface Props<T> {
className: string | undefined;
options: Array<SelectOptionItem<T>>;
value?: SelectOptionItem<T>;
options: Array<SelectableValue<T>>;
value?: SelectableValue<T>;
label?: ReactElement | string;
iconClass?: string;
components?: any;
maxMenuHeight?: number;
onChange: (item: SelectOptionItem<T>) => void;
onChange: (item: SelectableValue<T>) => void;
tooltipContent?: PopperContent<any>;
isMenuOpen?: boolean;
onOpenMenu?: () => void;
@@ -45,7 +46,7 @@ export interface Props<T> {
}
export class ButtonSelect<T> extends PureComponent<Props<T>> {
onChange = (item: SelectOptionItem<T>) => {
onChange = (item: SelectableValue<T>) => {
const { onChange } = this.props;
onChange(item);
};

View File

@@ -19,23 +19,16 @@ import resetSelectStyles from './resetSelectStyles';
import { CustomScrollbar } from '../CustomScrollbar/CustomScrollbar';
import { PopperContent } from '../Tooltip/PopperController';
import { Tooltip } from '../Tooltip/Tooltip';
export interface SelectOptionItem<T> {
label?: string;
value?: T;
imgUrl?: string;
description?: string;
[key: string]: any;
}
import { SelectableValue } from '@grafana/data';
export interface CommonProps<T> {
defaultValue?: any;
getOptionLabel?: (item: SelectOptionItem<T>) => string;
getOptionValue?: (item: SelectOptionItem<T>) => string;
onChange: (item: SelectOptionItem<T>) => {} | void;
getOptionLabel?: (item: SelectableValue<T>) => string;
getOptionValue?: (item: SelectableValue<T>) => string;
onChange: (item: SelectableValue<T>) => {} | void;
placeholder?: string;
width?: number;
value?: SelectOptionItem<T>;
value?: SelectableValue<T>;
className?: string;
isDisabled?: boolean;
isSearchable?: boolean;
@@ -57,12 +50,12 @@ export interface CommonProps<T> {
}
export interface SelectProps<T> extends CommonProps<T> {
options: Array<SelectOptionItem<T>>;
options: Array<SelectableValue<T>>;
}
interface AsyncProps<T> extends CommonProps<T> {
defaultOptions: boolean;
loadOptions: (query: string) => Promise<Array<SelectOptionItem<T>>>;
loadOptions: (query: string) => Promise<Array<SelectableValue<T>>>;
loadingMessage?: () => string;
}
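A minimal migration sketch for code that consumed the removed SelectOptionItem export, assuming only the rename shown in these hunks (the unitOptions list and onUnitSelected handler are hypothetical):

```typescript
// SelectOptionItem used to be declared in the local Select component;
// the same shape now lives in @grafana/data as SelectableValue<T>.
import { SelectableValue } from '@grafana/data';

// Hypothetical options and change handler using the new type.
const unitOptions: Array<SelectableValue<string>> = [
  { label: 'Percent', value: 'percent' },
  { label: 'Bytes', value: 'bytes' },
];

function onUnitSelected(item: SelectableValue<string>) {
  // label and value are both optional on SelectableValue, so guard before use.
  if (item.value !== undefined) {
    console.log('selected unit:', item.value);
  }
}

onUnitSelected(unitOptions[0]);
```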

View File

@@ -3,11 +3,10 @@ import { interval, Subscription, Subject, of, NEVER } from 'rxjs';
import { tap, switchMap } from 'rxjs/operators';
import _ from 'lodash';
import { stringToMs } from '@grafana/data';
import { stringToMs, SelectableValue } from '@grafana/data';
import { isLive } from '../RefreshPicker/RefreshPicker';
import { SelectOptionItem } from '../Select/Select';
export function getIntervalFromString(strInterval: string): SelectOptionItem<number> {
export function getIntervalFromString(strInterval: string): SelectableValue<number> {
return {
label: strInterval,
value: stringToMs(strInterval),

View File

@@ -8,10 +8,10 @@ import { StatsPicker } from '../StatsPicker/StatsPicker';
// Types
import { FieldDisplayOptions, DEFAULT_FIELD_DISPLAY_VALUES_LIMIT } from '../../utils/fieldDisplay';
import Select, { SelectOptionItem } from '../Select/Select';
import { Field, ReducerID, toNumberString, toIntegerOrUndefined } from '@grafana/data';
import Select from '../Select/Select';
import { Field, ReducerID, toNumberString, toIntegerOrUndefined, SelectableValue } from '@grafana/data';
const showOptions: Array<SelectOptionItem<boolean>> = [
const showOptions: Array<SelectableValue<boolean>> = [
{
value: true,
label: 'All Values',
@@ -31,7 +31,7 @@ export interface Props {
}
export class FieldDisplayEditor extends PureComponent<Props> {
onShowValuesChange = (item: SelectOptionItem<boolean>) => {
onShowValuesChange = (item: SelectableValue<boolean>) => {
const val = item.value === true;
this.props.onChange({ ...this.props.value, values: val });
};

View File

@@ -7,8 +7,7 @@ import { FormLabel } from '../FormLabel/FormLabel';
import { UnitPicker } from '../UnitPicker/UnitPicker';
// Types
import { toIntegerOrUndefined, Field } from '@grafana/data';
import { SelectOptionItem } from '../Select/Select';
import { toIntegerOrUndefined, Field, SelectableValue } from '@grafana/data';
import { VAR_SERIES_NAME, VAR_FIELD_NAME, VAR_CALC, VAR_CELL_PREFIX } from '../../utils/fieldDisplay';
@@ -54,7 +53,7 @@ export const FieldPropertiesEditor: React.FC<Props> = ({ value, onChange, showMi
[value.max, onChange]
);
const onUnitChange = (unit: SelectOptionItem<string>) => {
const onUnitChange = (unit: SelectableValue<string>) => {
onChange({ ...value, unit: unit.value });
};

View File

@@ -3,7 +3,7 @@ import omit from 'lodash/omit';
import { VizOrientation, PanelModel } from '../../types/panel';
import { FieldDisplayOptions } from '../../utils/fieldDisplay';
import { Field, getFieldReducers } from '@grafana/data';
import { Field, fieldReducers, Threshold, sortThresholds } from '@grafana/data';
export interface SingleStatBaseOptions {
fieldOptions: FieldDisplayOptions;
@@ -39,17 +39,18 @@ export const sharedSingleStatMigrationCheck = (panel: PanelModel<SingleStatBaseO
const { valueOptions } = old;
const fieldOptions = (old.fieldOptions = {} as FieldDisplayOptions);
fieldOptions.mappings = old.valueMappings;
fieldOptions.thresholds = old.thresholds;
const field = (fieldOptions.defaults = {} as Field);
if (valueOptions) {
field.unit = valueOptions.unit;
field.decimals = valueOptions.decimals;
field.mappings = old.valueMappings;
field.thresholds = migrateOldThresholds(old.thresholds);
field.unit = valueOptions.unit;
field.decimals = valueOptions.decimals;
// Make sure the stats have a valid name
if (valueOptions.stat) {
fieldOptions.calcs = getFieldReducers([valueOptions.stat]).map(s => s.id);
// Make sure the stats have a valid name
if (valueOptions.stat) {
const reducer = fieldReducers.get(valueOptions.stat);
if (reducer) {
fieldOptions.calcs = [reducer.id];
}
}
@@ -58,7 +59,33 @@ export const sharedSingleStatMigrationCheck = (panel: PanelModel<SingleStatBaseO
// remove old props
return omit(old, 'valueMappings', 'thresholds', 'valueOptions', 'minValue', 'maxValue');
} else if (old.fieldOptions) {
// Move mappings & thresholds to field defaults (6.4+)
const { mappings, thresholds, ...fieldOptions } = old.fieldOptions;
fieldOptions.defaults = {
mappings,
thresholds: migrateOldThresholds(thresholds),
...fieldOptions.defaults,
};
old.fieldOptions = fieldOptions;
return old;
}
return panel.options;
};
export function migrateOldThresholds(thresholds?: any[]): Threshold[] | undefined {
if (!thresholds || !thresholds.length) {
return undefined;
}
const copy = thresholds.map(t => {
return {
// Drops 'index'
value: t.value === null ? -Infinity : t.value,
color: t.color,
};
});
sortThresholds(copy);
copy[0].value = -Infinity;
return copy;
}
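A worked sketch of the threshold half of this migration, assuming a hypothetical panel saved with the old index-based threshold shape (mirrors migrateOldThresholds above):

```typescript
import { Threshold, sortThresholds } from '@grafana/data';

// Hypothetical thresholds as persisted by an older singlestat panel: a null
// base value and an explicit index property that the migration drops.
const oldThresholds = [
  { index: 1, value: 50, color: '#EAB839' },
  { index: 0, value: null, color: '#7EB26D' },
];

// Same steps as migrateOldThresholds(): drop index, map null to -Infinity,
// sort by value, and force the first entry to be the base threshold.
const migrated: Threshold[] = oldThresholds.map(t => ({
  value: t.value === null ? -Infinity : t.value,
  color: t.color,
}));
sortThresholds(migrated);
migrated[0].value = -Infinity;
// => [{ value: -Infinity, color: '#7EB26D' }, { value: 50, color: '#EAB839' }]
```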

View File

@@ -5,8 +5,7 @@ import difference from 'lodash/difference';
import { Select } from '../index';
import { getFieldReducers } from '@grafana/data';
import { SelectOptionItem } from '../Select/Select';
import { fieldReducers, SelectableValue } from '@grafana/data';
interface Props {
placeholder?: string;
@@ -34,7 +33,7 @@ export class StatsPicker extends PureComponent<Props> {
checkInput = () => {
const { stats, allowMultiple, defaultStat, onChange } = this.props;
const current = getFieldReducers(stats);
const current = fieldReducers.list(stats);
if (current.length !== stats.length) {
const found = current.map(v => v.id);
const notFound = difference(stats, found);
@@ -54,7 +53,7 @@ export class StatsPicker extends PureComponent<Props> {
}
};
onSelectionChange = (item: SelectOptionItem<string>) => {
onSelectionChange = (item: SelectableValue<string>) => {
const { onChange } = this.props;
if (isArray(item)) {
onChange(item.map(v => v.value));
@@ -65,24 +64,16 @@ export class StatsPicker extends PureComponent<Props> {
render() {
const { width, stats, allowMultiple, defaultStat, placeholder } = this.props;
const options = getFieldReducers().map(s => {
return {
value: s.id,
label: s.name,
description: s.description,
};
});
const value: Array<SelectOptionItem<string>> = options.filter(option => stats.find(stat => option.value === stat));
const select = fieldReducers.selectOptions(stats);
return (
<Select
width={width}
value={value}
value={select.current}
isClearable={!defaultStat}
isMulti={allowMultiple}
isSearchable={true}
options={options}
options={select.options}
placeholder={placeholder}
onChange={this.onSelectionChange}
/>
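A hedged sketch of the registry calls StatsPicker now relies on, inferred only from the usages visible in this hunk (fieldReducers.list, and fieldReducers.selectOptions returning { options, current }):

```typescript
import { fieldReducers, ReducerID } from '@grafana/data';

// list() resolves descriptors for known reducer ids; unknown ids are dropped,
// which is what checkInput() uses to detect stale stats.
const current = fieldReducers.list([ReducerID.first, ReducerID.last]);
console.log(current.map(r => r.id)); // ['first', 'last']

// selectOptions() builds Select-ready options plus the currently selected
// subset, which StatsPicker feeds straight into <Select />.
const select = fieldReducers.selectOptions([ReducerID.first]);
console.log(select.options.length, select.current.length);
```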

View File

@@ -1,6 +1,6 @@
import React, { ChangeEvent } from 'react';
import { mount } from 'enzyme';
import { ThresholdsEditor, Props } from './ThresholdsEditor';
import { ThresholdsEditor, Props, threshodsWithoutKey } from './ThresholdsEditor';
import { colors } from '../../utils';
const setup = (propOverrides?: Partial<Props>) => {
@@ -20,6 +20,10 @@ const setup = (propOverrides?: Partial<Props>) => {
};
};
function getCurrentThresholds(editor: ThresholdsEditor) {
return threshodsWithoutKey(editor.state.thresholds);
}
describe('Render', () => {
it('should render with base threshold', () => {
const { wrapper } = setup();
@@ -32,60 +36,55 @@ describe('Initialization', () => {
it('should add a base threshold if missing', () => {
const { instance } = setup();
expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
expect(getCurrentThresholds(instance)).toEqual([{ value: -Infinity, color: colors[0] }]);
});
});
describe('Add threshold', () => {
it('should not add threshold at index 0', () => {
const { instance } = setup();
instance.onAddThreshold(0);
expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
});
it('should add threshold', () => {
const { instance } = setup();
instance.onAddThreshold(1);
instance.onAddThresholdAfter(instance.state.thresholds[0]);
expect(instance.state.thresholds).toEqual([
{ index: 0, value: -Infinity, color: colors[0] },
{ index: 1, value: 50, color: colors[2] },
expect(getCurrentThresholds(instance)).toEqual([
{ value: -Infinity, color: colors[0] }, // 0
{ value: 50, color: colors[2] }, // 1
]);
});
it('should add another threshold above a first', () => {
const { instance } = setup({
thresholds: [{ index: 0, value: -Infinity, color: colors[0] }, { index: 1, value: 50, color: colors[2] }],
thresholds: [
{ value: -Infinity, color: colors[0] }, // 0
{ value: 50, color: colors[2] }, // 1
],
});
instance.onAddThreshold(2);
instance.onAddThresholdAfter(instance.state.thresholds[1]);
expect(instance.state.thresholds).toEqual([
{ index: 0, value: -Infinity, color: colors[0] },
{ index: 1, value: 50, color: colors[2] },
{ index: 2, value: 75, color: colors[3] },
expect(getCurrentThresholds(instance)).toEqual([
{ value: -Infinity, color: colors[0] }, // 0
{ value: 50, color: colors[2] }, // 1
{ value: 75, color: colors[3] }, // 2
]);
});
it('should add another threshold between first and second index', () => {
const { instance } = setup({
thresholds: [
{ index: 0, value: -Infinity, color: colors[0] },
{ index: 1, value: 50, color: colors[2] },
{ index: 2, value: 75, color: colors[3] },
{ value: -Infinity, color: colors[0] },
{ value: 50, color: colors[2] },
{ value: 75, color: colors[3] },
],
});
instance.onAddThreshold(2);
instance.onAddThresholdAfter(instance.state.thresholds[1]);
expect(instance.state.thresholds).toEqual([
{ index: 0, value: -Infinity, color: colors[0] },
{ index: 1, value: 50, color: colors[2] },
{ index: 2, value: 62.5, color: colors[4] },
{ index: 3, value: 75, color: colors[3] },
expect(getCurrentThresholds(instance)).toEqual([
{ value: -Infinity, color: colors[0] },
{ value: 50, color: colors[2] },
{ value: 62.5, color: colors[4] },
{ value: 75, color: colors[3] },
]);
});
});
@@ -93,30 +92,30 @@ describe('Add threshold', () => {
describe('Remove threshold', () => {
it('should not remove threshold at index 0', () => {
const thresholds = [
{ index: 0, value: -Infinity, color: '#7EB26D' },
{ index: 1, value: 50, color: '#EAB839' },
{ index: 2, value: 75, color: '#6ED0E0' },
{ value: -Infinity, color: '#7EB26D' },
{ value: 50, color: '#EAB839' },
{ value: 75, color: '#6ED0E0' },
];
const { instance } = setup({ thresholds });
instance.onRemoveThreshold(thresholds[0]);
instance.onRemoveThreshold(instance.state.thresholds[0]);
expect(instance.state.thresholds).toEqual(thresholds);
expect(getCurrentThresholds(instance)).toEqual(thresholds);
});
it('should remove threshold', () => {
const thresholds = [
{ index: 0, value: -Infinity, color: '#7EB26D' },
{ index: 1, value: 50, color: '#EAB839' },
{ index: 2, value: 75, color: '#6ED0E0' },
{ value: -Infinity, color: '#7EB26D' },
{ value: 50, color: '#EAB839' },
{ value: 75, color: '#6ED0E0' },
];
const { instance } = setup({ thresholds });
instance.onRemoveThreshold(thresholds[1]);
instance.onRemoveThreshold(instance.state.thresholds[1]);
expect(instance.state.thresholds).toEqual([
{ index: 0, value: -Infinity, color: '#7EB26D' },
{ index: 1, value: 75, color: '#6ED0E0' },
expect(getCurrentThresholds(instance)).toEqual([
{ value: -Infinity, color: '#7EB26D' },
{ value: 75, color: '#6ED0E0' },
]);
});
});
@@ -124,25 +123,25 @@ describe('Remove threshold', () => {
describe('change threshold value', () => {
it('should not change threshold at index 0', () => {
const thresholds = [
{ index: 0, value: -Infinity, color: '#7EB26D' },
{ index: 1, value: 50, color: '#EAB839' },
{ index: 2, value: 75, color: '#6ED0E0' },
{ value: -Infinity, color: '#7EB26D' },
{ value: 50, color: '#EAB839' },
{ value: 75, color: '#6ED0E0' },
];
const { instance } = setup({ thresholds });
const mockEvent = ({ target: { value: '12' } } as any) as ChangeEvent<HTMLInputElement>;
instance.onChangeThresholdValue(mockEvent, thresholds[0]);
instance.onChangeThresholdValue(mockEvent, instance.state.thresholds[0]);
expect(instance.state.thresholds).toEqual(thresholds);
expect(getCurrentThresholds(instance)).toEqual(thresholds);
});
it('should update value', () => {
const { instance } = setup();
const thresholds = [
{ index: 0, value: -Infinity, color: '#7EB26D' },
{ index: 1, value: 50, color: '#EAB839' },
{ index: 2, value: 75, color: '#6ED0E0' },
{ value: -Infinity, color: '#7EB26D', key: 1 },
{ value: 50, color: '#EAB839', key: 2 },
{ value: 75, color: '#6ED0E0', key: 3 },
];
instance.state = {
@@ -153,10 +152,10 @@ describe('change threshold value', () => {
instance.onChangeThresholdValue(mockEvent, thresholds[1]);
expect(instance.state.thresholds).toEqual([
{ index: 0, value: -Infinity, color: '#7EB26D' },
{ index: 1, value: 78, color: '#EAB839' },
{ index: 2, value: 75, color: '#6ED0E0' },
expect(getCurrentThresholds(instance)).toEqual([
{ value: -Infinity, color: '#7EB26D' },
{ value: 78, color: '#EAB839' },
{ value: 75, color: '#6ED0E0' },
]);
});
});
@@ -165,9 +164,9 @@ describe('on blur threshold value', () => {
it('should resort rows and update indexes', () => {
const { instance } = setup();
const thresholds = [
{ index: 0, value: -Infinity, color: '#7EB26D' },
{ index: 1, value: 78, color: '#EAB839' },
{ index: 2, value: 75, color: '#6ED0E0' },
{ value: -Infinity, color: '#7EB26D', key: 1 },
{ value: 78, color: '#EAB839', key: 2 },
{ value: 75, color: '#6ED0E0', key: 3 },
];
instance.setState({
@@ -176,10 +175,10 @@ describe('on blur threshold value', () => {
instance.onBlur();
expect(instance.state.thresholds).toEqual([
{ index: 0, value: -Infinity, color: '#7EB26D' },
{ index: 1, value: 75, color: '#6ED0E0' },
{ index: 2, value: 78, color: '#EAB839' },
expect(getCurrentThresholds(instance)).toEqual([
{ value: -Infinity, color: '#7EB26D' },
{ value: 75, color: '#6ED0E0' },
{ value: 78, color: '#EAB839' },
]);
});
});

View File

@@ -1,5 +1,5 @@
import React, { PureComponent, ChangeEvent } from 'react';
import { Threshold } from '@grafana/data';
import { Threshold, sortThresholds } from '@grafana/data';
import { colors } from '../../utils';
import { ThemeContext } from '../../themes';
import { getColorFromHexRgbOrName } from '../../utils';
@@ -13,115 +13,121 @@ export interface Props {
}
interface State {
thresholds: Threshold[];
thresholds: ThresholdWithKey[];
}
interface ThresholdWithKey extends Threshold {
key: number;
}
let counter = 100;
export class ThresholdsEditor extends PureComponent<Props, State> {
constructor(props: Props) {
super(props);
const addDefaultThreshold = this.props.thresholds.length === 0;
const thresholds: Threshold[] = addDefaultThreshold
? [{ index: 0, value: -Infinity, color: colors[0] }]
: props.thresholds;
const thresholds = props.thresholds
? props.thresholds.map(t => {
return {
color: t.color,
value: t.value === null ? -Infinity : t.value,
key: counter++,
};
})
: ([] as ThresholdWithKey[]);
let needsCallback = false;
if (!thresholds.length) {
thresholds.push({ value: -Infinity, color: colors[0], key: counter++ });
needsCallback = true;
} else {
// First value is always base
thresholds[0].value = -Infinity;
}
// Update the state
this.state = { thresholds };
if (addDefaultThreshold) {
if (needsCallback) {
this.onChange();
}
}
onAddThreshold = (index: number) => {
onAddThresholdAfter = (threshold: ThresholdWithKey) => {
const { thresholds } = this.state;
const maxValue = 100;
const minValue = 0;
if (index === 0) {
return;
let prev: ThresholdWithKey | undefined = undefined;
let next: ThresholdWithKey | undefined = undefined;
for (const t of thresholds) {
if (prev && prev.key === threshold.key) {
next = t;
break;
}
prev = t;
}
const newThresholds = thresholds.map(threshold => {
if (threshold.index >= index) {
const index = threshold.index + 1;
threshold = { ...threshold, index };
}
return threshold;
});
const prevValue = prev && isFinite(prev.value) ? prev.value : minValue;
const nextValue = next && isFinite(next.value) ? next.value : maxValue;
// Setting value to a value between the previous thresholds
const beforeThreshold = newThresholds.filter(t => t.index === index - 1 && t.index !== 0)[0];
const afterThreshold = newThresholds.filter(t => t.index === index + 1 && t.index !== 0)[0];
const beforeThresholdValue = beforeThreshold !== undefined ? beforeThreshold.value : minValue;
const afterThresholdValue = afterThreshold !== undefined ? afterThreshold.value : maxValue;
const value = afterThresholdValue - (afterThresholdValue - beforeThresholdValue) / 2;
// Set a color
const color = colors.filter(c => !newThresholds.some(t => t.color === c))[1];
const color = colors.filter(c => !thresholds.some(t => t.color === c))[1];
const add = {
value: prevValue + (nextValue - prevValue) / 2.0,
color: color,
key: counter++,
};
const newThresholds = [...thresholds, add];
sortThresholds(newThresholds);
this.setState(
{
thresholds: this.sortThresholds([
...newThresholds,
{
color,
index,
value: value as number,
},
]),
thresholds: newThresholds,
},
() => this.onChange()
);
};
onRemoveThreshold = (threshold: Threshold) => {
if (threshold.index === 0) {
return;
}
this.setState(
prevState => {
const newThresholds = prevState.thresholds.map(t => {
if (t.index > threshold.index) {
const index = t.index - 1;
t = { ...t, index };
}
return t;
});
return {
thresholds: newThresholds.filter(t => t !== threshold),
};
},
() => this.onChange()
);
};
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: Threshold) => {
if (threshold.index === 0) {
return;
}
onRemoveThreshold = (threshold: ThresholdWithKey) => {
const { thresholds } = this.state;
if (!thresholds.length) {
return;
}
// Don't remove index 0
if (threshold.key === thresholds[0].key) {
return;
}
this.setState(
{
thresholds: thresholds.filter(t => t.key !== threshold.key),
},
() => this.onChange()
);
};
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: ThresholdWithKey) => {
const cleanValue = event.target.value.replace(/,/g, '.');
const parsedValue = parseFloat(cleanValue);
const value = isNaN(parsedValue) ? '' : parsedValue;
const newThresholds = thresholds.map(t => {
if (t === threshold && t.index !== 0) {
const thresholds = this.state.thresholds.map(t => {
if (t.key === threshold.key) {
t = { ...t, value: value as number };
}
return t;
});
this.setState({ thresholds: newThresholds });
if (thresholds.length) {
thresholds[0].value = -Infinity;
}
this.setState({ thresholds });
};
onChangeThresholdColor = (threshold: Threshold, color: string) => {
onChangeThresholdColor = (threshold: ThresholdWithKey, color: string) => {
const { thresholds } = this.state;
const newThresholds = thresholds.map(t => {
if (t === threshold) {
if (t.key === threshold.key) {
t = { ...t, color: color };
}
@@ -137,30 +143,22 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
};
onBlur = () => {
this.setState(prevState => {
const sortThresholds = this.sortThresholds([...prevState.thresholds]);
let index = 0;
sortThresholds.forEach(t => {
t.index = index++;
});
return { thresholds: sortThresholds };
});
this.onChange();
const thresholds = [...this.state.thresholds];
sortThresholds(thresholds);
this.setState(
{
thresholds,
},
() => this.onChange()
);
};
onChange = () => {
this.props.onChange(this.state.thresholds);
const { thresholds } = this.state;
this.props.onChange(threshodsWithoutKey(thresholds));
};
sortThresholds = (thresholds: Threshold[]) => {
return thresholds.sort((t1, t2) => {
return t1.value - t2.value;
});
};
renderInput = (threshold: Threshold) => {
renderInput = (threshold: ThresholdWithKey) => {
return (
<div className="thresholds-row-input-inner">
<span className="thresholds-row-input-inner-arrow" />
@@ -175,12 +173,11 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
</div>
)}
</div>
{threshold.index === 0 && (
{!isFinite(threshold.value) ? (
<div className="thresholds-row-input-inner-value">
<Input type="text" value="Base" readOnly />
</div>
)}
{threshold.index > 0 && (
) : (
<>
<div className="thresholds-row-input-inner-value">
<Input
@@ -189,7 +186,6 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
onChange={(event: ChangeEvent<HTMLInputElement>) => this.onChangeThresholdValue(event, threshold)}
value={threshold.value}
onBlur={this.onBlur}
readOnly={threshold.index === 0}
/>
</div>
<div className="thresholds-row-input-inner-remove" onClick={() => this.onRemoveThreshold(threshold)}>
@@ -212,13 +208,10 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
{thresholds
.slice(0)
.reverse()
.map((threshold, index) => {
.map(threshold => {
return (
<div className="thresholds-row" key={`${threshold.index}-${index}`}>
<div
className="thresholds-row-add-button"
onClick={() => this.onAddThreshold(threshold.index + 1)}
>
<div className="thresholds-row" key={`${threshold.key}`}>
<div className="thresholds-row-add-button" onClick={() => this.onAddThresholdAfter(threshold)}>
<i className="fa fa-plus" />
</div>
<div
@@ -237,3 +230,10 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
);
}
}
export function threshodsWithoutKey(thresholds: ThresholdWithKey[]): Threshold[] {
return thresholds.map(t => {
const { key, ...rest } = t;
return rest; // everything except key
});
}
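A small sketch of the round trip the editor now performs, assuming only the ThresholdWithKey shape and threshodsWithoutKey helper shown above:

```typescript
import { Threshold } from '@grafana/data';

interface ThresholdWithKey extends Threshold {
  key: number;
}

let counter = 100;

// Incoming props.thresholds get a stable key so rows can be added, removed and
// re-sorted without relying on a mutable index property.
const incoming: Threshold[] = [
  { value: -Infinity, color: '#7EB26D' },
  { value: 50, color: '#EAB839' },
];
const keyed: ThresholdWithKey[] = incoming.map(t => ({ ...t, key: counter++ }));

// onAddThresholdAfter interpolates halfway between the neighbours (or to 0/100
// at the edges), e.g. adding after 50 with nothing above it yields:
const insertedValue = 50 + (100 - 50) / 2.0; // 75

// On change the key is stripped again before calling props.onChange.
const outgoing: Threshold[] = keyed.map(({ key, ...rest }) => rest);
console.log(outgoing, insertedValue);
```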

View File

@@ -9,7 +9,6 @@ exports[`Render should render with base threshold 1`] = `
Array [
Object {
"color": "#7EB26D",
"index": 0,
"value": -Infinity,
},
],
@@ -48,7 +47,7 @@ exports[`Render should render with base threshold 1`] = `
>
<div
className="thresholds-row"
key="0-0"
key="100"
>
<div
className="thresholds-row-add-button"

View File

@@ -8,13 +8,13 @@ import { TimePickerPopover } from './TimePickerPopover';
import { ClickOutsideWrapper } from '../ClickOutsideWrapper/ClickOutsideWrapper';
// Utils & Services
import { isDateTime } from '@grafana/data';
import { isDateTime, DateTime } from '@grafana/data';
import { rangeUtil } from '@grafana/data';
import { rawToTimeRange } from './time';
// Types
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT } from '@grafana/data';
import { SelectOptionItem } from '../Select/Select';
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT, SelectableValue } from '@grafana/data';
import { isMathString } from '@grafana/data/src/utils/datemath';
export interface Props {
value: TimeRange;
@@ -77,7 +77,7 @@ export class TimePicker extends PureComponent<Props, State> {
isCustomOpen: false,
};
mapTimeOptionsToSelectOptionItems = (selectOptions: TimeOption[]) => {
mapTimeOptionsToSelectableValues = (selectOptions: TimeOption[]) => {
const options = selectOptions.map(timeOption => {
return {
label: timeOption.display,
@@ -93,7 +93,7 @@ export class TimePicker extends PureComponent<Props, State> {
return options;
};
onSelectChanged = (item: SelectOptionItem<TimeOption>) => {
onSelectChanged = (item: SelectableValue<TimeOption>) => {
const { onChange, timeZone } = this.props;
if (item.value && item.value.from === 'custom') {
@@ -122,15 +122,23 @@ export class TimePicker extends PureComponent<Props, State> {
render() {
const { selectOptions: selectTimeOptions, value, onMoveBackward, onMoveForward, onZoom, timeZone } = this.props;
const { isCustomOpen } = this.state;
const options = this.mapTimeOptionsToSelectOptionItems(selectTimeOptions);
const options = this.mapTimeOptionsToSelectableValues(selectTimeOptions);
const currentOption = options.find(item => isTimeOptionEqualToTimeRange(item.value, value));
const rangeString = rangeUtil.describeTimeRange(value.raw);
const isUTC = timeZone === 'utc';
const adjustedTime = (time: DateTime) => (isUTC ? time.utc() : time.local()) || null;
const adjustedTimeRange = {
to: isMathString(value.raw.to) ? value.raw.to : adjustedTime(value.to),
from: isMathString(value.raw.from) ? value.raw.from : adjustedTime(value.from),
};
const rangeString = rangeUtil.describeTimeRange(adjustedTimeRange);
const label = (
<>
{isCustomOpen && <span>Custom time range</span>}
{!isCustomOpen && <span>{rangeString}</span>}
{timeZone === 'utc' && <span className="time-picker-utc">UTC</span>}
{isUTC && <span className="time-picker-utc">UTC</span>}
</>
);
const isAbsolute = isDateTime(value.raw.to);
@@ -148,6 +156,7 @@ export class TimePicker extends PureComponent<Props, State> {
value={currentOption}
label={label}
options={options}
maxMenuHeight={600}
onChange={this.onSelectChanged}
iconClass={'fa fa-clock-o fa-fw'}
tooltipContent={<TimePickerTooltipContent timeRange={value} />}
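A sketch of the new label adjustment, assuming a hypothetical absolute range and that rangeUtil.describeTimeRange accepts the adjusted date values the same way the hunk above passes them:

```typescript
import { dateTime, rangeUtil, DateTime } from '@grafana/data';

// Hypothetical absolute range picked from the calendar.
const from = dateTime('2019-08-01T10:00:00Z');
const to = dateTime('2019-08-01T16:00:00Z');

const isUTC = true; // timeZone === 'utc'
const adjustedTime = (time: DateTime) => (isUTC ? time.utc() : time.local());

// Previously the label always used browser-local time; with this change the
// displayed range follows the dashboard's UTC setting.
const rangeString = rangeUtil.describeTimeRange({
  from: adjustedTime(from),
  to: adjustedTime(to),
});
console.log(rangeString);
```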

View File

@@ -18,7 +18,6 @@
.time-picker-popover {
display: flex;
flex-flow: row nowrap;
justify-content: space-around;
border: 1px solid $popover-border-color;
border-radius: $border-radius;
@@ -31,41 +30,41 @@
max-width: 600px;
top: 41px;
right: 0px;
}
.time-picker-popover-body {
display: flex;
flex-flow: row nowrap;
justify-content: space-around;
padding: $space-md;
padding-bottom: 0;
.time-picker-popover-body {
display: flex;
flex-flow: row nowrap;
justify-content: space-around;
padding: $space-md;
padding-bottom: 0;
}
.time-picker-popover-title {
font-size: $font-size-md;
font-weight: $font-weight-semi-bold;
}
.time-picker-popover-body-custom-ranges:first-child {
margin-right: $space-md;
}
.time-picker-popover-body-custom-ranges-input {
display: flex;
flex-flow: row nowrap;
align-items: center;
margin-bottom: $space-sm;
.time-picker-input-error {
box-shadow: inset 0 0px 5px $red;
}
}
.time-picker-popover-title {
font-size: $font-size-md;
font-weight: $font-weight-semi-bold;
}
.time-picker-popover-body-custom-ranges:first-child {
margin-right: $space-md;
}
.time-picker-popover-body-custom-ranges-input {
display: flex;
flex-flow: row nowrap;
align-items: center;
margin-bottom: $space-sm;
.time-picker-input-error {
box-shadow: inset 0 0px 5px $red;
}
}
.time-picker-popover-footer {
display: flex;
flex-flow: row nowrap;
justify-content: center;
padding: $space-md;
}
.time-picker-popover-footer {
display: flex;
flex-flow: row nowrap;
justify-content: center;
padding: $space-md;
}
.time-picker-popover-header {

View File

@@ -9,7 +9,7 @@ export * from './Button/Button';
export { ButtonVariant } from './Button/AbstractButton';
// Select
export { Select, AsyncSelect, SelectOptionItem } from './Select/Select';
export { Select, AsyncSelect } from './Select/Select';
export { IndicatorsContainer } from './Select/IndicatorsContainer';
export { NoOptionsMessage } from './Select/NoOptionsMessage';
export { default as resetSelectStyles } from './Select/resetSelectStyles';

View File

@@ -77,6 +77,13 @@ interface PluginMetaInfoLink {
url: string;
}
export interface PluginBuildInfo {
time?: number;
repo?: string;
branch?: string;
hash?: string;
}
export interface PluginMetaInfo {
author: {
name: string;
@@ -88,6 +95,7 @@ export interface PluginMetaInfo {
large: string;
small: string;
};
build?: PluginBuildInfo;
screenshots: any[];
updated: string;
version: string;

View File

@@ -103,7 +103,7 @@ describe('Format value', () => {
it('should return if value isNaN', () => {
const valueMappings: ValueMapping[] = [];
const value = 'N/A';
const instance = getDisplayProcessor({ mappings: valueMappings });
const instance = getDisplayProcessor({ field: { mappings: valueMappings } });
const result = instance(value);
@@ -114,7 +114,7 @@ describe('Format value', () => {
const valueMappings: ValueMapping[] = [];
const value = '6';
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
const result = instance(value);
@@ -127,7 +127,7 @@ describe('Format value', () => {
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
];
const value = '10';
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
const result = instance(value);
@@ -160,7 +160,7 @@ describe('Format value', () => {
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = '11';
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
expect(instance(value).text).toEqual('1-20');
});

View File

@@ -7,16 +7,13 @@ import { getColorFromHexRgbOrName } from './namedColorsPalette';
// Types
import { DecimalInfo, DisplayValue, GrafanaTheme, GrafanaThemeType, DecimalCount } from '../types';
import { DateTime, dateTime, Threshold, ValueMapping, getMappedValue, Field } from '@grafana/data';
import { DateTime, dateTime, Threshold, getMappedValue, Field } from '@grafana/data';
export type DisplayProcessor = (value: any) => DisplayValue;
export interface DisplayValueOptions {
field?: Partial<Field>;
mappings?: ValueMapping[];
thresholds?: Threshold[];
// Alternative to empty string
noValue?: string;
@@ -31,7 +28,8 @@ export function getDisplayProcessor(options?: DisplayValueOptions): DisplayProce
const formatFunc = getValueFormat(field.unit || 'none');
return (value: any) => {
const { mappings, thresholds, theme } = options;
const { theme } = options;
const { mappings, thresholds } = field;
let color;
let text = _.toString(value);

View File

@@ -1,5 +1,5 @@
import { getFieldProperties, getFieldDisplayValues, GetFieldDisplayValuesOptions } from './fieldDisplay';
import { FieldType, ReducerID } from '@grafana/data';
import { FieldType, ReducerID, Threshold } from '@grafana/data';
import { GrafanaThemeType } from '../types/theme';
import { getTheme } from '../themes/index';
@@ -55,8 +55,6 @@ describe('FieldDisplay', () => {
},
fieldOptions: {
calcs: [],
mappings: [],
thresholds: [],
override: {},
defaults: {},
},
@@ -68,8 +66,6 @@ describe('FieldDisplay', () => {
...options,
fieldOptions: {
calcs: [ReducerID.first],
mappings: [],
thresholds: [],
override: {},
defaults: {
title: '$__cell_0 * $__field_name * $__series_name',
@@ -88,8 +84,6 @@ describe('FieldDisplay', () => {
...options,
fieldOptions: {
calcs: [ReducerID.last],
mappings: [],
thresholds: [],
override: {},
defaults: {},
},
@@ -104,8 +98,6 @@ describe('FieldDisplay', () => {
values: true, //
limit: 1000,
calcs: [],
mappings: [],
thresholds: [],
override: {},
defaults: {},
},
@@ -120,12 +112,27 @@ describe('FieldDisplay', () => {
values: true, //
limit: 2,
calcs: [],
mappings: [],
thresholds: [],
override: {},
defaults: {},
},
});
expect(display.map(v => v.display.numeric)).toEqual([1, 3]); // First 2 are from the first field
});
it('should restore -Infinity value for base threshold', () => {
const field = getFieldProperties({
thresholds: [
({
color: '#73BF69',
value: null,
} as unknown) as Threshold,
{
color: '#F2495C',
value: 50,
},
],
});
expect(field.thresholds!.length).toEqual(2);
expect(field.thresholds![0].value).toBe(-Infinity);
});
});

View File

@@ -4,16 +4,7 @@ import toString from 'lodash/toString';
import { DisplayValue, GrafanaTheme, InterpolateFunction, ScopedVars, GraphSeriesValue } from '../types/index';
import { getDisplayProcessor } from './displayValue';
import { getFlotPairs } from './flotPairs';
import {
ValueMapping,
Threshold,
ReducerID,
reduceField,
FieldType,
NullValueMode,
DataFrame,
Field,
} from '@grafana/data';
import { ReducerID, reduceField, FieldType, NullValueMode, DataFrame, Field } from '@grafana/data';
export interface FieldDisplayOptions {
values?: boolean; // If true show each row value
@@ -22,10 +13,6 @@ export interface FieldDisplayOptions {
defaults: Partial<Field>; // Use these values unless otherwise stated
override: Partial<Field>; // Set these values regardless of the source
// Could these be data driven also?
thresholds: Threshold[];
mappings: ValueMapping[];
}
export const VAR_SERIES_NAME = '__series_name';
@@ -127,8 +114,6 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
const display = getDisplayProcessor({
field,
mappings: fieldOptions.mappings,
thresholds: fieldOptions.thresholds,
theme: options.theme,
});
@@ -263,6 +248,11 @@ export function getFieldProperties(...props: PartialField[]): Field {
field = applyFieldProperties(field, props[i]);
}
// First value is always -Infinity
if (field.thresholds && field.thresholds.length) {
field.thresholds[0].value = -Infinity;
}
// Verify that max > min
if (field.hasOwnProperty('min') && field.hasOwnProperty('max') && field.min! > field.max!) {
return {

View File

@@ -1,4 +1,4 @@
ARG BASE_IMAGE=ubuntu:latest
ARG BASE_IMAGE=ubuntu:18.04
FROM ${BASE_IMAGE}
ARG GRAFANA_TGZ="grafana-latest.linux-x64.tar.gz"
@@ -12,7 +12,7 @@ COPY ${GRAFANA_TGZ} /tmp/grafana.tar.gz
# Change to tar xfzv to make tar print every file it extracts
RUN mkdir /tmp/grafana && tar xfz /tmp/grafana.tar.gz --strip-components=1 -C /tmp/grafana
ARG BASE_IMAGE=ubuntu:latest
ARG BASE_IMAGE=ubuntu:18.04
FROM ${BASE_IMAGE}
ARG GF_UID="472"

View File

@@ -59,14 +59,16 @@ docker_tag_all () {
fi
}
docker_build "ubuntu:latest" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
docker_build "ubuntu:18.04" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
if [ $BUILD_FAST = "0" ]; then
docker_build "arm32v7/ubuntu:latest" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
docker_build "arm64v8/ubuntu:latest" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
docker_build "arm32v7/ubuntu:18.04" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
docker_build "arm64v8/ubuntu:18.04" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
fi
# Tag as 'latest' for official release; otherwise tag as grafana/grafana:master
if echo "$_grafana_tag" | grep -q "^v"; then
docker_tag_all "${_docker_repo}" "latest"
# Create the expected tag for running the end to end tests successfully
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_tag}"
else
docker_tag_all "${_docker_repo}" "master"
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_version}"

View File

@@ -38,8 +38,14 @@ if echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -vq "beta"
echo "pushing ${_docker_repo}:latest"
docker_push_all "${_docker_repo}" "latest"
docker_push_all "${_docker_repo}" "${_grafana_version}"
# Push to the grafana-dev repository with the expected tag
# for running the end to end tests successfully
docker push "grafana/grafana-dev:${_grafana_tag}"
elif echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -q "beta"; then
docker_push_all "${_docker_repo}" "${_grafana_version}"
# Push to the grafana-dev repository with the expected tag
# for running the end to end tests successfully
docker push "grafana/grafana-dev:${_grafana_tag}"
elif echo "$_grafana_tag" | grep -q "master"; then
docker_push_all "${_docker_repo}" "master"
docker push "grafana/grafana-dev:${_grafana_version}"

View File

@@ -34,7 +34,7 @@ func AdminCreateUser(c *models.ReqContext, form dtos.AdminCreateUserForm) {
return
}
metrics.M_Api_Admin_User_Create.Inc()
metrics.MApiAdminUserCreate.Inc()
user := cmd.Result

View File

@@ -133,7 +133,7 @@ func (hs *HTTPServer) GetDashboard(c *m.ReqContext) Response {
Meta: meta,
}
c.TimeRequest(metrics.M_Api_Dashboard_Get)
c.TimeRequest(metrics.MApiDashboardGet)
return JSON(200, dto)
}
@@ -278,12 +278,11 @@ func (hs *HTTPServer) PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand)
inFolder := cmd.FolderId > 0
err := dashboards.MakeUserAdmin(hs.Bus, cmd.OrgId, cmd.UserId, dashboard.Id, !inFolder)
if err != nil {
hs.log.Error("Could not make user admin", "dashboard", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
return Error(500, "Failed to make user admin of dashboard", err)
hs.log.Error("Could not make user admin", "dashboard", dashboard.Title, "user", c.SignedInUser.UserId, "error", err)
}
}
c.TimeRequest(metrics.M_Api_Dashboard_Save)
c.TimeRequest(metrics.MApiDashboardSave)
return JSON(200, util.DynMap{
"status": "success",
"slug": dashboard.Slug,

View File

@@ -97,7 +97,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
cmd.ExternalDeleteUrl = response.DeleteUrl
cmd.Dashboard = simplejson.New()
metrics.M_Api_Dashboard_Snapshot_External.Inc()
metrics.MApiDashboardSnapshotExternal.Inc()
} else {
if cmd.Key == "" {
cmd.Key = util.GetRandomString(32)
@@ -109,7 +109,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
url = setting.ToAbsUrl("dashboard/snapshot/" + cmd.Key)
metrics.M_Api_Dashboard_Snapshot_Create.Inc()
metrics.MApiDashboardSnapshotCreate.Inc()
}
if err := bus.Dispatch(&cmd); err != nil {
@@ -154,7 +154,7 @@ func GetDashboardSnapshot(c *m.ReqContext) {
},
}
metrics.M_Api_Dashboard_Snapshot_Get.Inc()
metrics.MApiDashboardSnapshotGet.Inc()
c.Resp.Header().Set("Cache-Control", "public, max-age=3600")
c.JSON(200, dto)

View File

@@ -8,7 +8,7 @@ import (
)
func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
c.TimeRequest(metrics.M_DataSource_ProxyReq_Timer)
c.TimeRequest(metrics.MDataSourceProxyReqTimer)
dsId := c.ParamsInt64(":id")
ds, err := hs.DatasourceCache.GetDatasource(dsId, c.SignedInUser, c.SkipCache)

View File

@@ -64,7 +64,6 @@ func (hs *HTTPServer) CreateFolder(c *m.ReqContext, cmd m.CreateFolderCommand) R
if hs.Cfg.EditorsCanAdmin {
if err := dashboards.MakeUserAdmin(hs.Bus, c.OrgId, c.SignedInUser.UserId, cmd.Result.Id, true); err != nil {
hs.log.Error("Could not make user admin", "folder", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
return Error(500, "Failed to make user admin of folder", err)
}
}

View File

@@ -269,7 +269,8 @@ func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) {
return
}
promhttp.HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
promhttp.
HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
ServeHTTP(ctx.Resp, ctx.Req.Request)
}

View File

@@ -307,6 +307,25 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
}
}
data.NavTree = append(data.NavTree, cfgNode)
} else {
cfgNode := &dtos.NavLink{
Id: "cfg",
Text: "Configuration",
SubTitle: "Organization: " + c.OrgName,
Icon: "gicon gicon-cog",
Url: setting.AppSubUrl + "/plugins",
Children: []*dtos.NavLink{
{
Text: "Plugins",
Id: "plugins",
Description: "View and configure plugins",
Icon: "gicon gicon-plugins",
Url: setting.AppSubUrl + "/plugins",
},
},
}
data.NavTree = append(data.NavTree, cfgNode)
}

View File

@@ -44,7 +44,7 @@ func (hs *HTTPServer) LoginView(c *models.ReqContext) {
viewData.Settings["loginHint"] = setting.LoginHint
viewData.Settings["passwordHint"] = setting.PasswordHint
viewData.Settings["disableLoginForm"] = setting.DisableLoginForm
viewData.Settings["samlEnabled"] = hs.Cfg.SAMLEnabled
viewData.Settings["samlEnabled"] = setting.IsEnterprise && hs.Cfg.SAMLEnabled
if loginError, ok := tryGetEncryptedCookie(c, LoginErrorCookieName); ok {
//this cookie is only set whenever an OAuth login fails
@@ -81,7 +81,7 @@ func tryOAuthAutoLogin(c *models.ReqContext) bool {
}
oauthInfos := setting.OAuthService.OAuthInfos
if len(oauthInfos) != 1 {
log.Warn("Skipping OAuth auto login because multiple OAuth providers are configured.")
log.Warn("Skipping OAuth auto login because multiple OAuth providers are configured")
return false
}
for key := range setting.OAuthService.OAuthInfos {
@@ -114,12 +114,16 @@ func (hs *HTTPServer) LoginPost(c *models.ReqContext, cmd dtos.LoginCommand) Res
}
if err := bus.Dispatch(authQuery); err != nil {
e401 := Error(401, "Invalid username or password", err)
if err == login.ErrInvalidCredentials || err == login.ErrTooManyLoginAttempts {
return Error(401, "Invalid username or password", err)
return e401
}
// Do not expose disabled status,
// just show incorrect user credentials error (see #17947)
if err == login.ErrUserDisabled {
return Error(401, "User is disabled", err)
hs.log.Warn("User is disabled", "user", cmd.User)
return e401
}
return Error(500, "Error while trying to authenticate user", err)
@@ -138,7 +142,7 @@ func (hs *HTTPServer) LoginPost(c *models.ReqContext, cmd dtos.LoginCommand) Res
c.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/")
}
metrics.M_Api_Login_Post.Inc()
metrics.MApiLoginPost.Inc()
return JSON(200, result)
}

View File

@@ -60,7 +60,7 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
if code == "" {
state := GenStateString()
hashedState := hashStatecode(state, setting.OAuthService.OAuthInfos[name].ClientSecret)
hs.writeCookie(ctx.Resp, OauthStateCookieName, hashedState, 60)
hs.writeCookie(ctx.Resp, OauthStateCookieName, hashedState, 60, http.SameSiteLaxMode)
if setting.OAuthService.OAuthInfos[name].HostedDomain == "" {
ctx.Redirect(connect.AuthCodeURL(state, oauth2.AccessTypeOnline))
} else {
@@ -73,7 +73,7 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
// delete cookie
ctx.Resp.Header().Del("Set-Cookie")
hs.deleteCookie(ctx.Resp, OauthStateCookieName)
hs.deleteCookie(ctx.Resp, OauthStateCookieName, http.SameSiteLaxMode)
if cookieState == "" {
ctx.Handle(500, "login.OAuthLogin(missing saved state)", nil)
@@ -191,15 +191,18 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
return
}
// Do not expose disabled status,
// just show incorrect user credentials error (see #17947)
if cmd.Result.IsDisabled {
hs.redirectWithError(ctx, login.ErrUserDisabled)
oauthLogger.Warn("User is disabled", "user", cmd.Result.Login)
hs.redirectWithError(ctx, login.ErrInvalidCredentials)
return
}
// login
hs.loginUserWithUser(cmd.Result, ctx)
metrics.M_Api_Login_OAuth.Inc()
metrics.MApiLoginOAuth.Inc()
if redirectTo, _ := url.QueryUnescape(ctx.GetCookie("redirect_to")); len(redirectTo) > 0 {
ctx.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/")
@@ -210,11 +213,11 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
ctx.Redirect(setting.AppSubUrl + "/")
}
func (hs *HTTPServer) deleteCookie(w http.ResponseWriter, name string) {
hs.writeCookie(w, name, "", -1)
func (hs *HTTPServer) deleteCookie(w http.ResponseWriter, name string, sameSite http.SameSite) {
hs.writeCookie(w, name, "", -1, sameSite)
}
func (hs *HTTPServer) writeCookie(w http.ResponseWriter, name string, value string, maxAge int) {
func (hs *HTTPServer) writeCookie(w http.ResponseWriter, name string, value string, maxAge int, sameSite http.SameSite) {
http.SetCookie(w, &http.Cookie{
Name: name,
MaxAge: maxAge,
@@ -222,7 +225,7 @@ func (hs *HTTPServer) writeCookie(w http.ResponseWriter, name string, value stri
HttpOnly: true,
Path: setting.AppSubUrl + "/",
Secure: hs.Cfg.CookieSecure,
SameSite: hs.Cfg.CookieSameSite,
SameSite: sameSite,
})
}

View File

@@ -88,7 +88,7 @@ func CreateOrg(c *m.ReqContext, cmd m.CreateOrgCommand) Response {
return Error(500, "Failed to create organization", err)
}
metrics.M_Api_Org_Create.Inc()
metrics.MApiOrgCreate.Inc()
return JSON(200, &util.DynMap{
"orgId": cmd.Result.Id,

View File

@@ -188,8 +188,8 @@ func (hs *HTTPServer) CompleteInvite(c *m.ReqContext, completeInvite dtos.Comple
hs.loginUserWithUser(user, c)
metrics.M_Api_User_SignUpCompleted.Inc()
metrics.M_Api_User_SignUpInvite.Inc()
metrics.MApiUserSignUpCompleted.Inc()
metrics.MApiUserSignUpInvite.Inc()
return Success("User created and logged in")
}

View File

@@ -61,6 +61,6 @@ func Search(c *m.ReqContext) Response {
return Error(500, "Search failed", err)
}
c.TimeRequest(metrics.M_Api_Dashboard_Search)
c.TimeRequest(metrics.MApiDashboardSearch)
return JSON(200, searchQuery.Result)
}

View File

@@ -46,7 +46,7 @@ func SignUp(c *m.ReqContext, form dtos.SignUpForm) Response {
Code: cmd.Code,
})
metrics.M_Api_User_SignUpStarted.Inc()
metrics.MApiUserSignUpStarted.Inc()
return JSON(200, util.DynMap{"status": "SignUpCreated"})
}
@@ -110,7 +110,7 @@ func (hs *HTTPServer) SignUpStep2(c *m.ReqContext, form dtos.SignUpStep2Form) Re
}
hs.loginUserWithUser(user, c)
metrics.M_Api_User_SignUpCompleted.Inc()
metrics.MApiUserSignUpCompleted.Inc()
return JSON(200, apiResponse)
}

View File

@@ -335,6 +335,8 @@ func GetAuthProviderLabel(authModule string) string {
return "GitLab"
case "oauth_grafana_com", "oauth_grafananet":
return "grafana.com"
case "auth.saml":
return "SAML"
case "ldap", "":
return "LDAP"
default:

View File

@@ -6,37 +6,53 @@ import (
"path/filepath"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
"golang.org/x/xerrors"
)
func GetGrafanaPluginDir(currentOS string) string {
if isDevEnvironment() {
return "../data/plugins"
if rootPath, ok := tryGetRootForDevEnvironment(); ok {
return filepath.Join(rootPath, "data/plugins")
}
return returnOsDefault(currentOS)
}
func isDevEnvironment() bool {
// if ../conf/defaults.ini exists, grafana is not installed as package
// that its in development environment.
// getGrafanaRoot tries to get the repo root when developing Grafana. It is not perfect: it only looks at the
// binary path and guesses from that, so when not running in a dev environment it returns a bogus path.
func getGrafanaRoot() (string, error) {
ex, err := os.Executable()
if err != nil {
logger.Error("Could not get executable path. Assuming non dev environment.")
return false
return "", xerrors.New("Failed to get executable path")
}
exPath := filepath.Dir(ex)
_, last := path.Split(exPath)
if last == "bin" {
// In dev env the executable for current platform is created in 'bin/' dir
defaultsPath := filepath.Join(exPath, "../conf/defaults.ini")
_, err = os.Stat(defaultsPath)
return err == nil
return filepath.Join(exPath, ".."), nil
}
// But at the same time there are per platform directories that contain the binaries and can also be used.
defaultsPath := filepath.Join(exPath, "../../conf/defaults.ini")
_, err = os.Stat(defaultsPath)
return err == nil
return filepath.Join(exPath, "../.."), nil
}
// tryGetRootForDevEnvironment returns root path if we are in dev environment. It checks if conf/defaults.ini exists
// which should only exist in dev. Second param is false if we are not in dev or if it wasn't possible to determine it.
func tryGetRootForDevEnvironment() (string, bool) {
rootPath, err := getGrafanaRoot()
if err != nil {
logger.Error("Could not get executable path. Assuming non dev environment.", err)
return "", false
}
devenvPath := filepath.Join(rootPath, "devenv")
_, err = os.Stat(devenvPath)
if err != nil {
return "", false
}
return rootPath, true
}
func returnOsDefault(currentOs string) string {

View File

@@ -3,103 +3,180 @@ package metrics
import (
"runtime"
"github.com/grafana/grafana/pkg/setting"
"github.com/prometheus/client_golang/prometheus"
"github.com/grafana/grafana/pkg/setting"
)
const exporterName = "grafana"
var (
M_Instance_Start prometheus.Counter
M_Page_Status *prometheus.CounterVec
M_Api_Status *prometheus.CounterVec
M_Proxy_Status *prometheus.CounterVec
M_Http_Request_Total *prometheus.CounterVec
M_Http_Request_Summary *prometheus.SummaryVec
// MInstanceStart is a metric counter for started instances
MInstanceStart prometheus.Counter
M_Api_User_SignUpStarted prometheus.Counter
M_Api_User_SignUpCompleted prometheus.Counter
M_Api_User_SignUpInvite prometheus.Counter
M_Api_Dashboard_Save prometheus.Summary
M_Api_Dashboard_Get prometheus.Summary
M_Api_Dashboard_Search prometheus.Summary
M_Api_Admin_User_Create prometheus.Counter
M_Api_Login_Post prometheus.Counter
M_Api_Login_OAuth prometheus.Counter
M_Api_Org_Create prometheus.Counter
// MPageStatus is a metric page http response status
MPageStatus *prometheus.CounterVec
M_Api_Dashboard_Snapshot_Create prometheus.Counter
M_Api_Dashboard_Snapshot_External prometheus.Counter
M_Api_Dashboard_Snapshot_Get prometheus.Counter
M_Api_Dashboard_Insert prometheus.Counter
M_Alerting_Result_State *prometheus.CounterVec
M_Alerting_Notification_Sent *prometheus.CounterVec
M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter
M_Aws_CloudWatch_ListMetrics prometheus.Counter
M_Aws_CloudWatch_GetMetricData prometheus.Counter
M_DB_DataSource_QueryById prometheus.Counter
// MApiStatus is a metric api http response status
MApiStatus *prometheus.CounterVec
// Timers
M_DataSource_ProxyReq_Timer prometheus.Summary
M_Alerting_Execution_Time prometheus.Summary
// MProxyStatus is a metric proxy http response status
MProxyStatus *prometheus.CounterVec
// MHttpRequestTotal is a metric http request counter
MHttpRequestTotal *prometheus.CounterVec
// MHttpRequestSummary is a metric http request summary
MHttpRequestSummary *prometheus.SummaryVec
// MApiUserSignUpStarted is a metric amount of users who started the signup flow
MApiUserSignUpStarted prometheus.Counter
// MApiUserSignUpCompleted is a metric amount of users who completed the signup flow
MApiUserSignUpCompleted prometheus.Counter
// MApiUserSignUpInvite is a metric amount of users who have been invited
MApiUserSignUpInvite prometheus.Counter
// MApiDashboardSave is a metric summary for dashboard save duration
MApiDashboardSave prometheus.Summary
// MApiDashboardGet is a metric summary for dashboard get duration
MApiDashboardGet prometheus.Summary
// MApiDashboardSearch is a metric summary for dashboard search duration
MApiDashboardSearch prometheus.Summary
// MApiAdminUserCreate is a metric api admin user created counter
MApiAdminUserCreate prometheus.Counter
// MApiLoginPost is a metric api login post counter
MApiLoginPost prometheus.Counter
// MApiLoginOAuth is a metric api login oauth counter
MApiLoginOAuth prometheus.Counter
// MApiLoginSAML is a metric api login SAML counter
MApiLoginSAML prometheus.Counter
// MApiOrgCreate is a metric api org created counter
MApiOrgCreate prometheus.Counter
// MApiDashboardSnapshotCreate is a metric dashboard snapshots created
MApiDashboardSnapshotCreate prometheus.Counter
// MApiDashboardSnapshotExternal is a metric external dashboard snapshots created
MApiDashboardSnapshotExternal prometheus.Counter
// MApiDashboardSnapshotGet is a metric loaded dashboards
MApiDashboardSnapshotGet prometheus.Counter
// MApiDashboardInsert is a metric dashboards inserted
MApiDashboardInsert prometheus.Counter
// MAlertingResultState is a metric alert execution result counter
MAlertingResultState *prometheus.CounterVec
// MAlertingNotificationSent is a metric counter for how many alert notifications been sent
MAlertingNotificationSent *prometheus.CounterVec
// MAwsCloudWatchGetMetricStatistics is a metric counter for getting metric statistics from aws
MAwsCloudWatchGetMetricStatistics prometheus.Counter
// MAwsCloudWatchListMetrics is a metric counter for getting list of metrics from aws
MAwsCloudWatchListMetrics prometheus.Counter
// MAwsCloudWatchGetMetricData is a metric counter for getting metric data time series from aws
MAwsCloudWatchGetMetricData prometheus.Counter
// MDBDataSourceQueryByID is a metric counter for getting datasource by id
MDBDataSourceQueryByID prometheus.Counter
// LDAPUsersSyncExecutionTime is a metric summary for LDAP users sync execution duration
LDAPUsersSyncExecutionTime prometheus.Summary
)
// Timers
var (
// MDataSourceProxyReqTimer is a metric summary for dataproxy request duration
MDataSourceProxyReqTimer prometheus.Summary
// MAlertingExecutionTime is a metric summary of alert execution duration
MAlertingExecutionTime prometheus.Summary
)
// StatTotals
var (
M_Alerting_Active_Alerts prometheus.Gauge
M_StatTotal_Dashboards prometheus.Gauge
M_StatTotal_Users prometheus.Gauge
M_StatActive_Users prometheus.Gauge
M_StatTotal_Orgs prometheus.Gauge
M_StatTotal_Playlists prometheus.Gauge
// MAlertingActiveAlerts is a metric amount of active alerts
MAlertingActiveAlerts prometheus.Gauge
StatsTotalViewers prometheus.Gauge
StatsTotalEditors prometheus.Gauge
StatsTotalAdmins prometheus.Gauge
// MStatTotalDashboards is a metric total amount of dashboards
MStatTotalDashboards prometheus.Gauge
// MStatTotalUsers is a metric total amount of users
MStatTotalUsers prometheus.Gauge
// MStatActiveUsers is a metric number of active users
MStatActiveUsers prometheus.Gauge
// MStatTotalOrgs is a metric total amount of orgs
MStatTotalOrgs prometheus.Gauge
// MStatTotalPlaylists is a metric total amount of playlists
MStatTotalPlaylists prometheus.Gauge
// StatsTotalViewers is a metric total amount of viewers
StatsTotalViewers prometheus.Gauge
// StatsTotalEditors is a metric total amount of editors
StatsTotalEditors prometheus.Gauge
// StatsTotalAdmins is a metric total amount of admins
StatsTotalAdmins prometheus.Gauge
// StatsTotalActiveViewers is a metric total amount of active viewers
StatsTotalActiveViewers prometheus.Gauge
// StatsTotalActiveEditors is a metric total amount of active editors
StatsTotalActiveEditors prometheus.Gauge
StatsTotalActiveAdmins prometheus.Gauge
// M_Grafana_Version is a gauge that contains build info about this binary
//
// Deprecated: use M_Grafana_Build_Version instead.
M_Grafana_Version *prometheus.GaugeVec
// StatsTotalActiveAdmins is a metric total amount of active admins
StatsTotalActiveAdmins prometheus.Gauge
// grafanaBuildVersion is a gauge that contains build info about this binary
// grafanaBuildVersion is a metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built
grafanaBuildVersion *prometheus.GaugeVec
)
func init() {
M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{
httpStatusCodes := []string{"200", "404", "500", "unknown"}
MInstanceStart = prometheus.NewCounter(prometheus.CounterOpts{
Name: "instance_start_total",
Help: "counter for started instances",
Namespace: exporterName,
})
httpStatusCodes := []string{"200", "404", "500", "unknown"}
M_Page_Status = newCounterVecStartingAtZero(
MPageStatus = newCounterVecStartingAtZero(
prometheus.CounterOpts{
Name: "page_response_status_total",
Help: "page http response status",
Namespace: exporterName,
}, []string{"code"}, httpStatusCodes...)
M_Api_Status = newCounterVecStartingAtZero(
MApiStatus = newCounterVecStartingAtZero(
prometheus.CounterOpts{
Name: "api_response_status_total",
Help: "api http response status",
Namespace: exporterName,
}, []string{"code"}, httpStatusCodes...)
M_Proxy_Status = newCounterVecStartingAtZero(
MProxyStatus = newCounterVecStartingAtZero(
prometheus.CounterOpts{
Name: "proxy_response_status_total",
Help: "proxy http response status",
Namespace: exporterName,
}, []string{"code"}, httpStatusCodes...)
M_Http_Request_Total = prometheus.NewCounterVec(
MHttpRequestTotal = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "http_request_total",
Help: "http request counter",
@@ -107,7 +184,7 @@ func init() {
[]string{"handler", "statuscode", "method"},
)
M_Http_Request_Summary = prometheus.NewSummaryVec(
MHttpRequestSummary = prometheus.NewSummaryVec(
prometheus.SummaryOpts{
Name: "http_request_duration_milliseconds",
Help: "http request summary",
@@ -115,169 +192,181 @@ func init() {
[]string{"handler", "statuscode", "method"},
)
M_Api_User_SignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{
MApiUserSignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_user_signup_started_total",
Help: "amount of users who started the signup flow",
Namespace: exporterName,
})
M_Api_User_SignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{
MApiUserSignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_user_signup_completed_total",
Help: "amount of users who completed the signup flow",
Namespace: exporterName,
})
M_Api_User_SignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{
MApiUserSignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_user_signup_invite_total",
Help: "amount of users who have been invited",
Namespace: exporterName,
})
M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{
MApiDashboardSave = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dashboard_save_milliseconds",
Help: "summary for dashboard save duration",
Namespace: exporterName,
})
M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{
MApiDashboardGet = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dashboard_get_milliseconds",
Help: "summary for dashboard get duration",
Namespace: exporterName,
})
M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{
MApiDashboardSearch = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dashboard_search_milliseconds",
Help: "summary for dashboard search duration",
Namespace: exporterName,
})
M_Api_Admin_User_Create = newCounterStartingAtZero(prometheus.CounterOpts{
MApiAdminUserCreate = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_admin_user_created_total",
Help: "api admin user created counter",
Namespace: exporterName,
})
M_Api_Login_Post = newCounterStartingAtZero(prometheus.CounterOpts{
MApiLoginPost = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_login_post_total",
Help: "api login post counter",
Namespace: exporterName,
})
M_Api_Login_OAuth = newCounterStartingAtZero(prometheus.CounterOpts{
MApiLoginOAuth = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_login_oauth_total",
Help: "api login oauth counter",
Namespace: exporterName,
})
M_Api_Org_Create = newCounterStartingAtZero(prometheus.CounterOpts{
MApiLoginSAML = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_login_saml_total",
Help: "api login saml counter",
Namespace: exporterName,
})
MApiOrgCreate = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_org_create_total",
Help: "api org created counter",
Namespace: exporterName,
})
M_Api_Dashboard_Snapshot_Create = newCounterStartingAtZero(prometheus.CounterOpts{
MApiDashboardSnapshotCreate = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_dashboard_snapshot_create_total",
Help: "dashboard snapshots created",
Namespace: exporterName,
})
M_Api_Dashboard_Snapshot_External = newCounterStartingAtZero(prometheus.CounterOpts{
MApiDashboardSnapshotExternal = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_dashboard_snapshot_external_total",
Help: "external dashboard snapshots created",
Namespace: exporterName,
})
M_Api_Dashboard_Snapshot_Get = newCounterStartingAtZero(prometheus.CounterOpts{
MApiDashboardSnapshotGet = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_dashboard_snapshot_get_total",
Help: "loaded dashboards",
Namespace: exporterName,
})
M_Api_Dashboard_Insert = newCounterStartingAtZero(prometheus.CounterOpts{
MApiDashboardInsert = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_models_dashboard_insert_total",
Help: "dashboards inserted ",
Namespace: exporterName,
})
M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{
MAlertingResultState = prometheus.NewCounterVec(prometheus.CounterOpts{
Name: "alerting_result_total",
Help: "alert execution result counter",
Namespace: exporterName,
}, []string{"state"})
M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{
MAlertingNotificationSent = prometheus.NewCounterVec(prometheus.CounterOpts{
Name: "alerting_notification_sent_total",
Help: "counter for how many alert notifications been sent",
Namespace: exporterName,
}, []string{"type"})
M_Aws_CloudWatch_GetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{
MAwsCloudWatchGetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "aws_cloudwatch_get_metric_statistics_total",
Help: "counter for getting metric statistics from aws",
Namespace: exporterName,
})
M_Aws_CloudWatch_ListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{
MAwsCloudWatchListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "aws_cloudwatch_list_metrics_total",
Help: "counter for getting list of metrics from aws",
Namespace: exporterName,
})
M_Aws_CloudWatch_GetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{
MAwsCloudWatchGetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "aws_cloudwatch_get_metric_data_total",
Help: "counter for getting metric data time series from aws",
Namespace: exporterName,
})
M_DB_DataSource_QueryById = newCounterStartingAtZero(prometheus.CounterOpts{
MDBDataSourceQueryByID = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "db_datasource_query_by_id_total",
Help: "counter for getting datasource by id",
Namespace: exporterName,
})
M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{
LDAPUsersSyncExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "ldap_users_sync_execution_time",
Help: "summary for LDAP users sync execution duration",
Namespace: exporterName,
})
MDataSourceProxyReqTimer = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dataproxy_request_all_milliseconds",
Help: "summary for dataproxy request duration",
Namespace: exporterName,
})
M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{
MAlertingExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "alerting_execution_time_milliseconds",
Help: "summary of alert exeuction duration",
Namespace: exporterName,
})
M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{
MAlertingActiveAlerts = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "alerting_active_alerts",
Help: "amount of active alerts",
Namespace: exporterName,
})
M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{
MStatTotalDashboards = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "stat_totals_dashboard",
Help: "total amount of dashboards",
Namespace: exporterName,
})
M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{
MStatTotalUsers = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "stat_total_users",
Help: "total amount of users",
Namespace: exporterName,
})
M_StatActive_Users = prometheus.NewGauge(prometheus.GaugeOpts{
MStatActiveUsers = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "stat_active_users",
Help: "number of active users",
Namespace: exporterName,
})
M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{
MStatTotalOrgs = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "stat_total_orgs",
Help: "total amount of orgs",
Namespace: exporterName,
})
M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{
MStatTotalPlaylists = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "stat_total_playlists",
Help: "total amount of playlists",
Namespace: exporterName,
@@ -319,78 +408,69 @@ func init() {
Namespace: exporterName,
})
M_Grafana_Version = prometheus.NewGaugeVec(prometheus.GaugeOpts{
Name: "info",
Help: "Information about the Grafana. This metric is deprecated. please use `grafana_build_info`",
Namespace: exporterName,
}, []string{"version"})
grafanaBuildVersion = prometheus.NewGaugeVec(prometheus.GaugeOpts{
Name: "build_info",
Help: "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built.",
Help: "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built",
Namespace: exporterName,
}, []string{"version", "revision", "branch", "goversion", "edition"})
}
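
The newCounterStartingAtZero/newCounterVecStartingAtZero helpers used throughout init() are not shown in this diff; below is a minimal standalone sketch of the pattern (illustrative, not the metrics package's actual implementation).

package metricsketch

import "github.com/prometheus/client_golang/prometheus"

// newCounterVecStartingAtZero pre-seeds the given label values (e.g. the
// "200"/"404"/"500"/"unknown" status codes above) so every expected series is
// exported as 0 before the first increment, which keeps rate() queries sane.
func newCounterVecStartingAtZero(opts prometheus.CounterOpts, labels []string, labelValues ...string) *prometheus.CounterVec {
	counter := prometheus.NewCounterVec(opts, labels)
	for _, value := range labelValues {
		counter.WithLabelValues(value).Add(0)
	}
	return counter
}
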
// SetBuildInformation sets the build information for this binary
func SetBuildInformation(version, revision, branch string) {
// We export this info twice for backwards compatibility.
// Once this has been released for some time we should be able to remove `M_Grafana_Version`.
// The reason we added a new one is that it's common practice in the Prometheus community
// to name this metric `*_build_info`, so it's easy to do aggregation across all programs.
edition := "oss"
if setting.IsEnterprise {
edition = "enterprise"
}
M_Grafana_Version.WithLabelValues(version).Set(1)
grafanaBuildVersion.WithLabelValues(version, revision, branch, runtime.Version(), edition).Set(1)
}
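
To make the `*_build_info` convention above concrete, here is a small self-contained sketch; all label values are illustrative and the namespace is an assumption standing in for exporterName.

package main

import (
	"fmt"
	"runtime"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Build-info gauge following the *_build_info naming convention described above.
	buildInfo := prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Namespace: "grafana", // assumption: exporterName resolves to "grafana"
		Name:      "build_info",
		Help:      "A metric with a constant '1' value labeled by version, revision, branch, and goversion",
	}, []string{"version", "revision", "branch", "goversion", "edition"})

	reg := prometheus.NewRegistry()
	reg.MustRegister(buildInfo)

	// Illustrative build values; a real binary would inject these at build time.
	buildInfo.WithLabelValues("6.3.0-beta3", "0000000", "HEAD", runtime.Version(), "oss").Set(1)

	families, _ := reg.Gather()
	for _, mf := range families {
		fmt.Printf("%s has %d series\n", mf.GetName(), len(mf.GetMetric())) // grafana_build_info has 1 series
	}
}
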
func initMetricVars() {
prometheus.MustRegister(
M_Instance_Start,
M_Page_Status,
M_Api_Status,
M_Proxy_Status,
M_Http_Request_Total,
M_Http_Request_Summary,
M_Api_User_SignUpStarted,
M_Api_User_SignUpCompleted,
M_Api_User_SignUpInvite,
M_Api_Dashboard_Save,
M_Api_Dashboard_Get,
M_Api_Dashboard_Search,
M_DataSource_ProxyReq_Timer,
M_Alerting_Execution_Time,
M_Api_Admin_User_Create,
M_Api_Login_Post,
M_Api_Login_OAuth,
M_Api_Org_Create,
M_Api_Dashboard_Snapshot_Create,
M_Api_Dashboard_Snapshot_External,
M_Api_Dashboard_Snapshot_Get,
M_Api_Dashboard_Insert,
M_Alerting_Result_State,
M_Alerting_Notification_Sent,
M_Aws_CloudWatch_GetMetricStatistics,
M_Aws_CloudWatch_ListMetrics,
M_Aws_CloudWatch_GetMetricData,
M_DB_DataSource_QueryById,
M_Alerting_Active_Alerts,
M_StatTotal_Dashboards,
M_StatTotal_Users,
M_StatActive_Users,
M_StatTotal_Orgs,
M_StatTotal_Playlists,
M_Grafana_Version,
MInstanceStart,
MPageStatus,
MApiStatus,
MProxyStatus,
MHttpRequestTotal,
MHttpRequestSummary,
MApiUserSignUpStarted,
MApiUserSignUpCompleted,
MApiUserSignUpInvite,
MApiDashboardSave,
MApiDashboardGet,
MApiDashboardSearch,
MDataSourceProxyReqTimer,
MAlertingExecutionTime,
MApiAdminUserCreate,
MApiLoginPost,
MApiLoginOAuth,
MApiLoginSAML,
MApiOrgCreate,
MApiDashboardSnapshotCreate,
MApiDashboardSnapshotExternal,
MApiDashboardSnapshotGet,
MApiDashboardInsert,
MAlertingResultState,
MAlertingNotificationSent,
MAwsCloudWatchGetMetricStatistics,
MAwsCloudWatchListMetrics,
MAwsCloudWatchGetMetricData,
MDBDataSourceQueryByID,
LDAPUsersSyncExecutionTime,
MAlertingActiveAlerts,
MStatTotalDashboards,
MStatTotalUsers,
MStatActiveUsers,
MStatTotalOrgs,
MStatTotalPlaylists,
StatsTotalViewers,
StatsTotalEditors,
StatsTotalAdmins,
StatsTotalActiveViewers,
StatsTotalActiveEditors,
StatsTotalActiveAdmins,
grafanaBuildVersion)
grafanaBuildVersion,
)
}


@@ -46,7 +46,7 @@ func (im *InternalMetricsService) Run(ctx context.Context) error {
}
}
M_Instance_Start.Inc()
MInstanceStart.Inc()
<-ctx.Done()
return ctx.Err()


@@ -22,8 +22,12 @@ func parseRedisConnStr(connStr string) (*redis.Options, error) {
keyValueCSV := strings.Split(connStr, ",")
options := &redis.Options{Network: "tcp"}
for _, rawKeyValue := range keyValueCSV {
keyValueTuple := strings.Split(rawKeyValue, "=")
keyValueTuple := strings.SplitN(rawKeyValue, "=", 2)
if len(keyValueTuple) != 2 {
if strings.HasPrefix(rawKeyValue, "password") {
// don't log the password
rawKeyValue = "password******"
}
return nil, fmt.Errorf("incorrect redis connection string format detected for '%v', format is key=value,key=value", rawKeyValue)
}
connKey := keyValueTuple[0]
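
The switch to strings.SplitN above keeps '=' characters that appear inside values (for example in passwords) while still rejecting malformed pairs, and masks the password when building the error. A standalone sketch of the same parsing logic, using an illustrative connection string:

package main

import (
	"fmt"
	"strings"
)

// parseConnStr mirrors the key=value,key=value parsing shown above.
// SplitN with a limit of 2 splits only on the first '=' of each pair.
func parseConnStr(connStr string) (map[string]string, error) {
	options := map[string]string{}
	for _, rawKeyValue := range strings.Split(connStr, ",") {
		keyValueTuple := strings.SplitN(rawKeyValue, "=", 2)
		if len(keyValueTuple) != 2 {
			if strings.HasPrefix(rawKeyValue, "password") {
				rawKeyValue = "password******" // never echo the password in errors
			}
			return nil, fmt.Errorf("incorrect connection string format detected for '%v', format is key=value,key=value", rawKeyValue)
		}
		options[keyValueTuple[0]] = keyValueTuple[1]
	}
	return options, nil
}

func main() {
	opts, err := parseConnStr("addr=127.0.0.1:6379,password=a=b,db=0") // illustrative values
	fmt.Println(opts, err)
}
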


@@ -161,11 +161,11 @@ func (uss *UsageStatsService) updateTotalStats() {
return
}
metrics.M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards))
metrics.M_StatTotal_Users.Set(float64(statsQuery.Result.Users))
metrics.M_StatActive_Users.Set(float64(statsQuery.Result.ActiveUsers))
metrics.M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists))
metrics.M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs))
metrics.MStatTotalDashboards.Set(float64(statsQuery.Result.Dashboards))
metrics.MStatTotalUsers.Set(float64(statsQuery.Result.Users))
metrics.MStatActiveUsers.Set(float64(statsQuery.Result.ActiveUsers))
metrics.MStatTotalPlaylists.Set(float64(statsQuery.Result.Playlists))
metrics.MStatTotalOrgs.Set(float64(statsQuery.Result.Orgs))
metrics.StatsTotalViewers.Set(float64(statsQuery.Result.Viewers))
metrics.StatsTotalActiveViewers.Set(float64(statsQuery.Result.ActiveViewers))
metrics.StatsTotalEditors.Set(float64(statsQuery.Result.Editors))


@@ -21,11 +21,19 @@ import (
var getTime = time.Now
const (
errStringInvalidUsernamePassword = "Invalid username or password"
errStringInvalidAPIKey = "Invalid API key"
)
var (
ReqGrafanaAdmin = Auth(&AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
ReqSignedIn = Auth(&AuthOptions{ReqSignedIn: true})
ReqEditorRole = RoleAuth(models.ROLE_EDITOR, models.ROLE_ADMIN)
ReqOrgAdmin = RoleAuth(models.ROLE_ADMIN)
ReqGrafanaAdmin = Auth(&AuthOptions{
ReqSignedIn: true,
ReqGrafanaAdmin: true,
})
ReqSignedIn = Auth(&AuthOptions{ReqSignedIn: true})
ReqEditorRole = RoleAuth(models.ROLE_EDITOR, models.ROLE_ADMIN)
ReqOrgAdmin = RoleAuth(models.ROLE_ADMIN)
)
func GetContextHandler(
@@ -106,14 +114,14 @@ func initContextWithApiKey(ctx *models.ReqContext) bool {
// base64 decode key
decoded, err := apikeygen.Decode(keyString)
if err != nil {
ctx.JsonApiErr(401, "Invalid API key", err)
ctx.JsonApiErr(401, errStringInvalidAPIKey, err)
return true
}
// fetch key
keyQuery := models.GetApiKeyByNameQuery{KeyName: decoded.Name, OrgId: decoded.OrgId}
if err := bus.Dispatch(&keyQuery); err != nil {
ctx.JsonApiErr(401, "Invalid API key", err)
ctx.JsonApiErr(401, errStringInvalidAPIKey, err)
return true
}
@@ -121,7 +129,7 @@ func initContextWithApiKey(ctx *models.ReqContext) bool {
// validate api key
if !apikeygen.IsValid(decoded, apikey.Key) {
ctx.JsonApiErr(401, "Invalid API key", err)
ctx.JsonApiErr(401, errStringInvalidAPIKey, err)
return true
}
@@ -140,7 +148,6 @@ func initContextWithApiKey(ctx *models.ReqContext) bool {
}
func initContextWithBasicAuth(ctx *models.ReqContext, orgId int64) bool {
if !setting.BasicAuthEnabled {
return false
}
@@ -158,21 +165,39 @@ func initContextWithBasicAuth(ctx *models.ReqContext, orgId int64) bool {
loginQuery := models.GetUserByLoginQuery{LoginOrEmail: username}
if err := bus.Dispatch(&loginQuery); err != nil {
ctx.JsonApiErr(401, "Basic auth failed", err)
ctx.Logger.Debug(
"Failed to look up the username",
"username", username,
)
ctx.JsonApiErr(401, errStringInvalidUsernamePassword, err)
return true
}
user := loginQuery.Result
loginUserQuery := models.LoginUserQuery{Username: username, Password: password, User: user}
loginUserQuery := models.LoginUserQuery{
Username: username,
Password: password,
User: user,
}
if err := bus.Dispatch(&loginUserQuery); err != nil {
ctx.JsonApiErr(401, "Invalid username or password", err)
ctx.Logger.Debug(
"Failed to authorize the user",
"username", username,
)
ctx.JsonApiErr(401, errStringInvalidUsernamePassword, err)
return true
}
query := models.GetSignedInUserQuery{UserId: user.Id, OrgId: orgId}
if err := bus.Dispatch(&query); err != nil {
ctx.JsonApiErr(401, "Authentication error", err)
ctx.Logger.Error(
"Failed at user signed in",
"id", user.Id,
"org", orgId,
)
ctx.JsonApiErr(401, errStringInvalidUsernamePassword, err)
return true
}
@@ -193,14 +218,14 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.
token, err := authTokenService.LookupToken(ctx.Req.Context(), rawToken)
if err != nil {
ctx.Logger.Error("failed to look up user based on cookie", "error", err)
ctx.Logger.Error("Failed to look up user based on cookie", "error", err)
WriteSessionCookie(ctx, "", -1)
return false
}
query := models.GetSignedInUserQuery{UserId: token.UserId, OrgId: orgID}
if err := bus.Dispatch(&query); err != nil {
ctx.Logger.Error("failed to get user with id", "userId", token.UserId, "error", err)
ctx.Logger.Error("Failed to get user with id", "userId", token.UserId, "error", err)
return false
}
@@ -210,7 +235,7 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.
rotated, err := authTokenService.TryRotateToken(ctx.Req.Context(), token, ctx.RemoteAddr(), ctx.Req.UserAgent())
if err != nil {
ctx.Logger.Error("failed to rotate token", "error", err)
ctx.Logger.Error("Failed to rotate token", "error", err)
return true
}
@@ -223,7 +248,7 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.
func WriteSessionCookie(ctx *models.ReqContext, value string, maxLifetimeDays int) {
if setting.Env == setting.DEV {
ctx.Logger.Info("new token", "unhashed token", value)
ctx.Logger.Info("New token", "unhashed token", value)
}
var maxAge int
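
The pattern above logs the specific failure (unknown user, wrong password, missing signed-in user) at Debug or Error on the server side, while every basic-auth failure returns the same generic 401 message from errStringInvalidUsernamePassword. A minimal standalone sketch of that idea, with hypothetical names not taken from the diff:

package main

import (
	"errors"
	"fmt"
	"log"
)

const errStringInvalidUsernamePassword = "Invalid username or password"

// lookupAndCheck is a hypothetical stand-in for the user lookup and password
// dispatch calls above; only the error-handling shape matters for this sketch.
func lookupAndCheck(username, password string) error {
	if username != "admin" {
		return errors.New("user not found")
	}
	if password != "secret" {
		return errors.New("invalid password")
	}
	return nil
}

func main() {
	if err := lookupAndCheck("admin", "wrong"); err != nil {
		// The specific cause stays in the server-side log ...
		log.Printf("Failed to authorize the user: username=%q err=%v", "admin", err)
		// ... while the client always sees the same generic 401 message.
		fmt.Println(401, errStringInvalidUsernamePassword)
	}
}
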


@@ -11,6 +11,7 @@ import (
"time"
. "github.com/smartystreets/goconvey/convey"
"github.com/stretchr/testify/assert"
"gopkg.in/macaron.v1"
"github.com/grafana/grafana/pkg/api/dtos"
@@ -21,7 +22,6 @@ import (
"github.com/grafana/grafana/pkg/services/login"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
"github.com/stretchr/testify/assert"
)
const errorTemplate = "error-template"


@@ -20,9 +20,9 @@ func RequestMetrics(handler string) macaron.Handler {
code := sanitizeCode(status)
method := sanitizeMethod(req.Method)
metrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc()
metrics.MHttpRequestTotal.WithLabelValues(handler, code, method).Inc()
duration := time.Since(now).Nanoseconds() / int64(time.Millisecond)
metrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(float64(duration))
metrics.MHttpRequestSummary.WithLabelValues(handler, code, method).Observe(float64(duration))
if strings.HasPrefix(req.RequestURI, "/api/datasources/proxy") {
countProxyRequests(status)
@@ -37,39 +37,39 @@ func RequestMetrics(handler string) macaron.Handler {
func countApiRequests(status int) {
switch status {
case 200:
metrics.M_Api_Status.WithLabelValues("200").Inc()
metrics.MApiStatus.WithLabelValues("200").Inc()
case 404:
metrics.M_Api_Status.WithLabelValues("404").Inc()
metrics.MApiStatus.WithLabelValues("404").Inc()
case 500:
metrics.M_Api_Status.WithLabelValues("500").Inc()
metrics.MApiStatus.WithLabelValues("500").Inc()
default:
metrics.M_Api_Status.WithLabelValues("unknown").Inc()
metrics.MApiStatus.WithLabelValues("unknown").Inc()
}
}
func countPageRequests(status int) {
switch status {
case 200:
metrics.M_Page_Status.WithLabelValues("200").Inc()
metrics.MPageStatus.WithLabelValues("200").Inc()
case 404:
metrics.M_Page_Status.WithLabelValues("404").Inc()
metrics.MPageStatus.WithLabelValues("404").Inc()
case 500:
metrics.M_Page_Status.WithLabelValues("500").Inc()
metrics.MPageStatus.WithLabelValues("500").Inc()
default:
metrics.M_Page_Status.WithLabelValues("unknown").Inc()
metrics.MPageStatus.WithLabelValues("unknown").Inc()
}
}
func countProxyRequests(status int) {
switch status {
case 200:
metrics.M_Proxy_Status.WithLabelValues("200").Inc()
metrics.MProxyStatus.WithLabelValues("200").Inc()
case 404:
metrics.M_Proxy_Status.WithLabelValues("400").Inc()
metrics.MProxyStatus.WithLabelValues("400").Inc()
case 500:
metrics.M_Proxy_Status.WithLabelValues("500").Inc()
metrics.MProxyStatus.WithLabelValues("500").Inc()
default:
metrics.M_Proxy_Status.WithLabelValues("unknown").Inc()
metrics.MProxyStatus.WithLabelValues("unknown").Inc()
}
}


@@ -62,6 +62,7 @@ type CreateUserCommand struct {
Password string
EmailVerified bool
IsAdmin bool
IsDisabled bool
SkipOrgSetup bool
DefaultOrgRole string
@@ -146,6 +147,8 @@ type SearchUsersQuery struct {
Limit int
AuthModule string
IsDisabled *bool
Result SearchUserQueryResult
}


@@ -112,11 +112,19 @@ type PluginDependencyItem struct {
Version string `json:"version"`
}
type PluginBuildInfo struct {
Time int64 `json:"time,omitempty"`
Repo string `json:"repo,omitempty"`
Branch string `json:"branch,omitempty"`
Hash string `json:"hash,omitempty"`
}
type PluginInfo struct {
Author PluginInfoLink `json:"author"`
Description string `json:"description"`
Links []PluginInfoLink `json:"links"`
Logos PluginLogos `json:"logos"`
Build PluginBuildInfo `json:"build"`
Screenshots []PluginScreenshots `json:"screenshots"`
Version string `json:"version"`
Updated string `json:"updated"`


@@ -70,5 +70,5 @@ func (e *DefaultEvalHandler) Eval(context *EvalContext) {
context.EndTime = time.Now()
elapsedTime := context.EndTime.Sub(context.StartTime).Nanoseconds() / int64(time.Millisecond)
metrics.M_Alerting_Execution_Time.Observe(float64(elapsedTime))
metrics.MAlertingExecutionTime.Observe(float64(elapsedTime))
}


@@ -57,7 +57,7 @@ func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, no
notifier := notifierState.notifier
n.log.Debug("Sending notification", "type", notifier.GetType(), "uid", notifier.GetNotifierUID(), "isDefault", notifier.GetIsDefault())
metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc()
metrics.MAlertingNotificationSent.WithLabelValues(notifier.GetType()).Inc()
err := notifier.Notify(evalContext)


@@ -43,6 +43,6 @@ func (arr *defaultRuleReader) fetch() []*Rule {
}
}
metrics.M_Alerting_Active_Alerts.Set(float64(len(res)))
metrics.MAlertingActiveAlerts.Set(float64(len(res)))
return res
}


@@ -44,7 +44,7 @@ func (handler *defaultResultHandler) handle(evalContext *EvalContext) error {
annotationData.Set("noData", true)
}
metrics.M_Alerting_Result_State.WithLabelValues(string(evalContext.Rule.State)).Inc()
metrics.MAlertingResultState.WithLabelValues(string(evalContext.Rule.State)).Inc()
if evalContext.shouldUpdateAlertState() {
handler.log.Info("New state change", "alertId", evalContext.Rule.ID, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState)


@@ -29,6 +29,10 @@ func appendIfNotEmpty(slice []string, values ...string) []string {
}
func getAttribute(name string, entry *ldap.Entry) string {
if strings.ToLower(name) == "dn" {
return entry.DN
}
for _, attr := range entry.Attributes {
if attr.Name == name {
if len(attr.Values) > 0 {
@@ -40,6 +44,10 @@ func getAttribute(name string, entry *ldap.Entry) string {
}
func getArrayAttribute(name string, entry *ldap.Entry) []string {
if strings.ToLower(name) == "dn" {
return []string{entry.DN}
}
for _, attr := range entry.Attributes {
if attr.Name == name && len(attr.Values) > 0 {
return attr.Values


@@ -266,7 +266,9 @@ func (server *Server) Users(logins []string) (
return nil, err
}
server.log.Debug("LDAP users found", "users", spew.Sdump(serializedUsers))
server.log.Debug(
"LDAP users found", "users", spew.Sdump(serializedUsers),
)
return serializedUsers, nil
}
@@ -327,6 +329,9 @@ func (server *Server) getSearchRequest(
inputs.Email,
inputs.Name,
inputs.MemberOf,
// In case of a POSIX LDAP schema server
server.Config.GroupSearchFilterUserAttribute,
)
search := ""
@@ -489,6 +494,7 @@ func (server *Server) requestMemberOf(entry *ldap.Entry) ([]string, error) {
if len(groupSearchResult.Entries) > 0 {
for _, group := range groupSearchResult.Entries {
memberOf = append(
memberOf,
getAttribute(groupIDAttribute, group),


@@ -105,6 +105,16 @@ func TestLDAPHelpers(t *testing.T) {
})
Convey("getAttribute()", t, func() {
Convey("Should get DN", func() {
entry := &ldap.Entry{
DN: "test",
}
result := getAttribute("dn", entry)
So(result, ShouldEqual, "test")
})
Convey("Should get username", func() {
value := []string{"roelgerrits"}
entry := &ldap.Entry{
@@ -137,6 +147,16 @@ func TestLDAPHelpers(t *testing.T) {
})
Convey("getArrayAttribute()", t, func() {
Convey("Should get DN", func() {
entry := &ldap.Entry{
DN: "test",
}
result := getArrayAttribute("dn", entry)
So(result, ShouldResemble, []string{"test"})
})
Convey("Should get username", func() {
value := []string{"roelgerrits"}
entry := &ldap.Entry{


@@ -11,6 +11,44 @@ import (
)
func TestLDAPPrivateMethods(t *testing.T) {
Convey("getSearchRequest()", t, func() {
Convey("with enabled GroupSearchFilterUserAttribute setting", func() {
server := &Server{
Config: &ServerConfig{
Attr: AttributeMap{
Username: "username",
Name: "name",
MemberOf: "memberof",
Email: "email",
},
GroupSearchFilterUserAttribute: "gansta",
SearchBaseDNs: []string{"BaseDNHere"},
},
log: log.New("test-logger"),
}
result := server.getSearchRequest("killa", []string{"gorilla"})
So(result, ShouldResemble, &ldap.SearchRequest{
BaseDN: "killa",
Scope: 2,
DerefAliases: 0,
SizeLimit: 0,
TimeLimit: 0,
TypesOnly: false,
Filter: "(|)",
Attributes: []string{
"username",
"email",
"name",
"memberof",
"gansta",
},
Controls: nil,
})
})
})
Convey("serializeUsers()", t, func() {
Convey("simple case", func() {
server := &Server{


@@ -67,10 +67,18 @@ func (rs *RenderingService) renderViaPhantomJS(ctx context.Context, opts Opts) (
timezone := ""
cmd.Env = os.Environ()
if opts.Timezone != "" {
timezone = isoTimeOffsetToPosixTz(opts.Timezone)
baseEnviron := os.Environ()
cmd.Env = appendEnviron(baseEnviron, "TZ", timezone)
cmd.Env = appendEnviron(cmd.Env, "TZ", timezone)
}
// Added to disable usage of newer versions of OpenSSL
// that seem to be incompatible with PhantomJS (used in Debian Buster)
if runtime.GOOS == "linux" {
disableNewOpenssl := "/etc/ssl"
cmd.Env = appendEnviron(cmd.Env, "OPENSSL_CONF", disableNewOpenssl)
}
rs.log.Debug("executing Phantomjs", "binPath", binPath, "cmdArgs", cmdArgs, "timezone", timezone)
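
For reference, a minimal sketch of an appendEnviron-style helper with the behavior the calls above assume (the actual helper is not shown in this diff): replace any existing NAME=... entry, then append the override, so the child process sees exactly one value for NAME.

package main

import (
	"fmt"
	"strings"
)

// appendEnviron returns a copy of baseEnviron with any existing NAME=... entry
// removed and NAME=value appended at the end.
func appendEnviron(baseEnviron []string, name string, value string) []string {
	results := make([]string, 0, len(baseEnviron)+1)
	prefix := fmt.Sprintf("%s=", name)
	for _, v := range baseEnviron {
		if !strings.HasPrefix(v, prefix) {
			results = append(results, v)
		}
	}
	return append(results, fmt.Sprintf("%s=%s", name, value))
}

func main() {
	env := []string{"PATH=/usr/bin", "TZ=UTC"}
	env = appendEnviron(env, "TZ", "Europe/Stockholm")   // illustrative timezone override
	env = appendEnviron(env, "OPENSSL_CONF", "/etc/ssl") // as in the Linux branch above
	fmt.Println(env)
}
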


@@ -87,7 +87,7 @@ func saveDashboard(sess *DBSession, cmd *m.SaveDashboardCommand) error {
dash.CreatedBy = userId
dash.Updated = time.Now()
dash.UpdatedBy = userId
metrics.M_Api_Dashboard_Insert.Inc()
metrics.MApiDashboardInsert.Inc()
affectedRows, err = sess.Insert(dash)
} else {
dash.SetVersion(dash.Version + 1)


@@ -25,7 +25,7 @@ func init() {
}
func GetDataSourceById(query *m.GetDataSourceByIdQuery) error {
metrics.M_DB_DataSource_QueryById.Inc()
metrics.MDBDataSourceQueryByID.Inc()
datasource := m.DataSource{OrgId: query.OrgId, Id: query.Id}
has, err := x.Get(&datasource)


@@ -106,6 +106,7 @@ func CreateUser(ctx context.Context, cmd *models.CreateUserCommand) error {
Login: cmd.Login,
Company: cmd.Company,
IsAdmin: cmd.IsAdmin,
IsDisabled: cmd.IsDisabled,
OrgId: orgId,
EmailVerified: cmd.EmailVerified,
Created: time.Now(),
@@ -455,6 +456,11 @@ func SearchUsers(query *models.SearchUsersQuery) error {
whereParams = append(whereParams, queryWithWildcards, queryWithWildcards, queryWithWildcards)
}
if query.IsDisabled != nil {
whereConditions = append(whereConditions, "is_disabled = ?")
whereParams = append(whereParams, query.IsDisabled)
}
if query.AuthModule != "" {
whereConditions = append(
whereConditions,


@@ -16,7 +16,7 @@ func TestUserDataAccess(t *testing.T) {
Convey("Testing DB", t, func() {
ss := InitTestDB(t)
Convey("Creating a user", func() {
Convey("Creates a user", func() {
cmd := &models.CreateUserCommand{
Email: "usertest@test.com",
Name: "user name",
@@ -35,27 +35,47 @@ func TestUserDataAccess(t *testing.T) {
So(query.Result.Password, ShouldEqual, "")
So(query.Result.Rands, ShouldHaveLength, 10)
So(query.Result.Salt, ShouldHaveLength, 10)
So(query.Result.IsDisabled, ShouldBeFalse)
})
})
Convey("Creates disabled user", func() {
cmd := &models.CreateUserCommand{
Email: "usertest@test.com",
Name: "user name",
Login: "user_test_login",
IsDisabled: true,
}
err := CreateUser(context.Background(), cmd)
So(err, ShouldBeNil)
Convey("Loading a user", func() {
query := models.GetUserByIdQuery{Id: cmd.Result.Id}
err := GetUserById(&query)
So(err, ShouldBeNil)
So(query.Result.Email, ShouldEqual, "usertest@test.com")
So(query.Result.Password, ShouldEqual, "")
So(query.Result.Rands, ShouldHaveLength, 10)
So(query.Result.Salt, ShouldHaveLength, 10)
So(query.Result.IsDisabled, ShouldBeTrue)
})
})
Convey("Given 5 users", func() {
var err error
var cmd *models.CreateUserCommand
users := []models.User{}
for i := 0; i < 5; i++ {
cmd = &models.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
Name: fmt.Sprint("user", i),
Login: fmt.Sprint("loginuser", i),
users := createFiveTestUsers(func(i int) *models.CreateUserCommand {
return &models.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
Name: fmt.Sprint("user", i),
Login: fmt.Sprint("loginuser", i),
IsDisabled: false,
}
err = CreateUser(context.Background(), cmd)
So(err, ShouldBeNil)
users = append(users, cmd.Result)
}
})
Convey("Can return the first page of users and a total count", func() {
query := models.SearchUsersQuery{Query: "", Page: 1, Limit: 3}
err = SearchUsers(&query)
err := SearchUsers(&query)
So(err, ShouldBeNil)
So(len(query.Result.Users), ShouldEqual, 3)
@@ -64,7 +84,7 @@ func TestUserDataAccess(t *testing.T) {
Convey("Can return the second page of users and a total count", func() {
query := models.SearchUsersQuery{Query: "", Page: 2, Limit: 3}
err = SearchUsers(&query)
err := SearchUsers(&query)
So(err, ShouldBeNil)
So(len(query.Result.Users), ShouldEqual, 2)
@@ -73,7 +93,7 @@ func TestUserDataAccess(t *testing.T) {
Convey("Can return list of users matching query on user name", func() {
query := models.SearchUsersQuery{Query: "use", Page: 1, Limit: 3}
err = SearchUsers(&query)
err := SearchUsers(&query)
So(err, ShouldBeNil)
So(len(query.Result.Users), ShouldEqual, 3)
@@ -103,7 +123,7 @@ func TestUserDataAccess(t *testing.T) {
Convey("Can return list of users matching query on email", func() {
query := models.SearchUsersQuery{Query: "ser1@test.com", Page: 1, Limit: 3}
err = SearchUsers(&query)
err := SearchUsers(&query)
So(err, ShouldBeNil)
So(len(query.Result.Users), ShouldEqual, 1)
@@ -112,14 +132,14 @@ func TestUserDataAccess(t *testing.T) {
Convey("Can return list of users matching query on login name", func() {
query := models.SearchUsersQuery{Query: "loginuser1", Page: 1, Limit: 3}
err = SearchUsers(&query)
err := SearchUsers(&query)
So(err, ShouldBeNil)
So(len(query.Result.Users), ShouldEqual, 1)
So(query.Result.TotalCount, ShouldEqual, 1)
})
Convey("can return list users based on their auth type", func() {
Convey("Can return list users based on their auth type", func() {
// add users to auth table
for index, user := range users {
authModule := "killa"
@@ -134,11 +154,11 @@ func TestUserDataAccess(t *testing.T) {
AuthModule: authModule,
AuthId: "gorilla",
}
err = SetAuthInfo(cmd2)
err := SetAuthInfo(cmd2)
So(err, ShouldBeNil)
}
query := models.SearchUsersQuery{AuthModule: "ldap"}
err = SearchUsers(&query)
err := SearchUsers(&query)
So(err, ShouldBeNil)
So(query.Result.Users, ShouldHaveLength, 3)
@@ -163,8 +183,51 @@ func TestUserDataAccess(t *testing.T) {
So(fourth, ShouldBeTrue)
})
Convey("Can return list users based on their is_disabled flag", func() {
ss = InitTestDB(t)
createFiveTestUsers(func(i int) *models.CreateUserCommand {
return &models.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
Name: fmt.Sprint("user", i),
Login: fmt.Sprint("loginuser", i),
IsDisabled: i%2 == 0,
}
})
isDisabled := false
query := models.SearchUsersQuery{IsDisabled: &isDisabled}
err := SearchUsers(&query)
So(err, ShouldBeNil)
So(query.Result.Users, ShouldHaveLength, 2)
first, third := false, false
for _, user := range query.Result.Users {
if user.Name == "user1" {
first = true
}
if user.Name == "user3" {
third = true
}
}
So(first, ShouldBeTrue)
So(third, ShouldBeTrue)
ss = InitTestDB(t)
users = createFiveTestUsers(func(i int) *models.CreateUserCommand {
return &models.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
Name: fmt.Sprint("user", i),
Login: fmt.Sprint("loginuser", i),
IsDisabled: false,
}
})
})
Convey("when a user is an org member and has been assigned permissions", func() {
err = AddOrgUser(&models.AddOrgUserCommand{LoginOrEmail: users[1].Login, Role: models.ROLE_VIEWER, OrgId: users[0].OrgId, UserId: users[1].Id})
err := AddOrgUser(&models.AddOrgUserCommand{LoginOrEmail: users[1].Login, Role: models.ROLE_VIEWER, OrgId: users[0].OrgId, UserId: users[1].Id})
So(err, ShouldBeNil)
testHelperUpdateDashboardAcl(1, models.DashboardAcl{DashboardId: 1, OrgId: users[0].OrgId, UserId: users[1].Id, Permission: models.PERMISSION_EDIT})
@@ -222,19 +285,77 @@ func TestUserDataAccess(t *testing.T) {
})
Convey("When batch disabling users", func() {
userIdsToDisable := []int64{}
for i := 0; i < 3; i++ {
userIdsToDisable = append(userIdsToDisable, users[i].Id)
}
disableCmd := models.BatchDisableUsersCommand{UserIds: userIdsToDisable, IsDisabled: true}
Convey("Should disable all users", func() {
disableCmd := models.BatchDisableUsersCommand{
UserIds: []int64{1, 2, 3, 4, 5},
IsDisabled: true,
}
err = BatchDisableUsers(&disableCmd)
So(err, ShouldBeNil)
err := BatchDisableUsers(&disableCmd)
So(err, ShouldBeNil)
isDisabled := true
query := &models.SearchUsersQuery{IsDisabled: &isDisabled}
err = SearchUsers(query)
So(err, ShouldBeNil)
So(query.Result.TotalCount, ShouldEqual, 5)
})
Convey("Should enable all users", func() {
ss = InitTestDB(t)
createFiveTestUsers(func(i int) *models.CreateUserCommand {
return &models.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
Name: fmt.Sprint("user", i),
Login: fmt.Sprint("loginuser", i),
IsDisabled: true,
}
})
disableCmd := models.BatchDisableUsersCommand{
UserIds: []int64{1, 2, 3, 4, 5},
IsDisabled: false,
}
err := BatchDisableUsers(&disableCmd)
So(err, ShouldBeNil)
isDisabled := false
query := &models.SearchUsersQuery{IsDisabled: &isDisabled}
err = SearchUsers(query)
So(err, ShouldBeNil)
So(query.Result.TotalCount, ShouldEqual, 5)
})
Convey("Should disable only specific users", func() {
ss = InitTestDB(t)
users = createFiveTestUsers(func(i int) *models.CreateUserCommand {
return &models.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
Name: fmt.Sprint("user", i),
Login: fmt.Sprint("loginuser", i),
IsDisabled: false,
}
})
userIdsToDisable := []int64{}
for i := 0; i < 3; i++ {
userIdsToDisable = append(userIdsToDisable, users[i].Id)
}
disableCmd := models.BatchDisableUsersCommand{
UserIds: userIdsToDisable,
IsDisabled: true,
}
err := BatchDisableUsers(&disableCmd)
So(err, ShouldBeNil)
Convey("Should disable all provided users", func() {
query := models.SearchUsersQuery{}
err = SearchUsers(&query)
So(err, ShouldBeNil)
So(query.Result.TotalCount, ShouldEqual, 5)
for _, user := range query.Result.Users {
shouldBeDisabled := false
@@ -253,6 +374,17 @@ func TestUserDataAccess(t *testing.T) {
}
}
})
// Since previous tests were destructive
ss = InitTestDB(t)
users = createFiveTestUsers(func(i int) *models.CreateUserCommand {
return &models.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
Name: fmt.Sprint("user", i),
Login: fmt.Sprint("loginuser", i),
IsDisabled: false,
}
})
})
Convey("When searching users", func() {
@@ -263,7 +395,7 @@ func TestUserDataAccess(t *testing.T) {
// Make the first log-in during the past
getTime = func() time.Time { return time.Now().AddDate(0, 0, -2) }
query := &models.GetUserByAuthInfoQuery{Login: login, AuthModule: "test1", AuthId: "test1"}
err = GetUserByAuthInfo(query)
err := GetUserByAuthInfo(query)
getTime = time.Now
So(err, ShouldBeNil)
@@ -349,3 +481,19 @@ func GetOrgUsersForTest(query *models.GetOrgUsersQuery) error {
err := sess.Find(&query.Result)
return err
}
func createFiveTestUsers(fn func(i int) *models.CreateUserCommand) []models.User {
var err error
var cmd *models.CreateUserCommand
users := []models.User{}
for i := 0; i < 5; i++ {
cmd = fn(i)
err = CreateUser(context.Background(), cmd)
users = append(users, cmd.Result)
So(err, ShouldBeNil)
}
return users
}


@@ -37,7 +37,7 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi
if err != nil {
return queryResponses, err
}
metrics.M_Aws_CloudWatch_GetMetricData.Add(float64(len(params.MetricDataQueries)))
metrics.MAwsCloudWatchGetMetricData.Add(float64(len(params.MetricDataQueries)))
for _, r := range resp.MetricDataResults {
if _, ok := mdr[*r.Id]; !ok {


@@ -81,7 +81,7 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatch
resp = partResp
}
metrics.M_Aws_CloudWatch_GetMetricStatistics.Inc()
metrics.MAwsCloudWatchGetMetricStatistics.Inc()
}
queryRes, err := parseResponse(resp, query)


@@ -642,7 +642,7 @@ func (e *CloudWatchExecutor) cloudwatchListMetrics(region string, namespace stri
var resp cloudwatch.ListMetricsOutput
err = svc.ListMetricsPages(params,
func(page *cloudwatch.ListMetricsOutput, lastPage bool) bool {
metrics.M_Aws_CloudWatch_ListMetrics.Inc()
metrics.MAwsCloudWatchListMetrics.Inc()
metrics, _ := awsutil.ValuesAtPath(page, "Metrics")
for _, metric := range metrics {
resp.Metrics = append(resp.Metrics, metric.(*cloudwatch.Metric))
@@ -722,7 +722,7 @@ func getAllMetrics(cwData *DatasourceInfo) (cloudwatch.ListMetricsOutput, error)
var resp cloudwatch.ListMetricsOutput
err = svc.ListMetricsPages(params,
func(page *cloudwatch.ListMetricsOutput, lastPage bool) bool {
metrics.M_Aws_CloudWatch_ListMetrics.Inc()
metrics.MAwsCloudWatchListMetrics.Inc()
metrics, _ := awsutil.ValuesAtPath(page, "Metrics")
for _, metric := range metrics {
resp.Metrics = append(resp.Metrics, metric.(*cloudwatch.Metric))


@@ -1,7 +1,8 @@
import React, { Component } from 'react';
import { UserPicker } from 'app/core/components/Select/UserPicker';
import { TeamPicker, Team } from 'app/core/components/Select/TeamPicker';
import { Select, SelectOptionItem } from '@grafana/ui';
import { Select } from '@grafana/ui';
import { SelectableValue } from '@grafana/data';
import { User } from 'app/types';
import {
dashboardPermissionLevels,
@@ -61,7 +62,7 @@ class AddPermissions extends Component<Props, NewDashboardAclItem> {
this.setState({ teamId: team && !Array.isArray(team) ? team.id : 0 });
};
onPermissionChanged = (permission: SelectOptionItem<PermissionLevel>) => {
onPermissionChanged = (permission: SelectableValue<PermissionLevel>) => {
this.setState({ permission: permission.value });
};


@@ -1,5 +1,6 @@
import React, { PureComponent } from 'react';
import { Select, SelectOptionItem } from '@grafana/ui';
import { Select } from '@grafana/ui';
import { SelectableValue } from '@grafana/data';
import { dashboardPermissionLevels, DashboardAcl, PermissionLevel } from 'app/types/acl';
import { FolderInfo } from 'app/types';
@@ -39,7 +40,7 @@ interface Props {
}
export default class PermissionsListItem extends PureComponent<Props> {
onPermissionChanged = (option: SelectOptionItem<PermissionLevel>) => {
onPermissionChanged = (option: SelectableValue<PermissionLevel>) => {
this.props.onPermissionChanged(this.props.item, option.value);
};


@@ -2,7 +2,8 @@
import React, { PureComponent } from 'react';
// Components
import { Select, SelectOptionItem } from '@grafana/ui';
import { Select } from '@grafana/ui';
import { SelectableValue } from '@grafana/data';
// Types
import { DataSourceSelectItem } from '@grafana/ui';
@@ -28,7 +29,7 @@ export class DataSourcePicker extends PureComponent<Props> {
super(props);
}
onChange = (item: SelectOptionItem<string>) => {
onChange = (item: SelectableValue<string>) => {
const ds = this.props.datasources.find(ds => ds.name === item.value);
this.props.onChange(ds);
};


@@ -1,12 +1,13 @@
import React from 'react';
import _ from 'lodash';
import { Select, SelectOptionItem } from '@grafana/ui';
import { Select } from '@grafana/ui';
import { SelectableValue } from '@grafana/data';
import { Variable } from 'app/types/templates';
export interface Props {
onChange: (value: string) => void;
options: Array<SelectOptionItem<string>>;
options: Array<SelectableValue<string>>;
isSearchable: boolean;
value: string;
placeholder?: string;
@@ -15,7 +16,7 @@ export interface Props {
}
interface State {
options: Array<SelectOptionItem<string>>;
options: Array<SelectableValue<string>>;
}
export class MetricSelect extends React.Component<Props, State> {


@@ -324,10 +324,6 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
}
}
const sortedRows = rows.sort((a, b) => {
return a.timestamp > b.timestamp ? -1 : 1;
});
// Meta data to display in status
const meta: LogsMetaItem[] = [];
if (_.size(commonLabels) > 0) {
@@ -343,7 +339,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
if (limits.length > 0) {
meta.push({
label: 'Limit',
value: `${limits[0].meta.limit} (${sortedRows.length} returned)`,
value: `${limits[0].meta.limit} (${rows.length} returned)`,
kind: LogsMetaKind.String,
});
}
@@ -351,7 +347,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
return {
hasUniqueLabels,
meta,
rows: sortedRows,
rows,
};
}


@@ -160,11 +160,11 @@ export class KeybindingSrv {
});
this.bind('t left', () => {
scope.appEvent('shift-time-backward');
scope.appEvent('shift-time', -1);
});
this.bind('t right', () => {
scope.appEvent('shift-time-forward');
scope.appEvent('shift-time', 1);
});
// edit panel

Some files were not shown because too many files have changed in this diff.