Mirror of https://github.com/grafana/grafana.git (synced 2026-01-10 05:57:40 +08:00)
Compare commits: `packages@6...` → `omgtest` (61 commits)

962089bc2b, ae61bb923f, d69b54d41a, ceaa30d88e, 5e4971ac01, 266d8cab89, 1bf2ad37f3, ca628832ab, 1cbec50866, 743f8420bc, e1cec1069c, e9dd84f9d3, 660b9a3126, b924884240, 605de54852, 9b674b3944, d249335a6c, 6aa58182c7, bcabffc25b, e58e7cb4c5, 74c118f1d1, ac9774e7bb, 31d619c7de, ecac5d6931, 6e2c5eb52a, 76d08989f0, a28c96090c, 1292d203a8, 2ed7ceb59d, 98908f7b98, c8da0ac1c8, add6a0d00a, c74c7e24e2, 7ec87ee76b, 5190949950, 6f4625bb78, 3680b95b44, 04e7970375, f2ad3242be, aa89210c9d, a5834d3250, f5efef1370, 3bbc40a32f, bf7fb67f73, ffa9429c68, 6649c5d75b, d6e8129588, 6a3a2f5f94, 5d3a60d46e, 5f0a7f43c3, ebff883016, 81ff856568, 648aa62264, c32365f424, 6599bdc7f1, b44be990bc, 9e87dbb153, 25506829be, d918d1f5f4, 332920954e, 78ca55f3d7
.circleci/config.yml

````diff
@@ -7,12 +7,17 @@ aliases:
       only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
   - &filter-not-release-or-master
     tags:
       ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
     branches:
       ignore: master
   - &filter-only-master
     branches:
       only: master
+  - &filter-only-master-but-not-release
+    tags:
+      ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
+    branches:
+      only: master

 version: 2

@@ -623,6 +628,21 @@ jobs:
         echo "-- no changes to docs files --"
       fi

+  release-next-packages:
+    docker:
+      - image: circleci/node:10
+    steps:
+      - checkout
+      - run:
+          name: Bootstrap lerna
+          command: 'npx lerna bootstrap'
+      - run:
+          name: npm - Prepare auth token
+          command: 'echo //registry.npmjs.org/:_authToken=$NPM_TOKEN >> ~/.npmrc'
+      - run:
+          name: Release next packages
+          command: './scripts/circle-release-next-packages.sh'

 workflows:
   version: 2
   build-master:

@@ -694,6 +714,11 @@ workflows:
         requires:
           - end-to-end-test
         filters: *filter-only-master
+      - release-next-packages:
+          requires:
+            - test-frontend
+            - build-fast-frontend
+          filters: *filter-only-master-but-not-release
   release:
     jobs:
       - build-all:

@@ -803,3 +828,4 @@ workflows:
           - postgres-integration-test
           - cache-server-test
         filters: *filter-not-release-or-master
````
CHANGELOG.md (71 lines changed)
````diff
@@ -1,4 +1,73 @@
-# 6.3.0 (unreleased)
+# 6.4.0 (unreleased)
+
+# 6.3.0-beta1
+
+### Features / Enhancements
+* **Alerting**: Add tags to alert rules. [#10989](https://github.com/grafana/grafana/pull/10989), [@Thib17](https://github.com/Thib17)
+* **Alerting**: Attempt to send email notifications to all given email addresses. [#16881](https://github.com/grafana/grafana/pull/16881), [@zhulongcheng](https://github.com/zhulongcheng)
+* **Alerting**: Improve alert rule testing. [#16286](https://github.com/grafana/grafana/pull/16286), [@marefr](https://github.com/marefr)
+* **Alerting**: Support for configuring content field for Discord alert notifier. [#17017](https://github.com/grafana/grafana/pull/17017), [@jan25](https://github.com/jan25)
+* **Alertmanager**: Replace illegal chars with underscore in label names. [#17002](https://github.com/grafana/grafana/pull/17002), [@bergquist](https://github.com/bergquist)
+* **Auth**: Allow expiration of API keys. [#17678](https://github.com/grafana/grafana/pull/17678), [@papagian](https://github.com/papagian)
+* **Auth**: Return device, os and browser when listing user auth tokens in HTTP API. [#17504](https://github.com/grafana/grafana/pull/17504), [@shavonn](https://github.com/shavonn)
+* **Auth**: Support list and revoke of user auth tokens in UI. [#17434](https://github.com/grafana/grafana/pull/17434), [@shavonn](https://github.com/shavonn)
+* **AzureMonitor**: Change clashing built-in Grafana variables/macro names for Azure Logs. [#17140](https://github.com/grafana/grafana/pull/17140), [@shavonn](https://github.com/shavonn)
+* **CloudWatch**: Made region visible for AWS Cloudwatch Expressions. [#17243](https://github.com/grafana/grafana/pull/17243), [@utkarshcmu](https://github.com/utkarshcmu)
+* **Cloudwatch**: Add AWS DocDB metrics. [#17241](https://github.com/grafana/grafana/pull/17241), [@utkarshcmu](https://github.com/utkarshcmu)
+* **Dashboard**: Use timezone dashboard setting when exporting to CSV. [#18002](https://github.com/grafana/grafana/pull/18002), [@dehrax](https://github.com/dehrax)
+* **Data links**. [#17267](https://github.com/grafana/grafana/pull/17267), [@torkelo](https://github.com/torkelo)
+* **Docker**: Switch base image to ubuntu:latest from debian:stretch to avoid security issues. [#17066](https://github.com/grafana/grafana/pull/17066), [@bergquist](https://github.com/bergquist)
+* **Elasticsearch**: Support for visualizing logs in Explore. [#17605](https://github.com/grafana/grafana/pull/17605), [@marefr](https://github.com/marefr)
+* **Explore**: Adds Live option for supported datasources. [#17062](https://github.com/grafana/grafana/pull/17062), [@hugohaggmark](https://github.com/hugohaggmark)
+* **Explore**: Adds orgId to URL for sharing purposes. [#17895](https://github.com/grafana/grafana/pull/17895), [@kaydelaney](https://github.com/kaydelaney)
+* **Explore**: Adds support for new loki 'start' and 'end' params for labels endpoint. [#17512](https://github.com/grafana/grafana/pull/17512), [@kaydelaney](https://github.com/kaydelaney)
+* **Explore**: Adds support for toggling raw query mode in explore. [#17870](https://github.com/grafana/grafana/pull/17870), [@kaydelaney](https://github.com/kaydelaney)
+* **Explore**: Allow switching between metrics and logs. [#16959](https://github.com/grafana/grafana/pull/16959), [@marefr](https://github.com/marefr)
+* **Explore**: Combines the timestamp and local time columns into one. [#17775](https://github.com/grafana/grafana/pull/17775), [@hugohaggmark](https://github.com/hugohaggmark)
+* **Explore**: Display log lines context. [#17097](https://github.com/grafana/grafana/pull/17097), [@dprokop](https://github.com/dprokop)
+* **Explore**: Don't parse log levels if provided by field or label. [#17180](https://github.com/grafana/grafana/pull/17180), [@marefr](https://github.com/marefr)
+* **Explore**: Improves performance of Logs element by limiting re-rendering. [#17685](https://github.com/grafana/grafana/pull/17685), [@kaydelaney](https://github.com/kaydelaney)
+* **Explore**: Support for new LogQL filtering syntax. [#16674](https://github.com/grafana/grafana/pull/16674), [@davkal](https://github.com/davkal)
+* **Explore**: Use new TimePicker from Grafana/UI. [#17793](https://github.com/grafana/grafana/pull/17793), [@hugohaggmark](https://github.com/hugohaggmark)
+* **Explore**: Handle newlines in LogRow Highlighter. [#17425](https://github.com/grafana/grafana/pull/17425), [@rrfeng](https://github.com/rrfeng)
+* **Graph**: Added new fill gradient option. [#17528](https://github.com/grafana/grafana/pull/17528), [@torkelo](https://github.com/torkelo)
+* **GraphPanel**: Don't sort series when legend table & sort column is not visible. [#17095](https://github.com/grafana/grafana/pull/17095), [@shavonn](https://github.com/shavonn)
+* **InfluxDB**: Support for visualizing logs in Explore. [#17450](https://github.com/grafana/grafana/pull/17450), [@hugohaggmark](https://github.com/hugohaggmark)
+* **Logging**: Login and Logout actions (#17760). [#17883](https://github.com/grafana/grafana/pull/17883), [@ATTron](https://github.com/ATTron)
+* **Logging**: Move log package to pkg/infra. [#17023](https://github.com/grafana/grafana/pull/17023), [@zhulongcheng](https://github.com/zhulongcheng)
+* **Metrics**: Expose stats about roles as metrics. [#17469](https://github.com/grafana/grafana/pull/17469), [@bergquist](https://github.com/bergquist)
+* **MySQL/Postgres/MSSQL**: Add parsing for day, weeks and year intervals in macros. [#13086](https://github.com/grafana/grafana/pull/13086), [@bernardd](https://github.com/bernardd)
+* **MySQL**: Add support for periodically reloading client certs. [#14892](https://github.com/grafana/grafana/pull/14892), [@tpetr](https://github.com/tpetr)
+* **Plugins**: Replace dataFormats list with skipDataQuery flag in plugin.json. [#16984](https://github.com/grafana/grafana/pull/16984), [@ryantxu](https://github.com/ryantxu)
+* **Prometheus**: Take timezone into account for step alignment. [#17477](https://github.com/grafana/grafana/pull/17477), [@fxmiii](https://github.com/fxmiii)
+* **Prometheus**: Use overridden panel range for $__range instead of dashboard range. [#17352](https://github.com/grafana/grafana/pull/17352), [@patrick246](https://github.com/patrick246)
+* **Prometheus**: Added time range filter to series labels query. [#16851](https://github.com/grafana/grafana/pull/16851), [@FUSAKLA](https://github.com/FUSAKLA)
+* **Provisioning**: Support folder that doesn't exist yet in dashboard provisioning. [#17407](https://github.com/grafana/grafana/pull/17407), [@Nexucis](https://github.com/Nexucis)
+* **Refresh picker**: Handle empty intervals. [#17585](https://github.com/grafana/grafana/pull/17585), [@dehrax](https://github.com/dehrax)
+* **Singlestat**: Add y min/max config to singlestat sparklines. [#17527](https://github.com/grafana/grafana/pull/17527), [@pitr](https://github.com/pitr)
+* **Snapshot**: Use given key and deleteKey. [#16876](https://github.com/grafana/grafana/pull/16876), [@zhulongcheng](https://github.com/zhulongcheng)
+* **Templating**: Correctly display __text in multi-value variable after page reload. [#17840](https://github.com/grafana/grafana/pull/17840), [@EduardSergeev](https://github.com/EduardSergeev)
+* **Templating**: Support selecting all filtered values of a multi-value variable. [#16873](https://github.com/grafana/grafana/pull/16873), [@r66ad](https://github.com/r66ad)
+* **Tracing**: Allow propagation with Zipkin headers. [#17009](https://github.com/grafana/grafana/pull/17009), [@jrockway](https://github.com/jrockway)
+* **Users**: Disable users removed from LDAP. [#16820](https://github.com/grafana/grafana/pull/16820), [@alexanderzobnin](https://github.com/alexanderzobnin)
+
+### Bug Fixes
+* **AddPanel**: Fix issue when removing moved add panel widget. [#17659](https://github.com/grafana/grafana/pull/17659), [@dehrax](https://github.com/dehrax)
+* **CLI**: Fix encrypt-datasource-passwords fails with sql error. [#18014](https://github.com/grafana/grafana/pull/18014), [@marefr](https://github.com/marefr)
+* **Elasticsearch**: Fix default max concurrent shard requests. [#17770](https://github.com/grafana/grafana/pull/17770), [@marefr](https://github.com/marefr)
+* **Explore**: Fix browsing back to dashboard panel. [#17061](https://github.com/grafana/grafana/pull/17061), [@jschill](https://github.com/jschill)
+* **Explore**: Fix filter by series level in logs graph. [#17798](https://github.com/grafana/grafana/pull/17798), [@marefr](https://github.com/marefr)
+* **Explore**: Fix issues when loading and both graph/table are collapsed. [#17113](https://github.com/grafana/grafana/pull/17113), [@marefr](https://github.com/marefr)
+* **Explore**: Fix selection/copy of log lines. [#17121](https://github.com/grafana/grafana/pull/17121), [@marefr](https://github.com/marefr)
+* **Fix**: Wrap value of multi variable in array when coming from URL. [#16992](https://github.com/grafana/grafana/pull/16992), [@aocenas](https://github.com/aocenas)
+* **Frontend**: Fix for Json tree component not working. [#17608](https://github.com/grafana/grafana/pull/17608), [@srid12](https://github.com/srid12)
+* **Graphite**: Fix for issue with alias function being moved last. [#17791](https://github.com/grafana/grafana/pull/17791), [@torkelo](https://github.com/torkelo)
+* **Graphite**: Fixes issue with seriesByTag & function with variable param. [#17795](https://github.com/grafana/grafana/pull/17795), [@torkelo](https://github.com/torkelo)
+* **Graphite**: Use POST for /metrics/find requests. [#17814](https://github.com/grafana/grafana/pull/17814), [@papagian](https://github.com/papagian)
+* **HTTP Server**: Serve Grafana with a custom URL path prefix. [#17048](https://github.com/grafana/grafana/pull/17048), [@jan25](https://github.com/jan25)
+* **InfluxDB**: Fixes single quotes are not escaped in label value filters. [#17398](https://github.com/grafana/grafana/pull/17398), [@Panzki](https://github.com/Panzki)
+* **Prometheus**: Correctly escape '|' literals in interpolated PromQL variables. [#16932](https://github.com/grafana/grafana/pull/16932), [@Limess](https://github.com/Limess)
+* **Prometheus**: Fix when adding label for metrics which contains colons in Explore. [#16760](https://github.com/grafana/grafana/pull/16760), [@tolwi](https://github.com/tolwi)
+* **SinglestatPanel**: Remove background color when value turns null. [#17552](https://github.com/grafana/grafana/pull/17552), [@druggieri](https://github.com/druggieri)
+
 # 6.2.5 (2019-06-25)
````
Makefile (14 lines changed)
````diff
@@ -8,7 +8,7 @@ GO_FILES := ./pkg/...
 all: deps build

 deps-go:
-	go run build.go setup
+	$(GO) run build.go setup

 deps-js: node_modules

@@ -16,15 +16,15 @@ deps: deps-js

 build-go:
 	@echo "build go files"
-	GO111MODULE=on go run build.go build
+	$(GO) run build.go build

 build-server:
 	@echo "build server"
-	GO111MODULE=on go run build.go build-server
+	$(GO) run build.go build-server

 build-cli:
 	@echo "build in CI environment"
-	GO111MODULE=on go run build.go build-cli
+	$(GO) run build.go build-cli

 build-js:
 	@echo "build frontend"

@@ -35,7 +35,7 @@ build: build-go build-js
 build-docker-dev:
 	@echo "build development container"
 	@echo "\033[92mInfo:\033[0m the frontend code is expected to be built already."
-	GO111MODULE=on go run build.go -goos linux -pkg-arch amd64 ${OPT} build pkg-archive latest
+	$(GO) run build.go -goos linux -pkg-arch amd64 ${OPT} build pkg-archive latest
 	cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
 	cd packaging/docker && docker build --tag grafana/grafana:dev .

@@ -45,7 +45,7 @@ build-docker-full:

 test-go:
 	@echo "test backend"
-	GO111MODULE=on go test -v ./pkg/...
+	$(GO) test -v ./pkg/...

 test-js:
 	@echo "test frontend"

@@ -107,7 +107,7 @@ golangci-lint: scripts/go/bin/golangci-lint

 go-vet:
 	@echo "lint via go vet"
-	@go vet $(GO_FILES)
+	@$(GO) vet $(GO_FILES)

 lint-go: go-vet golangci-lint revive revive-alerting gosec
````
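The recipes above now route every Go invocation through a `GO` variable whose definition is not part of this diff. A minimal sketch of the assumed definition, inferred from the inline `GO111MODULE=on go` prefix that these recipes previously used:

```make
# Assumed definition (not shown in this diff): one place to force
# module mode on for every Go invocation in the Makefile.
GO := GO111MODULE=on go
```

Centralizing the prefix this way means a future toolchain change (a different Go binary, extra env vars) touches one line instead of every recipe.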
README.md (30 lines changed)
````diff
@@ -147,12 +147,34 @@ Writing & watching frontend tests

 ```bash
 # Run Golang tests using sqlite3 as database (default)
 go test ./pkg/...
 ```

-# Run Golang tests using mysql as database - convenient to use /docker/blocks/mysql_tests
-GRAFANA_TEST_DB=mysql go test ./pkg/...
-
-# Run Golang tests using postgres as database - convenient to use /docker/blocks/postgres_tests
-GRAFANA_TEST_DB=postgres go test ./pkg/...
+##### Running the MySQL or Postgres backend tests:
+
+Run these by setting `GRAFANA_TEST_DB` in your environment.
+
+- `GRAFANA_TEST_DB=mysql` to test MySQL
+- `GRAFANA_TEST_DB=postgres` to test Postgres
+
+Follow the instructions in `./devenv` to spin up test containers running the appropriate databases with `docker-compose`
+- Use `docker/blocks/mysql_tests` or `docker/blocks/postgres_tests` as appropriate
+
+```bash
+# MySQL
+# Tests can only be run in one Go package at a time due to clashing db queries. To run MySQL tests for the "pkg/services/sqlstore" package, run:
+GRAFANA_TEST_DB=mysql go test ./pkg/services/sqlstore/...
+
+# Or run all the packages using the CircleCI scripts. This method will be slower as the scripts will run all the tests, including the integration tests.
+./scripts/circle-test-mysql.sh
+```
+
+```bash
+# Postgres
+# Tests can only be run in one Go package at a time due to clashing db queries. To run Postgres tests for the "pkg/services/sqlstore" package, run:
+GRAFANA_TEST_DB=postgres go test ./pkg/services/sqlstore/...
+
+# Or run all the packages using the CircleCI scripts. This method will be slower as the scripts will run all the tests, including the integration tests.
+./scripts/circle-test-postgres.sh
+```

 #### End-to-end
````
````diff
@@ -5,7 +5,7 @@
 # root_url = %(protocol)s://%(domain)s:10080/grafana/

 nginxproxy:
-  build: docker/blocks/nginx_proxy
+  build: docker/blocks/nginx_proxy_mac
   ports:
     - "10080:10080"
````
````diff
@@ -60,9 +60,9 @@ aliases = ["/v1.1", "/guides/reference/admin", "/v3.1"]
     <h4>Provisioning</h4>
     <p>A guide to help you automate your Grafana setup & configuration.</p>
   </a>
-  <a href="{{< relref "guides/whats-new-in-v6-2.md" >}}" class="nav-cards__item nav-cards__item--guide">
-    <h4>What's new in v6.2</h4>
-    <p>Article on all the new cool features and enhancements in v6.2</p>
+  <a href="{{< relref "guides/whats-new-in-v6-3.md" >}}" class="nav-cards__item nav-cards__item--guide">
+    <h4>What's new in v6.3</h4>
+    <p>Article on all the new cool features and enhancements in v6.3</p>
   </a>
   <a href="{{< relref "tutorials/screencasts.md" >}}" class="nav-cards__item nav-cards__item--guide">
     <h4>Screencasts</h4>
````
````diff
@@ -45,6 +45,8 @@ datasources:
     password: $PASSWORD
 ```

+If you have a literal `$` in your value and want to avoid interpolation, `$$` can be used.
+
 <hr />

 ## Configuration Management Tools
````
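As an illustration of that escape rule, a hypothetical datasource entry (the name and password value here are made up):

```yaml
datasources:
  - name: ExampleDB
    type: postgres
    secureJsonData:
      # "$$" escapes interpolation, so this yields the literal "pa$word"
      # instead of looking up an environment variable named "word".
      password: pa$$word
```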
````diff
@@ -27,7 +27,7 @@ header_name = X-WEBAUTH-USER
 header_property = username
 # Set to `true` to enable auto sign up of users who do not exist in Grafana DB. Defaults to `true`.
 auto_sign_up = true
-# If combined with Grafana LDAP integration define sync interval
+# If combined with Grafana LDAP integration define sync interval in minutes
 ldap_sync_ttl = 60
 # Limit where auth proxy requests come from by configuring a list of IP addresses.
 # This can be used to prevent users spoofing the X-WEBAUTH-USER header.
````
````diff
@@ -99,3 +99,18 @@ allow_sign_up = true
 allowed_organizations = github google
 ```

+### Team Sync (Enterprise only)
+
+> Only available in Grafana Enterprise v6.3+
+
+With Team Sync you can map your GitHub org teams to teams in Grafana so that your users will automatically be added to
+the correct teams.
+
+Your GitHub teams can be referenced in two ways:
+
+- `https://github.com/orgs/<org>/teams/<team name>`
+- `@<org>/<team name>`
+
+Example: `@grafana/developers`
+
+[Learn more about Team Sync]({{< relref "auth/enhanced_ldap.md" >}})
````
docs/sources/guides/whats-new-in-v6-3.md (new file, 144 lines)
+++
title = "What's New in Grafana v6.3"
description = "Feature & improvement highlights for Grafana v6.3"
keywords = ["grafana", "new", "documentation", "6.3"]
type = "docs"
[menu.docs]
name = "Version 6.3"
identifier = "v6.3"
parent = "whatsnew"
weight = -14
+++

# What's New in Grafana v6.3

For all the details, please read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md)

## Highlights

- New Explore features
  - [Loki Live Streaming]({{< relref "#loki-live-streaming" >}})
  - [Loki Context Queries]({{< relref "#loki-context-queries" >}})
  - [Elasticsearch Logs Support]({{< relref "#elasticsearch-logs-support" >}})
  - [InfluxDB Logs Support]({{< relref "#influxdb-logs-support" >}})
- [Data links]({{< relref "#data-links" >}})
- [New Time Picker]({{< relref "#new-time-picker" >}})
- [Graph Area Gradients]({{< relref "#graph-gradients" >}}) - A new graph display option!
- Grafana Enterprise
  - [LDAP Active Sync]({{< relref "#ldap-active-sync" >}})
  - [SAML Authentication]({{< relref "#saml-authentication" >}})
## Explore improvements

This release adds a ton of enhancements to Explore, both general improvements and new data-source-specific features.

### Loki live streaming

For log queries using the Loki data source you can now stream logs live, directly to the Explore UI.

### Loki context queries

After narrowing down to a log line through heavy use of query filters, it can be useful to
see the log lines surrounding the line you searched for. The `show context` feature
allows you to view lines before and after the line of interest.

### Elasticsearch logs support

This release adds support for searching & visualizing logs stored in Elasticsearch in Explore mode, with a
simplified query interface designed specifically for log search.

{{< docs-imagebox img="/img/docs/v63/elasticsearch_explore_logs.png" max-width="600px" caption="Elasticsearch logs in Explore" >}}

Please read [Using Elasticsearch in Grafana](/features/datasources/elasticsearch/#querying-logs-beta) for more detailed information on how to get started and use it.

### InfluxDB logs support

This release adds support for searching & visualizing logs stored in InfluxDB in Explore mode, with a
simplified query interface designed specifically for log search.

{{< docs-imagebox img="/img/docs/v63/influxdb_explore_logs.png" max-width="600px" caption="InfluxDB logs in Explore" >}}

Please read [Using InfluxDB in Grafana](/features/datasources/influxdb/#querying-logs-beta) for more detailed information on how to get started and use it.
## Data Links

We have simplified the UI for defining panel drilldown links (and renamed them to Panel links). We have also added a
new type of link named `Data link`. The reason for having two different types is to make it clear how they are used
and what variables you can use in the link. Panel links are only shown in the top left corner of
the panel, and they cannot reference a series name or any data field. `Data links`, on the other hand, are used by the
actual visualization and can reference data fields.

Example:
```url
http://my-grafana.com/d/bPCI6VSZz/other-dashboard?var-server=${__series_name}
```

You have access to these variables:

Name | Description
------------ | -------------
*${__series_name}* | The name of the time series (or table)
*${__value_time}* | The time of the point you're clicking on (in millisecond epoch)
*${__url_time_range}* | Interpolates as the full time range (i.e. from=21312323412&to=21312312312)
*${__all_variables}* | Adds all current variables (and their current values) to the URL

You can then click on a point in the Graph.

{{< docs-imagebox img="/img/docs/v63/graph_datalink.png" max-width="400px" caption="Data link" >}}

For now, only the Graph panel supports `Data links`, but we hope to add them to many more visualizations.
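As another illustration, built only from the variables in the table above (the dashboard UID is the same hypothetical one as in the earlier example), a data link can carry the current time range and all template variables along to the target dashboard:

```url
http://my-grafana.com/d/bPCI6VSZz/other-dashboard?${__url_time_range}&${__all_variables}
```

Since `${__url_time_range}` expands to `from=...&to=...` and `${__all_variables}` appends the current variable values, the target dashboard opens scoped to the same time window and variable selection as the panel you clicked.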
## New Time Picker

The time picker has been redesigned with a simpler layout that makes accessing quick ranges easier.

{{< docs-imagebox img="/img/docs/v63/time_picker.png" max-width="400px" caption="New Time Picker" >}}

## Graph Gradients

Want more eye candy in your graphs? Then the fill gradient option might be for you! It works really well for
graphs with only a single series.

{{< docs-imagebox img="/img/docs/v63/graph_gradient_area.jpeg" max-width="800px" caption="Graph Gradient Area" >}}

It looks really nice in the light theme as well.

{{< docs-imagebox img="/img/docs/v63/graph_gradients_white.png" max-width="800px" caption="Graph Gradient Area" >}}

## Grafana Enterprise

Substantial refactoring of and improvements to the external auth systems have gone into this release, making the features
listed below possible and laying a foundation for future enhancements.

### LDAP Active Sync

This is a new Enterprise feature that enables background syncing of user information, org roles, and team memberships.
This syncing is otherwise only done at login time. With this feature you can schedule how often this user synchronization should
occur.

For example, let's say a user is removed from an LDAP group. In previous versions of Grafana, an admin would have to
wait for the user to log out or for the session to expire before the Grafana permissions updated, a process that can take days.

With active sync, the user would automatically be removed from the corresponding team in Grafana, or even logged out and disabled
if they no longer belong to an LDAP group that gives them access to Grafana.

[Read more](/auth/enhanced_ldap/#active-ldap-synchronization)

### SAML Authentication

Built-in support for SAML is now available in Grafana Enterprise.

### Team Sync for GitHub OAuth

When setting up OAuth with GitHub, it's now possible to sync GitHub teams with Teams in Grafana.

[See docs]({{< relref "auth/github.md" >}})

### Team Sync for Auth Proxy

We've added support for enriching the Auth Proxy headers with Teams information, which makes it possible
to use Team Sync with Auth Proxy.

[See docs](/auth/auth-proxy/#auth-proxy-authentication)
````diff
@@ -1,4 +1,4 @@
 {
   "stable": "6.2.5",
-  "testing": "6.2.5"
+  "testing": "6.3.0-beta1"
 }
````
lerna.json

````diff
@@ -2,5 +2,5 @@
   "npmClient": "yarn",
   "useWorkspaces": true,
   "packages": ["packages/*"],
-  "version": "6.3.0-alpha.33"
+  "version": "6.4.0-alpha.12"
 }
````
package.json (11 lines changed)
````diff
@@ -5,7 +5,7 @@
     "company": "Grafana Labs"
   },
   "name": "grafana",
-  "version": "6.3.0-pre",
+  "version": "6.4.0-pre",
   "repository": {
     "type": "git",
     "url": "http://github.com/grafana/grafana.git"

@@ -89,7 +89,7 @@
     "ng-annotate-loader": "0.6.1",
     "ng-annotate-webpack-plugin": "0.3.0",
     "ngtemplate-loader": "2.0.1",
-    "node-sass": "4.11.0",
+    "node-sass": "4.12.0",
     "npm": "6.9.0",
     "optimize-css-assets-webpack-plugin": "5.0.1",
     "phantomjs-prebuilt": "2.1.16",

@@ -146,9 +146,10 @@
     "prettier:write": "prettier --list-different \"**/*.{ts,tsx,scss}\" --write",
     "precommit": "grafana-toolkit precommit",
     "themes:generate": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/generateSassVariableFiles.ts",
-    "packages:prepare": "npm run test && lerna version --tag-version-prefix=\"packages@\" -m \"Packages: publish %s\" --no-push",
+    "packages:prepare": "lerna run clean && npm run test && lerna version --tag-version-prefix=\"packages@\" -m \"Packages: publish %s\" --no-push",
     "packages:build": "lerna run clean && lerna run build",
-    "packages:publish": "lerna publish from-package --contents dist --tag-version-prefix=\"packages@\" --dist-tag next"
+    "packages:publish": "lerna publish from-package --contents dist",
+    "packages:publishNext": "lerna publish from-package --contents dist --dist-tag next --yes"
   },
   "husky": {
     "hooks": {

@@ -201,7 +202,7 @@
     "file-saver": "1.3.8",
     "immutable": "3.8.2",
     "jquery": "3.4.1",
-    "lodash": "4.17.11",
+    "lodash": "4.17.14",
     "marked": "0.6.2",
     "moment": "2.24.0",
     "mousetrap": "1.6.3",
````
packages/README.md (new file, 15 lines)
## Grafana frontend packages

## Releasing new version

We use [Lerna](https://github.com/lerna/lerna) for package versioning and releases.

### Manual release

1. Run the `packages:prepare` script from the root directory. This will perform a cleanup, run all tests and bump the version for all packages. It will also create a `packages@[version]` tag and a version bump commit with the message `Packages: publish [version]`.
2. Run the `packages:build` script to prepare the distribution packages.
3. Run `packages:publish` to publish the new versions.
   - add `--dist-tag next` to publish under the `next` tag
4. Push the version commit (the full sequence is sketched below).

### Building individual packages

To build individual packages, run `grafana-toolkit package:build --scope=<ui|toolkit|runtime|data>`.
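Taken together, a manual release might look like the shell session below. This is a sketch assuming yarn and the script names defined in the root package.json shown earlier in this diff; the final push command is an assumption of how "push version commit" would typically be done:

```bash
# 1. Clean, test, and bump versions; creates the "packages@<version>"
#    tag and a "Packages: publish <version>" commit (nothing is pushed).
yarn packages:prepare

# 2. Build distribution bundles for every package.
yarn packages:build

# 3. Publish the built dist contents; add --dist-tag next to publish
#    under the "next" npm tag instead of "latest".
yarn packages:publish

# 4. Push the version commit and tag created in step 1 (assumed command).
git push --follow-tags
```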
````diff
@@ -1,3 +1,3 @@
 # Grafana Data Library

-The core data components
+This package holds the root data types and functions used within Grafana.
````
````diff
@@ -1,6 +1,6 @@
 {
   "name": "@grafana/data",
-  "version": "6.3.0-alpha.33",
+  "version": "6.4.0-alpha.12",
   "description": "Grafana Data Library",
   "keywords": [
     "typescript"

@@ -36,8 +36,5 @@
     "rollup-plugin-visualizer": "0.9.2",
     "sinon": "1.17.6",
     "typescript": "3.4.1"
-  },
-  "resolutions": {
-    "@types/lodash": "4.14.119"
   }
 }
````
````diff
@@ -97,6 +97,9 @@ export interface AnnotationEvent {
   dashboardId?: number;
   panelId?: number;
   userId?: number;
+  login?: string;
+  email?: string;
+  avatarUrl?: string;
   time?: number;
   timeEnd?: number;
   isRegion?: boolean;
````
````diff
@@ -91,4 +91,58 @@ describe('Stats Calculators', () => {
     expect(stats.step).toEqual(100);
     expect(stats.delta).toEqual(300);
   });

+  it('consistent results for first/last value with null', () => {
+    const info = [
+      {
+        rows: [[null], [200], [null]], // first/last value is null
+        result: 200,
+      },
+      {
+        rows: [[null], [null], [null]], // All null
+        result: undefined,
+      },
+      {
+        rows: [], // Empty row
+        result: undefined,
+      },
+    ];
+    const fields = [{ name: 'A' }];
+
+    const stats = reduceField({
+      series: { rows: info[0].rows, fields },
+      fieldIndex: 0,
+      reducers: [ReducerID.first, ReducerID.last, ReducerID.firstNotNull, ReducerID.lastNotNull], // uses standard path
+    });
+    expect(stats[ReducerID.first]).toEqual(null);
+    expect(stats[ReducerID.last]).toEqual(null);
+    expect(stats[ReducerID.firstNotNull]).toEqual(200);
+    expect(stats[ReducerID.lastNotNull]).toEqual(200);
+
+    const reducers = [ReducerID.lastNotNull, ReducerID.firstNotNull];
+    for (const input of info) {
+      for (const reducer of reducers) {
+        const v1 = reduceField({
+          series: { rows: input.rows, fields },
+          fieldIndex: 0,
+          reducers: [reducer, ReducerID.mean], // uses standard path
+        })[reducer];
+
+        const v2 = reduceField({
+          series: { rows: input.rows, fields },
+          fieldIndex: 0,
+          reducers: [reducer], // uses optimized path
+        })[reducer];
+
+        if (v1 !== v2 || v1 !== input.result) {
+          const msg =
+            `Invalid ${reducer} result for: ` +
+            input.rows.join(', ') +
+            ` Expected: ${input.result}` + // configured
+            ` Received: Multiple: ${v1}, Single: ${v2}`;
+          expect(msg).toEqual(null);
+        }
+      }
+    }
+  });
 });
````
````diff
@@ -17,6 +17,9 @@ export enum ReducerID {
   delta = 'delta',
   step = 'step',

+  firstNotNull = 'firstNotNull',
+  lastNotNull = 'lastNotNull',
+
   changeCount = 'changeCount',
   distinctCount = 'distinctCount',
````
````diff
@@ -132,14 +135,28 @@ function getById(id: string): FieldReducerInfo | undefined {
   if (!hasBuiltIndex) {
     [
       {
-        id: ReducerID.last,
-        name: 'Last',
-        description: 'Last Value (current)',
+        id: ReducerID.lastNotNull,
+        name: 'Last (not null)',
+        description: 'Last non-null value',
         standard: true,
         alias: 'current',
+        reduce: calculateLastNotNull,
+      },
+      {
+        id: ReducerID.last,
+        name: 'Last',
+        description: 'Last Value',
+        standard: true,
+        reduce: calculateLast,
       },
-      { id: ReducerID.first, name: 'First', description: 'First Value', standard: true },
+      { id: ReducerID.first, name: 'First', description: 'First Value', standard: true, reduce: calculateFirst },
+      {
+        id: ReducerID.firstNotNull,
+        name: 'First (not null)',
+        description: 'First non-null value',
+        standard: true,
+        reduce: calculateFirstNotNull,
+      },
       { id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
       { id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
       { id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, alias: 'avg' },
````
````diff
@@ -231,6 +248,8 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
     mean: null,
     last: null,
     first: null,
+    lastNotNull: undefined,
+    firstNotNull: undefined,
     count: 0,
     nonNullCount: 0,
     allIsNull: true,
````
````diff
@@ -246,6 +265,10 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole

   for (let i = 0; i < data.rows.length; i++) {
     let currentValue = data.rows[i][fieldIndex];
+    if (i === 0) {
+      calcs.first = currentValue;
+    }
+    calcs.last = currentValue;

     if (currentValue === null) {
       if (ignoreNulls) {
````
````diff
@@ -257,9 +280,9 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
     }

     if (currentValue !== null) {
-      const isFirst = calcs.first === null;
+      const isFirst = calcs.firstNotNull === undefined;
       if (isFirst) {
-        calcs.first = currentValue;
+        calcs.firstNotNull = currentValue;
       }

       if (isNumber(currentValue)) {
````
````diff
@@ -268,12 +291,12 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
       calcs.nonNullCount++;

       if (!isFirst) {
-        const step = currentValue - calcs.last!;
+        const step = currentValue - calcs.lastNotNull!;
         if (calcs.step > step) {
           calcs.step = step; // the minimum interval
         }

-        if (calcs.last! > currentValue) {
+        if (calcs.lastNotNull! > currentValue) {
           // counter reset
           calcs.previousDeltaUp = false;
           if (i === data.rows.length - 1) {

@@ -307,7 +330,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
         calcs.allIsZero = false;
       }

-      calcs.last = currentValue;
+      calcs.lastNotNull = currentValue;
     }
   }
````
````diff
@@ -331,10 +354,8 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
     calcs.range = calcs.max - calcs.min;
   }

-  if (calcs.first !== null && calcs.last !== null) {
-    if (isNumber(calcs.first) && isNumber(calcs.last)) {
-      calcs.diff = calcs.last - calcs.first;
-    }
-  }
+  if (isNumber(calcs.firstNotNull) && isNumber(calcs.lastNotNull)) {
+    calcs.diff = calcs.lastNotNull - calcs.firstNotNull;
+  }

   return calcs;
````
````diff
@@ -344,10 +365,41 @@ function calculateFirst(data: DataFrame, fieldIndex: number, ignoreNulls: boolea
   return { first: data.rows[0][fieldIndex] };
 }

+function calculateFirstNotNull(
+  data: DataFrame,
+  fieldIndex: number,
+  ignoreNulls: boolean,
+  nullAsZero: boolean
+): FieldCalcs {
+  for (let idx = 0; idx < data.rows.length; idx++) {
+    const v = data.rows[idx][fieldIndex];
+    if (v != null) {
+      return { firstNotNull: v };
+    }
+  }
+  return { firstNotNull: undefined };
+}
+
 function calculateLast(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
   return { last: data.rows[data.rows.length - 1][fieldIndex] };
 }

+function calculateLastNotNull(
+  data: DataFrame,
+  fieldIndex: number,
+  ignoreNulls: boolean,
+  nullAsZero: boolean
+): FieldCalcs {
+  let idx = data.rows.length - 1;
+  while (idx >= 0) {
+    const v = data.rows[idx--][fieldIndex];
+    if (v != null) {
+      return { lastNotNull: v };
+    }
+  }
+  return { lastNotNull: undefined };
+}
+
 function calculateChangeCount(
   data: DataFrame,
   fieldIndex: number,
````
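A quick sketch of how the new reducers are consumed, using the `reduceField` call shape from the test diff above (the import path and the series values here are assumptions for illustration):

```typescript
import { reduceField, ReducerID } from './fieldReducer'; // path assumed

// A series whose first and last rows are null: the plain first/last
// reducers return null, while the NotNull variants skip over nulls.
const series = {
  fields: [{ name: 'value' }],
  rows: [[null], [10], [30], [null]],
};

const stats = reduceField({
  series,
  fieldIndex: 0,
  reducers: [ReducerID.first, ReducerID.firstNotNull, ReducerID.last, ReducerID.lastNotNull],
});

console.log(stats[ReducerID.first]);        // null
console.log(stats[ReducerID.firstNotNull]); // 10
console.log(stats[ReducerID.last]);         // null
console.log(stats[ReducerID.lastNotNull]);  // 30
```

This is the behavior split that motivated the change: `last` now reports the literal last row value, while `lastNotNull` (aliased to `current`) keeps the old "last meaningful value" semantics.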
````diff
@@ -1,19 +1,11 @@
 {
-  "extends": "../../tsconfig.json",
+  "extends": "../tsconfig.json",
   "include": ["src/**/*.ts", "src/**/*.tsx", "../../public/app/types/jquery/*.ts"],
   "exclude": ["dist", "node_modules"],
   "compilerOptions": {
     "rootDirs": ["."],
-    "module": "esnext",
-    "outDir": "compiled",
-    "declaration": true,
-    "declarationDir": "dist",
-    "strict": true,
-    "alwaysStrict": true,
-    "noImplicitAny": true,
-    "strictNullChecks": true,
-    "typeRoots": ["./node_modules/@types", "types"],
-    "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
-    "removeComments": false
+    "declarationDir": "dist",
+    "outDir": "compiled"
   }
 }
````
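The per-package configs now extend a shared `packages/tsconfig.json` instead of the repo root config. That base file is not part of this diff; presumably it carries the compiler options deleted above, along the lines of this sketch (an assumption, reconstructed from the removed lines, not the actual file):

```jsonc
// packages/tsconfig.json -- assumed contents, not shown in this diff
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "module": "esnext",
    "declaration": true,
    "strict": true,
    "alwaysStrict": true,
    "noImplicitAny": true,
    "strictNullChecks": true,
    "typeRoots": ["./node_modules/@types", "types"],
    "skipLibCheck": true, // Temp workaround for duplicate identifier tsc errors
    "removeComments": false
  }
}
```

With the shared options hoisted out, each package keeps only what genuinely differs per package (its include paths and output directories).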
````diff
@@ -1,3 +1,3 @@
 # Grafana Runtime library

-Interfaces that let you use the runtime...
+This package allows access to Grafana services. It requires Grafana to be running already and the functions to be imported as externals.
````
````diff
@@ -1,11 +1,9 @@
 {
   "name": "@grafana/runtime",
-  "version": "6.3.0-alpha.33",
+  "version": "6.4.0-alpha.12",
   "description": "Grafana Runtime Library",
   "keywords": [
-    "typescript",
-    "react",
-    "react-component"
+    "grafana"
   ],
   "main": "src/index.ts",
   "scripts": {

@@ -34,8 +32,5 @@
     "rollup-plugin-typescript2": "0.19.3",
     "rollup-plugin-visualizer": "0.9.2",
     "typescript": "3.4.1"
-  },
-  "resolutions": {
-    "@types/lodash": "4.14.119"
   }
 }
````
````diff
@@ -20,7 +20,7 @@ const buildCjsPackage = ({ env }) => {
         globals: {},
       },
     ],
-    external: ['lodash', '@grafana/ui'], // Use Lodash from grafana
+    external: ['lodash', '@grafana/ui', '@grafana/data'], // Use Lodash from grafana
     plugins: [
       commonjs({
         include: /node_modules/,
````
````diff
@@ -1,19 +1,11 @@
 {
-  "extends": "../../tsconfig.json",
+  "extends": "../tsconfig.json",
   "include": ["src/**/*.ts", "src/**/*.tsx", "../../public/app/types/jquery/*.ts"],
   "exclude": ["dist", "node_modules"],
   "compilerOptions": {
     "rootDirs": ["."],
-    "module": "esnext",
-    "outDir": "compiled",
-    "declaration": true,
-    "declarationDir": "dist",
-    "strict": true,
-    "alwaysStrict": true,
-    "noImplicitAny": true,
-    "strictNullChecks": true,
-    "typeRoots": ["./node_modules/@types", "types"],
-    "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
-    "removeComments": false
+    "declarationDir": "dist",
+    "outDir": "compiled"
   }
 }
````
````diff
@@ -84,7 +84,7 @@ Additionally, you can also provide additional Jest config via the package.json file.

 ## Working with CSS & static assets
-We support pure css, SASS and CSS in JS approach (via Emotion). All static assets referenced in your code (i.e. images) should be placed under `src/static` directory and referenced using relative paths.
+We support pure css, SASS and CSS in JS approach (via Emotion).

 1. Single css/sass file
    Create your css/sass file and import it in your plugin entry point (typically module.ts):

@@ -101,6 +101,8 @@ If you want to provide different stylesheets for dark/light theme, create `dark.

 TODO: add note about loadPluginCss

+Note that static files (png, svg, json, html) are all copied to the dist directory when the plugin is bundled. Relative paths to those files do not change.
+
 3. Emotion
    Starting from Grafana 6.2 our suggested way of styling plugins is by using [Emotion](https://emotion.sh). It's a css-in-js library that we use internally at Grafana. The biggest advantage of using Emotion is that you will get access to Grafana Theme variables.
````
````diff
@@ -1,11 +1,11 @@
 {
   "name": "@grafana/toolkit",
-  "version": "6.3.0-alpha.33",
+  "version": "6.4.0-alpha.12",
   "description": "Grafana Toolkit",
   "keywords": [
-    "typescript",
-    "react",
-    "react-component"
+    "grafana",
+    "cli",
+    "plugins"
   ],
   "bin": {
     "grafana-toolkit": "./bin/grafana-toolkit.js"

@@ -29,9 +29,11 @@
     "@types/node": "^12.0.4",
     "@types/react-dev-utils": "^9.0.1",
     "@types/semver": "^6.0.0",
+    "@types/tmp": "^0.1.0",
     "@types/webpack": "4.4.34",
+    "axios": "0.19.0",
     "babel-loader": "8.0.6",
     "babel-plugin-angularjs-annotate": "0.10.0",
     "chalk": "^2.4.2",
     "commander": "^2.20.0",
     "concurrently": "4.1.0",

@@ -45,9 +47,8 @@
     "jest": "24.8.0",
     "jest-cli": "^24.8.0",
     "jest-coverage-badges": "^1.1.2",
-    "lodash": "4.17.11",
+    "lodash": "4.17.14",
     "mini-css-extract-plugin": "^0.7.0",
     "ng-annotate-webpack-plugin": "^0.3.0",
     "node-sass": "^4.12.0",
     "optimize-css-assets-webpack-plugin": "^5.0.3",
     "ora": "^3.4.0",

@@ -73,9 +74,6 @@
     "url-loader": "^2.0.1",
     "webpack": "4.35.0"
   },
-  "resolutions": {
-    "@types/lodash": "4.14.119"
-  },
   "devDependencies": {
     "@types/glob": "^7.1.1",
     "@types/prettier": "^1.16.4"
````
````diff
@@ -13,7 +13,14 @@ import { pluginTestTask } from './tasks/plugin.tests';
 import { searchTestDataSetupTask } from './tasks/searchTestDataSetup';
 import { closeMilestoneTask } from './tasks/closeMilestone';
 import { pluginDevTask } from './tasks/plugin.dev';
-import { pluginCITask } from './tasks/plugin.ci';
+import {
+  ciBuildPluginTask,
+  ciBuildPluginDocsTask,
+  ciBundlePluginTask,
+  ciTestPluginTask,
+  ciPluginReportTask,
+  ciDeployPluginTask,
+} from './tasks/plugin.ci';
 import { buildPackageTask } from './tasks/package.build';

 export const run = (includeInternalScripts = false) => {

@@ -141,15 +148,53 @@ export const run = (includeInternalScripts = false) => {
   });

   program
-    .command('plugin:ci')
-    .option('--dryRun', "Dry run (don't post results)")
-    .description('Run Plugin CI task')
+    .command('plugin:ci-build')
+    .option('--backend <backend>', 'For backend task, which backend to run')
+    .description('Build the plugin, leaving artifacts in /dist')
     .action(async cmd => {
-      await execTask(pluginCITask)({
-        dryRun: cmd.dryRun,
+      await execTask(ciBuildPluginTask)({
+        backend: cmd.backend,
       });
     });

+  program
+    .command('plugin:ci-docs')
+    .description('Build the HTML docs')
+    .action(async cmd => {
+      await execTask(ciBuildPluginDocsTask)({});
+    });
+
+  program
+    .command('plugin:ci-bundle')
+    .description('Create a zip artifact for the plugin')
+    .action(async cmd => {
+      await execTask(ciBundlePluginTask)({});
+    });
+
+  program
+    .command('plugin:ci-test')
+    .option('--full', 'run all the tests (even stuff that will break)')
+    .description('end-to-end test using bundle in /artifacts')
+    .action(async cmd => {
+      await execTask(ciTestPluginTask)({
+        full: cmd.full,
+      });
+    });
+
+  program
+    .command('plugin:ci-report')
+    .description('Build a report for this whole process')
+    .action(async cmd => {
+      await execTask(ciPluginReportTask)({});
+    });
+
+  program
+    .command('plugin:ci-deploy')
+    .description('Publish plugin CI results')
+    .action(async cmd => {
+      await execTask(ciDeployPluginTask)({});
+    });
+
   program.on('command:*', () => {
     console.error('Invalid command: %s\nSee --help for a list of available commands.', program.args.join(' '));
     process.exit(1);
````
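The single `plugin:ci` command is thus split into one subcommand per CI stage. In a plugin's CI pipeline these would typically run in sequence, one per job (a sketch of the assumed order; the actual job wiring lives in each plugin's CircleCI config, not in this diff, and the `--backend` value is hypothetical):

```bash
# Hypothetical CI sequence using the new split commands; each stage
# writes its outputs under ci/jobs/<job>, as shown in plugin.ci below.
grafana-toolkit plugin:ci-build                    # frontend build + coverage
grafana-toolkit plugin:ci-build --backend linux_x64  # placeholder backend build
grafana-toolkit plugin:ci-docs                     # copy docs -> ci/docs
grafana-toolkit plugin:ci-bundle                   # zip artifact for the plugin
grafana-toolkit plugin:ci-test                     # e2e test using the bundle
grafana-toolkit plugin:ci-report                   # aggregate report
grafana-toolkit plugin:ci-deploy                   # publish CI results
```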
````diff
@@ -1,4 +1,3 @@
 import axios from 'axios';
 // @ts-ignore
 import * as _ from 'lodash';
 import { Task, TaskRunner } from './task';
````
Deleted file (191 lines): the commented-out @grafana/ui release task.

````diff
@@ -1,191 +0,0 @@
-// import execa = require('execa');
-// import { execTask } from '../utils/execTask';
-// import { changeCwdToGrafanaUiDist, changeCwdToGrafanaUi, restoreCwd } from '../utils/cwd';
-// import { ReleaseType, inc } from 'semver';
-// import { prompt } from 'inquirer';
-// import chalk from 'chalk';
-// import { useSpinner } from '../utils/useSpinner';
-// import { savePackage, buildTask, clean } from './grafanaui.build';
-// import { TaskRunner, Task } from './task';
-
-// type VersionBumpType = 'prerelease' | 'patch' | 'minor' | 'major';
-
-// interface ReleaseTaskOptions {
-//   publishToNpm: boolean;
-//   usePackageJsonVersion: boolean;
-//   createVersionCommit: boolean;
-// }
-
-// const promptBumpType = async () => {
-//   return prompt<{ type: VersionBumpType }>([
-//     {
-//       type: 'list',
-//       message: 'Select version bump',
-//       name: 'type',
-//       choices: ['prerelease', 'patch', 'minor', 'major'],
-//     },
-//   ]);
-// };
-
-// const promptPrereleaseId = async (message = 'Is this a prerelease?', allowNo = true) => {
-//   return prompt<{ id: string }>([
-//     {
-//       type: 'list',
-//       message: message,
-//       name: 'id',
-//       choices: allowNo ? ['no', 'alpha', 'beta'] : ['alpha', 'beta'],
-//     },
-//   ]);
-// };
-
-// const promptConfirm = async (message?: string) => {
-//   return prompt<{ confirmed: boolean }>([
-//     {
-//       type: 'confirm',
-//       message: message || 'Is that correct?',
-//       name: 'confirmed',
-//       default: false,
-//     },
-//   ]);
-// };
-
-// // Since Grafana core depends on @grafana/ui highly, we run full check before release
-// const runChecksAndTests = async () =>
-//   // @ts-ignore
-//   useSpinner<void>(`Running checks and tests`, async () => {
-//     try {
-//       await execa('npm', ['run', 'test']);
-//     } catch (e) {
-//       console.log(e);
-
-//       throw e;
-//     }
-//   })();
-
-// const bumpVersion = (version: string) =>
-//   // @ts-ignore
-//   useSpinner<void>(`Saving version ${version} to package.json`, async () => {
-//     changeCwdToGrafanaUi();
-//     await execa('npm', ['version', version]);
-//     changeCwdToGrafanaUiDist();
-//     const pkg = require(`${process.cwd()}/package.json`);
-//     pkg.version = version;
-//     await savePackage({ path: `${process.cwd()}/package.json`, pkg });
-//   })();
-
-// const publishPackage = (name: string, version: string) =>
-//   // @ts-ignore
-//   useSpinner<void>(`Publishing ${name} @ ${version} to npm registry...`, async () => {
-//     changeCwdToGrafanaUiDist();
-//     await execa('npm', ['publish', '--access', 'public']);
-//   })();
-
-// const ensureMasterBranch = async () => {
-//   const currentBranch = await execa.stdout('git', ['symbolic-ref', '--short', 'HEAD']);
-//   const status = await execa.stdout('git', ['status', '--porcelain']);
-
-//   if (currentBranch !== 'master' && status !== '') {
-//     console.error(chalk.red.bold('You need to be on clean master branch to release @grafana/ui'));
-//     process.exit(1);
-//   }
-// };
-
-// const prepareVersionCommitAndPush = async (version: string) =>
-//   // @ts-ignore
-//   useSpinner<void>('Commiting and pushing @grafana/ui version update', async () => {
-//     await execa.stdout('git', ['commit', '-a', '-m', `Upgrade @grafana/ui version to v${version}`]);
-//     await execa.stdout('git', ['push']);
-//   })();
-
-// const releaseTaskRunner: TaskRunner<ReleaseTaskOptions> = async ({
-//   publishToNpm,
-//   usePackageJsonVersion,
-//   createVersionCommit,
-// }) => {
-//   changeCwdToGrafanaUi();
-//   // @ts-ignore
-//   await clean(); // Clean previous build if exists
-//   restoreCwd();
-
-//   if (publishToNpm) {
-//     // TODO: Ensure release branch
-//     // When need to update this when we star keeping @grafana/ui releases in sync with core
-//     await ensureMasterBranch();
-//   }
-
-//   await runChecksAndTests();
-
-//   await execTask(buildTask)({} as any);
-
-//   let releaseConfirmed = false;
-//   let nextVersion;
-//   changeCwdToGrafanaUiDist();
-
-//   const pkg = require(`${process.cwd()}/package.json`);
-
-//   console.log(`Current version: ${pkg.version}`);
-
-//   do {
-//     if (!usePackageJsonVersion) {
-//       const { type } = await promptBumpType();
-//       console.log(type);
-//       if (type === 'prerelease') {
-//         const { id } = await promptPrereleaseId('What kind of prerelease?', false);
-//         nextVersion = inc(pkg.version, type, id as any);
-//       } else {
-//         const { id } = await promptPrereleaseId();
-//         if (id !== 'no') {
-//           nextVersion = inc(pkg.version, `pre${type}` as ReleaseType, id as any);
-//         } else {
-//           nextVersion = inc(pkg.version, type as ReleaseType);
-//         }
-//       }
-//     } else {
-//       nextVersion = pkg.version;
-//     }
-
-//     console.log(chalk.yellowBright.bold(`You are going to release a new version of ${pkg.name}`));
-
-//     if (usePackageJsonVersion) {
-//       console.log(chalk.green(`Version based on package.json: `), chalk.bold.yellowBright(`${nextVersion}`));
-//     } else {
-//       console.log(chalk.green(`Version bump: ${pkg.version} ->`), chalk.bold.yellowBright(`${nextVersion}`));
-//     }
-
-//     const { confirmed } = await promptConfirm();
-
-//     releaseConfirmed = confirmed;
-//   } while (!releaseConfirmed);
-
-//   if (!usePackageJsonVersion) {
-//     await bumpVersion(nextVersion);
-//   }
-
-//   if (createVersionCommit) {
-//     await prepareVersionCommitAndPush(nextVersion);
-//   }
-
-//   if (publishToNpm) {
-//     console.log(chalk.yellowBright.bold(`\nReview dist package.json before proceeding!\n`));
-//     const { confirmed } = await promptConfirm('Are you ready to publish to npm?');
-
-//     if (!confirmed) {
-//       process.exit();
-//     }
-
-//     await publishPackage(pkg.name, nextVersion);
-//     console.log(chalk.green(`\nVersion ${nextVersion} of ${pkg.name} succesfully released!`));
-//     console.log(chalk.yellow(`\nUpdated @grafana/ui/package.json with version bump created.`));
-
-//     process.exit();
-//   } else {
-//     console.log(
-//       chalk.green(
-//         `\nVersion ${nextVersion} of ${pkg.name} succesfully prepared for release. See packages/grafana-ui/dist`
-//       )
-//     );
-//     console.log(chalk.green(`\nTo publish to npm registry run`), chalk.bold.blue(`npm run gui:publish`));
-//   }
-// };
-
-// export const releaseTask = new Task<ReleaseTaskOptions>('@grafana/ui release', releaseTaskRunner);
````
````diff
@@ -3,7 +3,6 @@ import execa = require('execa');
 import * as fs from 'fs';
 // @ts-ignore
 import * as path from 'path';
-import { changeCwdToGrafanaUi, restoreCwd, changeCwdToPackage } from '../utils/cwd';
 import chalk from 'chalk';
 import { useSpinner } from '../utils/useSpinner';
 import { Task, TaskRunner } from './task';

@@ -100,4 +99,4 @@ const buildTaskRunner: TaskRunner<PackageBuildOptions> = async ({ scope }) => {
   await Promise.all(scopes.map(s => s()));
 };

-export const buildPackageTask = new Task<PackageBuildOptions>('@grafana/ui build', buildTaskRunner);
+export const buildPackageTask = new Task<PackageBuildOptions>('Package build', buildTaskRunner);
````
````diff
@@ -4,7 +4,6 @@ import execa = require('execa');
 import path = require('path');
 import fs = require('fs');
-import glob = require('glob');
 import util = require('util');
 import { Linter, Configuration, RuleFailure } from 'tslint';
 import * as prettier from 'prettier';

@@ -17,7 +16,6 @@ interface PluginBuildOptions {

 export const bundlePlugin = useSpinner<PluginBundleOptions>('Compiling...', async options => await bundleFn(options));

-const readFileAsync = util.promisify(fs.readFile);
 // @ts-ignore
 export const clean = useSpinner<void>('Cleaning', async () => await execa('rimraf', [`${process.cwd()}/dist`]));
````
````diff
@@ -1,6 +1,5 @@
 import { Task, TaskRunner } from './task';
 import { pluginBuildRunner } from './plugin.build';
-import { useSpinner } from '../utils/useSpinner';
 import { restoreCwd } from '../utils/cwd';
 import { getPluginJson } from '../../config/utils/pluginValidation';

@@ -10,7 +9,8 @@ import path = require('path');
 import fs = require('fs');

 export interface PluginCIOptions {
-  dryRun?: boolean;
+  backend?: string;
+  full?: boolean;
 }

 const calcJavascriptSize = (base: string, files?: string[]): number => {
````
@@ -33,46 +33,353 @@ const calcJavascriptSize = (base: string, files?: string[]): number => {
|
||||
return size;
|
||||
};
|
||||
|
||||
const pluginCIRunner: TaskRunner<PluginCIOptions> = async ({ dryRun }) => {
const getJobFromProcessArgv = () => {
  const arg = process.argv[2];
  if (arg && arg.startsWith('plugin:ci-')) {
    const task = arg.substring('plugin:ci-'.length);
    if ('build' === task) {
      if ('--platform' === process.argv[3] && process.argv[4]) {
        return task + '_' + process.argv[4];
      }
      return 'build_nodejs';
    }
    return task;
  }
  return 'unknown_job';
};

const job = process.env.CIRCLE_JOB || getJobFromProcessArgv();

const getJobFolder = () => {
  const dir = path.resolve(process.cwd(), 'ci', 'jobs', job);
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
  return dir;
};

const getCiFolder = () => {
  const dir = path.resolve(process.cwd(), 'ci');
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
  return dir;
};

const writeJobStats = (startTime: number, workDir: string) => {
  const stats = {
    job,
    startTime,
    endTime: Date.now(),
  };
  const f = path.resolve(workDir, 'stats.json');
  fs.writeFile(f, JSON.stringify(stats, null, 2), err => {
    if (err) {
      throw new Error('Unable to write stats: ' + f);
    }
  });
};

/**
 * 1. BUILD
 *
 * when platform exists it is building backend, otherwise frontend
 *
 * Each build writes data:
 *  ~/work/build_xxx/
 *
 * Anything that should be put into the final zip file should be put in:
 *  ~/work/build_xxx/dist
 */
const buildPluginRunner: TaskRunner<PluginCIOptions> = async ({ backend }) => {
  const start = Date.now();
  const distDir = `${process.cwd()}/dist`;
  const artifactsDir = `${process.cwd()}/artifacts`;
  await execa('rimraf', [`${process.cwd()}/coverage`]);
  await execa('rimraf', [artifactsDir]);
  const workDir = getJobFolder();
  await execa('rimraf', [workDir]);
  fs.mkdirSync(workDir);

  // Do regular build process
  await pluginBuildRunner({ coverage: true });
  const elapsed = Date.now() - start;

  if (!fs.existsSync(artifactsDir)) {
    fs.mkdirSync(artifactsDir);
  if (backend) {
    console.log('TODO, backend support?');
    fs.mkdirSync(path.resolve(process.cwd(), 'dist'));
    const file = path.resolve(process.cwd(), 'dist', `README_${backend}.txt`);
    fs.writeFile(file, `TODO... build backend plugin: ${backend}!`, err => {
      if (err) {
        throw new Error('Unable to write: ' + file);
      }
    });
  } else {
    // Do regular build process with coverage
    await pluginBuildRunner({ coverage: true });
  }

  // TODO? can this be typed from @grafana/ui?
  // Move local folders to the scoped job folder
  for (const name of ['dist', 'coverage']) {
    const dir = path.resolve(process.cwd(), name);
    if (fs.existsSync(dir)) {
      fs.renameSync(dir, path.resolve(workDir, name));
    }
  }
  writeJobStats(start, workDir);
};

export const ciBuildPluginTask = new Task<PluginCIOptions>('Build Plugin', buildPluginRunner);
/**
 * 2. Build Docs
 *
 * Take /docs/* and format it into /ci/docs/HTML site
 */
const buildPluginDocsRunner: TaskRunner<PluginCIOptions> = async () => {
  const docsSrc = path.resolve(process.cwd(), 'docs');
  if (!fs.existsSync(docsSrc)) {
    throw new Error('Docs folder does not exist!');
  }

  const start = Date.now();
  const workDir = getJobFolder();
  await execa('rimraf', [workDir]);
  fs.mkdirSync(workDir);

  const docsDest = path.resolve(process.cwd(), 'ci', 'docs');
  fs.mkdirSync(docsDest);

  const exe = await execa('cp', ['-rv', docsSrc + '/.', docsDest]);
  console.log(exe.stdout);

  fs.writeFile(path.resolve(docsDest, 'index.html'), `TODO... actually build docs`, err => {
    if (err) {
      throw new Error('Unable to write docs: ' + docsDest);
    }
  });

  writeJobStats(start, workDir);
};

export const ciBuildPluginDocsTask = new Task<PluginCIOptions>('Build Plugin Docs', buildPluginDocsRunner);
/**
 * 2. BUNDLE
 *
 * Take everything from `~/ci/job/{any}/dist` and
 *  1. merge it into: `~/ci/dist`
 *  2. zip it into artifacts in `~/ci/artifacts`
 *  3. prepare grafana environment in: `~/ci/grafana-test-env`
 */
const bundlePluginRunner: TaskRunner<PluginCIOptions> = async () => {
  const start = Date.now();
  const ciDir = getCiFolder();
  const artifactsDir = path.resolve(ciDir, 'artifacts');
  const distDir = path.resolve(ciDir, 'dist');
  const docsDir = path.resolve(ciDir, 'docs');
  const grafanaEnvDir = path.resolve(ciDir, 'grafana-test-env');
  await execa('rimraf', [artifactsDir, distDir, grafanaEnvDir]);
  fs.mkdirSync(artifactsDir);
  fs.mkdirSync(distDir);
  fs.mkdirSync(grafanaEnvDir);

  console.log('Build Dist Folder');

  // 1. Check for a local 'dist' folder
  const d = path.resolve(process.cwd(), 'dist');
  if (fs.existsSync(d)) {
    await execa('cp', ['-rn', d + '/.', distDir]);
  }

  // 2. Look for any 'dist' folders under ci/job/XXX/dist
  const dirs = fs.readdirSync(path.resolve(ciDir, 'jobs'));
  for (const j of dirs) {
    const contents = path.resolve(ciDir, 'jobs', j, 'dist');
    if (fs.existsSync(contents)) {
      try {
        await execa('cp', ['-rn', contents + '/.', distDir]);
      } catch (er) {
        throw new Error('Duplicate files found in dist folders');
      }
    }
  }

  console.log('Building ZIP');
  const pluginInfo = getPluginJson(`${distDir}/plugin.json`);
  const zipName = pluginInfo.id + '-' + pluginInfo.info.version + '.zip';
  const zipFile = path.resolve(artifactsDir, zipName);
  let zipName = pluginInfo.id + '-' + pluginInfo.info.version + '.zip';
  let zipFile = path.resolve(artifactsDir, zipName);
  process.chdir(distDir);
  await execa('zip', ['-r', zipFile, '.']);
  restoreCwd();

  const stats = {
    startTime: start,
    buildTime: elapsed,
    jsSize: calcJavascriptSize(distDir),
    zipSize: fs.statSync(zipFile).size,
    endTime: Date.now(),
  const zipStats = fs.statSync(zipFile);
  if (zipStats.size < 100) {
    throw new Error('Invalid zip file: ' + zipFile);
  }

  const zipInfo: any = {
    name: zipName,
    size: zipStats.size,
  };
  fs.writeFile(artifactsDir + '/stats.json', JSON.stringify(stats, null, 2), err => {
    if (err) {
      throw new Error('Unable to write stats');
  const info: any = {
    plugin: zipInfo,
  };
  try {
    const exe = await execa('shasum', [zipFile]);
    const idx = exe.stdout.indexOf(' ');
    const sha1 = exe.stdout.substring(0, idx);
    fs.writeFile(zipFile + '.sha1', sha1, err => {});
    zipInfo.sha1 = sha1;
  } catch {
    console.warn('Unable to read SHA1 Checksum');
  }

  // If docs exist, zip them into artifacts
  if (fs.existsSync(docsDir)) {
    zipName = pluginInfo.id + '-' + pluginInfo.info.version + '-docs.zip';
    zipFile = path.resolve(artifactsDir, zipName);
    process.chdir(docsDir);
    await execa('zip', ['-r', zipFile, '.']);
    restoreCwd();

    const zipStats = fs.statSync(zipFile);
    const zipInfo: any = {
      name: zipName,
      size: zipStats.size,
    };
    try {
      const exe = await execa('shasum', [zipFile]);
      const idx = exe.stdout.indexOf(' ');
      const sha1 = exe.stdout.substring(0, idx);
      fs.writeFile(zipFile + '.sha1', sha1, err => {});
      zipInfo.sha1 = sha1;
    } catch {
      console.warn('Unable to read SHA1 Checksum');
    }
    info.docs = zipInfo;
  }

  let p = path.resolve(artifactsDir, 'info.json');
  fs.writeFile(p, JSON.stringify(info, null, 2), err => {
    if (err) {
      throw new Error('Error writing artifact info: ' + p);
    }
    console.log('Stats', stats);
  });

  if (!dryRun) {
    console.log('TODO send info to github?');
  }
  console.log('Setup Grafana Environment');
  p = path.resolve(grafanaEnvDir, 'plugins', pluginInfo.id);
  fs.mkdirSync(p, { recursive: true });
  await execa('unzip', [zipFile, '-d', p]);

  // Write the custom settings
  p = path.resolve(grafanaEnvDir, 'custom.ini');
  const customIniBody =
    `# Autogenerated by @grafana/toolkit \n` +
    `[paths] \n` +
    `plugins = ${path.resolve(grafanaEnvDir, 'plugins')}\n` +
    `\n`; // empty line
  fs.writeFile(p, customIniBody, err => {
    if (err) {
      throw new Error('Unable to write: ' + p);
    }
  });

  writeJobStats(start, getJobFolder());
};

export const pluginCITask = new Task<PluginCIOptions>('Plugin CI', pluginCIRunner);
export const ciBundlePluginTask = new Task<PluginCIOptions>('Bundle Plugin', bundlePluginRunner);
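Editor's note: the bundle step above shells out to `shasum`, which assumes the binary exists on the CI image — when it does not, the checksum is silently skipped. A minimal sketch of a portable alternative using only Node built-ins; the helper name `sha1OfFile` is an assumption of this sketch, not part of the toolkit:

```ts
// Hypothetical helper, not toolkit code: compute a SHA1 without the `shasum` binary.
import * as crypto from 'crypto';
import * as fs from 'fs';

const sha1OfFile = (file: string): Promise<string> =>
  new Promise((resolve, reject) => {
    const hash = crypto.createHash('sha1');
    fs.createReadStream(file)
      .on('data', chunk => hash.update(chunk)) // stream so large zips are not buffered in memory
      .on('end', () => resolve(hash.digest('hex')))
      .on('error', reject);
  });
```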
/**
 * 3. Test (end-to-end)
 *
 * deploy the zip to a running grafana instance
 */
const testPluginRunner: TaskRunner<PluginCIOptions> = async ({ full }) => {
  const start = Date.now();
  const workDir = getJobFolder();
  const pluginInfo = getPluginJson(`${process.cwd()}/src/plugin.json`);

  const args = {
    withCredentials: true,
    baseURL: process.env.GRAFANA_URL || 'http://localhost:3000/',
    responseType: 'json',
    auth: {
      username: 'admin',
      password: 'admin',
    },
  };

  const axios = require('axios');
  const frontendSettings = await axios.get('api/frontend/settings', args);

  console.log('Grafana Version: ' + JSON.stringify(frontendSettings.data.buildInfo, null, 2));

  const allPlugins: any[] = await axios.get('api/plugins', args).data;
  // for (const plugin of allPlugins) {
  //   if (plugin.id === pluginInfo.id) {
  //     console.log('------------');
  //     console.log(plugin);
  //     console.log('------------');
  //   } else {
  //     console.log('Plugin:', plugin.id, plugin.latestVersion);
  //   }
  // }
  console.log('PLUGINS:', allPlugins);

  if (full) {
    const pluginSettings = await axios.get(`api/plugins/${pluginInfo.id}/settings`, args);
    console.log('Plugin Info: ' + JSON.stringify(pluginSettings.data, null, 2));
  }

  console.log('TODO puppeteer');

  const elapsed = Date.now() - start;
  const stats = {
    job,
    sha1: `${process.env.CIRCLE_SHA1}`,
    startTime: start,
    buildTime: elapsed,
    endTime: Date.now(),
  };

  console.log('TODO Puppeteer Tests', stats);
  writeJobStats(start, workDir);
};

export const ciTestPluginTask = new Task<PluginCIOptions>('Test Plugin (e2e)', testPluginRunner);

/**
 * 4. Report
 *
 * Create a report from all the previous steps
 */
const pluginReportRunner: TaskRunner<PluginCIOptions> = async () => {
  const start = Date.now();
  const workDir = getJobFolder();
  const reportDir = path.resolve(process.cwd(), 'ci', 'report');
  await execa('rimraf', [reportDir]);
  fs.mkdirSync(reportDir);

  const file = path.resolve(reportDir, `report.txt`);
  fs.writeFile(file, `TODO... actually make a report (csv etc)`, err => {
    if (err) {
      throw new Error('Unable to write: ' + file);
    }
  });

  console.log('TODO... real report');
  writeJobStats(start, workDir);
};

export const ciPluginReportTask = new Task<PluginCIOptions>('Plugin Report', pluginReportRunner);
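Editor's note: every step above persists `{ job, startTime, endTime }` via `writeJobStats`, so a real report step could simply aggregate those files. A hedged sketch only — the actual report is still a TODO in the source:

```ts
// Hypothetical sketch, not toolkit code: collect the stats.json records
// written by writeJobStats under ci/jobs/<job>/stats.json.
import * as fs from 'fs';
import * as path from 'path';

const collectJobStats = (ciDir: string) => {
  const jobsDir = path.resolve(ciDir, 'jobs');
  return fs
    .readdirSync(jobsDir)
    .map(job => path.resolve(jobsDir, job, 'stats.json'))
    .filter(f => fs.existsSync(f))
    .map(f => JSON.parse(fs.readFileSync(f, 'utf8'))); // [{ job, startTime, endTime }, ...]
};
```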
/**
 * 5. Deploy
 *
 * deploy the zip to a running grafana instance
 */
const deployPluginRunner: TaskRunner<PluginCIOptions> = async () => {
  console.log('TODO DEPLOY??');
  console.log(' if PR => write a comment to github with difference ');
  console.log(' if master | vXYZ ==> upload artifacts to some repo ');
};

export const ciDeployPluginTask = new Task<PluginCIOptions>('Deploy plugin', deployPluginRunner);

@@ -1,5 +1,3 @@
import path = require('path');
import fs = require('fs');
import webpack = require('webpack');
import { getWebpackConfig } from '../../../config/webpack.plugin.config';
import formatWebpackMessages = require('react-dev-utils/formatWebpackMessages');

@@ -1,4 +1,3 @@
import path = require('path');
import * as jestCLI from 'jest-cli';
import { useSpinner } from '../../utils/useSpinner';
import { jestConfig } from '../../../config/jest.plugin.config';
@@ -46,7 +46,6 @@ export async function getTeam(team: any): Promise<any> {
}

export async function addToTeam(team: any, user: any): Promise<any> {
  const members = await client.get(`/teams/${team.id}/members`);
  console.log(`Adding user ${user.name} to team ${team.name}`);
  await client.post(`/teams/${team.id}/members`, { userId: user.id });
}

@@ -1,6 +1,5 @@
import execa = require('execa');
import * as fs from 'fs';
import { changeCwdToGrafanaUi, restoreCwd, changeCwdToGrafanaToolkit } from '../utils/cwd';
import chalk from 'chalk';
import { useSpinner } from '../utils/useSpinner';
import { Task, TaskRunner } from './task';
@@ -3,7 +3,7 @@ import { getPluginJson, validatePluginJson } from './pluginValidation';
describe('pluginValidation', () => {
  describe('plugin.json', () => {
    test('missing plugin.json file', () => {
      expect(() => getPluginJson(`${__dirname}/mocks/missing-plugin-json`)).toThrow('plugin.json file is missing!');
      expect(() => getPluginJson(`${__dirname}/mocks/missing-plugin.json`)).toThrowError();
    });
  });

@@ -1,5 +1,3 @@
import path = require('path');

// See: packages/grafana-ui/src/types/plugin.ts
interface PluginJSONSchema {
  id: string;
@@ -22,15 +20,24 @@ export const validatePluginJson = (pluginJson: any) => {
  if (!pluginJson.info.version) {
    throw new Error('Plugin info.version is missing in plugin.json');
  }

  const types = ['panel', 'datasource', 'app'];
  const type = pluginJson.type;
  if (!types.includes(type)) {
    throw new Error('Invalid plugin type in plugin.json: ' + type);
  }

  if (!pluginJson.id.endsWith('-' + type)) {
    throw new Error('[plugin.json] id should end with: -' + type);
  }
};

export const getPluginJson = (root: string = process.cwd()): PluginJSONSchema => {
export const getPluginJson = (path: string): PluginJSONSchema => {
  let pluginJson;

  try {
    pluginJson = require(path.resolve(root, 'src/plugin.json'));
    pluginJson = require(path);
  } catch (e) {
    throw new Error('plugin.json file is missing!');
    throw new Error('Unable to find: ' + path);
  }

  validatePluginJson(pluginJson);
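Editor's note: with the new signature the caller now passes the full path to plugin.json rather than a project root. A minimal usage sketch, assuming only what the hunk above shows (the error text comes from the new `throw`):

```ts
// Minimal sketch of the new call shape.
const pluginJsonPath = `${process.cwd()}/src/plugin.json`;
try {
  const info = getPluginJson(pluginJsonPath);
  console.log(`Found plugin: ${info.id}`); // id must end with '-<type>', per the validation above
} catch (e) {
  console.error(e.message); // 'Unable to find: <path>' when the file is missing
}
```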
@@ -5,9 +5,9 @@ const ReplaceInFileWebpackPlugin = require('replace-in-file-webpack-plugin');
const TerserPlugin = require('terser-webpack-plugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const OptimizeCssAssetsPlugin = require('optimize-css-assets-webpack-plugin');
const ngAnnotatePlugin = require('ng-annotate-webpack-plugin');

import * as webpack from 'webpack';
import { hasThemeStylesheets, getStyleLoaders, getStylesheetEntries, getFileLoaders } from './webpack/loaders';
import { getStyleLoaders, getStylesheetEntries, getFileLoaders } from './webpack/loaders';

interface WebpackConfigurationOptions {
  watch?: boolean;
@@ -51,6 +51,7 @@ const getManualChunk = (id: string) => {
      };
    }
  }
  return null;
};

const getEntries = () => {
@@ -83,8 +84,8 @@ const getCommonPlugins = (options: WebpackConfigurationOptions) => {
        { from: '../LICENSE', to: '.' },
        { from: 'img/*', to: '.' },
        { from: '**/*.json', to: '.' },
        // { from: '**/*.svg', to: '.' },
        // { from: '**/*.png', to: '.' },
        { from: '**/*.svg', to: '.' },
        { from: '**/*.png', to: '.' },
        { from: '**/*.html', to: '.' },
      ],
      { logLevel: options.watch ? 'silent' : 'warn' }
@@ -114,7 +115,6 @@ export const getWebpackConfig: WebpackConfigurationGetter = options => {
  const optimization: { [key: string]: any } = {};

  if (options.production) {
    plugins.push(new ngAnnotatePlugin());
    optimization.minimizer = [new TerserPlugin(), new OptimizeCssAssetsPlugin()];
  }

@@ -177,8 +177,12 @@ export const getWebpackConfig: WebpackConfigurationGetter = options => {
        loaders: [
          {
            loader: 'babel-loader',
            options: { presets: ['@babel/preset-env'] },
            options: {
              presets: ['@babel/preset-env'],
              plugins: ['angularjs-annotate'],
            },
          },

          'ts-loader',
        ],
        exclude: /(node_modules)/,

@@ -3,7 +3,6 @@ import { getStylesheetEntries, hasThemeStylesheets } from './loaders';
describe('Loaders', () => {
  describe('stylesheet helpers', () => {
    const logSpy = jest.spyOn(console, 'log').mockImplementation();
    const errorSpy = jest.spyOn(console, 'error').mockImplementation();

    afterAll(() => {
      logSpy.mockRestore();

@@ -1,6 +1,3 @@
import { getPluginJson } from '../utils/pluginValidation';

const path = require('path');
const fs = require('fs');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');

@@ -122,8 +119,8 @@ export const getFileLoaders = () => {
      ? {
          loader: 'file-loader',
          options: {
            outputPath: 'static',
            name: '[name].[hash:8].[ext]',
            outputPath: '/',
            name: '[path][name].[ext]',
          },
        }
      : // When using single css import images are inlined as base64 URIs in the result bundle
@@ -1,17 +1,13 @@
{
  "extends": "../tsconfig.json",
  "include": ["src/**/*.ts"],
  "exclude": ["dist", "node_modules"],
  "compilerOptions": {
    "module": "commonjs",
    "rootDirs": ["."],
    "outDir": "dist/src",
    "strict": true,
    "alwaysStrict": true,
    "noImplicitAny": true,
    "strictNullChecks": true,
    "declaration": false,
    "typeRoots": ["./node_modules/@types"],
    "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
    "removeComments": false,
    "esModuleInterop": true,
    "lib": ["es2015", "es2017.string"]
  }
@@ -15,37 +15,3 @@ See [package source](https://github.com/grafana/grafana/tree/master/packages/gra

## Development

For development purposes we suggest using `yarn link`, which will create a symlink to the @grafana/ui lib. To do so, navigate to `packages/grafana-ui` and run `yarn link`. Then, navigate to your project and run `yarn link @grafana/ui` to use the linked version of the lib. To unlink, follow the same procedure, but use `yarn unlink` instead.

## Building @grafana/ui

To build @grafana/ui, run the `npm run gui:build` script _from the Grafana repository root_. The build will be created in the `packages/grafana-ui/dist` directory. Following the steps from [Development](#development), you can test the built package.

## Releasing a new version

To release a new version, run the `npm run gui:release` script _from the Grafana repository root_. This has to be done on the master branch. The script will prepare the distribution package and prompt you to bump the library version and publish it to the NPM registry. When the new package is published, create a PR with the bumped version in package.json.

### Automatic version bump

When running `npm run gui:release`, the package.json file will be updated automatically, then committed and pushed to the upstream branch.

### Manual version bump

Manually update the version in `package.json` and then run `npm run gui:release --usePackageJsonVersion` _from the Grafana repository root_.

### Preparing a release package without publishing to the NPM registry

For testing purposes there is an `npm run gui:releasePrepare` task that prepares the distribution package without publishing it to the NPM registry.

### V1 release process overview

1. The package is compiled with tsc. Typings are created in the `/dist` directory, and the compiled js lands in the `/compiled` dir.
2. Rollup creates a CommonJS package based on the compiled sources, and outputs it to the `/dist` directory.
3. Readme, changelog and index.js files are moved to the `/dist` directory.
4. The package version is bumped in both the `@grafana/ui` package dir and in the dist directory.
5. A version commit is created and pushed to the master branch.
6. The package is published to npm.

## Versioning

To limit confusion between @grafana/ui and Grafana versioning, we decided to keep the major version in sync between the two.
This means the first version of @grafana/ui is tagged 6.0.0-alpha.0, to keep the version in sync with the Grafana 6.0 release.
@@ -1,9 +1,9 @@
{
  "name": "@grafana/ui",
  "version": "6.3.0-alpha.33",
  "version": "6.4.0-alpha.12",
  "description": "Grafana Components Library",
  "keywords": [
    "typescript",
    "grafana",
    "react",
    "react-component"
  ],
@@ -20,12 +20,13 @@
  "author": "Grafana Labs",
  "license": "Apache-2.0",
  "dependencies": {
    "@grafana/data": "^6.4.0-alpha.8",
    "@torkelo/react-select": "2.1.1",
    "@types/react-color": "2.17.0",
    "classnames": "2.2.6",
    "d3": "5.9.1",
    "jquery": "3.4.1",
    "lodash": "4.17.11",
    "lodash": "4.17.14",
    "moment": "2.24.0",
    "papaparse": "4.6.3",
    "react": "16.8.6",
@@ -76,8 +77,5 @@
    "rollup-plugin-typescript2": "0.19.3",
    "rollup-plugin-visualizer": "0.9.2",
    "typescript": "3.4.1"
  },
  "resolutions": {
    "@types/lodash": "4.14.119"
  }
}
@@ -29,7 +29,7 @@ export class TableInputCSV extends React.PureComponent<Props, State> {
    };
  }

  readCSV = debounce(() => {
  readCSV: any = debounce(() => {
    const { config } = this.props;
    const { text } = this.state;
@@ -1,19 +1,11 @@
{
  "extends": "../../tsconfig.json",
  "extends": "../tsconfig.json",
  "include": ["src/**/*.ts", "src/**/*.tsx"],
  "exclude": ["dist", "node_modules"],
  "compilerOptions": {
    "rootDirs": [".", "stories"],
    "module": "esnext",
    "outDir": "compiled",
    "declaration": true,
    "declarationDir": "dist",
    "strict": true,
    "alwaysStrict": true,
    "noImplicitAny": true,
    "strictNullChecks": true,
    "typeRoots": ["./node_modules/@types", "types"],
    "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
    "removeComments": false
    "declarationDir": "dist",
    "outDir": "compiled"
  }
}
packages/tsconfig.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "module": "esnext",
    "declaration": true,
    "strict": true,
    "alwaysStrict": true,
    "noImplicitAny": true,
    "strictNullChecks": true,
    "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
    "removeComments": false
  }
}
@@ -68,7 +68,10 @@ func (hs *HTTPServer) AddAPIKey(c *models.ReqContext, cmd models.AddApiKeyCommand) Response {
	if err == models.ErrInvalidApiKeyExpiration {
		return Error(400, err.Error(), nil)
	}
	return Error(500, "Failed to add API key", err)
	if err == models.ErrDuplicateApiKey {
		return Error(409, err.Error(), nil)
	}
	return Error(500, "Failed to add API Key", err)
}

result := &dtos.NewApiKeyResult{
@@ -93,6 +93,26 @@ func (sc *scenarioContext) fakeReqWithParams(method, url string, queryParams map[string]string) *scenarioContext {
	return sc
}

func (sc *scenarioContext) fakeReqNoAssertions(method, url string) *scenarioContext {
	sc.resp = httptest.NewRecorder()
	req, _ := http.NewRequest(method, url, nil)
	sc.req = req

	return sc
}

func (sc *scenarioContext) fakeReqNoAssertionsWithCookie(method, url string, cookie http.Cookie) *scenarioContext {
	sc.resp = httptest.NewRecorder()
	http.SetCookie(sc.resp, &cookie)

	req, _ := http.NewRequest(method, url, nil)
	req.Header = http.Header{"Cookie": sc.resp.Header()["Set-Cookie"]}

	sc.req = req

	return sc
}

type scenarioContext struct {
	m       *macaron.Macaron
	context *m.ReqContext
@@ -278,8 +278,7 @@ func (hs *HTTPServer) PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand) Response {
	inFolder := cmd.FolderId > 0
	err := dashboards.MakeUserAdmin(hs.Bus, cmd.OrgId, cmd.UserId, dashboard.Id, !inFolder)
	if err != nil {
		hs.log.Error("Could not make user admin", "dashboard", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
		return Error(500, "Failed to make user admin of dashboard", err)
		hs.log.Error("Could not make user admin", "dashboard", dashboard.Title, "user", c.SignedInUser.UserId, "error", err)
	}
}

@@ -64,7 +64,6 @@ func (hs *HTTPServer) CreateFolder(c *m.ReqContext, cmd m.CreateFolderCommand) Response {
	if hs.Cfg.EditorsCanAdmin {
		if err := dashboards.MakeUserAdmin(hs.Bus, c.OrgId, c.SignedInUser.UserId, cmd.Result.Id, true); err != nil {
			hs.log.Error("Could not make user admin", "folder", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
			return Error(500, "Failed to make user admin of folder", err)
		}
	}
@@ -21,8 +21,14 @@ const (
	LoginErrorCookieName = "login_error"
)

var setIndexViewData = (*HTTPServer).setIndexViewData

var getViewIndex = func() string {
	return ViewIndex
}

func (hs *HTTPServer) LoginView(c *models.ReqContext) {
	viewData, err := hs.setIndexViewData(c)
	viewData, err := setIndexViewData(hs, c)
	if err != nil {
		c.Handle(500, "Failed to get settings", err)
		return
@@ -41,8 +47,14 @@ func (hs *HTTPServer) LoginView(c *models.ReqContext) {
	viewData.Settings["samlEnabled"] = hs.Cfg.SAMLEnabled

	if loginError, ok := tryGetEncryptedCookie(c, LoginErrorCookieName); ok {
		// This cookie is only set whenever an OAuth login fails,
		// therefore the loginError should be passed to the view data
		// and the view should return immediately, before attempting
		// to log in again via OAuth and entering a redirect loop.
		deleteCookie(c, LoginErrorCookieName)
		viewData.Settings["loginError"] = loginError
		c.HTML(200, getViewIndex(), viewData)
		return
	}

	if tryOAuthAutoLogin(c) {
pkg/api/login_test.go (new file, 135 lines)
@@ -0,0 +1,135 @@
package api

import (
	"encoding/hex"
	"errors"
	"github.com/grafana/grafana/pkg/api/dtos"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/util"
	"github.com/stretchr/testify/assert"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"
)

func mockSetIndexViewData() {
	setIndexViewData = func(*HTTPServer, *models.ReqContext) (*dtos.IndexViewData, error) {
		data := &dtos.IndexViewData{
			User:     &dtos.CurrentUser{},
			Settings: map[string]interface{}{},
			NavTree:  []*dtos.NavLink{},
		}
		return data, nil
	}
}

func resetSetIndexViewData() {
	setIndexViewData = (*HTTPServer).setIndexViewData
}

func mockViewIndex() {
	getViewIndex = func() string {
		return "index-template"
	}
}

func resetViewIndex() {
	getViewIndex = func() string {
		return ViewIndex
	}
}

func getBody(resp *httptest.ResponseRecorder) (string, error) {
	responseData, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	return string(responseData), nil
}

func TestLoginErrorCookieApiEndpoint(t *testing.T) {
	mockSetIndexViewData()
	defer resetSetIndexViewData()

	mockViewIndex()
	defer resetViewIndex()

	sc := setupScenarioContext("/login")
	hs := &HTTPServer{
		Cfg: setting.NewCfg(),
	}

	sc.defaultHandler = Wrap(func(w http.ResponseWriter, c *models.ReqContext) {
		hs.LoginView(c)
	})

	setting.OAuthService = &setting.OAuther{}
	setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo)
	setting.LoginCookieName = "grafana_session"
	setting.SecretKey = "login_testing"

	setting.OAuthService = &setting.OAuther{}
	setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo)
	setting.OAuthService.OAuthInfos["github"] = &setting.OAuthInfo{
		ClientId:     "fake",
		ClientSecret: "fakefake",
		Enabled:      true,
		AllowSignup:  true,
		Name:         "github",
	}
	setting.OAuthAutoLogin = true

	oauthError := errors.New("User not a member of one of the required organizations")
	encryptedError, _ := util.Encrypt([]byte(oauthError.Error()), setting.SecretKey)
	cookie := http.Cookie{
		Name:     LoginErrorCookieName,
		MaxAge:   60,
		Value:    hex.EncodeToString(encryptedError),
		HttpOnly: true,
		Path:     setting.AppSubUrl + "/",
		Secure:   hs.Cfg.CookieSecure,
		SameSite: hs.Cfg.CookieSameSite,
	}
	sc.m.Get(sc.url, sc.defaultHandler)
	sc.fakeReqNoAssertionsWithCookie("GET", sc.url, cookie).exec()
	assert.Equal(t, sc.resp.Code, 200)

	responseString, err := getBody(sc.resp)
	assert.Nil(t, err)
	assert.True(t, strings.Contains(responseString, oauthError.Error()))
}

func TestLoginOAuthRedirect(t *testing.T) {
	mockSetIndexViewData()
	defer resetSetIndexViewData()

	sc := setupScenarioContext("/login")
	hs := &HTTPServer{
		Cfg: setting.NewCfg(),
	}

	sc.defaultHandler = Wrap(func(c *models.ReqContext) {
		hs.LoginView(c)
	})

	setting.OAuthService = &setting.OAuther{}
	setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo)
	setting.OAuthService.OAuthInfos["github"] = &setting.OAuthInfo{
		ClientId:     "fake",
		ClientSecret: "fakefake",
		Enabled:      true,
		AllowSignup:  true,
		Name:         "github",
	}
	setting.OAuthAutoLogin = true
	sc.m.Get(sc.url, sc.defaultHandler)
	sc.fakeReqNoAssertions("GET", sc.url).exec()

	assert.Equal(t, sc.resp.Code, 307)
	location, ok := sc.resp.Header()["Location"]
	assert.True(t, ok)
	assert.Equal(t, location[0], "/login/github")
}
@@ -30,23 +30,6 @@ func GetTeamMembers(c *m.ReqContext) Response {
	return JSON(200, query.Result)
}

func GetAuthProviderLabel(authModule string) string {
	switch authModule {
	case "oauth_github":
		return "GitHub"
	case "oauth_google":
		return "Google"
	case "oauth_gitlab":
		return "GitLab"
	case "oauth_grafana_com", "oauth_grafananet":
		return "grafana.com"
	case "ldap", "":
		return "LDAP"
	default:
		return "OAuth"
	}
}

// POST /api/teams/:teamId/members
func (hs *HTTPServer) AddTeamMember(c *m.ReqContext, cmd m.AddTeamMemberCommand) Response {
	cmd.OrgId = c.OrgId
@@ -29,8 +29,11 @@ func getUserUserProfile(userID int64) Response {
	}

	getAuthQuery := m.GetAuthInfoQuery{UserId: userID}
	query.Result.AuthLabels = []string{}
	if err := bus.Dispatch(&getAuthQuery); err == nil {
		query.Result.AuthModule = []string{getAuthQuery.Result.AuthModule}
		authLabel := GetAuthProviderLabel(getAuthQuery.Result.AuthModule)
		query.Result.AuthLabels = append(query.Result.AuthLabels, authLabel)
		query.Result.IsExternal = true
	}

	return JSON(200, query.Result)
@@ -277,6 +280,12 @@ func searchUser(c *m.ReqContext) (*m.SearchUsersQuery, error) {

	for _, user := range query.Result.Users {
		user.AvatarUrl = dtos.GetGravatarUrl(user.Email)
		user.AuthLabels = make([]string, 0)
		if user.AuthModule != nil && len(user.AuthModule) > 0 {
			for _, authModule := range user.AuthModule {
				user.AuthLabels = append(user.AuthLabels, GetAuthProviderLabel(authModule))
			}
		}
	}

	query.Result.Page = page
@@ -315,3 +324,20 @@ func ClearHelpFlags(c *m.ReqContext) Response {

	return JSON(200, &util.DynMap{"message": "Help flag set", "helpFlags1": cmd.HelpFlags1})
}

func GetAuthProviderLabel(authModule string) string {
	switch authModule {
	case "oauth_github":
		return "GitHub"
	case "oauth_google":
		return "Google"
	case "oauth_gitlab":
		return "GitLab"
	case "oauth_grafana_com", "oauth_grafananet":
		return "grafana.com"
	case "ldap", "":
		return "LDAP"
	default:
		return "OAuth"
	}
}
@@ -62,7 +62,7 @@ func EncryptDatasourcePaswords(c utils.CommandLine, sqlStore *sqlstore.SqlStore) error {
}

func migrateColumn(session *sqlstore.DBSession, column string) (int, error) {
	var rows []map[string]string
	var rows []map[string][]byte

	session.Cols("id", column, "secure_json_data")
	session.Table("data_source")
@@ -78,7 +78,7 @@ func migrateColumn(session *sqlstore.DBSession, column string) (int, error) {
	return rowsUpdated, errutil.Wrapf(err, "failed to update column: %s", column)
}

func updateRows(session *sqlstore.DBSession, rows []map[string]string, passwordFieldName string) (int, error) {
func updateRows(session *sqlstore.DBSession, rows []map[string][]byte, passwordFieldName string) (int, error) {
	var rowsUpdated int

	for _, row := range rows {
@@ -94,7 +94,7 @@ func updateRows(session *sqlstore.DBSession, rows []map[string]string, passwordFieldName string) (int, error) {

		newRow := map[string]interface{}{"secure_json_data": data, passwordFieldName: ""}
		session.Table("data_source")
		session.Where("id = ?", row["id"])
		session.Where("id = ?", string(row["id"]))
		// Setting both columns while having value only for secure_json_data should clear the [passwordFieldName] column
		session.Cols("secure_json_data", passwordFieldName)

@@ -108,16 +108,20 @@ func updateRows(session *sqlstore.DBSession, rows []map[string]string, passwordFieldName string) (int, error) {
	return rowsUpdated, nil
}

func getUpdatedSecureJSONData(row map[string]string, passwordFieldName string) (map[string]interface{}, error) {
	encryptedPassword, err := util.Encrypt([]byte(row[passwordFieldName]), setting.SecretKey)
func getUpdatedSecureJSONData(row map[string][]byte, passwordFieldName string) (map[string]interface{}, error) {
	encryptedPassword, err := util.Encrypt(row[passwordFieldName], setting.SecretKey)
	if err != nil {
		return nil, err
	}

	var secureJSONData map[string]interface{}

	if err := json.Unmarshal([]byte(row["secure_json_data"]), &secureJSONData); err != nil {
		return nil, err
	if len(row["secure_json_data"]) > 0 {
		if err := json.Unmarshal(row["secure_json_data"], &secureJSONData); err != nil {
			return nil, err
		}
	} else {
		secureJSONData = map[string]interface{}{}
	}

	jsonFieldName := util.ToCamelCase(passwordFieldName)

@@ -20,19 +20,30 @@ func TestPasswordMigrationCommand(t *testing.T) {
	datasources := []*models.DataSource{
		{Type: "influxdb", Name: "influxdb", Password: "foobar"},
		{Type: "graphite", Name: "graphite", BasicAuthPassword: "foobar"},
		{Type: "prometheus", Name: "prometheus", SecureJsonData: securejsondata.GetEncryptedJsonData(map[string]string{})},
		{Type: "prometheus", Name: "prometheus"},
		{Type: "elasticsearch", Name: "elasticsearch", Password: "pwd"},
	}

	// set required default values
	for _, ds := range datasources {
		ds.Created = time.Now()
		ds.Updated = time.Now()
		ds.SecureJsonData = securejsondata.GetEncryptedJsonData(map[string]string{})
		if ds.Name == "elasticsearch" {
			ds.SecureJsonData = securejsondata.GetEncryptedJsonData(map[string]string{
				"key": "value",
			})
		} else {
			ds.SecureJsonData = securejsondata.GetEncryptedJsonData(map[string]string{})
		}
	}

	_, err := session.Insert(&datasources)
	assert.Nil(t, err)

	// force secure_json_data to be null to verify that migration can handle that
	_, err = session.Exec("update data_source set secure_json_data = null where name = 'influxdb'")
	assert.Nil(t, err)

	// run migration
	err = EncryptDatasourcePaswords(&commandstest.FakeCommandLine{}, sqlstore)
	assert.Nil(t, err)
@@ -41,7 +52,7 @@ func TestPasswordMigrationCommand(t *testing.T) {
	var dss []*models.DataSource
	err = session.SQL("select * from data_source").Find(&dss)
	assert.Nil(t, err)
	assert.Equal(t, len(dss), 3)
	assert.Equal(t, len(dss), 4)

	for _, ds := range dss {
		sj := ds.SecureJsonData.Decrypt()
@@ -63,5 +74,15 @@ func TestPasswordMigrationCommand(t *testing.T) {
		if ds.Name == "prometheus" {
			assert.Equal(t, len(sj), 0)
		}

		if ds.Name == "elasticsearch" {
			assert.Equal(t, ds.Password, "")
			key, exist := sj["key"]
			assert.True(t, exist)
			password, exist := sj["password"]
			assert.True(t, exist)
			assert.Equal(t, password, "pwd", "expected password to be moved to securejson")
			assert.Equal(t, key, "value", "expected existing key to be kept intact in securejson")
		}
	}
}
@@ -85,7 +85,7 @@ func InstallPlugin(pluginName, version string, c utils.CommandLine) error {
	}

	logger.Infof("installing %v @ %v\n", pluginName, version)
	logger.Infof("from url: %v\n", downloadURL)
	logger.Infof("from: %v\n", downloadURL)
	logger.Infof("into: %v\n", pluginFolder)
	logger.Info("\n")

@@ -145,18 +145,27 @@ func downloadFile(pluginName, filePath, url string) (err error) {
		}
	}()

	resp, err := http.Get(url) // #nosec
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	var bytes []byte

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return err
	if _, err := os.Stat(url); err == nil {
		bytes, err = ioutil.ReadFile(url)
		if err != nil {
			return err
		}
	} else {
		resp, err := http.Get(url) // #nosec
		if err != nil {
			return err
		}
		defer resp.Body.Close()

		bytes, err = ioutil.ReadAll(resp.Body)
		if err != nil {
			return err
		}
	}

	return extractFiles(body, pluginName, filePath)
	return extractFiles(bytes, pluginName, filePath)
}

func extractFiles(body []byte, pluginName string, filePath string) error {
@@ -182,6 +182,10 @@ func initContextWithBasicAuth(ctx *models.ReqContext, orgId int64) bool {
}

func initContextWithToken(authTokenService models.UserTokenService, ctx *models.ReqContext, orgID int64) bool {
	if setting.LoginCookieName == "" {
		return false
	}

	rawToken := ctx.GetCookie(setting.LoginCookieName)
	if rawToken == "" {
		return false
@@ -7,6 +7,7 @@ import (

var ErrInvalidApiKey = errors.New("Invalid API Key")
var ErrInvalidApiKeyExpiration = errors.New("Negative value for SecondsToLive")
var ErrDuplicateApiKey = errors.New("API Key Organization ID And Name Must Be Unique")

type ApiKey struct {
	Id int64

@@ -216,7 +216,8 @@ type UserProfileDTO struct {
	OrgId          int64  `json:"orgId"`
	IsGrafanaAdmin bool   `json:"isGrafanaAdmin"`
	IsDisabled     bool   `json:"isDisabled"`
	AuthModule []string `json:"authModule"`
	IsExternal bool     `json:"isExternal"`
	AuthLabels []string `json:"authLabels"`
}

type UserSearchHitDTO struct {
@@ -229,7 +230,8 @@ type UserSearchHitDTO struct {
	IsDisabled    bool      `json:"isDisabled"`
	LastSeenAt    time.Time `json:"lastSeenAt"`
	LastSeenAtAge string    `json:"lastSeenAtAge"`
	AuthModule AuthModuleConversion `json:"authModule"`
	AuthLabels []string             `json:"authLabels"`
	AuthModule AuthModuleConversion `json:"-"`
}

type UserIdDTO struct {
@@ -31,7 +31,8 @@ type IConnection interface {
type IServer interface {
	Login(*models.LoginUserQuery) (*models.ExternalUserInfo, error)
	Users([]string) ([]*models.ExternalUserInfo, error)
	Auth(string, string) error
	Bind() error
	UserBind(string, string) error
	Dial() error
	Close()
}
@@ -43,6 +44,23 @@ type Server struct {
	log log.Logger
}

// Bind authenticates the connection with the LDAP server
// - with the username and password set up in the config
// - or, anonymously
func (server *Server) Bind() error {
	if server.shouldAuthAdmin() {
		if err := server.AuthAdmin(); err != nil {
			return err
		}
	} else {
		err := server.Connection.UnauthenticatedBind(server.Config.BindDN)
		if err != nil {
			return err
		}
	}
	return nil
}

// UsersMaxRequest is the max amount of users we can request via Users(),
// since many LDAP servers have limitations
// on how many items can be returned in one request
@@ -140,15 +158,19 @@ func (server *Server) Login(query *models.LoginUserQuery) (
	*models.ExternalUserInfo, error,
) {
	var err error
	var authAndBind bool

	// Do we need to authenticate the "admin" user first?
	// Admin user should have access for the user search in LDAP server
	// Check if we can use a search user
	if server.shouldAuthAdmin() {
		if err := server.AuthAdmin(); err != nil {
			return nil, err
		}

	// Or if anyone can perform the search in LDAP?
	} else if server.shouldSingleBind() {
		authAndBind = true
		err = server.UserBind(server.singleBindDN(query.Username), query.Password)
		if err != nil {
			return nil, err
		}
	} else {
		err := server.Connection.UnauthenticatedBind(server.Config.BindDN)
		if err != nil {
@@ -173,15 +195,25 @@ func (server *Server) Login(query *models.LoginUserQuery) (
		return nil, err
	}

	// Authenticate user
	err = server.Auth(user.AuthId, query.Password)
	if err != nil {
		return nil, err
	if !authAndBind {
		// Authenticate user
		err = server.UserBind(user.AuthId, query.Password)
		if err != nil {
			return nil, err
		}
	}

	return user, nil
}

func (server *Server) singleBindDN(username string) string {
	return fmt.Sprintf(server.Config.BindDN, username)
}

func (server *Server) shouldSingleBind() bool {
	return strings.Contains(server.Config.BindDN, "%s")
}

// getUsersIteration is a helper function for the Users() method.
// It divides the users into equal parts for the anticipated requests
func getUsersIteration(logins []string, fn func(int, int) error) error {
@@ -366,9 +398,9 @@ func (server *Server) shouldAuthAdmin() bool {
	return server.Config.BindPassword != ""
}

// Auth authenticates a user in LDAP
func (server *Server) Auth(username, password string) error {
	err := server.auth(username, password)
// UserBind authenticates the connection with the LDAP server
func (server *Server) UserBind(username, password string) error {
	err := server.userBind(username, password)
	if err != nil {
		server.log.Error(
			fmt.Sprintf("Cannot authenticate user %s in LDAP", username),
@@ -383,7 +415,7 @@ func (server *Server) UserBind(username, password string) error {

// AuthAdmin authenticates the LDAP admin user
func (server *Server) AuthAdmin() error {
	err := server.auth(server.Config.BindDN, server.Config.BindPassword)
	err := server.userBind(server.Config.BindDN, server.Config.BindPassword)
	if err != nil {
		server.log.Error(
			"Cannot authenticate admin user in LDAP",
@@ -396,8 +428,8 @@ func (server *Server) AuthAdmin() error {
	return nil
}

// auth is a helper for several types of LDAP authentication
func (server *Server) auth(path, password string) error {
// userBind authenticates the connection with the LDAP server
func (server *Server) userBind(path, password string) error {
	err := server.Connection.Bind(path, password)
	if err != nil {
		if ldapErr, ok := err.(*ldap.Error); ok {
@@ -19,7 +19,7 @@ func TestLDAPLogin(t *testing.T) {
	}

	Convey("Login()", t, func() {
		Convey("Should get invalid credentials when auth fails", func() {
		Convey("Should get invalid credentials when userBind fails", func() {
			connection := &MockConnection{}
			entry := ldap.Entry{}
			result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}}
@@ -198,5 +198,37 @@ func TestLDAPLogin(t *testing.T) {
			So(username, ShouldEqual, "test")
			So(password, ShouldEqual, "pwd")
		})
		Convey("Should bind with user if %s exists in the bind_dn", func() {
			connection := &MockConnection{}
			entry := ldap.Entry{
				DN: "test",
			}
			connection.setSearchResult(&ldap.SearchResult{Entries: []*ldap.Entry{&entry}})

			authBindUser := ""
			authBindPassword := ""

			connection.BindProvider = func(name, pass string) error {
				authBindUser = name
				authBindPassword = pass
				return nil
			}
			server := &Server{
				Config: &ServerConfig{
					BindDN:        "cn=%s,ou=users,dc=grafana,dc=org",
					SearchBaseDNs: []string{"BaseDNHere"},
				},
				Connection: connection,
				log:        log.New("test-logger"),
			}

			_, err := server.Login(defaultLogin)

			So(err, ShouldBeNil)

			So(authBindUser, ShouldEqual, "cn=user,ou=users,dc=grafana,dc=org")
			So(authBindPassword, ShouldEqual, "pwd")
			So(connection.BindCalled, ShouldBeTrue)
		})
	})
}
@@ -145,7 +145,7 @@ func TestLDAPPrivateMethods(t *testing.T) {
	})

	Convey("shouldAuthAdmin()", t, func() {
		Convey("it should require admin auth", func() {
		Convey("it should require admin userBind", func() {
			server := &Server{
				Config: &ServerConfig{
					BindPassword: "test",
@@ -156,7 +156,7 @@ func TestLDAPPrivateMethods(t *testing.T) {
			So(result, ShouldBeTrue)
		})

		Convey("it should not require admin auth", func() {
		Convey("it should not require admin userBind", func() {
			server := &Server{
				Config: &ServerConfig{
					BindPassword: "",

@@ -102,7 +102,7 @@ func TestPublicAPI(t *testing.T) {
		})
	})

	Convey("Auth()", t, func() {
	Convey("UserBind()", t, func() {
		Convey("Should use provided DN and password", func() {
			connection := &MockConnection{}
			var actualUsername, actualPassword string
@@ -119,7 +119,7 @@ func TestPublicAPI(t *testing.T) {
			}

			dn := "cn=user,ou=users,dc=grafana,dc=org"
			err := server.Auth(dn, "pwd")
			err := server.UserBind(dn, "pwd")

			So(err, ShouldBeNil)
			So(actualUsername, ShouldEqual, dn)
@@ -141,7 +141,7 @@ func TestPublicAPI(t *testing.T) {
				},
				log: log.New("test-logger"),
			}
			err := server.Auth("user", "pwd")
			err := server.UserBind("user", "pwd")
			So(err, ShouldEqual, expected)
		})
	})
@@ -109,6 +109,10 @@ func (multiples *MultiLDAP) User(login string) (

	defer server.Close()

	if err := server.Bind(); err != nil {
		return nil, err
	}

	users, err := server.Users(search)
	if err != nil {
		return nil, err
@@ -142,6 +146,10 @@ func (multiples *MultiLDAP) Users(logins []string) (

	defer server.Close()

	if err := server.Bind(); err != nil {
		return nil, err
	}

	users, err := server.Users(logins)
	if err != nil {
		return nil, err

@@ -11,12 +11,15 @@ type MockLDAP struct {
	loginCalledTimes int
	closeCalledTimes int
	usersCalledTimes int
	bindCalledTimes  int

	dialErrReturn error

	loginErrReturn error
	loginReturn    *models.ExternalUserInfo

	bindErrReturn error

	usersErrReturn   error
	usersFirstReturn []*models.ExternalUserInfo
	usersRestReturn  []*models.ExternalUserInfo
@@ -40,8 +43,8 @@ func (mock *MockLDAP) Users([]string) ([]*models.ExternalUserInfo, error) {
	return mock.usersRestReturn, mock.usersErrReturn
}

// Auth test fn
func (mock *MockLDAP) Auth(string, string) error {
// UserBind test fn
func (mock *MockLDAP) UserBind(string, string) error {
	return nil
}

@@ -56,6 +59,11 @@ func (mock *MockLDAP) Close() {
	mock.closeCalledTimes = mock.closeCalledTimes + 1
}

func (mock *MockLDAP) Bind() error {
	mock.bindCalledTimes++
	return mock.bindErrReturn
}

// MockMultiLDAP represents testing struct for multildap testing
type MockMultiLDAP struct {
	LoginCalledTimes int
@@ -14,6 +14,7 @@ import (
	"os"
	"reflect"
	"strconv"
	"strings"

	"github.com/grafana/grafana/pkg/util/errutil"
)
@@ -185,8 +186,14 @@ func transformMap(i map[interface{}]interface{}) interface{} {

// interpolateValue returns the final value after interpolation. At the moment only env var interpolation is done
// here, but in the future something like interpolation from a file could also be done here.
// For a literal '$', '$$' can be used to avoid interpolation.
func interpolateValue(val string) string {
	return os.ExpandEnv(val)
	parts := strings.Split(val, "$$")
	interpolated := make([]string, len(parts))
	for i, v := range parts {
		interpolated[i] = os.ExpandEnv(v)
	}
	return strings.Join(interpolated, "$")
}

type interpolated struct {
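Editor's note: the `$$` escaping above works by splitting on the literal escape first, expanding env vars in each fragment, and re-joining with a single `$`. The same algorithm as an illustrative TypeScript sketch (the source is Go; this port, and the simplified `expandEnv` helper, are assumptions for illustration only):

```ts
// Illustrative TypeScript port of interpolateValue above -- not Grafana code.
// expandEnv stands in for Go's os.ExpandEnv and only handles the $VAR form.
const expandEnv = (s: string): string =>
  s.replace(/\$(\w+)/g, (_, name) => process.env[name] || '');

const interpolateValue = (val: string): string =>
  val
    .split('$$')    // protect literal '$$' escapes from expansion
    .map(expandEnv) // expand $VAR in each unescaped fragment
    .join('$');     // re-join, turning each '$$' into a literal '$'

// interpolateValue('mY,Passwo$$rd') === 'mY,Passwo$rd'
```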
@@ -1,16 +1,18 @@
package values

import (
	. "github.com/smartystreets/goconvey/convey"
	"gopkg.in/yaml.v2"
	"os"
	"testing"

	. "github.com/smartystreets/goconvey/convey"
	"gopkg.in/yaml.v2"
)

func TestValues(t *testing.T) {
	Convey("Values", t, func() {
		os.Setenv("INT", "1")
		os.Setenv("STRING", "test")
		os.Setenv("EMPTYSTRING", "")
		os.Setenv("BOOL", "true")

		Convey("IntValue", func() {
@@ -61,6 +63,24 @@ func TestValues(t *testing.T) {
			So(d.Val.Value(), ShouldEqual, "")
			So(d.Val.Raw, ShouldEqual, "")
		})

		Convey("empty var should have empty value", func() {
			unmarshalingTest(`val: $EMPTYSTRING`, d)
			So(d.Val.Value(), ShouldEqual, "")
			So(d.Val.Raw, ShouldEqual, "$EMPTYSTRING")
		})

		Convey("$$ should be a literal $", func() {
			unmarshalingTest(`val: $$`, d)
			So(d.Val.Value(), ShouldEqual, "$")
			So(d.Val.Raw, ShouldEqual, "$$")
		})

		Convey("$$ should be a literal $ and not expanded within a string", func() {
			unmarshalingTest(`val: mY,Passwo$$rd`, d)
			So(d.Val.Value(), ShouldEqual, "mY,Passwo$rd")
			So(d.Val.Raw, ShouldEqual, "mY,Passwo$$rd")
		})
	})

	Convey("BoolValue", func() {
@@ -199,6 +219,7 @@ func TestValues(t *testing.T) {
	Reset(func() {
		os.Unsetenv("INT")
		os.Unsetenv("STRING")
		os.Unsetenv("EMPTYSTRING")
		os.Unsetenv("BOOL")
	})
})
@@ -37,6 +37,12 @@ func DeleteApiKeyCtx(ctx context.Context, cmd *models.DeleteApiKeyCommand) error {

func AddApiKey(cmd *models.AddApiKeyCommand) error {
	return inTransaction(func(sess *DBSession) error {
		key := models.ApiKey{OrgId: cmd.OrgId, Name: cmd.Name}
		exists, _ := sess.Get(&key)
		if exists {
			return models.ErrDuplicateApiKey
		}

		updated := timeNow()
		var expires *int64 = nil
		if cmd.SecondsToLive > 0 {

@@ -115,3 +115,23 @@ func TestApiKeyDataAccess(t *testing.T) {
		})
	})
}

func TestApiKeyErrors(t *testing.T) {
	mockTimeNow()
	defer resetTimeNow()

	t.Run("Testing API Duplicate Key Errors", func(t *testing.T) {
		InitTestDB(t)
		t.Run("Given saved api key", func(t *testing.T) {
			cmd := models.AddApiKeyCommand{OrgId: 0, Name: "duplicate", Key: "asd"}
			err := AddApiKey(&cmd)
			assert.Nil(t, err)

			t.Run("Add API Key with existing Org ID and Name", func(t *testing.T) {
				cmd := models.AddApiKeyCommand{OrgId: 0, Name: "duplicate", Key: "asd"}
				err = AddApiKey(&cmd)
				assert.EqualError(t, err, models.ErrDuplicateApiKey.Error())
			})
		})
	})
}
@@ -11,9 +11,14 @@ import { TagBadge } from './TagBadge';
import { NoOptionsMessage, IndicatorsContainer, resetSelectStyles } from '@grafana/ui';
import { escapeStringForRegex } from '../FilterInput/FilterInput';

export interface TermCount {
  term: string;
  count: number;
}

export interface Props {
  tags: string[];
  tagOptions: () => any;
  tagOptions: () => Promise<TermCount[]>;
  onChange: (tags: string[]) => void;
}

@@ -25,7 +30,7 @@ export class TagFilter extends React.Component<Props, any> {
  }

  onLoadOptions = (query: string) => {
    return this.props.tagOptions().then((options: any[]) => {
    return this.props.tagOptions().then(options => {
      return options.map(option => ({
        value: option.term,
        label: option.term,
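Editor's note: the `tagOptions` prop is now typed as `() => Promise<TermCount[]>` instead of `() => any`, so providers must resolve to the `TermCount` shape defined above. A minimal provider sketch; the endpoint URL is an assumption for illustration:

```ts
// Sketch of a tagOptions provider satisfying the new signature.
const tagOptions = async (): Promise<TermCount[]> => {
  const res = await fetch('/api/dashboards/tags'); // illustrative endpoint
  return (await res.json()) as TermCount[]; // e.g. [{ term: 'prod', count: 7 }]
};
```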
@@ -324,10 +324,6 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
    }
  }

  const sortedRows = rows.sort((a, b) => {
    return a.timestamp > b.timestamp ? -1 : 1;
  });

  // Meta data to display in status
  const meta: LogsMetaItem[] = [];
  if (_.size(commonLabels) > 0) {
@@ -343,7 +339,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
  if (limits.length > 0) {
    meta.push({
      label: 'Limit',
      value: `${limits[0].meta.limit} (${sortedRows.length} returned)`,
      value: `${limits[0].meta.limit} (${rows.length} returned)`,
      kind: LogsMetaKind.String,
    });
  }
@@ -351,7 +347,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
  return {
    hasUniqueLabels,
    meta,
    rows: sortedRows,
    rows,
  };
}
@@ -28,7 +28,7 @@ describe('file_export', () => {
|
||||
|
||||
describe('when exporting series as rows', () => {
|
||||
it('should export points in proper order', () => {
|
||||
const text = fileExport.convertSeriesListToCsv(ctx.seriesList, ctx.timeFormat);
|
||||
const text = fileExport.convertSeriesListToCsv(ctx.seriesList, { dateTimeFormat: ctx.timeFormat });
|
||||
const expectedText =
|
||||
'"Series";"Time";"Value"\r\n' +
|
||||
'"series_1";"1500026100";1\r\n' +
|
||||
@@ -48,7 +48,7 @@ describe('file_export', () => {
|
||||
|
||||
describe('when exporting series as columns', () => {
|
||||
it('should export points in proper order', () => {
|
||||
const text = fileExport.convertSeriesListToCsvColumns(ctx.seriesList, ctx.timeFormat);
|
||||
const text = fileExport.convertSeriesListToCsvColumns(ctx.seriesList, { dateTimeFormat: ctx.timeFormat });
|
||||
const expectedText =
|
||||
'"Time";"series_1";"series_2"\r\n' +
|
||||
'"1500026100";1;11\r\n' +
|
||||
@@ -65,7 +65,7 @@ describe('file_export', () => {
|
||||
const expectedSeries1DataPoints = ctx.seriesList[0].datapoints.slice();
|
||||
const expectedSeries2DataPoints = ctx.seriesList[1].datapoints.slice();
|
||||
|
||||
fileExport.convertSeriesListToCsvColumns(ctx.seriesList, ctx.timeFormat);
|
||||
fileExport.convertSeriesListToCsvColumns(ctx.seriesList, { dateTimeFormat: ctx.timeFormat });
|
||||
|
||||
expect(expectedSeries1DataPoints).toEqual(ctx.seriesList[0].datapoints);
|
||||
expect(expectedSeries2DataPoints).toEqual(ctx.seriesList[1].datapoints);
|
||||
|
||||
@@ -418,13 +418,6 @@ describe('dataFrameToLogsModel', () => {
|
||||
expect(logsModel.hasUniqueLabels).toBeFalsy();
|
||||
expect(logsModel.rows).toHaveLength(2);
|
||||
expect(logsModel.rows).toMatchObject([
|
||||
{
|
||||
timestamp: '2019-04-26T14:42:50.991981292Z',
|
||||
entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
|
||||
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
|
||||
logLevel: 'error',
|
||||
uniqueLabels: {},
|
||||
},
|
||||
{
|
||||
timestamp: '2019-04-26T09:28:11.352440161Z',
|
||||
entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
|
||||
@@ -432,6 +425,13 @@ describe('dataFrameToLogsModel', () => {
|
||||
logLevel: 'info',
|
||||
uniqueLabels: {},
|
||||
},
|
||||
{
|
||||
timestamp: '2019-04-26T14:42:50.991981292Z',
|
||||
entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
|
||||
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
|
||||
logLevel: 'error',
|
||||
uniqueLabels: {},
|
||||
},
|
||||
]);
|
||||
|
||||
expect(logsModel.series).toHaveLength(2);
|
||||
@@ -524,12 +524,6 @@ describe('dataFrameToLogsModel', () => {
|
||||
expect(logsModel.hasUniqueLabels).toBeTruthy();
|
||||
expect(logsModel.rows).toHaveLength(3);
|
||||
expect(logsModel.rows).toMatchObject([
|
||||
{
|
||||
entry: 'INFO 2',
|
||||
labels: { foo: 'bar', baz: '2' },
|
||||
logLevel: LogLevel.error,
|
||||
uniqueLabels: { baz: '2' },
|
||||
},
|
||||
{
|
||||
entry: 'WARN boooo',
|
||||
labels: { foo: 'bar', baz: '1' },
|
||||
@@ -542,6 +536,12 @@ describe('dataFrameToLogsModel', () => {
|
||||
logLevel: LogLevel.error,
|
||||
uniqueLabels: { baz: '2' },
|
||||
},
|
||||
{
|
||||
entry: 'INFO 2',
|
||||
labels: { foo: 'bar', baz: '2' },
|
||||
logLevel: LogLevel.error,
|
||||
uniqueLabels: { baz: '2' },
|
||||
},
|
||||
]);
|
||||
|
||||
expect(logsModel.series).toHaveLength(2);
|
||||
|
||||
@@ -487,11 +487,11 @@ export const getRefIds = (value: any): string[] => {
|
||||
};
|
||||
|
||||
const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
|
||||
if (a.timeEpochMs < b.timeEpochMs) {
|
||||
if (a.timestamp < b.timestamp) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (a.timeEpochMs > b.timeEpochMs) {
|
||||
if (a.timestamp > b.timestamp) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -499,11 +499,11 @@ const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
|
||||
};
|
||||
|
||||
const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => {
|
||||
if (a.timeEpochMs > b.timeEpochMs) {
|
||||
if (a.timestamp > b.timestamp) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (a.timeEpochMs < b.timeEpochMs) {
|
||||
if (a.timestamp < b.timestamp) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
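The comparators above now order rows by the ISO-8601 `timestamp` string rather than `timeEpochMs`; lexicographic comparison matches chronological order as long as all timestamps share the same format and zone. A standalone sketch of the descending variant (the field name is taken from the hunk, everything else is assumed):

interface RowLike {
  timestamp: string; // e.g. '2019-04-26T14:42:50.991981292Z'
}

// Newest rows first, mirroring sortInDescendingOrder above.
const byTimestampDesc = (a: RowLike, b: RowLike) =>
  a.timestamp > b.timestamp ? -1 : a.timestamp < b.timestamp ? 1 : 0;

// Usage: rows.sort(byTimestampDesc);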
@@ -1,7 +1,7 @@
import { isBoolean, isNumber, sortedUniq, sortedIndexOf, unescape as htmlUnescaped } from 'lodash';
import { saveAs } from 'file-saver';
import { isNullOrUndefined } from 'util';
import { dateTime } from '@grafana/data';
import { dateTime, TimeZone } from '@grafana/data';

const DEFAULT_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ssZ';
const POINT_TIME_INDEX = 1;
@@ -12,7 +12,19 @@ const END_ROW = '\r\n';
const QUOTE = '"';
const EXPORT_FILENAME = 'grafana_data_export.csv';

function csvEscaped(text) {
interface SeriesListToCsvColumnsOptions {
dateTimeFormat: string;
excel: boolean;
timezone: TimeZone;
}

const defaultOptions: SeriesListToCsvColumnsOptions = {
dateTimeFormat: DEFAULT_DATETIME_FORMAT,
excel: false,
timezone: '',
};

function csvEscaped(text: string) {
if (!text) {
return text;
}
@@ -25,13 +37,13 @@ function csvEscaped(text) {
}

const domParser = new DOMParser();
function htmlDecoded(text) {
function htmlDecoded(text: string) {
if (!text) {
return text;
}

const regexp = /&[^;]+;/g;
function htmlDecoded(value) {
function htmlDecoded(value: string) {
const parsedDom = domParser.parseFromString(value, 'text/html');
return parsedDom.body.textContent;
}
@@ -58,14 +70,19 @@ function formatRow(row, addEndRowDelimiter = true) {
return addEndRowDelimiter ? text + END_ROW : text;
}

export function convertSeriesListToCsv(seriesList, dateTimeFormat = DEFAULT_DATETIME_FORMAT, excel = false) {
export function convertSeriesListToCsv(seriesList, options: Partial<SeriesListToCsvColumnsOptions>) {
const { dateTimeFormat, excel, timezone } = { ...defaultOptions, ...options };
let text = formatSpecialHeader(excel) + formatRow(['Series', 'Time', 'Value']);
for (let seriesIndex = 0; seriesIndex < seriesList.length; seriesIndex += 1) {
for (let i = 0; i < seriesList[seriesIndex].datapoints.length; i += 1) {
text += formatRow(
[
seriesList[seriesIndex].alias,
dateTime(seriesList[seriesIndex].datapoints[i][POINT_TIME_INDEX]).format(dateTimeFormat),
timezone === 'utc'
? dateTime(seriesList[seriesIndex].datapoints[i][POINT_TIME_INDEX])
.utc()
.format(dateTimeFormat)
: dateTime(seriesList[seriesIndex].datapoints[i][POINT_TIME_INDEX]).format(dateTimeFormat),
seriesList[seriesIndex].datapoints[i][POINT_VALUE_INDEX],
],
i < seriesList[seriesIndex].datapoints.length - 1 || seriesIndex < seriesList.length - 1
@@ -75,12 +92,13 @@ export function convertSeriesListToCsv(seriesList, dateTimeFormat = DEFAULT_DATE
return text;
}

export function exportSeriesListToCsv(seriesList, dateTimeFormat = DEFAULT_DATETIME_FORMAT, excel = false) {
const text = convertSeriesListToCsv(seriesList, dateTimeFormat, excel);
export function exportSeriesListToCsv(seriesList, options: Partial<SeriesListToCsvColumnsOptions>) {
const text = convertSeriesListToCsv(seriesList, options);
saveSaveBlob(text, EXPORT_FILENAME);
}

export function convertSeriesListToCsvColumns(seriesList, dateTimeFormat = DEFAULT_DATETIME_FORMAT, excel = false) {
export function convertSeriesListToCsvColumns(seriesList, options: Partial<SeriesListToCsvColumnsOptions>) {
const { dateTimeFormat, excel, timezone } = { ...defaultOptions, ...options };
// add header
let text =
formatSpecialHeader(excel) +
@@ -96,7 +114,13 @@ export function convertSeriesListToCsvColumns(seriesList, dateTimeFormat = DEFAU

// make text
for (let i = 0; i < extendedDatapointsList[0].length; i += 1) {
const timestamp = dateTime(extendedDatapointsList[0][i][POINT_TIME_INDEX]).format(dateTimeFormat);
const timestamp =
timezone === 'utc'
? dateTime(extendedDatapointsList[0][i][POINT_TIME_INDEX])
.utc()
.format(dateTimeFormat)
: dateTime(extendedDatapointsList[0][i][POINT_TIME_INDEX]).format(dateTimeFormat);

text += formatRow(
[timestamp].concat(
extendedDatapointsList.map(datapoints => {
@@ -143,8 +167,8 @@ function mergeSeriesByTime(seriesList) {
return result;
}

export function exportSeriesListToCsvColumns(seriesList, dateTimeFormat = DEFAULT_DATETIME_FORMAT, excel = false) {
const text = convertSeriesListToCsvColumns(seriesList, dateTimeFormat, excel);
export function exportSeriesListToCsvColumns(seriesList, options: Partial<SeriesListToCsvColumnsOptions>) {
const text = convertSeriesListToCsvColumns(seriesList, options);
saveSaveBlob(text, EXPORT_FILENAME);
}
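The net effect of these hunks is a single options object in place of positional `dateTimeFormat`/`excel` arguments, with `timezone` newly threaded through. A hedged usage sketch (the series shape is inferred from the tests earlier; datapoints are [value, timeMs] pairs per POINT_TIME_INDEX = 1):

const seriesList = [
  { alias: 'series_1', datapoints: [[1, 1500026100000], [2, 1500026200000]] },
];

// Only the keys you pass override defaultOptions; omitted keys fall back.
const csv = convertSeriesListToCsv(seriesList, {
  dateTimeFormat: 'YYYY-MM-DDTHH:mm:ssZ',
  timezone: 'utc', // takes the .utc() branch in the loop above
});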
@@ -179,7 +179,7 @@ export default class AdminEditUserCtrl {
const user = $scope.user;

// External user can not be disabled
if (user.authModule) {
if (user.isExternal) {
event.preventDefault();
event.stopPropagation();
return;

@@ -1,5 +1,6 @@
import { BackendSrv } from 'app/core/services/backend_srv';
import { NavModelSrv } from 'app/core/core';
import tags from 'app/core/utils/tags';

export default class AdminListUsersCtrl {
users: any;
@@ -32,6 +33,8 @@ export default class AdminListUsersCtrl {
for (let i = 1; i < this.totalPages + 1; i++) {
this.pages.push({ page: i, current: i === this.page });
}

this.addUsersAuthLabels();
});
}

@@ -40,10 +43,29 @@ export default class AdminListUsersCtrl {
this.getUsers();
}

getAuthModule(user: any) {
if (user.authModule && user.authModule.length) {
return user.authModule[0];
addUsersAuthLabels() {
for (const user of this.users) {
user.authLabel = getAuthLabel(user);
user.authLabelStyle = getAuthLabelStyle(user.authLabel);
}
return undefined;
}
}

function getAuthLabel(user: any) {
if (user.authLabels && user.authLabels.length) {
return user.authLabels[0];
}
return '';
}

function getAuthLabelStyle(label: string) {
if (label === 'LDAP' || !label) {
return {};
}

const { color, borderColor } = tags.getTagColorsFromName(label);
return {
'background-color': color,
'border-color': borderColor,
};
}
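A quick sketch of how the two helpers above compose for a single user (names come from the hunk; the concrete colors depend on tags.getTagColorsFromName):

const user: any = { authLabels: ['OAuth'] };
const label = getAuthLabel(user);       // 'OAuth'
const style = getAuthLabelStyle(label); // { 'background-color': ..., 'border-color': ... }
// LDAP labels and users without labels keep the default tag styling via {}.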
@@ -118,48 +118,52 @@
<h3 class="page-heading">Sessions</h3>

<div class="gf-form-group">
<table class="filter-table form-inline">
<thead>
<tr>
<th>Last seen</th>
<th>Logged on</th>
<th>IP address</th>
<th>Browser & OS</th>
<th></th>
</tr>
</thead>
<tbody>
<tr ng-repeat="session in sessions">
<td ng-if="session.isActive">Now</td>
<td ng-if="!session.isActive">{{session.seenAt}}</td>
<td>{{session.createdAt}}</td>
<td>{{session.clientIp}}</td>
<td>{{session.browser}} on {{session.os}} {{session.osVersion}}</td>
<td>
<button class="btn btn-danger btn-small" ng-click="revokeUserSession(session.id)">
<i class="fa fa-power-off"></i>
</button>
</td>
</tr>
</tbody>
</table>
<div class="gf-form">
<table class="filter-table form-inline">
<thead>
<tr>
<th>Last seen</th>
<th>Logged on</th>
<th>IP address</th>
<th>Browser & OS</th>
<th></th>
</tr>
</thead>
<tbody>
<tr ng-repeat="session in sessions">
<td ng-if="session.isActive">Now</td>
<td ng-if="!session.isActive">{{session.seenAt}}</td>
<td>{{session.createdAt}}</td>
<td>{{session.clientIp}}</td>
<td>{{session.browser}} on {{session.os}} {{session.osVersion}}</td>
<td>
<button class="btn btn-danger btn-small" ng-click="revokeUserSession(session.id)">
<i class="fa fa-power-off"></i>
</button>
</td>
</tr>
</tbody>
</table>
</div>
<div class="gf-form-button-row">
<button ng-if="sessions.length" class="btn btn-danger" ng-click="revokeAllUserSessions()">
Logout user from all devices
</button>
</div>
</div>

<button ng-if="sessions.length" class="btn btn-danger" ng-click="revokeAllUserSessions()">
Logout user from all devices
</button>

<h3 class="page-heading">User status</h3>

<div class="gf-form-group">
<h3 class="page-heading">User status</h3>

<div class="gf-form-button-row">
<button
type="submit"
class="btn btn-danger"
ng-if="!user.isDisabled"
ng-click="disableUser($event)"
bs-tooltip="user.authModule ? 'External user cannot be activated or deactivated' : ''"
ng-class="{'disabled': user.authModule}"
bs-tooltip="user.isExternal ? 'External user cannot be enabled or disabled' : ''"
ng-class="{'disabled': user.isExternal}"
>
Disable
</button>
@@ -168,8 +172,8 @@
class="btn btn-primary"
ng-if="user.isDisabled"
ng-click="disableUser($event)"
bs-tooltip="user.authModule ? 'External user cannot be activated or deactivated' : ''"
ng-class="{'disabled': user.authModule}"
bs-tooltip="user.isExternal ? 'External user cannot be enabled or disabled' : ''"
ng-class="{'disabled': user.isExternal}"
>
Enable
</button>

@@ -55,7 +55,9 @@
</a>
</td>
<td class="text-right">
<span class="label label-tag" ng-class="{'muted': user.isDisabled}" ng-if="ctrl.getAuthModule(user) === 'ldap'">LDAP</span>
<span class="label label-tag" ng-style="user.authLabelStyle" ng-if="user.authLabel">
{{user.authLabel}}
</span>
</td>
<td class="text-right">
<span class="label label-tag label-tag--gray" ng-if="user.isDisabled">Disabled</span>

@@ -1,6 +1,7 @@
import angular from 'angular';
import * as fileExport from 'app/core/utils/file_export';
import appEvents from 'app/core/app_events';
import { DashboardSrv } from 'app/features/dashboard/services/DashboardSrv';

export class ExportDataModalCtrl {
private data: any[];
@@ -9,14 +10,23 @@ export class ExportDataModalCtrl {
dateTimeFormat = 'YYYY-MM-DDTHH:mm:ssZ';
excel = false;

/** @ngInject */
constructor(private dashboardSrv: DashboardSrv) {}

export() {
const timezone = this.dashboardSrv.getCurrent().timezone;
const options = {
excel: this.excel,
dateTimeFormat: this.dateTimeFormat,
timezone,
};
if (this.panel === 'table') {
fileExport.exportTableDataToCsv(this.data, this.excel);
} else {
if (this.asRows) {
fileExport.exportSeriesListToCsv(this.data, this.dateTimeFormat, this.excel);
fileExport.exportSeriesListToCsv(this.data, options);
} else {
fileExport.exportSeriesListToCsvColumns(this.data, this.dateTimeFormat, this.excel);
fileExport.exportSeriesListToCsvColumns(this.data, options);
}
}
@@ -72,9 +72,10 @@ export class ResultProcessor {
const graphInterval = this.state.queryIntervals.intervalMs;
const dataFrame = this.rawData.map(result => guessFieldTypes(toDataFrame(result)));
const newResults = this.rawData ? dataFrameToLogsModel(dataFrame, graphInterval) : null;
const sortedNewResults = sortLogsResult(newResults, this.state.refreshInterval);

if (this.replacePreviousResults) {
return newResults;
return sortedNewResults;
}

const prevLogsResult: LogsModel = this.state.logsResult || { hasUniqueLabels: false, rows: [] };
@@ -86,17 +87,17 @@ export class ResultProcessor {
for (const row of rowsInState) {
processedRows.push({ ...row, fresh: false });
}
for (const row of newResults.rows) {
for (const row of sortedNewResults.rows) {
processedRows.push({ ...row, fresh: true });
}

const processedSeries = this.mergeGraphResults(newResults.series, seriesInState);
const processedSeries = this.mergeGraphResults(sortedNewResults.series, seriesInState);

const slice = -1000;
const rows = processedRows.slice(slice);
const series = processedSeries.slice(slice);

return { ...newResults, rows, series };
return { ...sortedNewResults, rows, series };
};

private makeTimeSeriesList = (rawData: any[]) => {

@@ -22,6 +22,7 @@ import * as graphPanel from 'app/plugins/panel/graph/module';
import * as dashListPanel from 'app/plugins/panel/dashlist/module';
import * as pluginsListPanel from 'app/plugins/panel/pluginlist/module';
import * as alertListPanel from 'app/plugins/panel/alertlist/module';
import * as annoListPanel from 'app/plugins/panel/annolist/module';
import * as heatmapPanel from 'app/plugins/panel/heatmap/module';
import * as tablePanel from 'app/plugins/panel/table/module';
import * as table2Panel from 'app/plugins/panel/table2/module';
@@ -59,6 +60,7 @@ const builtInPlugins = {
'app/plugins/panel/dashlist/module': dashListPanel,
'app/plugins/panel/pluginlist/module': pluginsListPanel,
'app/plugins/panel/alertlist/module': alertListPanel,
'app/plugins/panel/annolist/module': annoListPanel,
'app/plugins/panel/heatmap/module': heatmapPanel,
'app/plugins/panel/table/module': tablePanel,
'app/plugins/panel/table2/module': table2Panel,

@@ -4,7 +4,7 @@ import { variableRegex } from 'app/features/templating/variable';
import { ScopedVars } from '@grafana/ui';
import { TimeRange } from '@grafana/data';

function luceneEscape(value) {
function luceneEscape(value: string) {
return value.replace(/([\!\*\+\-\=<>\s\&\|\(\)\[\]\{\}\^\~\?\:\\/"])/g, '\\$1');
}

@@ -12,8 +12,8 @@ export class TemplateSrv {
variables: any[];

private regex = variableRegex;
private index = {};
private grafanaVariables = {};
private index: any = {};
private grafanaVariables: any = {};
private builtIns: any = {};
private timeRange: TimeRange = null;

@@ -23,7 +23,7 @@ export class TemplateSrv {
this.variables = [];
}

init(variables, timeRange?: TimeRange) {
init(variables: any, timeRange?: TimeRange) {
this.variables = variables;
this.timeRange = timeRange;
this.updateIndex();
@@ -34,7 +34,7 @@ export class TemplateSrv {
}

updateIndex() {
const existsOrEmpty = value => value || value === '';
const existsOrEmpty = (value: any) => value || value === '';

this.index = this.variables.reduce((acc, currentValue) => {
if (currentValue.current && (currentValue.current.isNone || existsOrEmpty(currentValue.current.value))) {
@@ -64,12 +64,12 @@ export class TemplateSrv {
this.updateIndex();
}

variableInitialized(variable) {
variableInitialized(variable: any) {
this.index[variable.name] = variable;
}

getAdhocFilters(datasourceName) {
let filters = [];
getAdhocFilters(datasourceName: string) {
let filters: any = [];

if (this.variables) {
for (let i = 0; i < this.variables.length; i++) {
@@ -92,7 +92,7 @@ export class TemplateSrv {
return filters;
}

luceneFormat(value) {
luceneFormat(value: any) {
if (typeof value === 'string') {
return luceneEscape(value);
}
@@ -108,7 +108,7 @@ export class TemplateSrv {
// encode string according to RFC 3986; in contrast to encodeURIComponent()
// also the sub-delims "!", "'", "(", ")" and "*" are encoded;
// unicode handling uses UTF-8 as in ECMA-262.
encodeURIComponentStrict(str) {
encodeURIComponentStrict(str: string) {
return encodeURIComponent(str).replace(/[!'()*]/g, c => {
return (
'%' +
@@ -120,7 +120,7 @@ export class TemplateSrv {
});
}

formatValue(value, format, variable) {
formatValue(value: any, format: any, variable: any) {
// for some scopedVars there is no variable
variable = variable || {};

@@ -180,11 +180,11 @@ export class TemplateSrv {
}
}

setGrafanaVariable(name, value) {
setGrafanaVariable(name: string, value: any) {
this.grafanaVariables[name] = value;
}

getVariableName(expression) {
getVariableName(expression: string) {
this.regex.lastIndex = 0;
const match = this.regex.exec(expression);
if (!match) {
@@ -194,12 +194,12 @@ export class TemplateSrv {
return variableName;
}

variableExists(expression) {
variableExists(expression: string) {
const name = this.getVariableName(expression);
return name && this.index[name] !== void 0;
}

highlightVariablesAsHtml(str) {
highlightVariablesAsHtml(str: string) {
if (!str || !_.isString(str)) {
return str;
}
@@ -214,7 +214,7 @@ export class TemplateSrv {
});
}

getAllValue(variable) {
getAllValue(variable: any) {
if (variable.allValue) {
return variable.allValue;
}
@@ -225,7 +225,7 @@ export class TemplateSrv {
return values;
}

replace(target: string, scopedVars?: ScopedVars, format?: string | Function) {
replace(target: string, scopedVars?: ScopedVars, format?: string | Function): any {
if (!target) {
return target;
}
@@ -266,11 +266,11 @@ export class TemplateSrv {
});
}

isAllValue(value) {
isAllValue(value: any) {
return value === '$__all' || (Array.isArray(value) && value[0] === '$__all');
}

replaceWithText(target, scopedVars) {
replaceWithText(target: string, scopedVars: ScopedVars) {
if (!target) {
return target;
}
@@ -278,7 +278,7 @@ export class TemplateSrv {
let variable;
this.regex.lastIndex = 0;

return target.replace(this.regex, (match, var1, var2, fmt2, var3) => {
return target.replace(this.regex, (match: any, var1: any, var2: any, fmt2: any, var3: any) => {
if (scopedVars) {
const option = scopedVars[var1 || var2 || var3];
if (option) {
@@ -297,7 +297,7 @@ export class TemplateSrv {
});
}

fillVariableValuesForUrl(params, scopedVars?) {
fillVariableValuesForUrl(params: any, scopedVars?: ScopedVars) {
_.each(this.variables, variable => {
if (scopedVars && scopedVars[variable.name] !== void 0) {
if (scopedVars[variable.name].skipUrlSync) {
@@ -313,7 +313,7 @@ export class TemplateSrv {
});
}

distributeVariable(value, variable) {
distributeVariable(value: any, variable: any) {
value = _.map(value, (val: any, index: number) => {
if (index !== 0) {
return variable + '=' + val;
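The RFC 3986 comment above is easiest to verify with a concrete input. A sketch, assuming the elided branch percent-encodes the character code in upper-case hex, as in the stock implementation:

// encodeURIComponent leaves ! ' ( ) * untouched; the strict variant encodes them too.
templateSrv.encodeURIComponentStrict("it's (*)!"); // => "it%27s%20%28%2A%29%21"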
@@ -16,12 +16,12 @@ export const variableRegexExec = (variableString: string) => {
};

export interface Variable {
setValue(option);
updateOptions();
dependsOn(variable);
setValueFromUrl(urlValue);
getValueForUrl();
getSaveModel();
setValue(option: any): any;
updateOptions(): any;
dependsOn(variable: any): any;
setValueFromUrl(urlValue: any): any;
getValueForUrl(): any;
getSaveModel(): any;
}

export let variableTypes = {};

@@ -1,5 +1,5 @@
// Libraries
import angular from 'angular';
import angular, { IQService, ILocationService, auto, IPromise } from 'angular';
import _ from 'lodash';

// Utils & Services
@@ -19,9 +19,9 @@ export class VariableSrv {

/** @ngInject */
constructor(
private $q,
private $location,
private $injector,
private $q: IQService,
private $location: ILocationService,
private $injector: auto.IInjectorService,
private templateSrv: TemplateSrv,
private timeSrv: TimeSrv
) {}
@@ -71,7 +71,7 @@ export class VariableSrv {
});
}

processVariable(variable, queryParams) {
processVariable(variable: any, queryParams: any) {
const dependencies = [];

for (const otherVariable of this.variables) {
@@ -100,7 +100,8 @@ export class VariableSrv {
});
}

createVariableFromModel(model) {
createVariableFromModel(model: any) {
// @ts-ignore
const ctor = variableTypes[model.type].ctor;
if (!ctor) {
throw {
@@ -112,24 +113,24 @@ export class VariableSrv {
return variable;
}

addVariable(variable) {
addVariable(variable: any) {
this.variables.push(variable);
this.templateSrv.updateIndex();
this.dashboard.updateSubmenuVisibility();
}

removeVariable(variable) {
removeVariable(variable: any) {
const index = _.indexOf(this.variables, variable);
this.variables.splice(index, 1);
this.templateSrv.updateIndex();
this.dashboard.updateSubmenuVisibility();
}

updateOptions(variable) {
updateOptions(variable: any) {
return variable.updateOptions();
}

variableUpdated(variable, emitChangeEvents?) {
variableUpdated(variable: any, emitChangeEvents?: any) {
// if there is a variable lock ignore cascading update because we are in a boot up scenario
if (variable.initLock) {
return this.$q.when();
@@ -152,7 +153,7 @@ export class VariableSrv {
});
}

selectOptionsForCurrentValue(variable) {
selectOptionsForCurrentValue(variable: any) {
let i, y, value, option;
const selected: any = [];

@@ -176,7 +177,7 @@ export class VariableSrv {
return selected;
}

validateVariableSelectionState(variable) {
validateVariableSelectionState(variable: any) {
if (!variable.current) {
variable.current = {};
}
@@ -221,7 +222,7 @@ export class VariableSrv {
* @param variable Instance of Variable
* @param urlValue Value of the query parameter
*/
setOptionFromUrl(variable: any, urlValue: string | string[]): Promise<any> {
setOptionFromUrl(variable: any, urlValue: string | string[]): IPromise<any> {
let promise = this.$q.when();

if (variable.refresh) {
@@ -268,7 +269,7 @@ export class VariableSrv {
});
}

setOptionAsCurrent(variable, option) {
setOptionAsCurrent(variable: any, option: any) {
variable.current = _.cloneDeep(option);

if (_.isArray(variable.current.text) && variable.current.text.length > 0) {
@@ -298,7 +299,7 @@ export class VariableSrv {
this.$location.search(params);
}

setAdhocFilter(options) {
setAdhocFilter(options: any) {
let variable: any = _.find(this.variables, {
type: 'adhoc',
datasource: options.datasource,

@@ -78,7 +78,7 @@ export class UsersActionBar extends PureComponent<Props> {
}
}

function mapStateToProps(state) {
function mapStateToProps(state: any) {
return {
searchQuery: getUsersSearchQuery(state.users),
pendingInvitesCount: getInviteesCount(state.users),

@@ -34,7 +34,7 @@ export interface State {
export class UsersListPage extends PureComponent<Props, State> {
externalUserMngInfoHtml: string;

constructor(props) {
constructor(props: Props) {
super(props);

if (this.props.externalUserMngInfo) {
@@ -59,13 +59,13 @@ export class UsersListPage extends PureComponent<Props, State> {
return await this.props.loadInvitees();
}

onRoleChange = (role, user) => {
onRoleChange = (role: string, user: OrgUser) => {
const updatedUser = { ...user, role: role };

this.props.updateUser(updatedUser);
};

onRemoveUser = user => {
onRemoveUser = (user: OrgUser) => {
appEvents.emit('confirm-modal', {
title: 'Delete',
text: 'Are you sure you want to delete user ' + user.login + '?',
@@ -119,7 +119,7 @@ export class UsersListPage extends PureComponent<Props, State> {
}
}

function mapStateToProps(state) {
function mapStateToProps(state: any) {
return {
navModel: getNavModel(state.navIndex, 'users'),
users: getUsers(state.users),

@@ -1,4 +1,6 @@
export const getUsers = state => {
import { UsersState } from 'app/types';

export const getUsers = (state: UsersState) => {
const regex = new RegExp(state.searchQuery, 'i');

return state.users.filter(user => {
@@ -6,7 +8,7 @@ export const getUsers = state => {
});
};

export const getInvitees = state => {
export const getInvitees = (state: UsersState) => {
const regex = new RegExp(state.searchQuery, 'i');

return state.invitees.filter(invitee => {
@@ -14,5 +16,5 @@ export const getInvitees = state => {
});
};

export const getInviteesCount = state => state.invitees.length;
export const getUsersSearchQuery = state => state.searchQuery;
export const getInviteesCount = (state: UsersState) => state.invitees.length;
export const getUsersSearchQuery = (state: UsersState) => state.searchQuery;

@@ -45,6 +45,10 @@
</div>
</div>
<div class="clearfix"></div>
<a class="btn btn-medium btn-service btn-service--github login-btn" href="login/saml" target="_self" ng-if="samlEnabled">
<i class="btn-service-icon fa fa-key"></i>
Sign in with SAML
</a>
<div class="login-oauth text-center" ng-show="oauthEnabled">
<a class="btn btn-medium btn-service btn-service--google login-btn" href="login/google" target="_self" ng-if="oauth.google">
<i class="btn-service-icon fa fa-google"></i>
@@ -68,10 +72,6 @@
<i class="btn-service-icon fa fa-sign-in"></i>
Sign in with {{oauth.generic_oauth.name}}
</a>
<a class="btn btn-medium btn-service btn-service--github login-btn" href="login/saml" target="_self" ng-if="samlEnabled">
<i class="btn-service-icon fa fa-key"></i>
Sign in with SAML
</a>
</div>
<div class="login-signup-box" ng-show="!disableUserSignUp">
<div class="login-signup-title p-r-1">

@@ -1,4 +1,6 @@
import _ from 'lodash';
import DatasourceSrv from 'app/features/plugins/datasource_srv';
import CloudWatchDatasource from './datasource';
export class CloudWatchConfigCtrl {
static templateUrl = 'partials/config.html';
current: any;
@@ -8,7 +10,7 @@ export class CloudWatchConfigCtrl {
secretKeyExist = false;

/** @ngInject */
constructor($scope, datasourceSrv) {
constructor($scope: any, datasourceSrv: DatasourceSrv) {
this.current.jsonData.timeField = this.current.jsonData.timeField || '@timestamp';
this.current.jsonData.authType = this.current.jsonData.authType || 'credentials';

@@ -32,7 +34,7 @@ export class CloudWatchConfigCtrl {
{ name: 'ARN', value: 'arn' },
];

indexPatternTypes = [
indexPatternTypes: any = [
{ name: 'No pattern', value: undefined },
{ name: 'Hourly', value: 'Hourly', example: '[logstash-]YYYY.MM.DD.HH' },
{ name: 'Daily', value: 'Daily', example: '[logstash-]YYYY.MM.DD' },
@@ -71,14 +73,14 @@ export class CloudWatchConfigCtrl {
getRegions() {
this.datasourceSrv
.loadDatasource(this.current.name)
.then(ds => {
.then((ds: CloudWatchDatasource) => {
return ds.getRegions();
})
.then(
regions => {
(regions: any) => {
this.regions = _.map(regions, 'value');
},
err => {
(err: any) => {
console.error('failed to get latest regions');
}
);

@@ -1,9 +1,9 @@
import angular from 'angular';
import angular, { IQService } from 'angular';
import _ from 'lodash';
import { dateMath } from '@grafana/data';
import kbn from 'app/core/utils/kbn';
import { CloudWatchQuery } from './types';
import { DataSourceApi, DataQueryRequest, DataSourceInstanceSettings } from '@grafana/ui';
import { DataSourceApi, DataQueryRequest, DataSourceInstanceSettings, ScopedVars } from '@grafana/ui';
import { BackendSrv } from 'app/core/services/backend_srv';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
@@ -18,7 +18,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
/** @ngInject */
constructor(
private instanceSettings: DataSourceInstanceSettings,
private $q,
private $q: IQService,
private backendSrv: BackendSrv,
private templateSrv: TemplateSrv,
private timeSrv: TimeSrv
@@ -96,7 +96,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return this.performTimeSeriesQuery(request);
}

getPeriod(target, options, now?) {
getPeriod(target: any, options: any, now?: number) {
const start = this.convertToCloudWatchTime(options.range.from, false);
const end = this.convertToCloudWatchTime(options.range.to, true);
now = Math.round((now || Date.now()) / 1000);
@@ -142,8 +142,8 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return period;
}

performTimeSeriesQuery(request) {
return this.awsRequest('/api/tsdb/query', request).then(res => {
performTimeSeriesQuery(request: any) {
return this.awsRequest('/api/tsdb/query', request).then((res: any) => {
const data = [];

if (res.results) {
@@ -165,7 +165,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}

transformSuggestDataFromTable(suggestData) {
transformSuggestDataFromTable(suggestData: any) {
return _.map(suggestData.results['metricFindQuery'].tables[0].rows, v => {
return {
text: v[0],
@@ -174,7 +174,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}

doMetricQueryRequest(subtype, parameters) {
doMetricQueryRequest(subtype: any, parameters: any) {
const range = this.timeSrv.timeRange();
return this.awsRequest('/api/tsdb/query', {
from: range.from.valueOf().toString(),
@@ -192,7 +192,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
parameters
),
],
}).then(r => {
}).then((r: any) => {
return this.transformSuggestDataFromTable(r);
});
}
@@ -205,21 +205,27 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return this.doMetricQueryRequest('namespaces', null);
}

getMetrics(namespace, region) {
getMetrics(namespace: string, region: string) {
return this.doMetricQueryRequest('metrics', {
region: this.templateSrv.replace(this.getActualRegion(region)),
namespace: this.templateSrv.replace(namespace),
});
}

getDimensionKeys(namespace, region) {
getDimensionKeys(namespace: string, region: string) {
return this.doMetricQueryRequest('dimension_keys', {
region: this.templateSrv.replace(this.getActualRegion(region)),
namespace: this.templateSrv.replace(namespace),
});
}

getDimensionValues(region, namespace, metricName, dimensionKey, filterDimensions) {
getDimensionValues(
region: string,
namespace: string,
metricName: string,
dimensionKey: string,
filterDimensions: {}
) {
return this.doMetricQueryRequest('dimension_values', {
region: this.templateSrv.replace(this.getActualRegion(region)),
namespace: this.templateSrv.replace(namespace),
@@ -229,14 +235,14 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}

getEbsVolumeIds(region, instanceId) {
getEbsVolumeIds(region: string, instanceId: string) {
return this.doMetricQueryRequest('ebs_volume_ids', {
region: this.templateSrv.replace(this.getActualRegion(region)),
instanceId: this.templateSrv.replace(instanceId),
});
}

getEc2InstanceAttribute(region, attributeName, filters) {
getEc2InstanceAttribute(region: string, attributeName: string, filters: any) {
return this.doMetricQueryRequest('ec2_instance_attribute', {
region: this.templateSrv.replace(this.getActualRegion(region)),
attributeName: this.templateSrv.replace(attributeName),
@@ -244,7 +250,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}

getResourceARNs(region, resourceType, tags) {
getResourceARNs(region: string, resourceType: string, tags: any) {
return this.doMetricQueryRequest('resource_arns', {
region: this.templateSrv.replace(this.getActualRegion(region)),
resourceType: this.templateSrv.replace(resourceType),
@@ -252,7 +258,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}

metricFindQuery(query) {
metricFindQuery(query: string) {
let region;
let namespace;
let metricName;
@@ -324,7 +330,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return this.$q.when([]);
}

annotationQuery(options) {
annotationQuery(options: any) {
const annotation = options.annotation;
const statistics = _.map(annotation.statistics, s => {
return this.templateSrv.replace(s);
@@ -359,7 +365,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
parameters
),
],
}).then(r => {
}).then((r: any) => {
return _.map(r.results['annotationQuery'].tables[0].rows, v => {
return {
annotation: annotation,
@@ -372,7 +378,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}

targetContainsTemplate(target) {
targetContainsTemplate(target: any) {
return (
this.templateSrv.variableExists(target.region) ||
this.templateSrv.variableExists(target.namespace) ||
@@ -395,14 +401,14 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}

awsRequest(url, data) {
awsRequest(url: string, data: any) {
const options = {
method: 'POST',
url: url,
data: data,
url,
data,
};

return this.backendSrv.datasourceRequest(options).then(result => {
return this.backendSrv.datasourceRequest(options).then((result: any) => {
return result.data;
});
}
@@ -411,14 +417,14 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return this.defaultRegion;
}

getActualRegion(region) {
getActualRegion(region: string) {
if (region === 'default' || _.isEmpty(region)) {
return this.getDefaultRegion();
}
return region;
}

getExpandedVariables(target, dimensionKey, variable, templateSrv) {
getExpandedVariables(target: any, dimensionKey: any, variable: any, templateSrv: TemplateSrv) {
/* if the all checkbox is marked we should add all values to the targets */
const allSelected: any = _.find(variable.options, { selected: true, text: 'All' });
const selectedVariables = _.filter(variable.options, v => {
@@ -430,7 +436,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
const currentVariables = !_.isArray(variable.current.value)
? [variable.current]
: variable.current.value.map(v => {
: variable.current.value.map((v: any) => {
return {
text: v,
value: v,
@@ -440,9 +446,9 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
selectedVariables.some((s: any) => {
return s.value === currentVariables[0].value;
}) || currentVariables[0].value === '$__all';
return (useSelectedVariables ? selectedVariables : currentVariables).map(v => {
return (useSelectedVariables ? selectedVariables : currentVariables).map((v: any) => {
const t = angular.copy(target);
const scopedVar = {};
const scopedVar: any = {};
scopedVar[variable.name] = v;
t.refId = target.refId + '_' + v.value;
t.dimensions[dimensionKey] = templateSrv.replace(t.dimensions[dimensionKey], scopedVar);
@@ -455,7 +461,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}

expandTemplateVariable(targets, scopedVars, templateSrv) {
expandTemplateVariable(targets: any, scopedVars: ScopedVars, templateSrv: TemplateSrv) {
// Datasource and template srv logic uber-complected. This should be cleaned up.
return _.chain(targets)
.map(target => {
@@ -480,15 +486,15 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
.value();
}

convertToCloudWatchTime(date, roundUp) {
convertToCloudWatchTime(date: any, roundUp: any) {
if (_.isString(date)) {
date = dateMath.parse(date, roundUp);
}
return Math.round(date.valueOf() / 1000);
}

convertDimensionFormat(dimensions, scopedVars) {
const convertedDimensions = {};
convertDimensionFormat(dimensions: any, scopedVars: ScopedVars) {
const convertedDimensions: any = {};
_.each(dimensions, (value, key) => {
convertedDimensions[this.templateSrv.replace(key, scopedVars)] = this.templateSrv.replace(value, scopedVars);
});
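Two of the small typed helpers above have behavior worth pinning down with an example (ds stands for any CloudWatchDatasource instance; the values are illustrative):

// Region resolution: 'default' and empty regions fall back to the configured default.
ds.getActualRegion('default');   // -> ds.getDefaultRegion()
ds.getActualRegion('');          // -> ds.getDefaultRegion()
ds.getActualRegion('us-east-1'); // -> 'us-east-1'

// Time conversion: dateMath strings and Date/moment values both reduce to epoch seconds.
ds.convertToCloudWatchTime('now-1h', false);               // parsed via dateMath, then rounded
ds.convertToCloudWatchTime(new Date(1483228800000), true); // -> 1483228800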
@@ -1,5 +1,6 @@
import './query_parameter_ctrl';
import { QueryCtrl } from 'app/plugins/sdk';
import { auto } from 'angular';

export class CloudWatchQueryCtrl extends QueryCtrl {
static templateUrl = 'partials/query.editor.html';
@@ -7,7 +8,7 @@ export class CloudWatchQueryCtrl extends QueryCtrl {
aliasSyntax: string;

/** @ngInject */
constructor($scope, $injector) {
constructor($scope: any, $injector: auto.IInjectorService) {
super($scope, $injector);
this.aliasSyntax = '{{metric}} {{stat}} {{namespace}} {{region}} {{<dimension name>}}';
}

@@ -1,10 +1,12 @@
import angular from 'angular';
import angular, { IQService } from 'angular';
import coreModule from 'app/core/core_module';
import _ from 'lodash';
import { TemplateSrv } from 'app/features/templating/template_srv';
import DatasourceSrv from 'app/features/plugins/datasource_srv';

export class CloudWatchQueryParameterCtrl {
/** @ngInject */
constructor($scope, templateSrv, uiSegmentSrv, datasourceSrv, $q) {
constructor($scope: any, templateSrv: TemplateSrv, uiSegmentSrv: any, datasourceSrv: DatasourceSrv, $q: IQService) {
$scope.init = () => {
const target = $scope.target;
target.namespace = target.namespace || '';
@@ -69,7 +71,7 @@ export class CloudWatchQueryParameterCtrl {
);
};

$scope.statSegmentChanged = (segment, index) => {
$scope.statSegmentChanged = (segment: any, index: number) => {
if (segment.value === $scope.removeStatSegment.value) {
$scope.statSegments.splice(index, 1);
} else {
@@ -91,7 +93,7 @@ export class CloudWatchQueryParameterCtrl {
$scope.onChange();
};

$scope.ensurePlusButton = segments => {
$scope.ensurePlusButton = (segments: any) => {
const count = segments.length;
const lastSegment = segments[Math.max(count - 1, 0)];

@@ -100,7 +102,7 @@ export class CloudWatchQueryParameterCtrl {
}
};

$scope.getDimSegments = (segment, $index) => {
$scope.getDimSegments = (segment: any, $index: number) => {
if (segment.type === 'operator') {
return $q.when([]);
}
@@ -130,7 +132,7 @@ export class CloudWatchQueryParameterCtrl {
});
};

$scope.dimSegmentChanged = (segment, index) => {
$scope.dimSegmentChanged = (segment: any, index: number) => {
$scope.dimSegments[index] = segment;

if (segment.value === $scope.removeDimSegment.value) {
@@ -148,7 +150,7 @@ export class CloudWatchQueryParameterCtrl {
};

$scope.syncDimSegmentsWithModel = () => {
const dims = {};
const dims: any = {};
const length = $scope.dimSegments.length;

for (let i = 0; i < length - 2; i += 3) {
@@ -165,7 +167,7 @@ export class CloudWatchQueryParameterCtrl {
$scope.getRegions = () => {
return $scope.datasource
.metricFindQuery('regions()')
.then(results => {
.then((results: any) => {
results.unshift({ text: 'default' });
return results;
})
@@ -197,8 +199,8 @@ export class CloudWatchQueryParameterCtrl {
$scope.onChange();
};

$scope.transformToSegments = addTemplateVars => {
return results => {
$scope.transformToSegments = (addTemplateVars: any) => {
return (results: any) => {
const segments = _.map(results, segment => {
return uiSegmentSrv.newSegment({
value: segment.text,

@@ -32,7 +32,7 @@ describe('CloudWatchDatasource', () => {
} as any;

beforeEach(() => {
ctx.ds = new CloudWatchDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv);
ctx.ds = new CloudWatchDatasource(instanceSettings, {} as any, backendSrv, templateSrv, timeSrv);
});

describe('When performing CloudWatch query', () => {
@@ -56,7 +56,7 @@ describe('CloudWatchDatasource', () => {
],
};

const response = {
const response: any = {
timings: [null],
results: {
A: {
@@ -156,7 +156,7 @@ describe('CloudWatchDatasource', () => {
});

it('should return series list', done => {
ctx.ds.query(query).then(result => {
ctx.ds.query(query).then((result: any) => {
expect(result.data[0].target).toBe(response.results.A.series[0].name);
expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]);
done();
@@ -204,7 +204,7 @@ describe('CloudWatchDatasource', () => {
],
};

ctx.ds.query(query).then(result => {
ctx.ds.query(query).then((result: any) => {
expect(requestParams.queries[0].region).toBe(instanceSettings.jsonData.defaultRegion);
done();
});
@@ -231,7 +231,7 @@ describe('CloudWatchDatasource', () => {
],
};

const response = {
const response: any = {
timings: [null],
results: {
A: {
@@ -259,7 +259,7 @@ describe('CloudWatchDatasource', () => {
});

it('should return series list', done => {
ctx.ds.query(query).then(result => {
ctx.ds.query(query).then((result: any) => {
expect(result.data[0].target).toBe(response.results.A.series[0].name);
expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]);
done();
@@ -411,7 +411,7 @@ describe('CloudWatchDatasource', () => {
});
it('should generate the correct query for multiple template variables with expression', done => {
const query = {
|
||||
const query: any = {
|
||||
range: { from: 'now-1h', to: 'now' },
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
@@ -466,17 +466,17 @@ describe('CloudWatchDatasource', () => {
|
||||
});
|
||||
});
|
||||
|
||||
function describeMetricFindQuery(query, func) {
|
||||
function describeMetricFindQuery(query: any, func: any) {
|
||||
describe('metricFindQuery ' + query, () => {
|
||||
const scenario: any = {};
|
||||
scenario.setup = setupCallback => {
|
||||
scenario.setup = (setupCallback: any) => {
|
||||
beforeEach(() => {
|
||||
setupCallback();
|
||||
ctx.backendSrv.datasourceRequest = jest.fn(args => {
|
||||
scenario.request = args.data;
|
||||
return Promise.resolve({ data: scenario.requestResponse });
|
||||
});
|
||||
ctx.ds.metricFindQuery(query).then(args => {
|
||||
ctx.ds.metricFindQuery(query).then((args: any) => {
|
||||
scenario.result = args;
|
||||
});
|
||||
});
|
||||
@@ -486,7 +486,7 @@ describe('CloudWatchDatasource', () => {
|
||||
});
|
||||
}
|
||||
|
||||
describeMetricFindQuery('regions()', scenario => {
|
||||
describeMetricFindQuery('regions()', (scenario: any) => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
@@ -504,7 +504,7 @@ describe('CloudWatchDatasource', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('namespaces()', scenario => {
|
||||
describeMetricFindQuery('namespaces()', (scenario: any) => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
@@ -522,7 +522,7 @@ describe('CloudWatchDatasource', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('metrics(AWS/EC2)', scenario => {
|
||||
describeMetricFindQuery('metrics(AWS/EC2)', (scenario: any) => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
@@ -540,7 +540,7 @@ describe('CloudWatchDatasource', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('dimension_keys(AWS/EC2)', scenario => {
|
||||
describeMetricFindQuery('dimension_keys(AWS/EC2)', (scenario: any) => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
@@ -558,7 +558,7 @@ describe('CloudWatchDatasource', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('dimension_values(us-east-1,AWS/EC2,CPUUtilization,InstanceId)', scenario => {
|
||||
describeMetricFindQuery('dimension_values(us-east-1,AWS/EC2,CPUUtilization,InstanceId)', (scenario: any) => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
@@ -576,7 +576,7 @@ describe('CloudWatchDatasource', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('dimension_values(default,AWS/EC2,CPUUtilization,InstanceId)', scenario => {
|
||||
describeMetricFindQuery('dimension_values(default,AWS/EC2,CPUUtilization,InstanceId)', (scenario: any) => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
@@ -594,7 +594,7 @@ describe('CloudWatchDatasource', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('resource_arns(default,ec2:instance,{"environment":["production"]})', scenario => {
|
||||
describeMetricFindQuery('resource_arns(default,ec2:instance,{"environment":["production"]})', (scenario: any) => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import coreModule from 'app/core/core_module';
|
||||
import _ from 'lodash';
|
||||
import * as queryDef from './query_def';
|
||||
import { IQService } from 'angular';
|
||||
|
||||
export class ElasticBucketAggCtrl {
|
||||
/** @ngInject */
|
||||
constructor($scope, uiSegmentSrv, $q, $rootScope) {
|
||||
constructor($scope: any, uiSegmentSrv: any, $q: IQService, $rootScope: any) {
|
||||
const bucketAggs = $scope.target.bucketAggs;
|
||||
|
||||
$scope.orderByOptions = [];
|
||||
@@ -158,7 +159,7 @@ export class ElasticBucketAggCtrl {
|
||||
$scope.agg.settings.filters.push({ query: '*' });
|
||||
};
|
||||
|
||||
$scope.removeFiltersQuery = filter => {
|
||||
$scope.removeFiltersQuery = (filter: any) => {
|
||||
$scope.agg.settings.filters = _.without($scope.agg.settings.filters, filter);
|
||||
};
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ export class ElasticConfigCtrl {
|
||||
current: DataSourceInstanceSettings<ElasticsearchOptions>;
|
||||
|
||||
/** @ngInject */
|
||||
constructor($scope) {
|
||||
constructor($scope: any) {
|
||||
this.current.jsonData.timeField = this.current.jsonData.timeField || '@timestamp';
|
||||
this.current.jsonData.esVersion = this.current.jsonData.esVersion || 5;
|
||||
const defaultMaxConcurrentShardRequests = this.current.jsonData.esVersion >= 70 ? 5 : 256;
|
||||
@@ -18,7 +18,7 @@ export class ElasticConfigCtrl {
|
||||
this.current.jsonData.logLevelField = this.current.jsonData.logLevelField || '';
|
||||
}
|
||||
|
||||
indexPatternTypes = [
|
||||
indexPatternTypes: any = [
|
||||
{ name: 'No pattern', value: undefined },
|
||||
{ name: 'Hourly', value: 'Hourly', example: '[logstash-]YYYY.MM.DD.HH' },
|
||||
{ name: 'Daily', value: 'Daily', example: '[logstash-]YYYY.MM.DD' },
|
||||
|
||||
@@ -63,7 +63,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     }
   }

-  private request(method, url, data?) {
+  private request(method: string, url: string, data?: undefined) {
     const options: any = {
       url: this.url + '/' + url,
       method: method,
@@ -82,29 +82,29 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     return this.backendSrv.datasourceRequest(options);
   }

-  private get(url) {
+  private get(url: string) {
     const range = this.timeSrv.timeRange();
     const indexList = this.indexPattern.getIndexList(range.from.valueOf(), range.to.valueOf());
     if (_.isArray(indexList) && indexList.length) {
-      return this.request('GET', indexList[0] + url).then(results => {
+      return this.request('GET', indexList[0] + url).then((results: any) => {
         results.data.$$config = results.config;
         return results.data;
       });
     } else {
-      return this.request('GET', this.indexPattern.getIndexForToday() + url).then(results => {
+      return this.request('GET', this.indexPattern.getIndexForToday() + url).then((results: any) => {
        results.data.$$config = results.config;
        return results.data;
      });
     }
   }

-  private post(url, data) {
+  private post(url: string, data: any) {
     return this.request('POST', url, data)
-      .then(results => {
+      .then((results: any) => {
         results.data.$$config = results.config;
         return results.data;
       })
-      .catch(err => {
+      .catch((err: any) => {
         if (err.data && err.data.error) {
           throw {
             message: 'Elasticsearch error: ' + err.data.error.reason,
@@ -116,14 +116,14 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     });
   }

-  annotationQuery(options) {
+  annotationQuery(options: any) {
     const annotation = options.annotation;
     const timeField = annotation.timeField || '@timestamp';
     const queryString = annotation.query || '*';
     const tagsField = annotation.tagsField || 'tags';
     const textField = annotation.textField || null;

-    const range = {};
+    const range: any = {};
     range[timeField] = {
       from: options.range.from.valueOf(),
       to: options.range.to.valueOf(),
@@ -144,8 +144,8 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
       },
     };

-    const data = {
-      query: query,
+    const data: any = {
+      query,
       size: 10000,
     };
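
Two details in the hunks above are worth noting. First, `data?: undefined` in the new `request()` signature admits only `undefined`, which looks like an auto-annotation artifact; `data?: any` would match the `post()` call site (an inference on my part, not something the commit states). Second, `const range: any = {}` exists because the following line writes through a computed key, and a bare `{}` type has no index signature. A sketch of the index-signature alternative, with placeholder values:

```ts
// Hedged sketch: an index signature allows the computed-key write that
// forced `range` to `any` in the diff. The field name and bounds are
// placeholders, not values from the source.
interface TimeBound {
  from: number;
  to: number;
  format: string;
}

const timeField = '@timestamp';
const range: { [field: string]: TimeBound } = {};

range[timeField] = {
  from: 0,
  to: Date.now(),
  format: 'epoch_millis',
};

console.log(JSON.stringify(range));
```
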
@@ -168,11 +168,11 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic

     const payload = angular.toJson(header) + '\n' + angular.toJson(data) + '\n';

-    return this.post('_msearch', payload).then(res => {
+    return this.post('_msearch', payload).then((res: any) => {
       const list = [];
       const hits = res.responses[0].hits.hits;

-      const getFieldFromSource = (source, fieldName) => {
+      const getFieldFromSource = (source: any, fieldName: any) => {
         if (!fieldName) {
           return;
         }
@@ -229,7 +229,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
   testDatasource() {
     // validate that the index exist and has date field
     return this.getFields({ type: 'date' }).then(
-      dateFields => {
+      (dateFields: any) => {
        const timeField: any = _.find(dateFields, { text: this.timeField });
        if (!timeField) {
          return {
@@ -239,7 +239,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
        }
        return { status: 'success', message: 'Index OK. Time field name OK.' };
      },
-      err => {
+      (err: any) => {
        console.log(err);
        if (err.data && err.data.error) {
          let message = angular.toJson(err.data.error);
@@ -254,7 +254,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     );
   }

-  getQueryHeader(searchType, timeFrom, timeTo) {
+  getQueryHeader(searchType: any, timeFrom: any, timeTo: any) {
     const queryHeader: any = {
       search_type: searchType,
       ignore_unavailable: true,
@@ -319,7 +319,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic

     const url = this.getMultiSearchUrl();

-    return this.post(url, payload).then(res => {
+    return this.post(url, payload).then((res: any) => {
       const er = new ElasticResponse(sentTargets, res);
       if (sentTargets.some(target => target.isLogsQuery)) {
         return er.getLogs(this.logMessageField, this.logLevelField);
@@ -329,10 +329,10 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     });
   }

-  getFields(query) {
+  getFields(query: any) {
     const configuredEsVersion = this.esVersion;
-    return this.get('/_mapping').then(result => {
-      const typeMap = {
+    return this.get('/_mapping').then((result: any) => {
+      const typeMap: any = {
        float: 'number',
        double: 'number',
        integer: 'number',
@@ -344,7 +344,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
         nested: 'nested',
       };

-      function shouldAddField(obj, key, query) {
+      function shouldAddField(obj: any, key: any, query: any) {
         if (key[0] === '_') {
           return false;
         }
@@ -358,10 +358,10 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
       }

       // Store subfield names: [system, process, cpu, total] -> system.process.cpu.total
-      const fieldNameParts = [];
-      const fields = {};
+      const fieldNameParts: any = [];
+      const fields: any = {};

-      function getFieldsRecursively(obj) {
+      function getFieldsRecursively(obj: any) {
        for (const key in obj) {
          const subObj = obj[key];

@@ -415,7 +415,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     });
   }

-  getTerms(queryDef) {
+  getTerms(queryDef: any) {
     const range = this.timeSrv.timeRange();
     const searchType = this.esVersion >= 5 ? 'query_then_fetch' : 'count';
     const header = this.getQueryHeader(searchType, range.from, range.to);
@@ -427,7 +427,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic

     const url = this.getMultiSearchUrl();

-    return this.post(url, esQuery).then(res => {
+    return this.post(url, esQuery).then((res: any) => {
       if (!res.responses[0].aggregations) {
         return [];
       }
@@ -450,7 +450,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     return '_msearch';
   }

-  metricFindQuery(query) {
+  metricFindQuery(query: any) {
     query = angular.fromJson(query);
     if (!query) {
       return this.$q.when([]);
@@ -472,11 +472,11 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     return this.getFields({});
   }

-  getTagValues(options) {
+  getTagValues(options: any) {
     return this.getTerms({ field: options.key, query: '*' });
   }

-  targetContainsTemplate(target) {
+  targetContainsTemplate(target: any) {
     if (this.templateSrv.variableExists(target.query) || this.templateSrv.variableExists(target.alias)) {
       return true;
     }
@@ -500,7 +500,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     return false;
   }

-  private isPrimitive(obj) {
+  private isPrimitive(obj: any) {
     if (obj === null || obj === undefined) {
       return true;
     }
@@ -511,7 +511,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
     return false;
   }

-  private objectContainsTemplate(obj) {
+  private objectContainsTemplate(obj: any) {
     if (!obj) {
       return false;
     }
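
`const typeMap: any` in `getFields` is the same index-signature problem: the map is looked up with field types read from the `_mapping` response at runtime, and an object literal's inferred type only allows its known keys. A sketch of a typed lookup instead (entries abbreviated from the diff; the fallback behavior is illustrative):

```ts
// Sketch: typed alternative to `typeMap: any`. Keys arriving from the
// Elasticsearch mapping are arbitrary strings, so an index signature is
// the honest type; these entries are a subset of those in the diff.
const typeMap: { [esType: string]: string } = {
  float: 'number',
  double: 'number',
  integer: 'number',
  nested: 'nested',
};

function mappedType(esType: string): string {
  // Fall back to the raw Elasticsearch type when no mapping exists.
  return typeMap[esType] || esType;
}

console.log(mappedType('float'));   // "number"
console.log(mappedType('keyword')); // "keyword" (no entry, falls through)
```
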
@@ -4,14 +4,15 @@ import * as queryDef from './query_def';
 import TableModel from 'app/core/table_model';
 import { DataFrame, toDataFrame, FieldType } from '@grafana/data';
 import { DataQueryResponse } from '@grafana/ui';
+import { ElasticsearchAggregation } from './types';

 export class ElasticResponse {
-  constructor(private targets, private response) {
+  constructor(private targets: any, private response: any) {
     this.targets = targets;
     this.response = response;
   }

-  processMetrics(esAgg, target, seriesList, props) {
+  processMetrics(esAgg: any, target: any, seriesList: any, props: any) {
     let metric, y, i, newSeries, bucket, value;

     for (y = 0; y < target.metrics.length; y++) {
@@ -113,7 +114,7 @@ export class ElasticResponse {
     }
   }

-  processAggregationDocs(esAgg, aggDef, target, table, props) {
+  processAggregationDocs(esAgg: any, aggDef: ElasticsearchAggregation, target: any, table: any, props: any) {
     // add columns
     if (table.columns.length === 0) {
       for (const propKey of _.keys(props)) {
@@ -123,7 +124,7 @@ export class ElasticResponse {
     }

     // helper func to add values to value array
-    const addMetricValue = (values, metricName, value) => {
+    const addMetricValue = (values: any[], metricName: string, value: any) => {
       table.addColumn({ text: metricName });
       values.push(value);
     };
@@ -188,8 +189,8 @@ export class ElasticResponse {

   // This is quite complex
   // need to recurse down the nested buckets to build series
-  processBuckets(aggs, target, seriesList, table, props, depth) {
-    let bucket, aggDef, esAgg, aggId;
+  processBuckets(aggs: any, target: any, seriesList: any, table: any, props: any, depth: any) {
+    let bucket, aggDef: any, esAgg, aggId;
     const maxDepth = target.bucketAggs.length - 1;

     for (aggId in aggs) {
@@ -224,7 +225,7 @@ export class ElasticResponse {
     }
   }

-  private getMetricName(metric) {
+  private getMetricName(metric: any) {
     let metricDef: any = _.find(queryDef.metricAggTypes, { value: metric });
     if (!metricDef) {
       metricDef = _.find(queryDef.extendedStats, { value: metric });
@@ -233,13 +234,13 @@ export class ElasticResponse {
     return metricDef ? metricDef.text : metric;
   }

-  private getSeriesName(series, target, metricTypeCount) {
+  private getSeriesName(series: any, target: any, metricTypeCount: any) {
     let metricName = this.getMetricName(series.metric);

     if (target.alias) {
       const regex = /\{\{([\s\S]+?)\}\}/g;

-      return target.alias.replace(regex, (match, g1, g2) => {
+      return target.alias.replace(regex, (match: any, g1: any, g2: any) => {
         const group = g1 || g2;

         if (group.indexOf('term ') === 0) {
@@ -303,7 +304,7 @@ export class ElasticResponse {
     return name.trim() + ' ' + metricName;
   }

-  nameSeries(seriesList, target) {
+  nameSeries(seriesList: any, target: any) {
     const metricTypeCount = _.uniq(_.map(seriesList, 'metric')).length;

     for (let i = 0; i < seriesList.length; i++) {
@@ -312,17 +313,17 @@ export class ElasticResponse {
     }
   }

-  processHits(hits, seriesList) {
+  processHits(hits: { total: { value: any }; hits: any[] }, seriesList: any[]) {
     const hitsTotal = typeof hits.total === 'number' ? hits.total : hits.total.value; // <- Works with Elasticsearch 7.0+

-    const series = {
+    const series: any = {
       target: 'docs',
       type: 'docs',
       datapoints: [],
       total: hitsTotal,
       filterable: true,
     };
-    let propName, hit, doc, i;
+    let propName, hit, doc: any, i;

     for (i = 0; i < hits.hits.length; i++) {
       hit = hits.hits[i];
@@ -347,7 +348,7 @@ export class ElasticResponse {
     seriesList.push(series);
   }

-  trimDatapoints(aggregations, target) {
+  trimDatapoints(aggregations: any, target: any) {
     const histogram: any = _.find(target.bucketAggs, { type: 'date_histogram' });

     const shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges;
@@ -362,7 +363,7 @@ export class ElasticResponse {
     }
   }

-  getErrorFromElasticResponse(response, err) {
+  getErrorFromElasticResponse(response: any, err: any) {
     const result: any = {};
     result.data = JSON.stringify(err, null, 4);
     if (err.root_cause && err.root_cause.length > 0 && err.root_cause[0].reason) {
@@ -394,7 +395,7 @@ export class ElasticResponse {
      if (response.aggregations) {
        const aggregations = response.aggregations;
        const target = this.targets[i];
-       const tmpSeriesList = [];
+       const tmpSeriesList: any[] = [];
        const table = new TableModel();

        this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
@@ -426,7 +427,7 @@ export class ElasticResponse {

      const hits = response.hits;
      let propNames: string[] = [];
-     let propName, hit, doc, i;
+     let propName, hit, doc: any, i;

      for (i = 0; i < hits.hits.length; i++) {
        hit = hits.hits[i];
@@ -534,7 +535,7 @@ export class ElasticResponse {
      if (response.aggregations) {
        const aggregations = response.aggregations;
        const target = this.targets[n];
-       const tmpSeriesList = [];
+       const tmpSeriesList: any[] = [];
        const table = new TableModel();

        this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
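
`processHits` is the one signature above that gets a structural type, `hits: { total: { value: any }; hits: any[] }`, alongside the runtime check the inline comment flags: Elasticsearch 7.0 changed `hits.total` from a number to an object with a `value` field. A self-contained sketch of that compatibility branch, with a union type standing in for the looser annotation in the diff:

```ts
// Sketch of the ES 7.0 compatibility check preserved in the diff:
// pre-7 responses report hits.total as a number, 7.0+ as an object.
type HitsTotal = number | { value: number };

function extractHitsTotal(total: HitsTotal): number {
  return typeof total === 'number' ? total : total.value;
}

console.log(extractHitsTotal(42));            // pre-7.0 response shape
console.log(extractHitsTotal({ value: 42 })); // 7.0+ response shape
```
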
Some files were not shown because too many files have changed in this diff.