Mirror of https://github.com/grafana/grafana.git
Compare commits: docs/add-d ... v6.4.1 (19 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 4ba8388f3a | |
| | c3b3ad4380 | |
| | bef64b046c | |
| | 4edafb7c8c | |
| | 3c0268d671 | |
| | 126296826b | |
| | dd75bb67bb | |
| | c31f39ca11 | |
| | e17af53428 | |
| | 4d1617c1dd | |
| | 3cb8b896dd | |
| | 943f661a75 | |
| | b2c1473e59 | |
| | 052ea8f63b | |
| | 38e88083a3 | |
| | 6232cfcdda | |
| | aa7659d1dd | |
| | 199031a6e2 | |
| | 10d47ab095 | |
@@ -19,7 +19,7 @@ version: 2
jobs:
mysql-integration-test:
docker:
- image: circleci/golang:1.12.9
- image: circleci/golang:1.12.10
- image: circleci/mysql:5.6-ram
environment:
MYSQL_ROOT_PASSWORD: rootpass

@@ -39,7 +39,7 @@ jobs:
postgres-integration-test:
docker:
- image: circleci/golang:1.12.9
- image: circleci/golang:1.12.10
- image: circleci/postgres:9.3-ram
environment:
POSTGRES_USER: grafanatest

@@ -58,7 +58,7 @@ jobs:
cache-server-test:
docker:
- image: circleci/golang:1.12.9
- image: circleci/golang:1.12.10
- image: circleci/redis:4-alpine
- image: memcached
working_directory: /go/src/github.com/grafana/grafana

@@ -144,7 +144,7 @@ jobs:
lint-go:
docker:
- image: circleci/golang:1.12.9
- image: circleci/golang:1.12.10
environment:
# we need CGO because of go-sqlite3
CGO_ENABLED: 1

@@ -185,7 +185,7 @@ jobs:
test-backend:
docker:
- image: circleci/golang:1.12.9
- image: circleci/golang:1.12.10
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@@ -195,7 +195,7 @@ jobs:
build-all:
docker:
- image: grafana/build-container:1.2.8
- image: grafana/build-container:1.2.9
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@@ -214,15 +214,15 @@ jobs:
- run:
name: build and package grafana
command: './scripts/build/build-all.sh'
- run:
name: Prepare GPG private key
command: './scripts/build/prepare_signing_key.sh'
- run:
name: sign packages
command: './scripts/build/sign_packages.sh'
command: './scripts/build/sign_packages.sh dist/*.rpm'
- run:
name: verify signed packages
command: |
mkdir -p ~/.rpmdb/pubkeys
curl -s https://packages.grafana.com/gpg.key > ~/.rpmdb/pubkeys/grafana.key
./scripts/build/verify_signed_packages.sh dist/*.rpm
command: './scripts/build/verify_signed_packages.sh dist/*.rpm'
- run:
name: sha-sum packages
command: 'go run build.go sha-dist'

@@ -239,7 +239,7 @@ jobs:
build:
docker:
- image: grafana/build-container:1.2.8
- image: grafana/build-container:1.2.9
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@@ -249,9 +249,12 @@ jobs:
- run:
name: build and package grafana
command: './scripts/build/build.sh'
- run:
name: Prepare GPG private key
command: './scripts/build/prepare_signing_key.sh'
- run:
name: sign packages
command: './scripts/build/sign_packages.sh'
command: './scripts/build/sign_packages.sh dist/*.rpm'
- run:
name: sha-sum packages
command: 'go run build.go sha-dist'

@@ -265,7 +268,7 @@ jobs:
build-fast-backend:
docker:
- image: grafana/build-container:1.2.8
- image: grafana/build-container:1.2.9
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@@ -282,7 +285,7 @@ jobs:
build-fast-frontend:
docker:
- image: grafana/build-container:1.2.8
- image: grafana/build-container:1.2.9
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@@ -306,7 +309,7 @@ jobs:
build-fast-package:
docker:
- image: grafana/build-container:1.2.8
- image: grafana/build-container:1.2.9
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@@ -333,7 +336,7 @@ jobs:
build-fast-save:
docker:
- image: grafana/build-container:1.2.8
- image: grafana/build-container:1.2.9
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@@ -360,9 +363,12 @@ jobs:
- run:
name: package grafana
command: './scripts/build/build.sh --fast --package-only'
- run:
name: Prepare GPG private key
command: './scripts/build/prepare_signing_key.sh'
- run:
name: sign packages
command: './scripts/build/sign_packages.sh'
command: './scripts/build/sign_packages.sh dist/*.rpm'
- run:
name: sha-sum packages
command: 'go run build.go sha-dist'

@@ -419,7 +425,7 @@ jobs:
build-enterprise:
docker:
- image: grafana/build-container:1.2.8
- image: grafana/build-container:1.2.9
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@@ -435,9 +441,12 @@ jobs:
- run:
name: build and package enterprise
command: './scripts/build/build.sh -enterprise'
- run:
name: Prepare GPG private key
command: './scripts/build/prepare_signing_key.sh'
- run:
name: sign packages
command: './scripts/build/sign_packages.sh'
command: './scripts/build/sign_packages.sh dist/*.rpm'
- run:
name: sha-sum packages
command: 'go run build.go sha-dist'

@@ -451,7 +460,7 @@ jobs:
build-all-enterprise:
docker:
- image: grafana/build-container:1.2.8
- image: grafana/build-container:1.2.9
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout

@@ -476,15 +485,15 @@ jobs:
- run:
name: build and package grafana
command: './scripts/build/build-all.sh -enterprise'
- run:
name: Prepare GPG private key
command: './scripts/build/prepare_signing_key.sh'
- run:
name: sign packages
command: './scripts/build/sign_packages.sh'
command: './scripts/build/sign_packages.sh dist/*.rpm'
- run:
name: verify signed packages
command: |
mkdir -p ~/.rpmdb/pubkeys
curl -s https://packages.grafana.com/gpg.key > ~/.rpmdb/pubkeys/grafana.key
./scripts/build/verify_signed_packages.sh dist/*.rpm
command: './scripts/build/verify_signed_packages.sh dist/*.rpm'
- run:
name: sha-sum packages
command: 'go run build.go sha-dist'

@@ -537,15 +546,24 @@ jobs:
- run:
name: Deploy to Grafana.com
command: './scripts/build/publish.sh --enterprise'
- run:
name: Prepare GPG private key
command: './scripts/build/prepare_signing_key.sh'
- run:
name: Load GPG private key
command: './scripts/build/load-signing-key.sh'
command: './scripts/build/update_repo/load-signing-key.sh'
- run:
name: Update Debian repository
command: './scripts/build/update_repo/update-deb.sh "enterprise" "$GPG_KEY_PASSWORD" "$CIRCLE_TAG" "enterprise-dist"'
- run:
name: Publish Debian repository
command: './scripts/build/update_repo/publish-deb.sh "enterprise"'
- run:
name: Update RPM repository
command: './scripts/build/update_repo/update-rpm.sh "enterprise" "$GPG_KEY_PASSWORD" "$CIRCLE_TAG" "enterprise-dist"'
- run:
name: Publish RPM repository
command: './scripts/build/update_repo/publish-rpm.sh "enterprise" "$CIRCLE_TAG"'

deploy-master:

@@ -591,15 +609,24 @@ jobs:
- run:
name: Deploy to Grafana.com
command: './scripts/build/publish.sh'
- run:
name: Prepare GPG private key
command: './scripts/build/prepare_signing_key.sh'
- run:
name: Load GPG private key
command: './scripts/build/load-signing-key.sh'
command: './scripts/build/update_repo/load-signing-key.sh'
- run:
name: Update Debian repository
command: './scripts/build/update_repo/update-deb.sh "oss" "$GPG_KEY_PASSWORD" "$CIRCLE_TAG" "dist"'
- run:
name: Publish Debian repository
command: './scripts/build/update_repo/publish-deb.sh "oss"'
- run:
name: Update RPM repository
command: './scripts/build/update_repo/update-rpm.sh "oss" "$GPG_KEY_PASSWORD" "$CIRCLE_TAG" "dist"'
- run:
name: Publish RPM repository
command: './scripts/build/update_repo/publish-rpm.sh "oss" "$CIRCLE_TAG"'

build-oss-msi:
docker:
@@ -1,5 +1,5 @@
# Golang build container
FROM golang:1.12.9-alpine
FROM golang:1.12.10-alpine

RUN apk add --no-cache gcc g++

@@ -62,7 +62,8 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi

WORKDIR $GF_PATHS_HOME

RUN apk add --no-cache ca-certificates bash
RUN apk add --no-cache ca-certificates bash && \
apk add --no-cache --upgrade --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main openssl musl-utils

COPY conf ./conf
@@ -38,10 +38,7 @@ Just add it as a datasource and you are ready to query your log data in [Explore

## Querying Logs

Querying and displaying log data from Loki is available via [Explore](/features/explore).
Select the Loki data source, and then enter a log query to display your logs.

> Viewing Loki data in dashboard panels is not supported yet, but is being worked on.
Querying and displaying log data from Loki is available via [Explore](/features/explore), and with the [logs panel](/features/panels/logs/) in dashboards. Select the Loki data source, and then enter a log query to display your logs.

### Log Queries
44 docs/sources/features/panels/logs.md (new file)
@@ -0,0 +1,44 @@
+++
title = "Logs Panel"
keywords = ["grafana", "dashboard", "documentation", "panels", "logs panel"]
type = "docs"
aliases = ["/reference/logs/"]
[menu.docs]
name = "Logs"
parent = "panels"
weight = 2
+++

# Logs Panel

<img class="screenshot" src="/assets/img/features/logs-panel.png">

> Logs panel is only available in Grafana v6.4+

The logs panel shows log lines from datasources that support logs, e.g., Elastic, Influx, and Loki.
Typically you would use this panel next to a graph panel to display the log output of a related process.

## Querying Data

The logs panel will show the result of queries that are specified in the **Queries** tab.
The results of multiple queries will be merged and sorted by time.
Note that you can scroll inside the panel in case the datasource returns more lines than can be displayed at any one time.

### Query Options

Some datasources (e.g., Loki) allow the use of **Live** tailing to show a steady stream of log messages.
When the panel is in **Live** mode, results are directly streamed from the datasource and the dashboard's time range is ignored.
Note that the streaming can put extra effort on the datasource and your browser.
Usually, the dashboard-wide refresh should be enough to get a recent set of log lines.

To limit the number of lines rendered, you can use the query-wide **Max data points** setting. If it is not set, the datasource will usually enforce a limit.

## Visualization Options

### Columns

1. **Time**: Show/hide the time column. This is the timestamp associated with the log line as reported from the datasource.
2. **Order**: Set to **Ascending** to show the oldest log lines first.

<div class="clearfix"></div>
@@ -2,5 +2,5 @@
"npmClient": "yarn",
"useWorkspaces": true,
"packages": ["packages/*"],
"version": "6.4.0-pre"
"version": "6.4.0"
}
14 package.json
@@ -3,7 +3,7 @@
"license": "Apache-2.0",
"private": true,
"name": "grafana",
"version": "6.4.0-pre",
"version": "6.4.1",
"repository": {
"type": "git",
"url": "http://github.com/grafana/grafana.git"

@@ -52,7 +52,9 @@
"@types/redux-logger": "3.0.7",
"@types/redux-mock-store": "1.0.1",
"@types/reselect": "2.2.0",
"@types/slate": "0.44.11",
"@types/slate": "0.47.1",
"@types/slate-plain-serializer": "0.6.1",
"@types/slate-react": "0.22.5",
"@types/tinycolor2": "1.4.2",
"angular-mocks": "1.6.6",
"autoprefixer": "9.5.0",

@@ -121,6 +123,7 @@
"redux-mock-store": "1.5.3",
"regexp-replace-loader": "1.0.1",
"rimraf": "2.6.3",
"rxjs-spy": "^7.5.1",
"sass-lint": "1.12.1",
"sass-loader": "7.1.0",
"sinon": "1.17.6",

@@ -193,6 +196,7 @@
},
"dependencies": {
"@babel/polyfill": "7.2.5",
"@grafana/slate-react": "0.22.9-grafana",
"@torkelo/react-select": "2.4.1",
"angular": "1.6.6",
"angular-bindonce": "0.3.1",

@@ -243,10 +247,8 @@
"rst2html": "github:thoward/rst2html#990cb89",
"rxjs": "6.4.0",
"search-query-parser": "1.5.2",
"slate": "0.33.8",
"slate-plain-serializer": "0.5.41",
"slate-prism": "0.5.0",
"slate-react": "0.12.11",
"slate": "0.47.8",
"slate-plain-serializer": "0.7.10",
"tether": "1.4.5",
"tether-drop": "https://github.com/torkelo/drop/tarball/master",
"tinycolor2": "1.4.1",
@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/data",
"version": "6.4.0-pre",
"version": "6.4.0",
"description": "Grafana Data Library",
"keywords": [
"typescript"
@@ -82,7 +82,7 @@ describe('FieldCache', () => {
it('should get the first field with a duplicate name', () => {
const field = ext.getFieldByName('value');
expect(field!.name).toEqual('value');
expect(field!.values.toJSON()).toEqual([1, 2, 3]);
expect(field!.values.toArray()).toEqual([1, 2, 3]);
});

it('should return index of the field', () => {
@@ -189,14 +189,14 @@ describe('sorted DataFrame', () => {
it('Should sort numbers', () => {
const sorted = sortDataFrame(frame, 0, true);
expect(sorted.length).toEqual(3);
expect(sorted.fields[0].values.toJSON()).toEqual([3, 2, 1]);
expect(sorted.fields[1].values.toJSON()).toEqual(['c', 'b', 'a']);
expect(sorted.fields[0].values.toArray()).toEqual([3, 2, 1]);
expect(sorted.fields[1].values.toArray()).toEqual(['c', 'b', 'a']);
});

it('Should sort strings', () => {
const sorted = sortDataFrame(frame, 1, true);
expect(sorted.length).toEqual(3);
expect(sorted.fields[0].values.toJSON()).toEqual([3, 2, 1]);
expect(sorted.fields[1].values.toJSON()).toEqual(['c', 'b', 'a']);
expect(sorted.fields[0].values.toArray()).toEqual([3, 2, 1]);
expect(sorted.fields[1].values.toArray()).toEqual(['c', 'b', 'a']);
});
});
@@ -401,7 +401,7 @@ export function toDataFrameDTO(data: DataFrame): DataFrameDTO {
name: f.name,
type: f.type,
config: f.config,
values: f.values.toJSON(),
values: f.values.toArray(),
};
});
@@ -2,4 +2,5 @@
import * as dateMath from './datemath';
import * as rangeUtil from './rangeutil';
export * from './moment_wrapper';
export * from './timezones';
export { dateMath, rangeUtil };
390 packages/grafana-data/src/datetime/timezones.ts (new file)
@@ -0,0 +1,390 @@
// List taken from https://stackoverflow.com/questions/38399465/how-to-get-list-of-all-timezones-in-javascript

export const getTimeZoneGroups = () => {
const europeZones = [
'Europe/Amsterdam', 'Europe/Andorra', 'Europe/Astrakhan', 'Europe/Athens', 'Europe/Belgrade', 'Europe/Berlin',
'Europe/Brussels', 'Europe/Bucharest', 'Europe/Budapest', 'Europe/Chisinau', 'Europe/Copenhagen', 'Europe/Dublin',
'Europe/Gibraltar', 'Europe/Helsinki', 'Europe/Istanbul', 'Europe/Kaliningrad', 'Europe/Kiev', 'Europe/Kirov',
'Europe/Lisbon', 'Europe/London', 'Europe/Luxembourg', 'Europe/Madrid', 'Europe/Malta', 'Europe/Minsk',
'Europe/Monaco', 'Europe/Moscow', 'Europe/Oslo', 'Europe/Paris', 'Europe/Prague', 'Europe/Riga',
'Europe/Rome', 'Europe/Samara', 'Europe/Saratov', 'Europe/Simferopol', 'Europe/Sofia', 'Europe/Stockholm',
'Europe/Tallinn', 'Europe/Tirane', 'Europe/Ulyanovsk', 'Europe/Uzhgorod', 'Europe/Vienna', 'Europe/Vilnius',
'Europe/Volgograd', 'Europe/Warsaw', 'Europe/Zaporozhye', 'Europe/Zurich',
];

const africaZones = [
'Africa/Abidjan', 'Africa/Accra', 'Africa/Algiers', 'Africa/Bissau', 'Africa/Cairo', 'Africa/Casablanca',
'Africa/Ceuta', 'Africa/El_Aaiun', 'Africa/Johannesburg', 'Africa/Juba', 'Africa/Khartoum', 'Africa/Lagos',
'Africa/Maputo', 'Africa/Monrovia', 'Africa/Nairobi', 'Africa/Ndjamena', 'Africa/Sao_Tome', 'Africa/Tripoli',
'Africa/Tunis', 'Africa/Windhoek',
];

const asiaZones = [
'Asia/Almaty', 'Asia/Amman', 'Asia/Anadyr', 'Asia/Aqtau', 'Asia/Aqtobe', 'Asia/Ashgabat', 'Asia/Atyrau',
'Asia/Baghdad', 'Asia/Baku', 'Asia/Bangkok', 'Asia/Barnaul', 'Asia/Beirut', 'Asia/Bishkek', 'Asia/Brunei',
'Asia/Chita', 'Asia/Choibalsan', 'Asia/Colombo', 'Asia/Damascus', 'Asia/Dhaka', 'Asia/Dili', 'Asia/Dubai',
'Asia/Dushanbe', 'Asia/Famagusta', 'Asia/Gaza', 'Asia/Hebron', 'Asia/Ho_Chi_Minh', 'Asia/Hong_Kong', 'Asia/Hovd',
'Asia/Irkutsk', 'Asia/Jakarta', 'Asia/Jayapura', 'Asia/Jerusalem', 'Asia/Kabul', 'Asia/Kamchatka', 'Asia/Karachi',
'Asia/Kathmandu', 'Asia/Khandyga', 'Asia/Kolkata', 'Asia/Krasnoyarsk', 'Asia/Kuala_Lumpur', 'Asia/Kuching',
'Asia/Macau', 'Asia/Magadan', 'Asia/Makassar', 'Asia/Manila', 'Asia/Nicosia', 'Asia/Novokuznetsk',
'Asia/Novosibirsk', 'Asia/Omsk', 'Asia/Oral', 'Asia/Pontianak', 'Asia/Pyongyang', 'Asia/Qatar', 'Asia/Qostanay',
'Asia/Qyzylorda', 'Asia/Riyadh', 'Asia/Sakhalin', 'Asia/Samarkand', 'Asia/Seoul', 'Asia/Shanghai', 'Asia/Singapore',
'Asia/Srednekolymsk', 'Asia/Taipei', 'Asia/Tashkent', 'Asia/Tbilisi', 'Asia/Tehran', 'Asia/Thimphu', 'Asia/Tokyo',
'Asia/Tomsk', 'Asia/Ulaanbaatar', 'Asia/Urumqi', 'Asia/Ust-Nera', 'Asia/Vladivostok', 'Asia/Yakutsk', 'Asia/Yangon',
'Asia/Yekaterinburg', 'Asia/Yerevan',
];

const antarcticaZones = [
'Antarctica/Casey', 'Antarctica/Davis', 'Antarctica/DumontDUrville', 'Antarctica/Macquarie', 'Antarctica/Mawson',
'Antarctica/Palmer', 'Antarctica/Rothera', 'Antarctica/Syowa', 'Antarctica/Troll', 'Antarctica/Vostok',
];

const americaZones = [
'America/Adak', 'America/Anchorage', 'America/Araguaina', 'America/Argentina/Buenos_Aires',
'America/Argentina/Catamarca', 'America/Argentina/Cordoba', 'America/Argentina/Jujuy', 'America/Argentina/La_Rioja',
'America/Argentina/Mendoza', 'America/Argentina/Rio_Gallegos', 'America/Argentina/Salta',
'America/Argentina/San_Juan', 'America/Argentina/San_Luis', 'America/Argentina/Tucuman', 'America/Argentina/Ushuaia',
'America/Asuncion', 'America/Atikokan', 'America/Bahia', 'America/Bahia_Banderas', 'America/Barbados',
'America/Belem', 'America/Belize', 'America/Blanc-Sablon', 'America/Boa_Vista', 'America/Bogota', 'America/Boise',
'America/Cambridge_Bay', 'America/Campo_Grande', 'America/Cancun', 'America/Caracas', 'America/Cayenne',
'America/Chicago', 'America/Chihuahua', 'America/Costa_Rica', 'America/Creston', 'America/Cuiaba',
'America/Curacao', 'America/Danmarkshavn', 'America/Dawson', 'America/Dawson_Creek', 'America/Denver',
'America/Detroit', 'America/Edmonton', 'America/Eirunepe', 'America/El_Salvador', 'America/Fort_Nelson',
'America/Fortaleza', 'America/Glace_Bay', 'America/Godthab', 'America/Goose_Bay', 'America/Grand_Turk',
'America/Guatemala', 'America/Guayaquil', 'America/Guyana', 'America/Halifax', 'America/Havana',
'America/Hermosillo', 'America/Indiana/Indianapolis', 'America/Indiana/Knox', 'America/Indiana/Marengo',
'America/Indiana/Petersburg', 'America/Indiana/Tell_City', 'America/Indiana/Vevay', 'America/Indiana/Vincennes',
'America/Indiana/Winamac', 'America/Inuvik', 'America/Iqaluit', 'America/Jamaica', 'America/Juneau',
'America/Kentucky/Louisville', 'America/Kentucky/Monticello', 'America/La_Paz', 'America/Lima',
'America/Los_Angeles', 'America/Maceio', 'America/Managua', 'America/Manaus', 'America/Martinique',
'America/Matamoros', 'America/Mazatlan', 'America/Menominee', 'America/Merida', 'America/Metlakatla',
'America/Mexico_City', 'America/Miquelon', 'America/Moncton', 'America/Monterrey', 'America/Montevideo',
'America/Nassau', 'America/New_York', 'America/Nipigon', 'America/Nome', 'America/Noronha',
'America/North_Dakota/Beulah', 'America/North_Dakota/Center', 'America/North_Dakota/New_Salem', 'America/Ojinaga',
'America/Panama', 'America/Pangnirtung', 'America/Paramaribo', 'America/Phoenix', 'America/Port-au-Prince',
'America/Port_of_Spain', 'America/Porto_Velho', 'America/Puerto_Rico', 'America/Punta_Arenas',
'America/Rainy_River', 'America/Rankin_Inlet', 'America/Recife', 'America/Regina', 'America/Resolute',
'America/Rio_Branco', 'America/Santarem', 'America/Santiago', 'America/Santo_Domingo', 'America/Sao_Paulo',
'America/Scoresbysund', 'America/Sitka', 'America/St_Johns', 'America/Swift_Current', 'America/Tegucigalpa',
'America/Thule', 'America/Thunder_Bay', 'America/Tijuana', 'America/Toronto', 'America/Vancouver',
'America/Whitehorse', 'America/Winnipeg', 'America/Yakutat', 'America/Yellowknife',
];

const pacificZones = [
'Pacific/Apia', 'Pacific/Auckland', 'Pacific/Bougainville', 'Pacific/Chatham', 'Pacific/Chuuk', 'Pacific/Easter',
'Pacific/Efate', 'Pacific/Enderbury', 'Pacific/Fakaofo', 'Pacific/Fiji', 'Pacific/Funafuti', 'Pacific/Galapagos',
'Pacific/Gambier', 'Pacific/Guadalcanal', 'Pacific/Guam', 'Pacific/Honolulu', 'Pacific/Kiritimati', 'Pacific/Kosrae',
'Pacific/Kwajalein', 'Pacific/Majuro', 'Pacific/Marquesas', 'Pacific/Nauru', 'Pacific/Niue', 'Pacific/Norfolk',
'Pacific/Noumea', 'Pacific/Pago_Pago', 'Pacific/Palau', 'Pacific/Pitcairn', 'Pacific/Pohnpei',
'Pacific/Port_Moresby', 'Pacific/Rarotonga', 'Pacific/Tahiti', 'Pacific/Tarawa', 'Pacific/Tongatapu',
'Pacific/Wake', 'Pacific/Wallis',
];

const australiaZones = [
'Australia/Adelaide', 'Australia/Brisbane', 'Australia/Broken_Hill', 'Australia/Currie', 'Australia/Darwin',
'Australia/Eucla', 'Australia/Hobart', 'Australia/Lindeman', 'Australia/Lord_Howe', 'Australia/Melbourne',
'Australia/Perth', 'Australia/Sydney',
];

const atlanticZones = [
'Atlantic/Azores', 'Atlantic/Bermuda', 'Atlantic/Canary', 'Atlantic/Cape_Verde', 'Atlantic/Faroe',
'Atlantic/Madeira', 'Atlantic/Reykjavik', 'Atlantic/South_Georgia', 'Atlantic/Stanley',
];

const indianZones = [
'Indian/Chagos', 'Indian/Christmas', 'Indian/Cocos', 'Indian/Kerguelen', 'Indian/Mahe', 'Indian/Maldives',
'Indian/Mauritius', 'Indian/Reunion',
];

return [
{ label: 'Africa', options: africaZones },
{ label: 'America', options: americaZones },
{ label: 'Antarctica', options: antarcticaZones },
{ label: 'Asia', options: asiaZones },
{ label: 'Atlantic', options: atlanticZones },
{ label: 'Australia', options: australiaZones },
{ label: 'Europe', options: europeZones },
{ label: 'Indian', options: indianZones },
{ label: 'Pacific', options: pacificZones },
];
};
@@ -12,3 +12,4 @@ export * from './displayValue';
export * from './graph';
export * from './ScopedVars';
export * from './transformations';
export * from './vector';
@@ -41,3 +41,9 @@ export interface TimeOptions {
export type TimeFragment = string | DateTime;

export const TIME_FORMAT = 'YYYY-MM-DD HH:mm:ss';

export const DefaultTimeRange: TimeRange = {
from: {} as DateTime,
to: {} as DateTime,
raw: { from: '6h', to: 'now' },
};
@@ -10,11 +10,6 @@ export interface Vector<T = any> {
* Get the resutls as an array.
*/
toArray(): T[];

/**
* Return the values as a simple array for json serialization
*/
toJSON(): any; // same results as toArray()
}

/**
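The hunk above drops `toJSON()` from the `Vector` interface; the callers shown earlier in this commit (for example `toDataFrameDTO` and the DataFrame tests) switch to `toArray()`. A minimal sketch of serializing values under that assumption follows; the helper and the stand-in object are illustrative, not part of this commit:

```ts
// Illustrative only: any value source exposing toArray() can be serialized
// by materializing it as a plain array, as toDataFrameDTO does above.
interface HasToArray<T> {
  toArray(): T[];
}

const serializeValues = <T>(values: HasToArray<T>): T[] => values.toArray();

// Usage with a plain stand-in vector:
const vector: HasToArray<number> = { toArray: () => [1, 2, 3] };
console.log(JSON.stringify(serializeValues(vector))); // "[1,2,3]"
```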
@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/runtime",
"version": "6.4.0-pre",
"version": "6.4.0",
"description": "Grafana Runtime Library",
"keywords": [
"grafana",

@@ -21,8 +21,8 @@
"build": "grafana-toolkit package:build --scope=runtime"
},
"dependencies": {
"@grafana/data": "^6.4.0-alpha",
"@grafana/ui": "^6.4.0-alpha",
"@grafana/data": "6.4.0",
"@grafana/ui": "6.4.0",
"systemjs": "0.20.19",
"systemjs-plugin-css": "0.1.37"
},
@@ -0,0 +1,4 @@
# 6.4.0 (unreleased)

# 6.4.0-beta1 (2019-09-17)
First release, see [Readme](https://github.com/grafana/grafana/blob/v6.4.0-beta1/packages/grafana-toolkit/README.md) for details.
@@ -2,23 +2,27 @@
> **@grafana/toolkit is currently in ALPHA**. Core API is unstable and can be a subject of breaking changes!

# grafana-toolkit
grafana-toolkit is CLI that enables efficient development of Grafana extensions
grafana-toolkit is CLI that enables efficient development of Grafana plugins

## Rationale
Historically, creating Grafana extension was an exercise of reverse engineering and ceremony around testing, developing and eventually building the plugin. We want to help our community to focus on the core value of their plugins rather than all the setup required to develop an extension.
Historically, creating Grafana plugin was an exercise of reverse engineering and ceremony around testing, developing and eventually building the plugin. We want to help our community to focus on the core value of their plugins rather than all the setup required to develop them.

## Installation
## Getting started

You can either add grafana-toolkit to your extension's `package.json` file by running
`yarn add @grafana/toolkit` or `npm instal @grafana/toolkit`, or use one of our extension templates:
- [React Panel](https://github.com/grafana/simple-react-panel)
- [Angular Panel](https://github.com/grafana/simple-angular-panel)
Setup new plugin with `grafana-toolkit plugin:create` command:

### Updating your extension to use grafana-toolkit
In order to start using grafana-toolkit in your extension you need to follow the steps below:
1. Add `@grafana/toolkit` package to your project
2. Create `tsconfig.json` file in the root dir of your extension and paste the code below:
```sh
npx grafana-toolkit plugin:create my-grafana-plugin
cd my-grafana-plugin
yarn install
yarn dev
```

### Updating your plugin to use grafana-toolkit
In order to start using grafana-toolkit in your existing plugin you need to follow the steps below:
1. Add `@grafana/toolkit` package to your project by running `yarn add @grafana/toolkit` or `npm install @grafana/toolkit`
2. Create `tsconfig.json` file in the root dir of your plugin and paste the code below:
```json
{
"extends": "./node_modules/@grafana/toolkit/src/config/tsconfig.plugin.json",

@@ -31,7 +35,7 @@ In order to start using grafana-toolkit in your extension you need to follow the
}
```

3. Create `.prettierrc.js` file in the root dir of your extension and paste the code below:
3. Create `.prettierrc.js` file in the root dir of your plugin and paste the code below:
```js
module.exports = {
...require("./node_modules/@grafana/toolkit/src/config/prettier.plugin.config.json"),

@@ -49,13 +53,21 @@ module.exports = {
```

## Usage
With grafana-toolkit we put in your hands a CLI that addresses common tasks performed when working on Grafana extension:
- `grafana-toolkit plugin:test`
With grafana-toolkit we put in your hands a CLI that addresses common tasks performed when working on Grafana plugin:
- `grafana-toolkit plugin:create`
- `grafana-toolkit plugin:dev`
- `grafana-toolkit plugin:test`
- `grafana-toolkit plugin:build`

### Developing extensions
### Creating plugin
`grafana-toolkit plugin:create plugin-name`

Creates new Grafana plugin from template.

If `plugin-name` is provided, the template will be downloaded to `./plugin-name` directory. Otherwise, it will be downloaded to current directory.

### Developing plugin
`grafana-toolkit plugin:dev`

Creates development build that's easy to play with and debug using common browser tooling

@@ -63,7 +75,7 @@ Creates development build that's easy to play with and debug using common browse
Available options:
- `-w`, `--watch` - run development task in a watch mode

### Testing extensions
### Testing plugin
`grafana-toolkit plugin:test`

Runs Jest against your codebase

@@ -76,26 +88,29 @@ Available options:
- `--testPathPattern=<regex>` - runs test with paths that match provided regex (https://jestjs.io/docs/en/cli#testpathpattern-regex)

### Building extensions
### Building plugin
`grafana-toolkit plugin:build`

Creates production ready build of your extension
Creates production ready build of your plugin

## FAQ

### Which version should I use?
Please refer to [Grafana packages versioning guide](https://github.com/grafana/grafana/blob/master/packages/README.md#versioning)
### What tools does grafana-toolkit use?
grafana-toolkit comes with Typescript, TSLint, Prettier, Jest, CSS and SASS support.

### How to start using grafana-toolkit in my extension?
See [Updating your extension to use grafana-toolkit](#updating-your-extension-to-use-grafana-toolkit)
### Can I use Typescript to develop Grafana extensions?
### How to start using grafana-toolkit in my plugin?
See [Updating your plugin to use grafana-toolkit](#updating-your-plugin-to-use-grafana-toolkit)

### Can I use Typescript to develop Grafana plugins?
Yes! grafana-toolkit supports Typescript by default.

### How can I test my extension?
### How can I test my plugin?
grafana-toolkit comes with Jest as a test runner.

Internally at Grafana we use Enzyme. If you are developing React extension and you want to configure Enzyme as a testing utility, you need to configure `enzyme-adapter-react`. To do so create `<YOUR_EXTENSION>/config/jest-setup.ts` file that will provide necessary setup. Copy the following code into that file to get Enzyme working with React:
Internally at Grafana we use Enzyme. If you are developing React plugin and you want to configure Enzyme as a testing utility, you need to configure `enzyme-adapter-react`. To do so create `<YOUR_PLUGIN_DIR>/config/jest-setup.ts` file that will provide necessary setup. Copy the following code into that file to get Enzyme working with React:

```ts
import { configure } from 'enzyme';

@@ -104,7 +119,7 @@ import Adapter from 'enzyme-adapter-react-16';
configure({ adapter: new Adapter() });
```

You can also setup Jest with shims of your needs by creating `jest-shim.ts` file in the same directory: `<YOUR_EXTENSION>/config/jest-shim.ts`
You can also setup Jest with shims of your needs by creating `jest-shim.ts` file in the same directory: `<YOUR_PLUGIN_DIR_>/config/jest-shim.ts`

### Can I provide custom setup for Jest?

@@ -114,7 +129,7 @@ Currently we support following Jest config properties:
- [`snapshotSerializers`](https://jest-bot.github.io/jest/docs/configuration.html#snapshotserializers-array-string)
- [`moduleNameMapper`](https://jestjs.io/docs/en/configuration#modulenamemapper-object-string-string)

### How can I style my extension?
### How can I style my plugin?
We support pure CSS, SASS and CSS-in-JS approach (via [Emotion](https://emotion.sh/)).

#### Single CSS or SASS file

@@ -132,18 +147,18 @@ The styles will be injected via `style` tag during runtime.

If you want to provide different stylesheets for dark/light theme, create `dark.[css|scss]` and `light.[css|scss]` files in `src/styles` directory of your plugin. grafana-toolkit will generate theme specific stylesheets that will end up in `dist/styles` directory.

In order for Grafana to pickup up you theme stylesheets you need to use `loadPluginCss` from `@grafana/runtime` package. Typically you would do that in the entrypoint of your extension:
In order for Grafana to pickup up you theme stylesheets you need to use `loadPluginCss` from `@grafana/runtime` package. Typically you would do that in the entrypoint of your plugin:

```ts
import { loadPluginCss } from '@grafana/runtime';

loadPluginCss({
dark: 'plugins/<YOUR-EXTENSION-NAME>/styles/dark.css',
light: 'plugins/<YOUR-EXTENSION-NAME>/styles/light.css',
dark: 'plugins/<YOUR-PLUGIN-ID>/styles/dark.css',
light: 'plugins/<YOUR-PLUGIN-ID>/styles/light.css',
});
```

You need to add `@grafana/runtime` to your extension dependencies by running `yarn add @grafana/runtime` or `npm instal @grafana/runtime`
You need to add `@grafana/runtime` to your plugin dependencies by running `yarn add @grafana/runtime` or `npm instal @grafana/runtime`

> Note that in this case static files (png, svg, json, html) are all copied to dist directory when the plugin is bundled. Relative paths to those files does not change!

@@ -194,7 +209,7 @@ grafana-toolkit comes with [default config for TSLint](https://github.com/grafan

### How is Prettier integrated into grafana-toolkit workflow?
When building extension with [`grafana-toolkit plugin:build`](#building-extensions) task, grafana-toolkit performs Prettier check. If the check detects any Prettier issues, the build will not pass. To avoid such situation we suggest developing plugin with [`grafana-toolkit plugin:dev --watch`](#developing-extensions) task running. This task tries to fix Prettier issues automatically.
When building plugin with [`grafana-toolkit plugin:build`](#building-plugin) task, grafana-toolkit performs Prettier check. If the check detects any Prettier issues, the build will not pass. To avoid such situation we suggest developing plugin with [`grafana-toolkit plugin:dev --watch`](#developing-plugin) task running. This task tries to fix Prettier issues automatically.

### My editor does not respect Prettier config, what should I do?
In order for your editor to pickup our Prettier config you need to create `.prettierrc.js` file in the root directory of your plugin with following content:
@@ -11,4 +11,5 @@ require('ts-node').register({
transpileOnly: true
});

require('../src/cli/index.ts').run(true);
@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/toolkit",
"version": "6.4.0-pre",
"version": "6.4.0",
"description": "Grafana Toolkit",
"keywords": [
"grafana",

@@ -28,6 +28,9 @@
"dependencies": {
"@babel/core": "7.4.5",
"@babel/preset-env": "7.4.5",
"@grafana/data": "6.4.0",
"@grafana/ui": "6.4.0",
"@types/command-exists": "^1.2.0",
"@types/execa": "^0.9.0",
"@types/expect-puppeteer": "3.3.1",
"@types/inquirer": "^6.0.3",

@@ -40,12 +43,11 @@
"@types/tmp": "^0.1.0",
"@types/webpack": "4.4.34",
"aws-sdk": "^2.495.0",
"@grafana/data": "^6.4.0-alpha",
"@grafana/ui": "^6.4.0-alpha",
"axios": "0.19.0",
"babel-loader": "8.0.6",
"babel-plugin-angularjs-annotate": "0.10.0",
"chalk": "^2.4.2",
"command-exists": "^1.2.8",
"commander": "^2.20.0",
"concurrently": "4.1.0",
"copy-webpack-plugin": "5.0.3",

@@ -98,6 +100,5 @@
},
"_moduleAliases": {
"puppeteer": "node_modules/puppeteer-core"
},
"types": "src/index.ts"
}
}
@@ -21,6 +21,7 @@ import {
ciPluginReportTask,
} from './tasks/plugin.ci';
import { buildPackageTask } from './tasks/package.build';
import { pluginCreateTask } from './tasks/plugin.create';

export const run = (includeInternalScripts = false) => {
if (includeInternalScripts) {

@@ -61,6 +62,7 @@ export const run = (includeInternalScripts = false) => {
await execTask(changelogTask)({
milestone: cmd.milestone,
silent: true,
});
});

@@ -89,8 +91,7 @@ export const run = (includeInternalScripts = false) => {
.command('toolkit:build')
.description('Prepares grafana/toolkit dist package')
.action(async cmd => {
// @ts-ignore
await execTask(toolkitBuildTask)();
await execTask(toolkitBuildTask)({});
});

program

@@ -117,11 +118,18 @@ export const run = (includeInternalScripts = false) => {
});
}

program
.command('plugin:create [name]')
.description('Creates plugin from template')
.action(async cmd => {
await execTask(pluginCreateTask)({ name: cmd, silent: true });
});

program
.command('plugin:build')
.description('Prepares plugin dist package')
.action(async cmd => {
await execTask(pluginBuildTask)({ coverage: false });
await execTask(pluginBuildTask)({ coverage: false, silent: true });
});

program

@@ -133,6 +141,7 @@ export const run = (includeInternalScripts = false) => {
await execTask(pluginDevTask)({
watch: !!cmd.watch,
yarnlink: !!cmd.yarnlink,
silent: true,
});
});

@@ -151,6 +160,7 @@ export const run = (includeInternalScripts = false) => {
watch: !!cmd.watch,
testPathPattern: cmd.testPathPattern,
testNamePattern: cmd.testNamePattern,
silent: true,
});
});
@@ -2,67 +2,84 @@
import * as _ from 'lodash';
import { Task, TaskRunner } from './task';
import GithubClient from '../utils/githubClient';
import difference from 'lodash/difference';
import chalk from 'chalk';
import { useSpinner } from '../utils/useSpinner';

interface ChangelogOptions {
milestone: string;
}

const changelogTaskRunner: TaskRunner<ChangelogOptions> = async ({ milestone }) => {
const githubClient = new GithubClient();
const client = githubClient.client;
const filterBugs = (item: any) => {
if (item.title.match(/fix|fixes/i)) {
return true;
}
if (item.labels.find((label: any) => label.name === 'type/bug')) {
return true;
}
return false;
};

if (!/^\d+$/.test(milestone)) {
console.log('Use milestone number not title, find number in milestone url');
return;
const getPackageChangelog = (packageName: string, issues: any[]) => {
if (issues.length === 0) {
return '';
}

const res = await client.get('/issues', {
params: {
state: 'closed',
per_page: 100,
labels: 'add to changelog',
milestone: milestone,
},
});

const issues = res.data;

const bugs = _.sortBy(
issues.filter((item: any) => {
if (item.title.match(/fix|fixes/i)) {
return true;
}
if (item.labels.find((label: any) => label.name === 'type/bug')) {
return true;
}
return false;
}),
'title'
);

const notBugs = _.sortBy(issues.filter((item: any) => !bugs.find((bug: any) => bug === item)), 'title');

let markdown = '';
let markdown = chalk.bold.yellow(`\n\n/*** ${packageName} changelog ***/\n\n`);
const bugs = _.sortBy(issues.filter(filterBugs), 'title');
const notBugs = _.sortBy(difference(issues, bugs), 'title');

if (notBugs.length > 0) {
markdown = '### Features / Enhancements\n';
}

for (const item of notBugs) {
markdown += getMarkdownLineForIssue(item);
markdown += '### Features / Enhancements\n';
for (const item of notBugs) {
markdown += getMarkdownLineForIssue(item);
}
}

if (bugs.length > 0) {
markdown += '\n### Bug Fixes\n';
for (const item of bugs) {
markdown += getMarkdownLineForIssue(item);
}
}

for (const item of bugs) {
markdown += getMarkdownLineForIssue(item);
}

console.log(markdown);
return markdown;
};

const changelogTaskRunner: TaskRunner<ChangelogOptions> = useSpinner<ChangelogOptions>(
'Generating changelog',
async ({ milestone }) => {
const githubClient = new GithubClient();
const client = githubClient.client;

if (!/^\d+$/.test(milestone)) {
console.log('Use milestone number not title, find number in milestone url');
return;
}

const res = await client.get('/issues', {
params: {
state: 'closed',
per_page: 100,
labels: 'add to changelog',
milestone: milestone,
},
});

const issues = res.data;
const toolkitIssues = issues.filter((item: any) =>
item.labels.find((label: any) => label.name === 'area/grafana/toolkit')
);

let markdown = '';

markdown += getPackageChangelog('Grafana', issues);
markdown += getPackageChangelog('grafana-toolkit', toolkitIssues);

console.log(markdown);
}
);

function getMarkdownLineForIssue(item: any) {
const githubGrafanaUrl = 'https://github.com/grafana/grafana';
let markdown = '';
@@ -10,7 +10,10 @@ const cherryPickRunner: TaskRunner<CherryPickOptions> = async () => {
const res = await client.get('/issues', {
params: {
state: 'closed',
per_page: 100,
labels: 'cherry-pick needed',
sort: 'closed',
direction: 'asc',
},
});
47 packages/grafana-toolkit/src/cli/tasks/plugin.create.ts (new file)
@@ -0,0 +1,47 @@
import { prompt } from 'inquirer';
import path from 'path';

import { Task, TaskRunner } from './task';
import { promptConfirm } from '../utils/prompt';
import {
getPluginIdFromName,
verifyGitExists,
promptPluginType,
fetchTemplate,
promptPluginDetails,
formatPluginDetails,
prepareJsonFiles,
removeGitFiles,
} from './plugin/create';

interface PluginCreateOptions {
name?: string;
}

const pluginCreateRunner: TaskRunner<PluginCreateOptions> = async ({ name }) => {
const destPath = path.resolve(process.cwd(), getPluginIdFromName(name || ''));
let pluginDetails;

// 1. Verifying if git exists in user's env as templates are cloned from git templates
await verifyGitExists();

// 2. Prompt plugin template
const { type } = await promptPluginType();

// 3. Fetch plugin template from Github
await fetchTemplate({ type, dest: destPath });

// 4. Prompt plugin details
do {
pluginDetails = await promptPluginDetails(name);
formatPluginDetails(pluginDetails);
} while ((await prompt<{ confirm: boolean }>(promptConfirm('confirm', 'Is that ok?'))).confirm === false);

// 5. Update json files (package.json, src/plugin.json)
await prepareJsonFiles({ pluginDetails, pluginPath: destPath });

// 6. Remove cloned repository .git dir
await removeGitFiles(destPath);
};

export const pluginCreateTask = new Task<PluginCreateOptions>('plugin:create task', pluginCreateRunner);
150 packages/grafana-toolkit/src/cli/tasks/plugin/create.ts (new file)
@@ -0,0 +1,150 @@
import commandExists from 'command-exists';
import { readFileSync, promises as fs } from 'fs';
import { prompt } from 'inquirer';
import kebabCase from 'lodash/kebabCase';
import path from 'path';
import gitPromise from 'simple-git/promise';

import { useSpinner } from '../../utils/useSpinner';
import { rmdir } from '../../utils/rmdir';
import { promptInput, promptConfirm } from '../../utils/prompt';
import chalk from 'chalk';

const simpleGit = gitPromise(process.cwd());

interface PluginDetails {
name: string;
org: string;
description: string;
author: boolean | string;
url: string;
keywords: string;
}

type PluginType = 'angular-panel' | 'react-panel' | 'datasource-plugin';

const RepositoriesPaths = {
'angular-panel': 'git@github.com:grafana/simple-angular-panel.git',
'react-panel': 'git@github.com:grafana/simple-react-panel.git',
'datasource-plugin': 'git@github.com:grafana/simple-datasource.git',
};

export const getGitUsername = async () => await simpleGit.raw(['config', '--global', 'user.name']);
export const getPluginIdFromName = (name: string) => kebabCase(name);
export const getPluginId = (pluginDetails: PluginDetails) =>
`${kebabCase(pluginDetails.org)}-${getPluginIdFromName(pluginDetails.name)}`;

export const getPluginKeywords = (pluginDetails: PluginDetails) =>
pluginDetails.keywords
.split(',')
.map(k => k.trim())
.filter(k => k !== '');

export const verifyGitExists = async () => {
return new Promise((resolve, reject) => {
commandExists('git', (err, exists) => {
if (exists) {
resolve(true);
}
reject(new Error('git is not installed'));
});
});
};

export const promptPluginType = async () =>
prompt<{ type: PluginType }>([
{
type: 'list',
message: 'Select plugin type',
name: 'type',
choices: [
{ name: 'Angular panel', value: 'angular-panel' },
{ name: 'React panel', value: 'react-panel' },
{ name: 'Datasource plugin', value: 'datasource-plugin' },
],
},
]);

export const promptPluginDetails = async (name?: string) => {
const username = (await getGitUsername()).trim();
const responses = await prompt<PluginDetails>([
promptInput('name', 'Plugin name', true, name),
promptInput('org', 'Organization (used as part of plugin ID)', true),
promptInput('description', 'Description'),
promptInput('keywords', 'Keywords (separated by comma)'),
// Try using git specified username
promptConfirm('author', `Author (${username})`, username, username !== ''),
// Prompt for manual author entry if no git user.name specifed
promptInput('author', `Author`, true, undefined, answers => !answers.author || username === ''),
promptInput('url', 'Your URL (i.e. organisation url)'),
]);

return {
...responses,
author: responses.author === true ? username : responses.author,
};
};

export const fetchTemplate = useSpinner<{ type: PluginType; dest: string }>(
'Fetching plugin template...',
async ({ type, dest }) => {
const url = RepositoriesPaths[type];
if (!url) {
throw new Error('Unknown plugin type');
}

await simpleGit.clone(url, dest);
}
);

export const prepareJsonFiles = useSpinner<{ pluginDetails: PluginDetails; pluginPath: string }>(
'Saving package.json and plugin.json files',
async ({ pluginDetails, pluginPath }) => {
const packageJsonPath = path.resolve(pluginPath, 'package.json');
const pluginJsonPath = path.resolve(pluginPath, 'src/plugin.json');
const packageJson: any = JSON.parse(readFileSync(packageJsonPath, 'utf8'));
const pluginJson: any = JSON.parse(readFileSync(pluginJsonPath, 'utf8'));

const pluginId = `${kebabCase(pluginDetails.org)}-${getPluginIdFromName(pluginDetails.name)}`;
packageJson.name = pluginId;
packageJson.author = pluginDetails.author;
packageJson.description = pluginDetails.description;

pluginJson.name = pluginDetails.name;
pluginJson.id = pluginId;
pluginJson.info = {
...pluginJson.info,
description: pluginDetails.description,
author: {
name: pluginDetails.author,
url: pluginDetails.url,
},
keywords: getPluginKeywords(pluginDetails),
};

await Promise.all(
[packageJson, pluginJson].map((f, i) => {
const filePath = i === 0 ? packageJsonPath : pluginJsonPath;
return fs.writeFile(filePath, JSON.stringify(f, null, 2));
})
);
}
);

export const removeGitFiles = useSpinner('Cleaning', async pluginPath => rmdir(`${path.resolve(pluginPath, '.git')}`));

export const formatPluginDetails = (details: PluginDetails) => {
console.group();
console.log();
console.log(chalk.bold.yellow('Your plugin details'));
console.log('---');
console.log(chalk.bold('Name: '), details.name);
console.log(chalk.bold('ID: '), getPluginId(details));
console.log(chalk.bold('Description: '), details.description);
console.log(chalk.bold('Keywords: '), getPluginKeywords(details));
console.log(chalk.bold('Author: '), details.author);
console.log(chalk.bold('Organisation: '), details.org);
console.log(chalk.bold('Website: '), details.url);
console.log();
console.groupEnd();
};
@@ -3,7 +3,6 @@ import * as fs from 'fs';
import chalk from 'chalk';
import { useSpinner } from '../utils/useSpinner';
import { Task, TaskRunner } from './task';
import escapeRegExp from 'lodash/escapeRegExp';

const path = require('path');

@@ -105,7 +104,9 @@ const copySassFiles = () => {
})();
};

const toolkitBuildTaskRunner: TaskRunner<void> = async () => {
interface ToolkitBuildOptions {}

const toolkitBuildTaskRunner: TaskRunner<ToolkitBuildOptions> = async () => {
cwd = path.resolve(__dirname, '../../../');
distDir = `${cwd}/dist`;
const pkg = require(`${cwd}/package.json`);

@@ -118,21 +119,6 @@ const toolkitBuildTaskRunner: TaskRunner<void> = async () => {
fs.mkdirSync('./dist/sass');
await copyFiles();
await copySassFiles();

// RYAN HACK HACK HACK
// when Dominik is back from vacation, we can find a better way
// This moves the index to the root so plugin e2e tests can import them
console.warn('hacking an index.js file for toolkit. Help!');
const index = `${distDir}/src/index.js`;
fs.readFile(index, 'utf8', (err, data) => {
const pattern = 'require("./';
const js = data.replace(new RegExp(escapeRegExp(pattern), 'g'), 'require("./src/');
fs.writeFile(`${distDir}/index.js`, js, err => {
if (err) {
throw new Error('Error writing index: ' + err);
}
});
});
};

export const toolkitBuildTask = new Task<void>('@grafana/toolkit build', toolkitBuildTaskRunner);
export const toolkitBuildTask = new Task<ToolkitBuildOptions>('@grafana/toolkit build', toolkitBuildTaskRunner);
@@ -1,8 +1,15 @@
import { Task } from '../tasks/task';
import chalk from 'chalk';

export const execTask = <TOptions>(task: Task<TOptions>) => async (options: TOptions) => {
console.log(chalk.yellow(`Running ${chalk.bold(task.name)} task`));
interface TaskBasicOptions {
// Don't print task details when running
silent?: boolean;
}

export const execTask = <TOptions>(task: Task<TOptions>) => async (options: TOptions & TaskBasicOptions) => {
if (!options.silent) {
console.log(chalk.yellow(`Running ${chalk.bold(task.name)} task`));
}
task.setOptions(options);
try {
console.group();
58
packages/grafana-toolkit/src/cli/utils/prompt.ts
Normal file
58
packages/grafana-toolkit/src/cli/utils/prompt.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import {
|
||||
Question,
|
||||
InputQuestion,
|
||||
CheckboxQuestion,
|
||||
NumberQuestion,
|
||||
PasswordQuestion,
|
||||
EditorQuestion,
|
||||
ConfirmQuestion,
|
||||
} from 'inquirer';
|
||||
|
||||
type QuestionWithValidation<A = any> =
|
||||
| InputQuestion<A>
|
||||
| CheckboxQuestion<A>
|
||||
| NumberQuestion<A>
|
||||
| PasswordQuestion<A>
|
||||
| EditorQuestion<A>;
|
||||
|
||||
export const answerRequired = (question: QuestionWithValidation): Question<any> => {
|
||||
return {
|
||||
...question,
|
||||
validate: (answer: any) => answer.trim() !== '' || `${question.name} is required`,
|
||||
};
|
||||
};
|
||||
|
||||
export const promptInput = <A>(
|
||||
name: string,
|
||||
message: string | ((answers: A) => string),
|
||||
required = false,
|
||||
def: any = undefined,
|
||||
when: boolean | ((answers: A) => boolean | Promise<boolean>) = true
|
||||
) => {
|
||||
const model: InputQuestion<A> = {
|
||||
type: 'input',
|
||||
name,
|
||||
message,
|
||||
default: def,
|
||||
when,
|
||||
};
|
||||
|
||||
return required ? answerRequired(model) : model;
|
||||
};
|
||||
|
||||
export const promptConfirm = <A>(
|
||||
name: string,
|
||||
message: string | ((answers: A) => string),
|
||||
def: any = undefined,
|
||||
when: boolean | ((answers: A) => boolean | Promise<boolean>) = true
|
||||
) => {
|
||||
const model: ConfirmQuestion<A> = {
|
||||
type: 'confirm',
|
||||
name,
|
||||
message,
|
||||
default: def,
|
||||
when,
|
||||
};
|
||||
|
||||
return model;
|
||||
};
|
||||
23
packages/grafana-toolkit/src/cli/utils/rmdir.ts
Normal file
23
packages/grafana-toolkit/src/cli/utils/rmdir.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import fs = require('fs');
|
||||
import path = require('path');
|
||||
|
||||
/**
|
||||
* Remove directory recursively
|
||||
* Ref https://stackoverflow.com/a/42505874
|
||||
*/
|
||||
export const rmdir = (dirPath: string) => {
|
||||
if (!fs.existsSync(dirPath)) {
|
||||
return;
|
||||
}
|
||||
|
||||
fs.readdirSync(dirPath).forEach(entry => {
|
||||
const entryPath = path.join(dirPath, entry);
|
||||
if (fs.lstatSync(entryPath).isDirectory()) {
|
||||
rmdir(entryPath);
|
||||
} else {
|
||||
fs.unlinkSync(entryPath);
|
||||
}
|
||||
});
|
||||
|
||||
fs.rmdirSync(dirPath);
|
||||
};
|
||||
@@ -2,7 +2,7 @@ import ora from 'ora';
|
||||
|
||||
type FnToSpin<T> = (options: T) => Promise<void>;
|
||||
|
||||
export const useSpinner = <T>(spinnerLabel: string, fn: FnToSpin<T>, killProcess = true) => {
|
||||
export const useSpinner = <T = any>(spinnerLabel: string, fn: FnToSpin<T>, killProcess = true) => {
|
||||
return async (options: T) => {
|
||||
const spinner = ora(spinnerLabel);
|
||||
spinner.start();
|
||||
|
||||
@@ -149,7 +149,7 @@ export const getWebpackConfig: WebpackConfigurationGetter = options => {
|
||||
'emotion',
|
||||
'prismjs',
|
||||
'slate-plain-serializer',
|
||||
'slate-react',
|
||||
'@grafana/slate-react',
|
||||
'react',
|
||||
'react-dom',
|
||||
'react-redux',
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
export * from './e2e';
|
||||
|
||||
// Namespace for Plugins
|
||||
import * as plugins from './plugins';
|
||||
|
||||
export { plugins };
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Browser, Page } from 'puppeteer-core';
|
||||
|
||||
import { e2eScenario, takeScreenShot, plugins, pages } from '@grafana/toolkit';
|
||||
import { e2eScenario, takeScreenShot, pages } from '@grafana/toolkit/src/e2e';
|
||||
import { getEndToEndSettings } from '@grafana/toolkit/src/plugins';
|
||||
|
||||
// ****************************************************************
|
||||
// NOTE, This file is copied to plugins at runtime, it is not run locally
|
||||
@@ -11,7 +12,7 @@ const sleep = (milliseconds: number) => {
|
||||
};
|
||||
|
||||
e2eScenario('Common Plugin Test', 'should pass', async (browser: Browser, page: Page) => {
|
||||
const settings = plugins.getEndToEndSettings();
|
||||
const settings = getEndToEndSettings();
|
||||
const pluginPage = pages.getPluginPage(settings.plugin.id);
|
||||
await pluginPage.init(page);
|
||||
await pluginPage.navigateTo();
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"rootDirs": ["."],
|
||||
"outDir": "dist/src",
|
||||
"declaration": true,
|
||||
"declarationDir": "dist",
|
||||
"declarationDir": "dist/src",
|
||||
"typeRoots": ["./node_modules/@types"],
|
||||
"esModuleInterop": true,
|
||||
"lib": ["es2015", "es2017.string", "dom"]
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/ui",
|
||||
"version": "6.4.0-pre",
|
||||
"version": "6.4.0",
|
||||
"description": "Grafana Components Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -25,11 +25,13 @@
|
||||
"build": "grafana-toolkit package:build --scope=ui"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grafana/data": "^6.4.0-alpha",
|
||||
"@grafana/data": "6.4.0",
|
||||
"@grafana/slate-react": "0.22.9-grafana",
|
||||
"@torkelo/react-select": "2.1.1",
|
||||
"@types/react-color": "2.17.0",
|
||||
"classnames": "2.2.6",
|
||||
"d3": "5.9.1",
|
||||
"immutable": "3.8.2",
|
||||
"jquery": "3.4.1",
|
||||
"lodash": "4.17.15",
|
||||
"moment": "2.24.0",
|
||||
@@ -45,6 +47,7 @@
|
||||
"react-storybook-addon-props-combinations": "1.1.0",
|
||||
"react-transition-group": "2.6.1",
|
||||
"react-virtualized": "9.21.0",
|
||||
"slate": "0.47.8",
|
||||
"tinycolor2": "1.4.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -65,6 +68,8 @@
|
||||
"@types/react-custom-scrollbars": "4.0.5",
|
||||
"@types/react-test-renderer": "16.8.1",
|
||||
"@types/react-transition-group": "2.0.16",
|
||||
"@types/slate": "0.47.1",
|
||||
"@types/slate-react": "0.22.5",
|
||||
"@types/storybook__addon-actions": "3.4.2",
|
||||
"@types/storybook__addon-info": "4.1.1",
|
||||
"@types/storybook__addon-knobs": "4.0.4",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import resolve from 'rollup-plugin-node-resolve';
|
||||
import commonjs from 'rollup-plugin-commonjs';
|
||||
import sourceMaps from 'rollup-plugin-sourcemaps';
|
||||
// import sourceMaps from 'rollup-plugin-sourcemaps';
|
||||
import { terser } from 'rollup-plugin-terser';
|
||||
|
||||
const pkg = require('./package.json');
|
||||
@@ -47,19 +47,20 @@ const buildCjsPackage = ({ env }) => {
|
||||
],
|
||||
'../../node_modules/react-color/lib/components/common': ['Saturation', 'Hue', 'Alpha'],
|
||||
'../../node_modules/immutable/dist/immutable.js': [
|
||||
'Record',
|
||||
'Set',
|
||||
'Map',
|
||||
'List',
|
||||
'OrderedSet',
|
||||
'is',
|
||||
'Stack',
|
||||
'Record',
|
||||
],
|
||||
'node_modules/immutable/dist/immutable.js': ['Record', 'Set', 'Map', 'List', 'OrderedSet', 'is', 'Stack'],
|
||||
'../../node_modules/esrever/esrever.js': ['reverse'],
|
||||
},
|
||||
}),
|
||||
resolve(),
|
||||
sourceMaps(),
|
||||
// sourceMaps(),
|
||||
env === 'production' && terser(),
|
||||
],
|
||||
};
|
||||
|
||||
@@ -19,7 +19,9 @@ export interface CommonButtonProps {
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export interface LinkButtonProps extends CommonButtonProps, AnchorHTMLAttributes<HTMLAnchorElement> {}
|
||||
export interface LinkButtonProps extends CommonButtonProps, AnchorHTMLAttributes<HTMLAnchorElement> {
|
||||
disabled?: boolean;
|
||||
}
|
||||
export interface ButtonProps extends CommonButtonProps, ButtonHTMLAttributes<HTMLButtonElement> {}
|
||||
|
||||
interface AbstractButtonProps extends CommonButtonProps, Themeable {
|
||||
|
||||
@@ -11,7 +11,7 @@ interface DataLinkEditorProps {
|
||||
isLast: boolean;
|
||||
value: DataLink;
|
||||
suggestions: VariableSuggestion[];
|
||||
onChange: (index: number, link: DataLink) => void;
|
||||
onChange: (index: number, link: DataLink, callback?: () => void) => void;
|
||||
onRemove: (link: DataLink) => void;
|
||||
}
|
||||
|
||||
@@ -20,8 +20,8 @@ export const DataLinkEditor: React.FC<DataLinkEditorProps> = React.memo(
|
||||
const theme = useContext(ThemeContext);
|
||||
const [title, setTitle] = useState(value.title);
|
||||
|
||||
const onUrlChange = (url: string) => {
|
||||
onChange(index, { ...value, url });
|
||||
const onUrlChange = (url: string, callback?: () => void) => {
|
||||
onChange(index, { ...value, url }, callback);
|
||||
};
|
||||
const onTitleChange = (event: ChangeEvent<HTMLInputElement>) => {
|
||||
setTitle(event.target.value);
|
||||
|
||||
@@ -1,46 +1,39 @@
|
||||
import React, { useState, useMemo, useCallback, useContext } from 'react';
|
||||
import React, { useState, useMemo, useCallback, useContext, useRef, RefObject } from 'react';
|
||||
import { VariableSuggestion, VariableOrigin, DataLinkSuggestions } from './DataLinkSuggestions';
|
||||
import { makeValue, ThemeContext, DataLinkBuiltInVars } from '../../index';
|
||||
import { ThemeContext, DataLinkBuiltInVars, makeValue } from '../../index';
|
||||
import { SelectionReference } from './SelectionReference';
|
||||
import { Portal } from '../index';
|
||||
// @ts-ignore
|
||||
import { Editor } from 'slate-react';
|
||||
// @ts-ignore
|
||||
import { Value, Change, Document } from 'slate';
|
||||
// @ts-ignore
|
||||
|
||||
import { Editor } from '@grafana/slate-react';
|
||||
import { Value, Editor as CoreEditor } from 'slate';
|
||||
import Plain from 'slate-plain-serializer';
|
||||
import { Popper as ReactPopper } from 'react-popper';
|
||||
import useDebounce from 'react-use/lib/useDebounce';
|
||||
import { css, cx } from 'emotion';
|
||||
// @ts-ignore
|
||||
import PluginPrism from 'slate-prism';
|
||||
|
||||
import { SlatePrism } from '../../slate-plugins';
|
||||
import { SCHEMA } from '../../utils/slate';
|
||||
|
||||
const modulo = (a: number, n: number) => a - n * Math.floor(a / n);
|
||||
|
||||
interface DataLinkInputProps {
|
||||
value: string;
|
||||
onChange: (url: string) => void;
|
||||
onChange: (url: string, callback?: () => void) => void;
|
||||
suggestions: VariableSuggestion[];
|
||||
}
|
||||
|
||||
const plugins = [
|
||||
PluginPrism({
|
||||
SlatePrism({
|
||||
onlyIn: (node: any) => node.type === 'code_block',
|
||||
getSyntax: () => 'links',
|
||||
}),
|
||||
];
|
||||
|
||||
export const DataLinkInput: React.FC<DataLinkInputProps> = ({ value, onChange, suggestions }) => {
|
||||
const editorRef = useRef<Editor>() as RefObject<Editor>;
|
||||
const theme = useContext(ThemeContext);
|
||||
const [showingSuggestions, setShowingSuggestions] = useState(false);
|
||||
const [suggestionsIndex, setSuggestionsIndex] = useState(0);
|
||||
const [usedSuggestions, setUsedSuggestions] = useState(
|
||||
suggestions.filter(suggestion => {
|
||||
return value.indexOf(suggestion.value) > -1;
|
||||
})
|
||||
);
|
||||
// Using any here as TS has problem pickung up `change` method existance on Value
|
||||
// According to code and documentation `change` is an instance method on Value in slate 0.33.8 that we use
|
||||
// https://github.com/ianstormtaylor/slate/blob/slate%400.33.8/docs/reference/slate/value.md#change
|
||||
const [linkUrl, setLinkUrl] = useState<any>(makeValue(value));
|
||||
const [linkUrl, setLinkUrl] = useState<Value>(makeValue(value));
|
||||
|
||||
const getStyles = useCallback(() => {
|
||||
return {
|
||||
@@ -55,99 +48,67 @@ export const DataLinkInput: React.FC<DataLinkInputProps> = ({ value, onChange, s
|
||||
};
|
||||
}, [theme]);
|
||||
|
||||
const currentSuggestions = useMemo(
|
||||
() =>
|
||||
suggestions.filter(suggestion => {
|
||||
return usedSuggestions.map(s => s.value).indexOf(suggestion.value) === -1;
|
||||
}),
|
||||
[usedSuggestions, suggestions]
|
||||
);
|
||||
// Workaround for https://github.com/ianstormtaylor/slate/issues/2927
|
||||
const stateRef = useRef({ showingSuggestions, suggestions, suggestionsIndex, linkUrl, onChange });
|
||||
stateRef.current = { showingSuggestions, suggestions, suggestionsIndex, linkUrl, onChange };
|
||||
|
||||
// SelectionReference is used to position the variables suggestion relatively to current DOM selection
|
||||
const selectionRef = useMemo(() => new SelectionReference(), [setShowingSuggestions]);
|
||||
const selectionRef = useMemo(() => new SelectionReference(), [setShowingSuggestions, linkUrl]);
|
||||
|
||||
// Keep track of variables that has been used already
|
||||
const updateUsedSuggestions = () => {
|
||||
const currentLink = Plain.serialize(linkUrl);
|
||||
const next = usedSuggestions.filter(suggestion => {
|
||||
return currentLink.indexOf(suggestion.value) > -1;
|
||||
});
|
||||
if (next.length !== usedSuggestions.length) {
|
||||
setUsedSuggestions(next);
|
||||
}
|
||||
};
|
||||
|
||||
useDebounce(updateUsedSuggestions, 250, [linkUrl]);
|
||||
|
||||
const onKeyDown = (event: KeyboardEvent) => {
|
||||
if (event.key === 'Backspace' || event.key === 'Escape') {
|
||||
setShowingSuggestions(false);
|
||||
setSuggestionsIndex(0);
|
||||
}
|
||||
|
||||
if (event.key === 'Enter') {
|
||||
if (showingSuggestions) {
|
||||
onVariableSelect(currentSuggestions[suggestionsIndex]);
|
||||
const onKeyDown = React.useCallback((event: KeyboardEvent, next: () => any) => {
|
||||
if (!stateRef.current.showingSuggestions) {
|
||||
if (event.key === '=' || event.key === '$' || (event.keyCode === 32 && event.ctrlKey)) {
|
||||
return setShowingSuggestions(true);
|
||||
}
|
||||
return next();
|
||||
}
|
||||
|
||||
if (showingSuggestions) {
|
||||
if (event.key === 'ArrowDown') {
|
||||
switch (event.key) {
|
||||
case 'Backspace':
|
||||
case 'Escape':
|
||||
setShowingSuggestions(false);
|
||||
return setSuggestionsIndex(0);
|
||||
|
||||
case 'Enter':
|
||||
event.preventDefault();
|
||||
setSuggestionsIndex(index => {
|
||||
return (index + 1) % currentSuggestions.length;
|
||||
});
|
||||
}
|
||||
if (event.key === 'ArrowUp') {
|
||||
return onVariableSelect(stateRef.current.suggestions[stateRef.current.suggestionsIndex]);
|
||||
|
||||
case 'ArrowDown':
|
||||
case 'ArrowUp':
|
||||
event.preventDefault();
|
||||
setSuggestionsIndex(index => {
|
||||
const nextIndex = index - 1 < 0 ? currentSuggestions.length - 1 : (index - 1) % currentSuggestions.length;
|
||||
return nextIndex;
|
||||
});
|
||||
}
|
||||
const direction = event.key === 'ArrowDown' ? 1 : -1;
|
||||
return setSuggestionsIndex(index => modulo(index + direction, stateRef.current.suggestions.length));
|
||||
default:
|
||||
return next();
|
||||
}
|
||||
}, []);
|
||||
|
||||
if (event.key === '?' || event.key === '&' || event.key === '$' || (event.keyCode === 32 && event.ctrlKey)) {
|
||||
setShowingSuggestions(true);
|
||||
}
|
||||
|
||||
if (event.key === 'Enter' && showingSuggestions) {
|
||||
// Preventing entering a new line
|
||||
// As of https://github.com/ianstormtaylor/slate/issues/1345#issuecomment-340508289
|
||||
return false;
|
||||
} else {
|
||||
// @ts-ignore
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
const onUrlChange = ({ value }: Change) => {
|
||||
const onUrlChange = React.useCallback(({ value }: { value: Value }) => {
|
||||
setLinkUrl(value);
|
||||
};
|
||||
}, []);
|
||||
|
||||
const onUrlBlur = () => {
|
||||
onChange(Plain.serialize(linkUrl));
|
||||
};
|
||||
|
||||
const onVariableSelect = (item: VariableSuggestion) => {
|
||||
const includeDollarSign = Plain.serialize(linkUrl).slice(-1) !== '$';
|
||||
|
||||
const change = linkUrl.change();
|
||||
const onUrlBlur = React.useCallback((event: Event, editor: CoreEditor, next: () => any) => {
|
||||
// Callback needed for blur to work correctly
|
||||
stateRef.current.onChange(Plain.serialize(stateRef.current.linkUrl), () => {
|
||||
editorRef.current!.blur();
|
||||
});
|
||||
}, []);
|
||||
|
||||
const onVariableSelect = (item: VariableSuggestion, editor = editorRef.current!) => {
|
||||
const includeDollarSign = Plain.serialize(editor.value).slice(-1) !== '$';
|
||||
if (item.origin !== VariableOrigin.Template || item.value === DataLinkBuiltInVars.includeVars) {
|
||||
change.insertText(`${includeDollarSign ? '$' : ''}\{${item.value}}`);
|
||||
editor.insertText(`${includeDollarSign ? '$' : ''}\{${item.value}}`);
|
||||
} else {
|
||||
change.insertText(`var-${item.value}=$\{${item.value}}`);
|
||||
editor.insertText(`var-${item.value}=$\{${item.value}}`);
|
||||
}
|
||||
|
||||
setLinkUrl(change.value);
|
||||
setLinkUrl(editor.value);
|
||||
setShowingSuggestions(false);
|
||||
setUsedSuggestions((previous: VariableSuggestion[]) => {
|
||||
return [...previous, item];
|
||||
});
|
||||
|
||||
setSuggestionsIndex(0);
|
||||
onChange(Plain.serialize(change.value));
|
||||
onChange(Plain.serialize(editor.value));
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cx(
|
||||
@@ -163,7 +124,7 @@ export const DataLinkInput: React.FC<DataLinkInputProps> = ({ value, onChange, s
|
||||
<Portal>
|
||||
<ReactPopper
|
||||
referenceElement={selectionRef}
|
||||
placement="auto-end"
|
||||
placement="top-end"
|
||||
modifiers={{
|
||||
preventOverflow: { enabled: true, boundariesElement: 'window' },
|
||||
arrow: { enabled: false },
|
||||
@@ -174,7 +135,7 @@ export const DataLinkInput: React.FC<DataLinkInputProps> = ({ value, onChange, s
|
||||
return (
|
||||
<div ref={ref} style={style} data-placement={placement}>
|
||||
<DataLinkSuggestions
|
||||
suggestions={currentSuggestions}
|
||||
suggestions={stateRef.current.suggestions}
|
||||
onSuggestionSelect={onVariableSelect}
|
||||
onClose={() => setShowingSuggestions(false)}
|
||||
activeIndex={suggestionsIndex}
|
||||
@@ -186,11 +147,13 @@ export const DataLinkInput: React.FC<DataLinkInputProps> = ({ value, onChange, s
|
||||
</Portal>
|
||||
)}
|
||||
<Editor
|
||||
schema={SCHEMA}
|
||||
ref={editorRef}
|
||||
placeholder="http://your-grafana.com/d/000000010/annotations"
|
||||
value={linkUrl}
|
||||
value={stateRef.current.linkUrl}
|
||||
onChange={onUrlChange}
|
||||
onBlur={onUrlBlur}
|
||||
onKeyDown={onKeyDown}
|
||||
onKeyDown={(event, _editor, next) => onKeyDown(event as KeyboardEvent, next)}
|
||||
plugins={plugins}
|
||||
className={getStyles().editor}
|
||||
/>
|
||||
|
||||
@@ -12,7 +12,7 @@ import { VariableSuggestion } from './DataLinkSuggestions';
|
||||
|
||||
interface DataLinksEditorProps {
|
||||
value: DataLink[];
|
||||
onChange: (links: DataLink[]) => void;
|
||||
onChange: (links: DataLink[], callback?: () => void) => void;
|
||||
suggestions: VariableSuggestion[];
|
||||
maxLinks?: number;
|
||||
}
|
||||
@@ -30,14 +30,15 @@ export const DataLinksEditor: FC<DataLinksEditorProps> = React.memo(({ value, on
|
||||
onChange(value ? [...value, { url: '', title: '' }] : [{ url: '', title: '' }]);
|
||||
};
|
||||
|
||||
const onLinkChanged = (linkIndex: number, newLink: DataLink) => {
|
||||
const onLinkChanged = (linkIndex: number, newLink: DataLink, callback?: () => void) => {
|
||||
onChange(
|
||||
value.map((item, listIndex) => {
|
||||
if (linkIndex === listIndex) {
|
||||
return newLink;
|
||||
}
|
||||
return item;
|
||||
})
|
||||
}),
|
||||
callback
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -83,9 +83,9 @@ $select-input-bg-disabled: $input-bg-disabled;
|
||||
.gf-form-select-box__multi-value__remove {
|
||||
text-align: center;
|
||||
display: inline-block;
|
||||
height: 14px;
|
||||
vertical-align: middle;
|
||||
margin-left: 2px;
|
||||
position: relative;
|
||||
top: 3px;
|
||||
}
|
||||
|
||||
.gf-form-select-box__multi-value__label {
|
||||
@@ -111,6 +111,10 @@ $select-input-bg-disabled: $input-bg-disabled;
|
||||
}
|
||||
}
|
||||
|
||||
.gf-form-select-box__placeholder {
|
||||
color: $input-color-placeholder;
|
||||
}
|
||||
|
||||
.gf-form-select-box__control--is-focused .gf-form-select-box__placeholder {
|
||||
display: none;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
import React from 'react';
|
||||
import { storiesOf } from '@storybook/react';
|
||||
import { action } from '@storybook/addon-actions';
|
||||
|
||||
import { TimeZonePicker } from './TimeZonePicker';
|
||||
import { UseState } from '../../utils/storybook/UseState';
|
||||
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
|
||||
|
||||
const TimeZonePickerStories = storiesOf('UI/TimeZonePicker', module);
|
||||
|
||||
TimeZonePickerStories.addDecorator(withCenteredStory);
|
||||
|
||||
TimeZonePickerStories.add('default', () => {
|
||||
return (
|
||||
<UseState
|
||||
initialState={{
|
||||
value: 'europe/stockholm',
|
||||
}}
|
||||
>
|
||||
{(value, updateValue) => {
|
||||
return (
|
||||
<TimeZonePicker
|
||||
value={value.value}
|
||||
onChange={newValue => {
|
||||
action('on selected')(newValue);
|
||||
updateValue({ value: newValue });
|
||||
}}
|
||||
width={20}
|
||||
/>
|
||||
);
|
||||
}}
|
||||
</UseState>
|
||||
);
|
||||
});
|
||||
@@ -0,0 +1,41 @@
|
||||
import React, { FC } from 'react';
|
||||
import { getTimeZoneGroups, SelectableValue } from '@grafana/data';
|
||||
import { Select } from '..';
|
||||
|
||||
interface Props {
|
||||
value: string;
|
||||
width?: number;
|
||||
|
||||
onChange: (newValue: string) => void;
|
||||
}
|
||||
|
||||
export const TimeZonePicker: FC<Props> = ({ onChange, value, width }) => {
|
||||
const timeZoneGroups = getTimeZoneGroups();
|
||||
|
||||
const groupOptions = timeZoneGroups.map(group => {
|
||||
const options = group.options.map(timeZone => {
|
||||
return {
|
||||
label: timeZone,
|
||||
value: timeZone,
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
label: group.label,
|
||||
options,
|
||||
};
|
||||
});
|
||||
|
||||
const selectedValue = groupOptions.map(group => {
|
||||
return group.options.find(option => option.value === value);
|
||||
});
|
||||
|
||||
return (
|
||||
<Select
|
||||
options={groupOptions}
|
||||
value={selectedValue}
|
||||
onChange={(newValue: SelectableValue) => onChange(newValue.value)}
|
||||
width={width}
|
||||
/>
|
||||
);
|
||||
};
|
||||
@@ -11,7 +11,7 @@ export const ReduceTransformerEditor: React.FC<TransformerUIProps<ReduceTransfor
|
||||
}) => {
|
||||
return (
|
||||
<StatsPicker
|
||||
width={12}
|
||||
width={25}
|
||||
placeholder="Choose Stat"
|
||||
allowMultiple
|
||||
stats={options.reducers || []}
|
||||
|
||||
@@ -2,3 +2,4 @@ export * from './components';
|
||||
export * from './types';
|
||||
export * from './utils';
|
||||
export * from './themes';
|
||||
export * from './slate-plugins';
|
||||
|
||||
1
packages/grafana-ui/src/slate-plugins/index.ts
Normal file
1
packages/grafana-ui/src/slate-plugins/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export { SlatePrism } from './slate-prism';
|
||||
@@ -0,0 +1,3 @@
|
||||
const TOKEN_MARK = 'prism-token';
|
||||
|
||||
export default TOKEN_MARK;
|
||||
160
packages/grafana-ui/src/slate-plugins/slate-prism/index.ts
Normal file
160
packages/grafana-ui/src/slate-plugins/slate-prism/index.ts
Normal file
@@ -0,0 +1,160 @@
|
||||
import Prism from 'prismjs';
|
||||
import { Block, Text, Decoration } from 'slate';
|
||||
import { Plugin } from '@grafana/slate-react';
|
||||
import Options, { OptionsFormat } from './options';
|
||||
import TOKEN_MARK from './TOKEN_MARK';
|
||||
|
||||
/**
|
||||
* A Slate plugin to highlight code syntax.
|
||||
*/
|
||||
export function SlatePrism(optsParam: OptionsFormat = {}): Plugin {
|
||||
const opts: Options = new Options(optsParam);
|
||||
|
||||
return {
|
||||
decorateNode: (node, editor, next) => {
|
||||
if (!opts.onlyIn(node)) {
|
||||
return next();
|
||||
}
|
||||
return decorateNode(opts, Block.create(node as Block));
|
||||
},
|
||||
|
||||
renderDecoration: (props, editor, next) =>
|
||||
opts.renderDecoration(
|
||||
{
|
||||
children: props.children,
|
||||
decoration: props.decoration,
|
||||
},
|
||||
editor as any,
|
||||
next
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the decoration for a node
|
||||
*/
|
||||
function decorateNode(opts: Options, block: Block) {
|
||||
const grammarName = opts.getSyntax(block);
|
||||
const grammar = Prism.languages[grammarName];
|
||||
if (!grammar) {
|
||||
// Grammar not loaded
|
||||
return [];
|
||||
}
|
||||
|
||||
// Tokenize the whole block text
|
||||
const texts = block.getTexts();
|
||||
const blockText = texts.map(text => text && text.getText()).join('\n');
|
||||
const tokens = Prism.tokenize(blockText, grammar);
|
||||
|
||||
// The list of decorations to return
|
||||
const decorations: Decoration[] = [];
|
||||
let textStart = 0;
|
||||
let textEnd = 0;
|
||||
|
||||
texts.forEach(text => {
|
||||
textEnd = textStart + text!.getText().length;
|
||||
|
||||
let offset = 0;
|
||||
function processToken(token: string | Prism.Token, accu?: string | number) {
|
||||
if (typeof token === 'string') {
|
||||
if (accu) {
|
||||
const decoration = createDecoration({
|
||||
text: text!,
|
||||
textStart,
|
||||
textEnd,
|
||||
start: offset,
|
||||
end: offset + token.length,
|
||||
className: `prism-token token ${accu}`,
|
||||
block,
|
||||
});
|
||||
if (decoration) {
|
||||
decorations.push(decoration);
|
||||
}
|
||||
}
|
||||
offset += token.length;
|
||||
} else {
|
||||
accu = `${accu} ${token.type} ${token.alias || ''}`;
|
||||
|
||||
if (typeof token.content === 'string') {
|
||||
const decoration = createDecoration({
|
||||
text: text!,
|
||||
textStart,
|
||||
textEnd,
|
||||
start: offset,
|
||||
end: offset + token.content.length,
|
||||
className: `prism-token token ${accu}`,
|
||||
block,
|
||||
});
|
||||
if (decoration) {
|
||||
decorations.push(decoration);
|
||||
}
|
||||
|
||||
offset += token.content.length;
|
||||
} else {
|
||||
// When using token.content instead of token.matchedStr, token can be deep
|
||||
for (let i = 0; i < token.content.length; i += 1) {
|
||||
// @ts-ignore
|
||||
processToken(token.content[i], accu);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tokens.forEach(processToken);
|
||||
textStart = textEnd + 1; // account for added `\n`
|
||||
});
|
||||
|
||||
return decorations;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a decoration range for the given text.
|
||||
*/
|
||||
function createDecoration({
|
||||
text,
|
||||
textStart,
|
||||
textEnd,
|
||||
start,
|
||||
end,
|
||||
className,
|
||||
block,
|
||||
}: {
|
||||
text: Text; // The text being decorated
|
||||
textStart: number; // Its start position in the whole text
|
||||
textEnd: number; // Its end position in the whole text
|
||||
start: number; // The position in the whole text where the token starts
|
||||
end: number; // The position in the whole text where the token ends
|
||||
className: string; // The prism token classname
|
||||
block: Block;
|
||||
}): Decoration | null {
|
||||
if (start >= textEnd || end <= textStart) {
|
||||
// Ignore, the token is not in the text
|
||||
return null;
|
||||
}
|
||||
|
||||
// Shrink to this text boundaries
|
||||
start = Math.max(start, textStart);
|
||||
end = Math.min(end, textEnd);
|
||||
|
||||
// Now shift offsets to be relative to this text
|
||||
start -= textStart;
|
||||
end -= textStart;
|
||||
|
||||
const myDec = block.createDecoration({
|
||||
object: 'decoration',
|
||||
anchor: {
|
||||
key: text.key,
|
||||
offset: start,
|
||||
object: 'point',
|
||||
},
|
||||
focus: {
|
||||
key: text.key,
|
||||
offset: end,
|
||||
object: 'point',
|
||||
},
|
||||
type: TOKEN_MARK,
|
||||
data: { className },
|
||||
});
|
||||
|
||||
return myDec;
|
||||
}
|
||||
@@ -0,0 +1,77 @@
|
||||
import React from 'react';
|
||||
import { Mark, Node, Decoration } from 'slate';
|
||||
import { Editor } from '@grafana/slate-react';
|
||||
import { Record } from 'immutable';
|
||||
|
||||
import TOKEN_MARK from './TOKEN_MARK';
|
||||
|
||||
export interface OptionsFormat {
|
||||
// Determine which node should be highlighted
|
||||
onlyIn?: (node: Node) => boolean;
|
||||
// Returns the syntax for a node that should be highlighted
|
||||
getSyntax?: (node: Node) => string;
|
||||
// Render a highlighting mark in a highlighted node
|
||||
renderMark?: ({ mark, children }: { mark: Mark; children: React.ReactNode }) => void | React.ReactNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Default filter for code blocks
|
||||
*/
|
||||
function defaultOnlyIn(node: Node): boolean {
|
||||
return node.object === 'block' && node.type === 'code_block';
|
||||
}
|
||||
|
||||
/**
|
||||
* Default getter for syntax
|
||||
*/
|
||||
function defaultGetSyntax(node: Node): string {
|
||||
return 'javascript';
|
||||
}
|
||||
|
||||
/**
|
||||
* Default rendering for decorations
|
||||
*/
|
||||
function defaultRenderDecoration(
|
||||
props: { children: React.ReactNode; decoration: Decoration },
|
||||
editor: Editor,
|
||||
next: () => any
|
||||
): void | React.ReactNode {
|
||||
const { decoration } = props;
|
||||
if (decoration.type !== TOKEN_MARK) {
|
||||
return next();
|
||||
}
|
||||
|
||||
const className = decoration.data.get('className');
|
||||
return <span className={className}>{props.children}</span>;
|
||||
}
|
||||
|
||||
/**
|
||||
* The plugin options
|
||||
*/
|
||||
class Options
|
||||
extends Record({
|
||||
onlyIn: defaultOnlyIn,
|
||||
getSyntax: defaultGetSyntax,
|
||||
renderDecoration: defaultRenderDecoration,
|
||||
})
|
||||
implements OptionsFormat {
|
||||
readonly onlyIn!: (node: Node) => boolean;
|
||||
readonly getSyntax!: (node: Node) => string;
|
||||
readonly renderDecoration!: (
|
||||
{
|
||||
decoration,
|
||||
children,
|
||||
}: {
|
||||
decoration: Decoration;
|
||||
children: React.ReactNode;
|
||||
},
|
||||
editor: Editor,
|
||||
next: () => any
|
||||
) => void | React.ReactNode;
|
||||
|
||||
constructor(props: OptionsFormat) {
|
||||
super(props);
|
||||
}
|
||||
}
|
||||
|
||||
export default Options;
|
||||
@@ -193,6 +193,7 @@ $btn-semi-transparent: rgba(0, 0, 0, 0.2) !default;
|
||||
|
||||
// sidemenu
|
||||
$side-menu-width: 60px;
|
||||
$navbar-padding: 20px;
|
||||
|
||||
// dashboard
|
||||
$dashboard-padding: $space-md;
|
||||
|
||||
@@ -16,6 +16,8 @@ export interface PanelData {
|
||||
series: DataFrame[];
|
||||
request?: DataQueryRequest;
|
||||
error?: DataQueryError;
|
||||
// Contains the range from the request or a shifted time range if a request uses relative time
|
||||
timeRange: TimeRange;
|
||||
}
|
||||
|
||||
export interface PanelProps<T = any> {
|
||||
|
||||
@@ -50,13 +50,13 @@ function getTitleTemplate(title: string | undefined, stats: string[], data?: Dat
|
||||
|
||||
const parts: string[] = [];
|
||||
if (stats.length > 1) {
|
||||
parts.push('$' + VAR_CALC);
|
||||
parts.push('${' + VAR_CALC + '}');
|
||||
}
|
||||
if (data.length > 1) {
|
||||
parts.push('${' + VAR_SERIES_NAME + '}');
|
||||
}
|
||||
if (fieldCount > 1 || !parts.length) {
|
||||
parts.push('$' + VAR_FIELD_NAME);
|
||||
parts.push('${' + VAR_FIELD_NAME + '}');
|
||||
}
|
||||
return parts.join(' ');
|
||||
}
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
// @ts-ignore
|
||||
import { Block, Document, Text, Value } from 'slate';
|
||||
import { Block, Document, Text, Value, SchemaProperties } from 'slate';
|
||||
|
||||
const SCHEMA = {
|
||||
blocks: {
|
||||
paragraph: 'paragraph',
|
||||
codeblock: 'code_block',
|
||||
codeline: 'code_line',
|
||||
export const SCHEMA: SchemaProperties = {
|
||||
document: {
|
||||
nodes: [
|
||||
{
|
||||
match: [{ type: 'paragraph' }, { type: 'code_block' }, { type: 'code_line' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
inlines: {},
|
||||
marks: {},
|
||||
};
|
||||
|
||||
export const makeFragment = (text: string, syntax?: string) => {
|
||||
export const makeFragment = (text: string, syntax?: string): Document => {
|
||||
const lines = text.split('\n').map(line =>
|
||||
Block.create({
|
||||
type: 'code_line',
|
||||
nodes: [Text.create(line)],
|
||||
} as any)
|
||||
})
|
||||
);
|
||||
|
||||
const block = Block.create({
|
||||
@@ -25,18 +25,17 @@ export const makeFragment = (text: string, syntax?: string) => {
|
||||
},
|
||||
type: 'code_block',
|
||||
nodes: lines,
|
||||
} as any);
|
||||
});
|
||||
|
||||
return Document.create({
|
||||
nodes: [block],
|
||||
});
|
||||
};
|
||||
|
||||
export const makeValue = (text: string, syntax?: string) => {
|
||||
export const makeValue = (text: string, syntax?: string): Value => {
|
||||
const fragment = makeFragment(text, syntax);
|
||||
|
||||
return Value.create({
|
||||
document: fragment,
|
||||
SCHEMA,
|
||||
} as any);
|
||||
});
|
||||
};
|
||||
|
||||
@@ -1,6 +1,14 @@
|
||||
import { toFixed, getValueFormat } from './valueFormats';
|
||||
|
||||
describe('valueFormats', () => {
|
||||
describe('toFixed with edge cases', () => {
|
||||
it('should handle non number input gracefully', () => {
|
||||
expect(toFixed(NaN)).toBe('NaN');
|
||||
expect(toFixed(Number.NEGATIVE_INFINITY)).toBe('-Inf');
|
||||
expect(toFixed(Number.POSITIVE_INFINITY)).toBe('Inf');
|
||||
});
|
||||
});
|
||||
|
||||
describe('toFixed and negative decimals', () => {
|
||||
it('should treat as zero decimals', () => {
|
||||
const str = toFixed(186.123, -2);
|
||||
|
||||
@@ -33,6 +33,12 @@ export function toFixed(value: number, decimals?: DecimalCount): string {
|
||||
if (value === null) {
|
||||
return '';
|
||||
}
|
||||
if (value === Number.NEGATIVE_INFINITY) {
|
||||
return '-Inf';
|
||||
}
|
||||
if (value === Number.POSITIVE_INFINITY) {
|
||||
return 'Inf';
|
||||
}
|
||||
|
||||
const factor = decimals ? Math.pow(10, Math.max(0, decimals)) : 1;
|
||||
const formatted = String(Math.round(value * factor) / factor);
|
||||
|
||||
@@ -5,6 +5,10 @@
|
||||
"compilerOptions": {
|
||||
"rootDirs": [".", "stories"],
|
||||
"typeRoots": ["./node_modules/@types", "types"],
|
||||
"baseUrl": "./node_modules/@types",
|
||||
"paths": {
|
||||
"@grafana/slate-react": ["slate-react"]
|
||||
},
|
||||
"declarationDir": "dist",
|
||||
"outDir": "compiled"
|
||||
}
|
||||
|
||||
@@ -23,7 +23,8 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi
|
||||
|
||||
WORKDIR $GF_PATHS_HOME
|
||||
|
||||
RUN apk add --no-cache ca-certificates bash
|
||||
RUN apk add --no-cache ca-certificates bash && \
|
||||
apk add --no-cache --upgrade --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main openssl musl-utils
|
||||
|
||||
# PhantomJS
|
||||
RUN if [ `arch` = "x86_64" ]; then \
|
||||
|
||||
@@ -135,3 +135,15 @@ func Respond(status int, body interface{}) *NormalResponse {
|
||||
header: make(http.Header),
|
||||
}
|
||||
}
|
||||
|
||||
type RedirectResponse struct {
|
||||
location string
|
||||
}
|
||||
|
||||
func (r *RedirectResponse) WriteTo(ctx *m.ReqContext) {
|
||||
ctx.Redirect(r.location)
|
||||
}
|
||||
|
||||
func Redirect(location string) *RedirectResponse {
|
||||
return &RedirectResponse{location: location}
|
||||
}
|
||||
|
||||
@@ -157,32 +157,34 @@ func latestSupportedVersion(plugin *m.Plugin) *m.Version {
|
||||
|
||||
// SelectVersion returns latest version if none is specified or the specified version. If the version string is not
|
||||
// matched to existing version it errors out. It also errors out if version that is matched is not available for current
|
||||
// os and platform.
|
||||
// os and platform. It expects plugin.Versions to be sorted so the newest version is first.
|
||||
func SelectVersion(plugin *m.Plugin, version string) (*m.Version, error) {
|
||||
var ver *m.Version
|
||||
if version == "" {
|
||||
ver = &plugin.Versions[0]
|
||||
}
|
||||
|
||||
for _, v := range plugin.Versions {
|
||||
if v.Version == version {
|
||||
ver = &v
|
||||
}
|
||||
}
|
||||
|
||||
if ver == nil {
|
||||
return nil, xerrors.New("Could not find the version you're looking for")
|
||||
}
|
||||
var ver m.Version
|
||||
|
||||
latestForArch := latestSupportedVersion(plugin)
|
||||
if latestForArch == nil {
|
||||
return nil, xerrors.New("Plugin is not supported on your architecture and os.")
|
||||
}
|
||||
|
||||
if latestForArch.Version == ver.Version {
|
||||
return ver, nil
|
||||
if version == "" {
|
||||
return latestForArch, nil
|
||||
}
|
||||
return nil, xerrors.Errorf("Version you want is not supported on your architecture and os. Latest suitable version is %v", latestForArch.Version)
|
||||
for _, v := range plugin.Versions {
|
||||
if v.Version == version {
|
||||
ver = v
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if len(ver.Version) == 0 {
|
||||
return nil, xerrors.New("Could not find the version you're looking for")
|
||||
}
|
||||
|
||||
if !supportsCurrentArch(&ver) {
|
||||
return nil, xerrors.Errorf("Version you want is not supported on your architecture and os. Latest suitable version is %v", latestForArch.Version)
|
||||
}
|
||||
|
||||
return &ver, nil
|
||||
}
|
||||
|
||||
func RemoveGitBuildFromName(pluginName, filename string) string {
|
||||
|
||||
@@ -14,7 +14,7 @@ import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestFoldernameReplacement(t *testing.T) {
|
||||
func TestFolderNameReplacement(t *testing.T) {
|
||||
Convey("path containing git commit path", t, func() {
|
||||
pluginName := "datasource-plugin-kairosdb"
|
||||
|
||||
@@ -134,7 +134,68 @@ func TestIsPathSafe(t *testing.T) {
|
||||
assert.False(t, isPathSafe("../../", dest))
|
||||
assert.False(t, isPathSafe("../../test", dest))
|
||||
})
|
||||
}
|
||||
|
||||
func TestSelectVersion(t *testing.T) {
|
||||
t.Run("Should return error when requested version does not exist", func(t *testing.T) {
|
||||
_, err := SelectVersion(
|
||||
makePluginWithVersions(versionArg{Version: "version"}),
|
||||
"1.1.1",
|
||||
)
|
||||
assert.NotNil(t, err)
|
||||
})
|
||||
|
||||
t.Run("Should return error when no version supports current arch", func(t *testing.T) {
|
||||
_, err := SelectVersion(
|
||||
makePluginWithVersions(versionArg{Version: "version", Arch: []string{"non-existent"}}),
|
||||
"",
|
||||
)
|
||||
assert.NotNil(t, err)
|
||||
})
|
||||
|
||||
t.Run("Should return error when requested version does not support current arch", func(t *testing.T) {
|
||||
_, err := SelectVersion(
|
||||
makePluginWithVersions(
|
||||
versionArg{Version: "2.0.0"},
|
||||
versionArg{Version: "1.1.1", Arch: []string{"non-existent"}},
|
||||
),
|
||||
"1.1.1",
|
||||
)
|
||||
assert.NotNil(t, err)
|
||||
})
|
||||
|
||||
t.Run("Should return latest available for arch when no version specified", func(t *testing.T) {
|
||||
ver, err := SelectVersion(
|
||||
makePluginWithVersions(
|
||||
versionArg{Version: "2.0.0", Arch: []string{"non-existent"}},
|
||||
versionArg{Version: "1.0.0"},
|
||||
),
|
||||
"",
|
||||
)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, "1.0.0", ver.Version)
|
||||
})
|
||||
|
||||
t.Run("Should return latest version when no version specified", func(t *testing.T) {
|
||||
ver, err := SelectVersion(
|
||||
makePluginWithVersions(versionArg{Version: "2.0.0"}, versionArg{Version: "1.0.0"}),
|
||||
"",
|
||||
)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, "2.0.0", ver.Version)
|
||||
})
|
||||
|
||||
t.Run("Should return requested version", func(t *testing.T) {
|
||||
ver, err := SelectVersion(
|
||||
makePluginWithVersions(
|
||||
versionArg{Version: "2.0.0"},
|
||||
versionArg{Version: "1.0.0"},
|
||||
),
|
||||
"1.0.0",
|
||||
)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, "1.0.0", ver.Version)
|
||||
})
|
||||
}
|
||||
|
||||
func setupPluginInstallCmd(t *testing.T, pluginDir string) utils.CommandLine {
|
||||
@@ -199,3 +260,35 @@ func skipWindows(t *testing.T) {
|
||||
t.Skip("Skipping test on Windows")
|
||||
}
|
||||
}
|
||||
|
||||
type versionArg struct {
|
||||
Version string
|
||||
Arch []string
|
||||
}
|
||||
|
||||
func makePluginWithVersions(versions ...versionArg) *models.Plugin {
|
||||
plugin := &models.Plugin{
|
||||
Id: "",
|
||||
Category: "",
|
||||
Versions: []models.Version{},
|
||||
}
|
||||
|
||||
for _, version := range versions {
|
||||
ver := models.Version{
|
||||
Version: version.Version,
|
||||
Commit: fmt.Sprintf("commit_%s", version.Version),
|
||||
Url: fmt.Sprintf("url_%s", version.Version),
|
||||
}
|
||||
if version.Arch != nil {
|
||||
ver.Arch = map[string]models.ArchMeta{}
|
||||
for _, arch := range version.Arch {
|
||||
ver.Arch[arch] = models.ArchMeta{
|
||||
Md5: fmt.Sprintf("md5_%s", arch),
|
||||
}
|
||||
}
|
||||
}
|
||||
plugin.Versions = append(plugin.Versions, ver)
|
||||
}
|
||||
|
||||
return plugin
|
||||
}
|
||||
|
||||
@@ -217,7 +217,7 @@ func (scanner *PluginScanner) loadPluginJson(pluginJsonFilePath string) error {
|
||||
loader = reflect.New(reflect.TypeOf(pluginGoType)).Interface().(PluginLoader)
|
||||
|
||||
// External plugins need a module.js file for SystemJS to load
|
||||
if !strings.HasPrefix(pluginJsonFilePath, setting.StaticRootPath) {
|
||||
if !strings.HasPrefix(pluginJsonFilePath, setting.StaticRootPath) && !scanner.IsBackendOnlyPlugin(pluginCommon.Type) {
|
||||
module := filepath.Join(filepath.Dir(pluginJsonFilePath), "module.js")
|
||||
if _, err := os.Stat(module); os.IsNotExist(err) {
|
||||
plog.Warn("Plugin missing module.js",
|
||||
@@ -231,6 +231,10 @@ func (scanner *PluginScanner) loadPluginJson(pluginJsonFilePath string) error {
|
||||
return loader.Load(jsonParser, currentDir)
|
||||
}
|
||||
|
||||
func (scanner *PluginScanner) IsBackendOnlyPlugin(pluginType string) bool {
|
||||
return pluginType == "renderer"
|
||||
}
|
||||
|
||||
func GetPluginMarkdown(pluginId string, name string) ([]byte, error) {
|
||||
plug, exists := Plugins[pluginId]
|
||||
if !exists {
|
||||
|
||||
@@ -42,4 +42,18 @@ func TestPluginScans(t *testing.T) {
|
||||
So(Apps["test-app"].Info.Screenshots[1].Path, ShouldEqual, "public/plugins/test-app/img/screenshot2.png")
|
||||
})
|
||||
|
||||
Convey("When checking if renderer is backend only plugin", t, func() {
|
||||
pluginScanner := &PluginScanner{}
|
||||
result := pluginScanner.IsBackendOnlyPlugin("renderer")
|
||||
|
||||
So(result, ShouldEqual, true)
|
||||
})
|
||||
|
||||
Convey("When checking if app is backend only plugin", t, func() {
|
||||
pluginScanner := &PluginScanner{}
|
||||
result := pluginScanner.IsBackendOnlyPlugin("app")
|
||||
|
||||
So(result, ShouldEqual, false)
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
@@ -88,7 +88,13 @@ func (pn *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error {
|
||||
pn.log.Info("Notifying Pagerduty", "event_type", eventType)
|
||||
|
||||
payloadJSON := simplejson.New()
|
||||
payloadJSON.Set("summary", evalContext.Rule.Name+" - "+evalContext.Rule.Message)
|
||||
|
||||
summary := evalContext.Rule.Name + " - " + evalContext.Rule.Message
|
||||
if len(summary) > 1024 {
|
||||
summary = summary[0:1024]
|
||||
}
|
||||
payloadJSON.Set("summary", summary)
|
||||
|
||||
if hostname, err := os.Hostname(); err == nil {
|
||||
payloadJSON.Set("source", hostname)
|
||||
}
|
||||
|
||||
@@ -3,10 +3,14 @@ package multildap
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/services/ldap"
|
||||
)
|
||||
|
||||
// logger to log
|
||||
var logger = log.New("ldap")
|
||||
|
||||
// GetConfig gets LDAP config
|
||||
var GetConfig = ldap.GetConfig
|
||||
|
||||
@@ -119,12 +123,18 @@ func (multiples *MultiLDAP) Login(query *models.LoginUserQuery) (
|
||||
return user, nil
|
||||
}
|
||||
|
||||
// Continue if we couldn't find the user
|
||||
if err == ErrCouldNotFindUser {
|
||||
continue
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
|
||||
if isSilentError(err) {
|
||||
logger.Debug(
|
||||
"unable to login with LDAP - skipping server",
|
||||
"host", config.Host,
|
||||
"port", config.Port,
|
||||
"error", err,
|
||||
)
|
||||
continue
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
@@ -204,3 +214,17 @@ func (multiples *MultiLDAP) Users(logins []string) (
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// isSilentError evaluates an error and tells whenever we should fail the LDAP request
|
||||
// immediately or if we should continue into other LDAP servers
|
||||
func isSilentError(err error) bool {
|
||||
continueErrs := []error{ErrInvalidCredentials, ErrCouldNotFindUser}
|
||||
|
||||
for _, cerr := range continueErrs {
|
||||
if err == cerr {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -152,6 +152,25 @@ func TestMultiLDAP(t *testing.T) {
|
||||
teardown()
|
||||
})
|
||||
|
||||
Convey("Should still try to auth with the second server after receiving an invalid credentials error from the first", func() {
|
||||
mock := setup()
|
||||
|
||||
mock.loginErrReturn = ErrInvalidCredentials
|
||||
|
||||
multi := New([]*ldap.ServerConfig{
|
||||
{}, {},
|
||||
})
|
||||
_, err := multi.Login(&models.LoginUserQuery{})
|
||||
|
||||
So(mock.dialCalledTimes, ShouldEqual, 2)
|
||||
So(mock.loginCalledTimes, ShouldEqual, 2)
|
||||
So(mock.closeCalledTimes, ShouldEqual, 2)
|
||||
|
||||
So(err, ShouldEqual, ErrInvalidCredentials)
|
||||
|
||||
teardown()
|
||||
})
|
||||
|
||||
Convey("Should return unknown error", func() {
|
||||
mock := setup()
|
||||
|
||||
|
||||
0
pkg/services/provisioning/datasources/testdata/zero-datasources/placeholder-for-git
vendored
Normal file
0
pkg/services/provisioning/datasources/testdata/zero-datasources/placeholder-for-git
vendored
Normal file
@@ -155,7 +155,13 @@ func (val *StringMapValue) Value() map[string]string {
|
||||
// slices and the actual interpolation is done on all simple string values in the structure. It returns a copy of any
|
||||
// map or slice value instead of modifying them in place.
|
||||
func tranformInterface(i interface{}) interface{} {
|
||||
switch reflect.TypeOf(i).Kind() {
|
||||
typeOf := reflect.TypeOf(i)
|
||||
|
||||
if typeOf == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch typeOf.Kind() {
|
||||
case reflect.Slice:
|
||||
return transformSlice(i.([]interface{}))
|
||||
case reflect.Map:
|
||||
|
||||
@@ -131,6 +131,8 @@ func TestValues(t *testing.T) {
|
||||
- two
|
||||
- three:
|
||||
inside: $STRING
|
||||
- six:
|
||||
empty:
|
||||
four:
|
||||
nested:
|
||||
onemore: $INT
|
||||
@@ -146,11 +148,18 @@ func TestValues(t *testing.T) {
|
||||
"one": 1,
|
||||
"two": "test",
|
||||
"three": []interface{}{
|
||||
1, "two", anyMap{
|
||||
1,
|
||||
"two",
|
||||
anyMap{
|
||||
"three": anyMap{
|
||||
"inside": "test",
|
||||
},
|
||||
},
|
||||
anyMap{
|
||||
"six": anyMap{
|
||||
"empty": interface{}(nil),
|
||||
},
|
||||
},
|
||||
},
|
||||
"four": anyMap{
|
||||
"nested": anyMap{
|
||||
@@ -166,11 +175,18 @@ func TestValues(t *testing.T) {
|
||||
"one": 1,
|
||||
"two": "$STRING",
|
||||
"three": []interface{}{
|
||||
1, "two", anyMap{
|
||||
1,
|
||||
"two",
|
||||
anyMap{
|
||||
"three": anyMap{
|
||||
"inside": "$STRING",
|
||||
},
|
||||
},
|
||||
anyMap{
|
||||
"six": anyMap{
|
||||
"empty": interface{}(nil),
|
||||
},
|
||||
},
|
||||
},
|
||||
"four": anyMap{
|
||||
"nested": anyMap{
|
||||
|
||||
@@ -96,22 +96,13 @@ func roleCounterSQL(role, alias string) string {
|
||||
return `
|
||||
(
|
||||
SELECT COUNT(*)
|
||||
FROM ` + dialect.Quote("user") + ` as u
|
||||
WHERE
|
||||
(SELECT COUNT(*)
|
||||
FROM org_user
|
||||
WHERE org_user.user_id=u.id
|
||||
AND org_user.role='` + role + `')>0
|
||||
FROM ` + dialect.Quote("user") + ` as u, org_user
|
||||
WHERE ( org_user.user_id=u.id AND org_user.role='` + role + `' )
|
||||
) as ` + alias + `,
|
||||
(
|
||||
SELECT COUNT(*)
|
||||
FROM ` + dialect.Quote("user") + ` as u
|
||||
WHERE
|
||||
(SELECT COUNT(*)
|
||||
FROM org_user
|
||||
WHERE org_user.user_id=u.id
|
||||
AND org_user.role='` + role + `')>0
|
||||
AND u.last_seen_at>?
|
||||
FROM ` + dialect.Quote("user") + ` as u, org_user
|
||||
WHERE u.last_seen_at>? AND ( org_user.user_id=u.id AND org_user.role='` + role + `' )
|
||||
) as active_` + alias
|
||||
}
|
||||
|
||||
|
||||
@@ -60,11 +60,7 @@ func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, ori
|
||||
if err != nil {
|
||||
queryRes.Error = err
|
||||
}
|
||||
if val, ok := result.Results[query.RefID]; ok {
|
||||
val.Series = append(result.Results[query.RefID].Series, queryRes.Series...)
|
||||
} else {
|
||||
result.Results[query.RefID] = queryRes
|
||||
}
|
||||
result.Results[query.RefID] = queryRes
|
||||
}
|
||||
|
||||
return result, nil
|
||||
@@ -88,22 +84,11 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
|
||||
azureMonitorTarget := query.Model.Get("azureMonitor").MustMap()
|
||||
azlog.Debug("AzureMonitor", "target", azureMonitorTarget)
|
||||
|
||||
queryMode := fmt.Sprintf("%v", azureMonitorTarget["queryMode"])
|
||||
if queryMode == "crossResource" {
|
||||
return nil, fmt.Errorf("Alerting not supported for multiple resource queries")
|
||||
}
|
||||
|
||||
var azureMonitorData map[string]interface{}
|
||||
if queryMode == "singleResource" {
|
||||
azureMonitorData = azureMonitorTarget["data"].(map[string]interface{})[queryMode].(map[string]interface{})
|
||||
} else {
|
||||
azureMonitorData = azureMonitorTarget
|
||||
}
|
||||
urlComponents := map[string]string{}
|
||||
urlComponents["subscription"] = fmt.Sprintf("%v", query.Model.Get("subscription").MustString())
|
||||
urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorData["resourceGroup"])
|
||||
urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorData["metricDefinition"])
|
||||
urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorData["resourceName"])
|
||||
urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"])
|
||||
urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"])
|
||||
urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"])
|
||||
|
||||
ub := urlBuilder{
|
||||
DefaultSubscription: query.DataSource.JsonData.Get("subscriptionId").MustString(),
|
||||
@@ -115,12 +100,12 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
|
||||
azureURL := ub.Build()
|
||||
|
||||
alias := ""
|
||||
if val, ok := azureMonitorData["alias"]; ok {
|
||||
if val, ok := azureMonitorTarget["alias"]; ok {
|
||||
alias = fmt.Sprintf("%v", val)
|
||||
}
|
||||
|
||||
timeGrain := fmt.Sprintf("%v", azureMonitorData["timeGrain"])
|
||||
timeGrains := azureMonitorData["allowedTimeGrainsMs"]
|
||||
timeGrain := fmt.Sprintf("%v", azureMonitorTarget["timeGrain"])
|
||||
timeGrains := azureMonitorTarget["allowedTimeGrainsMs"]
|
||||
if timeGrain == "auto" {
|
||||
timeGrain, err = e.setAutoTimeGrain(query.IntervalMs, timeGrains)
|
||||
if err != nil {
|
||||
@@ -132,16 +117,13 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
|
||||
params.Add("api-version", "2018-01-01")
|
||||
params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339)))
|
||||
params.Add("interval", timeGrain)
|
||||
params.Add("aggregation", fmt.Sprintf("%v", azureMonitorData["aggregation"]))
|
||||
params.Add("metricnames", fmt.Sprintf("%v", azureMonitorData["metricName"]))
|
||||
params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"]))
|
||||
params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"]))
|
||||
params.Add("metricnamespace", fmt.Sprintf("%v", azureMonitorTarget["metricNamespace"]))
|
||||
|
||||
if val, ok := azureMonitorData["metricNamespace"]; ok {
|
||||
params.Add("metricnamespace", fmt.Sprintf("%v", val))
|
||||
}
|
||||
|
||||
dimension := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorData["dimension"]))
|
||||
dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorData["dimensionFilter"]))
|
||||
if azureMonitorData["dimension"] != nil && azureMonitorData["dimensionFilter"] != nil && len(dimension) > 0 && len(dimensionFilter) > 0 && dimension != "None" {
|
||||
dimension := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimension"]))
|
||||
dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimensionFilter"]))
|
||||
if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && len(dimension) > 0 && len(dimensionFilter) > 0 && dimension != "None" {
|
||||
params.Add("$filter", fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
|
||||
}
|
||||
|
||||
|
||||
@@ -36,20 +36,15 @@ func TestAzureMonitorDatasource(t *testing.T) {
|
||||
Model: simplejson.NewFromAny(map[string]interface{}{
|
||||
"subscription": "12345678-aaaa-bbbb-cccc-123456789abc",
|
||||
"azureMonitor": map[string]interface{}{
|
||||
"queryMode": "singleResource",
|
||||
"data": map[string]interface{}{
|
||||
"singleResource": map[string]interface{}{
|
||||
"timeGrain": "PT1M",
|
||||
"aggregation": "Average",
|
||||
"resourceGroup": "grafanastaging",
|
||||
"resourceName": "grafana",
|
||||
"metricDefinition": "Microsoft.Compute/virtualMachines",
|
||||
"metricNamespace": "Microsoft.Compute-virtualMachines",
|
||||
"metricName": "Percentage CPU",
|
||||
"alias": "testalias",
|
||||
"queryType": "Azure Monitor",
|
||||
},
|
||||
},
|
||||
"timeGrain": "PT1M",
|
||||
"aggregation": "Average",
|
||||
"resourceGroup": "grafanastaging",
|
||||
"resourceName": "grafana",
|
||||
"metricDefinition": "Microsoft.Compute/virtualMachines",
|
||||
"metricNamespace": "Microsoft.Compute-virtualMachines",
|
||||
"metricName": "Percentage CPU",
|
||||
"alias": "testalias",
|
||||
"queryType": "Azure Monitor",
|
||||
},
|
||||
}),
|
||||
RefId: "A",
|
||||
|
||||
@@ -155,7 +155,7 @@ func init() {
|
||||
"AWS/Events": {"RuleName"},
|
||||
"AWS/FSx": {},
|
||||
"AWS/Firehose": {"DeliveryStreamName"},
|
||||
"AWS/GameLift": {"FleetId", "InstanceType", "MatchmakingConfigurationName", "MatchmakingConfigurationName-RuleName", "MetricGroup", "OperatingSystem", "QueueName"},
|
||||
"AWS/GameLift": {"FleetId", "InstanceType", "MatchmakingConfigurationName", "MatchmakingConfigurationName-RuleName", "MetricGroups", "OperatingSystem", "QueueName"},
|
||||
"AWS/Glue": {"JobName", "JobRunId", "Type"},
|
||||
"AWS/Inspector": {},
|
||||
"AWS/IoT": {"ActionType", "BehaviorName", "CheckName", "JobId", "Protocol", "RuleName", "ScheduledAuditName", "SecurityProfileName"},
|
||||
|
||||
@@ -3,7 +3,6 @@ package mssql
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"strconv"
|
||||
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
@@ -24,7 +23,10 @@ func init() {
|
||||
func newMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
|
||||
logger := log.New("tsdb.mssql")
|
||||
|
||||
cnnstr := generateConnectionString(datasource)
|
||||
cnnstr, err := generateConnectionString(datasource)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if setting.Env == setting.DEV {
|
||||
logger.Debug("getEngine", "connection", cnnstr)
|
||||
}
|
||||
@@ -36,35 +38,35 @@ func newMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoin
MetricColumnTypes: []string{"VARCHAR", "CHAR", "NVARCHAR", "NCHAR"},
}

rowTransformer := mssqlRowTransformer{
queryResultTransformer := mssqlQueryResultTransformer{
log: logger,
}

return sqleng.NewSqlQueryEndpoint(&config, &rowTransformer, newMssqlMacroEngine(), logger)
return sqleng.NewSqlQueryEndpoint(&config, &queryResultTransformer, newMssqlMacroEngine(), logger)
}

func generateConnectionString(datasource *models.DataSource) string {
func generateConnectionString(datasource *models.DataSource) (string, error) {
server, port := util.SplitHostPortDefault(datasource.Url, "localhost", "1433")

encrypt := datasource.JsonData.Get("encrypt").MustString("false")

query := url.Values{}
query.Add("database", datasource.Database)
query.Add("encrypt", encrypt)

u := &url.URL{
Scheme: "sqlserver",
User: url.UserPassword(datasource.User, datasource.DecryptedPassword()),
Host: fmt.Sprintf("%s:%s", server, port),
RawQuery: query.Encode(),
connStr := fmt.Sprintf("server=%s;port=%s;database=%s;user id=%s;password=%s;",
server,
port,
datasource.Database,
datasource.User,
datasource.DecryptedPassword(),
)
if encrypt != "false" {
connStr += fmt.Sprintf("encrypt=%s;", encrypt)
}
return u.String()
return connStr, nil
}

type mssqlRowTransformer struct {
type mssqlQueryResultTransformer struct {
log log.Logger
}

func (t *mssqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
func (t *mssqlQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
values := make([]interface{}, len(columnTypes))
valuePtrs := make([]interface{}, len(columnTypes))

@@ -98,3 +100,7 @@ func (t *mssqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *cor

return values, nil
}

func (t *mssqlQueryResultTransformer) TransformQueryError(err error) error {
return err
}

@@ -28,33 +28,6 @@ import (
// If needed, change the variable below to the IP address of the database.
var serverIP = "localhost"

func TestGenerateConnectionString(t *testing.T) {
encrypted, _ := simplejson.NewJson([]byte(`{"encrypt":"false"}`))
testSet := []struct {
ds *models.DataSource
expected string
}{
{
&models.DataSource{
User: "user",
Database: "db",
Url: "localhost:1433",
SecureJsonData: securejsondata.GetEncryptedJsonData(map[string]string{
"password": "pass;word",
}),
JsonData: encrypted,
},
"sqlserver://user:pass;word@localhost:1433?database=db&encrypt=false",
},
}
for i := range testSet {
got := generateConnectionString(testSet[i].ds)
if got != testSet[i].expected {
t.Errorf("mssql connString error for testCase %d got: %s expected: %s", i, got, testSet[i].expected)
}
}
}

func TestMSSQL(t *testing.T) {
SkipConvey("MSSQL", t, func() {
x := InitMSSQLTestDB(t)

@@ -1,11 +1,13 @@
package mysql

import (
"errors"
"fmt"
"regexp"
"strings"

"github.com/grafana/grafana/pkg/components/gtime"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/tsdb/sqleng"
)
@@ -13,19 +15,29 @@ import (
const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`

var restrictedRegExp = regexp.MustCompile(`(?im)([\s]*show[\s]+grants|[\s,]session_user\([^\)]*\)|[\s,]current_user(\([^\)]*\))?|[\s,]system_user\([^\)]*\)|[\s,]user\([^\)]*\))([\s,;]|$)`)

type mySqlMacroEngine struct {
*sqleng.SqlMacroEngineBase
timeRange *tsdb.TimeRange
query *tsdb.Query
logger log.Logger
}

func newMysqlMacroEngine() sqleng.SqlMacroEngine {
return &mySqlMacroEngine{SqlMacroEngineBase: sqleng.NewSqlMacroEngineBase()}
func newMysqlMacroEngine(logger log.Logger) sqleng.SqlMacroEngine {
return &mySqlMacroEngine{SqlMacroEngineBase: sqleng.NewSqlMacroEngineBase(), logger: logger}
}

func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
m.timeRange = timeRange
m.query = query

matches := restrictedRegExp.FindAllStringSubmatch(sql, 1)
if len(matches) > 0 {
m.logger.Error("show grants, session_user(), current_user(), system_user() or user() not allowed in query")
return "", errors.New("Invalid query. Inspect Grafana server log for details")
}

rExp, _ := regexp.Compile(sExpr)
var macroError error

@@ -6,13 +6,16 @@ import (
"testing"
"time"

"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)

func TestMacroEngine(t *testing.T) {
Convey("MacroEngine", t, func() {
engine := &mySqlMacroEngine{}
engine := &mySqlMacroEngine{
logger: log.New("test"),
}
query := &tsdb.Query{}

Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {
@@ -157,5 +160,33 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
})
})

Convey("Given queries that contains unallowed user functions", func() {
tcs := []string{
"select \nSESSION_USER(), abc",
"SELECT session_User( ) ",
"SELECT session_User( )\n",
"SELECT current_user",
"SELECT current_USER",
"SELECT current_user()",
"SELECT Current_User()",
"SELECT current_user( )",
"SELECT current_user(\t )",
"SELECT user()",
"SELECT USER()",
"SELECT SYSTEM_USER()",
"SELECT System_User()",
"SELECT System_User( )",
"SELECT System_User(\t \t)",
"SHOW \t grants",
" show Grants\n",
"show grants;",
}

for _, tc := range tcs {
_, err := engine.Interpolate(nil, nil, tc)
So(err.Error(), ShouldEqual, "Invalid query. Inspect Grafana server log for details")
}
})
})
}

@@ -2,11 +2,14 @@ package mysql

import (
"database/sql"
"errors"
"fmt"
"reflect"
"strconv"
"strings"

"github.com/VividCortex/mysqlerr"

"github.com/grafana/grafana/pkg/setting"

"github.com/go-sql-driver/mysql"
@@ -59,18 +62,18 @@ func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoin
MetricColumnTypes: []string{"CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT"},
}

rowTransformer := mysqlRowTransformer{
rowTransformer := mysqlQueryResultTransformer{
log: logger,
}

return sqleng.NewSqlQueryEndpoint(&config, &rowTransformer, newMysqlMacroEngine(), logger)
return sqleng.NewSqlQueryEndpoint(&config, &rowTransformer, newMysqlMacroEngine(logger), logger)
}

type mysqlRowTransformer struct {
type mysqlQueryResultTransformer struct {
log log.Logger
}

func (t *mysqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
func (t *mysqlQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
values := make([]interface{}, len(columnTypes))

for i := range values {
@@ -128,3 +131,16 @@ func (t *mysqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *cor

return values, nil
}

func (t *mysqlQueryResultTransformer) TransformQueryError(err error) error {
if driverErr, ok := err.(*mysql.MySQLError); ok {
if driverErr.Number != mysqlerr.ER_PARSE_ERROR && driverErr.Number != mysqlerr.ER_BAD_FIELD_ERROR && driverErr.Number != mysqlerr.ER_NO_SUCH_TABLE {
t.log.Error("query error", "err", err)
return errQueryFailed
}
}

return err
}

var errQueryFailed = errors.New("Query failed. Please inspect Grafana server log for details")

@@ -33,13 +33,13 @@ func newPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndp
MetricColumnTypes: []string{"UNKNOWN", "TEXT", "VARCHAR", "CHAR"},
}

rowTransformer := postgresRowTransformer{
queryResultTransformer := postgresQueryResultTransformer{
log: logger,
}

timescaledb := datasource.JsonData.Get("timescaledb").MustBool(false)

return sqleng.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(timescaledb), logger)
return sqleng.NewSqlQueryEndpoint(&config, &queryResultTransformer, newPostgresMacroEngine(timescaledb), logger)
}

func generateConnectionString(datasource *models.DataSource) string {
@@ -54,11 +54,11 @@ func generateConnectionString(datasource *models.DataSource) string {
return u.String()
}

type postgresRowTransformer struct {
type postgresQueryResultTransformer struct {
log log.Logger
}

func (t *postgresRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
func (t *postgresQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) {
values := make([]interface{}, len(columnTypes))
valuePtrs := make([]interface{}, len(columnTypes))

@@ -93,3 +93,7 @@ func (t *postgresRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *

return values, nil
}

func (t *postgresQueryResultTransformer) TransformQueryError(err error) error {
return err
}

@@ -12,6 +12,8 @@ import (
"sync"
"time"

"github.com/grafana/grafana/pkg/setting"

"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb"

@@ -29,9 +31,12 @@ type SqlMacroEngine interface {
Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error)
}

// SqlTableRowTransformer transforms a query result row to RowValues with proper types.
type SqlTableRowTransformer interface {
Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error)
// SqlQueryResultTransformer transforms a query result row to RowValues with proper types.
type SqlQueryResultTransformer interface {
// TransformQueryResult transforms a query result row to RowValues with proper types.
TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error)
// TransformQueryError transforms a query error.
TransformQueryError(err error) error
}

type engineCacheType struct {
@@ -52,12 +57,12 @@ var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engi
}

type sqlQueryEndpoint struct {
macroEngine SqlMacroEngine
rowTransformer SqlTableRowTransformer
engine *xorm.Engine
timeColumnNames []string
metricColumnTypes []string
log log.Logger
macroEngine SqlMacroEngine
queryResultTransformer SqlQueryResultTransformer
engine *xorm.Engine
timeColumnNames []string
metricColumnTypes []string
log log.Logger
}

type SqlQueryEndpointConfiguration struct {
@@ -68,12 +73,12 @@ type SqlQueryEndpointConfiguration struct {
MetricColumnTypes []string
}

var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransformer SqlTableRowTransformer, macroEngine SqlMacroEngine, log log.Logger) (tsdb.TsdbQueryEndpoint, error) {
var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, queryResultTransformer SqlQueryResultTransformer, macroEngine SqlMacroEngine, log log.Logger) (tsdb.TsdbQueryEndpoint, error) {
queryEndpoint := sqlQueryEndpoint{
rowTransformer: rowTransformer,
macroEngine: macroEngine,
timeColumnNames: []string{"time"},
log: log,
queryResultTransformer: queryResultTransformer,
macroEngine: macroEngine,
timeColumnNames: []string{"time"},
log: log,
}

if len(config.TimeColumnNames) > 0 {
@@ -158,7 +163,7 @@ func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource,

rows, err := db.Query(rawSQL)
if err != nil {
queryResult.Error = err
queryResult.Error = e.queryResultTransformer.TransformQueryError(err)
return
}

@@ -240,7 +245,7 @@ func (e *sqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows,
return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
}

values, err := e.rowTransformer.Transform(columnTypes, rows)
values, err := e.queryResultTransformer.TransformQueryResult(columnTypes, rows)
if err != nil {
return err
}
@@ -338,7 +343,7 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.R
return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
}

values, err := e.rowTransformer.Transform(columnTypes, rows)
values, err := e.queryResultTransformer.TransformQueryResult(columnTypes, rows)
if err != nil {
return err
}
@@ -418,7 +423,9 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.R

series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)})

e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
if setting.Env == setting.DEV {
e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
}
}
}

@@ -1,9 +1,9 @@
|
||||
import $ from 'jquery';
|
||||
import _ from 'lodash';
|
||||
|
||||
import coreModule from 'app/core/core_module';
|
||||
import appEvents from 'app/core/app_events';
|
||||
import { getExploreUrl } from 'app/core/utils/explore';
|
||||
import locationUtil from 'app/core/utils/location_util';
|
||||
import { store } from 'app/store/store';
|
||||
|
||||
import Mousetrap from 'mousetrap';
|
||||
@@ -47,9 +47,36 @@ export class KeybindingSrv {
|
||||
this.bind('s o', this.openSearch);
|
||||
this.bind('f', this.openSearch);
|
||||
this.bind('esc', this.exit);
|
||||
this.bindGlobal('esc', this.globalEsc);
|
||||
}
|
||||
}
|
||||
|
||||
globalEsc() {
|
||||
const anyDoc = document as any;
|
||||
const activeElement = anyDoc.activeElement;
|
||||
|
||||
// typeahead needs to handle it
|
||||
const typeaheads = document.querySelectorAll('.slate-typeahead--open');
|
||||
if (typeaheads.length > 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// second check if we are in an input we can blur
|
||||
if (activeElement && activeElement.blur) {
|
||||
if (
|
||||
activeElement.nodeName === 'INPUT' ||
|
||||
activeElement.nodeName === 'TEXTAREA' ||
|
||||
activeElement.hasAttribute('data-slate-editor')
|
||||
) {
|
||||
anyDoc.activeElement.blur();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// ok no focused input or editor that should block this, let's exit!
|
||||
this.exit();
|
||||
}
|
||||
|
||||
openSearch() {
|
||||
appEvents.emit('show-dash-search');
|
||||
}
|
||||
@@ -71,11 +98,6 @@ export class KeybindingSrv {
|
||||
}
|
||||
|
||||
exit() {
|
||||
const popups = $('.popover.in, .slate-typeahead');
|
||||
if (popups.length > 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
appEvents.emit('hide-modal');
|
||||
|
||||
if (this.modalOpen) {
|
||||
@@ -199,8 +221,10 @@ export class KeybindingSrv {
|
||||
const panel = dashboard.getPanelById(dashboard.meta.focusPanelId);
|
||||
const datasource = await this.datasourceSrv.get(panel.datasource);
|
||||
const url = await getExploreUrl(panel, panel.targets, datasource, this.datasourceSrv, this.timeSrv);
|
||||
if (url) {
|
||||
this.$timeout(() => this.$location.url(url));
|
||||
const urlWithoutBase = locationUtil.stripBaseFromUrl(url);
|
||||
|
||||
if (urlWithoutBase) {
|
||||
this.$timeout(() => this.$location.url(urlWithoutBase));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -91,7 +91,8 @@ export async function getExploreUrl(
|
||||
const exploreState = JSON.stringify({ ...state, originPanelId: panel.id });
|
||||
url = renderUrl('/explore', { left: exploreState });
|
||||
}
|
||||
return url;
|
||||
const finalUrl = config.appSubUrl + url;
|
||||
return finalUrl;
|
||||
}
|
||||
|
||||
export function buildQueryTransaction(
|
||||
|
||||
@@ -30,27 +30,27 @@
|
||||
<tbody>
|
||||
<tr ng-repeat="user in ctrl.users">
|
||||
<td class="width-4 text-center link-td">
|
||||
<a href="admin/users/{{user.authLabel === 'LDAP' ? 'ldap/' : ''}}edit/{{user.id}}">
|
||||
<a href="admin/users/edit/{{user.id}}">
|
||||
<img class="filter-table__avatar" ng-src="{{user.avatarUrl}}"></img>
|
||||
</a>
|
||||
</td>
|
||||
<td class="link-td">
|
||||
<a href="admin/users/{{user.authLabel === 'LDAP' ? 'ldap/' : ''}}edit/{{user.id}}">
|
||||
<a href="admin/users/edit/{{user.id}}">
|
||||
{{user.login}}
|
||||
</a>
|
||||
</td>
|
||||
<td class="link-td">
|
||||
<a href="admin/users/{{user.authLabel === 'LDAP' ? 'ldap/' : ''}}edit/{{user.id}}">
|
||||
<a href="admin/users/edit/{{user.id}}">
|
||||
{{user.email}}
|
||||
</a>
|
||||
</td>
|
||||
<td class="link-td">
|
||||
<a href="admin/users/{{user.authLabel === 'LDAP' ? 'ldap/' : ''}}edit/{{user.id}}">
|
||||
<a href="admin/users/edit/{{user.id}}">
|
||||
{{user.lastSeenAtAge}}
|
||||
</a>
|
||||
</td>
|
||||
<td class="link-td">
|
||||
<a href="admin/users/{{user.authLabel === 'LDAP' ? 'ldap/' : ''}}edit/{{user.id}}">
|
||||
<a href="admin/users/edit/{{user.id}}">
|
||||
<i class="fa fa-shield" ng-show="user.isAdmin" bs-tooltip="'Grafana Admin'"></i>
|
||||
</a>
|
||||
</td>
|
||||
|
||||
@@ -38,6 +38,7 @@ export function loadLdapState(): ThunkResult<void> {
|
||||
const connectionInfo = await getLdapState();
|
||||
dispatch(ldapConnectionInfoLoadedAction(connectionInfo));
|
||||
} catch (error) {
|
||||
error.isHandled = true;
|
||||
const ldapError = {
|
||||
title: error.data.message,
|
||||
body: error.data.error,
|
||||
@@ -63,6 +64,7 @@ export function loadUserMapping(username: string): ThunkResult<void> {
|
||||
const userInfo = await getUserInfo(username);
|
||||
dispatch(userMappingInfoLoadedAction(userInfo));
|
||||
} catch (error) {
|
||||
error.isHandled = true;
|
||||
const userError = {
|
||||
title: error.data.message,
|
||||
body: error.data.error,
|
||||
@@ -106,6 +108,7 @@ export function loadLdapUserInfo(userId: number): ThunkResult<void> {
|
||||
dispatch(loadUserSessions(userId));
|
||||
dispatch(loadUserMapping(user.login));
|
||||
} catch (error) {
|
||||
error.isHandled = true;
|
||||
const userError = {
|
||||
title: error.data.message,
|
||||
body: error.data.error,
|
||||
|
||||
@@ -47,18 +47,14 @@ export const syncLdapUser = async (userId: number) => {
|
||||
};
|
||||
|
||||
export const getUserInfo = async (username: string): Promise<LdapUser> => {
|
||||
try {
|
||||
const response = await getBackendSrv().get(`/api/admin/ldap/${username}`);
|
||||
const { name, surname, email, login, isGrafanaAdmin, isDisabled, roles, teams } = response;
|
||||
return {
|
||||
info: { name, surname, email, login },
|
||||
permissions: { isGrafanaAdmin, isDisabled },
|
||||
roles,
|
||||
teams,
|
||||
};
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
const response = await getBackendSrv().get(`/api/admin/ldap/${username}`);
|
||||
const { name, surname, email, login, isGrafanaAdmin, isDisabled, roles, teams } = response;
|
||||
return {
|
||||
info: { name, surname, email, login },
|
||||
permissions: { isGrafanaAdmin, isDisabled },
|
||||
roles,
|
||||
teams,
|
||||
};
|
||||
};
|
||||
|
||||
export const getUser = async (id: number): Promise<User> => {
|
||||
|
||||
@@ -1,34 +1,46 @@
|
||||
// Libraries
|
||||
import React, { PureComponent } from 'react';
|
||||
import { hot } from 'react-hot-loader';
|
||||
import { connect } from 'react-redux';
|
||||
import { css } from 'emotion';
|
||||
import { Button } from '@grafana/ui';
|
||||
|
||||
// Services & Utils
|
||||
import { AngularComponent, getAngularLoader } from '@grafana/runtime';
|
||||
import { AngularComponent, getAngularLoader, getDataSourceSrv } from '@grafana/runtime';
|
||||
import appEvents from 'app/core/app_events';
|
||||
import { getAlertingValidationMessage } from './getAlertingValidationMessage';
|
||||
|
||||
// Components
|
||||
import { EditorTabBody, EditorToolbarView } from '../dashboard/panel_editor/EditorTabBody';
|
||||
import EmptyListCTA from 'app/core/components/EmptyListCTA/EmptyListCTA';
|
||||
import StateHistory from './StateHistory';
|
||||
import 'app/features/alerting/AlertTabCtrl';
|
||||
|
||||
// Types
|
||||
import { DashboardModel } from '../dashboard/state/DashboardModel';
|
||||
import { PanelModel } from '../dashboard/state/PanelModel';
|
||||
import { TestRuleResult } from './TestRuleResult';
|
||||
import { AlertBox } from 'app/core/components/AlertBox/AlertBox';
|
||||
import { AppNotificationSeverity } from 'app/types';
|
||||
import { AppNotificationSeverity, StoreState } from 'app/types';
|
||||
import { PanelEditorTabIds, getPanelEditorTab } from '../dashboard/panel_editor/state/reducers';
|
||||
import { changePanelEditorTab } from '../dashboard/panel_editor/state/actions';
|
||||
|
||||
interface Props {
|
||||
angularPanel?: AngularComponent;
|
||||
dashboard: DashboardModel;
|
||||
panel: PanelModel;
|
||||
changePanelEditorTab: typeof changePanelEditorTab;
|
||||
}
|
||||
|
||||
export class AlertTab extends PureComponent<Props> {
|
||||
interface State {
|
||||
validatonMessage: string;
|
||||
}
|
||||
|
||||
class UnConnectedAlertTab extends PureComponent<Props, State> {
|
||||
element: any;
|
||||
component: AngularComponent;
|
||||
panelCtrl: any;
|
||||
|
||||
state: State = {
|
||||
validatonMessage: '',
|
||||
};
|
||||
|
||||
componentDidMount() {
|
||||
if (this.shouldLoadAlertTab()) {
|
||||
this.loadAlertTab();
|
||||
@@ -51,8 +63,8 @@ export class AlertTab extends PureComponent<Props> {
|
||||
}
|
||||
}
|
||||
|
||||
loadAlertTab() {
|
||||
const { angularPanel } = this.props;
|
||||
async loadAlertTab() {
|
||||
const { angularPanel, panel } = this.props;
|
||||
|
||||
const scope = angularPanel.getScope();
|
||||
|
||||
@@ -71,6 +83,17 @@ export class AlertTab extends PureComponent<Props> {
|
||||
const scopeProps = { ctrl: this.panelCtrl };
|
||||
|
||||
this.component = loader.load(this.element, scopeProps, template);
|
||||
|
||||
const validatonMessage = await getAlertingValidationMessage(
|
||||
panel.transformations,
|
||||
panel.targets,
|
||||
getDataSourceSrv(),
|
||||
panel.datasource
|
||||
);
|
||||
|
||||
if (validatonMessage) {
|
||||
this.setState({ validatonMessage });
|
||||
}
|
||||
}
|
||||
|
||||
stateHistory = (): EditorToolbarView => {
|
||||
@@ -128,19 +151,39 @@ export class AlertTab extends PureComponent<Props> {
|
||||
this.forceUpdate();
|
||||
};
|
||||
|
||||
switchToQueryTab = () => {
|
||||
const { changePanelEditorTab } = this.props;
|
||||
changePanelEditorTab(getPanelEditorTab(PanelEditorTabIds.Queries));
|
||||
};
|
||||
|
||||
renderValidationMessage = () => {
|
||||
const { validatonMessage } = this.state;
|
||||
|
||||
return (
|
||||
<div
|
||||
className={css`
|
||||
width: 508px;
|
||||
margin: 128px auto;
|
||||
`}
|
||||
>
|
||||
<h2>{validatonMessage}</h2>
|
||||
<br />
|
||||
<div className="gf-form-group">
|
||||
<Button size={'md'} variant={'secondary'} icon="fa fa-arrow-left" onClick={this.switchToQueryTab}>
|
||||
Go back to Queries
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
render() {
|
||||
const { alert, transformations } = this.props.panel;
|
||||
const hasTransformations = transformations && transformations.length;
|
||||
const { validatonMessage } = this.state;
|
||||
const hasTransformations = transformations && transformations.length > 0;
|
||||
|
||||
if (!alert && hasTransformations) {
|
||||
return (
|
||||
<EditorTabBody heading="Alert">
|
||||
<AlertBox
|
||||
severity={AppNotificationSeverity.Warning}
|
||||
title="Transformations are not supported in alert queries"
|
||||
/>
|
||||
</EditorTabBody>
|
||||
);
|
||||
if (!alert && validatonMessage) {
|
||||
return this.renderValidationMessage();
|
||||
}
|
||||
|
||||
const toolbarItems = alert ? [this.stateHistory(), this.testRule(), this.deleteAlert()] : [];
|
||||
@@ -163,9 +206,20 @@ export class AlertTab extends PureComponent<Props> {
|
||||
)}
|
||||
|
||||
<div ref={element => (this.element = element)} />
|
||||
{!alert && <EmptyListCTA {...model} />}
|
||||
{!alert && !validatonMessage && <EmptyListCTA {...model} />}
|
||||
</>
|
||||
</EditorTabBody>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export const mapStateToProps = (state: StoreState) => ({});
|
||||
|
||||
const mapDispatchToProps = { changePanelEditorTab };
|
||||
|
||||
export const AlertTab = hot(module)(
|
||||
connect(
|
||||
mapStateToProps,
|
||||
mapDispatchToProps
|
||||
)(UnConnectedAlertTab)
|
||||
);
|
||||
|
||||
@@ -10,6 +10,7 @@ import { DashboardSrv } from '../dashboard/services/DashboardSrv';
|
||||
import DatasourceSrv from '../plugins/datasource_srv';
|
||||
import { DataQuery } from '@grafana/ui/src/types/datasource';
|
||||
import { PanelModel } from 'app/features/dashboard/state';
|
||||
import { getDefaultCondition } from './getAlertingValidationMessage';
|
||||
|
||||
export class AlertTabCtrl {
|
||||
panel: PanelModel;
|
||||
@@ -179,7 +180,7 @@ export class AlertTabCtrl {
|
||||
|
||||
alert.conditions = alert.conditions || [];
|
||||
if (alert.conditions.length === 0) {
|
||||
alert.conditions.push(this.buildDefaultCondition());
|
||||
alert.conditions.push(getDefaultCondition());
|
||||
}
|
||||
|
||||
alert.noDataState = alert.noDataState || config.alertingNoDataOrNullValues;
|
||||
@@ -241,16 +242,6 @@ export class AlertTabCtrl {
|
||||
}
|
||||
}
|
||||
|
||||
buildDefaultCondition() {
|
||||
return {
|
||||
type: 'query',
|
||||
query: { params: ['A', '5m', 'now'] },
|
||||
reducer: { type: 'avg', params: [] as any[] },
|
||||
evaluator: { type: 'gt', params: [null] as any[] },
|
||||
operator: { type: 'and' },
|
||||
};
|
||||
}
|
||||
|
||||
validateModel() {
|
||||
if (!this.alert) {
|
||||
return;
|
||||
@@ -348,7 +339,7 @@ export class AlertTabCtrl {
|
||||
}
|
||||
|
||||
addCondition(type: string) {
|
||||
const condition = this.buildDefaultCondition();
|
||||
const condition = getDefaultCondition();
|
||||
// add to persited model
|
||||
this.alert.conditions.push(condition);
|
||||
// add to view model
|
||||
|
||||
@@ -0,0 +1,148 @@
|
||||
import { DataSourceSrv } from '@grafana/runtime';
|
||||
import { DataSourceApi, PluginMeta } from '@grafana/ui';
|
||||
import { DataTransformerConfig } from '@grafana/data';
|
||||
|
||||
import { ElasticsearchQuery } from '../../plugins/datasource/elasticsearch/types';
|
||||
import { getAlertingValidationMessage } from './getAlertingValidationMessage';
|
||||
|
||||
describe('getAlertingValidationMessage', () => {
|
||||
describe('when called with some targets containing template variables', () => {
|
||||
it('then it should return false', async () => {
|
||||
let call = 0;
|
||||
const datasource: DataSourceApi = ({
|
||||
meta: ({ alerting: true } as any) as PluginMeta,
|
||||
targetContainsTemplate: () => {
|
||||
if (call === 0) {
|
||||
call++;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
name: 'some name',
|
||||
} as any) as DataSourceApi;
|
||||
const getMock = jest.fn().mockResolvedValue(datasource);
|
||||
const datasourceSrv: DataSourceSrv = {
|
||||
get: getMock,
|
||||
};
|
||||
const targets: ElasticsearchQuery[] = [
|
||||
{ refId: 'A', query: '@hostname:$hostname', isLogsQuery: false },
|
||||
{ refId: 'B', query: '@instance:instance', isLogsQuery: false },
|
||||
];
|
||||
const transformations: DataTransformerConfig[] = [];
|
||||
|
||||
const result = await getAlertingValidationMessage(transformations, targets, datasourceSrv, datasource.name);
|
||||
|
||||
expect(result).toBe('');
|
||||
expect(getMock).toHaveBeenCalledTimes(2);
|
||||
expect(getMock).toHaveBeenCalledWith(datasource.name);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called with some targets using a datasource that does not support alerting', () => {
|
||||
it('then it should return false', async () => {
|
||||
const alertingDatasource: DataSourceApi = ({
|
||||
meta: ({ alerting: true } as any) as PluginMeta,
|
||||
targetContainsTemplate: () => false,
|
||||
name: 'alertingDatasource',
|
||||
} as any) as DataSourceApi;
|
||||
const datasource: DataSourceApi = ({
|
||||
meta: ({ alerting: false } as any) as PluginMeta,
|
||||
targetContainsTemplate: () => false,
|
||||
name: 'datasource',
|
||||
} as any) as DataSourceApi;
|
||||
|
||||
const datasourceSrv: DataSourceSrv = {
|
||||
get: (name: string) => {
|
||||
if (name === datasource.name) {
|
||||
return Promise.resolve(datasource);
|
||||
}
|
||||
|
||||
return Promise.resolve(alertingDatasource);
|
||||
},
|
||||
};
|
||||
const targets: any[] = [
|
||||
{ refId: 'A', query: 'some query', datasource: 'alertingDatasource' },
|
||||
{ refId: 'B', query: 'some query', datasource: 'datasource' },
|
||||
];
|
||||
const transformations: DataTransformerConfig[] = [];
|
||||
|
||||
const result = await getAlertingValidationMessage(transformations, targets, datasourceSrv, datasource.name);
|
||||
|
||||
expect(result).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called with all targets containing template variables', () => {
|
||||
it('then it should return false', async () => {
|
||||
const datasource: DataSourceApi = ({
|
||||
meta: ({ alerting: true } as any) as PluginMeta,
|
||||
targetContainsTemplate: () => true,
|
||||
name: 'some name',
|
||||
} as any) as DataSourceApi;
|
||||
const getMock = jest.fn().mockResolvedValue(datasource);
|
||||
const datasourceSrv: DataSourceSrv = {
|
||||
get: getMock,
|
||||
};
|
||||
const targets: ElasticsearchQuery[] = [
|
||||
{ refId: 'A', query: '@hostname:$hostname', isLogsQuery: false },
|
||||
{ refId: 'B', query: '@instance:$instance', isLogsQuery: false },
|
||||
];
|
||||
const transformations: DataTransformerConfig[] = [];
|
||||
|
||||
const result = await getAlertingValidationMessage(transformations, targets, datasourceSrv, datasource.name);
|
||||
|
||||
expect(result).toBe('Template variables are not supported in alert queries');
|
||||
expect(getMock).toHaveBeenCalledTimes(2);
|
||||
expect(getMock).toHaveBeenCalledWith(datasource.name);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called with all targets using a datasource that does not support alerting', () => {
|
||||
it('then it should return false', async () => {
|
||||
const datasource: DataSourceApi = ({
|
||||
meta: ({ alerting: false } as any) as PluginMeta,
|
||||
targetContainsTemplate: () => false,
|
||||
name: 'some name',
|
||||
} as any) as DataSourceApi;
|
||||
const getMock = jest.fn().mockResolvedValue(datasource);
|
||||
const datasourceSrv: DataSourceSrv = {
|
||||
get: getMock,
|
||||
};
|
||||
const targets: ElasticsearchQuery[] = [
|
||||
{ refId: 'A', query: '@hostname:hostname', isLogsQuery: false },
|
||||
{ refId: 'B', query: '@instance:instance', isLogsQuery: false },
|
||||
];
|
||||
const transformations: DataTransformerConfig[] = [];
|
||||
|
||||
const result = await getAlertingValidationMessage(transformations, targets, datasourceSrv, datasource.name);
|
||||
|
||||
expect(result).toBe('The datasource does not support alerting queries');
|
||||
expect(getMock).toHaveBeenCalledTimes(2);
|
||||
expect(getMock).toHaveBeenCalledWith(datasource.name);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called with transformations', () => {
|
||||
it('then it should return false', async () => {
|
||||
const datasource: DataSourceApi = ({
|
||||
meta: ({ alerting: true } as any) as PluginMeta,
|
||||
targetContainsTemplate: () => false,
|
||||
name: 'some name',
|
||||
} as any) as DataSourceApi;
|
||||
const getMock = jest.fn().mockResolvedValue(datasource);
|
||||
const datasourceSrv: DataSourceSrv = {
|
||||
get: getMock,
|
||||
};
|
||||
const targets: ElasticsearchQuery[] = [
|
||||
{ refId: 'A', query: '@hostname:hostname', isLogsQuery: false },
|
||||
{ refId: 'B', query: '@instance:instance', isLogsQuery: false },
|
||||
];
|
||||
const transformations: DataTransformerConfig[] = [{ id: 'A', options: null }];
|
||||
|
||||
const result = await getAlertingValidationMessage(transformations, targets, datasourceSrv, datasource.name);
|
||||
|
||||
expect(result).toBe('Transformations are not supported in alert queries');
|
||||
expect(getMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
public/app/features/alerting/getAlertingValidationMessage.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
import { DataQuery } from '@grafana/ui';
|
||||
import { DataSourceSrv } from '@grafana/runtime';
|
||||
import { DataTransformerConfig } from '@grafana/data';
|
||||
|
||||
export const getDefaultCondition = () => ({
|
||||
type: 'query',
|
||||
query: { params: ['A', '5m', 'now'] },
|
||||
reducer: { type: 'avg', params: [] as any[] },
|
||||
evaluator: { type: 'gt', params: [null] as any[] },
|
||||
operator: { type: 'and' },
|
||||
});
|
||||
|
||||
export const getAlertingValidationMessage = async (
|
||||
transformations: DataTransformerConfig[],
|
||||
targets: DataQuery[],
|
||||
datasourceSrv: DataSourceSrv,
|
||||
datasourceName: string
|
||||
): Promise<string> => {
|
||||
if (targets.length === 0) {
|
||||
return 'Could not find any metric queries';
|
||||
}
|
||||
|
||||
if (transformations && transformations.length) {
|
||||
return 'Transformations are not supported in alert queries';
|
||||
}
|
||||
|
||||
let alertingNotSupported = 0;
|
||||
let templateVariablesNotSupported = 0;
|
||||
|
||||
for (const target of targets) {
|
||||
const dsName = target.datasource || datasourceName;
|
||||
const ds = await datasourceSrv.get(dsName);
|
||||
if (!ds.meta.alerting) {
|
||||
alertingNotSupported++;
|
||||
} else if (ds.targetContainsTemplate && ds.targetContainsTemplate(target)) {
|
||||
templateVariablesNotSupported++;
|
||||
}
|
||||
}
|
||||
|
||||
if (alertingNotSupported === targets.length) {
|
||||
return 'The datasource does not support alerting queries';
|
||||
}
|
||||
|
||||
if (templateVariablesNotSupported === targets.length) {
|
||||
return 'Template variables are not supported in alert queries';
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
@@ -14,7 +14,7 @@ import templateSrv from 'app/features/templating/template_srv';
|
||||
import config from 'app/core/config';
|
||||
// Types
|
||||
import { DashboardModel, PanelModel } from '../state';
|
||||
import { LoadingState, ScopedVars, AbsoluteTimeRange, toUtc } from '@grafana/data';
|
||||
import { LoadingState, ScopedVars, AbsoluteTimeRange, toUtc, toDataFrameDTO, DefaultTimeRange } from '@grafana/data';
|
||||
|
||||
const DEFAULT_PLUGIN_ERROR = 'Error in plugin';
|
||||
|
||||
@@ -52,6 +52,7 @@ export class PanelChrome extends PureComponent<Props, State> {
|
||||
data: {
|
||||
state: LoadingState.NotStarted,
|
||||
series: [],
|
||||
timeRange: DefaultTimeRange,
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -66,6 +67,7 @@ export class PanelChrome extends PureComponent<Props, State> {
|
||||
if (this.hasPanelSnapshot) {
|
||||
this.setState({
|
||||
data: {
|
||||
...this.state.data,
|
||||
state: LoadingState.Done,
|
||||
series: getProcessedDataFrames(panel.snapshotData),
|
||||
},
|
||||
@@ -113,27 +115,34 @@ export class PanelChrome extends PureComponent<Props, State> {
|
||||
return;
|
||||
}
|
||||
|
||||
let { errorMessage, isFirstLoad } = this.state;
|
||||
let { isFirstLoad } = this.state;
|
||||
let errorMessage: string | null = null;
|
||||
|
||||
if (data.state === LoadingState.Error) {
|
||||
const { error } = data;
|
||||
if (error) {
|
||||
if (errorMessage !== error.message) {
|
||||
errorMessage = error.message;
|
||||
switch (data.state) {
|
||||
case LoadingState.Loading:
|
||||
// Skip updating state data if it is already in loading state
|
||||
// This is to avoid rendering partial loading responses
|
||||
if (this.state.data.state === LoadingState.Loading) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
errorMessage = null;
|
||||
}
|
||||
|
||||
if (data.state === LoadingState.Done) {
|
||||
// If we are doing a snapshot save data in panel model
|
||||
if (this.props.dashboard.snapshot) {
|
||||
this.props.panel.snapshotData = data.series;
|
||||
}
|
||||
if (isFirstLoad) {
|
||||
isFirstLoad = false;
|
||||
}
|
||||
break;
|
||||
case LoadingState.Error:
|
||||
const { error } = data;
|
||||
if (error) {
|
||||
if (errorMessage !== error.message) {
|
||||
errorMessage = error.message;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case LoadingState.Done:
|
||||
// If we are doing a snapshot save data in panel model
|
||||
if (this.props.dashboard.snapshot) {
|
||||
this.props.panel.snapshotData = data.series.map(frame => toDataFrameDTO(frame));
|
||||
}
|
||||
if (isFirstLoad) {
|
||||
isFirstLoad = false;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
this.setState({ isFirstLoad, errorMessage, data });
|
||||
@@ -241,6 +250,7 @@ export class PanelChrome extends PureComponent<Props, State> {
|
||||
|
||||
const PanelComponent = plugin.panel;
|
||||
const innerPanelHeight = calculateInnerPanelHeight(panel, height);
|
||||
const timeRange = data.timeRange || this.timeSrv.timeRange();
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -249,7 +259,7 @@ export class PanelChrome extends PureComponent<Props, State> {
|
||||
<PanelComponent
|
||||
id={panel.id}
|
||||
data={data}
|
||||
timeRange={data.request ? data.request.range : this.timeSrv.timeRange()}
|
||||
timeRange={timeRange}
|
||||
timeZone={this.props.dashboard.getTimezone()}
|
||||
options={panel.getOptions()}
|
||||
transparent={panel.transparent}
|
||||
|
||||
@@ -48,10 +48,10 @@ export class GeneralTab extends PureComponent<Props> {
|
||||
}
|
||||
}
|
||||
|
||||
onDataLinksChanged = (links: DataLink[]) => {
|
||||
onDataLinksChanged = (links: DataLink[], callback?: () => void) => {
|
||||
this.props.panel.links = links;
|
||||
this.props.panel.render();
|
||||
this.forceUpdate();
|
||||
this.forceUpdate(callback);
|
||||
};
|
||||
|
||||
render() {
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
import React, { PureComponent } from 'react';
|
||||
import classNames from 'classnames';
|
||||
import { hot } from 'react-hot-loader';
|
||||
import { connect } from 'react-redux';
|
||||
import { Tooltip, PanelPlugin, PanelPluginMeta } from '@grafana/ui';
|
||||
import { AngularComponent, config } from '@grafana/runtime';
|
||||
|
||||
import { QueriesTab } from './QueriesTab';
|
||||
import VisualizationTab from './VisualizationTab';
|
||||
import { GeneralTab } from './GeneralTab';
|
||||
import { AlertTab } from '../../alerting/AlertTab';
|
||||
|
||||
import config from 'app/core/config';
|
||||
import { store } from 'app/store/store';
|
||||
import { updateLocation } from 'app/core/actions';
|
||||
import { AngularComponent } from '@grafana/runtime';
|
||||
|
||||
import { PanelModel } from '../state/PanelModel';
|
||||
import { DashboardModel } from '../state/DashboardModel';
|
||||
import { Tooltip, PanelPlugin, PanelPluginMeta } from '@grafana/ui';
|
||||
import { StoreState } from '../../../types';
|
||||
import { PanelEditorTabIds, PanelEditorTab } from './state/reducers';
|
||||
import { refreshPanelEditor, changePanelEditorTab, panelEditorCleanUp } from './state/actions';
|
||||
|
||||
interface PanelEditorProps {
|
||||
panel: PanelModel;
|
||||
@@ -21,56 +21,54 @@ interface PanelEditorProps {
|
||||
plugin: PanelPlugin;
|
||||
angularPanel?: AngularComponent;
|
||||
onPluginTypeChange: (newType: PanelPluginMeta) => void;
|
||||
activeTab: PanelEditorTabIds;
|
||||
tabs: PanelEditorTab[];
|
||||
refreshPanelEditor: typeof refreshPanelEditor;
|
||||
panelEditorCleanUp: typeof panelEditorCleanUp;
|
||||
changePanelEditorTab: typeof changePanelEditorTab;
|
||||
}
|
||||
|
||||
interface PanelEditorTab {
|
||||
id: string;
|
||||
text: string;
|
||||
}
|
||||
|
||||
enum PanelEditorTabIds {
|
||||
Queries = 'queries',
|
||||
Visualization = 'visualization',
|
||||
Advanced = 'advanced',
|
||||
Alert = 'alert',
|
||||
}
|
||||
|
||||
interface PanelEditorTab {
|
||||
id: string;
|
||||
text: string;
|
||||
}
|
||||
|
||||
const panelEditorTabTexts = {
|
||||
[PanelEditorTabIds.Queries]: 'Queries',
|
||||
[PanelEditorTabIds.Visualization]: 'Visualization',
|
||||
[PanelEditorTabIds.Advanced]: 'General',
|
||||
[PanelEditorTabIds.Alert]: 'Alert',
|
||||
};
|
||||
|
||||
const getPanelEditorTab = (tabId: PanelEditorTabIds): PanelEditorTab => {
|
||||
return {
|
||||
id: tabId,
|
||||
text: panelEditorTabTexts[tabId],
|
||||
};
|
||||
};
|
||||
|
||||
export class PanelEditor extends PureComponent<PanelEditorProps> {
|
||||
class UnConnectedPanelEditor extends PureComponent<PanelEditorProps> {
|
||||
constructor(props: PanelEditorProps) {
|
||||
super(props);
|
||||
}
|
||||
|
||||
componentDidMount(): void {
|
||||
this.refreshFromState();
|
||||
}
|
||||
|
||||
componentWillUnmount(): void {
|
||||
const { panelEditorCleanUp } = this.props;
|
||||
panelEditorCleanUp();
|
||||
}
|
||||
|
||||
refreshFromState = (meta?: PanelPluginMeta) => {
|
||||
const { refreshPanelEditor, plugin } = this.props;
|
||||
meta = meta || plugin.meta;
|
||||
|
||||
refreshPanelEditor({
|
||||
hasQueriesTab: !meta.skipDataQuery,
|
||||
usesGraphPlugin: meta.id === 'graph',
|
||||
alertingEnabled: config.alertingEnabled,
|
||||
});
|
||||
};
|
||||
|
||||
onChangeTab = (tab: PanelEditorTab) => {
|
||||
store.dispatch(
|
||||
updateLocation({
|
||||
query: { tab: tab.id, openVizPicker: null },
|
||||
partial: true,
|
||||
})
|
||||
);
|
||||
this.forceUpdate();
|
||||
const { changePanelEditorTab } = this.props;
|
||||
// Angular Query Components can potentially refresh the PanelModel
|
||||
// onBlur so this makes sure we change tab after that
|
||||
setTimeout(() => changePanelEditorTab(tab), 10);
|
||||
};
|
||||
|
||||
onPluginTypeChange = (newType: PanelPluginMeta) => {
|
||||
const { onPluginTypeChange } = this.props;
|
||||
onPluginTypeChange(newType);
|
||||
|
||||
this.refreshFromState(newType);
|
||||
};
|
||||
|
||||
renderCurrentTab(activeTab: string) {
|
||||
const { panel, dashboard, onPluginTypeChange, plugin, angularPanel } = this.props;
|
||||
const { panel, dashboard, plugin, angularPanel } = this.props;
|
||||
|
||||
switch (activeTab) {
|
||||
case 'advanced':
|
||||
@@ -85,7 +83,7 @@ export class PanelEditor extends PureComponent<PanelEditorProps> {
|
||||
panel={panel}
|
||||
dashboard={dashboard}
|
||||
plugin={plugin}
|
||||
onPluginTypeChange={onPluginTypeChange}
|
||||
onPluginTypeChange={this.onPluginTypeChange}
|
||||
angularPanel={angularPanel}
|
||||
/>
|
||||
);
|
||||
@@ -95,28 +93,7 @@ export class PanelEditor extends PureComponent<PanelEditorProps> {
|
||||
}
|
||||
|
||||
render() {
|
||||
const { plugin } = this.props;
|
||||
let activeTab: PanelEditorTabIds = store.getState().location.query.tab || PanelEditorTabIds.Queries;
|
||||
|
||||
const tabs: PanelEditorTab[] = [
|
||||
getPanelEditorTab(PanelEditorTabIds.Queries),
|
||||
getPanelEditorTab(PanelEditorTabIds.Visualization),
|
||||
getPanelEditorTab(PanelEditorTabIds.Advanced),
|
||||
];
|
||||
|
||||
// handle panels that do not have queries tab
|
||||
if (plugin.meta.skipDataQuery) {
|
||||
// remove queries tab
|
||||
tabs.shift();
|
||||
// switch tab
|
||||
if (activeTab === PanelEditorTabIds.Queries) {
|
||||
activeTab = PanelEditorTabIds.Visualization;
|
||||
}
|
||||
}
|
||||
|
||||
if (config.alertingEnabled && plugin.meta.id === 'graph') {
|
||||
tabs.push(getPanelEditorTab(PanelEditorTabIds.Alert));
|
||||
}
|
||||
const { activeTab, tabs } = this.props;
|
||||
|
||||
return (
|
||||
<div className="panel-editor-container__editor">
|
||||
@@ -131,6 +108,20 @@ export class PanelEditor extends PureComponent<PanelEditorProps> {
|
||||
}
|
||||
}
|
||||
|
||||
export const mapStateToProps = (state: StoreState) => ({
|
||||
activeTab: state.location.query.tab || PanelEditorTabIds.Queries,
|
||||
tabs: state.panelEditor.tabs,
|
||||
});
|
||||
|
||||
const mapDispatchToProps = { refreshPanelEditor, panelEditorCleanUp, changePanelEditorTab };
|
||||
|
||||
export const PanelEditor = hot(module)(
|
||||
connect(
|
||||
mapStateToProps,
|
||||
mapDispatchToProps
|
||||
)(UnConnectedPanelEditor)
|
||||
);
|
||||
|
||||
interface TabItemParams {
|
||||
tab: PanelEditorTab;
|
||||
activeTab: string;
|
||||
|
||||
@@ -2,25 +2,29 @@
|
||||
import React, { PureComponent } from 'react';
|
||||
import _ from 'lodash';
|
||||
import { css } from 'emotion';
|
||||
|
||||
// Components
|
||||
import { EditorTabBody, EditorToolbarView } from './EditorTabBody';
|
||||
import { DataSourcePicker } from 'app/core/components/Select/DataSourcePicker';
|
||||
import { QueryInspector } from './QueryInspector';
|
||||
import { QueryOptions } from './QueryOptions';
|
||||
import { PanelOptionsGroup, TransformationsEditor } from '@grafana/ui';
|
||||
import {
|
||||
PanelOptionsGroup,
|
||||
TransformationsEditor,
|
||||
DataQuery,
|
||||
DataSourceSelectItem,
|
||||
PanelData,
|
||||
AlphaNotice,
|
||||
PluginState,
|
||||
} from '@grafana/ui';
|
||||
import { QueryEditorRow } from './QueryEditorRow';
|
||||
|
||||
// Services
|
||||
import { getDatasourceSrv } from 'app/features/plugins/datasource_srv';
|
||||
import { getBackendSrv } from 'app/core/services/backend_srv';
|
||||
import config from 'app/core/config';
|
||||
|
||||
// Types
|
||||
import { PanelModel } from '../state/PanelModel';
|
||||
import { DashboardModel } from '../state/DashboardModel';
|
||||
import { DataQuery, DataSourceSelectItem, PanelData, AlphaNotice, PluginState } from '@grafana/ui';
|
||||
import { LoadingState, DataTransformerConfig } from '@grafana/data';
|
||||
import { LoadingState, DataTransformerConfig, DefaultTimeRange } from '@grafana/data';
|
||||
import { PluginHelp } from 'app/core/components/PluginHelp/PluginHelp';
|
||||
import { Unsubscribable } from 'rxjs';
|
||||
import { isSharedDashboardQuery, DashboardQueryEditor } from 'app/plugins/datasource/dashboard';
|
||||
@@ -55,6 +59,7 @@ export class QueriesTab extends PureComponent<Props, State> {
|
||||
data: {
|
||||
state: LoadingState.NotStarted,
|
||||
series: [],
|
||||
timeRange: DefaultTimeRange,
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { LoadingState, toDataFrame } from '@grafana/data';
|
||||
import { LoadingState, toDataFrame, dateTime } from '@grafana/data';
|
||||
import { PanelData, DataQueryRequest } from '@grafana/ui';
|
||||
import { filterPanelDataToQuery } from './QueryEditorRow';
|
||||
|
||||
@@ -28,6 +28,7 @@ describe('filterPanelDataToQuery', () => {
|
||||
makePretendRequest('sub2'),
|
||||
makePretendRequest('sub3'),
|
||||
]),
|
||||
timeRange: { from: dateTime(), to: dateTime(), raw: { from: 'now-1d', to: 'now' } },
|
||||
};
|
||||
|
||||
it('should not have an error unless the refId matches', () => {
|
||||
|
||||
@@ -2,13 +2,11 @@
|
||||
import React, { PureComponent } from 'react';
|
||||
import classNames from 'classnames';
|
||||
import _ from 'lodash';
|
||||
|
||||
// Utils & Services
|
||||
import { getDatasourceSrv } from 'app/features/plugins/datasource_srv';
|
||||
import { AngularComponent, getAngularLoader } from '@grafana/runtime';
|
||||
import { Emitter } from 'app/core/utils/emitter';
|
||||
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
|
||||
// Types
|
||||
import { PanelModel } from '../state/PanelModel';
|
||||
import { DataQuery, DataSourceApi, PanelData, DataQueryRequest, ErrorBoundaryAlert } from '@grafana/ui';
|
||||
@@ -321,10 +319,13 @@ export function filterPanelDataToQuery(data: PanelData, refId: string): PanelDat
|
||||
state = LoadingState.Error;
|
||||
}
|
||||
|
||||
const timeRange = data.timeRange;
|
||||
|
||||
return {
|
||||
state,
|
||||
series,
|
||||
request,
|
||||
error,
|
||||
timeRange,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,27 +1,23 @@
|
||||
// Libraries
|
||||
import React, { PureComponent } from 'react';
|
||||
|
||||
// Utils & Services
|
||||
import { AngularComponent, getAngularLoader } from '@grafana/runtime';
|
||||
import { connectWithStore } from 'app/core/utils/connectWithReduxStore';
|
||||
import { StoreState } from 'app/types';
|
||||
import { updateLocation } from 'app/core/actions';
|
||||
|
||||
// Components
|
||||
import { EditorTabBody, EditorToolbarView } from './EditorTabBody';
|
||||
import { VizTypePicker } from './VizTypePicker';
|
||||
import { PluginHelp } from 'app/core/components/PluginHelp/PluginHelp';
|
||||
import { FadeIn } from 'app/core/components/Animations/FadeIn';
|
||||
|
||||
// Types
|
||||
import { PanelModel } from '../state';
|
||||
import { DashboardModel } from '../state';
|
||||
import { PanelModel, DashboardModel } from '../state';
|
||||
import { VizPickerSearch } from './VizPickerSearch';
|
||||
import PluginStateinfo from 'app/features/plugins/PluginStateInfo';
|
||||
import { PanelPlugin, PanelPluginMeta, PanelData } from '@grafana/ui';
|
||||
import { PanelCtrl } from 'app/plugins/sdk';
|
||||
import { Unsubscribable } from 'rxjs';
|
||||
import { LoadingState } from '@grafana/data';
|
||||
import { LoadingState, DefaultTimeRange } from '@grafana/data';
|
||||
|
||||
interface Props {
|
||||
panel: PanelModel;
|
||||
@@ -57,6 +53,7 @@ export class VisualizationTab extends PureComponent<Props, State> {
|
||||
data: {
|
||||
state: LoadingState.NotStarted,
|
||||
series: [],
|
||||
timeRange: DefaultTimeRange,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
public/app/features/dashboard/panel_editor/state/actions.test.ts (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
import { thunkTester } from '../../../../../test/core/thunk/thunkTester';
|
||||
import { initialState, getPanelEditorTab, PanelEditorTabIds } from './reducers';
|
||||
import { refreshPanelEditor, panelEditorInitCompleted, changePanelEditorTab } from './actions';
|
||||
import { updateLocation } from '../../../../core/actions';
|
||||
|
||||
describe('refreshPanelEditor', () => {
|
||||
describe('when called and there is no activeTab in state', () => {
|
||||
it('then the dispatched action should default the activeTab to PanelEditorTabIds.Queries', async () => {
|
||||
const activeTab = PanelEditorTabIds.Queries;
|
||||
const tabs = [
|
||||
getPanelEditorTab(PanelEditorTabIds.Queries),
|
||||
getPanelEditorTab(PanelEditorTabIds.Visualization),
|
||||
getPanelEditorTab(PanelEditorTabIds.Advanced),
|
||||
getPanelEditorTab(PanelEditorTabIds.Alert),
|
||||
];
|
||||
const dispatchedActions = await thunkTester({ panelEditor: { ...initialState, activeTab: null } })
|
||||
.givenThunk(refreshPanelEditor)
|
||||
.whenThunkIsDispatched({ hasQueriesTab: true, alertingEnabled: true, usesGraphPlugin: true });
|
||||
|
||||
expect(dispatchedActions.length).toBe(1);
|
||||
expect(dispatchedActions[0]).toEqual(panelEditorInitCompleted({ activeTab, tabs }));
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called and there is already an activeTab in state', () => {
|
||||
it('then the dispatched action should include activeTab from state', async () => {
|
||||
const activeTab = PanelEditorTabIds.Visualization;
|
||||
const tabs = [
|
||||
getPanelEditorTab(PanelEditorTabIds.Queries),
|
||||
getPanelEditorTab(PanelEditorTabIds.Visualization),
|
||||
getPanelEditorTab(PanelEditorTabIds.Advanced),
|
||||
getPanelEditorTab(PanelEditorTabIds.Alert),
|
||||
];
|
||||
const dispatchedActions = await thunkTester({ panelEditor: { ...initialState, activeTab } })
|
||||
.givenThunk(refreshPanelEditor)
|
||||
.whenThunkIsDispatched({ hasQueriesTab: true, alertingEnabled: true, usesGraphPlugin: true });
|
||||
|
||||
expect(dispatchedActions.length).toBe(1);
|
||||
expect(dispatchedActions[0]).toEqual(panelEditorInitCompleted({ activeTab, tabs }));
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called and plugin has no queries tab', () => {
|
||||
it('then the dispatched action should not include Queries tab and default the activeTab to PanelEditorTabIds.Visualization', async () => {
|
||||
const activeTab = PanelEditorTabIds.Visualization;
|
||||
const tabs = [
|
||||
getPanelEditorTab(PanelEditorTabIds.Visualization),
|
||||
getPanelEditorTab(PanelEditorTabIds.Advanced),
|
||||
getPanelEditorTab(PanelEditorTabIds.Alert),
|
||||
];
|
||||
const dispatchedActions = await thunkTester({ panelEditor: { ...initialState } })
|
||||
.givenThunk(refreshPanelEditor)
|
||||
.whenThunkIsDispatched({ hasQueriesTab: false, alertingEnabled: true, usesGraphPlugin: true });
|
||||
|
||||
expect(dispatchedActions.length).toBe(1);
|
||||
expect(dispatchedActions[0]).toEqual(panelEditorInitCompleted({ activeTab, tabs }));
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called and alerting is enabled and the visualization is the graph plugin', () => {
|
||||
it('then the dispatched action should include the alert tab', async () => {
|
||||
const activeTab = PanelEditorTabIds.Queries;
|
||||
const tabs = [
|
||||
getPanelEditorTab(PanelEditorTabIds.Queries),
|
||||
getPanelEditorTab(PanelEditorTabIds.Visualization),
|
||||
getPanelEditorTab(PanelEditorTabIds.Advanced),
|
||||
getPanelEditorTab(PanelEditorTabIds.Alert),
|
||||
];
|
||||
const dispatchedActions = await thunkTester({ panelEditor: { ...initialState } })
|
||||
.givenThunk(refreshPanelEditor)
|
||||
.whenThunkIsDispatched({ hasQueriesTab: true, alertingEnabled: true, usesGraphPlugin: true });
|
||||
|
||||
expect(dispatchedActions.length).toBe(1);
|
||||
expect(dispatchedActions[0]).toEqual(panelEditorInitCompleted({ activeTab, tabs }));
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called and alerting is not enabled', () => {
|
||||
it('then the dispatched action should not include the alert tab', async () => {
|
||||
const activeTab = PanelEditorTabIds.Queries;
|
||||
const tabs = [
|
||||
getPanelEditorTab(PanelEditorTabIds.Queries),
|
||||
getPanelEditorTab(PanelEditorTabIds.Visualization),
|
||||
getPanelEditorTab(PanelEditorTabIds.Advanced),
|
||||
];
|
||||
const dispatchedActions = await thunkTester({ panelEditor: { ...initialState } })
|
||||
.givenThunk(refreshPanelEditor)
|
||||
.whenThunkIsDispatched({ hasQueriesTab: true, alertingEnabled: false, usesGraphPlugin: true });
|
||||
|
||||
expect(dispatchedActions.length).toBe(1);
|
||||
expect(dispatchedActions[0]).toEqual(panelEditorInitCompleted({ activeTab, tabs }));
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called and the visualization is not the graph plugin', () => {
|
||||
it('then the dispatched action should not include the alert tab', async () => {
|
||||
const activeTab = PanelEditorTabIds.Queries;
|
||||
const tabs = [
|
||||
getPanelEditorTab(PanelEditorTabIds.Queries),
|
||||
getPanelEditorTab(PanelEditorTabIds.Visualization),
|
||||
getPanelEditorTab(PanelEditorTabIds.Advanced),
|
||||
];
|
||||
const dispatchedActions = await thunkTester({ panelEditor: { ...initialState } })
|
||||
.givenThunk(refreshPanelEditor)
|
||||
.whenThunkIsDispatched({ hasQueriesTab: true, alertingEnabled: true, usesGraphPlugin: false });
|
||||
|
||||
expect(dispatchedActions.length).toBe(1);
|
||||
expect(dispatchedActions[0]).toEqual(panelEditorInitCompleted({ activeTab, tabs }));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('changePanelEditorTab', () => {
|
||||
describe('when called', () => {
|
||||
it('then it should dispatch correct actions', async () => {
|
||||
const activeTab = getPanelEditorTab(PanelEditorTabIds.Visualization);
|
||||
const dispatchedActions = await thunkTester({})
|
||||
.givenThunk(changePanelEditorTab)
|
||||
.whenThunkIsDispatched(activeTab);
|
||||
|
||||
expect(dispatchedActions.length).toBe(1);
|
||||
expect(dispatchedActions).toEqual([
|
||||
updateLocation({ query: { tab: activeTab.id, openVizPicker: null }, partial: true }),
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
public/app/features/dashboard/panel_editor/state/actions.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
import { actionCreatorFactory, noPayloadActionCreatorFactory } from '../../../../core/redux';
import { PanelEditorTabIds, PanelEditorTab, getPanelEditorTab } from './reducers';
import { ThunkResult } from '../../../../types';
import { updateLocation } from '../../../../core/actions';

export interface PanelEditorInitCompleted {
  activeTab: PanelEditorTabIds;
  tabs: PanelEditorTab[];
}

export const panelEditorInitCompleted = actionCreatorFactory<PanelEditorInitCompleted>(
  'PANEL_EDITOR_INIT_COMPLETED'
).create();

export const panelEditorCleanUp = noPayloadActionCreatorFactory('PANEL_EDITOR_CLEAN_UP').create();

export const refreshPanelEditor = (props: {
  hasQueriesTab?: boolean;
  usesGraphPlugin?: boolean;
  alertingEnabled?: boolean;
}): ThunkResult<void> => {
  return async (dispatch, getState) => {
    let activeTab = getState().panelEditor.activeTab || PanelEditorTabIds.Queries;
    const { hasQueriesTab, usesGraphPlugin, alertingEnabled } = props;

    const tabs: PanelEditorTab[] = [
      getPanelEditorTab(PanelEditorTabIds.Queries),
      getPanelEditorTab(PanelEditorTabIds.Visualization),
      getPanelEditorTab(PanelEditorTabIds.Advanced),
    ];

    // handle panels that do not have queries tab
    if (!hasQueriesTab) {
      // remove queries tab
      tabs.shift();
      // switch tab
      if (activeTab === PanelEditorTabIds.Queries) {
        activeTab = PanelEditorTabIds.Visualization;
      }
    }

    if (alertingEnabled && usesGraphPlugin) {
      tabs.push(getPanelEditorTab(PanelEditorTabIds.Alert));
    }

    dispatch(panelEditorInitCompleted({ activeTab, tabs }));
  };
};

export const changePanelEditorTab = (activeTab: PanelEditorTab): ThunkResult<void> => {
  return async dispatch => {
    dispatch(updateLocation({ query: { tab: activeTab.id, openVizPicker: null }, partial: true }));
  };
};
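A minimal usage sketch (editor's addition, not in the diff): how a panel-editor component might drive these thunks, assuming a react-redux dispatch hook is available and that the flags below are derived from the panel plugin and settings; the component and prop names are hypothetical.

import React, { useEffect } from 'react';
import { useDispatch } from 'react-redux';
import { refreshPanelEditor, changePanelEditorTab } from './actions';
import { getPanelEditorTab, PanelEditorTabIds } from './reducers';

// Hypothetical props: the real editor derives these flags from the panel plugin and config.
interface Props {
  hasQueriesTab: boolean;
  usesGraphPlugin: boolean;
  alertingEnabled: boolean;
}

export const PanelEditorTabsSketch: React.FC<Props> = ({ hasQueriesTab, usesGraphPlugin, alertingEnabled }) => {
  const dispatch = useDispatch();

  // Rebuild the tab list whenever one of the relevant flags changes.
  useEffect(() => {
    dispatch(refreshPanelEditor({ hasQueriesTab, usesGraphPlugin, alertingEnabled }));
  }, [dispatch, hasQueriesTab, usesGraphPlugin, alertingEnabled]);

  // Switching tabs only updates the URL query via updateLocation; reducer state is untouched here.
  const onSelectVisualization = () =>
    dispatch(changePanelEditorTab(getPanelEditorTab(PanelEditorTabIds.Visualization)));

  return <button onClick={onSelectVisualization}>Visualization</button>;
};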
@@ -0,0 +1,35 @@
import { reducerTester } from '../../../../../test/core/redux/reducerTester';
import { initialState, panelEditorReducer, PanelEditorTabIds, PanelEditorTab, getPanelEditorTab } from './reducers';
import { panelEditorInitCompleted, panelEditorCleanUp } from './actions';

describe('panelEditorReducer', () => {
  describe('when panelEditorInitCompleted is dispatched', () => {
    it('then state should be correct', () => {
      const activeTab = PanelEditorTabIds.Alert;
      const tabs: PanelEditorTab[] = [
        getPanelEditorTab(PanelEditorTabIds.Queries),
        getPanelEditorTab(PanelEditorTabIds.Visualization),
        getPanelEditorTab(PanelEditorTabIds.Advanced),
      ];
      reducerTester()
        .givenReducer(panelEditorReducer, initialState)
        .whenActionIsDispatched(panelEditorInitCompleted({ activeTab, tabs }))
        .thenStateShouldEqual({ activeTab, tabs });
    });
  });

  describe('when panelEditorCleanUp is dispatched', () => {
    it('then state should be initialState', () => {
      const activeTab = PanelEditorTabIds.Alert;
      const tabs: PanelEditorTab[] = [
        getPanelEditorTab(PanelEditorTabIds.Queries),
        getPanelEditorTab(PanelEditorTabIds.Visualization),
        getPanelEditorTab(PanelEditorTabIds.Advanced),
      ];
      reducerTester()
        .givenReducer(panelEditorReducer, { activeTab, tabs })
        .whenActionIsDispatched(panelEditorCleanUp())
        .thenStateShouldEqual(initialState);
    });
  });
});
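Editor's note with a hedged sketch: the cleanup test above relies on panelEditorCleanUp resetting the slice to initialState. In a component this would typically be dispatched when the editor closes; the hook-based wiring below is an assumption, not code from the diff.

import { useEffect } from 'react';
import { useDispatch } from 'react-redux';
import { panelEditorCleanUp } from './actions';

// Hypothetical hook: resets the panel editor slice when the hosting component unmounts.
export const usePanelEditorCleanUp = () => {
  const dispatch = useDispatch();

  useEffect(() => {
    // The returned callback runs on unmount and returns the slice to initialState.
    return () => {
      dispatch(panelEditorCleanUp());
    };
  }, [dispatch]);
};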
public/app/features/dashboard/panel_editor/state/reducers.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import { reducerFactory } from '../../../../core/redux';
import { panelEditorCleanUp, panelEditorInitCompleted } from './actions';

export interface PanelEditorTab {
  id: string;
  text: string;
}

export enum PanelEditorTabIds {
  Queries = 'queries',
  Visualization = 'visualization',
  Advanced = 'advanced',
  Alert = 'alert',
}

export const panelEditorTabTexts = {
  [PanelEditorTabIds.Queries]: 'Queries',
  [PanelEditorTabIds.Visualization]: 'Visualization',
  [PanelEditorTabIds.Advanced]: 'General',
  [PanelEditorTabIds.Alert]: 'Alert',
};

export const getPanelEditorTab = (tabId: PanelEditorTabIds): PanelEditorTab => {
  return {
    id: tabId,
    text: panelEditorTabTexts[tabId],
  };
};

export interface PanelEditorState {
  activeTab: PanelEditorTabIds;
  tabs: PanelEditorTab[];
}

export const initialState: PanelEditorState = {
  activeTab: null,
  tabs: [],
};

export const panelEditorReducer = reducerFactory<PanelEditorState>(initialState)
  .addMapper({
    filter: panelEditorInitCompleted,
    mapper: (state, action): PanelEditorState => {
      const { activeTab, tabs } = action.payload;
      return {
        ...state,
        activeTab,
        tabs,
      };
    },
  })
  .addMapper({
    filter: panelEditorCleanUp,
    mapper: (): PanelEditorState => initialState,
  })
  .create();
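A minimal sketch (editor's addition) of how this slice could be exposed to the rest of the app: combining panelEditorReducer into a feature-level reducer map with Redux's combineReducers. The surrounding reducer names are hypothetical; Grafana's actual root reducer wiring may differ.

import { combineReducers } from 'redux';
import { panelEditorReducer, PanelEditorState } from './reducers';

// Hypothetical shape of the dashboard feature state that hosts the panel editor slice.
export interface DashboardFeatureState {
  panelEditor: PanelEditorState;
}

// getState().panelEditor in the thunks above resolves through a mapping like this one.
export const dashboardReducers = combineReducers<DashboardFeatureState>({
  panelEditor: panelEditorReducer,
});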
@@ -1,17 +1,22 @@
// Libraries
import _ from 'lodash';

// Utils
import kbn from 'app/core/utils/kbn';
import coreModule from 'app/core/core_module';
import { dateMath } from '@grafana/data';

// Types
import { TimeRange, RawTimeRange, TimeZone } from '@grafana/data';
import {
  dateMath,
  DefaultTimeRange,
  TimeRange,
  RawTimeRange,
  TimeZone,
  toUtc,
  dateTime,
  isDateTime,
} from '@grafana/data';
import { ITimeoutService, ILocationService } from 'angular';
import { ContextSrv } from 'app/core/services/context_srv';
import { DashboardModel } from '../state/DashboardModel';
import { toUtc, dateTime, isDateTime } from '@grafana/data';
import { getZoomedTimeRange, getShiftedTimeRange } from 'app/core/utils/timePicker';

export class TimeSrv {
@@ -32,7 +37,7 @@ export class TimeSrv {
    private contextSrv: ContextSrv
  ) {
    // default time
    this.time = { from: '6h', to: 'now' };
    this.time = DefaultTimeRange.raw;

    $rootScope.$on('zoom-out', this.zoomOut.bind(this));
    $rootScope.$on('shift-time', this.shiftTime.bind(this));

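Editor's sketch to illustrate the consolidated @grafana/data import and the new default: a helper that resolves a RawTimeRange into a concrete TimeRange with dateMath, falling back to DefaultTimeRange.raw when nothing is stored. The helper name is hypothetical, and the dateMath.parse/toUtc calls are assumed to behave as in the @grafana/data package of this release.

import { dateMath, DefaultTimeRange, RawTimeRange, TimeRange, TimeZone, isDateTime, toUtc } from '@grafana/data';

// Hypothetical helper: resolve a possibly-relative raw range (e.g. 'now-6h' to 'now') into absolute moments.
export function resolveTimeRange(raw: RawTimeRange = DefaultTimeRange.raw, timezone?: TimeZone): TimeRange {
  const from = isDateTime(raw.from) ? raw.from : dateMath.parse(raw.from, false, timezone);
  const to = isDateTime(raw.to) ? raw.to : dateMath.parse(raw.to, true, timezone);

  return {
    from: from || toUtc(),
    to: to || toUtc(),
    raw,
  };
}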
Some files were not shown because too many files have changed in this diff.