Mirror of https://github.com/grafana/grafana.git (synced 2025-12-20 19:44:55 +08:00)

Compare commits: zoltan/pos...v7.5.0 (80 commits)
| Author | SHA1 | Date |
|---|---|---|
| | b144fb1a9f | |
| | c330762f20 | |
| | f593179a4a | |
| | 0f146e7c38 | |
| | 099f5cbf97 | |
| | 15ee6280db | |
| | 0b08c5e608 | |
| | c713b92249 | |
| | 80a4d4296c | |
| | 47c10bab65 | |
| | 1acc921689 | |
| | 0528f7211a | |
| | fafa352451 | |
| | a9077812d8 | |
| | 5039c90b8c | |
| | 1df1d60e1e | |
| | fbf3469e80 | |
| | bc463d6a2f | |
| | 5d58d0aabb | |
| | e3b5d53545 | |
| | 6bfe5ccf3e | |
| | d025e3b57e | |
| | 44298a3761 | |
| | 37b07a5a11 | |
| | 42c5db4ac3 | |
| | ca31570dcc | |
| | da11a67c4e | |
| | 4a56ca167c | |
| | 698cf6de77 | |
| | 0954e1c277 | |
| | 986cfa767d | |
| | 0ba9aff11e | |
| | 3c8daef653 | |
| | 30b91296ad | |
| | f570fb2d6f | |
| | cfc0e132f5 | |
| | f54533877b | |
| | 74dec56b49 | |
| | 320ad56397 | |
| | 46daba67d2 | |
| | 0e9b553daf | |
| | c5ea45ab80 | |
| | 44838f201c | |
| | 47937d8f1b | |
| | 1d3339de34 | |
| | 876e18c105 | |
| | bc5f11d0e4 | |
| | 61ba4eeb8c | |
| | 4d1d3f9ef5 | |
| | e23c086a94 | |
| | 6ed29a0418 | |
| | 1616ea14f0 | |
| | 0af435c61d | |
| | 7a7aad22a2 | |
| | 994fa86836 | |
| | 099b761702 | |
| | 6ddb62fca1 | |
| | 6623706336 | |
| | 0be2177d15 | |
| | 3549380bd2 | |
| | 453b956797 | |
| | aeee3931d2 | |
| | be4b530a85 | |
| | 8df4fcd190 | |
| | 2297e13ac6 | |
| | 877b50957c | |
| | a21a300f3e | |
| | c946c4e372 | |
| | f95657dc2d | |
| | f2b7582671 | |
| | e81a5bc015 | |
| | cf29df808a | |
| | 1ab3249bab | |
| | 4452ddfee2 | |
| | be08685e3f | |
| | dc8d109ce4 | |
| | 6b83895315 | |
| | a3d4d4ac7c | |
| | 7ea361825e | |
| | 11740b48a7 | |
.drone.yml (322 lines changed; diff suppressed because it is too large)
@@ -16,7 +16,7 @@ COPY emails emails
ENV NODE_ENV production
RUN yarn build

FROM golang:1.16.0-alpine3.13 as go-builder
FROM golang:1.16.1-alpine3.13 as go-builder

RUN apk add --no-cache gcc g++

@@ -69,6 +69,10 @@ socket = /tmp/grafana.sock
# CDN Url
cdn_url =

# Sets the maximum time in minutes before timing out read of an incoming request and closing idle connections.
# `0` means there is no timeout for reading the request.
read_timeout = 0

#################################### Database ############################
[database]
# You can configure the database connection by specifying type, host, name, user and password
@@ -508,13 +512,16 @@ active_sync_enabled = true
#################################### AWS ###########################
[aws]
# Enter a comma-separated list of allowed AWS authentication providers.
# Options are: default (AWS SDK Default), keys (Access && secret key), credentials (Credentials field), ec2_IAM_role (EC2 IAM Role)
# Options are: default (AWS SDK Default), keys (Access && secret key), credentials (Credentials field), ec2_iam_role (EC2 IAM Role)
allowed_auth_providers = default,keys,credentials

# Allow AWS users to assume a role using temporary security credentials.
# If true, assume role will be enabled for all AWS authentication providers that are specified in aws_auth_providers
assume_role_enabled = true

# Specify max no of pages to be returned by the ListMetricPages API
list_metrics_page_limit = 500

#################################### SMTP / Emailing #####################
[smtp]
enabled = false
@@ -682,6 +689,9 @@ max_annotation_age =
max_annotations_to_keep =

#################################### Annotations #########################
[annotations]
# Configures the batch size for the annotation clean-up job. This setting is used for dashboard, API, and alert annotations.
cleanupjob_batchsize = 100

[annotations.dashboard]
# Dashboard annotations means that annotations are associated with the dashboard they are created on.

@@ -70,6 +70,10 @@
# CDN Url
;cdn_url =

# Sets the maximum time using a duration format (5s/5m/5ms) before timing out read of an incoming request and closing idle connections.
# `0` means there is no timeout for reading the request.
;read_timeout = 0

#################################### Database ####################################
[database]
# You can configure the database connection by specifying type, host, name, user and password
@@ -498,7 +502,7 @@
#################################### AWS ###########################
[aws]
# Enter a comma-separated list of allowed AWS authentication providers.
# Options are: default (AWS SDK Default), keys (Access && secret key), credentials (Credentials field), ec2_IAM_role (EC2 IAM Role)
# Options are: default (AWS SDK Default), keys (Access && secret key), credentials (Credentials field), ec2_iam_role (EC2 IAM Role)
; allowed_auth_providers = default,keys,credentials

# Allow AWS users to assume a role using temporary security credentials.
@@ -675,6 +679,9 @@
;max_annotations_to_keep =

#################################### Annotations #########################
[annotations]
# Configures the batch size for the annotation clean-up job. This setting is used for dashboard, API, and alert annotations.
;cleanupjob_batchsize = 100

[annotations.dashboard]
# Dashboard annotations means that annotations are associated with the dashboard they are created on.
devenv/dev-dashboards/panel-common/linked-viz.json (1194 lines; diff suppressed because it is too large)
@@ -1,7 +1,7 @@
FROM golang:latest
FROM golang:latest
ADD main.go /
WORKDIR /
RUN go build -o main .
RUN GO111MODULE=off go build -o main .
EXPOSE 3011
ENTRYPOINT ["/main"]

@@ -1,7 +1,7 @@
FROM golang:latest
FROM golang:latest
ADD main.go /
WORKDIR /
RUN go build -o main .
RUN GO111MODULE=off go build -o main .
EXPOSE 3011
ENTRYPOINT ["/main"]
@@ -268,6 +268,11 @@ Specify a full HTTP URL address to the root of your Grafana CDN assets. Grafana
For example, given a cdn url like `https://cdn.myserver.com` grafana will try to load a javascript file from
`http://cdn.myserver.com/grafana-oss/v7.4.0/public/build/app.<hash>.js`.

### read_timeout

Sets the maximum time using a duration format (5s/5m/5ms) before timing out read of an incoming request and closing idle connections.
`0` means there is no timeout for reading the request.

<hr />

## [database]

@@ -780,7 +785,7 @@ You can configure core and external AWS plugins.

Specify what authentication providers the AWS plugins allow. For a list of allowed providers, refer to the data-source configuration page for a given plugin. If you configure a plugin by provisioning, only providers that are specified in `allowed_auth_providers` are allowed.

Options: `default` (AWS SDK default), `keys` (Access and secret key), `credentials` (Credentials file), `ec2_IAM_role` (EC2 IAM role)
Options: `default` (AWS SDK default), `keys` (Access and secret key), `credentials` (Credentials file), `ec2_iam_role` (EC2 IAM role)

### assume_role_enabled

@@ -788,6 +793,10 @@ Set to `false` to disable AWS authentication from using an assumed role with tem

If this option is disabled, the **Assume Role** and the **External Id** field are removed from the AWS data source configuration page. If the plugin is configured using provisioning, it is possible to use an assumed role as long as `assume_role_enabled` is set to `true`.

### list_metrics_page_limit

Use the [List Metrics API](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_ListMetrics.html) option to load metrics for custom namespaces in the CloudWatch data source. By default, the page limit is 500.

<hr />

## [smtp]

@@ -1082,6 +1091,12 @@ Configures max number of alert annotations that Grafana stores. Default value is

<hr>

## [annotations]

### cleanupjob_batchsize

Configures the batch size for the annotation clean-up job. This setting is used for dashboard, API, and alert annotations.

## [annotations.dashboard]

Dashboard annotations means that annotations are associated with the dashboard they are created on.
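To make the `cdn_url` and new `read_timeout` options documented above concrete, here is a minimal sketch of how they might be set in a custom configuration file. The host `cdn.example.com` and the `30s` timeout are illustrative assumptions, not values taken from this change:

```
[server]
# Illustrative values only: pick a CDN host and timeout that fit your deployment.
cdn_url = https://cdn.example.com
read_timeout = 30s
```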
@@ -375,6 +375,22 @@ To request a quota increase, visit the [AWS Service Quotas console](https://cons

Please see the AWS documentation for [Service Quotas](https://docs.aws.amazon.com/servicequotas/latest/userguide/intro.html) and [CloudWatch limits](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_limits.html) for more information.

## Configure the data source with grafana.ini

In the [Grafana configuration](https://grafana.com/docs/grafana/latest/administration/configuration/#aws) there's an `AWS` section that allows you to customize the data source.

### allowed_auth_providers

Specify which authentication providers are allowed for the CloudWatch data source. The following providers are enabled by default in OSS Grafana: `default` (AWS SDK default), keys (Access and secret key), credentials (Credentials file), ec2_iam_role (EC2 IAM role).

### assume_role_enabled

Allows you to disable `assume role (ARN)` in the CloudWatch data source. By default, assume role (ARN) is enabled for OSS Grafana.

### list_metrics_page_limit

When a custom namespace is specified in the query editor, the [List Metrics API](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_ListMetrics.html) is used to populate the _Metrics_ field and the _Dimension_ fields. The API is paginated and returns up to 500 results per page. The CloudWatch data source also limits the number of pages to 500. However, you can change this limit using the `list_metrics_page_limit` variable in the [grafana configuration file](https://grafana.com/docs/grafana/latest/administration/configuration/#aws).

## Configure the data source with provisioning

It's now possible to configure data sources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for data sources on the [provisioning docs page]({{< relref "../administration/provisioning/#datasources" >}})
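For reference, the three settings documented above correspond to the `[aws]` keys shown in the shipped defaults earlier in this diff:

```
[aws]
allowed_auth_providers = default,keys,credentials
assume_role_enabled = true
list_metrics_page_limit = 500
```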
@@ -3,6 +3,7 @@ title = "What's new in Grafana v7.5"
description = "Feature and improvement highlights for Grafana v7.5"
keywords = ["grafana", "new", "documentation", "7.5", "release notes"]
weight = -32
aliases = ["/docs/grafana/latest/guides/whats-new-in-v7-5/"]
[_build]
list = false
+++

@@ -102,3 +103,13 @@ If you enable the feature, then you can use template variables in reports.
## Breaking changes

There are no known breaking changes in this release.

## Updated configuration

```
[server]
read_timeout = 0
```

Sets the maximum time using a duration format (5s/5m/5ms) before timing out read of an incoming request and closing idle connections.
`0` means there is no timeout for reading the request.
go.mod (9 lines changed)

@@ -42,7 +42,7 @@ require (
github.com/google/go-cmp v0.5.4
github.com/google/uuid v1.2.0
github.com/gosimple/slug v1.9.0
github.com/grafana/grafana-aws-sdk v0.1.0
github.com/grafana/grafana-aws-sdk v0.2.0
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4
github.com/grafana/grafana-plugin-sdk-go v0.88.0
github.com/grafana/loki v1.6.2-0.20201026154740-6978ee5d7387
@@ -52,6 +52,7 @@ require (
github.com/hashicorp/go-version v1.2.1
github.com/inconshreveable/log15 v0.0.0-20180818164646-67afb5ed74ec
github.com/influxdata/influxdb-client-go/v2 v2.2.0
github.com/jaegertracing/jaeger v1.22.0
github.com/jmespath/go-jmespath v0.4.0
github.com/jonboulle/clockwork v0.2.2 // indirect
github.com/json-iterator/go v1.1.10
@@ -67,7 +68,6 @@ require (
github.com/prometheus/client_golang v1.9.0
github.com/prometheus/client_model v0.2.0
github.com/prometheus/common v0.18.0
github.com/quasilyte/go-ruleguard/dsl/fluent v0.0.0-20201222093424-5d7e62a465d3 // indirect
github.com/robfig/cron v0.0.0-20180505203441-b41be1df6967
github.com/robfig/cron/v3 v3.0.1
github.com/russellhaering/goxmldsig v1.1.0
@@ -82,8 +82,9 @@ require (
github.com/weaveworks/common v0.0.0-20201119133501-0619918236ec
github.com/xorcare/pointer v1.1.0
github.com/yudai/gojsondiff v1.0.0
golang.org/x/crypto v0.0.0-20201208171446-5f87f3452ae9
golang.org/x/net v0.0.0-20201224014010-6772e930b67b
go.opentelemetry.io/collector v0.21.0
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad
golang.org/x/net v0.0.0-20210119194325-5f4716e94777
golang.org/x/oauth2 v0.0.0-20210113205817-d3ed898aa8a3
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a
golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e

@@ -1,6 +1,8 @@
{
  "npmClient": "yarn",
  "useWorkspaces": true,
  "packages": ["packages/*"],
  "version": "7.5.0-pre.0"
  "packages": [
    "packages/*"
  ],
  "version": "7.5.0"
}
package.json (11 lines changed)

@@ -3,7 +3,7 @@
  "license": "Apache-2.0",
  "private": true,
  "name": "grafana",
  "version": "7.5.0-pre",
  "version": "7.5.0",
  "repository": "github:grafana/grafana",
  "scripts": {
    "api-tests": "jest --notify --watch --config=devenv/e2e-api-tests/jest.js",
@@ -46,7 +46,7 @@
    "ci:test-frontend": "yarn run prettier:check && yarn run typecheck && yarn run lint && yarn run test:ci && yarn grafana-toolkit node-version-check && ./scripts/ci-check-strict.sh"
  },
  "grafana": {
    "whatsNewUrl": "https://grafana.com/docs/grafana/latest/guides/whats-new-in-v7-4/",
    "whatsNewUrl": "https://grafana.com/docs/grafana/latest/guides/whats-new-in-v7-5/",
    "releaseNotesUrl": "https://grafana.com/docs/grafana/latest/release-notes/"
  },
  "husky": {
@@ -75,7 +75,7 @@
    "@babel/preset-typescript": "7.8.3",
    "@grafana/api-documenter": "7.11.2",
    "@grafana/api-extractor": "7.10.1",
    "@grafana/eslint-config": "2.2.1",
    "@grafana/eslint-config": "2.3.0",
    "@rtsao/plugin-proposal-class-properties": "7.0.1-patch.1",
    "@testing-library/jest-dom": "5.11.5",
    "@testing-library/react": "11.1.2",
@@ -137,7 +137,7 @@
    "es-abstract": "1.18.0-next.1",
    "es6-promise": "4.2.8",
    "es6-shim": "0.35.5",
    "eslint": "7.19.0",
    "eslint": "7.21.0",
    "eslint-config-prettier": "7.2.0",
    "eslint-plugin-jsdoc": "31.6.1",
    "eslint-plugin-no-only-tests": "2.4.0",
@@ -147,7 +147,7 @@
    "expect.js": "0.3.1",
    "expose-loader": "0.7.5",
    "file-loader": "5.0.2",
    "fork-ts-checker-webpack-plugin": "5.0.0",
    "fork-ts-checker-webpack-plugin": "6.1.1",
    "gaze": "1.1.3",
    "glob": "7.1.6",
    "html-loader": "0.5.5",
@@ -200,6 +200,7 @@
  },
  "dependencies": {
    "@emotion/core": "10.0.27",
    "@grafana/aws-sdk": "0.0.24",
    "@grafana/slate-react": "0.22.9-grafana",
    "@popperjs/core": "2.5.4",
    "@reduxjs/toolkit": "1.5.0",
@@ -2,7 +2,7 @@
  "author": "Grafana Labs",
  "license": "Apache-2.0",
  "name": "@grafana/data",
  "version": "7.5.0-pre.0",
  "version": "7.5.0",
  "description": "Grafana Data Library",
  "keywords": [
    "typescript"

@@ -1,6 +1,6 @@
import { TimeZone } from '../types/time';
/* eslint-disable id-blacklist, no-restricted-imports, @typescript-eslint/ban-types */
import moment, { Moment, MomentInput, DurationInputArg1 } from 'moment';
import moment, { Moment, MomentInput, DurationInputArg1, DurationInputArg2 } from 'moment';
export interface DateTimeBuiltinFormat {
  __momentBuiltinFormatBrand: any;
}
@@ -17,6 +17,7 @@ export type DurationUnit =
  | 'M'
  | 'week'
  | 'weeks'
  | 'isoWeek'
  | 'w'
  | 'day'
  | 'days'
@@ -96,7 +97,8 @@ export const toUtc = (input?: DateTimeInput, formatInput?: FormatInput): DateTim
};

export const toDuration = (input?: DurationInput, unit?: DurationUnit): DateTimeDuration => {
  return moment.duration(input as DurationInputArg1, unit) as DateTimeDuration;
  // moment built-in types are a bit flaky, for example `isoWeek` is not in the type definition but it's present in the js source.
  return moment.duration(input as DurationInputArg1, unit as DurationInputArg2) as DateTimeDuration;
};

export const dateTime = (input?: DateTimeInput, formatInput?: FormatInput): DateTime => {
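A small usage sketch of the widened `toDuration` signature above. It assumes `toDuration` and the datetime helpers are re-exported from `@grafana/data`, which is how the rest of the codebase consumes them:

```typescript
import { toDuration } from '@grafana/data';

// 'isoWeek' is now part of DurationUnit; moment accepts it at runtime even though
// its published typings do not list it, hence the DurationInputArg2 cast above.
const twoIsoWeeks = toDuration(2, 'isoWeek');
console.log(twoIsoWeeks.asHours()); // 336 (14 days)
```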
@@ -13,6 +13,9 @@ describe('getFieldDisplayValuesProxy', () => {
      {
        name: 'power',
        values: [100, 200, 300],
        labels: {
          name: 'POWAH!',
        },
        config: {
          displayName: 'The Power',
        },
@@ -60,6 +63,7 @@ describe('getFieldDisplayValuesProxy', () => {
    });
    expect(p.power.numeric).toEqual(300);
    expect(p['power'].numeric).toEqual(300);
    expect(p['POWAH!'].numeric).toEqual(300);
    expect(p['The Power'].numeric).toEqual(300);
    expect(p[1].numeric).toEqual(300);
  });

@@ -28,9 +28,18 @@ export function getFieldDisplayValuesProxy(
        field = frame.fields[k];
      }
      if (!field) {
        // 3. Match the title
        // 3. Match the config displayName
        field = frame.fields.find((f) => key === f.config.displayName);
      }
      if (!field) {
        // 4. Match the name label
        field = frame.fields.find((f) => {
          if (f.labels) {
            return key === f.labels.name;
          }
          return false;
        });
      }
      if (!field) {
        return undefined;
      }
@@ -274,4 +274,96 @@ describe('Reducer Transformer', () => {
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
it('reduces multiple data frames with decimal display name (https://github.com/grafana/grafana/issues/31580)', async () => {
|
||||
const cfg = {
|
||||
id: DataTransformerID.reduce,
|
||||
options: {
|
||||
reducers: [ReducerID.max],
|
||||
},
|
||||
};
|
||||
|
||||
const seriesA = toDataFrame({
|
||||
name: 'a',
|
||||
fields: [
|
||||
{ name: 'time', type: FieldType.time, values: [3000, 4000, 5000, 6000] },
|
||||
{ name: 'value', type: FieldType.number, values: [3, 4, 5, 6], state: { displayName: 'a' } },
|
||||
],
|
||||
});
|
||||
|
||||
const seriesB = toDataFrame({
|
||||
name: '2021',
|
||||
fields: [
|
||||
{ name: 'time', type: FieldType.time, values: [3000, 4000, 5000, 6000] },
|
||||
{ name: 'value', type: FieldType.number, values: [7, 8, 9, 10], state: { displayName: '2021' } },
|
||||
],
|
||||
});
|
||||
|
||||
await expect(transformDataFrame([cfg], [seriesA, seriesB])).toEmitValuesWith((received) => {
|
||||
const processed = received[0];
|
||||
const expected: Field[] = [
|
||||
{
|
||||
name: 'Field',
|
||||
type: FieldType.string,
|
||||
values: new ArrayVector(['a', '2021']),
|
||||
config: {},
|
||||
},
|
||||
{
|
||||
name: 'Max',
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([6, 10]),
|
||||
config: {},
|
||||
},
|
||||
];
|
||||
|
||||
expect(processed.length).toEqual(1);
|
||||
expect(processed[0].length).toEqual(2);
|
||||
expect(processed[0].fields).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
it('reduces multiple data frames with decimal fields name (https://github.com/grafana/grafana/issues/31580)', async () => {
|
||||
const cfg = {
|
||||
id: DataTransformerID.reduce,
|
||||
options: {
|
||||
reducers: [ReducerID.max],
|
||||
},
|
||||
};
|
||||
|
||||
const seriesA = toDataFrame({
|
||||
fields: [
|
||||
{ name: 'time', type: FieldType.time, values: [3000, 4000, 5000, 6000] },
|
||||
{ name: 'a', type: FieldType.number, values: [3, 4, 5, 6] },
|
||||
],
|
||||
});
|
||||
|
||||
const seriesB = toDataFrame({
|
||||
fields: [
|
||||
{ name: 'time', type: FieldType.time, values: [3000, 4000, 5000, 6000] },
|
||||
{ name: '2021', type: FieldType.number, values: [7, 8, 9, 10] },
|
||||
],
|
||||
});
|
||||
|
||||
await expect(transformDataFrame([cfg], [seriesA, seriesB])).toEmitValuesWith((received) => {
|
||||
const processed = received[0];
|
||||
const expected: Field[] = [
|
||||
{
|
||||
name: 'Field',
|
||||
type: FieldType.string,
|
||||
values: new ArrayVector(['a', '2021']),
|
||||
config: {},
|
||||
},
|
||||
{
|
||||
name: 'Max',
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector([6, 10]),
|
||||
config: {},
|
||||
},
|
||||
];
|
||||
|
||||
expect(processed.length).toEqual(1);
|
||||
expect(processed[0].length).toEqual(2);
|
||||
expect(processed[0].fields).toEqual(expected);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { map } from 'rxjs/operators';
|
||||
|
||||
import { DataTransformerID } from './ids';
|
||||
import { DataTransformerInfo, MatcherConfig, FieldMatcher } from '../../types/transformations';
|
||||
import { DataTransformerInfo, FieldMatcher, MatcherConfig } from '../../types/transformations';
|
||||
import { fieldReducers, reduceField, ReducerID } from '../fieldReducer';
|
||||
import { alwaysFieldMatcher, notTimeFieldMatcher } from '../matchers/predicates';
|
||||
import { DataFrame, Field, FieldType } from '../../types/dataFrame';
|
||||
@@ -149,15 +149,18 @@ export function mergeResults(data: DataFrame[]): DataFrame | undefined {
|
||||
for (let seriesIndex = 1; seriesIndex < data.length; seriesIndex++) {
|
||||
const series = data[seriesIndex];
|
||||
|
||||
for (const baseField of baseFrame.fields) {
|
||||
for (const field of series.fields) {
|
||||
if (baseField.type !== field.type || baseField.name !== field.name) {
|
||||
continue;
|
||||
}
|
||||
for (let baseIndex = 0; baseIndex < baseFrame.fields.length; baseIndex++) {
|
||||
const baseField = baseFrame.fields[baseIndex];
|
||||
for (let fieldIndex = 0; fieldIndex < series.fields.length; fieldIndex++) {
|
||||
const field = series.fields[fieldIndex];
|
||||
const isFirstField = baseIndex === 0 && fieldIndex === 0;
|
||||
const isSameField = baseField.type === field.type && baseField.name === field.name;
|
||||
|
||||
const baseValues: any[] = ((baseField.values as unknown) as ArrayVector).buffer;
|
||||
const values: any[] = ((field.values as unknown) as ArrayVector).buffer;
|
||||
((baseField.values as unknown) as ArrayVector).buffer = baseValues.concat(values);
|
||||
if (isFirstField || isSameField) {
|
||||
const baseValues: any[] = baseField.values.toArray();
|
||||
const values: any[] = field.values.toArray();
|
||||
((baseField.values as unknown) as ArrayVector).buffer = baseValues.concat(values);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,3 +5,10 @@ export interface CartesianCoords2D {
  x: number;
  y: number;
}

/**
 * 2d object dimensions.
 */
export interface Dimensions2D {
  width: number;
  height: number;
}

@@ -123,9 +123,12 @@ describe('isNumeric', () => {
  it.each`
    value | expected
    ${123} | ${true}
    ${0} | ${true}
    ${'123'} | ${true}
    ${'0'} | ${true}
    ${' 123'} | ${true}
    ${' 123 '} | ${true}
    ${' 0 '} | ${true}
    ${-123.4} | ${true}
    ${'-123.4'} | ${true}
    ${0.41} | ${true}

@@ -97,11 +97,8 @@ const isNullValueMap = (mapping: ValueMap): boolean => {
  return mapping.value.toLowerCase() === 'null';
};

// Ref https://stackoverflow.com/a/42356340
export function isNumeric(num: any) {
  if (num === true) {
    return false;
  }
// Ref https://stackoverflow.com/a/58550111

  return Boolean(Number(num));
export function isNumeric(num: any) {
  return (typeof num === 'number' || (typeof num === 'string' && num.trim() !== '')) && !isNaN(num as number);
}
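For clarity, a standalone sketch of the new check and the cases that motivated the rewrite; the values mirror the test table above:

```typescript
// Minimal re-statement of the new isNumeric logic from valueMappings.
const isNumeric = (num: any) =>
  (typeof num === 'number' || (typeof num === 'string' && num.trim() !== '')) && !isNaN(num as number);

console.log(isNumeric(0));       // true  (the old Boolean(Number(num)) returned false for 0)
console.log(isNumeric(' 123 ')); // true  (whitespace-padded numeric strings are accepted)
console.log(isNumeric(''));      // false (empty strings are rejected explicitly)
console.log(isNumeric(true));    // false (booleans are neither numbers nor strings)
```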
@@ -2,7 +2,7 @@
  "author": "Grafana Labs",
  "license": "Apache-2.0",
  "name": "@grafana/e2e-selectors",
  "version": "7.5.0-pre.0",
  "version": "7.5.0",
  "description": "Grafana End-to-End Test Selectors Library",
  "keywords": [
    "cli",

@@ -178,4 +178,10 @@ export const Components = {
    dropDown: 'Dashboard link dropdown',
    link: 'Dashboard link',
  },
  CallToActionCard: {
    button: (name: string) => `Call to action button ${name}`,
  },
  DataLinksContextMenu: {
    singleLink: 'Data link',
  },
};

@@ -54,12 +54,12 @@ export const Pages = {
  },
  Annotations: {
    List: {
      addAnnotationCTA: 'Call to action button Add Annotation Query',
      addAnnotationCTA: Components.CallToActionCard.button('Add Annotation Query'),
    },
  },
  Variables: {
    List: {
      addVariableCTA: 'Call to action button Add variable',
      addVariableCTA: Components.CallToActionCard.button('Add variable'),
      newButton: 'Variable editor New variable button',
      table: 'Variable editor Table',
      tableRowNameFields: (variableName: string) => `Variable editor Table Name field ${variableName}`,
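A short sketch of how the new parameterized selector can be consumed from the published package; the import follows the pattern already used elsewhere in this diff:

```typescript
import { selectors } from '@grafana/e2e-selectors';

// Both pages above now derive their CTA labels from the shared factory,
// so a test can build the same string instead of hard-coding it:
const addAnnotation = selectors.components.CallToActionCard.button('Add Annotation Query');
console.log(addAnnotation); // 'Call to action button Add Annotation Query'
```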
@@ -2,7 +2,7 @@
  "author": "Grafana Labs",
  "license": "Apache-2.0",
  "name": "@grafana/e2e",
  "version": "7.5.0-pre.0",
  "version": "7.5.0",
  "description": "Grafana End-to-End Test Library",
  "keywords": [
    "cli",
@@ -44,7 +44,7 @@
  "types": "src/index.ts",
  "dependencies": {
    "@cypress/webpack-preprocessor": "4.1.3",
    "@grafana/e2e-selectors": "7.5.0-pre.0",
    "@grafana/e2e-selectors": "7.5.0",
    "@grafana/tsconfig": "^1.0.0-rc1",
    "@mochajs/json-file-reporter": "^1.2.0",
    "blink-diff": "1.0.13",

@@ -2,7 +2,7 @@
  "author": "Grafana Labs",
  "license": "Apache-2.0",
  "name": "@grafana/runtime",
  "version": "7.5.0-pre.0",
  "version": "7.5.0",
  "description": "Grafana Runtime Library",
  "keywords": [
    "grafana",
@@ -22,8 +22,8 @@
    "typecheck": "tsc --noEmit"
  },
  "dependencies": {
    "@grafana/data": "7.5.0-pre.0",
    "@grafana/ui": "7.5.0-pre.0",
    "@grafana/data": "7.5.0",
    "@grafana/ui": "7.5.0",
    "systemjs": "0.20.19",
    "systemjs-plugin-css": "0.1.37"
  },
@@ -18,8 +18,8 @@ apk add --no-cache curl 'nodejs-current=14.5.0-r0' npm yarn build-base openssh g
# apk add --no-cache xvfb glib nss nspr gdk-pixbuf "gtk+3.0" pango atk cairo dbus-libs libxcomposite libxrender libxi libxtst libxrandr libxscrnsaver alsa-lib at-spi2-atk at-spi2-core cups-libs gcompat libc6-compat

# Install Go
filename="go1.16.linux-amd64.tar.gz"
get_file "https://dl.google.com/go/$filename" "/tmp/$filename" "013a489ebb3e24ef3d915abe5b94c3286c070dfe0818d5bca8108f1d6e8440d2"
filename="go1.16.1.linux-amd64.tar.gz"
get_file "https://dl.google.com/go/$filename" "/tmp/$filename" "3edc22f8332231c3ba8be246f184b736b8d28f06ce24f08168d8ecf052549769"
untar_file "/tmp/$filename"

# Install golangci-lint

@@ -22,8 +22,8 @@ source "/etc/profile"
npm i -g yarn

# Install Go
filename="go1.16.linux-amd64.tar.gz"
get_file "https://dl.google.com/go/$filename" "/tmp/$filename" "013a489ebb3e24ef3d915abe5b94c3286c070dfe0818d5bca8108f1d6e8440d2"
filename="go1.16.1.linux-amd64.tar.gz"
get_file "https://dl.google.com/go/$filename" "/tmp/$filename" "3edc22f8332231c3ba8be246f184b736b8d28f06ce24f08168d8ecf052549769"
untar_file "/tmp/$filename"

# Install golangci-lint

@@ -2,8 +2,8 @@
source "./deploy-common.sh"

# Install Go
filename="go1.16.linux-amd64.tar.gz"
get_file "https://dl.google.com/go/$filename" "/tmp/$filename" "013a489ebb3e24ef3d915abe5b94c3286c070dfe0818d5bca8108f1d6e8440d2"
filename="go1.16.1.linux-amd64.tar.gz"
get_file "https://dl.google.com/go/$filename" "/tmp/$filename" "3edc22f8332231c3ba8be246f184b736b8d28f06ce24f08168d8ecf052549769"
untar_file "/tmp/$filename"

# Install golangci-lint
@@ -2,7 +2,7 @@
  "author": "Grafana Labs",
  "license": "Apache-2.0",
  "name": "@grafana/toolkit",
  "version": "7.5.0-pre.0",
  "version": "7.5.0",
  "description": "Grafana Toolkit",
  "keywords": [
    "grafana",
@@ -28,10 +28,10 @@
  "dependencies": {
    "@babel/core": "7.9.0",
    "@babel/preset-env": "7.9.0",
    "@grafana/data": "7.5.0-pre.0",
    "@grafana/eslint-config": "2.2.1",
    "@grafana/data": "7.5.0",
    "@grafana/eslint-config": "2.3.0",
    "@grafana/tsconfig": "^1.0.0-rc1",
    "@grafana/ui": "7.5.0-pre.0",
    "@grafana/ui": "7.5.0",
    "@types/command-exists": "^1.2.0",
    "@types/execa": "^0.9.0",
    "@types/expect-puppeteer": "3.3.1",
@@ -58,7 +58,7 @@
    "concurrently": "4.1.0",
    "copy-webpack-plugin": "5.1.2",
    "css-loader": "3.4.2",
    "eslint": "7.19.0",
    "eslint": "7.21.0",
    "eslint-config-prettier": "7.2.0",
    "eslint-plugin-jsdoc": "31.6.1",
    "eslint-plugin-prettier": "3.3.1",

@@ -2,7 +2,7 @@
  "author": "Grafana Labs",
  "license": "Apache-2.0",
  "name": "@grafana/ui",
  "version": "7.5.0-pre.0",
  "version": "7.5.0",
  "description": "Grafana Components Library",
  "keywords": [
    "grafana",
@@ -27,8 +27,8 @@
  },
  "dependencies": {
    "@emotion/core": "10.0.27",
    "@grafana/data": "7.5.0-pre.0",
    "@grafana/e2e-selectors": "7.5.0-pre.0",
    "@grafana/data": "7.5.0",
    "@grafana/e2e-selectors": "7.5.0",
    "@grafana/slate-react": "0.22.9-grafana",
    "@grafana/tsconfig": "^1.0.0-rc1",
    "@iconscout/react-unicons": "1.1.4",
@@ -77,7 +77,7 @@
    "react-transition-group": "4.4.1",
    "slate": "0.47.8",
    "tinycolor2": "1.4.1",
    "uplot": "1.6.4"
    "uplot": "1.6.7"
  },
  "devDependencies": {
    "@rollup/plugin-commonjs": "16.0.0",
@@ -2,10 +2,36 @@ import { Story } from '@storybook/react';
|
||||
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
|
||||
import { NOOP_CONTROL } from '../../utils/storybook/noopControl';
|
||||
import { Cascader } from '@grafana/ui';
|
||||
import { CascaderProps } from './Cascader';
|
||||
import { CascaderOption, CascaderProps } from './Cascader';
|
||||
import mdx from './Cascader.mdx';
|
||||
import React from 'react';
|
||||
|
||||
const onSelect = (val: string) => console.log(val);
|
||||
const options = [
|
||||
{
|
||||
label: 'First',
|
||||
value: '1',
|
||||
items: [
|
||||
{
|
||||
label: 'Second',
|
||||
value: '2',
|
||||
},
|
||||
{
|
||||
label: 'Third',
|
||||
value: '3',
|
||||
},
|
||||
{
|
||||
label: 'Fourth',
|
||||
value: '4',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'FirstFirst',
|
||||
value: '5',
|
||||
},
|
||||
];
|
||||
|
||||
export default {
|
||||
title: 'Forms/Cascader',
|
||||
component: Cascader,
|
||||
@@ -19,31 +45,8 @@ export default {
|
||||
},
|
||||
},
|
||||
args: {
|
||||
onSelect: (val: string) => console.log(val),
|
||||
options: [
|
||||
{
|
||||
label: 'First',
|
||||
value: '1',
|
||||
items: [
|
||||
{
|
||||
label: 'Second',
|
||||
value: '2',
|
||||
},
|
||||
{
|
||||
label: 'Third',
|
||||
value: '3',
|
||||
},
|
||||
{
|
||||
label: 'Fourth',
|
||||
value: '4',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'FirstFirst',
|
||||
value: '5',
|
||||
},
|
||||
],
|
||||
onSelect,
|
||||
options,
|
||||
},
|
||||
argTypes: {
|
||||
width: { control: { type: 'range', min: 0, max: 70 } },
|
||||
@@ -59,6 +62,7 @@ export const Simple = Template.bind({});
|
||||
Simple.args = {
|
||||
separator: '',
|
||||
};
|
||||
|
||||
export const WithInitialValue = Template.bind({});
|
||||
WithInitialValue.args = {
|
||||
initialValue: '3',
|
||||
@@ -70,3 +74,22 @@ WithCustomValue.args = {
|
||||
allowCustomValue: true,
|
||||
formatCreateLabel: (val) => 'Custom Label' + val,
|
||||
};
|
||||
|
||||
export const WithDisplayAllSelectedLevels = Template.bind({});
|
||||
WithDisplayAllSelectedLevels.args = {
|
||||
displayAllSelectedLevels: true,
|
||||
separator: ',',
|
||||
};
|
||||
|
||||
export const WithOptionsStateUpdate = () => {
|
||||
const [updatedOptions, setOptions] = React.useState<CascaderOption[]>([
|
||||
{
|
||||
label: 'Initial state option',
|
||||
value: 'initial',
|
||||
},
|
||||
]);
|
||||
|
||||
setTimeout(() => setOptions(options), 2000);
|
||||
|
||||
return <Cascader options={updatedOptions} onSelect={onSelect} />;
|
||||
};
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import React from 'react';
|
||||
import { Cascader } from './Cascader';
|
||||
import { shallow } from 'enzyme';
|
||||
import { Cascader, CascaderOption, CascaderProps } from './Cascader';
|
||||
import { render, screen, act } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
|
||||
const options = [
|
||||
{
|
||||
@@ -27,36 +28,132 @@ const options = [
|
||||
},
|
||||
];
|
||||
|
||||
const flatOptions = [
|
||||
{
|
||||
singleLabel: 'Second',
|
||||
label: 'First / Second',
|
||||
value: ['1', '2'],
|
||||
},
|
||||
{
|
||||
singleLabel: 'Third',
|
||||
label: 'First / Third',
|
||||
value: ['1', '3'],
|
||||
},
|
||||
{
|
||||
singleLabel: 'Fourth',
|
||||
label: 'First / Fourth',
|
||||
value: ['1', '4'],
|
||||
},
|
||||
{
|
||||
singleLabel: 'FirstFirst',
|
||||
label: 'FirstFirst',
|
||||
value: ['5'],
|
||||
},
|
||||
];
|
||||
const CascaderWithOptionsStateUpdate = (props: Omit<CascaderProps, 'options'>) => {
|
||||
const [updatedOptions, setOptions] = React.useState<CascaderOption[]>([
|
||||
{
|
||||
label: 'Initial state option',
|
||||
value: 'initial',
|
||||
},
|
||||
]);
|
||||
|
||||
setTimeout(() => setOptions(options), 1000);
|
||||
|
||||
return <Cascader options={updatedOptions} {...props} />;
|
||||
};
|
||||
|
||||
describe('Cascader', () => {
|
||||
let cascader: any;
|
||||
beforeEach(() => {
|
||||
cascader = shallow(<Cascader options={options} onSelect={() => {}} />);
|
||||
const placeholder = 'cascader-placeholder';
|
||||
|
||||
describe('options from state change', () => {
|
||||
beforeEach(() => {
|
||||
jest.useFakeTimers();
|
||||
});
|
||||
|
||||
it('displays updated options', () => {
|
||||
render(<CascaderWithOptionsStateUpdate placeholder={placeholder} onSelect={jest.fn()} />);
|
||||
|
||||
userEvent.click(screen.getByPlaceholderText(placeholder));
|
||||
|
||||
expect(screen.getByText('Initial state option')).toBeInTheDocument();
|
||||
expect(screen.queryByText('First')).not.toBeInTheDocument();
|
||||
|
||||
act(() => {
|
||||
jest.runAllTimers();
|
||||
});
|
||||
|
||||
userEvent.click(screen.getByPlaceholderText(placeholder));
|
||||
expect(screen.queryByText('Initial state option')).not.toBeInTheDocument();
|
||||
expect(screen.getByText('First')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('filters updated results when searching', () => {
|
||||
render(<CascaderWithOptionsStateUpdate placeholder={placeholder} onSelect={jest.fn()} />);
|
||||
|
||||
act(() => {
|
||||
jest.runAllTimers();
|
||||
});
|
||||
|
||||
userEvent.type(screen.getByPlaceholderText(placeholder), 'Third');
|
||||
expect(screen.queryByText('Second')).not.toBeInTheDocument();
|
||||
expect(screen.getByText('First / Third')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should convert options to searchable strings', () => {
|
||||
expect(cascader.state('searchableOptions')).toEqual(flatOptions);
|
||||
it('filters results when searching', () => {
|
||||
render(<Cascader placeholder={placeholder} options={options} onSelect={jest.fn()} />);
|
||||
|
||||
userEvent.type(screen.getByPlaceholderText(placeholder), 'Third');
|
||||
|
||||
expect(screen.queryByText('Second')).not.toBeInTheDocument();
|
||||
expect(screen.getByText('First / Third')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('displays selected value with all levels when displayAllSelectedLevels is true and selecting a value from the search', () => {
|
||||
render(
|
||||
<Cascader displayAllSelectedLevels={true} placeholder={placeholder} options={options} onSelect={jest.fn()} />
|
||||
);
|
||||
|
||||
userEvent.type(screen.getByPlaceholderText(placeholder), 'Third');
|
||||
userEvent.click(screen.getByText('First / Third'));
|
||||
|
||||
expect(screen.getByDisplayValue('First / Third')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('displays all levels selected with default separator when displayAllSelectedLevels is true', () => {
|
||||
render(
|
||||
<Cascader displayAllSelectedLevels={true} placeholder={placeholder} options={options} onSelect={() => {}} />
|
||||
);
|
||||
|
||||
expect(screen.queryByDisplayValue('First/Second')).not.toBeInTheDocument();
|
||||
|
||||
userEvent.click(screen.getByPlaceholderText(placeholder));
|
||||
userEvent.click(screen.getByText('First'));
|
||||
userEvent.click(screen.getByText('Second'));
|
||||
|
||||
expect(screen.getByDisplayValue('First/Second')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('displays all levels selected with separator passed in when displayAllSelectedLevels is true', () => {
|
||||
const separator = ',';
|
||||
|
||||
render(
|
||||
<Cascader
|
||||
displayAllSelectedLevels={true}
|
||||
separator={separator}
|
||||
placeholder={placeholder}
|
||||
options={options}
|
||||
onSelect={() => {}}
|
||||
/>
|
||||
);
|
||||
|
||||
expect(screen.queryByDisplayValue('First/Second')).not.toBeInTheDocument();
|
||||
|
||||
userEvent.click(screen.getByPlaceholderText(placeholder));
|
||||
userEvent.click(screen.getByText('First'));
|
||||
userEvent.click(screen.getByText('Second'));
|
||||
|
||||
expect(screen.getByDisplayValue(`First${separator}Second`)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('displays last level selected when displayAllSelectedLevels is false', () => {
|
||||
render(
|
||||
<Cascader displayAllSelectedLevels={false} placeholder={placeholder} options={options} onSelect={jest.fn()} />
|
||||
);
|
||||
|
||||
userEvent.click(screen.getByPlaceholderText(placeholder));
|
||||
userEvent.click(screen.getByText('First'));
|
||||
userEvent.click(screen.getByText('Second'));
|
||||
|
||||
expect(screen.getByDisplayValue('Second')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('displays last level selected when displayAllSelectedLevels is not passed in', () => {
|
||||
render(<Cascader placeholder={placeholder} options={options} onSelect={jest.fn()} />);
|
||||
|
||||
userEvent.click(screen.getByPlaceholderText(placeholder));
|
||||
userEvent.click(screen.getByText('First'));
|
||||
userEvent.click(screen.getByText('Second'));
|
||||
|
||||
expect(screen.getByDisplayValue('Second')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,6 +7,7 @@ import { Input } from '../Input/Input';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { css } from 'emotion';
|
||||
import { onChangeCascader } from './optionMappings';
|
||||
import memoizeOne from 'memoize-one';
|
||||
|
||||
export interface CascaderProps {
|
||||
/** The separator between levels in the search */
|
||||
@@ -22,11 +23,11 @@ export interface CascaderProps {
|
||||
allowCustomValue?: boolean;
|
||||
/** A function for formatting the message for custom value creation. Only applies when allowCustomValue is set to true*/
|
||||
formatCreateLabel?: (val: string) => string;
|
||||
displayAllSelectedLevels?: boolean;
|
||||
}
|
||||
|
||||
interface CascaderState {
|
||||
isSearching: boolean;
|
||||
searchableOptions: Array<SelectableValue<string[]>>;
|
||||
focusCascade: boolean;
|
||||
//Array for cascade navigation
|
||||
rcValue: SelectableValue<string[]>;
|
||||
@@ -57,15 +58,16 @@ const disableDivFocus = css(`
|
||||
}
|
||||
`);
|
||||
|
||||
const DEFAULT_SEPARATOR = '/';
|
||||
|
||||
export class Cascader extends React.PureComponent<CascaderProps, CascaderState> {
|
||||
constructor(props: CascaderProps) {
|
||||
super(props);
|
||||
const searchableOptions = this.flattenOptions(props.options);
|
||||
const searchableOptions = this.getSearchableOptions(props.options);
|
||||
const { rcValue, activeLabel } = this.setInitialValue(searchableOptions, props.initialValue);
|
||||
this.state = {
|
||||
isSearching: false,
|
||||
focusCascade: false,
|
||||
searchableOptions,
|
||||
rcValue,
|
||||
activeLabel,
|
||||
};
|
||||
@@ -81,7 +83,7 @@ export class Cascader extends React.PureComponent<CascaderProps, CascaderState>
|
||||
if (!option.items) {
|
||||
selectOptions.push({
|
||||
singleLabel: cpy[cpy.length - 1].label,
|
||||
label: cpy.map((o) => o.label).join(this.props.separator || ' / '),
|
||||
label: cpy.map((o) => o.label).join(this.props.separator || ` ${DEFAULT_SEPARATOR} `),
|
||||
value: cpy.map((o) => o.value),
|
||||
});
|
||||
} else {
|
||||
@@ -91,6 +93,8 @@ export class Cascader extends React.PureComponent<CascaderProps, CascaderState>
|
||||
return selectOptions;
|
||||
};
|
||||
|
||||
getSearchableOptions = memoizeOne((options: CascaderOption[]) => this.flattenOptions(options));
|
||||
|
||||
setInitialValue(searchableOptions: Array<SelectableValue<string[]>>, initValue?: string) {
|
||||
if (!initValue) {
|
||||
return { rcValue: [], activeLabel: '' };
|
||||
@@ -101,7 +105,7 @@ export class Cascader extends React.PureComponent<CascaderProps, CascaderState>
|
||||
if (optionPath.indexOf(initValue) === optionPath.length - 1) {
|
||||
return {
|
||||
rcValue: optionPath,
|
||||
activeLabel: option.singleLabel || '',
|
||||
activeLabel: this.props.displayAllSelectedLevels ? option.label : option.singleLabel || '',
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -116,7 +120,9 @@ export class Cascader extends React.PureComponent<CascaderProps, CascaderState>
|
||||
this.setState({
|
||||
rcValue: value,
|
||||
focusCascade: true,
|
||||
activeLabel: selectedOptions[selectedOptions.length - 1].label,
|
||||
activeLabel: this.props.displayAllSelectedLevels
|
||||
? selectedOptions.map((option) => option.label).join(this.props.separator || DEFAULT_SEPARATOR)
|
||||
: selectedOptions[selectedOptions.length - 1].label,
|
||||
});
|
||||
|
||||
this.props.onSelect(selectedOptions[selectedOptions.length - 1].value);
|
||||
@@ -126,7 +132,7 @@ export class Cascader extends React.PureComponent<CascaderProps, CascaderState>
|
||||
onSelect = (obj: SelectableValue<string[]>) => {
|
||||
const valueArray = obj.value || [];
|
||||
this.setState({
|
||||
activeLabel: obj.singleLabel || '',
|
||||
activeLabel: this.props.displayAllSelectedLevels ? obj.label : obj.singleLabel || '',
|
||||
rcValue: valueArray,
|
||||
isSearching: false,
|
||||
});
|
||||
@@ -178,8 +184,10 @@ export class Cascader extends React.PureComponent<CascaderProps, CascaderState>
|
||||
};
|
||||
|
||||
render() {
|
||||
const { allowCustomValue, placeholder, width, changeOnSelect } = this.props;
|
||||
const { focusCascade, isSearching, searchableOptions, rcValue, activeLabel } = this.state;
|
||||
const { allowCustomValue, placeholder, width, changeOnSelect, options } = this.props;
|
||||
const { focusCascade, isSearching, rcValue, activeLabel } = this.state;
|
||||
|
||||
const searchableOptions = this.getSearchableOptions(options);
|
||||
|
||||
return (
|
||||
<div>
|
||||
|
||||
@@ -1,100 +0,0 @@
|
||||
import React from 'react';
|
||||
import { mount } from 'enzyme';
|
||||
import { Tooltip } from './Tooltip';
|
||||
|
||||
// Tooltip container has padding of 8px, let's assume target tooltip has measured width & height of 100px
|
||||
const content = <div style={{ width: '84px', height: '84' }} />;
|
||||
|
||||
describe('Chart Tooltip', () => {
|
||||
describe('is positioned correctly', () => {
|
||||
beforeEach(() => {
|
||||
// jsdom does not perform actual DOM rendering
|
||||
// We need to mock getBoundingClientRect to return what DOM would actually return
|
||||
// when measuring tooltip container (wrapper with padding and content inside)
|
||||
Element.prototype.getBoundingClientRect = jest.fn(() => {
|
||||
return { width: 100, height: 100, top: 0, left: 0, bottom: 0, right: 0 } as DOMRect;
|
||||
});
|
||||
});
|
||||
|
||||
// Jest's default viewport size is 1024x768px
|
||||
test('when fits into the viewport', () => {
|
||||
const tooltip = mount(<Tooltip content={content} position={{ x: 0, y: 0 }} />);
|
||||
const container = tooltip.find('TooltipContainer > div');
|
||||
const styleAttribute = container.getDOMNode().getAttribute('style');
|
||||
|
||||
// +------+
|
||||
// |origin|
|
||||
// +------+--------------+
|
||||
// | Tooltip |
|
||||
// | |
|
||||
// +--------------+
|
||||
expect(styleAttribute).toContain('translate3d(0px, 0px, 0)');
|
||||
});
|
||||
|
||||
test("when overflows viewport's x axis", () => {
|
||||
const tooltip = mount(<Tooltip content={content} position={{ x: 1000, y: 0 }} />);
|
||||
const container = tooltip.find('TooltipContainer > div');
|
||||
const styleAttribute = container.getDOMNode().getAttribute('style');
|
||||
|
||||
// We expect tooltip to flip over left side of the origin position
|
||||
// +------+
|
||||
// |origin|
|
||||
// +--------------+------+
|
||||
// | Tooltip |
|
||||
// | |
|
||||
// +--------------+
|
||||
expect(styleAttribute).toContain('translate3d(900px, 0px, 0)');
|
||||
});
|
||||
|
||||
test("when overflows viewport's y axis", () => {
|
||||
const tooltip = mount(<Tooltip content={content} position={{ x: 0, y: 700 }} />);
|
||||
const container = tooltip.find('TooltipContainer > div');
|
||||
const styleAttribute = container.getDOMNode().getAttribute('style');
|
||||
|
||||
// We expect tooltip to flip over top side of the origin position
|
||||
// +--------------+
|
||||
// | Tooltip |
|
||||
// | |
|
||||
// +------+--------------+
|
||||
// |origin|
|
||||
// +------+
|
||||
expect(styleAttribute).toContain('translate3d(0px, 600px, 0)');
|
||||
});
|
||||
|
||||
test("when overflows viewport's x and y axes", () => {
|
||||
const tooltip = mount(<Tooltip content={content} position={{ x: 1000, y: 700 }} />);
|
||||
const container = tooltip.find('TooltipContainer > div');
|
||||
const styleAttribute = container.getDOMNode().getAttribute('style');
|
||||
|
||||
// We expect tooltip to flip over the left top corner of the origin position
|
||||
// +--------------+
|
||||
// | Tooltip |
|
||||
// | |
|
||||
// +--------------+------+
|
||||
// |origin|
|
||||
// +------+
|
||||
expect(styleAttribute).toContain('translate3d(900px, 600px, 0)');
|
||||
});
|
||||
|
||||
describe('when offset provided', () => {
|
||||
test("when overflows viewport's x and y axes", () => {
|
||||
const tooltip = mount(<Tooltip content={content} position={{ x: 1000, y: 700 }} offset={{ x: 10, y: 10 }} />);
|
||||
const container = tooltip.find('TooltipContainer > div');
|
||||
const styleAttribute = container.getDOMNode().getAttribute('style');
|
||||
|
||||
// We expect tooltip to flip over the left top corner of the origin position with offset applied
|
||||
// +--------------------+
|
||||
// | |
|
||||
// | +--------------+ |
|
||||
// | | Tooltip | |
|
||||
// | | | |
|
||||
// | +--------------+ |
|
||||
// | offset|
|
||||
// +--------------------++------+
|
||||
// |origin|
|
||||
// +------+
|
||||
expect(styleAttribute).toContain('translate3d(910px, 610px, 0)');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,9 +1,9 @@
|
||||
import React, { useState, useLayoutEffect, useRef, HTMLAttributes } from 'react';
|
||||
import React, { useState, useLayoutEffect, useRef, HTMLAttributes, useMemo } from 'react';
|
||||
import { stylesFactory } from '../../themes/stylesFactory';
|
||||
import { css, cx } from 'emotion';
|
||||
import { useTheme } from '../../themes/ThemeContext';
|
||||
import useWindowSize from 'react-use/lib/useWindowSize';
|
||||
import { GrafanaTheme } from '@grafana/data';
|
||||
import { Dimensions2D, GrafanaTheme } from '@grafana/data';
|
||||
|
||||
interface TooltipContainerProps extends HTMLAttributes<HTMLDivElement> {
|
||||
position: { x: number; y: number };
|
||||
@@ -20,18 +20,49 @@ export const TooltipContainer: React.FC<TooltipContainerProps> = ({
|
||||
}) => {
|
||||
const theme = useTheme();
|
||||
const tooltipRef = useRef<HTMLDivElement>(null);
|
||||
const tooltipMeasurementRef = useRef<Dimensions2D>({ width: 0, height: 0 });
|
||||
const { width, height } = useWindowSize();
|
||||
const [placement, setPlacement] = useState({
|
||||
x: positionX + offsetX,
|
||||
y: positionY + offsetY,
|
||||
});
|
||||
|
||||
const resizeObserver = useMemo(
|
||||
() =>
|
||||
// TS has hard time playing games with @types/resize-observer-browser, hence the ignore
|
||||
// @ts-ignore
|
||||
new ResizeObserver((entries) => {
|
||||
for (let entry of entries) {
|
||||
const tW = Math.floor(entry.contentRect.width + 2 * 8); // adding padding until Safari supports borderBoxSize
|
||||
const tH = Math.floor(entry.contentRect.height + 2 * 8);
|
||||
|
||||
if (tooltipMeasurementRef.current.width !== tW || tooltipMeasurementRef.current.height !== tH) {
|
||||
tooltipMeasurementRef.current = {
|
||||
width: tW,
|
||||
height: tH,
|
||||
};
|
||||
}
|
||||
}
|
||||
}),
|
||||
[]
|
||||
);
|
||||
|
||||
useLayoutEffect(() => {
|
||||
if (tooltipRef.current) {
|
||||
resizeObserver.observe(tooltipRef.current);
|
||||
}
|
||||
|
||||
return () => {
|
||||
resizeObserver.disconnect();
|
||||
};
|
||||
}, [resizeObserver]);
|
||||
|
||||
// Make sure tooltip does not overflow window
|
||||
useLayoutEffect(() => {
|
||||
let xO = 0,
|
||||
yO = 0;
|
||||
if (tooltipRef && tooltipRef.current) {
|
||||
const measurement = tooltipRef.current.getBoundingClientRect();
|
||||
const measurement = tooltipMeasurementRef.current;
|
||||
const xOverflow = width - (positionX + measurement.width);
|
||||
const yOverflow = height - (positionY + measurement.height);
|
||||
if (xOverflow < 0) {
|
||||
@@ -47,7 +78,7 @@ export const TooltipContainer: React.FC<TooltipContainerProps> = ({
|
||||
x: positionX + offsetX - xO,
|
||||
y: positionY + offsetY - yO,
|
||||
});
|
||||
}, [tooltipRef, width, height, positionX, offsetX, positionY, offsetY]);
|
||||
}, [width, height, positionX, offsetX, positionY, offsetY]);
|
||||
|
||||
const styles = getTooltipContainerStyles(theme);
|
||||
|
||||
|
||||
@@ -0,0 +1,77 @@
|
||||
import React from 'react';
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { DataLinksContextMenu } from './DataLinksContextMenu';
|
||||
import { selectors } from '@grafana/e2e-selectors';
|
||||
|
||||
const fakeAriaLabel = 'fake aria label';
|
||||
describe('DataLinksContextMenu', () => {
|
||||
it('renders context menu when there are more than one data links', () => {
|
||||
render(
|
||||
<DataLinksContextMenu
|
||||
links={() => [
|
||||
{
|
||||
href: '/link1',
|
||||
title: 'Link1',
|
||||
target: '_blank',
|
||||
origin: {},
|
||||
},
|
||||
{
|
||||
href: '/link2',
|
||||
title: 'Link2',
|
||||
target: '_blank',
|
||||
origin: {},
|
||||
},
|
||||
]}
|
||||
config={{
|
||||
links: [
|
||||
{
|
||||
title: 'Link1',
|
||||
url: '/link1',
|
||||
},
|
||||
{
|
||||
title: 'Link2',
|
||||
url: '/link2',
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
{() => {
|
||||
return <div aria-label="fake aria label" />;
|
||||
}}
|
||||
</DataLinksContextMenu>
|
||||
);
|
||||
|
||||
expect(screen.getByLabelText(fakeAriaLabel)).toBeInTheDocument();
|
||||
expect(screen.queryAllByLabelText(selectors.components.DataLinksContextMenu.singleLink)).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('renders link when there is a single data link', () => {
|
||||
render(
|
||||
<DataLinksContextMenu
|
||||
links={() => [
|
||||
{
|
||||
href: '/link1',
|
||||
title: 'Link1',
|
||||
target: '_blank',
|
||||
origin: {},
|
||||
},
|
||||
]}
|
||||
config={{
|
||||
links: [
|
||||
{
|
||||
title: 'Link1',
|
||||
url: '/link1',
|
||||
},
|
||||
],
|
||||
}}
|
||||
>
|
||||
{() => {
|
||||
return <div aria-label="fake aria label" />;
|
||||
}}
|
||||
</DataLinksContextMenu>
|
||||
);
|
||||
|
||||
expect(screen.getByLabelText(fakeAriaLabel)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(selectors.components.DataLinksContextMenu.singleLink)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -1,12 +1,14 @@
|
||||
import React from 'react';
|
||||
import { WithContextMenu } from '../ContextMenu/WithContextMenu';
|
||||
import { LinkModel } from '@grafana/data';
|
||||
import { linkModelToContextMenuItems } from '../../utils/dataLinks';
|
||||
import { FieldConfig, LinkModel } from '@grafana/data';
|
||||
import { selectors } from '@grafana/e2e-selectors';
|
||||
import { css } from 'emotion';
|
||||
import { WithContextMenu } from '../ContextMenu/WithContextMenu';
|
||||
import { linkModelToContextMenuItems } from '../../utils/dataLinks';
|
||||
|
||||
interface DataLinksContextMenuProps {
|
||||
children: (props: DataLinksContextMenuApi) => JSX.Element;
|
||||
links: () => LinkModel[];
|
||||
config: FieldConfig;
|
||||
}
|
||||
|
||||
export interface DataLinksContextMenuApi {
|
||||
@@ -14,7 +16,8 @@ export interface DataLinksContextMenuApi {
|
||||
targetClassName?: string;
|
||||
}
|
||||
|
||||
export const DataLinksContextMenu: React.FC<DataLinksContextMenuProps> = ({ children, links }) => {
|
||||
export const DataLinksContextMenu: React.FC<DataLinksContextMenuProps> = ({ children, links, config }) => {
|
||||
const linksCounter = config.links!.length;
|
||||
const getDataLinksContextMenuItems = () => {
|
||||
return [{ items: linkModelToContextMenuItems(links), label: 'Data links' }];
|
||||
};
|
||||
@@ -24,11 +27,27 @@ export const DataLinksContextMenu: React.FC<DataLinksContextMenuProps> = ({ chil
|
||||
cursor: context-menu;
|
||||
`;
|
||||
|
||||
return (
|
||||
<WithContextMenu getContextMenuItems={getDataLinksContextMenuItems}>
|
||||
{({ openMenu }) => {
|
||||
return children({ openMenu, targetClassName });
|
||||
}}
|
||||
</WithContextMenu>
|
||||
);
|
||||
if (linksCounter > 1) {
|
||||
return (
|
||||
<WithContextMenu getContextMenuItems={getDataLinksContextMenuItems}>
|
||||
{({ openMenu }) => {
|
||||
return children({ openMenu, targetClassName });
|
||||
}}
|
||||
</WithContextMenu>
|
||||
);
|
||||
} else {
|
||||
const linkModel = links()[0];
|
||||
return (
|
||||
<a
|
||||
href={linkModel.href}
|
||||
onClick={linkModel.onClick}
|
||||
target={linkModel.target}
|
||||
title={linkModel.title}
|
||||
style={{ display: 'flex' }}
|
||||
aria-label={selectors.components.DataLinksContextMenu.singleLink}
|
||||
>
|
||||
{children({})}
|
||||
</a>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -79,6 +79,13 @@ const mockGraphProps = (multiSeries = false) => {
|
||||
timeZone: 'browser',
|
||||
};
|
||||
};
|
||||
|
||||
(window as any).ResizeObserver = class ResizeObserver {
|
||||
constructor() {}
|
||||
observe() {}
|
||||
disconnect() {}
|
||||
};
|
||||
|
||||
describe('Graph', () => {
|
||||
describe('with tooltip', () => {
|
||||
describe('in single mode', () => {
|
||||
|
||||
@@ -75,8 +75,16 @@ export const SeriesTable: React.FC<SeriesTableProps> = ({ timestamp, series }) =
|
||||
{timestamp}
|
||||
</div>
|
||||
)}
|
||||
{series.map((s) => {
|
||||
return <SeriesTableRow isActive={s.isActive} label={s.label} color={s.color} value={s.value} key={s.label} />;
|
||||
{series.map((s, i) => {
|
||||
return (
|
||||
<SeriesTableRow
|
||||
isActive={s.isActive}
|
||||
label={s.label}
|
||||
color={s.color}
|
||||
value={s.value}
|
||||
key={`${s.label}-${i}`}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
DataFrameFieldIndex,
|
||||
FieldMatcherID,
|
||||
fieldMatchers,
|
||||
FieldType,
|
||||
TimeRange,
|
||||
TimeZone,
|
||||
} from '@grafana/data';
|
||||
@@ -89,7 +90,7 @@ class UnthemedGraphNG extends React.Component<GraphNGProps, GraphNGState> {
|
||||
|
||||
return {
|
||||
...state,
|
||||
data: preparePlotData(frame),
|
||||
data: preparePlotData(frame, [FieldType.string]),
|
||||
alignedDataFrame: frame,
|
||||
seriesToDataFrameFieldIndexMap: frame.fields.map((f) => f.state!.origin!),
|
||||
dimFields,
|
||||
|
||||
@@ -152,7 +152,9 @@ class UnThemedLogRow extends PureComponent<Props, State> {
|
||||
});
|
||||
|
||||
const processedRow =
|
||||
row.hasUnescapedContent && forceEscape ? { ...row, entry: escapeUnescapedString(row.entry) } : row;
|
||||
row.hasUnescapedContent && forceEscape
|
||||
? { ...row, entry: escapeUnescapedString(row.entry), raw: escapeUnescapedString(row.raw) }
|
||||
: row;
|
||||
|
||||
return (
|
||||
<>
|
||||
|
||||
@@ -13,9 +13,11 @@ interface Props {
|
||||
numberOfPages: number;
|
||||
/** Callback function for fetching the selected page */
|
||||
onNavigate: (toPage: number) => void;
|
||||
/** When set to true and the pagination result is only one page it will not render the pagination at all */
|
||||
hideWhenSinglePage?: boolean;
|
||||
}
|
||||
|
||||
export const Pagination: React.FC<Props> = ({ currentPage, numberOfPages, onNavigate }) => {
|
||||
export const Pagination: React.FC<Props> = ({ currentPage, numberOfPages, onNavigate, hideWhenSinglePage }) => {
|
||||
const styles = getStyles();
|
||||
const pages = [...new Array(numberOfPages).keys()];
|
||||
|
||||
@@ -71,6 +73,10 @@ export const Pagination: React.FC<Props> = ({ currentPage, numberOfPages, onNavi
|
||||
return pagesToRender;
|
||||
}, []);
|
||||
|
||||
if (hideWhenSinglePage && numberOfPages <= 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={styles.container}>
|
||||
<ol>
|
||||
|
||||
@@ -27,6 +27,7 @@ const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
tabContent: css`
|
||||
padding: ${theme.spacing.md};
|
||||
background-color: ${theme.colors.bodyBg};
|
||||
height: 100%;
|
||||
`,
|
||||
close: css`
|
||||
position: absolute;
|
||||
|
||||
@@ -7,6 +7,7 @@ import uPlot from 'uplot';
|
||||
import createMockRaf from 'mock-raf';
|
||||
import { UPlotConfigBuilder } from './config/UPlotConfigBuilder';
|
||||
import { preparePlotData } from './utils';
|
||||
import { SeriesProps } from './config/UPlotSeriesBuilder';
|
||||
|
||||
const mockRaf = createMockRaf();
|
||||
const setDataMock = jest.fn();
|
||||
@@ -52,7 +53,9 @@ const mockData = () => {
|
||||
raw: { from: '1602673200000', to: '1602680400000' },
|
||||
};
|
||||
|
||||
return { data, timeRange, config: new UPlotConfigBuilder() };
|
||||
const config = new UPlotConfigBuilder();
|
||||
config.addSeries({} as SeriesProps);
|
||||
return { data, timeRange, config };
|
||||
};
|
||||
|
||||
describe('UPlotChart', () => {
|
||||
@@ -159,14 +162,11 @@ describe('UPlotChart', () => {
|
||||
|
||||
expect(uPlot).toBeCalledTimes(1);
|
||||
|
||||
const nextConfig = new UPlotConfigBuilder();
|
||||
nextConfig.addSeries({} as SeriesProps);
|
||||
|
||||
rerender(
|
||||
<UPlotChart
|
||||
data={preparePlotData(data)}
|
||||
config={new UPlotConfigBuilder()}
|
||||
timeRange={timeRange}
|
||||
width={100}
|
||||
height={100}
|
||||
/>
|
||||
<UPlotChart data={preparePlotData(data)} config={nextConfig} timeRange={timeRange} width={100} height={100} />
|
||||
);
|
||||
|
||||
expect(destroyMock).toBeCalledTimes(1);
|
||||
@@ -190,11 +190,13 @@ describe('UPlotChart', () => {
|
||||
act(() => {
|
||||
mockRaf.step({ count: 1 });
|
||||
});
|
||||
const nextConfig = new UPlotConfigBuilder();
|
||||
nextConfig.addSeries({} as SeriesProps);
|
||||
|
||||
rerender(
|
||||
<UPlotChart
|
||||
data={preparePlotData(data)} // frame
|
||||
config={new UPlotConfigBuilder()}
|
||||
config={nextConfig}
|
||||
timeRange={timeRange}
|
||||
width={200}
|
||||
height={200}
|
||||
@@ -205,5 +207,68 @@ describe('UPlotChart', () => {
|
||||
expect(uPlot).toBeCalledTimes(1);
|
||||
expect(setSizeMock).toBeCalledTimes(1);
|
||||
});
|
||||
|
||||
it('does not initialize plot when config and data are not in sync', () => {
|
||||
const { data, timeRange, config } = mockData();
|
||||
|
||||
// 1 series in data, 2 series in config
|
||||
config.addSeries({} as SeriesProps);
|
||||
|
||||
render(
|
||||
<UPlotChart
|
||||
data={preparePlotData(data)} // frame
|
||||
config={config}
|
||||
timeRange={timeRange}
|
||||
width={100}
|
||||
height={100}
|
||||
/>
|
||||
);
|
||||
|
||||
// we wait 1 frame for plugins initialisation logic to finish
|
||||
act(() => {
|
||||
mockRaf.step({ count: 1 });
|
||||
});
|
||||
|
||||
expect(destroyMock).toBeCalledTimes(0);
|
||||
expect(uPlot).toBeCalledTimes(0);
|
||||
});
|
||||
|
||||
it('does not reinitialize plot when config and data are not in sync', () => {
|
||||
const { data, timeRange, config } = mockData();
|
||||
|
||||
// 1 series in data, 1 series in config
|
||||
const { rerender } = render(
|
||||
<UPlotChart
|
||||
data={preparePlotData(data)} // frame
|
||||
config={config}
|
||||
timeRange={timeRange}
|
||||
width={100}
|
||||
height={100}
|
||||
/>
|
||||
);
|
||||
|
||||
// we wait 1 frame for plugins initialisation logic to finish
|
||||
act(() => {
|
||||
mockRaf.step({ count: 1 });
|
||||
});
|
||||
|
||||
const nextConfig = new UPlotConfigBuilder();
|
||||
nextConfig.addSeries({} as SeriesProps);
|
||||
nextConfig.addSeries({} as SeriesProps);
|
||||
|
||||
// 1 series in data, 2 series in config
|
||||
rerender(
|
||||
<UPlotChart
|
||||
data={preparePlotData(data)} // frame
|
||||
config={nextConfig}
|
||||
timeRange={timeRange}
|
||||
width={200}
|
||||
height={200}
|
||||
/>
|
||||
);
|
||||
|
||||
expect(destroyMock).toBeCalledTimes(0);
|
||||
expect(uPlot).toBeCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -31,6 +31,14 @@ export const UPlotChart: React.FC<PlotProps> = (props) => {
|
||||
return;
|
||||
}
|
||||
|
||||
// 0. Exit if the data set length is different than number of series expected to render
|
||||
// This may happen when GraphNG has not synced config yet with the aligned frame. Alignment happens before the render
|
||||
// in the getDerivedStateFromProps, while the config creation happens in componentDidUpdate, causing one more render
|
||||
// of the UPlotChart if the config needs to be updated.
|
||||
if (currentConfig.current.series.length !== props.data.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
// 1. When config is ready and there is no uPlot instance, create new uPlot and return
|
||||
if (isConfigReady && !plotInstance.current) {
|
||||
plotInstance.current = initializePlot(props.data, currentConfig.current, canvasRef.current);
|
||||
@@ -80,7 +88,7 @@ export const UPlotChart: React.FC<PlotProps> = (props) => {
|
||||
);
|
||||
};
|
||||
|
||||
function initializePlot(data: AlignedData | null, config: Options, el: HTMLDivElement) {
|
||||
function initializePlot(data: AlignedData | undefined, config: Options, el: HTMLDivElement) {
|
||||
pluginLog('UPlotChart: init uPlot', false, 'initialized with', data, config);
|
||||
return new uPlot(config, data, el);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import React, { useCallback } from 'react';
|
||||
import { DataFrame, DisplayValue, fieldReducers, reduceField } from '@grafana/data';
|
||||
import { DataFrame, DisplayValue, fieldReducers, getFieldDisplayName, reduceField } from '@grafana/data';
|
||||
import { UPlotConfigBuilder } from './config/UPlotConfigBuilder';
|
||||
import { VizLegendItem, VizLegendOptions } from '../VizLegend/types';
|
||||
import { AxisPlacement } from './config';
|
||||
@@ -56,11 +56,17 @@ export const PlotLegend: React.FC<PlotLegendProps> = ({
|
||||
|
||||
const field = data[fieldIndex.frameIndex]?.fields[fieldIndex.fieldIndex];
|
||||
|
||||
if (!field) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const label = getFieldDisplayName(field, data[fieldIndex.frameIndex]!);
|
||||
|
||||
return {
|
||||
disabled: !seriesConfig.show ?? false,
|
||||
fieldIndex,
|
||||
color: seriesConfig.lineColor!,
|
||||
label: seriesConfig.fieldName,
|
||||
label,
|
||||
yAxis: axisPlacement === AxisPlacement.Left ? 1 : 2,
|
||||
getDisplayValues: () => {
|
||||
if (!calcs?.length) {
|
||||
@@ -80,6 +86,7 @@ export const PlotLegend: React.FC<PlotLegendProps> = ({
|
||||
};
|
||||
});
|
||||
},
|
||||
getItemKey: () => `${label}-${fieldIndex.frameIndex}-${fieldIndex.fieldIndex}`,
|
||||
};
|
||||
})
|
||||
.filter((i) => i !== undefined) as VizLegendItem[];
|
||||
|
||||
@@ -59,6 +59,7 @@ export const TooltipPlugin: React.FC<TooltipPluginProps> = ({ mode = 'single', t
|
||||
// when interacting with a point in single mode
|
||||
if (mode === 'single' && originFieldIndex !== null) {
|
||||
const field = otherProps.data[originFieldIndex.frameIndex].fields[originFieldIndex.fieldIndex];
|
||||
const plotSeries = plotContext.getSeries();
|
||||
|
||||
const fieldFmt = field.display || getDisplayProcessor({ field, timeZone });
|
||||
tooltip = (
|
||||
@@ -66,7 +67,7 @@ export const TooltipPlugin: React.FC<TooltipPluginProps> = ({ mode = 'single', t
|
||||
series={[
|
||||
{
|
||||
// TODO: align with uPlot typings
|
||||
color: (plotContext.getSeries()[focusedSeriesIdx!].stroke as any)(),
|
||||
color: (plotSeries[focusedSeriesIdx!].stroke as any)(),
|
||||
label: getFieldDisplayName(field, otherProps.data[originFieldIndex.frameIndex]),
|
||||
value: fieldFmt(field.values.get(focusedPointIdx)).text,
|
||||
},
|
||||
@@ -78,33 +79,31 @@ export const TooltipPlugin: React.FC<TooltipPluginProps> = ({ mode = 'single', t
|
||||
|
||||
if (mode === 'multi') {
|
||||
let series: SeriesTableRowProps[] = [];
|
||||
const plotSeries = plotContext.getSeries();
|
||||
|
||||
for (let i = 0; i < otherProps.data.length; i++) {
|
||||
series = series.concat(
|
||||
otherProps.data[i].fields.reduce<SeriesTableRowProps[]>((agg, f, j) => {
|
||||
// skipping time field and non-numeric fields
|
||||
if (f.type === FieldType.time || f.type !== FieldType.number) {
|
||||
return agg;
|
||||
}
|
||||
for (let i = 0; i < plotSeries.length; i++) {
|
||||
const dataFrameFieldIndex = graphContext.mapSeriesIndexToDataFrameFieldIndex(i);
|
||||
const frame = otherProps.data[dataFrameFieldIndex.frameIndex];
|
||||
const field = otherProps.data[dataFrameFieldIndex.frameIndex].fields[dataFrameFieldIndex.fieldIndex];
|
||||
if (
|
||||
field === xField ||
|
||||
field.type === FieldType.time ||
|
||||
field.type !== FieldType.number ||
|
||||
field.config.custom?.hideFrom?.tooltip
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (f.config.custom?.hideFrom?.tooltip) {
|
||||
return agg;
|
||||
}
|
||||
|
||||
return [
|
||||
...agg,
|
||||
{
|
||||
// TODO: align with uPlot typings
|
||||
color: (plotContext.getSeries()[j].stroke as any)!(),
|
||||
label: getFieldDisplayName(f, otherProps.data[i]),
|
||||
value: formattedValueToString(f.display!(f.values.get(focusedPointIdx!))),
|
||||
isActive: originFieldIndex
|
||||
? originFieldIndex.frameIndex === i && originFieldIndex.fieldIndex === j
|
||||
: false,
|
||||
},
|
||||
];
|
||||
}, [])
|
||||
);
|
||||
series.push({
|
||||
// TODO: align with uPlot typings
|
||||
color: (plotSeries[i].stroke as any)!(),
|
||||
label: getFieldDisplayName(field, frame),
|
||||
value: formattedValueToString(field.display!(field.values.get(focusedPointIdx!))),
|
||||
isActive: originFieldIndex
|
||||
? dataFrameFieldIndex.frameIndex === originFieldIndex.frameIndex &&
|
||||
dataFrameFieldIndex.fieldIndex === originFieldIndex.fieldIndex
|
||||
: false,
|
||||
});
|
||||
}
|
||||
|
||||
tooltip = <SeriesTable series={series} timestamp={xVal} />;
|
||||
|
||||
@@ -33,21 +33,31 @@ export function buildPlotConfig(props: PlotProps, plugins: Record<string, PlotPl
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export function preparePlotData(frame: DataFrame): AlignedData {
|
||||
return frame.fields.map((f) => {
|
||||
export function preparePlotData(frame: DataFrame, ignoreFieldTypes?: FieldType[]): AlignedData {
|
||||
const result: any[] = [];
|
||||
|
||||
for (let i = 0; i < frame.fields.length; i++) {
|
||||
const f = frame.fields[i];
|
||||
|
||||
if (f.type === FieldType.time) {
|
||||
if (f.values.length > 0 && typeof f.values.get(0) === 'string') {
|
||||
const timestamps = [];
|
||||
for (let i = 0; i < f.values.length; i++) {
|
||||
timestamps.push(dateTime(f.values.get(i)).valueOf());
|
||||
}
|
||||
return timestamps;
|
||||
result.push(timestamps);
|
||||
continue;
|
||||
}
|
||||
return f.values.toArray();
|
||||
result.push(f.values.toArray());
|
||||
continue;
|
||||
}
|
||||
|
||||
return f.values.toArray();
|
||||
}) as AlignedData;
|
||||
if (ignoreFieldTypes && ignoreFieldTypes.indexOf(f.type) > -1) {
|
||||
continue;
|
||||
}
|
||||
result.push(f.values.toArray());
|
||||
}
|
||||
return result as AlignedData;
|
||||
}
|
||||
|
||||
// Dev helpers
|
||||
|
||||
@@ -24,6 +24,7 @@ export type IconName =
|
||||
| 'bolt'
|
||||
| 'book-open'
|
||||
| 'book'
|
||||
| 'brackets-curly'
|
||||
| 'bug'
|
||||
| 'calculator-alt'
|
||||
| 'calendar-alt'
|
||||
@@ -157,6 +158,7 @@ export const getAvailableIcons = (): IconName[] => [
|
||||
'bolt',
|
||||
'book-open',
|
||||
'book',
|
||||
'brackets-curly',
|
||||
'bug',
|
||||
'calculator-alt',
|
||||
'calendar-alt',
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@jaegertracing/jaeger-ui-components",
|
||||
"version": "7.5.0-pre.0",
|
||||
"version": "7.5.0",
|
||||
"main": "src/index.ts",
|
||||
"types": "src/index.ts",
|
||||
"license": "Apache-2.0",
|
||||
@@ -14,8 +14,8 @@
|
||||
"typescript": "4.1.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grafana/data": "7.5.0-pre.0",
|
||||
"@grafana/ui": "7.5.0-pre.0",
|
||||
"@grafana/data": "7.5.0",
|
||||
"@grafana/ui": "7.5.0",
|
||||
"@types/classnames": "^2.2.7",
|
||||
"@types/deep-freeze": "^0.1.1",
|
||||
"@types/hoist-non-react-statics": "^3.3.1",
|
||||
|
||||
@@ -18,6 +18,7 @@ var plog = log.New("api")
|
||||
|
||||
// registerRoutes registers all API HTTP routes.
|
||||
func (hs *HTTPServer) registerRoutes() {
|
||||
reqNoAuth := middleware.NoAuth()
|
||||
reqSignedIn := middleware.ReqSignedIn
|
||||
reqSignedInNoAnonymous := middleware.ReqSignedInNoAnonymous
|
||||
reqGrafanaAdmin := middleware.ReqGrafanaAdmin
|
||||
@@ -118,7 +119,7 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), routing.Wrap(ResetPassword))
|
||||
|
||||
// dashboard snapshots
|
||||
r.Get("/dashboard/snapshot/*", hs.Index)
|
||||
r.Get("/dashboard/snapshot/*", reqNoAuth, hs.Index)
|
||||
r.Get("/dashboard/snapshots/", reqSignedIn, hs.Index)
|
||||
|
||||
// api renew session based on cookie
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"regexp"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/datasource"
|
||||
"github.com/grafana/grafana/pkg/api/pluginproxy"
|
||||
@@ -40,9 +41,7 @@ func (hs *HTTPServer) ProxyDataSourceRequest(c *models.ReqContext) {
|
||||
return
|
||||
}
|
||||
|
||||
// macaron does not include trailing slashes when resolving a wildcard path
|
||||
proxyPath := ensureProxyPathTrailingSlash(c.Req.URL.Path, c.Params("*"))
|
||||
|
||||
proxyPath := getProxyPath(c)
|
||||
proxy, err := pluginproxy.NewDataSourceProxy(ds, plugin, c, proxyPath, hs.Cfg)
|
||||
if err != nil {
|
||||
if errors.Is(err, datasource.URLValidationError{}) {
|
||||
@@ -55,14 +54,12 @@ func (hs *HTTPServer) ProxyDataSourceRequest(c *models.ReqContext) {
|
||||
proxy.HandleRequest()
|
||||
}
|
||||
|
||||
// ensureProxyPathTrailingSlash Check for a trailing slash in original path and makes
|
||||
// sure that a trailing slash is added to proxy path, if not already exists.
|
||||
func ensureProxyPathTrailingSlash(originalPath, proxyPath string) string {
|
||||
if len(proxyPath) > 1 {
|
||||
if originalPath[len(originalPath)-1] == '/' && proxyPath[len(proxyPath)-1] != '/' {
|
||||
return proxyPath + "/"
|
||||
}
|
||||
}
|
||||
var proxyPathRegexp = regexp.MustCompile(`^\/api\/datasources\/proxy\/[\d]+\/?`)

return proxyPath
func extractProxyPath(originalRawPath string) string {
return proxyPathRegexp.ReplaceAllString(originalRawPath, "")
}

func getProxyPath(c *models.ReqContext) string {
return extractProxyPath(c.Req.URL.EscapedPath())
}
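As a point of reference, here is a minimal standalone sketch (not part of the change itself) of what the regexp-based extraction yields; the sample inputs are the ones exercised by the updated test further down, including the URL-encoded %2F segments the proxy now preserves.

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as above: strip the "/api/datasources/proxy/<id>/" prefix from
// the escaped request path, leaving everything after it (including %2F) untouched.
var proxyPathRegexp = regexp.MustCompile(`^\/api\/datasources\/proxy\/[\d]+\/?`)

func extractProxyPath(originalRawPath string) string {
	return proxyPathRegexp.ReplaceAllString(originalRawPath, "")
}

func main() {
	fmt.Println(extractProxyPath("/api/datasources/proxy/1"))                                      // ""
	fmt.Println(extractProxyPath("/api/datasources/proxy/1/some/thing"))                           // "some/thing"
	fmt.Println(extractProxyPath("/api/datasources/proxy/54/api/services/afsd%2Fafsd/operations")) // "api/services/afsd%2Fafsd/operations"
}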
|
||||
|
||||
@@ -7,28 +7,28 @@ import (
|
||||
)
|
||||
|
||||
func TestDataProxy(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
origPath string
|
||||
proxyPath string
|
||||
exp string
|
||||
}{
|
||||
{
|
||||
"Should append trailing slash to proxy path if original path has a trailing slash",
|
||||
"/api/datasources/proxy/6/api/v1/query_range/",
|
||||
"api/v1/query_range/",
|
||||
"api/v1/query_range/",
|
||||
},
|
||||
{
|
||||
"Should not append trailing slash to proxy path if original path doesn't have a trailing slash",
|
||||
"/api/datasources/proxy/6/api/v1/query_range",
|
||||
"api/v1/query_range",
|
||||
"api/v1/query_range",
|
||||
},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
assert.Equal(t, tc.exp, ensureProxyPathTrailingSlash(tc.origPath, tc.proxyPath))
|
||||
})
|
||||
}
|
||||
t.Run("extractProxyPath", func(t *testing.T) {
|
||||
testCases := []struct {
|
||||
originalRawPath string
|
||||
exp string
|
||||
}{
|
||||
{
|
||||
"/api/datasources/proxy/1",
|
||||
"",
|
||||
},
|
||||
{
|
||||
"/api/datasources/proxy/1/some/thing",
|
||||
"some/thing",
|
||||
},
|
||||
{
|
||||
"/api/datasources/proxy/54/api/services/afsd%2Fafsd/operations",
|
||||
"api/services/afsd%2Fafsd/operations",
|
||||
},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run("Given raw path, should extract expected proxy path", func(t *testing.T) {
|
||||
assert.Equal(t, tc.exp, extractProxyPath(tc.originalRawPath))
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -107,8 +107,9 @@ func (hs *HTTPServer) Run(ctx context.Context) error {
|
||||
// Remove any square brackets enclosing IPv6 addresses, a format we support for backwards compatibility
|
||||
host := strings.TrimSuffix(strings.TrimPrefix(setting.HttpAddr, "["), "]")
|
||||
hs.httpSrv = &http.Server{
Addr:    net.JoinHostPort(host, setting.HttpPort),
Handler: hs.macaron,
Addr:        net.JoinHostPort(host, setting.HttpPort),
Handler:     hs.macaron,
ReadTimeout: hs.Cfg.ReadTimeout,
}
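For orientation, a self-contained sketch of the same net/http knob, with placeholder address and duration: a zero ReadTimeout leaves request reads unbounded, while a positive value makes the server give up on requests it cannot read in time.

package main

import (
	"net/http"
	"time"
)

func main() {
	srv := &http.Server{
		Addr:    ":3000", // placeholder address
		Handler: http.DefaultServeMux,
		// Zero means no timeout; a positive value bounds how long the server
		// waits for the whole request (headers and body) to be read.
		ReadTimeout: 30 * time.Second,
	}
	_ = srv.ListenAndServe()
}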
|
||||
switch hs.Cfg.Protocol {
|
||||
case setting.HTTP2Scheme:
|
||||
|
||||
@@ -283,7 +283,7 @@ func (hs *HTTPServer) Logout(c *models.ReqContext) {
|
||||
return
|
||||
}
|
||||
|
||||
err := hs.AuthTokenService.RevokeToken(c.Req.Context(), c.UserToken)
|
||||
err := hs.AuthTokenService.RevokeToken(c.Req.Context(), c.UserToken, false)
|
||||
if err != nil && !errors.Is(err, models.ErrUserTokenNotFound) {
|
||||
hs.log.Error("failed to revoke auth token", "error", err)
|
||||
}
|
||||
|
||||
@@ -179,20 +179,28 @@ func (proxy *DataSourceProxy) director(req *http.Request) {
|
||||
|
||||
switch proxy.ds.Type {
|
||||
case models.DS_INFLUXDB_08:
|
||||
req.URL.Path = util.JoinURLFragments(proxy.targetUrl.Path, "db/"+proxy.ds.Database+"/"+proxy.proxyPath)
|
||||
req.URL.RawPath = util.JoinURLFragments(proxy.targetUrl.Path, "db/"+proxy.ds.Database+"/"+proxy.proxyPath)
|
||||
reqQueryVals.Add("u", proxy.ds.User)
|
||||
reqQueryVals.Add("p", proxy.ds.DecryptedPassword())
|
||||
req.URL.RawQuery = reqQueryVals.Encode()
|
||||
case models.DS_INFLUXDB:
|
||||
req.URL.Path = util.JoinURLFragments(proxy.targetUrl.Path, proxy.proxyPath)
|
||||
req.URL.RawPath = util.JoinURLFragments(proxy.targetUrl.Path, proxy.proxyPath)
|
||||
req.URL.RawQuery = reqQueryVals.Encode()
|
||||
if !proxy.ds.BasicAuth {
|
||||
req.Header.Set("Authorization", util.GetBasicAuthHeader(proxy.ds.User, proxy.ds.DecryptedPassword()))
|
||||
}
|
||||
default:
|
||||
req.URL.Path = util.JoinURLFragments(proxy.targetUrl.Path, proxy.proxyPath)
|
||||
req.URL.RawPath = util.JoinURLFragments(proxy.targetUrl.Path, proxy.proxyPath)
|
||||
}
|
||||
|
||||
unescapedPath, err := url.PathUnescape(req.URL.RawPath)
if err != nil {
logger.Error("Failed to unescape raw path", "rawPath", req.URL.RawPath, "error", err)
return
}

req.URL.Path = unescapedPath
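A small sketch of the net/url behaviour this relies on: RawPath keeps the encoded form, Path the decoded one, and EscapedPath() hands RawPath back as long as it is a valid encoding of Path, so %2F segments survive the round trip.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	u := &url.URL{RawPath: "/path/%2Ftest%2Ftest%2F"}

	// Mirror of the change above: decode RawPath once and store it in Path.
	decoded, err := url.PathUnescape(u.RawPath)
	if err != nil {
		panic(err)
	}
	u.Path = decoded

	fmt.Println(u.Path)          // /path//test/test/
	fmt.Println(u.EscapedPath()) // /path/%2Ftest%2Ftest%2F
}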
|
||||
|
||||
if proxy.ds.BasicAuth {
|
||||
req.Header.Set("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser,
|
||||
proxy.ds.DecryptedBasicAuthPassword()))
|
||||
|
||||
@@ -527,7 +527,7 @@ func TestDataSourceProxy_requestHandling(t *testing.T) {
|
||||
|
||||
type setUpCfg struct {
|
||||
headers map[string]string
|
||||
writeCb func(w http.ResponseWriter)
|
||||
writeCb func(w http.ResponseWriter, r *http.Request)
|
||||
}
|
||||
|
||||
setUp := func(t *testing.T, cfgs ...setUpCfg) (*models.ReqContext, *models.DataSource) {
|
||||
@@ -539,7 +539,7 @@ func TestDataSourceProxy_requestHandling(t *testing.T) {
|
||||
for _, cfg := range cfgs {
|
||||
if cfg.writeCb != nil {
|
||||
t.Log("Writing response via callback")
|
||||
cfg.writeCb(w)
|
||||
cfg.writeCb(w, r)
|
||||
written = true
|
||||
}
|
||||
}
|
||||
@@ -607,7 +607,7 @@ func TestDataSourceProxy_requestHandling(t *testing.T) {
|
||||
|
||||
t.Run("Data source returns status code 401", func(t *testing.T) {
|
||||
ctx, ds := setUp(t, setUpCfg{
|
||||
writeCb: func(w http.ResponseWriter) {
|
||||
writeCb: func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(401)
|
||||
w.Header().Set("www-authenticate", `Basic realm="Access to the server"`)
|
||||
_, err := w.Write([]byte("Not authenticated"))
|
||||
@@ -624,6 +624,28 @@ func TestDataSourceProxy_requestHandling(t *testing.T) {
|
||||
assert.Equal(t, 400, proxy.ctx.Resp.Status(), "Status code 401 should be converted to 400")
|
||||
assert.Empty(t, proxy.ctx.Resp.Header().Get("www-authenticate"))
|
||||
})
|
||||
|
||||
t.Run("Data source should handle proxy path url encoding correctly", func(t *testing.T) {
|
||||
var req *http.Request
|
||||
ctx, ds := setUp(t, setUpCfg{
|
||||
writeCb: func(w http.ResponseWriter, r *http.Request) {
|
||||
req = r
|
||||
w.WriteHeader(200)
|
||||
_, err := w.Write([]byte("OK"))
|
||||
require.NoError(t, err)
|
||||
},
|
||||
})
|
||||
|
||||
ctx.Req.Request = httptest.NewRequest("GET", "/api/datasources/proxy/1/path/%2Ftest%2Ftest%2F?query=%2Ftest%2Ftest%2F", nil)
|
||||
proxy, err := NewDataSourceProxy(ds, plugin, ctx, "/path/%2Ftest%2Ftest%2F", &setting.Cfg{})
|
||||
require.NoError(t, err)
|
||||
|
||||
proxy.HandleRequest()
|
||||
|
||||
require.NoError(t, writeErr)
|
||||
require.NotNil(t, req)
|
||||
require.Equal(t, "/path/%2Ftest%2Ftest%2F?query=%2Ftest%2Ftest%2F", req.RequestURI)
|
||||
})
|
||||
}
|
||||
|
||||
func TestNewDataSourceProxy_InvalidURL(t *testing.T) {
|
||||
|
||||
@@ -132,7 +132,7 @@ func (hs *HTTPServer) revokeUserAuthTokenInternal(c *models.ReqContext, userID i
|
||||
return response.Error(400, "Cannot revoke active user auth token", nil)
|
||||
}
|
||||
|
||||
err = hs.AuthTokenService.RevokeToken(c.Req.Context(), token)
|
||||
err = hs.AuthTokenService.RevokeToken(c.Req.Context(), token, false)
|
||||
if err != nil {
|
||||
if errors.Is(err, models.ErrUserTokenNotFound) {
|
||||
return response.Error(404, "User auth token not found", err)
|
||||
|
||||
@@ -32,6 +32,7 @@ import (
|
||||
_ "github.com/grafana/grafana/pkg/tsdb/opentsdb"
|
||||
_ "github.com/grafana/grafana/pkg/tsdb/postgres"
|
||||
_ "github.com/grafana/grafana/pkg/tsdb/prometheus"
|
||||
_ "github.com/grafana/grafana/pkg/tsdb/tempo"
|
||||
_ "github.com/grafana/grafana/pkg/tsdb/testdatasource"
|
||||
)
|
||||
|
||||
|
||||
@@ -102,6 +102,25 @@ func (uss *UsageStatsService) GetUsageReport(ctx context.Context) (UsageReport,
|
||||
}
|
||||
metrics["stats.ds.other.count"] = dsOtherCount
|
||||
|
||||
esDataSourcesQuery := models.GetDataSourcesByTypeQuery{Type: models.DS_ES}
|
||||
if err := uss.Bus.Dispatch(&esDataSourcesQuery); err != nil {
|
||||
metricsLogger.Error("Failed to get elasticsearch json data", "error", err)
|
||||
return report, err
|
||||
}
|
||||
|
||||
for _, data := range esDataSourcesQuery.Result {
esVersion, err := data.JsonData.Get("esVersion").Int()
if err != nil {
continue
}

statName := fmt.Sprintf("stats.ds.elasticsearch.v%d.count", esVersion)

count, _ := metrics[statName].(int64)

metrics[statName] = count + 1
}
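The loop above amounts to bucketing data sources by esVersion into per-version stat names; a standalone sketch with hard-coded versions (mirroring the test fixture of two v2 data sources and one v70):

package main

import "fmt"

func main() {
	esVersions := []int{2, 2, 70} // stand-in for JsonData.Get("esVersion").Int() per data source
	metrics := map[string]interface{}{}

	for _, v := range esVersions {
		statName := fmt.Sprintf("stats.ds.elasticsearch.v%d.count", v)
		count, _ := metrics[statName].(int64) // zero value when the key is missing
		metrics[statName] = count + 1
	}

	fmt.Println(metrics) // map[stats.ds.elasticsearch.v2.count:2 stats.ds.elasticsearch.v70.count:1]
}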
|
||||
|
||||
metrics["stats.packaging."+setting.Packaging+".count"] = 1
|
||||
metrics["stats.distributor."+setting.ReportingDistributor+".count"] = 1
|
||||
|
||||
|
||||
@@ -94,6 +94,29 @@ func TestMetrics(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
var getESDatasSourcesQuery *models.GetDataSourcesByTypeQuery
|
||||
uss.Bus.AddHandler(func(query *models.GetDataSourcesByTypeQuery) error {
|
||||
query.Result = []*models.DataSource{
|
||||
{
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 2,
|
||||
}),
|
||||
},
|
||||
{
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 2,
|
||||
}),
|
||||
},
|
||||
{
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 70,
|
||||
}),
|
||||
},
|
||||
}
|
||||
getESDatasSourcesQuery = query
|
||||
return nil
|
||||
})
|
||||
|
||||
var getDataSourceAccessStatsQuery *models.GetDataSourceAccessStatsQuery
|
||||
uss.Bus.AddHandler(func(query *models.GetDataSourceAccessStatsQuery) error {
|
||||
query.Result = []*models.DataSourceAccessStats{
|
||||
@@ -200,6 +223,7 @@ func TestMetrics(t *testing.T) {
|
||||
assert.Nil(t, getSystemStatsQuery)
|
||||
assert.Nil(t, getDataSourceStatsQuery)
|
||||
assert.Nil(t, getDataSourceAccessStatsQuery)
|
||||
assert.Nil(t, getESDatasSourcesQuery)
|
||||
assert.Nil(t, req)
|
||||
})
|
||||
})
|
||||
@@ -226,6 +250,7 @@ func TestMetrics(t *testing.T) {
|
||||
assert.NotNil(t, getSystemStatsQuery)
|
||||
assert.NotNil(t, getDataSourceStatsQuery)
|
||||
assert.NotNil(t, getDataSourceAccessStatsQuery)
|
||||
assert.NotNil(t, getESDatasSourcesQuery)
|
||||
assert.NotNil(t, getAlertNotifierUsageStatsQuery)
|
||||
assert.NotNil(t, req)
|
||||
|
||||
@@ -266,6 +291,10 @@ func TestMetrics(t *testing.T) {
|
||||
|
||||
assert.Equal(t, 9, metrics.Get("stats.ds."+models.DS_ES+".count").MustInt())
|
||||
assert.Equal(t, 10, metrics.Get("stats.ds."+models.DS_PROMETHEUS+".count").MustInt())
|
||||
|
||||
assert.Equal(t, 2, metrics.Get("stats.ds."+models.DS_ES+".v2.count").MustInt())
|
||||
assert.Equal(t, 1, metrics.Get("stats.ds."+models.DS_ES+".v70.count").MustInt())
|
||||
|
||||
assert.Equal(t, 11+12, metrics.Get("stats.ds.other.count").MustInt())
|
||||
|
||||
assert.Equal(t, 1, metrics.Get("stats.ds_access."+models.DS_ES+".direct.count").MustInt())
|
||||
@@ -420,6 +449,11 @@ func TestMetrics(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
uss.Bus.AddHandler(func(query *models.GetDataSourcesByTypeQuery) error {
|
||||
query.Result = []*models.DataSource{}
|
||||
return nil
|
||||
})
|
||||
|
||||
uss.Bus.AddHandler(func(query *models.GetDataSourceAccessStatsQuery) error {
|
||||
query.Result = []*models.DataSourceAccessStats{}
|
||||
return nil
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package middleware
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strconv"
|
||||
@@ -34,6 +35,27 @@ func notAuthorized(c *models.ReqContext) {
|
||||
return
|
||||
}
|
||||
|
||||
writeRedirectCookie(c)
|
||||
c.Redirect(setting.AppSubUrl + "/login")
|
||||
}
|
||||
|
||||
func tokenRevoked(c *models.ReqContext, err *models.TokenRevokedError) {
if c.IsApiRequest() {
c.JSON(401, map[string]interface{}{
"message": "Token revoked",
"error": map[string]interface{}{
"id": "ERR_TOKEN_REVOKED",
"maxConcurrentSessions": err.MaxConcurrentSessions,
},
})
return
}

writeRedirectCookie(c)
c.Redirect(setting.AppSubUrl + "/login")
}
|
||||
|
||||
func writeRedirectCookie(c *models.ReqContext) {
|
||||
redirectTo := c.Req.RequestURI
|
||||
if setting.AppSubUrl != "" && !strings.HasPrefix(redirectTo, setting.AppSubUrl) {
|
||||
redirectTo = setting.AppSubUrl + c.Req.RequestURI
|
||||
@@ -43,7 +65,6 @@ func notAuthorized(c *models.ReqContext) {
|
||||
redirectTo = removeForceLoginParams(redirectTo)
|
||||
|
||||
cookies.WriteCookie(c.Resp, "redirect_to", url.QueryEscape(redirectTo), 0, nil)
|
||||
c.Redirect(setting.AppSubUrl + "/login")
|
||||
}
|
||||
|
||||
var forceLoginParamsRegexp = regexp.MustCompile(`&?forceLogin=true`)
|
||||
@@ -76,13 +97,8 @@ func RoleAuth(roles ...models.RoleType) macaron.Handler {
|
||||
func Auth(options *AuthOptions) macaron.Handler {
|
||||
return func(c *models.ReqContext) {
|
||||
forceLogin := false
|
||||
|
||||
if c.AllowAnonymous {
|
||||
forceLoginParam, err := strconv.ParseBool(c.Req.URL.Query().Get("forceLogin"))
|
||||
if err == nil {
|
||||
forceLogin = forceLoginParam
|
||||
}
|
||||
|
||||
forceLogin = shouldForceLogin(c)
|
||||
if !forceLogin {
|
||||
orgIDValue := c.Req.URL.Query().Get("orgId")
|
||||
orgID, err := strconv.ParseInt(orgIDValue, 10, 64)
|
||||
@@ -95,6 +111,13 @@ func Auth(options *AuthOptions) macaron.Handler {
|
||||
requireLogin := !c.AllowAnonymous || forceLogin || options.ReqNoAnonynmous
|
||||
|
||||
if !c.IsSignedIn && options.ReqSignedIn && requireLogin {
lookupTokenErr, hasTokenErr := c.Data["lookupTokenErr"].(error)
var revokedErr *models.TokenRevokedError
if hasTokenErr && errors.As(lookupTokenErr, &revokedErr) {
tokenRevoked(c, revokedErr)
return
}

notAuthorized(c)
return
}
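The revoked case is detected by unwrapping the lookup error with errors.As; a minimal sketch of that pattern using a stand-in error type (the real one is models.TokenRevokedError):

package main

import (
	"errors"
	"fmt"
)

// Stand-in for models.TokenRevokedError.
type tokenRevokedError struct{ MaxConcurrentSessions int64 }

func (e *tokenRevokedError) Error() string { return "user token revoked" }

func main() {
	lookupErr := fmt.Errorf("failed to look up session: %w", &tokenRevokedError{MaxConcurrentSessions: 3})

	var revokedErr *tokenRevokedError
	if errors.As(lookupErr, &revokedErr) {
		// Matches even through wrapping, so the handler can answer with
		// ERR_TOKEN_REVOKED and the session limit instead of a plain 401.
		fmt.Println("revoked, max sessions:", revokedErr.MaxConcurrentSessions)
	}
}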
|
||||
@@ -137,3 +160,26 @@ func SnapshotPublicModeOrSignedIn(cfg *setting.Cfg) macaron.Handler {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NoAuth creates a middleware that doesn't require any authentication.
|
||||
// If forceLogin param is set it will redirect the user to the login page.
|
||||
func NoAuth() macaron.Handler {
|
||||
return func(c *models.ReqContext) {
|
||||
if shouldForceLogin(c) {
|
||||
notAuthorized(c)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// shouldForceLogin checks if user should be enforced to login.
|
||||
// Returns true if forceLogin parameter is set.
|
||||
func shouldForceLogin(c *models.ReqContext) bool {
|
||||
forceLogin := false
|
||||
forceLoginParam, err := strconv.ParseBool(c.Req.URL.Query().Get("forceLogin"))
|
||||
if err == nil {
|
||||
forceLogin = forceLoginParam
|
||||
}
|
||||
|
||||
return forceLogin
|
||||
}
|
||||
|
||||
@@ -208,6 +208,11 @@ type GetDataSourcesQuery struct {
|
||||
Result []*DataSource
|
||||
}
|
||||
|
||||
type GetDataSourcesByTypeQuery struct {
|
||||
Type string
|
||||
Result []*DataSource
|
||||
}
|
||||
|
||||
type GetDefaultDataSourceQuery struct {
|
||||
OrgId int64
|
||||
User *SignedInUser
|
||||
|
||||
@@ -32,6 +32,14 @@ type TokenExpiredError struct {
|
||||
|
||||
func (e *TokenExpiredError) Error() string { return "user token expired" }
|
||||
|
||||
type TokenRevokedError struct {
|
||||
UserID int64
|
||||
TokenID int64
|
||||
MaxConcurrentSessions int64
|
||||
}
|
||||
|
||||
func (e *TokenRevokedError) Error() string { return "user token revoked" }
|
||||
|
||||
// UserToken represents a user token
|
||||
type UserToken struct {
|
||||
Id int64
|
||||
@@ -45,6 +53,7 @@ type UserToken struct {
|
||||
RotatedAt int64
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
RevokedAt int64
|
||||
UnhashedToken string
|
||||
}
|
||||
|
||||
@@ -57,9 +66,10 @@ type UserTokenService interface {
|
||||
CreateToken(ctx context.Context, user *User, clientIP net.IP, userAgent string) (*UserToken, error)
|
||||
LookupToken(ctx context.Context, unhashedToken string) (*UserToken, error)
|
||||
TryRotateToken(ctx context.Context, token *UserToken, clientIP net.IP, userAgent string) (bool, error)
|
||||
RevokeToken(ctx context.Context, token *UserToken) error
|
||||
RevokeToken(ctx context.Context, token *UserToken, soft bool) error
|
||||
RevokeAllUserTokens(ctx context.Context, userId int64) error
|
||||
ActiveTokenCount(ctx context.Context) (int64, error)
|
||||
GetUserToken(ctx context.Context, userId, userTokenId int64) (*UserToken, error)
|
||||
GetUserTokens(ctx context.Context, userId int64) ([]*UserToken, error)
|
||||
GetUserRevokedTokens(ctx context.Context, userId int64) ([]*UserToken, error)
|
||||
}
|
||||
|
||||
@@ -9,9 +9,11 @@ import (
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana-aws-sdk/pkg/awsds"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
@@ -108,6 +110,8 @@ func (m *manager) Register(pluginID string, factory PluginFactoryFunc) error {
|
||||
}
|
||||
}
|
||||
|
||||
hostEnv = append(hostEnv, m.getAWSEnvironmentVariables()...)
|
||||
|
||||
env := pluginSettings.ToEnv("GF_PLUGIN", hostEnv)
|
||||
|
||||
pluginLogger := m.logger.New("pluginId", pluginID)
|
||||
@@ -121,6 +125,18 @@ func (m *manager) Register(pluginID string, factory PluginFactoryFunc) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *manager) getAWSEnvironmentVariables() []string {
variables := []string{}
if m.Cfg.AWSAssumeRoleEnabled {
variables = append(variables, awsds.AssumeRoleEnabledEnvVarKeyName+"=true")
}
if len(m.Cfg.AWSAllowedAuthProviders) > 0 {
variables = append(variables, awsds.AllowedAuthProvidersEnvVarKeyName+"="+strings.Join(m.Cfg.AWSAllowedAuthProviders, ","))
}

return variables
}
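The allow-list reaches the plugin process as a single comma-joined environment variable; a rough sketch of producing and consuming such a value (the variable name below is a placeholder, the real keys come from the awsds constants used above):

package main

import (
	"fmt"
	"strings"
)

func main() {
	allowed := []string{"keys", "credentials"}

	// Producer side, as in getAWSEnvironmentVariables: one KEY=v1,v2 entry.
	envVar := "AWS_ALLOWED_AUTH_PROVIDERS=" + strings.Join(allowed, ",") // placeholder key name

	// Consumer side: split the value back into individual providers.
	value := strings.SplitN(envVar, "=", 2)[1]
	for _, provider := range strings.Split(value, ",") {
		fmt.Println("enabled auth provider:", provider)
	}
}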
|
||||
|
||||
// start starts all managed backend plugins
|
||||
func (m *manager) start(ctx context.Context) {
|
||||
m.pluginsMu.RLock()
|
||||
|
||||
@@ -3,12 +3,14 @@ package backendplugin
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"sync"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana-aws-sdk/pkg/awsds"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
@@ -57,8 +59,8 @@ func TestManager(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("Should provide expected host environment variables", func(t *testing.T) {
|
||||
require.Len(t, ctx.env, 2)
|
||||
require.EqualValues(t, []string{"GF_VERSION=7.0.0", "GF_EDITION=Open Source"}, ctx.env)
|
||||
require.Len(t, ctx.env, 4)
|
||||
require.EqualValues(t, []string{"GF_VERSION=7.0.0", "GF_EDITION=Open Source", fmt.Sprintf("%s=true", awsds.AssumeRoleEnabledEnvVarKeyName), fmt.Sprintf("%s=keys,credentials", awsds.AllowedAuthProvidersEnvVarKeyName)}, ctx.env)
|
||||
})
|
||||
|
||||
t.Run("When manager runs should start and stop plugin", func(t *testing.T) {
|
||||
@@ -259,8 +261,8 @@ func TestManager(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
t.Run("Should provide expected host environment variables", func(t *testing.T) {
|
||||
require.Len(t, ctx.env, 4)
|
||||
require.EqualValues(t, []string{"GF_VERSION=7.0.0", "GF_EDITION=Enterprise", "GF_ENTERPRISE_LICENSE_PATH=/license.txt", "GF_ENTERPRISE_LICENSE_TEXT=testtoken"}, ctx.env)
|
||||
require.Len(t, ctx.env, 6)
|
||||
require.EqualValues(t, []string{"GF_VERSION=7.0.0", "GF_EDITION=Enterprise", "GF_ENTERPRISE_LICENSE_PATH=/license.txt", "GF_ENTERPRISE_LICENSE_TEXT=testtoken", fmt.Sprintf("%s=true", awsds.AssumeRoleEnabledEnvVarKeyName), fmt.Sprintf("%s=keys,credentials", awsds.AllowedAuthProvidersEnvVarKeyName)}, ctx.env)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -278,6 +280,9 @@ type managerScenarioCtx struct {
|
||||
func newManagerScenario(t *testing.T, managed bool, fn func(t *testing.T, ctx *managerScenarioCtx)) {
|
||||
t.Helper()
|
||||
cfg := setting.NewCfg()
|
||||
cfg.AWSAllowedAuthProviders = []string{"keys", "credentials"}
|
||||
cfg.AWSAssumeRoleEnabled = true
|
||||
|
||||
license := &testLicensingService{}
|
||||
validator := &testPluginRequestValidator{}
|
||||
ctx := &managerScenarioCtx{
|
||||
|
||||
@@ -49,7 +49,7 @@ func (s *UserAuthTokenService) ActiveTokenCount(ctx context.Context) (int64, err
|
||||
var err error
|
||||
err = s.SQLStore.WithDbSession(ctx, func(dbSession *sqlstore.DBSession) error {
|
||||
var model userAuthToken
|
||||
count, err = dbSession.Where(`created_at > ? AND rotated_at > ?`,
|
||||
count, err = dbSession.Where(`created_at > ? AND rotated_at > ? AND revoked_at = 0`,
|
||||
s.createdAfterParam(),
|
||||
s.rotatedAfterParam()).
|
||||
Count(&model)
|
||||
@@ -84,6 +84,7 @@ func (s *UserAuthTokenService) CreateToken(ctx context.Context, user *models.Use
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
SeenAt: 0,
|
||||
RevokedAt: 0,
|
||||
AuthTokenSeen: false,
|
||||
}
|
||||
|
||||
@@ -127,6 +128,13 @@ func (s *UserAuthTokenService) LookupToken(ctx context.Context, unhashedToken st
|
||||
return nil, models.ErrUserTokenNotFound
|
||||
}
|
||||
|
||||
if model.RevokedAt > 0 {
|
||||
return nil, &models.TokenRevokedError{
|
||||
UserID: model.UserId,
|
||||
TokenID: model.Id,
|
||||
}
|
||||
}
|
||||
|
||||
if model.CreatedAt <= s.createdAfterParam() || model.RotatedAt <= s.rotatedAfterParam() {
|
||||
return nil, &models.TokenExpiredError{
|
||||
UserID: model.UserId,
|
||||
@@ -278,7 +286,7 @@ func (s *UserAuthTokenService) TryRotateToken(ctx context.Context, token *models
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *models.UserToken) error {
|
||||
func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *models.UserToken, soft bool) error {
|
||||
if token == nil {
|
||||
return models.ErrUserTokenNotFound
|
||||
}
|
||||
@@ -289,10 +297,19 @@ func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *models.Us
|
||||
}
|
||||
|
||||
var rowsAffected int64
|
||||
err = s.SQLStore.WithDbSession(ctx, func(dbSession *sqlstore.DBSession) error {
|
||||
rowsAffected, err = dbSession.Delete(model)
|
||||
return err
|
||||
})
|
||||
|
||||
if soft {
|
||||
model.RevokedAt = getTime().Unix()
|
||||
err = s.SQLStore.WithDbSession(ctx, func(dbSession *sqlstore.DBSession) error {
|
||||
rowsAffected, err = dbSession.ID(model.Id).Update(model)
|
||||
return err
|
||||
})
|
||||
} else {
|
||||
err = s.SQLStore.WithDbSession(ctx, func(dbSession *sqlstore.DBSession) error {
|
||||
rowsAffected, err = dbSession.Delete(model)
|
||||
return err
|
||||
})
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -303,7 +320,7 @@ func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *models.Us
|
||||
return models.ErrUserTokenNotFound
|
||||
}
|
||||
|
||||
s.log.Debug("user auth token revoked", "tokenId", model.Id, "userId", model.UserId, "clientIP", model.ClientIp, "userAgent", model.UserAgent)
|
||||
s.log.Debug("user auth token revoked", "tokenId", model.Id, "userId", model.UserId, "clientIP", model.ClientIp, "userAgent", model.UserAgent, "soft", soft)
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -380,7 +397,7 @@ func (s *UserAuthTokenService) GetUserTokens(ctx context.Context, userId int64)
|
||||
result := []*models.UserToken{}
|
||||
err := s.SQLStore.WithDbSession(ctx, func(dbSession *sqlstore.DBSession) error {
|
||||
var tokens []*userAuthToken
|
||||
err := dbSession.Where("user_id = ? AND created_at > ? AND rotated_at > ?",
|
||||
err := dbSession.Where("user_id = ? AND created_at > ? AND rotated_at > ? AND revoked_at = 0",
|
||||
userId,
|
||||
s.createdAfterParam(),
|
||||
s.rotatedAfterParam()).
|
||||
@@ -403,6 +420,29 @@ func (s *UserAuthTokenService) GetUserTokens(ctx context.Context, userId int64)
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) GetUserRevokedTokens(ctx context.Context, userId int64) ([]*models.UserToken, error) {
|
||||
result := []*models.UserToken{}
|
||||
err := s.SQLStore.WithDbSession(ctx, func(dbSession *sqlstore.DBSession) error {
|
||||
var tokens []*userAuthToken
|
||||
err := dbSession.Where("user_id = ? AND revoked_at > 0", userId).Find(&tokens)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, token := range tokens {
|
||||
var userToken models.UserToken
|
||||
if err := token.toUserToken(&userToken); err != nil {
|
||||
return err
|
||||
}
|
||||
result = append(result, &userToken)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) createdAfterParam() int64 {
|
||||
return getTime().Add(-s.Cfg.LoginMaxLifetime).Unix()
|
||||
}
|
||||
|
||||
@@ -60,8 +60,18 @@ func TestUserAuthToken(t *testing.T) {
|
||||
So(userToken, ShouldBeNil)
|
||||
})
|
||||
|
||||
Convey("revoking existing token should delete token", func() {
|
||||
err = userAuthTokenService.RevokeToken(context.Background(), userToken)
|
||||
Convey("soft revoking existing token should not delete it", func() {
|
||||
err = userAuthTokenService.RevokeToken(context.Background(), userToken, true)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
model, err := ctx.getAuthTokenByID(userToken.Id)
|
||||
So(err, ShouldBeNil)
|
||||
So(model, ShouldNotBeNil)
|
||||
So(model.RevokedAt, ShouldBeGreaterThan, 0)
|
||||
})
|
||||
|
||||
Convey("revoking existing token should delete it", func() {
|
||||
err = userAuthTokenService.RevokeToken(context.Background(), userToken, false)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
model, err := ctx.getAuthTokenByID(userToken.Id)
|
||||
@@ -70,13 +80,13 @@ func TestUserAuthToken(t *testing.T) {
|
||||
})
|
||||
|
||||
Convey("revoking nil token should return error", func() {
|
||||
err = userAuthTokenService.RevokeToken(context.Background(), nil)
|
||||
err = userAuthTokenService.RevokeToken(context.Background(), nil, false)
|
||||
So(err, ShouldEqual, models.ErrUserTokenNotFound)
|
||||
})
|
||||
|
||||
Convey("revoking non-existing token should return error", func() {
|
||||
userToken.Id = 1000
|
||||
err = userAuthTokenService.RevokeToken(context.Background(), userToken)
|
||||
err = userAuthTokenService.RevokeToken(context.Background(), userToken, false)
|
||||
So(err, ShouldEqual, models.ErrUserTokenNotFound)
|
||||
})
|
||||
|
||||
|
||||
@@ -18,6 +18,7 @@ type userAuthToken struct {
|
||||
RotatedAt int64
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
RevokedAt int64
|
||||
UnhashedToken string `xorm:"-"`
|
||||
}
|
||||
|
||||
@@ -43,6 +44,7 @@ func (uat *userAuthToken) fromUserToken(ut *models.UserToken) error {
|
||||
uat.RotatedAt = ut.RotatedAt
|
||||
uat.CreatedAt = ut.CreatedAt
|
||||
uat.UpdatedAt = ut.UpdatedAt
|
||||
uat.RevokedAt = ut.RevokedAt
|
||||
uat.UnhashedToken = ut.UnhashedToken
|
||||
|
||||
return nil
|
||||
@@ -64,6 +66,7 @@ func (uat *userAuthToken) toUserToken(ut *models.UserToken) error {
|
||||
ut.RotatedAt = uat.RotatedAt
|
||||
ut.CreatedAt = uat.CreatedAt
|
||||
ut.UpdatedAt = uat.UpdatedAt
|
||||
ut.RevokedAt = uat.RevokedAt
|
||||
ut.UnhashedToken = uat.UnhashedToken
|
||||
|
||||
return nil
|
||||
|
||||
@@ -8,15 +8,16 @@ import (
|
||||
)
|
||||
|
||||
type FakeUserAuthTokenService struct {
|
||||
CreateTokenProvider func(ctx context.Context, user *models.User, clientIP net.IP, userAgent string) (*models.UserToken, error)
|
||||
TryRotateTokenProvider func(ctx context.Context, token *models.UserToken, clientIP net.IP, userAgent string) (bool, error)
|
||||
LookupTokenProvider func(ctx context.Context, unhashedToken string) (*models.UserToken, error)
|
||||
RevokeTokenProvider func(ctx context.Context, token *models.UserToken) error
|
||||
RevokeAllUserTokensProvider func(ctx context.Context, userId int64) error
|
||||
ActiveAuthTokenCount func(ctx context.Context) (int64, error)
|
||||
GetUserTokenProvider func(ctx context.Context, userId, userTokenId int64) (*models.UserToken, error)
|
||||
GetUserTokensProvider func(ctx context.Context, userId int64) ([]*models.UserToken, error)
|
||||
BatchRevokedTokenProvider func(ctx context.Context, userIds []int64) error
|
||||
CreateTokenProvider func(ctx context.Context, user *models.User, clientIP net.IP, userAgent string) (*models.UserToken, error)
|
||||
TryRotateTokenProvider func(ctx context.Context, token *models.UserToken, clientIP net.IP, userAgent string) (bool, error)
|
||||
LookupTokenProvider func(ctx context.Context, unhashedToken string) (*models.UserToken, error)
|
||||
RevokeTokenProvider func(ctx context.Context, token *models.UserToken, soft bool) error
|
||||
RevokeAllUserTokensProvider func(ctx context.Context, userId int64) error
|
||||
ActiveAuthTokenCount func(ctx context.Context) (int64, error)
|
||||
GetUserTokenProvider func(ctx context.Context, userId, userTokenId int64) (*models.UserToken, error)
|
||||
GetUserTokensProvider func(ctx context.Context, userId int64) ([]*models.UserToken, error)
|
||||
GetUserRevokedTokensProvider func(ctx context.Context, userId int64) ([]*models.UserToken, error)
|
||||
BatchRevokedTokenProvider func(ctx context.Context, userIds []int64) error
|
||||
}
|
||||
|
||||
func NewFakeUserAuthTokenService() *FakeUserAuthTokenService {
|
||||
@@ -36,7 +37,7 @@ func NewFakeUserAuthTokenService() *FakeUserAuthTokenService {
|
||||
UnhashedToken: "",
|
||||
}, nil
|
||||
},
|
||||
RevokeTokenProvider: func(ctx context.Context, token *models.UserToken) error {
|
||||
RevokeTokenProvider: func(ctx context.Context, token *models.UserToken, soft bool) error {
|
||||
return nil
|
||||
},
|
||||
RevokeAllUserTokensProvider: func(ctx context.Context, userId int64) error {
|
||||
@@ -76,8 +77,8 @@ func (s *FakeUserAuthTokenService) TryRotateToken(ctx context.Context, token *mo
|
||||
return s.TryRotateTokenProvider(context.Background(), token, clientIP, userAgent)
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) RevokeToken(ctx context.Context, token *models.UserToken) error {
|
||||
return s.RevokeTokenProvider(context.Background(), token)
|
||||
func (s *FakeUserAuthTokenService) RevokeToken(ctx context.Context, token *models.UserToken, soft bool) error {
|
||||
return s.RevokeTokenProvider(context.Background(), token, soft)
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) RevokeAllUserTokens(ctx context.Context, userId int64) error {
|
||||
@@ -96,6 +97,10 @@ func (s *FakeUserAuthTokenService) GetUserTokens(ctx context.Context, userId int
|
||||
return s.GetUserTokensProvider(context.Background(), userId)
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) GetUserRevokedTokens(ctx context.Context, userId int64) ([]*models.UserToken, error) {
|
||||
return s.GetUserRevokedTokensProvider(context.Background(), userId)
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) BatchRevokeAllUserTokens(ctx context.Context, userIds []int64) error {
|
||||
return s.BatchRevokedTokenProvider(ctx, userIds)
|
||||
}
|
||||
|
||||
@@ -257,7 +257,11 @@ func (h *ContextHandler) initContextWithToken(ctx *models.ReqContext, orgID int6
|
||||
token, err := h.AuthTokenService.LookupToken(ctx.Req.Context(), rawToken)
|
||||
if err != nil {
|
||||
ctx.Logger.Error("Failed to look up user based on cookie", "error", err)
|
||||
cookies.WriteSessionCookie(ctx, h.Cfg, "", -1)
|
||||
|
||||
var revokedErr *models.TokenRevokedError
|
||||
if !errors.As(err, &revokedErr) || !ctx.IsApiRequest() {
|
||||
cookies.WriteSessionCookie(ctx, h.Cfg, "", -1)
|
||||
}
|
||||
|
||||
ctx.Data["lookupTokenErr"] = err
|
||||
return false
|
||||
|
||||
@@ -81,7 +81,7 @@ func (lps *LibraryPanelService) getHandler(c *models.ReqContext) response.Respon
|
||||
|
||||
// getAllHandler handles GET /api/library-panels/.
|
||||
func (lps *LibraryPanelService) getAllHandler(c *models.ReqContext) response.Response {
|
||||
libraryPanels, err := lps.getAllLibraryPanels(c, c.QueryInt64("limit"))
|
||||
libraryPanels, err := lps.getAllLibraryPanels(c, c.QueryInt("perPage"), c.QueryInt("page"), c.Query("name"), c.Query("excludeUid"))
|
||||
if err != nil {
|
||||
return toLibraryPanelError(err, "Failed to get library panels")
|
||||
}
|
||||
@@ -128,5 +128,8 @@ func toLibraryPanelError(err error, message string) response.Response {
|
||||
if errors.Is(err, models.ErrFolderAccessDenied) {
|
||||
return response.Error(403, models.ErrFolderAccessDenied.Error(), err)
|
||||
}
|
||||
if errors.Is(err, errLibraryPanelHasConnectedDashboards) {
|
||||
return response.Error(403, errLibraryPanelHasConnectedDashboards.Error(), err)
|
||||
}
|
||||
return response.Error(500, message, err)
|
||||
}
|
||||
|
||||
@@ -3,19 +3,19 @@ package librarypanels
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/dtos"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
)
|
||||
|
||||
var (
|
||||
sqlStatmentLibrayPanelDTOWithMeta = `
|
||||
SELECT DISTINCT
|
||||
lp.id, lp.org_id, lp.folder_id, lp.uid, lp.name, lp.model, lp.created, lp.created_by, lp.updated, lp.updated_by, lp.version
|
||||
lp.id, lp.org_id, lp.folder_id, lp.uid, lp.name, lp.type, lp.description, lp.model, lp.created, lp.created_by, lp.updated, lp.updated_by, lp.version
|
||||
, 0 AS can_edit
|
||||
, u1.login AS created_by_name
|
||||
, u1.email AS created_by_email
|
||||
@@ -28,13 +28,23 @@ FROM library_panel AS lp
|
||||
`
|
||||
)
|
||||
|
||||
func syncTitleWithName(libraryPanel *LibraryPanel) error {
|
||||
func syncFieldsWithModel(libraryPanel *LibraryPanel) error {
|
||||
var model map[string]interface{}
|
||||
if err := json.Unmarshal(libraryPanel.Model, &model); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
model["title"] = libraryPanel.Name
if model["type"] != nil {
libraryPanel.Type = model["type"].(string)
} else {
model["type"] = libraryPanel.Type
}
if model["description"] != nil {
libraryPanel.Description = model["description"].(string)
} else {
model["description"] = libraryPanel.Description
}
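The sync above is a round trip through a generic map: the name always wins for the title, while type and description are read from the model when present and written into it otherwise. A reduced sketch of the same idea:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	name, panelType := "My panel", "" // struct-side fields
	raw := []byte(`{"title":"old title","type":"timeseries"}`)

	var model map[string]interface{}
	if err := json.Unmarshal(raw, &model); err != nil {
		panic(err)
	}

	model["title"] = name // the panel name overrides whatever the model said
	if model["type"] != nil {
		panelType = model["type"].(string) // model wins when it carries a type
	} else {
		model["type"] = panelType // otherwise push the struct value into the model
	}

	synced, _ := json.Marshal(model)
	fmt.Println(panelType, string(synced)) // timeseries {"title":"My panel","type":"timeseries"}
}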
|
||||
syncedModel, err := json.Marshal(&model)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -62,7 +72,7 @@ func (lps *LibraryPanelService) createLibraryPanel(c *models.ReqContext, cmd cre
|
||||
UpdatedBy: c.SignedInUser.UserId,
|
||||
}
|
||||
|
||||
if err := syncTitleWithName(&libraryPanel); err != nil {
|
||||
if err := syncFieldsWithModel(&libraryPanel); err != nil {
|
||||
return LibraryPanelDTO{}, err
|
||||
}
|
||||
|
||||
@@ -80,13 +90,15 @@ func (lps *LibraryPanelService) createLibraryPanel(c *models.ReqContext, cmd cre
|
||||
})
|
||||
|
||||
dto := LibraryPanelDTO{
|
||||
ID: libraryPanel.ID,
|
||||
OrgID: libraryPanel.OrgID,
|
||||
FolderID: libraryPanel.FolderID,
|
||||
UID: libraryPanel.UID,
|
||||
Name: libraryPanel.Name,
|
||||
Model: libraryPanel.Model,
|
||||
Version: libraryPanel.Version,
|
||||
ID: libraryPanel.ID,
|
||||
OrgID: libraryPanel.OrgID,
|
||||
FolderID: libraryPanel.FolderID,
|
||||
UID: libraryPanel.UID,
|
||||
Name: libraryPanel.Name,
|
||||
Type: libraryPanel.Type,
|
||||
Description: libraryPanel.Description,
|
||||
Model: libraryPanel.Model,
|
||||
Version: libraryPanel.Version,
|
||||
Meta: LibraryPanelDTOMeta{
|
||||
CanEdit: true,
|
||||
ConnectedDashboards: 0,
|
||||
@@ -108,7 +120,17 @@ func (lps *LibraryPanelService) createLibraryPanel(c *models.ReqContext, cmd cre
|
||||
return dto, err
|
||||
}
|
||||
|
||||
func connectDashboard(session *sqlstore.DBSession, dialect migrator.Dialect, user *models.SignedInUser, uid string, dashboardID int64) error {
|
||||
// connectDashboard adds a connection between a Library Panel and a Dashboard.
|
||||
func (lps *LibraryPanelService) connectDashboard(c *models.ReqContext, uid string, dashboardID int64) error {
|
||||
err := lps.SQLStore.WithTransactionalDbSession(c.Context.Req.Context(), func(session *sqlstore.DBSession) error {
|
||||
return lps.internalConnectDashboard(session, c.SignedInUser, uid, dashboardID)
|
||||
})
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (lps *LibraryPanelService) internalConnectDashboard(session *sqlstore.DBSession, user *models.SignedInUser,
|
||||
uid string, dashboardID int64) error {
|
||||
panel, err := getLibraryPanel(session, uid, user.OrgId)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -117,8 +139,6 @@ func connectDashboard(session *sqlstore.DBSession, dialect migrator.Dialect, use
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO add check that dashboard exists
|
||||
|
||||
libraryPanelDashboard := libraryPanelDashboard{
|
||||
DashboardID: dashboardID,
|
||||
LibraryPanelID: panel.ID,
|
||||
@@ -126,7 +146,7 @@ func connectDashboard(session *sqlstore.DBSession, dialect migrator.Dialect, use
CreatedBy: user.UserId,
}
if _, err := session.Insert(&libraryPanelDashboard); err != nil {
if dialect.IsUniqueConstraintViolation(err) {
if lps.SQLStore.Dialect.IsUniqueConstraintViolation(err) {
return nil
}
return err
@@ -134,15 +154,6 @@ func connectDashboard(session *sqlstore.DBSession, dialect migrator.Dialect, use
return nil
}

// connectDashboard adds a connection between a Library Panel and a Dashboard.
func (lps *LibraryPanelService) connectDashboard(c *models.ReqContext, uid string, dashboardID int64) error {
err := lps.SQLStore.WithTransactionalDbSession(c.Context.Req.Context(), func(session *sqlstore.DBSession) error {
return connectDashboard(session, lps.SQLStore.Dialect, c.SignedInUser, uid, dashboardID)
})

return err
}

// connectLibraryPanelsForDashboard adds connections for all Library Panels in a Dashboard.
func (lps *LibraryPanelService) connectLibraryPanelsForDashboard(c *models.ReqContext, uids []string, dashboardID int64) error {
err := lps.SQLStore.WithTransactionalDbSession(c.Context.Req.Context(), func(session *sqlstore.DBSession) error {
@@ -151,7 +162,7 @@ func (lps *LibraryPanelService) connectLibraryPanelsForDashboard(c *models.ReqCo
return err
}
for _, uid := range uids {
err := connectDashboard(session, lps.SQLStore.Dialect, c.SignedInUser, uid, dashboardID)
err := lps.internalConnectDashboard(session, c.SignedInUser, uid, dashboardID)
if err != nil {
return err
}
@@ -172,8 +183,14 @@ func (lps *LibraryPanelService) deleteLibraryPanel(c *models.ReqContext, uid str
if err := requirePermissionsOnFolder(c.SignedInUser, panel.FolderID); err != nil {
return err
}
if _, err := session.Exec("DELETE FROM library_panel_dashboard WHERE librarypanel_id=?", panel.ID); err != nil {
var dashIDs []struct {
DashboardID int64 `xorm:"dashboard_id"`
}
sql := "SELECT dashboard_id FROM library_panel_dashboard WHERE librarypanel_id=?"
if err := session.SQL(sql, panel.ID).Find(&dashIDs); err != nil {
return err
} else if len(dashIDs) > 0 {
return errLibraryPanelHasConnectedDashboards
}

result, err := session.Exec("DELETE FROM library_panel WHERE id=?", panel.ID)
@@ -336,13 +353,15 @@ func (lps *LibraryPanelService) getLibraryPanel(c *models.ReqContext, uid string
})

dto := LibraryPanelDTO{
ID: libraryPanel.ID,
OrgID: libraryPanel.OrgID,
FolderID: libraryPanel.FolderID,
UID: libraryPanel.UID,
Name: libraryPanel.Name,
Model: libraryPanel.Model,
Version: libraryPanel.Version,
ID: libraryPanel.ID,
OrgID: libraryPanel.OrgID,
FolderID: libraryPanel.FolderID,
UID: libraryPanel.UID,
Name: libraryPanel.Name,
Type: libraryPanel.Type,
Description: libraryPanel.Description,
Model: libraryPanel.Model,
Version: libraryPanel.Version,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: libraryPanel.ConnectedDashboards,
@@ -365,60 +384,103 @@ func (lps *LibraryPanelService) getLibraryPanel(c *models.ReqContext, uid string
}

// getAllLibraryPanels gets all library panels.
func (lps *LibraryPanelService) getAllLibraryPanels(c *models.ReqContext, limit int64) ([]LibraryPanelDTO, error) {
func (lps *LibraryPanelService) getAllLibraryPanels(c *models.ReqContext, perPage int, page int, name string, excludeUID string) (LibraryPanelSearchResult, error) {
libraryPanels := make([]LibraryPanelWithMeta, 0)
result := LibraryPanelSearchResult{}
if perPage <= 0 {
perPage = 100
}
if page <= 0 {
page = 1
}

err := lps.SQLStore.WithDbSession(c.Context.Req.Context(), func(session *sqlstore.DBSession) error {
builder := sqlstore.SQLBuilder{}
builder.Write(sqlStatmentLibrayPanelDTOWithMeta)
builder.Write(` WHERE lp.org_id=? AND lp.folder_id=0`, c.SignedInUser.OrgId)
if len(strings.TrimSpace(name)) > 0 {
builder.Write(" AND lp.name "+lps.SQLStore.Dialect.LikeStr()+" ?", "%"+name+"%")
}
if len(strings.TrimSpace(excludeUID)) > 0 {
builder.Write(" AND lp.uid <> ?", excludeUID)
}
builder.Write(" UNION ")
builder.Write(sqlStatmentLibrayPanelDTOWithMeta)
builder.Write(" INNER JOIN dashboard AS dashboard on lp.folder_id = dashboard.id AND lp.folder_id<>0")
builder.Write(` WHERE lp.org_id=?`, c.SignedInUser.OrgId)
if len(strings.TrimSpace(name)) > 0 {
builder.Write(" AND lp.name "+lps.SQLStore.Dialect.LikeStr()+" ?", "%"+name+"%")
}
if len(strings.TrimSpace(excludeUID)) > 0 {
builder.Write(" AND lp.uid <> ?", excludeUID)
}
if c.SignedInUser.OrgRole != models.ROLE_ADMIN {
builder.WriteDashboardPermissionFilter(c.SignedInUser, models.PERMISSION_VIEW)
}
if limit == 0 {
limit = 1000
if perPage != 0 {
offset := perPage * (page - 1)
builder.Write(lps.SQLStore.Dialect.LimitOffset(int64(perPage), int64(offset)))
}
builder.Write(lps.SQLStore.Dialect.Limit(limit))
if err := session.SQL(builder.GetSQLString(), builder.GetParams()...).Find(&libraryPanels); err != nil {
return err
}

retDTOs := make([]LibraryPanelDTO, 0)
for _, panel := range libraryPanels {
retDTOs = append(retDTOs, LibraryPanelDTO{
ID: panel.ID,
OrgID: panel.OrgID,
FolderID: panel.FolderID,
UID: panel.UID,
Name: panel.Name,
Type: panel.Type,
Description: panel.Description,
Model: panel.Model,
Version: panel.Version,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: panel.ConnectedDashboards,
Created: panel.Created,
Updated: panel.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: panel.CreatedBy,
Name: panel.CreatedByName,
AvatarUrl: dtos.GetGravatarUrl(panel.CreatedByEmail),
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: panel.UpdatedBy,
Name: panel.UpdatedByName,
AvatarUrl: dtos.GetGravatarUrl(panel.UpdatedByEmail),
},
},
})
}

var panels []LibraryPanel
countBuilder := sqlstore.SQLBuilder{}
countBuilder.Write("SELECT * FROM library_panel")
countBuilder.Write(` WHERE org_id=?`, c.SignedInUser.OrgId)
if len(strings.TrimSpace(name)) > 0 {
countBuilder.Write(" AND name "+lps.SQLStore.Dialect.LikeStr()+" ?", "%"+name+"%")
}
if len(strings.TrimSpace(excludeUID)) > 0 {
countBuilder.Write(" AND uid <> ?", excludeUID)
}
if err := session.SQL(countBuilder.GetSQLString(), countBuilder.GetParams()...).Find(&panels); err != nil {
return err
}

result = LibraryPanelSearchResult{
TotalCount: int64(len(panels)),
LibraryPanels: retDTOs,
Page: page,
PerPage: perPage,
}

return nil
})

retDTOs := make([]LibraryPanelDTO, 0)
for _, panel := range libraryPanels {
retDTOs = append(retDTOs, LibraryPanelDTO{
ID: panel.ID,
OrgID: panel.OrgID,
FolderID: panel.FolderID,
UID: panel.UID,
Name: panel.Name,
Model: panel.Model,
Version: panel.Version,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: panel.ConnectedDashboards,
Created: panel.Created,
Updated: panel.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: panel.CreatedBy,
Name: panel.CreatedByName,
AvatarUrl: dtos.GetGravatarUrl(panel.CreatedByEmail),
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: panel.UpdatedBy,
Name: panel.UpdatedByName,
AvatarUrl: dtos.GetGravatarUrl(panel.UpdatedByEmail),
},
},
})
}

return retDTOs, err
return result, err
}

// getConnectedDashboards gets all dashboards connected to a Library Panel.
@@ -464,13 +526,15 @@ func (lps *LibraryPanelService) getLibraryPanelsForDashboardID(c *models.ReqCont

for _, panel := range libraryPanels {
libraryPanelMap[panel.UID] = LibraryPanelDTO{
ID: panel.ID,
OrgID: panel.OrgID,
FolderID: panel.FolderID,
UID: panel.UID,
Name: panel.Name,
Model: panel.Model,
Version: panel.Version,
ID: panel.ID,
OrgID: panel.OrgID,
FolderID: panel.FolderID,
UID: panel.UID,
Name: panel.Name,
Type: panel.Type,
Description: panel.Description,
Model: panel.Model,
Version: panel.Version,
Meta: LibraryPanelDTOMeta{
CanEdit: panel.CanEdit,
ConnectedDashboards: panel.ConnectedDashboards,
@@ -532,17 +596,19 @@ func (lps *LibraryPanelService) patchLibraryPanel(c *models.ReqContext, cmd patc
}

var libraryPanel = LibraryPanel{
ID: panelInDB.ID,
OrgID: c.SignedInUser.OrgId,
FolderID: cmd.FolderID,
UID: uid,
Name: cmd.Name,
Model: cmd.Model,
Version: panelInDB.Version + 1,
Created: panelInDB.Created,
CreatedBy: panelInDB.CreatedBy,
Updated: time.Now(),
UpdatedBy: c.SignedInUser.UserId,
ID: panelInDB.ID,
OrgID: c.SignedInUser.OrgId,
FolderID: cmd.FolderID,
UID: uid,
Name: cmd.Name,
Type: panelInDB.Type,
Description: panelInDB.Description,
Model: cmd.Model,
Version: panelInDB.Version + 1,
Created: panelInDB.Created,
CreatedBy: panelInDB.CreatedBy,
Updated: time.Now(),
UpdatedBy: c.SignedInUser.UserId,
}

if cmd.Name == "" {
@@ -554,7 +620,7 @@ func (lps *LibraryPanelService) patchLibraryPanel(c *models.ReqContext, cmd patc
if err := handleFolderIDPatches(&libraryPanel, panelInDB.FolderID, cmd.FolderID, c.SignedInUser); err != nil {
return err
}
if err := syncTitleWithName(&libraryPanel); err != nil {
if err := syncFieldsWithModel(&libraryPanel); err != nil {
return err
}
if rowsAffected, err := session.ID(panelInDB.ID).Update(&libraryPanel); err != nil {
@@ -567,13 +633,15 @@ func (lps *LibraryPanelService) patchLibraryPanel(c *models.ReqContext, cmd patc
}

dto = LibraryPanelDTO{
ID: libraryPanel.ID,
OrgID: libraryPanel.OrgID,
FolderID: libraryPanel.FolderID,
UID: libraryPanel.UID,
Name: libraryPanel.Name,
Model: libraryPanel.Model,
Version: libraryPanel.Version,
ID: libraryPanel.ID,
OrgID: libraryPanel.OrgID,
FolderID: libraryPanel.FolderID,
UID: libraryPanel.UID,
Name: libraryPanel.Name,
Type: libraryPanel.Type,
Description: libraryPanel.Description,
Model: libraryPanel.Model,
Version: libraryPanel.Version,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: panelInDB.ConnectedDashboards,

@@ -102,9 +102,11 @@ func (lps *LibraryPanelService) LoadLibraryPanelsForDashboard(c *models.ReqConte
elem.Set("gridPos", panelAsJSON.Get("gridPos").MustMap())
elem.Set("id", panelAsJSON.Get("id").MustInt64())
elem.Set("libraryPanel", map[string]interface{}{
"uid": libraryPanelInDB.UID,
"name": libraryPanelInDB.Name,
"version": libraryPanelInDB.Version,
"uid": libraryPanelInDB.UID,
"name": libraryPanelInDB.Name,
"type": libraryPanelInDB.Type,
"description": libraryPanelInDB.Description,
"version": libraryPanelInDB.Version,
"meta": map[string]interface{}{
"canEdit": libraryPanelInDB.Meta.CanEdit,
"connectedDashboards": libraryPanelInDB.Meta.ConnectedDashboards,
@@ -242,6 +244,8 @@ func (lps *LibraryPanelService) AddMigration(mg *migrator.Migrator) {
{Name: "folder_id", Type: migrator.DB_BigInt, Nullable: false},
{Name: "uid", Type: migrator.DB_NVarchar, Length: 40, Nullable: false},
{Name: "name", Type: migrator.DB_NVarchar, Length: 255, Nullable: false},
{Name: "type", Type: migrator.DB_NVarchar, Length: 40, Nullable: false},
{Name: "description", Type: migrator.DB_NVarchar, Length: 255, Nullable: false},
{Name: "model", Type: migrator.DB_Text, Nullable: false},
{Name: "created", Type: migrator.DB_DateTime, Nullable: false},
{Name: "created_by", Type: migrator.DB_BigInt, Nullable: false},

@@ -19,16 +19,19 @@ func TestCreateLibraryPanel(t *testing.T) {
func(t *testing.T, sc scenarioContext) {
var expected = libraryPanelResult{
Result: libraryPanel{
ID: 1,
OrgID: 1,
FolderID: 1,
UID: sc.initialResult.Result.UID,
Name: "Text - Library Panel",
ID: 1,
OrgID: 1,
FolderID: 1,
UID: sc.initialResult.Result.UID,
Name: "Text - Library Panel",
Type: "text",
Description: "A description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"id": float64(1),
"title": "Text - Library Panel",
"type": "text",
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"id": float64(1),
"title": "Text - Library Panel",
"type": "text",
},
Version: 1,
Meta: LibraryPanelDTOMeta{
@@ -61,16 +64,19 @@ func TestCreateLibraryPanel(t *testing.T) {
var result = validateAndUnMarshalResponse(t, resp)
var expected = libraryPanelResult{
Result: libraryPanel{
ID: 1,
OrgID: 1,
FolderID: 1,
UID: result.Result.UID,
Name: "Library Panel Name",
ID: 1,
OrgID: 1,
FolderID: 1,
UID: result.Result.UID,
Name: "Library Panel Name",
Type: "text",
Description: "A description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"id": float64(1),
"title": "Library Panel Name",
"type": "text",
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"id": float64(1),
"title": "Library Panel Name",
"type": "text",
},
Version: 1,
Meta: LibraryPanelDTOMeta{

@@ -30,4 +30,15 @@ func TestDeleteLibraryPanel(t *testing.T) {
resp := sc.service.deleteHandler(sc.reqContext)
require.Equal(t, 404, resp.Status())
})

scenarioWithLibraryPanel(t, "When an admin tries to delete a library panel that is connected, it should fail",
func(t *testing.T, sc scenarioContext) {
sc.reqContext.ReplaceAllParams(map[string]string{":uid": sc.initialResult.Result.UID, ":dashboardId": "1"})
resp := sc.service.connectHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

sc.reqContext.ReplaceAllParams(map[string]string{":uid": sc.initialResult.Result.UID})
resp = sc.service.deleteHandler(sc.reqContext)
require.Equal(t, 403, resp.Status())
})
}

@@ -16,11 +16,20 @@ func TestGetAllLibraryPanels(t *testing.T) {
resp := sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var result libraryPanelsResult
var result libraryPanelsSearch
err := json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
require.NotNil(t, result.Result)
require.Equal(t, 0, len(result.Result))
var expected = libraryPanelsSearch{
Result: libraryPanelsSearchResult{
TotalCount: 0,
LibraryPanels: []libraryPanel{},
Page: 1,
PerPage: 100,
},
}
if diff := cmp.Diff(expected, result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

scenarioWithLibraryPanel(t, "When an admin tries to get all library panels and two exist, it should succeed",
@@ -32,68 +41,141 @@ func TestGetAllLibraryPanels(t *testing.T) {
resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var result libraryPanelsResult
var result libraryPanelsSearch
err := json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
var expected = libraryPanelsResult{
Result: []libraryPanel{
{
ID: 1,
OrgID: 1,
FolderID: 1,
UID: result.Result[0].UID,
Name: "Text - Library Panel",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"id": float64(1),
"title": "Text - Library Panel",
"type": "text",
},
Version: 1,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: 0,
Created: result.Result[0].Meta.Created,
Updated: result.Result[0].Meta.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
var expected = libraryPanelsSearch{
Result: libraryPanelsSearchResult{
TotalCount: 2,
Page: 1,
PerPage: 100,
LibraryPanels: []libraryPanel{
{
ID: 1,
OrgID: 1,
FolderID: 1,
UID: result.Result.LibraryPanels[0].UID,
Name: "Text - Library Panel",
Type: "text",
Description: "A description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"id": float64(1),
"title": "Text - Library Panel",
"type": "text",
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
Version: 1,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: 0,
Created: result.Result.LibraryPanels[0].Meta.Created,
Updated: result.Result.LibraryPanels[0].Meta.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
},
},
{
ID: 2,
OrgID: 1,
FolderID: 1,
UID: result.Result.LibraryPanels[1].UID,
Name: "Text - Library Panel2",
Type: "text",
Description: "A description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"id": float64(1),
"title": "Text - Library Panel2",
"type": "text",
},
Version: 1,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: 0,
Created: result.Result.LibraryPanels[1].Meta.Created,
Updated: result.Result.LibraryPanels[1].Meta.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
},
},
},
{
ID: 2,
OrgID: 1,
FolderID: 1,
UID: result.Result[1].UID,
Name: "Text - Library Panel2",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"id": float64(1),
"title": "Text - Library Panel2",
"type": "text",
},
Version: 1,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: 0,
Created: result.Result[1].Meta.Created,
Updated: result.Result[1].Meta.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
}
if diff := cmp.Diff(expected, result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

scenarioWithLibraryPanel(t, "When an admin tries to get all library panels and two exist and excludeUID is set, it should succeed and the result should be correct",
func(t *testing.T, sc scenarioContext) {
command := getCreateCommand(sc.folder.Id, "Text - Library Panel2")
resp := sc.service.createHandler(sc.reqContext, command)
require.Equal(t, 200, resp.Status())

err := sc.reqContext.Req.ParseForm()
require.NoError(t, err)
sc.reqContext.Req.Form.Add("excludeUid", sc.initialResult.Result.UID)
resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var result libraryPanelsSearch
err = json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
var expected = libraryPanelsSearch{
Result: libraryPanelsSearchResult{
TotalCount: 1,
Page: 1,
PerPage: 100,
LibraryPanels: []libraryPanel{
{
ID: 2,
OrgID: 1,
FolderID: 1,
UID: result.Result.LibraryPanels[0].UID,
Name: "Text - Library Panel2",
Type: "text",
Description: "A description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"id": float64(1),
"title": "Text - Library Panel2",
"type": "text",
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
Version: 1,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: 0,
Created: result.Result.LibraryPanels[0].Meta.Created,
Updated: result.Result.LibraryPanels[0].Meta.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
},
},
},
@@ -104,6 +186,255 @@ func TestGetAllLibraryPanels(t *testing.T) {
}
})

scenarioWithLibraryPanel(t, "When an admin tries to get all library panels and two exist and perPage is 1, it should succeed and the result should be correct",
func(t *testing.T, sc scenarioContext) {
command := getCreateCommand(sc.folder.Id, "Text - Library Panel2")
resp := sc.service.createHandler(sc.reqContext, command)
require.Equal(t, 200, resp.Status())

err := sc.reqContext.Req.ParseForm()
require.NoError(t, err)
sc.reqContext.Req.Form.Add("perPage", "1")
resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var result libraryPanelsSearch
err = json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
var expected = libraryPanelsSearch{
Result: libraryPanelsSearchResult{
TotalCount: 2,
Page: 1,
PerPage: 1,
LibraryPanels: []libraryPanel{
{
ID: 1,
OrgID: 1,
FolderID: 1,
UID: result.Result.LibraryPanels[0].UID,
Name: "Text - Library Panel",
Type: "text",
Description: "A description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"id": float64(1),
"title": "Text - Library Panel",
"type": "text",
},
Version: 1,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: 0,
Created: result.Result.LibraryPanels[0].Meta.Created,
Updated: result.Result.LibraryPanels[0].Meta.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
},
},
},
},
}
if diff := cmp.Diff(expected, result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

scenarioWithLibraryPanel(t, "When an admin tries to get all library panels and two exist and perPage is 1 and page is 2, it should succeed and the result should be correct",
func(t *testing.T, sc scenarioContext) {
command := getCreateCommand(sc.folder.Id, "Text - Library Panel2")
resp := sc.service.createHandler(sc.reqContext, command)
require.Equal(t, 200, resp.Status())

err := sc.reqContext.Req.ParseForm()
require.NoError(t, err)
sc.reqContext.Req.Form.Add("perPage", "1")
sc.reqContext.Req.Form.Add("page", "2")
resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var result libraryPanelsSearch
err = json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
var expected = libraryPanelsSearch{
Result: libraryPanelsSearchResult{
TotalCount: 2,
Page: 2,
PerPage: 1,
LibraryPanels: []libraryPanel{
{
ID: 2,
OrgID: 1,
FolderID: 1,
UID: result.Result.LibraryPanels[0].UID,
Name: "Text - Library Panel2",
Type: "text",
Description: "A description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"id": float64(1),
"title": "Text - Library Panel2",
"type": "text",
},
Version: 1,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: 0,
Created: result.Result.LibraryPanels[0].Meta.Created,
Updated: result.Result.LibraryPanels[0].Meta.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
},
},
},
},
}
if diff := cmp.Diff(expected, result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

scenarioWithLibraryPanel(t, "When an admin tries to get all library panels and two exist and perPage is 1 and page is 1 and name is panel2, it should succeed and the result should be correct",
func(t *testing.T, sc scenarioContext) {
command := getCreateCommand(sc.folder.Id, "Text - Library Panel2")
resp := sc.service.createHandler(sc.reqContext, command)
require.Equal(t, 200, resp.Status())

err := sc.reqContext.Req.ParseForm()
require.NoError(t, err)
sc.reqContext.Req.Form.Add("perPage", "1")
sc.reqContext.Req.Form.Add("page", "1")
sc.reqContext.Req.Form.Add("name", "panel2")
resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var result libraryPanelsSearch
err = json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
var expected = libraryPanelsSearch{
Result: libraryPanelsSearchResult{
TotalCount: 1,
Page: 1,
PerPage: 1,
LibraryPanels: []libraryPanel{
{
ID: 2,
OrgID: 1,
FolderID: 1,
UID: result.Result.LibraryPanels[0].UID,
Name: "Text - Library Panel2",
Type: "text",
Description: "A description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"id": float64(1),
"title": "Text - Library Panel2",
"type": "text",
},
Version: 1,
Meta: LibraryPanelDTOMeta{
CanEdit: true,
ConnectedDashboards: 0,
Created: result.Result.LibraryPanels[0].Meta.Created,
Updated: result.Result.LibraryPanels[0].Meta.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
},
},
},
},
}
if diff := cmp.Diff(expected, result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

scenarioWithLibraryPanel(t, "When an admin tries to get all library panels and two exist and perPage is 1 and page is 3 and name is panel, it should succeed and the result should be correct",
func(t *testing.T, sc scenarioContext) {
command := getCreateCommand(sc.folder.Id, "Text - Library Panel2")
resp := sc.service.createHandler(sc.reqContext, command)
require.Equal(t, 200, resp.Status())

err := sc.reqContext.Req.ParseForm()
require.NoError(t, err)
sc.reqContext.Req.Form.Add("perPage", "1")
sc.reqContext.Req.Form.Add("page", "3")
sc.reqContext.Req.Form.Add("name", "panel")
resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var result libraryPanelsSearch
err = json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
var expected = libraryPanelsSearch{
Result: libraryPanelsSearchResult{
TotalCount: 2,
Page: 3,
PerPage: 1,
LibraryPanels: []libraryPanel{},
},
}
if diff := cmp.Diff(expected, result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

scenarioWithLibraryPanel(t, "When an admin tries to get all library panels and two exist and perPage is 1 and page is 3 and name does not exist, it should succeed and the result should be correct",
func(t *testing.T, sc scenarioContext) {
command := getCreateCommand(sc.folder.Id, "Text - Library Panel2")
resp := sc.service.createHandler(sc.reqContext, command)
require.Equal(t, 200, resp.Status())

err := sc.reqContext.Req.ParseForm()
require.NoError(t, err)
sc.reqContext.Req.Form.Add("perPage", "1")
sc.reqContext.Req.Form.Add("page", "3")
sc.reqContext.Req.Form.Add("name", "monkey")
resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var result libraryPanelsSearch
err = json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
var expected = libraryPanelsSearch{
Result: libraryPanelsSearchResult{
TotalCount: 0,
Page: 3,
PerPage: 1,
LibraryPanels: []libraryPanel{},
},
}
if diff := cmp.Diff(expected, result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

scenarioWithLibraryPanel(t, "When an admin tries to get all library panels and two exist but only one is connected, it should succeed and return correct connected dashboards",
func(t *testing.T, sc scenarioContext) {
command := getCreateCommand(sc.folder.Id, "Text - Library Panel2")
@@ -121,11 +452,11 @@ func TestGetAllLibraryPanels(t *testing.T) {
resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var results libraryPanelsResult
var results libraryPanelsSearch
err := json.Unmarshal(resp.Body(), &results)
require.NoError(t, err)
require.Equal(t, int64(0), results.Result[0].Meta.ConnectedDashboards)
require.Equal(t, int64(2), results.Result[1].Meta.ConnectedDashboards)
require.Equal(t, int64(0), results.Result.LibraryPanels[0].Meta.ConnectedDashboards)
require.Equal(t, int64(2), results.Result.LibraryPanels[1].Meta.ConnectedDashboards)
})

scenarioWithLibraryPanel(t, "When an admin tries to get all library panels in a different org, none should be returned",
@@ -133,22 +464,31 @@ func TestGetAllLibraryPanels(t *testing.T) {
resp := sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

var result libraryPanelsResult
var result libraryPanelsSearch
err := json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
require.Equal(t, 1, len(result.Result))
require.Equal(t, int64(1), result.Result[0].FolderID)
require.Equal(t, "Text - Library Panel", result.Result[0].Name)
require.Equal(t, 1, len(result.Result.LibraryPanels))
require.Equal(t, int64(1), result.Result.LibraryPanels[0].FolderID)
require.Equal(t, "Text - Library Panel", result.Result.LibraryPanels[0].Name)

sc.reqContext.SignedInUser.OrgId = 2
sc.reqContext.SignedInUser.OrgRole = models.ROLE_ADMIN
resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())

result = libraryPanelsResult{}
result = libraryPanelsSearch{}
err = json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
require.NotNil(t, result.Result)
require.Equal(t, 0, len(result.Result))
var expected = libraryPanelsSearch{
Result: libraryPanelsSearchResult{
TotalCount: 0,
LibraryPanels: []libraryPanel{},
Page: 1,
PerPage: 100,
},
}
if diff := cmp.Diff(expected, result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})
}

@@ -24,16 +24,19 @@ func TestGetLibraryPanel(t *testing.T) {
var result = validateAndUnMarshalResponse(t, resp)
var expected = libraryPanelResult{
Result: libraryPanel{
ID: 1,
OrgID: 1,
FolderID: 1,
UID: result.Result.UID,
Name: "Text - Library Panel",
ID: 1,
OrgID: 1,
FolderID: 1,
UID: result.Result.UID,
Name: "Text - Library Panel",
Type: "text",
Description: "A description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"id": float64(1),
"title": "Text - Library Panel",
"type": "text",
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"id": float64(1),
"title": "Text - Library Panel",
"type": "text",
},
Version: 1,
Meta: LibraryPanelDTOMeta{

@@ -32,9 +32,10 @@ func TestPatchLibraryPanel(t *testing.T) {
Model: []byte(`
{
"datasource": "${DS_GDEV-TESTDATA}",
"description": "An updated description",
"id": 1,
"title": "Model - New name",
"type": "text"
"type": "graph"
}
`),
Version: 1,
@@ -45,16 +46,19 @@ func TestPatchLibraryPanel(t *testing.T) {
var result = validateAndUnMarshalResponse(t, resp)
var expected = libraryPanelResult{
Result: libraryPanel{
ID: 1,
OrgID: 1,
FolderID: newFolder.Id,
UID: sc.initialResult.Result.UID,
Name: "Panel - New name",
ID: 1,
OrgID: 1,
FolderID: newFolder.Id,
UID: sc.initialResult.Result.UID,
Name: "Panel - New name",
Type: "graph",
Description: "An updated description",
Model: map[string]interface{}{
"datasource": "${DS_GDEV-TESTDATA}",
"id": float64(1),
"title": "Panel - New name",
"type": "text",
"datasource": "${DS_GDEV-TESTDATA}",
"description": "An updated description",
"id": float64(1),
"title": "Panel - New name",
"type": "graph",
},
Version: 2,
Meta: LibraryPanelDTOMeta{
@@ -64,8 +68,8 @@ func TestPatchLibraryPanel(t *testing.T) {
Updated: result.Result.Meta.Updated,
CreatedBy: LibraryPanelDTOMetaUser{
ID: 1,
Name: "user_in_db",
AvatarUrl: "/avatar/402d08de060496d6b6874495fe20f5ad",
Name: UserInDbName,
AvatarUrl: UserInDbAvatar,
},
UpdatedBy: LibraryPanelDTOMetaUser{
ID: 1,
@@ -92,8 +96,8 @@ func TestPatchLibraryPanel(t *testing.T) {
require.Equal(t, 200, resp.Status())
var result = validateAndUnMarshalResponse(t, resp)
sc.initialResult.Result.FolderID = newFolder.Id
sc.initialResult.Result.Meta.CreatedBy.Name = "user_in_db"
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = "/avatar/402d08de060496d6b6874495fe20f5ad"
sc.initialResult.Result.Meta.CreatedBy.Name = UserInDbName
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = UserInDbAvatar
sc.initialResult.Result.Version = 2
if diff := cmp.Diff(sc.initialResult.Result, result.Result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
@@ -111,8 +115,8 @@ func TestPatchLibraryPanel(t *testing.T) {
resp := sc.service.patchHandler(sc.reqContext, cmd)
var result = validateAndUnMarshalResponse(t, resp)
sc.initialResult.Result.Name = "New Name"
sc.initialResult.Result.Meta.CreatedBy.Name = "user_in_db"
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = "/avatar/402d08de060496d6b6874495fe20f5ad"
sc.initialResult.Result.Meta.CreatedBy.Name = UserInDbName
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = UserInDbAvatar
sc.initialResult.Result.Model["title"] = "New Name"
sc.initialResult.Result.Version = 2
if diff := cmp.Diff(sc.initialResult.Result, result.Result, getCompareOptions()...); diff != "" {
@@ -120,22 +124,76 @@ func TestPatchLibraryPanel(t *testing.T) {
}
})

scenarioWithLibraryPanel(t, "When an admin tries to patch a library panel with model only, it should change model successfully and return correct result",
scenarioWithLibraryPanel(t, "When an admin tries to patch a library panel with model only, it should change model successfully, sync name, type and description fields and return correct result",
func(t *testing.T, sc scenarioContext) {
cmd := patchLibraryPanelCommand{
FolderID: -1,
Model: []byte(`{ "title": "New Model Title", "name": "New Model Name" }`),
Model: []byte(`{ "title": "New Model Title", "name": "New Model Name", "type":"graph", "description": "New description" }`),
Version: 1,
}
sc.reqContext.ReplaceAllParams(map[string]string{":uid": sc.initialResult.Result.UID})
resp := sc.service.patchHandler(sc.reqContext, cmd)
var result = validateAndUnMarshalResponse(t, resp)
sc.initialResult.Result.Type = "graph"
sc.initialResult.Result.Description = "New description"
sc.initialResult.Result.Model = map[string]interface{}{
"title": "Text - Library Panel",
"name": "New Model Name",
"title": "Text - Library Panel",
"name": "New Model Name",
"type": "graph",
"description": "New description",
}
sc.initialResult.Result.Meta.CreatedBy.Name = "user_in_db"
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = "/avatar/402d08de060496d6b6874495fe20f5ad"
sc.initialResult.Result.Meta.CreatedBy.Name = UserInDbName
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = UserInDbAvatar
sc.initialResult.Result.Version = 2
if diff := cmp.Diff(sc.initialResult.Result, result.Result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

scenarioWithLibraryPanel(t, "When an admin tries to patch a library panel with model.description only, it should change model successfully, sync name, type and description fields and return correct result",
func(t *testing.T, sc scenarioContext) {
cmd := patchLibraryPanelCommand{
FolderID: -1,
Model: []byte(`{ "description": "New description" }`),
Version: 1,
}
sc.reqContext.ReplaceAllParams(map[string]string{":uid": sc.initialResult.Result.UID})
resp := sc.service.patchHandler(sc.reqContext, cmd)
var result = validateAndUnMarshalResponse(t, resp)
sc.initialResult.Result.Type = "text"
sc.initialResult.Result.Description = "New description"
sc.initialResult.Result.Model = map[string]interface{}{
"title": "Text - Library Panel",
"type": "text",
"description": "New description",
}
sc.initialResult.Result.Meta.CreatedBy.Name = UserInDbName
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = UserInDbAvatar
sc.initialResult.Result.Version = 2
if diff := cmp.Diff(sc.initialResult.Result, result.Result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

scenarioWithLibraryPanel(t, "When an admin tries to patch a library panel with model.type only, it should change model successfully, sync name, type and description fields and return correct result",
func(t *testing.T, sc scenarioContext) {
cmd := patchLibraryPanelCommand{
FolderID: -1,
Model: []byte(`{ "type": "graph" }`),
Version: 1,
}
sc.reqContext.ReplaceAllParams(map[string]string{":uid": sc.initialResult.Result.UID})
resp := sc.service.patchHandler(sc.reqContext, cmd)
var result = validateAndUnMarshalResponse(t, resp)
sc.initialResult.Result.Type = "graph"
sc.initialResult.Result.Description = "A description"
sc.initialResult.Result.Model = map[string]interface{}{
"title": "Text - Library Panel",
"type": "graph",
"description": "A description",
}
sc.initialResult.Result.Meta.CreatedBy.Name = UserInDbName
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = UserInDbAvatar
sc.initialResult.Result.Version = 2
if diff := cmp.Diff(sc.initialResult.Result, result.Result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
@@ -150,8 +208,8 @@ func TestPatchLibraryPanel(t *testing.T) {
resp := sc.service.patchHandler(sc.reqContext, cmd)
var result = validateAndUnMarshalResponse(t, resp)
sc.initialResult.Result.Meta.UpdatedBy.ID = int64(2)
sc.initialResult.Result.Meta.CreatedBy.Name = "user_in_db"
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = "/avatar/402d08de060496d6b6874495fe20f5ad"
sc.initialResult.Result.Meta.CreatedBy.Name = UserInDbName
sc.initialResult.Result.Meta.CreatedBy.AvatarUrl = UserInDbAvatar
sc.initialResult.Result.Version = 2
if diff := cmp.Diff(sc.initialResult.Result, result.Result, getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)

@@ -343,10 +343,10 @@ func TestLibraryPanelPermissions(t *testing.T) {

resp := sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())
var actual libraryPanelsResult
var actual libraryPanelsSearch
err := json.Unmarshal(resp.Body(), &actual)
require.NoError(t, err)
require.Equal(t, testCase.panels, len(actual.Result))
require.Equal(t, testCase.panels, len(actual.Result.LibraryPanels))
for _, folderIndex := range testCase.folderIndexes {
var folderID = int64(folderIndex + 2) // testScenario creates one folder and general folder doesn't count
var foundResult libraryPanel
@@ -359,7 +359,7 @@ func TestLibraryPanelPermissions(t *testing.T) {
}
require.NotEmpty(t, foundResult)

for _, result := range actual.Result {
for _, result := range actual.Result.LibraryPanels {
if result.FolderID == folderID {
actualResult = result
break
@@ -386,11 +386,11 @@ func TestLibraryPanelPermissions(t *testing.T) {

resp = sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())
var actual libraryPanelsResult
var actual libraryPanelsSearch
err := json.Unmarshal(resp.Body(), &actual)
require.NoError(t, err)
require.Equal(t, 1, len(actual.Result))
if diff := cmp.Diff(result.Result, actual.Result[0], getCompareOptions()...); diff != "" {
require.Equal(t, 1, len(actual.Result.LibraryPanels))
if diff := cmp.Diff(result.Result, actual.Result.LibraryPanels[0], getCompareOptions()...); diff != "" {
t.Fatalf("Result mismatch (-want +got):\n%s", diff)
}
})

@@ -84,11 +84,14 @@ func TestLoadLibraryPanelsForDashboard(t *testing.T) {
"x": 6,
"y": 0,
},
"datasource": "${DS_GDEV-TESTDATA}",
"datasource": "${DS_GDEV-TESTDATA}",
"description": "A description",
"libraryPanel": map[string]interface{}{
"uid": sc.initialResult.Result.UID,
"name": sc.initialResult.Result.Name,
"version": sc.initialResult.Result.Version,
"uid": sc.initialResult.Result.UID,
"name": sc.initialResult.Result.Name,
"type": sc.initialResult.Result.Type,
"description": sc.initialResult.Result.Description,
"version": sc.initialResult.Result.Version,
"meta": map[string]interface{}{
"canEdit": false,
"connectedDashboards": int64(1),
@@ -646,11 +649,11 @@ func TestDeleteLibraryPanelsInFolder(t *testing.T) {
func(t *testing.T, sc scenarioContext) {
resp := sc.service.getAllHandler(sc.reqContext)
require.Equal(t, 200, resp.Status())
var result libraryPanelsResult
var result libraryPanelsSearch
err := json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
require.NotNil(t, result.Result)
require.Equal(t, 1, len(result.Result))
require.Equal(t, 1, len(result.Result.LibraryPanels))

err = sc.service.DeleteLibraryPanelsInFolder(sc.reqContext, sc.folder.Uid)
require.NoError(t, err)
@@ -659,27 +662,36 @@ func TestDeleteLibraryPanelsInFolder(t *testing.T) {
err = json.Unmarshal(resp.Body(), &result)
require.NoError(t, err)
require.NotNil(t, result.Result)
require.Equal(t, 0, len(result.Result))
require.Equal(t, 0, len(result.Result.LibraryPanels))
})
}

type libraryPanel struct {
ID int64 `json:"id"`
OrgID int64 `json:"orgId"`
FolderID int64 `json:"folderId"`
UID string `json:"uid"`
Name string `json:"name"`
Model map[string]interface{} `json:"model"`
Version int64 `json:"version"`
Meta LibraryPanelDTOMeta `json:"meta"`
ID int64 `json:"id"`
OrgID int64 `json:"orgId"`
FolderID int64 `json:"folderId"`
UID string `json:"uid"`
Name string `json:"name"`
Type string
Description string
Model map[string]interface{} `json:"model"`
Version int64 `json:"version"`
Meta LibraryPanelDTOMeta `json:"meta"`
}

type libraryPanelResult struct {
Result libraryPanel `json:"result"`
}

type libraryPanelsResult struct {
Result []libraryPanel `json:"result"`
type libraryPanelsSearch struct {
Result libraryPanelsSearchResult `json:"result"`
}

type libraryPanelsSearchResult struct {
TotalCount int64 `json:"totalCount"`
LibraryPanels []libraryPanel `json:"libraryPanels"`
Page int `json:"page"`
PerPage int `json:"perPage"`
}

type libraryPanelDashboardsResult struct {
@@ -716,7 +728,8 @@ func getCreateCommand(folderID int64, name string) createLibraryPanelCommand {
"datasource": "${DS_GDEV-TESTDATA}",
"id": 1,
"title": "Text - Library Panel",
"type": "text"
"type": "text",
"description": "A description"
}
`),
}

@@ -8,13 +8,15 @@ import (

// LibraryPanel is the model for library panel definitions.
type LibraryPanel struct {
ID int64 `xorm:"pk autoincr 'id'"`
OrgID int64 `xorm:"org_id"`
FolderID int64 `xorm:"folder_id"`
UID string `xorm:"uid"`
Name string
Model json.RawMessage
Version int64
ID int64 `xorm:"pk autoincr 'id'"`
OrgID int64 `xorm:"org_id"`
FolderID int64 `xorm:"folder_id"`
UID string `xorm:"uid"`
Name string
Type string
Description string
Model json.RawMessage
Version int64

Created time.Time
Updated time.Time
@@ -25,13 +27,15 @@ type LibraryPanel struct {

// LibraryPanelWithMeta is the model used to retrieve library panels with additional meta information.
type LibraryPanelWithMeta struct {
ID int64 `xorm:"pk autoincr 'id'"`
OrgID int64 `xorm:"org_id"`
FolderID int64 `xorm:"folder_id"`
UID string `xorm:"uid"`
Name string
Model json.RawMessage
Version int64
ID int64 `xorm:"pk autoincr 'id'"`
OrgID int64 `xorm:"org_id"`
FolderID int64 `xorm:"folder_id"`
UID string `xorm:"uid"`
Name string
Type string
Description string
Model json.RawMessage
Version int64

Created time.Time
Updated time.Time
@@ -48,14 +52,24 @@ type LibraryPanelWithMeta struct {

// LibraryPanelDTO is the frontend DTO for library panels.
type LibraryPanelDTO struct {
ID int64 `json:"id"`
OrgID int64 `json:"orgId"`
FolderID int64 `json:"folderId"`
UID string `json:"uid"`
Name string `json:"name"`
Model json.RawMessage `json:"model"`
Version int64 `json:"version"`
Meta LibraryPanelDTOMeta `json:"meta"`
ID int64 `json:"id"`
OrgID int64 `json:"orgId"`
FolderID int64 `json:"folderId"`
UID string `json:"uid"`
Name string `json:"name"`
Type string `json:"type"`
Description string `json:"description"`
Model json.RawMessage `json:"model"`
Version int64 `json:"version"`
Meta LibraryPanelDTOMeta `json:"meta"`
}

// LibraryPanelSearchResult is the search result for library panels.
type LibraryPanelSearchResult struct {
TotalCount int64 `json:"totalCount"`
LibraryPanels []LibraryPanelDTO `json:"libraryPanels"`
Page int `json:"page"`
PerPage int `json:"perPage"`
}

// LibraryPanelDTOMeta is the meta information for LibraryPanelDTO.
@@ -103,6 +117,8 @@ var (
ErrFolderHasConnectedLibraryPanels = errors.New("folder contains library panels that are linked to dashboards")
// errLibraryPanelVersionMismatch is an error for when a library panel has been changed by someone else.
errLibraryPanelVersionMismatch = errors.New("the library panel has been changed by someone else")
// errLibraryPanelHasConnectedDashboards is an error for when an user deletes a library panel that is connected to library panels.
errLibraryPanelHasConnectedDashboards = errors.New("the library panel is linked to dashboards")
)

// Commands

@@ -510,7 +510,7 @@ func SetAlertNotificationStateToCompleteCommand(ctx context.Context, cmd *models
|
||||
}
|
||||
|
||||
func SetAlertNotificationStateToPendingCommand(ctx context.Context, cmd *models.SetAlertNotificationStateToPendingCommand) error {
|
||||
return withDbSession(ctx, func(sess *DBSession) error {
|
||||
return withDbSession(ctx, x, func(sess *DBSession) error {
|
||||
newVersion := cmd.Version + 1
|
||||
sql := `UPDATE alert_notification_state SET
|
||||
state = ?,
|
||||
|
||||
@@ -91,7 +91,7 @@ func (acs *AnnotationCleanupService) executeUntilDoneOrCancelled(ctx context.Con
|
||||
return totalAffected, ctx.Err()
|
||||
default:
|
||||
var affected int64
|
||||
err := withDbSession(ctx, func(session *DBSession) error {
|
||||
err := withDbSession(ctx, x, func(session *DBSession) error {
|
||||
res, err := session.Exec(sql)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
@@ -28,13 +28,17 @@ func GetApiKeys(query *models.GetApiKeysQuery) error {
|
||||
}
|
||||
|
||||
func DeleteApiKeyCtx(ctx context.Context, cmd *models.DeleteApiKeyCommand) error {
|
||||
return withDbSession(ctx, func(sess *DBSession) error {
|
||||
var rawSQL = "DELETE FROM api_key WHERE id=? and org_id=?"
|
||||
_, err := sess.Exec(rawSQL, cmd.Id, cmd.OrgId)
|
||||
return err
|
||||
return withDbSession(ctx, x, func(sess *DBSession) error {
|
||||
return deleteAPIKey(sess, cmd.Id, cmd.OrgId)
|
||||
})
|
||||
}
|
||||
|
||||
func deleteAPIKey(sess *DBSession, id, orgID int64) error {
|
||||
rawSQL := "DELETE FROM api_key WHERE id=? and org_id=?"
|
||||
_, err := sess.Exec(rawSQL, id, orgID)
|
||||
return err
|
||||
}
|
||||
|
||||
func AddApiKey(cmd *models.AddApiKeyCommand) error {
|
||||
return inTransaction(func(sess *DBSession) error {
|
||||
key := models.ApiKey{OrgId: cmd.OrgId, Name: cmd.Name}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package sqlstore
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -18,6 +19,7 @@ import (
|
||||
|
||||
func init() {
|
||||
bus.AddHandler("sql", GetDataSources)
|
||||
bus.AddHandler("sql", GetDataSourcesByType)
|
||||
bus.AddHandler("sql", GetDataSource)
|
||||
bus.AddHandler("sql", AddDataSource)
|
||||
bus.AddHandler("sql", DeleteDataSource)
|
||||
@@ -71,10 +73,21 @@ func GetDataSources(query *models.GetDataSourcesQuery) error {
|
||||
} else {
|
||||
sess = x.Limit(query.DataSourceLimit, 0).Where("org_id=?", query.OrgId).Asc("name")
|
||||
}
|
||||
|
||||
query.Result = make([]*models.DataSource, 0)
|
||||
return sess.Find(&query.Result)
|
||||
}
|
||||
|
||||
// GetDataSourcesByType returns all datasources for a given type or an error if the specified type is an empty string
|
||||
func GetDataSourcesByType(query *models.GetDataSourcesByTypeQuery) error {
|
||||
if query.Type == "" {
|
||||
return fmt.Errorf("datasource type cannot be empty")
|
||||
}
|
||||
|
||||
query.Result = make([]*models.DataSource, 0)
|
||||
return x.Where("type=?", query.Type).Asc("id").Find(&query.Result)
|
||||
}
|
||||
|
||||
// GetDefaultDataSource is used to get the default datasource of organization
|
||||
func GetDefaultDataSource(query *models.GetDefaultDataSourceQuery) error {
|
||||
datasource := models.DataSource{}
|
||||
|
||||
@@ -218,7 +218,7 @@ func TestDataAccess(t *testing.T) {
|
||||
require.Equal(t, 0, len(query.Result))
|
||||
})
|
||||
|
||||
t.Run("GetDataSource", func(t *testing.T) {
|
||||
t.Run("GetDataSources", func(t *testing.T) {
|
||||
t.Run("Number of data sources returned limited to 6 per organization", func(t *testing.T) {
|
||||
InitTestDB(t)
|
||||
datasourceLimit := 6
|
||||
@@ -288,6 +288,49 @@ func TestDataAccess(t *testing.T) {
|
||||
require.Equal(t, numberOfDatasource, len(query.Result))
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("GetDataSourcesByType", func(t *testing.T) {
|
||||
t.Run("Only returns datasources of specified type", func(t *testing.T) {
|
||||
InitTestDB(t)
|
||||
|
||||
err := AddDataSource(&models.AddDataSourceCommand{
|
||||
OrgId: 10,
|
||||
Name: "Elasticsearch",
|
||||
Type: models.DS_ES,
|
||||
Access: models.DS_ACCESS_DIRECT,
|
||||
Url: "http://test",
|
||||
Database: "site",
|
||||
ReadOnly: true,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
err = AddDataSource(&models.AddDataSourceCommand{
|
||||
OrgId: 10,
|
||||
Name: "Graphite",
|
||||
Type: models.DS_GRAPHITE,
|
||||
Access: models.DS_ACCESS_DIRECT,
|
||||
Url: "http://test",
|
||||
Database: "site",
|
||||
ReadOnly: true,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
query := models.GetDataSourcesByTypeQuery{Type: models.DS_ES}
|
||||
|
||||
err = GetDataSourcesByType(&query)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(query.Result))
|
||||
})
|
||||
|
||||
t.Run("Returns an error if no type specified", func(t *testing.T) {
|
||||
query := models.GetDataSourcesByTypeQuery{}
|
||||
|
||||
err := GetDataSourcesByType(&query)
|
||||
|
||||
require.Error(t, err)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetDefaultDataSource(t *testing.T) {
|
||||
|
||||
@@ -32,4 +32,16 @@ func addUserAuthTokenMigrations(mg *Migrator) {
|
||||
mg.AddMigration("add unique index user_auth_token.prev_auth_token", NewAddIndexMigration(userAuthTokenV1, userAuthTokenV1.Indices[1]))
|
||||
|
||||
mg.AddMigration("add index user_auth_token.user_id", NewAddIndexMigration(userAuthTokenV1, userAuthTokenV1.Indices[2]))
|
||||
|
||||
mg.AddMigration(
|
||||
"Add revoked_at to the user auth token",
|
||||
NewAddColumnMigration(
|
||||
userAuthTokenV1,
|
||||
&Column{
|
||||
Name: "revoked_at",
|
||||
Type: DB_Int,
|
||||
Nullable: true,
|
||||
},
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -79,11 +79,15 @@ func (mg *Migrator) Start() error {
|
||||
return err
|
||||
}
|
||||
|
||||
migrationsPerformed := 0
|
||||
migrationsSkipped := 0
|
||||
start := time.Now()
|
||||
for _, m := range mg.migrations {
|
||||
m := m
|
||||
_, exists := logMap[m.Id()]
|
||||
if exists {
|
||||
mg.Logger.Debug("Skipping migration: Already executed", "id", m.Id())
|
||||
migrationsSkipped++
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -107,6 +111,9 @@ func (mg *Migrator) Start() error {
|
||||
}
|
||||
record.Success = true
|
||||
_, err = sess.Insert(&record)
|
||||
if err == nil {
|
||||
migrationsPerformed++
|
||||
}
|
||||
return err
|
||||
})
|
||||
if err != nil {
|
||||
@@ -114,6 +121,8 @@ func (mg *Migrator) Start() error {
|
||||
}
|
||||
}
|
||||
|
||||
mg.Logger.Info("migrations completed", "performed", migrationsPerformed, "skipped", migrationsSkipped, "duration", time.Since(start))
|
||||
|
||||
// Make sure migrations are synced
|
||||
return mg.x.Sync2()
|
||||
}
|
||||
|
||||
@@ -46,22 +46,14 @@ func startSession(ctx context.Context, engine *xorm.Engine, beginTran bool) (*DB
|
||||
return newSess, nil
|
||||
}
|
||||
|
||||
// WithDbSession calls the callback with an session attached to the context.
|
||||
// WithDbSession calls the callback with a session.
|
||||
func (ss *SQLStore) WithDbSession(ctx context.Context, callback dbTransactionFunc) error {
|
||||
sess, err := startSession(ctx, ss.engine, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer sess.Close()
|
||||
|
||||
return callback(sess)
|
||||
return withDbSession(ctx, ss.engine, callback)
|
||||
}
|
||||
|
||||
func withDbSession(ctx context.Context, callback dbTransactionFunc) error {
|
||||
sess, err := startSession(ctx, x, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
func withDbSession(ctx context.Context, engine *xorm.Engine, callback dbTransactionFunc) error {
|
||||
sess := &DBSession{Session: engine.NewSession()}
|
||||
defer sess.Close()
|
||||
|
||||
return callback(sess)
|
||||
}
|
||||
|
||||
@@ -105,7 +105,7 @@ func (ss *SQLStore) Init() error {
|
||||
|
||||
// Init repo instances
|
||||
annotations.SetRepository(&SQLAnnotationRepo{})
|
||||
annotations.SetAnnotationCleaner(&AnnotationCleanupService{batchSize: 100, log: log.New("annotationcleaner")})
|
||||
annotations.SetAnnotationCleaner(&AnnotationCleanupService{batchSize: ss.Cfg.AnnotationCleanupJobBatchSize, log: log.New("annotationcleaner")})
|
||||
ss.Bus.SetTransactionManager(ss)
|
||||
|
||||
// Register handlers
|
||||
@@ -142,21 +142,15 @@ func (ss *SQLStore) Reset() error {
}

func (ss *SQLStore) ensureMainOrgAndAdminUser() error {
	err := ss.InTransaction(context.Background(), func(ctx context.Context) error {
	ctx := context.Background()
	err := ss.WithTransactionalDbSession(ctx, func(sess *DBSession) error {
		ss.log.Debug("Ensuring main org and admin user exist")
		var stats models.SystemUserCountStats
		err := ss.WithDbSession(ctx, func(sess *DBSession) error {
			// TODO: Should be able to rename "Count" to "count", for more standard SQL style
			// Just have to make sure it gets deserialized properly into models.SystemUserCountStats
			rawSQL := `SELECT COUNT(id) AS Count FROM ` + dialect.Quote("user")
			if _, err := sess.SQL(rawSQL).Get(&stats); err != nil {
				return fmt.Errorf("could not determine if admin user exists: %w", err)
			}

			return nil
		})
		if err != nil {
			return err
		// TODO: Should be able to rename "Count" to "count", for more standard SQL style
		// Just have to make sure it gets deserialized properly into models.SystemUserCountStats
		rawSQL := `SELECT COUNT(id) AS Count FROM ` + dialect.Quote("user")
		if _, err := sess.SQL(rawSQL).Get(&stats); err != nil {
			return fmt.Errorf("could not determine if admin user exists: %w", err)
		}

		if stats.Count > 0 {
@@ -167,7 +161,7 @@ func (ss *SQLStore) ensureMainOrgAndAdminUser() error {
		if !ss.Cfg.DisableInitAdminCreation {
			ss.log.Debug("Creating default admin user")
			ss.log.Debug("Creating default admin user")
			if _, err := ss.createUser(ctx, userCreationArgs{
			if _, err := ss.createUser(ctx, sess, userCreationArgs{
				Login:    ss.Cfg.AdminUser,
				Email:    ss.Cfg.AdminUser + "@localhost",
				Password: ss.Cfg.AdminPassword,
@@ -182,11 +176,8 @@ func (ss *SQLStore) ensureMainOrgAndAdminUser() error {
		// return nil
	}

	if err := inTransactionWithRetryCtx(ctx, ss.engine, func(sess *DBSession) error {
		ss.log.Debug("Creating default org", "name", MainOrgName)
		_, err := ss.getOrCreateOrg(sess, MainOrgName)
		return err
	}, 0); err != nil {
	ss.log.Debug("Creating default org", "name", MainOrgName)
	if _, err := ss.getOrCreateOrg(sess, MainOrgName); err != nil {
		return fmt.Errorf("failed to create default organization: %w", err)
	}
@@ -170,7 +170,7 @@ func GetAdminStats(query *models.GetAdminStatsQuery) error {
}

func GetSystemUserCountStats(ctx context.Context, query *models.GetSystemUserCountStatsQuery) error {
	return withDbSession(ctx, func(sess *DBSession) error {
	return withDbSession(ctx, x, func(sess *DBSession) error {
		var rawSQL = `SELECT COUNT(id) AS Count FROM ` + dialect.Quote("user")
		var stats models.SystemUserCountStats
		_, err := sess.SQL(rawSQL).Get(&stats)
@@ -23,11 +23,9 @@ func TestTransaction(t *testing.T) {
		err := AddApiKey(cmd)
		So(err, ShouldBeNil)

		deleteApiKeyCmd := &models.DeleteApiKeyCommand{Id: cmd.Result.Id, OrgId: 1}

		Convey("can update key", func() {
			err := ss.InTransaction(context.Background(), func(ctx context.Context) error {
				return DeleteApiKeyCtx(ctx, deleteApiKeyCmd)
			err := ss.WithTransactionalDbSession(context.Background(), func(sess *DBSession) error {
				return deleteAPIKey(sess, cmd.Result.Id, 1)
			})

			So(err, ShouldBeNil)
@@ -38,8 +36,8 @@ func TestTransaction(t *testing.T) {
		})

		Convey("won't update if one handler fails", func() {
			err := ss.InTransaction(context.Background(), func(ctx context.Context) error {
				err := DeleteApiKeyCtx(ctx, deleteApiKeyCmd)
			err := ss.WithTransactionalDbSession(context.Background(), func(sess *DBSession) error {
				err := deleteAPIKey(sess, cmd.Result.Id, 1)
				if err != nil {
					return err
				}
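A minimal sketch of the transactional pattern these tests exercise: work done in the callback is committed only when the callback returns nil, and rolled back otherwise. The statement and values below are illustrative only, not taken from the repository.

// Hypothetical example: a write inside a transaction; returning an error rolls it back.
func exampleTransactionalWrite(ss *SQLStore) error {
	return ss.WithTransactionalDbSession(context.Background(), func(sess *DBSession) error {
		if _, err := sess.Exec("UPDATE api_key SET name = ? WHERE id = ?", "renamed", 1); err != nil {
			return err // non-nil error triggers rollback
		}
		// Returning nil commits the transaction.
		return nil
	})
}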
@@ -88,105 +88,99 @@ func (ss *SQLStore) getOrgIDForNewUser(sess *DBSession, args userCreationArgs) (
}

// createUser creates a user in the database.
func (ss *SQLStore) createUser(ctx context.Context, args userCreationArgs, skipOrgSetup bool) (models.User, error) {
func (ss *SQLStore) createUser(ctx context.Context, sess *DBSession, args userCreationArgs, skipOrgSetup bool) (models.User, error) {
	var user models.User
	if err := inTransactionWithRetryCtx(ctx, ss.engine, func(sess *DBSession) error {
		var orgID int64 = -1
		if !skipOrgSetup {
			var err error
			orgID, err = ss.getOrgIDForNewUser(sess, args)
			if err != nil {
				return err
			}
		}

		if args.Email == "" {
			args.Email = args.Login
		}

		exists, err := sess.Where("email=? OR login=?", args.Email, args.Login).Get(&models.User{})
	var orgID int64 = -1
	if !skipOrgSetup {
		var err error
		orgID, err = ss.getOrgIDForNewUser(sess, args)
		if err != nil {
			return err
		}
		if exists {
			return models.ErrUserAlreadyExists
			return user, err
		}
	}

		// create user
		user = models.User{
			Email:         args.Email,
			Name:          args.Name,
			Login:         args.Login,
			Company:       args.Company,
			IsAdmin:       args.IsAdmin,
			IsDisabled:    args.IsDisabled,
			OrgId:         orgID,
			EmailVerified: args.EmailVerified,
			Created:       time.Now(),
			Updated:       time.Now(),
			LastSeenAt:    time.Now().AddDate(-10, 0, 0),
		}
	if args.Email == "" {
		args.Email = args.Login
	}

		salt, err := util.GetRandomString(10)
		if err != nil {
			return err
		}
		user.Salt = salt
		rands, err := util.GetRandomString(10)
		if err != nil {
			return err
		}
		user.Rands = rands

		if len(args.Password) > 0 {
			encodedPassword, err := util.EncodePassword(args.Password, user.Salt)
			if err != nil {
				return err
			}
			user.Password = encodedPassword
		}

		sess.UseBool("is_admin")

		if _, err := sess.Insert(&user); err != nil {
			return err
		}

		sess.publishAfterCommit(&events.UserCreated{
			Timestamp: user.Created,
			Id:        user.Id,
			Name:      user.Name,
			Login:     user.Login,
			Email:     user.Email,
		})

		// create org user link
		if !skipOrgSetup {
			orgUser := models.OrgUser{
				OrgId:   orgID,
				UserId:  user.Id,
				Role:    models.ROLE_ADMIN,
				Created: time.Now(),
				Updated: time.Now(),
			}

			if ss.Cfg.AutoAssignOrg && !user.IsAdmin {
				if len(args.DefaultOrgRole) > 0 {
					orgUser.Role = models.RoleType(args.DefaultOrgRole)
				} else {
					orgUser.Role = models.RoleType(ss.Cfg.AutoAssignOrgRole)
				}
			}

			if _, err = sess.Insert(&orgUser); err != nil {
				return err
			}
		}

		return nil
	}, 0); err != nil {
	exists, err := sess.Where("email=? OR login=?", args.Email, args.Login).Get(&models.User{})
	if err != nil {
		return user, err
	}
	if exists {
		return user, models.ErrUserAlreadyExists
	}

	// create user
	user = models.User{
		Email:         args.Email,
		Name:          args.Name,
		Login:         args.Login,
		Company:       args.Company,
		IsAdmin:       args.IsAdmin,
		IsDisabled:    args.IsDisabled,
		OrgId:         orgID,
		EmailVerified: args.EmailVerified,
		Created:       time.Now(),
		Updated:       time.Now(),
		LastSeenAt:    time.Now().AddDate(-10, 0, 0),
	}

	salt, err := util.GetRandomString(10)
	if err != nil {
		return user, err
	}
	user.Salt = salt
	rands, err := util.GetRandomString(10)
	if err != nil {
		return user, err
	}
	user.Rands = rands

	if len(args.Password) > 0 {
		encodedPassword, err := util.EncodePassword(args.Password, user.Salt)
		if err != nil {
			return user, err
		}
		user.Password = encodedPassword
	}

	sess.UseBool("is_admin")

	if _, err := sess.Insert(&user); err != nil {
		return user, err
	}

	sess.publishAfterCommit(&events.UserCreated{
		Timestamp: user.Created,
		Id:        user.Id,
		Name:      user.Name,
		Login:     user.Login,
		Email:     user.Email,
	})

	// create org user link
	if !skipOrgSetup {
		orgUser := models.OrgUser{
			OrgId:   orgID,
			UserId:  user.Id,
			Role:    models.ROLE_ADMIN,
			Created: time.Now(),
			Updated: time.Now(),
		}

		if ss.Cfg.AutoAssignOrg && !user.IsAdmin {
			if len(args.DefaultOrgRole) > 0 {
				orgUser.Role = models.RoleType(args.DefaultOrgRole)
			} else {
				orgUser.Role = models.RoleType(ss.Cfg.AutoAssignOrgRole)
			}
		}

		if _, err = sess.Insert(&orgUser); err != nil {
			return user, err
		}
	}

	return user, nil
}
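Since createUser now takes an explicit *DBSession, callers are expected to open the session themselves. A hypothetical sketch of such a caller follows; the wrapper name and argument values are illustrative only and do not appear in this change.

// Hypothetical caller: create a user inside a transactional session.
func exampleCreateUser(ss *SQLStore) (models.User, error) {
	ctx := context.Background()
	var usr models.User
	err := ss.WithTransactionalDbSession(ctx, func(sess *DBSession) error {
		var err error
		usr, err = ss.createUser(ctx, sess, userCreationArgs{
			Login: "example",
			Email: "example@localhost",
		}, false)
		return err
	})
	return usr, err
}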
@@ -12,12 +12,15 @@ import (
	"os"
	"path"
	"path/filepath"
	"runtime"
	"strconv"
	"strings"
	"time"

	"github.com/prometheus/common/model"
	ini "gopkg.in/ini.v1"

	"github.com/grafana/grafana-aws-sdk/pkg/awsds"
	"github.com/grafana/grafana/pkg/components/gtime"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/util"
@@ -46,6 +49,9 @@ const (
	authProxySyncTTL = 60
)

// zoneInfo names environment variable for setting the path to look for the timezone database in go
const zoneInfo = "ZONEINFO"

var (
	// App settings.
	Env = Dev
@@ -199,6 +205,9 @@ type Cfg struct {
	RouterLogging bool
	Domain        string
	CDNRootURL    *url.URL
	ReadTimeout   time.Duration
	EnableGzip    bool
	EnforceDomain bool

	// build
	BuildVersion string
@@ -276,6 +285,7 @@ type Cfg struct {
	// AWS Plugin Auth
	AWSAllowedAuthProviders []string
	AWSAssumeRoleEnabled    bool
	AWSListMetricsPageLimit int

	// Auth proxy settings
	AuthProxyEnabled bool
@@ -318,6 +328,7 @@ type Cfg struct {
	HiddenUsers map[string]struct{}

	// Annotations
	AnnotationCleanupJobBatchSize      int64
	AlertingAnnotationCleanupSetting   AnnotationCleanupSettings
	DashboardAnnotationCleanupSettings AnnotationCleanupSettings
	APIAnnotationCleanupSettings       AnnotationCleanupSettings
@@ -471,6 +482,9 @@ func (cfg *Cfg) readGrafanaEnvironmentMetrics() error {
}

func (cfg *Cfg) readAnnotationSettings() {
	section := cfg.Raw.Section("annotations")
	cfg.AnnotationCleanupJobBatchSize = section.Key("cleanupjob_batchsize").MustInt64(100)

	dashboardAnnotation := cfg.Raw.Section("annotations.dashboard")
	apiIAnnotation := cfg.Raw.Section("annotations.api")
	alertingSection := cfg.Raw.Section("alerting")
@@ -744,6 +758,14 @@ func (cfg *Cfg) validateStaticRootPath() error {
func (cfg *Cfg) Load(args *CommandLineArgs) error {
	setHomePath(args)

	// Fix for missing IANA db on Windows
	_, zoneInfoSet := os.LookupEnv(zoneInfo)
	if runtime.GOOS == "windows" && !zoneInfoSet {
		if err := os.Setenv(zoneInfo, filepath.Join(HomePath, "tools", "zoneinfo.zip")); err != nil {
			cfg.Logger.Error("Can't set ZONEINFO environment variable", "err", err)
		}
	}

	iniFile, err := cfg.loadConfiguration(args)
	if err != nil {
		return err
@@ -865,7 +887,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
	}

	cfg.readLDAPConfig()
	cfg.readAWSConfig()
	cfg.handleAWSConfig()
	cfg.readSessionConfig()
	cfg.readSmtpSettings()
	cfg.readQuotaSettings()
@@ -928,16 +950,27 @@ func (cfg *Cfg) readLDAPConfig() {
	cfg.LDAPAllowSignup = LDAPAllowSignup
}

func (cfg *Cfg) readAWSConfig() {
func (cfg *Cfg) handleAWSConfig() {
	awsPluginSec := cfg.Raw.Section("aws")
	cfg.AWSAssumeRoleEnabled = awsPluginSec.Key("assume_role_enabled").MustBool(true)
	allowedAuthProviders := awsPluginSec.Key("allowed_auth_providers").String()
	allowedAuthProviders := awsPluginSec.Key("allowed_auth_providers").MustString("default,keys,credentials")
	for _, authProvider := range strings.Split(allowedAuthProviders, ",") {
		authProvider = strings.TrimSpace(authProvider)
		if authProvider != "" {
			cfg.AWSAllowedAuthProviders = append(cfg.AWSAllowedAuthProviders, authProvider)
		}
	}
	cfg.AWSListMetricsPageLimit = awsPluginSec.Key("list_metrics_page_limit").MustInt(500)
	// Also set environment variables that can be used by core plugins
	err := os.Setenv(awsds.AssumeRoleEnabledEnvVarKeyName, strconv.FormatBool(cfg.AWSAssumeRoleEnabled))
	if err != nil {
		cfg.Logger.Error(fmt.Sprintf("could not set environment variable '%s'", awsds.AssumeRoleEnabledEnvVarKeyName), err)
	}

	err = os.Setenv(awsds.AllowedAuthProvidersEnvVarKeyName, allowedAuthProviders)
	if err != nil {
		cfg.Logger.Error(fmt.Sprintf("could not set environment variable '%s'", awsds.AllowedAuthProvidersEnvVarKeyName), err)
	}
}

func (cfg *Cfg) readSessionConfig() {
@@ -1331,6 +1364,8 @@ func (cfg *Cfg) readServerSettings(iniFile *ini.File) error {
		}
	}

	cfg.ReadTimeout = server.Key("read_timeout").MustDuration(0)

	return nil
}
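A hedged sketch of how a read_timeout value parsed this way could be applied when building the HTTP server; the wiring below is illustrative only (it assumes net/http) and is not taken from this change.

// Illustrative only: apply the configured read timeout to a net/http server.
func newHTTPServerExample(cfg *setting.Cfg, handler http.Handler) *http.Server {
	return &http.Server{
		Addr:        ":3000",
		Handler:     handler,
		ReadTimeout: cfg.ReadTimeout, // 0 means no timeout for reading the request
	}
}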
@@ -4,15 +4,12 @@ import (
	"context"
	"fmt"
	"regexp"
	"strings"
	"time"

	"github.com/grafana/grafana-aws-sdk/pkg/awsds"
	"github.com/grafana/grafana-plugin-sdk-go/data"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/client"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/credentials/stscreds"
	"github.com/aws/aws-sdk-go/aws/request"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cloudwatch"
@@ -31,19 +28,6 @@ import (
	"github.com/grafana/grafana/pkg/tsdb"
)

type datasourceInfo struct {
	Profile       string
	Region        string
	AuthType      authType
	AssumeRoleARN string
	ExternalID    string
	Namespace     string
	Endpoint      string

	AccessKey string
	SecretKey string
}

const cloudWatchTSFormat = "2006-01-02 15:04:05.000"
const defaultRegion = "default"

@@ -64,21 +48,32 @@ func init() {

type CloudWatchService struct {
	LogsService *LogsService `inject:""`
	Cfg         *setting.Cfg `inject:""`
	sessions    SessionCache
}

func (s *CloudWatchService) Init() error {
	plog.Debug("initing")

	tsdb.RegisterTsdbQueryEndpoint("cloudwatch", func(ds *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
		return newExecutor(s.LogsService), nil
		return newExecutor(s.LogsService, s.Cfg, s.sessions), nil
	})

	s.sessions = awsds.NewSessionCache()
	return nil
}

func newExecutor(logsService *LogsService) *cloudWatchExecutor {
func (s *CloudWatchService) NewExecutor(*models.DataSource) (*cloudWatchExecutor, error) {
	return newExecutor(s.LogsService, s.Cfg, s.sessions), nil
}

type SessionCache interface {
	GetSession(region string, s awsds.AWSDatasourceSettings) (*session.Session, error)
}

func newExecutor(logsService *LogsService, cfg *setting.Cfg, sessions SessionCache) *cloudWatchExecutor {
	return &cloudWatchExecutor{
		cfg:         cfg,
		logsService: logsService,
		sessions:    sessions,
	}
}
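The tests further down inject a fakeSessionCache where production code uses awsds.NewSessionCache(). One plausible shape for that test double, satisfying the SessionCache interface above, is sketched here; the fake actually used in the repository may differ.

// Hypothetical test double for SessionCache; returns an empty SDK session and no error.
type fakeSessionCache struct{}

func (f fakeSessionCache) GetSession(region string, s awsds.AWSDatasourceSettings) (*session.Session, error) {
	return &session.Session{}, nil
}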
@@ -90,113 +85,14 @@ type cloudWatchExecutor struct {
	rgtaClient resourcegroupstaggingapiiface.ResourceGroupsTaggingAPIAPI

	logsService *LogsService
	cfg         *setting.Cfg
	sessions    SessionCache
}

func (e *cloudWatchExecutor) newSession(region string) (*session.Session, error) {
	dsInfo := e.getDSInfo(region)
	awsDatasourceSettings := e.getAWSDatasourceSettings(region)

	bldr := strings.Builder{}
	for i, s := range []string{
		dsInfo.AuthType.String(), dsInfo.AccessKey, dsInfo.Profile, dsInfo.AssumeRoleARN, region, dsInfo.Endpoint,
	} {
		if i != 0 {
			bldr.WriteString(":")
		}
		bldr.WriteString(strings.ReplaceAll(s, ":", `\:`))
	}
	cacheKey := bldr.String()

	sessCacheLock.RLock()
	if env, ok := sessCache[cacheKey]; ok {
		if env.expiration.After(time.Now().UTC()) {
			sessCacheLock.RUnlock()
			return env.session, nil
		}
	}
	sessCacheLock.RUnlock()

	cfgs := []*aws.Config{
		{
			CredentialsChainVerboseErrors: aws.Bool(true),
		},
	}

	var regionCfg *aws.Config
	if dsInfo.Region == defaultRegion {
		plog.Warn("Region is set to \"default\", which is unsupported")
		dsInfo.Region = ""
	}
	if dsInfo.Region != "" {
		regionCfg = &aws.Config{Region: aws.String(dsInfo.Region)}
		cfgs = append(cfgs, regionCfg)
	}

	if dsInfo.Endpoint != "" {
		cfgs = append(cfgs, &aws.Config{Endpoint: aws.String(dsInfo.Endpoint)})
	}

	switch dsInfo.AuthType {
	case authTypeSharedCreds:
		plog.Debug("Authenticating towards AWS with shared credentials", "profile", dsInfo.Profile,
			"region", dsInfo.Region)
		cfgs = append(cfgs, &aws.Config{
			Credentials: credentials.NewSharedCredentials("", dsInfo.Profile),
		})
	case authTypeKeys:
		plog.Debug("Authenticating towards AWS with an access key pair", "region", dsInfo.Region)
		cfgs = append(cfgs, &aws.Config{
			Credentials: credentials.NewStaticCredentials(dsInfo.AccessKey, dsInfo.SecretKey, ""),
		})
	case authTypeDefault:
		plog.Debug("Authenticating towards AWS with default SDK method", "region", dsInfo.Region)
	default:
		panic(fmt.Sprintf("Unrecognized authType: %d", dsInfo.AuthType))
	}
	sess, err := newSession(cfgs...)
	if err != nil {
		return nil, err
	}

	duration := stscreds.DefaultDuration
	expiration := time.Now().UTC().Add(duration)
	if dsInfo.AssumeRoleARN != "" {
		// We should assume a role in AWS
		plog.Debug("Trying to assume role in AWS", "arn", dsInfo.AssumeRoleARN)

		cfgs := []*aws.Config{
			{
				CredentialsChainVerboseErrors: aws.Bool(true),
			},
			{
				Credentials: newSTSCredentials(sess, dsInfo.AssumeRoleARN, func(p *stscreds.AssumeRoleProvider) {
					// Not sure if this is necessary, overlaps with p.Duration and is undocumented
					p.Expiry.SetExpiration(expiration, 0)
					p.Duration = duration
					if dsInfo.ExternalID != "" {
						p.ExternalID = aws.String(dsInfo.ExternalID)
					}
				}),
			},
		}
		if regionCfg != nil {
			cfgs = append(cfgs, regionCfg)
		}
		sess, err = newSession(cfgs...)
		if err != nil {
			return nil, err
		}
	}

	plog.Debug("Successfully created AWS session")

	sessCacheLock.Lock()
	sessCache[cacheKey] = envelope{
		session:    sess,
		expiration: expiration,
	}
	sessCacheLock.Unlock()

	return sess, nil
	return e.sessions.GetSession(region, *awsDatasourceSettings)
}
func (e *cloudWatchExecutor) getCWClient(region string) (cloudwatchiface.CloudWatchAPI, error) {
@@ -389,28 +285,7 @@ func (e *cloudWatchExecutor) executeLogAlertQuery(ctx context.Context, queryCont
	return response, nil
}

type authType int

const (
	authTypeDefault authType = iota
	authTypeSharedCreds
	authTypeKeys
)

func (at authType) String() string {
	switch at {
	case authTypeDefault:
		return "default"
	case authTypeSharedCreds:
		return "sharedCreds"
	case authTypeKeys:
		return "keys"
	default:
		panic(fmt.Sprintf("Unrecognized auth type %d", at))
	}
}

func (e *cloudWatchExecutor) getDSInfo(region string) *datasourceInfo {
func (e *cloudWatchExecutor) getAWSDatasourceSettings(region string) *awsds.AWSDatasourceSettings {
	if region == defaultRegion {
		region = e.DataSource.JsonData.Get("defaultRegion").MustString()
	}
@@ -423,17 +298,19 @@ func (e *cloudWatchExecutor) getDSInfo(region string) *datasourceInfo {
	accessKey := decrypted["accessKey"]
	secretKey := decrypted["secretKey"]

	at := authTypeDefault
	at := awsds.AuthTypeDefault
	switch atStr {
	case "credentials":
		at = authTypeSharedCreds
		at = awsds.AuthTypeSharedCreds
	case "keys":
		at = authTypeKeys
		at = awsds.AuthTypeKeys
	case "default":
		at = authTypeDefault
		at = awsds.AuthTypeDefault
	case "arn":
		at = authTypeDefault
		at = awsds.AuthTypeDefault
		plog.Warn("Authentication type \"arn\" is deprecated, falling back to default")
	case "ec2_iam_role":
		at = awsds.AuthTypeEC2IAMRole
	default:
		plog.Warn("Unrecognized AWS authentication type", "type", atStr)
	}
@@ -443,7 +320,7 @@ func (e *cloudWatchExecutor) getDSInfo(region string) *datasourceInfo {
		profile = e.DataSource.Database // legacy support
	}

	return &datasourceInfo{
	return &awsds.AWSDatasourceSettings{
		Region:   region,
		Profile:  profile,
		AuthType: at,
@@ -47,7 +47,7 @@ func TestQuery_DescribeLogGroups(t *testing.T) {
		},
	}

	executor := newExecutor(nil)
	executor := newExecutor(nil, newTestConfig(), fakeSessionCache{})
	resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{
		Queries: []*tsdb.Query{
			{
@@ -100,7 +100,7 @@ func TestQuery_DescribeLogGroups(t *testing.T) {
		},
	}

	executor := newExecutor(nil)
	executor := newExecutor(nil, newTestConfig(), fakeSessionCache{})
	resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{
		Queries: []*tsdb.Query{
			{
@@ -170,7 +170,7 @@ func TestQuery_GetLogGroupFields(t *testing.T) {

	const refID = "A"

	executor := newExecutor(nil)
	executor := newExecutor(nil, newTestConfig(), fakeSessionCache{})
	resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{
		Queries: []*tsdb.Query{
			{
@@ -249,7 +249,7 @@ func TestQuery_StartQuery(t *testing.T) {
		To:   "1584700643000",
	}

	executor := newExecutor(nil)
	executor := newExecutor(nil, newTestConfig(), fakeSessionCache{})
	_, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{
		TimeRange: timeRange,
		Queries: []*tsdb.Query{
@@ -295,7 +295,7 @@ func TestQuery_StartQuery(t *testing.T) {
		To:   "1584873443000",
	}

	executor := newExecutor(nil)
	executor := newExecutor(nil, newTestConfig(), fakeSessionCache{})
	resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{
		TimeRange: timeRange,
		Queries: []*tsdb.Query{
@@ -371,7 +371,7 @@ func TestQuery_StopQuery(t *testing.T) {
		To:   "1584700643000",
	}

	executor := newExecutor(nil)
	executor := newExecutor(nil, newTestConfig(), fakeSessionCache{})
	resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{
		TimeRange: timeRange,
		Queries: []*tsdb.Query{
@@ -458,7 +458,7 @@ func TestQuery_GetQueryResults(t *testing.T) {
		},
	}

	executor := newExecutor(nil)
	executor := newExecutor(nil, newTestConfig(), fakeSessionCache{})
	resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{
		Queries: []*tsdb.Query{
			{
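These tests also call a newTestConfig helper that is not shown in this diff; a plausible definition would simply return a Cfg with the new AWS fields populated, for example (hypothetical, the repository's helper may differ):

// Hypothetical helper: a Cfg prefilled with the AWS plugin-auth defaults the tests rely on.
func newTestConfig() *setting.Cfg {
	return &setting.Cfg{
		AWSAllowedAuthProviders: []string{"default", "keys", "credentials"},
		AWSAssumeRoleEnabled:    true,
		AWSListMetricsPageLimit: 500,
	}
}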
Some files were not shown because too many files have changed in this diff.