mirror of
https://github.com/grafana/grafana.git
synced 2025-12-20 11:40:21 +08:00
Compare commits
36 Commits
provisioni
...
v6.7.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ca6d08d5cb | ||
|
|
d01bdb517d | ||
|
|
63dfdb7066 | ||
|
|
e95667fffb | ||
|
|
c08b901664 | ||
|
|
7cd6fef466 | ||
|
|
1b4f93b88c | ||
|
|
c4656a885d | ||
|
|
818a2f3d64 | ||
|
|
7f52e023b5 | ||
|
|
962a06545a | ||
|
|
79aeeaa10a | ||
|
|
ea483c0ce1 | ||
|
|
3e88197f96 | ||
|
|
66df54db80 | ||
|
|
e4b4480064 | ||
|
|
4e4f69b5f6 | ||
|
|
a4b7209e39 | ||
|
|
6c001d9c09 | ||
|
|
312600aa2c | ||
|
|
26d701dcf9 | ||
|
|
e347b62cee | ||
|
|
36232857df | ||
|
|
9d605bdd04 | ||
|
|
4d235b978e | ||
|
|
6575c9cb6e | ||
|
|
0ad27a6596 | ||
|
|
a0c6afa0a5 | ||
|
|
3d0bc141c7 | ||
|
|
ed307897e7 | ||
|
|
1d63f57caf | ||
|
|
e00f393a17 | ||
|
|
277e00aaed | ||
|
|
ba6104190e | ||
|
|
eaaca91f25 | ||
|
|
a551cd2470 |
@@ -45,17 +45,13 @@ jobs:
|
||||
description: Install the Grafana Build Pipeline tool
|
||||
executor: grafana-build
|
||||
steps:
|
||||
- run:
|
||||
name: Clone repo
|
||||
command: |
|
||||
mkdir -p ~/.ssh
|
||||
echo 'github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==' >> ~/.ssh/known_hosts
|
||||
git clone git@github.com:grafana/build-pipeline.git
|
||||
- run:
|
||||
name: Install Grafana Build Pipeline
|
||||
command: |
|
||||
cd build-pipeline
|
||||
go build -o ../bin/grabpl ./cmd/grabpl
|
||||
curl -fLO https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v0.1.0/grabpl
|
||||
chmod +x grabpl
|
||||
mkdir bin
|
||||
mv grabpl bin/
|
||||
- persist_to_workspace:
|
||||
root: .
|
||||
paths:
|
||||
|
||||
@@ -274,6 +274,45 @@ There are also some Grafana variables that can be used in Azure Log Analytics qu
|
||||
|
||||
- `$__interval` - Grafana calculates the minimum time grain that can be used to group by time in queries. More details on how it works [here]({{< relref "../../reference/templating.md#interval-variables" >}}). It returns a time grain like `5m` or `1h` that can be used in the bin function. E.g. `summarize count() by bin(TimeGenerated, $__interval)`
|
||||
|
||||
### Templating with Variables for Azure Log Analytics
|
||||
|
||||
Any Log Analytics query that returns a list of values can be used in the `Query` field in the Variable edit view. There is also one Grafana function for Log Analytics that returns a list of workspaces.
|
||||
|
||||
Refer to the [Variables]({{< relref "../../reference/templating.md" >}}) documentation for an introduction to the templating feature and the different
|
||||
types of template variables.
|
||||
|
||||
| Name | Description |
|
||||
| -------------------------------------------------- | ------------------------------------------------------------------------------------------------------ |
|
||||
| _workspaces()_ | Returns a list of workspaces for the default subscription. |
|
||||
| _workspaces(12345678-aaaa-bbbb-cccc-123456789aaa)_ | Returns a list of workspaces for the specified subscription (the parameter can be quoted or unquoted). |
|
||||
|
||||
Example variable queries:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
| Query | Description |
|
||||
| --------------------------------------------------------------------------------------- | --------------------------------------------------------- |
|
||||
| _subscriptions()_ | Returns a list of Azure subscriptions |
|
||||
| _workspaces()_ | Returns a list of workspaces for default subscription |
|
||||
| _workspaces("12345678-aaaa-bbbb-cccc-123456789aaa")_ | Returns a list of workspaces for a specified subscription |
|
||||
| _workspaces("$subscription")_ | With template variable for the subscription parameter |
|
||||
| _workspace("myWorkspace").Heartbeat \| distinct Computer_ | Returns a list of Virtual Machines |
|
||||
| _workspace("$workspace").Heartbeat \| distinct Computer_ | Returns a list of Virtual Machines with template variable |
|
||||
| _workspace("$workspace").Perf \| distinct ObjectName_ | Returns a list of objects from the Perf table |
|
||||
| _workspace("$workspace").Perf \| where ObjectName == "$object" \| distinct CounterName_ | Returns a list of metric names from the Perf table |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
Example of a time series query using variables:
|
||||
|
||||
```
|
||||
Perf
|
||||
| where ObjectName == "$object" and CounterName == "$metric"
|
||||
| where TimeGenerated >= $__timeFrom() and TimeGenerated <= $__timeTo()
|
||||
| where $__contains(Computer, $computer)
|
||||
| summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer
|
||||
| order by TimeGenerated asc
|
||||
```
|
||||
|
||||
### Azure Log Analytics Alerting
|
||||
|
||||
Not implemented yet.
|
||||
|
||||
@@ -45,9 +45,9 @@ Grafana 6.7 comes with a new OAuth integration for Microsoft Azure Active Direct
|
||||
|
||||
Allowing a low dashboard refresh interval can cause severe load on data sources and Grafana. Grafana v6.7 allows you to restrict the dashboard refresh interval so it cannot be set lower than a given interval. This provides a way for administrators to control dashboard refresh behavior on a global level.
|
||||
|
||||
Refer to min_refresh_interval in [Configuration]({{< relref "../administration/configuration/#min-refresh-interval" >}}) for further information and how to enable this.
|
||||
Refer to min_refresh_interval in [Configuration]({{< relref "../installation/configuration#min-refresh-interval" >}}) for more information and how to enable this feature.
|
||||
|
||||
### Stackdriver Project Selector
|
||||
### Stackdriver project selector
|
||||
A Stackdriver data source in Grafana is configured for one service account only. That service account is always associated with a default project in Google Cloud Platform (GCP). Depending on your setup in GCP, the service account might be granted access to more projects than just the default project.
|
||||
|
||||
In Grafana 6.7, the query editor has been enhanced with a project selector that makes it possible to query different projects without changing datasource. Many thanks [Eraac](https://github.com/Eraac), [eliaslaouiti](https://github.com/eliaslaouiti) and [NaurisSadovskis](https://github.com/NaurisSadovskis) for making this happen!
|
||||
|
||||
2
go.mod
2
go.mod
@@ -32,7 +32,7 @@ require (
|
||||
github.com/gorilla/websocket v1.4.1
|
||||
github.com/gosimple/slug v1.4.2
|
||||
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.26.0
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.30.0
|
||||
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd
|
||||
github.com/hashicorp/go-plugin v1.0.1
|
||||
github.com/hashicorp/go-version v1.1.0
|
||||
|
||||
10
go.sum
10
go.sum
@@ -133,14 +133,8 @@ github.com/gosimple/slug v1.4.2 h1:jDmprx3q/9Lfk4FkGZtvzDQ9Cj9eAmsjzeQGp24PeiQ=
|
||||
github.com/gosimple/slug v1.4.2/go.mod h1:ER78kgg1Mv0NQGlXiDe57DpCyfbNywXXZ9mIorhxAf0=
|
||||
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4 h1:SPdxCL9BChFTlyi0Khv64vdCW4TMna8+sxL7+Chx+Ag=
|
||||
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4/go.mod h1:nc0XxBzjeGcrMltCDw269LoWF9S8ibhgxolCdA1R8To=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.21.0 h1:5en5MdVFgeD9tuHDuJgwHYdIVjPs0PN0a7ZQ2bZNxNk=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.21.0/go.mod h1:G6Ov9M+FDOZXNw8eKXINO6XzqdUvTs7huwyQp5jLTBQ=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.22.1-0.20200310164332-6b4c0d952d70 h1:VQFBaWHlxwjb4VB5HuXtuucMzXJ7xZGGASzbqA3VtVo=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.22.1-0.20200310164332-6b4c0d952d70/go.mod h1:G6Ov9M+FDOZXNw8eKXINO6XzqdUvTs7huwyQp5jLTBQ=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.24.0 h1:sgd9rAQMmB0rAIMd4JVMFM0Gc+CTHoDwN5oxkPjVrGw=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.24.0/go.mod h1:G6Ov9M+FDOZXNw8eKXINO6XzqdUvTs7huwyQp5jLTBQ=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.26.0 h1:zDOZMGgGOrFF5m7+iqcQSQA/AJiG9xplNibL8SbLmn4=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.26.0/go.mod h1:G6Ov9M+FDOZXNw8eKXINO6XzqdUvTs7huwyQp5jLTBQ=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.30.0 h1:G2mA0Vsh629aTG8FkpnUmPsWtLQocwCFMLMANjT1wgg=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.30.0/go.mod h1:G6Ov9M+FDOZXNw8eKXINO6XzqdUvTs7huwyQp5jLTBQ=
|
||||
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd h1:rNuUHR+CvK1IS89MMtcF0EpcVMZtjKfPRp4MEmt/aTs=
|
||||
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI=
|
||||
github.com/hashicorp/go-plugin v1.0.1 h1:4OtAfUGbnKC6yS48p0CtMX2oFYtzFZVv6rok3cRWgnE=
|
||||
|
||||
@@ -2,5 +2,5 @@
|
||||
"npmClient": "yarn",
|
||||
"useWorkspaces": true,
|
||||
"packages": ["packages/*"],
|
||||
"version": "6.7.0-pre"
|
||||
"version": "6.7.1"
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"license": "Apache-2.0",
|
||||
"private": true,
|
||||
"name": "grafana",
|
||||
"version": "6.7.0-pre",
|
||||
"version": "6.7.1",
|
||||
"repository": "github:grafana/grafana",
|
||||
"devDependencies": {
|
||||
"@babel/core": "7.8.4",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/data",
|
||||
"version": "6.7.0-pre",
|
||||
"version": "6.7.1",
|
||||
"description": "Grafana Data Library",
|
||||
"keywords": [
|
||||
"typescript"
|
||||
|
||||
@@ -1,9 +1,23 @@
|
||||
const { resolve } = require('path');
|
||||
const wp = require('@cypress/webpack-preprocessor');
|
||||
|
||||
const anyNodeModules = /node_modules/;
|
||||
const packageRoot = resolve(`${__dirname}/../../`);
|
||||
const packageModules = `${packageRoot}/node_modules`;
|
||||
|
||||
const webpackOptions = {
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
include: modulePath => {
|
||||
if (!anyNodeModules.test(modulePath)) {
|
||||
// Is a file within the project
|
||||
return true;
|
||||
} else {
|
||||
// Is a file within this package
|
||||
return modulePath.startsWith(packageRoot) && !modulePath.startsWith(packageModules);
|
||||
}
|
||||
},
|
||||
test: /\.ts$/,
|
||||
use: [
|
||||
{
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"module": "commonjs",
|
||||
"types": ["cypress"]
|
||||
},
|
||||
"extends": "../tsconfig.json",
|
||||
"extends": "@grafana/tsconfig",
|
||||
"include": ["**/*.ts"]
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/e2e",
|
||||
"version": "6.7.0-pre",
|
||||
"version": "6.7.1",
|
||||
"description": "Grafana End-to-End Test Library",
|
||||
"keywords": [
|
||||
"cli",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/runtime",
|
||||
"version": "6.7.0-pre",
|
||||
"version": "6.7.1",
|
||||
"description": "Grafana Runtime Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -23,8 +23,8 @@
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grafana/data": "6.7.0-pre",
|
||||
"@grafana/ui": "6.7.0-pre",
|
||||
"@grafana/data": "6.7.1",
|
||||
"@grafana/ui": "6.7.1",
|
||||
"systemjs": "0.20.19",
|
||||
"systemjs-plugin-css": "0.1.37"
|
||||
},
|
||||
|
||||
@@ -13,6 +13,18 @@ import { getBackendSrv } from '../services';
|
||||
// Ideally internal (exported for consistency)
|
||||
const ExpressionDatasourceID = '__expr__';
|
||||
|
||||
export enum HealthStatus {
|
||||
Unknown = 'UNKNOWN',
|
||||
OK = 'OK',
|
||||
Error = 'ERROR',
|
||||
}
|
||||
|
||||
export interface HealthCheckResult {
|
||||
status: HealthStatus;
|
||||
message: string;
|
||||
details?: Record<string, any>;
|
||||
}
|
||||
|
||||
export class DataSourceWithBackend<
|
||||
TQuery extends DataQuery = DataQuery,
|
||||
TOptions extends DataSourceJsonData = DataSourceJsonData
|
||||
@@ -22,16 +34,13 @@ export class DataSourceWithBackend<
|
||||
}
|
||||
|
||||
/**
|
||||
* Ideally final -- any other implementation would be wrong!
|
||||
* Ideally final -- any other implementation may not work as expected
|
||||
*/
|
||||
query(request: DataQueryRequest): Observable<DataQueryResponse> {
|
||||
const { targets, intervalMs, maxDataPoints, range } = request;
|
||||
|
||||
let expressionCount = 0;
|
||||
const { targets, intervalMs, maxDataPoints, range, requestId } = request;
|
||||
const orgId = config.bootData.user.orgId;
|
||||
const queries = targets.map(q => {
|
||||
if (q.datasource === ExpressionDatasourceID) {
|
||||
expressionCount++;
|
||||
return {
|
||||
...q,
|
||||
datasourceId: this.id,
|
||||
@@ -53,7 +62,6 @@ export class DataSourceWithBackend<
|
||||
});
|
||||
|
||||
const body: any = {
|
||||
expressionCount,
|
||||
queries,
|
||||
};
|
||||
if (range) {
|
||||
@@ -63,10 +71,16 @@ export class DataSourceWithBackend<
|
||||
}
|
||||
|
||||
const req: Promise<DataQueryResponse> = getBackendSrv()
|
||||
.post('/api/ds/query', body)
|
||||
.datasourceRequest({
|
||||
url: '/api/ds/query',
|
||||
method: 'POST',
|
||||
data: body,
|
||||
requestId,
|
||||
})
|
||||
.then((rsp: any) => {
|
||||
return this.toDataQueryResponse(rsp);
|
||||
return this.toDataQueryResponse(rsp?.data);
|
||||
});
|
||||
|
||||
return from(req);
|
||||
}
|
||||
|
||||
@@ -101,8 +115,36 @@ export class DataSourceWithBackend<
|
||||
return getBackendSrv().post(`/api/datasources/${this.id}/resources/${path}`, { ...body });
|
||||
}
|
||||
|
||||
testDatasource() {
|
||||
// TODO, this will call the backend healthcheck endpoint
|
||||
return Promise.resolve({});
|
||||
/**
|
||||
* Run the datasource healthcheck
|
||||
*/
|
||||
async callHealthCheck(): Promise<HealthCheckResult> {
|
||||
return getBackendSrv()
|
||||
.get(`/api/datasources/${this.id}/health`)
|
||||
.then(v => {
|
||||
return v as HealthCheckResult;
|
||||
})
|
||||
.catch(err => {
|
||||
err.isHandled = true; // Avoid extra popup warning
|
||||
return err.data as HealthCheckResult;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the plugin health
|
||||
*/
|
||||
async testDatasource(): Promise<any> {
|
||||
return this.callHealthCheck().then(res => {
|
||||
if (res.status === HealthStatus.OK) {
|
||||
return {
|
||||
status: 'success',
|
||||
message: res.message,
|
||||
};
|
||||
}
|
||||
return {
|
||||
status: 'fail',
|
||||
message: res.message,
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/toolkit",
|
||||
"version": "6.7.0-pre",
|
||||
"version": "6.7.1",
|
||||
"description": "Grafana Toolkit",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -29,10 +29,10 @@
|
||||
"dependencies": {
|
||||
"@babel/core": "7.8.3",
|
||||
"@babel/preset-env": "7.8.3",
|
||||
"@grafana/data": "6.7.0-pre",
|
||||
"@grafana/data": "6.7.1",
|
||||
"@grafana/eslint-config": "^1.0.0-rc1",
|
||||
"@grafana/tsconfig": "^1.0.0-rc1",
|
||||
"@grafana/ui": "6.7.0-pre",
|
||||
"@grafana/ui": "6.7.1",
|
||||
"@types/command-exists": "^1.2.0",
|
||||
"@types/execa": "^0.9.0",
|
||||
"@types/expect-puppeteer": "3.3.1",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"author": "Grafana Labs",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@grafana/ui",
|
||||
"version": "6.7.0-pre",
|
||||
"version": "6.7.1",
|
||||
"description": "Grafana Components Library",
|
||||
"keywords": [
|
||||
"grafana",
|
||||
@@ -28,7 +28,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/core": "^10.0.27",
|
||||
"@grafana/data": "6.7.0-pre",
|
||||
"@grafana/data": "6.7.1",
|
||||
"@grafana/slate-react": "0.22.9-grafana",
|
||||
"@grafana/tsconfig": "^1.0.0-rc1",
|
||||
"@torkelo/react-select": "3.0.8",
|
||||
|
||||
@@ -14,6 +14,8 @@ import { RadioButtonGroup } from './RadioButtonGroup/RadioButtonGroup';
|
||||
import { Select } from './Select/Select';
|
||||
import Forms from './index';
|
||||
import mdx from './Form.mdx';
|
||||
import { boolean } from '@storybook/addon-knobs';
|
||||
import { TextArea } from './TextArea/TextArea';
|
||||
|
||||
export default {
|
||||
title: 'Forms/Test forms',
|
||||
@@ -48,6 +50,7 @@ interface FormDTO {
|
||||
switch: boolean;
|
||||
radio: string;
|
||||
select: string;
|
||||
text: string;
|
||||
nested: {
|
||||
path: string;
|
||||
};
|
||||
@@ -86,6 +89,10 @@ const renderForm = (defaultValues?: Partial<FormDTO>) => (
|
||||
<Input name="nested.path" placeholder="Nested path" size="md" ref={register} />
|
||||
</Field>
|
||||
|
||||
<Field label="Textarea" invalid={!!errors.text} error="Text is required">
|
||||
<TextArea name="text" placeholder="Long text" size="md" ref={register({ required: true })} />
|
||||
</Field>
|
||||
|
||||
<Field label="Checkbox" invalid={!!errors.checkbox} error="We need your consent">
|
||||
<Checkbox name="checkbox" label="Do you consent?" ref={register({ required: true })} />
|
||||
</Field>
|
||||
|
||||
@@ -47,15 +47,10 @@ export const getInputStyles = stylesFactory(({ theme, invalid = false }: StyleDe
|
||||
height: 100%;
|
||||
/* Min width specified for prefix/suffix classes used outside React component*/
|
||||
min-width: ${prefixSuffixStaticWidth};
|
||||
// Hack to fix font awesome icons
|
||||
> .fa {
|
||||
position: relative;
|
||||
top: 2px;
|
||||
}
|
||||
`;
|
||||
|
||||
return {
|
||||
// Wraps inputWraper and addons
|
||||
// Wraps inputWrapper and addons
|
||||
wrapper: cx(
|
||||
css`
|
||||
label: input-wrapper;
|
||||
@@ -154,37 +149,36 @@ export const getInputStyles = stylesFactory(({ theme, invalid = false }: StyleDe
|
||||
color: ${colors.formInputDisabledText};
|
||||
`,
|
||||
addon: css`
|
||||
label: input-addon;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
flex-grow: 0;
|
||||
flex-shrink: 0;
|
||||
position: relative;
|
||||
label: input-addon;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
flex-grow: 0;
|
||||
flex-shrink: 0;
|
||||
position: relative;
|
||||
|
||||
&:first-child {
|
||||
&:first-child {
|
||||
border-top-right-radius: 0;
|
||||
border-bottom-right-radius: 0;
|
||||
> :last-child {
|
||||
border-top-right-radius: 0;
|
||||
border-bottom-right-radius: 0;
|
||||
> :last-child {
|
||||
border-top-right-radius: 0;
|
||||
border-bottom-right-radius: 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&:last-child {
|
||||
&:last-child {
|
||||
border-top-left-radius: 0;
|
||||
border-bottom-left-radius: 0;
|
||||
> :first-child {
|
||||
border-top-left-radius: 0;
|
||||
border-bottom-left-radius: 0;
|
||||
> :first-child {
|
||||
border-top-left-radius: 0;
|
||||
border-bottom-left-radius: 0;
|
||||
}
|
||||
}
|
||||
> *:focus {
|
||||
/* we want anything that has focus and is an addon to be above input */
|
||||
z-index: 2;
|
||||
}
|
||||
}
|
||||
`,
|
||||
}
|
||||
> *:focus {
|
||||
/* we want anything that has focus and is an addon to be above input */
|
||||
z-index: 2;
|
||||
}
|
||||
`,
|
||||
prefix: cx(
|
||||
prefixSuffix,
|
||||
css`
|
||||
|
||||
@@ -14,7 +14,6 @@ export const IndicatorsContainer = React.forwardRef<HTMLDivElement, React.PropsW
|
||||
styles.suffix,
|
||||
css`
|
||||
position: relative;
|
||||
top: 1px;
|
||||
`
|
||||
)}
|
||||
ref={ref}
|
||||
|
||||
@@ -15,6 +15,7 @@ export function MultiSelect<T>(props: MultiSelectCommonProps<T>) {
|
||||
interface AsyncSelectProps<T> extends Omit<SelectCommonProps<T>, 'options'>, SelectAsyncProps<T> {
|
||||
// AsyncSelect has options stored internally. We cannot enable plain values as we don't have access to the fetched options
|
||||
value?: SelectableValue<T>;
|
||||
invalid?: boolean;
|
||||
}
|
||||
|
||||
export function AsyncSelect<T>(props: AsyncSelectProps<T>) {
|
||||
|
||||
@@ -32,6 +32,7 @@ const getTextAreaStyle = stylesFactory((theme: GrafanaTheme, invalid = false) =>
|
||||
border-radius: ${theme.border.radius.sm};
|
||||
padding: ${theme.spacing.formSpacingBase / 4}px ${theme.spacing.formSpacingBase}px;
|
||||
width: 100%;
|
||||
border-color: ${invalid ? theme.colors.redBase : theme.colors.formInputBorder};
|
||||
`
|
||||
),
|
||||
};
|
||||
|
||||
@@ -10,6 +10,7 @@ import { Field } from './Field';
|
||||
import { Button, LinkButton } from './Button';
|
||||
import { Switch } from './Switch';
|
||||
import { TextArea } from './TextArea/TextArea';
|
||||
import { Checkbox } from './Checkbox';
|
||||
|
||||
const Forms = {
|
||||
RadioButtonGroup,
|
||||
@@ -26,6 +27,7 @@ const Forms = {
|
||||
InputControl,
|
||||
AsyncSelect,
|
||||
TextArea,
|
||||
Checkbox,
|
||||
};
|
||||
|
||||
export { ButtonVariant } from './Button';
|
||||
|
||||
@@ -258,6 +258,7 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
apiRoute.Group("/plugins", func(pluginRoute routing.RouteRegister) {
|
||||
pluginRoute.Get("/:pluginId/dashboards/", Wrap(GetPluginDashboards))
|
||||
pluginRoute.Post("/:pluginId/settings", bind(models.UpdatePluginSettingCmd{}), Wrap(UpdatePluginSetting))
|
||||
pluginRoute.Get("/:pluginId/metrics", Wrap(hs.CollectPluginMetrics))
|
||||
}, reqOrgAdmin)
|
||||
|
||||
apiRoute.Get("/frontend/settings/", hs.GetFrontendSettings)
|
||||
@@ -265,6 +266,7 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
apiRoute.Any("/datasources/proxy/:id", reqSignedIn, hs.ProxyDataSourceRequest)
|
||||
apiRoute.Any("/datasources/:id/resources", hs.CallDatasourceResource)
|
||||
apiRoute.Any("/datasources/:id/resources/*", hs.CallDatasourceResource)
|
||||
apiRoute.Any("/datasources/:id/health", hs.CheckDatasourceHealth)
|
||||
|
||||
// Folders
|
||||
apiRoute.Group("/folders", func(folderRoute routing.RouteRegister) {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"sort"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/dtos"
|
||||
@@ -323,3 +324,90 @@ func convertModelToDtos(ds *models.DataSource) dtos.DataSource {
|
||||
|
||||
return dto
|
||||
}
|
||||
|
||||
// CheckDatasourceHealth sends a health check request to the plugin datasource
|
||||
// /api/datasource/:id/health
|
||||
func (hs *HTTPServer) CheckDatasourceHealth(c *models.ReqContext) {
|
||||
datasourceID := c.ParamsInt64("id")
|
||||
|
||||
ds, err := hs.DatasourceCache.GetDatasource(datasourceID, c.SignedInUser, c.SkipCache)
|
||||
if err != nil {
|
||||
if err == models.ErrDataSourceAccessDenied {
|
||||
c.JsonApiErr(403, "Access denied to datasource", err)
|
||||
return
|
||||
}
|
||||
c.JsonApiErr(500, "Unable to load datasource metadata", err)
|
||||
return
|
||||
}
|
||||
|
||||
plugin, ok := hs.PluginManager.GetDatasource(ds.Type)
|
||||
if !ok {
|
||||
c.JsonApiErr(500, "Unable to find datasource plugin", err)
|
||||
return
|
||||
}
|
||||
|
||||
config := &backendplugin.PluginConfig{
|
||||
OrgID: c.OrgId,
|
||||
PluginID: plugin.Id,
|
||||
DataSourceConfig: &backendplugin.DataSourceConfig{
|
||||
ID: ds.Id,
|
||||
Name: ds.Name,
|
||||
URL: ds.Url,
|
||||
Database: ds.Database,
|
||||
User: ds.User,
|
||||
BasicAuthEnabled: ds.BasicAuth,
|
||||
BasicAuthUser: ds.BasicAuthUser,
|
||||
JSONData: ds.JsonData,
|
||||
DecryptedSecureJSONData: ds.DecryptedValues(),
|
||||
Updated: ds.Updated,
|
||||
},
|
||||
}
|
||||
|
||||
resp, err := hs.BackendPluginManager.CheckHealth(c.Req.Context(), config)
|
||||
if err != nil {
|
||||
if err == backendplugin.ErrPluginNotRegistered {
|
||||
c.JsonApiErr(404, "Plugin not found", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Return status unknown instead?
|
||||
if err == backendplugin.ErrDiagnosticsNotSupported {
|
||||
c.JsonApiErr(404, "Health check not implemented", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Return status unknown or error instead?
|
||||
if err == backendplugin.ErrHealthCheckFailed {
|
||||
c.JsonApiErr(500, "Plugin health check failed", err)
|
||||
return
|
||||
}
|
||||
|
||||
c.JsonApiErr(500, "Plugin healthcheck returned an unknown error", err)
|
||||
return
|
||||
}
|
||||
|
||||
var jsonDetails map[string]interface{}
|
||||
payload := map[string]interface{}{
|
||||
"status": resp.Status.String(),
|
||||
"message": resp.Message,
|
||||
"details": jsonDetails,
|
||||
}
|
||||
|
||||
// Unmarshal JSONDetails if it's not empty.
|
||||
if len(resp.JSONDetails) > 0 {
|
||||
err = json.Unmarshal(resp.JSONDetails, &jsonDetails)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Failed to unmarshal detailed response from backend plugin", err)
|
||||
return
|
||||
}
|
||||
|
||||
payload["details"] = jsonDetails
|
||||
}
|
||||
|
||||
if resp.Status != backendplugin.HealthStatusOk {
|
||||
c.JSON(503, payload)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(200, payload)
|
||||
}
|
||||
|
||||
@@ -73,6 +73,7 @@ type HTTPServer struct {
|
||||
Login *login.LoginService `inject:""`
|
||||
License models.Licensing `inject:""`
|
||||
BackendPluginManager backendplugin.Manager `inject:""`
|
||||
PluginManager *plugins.PluginManager `inject:""`
|
||||
}
|
||||
|
||||
func (hs *HTTPServer) Init() error {
|
||||
|
||||
@@ -19,10 +19,6 @@ import (
|
||||
// QueryMetricsV2 returns query metrics
|
||||
// POST /api/ds/query DataSource query w/ expressions
|
||||
func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDto dtos.MetricRequest) Response {
|
||||
if !setting.IsExpressionsEnabled() {
|
||||
return Error(404, "Expressions feature toggle is not enabled", nil)
|
||||
}
|
||||
|
||||
if len(reqDto.Queries) == 0 {
|
||||
return Error(500, "No queries found in query", nil)
|
||||
}
|
||||
@@ -76,6 +72,10 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDto dtos.MetricReq
|
||||
return Error(500, "Metric request error", err)
|
||||
}
|
||||
} else {
|
||||
if !setting.IsExpressionsEnabled() {
|
||||
return Error(404, "Expressions feature toggle is not enabled", nil)
|
||||
}
|
||||
|
||||
resp, err = plugins.Transform.Transform(c.Req.Context(), request)
|
||||
if err != nil {
|
||||
return Error(500, "Transform request error", err)
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/http"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/util/errutil"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/dtos"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
@@ -14,6 +17,41 @@ import (
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
|
||||
// ErrPluginNotFound is returned when an requested plugin is not installed.
|
||||
var ErrPluginNotFound error = errors.New("plugin not found, no installed plugin with that id")
|
||||
|
||||
func (hs *HTTPServer) getPluginConfig(pluginID string, user *models.SignedInUser) (backendplugin.PluginConfig, error) {
|
||||
pluginConfig := backendplugin.PluginConfig{}
|
||||
plugin, exists := plugins.Plugins[pluginID]
|
||||
if !exists {
|
||||
return pluginConfig, ErrPluginNotFound
|
||||
}
|
||||
|
||||
var jsonData *simplejson.Json
|
||||
var decryptedSecureJSONData map[string]string
|
||||
var updated time.Time
|
||||
|
||||
ps, err := hs.getCachedPluginSettings(pluginID, user)
|
||||
if err != nil {
|
||||
if err != models.ErrPluginSettingNotFound {
|
||||
return pluginConfig, errutil.Wrap("Failed to get plugin settings", err)
|
||||
}
|
||||
jsonData = simplejson.New()
|
||||
decryptedSecureJSONData = make(map[string]string)
|
||||
} else {
|
||||
decryptedSecureJSONData = ps.DecryptedValues()
|
||||
updated = ps.Updated
|
||||
}
|
||||
|
||||
return backendplugin.PluginConfig{
|
||||
OrgID: user.OrgId,
|
||||
PluginID: plugin.Id,
|
||||
JSONData: jsonData,
|
||||
DecryptedSecureJSONData: decryptedSecureJSONData,
|
||||
Updated: updated,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (hs *HTTPServer) GetPluginList(c *models.ReqContext) Response {
|
||||
typeFilter := c.Query("type")
|
||||
enabledFilter := c.Query("enabled")
|
||||
@@ -205,11 +243,54 @@ func ImportDashboard(c *models.ReqContext, apiCmd dtos.ImportDashboardCommand) R
|
||||
return JSON(200, cmd.Result)
|
||||
}
|
||||
|
||||
// CollectPluginMetrics collect metrics from a plugin.
|
||||
//
|
||||
// /api/plugins/:pluginId/metrics
|
||||
func (hs *HTTPServer) CollectPluginMetrics(c *models.ReqContext) Response {
|
||||
pluginID := c.Params("pluginId")
|
||||
plugin, exists := plugins.Plugins[pluginID]
|
||||
if !exists {
|
||||
return Error(404, "Plugin not found, no installed plugin with that id", nil)
|
||||
}
|
||||
|
||||
resp, err := hs.BackendPluginManager.CollectMetrics(c.Req.Context(), plugin.Id)
|
||||
if err != nil {
|
||||
if err == backendplugin.ErrPluginNotRegistered {
|
||||
return Error(404, "Plugin not found", err)
|
||||
}
|
||||
|
||||
if err == backendplugin.ErrDiagnosticsNotSupported {
|
||||
return Error(404, "Health check not implemented", err)
|
||||
}
|
||||
|
||||
return Error(500, "Collect plugin metrics failed", err)
|
||||
}
|
||||
|
||||
headers := make(http.Header)
|
||||
headers.Set("Content-Type", "text/plain")
|
||||
|
||||
return &NormalResponse{
|
||||
header: headers,
|
||||
body: resp.PrometheusMetrics,
|
||||
status: http.StatusOK,
|
||||
}
|
||||
}
|
||||
|
||||
// CheckHealth returns the health of a plugin.
|
||||
// /api/plugins/:pluginId/health
|
||||
func (hs *HTTPServer) CheckHealth(c *models.ReqContext) Response {
|
||||
pluginID := c.Params("pluginId")
|
||||
resp, err := hs.BackendPluginManager.CheckHealth(c.Req.Context(), pluginID)
|
||||
|
||||
config, err := hs.getPluginConfig(pluginID, c.SignedInUser)
|
||||
if err != nil {
|
||||
if err == ErrPluginNotFound {
|
||||
return Error(404, "Plugin not found, no installed plugin with that id", nil)
|
||||
}
|
||||
|
||||
return Error(500, "Failed to get plugin settings", err)
|
||||
}
|
||||
|
||||
resp, err := hs.BackendPluginManager.CheckHealth(c.Req.Context(), &config)
|
||||
if err != nil {
|
||||
if err == backendplugin.ErrPluginNotRegistered {
|
||||
return Error(404, "Plugin not found", err)
|
||||
@@ -224,6 +305,8 @@ func (hs *HTTPServer) CheckHealth(c *models.ReqContext) Response {
|
||||
if err == backendplugin.ErrHealthCheckFailed {
|
||||
return Error(500, "Plugin health check failed", err)
|
||||
}
|
||||
|
||||
return Error(500, "Plugin healthcheck returned an unknown error", err)
|
||||
}
|
||||
|
||||
payload := map[string]interface{}{
|
||||
@@ -239,39 +322,23 @@ func (hs *HTTPServer) CheckHealth(c *models.ReqContext) Response {
|
||||
return JSON(200, payload)
|
||||
}
|
||||
|
||||
// CallResource passes a resource call from a plugin to the backend plugin.
|
||||
//
|
||||
// /api/plugins/:pluginId/resources/*
|
||||
func (hs *HTTPServer) CallResource(c *models.ReqContext) {
|
||||
pluginID := c.Params("pluginId")
|
||||
plugin, exists := plugins.Plugins[pluginID]
|
||||
if !exists {
|
||||
c.JsonApiErr(404, "Plugin not found, no installed plugin with that id", nil)
|
||||
|
||||
config, err := hs.getPluginConfig(pluginID, c.SignedInUser)
|
||||
if err != nil {
|
||||
if err == ErrPluginNotFound {
|
||||
c.JsonApiErr(404, "Plugin not found, no installed plugin with that id", nil)
|
||||
return
|
||||
}
|
||||
|
||||
c.JsonApiErr(500, "Failed to get plugin settings", err)
|
||||
return
|
||||
}
|
||||
|
||||
var jsonData *simplejson.Json
|
||||
var decryptedSecureJSONData map[string]string
|
||||
var updated time.Time
|
||||
|
||||
ps, err := hs.getCachedPluginSettings(pluginID, c.SignedInUser)
|
||||
if err != nil {
|
||||
if err != models.ErrPluginSettingNotFound {
|
||||
c.JsonApiErr(500, "Failed to get plugin settings", err)
|
||||
return
|
||||
}
|
||||
jsonData = simplejson.New()
|
||||
decryptedSecureJSONData = make(map[string]string)
|
||||
} else {
|
||||
decryptedSecureJSONData = ps.DecryptedValues()
|
||||
updated = ps.Updated
|
||||
}
|
||||
|
||||
config := backendplugin.PluginConfig{
|
||||
OrgID: c.OrgId,
|
||||
PluginID: plugin.Id,
|
||||
JSONData: jsonData,
|
||||
DecryptedSecureJSONData: decryptedSecureJSONData,
|
||||
Updated: updated,
|
||||
}
|
||||
hs.BackendPluginManager.CallResource(config, c, c.Params("*"))
|
||||
}
|
||||
|
||||
|
||||
@@ -1,26 +1,20 @@
|
||||
package backendplugin
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/genproto/pluginv2"
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/prometheus/common/expfmt"
|
||||
"google.golang.org/grpc/codes"
|
||||
"google.golang.org/grpc/status"
|
||||
|
||||
datasourceV1 "github.com/grafana/grafana-plugin-model/go/datasource"
|
||||
rendererV1 "github.com/grafana/grafana-plugin-model/go/renderer"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/collector"
|
||||
"github.com/grafana/grafana/pkg/util/errutil"
|
||||
plugin "github.com/hashicorp/go-plugin"
|
||||
dto "github.com/prometheus/client_model/go"
|
||||
)
|
||||
|
||||
// BackendPlugin a registered backend plugin.
|
||||
@@ -140,59 +134,70 @@ func (p *BackendPlugin) supportsDiagnostics() bool {
|
||||
}
|
||||
|
||||
// CollectMetrics implements the collector.Collector interface.
|
||||
func (p *BackendPlugin) CollectMetrics(ctx context.Context, ch chan<- prometheus.Metric) error {
|
||||
if p.diagnostics == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if p.client == nil || p.client.Exited() {
|
||||
return nil
|
||||
func (p *BackendPlugin) CollectMetrics(ctx context.Context) (*pluginv2.CollectMetricsResponse, error) {
|
||||
if p.diagnostics == nil || p.client == nil || p.client.Exited() {
|
||||
return &pluginv2.CollectMetricsResponse{
|
||||
Metrics: &pluginv2.CollectMetricsResponse_Payload{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
res, err := p.diagnostics.CollectMetrics(ctx, &pluginv2.CollectMetricsRequest{})
|
||||
if err != nil {
|
||||
if st, ok := status.FromError(err); ok {
|
||||
if st.Code() == codes.Unimplemented {
|
||||
return nil
|
||||
return &pluginv2.CollectMetricsResponse{
|
||||
Metrics: &pluginv2.CollectMetricsResponse_Payload{},
|
||||
}, nil
|
||||
}
|
||||
}
|
||||
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if res == nil || res.Metrics == nil || res.Metrics.Prometheus == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
reader := bytes.NewReader(res.Metrics.Prometheus)
|
||||
var parser expfmt.TextParser
|
||||
families, err := parser.TextToMetricFamilies(reader)
|
||||
if err != nil {
|
||||
return errutil.Wrap("failed to parse collected metrics", err)
|
||||
}
|
||||
|
||||
for _, mf := range families {
|
||||
if mf.Help == nil {
|
||||
help := fmt.Sprintf("Metric read from %s plugin", p.id)
|
||||
mf.Help = &help
|
||||
}
|
||||
}
|
||||
|
||||
for _, mf := range families {
|
||||
convertMetricFamily(p.id, mf, ch, p.logger)
|
||||
}
|
||||
|
||||
return nil
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (p *BackendPlugin) checkHealth(ctx context.Context) (*pluginv2.CheckHealthResponse, error) {
|
||||
func (p *BackendPlugin) checkHealth(ctx context.Context, config *PluginConfig) (*pluginv2.CheckHealthResponse, error) {
|
||||
if p.diagnostics == nil || p.client == nil || p.client.Exited() {
|
||||
return &pluginv2.CheckHealthResponse{
|
||||
Status: pluginv2.CheckHealthResponse_UNKNOWN,
|
||||
}, nil
|
||||
}
|
||||
|
||||
res, err := p.diagnostics.CheckHealth(ctx, &pluginv2.CheckHealthRequest{})
|
||||
jsonDataBytes, err := config.JSONData.ToDB()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pconfig := &pluginv2.PluginConfig{
|
||||
OrgId: config.OrgID,
|
||||
PluginId: config.PluginID,
|
||||
JsonData: jsonDataBytes,
|
||||
DecryptedSecureJsonData: config.DecryptedSecureJSONData,
|
||||
LastUpdatedMS: config.Updated.UnixNano() / int64(time.Millisecond),
|
||||
}
|
||||
|
||||
if config.DataSourceConfig != nil {
|
||||
datasourceJSONData, err := config.DataSourceConfig.JSONData.ToDB()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pconfig.DatasourceConfig = &pluginv2.DataSourceConfig{
|
||||
Id: config.DataSourceConfig.ID,
|
||||
Name: config.DataSourceConfig.Name,
|
||||
Url: config.DataSourceConfig.URL,
|
||||
User: config.DataSourceConfig.User,
|
||||
Database: config.DataSourceConfig.Database,
|
||||
BasicAuthEnabled: config.DataSourceConfig.BasicAuthEnabled,
|
||||
BasicAuthUser: config.DataSourceConfig.BasicAuthUser,
|
||||
JsonData: datasourceJSONData,
|
||||
DecryptedSecureJsonData: config.DataSourceConfig.DecryptedSecureJSONData,
|
||||
LastUpdatedMS: config.DataSourceConfig.Updated.Unix() / int64(time.Millisecond),
|
||||
}
|
||||
}
|
||||
|
||||
res, err := p.diagnostics.CheckHealth(ctx, &pluginv2.CheckHealthRequest{Config: pconfig})
|
||||
if err != nil {
|
||||
if st, ok := status.FromError(err); ok {
|
||||
if st.Code() == codes.Unimplemented {
|
||||
@@ -288,112 +293,3 @@ func (p *BackendPlugin) callResource(ctx context.Context, req CallResourceReques
|
||||
stream: protoStream,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// convertMetricFamily converts metric family to prometheus.Metric.
|
||||
// Copied from https://github.com/prometheus/node_exporter/blob/3ddc82c2d8d11eec53ed5faa8db969a1bb81f8bb/collector/textfile.go#L66-L165
|
||||
func convertMetricFamily(pluginID string, metricFamily *dto.MetricFamily, ch chan<- prometheus.Metric, logger log.Logger) {
|
||||
var valType prometheus.ValueType
|
||||
var val float64
|
||||
|
||||
allLabelNames := map[string]struct{}{}
|
||||
for _, metric := range metricFamily.Metric {
|
||||
labels := metric.GetLabel()
|
||||
for _, label := range labels {
|
||||
if _, ok := allLabelNames[label.GetName()]; !ok {
|
||||
allLabelNames[label.GetName()] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, metric := range metricFamily.Metric {
|
||||
if metric.TimestampMs != nil {
|
||||
logger.Warn("Ignoring unsupported custom timestamp on metric", "metric", metric)
|
||||
}
|
||||
|
||||
labels := metric.GetLabel()
|
||||
var names []string
|
||||
var values []string
|
||||
for _, label := range labels {
|
||||
names = append(names, label.GetName())
|
||||
values = append(values, label.GetValue())
|
||||
}
|
||||
names = append(names, "plugin_id")
|
||||
values = append(values, pluginID)
|
||||
|
||||
for k := range allLabelNames {
|
||||
present := false
|
||||
for _, name := range names {
|
||||
if k == name {
|
||||
present = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !present {
|
||||
names = append(names, k)
|
||||
values = append(values, "")
|
||||
}
|
||||
}
|
||||
|
||||
metricName := prometheus.BuildFQName(collector.Namespace, "", *metricFamily.Name)
|
||||
|
||||
metricType := metricFamily.GetType()
|
||||
switch metricType {
|
||||
case dto.MetricType_COUNTER:
|
||||
valType = prometheus.CounterValue
|
||||
val = metric.Counter.GetValue()
|
||||
|
||||
case dto.MetricType_GAUGE:
|
||||
valType = prometheus.GaugeValue
|
||||
val = metric.Gauge.GetValue()
|
||||
|
||||
case dto.MetricType_UNTYPED:
|
||||
valType = prometheus.UntypedValue
|
||||
val = metric.Untyped.GetValue()
|
||||
|
||||
case dto.MetricType_SUMMARY:
|
||||
quantiles := map[float64]float64{}
|
||||
for _, q := range metric.Summary.Quantile {
|
||||
quantiles[q.GetQuantile()] = q.GetValue()
|
||||
}
|
||||
ch <- prometheus.MustNewConstSummary(
|
||||
prometheus.NewDesc(
|
||||
metricName,
|
||||
metricFamily.GetHelp(),
|
||||
names, nil,
|
||||
),
|
||||
metric.Summary.GetSampleCount(),
|
||||
metric.Summary.GetSampleSum(),
|
||||
quantiles, values...,
|
||||
)
|
||||
case dto.MetricType_HISTOGRAM:
|
||||
buckets := map[float64]uint64{}
|
||||
for _, b := range metric.Histogram.Bucket {
|
||||
buckets[b.GetUpperBound()] = b.GetCumulativeCount()
|
||||
}
|
||||
ch <- prometheus.MustNewConstHistogram(
|
||||
prometheus.NewDesc(
|
||||
metricName,
|
||||
metricFamily.GetHelp(),
|
||||
names, nil,
|
||||
),
|
||||
metric.Histogram.GetSampleCount(),
|
||||
metric.Histogram.GetSampleSum(),
|
||||
buckets, values...,
|
||||
)
|
||||
default:
|
||||
logger.Error("unknown metric type", "type", metricType)
|
||||
continue
|
||||
}
|
||||
|
||||
if metricType == dto.MetricType_GAUGE || metricType == dto.MetricType_COUNTER || metricType == dto.MetricType_UNTYPED {
|
||||
ch <- prometheus.MustNewConstMetric(
|
||||
prometheus.NewDesc(
|
||||
metricName,
|
||||
metricFamily.GetHelp(),
|
||||
names, nil,
|
||||
),
|
||||
valType, val, values...,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -101,7 +101,7 @@ func NewRendererPluginDescriptor(pluginID, executablePath string, startFns Plugi
|
||||
}
|
||||
|
||||
type DiagnosticsPlugin interface {
|
||||
plugin.DiagnosticsServer
|
||||
plugin.DiagnosticsClient
|
||||
}
|
||||
|
||||
type ResourcePlugin interface {
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
package collector
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
)
|
||||
|
||||
// Namespace collector metric namespace
|
||||
const Namespace = "grafana_plugin"
|
||||
|
||||
var (
|
||||
scrapeDurationDesc = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(Namespace, "scrape", "duration_seconds"),
|
||||
"grafana_plugin: Duration of a plugin collector scrape.",
|
||||
[]string{"plugin_id"},
|
||||
nil,
|
||||
)
|
||||
scrapeSuccessDesc = prometheus.NewDesc(
|
||||
prometheus.BuildFQName(Namespace, "scrape", "success"),
|
||||
"grafana_plugin: Whether a plugin collector succeeded.",
|
||||
[]string{"plugin_id"},
|
||||
nil,
|
||||
)
|
||||
)
|
||||
|
||||
// Collector is the interface a plugin collector has to implement.
|
||||
type Collector interface {
|
||||
// Get new metrics and expose them via prometheus registry.
|
||||
CollectMetrics(ctx context.Context, ch chan<- prometheus.Metric) error
|
||||
}
|
||||
|
||||
// PluginCollector implements the prometheus.Collector interface.
|
||||
type PluginCollector struct {
|
||||
collectors map[string]Collector
|
||||
logger log.Logger
|
||||
}
|
||||
|
||||
// NewPluginCollector creates a new PluginCollector..
|
||||
func NewPluginCollector() PluginCollector {
|
||||
return PluginCollector{
|
||||
collectors: make(map[string]Collector),
|
||||
logger: log.New("plugins.backend.collector"),
|
||||
}
|
||||
}
|
||||
|
||||
func (pc PluginCollector) Register(pluginID string, c Collector) {
|
||||
pc.collectors[pluginID] = c
|
||||
}
|
||||
|
||||
// Describe implements the prometheus.Collector interface.
|
||||
func (pc PluginCollector) Describe(ch chan<- *prometheus.Desc) {
|
||||
ch <- scrapeDurationDesc
|
||||
ch <- scrapeSuccessDesc
|
||||
}
|
||||
|
||||
// Collect implements the prometheus.Collector interface.
|
||||
func (pc PluginCollector) Collect(ch chan<- prometheus.Metric) {
|
||||
ctx := context.Background()
|
||||
wg := sync.WaitGroup{}
|
||||
wg.Add(len(pc.collectors))
|
||||
for name, c := range pc.collectors {
|
||||
go func(name string, c Collector) {
|
||||
execute(ctx, name, c, ch, pc.logger)
|
||||
wg.Done()
|
||||
}(name, c)
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
func execute(ctx context.Context, pluginID string, c Collector, ch chan<- prometheus.Metric, logger log.Logger) {
|
||||
begin := time.Now()
|
||||
err := c.CollectMetrics(ctx, ch)
|
||||
duration := time.Since(begin)
|
||||
var success float64
|
||||
|
||||
if err != nil {
|
||||
logger.Error("collector failed", "pluginId", pluginID, "took", duration, "error", err)
|
||||
success = 0
|
||||
} else {
|
||||
logger.Debug("collector succeeded", "pluginId", pluginID, "took", duration)
|
||||
success = 1
|
||||
}
|
||||
ch <- prometheus.MustNewConstMetric(scrapeDurationDesc, prometheus.GaugeValue, duration.Seconds(), pluginID)
|
||||
ch <- prometheus.MustNewConstMetric(scrapeSuccessDesc, prometheus.GaugeValue, success, pluginID)
|
||||
}
|
||||
@@ -40,7 +40,7 @@ func (hs HealthStatus) String() string {
|
||||
type CheckHealthResult struct {
|
||||
Status HealthStatus
|
||||
Message string
|
||||
JSONDetails string
|
||||
JSONDetails []byte
|
||||
}
|
||||
|
||||
func checkHealthResultFromProto(protoResp *pluginv2.CheckHealthResponse) *CheckHealthResult {
|
||||
@@ -59,6 +59,23 @@ func checkHealthResultFromProto(protoResp *pluginv2.CheckHealthResponse) *CheckH
|
||||
}
|
||||
}
|
||||
|
||||
func collectMetricsResultFromProto(protoResp *pluginv2.CollectMetricsResponse) *CollectMetricsResult {
|
||||
var prometheusMetrics []byte
|
||||
|
||||
if protoResp.Metrics != nil {
|
||||
prometheusMetrics = protoResp.Metrics.Prometheus
|
||||
}
|
||||
|
||||
return &CollectMetricsResult{
|
||||
PrometheusMetrics: prometheusMetrics,
|
||||
}
|
||||
}
|
||||
|
||||
// CollectMetricsResult collect metrics result.
|
||||
type CollectMetricsResult struct {
|
||||
PrometheusMetrics []byte
|
||||
}
|
||||
|
||||
type DataSourceConfig struct {
|
||||
ID int64
|
||||
Name string
|
||||
|
||||
@@ -10,10 +10,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/util/proxyutil"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/collector"
|
||||
"github.com/grafana/grafana/pkg/registry"
|
||||
plugin "github.com/hashicorp/go-plugin"
|
||||
"golang.org/x/xerrors"
|
||||
@@ -42,24 +39,23 @@ type Manager interface {
|
||||
Register(descriptor PluginDescriptor) error
|
||||
// StartPlugin starts a non-managed backend plugin
|
||||
StartPlugin(ctx context.Context, pluginID string) error
|
||||
// CollectMetrics collects metrics from a registered backend plugin.
|
||||
CollectMetrics(ctx context.Context, pluginID string) (*CollectMetricsResult, error)
|
||||
// CheckHealth checks the health of a registered backend plugin.
|
||||
CheckHealth(ctx context.Context, pluginID string) (*CheckHealthResult, error)
|
||||
CheckHealth(ctx context.Context, pluginConfig *PluginConfig) (*CheckHealthResult, error)
|
||||
// CallResource calls a plugin resource.
|
||||
CallResource(pluginConfig PluginConfig, ctx *models.ReqContext, path string)
|
||||
}
|
||||
|
||||
type manager struct {
|
||||
pluginsMu sync.RWMutex
|
||||
plugins map[string]*BackendPlugin
|
||||
pluginCollector collector.PluginCollector
|
||||
logger log.Logger
|
||||
pluginsMu sync.RWMutex
|
||||
plugins map[string]*BackendPlugin
|
||||
logger log.Logger
|
||||
}
|
||||
|
||||
func (m *manager) Init() error {
|
||||
m.plugins = make(map[string]*BackendPlugin)
|
||||
m.logger = log.New("plugins.backend")
|
||||
m.pluginCollector = collector.NewPluginCollector()
|
||||
prometheus.MustRegister(m.pluginCollector)
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -111,11 +107,6 @@ func (m *manager) start(ctx context.Context) {
|
||||
p.logger.Error("Failed to start plugin", "error", err)
|
||||
continue
|
||||
}
|
||||
|
||||
if p.supportsDiagnostics() {
|
||||
p.logger.Debug("Registering metrics collector")
|
||||
m.pluginCollector.Register(p.id, p)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -150,8 +141,8 @@ func (m *manager) stop() {
|
||||
}
|
||||
}
|
||||
|
||||
// CheckHealth checks the health of a registered backend plugin.
|
||||
func (m *manager) CheckHealth(ctx context.Context, pluginID string) (*CheckHealthResult, error) {
|
||||
// CollectMetrics collects metrics from a registered backend plugin.
|
||||
func (m *manager) CollectMetrics(ctx context.Context, pluginID string) (*CollectMetricsResult, error) {
|
||||
m.pluginsMu.RLock()
|
||||
p, registered := m.plugins[pluginID]
|
||||
m.pluginsMu.RUnlock()
|
||||
@@ -164,7 +155,29 @@ func (m *manager) CheckHealth(ctx context.Context, pluginID string) (*CheckHealt
|
||||
return nil, ErrDiagnosticsNotSupported
|
||||
}
|
||||
|
||||
res, err := p.checkHealth(ctx)
|
||||
res, err := p.CollectMetrics(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return collectMetricsResultFromProto(res), nil
|
||||
}
|
||||
|
||||
// CheckHealth checks the health of a registered backend plugin.
|
||||
func (m *manager) CheckHealth(ctx context.Context, pluginConfig *PluginConfig) (*CheckHealthResult, error) {
|
||||
m.pluginsMu.RLock()
|
||||
p, registered := m.plugins[pluginConfig.PluginID]
|
||||
m.pluginsMu.RUnlock()
|
||||
|
||||
if !registered {
|
||||
return nil, ErrPluginNotRegistered
|
||||
}
|
||||
|
||||
if !p.supportsDiagnostics() {
|
||||
return nil, ErrDiagnosticsNotSupported
|
||||
}
|
||||
|
||||
res, err := p.checkHealth(ctx, pluginConfig)
|
||||
if err != nil {
|
||||
p.logger.Error("Failed to check plugin health", "error", err)
|
||||
return nil, ErrHealthCheckFailed
|
||||
|
||||
@@ -188,6 +188,15 @@ func (pm *PluginManager) scan(pluginDir string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetDatasource returns a datasource based on passed pluginID if it exists
|
||||
//
|
||||
// This function fetches the datasource from the global variable DataSources in this package.
|
||||
// Rather then refactor all dependencies on the global variable we can use this as an transition.
|
||||
func (pm *PluginManager) GetDatasource(pluginID string) (*DataSourcePlugin, bool) {
|
||||
ds, exist := DataSources[pluginID]
|
||||
return ds, exist
|
||||
}
|
||||
|
||||
func (scanner *PluginScanner) walker(currentPath string, f os.FileInfo, err error) error {
|
||||
// We scan all the subfolders for plugin.json (with some exceptions) so that we also load embedded plugins, for
|
||||
// example https://github.com/raintank/worldping-app/tree/master/dist/grafana-worldmap-panel worldmap panel plugin
|
||||
|
||||
@@ -145,6 +145,10 @@ func addAnnotationMig(mg *Migrator) {
|
||||
mg.AddMigration("Remove index org_id_epoch_epoch_end from annotation table", NewDropIndexMigration(table, &Index{
|
||||
Cols: []string{"org_id", "epoch", "epoch_end"}, Type: IndexType,
|
||||
}))
|
||||
|
||||
mg.AddMigration("Add index for alert_id on annotation table", NewAddIndexMigration(table, &Index{
|
||||
Cols: []string{"alert_id"}, Type: IndexType,
|
||||
}))
|
||||
}
|
||||
|
||||
type AddMakeRegionSingleRowMigration struct {
|
||||
|
||||
@@ -12,6 +12,8 @@ export interface Props {
|
||||
currentDashboard?: SelectableValue<number>;
|
||||
size?: FormInputSize;
|
||||
isClearable?: boolean;
|
||||
invalid?: boolean;
|
||||
disabled?: boolean;
|
||||
}
|
||||
|
||||
const getDashboards = (query = '') => {
|
||||
@@ -24,7 +26,14 @@ const getDashboards = (query = '') => {
|
||||
});
|
||||
};
|
||||
|
||||
export const DashboardPicker: FC<Props> = ({ onSelected, currentDashboard, size = 'md', isClearable = false }) => {
|
||||
export const DashboardPicker: FC<Props> = ({
|
||||
onSelected,
|
||||
currentDashboard,
|
||||
size = 'md',
|
||||
isClearable = false,
|
||||
invalid,
|
||||
disabled,
|
||||
}) => {
|
||||
const debouncedSearch = debounce(getDashboards, 300, {
|
||||
leading: true,
|
||||
trailing: true,
|
||||
@@ -43,6 +52,8 @@ export const DashboardPicker: FC<Props> = ({ onSelected, currentDashboard, size
|
||||
placeholder="Select dashboard"
|
||||
noOptionsMessage="No dashboards found"
|
||||
value={currentDashboard}
|
||||
invalid={invalid}
|
||||
disabled={disabled}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import _ from 'lodash';
|
||||
import coreModule from '../../core_module';
|
||||
import { ISCEService } from 'angular';
|
||||
import { promiseToDigest } from 'app/core/utils/promiseToDigest';
|
||||
|
||||
function typeaheadMatcher(this: any, item: string) {
|
||||
let str = this.query;
|
||||
@@ -101,8 +102,7 @@ export class FormDropdownCtrl {
|
||||
}
|
||||
|
||||
getOptionsInternal(query: string) {
|
||||
const result = this.getOptions({ $query: query });
|
||||
return Promise.resolve(result);
|
||||
return promiseToDigest(this.$scope)(Promise.resolve(this.getOptions({ $query: query })));
|
||||
}
|
||||
|
||||
isPromiseLike(obj: any) {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import omitBy from 'lodash/omitBy';
|
||||
import { from, merge, MonoTypeOperatorFunction, Observable, of, Subject, throwError } from 'rxjs';
|
||||
import { catchError, filter, map, mergeMap, retryWhen, share, takeUntil, tap, throwIfEmpty } from 'rxjs/operators';
|
||||
import { fromFetch } from 'rxjs/fetch';
|
||||
@@ -14,6 +13,7 @@ import { ContextSrv, contextSrv } from './context_srv';
|
||||
import { coreModule } from 'app/core/core_module';
|
||||
import { Emitter } from '../utils/emitter';
|
||||
import { DataSourceResponse } from '../../types/events';
|
||||
import { parseInitFromOptions, parseUrlFromOptions } from '../utils/fetch';
|
||||
|
||||
export interface DatasourceRequestOptions {
|
||||
retry?: number;
|
||||
@@ -54,18 +54,6 @@ enum CancellationType {
|
||||
dataSourceRequest,
|
||||
}
|
||||
|
||||
function serializeParams(data: Record<string, any>): string {
|
||||
return Object.keys(data)
|
||||
.map(key => {
|
||||
const value = data[key];
|
||||
if (Array.isArray(value)) {
|
||||
return value.map(arrayValue => `${encodeURIComponent(key)}=${encodeURIComponent(arrayValue)}`).join('&');
|
||||
}
|
||||
return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`;
|
||||
})
|
||||
.join('&');
|
||||
}
|
||||
|
||||
export interface BackendSrvDependencies {
|
||||
fromFetch: (input: string | Request, init?: RequestInit) => Observable<Response>;
|
||||
appEvents: Emitter;
|
||||
@@ -580,36 +568,3 @@ coreModule.factory('backendSrv', () => backendSrv);
|
||||
// Used for testing and things that really need BackendSrv
|
||||
export const backendSrv = new BackendSrv();
|
||||
export const getBackendSrv = (): BackendSrv => backendSrv;
|
||||
|
||||
export const parseUrlFromOptions = (options: BackendSrvRequest): string => {
|
||||
const cleanParams = omitBy(options.params, v => v === undefined || (v && v.length === 0));
|
||||
const serializedParams = serializeParams(cleanParams);
|
||||
return options.params && serializedParams.length ? `${options.url}?${serializedParams}` : options.url;
|
||||
};
|
||||
|
||||
export const parseInitFromOptions = (options: BackendSrvRequest): RequestInit => {
|
||||
const method = options.method;
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json, text/plain, */*',
|
||||
...options.headers,
|
||||
};
|
||||
const body = parseBody({ ...options, headers });
|
||||
return {
|
||||
method,
|
||||
headers,
|
||||
body,
|
||||
};
|
||||
};
|
||||
|
||||
const parseBody = (options: BackendSrvRequest) => {
|
||||
if (!options.data || typeof options.data === 'string') {
|
||||
return options.data;
|
||||
}
|
||||
|
||||
if (options.headers['Content-Type'] === 'application/json') {
|
||||
return JSON.stringify(options.data);
|
||||
}
|
||||
|
||||
return new URLSearchParams(options.data);
|
||||
};
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import { BackendSrv, getBackendSrv, parseInitFromOptions, parseUrlFromOptions } from '../services/backend_srv';
|
||||
import 'whatwg-fetch'; // fetch polyfill needed for PhantomJs rendering
|
||||
import { Observable, of } from 'rxjs';
|
||||
import { delay } from 'rxjs/operators';
|
||||
import { AppEvents } from '@grafana/data';
|
||||
|
||||
import { BackendSrv, getBackendSrv } from '../services/backend_srv';
|
||||
import { Emitter } from '../utils/emitter';
|
||||
import { ContextSrv, User } from '../services/context_srv';
|
||||
import { Observable, of } from 'rxjs';
|
||||
import { AppEvents } from '@grafana/data';
|
||||
import { CoreEvents } from '../../types';
|
||||
import { delay } from 'rxjs/operators';
|
||||
|
||||
const getTestContext = (overides?: object) => {
|
||||
const defaults = {
|
||||
@@ -17,7 +19,6 @@ const getTestContext = (overides?: object) => {
|
||||
redirected: false,
|
||||
type: 'basic',
|
||||
url: 'http://localhost:3000/api/some-mock',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
};
|
||||
const props = { ...defaults, ...overides };
|
||||
const textMock = jest.fn().mockResolvedValue(JSON.stringify(props.data));
|
||||
@@ -30,7 +31,6 @@ const getTestContext = (overides?: object) => {
|
||||
redirected: false,
|
||||
type: 'basic',
|
||||
url: 'http://localhost:3000/api/some-mock',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
};
|
||||
return of(mockedResponse);
|
||||
});
|
||||
@@ -174,7 +174,9 @@ describe('backendSrv', () => {
|
||||
statusText: 'Ok',
|
||||
text: () => Promise.resolve(JSON.stringify(slowData)),
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
map: {
|
||||
'content-type': 'application/json',
|
||||
},
|
||||
},
|
||||
redirected: false,
|
||||
type: 'basic',
|
||||
@@ -189,7 +191,9 @@ describe('backendSrv', () => {
|
||||
statusText: 'Ok',
|
||||
text: () => Promise.resolve(JSON.stringify(fastData)),
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
map: {
|
||||
'content-type': 'application/json',
|
||||
},
|
||||
},
|
||||
redirected: false,
|
||||
type: 'basic',
|
||||
@@ -344,9 +348,6 @@ describe('backendSrv', () => {
|
||||
const result = await backendSrv.datasourceRequest({ url, method: 'GET', silent: true });
|
||||
expect(result).toEqual({
|
||||
data: { test: 'hello world' },
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
ok: true,
|
||||
redirected: false,
|
||||
status: 200,
|
||||
@@ -358,8 +359,9 @@ describe('backendSrv', () => {
|
||||
method: 'GET',
|
||||
body: undefined,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json, text/plain, */*',
|
||||
map: {
|
||||
accept: 'application/json, text/plain, */*',
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -375,9 +377,6 @@ describe('backendSrv', () => {
|
||||
const result = await backendSrv.datasourceRequest({ url, method: 'GET' });
|
||||
const expectedResult = {
|
||||
data: { test: 'hello world' },
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
ok: true,
|
||||
redirected: false,
|
||||
status: 200,
|
||||
@@ -389,8 +388,9 @@ describe('backendSrv', () => {
|
||||
method: 'GET',
|
||||
body: undefined as any,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json, text/plain, */*',
|
||||
map: {
|
||||
accept: 'application/json, text/plain, */*',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
@@ -414,9 +414,6 @@ describe('backendSrv', () => {
|
||||
status: 200,
|
||||
statusText: 'Ok',
|
||||
text: () => Promise.resolve(JSON.stringify(slowData)),
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
redirected: false,
|
||||
type: 'basic',
|
||||
url,
|
||||
@@ -429,9 +426,6 @@ describe('backendSrv', () => {
|
||||
status: 200,
|
||||
statusText: 'Ok',
|
||||
text: () => Promise.resolve(JSON.stringify(fastData)),
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
redirected: false,
|
||||
type: 'basic',
|
||||
url,
|
||||
@@ -447,9 +441,6 @@ describe('backendSrv', () => {
|
||||
const fastResponse = await backendSrv.datasourceRequest(options);
|
||||
expect(fastResponse).toEqual({
|
||||
data: { message: 'Fast Request' },
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
ok: true,
|
||||
redirected: false,
|
||||
status: 200,
|
||||
@@ -461,8 +452,9 @@ describe('backendSrv', () => {
|
||||
method: 'GET',
|
||||
body: undefined,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json, text/plain, */*',
|
||||
map: {
|
||||
accept: 'application/json, text/plain, */*',
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -477,8 +469,9 @@ describe('backendSrv', () => {
|
||||
method: 'GET',
|
||||
body: undefined,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'application/json, text/plain, */*',
|
||||
map: {
|
||||
accept: 'application/json, text/plain, */*',
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -613,43 +606,3 @@ describe('backendSrv', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseUrlFromOptions', () => {
|
||||
it.each`
|
||||
params | url | expected
|
||||
${undefined} | ${'api/dashboard'} | ${'api/dashboard'}
|
||||
${{ key: 'value' }} | ${'api/dashboard'} | ${'api/dashboard?key=value'}
|
||||
${{ key: undefined }} | ${'api/dashboard'} | ${'api/dashboard'}
|
||||
${{ firstKey: 'first value', secondValue: 'second value' }} | ${'api/dashboard'} | ${'api/dashboard?firstKey=first%20value&secondValue=second%20value'}
|
||||
${{ firstKey: 'first value', secondValue: undefined }} | ${'api/dashboard'} | ${'api/dashboard?firstKey=first%20value'}
|
||||
${{ id: [1, 2, 3] }} | ${'api/dashboard'} | ${'api/dashboard?id=1&id=2&id=3'}
|
||||
${{ id: [] }} | ${'api/dashboard'} | ${'api/dashboard'}
|
||||
`(
|
||||
"when called with params: '$params' and url: '$url' then result should be '$expected'",
|
||||
({ params, url, expected }) => {
|
||||
expect(parseUrlFromOptions({ params, url })).toEqual(expected);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
describe('parseInitFromOptions', () => {
|
||||
it.each`
|
||||
method | headers | data | expected
|
||||
${undefined} | ${undefined} | ${undefined} | ${{ method: undefined, headers: { 'Content-Type': 'application/json', Accept: 'application/json, text/plain, */*' }, body: undefined }}
|
||||
${'GET'} | ${undefined} | ${undefined} | ${{ method: 'GET', headers: { 'Content-Type': 'application/json', Accept: 'application/json, text/plain, */*' }, body: undefined }}
|
||||
${'GET'} | ${undefined} | ${null} | ${{ method: 'GET', headers: { 'Content-Type': 'application/json', Accept: 'application/json, text/plain, */*' }, body: null }}
|
||||
${'GET'} | ${{ Auth: 'Some Auth' }} | ${undefined} | ${{ method: 'GET', headers: { 'Content-Type': 'application/json', Accept: 'application/json, text/plain, */*', Auth: 'Some Auth' }, body: undefined }}
|
||||
${'GET'} | ${{ Auth: 'Some Auth' }} | ${{ data: { test: 'Some data' } }} | ${{ method: 'GET', headers: { 'Content-Type': 'application/json', Accept: 'application/json, text/plain, */*', Auth: 'Some Auth' }, body: '{"data":{"test":"Some data"}}' }}
|
||||
${'GET'} | ${{ Auth: 'Some Auth' }} | ${'some data'} | ${{ method: 'GET', headers: { 'Content-Type': 'application/json', Accept: 'application/json, text/plain, */*', Auth: 'Some Auth' }, body: 'some data' }}
|
||||
${'GET'} | ${{ Auth: 'Some Auth' }} | ${'{"data":{"test":"Some data"}}'} | ${{ method: 'GET', headers: { 'Content-Type': 'application/json', Accept: 'application/json, text/plain, */*', Auth: 'Some Auth' }, body: '{"data":{"test":"Some data"}}' }}
|
||||
${'POST'} | ${{ Auth: 'Some Auth', 'Content-Type': 'application/x-www-form-urlencoded' }} | ${undefined} | ${{ method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded', Accept: 'application/json, text/plain, */*', Auth: 'Some Auth' }, body: undefined }}
|
||||
${'POST'} | ${{ Auth: 'Some Auth', 'Content-Type': 'application/x-www-form-urlencoded' }} | ${{ data: 'Some data' }} | ${{ method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded', Accept: 'application/json, text/plain, */*', Auth: 'Some Auth' }, body: new URLSearchParams({ data: 'Some data' }) }}
|
||||
${'POST'} | ${{ Auth: 'Some Auth', 'Content-Type': 'application/x-www-form-urlencoded' }} | ${'some data'} | ${{ method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded', Accept: 'application/json, text/plain, */*', Auth: 'Some Auth' }, body: 'some data' }}
|
||||
${'POST'} | ${{ Auth: 'Some Auth', 'Content-Type': 'application/x-www-form-urlencoded' }} | ${'{"data":{"test":"Some data"}}'} | ${{ method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded', Accept: 'application/json, text/plain, */*', Auth: 'Some Auth' }, body: '{"data":{"test":"Some data"}}' }}
|
||||
`(
|
||||
"when called with method: '$method', headers: '$headers' and data: '$data' then result should be '$expected'",
|
||||
({ method, headers, data, expected }) => {
|
||||
expect(parseInitFromOptions({ method, headers, data, url: '' })).toEqual(expected);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
101
public/app/core/utils/fetch.test.ts
Normal file
@@ -0,0 +1,101 @@
|
||||
import 'whatwg-fetch'; // fetch polyfill needed for PhantomJs rendering
|
||||
import {
|
||||
isContentTypeApplicationJson,
|
||||
parseBody,
|
||||
parseHeaders,
|
||||
parseInitFromOptions,
|
||||
parseUrlFromOptions,
|
||||
} from './fetch';
|
||||
|
||||
describe('parseUrlFromOptions', () => {
|
||||
it.each`
|
||||
params | url | expected
|
||||
${undefined} | ${'api/dashboard'} | ${'api/dashboard'}
|
||||
${{ key: 'value' }} | ${'api/dashboard'} | ${'api/dashboard?key=value'}
|
||||
${{ key: undefined }} | ${'api/dashboard'} | ${'api/dashboard'}
|
||||
${{ firstKey: 'first value', secondValue: 'second value' }} | ${'api/dashboard'} | ${'api/dashboard?firstKey=first%20value&secondValue=second%20value'}
|
||||
${{ firstKey: 'first value', secondValue: undefined }} | ${'api/dashboard'} | ${'api/dashboard?firstKey=first%20value'}
|
||||
${{ id: [1, 2, 3] }} | ${'api/dashboard'} | ${'api/dashboard?id=1&id=2&id=3'}
|
||||
${{ id: [] }} | ${'api/dashboard'} | ${'api/dashboard'}
|
||||
`(
|
||||
"when called with params: '$params' and url: '$url' then result should be '$expected'",
|
||||
({ params, url, expected }) => {
|
||||
expect(parseUrlFromOptions({ params, url })).toEqual(expected);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
describe('parseInitFromOptions', () => {
|
||||
it.each`
|
||||
method | data | expected
|
||||
${undefined} | ${undefined} | ${{ method: undefined, headers: { map: { accept: 'application/json, text/plain, */*' } }, body: undefined }}
|
||||
${'GET'} | ${undefined} | ${{ method: 'GET', headers: { map: { accept: 'application/json, text/plain, */*' } }, body: undefined }}
|
||||
${'POST'} | ${{ id: '0' }} | ${{ method: 'POST', headers: { map: { 'content-type': 'application/json', accept: 'application/json, text/plain, */*' } }, body: '{"id":"0"}' }}
|
||||
${'PUT'} | ${{ id: '0' }} | ${{ method: 'PUT', headers: { map: { 'content-type': 'application/json', accept: 'application/json, text/plain, */*' } }, body: '{"id":"0"}' }}
|
||||
${'monkey'} | ${undefined} | ${{ method: 'monkey', headers: { map: { accept: 'application/json, text/plain, */*' } }, body: undefined }}
|
||||
`(
|
||||
"when called with method: '$method' and data: '$data' then result should be '$expected'",
|
||||
({ method, data, expected }) => {
|
||||
expect(parseInitFromOptions({ method, data, url: '' })).toEqual(expected);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
describe('parseHeaders', () => {
|
||||
it.each`
|
||||
options | expected
|
||||
${undefined} | ${{ map: { accept: 'application/json, text/plain, */*' } }}
|
||||
${{ propKey: 'some prop value' }} | ${{ map: { accept: 'application/json, text/plain, */*' } }}
|
||||
${{ method: 'GET' }} | ${{ map: { accept: 'application/json, text/plain, */*' } }}
|
||||
${{ method: 'POST' }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/json' } }}
|
||||
${{ method: 'PUT' }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/json' } }}
|
||||
${{ headers: { 'content-type': 'application/json' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/json' } }}
|
||||
${{ method: 'GET', headers: { 'content-type': 'application/json' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/json' } }}
|
||||
${{ method: 'POST', headers: { 'content-type': 'application/json' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/json' } }}
|
||||
${{ method: 'PUT', headers: { 'content-type': 'application/json' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/json' } }}
|
||||
${{ headers: { 'cOnTent-tYpe': 'application/json' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/json' } }}
|
||||
${{ headers: { 'content-type': 'AppLiCatIon/JsOn' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'AppLiCatIon/JsOn' } }}
|
||||
${{ headers: { 'cOnTent-tYpe': 'AppLiCatIon/JsOn' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'AppLiCatIon/JsOn' } }}
|
||||
${{ headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/x-www-form-urlencoded' } }}
|
||||
${{ method: 'GET', headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/x-www-form-urlencoded' } }}
|
||||
${{ method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/x-www-form-urlencoded' } }}
|
||||
${{ method: 'PUT', headers: { 'Content-Type': 'application/x-www-form-urlencoded' } }} | ${{ map: { accept: 'application/json, text/plain, */*', 'content-type': 'application/x-www-form-urlencoded' } }}
|
||||
${{ headers: { Accept: 'text/plain' } }} | ${{ map: { accept: 'text/plain' } }}
|
||||
${{ headers: { Auth: 'Basic asdasdasd' } }} | ${{ map: { accept: 'application/json, text/plain, */*', auth: 'Basic asdasdasd' } }}
|
||||
`("when called with options: '$options' then the result should be '$expected'", ({ options, expected }) => {
|
||||
expect(parseHeaders(options)).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isContentTypeApplicationJson', () => {
|
||||
it.each`
|
||||
headers | expected
|
||||
${undefined} | ${false}
|
||||
${new Headers({ 'cOnTent-tYpe': 'application/json' })} | ${true}
|
||||
${new Headers({ 'content-type': 'AppLiCatIon/JsOn' })} | ${true}
|
||||
${new Headers({ 'cOnTent-tYpe': 'AppLiCatIon/JsOn' })} | ${true}
|
||||
${new Headers({ 'content-type': 'application/x-www-form-urlencoded' })} | ${false}
|
||||
${new Headers({ auth: 'Basic akdjasdkjalksdjasd' })} | ${false}
|
||||
`("when called with headers: 'headers' then the result should be '$expected'", ({ headers, expected }) => {
|
||||
expect(isContentTypeApplicationJson(headers)).toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseBody', () => {
|
||||
it.each`
|
||||
options | isAppJson | expected
|
||||
${undefined} | ${false} | ${undefined}
|
||||
${undefined} | ${true} | ${undefined}
|
||||
${{ data: undefined }} | ${false} | ${undefined}
|
||||
${{ data: undefined }} | ${true} | ${undefined}
|
||||
${{ data: 'some data' }} | ${false} | ${'some data'}
|
||||
${{ data: 'some data' }} | ${true} | ${'some data'}
|
||||
${{ data: { id: '0' } }} | ${false} | ${new URLSearchParams({ id: '0' })}
|
||||
${{ data: { id: '0' } }} | ${true} | ${'{"id":"0"}'}
|
||||
`(
|
||||
"when called with options: '$options' and isAppJson: '$isAppJson' then the result should be '$expected'",
|
||||
({ options, isAppJson, expected }) => {
|
||||
expect(parseBody(options, isAppJson)).toEqual(expected);
|
||||
}
|
||||
);
|
||||
});
|
||||
107
public/app/core/utils/fetch.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import { BackendSrvRequest } from '@grafana/runtime';
|
||||
import omitBy from 'lodash/omitBy';
|
||||
|
||||
export const parseInitFromOptions = (options: BackendSrvRequest): RequestInit => {
|
||||
const method = options.method;
|
||||
const headers = parseHeaders(options);
|
||||
const isAppJson = isContentTypeApplicationJson(headers);
|
||||
const body = parseBody(options, isAppJson);
|
||||
|
||||
return {
|
||||
method,
|
||||
headers,
|
||||
body,
|
||||
};
|
||||
};
|
||||
|
||||
interface HeaderParser {
|
||||
canParse: (options: BackendSrvRequest) => boolean;
|
||||
parse: (headers: Headers) => Headers;
|
||||
}
|
||||
|
||||
const defaultHeaderParser: HeaderParser = {
|
||||
canParse: () => true,
|
||||
parse: headers => {
|
||||
const accept = headers.get('accept');
|
||||
if (accept) {
|
||||
return headers;
|
||||
}
|
||||
|
||||
headers.set('accept', 'application/json, text/plain, */*');
|
||||
return headers;
|
||||
},
|
||||
};
|
||||
|
||||
const parseHeaderByMethodFactory = (methodPredicate: string): HeaderParser => ({
|
||||
canParse: options => {
|
||||
const method = options?.method ? options?.method.toLowerCase() : '';
|
||||
return method === methodPredicate;
|
||||
},
|
||||
parse: headers => {
|
||||
const contentType = headers.get('content-type');
|
||||
if (contentType) {
|
||||
return headers;
|
||||
}
|
||||
|
||||
headers.set('content-type', 'application/json');
|
||||
return headers;
|
||||
},
|
||||
});
|
||||
|
||||
const postHeaderParser: HeaderParser = parseHeaderByMethodFactory('post');
|
||||
const putHeaderParser: HeaderParser = parseHeaderByMethodFactory('put');
|
||||
|
||||
const headerParsers = [postHeaderParser, putHeaderParser, defaultHeaderParser];
|
||||
|
||||
export const parseHeaders = (options: BackendSrvRequest) => {
|
||||
const headers = options?.headers ? new Headers(options.headers) : new Headers();
|
||||
const parsers = headerParsers.filter(parser => parser.canParse(options));
|
||||
const combinedHeaders = parsers.reduce((prev, parser) => {
|
||||
return parser.parse(prev);
|
||||
}, headers);
|
||||
|
||||
return combinedHeaders;
|
||||
};
|
||||
|
||||
export const isContentTypeApplicationJson = (headers: Headers) => {
|
||||
if (!headers) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const contentType = headers.get('content-type');
|
||||
if (contentType && contentType.toLowerCase() === 'application/json') {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
export const parseBody = (options: BackendSrvRequest, isAppJson: boolean) => {
|
||||
if (!options) {
|
||||
return options;
|
||||
}
|
||||
|
||||
if (!options.data || typeof options.data === 'string') {
|
||||
return options.data;
|
||||
}
|
||||
|
||||
return isAppJson ? JSON.stringify(options.data) : new URLSearchParams(options.data);
|
||||
};
|
||||
|
||||
function serializeParams(data: Record<string, any>): string {
|
||||
return Object.keys(data)
|
||||
.map(key => {
|
||||
const value = data[key];
|
||||
if (Array.isArray(value)) {
|
||||
return value.map(arrayValue => `${encodeURIComponent(key)}=${encodeURIComponent(arrayValue)}`).join('&');
|
||||
}
|
||||
return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`;
|
||||
})
|
||||
.join('&');
|
||||
}
|
||||
|
||||
export const parseUrlFromOptions = (options: BackendSrvRequest): string => {
|
||||
const cleanParams = omitBy(options.params, v => v === undefined || (v && v.length === 0));
|
||||
const serializedParams = serializeParams(cleanParams);
|
||||
return options.params && serializedParams.length ? `${options.url}?${serializedParams}` : options.url;
|
||||
};
|
||||
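For orientation only, and not part of the diff: a minimal usage sketch of how the new helpers in public/app/core/utils/fetch.ts compose. The inputs and expected results mirror the fetch.test.ts cases above; the import alias follows the 'app/core/...' convention used elsewhere in this changeset.

    // Sketch assuming the signatures shown in the fetch.ts diff above; values mirror fetch.test.ts.
    import { parseInitFromOptions, parseUrlFromOptions } from 'app/core/utils/fetch';

    const options = { url: 'api/dashboard', method: 'POST', params: { id: [1, 2, 3] }, data: { id: '0' } };

    // Array params expand to repeated keys: 'api/dashboard?id=1&id=2&id=3'
    const url = parseUrlFromOptions(options);

    // POST defaults the content-type header to application/json, so the body is JSON-stringified: '{"id":"0"}'
    const init = parseInitFromOptions(options);
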
181
public/app/core/utils/richHistory.test.ts
Normal file
@@ -0,0 +1,181 @@
|
||||
import {
|
||||
addToRichHistory,
|
||||
updateStarredInRichHistory,
|
||||
updateCommentInRichHistory,
|
||||
mapNumbertoTimeInSlider,
|
||||
createDateStringFromTs,
|
||||
createQueryHeading,
|
||||
createDataQuery,
|
||||
deleteAllFromRichHistory,
|
||||
deleteQueryInRichHistory,
|
||||
} from './richHistory';
|
||||
import store from 'app/core/store';
|
||||
import { SortOrder } from './explore';
|
||||
|
||||
const mock: any = {
|
||||
history: [
|
||||
{
|
||||
comment: '',
|
||||
datasourceId: 'datasource historyId',
|
||||
datasourceName: 'datasource history name',
|
||||
queries: ['query1', 'query2'],
|
||||
sessionName: '',
|
||||
starred: true,
|
||||
ts: 1,
|
||||
},
|
||||
],
|
||||
comment: '',
|
||||
datasourceId: 'datasourceId',
|
||||
datasourceName: 'datasourceName',
|
||||
queries: ['query3'],
|
||||
sessionName: '',
|
||||
starred: false,
|
||||
};
|
||||
|
||||
const key = 'grafana.explore.richHistory';
|
||||
|
||||
describe('addToRichHistory', () => {
|
||||
beforeEach(() => {
|
||||
deleteAllFromRichHistory();
|
||||
expect(store.exists(key)).toBeFalsy();
|
||||
});
|
||||
|
||||
const expectedResult = [
|
||||
{
|
||||
comment: mock.comment,
|
||||
datasourceId: mock.datasourceId,
|
||||
datasourceName: mock.datasourceName,
|
||||
queries: mock.queries,
|
||||
sessionName: mock.sessionName,
|
||||
starred: mock.starred,
|
||||
ts: 2,
|
||||
},
|
||||
mock.history[0],
|
||||
];
|
||||
|
||||
it('should append query to query history', () => {
|
||||
Date.now = jest.fn(() => 2);
|
||||
const newHistory = addToRichHistory(
|
||||
mock.history,
|
||||
mock.datasourceId,
|
||||
mock.datasourceName,
|
||||
mock.queries,
|
||||
mock.starred,
|
||||
mock.comment,
|
||||
mock.sessionName
|
||||
);
|
||||
expect(newHistory).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
it('should save query history to localStorage', () => {
|
||||
Date.now = jest.fn(() => 2);
|
||||
|
||||
addToRichHistory(
|
||||
mock.history,
|
||||
mock.datasourceId,
|
||||
mock.datasourceName,
|
||||
mock.queries,
|
||||
mock.starred,
|
||||
mock.comment,
|
||||
mock.sessionName
|
||||
);
|
||||
expect(store.exists(key)).toBeTruthy();
|
||||
expect(store.getObject(key)).toMatchObject(expectedResult);
|
||||
});
|
||||
|
||||
it('should not append duplicated query to query history', () => {
|
||||
Date.now = jest.fn(() => 2);
|
||||
const newHistory = addToRichHistory(
|
||||
mock.history,
|
||||
mock.history[0].datasourceId,
|
||||
mock.history[0].datasourceName,
|
||||
mock.history[0].queries,
|
||||
mock.starred,
|
||||
mock.comment,
|
||||
mock.sessionName
|
||||
);
|
||||
expect(newHistory).toEqual([mock.history[0]]);
|
||||
});
|
||||
|
||||
it('should not save duplicated query to localStorage', () => {
|
||||
Date.now = jest.fn(() => 2);
|
||||
addToRichHistory(
|
||||
mock.history,
|
||||
mock.history[0].datasourceId,
|
||||
mock.history[0].datasourceName,
|
||||
mock.history[0].queries,
|
||||
mock.starred,
|
||||
mock.comment,
|
||||
mock.sessionName
|
||||
);
|
||||
expect(store.exists(key)).toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateStarredInRichHistory', () => {
|
||||
it('should update starred in query in history', () => {
|
||||
const updatedStarred = updateStarredInRichHistory(mock.history, 1);
|
||||
expect(updatedStarred[0].starred).toEqual(false);
|
||||
});
|
||||
it('should update starred in localStorage', () => {
|
||||
updateStarredInRichHistory(mock.history, 1);
|
||||
expect(store.exists(key)).toBeTruthy();
|
||||
expect(store.getObject(key)[0].starred).toEqual(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateCommentInRichHistory', () => {
|
||||
it('should update comment in query in history', () => {
|
||||
const updatedComment = updateCommentInRichHistory(mock.history, 1, 'new comment');
|
||||
expect(updatedComment[0].comment).toEqual('new comment');
|
||||
});
|
||||
it('should update comment in localStorage', () => {
|
||||
updateCommentInRichHistory(mock.history, 1, 'new comment');
|
||||
expect(store.exists(key)).toBeTruthy();
|
||||
expect(store.getObject(key)[0].comment).toEqual('new comment');
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteQueryInRichHistory', () => {
|
||||
it('should delete query in query history', () => {
|
||||
const deletedHistory = deleteQueryInRichHistory(mock.history, 1);
|
||||
expect(deletedHistory).toEqual([]);
|
||||
});
|
||||
it('should delete query in localStorage', () => {
|
||||
deleteQueryInRichHistory(mock.history, 1);
|
||||
expect(store.exists(key)).toBeTruthy();
|
||||
expect(store.getObject(key)).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('mapNumbertoTimeInSlider', () => {
|
||||
it('should correctly map number to value', () => {
|
||||
const value = mapNumbertoTimeInSlider(25);
|
||||
expect(value).toEqual('25 days ago');
|
||||
});
|
||||
});
|
||||
|
||||
describe('createDateStringFromTs', () => {
|
||||
it('should correctly create string value from timestamp', () => {
|
||||
const value = createDateStringFromTs(1583932327000);
|
||||
expect(value).toEqual('March 11');
|
||||
});
|
||||
});
|
||||
|
||||
describe('createQueryHeading', () => {
|
||||
it('should correctly create heading for queries when sort order is ascending ', () => {
|
||||
const heading = createQueryHeading(mock.history[0], SortOrder.Ascending);
|
||||
expect(heading).toEqual('January 1');
|
||||
});
|
||||
it('should correctly create heading for queries when sort order is datasourceAZ ', () => {
|
||||
const heading = createQueryHeading(mock.history[0], SortOrder.DatasourceAZ);
|
||||
expect(heading).toEqual(mock.history[0].datasourceName);
|
||||
});
|
||||
});
|
||||
|
||||
describe('createDataQuery', () => {
|
||||
it('should correctly create data query from rich history query', () => {
|
||||
const dataQuery = createDataQuery(mock.history[0], mock.queries[0], 0);
|
||||
expect(dataQuery).toEqual({ datasource: 'datasource history name', expr: 'query3', refId: 'A' });
|
||||
});
|
||||
});
|
||||
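As a side note, not part of the diff: a sketch of the addToRichHistory call shape exercised by richHistory.test.ts above (argument order taken from the tests; persistence to localStorage happens inside the helper).

    import { addToRichHistory } from 'app/core/utils/richHistory';

    // Returns the updated history array; a duplicate of the latest entry is not re-added.
    const newHistory = addToRichHistory(
      [],               // existing rich history entries
      'datasourceId',   // datasource id of the new entry
      'datasourceName', // datasource name of the new entry
      ['query3'],       // queries to store
      false,            // starred
      '',               // comment
      ''                // session name
    );
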
@@ -6,6 +6,7 @@ import { DataQuery, ExploreMode } from '@grafana/data';
|
||||
import { renderUrl } from 'app/core/utils/url';
|
||||
import store from 'app/core/store';
|
||||
import { serializeStateToUrlParam, SortOrder } from './explore';
|
||||
import { getExploreDatasources } from '../../features/explore/state/selectors';
|
||||
|
||||
// Types
|
||||
import { ExploreUrlState, RichHistoryQuery } from 'app/types/explore';
|
||||
@@ -13,9 +14,10 @@ import { ExploreUrlState, RichHistoryQuery } from 'app/types/explore';
|
||||
const RICH_HISTORY_KEY = 'grafana.explore.richHistory';
|
||||
|
||||
export const RICH_HISTORY_SETTING_KEYS = {
|
||||
retentionPeriod: `${RICH_HISTORY_KEY}.retentionPeriod`,
|
||||
starredTabAsFirstTab: `${RICH_HISTORY_KEY}.starredTabAsFirstTab`,
|
||||
activeDatasourceOnly: `${RICH_HISTORY_KEY}.activeDatasourceOnly`,
|
||||
retentionPeriod: 'grafana.explore.richHistory.retentionPeriod',
|
||||
starredTabAsFirstTab: 'grafana.explore.richHistory.starredTabAsFirstTab',
|
||||
activeDatasourceOnly: 'grafana.explore.richHistory.activeDatasourceOnly',
|
||||
datasourceFilters: 'grafana.explore.richHistory.datasourceFilters',
|
||||
};
|
||||
|
||||
/*
|
||||
@@ -60,8 +62,14 @@ export function addToRichHistory(
|
||||
];
|
||||
|
||||
/* Combine all queries of a datasource type into one rich history */
|
||||
store.setObject(RICH_HISTORY_KEY, newHistory);
|
||||
return newHistory;
|
||||
const isSaved = store.setObject(RICH_HISTORY_KEY, newHistory);
|
||||
|
||||
/* If newHistory is successfully saved, return it. Otherwise return the unchanged richHistory. */
|
||||
if (isSaved) {
|
||||
return newHistory;
|
||||
} else {
|
||||
return richHistory;
|
||||
}
|
||||
}
|
||||
|
||||
return richHistory;
|
||||
@@ -107,6 +115,12 @@ export function updateCommentInRichHistory(
|
||||
return updatedQueries;
|
||||
}
|
||||
|
||||
export function deleteQueryInRichHistory(richHistory: RichHistoryQuery[], ts: number) {
|
||||
const updatedQueries = richHistory.filter(query => query.ts !== ts);
|
||||
store.setObject(RICH_HISTORY_KEY, updatedQueries);
|
||||
return updatedQueries;
|
||||
}
|
||||
|
||||
export const sortQueries = (array: RichHistoryQuery[], sortOrder: SortOrder) => {
|
||||
let sortFunc;
|
||||
|
||||
@@ -251,3 +265,31 @@ export function mapQueriesToHeadings(query: RichHistoryQuery[], sortOrder: SortO
|
||||
|
||||
return mappedQueriesToHeadings;
|
||||
}
|
||||
|
||||
/* Create a datasource list with images. If a specific datasource retrieved from Rich history is not part of
 * exploreDatasources, add a generic datasource image and set isRemoved = true.
 */
|
||||
export function createDatasourcesList(queriesDatasources: string[]) {
|
||||
const exploreDatasources = getExploreDatasources();
|
||||
const datasources: Array<{ label: string; value: string; imgUrl: string; isRemoved: boolean }> = [];
|
||||
|
||||
queriesDatasources.forEach(queryDsName => {
|
||||
const index = exploreDatasources.findIndex(exploreDs => exploreDs.name === queryDsName);
|
||||
if (index !== -1) {
|
||||
datasources.push({
|
||||
label: queryDsName,
|
||||
value: queryDsName,
|
||||
imgUrl: exploreDatasources[index].meta.info.logos.small,
|
||||
isRemoved: false,
|
||||
});
|
||||
} else {
|
||||
datasources.push({
|
||||
label: queryDsName,
|
||||
value: queryDsName,
|
||||
imgUrl: 'public/img/icn-datasource.svg',
|
||||
isRemoved: true,
|
||||
});
|
||||
}
|
||||
});
|
||||
return datasources;
|
||||
}
|
||||
|
||||
@@ -75,7 +75,12 @@ export const SaveDashboardAsForm: React.FC<SaveDashboardFormProps & { isNew?: bo
|
||||
{({ register, control, errors }) => (
|
||||
<>
|
||||
<Forms.Field label="Dashboard name" invalid={!!errors.title} error="Dashboard name is required">
|
||||
<Forms.Input name="title" ref={register({ required: true })} aria-label="Save dashboard title field" />
|
||||
<Forms.Input
|
||||
name="title"
|
||||
ref={register({ required: true })}
|
||||
aria-label="Save dashboard title field"
|
||||
autoFocus
|
||||
/>
|
||||
</Forms.Field>
|
||||
<Forms.Field label="Folder">
|
||||
<Forms.InputControl
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import React, { useMemo } from 'react';
|
||||
|
||||
import { Forms, Button, HorizontalGroup } from '@grafana/ui';
|
||||
import { e2e } from '@grafana/e2e';
|
||||
import { SaveDashboardFormProps } from '../types';
|
||||
@@ -30,27 +31,32 @@ export const SaveDashboardForm: React.FC<SaveDashboardFormProps> = ({ dashboard,
|
||||
>
|
||||
{({ register, errors }) => (
|
||||
<>
|
||||
<Forms.Field label="Changes description">
|
||||
<Forms.TextArea name="message" ref={register} placeholder="Add a note to describe your changes..." />
|
||||
</Forms.Field>
|
||||
{hasTimeChanged && (
|
||||
<Forms.Field label="Save current time range" description="Dashboard time range has changed">
|
||||
<Forms.Switch
|
||||
<div className="gf-form-group">
|
||||
{hasTimeChanged && (
|
||||
<Forms.Checkbox
|
||||
label="Save current time range as dashboard default"
|
||||
name="saveTimerange"
|
||||
ref={register}
|
||||
aria-label={e2e.pages.SaveDashboardModal.selectors.saveTimerange}
|
||||
/>
|
||||
</Forms.Field>
|
||||
)}
|
||||
{hasVariableChanged && (
|
||||
<Forms.Field label="Save current variables" description="Dashboard variables have changed">
|
||||
<Forms.Switch
|
||||
)}
|
||||
{hasVariableChanged && (
|
||||
<Forms.Checkbox
|
||||
label="Save current variable values as dashboard default"
|
||||
name="saveVariables"
|
||||
ref={register}
|
||||
aria-label={e2e.pages.SaveDashboardModal.selectors.saveVariables}
|
||||
/>
|
||||
</Forms.Field>
|
||||
)}
|
||||
)}
|
||||
{(hasVariableChanged || hasTimeChanged) && <div className="gf-form-group" />}
|
||||
|
||||
<Forms.TextArea
|
||||
name="message"
|
||||
ref={register}
|
||||
placeholder="Add a note to describe your changes..."
|
||||
autoFocus
|
||||
/>
|
||||
</div>
|
||||
|
||||
<HorizontalGroup>
|
||||
<Button type="submit" aria-label={e2e.pages.SaveDashboardModal.selectors.save}>
|
||||
|
||||
@@ -72,10 +72,12 @@ export class DashboardPanelUnconnected extends PureComponent<Props, State> {
|
||||
};
|
||||
|
||||
renderPanel(plugin: PanelPlugin) {
|
||||
const { dashboard, panel, isFullscreen, isInView, isInEditMode } = this.props;
|
||||
const { dashboard, panel, isFullscreen, isEditing, isInView, isInEditMode } = this.props;
|
||||
|
||||
const autoSizerStyle = { height: isEditing ? '100%' : '' };
|
||||
|
||||
return (
|
||||
<AutoSizer>
|
||||
<AutoSizer style={autoSizerStyle}>
|
||||
{({ width, height }) => {
|
||||
if (width === 0) {
|
||||
return null;
|
||||
|
||||
@@ -11,11 +11,13 @@ import { PanelHeader } from './PanelHeader/PanelHeader';
|
||||
import { getTimeSrv, TimeSrv } from '../services/TimeSrv';
|
||||
import { getAngularLoader, AngularComponent } from '@grafana/runtime';
|
||||
import { setPanelAngularComponent } from '../state/reducers';
|
||||
import config from 'app/core/config';
|
||||
|
||||
// Types
|
||||
import { DashboardModel, PanelModel } from '../state';
|
||||
import { StoreState } from 'app/types';
|
||||
import { LoadingState, DefaultTimeRange, PanelData, PanelPlugin, PanelEvents } from '@grafana/data';
|
||||
import { PANEL_BORDER } from 'app/core/constants';
|
||||
|
||||
interface OwnProps {
|
||||
panel: PanelModel;
|
||||
@@ -135,15 +137,32 @@ export class PanelChromeAngularUnconnected extends PureComponent<Props, State> {
|
||||
|
||||
if (prevProps.width !== width || prevProps.height !== height) {
|
||||
if (this.scopeProps) {
|
||||
this.scopeProps.size.height = height;
|
||||
this.scopeProps.size.width = width;
|
||||
this.scopeProps.size.height = this.getInnerPanelHeight();
|
||||
this.scopeProps.size.width = this.getInnerPanelWidth();
|
||||
panel.events.emit(PanelEvents.panelSizeChanged);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
getInnerPanelHeight() {
|
||||
const { plugin, height } = this.props;
|
||||
const { theme } = config;
|
||||
|
||||
const headerHeight = this.hasOverlayHeader() ? 0 : theme.panelHeaderHeight;
|
||||
const chromePadding = plugin.noPadding ? 0 : theme.panelPadding;
|
||||
return height - headerHeight - chromePadding * 2 - PANEL_BORDER;
|
||||
}
|
||||
|
||||
getInnerPanelWidth() {
|
||||
const { plugin, width } = this.props;
|
||||
const { theme } = config;
|
||||
|
||||
const chromePadding = plugin.noPadding ? 0 : theme.panelPadding;
|
||||
return width - chromePadding * 2 - PANEL_BORDER;
|
||||
}
|
||||
|
||||
loadAngularPanel() {
|
||||
const { panel, dashboard, height, width, setPanelAngularComponent } = this.props;
|
||||
const { panel, dashboard, setPanelAngularComponent } = this.props;
|
||||
|
||||
// if we have no element, or the panel has already been loaded, return
|
||||
if (!this.element) {
|
||||
@@ -156,7 +175,7 @@ export class PanelChromeAngularUnconnected extends PureComponent<Props, State> {
|
||||
this.scopeProps = {
|
||||
panel: panel,
|
||||
dashboard: dashboard,
|
||||
size: { width, height },
|
||||
size: { width: this.getInnerPanelWidth(), height: this.getInnerPanelHeight() },
|
||||
};
|
||||
|
||||
setPanelAngularComponent({
|
||||
|
||||
@@ -322,7 +322,6 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
|
||||
['explore-active-button']: showRichHistory,
|
||||
})}
|
||||
onClick={this.toggleShowRichHistory}
|
||||
disabled={isLive}
|
||||
>
|
||||
<i className={'fa fa-fw fa-history icon-margin-right '} />
|
||||
<span className="btn-title">{'\xA0' + 'Query history'}</span>
|
||||
@@ -382,7 +381,13 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
{showRichHistory && <RichHistoryContainer width={width} exploreId={exploreId} />}
|
||||
{showRichHistory && (
|
||||
<RichHistoryContainer
|
||||
width={width}
|
||||
exploreId={exploreId}
|
||||
onClose={this.toggleShowRichHistory}
|
||||
/>
|
||||
)}
|
||||
</ErrorBoundaryAlert>
|
||||
</main>
|
||||
);
|
||||
|
||||
63
public/app/features/explore/RichHistory/RichHistory.test.tsx
Normal file
@@ -0,0 +1,63 @@
|
||||
import React from 'react';
|
||||
import { mount } from 'enzyme';
|
||||
import { GrafanaTheme } from '@grafana/data';
|
||||
import { ExploreId } from '../../../types/explore';
|
||||
import { RichHistory, RichHistoryProps } from './RichHistory';
|
||||
import { Tabs } from './RichHistory';
|
||||
import { Tab, Slider } from '@grafana/ui';
|
||||
|
||||
jest.mock('../state/selectors', () => ({ getExploreDatasources: jest.fn() }));
|
||||
|
||||
const setup = (propOverrides?: Partial<RichHistoryProps>) => {
|
||||
const props: RichHistoryProps = {
|
||||
theme: {} as GrafanaTheme,
|
||||
exploreId: ExploreId.left,
|
||||
height: 100,
|
||||
activeDatasourceInstance: 'Test datasource',
|
||||
richHistory: [],
|
||||
firstTab: Tabs.RichHistory,
|
||||
deleteRichHistory: jest.fn(),
|
||||
onClose: jest.fn(),
|
||||
};
|
||||
|
||||
Object.assign(props, propOverrides);
|
||||
|
||||
const wrapper = mount(<RichHistory {...props} />);
|
||||
return wrapper;
|
||||
};
|
||||
|
||||
describe('RichHistory', () => {
|
||||
it('should render all tabs in tab bar', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find(Tab)).toHaveLength(3);
|
||||
});
|
||||
it('should render correct labels of tabs in tab bar', () => {
|
||||
const wrapper = setup();
|
||||
expect(
|
||||
wrapper
|
||||
.find(Tab)
|
||||
.at(0)
|
||||
.text()
|
||||
).toEqual('Query history');
|
||||
expect(
|
||||
wrapper
|
||||
.find(Tab)
|
||||
.at(1)
|
||||
.text()
|
||||
).toEqual('Starred');
|
||||
expect(
|
||||
wrapper
|
||||
.find(Tab)
|
||||
.at(2)
|
||||
.text()
|
||||
).toEqual('Settings');
|
||||
});
|
||||
it('should correctly render query history tab as active tab', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find(Slider)).toHaveLength(1);
|
||||
});
|
||||
it('should correctly render starred tab as active tab', () => {
|
||||
const wrapper = setup({ firstTab: Tabs.Starred });
|
||||
expect(wrapper.find(Slider)).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
@@ -30,12 +30,14 @@ export const sortOrderOptions = [
|
||||
{ label: 'Data source Z-A', value: SortOrder.DatasourceZA },
|
||||
];
|
||||
|
||||
interface RichHistoryProps extends Themeable {
|
||||
export interface RichHistoryProps extends Themeable {
|
||||
richHistory: RichHistoryQuery[];
|
||||
activeDatasourceInstance: string;
|
||||
firstTab: Tabs;
|
||||
exploreId: ExploreId;
|
||||
height: number;
|
||||
deleteRichHistory: () => void;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
interface RichHistoryState {
|
||||
@@ -60,6 +62,11 @@ const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
background-color: ${tabContentBg};
|
||||
padding: ${theme.spacing.md};
|
||||
`,
|
||||
close: css`
|
||||
position: absolute;
|
||||
right: ${theme.spacing.sm};
|
||||
cursor: pointer;
|
||||
`,
|
||||
tabs: css`
|
||||
background-color: ${tabBarBg};
|
||||
padding-top: ${theme.spacing.sm};
|
||||
@@ -76,8 +83,8 @@ class UnThemedRichHistory extends PureComponent<RichHistoryProps, RichHistorySta
|
||||
super(props);
|
||||
this.state = {
|
||||
activeTab: this.props.firstTab,
|
||||
datasourceFilters: null,
|
||||
sortOrder: SortOrder.Descending,
|
||||
datasourceFilters: store.getObject(RICH_HISTORY_SETTING_KEYS.datasourceFilters, null),
|
||||
retentionPeriod: store.getObject(RICH_HISTORY_SETTING_KEYS.retentionPeriod, 7),
|
||||
starredTabAsFirstTab: store.getBool(RICH_HISTORY_SETTING_KEYS.starredTabAsFirstTab, false),
|
||||
activeDatasourceOnly: store.getBool(RICH_HISTORY_SETTING_KEYS.activeDatasourceOnly, false),
|
||||
@@ -108,6 +115,7 @@ class UnThemedRichHistory extends PureComponent<RichHistoryProps, RichHistorySta
|
||||
};
|
||||
|
||||
onSelectDatasourceFilters = (value: SelectableValue[] | null) => {
|
||||
store.setObject(RICH_HISTORY_SETTING_KEYS.datasourceFilters, value);
|
||||
this.setState({ datasourceFilters: value });
|
||||
};
|
||||
|
||||
@@ -126,7 +134,7 @@ class UnThemedRichHistory extends PureComponent<RichHistoryProps, RichHistorySta
|
||||
? this.onSelectDatasourceFilters([
|
||||
{ label: this.props.activeDatasourceInstance, value: this.props.activeDatasourceInstance },
|
||||
])
|
||||
: this.onSelectDatasourceFilters(null);
|
||||
: this.onSelectDatasourceFilters(this.state.datasourceFilters);
|
||||
}
|
||||
|
||||
componentDidMount() {
|
||||
@@ -142,15 +150,8 @@ class UnThemedRichHistory extends PureComponent<RichHistoryProps, RichHistorySta
|
||||
}
|
||||
|
||||
render() {
|
||||
const {
|
||||
datasourceFilters,
|
||||
sortOrder,
|
||||
activeTab,
|
||||
starredTabAsFirstTab,
|
||||
activeDatasourceOnly,
|
||||
retentionPeriod,
|
||||
} = this.state;
|
||||
const { theme, richHistory, exploreId, deleteRichHistory } = this.props;
|
||||
const { datasourceFilters, sortOrder, activeTab, activeDatasourceOnly, retentionPeriod } = this.state;
|
||||
const { theme, richHistory, height, exploreId, deleteRichHistory, onClose } = this.props;
|
||||
const styles = getStyles(theme);
|
||||
|
||||
const QueriesTab = {
|
||||
@@ -166,6 +167,7 @@ class UnThemedRichHistory extends PureComponent<RichHistoryProps, RichHistorySta
|
||||
onChangeSortOrder={this.onChangeSortOrder}
|
||||
onSelectDatasourceFilters={this.onSelectDatasourceFilters}
|
||||
exploreId={exploreId}
|
||||
height={height}
|
||||
/>
|
||||
),
|
||||
icon: 'fa fa-history',
|
||||
@@ -205,8 +207,7 @@ class UnThemedRichHistory extends PureComponent<RichHistoryProps, RichHistorySta
|
||||
icon: 'gicon gicon-preferences',
|
||||
};
|
||||
|
||||
let tabs = starredTabAsFirstTab ? [StarredTab, QueriesTab, SettingsTab] : [QueriesTab, StarredTab, SettingsTab];
|
||||
|
||||
let tabs = [QueriesTab, StarredTab, SettingsTab];
|
||||
return (
|
||||
<div className={styles.container}>
|
||||
<TabsBar className={styles.tabs}>
|
||||
@@ -219,6 +220,9 @@ class UnThemedRichHistory extends PureComponent<RichHistoryProps, RichHistorySta
|
||||
icon={t.icon}
|
||||
/>
|
||||
))}
|
||||
<div className={styles.close} onClick={onClose}>
|
||||
<i className="fa fa-times" title="Close query history" />
|
||||
</div>
|
||||
</TabsBar>
|
||||
<CustomScrollbar
|
||||
className={css`
|
||||
|
||||
115
public/app/features/explore/RichHistory/RichHistoryCard.test.tsx
Normal file
@@ -0,0 +1,115 @@
|
||||
import React from 'react';
|
||||
import { mount } from 'enzyme';
|
||||
import { RichHistoryCard, Props } from './RichHistoryCard';
|
||||
import { ExploreId } from '../../../types/explore';
|
||||
import { DataSourceApi } from '@grafana/data';
|
||||
|
||||
const setup = (propOverrides?: Partial<Props>) => {
|
||||
const props: Props = {
|
||||
query: {
|
||||
ts: 1,
|
||||
datasourceName: 'Test datasource',
|
||||
datasourceId: 'datasource 1',
|
||||
starred: false,
|
||||
comment: '',
|
||||
queries: ['query1', 'query2', 'query3'],
|
||||
sessionName: '',
|
||||
},
|
||||
dsImg: '/app/img',
|
||||
isRemoved: false,
|
||||
changeDatasource: jest.fn(),
|
||||
updateRichHistory: jest.fn(),
|
||||
setQueries: jest.fn(),
|
||||
exploreId: ExploreId.left,
|
||||
datasourceInstance: { name: 'Datasource' } as DataSourceApi,
|
||||
};
|
||||
|
||||
Object.assign(props, propOverrides);
|
||||
|
||||
const wrapper = mount(<RichHistoryCard {...props} />);
|
||||
return wrapper;
|
||||
};
|
||||
|
||||
const starredQueryWithComment = {
|
||||
ts: 1,
|
||||
datasourceName: 'Test datasource',
|
||||
datasourceId: 'datasource 1',
|
||||
starred: true,
|
||||
comment: 'test comment',
|
||||
queries: ['query1', 'query2', 'query3'],
|
||||
sessionName: '',
|
||||
};
|
||||
|
||||
describe('RichHistoryCard', () => {
|
||||
it('should render all queries', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ 'aria-label': 'Query text' })).toHaveLength(3);
|
||||
expect(
|
||||
wrapper
|
||||
.find({ 'aria-label': 'Query text' })
|
||||
.at(0)
|
||||
.text()
|
||||
).toEqual('query1');
|
||||
expect(
|
||||
wrapper
|
||||
.find({ 'aria-label': 'Query text' })
|
||||
.at(1)
|
||||
.text()
|
||||
).toEqual('query2');
|
||||
expect(
|
||||
wrapper
|
||||
.find({ 'aria-label': 'Query text' })
|
||||
.at(2)
|
||||
.text()
|
||||
).toEqual('query3');
|
||||
});
|
||||
it('should render data source icon', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ 'aria-label': 'Data source icon' })).toHaveLength(1);
|
||||
});
|
||||
it('should render data source name', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ 'aria-label': 'Data source name' }).text()).toEqual('Test datasource');
|
||||
});
|
||||
it('should render "Data source does not exist anymore" if removed data source', () => {
|
||||
const wrapper = setup({ isRemoved: true });
|
||||
expect(wrapper.find({ 'aria-label': 'Data source name' }).text()).toEqual('Data source does not exist anymore');
|
||||
});
|
||||
|
||||
describe('commenting', () => {
|
||||
it('should render comment, if comment present', () => {
|
||||
const wrapper = setup({ query: starredQueryWithComment });
|
||||
expect(wrapper.find({ 'aria-label': 'Query comment' })).toHaveLength(1);
|
||||
expect(wrapper.find({ 'aria-label': 'Query comment' }).text()).toEqual('test comment');
|
||||
});
|
||||
it('should have title "Edit comment" at comment icon, if comment present', () => {
|
||||
const wrapper = setup({ query: starredQueryWithComment });
|
||||
expect(wrapper.find({ title: 'Edit comment' })).toHaveLength(1);
|
||||
expect(wrapper.find({ title: 'Add comment' })).toHaveLength(0);
|
||||
});
|
||||
it('should have title "Add comment" at comment icon, if no comment present', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ title: 'Add comment' })).toHaveLength(1);
|
||||
expect(wrapper.find({ title: 'Edit comment' })).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('starring', () => {
|
||||
it('should have title "Star query", if not starred', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ title: 'Star query' })).toHaveLength(1);
|
||||
});
|
||||
it('should render fa-star-o icon, if not starred', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ title: 'Star query' }).hasClass('fa-star-o')).toBe(true);
|
||||
});
|
||||
it('should have title "Unstar query", if not starred', () => {
|
||||
const wrapper = setup({ query: starredQueryWithComment });
|
||||
expect(wrapper.find({ title: 'Unstar query' })).toHaveLength(1);
|
||||
});
|
||||
it('should have fa-star icon, if starred', () => {
|
||||
const wrapper = setup({ query: starredQueryWithComment });
|
||||
expect(wrapper.find({ title: 'Unstar query' }).hasClass('fa-star')).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -2,56 +2,93 @@ import React, { useState } from 'react';
|
||||
import { connect } from 'react-redux';
|
||||
import { hot } from 'react-hot-loader';
|
||||
import { css, cx } from 'emotion';
|
||||
import { stylesFactory, useTheme, Forms, styleMixins } from '@grafana/ui';
|
||||
import { stylesFactory, useTheme, Forms } from '@grafana/ui';
|
||||
import { GrafanaTheme, AppEvents, DataSourceApi } from '@grafana/data';
|
||||
import { RichHistoryQuery, ExploreId } from 'app/types/explore';
|
||||
import { copyStringToClipboard, createUrlFromRichHistory, createDataQuery } from 'app/core/utils/richHistory';
|
||||
import appEvents from 'app/core/app_events';
|
||||
import { StoreState } from 'app/types';
|
||||
|
||||
import { changeQuery, changeDatasource, clearQueries, updateRichHistory } from '../state/actions';
|
||||
interface Props {
|
||||
import { changeDatasource, updateRichHistory, setQueries } from '../state/actions';
|
||||
export interface Props {
|
||||
query: RichHistoryQuery;
|
||||
changeQuery: typeof changeQuery;
|
||||
dsImg: string;
|
||||
isRemoved: boolean;
|
||||
changeDatasource: typeof changeDatasource;
|
||||
clearQueries: typeof clearQueries;
|
||||
updateRichHistory: typeof updateRichHistory;
|
||||
setQueries: typeof setQueries;
|
||||
exploreId: ExploreId;
|
||||
datasourceInstance: DataSourceApi;
|
||||
}
|
||||
|
||||
const getStyles = stylesFactory((theme: GrafanaTheme, hasComment?: boolean) => {
|
||||
const bgColor = theme.isLight ? theme.colors.gray5 : theme.colors.dark4;
|
||||
const cardBottomPadding = hasComment ? theme.spacing.sm : theme.spacing.xs;
|
||||
const getStyles = stylesFactory((theme: GrafanaTheme, isRemoved: boolean) => {
|
||||
/* Hard-coded value so all buttons and icons on right side of card are aligned */
|
||||
const rigtColumnWidth = '240px';
|
||||
const rigtColumnContentWidth = '170px';
|
||||
|
||||
const borderColor = theme.isLight ? theme.colors.gray5 : theme.colors.gray25;
|
||||
|
||||
/* If datasource was removed, card will have inactive color */
|
||||
const cardColor = theme.isLight
|
||||
? isRemoved
|
||||
? theme.colors.gray95
|
||||
: theme.colors.white
|
||||
: isRemoved
|
||||
? theme.colors.gray15
|
||||
: theme.colors.gray05;
|
||||
const cardBoxShadow = theme.isLight ? `0px 2px 2px ${borderColor}` : `0px 2px 4px black`;
|
||||
|
||||
return {
|
||||
queryCard: css`
|
||||
${styleMixins.listItem(theme)}
|
||||
display: flex;
|
||||
padding: ${theme.spacing.sm} ${theme.spacing.sm} ${cardBottomPadding};
|
||||
flex-direction: column;
|
||||
border: 1px solid ${borderColor};
|
||||
margin: ${theme.spacing.sm} 0;
|
||||
|
||||
box-shadow: ${cardBoxShadow};
|
||||
background-color: ${cardColor};
|
||||
border-radius: ${theme.border.radius.sm};
|
||||
.starred {
|
||||
color: ${theme.colors.orange};
|
||||
}
|
||||
`,
|
||||
queryCardLeft: css`
|
||||
padding-right: 10px;
|
||||
width: calc(100% - 150px);
|
||||
cursor: pointer;
|
||||
cardRow: css`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: ${theme.spacing.sm};
|
||||
border-bottom: none;
|
||||
:first-of-type {
|
||||
border-bottom: 1px solid ${borderColor};
|
||||
padding: ${theme.spacing.xs} ${theme.spacing.sm};
|
||||
}
|
||||
img {
|
||||
height: ${theme.typography.size.base};
|
||||
max-width: ${theme.typography.size.base};
|
||||
margin-right: ${theme.spacing.sm};
|
||||
}
|
||||
`,
|
||||
queryCardRight: css`
|
||||
width: 150px;
|
||||
datasourceContainer: css`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
font-size: ${theme.typography.size.sm};
|
||||
font-weight: ${theme.typography.weight.semibold};
|
||||
`,
|
||||
queryActionButtons: css`
|
||||
max-width: ${rigtColumnContentWidth};
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
|
||||
font-size: ${theme.typography.size.base};
|
||||
i {
|
||||
margin: ${theme.spacing.xs};
|
||||
cursor: pointer;
|
||||
}
|
||||
`,
|
||||
queryContainer: css`
|
||||
font-weight: ${theme.typography.weight.semibold};
|
||||
width: calc(100% - ${rigtColumnWidth});
|
||||
`,
|
||||
queryRow: css`
|
||||
border-top: 1px solid ${bgColor};
|
||||
border-top: 1px solid ${borderColor};
|
||||
word-break: break-all;
|
||||
padding: 4px 2px;
|
||||
:first-child {
|
||||
@@ -59,134 +96,175 @@ const getStyles = stylesFactory((theme: GrafanaTheme, hasComment?: boolean) => {
|
||||
padding: 0 0 4px 0;
|
||||
}
|
||||
`,
|
||||
buttonRow: css`
|
||||
> * {
|
||||
margin-right: ${theme.spacing.xs};
|
||||
}
|
||||
updateCommentContainer: css`
|
||||
width: calc(100% + ${rigtColumnWidth});
|
||||
margin-top: ${theme.spacing.sm};
|
||||
`,
|
||||
comment: css`
|
||||
overflow-wrap: break-word;
|
||||
font-size: ${theme.typography.size.sm};
|
||||
font-weight: ${theme.typography.weight.regular};
|
||||
margin-top: ${theme.spacing.xs};
|
||||
`,
|
||||
commentButtonRow: css`
|
||||
> * {
|
||||
margin-right: ${theme.spacing.sm};
|
||||
}
|
||||
`,
|
||||
textArea: css`
|
||||
border: 1px solid ${borderColor};
|
||||
background: inherit;
|
||||
color: inherit;
|
||||
width: 100%;
|
||||
font-size: ${theme.typography.size.sm};
|
||||
&placeholder {
|
||||
padding: 0 ${theme.spacing.sm};
|
||||
}
|
||||
`,
|
||||
runButton: css`
|
||||
max-width: ${rigtColumnContentWidth};
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
button {
|
||||
height: auto;
|
||||
padding: ${theme.spacing.sm} ${theme.spacing.md};
|
||||
span {
|
||||
white-space: normal !important;
|
||||
}
|
||||
}
|
||||
`,
|
||||
};
|
||||
});
|
||||
|
||||
export function RichHistoryCard(props: Props) {
|
||||
const {
|
||||
query,
|
||||
dsImg,
|
||||
isRemoved,
|
||||
updateRichHistory,
|
||||
changeQuery,
|
||||
changeDatasource,
|
||||
exploreId,
|
||||
clearQueries,
|
||||
datasourceInstance,
|
||||
setQueries,
|
||||
} = props;
|
||||
const [starred, setStared] = useState(query.starred);
|
||||
const [activeUpdateComment, setActiveUpdateComment] = useState(false);
|
||||
const [comment, setComment] = useState<string | undefined>(query.comment);
|
||||
|
||||
const toggleActiveUpdateComment = () => setActiveUpdateComment(!activeUpdateComment);
|
||||
const theme = useTheme();
|
||||
const styles = getStyles(theme, Boolean(query.comment));
|
||||
const styles = getStyles(theme, isRemoved);
|
||||
|
||||
const changeQueries = () => {
|
||||
query.queries.forEach((q, i) => {
|
||||
const dataQuery = createDataQuery(query, q, i);
|
||||
changeQuery(exploreId, dataQuery, i);
|
||||
});
|
||||
};
|
||||
|
||||
const onChangeQuery = async (query: RichHistoryQuery) => {
|
||||
const onRunQuery = async () => {
|
||||
const dataQueries = query.queries.map((q, i) => createDataQuery(query, q, i));
|
||||
if (query.datasourceName !== datasourceInstance?.name) {
|
||||
await changeDatasource(exploreId, query.datasourceName);
|
||||
changeQueries();
|
||||
setQueries(exploreId, dataQueries);
|
||||
} else {
|
||||
clearQueries(exploreId);
|
||||
changeQueries();
|
||||
setQueries(exploreId, dataQueries);
|
||||
}
|
||||
};
|
||||
|
||||
const onCopyQuery = () => {
|
||||
const queries = query.queries.join('\n\n');
|
||||
copyStringToClipboard(queries);
|
||||
appEvents.emit(AppEvents.alertSuccess, ['Query copied to clipboard']);
|
||||
};
|
||||
|
||||
const onCreateLink = () => {
|
||||
const url = createUrlFromRichHistory(query);
|
||||
copyStringToClipboard(url);
|
||||
appEvents.emit(AppEvents.alertSuccess, ['Link copied to clipboard']);
|
||||
};
|
||||
|
||||
const onDeleteQuery = () => {
|
||||
updateRichHistory(query.ts, 'delete');
|
||||
appEvents.emit(AppEvents.alertSuccess, ['Query deleted']);
|
||||
};
|
||||
|
||||
const onStarrQuery = () => {
|
||||
updateRichHistory(query.ts, 'starred');
|
||||
};
|
||||
|
||||
const onUpdateComment = () => {
|
||||
updateRichHistory(query.ts, 'comment', comment);
|
||||
toggleActiveUpdateComment();
|
||||
};
|
||||
|
||||
const onCancelUpdateComment = () => {
|
||||
toggleActiveUpdateComment();
|
||||
setComment(query.comment);
|
||||
};
|
||||
|
||||
const updateComment = (
|
||||
<div className={styles.updateCommentContainer}>
|
||||
<Forms.TextArea
|
||||
value={comment}
|
||||
placeholder={comment ? undefined : 'An optional description of what the query does.'}
|
||||
onChange={e => setComment(e.currentTarget.value)}
|
||||
className={styles.textArea}
|
||||
/>
|
||||
<div className={styles.commentButtonRow}>
|
||||
<Forms.Button onClick={onUpdateComment}>Save comment</Forms.Button>
|
||||
<Forms.Button variant="secondary" onClick={onCancelUpdateComment}>
|
||||
Cancel
|
||||
</Forms.Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
const queryActionButtons = (
|
||||
<div className={styles.queryActionButtons}>
|
||||
<i
|
||||
className="fa fa-fw fa-comment-o"
|
||||
onClick={toggleActiveUpdateComment}
|
||||
title={query.comment?.length > 0 ? 'Edit comment' : 'Add comment'}
|
||||
></i>
|
||||
<i className="fa fa-fw fa-copy" onClick={onCopyQuery} title="Copy query to clipboard"></i>
|
||||
{!isRemoved && <i className="fa fa-fw fa-link" onClick={onCreateLink} title="Copy link to clipboard"></i>}
|
||||
<i className={'fa fa-trash'} title={'Delete query'} onClick={onDeleteQuery}></i>
|
||||
<i
|
||||
className={cx('fa fa-fw', query.starred ? 'fa-star starred' : 'fa-star-o')}
|
||||
onClick={onStarrQuery}
|
||||
title={query.starred ? 'Unstar query' : 'Star query'}
|
||||
></i>
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<div className={styles.queryCard}>
|
||||
<div className={styles.queryCardLeft} onClick={() => onChangeQuery(query)}>
|
||||
{query.queries.map((q, i) => {
|
||||
return (
|
||||
<div key={`${q}-${i}`} className={styles.queryRow}>
|
||||
{q}
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
{!activeUpdateComment && query.comment && <div className={styles.comment}>{query.comment}</div>}
|
||||
{activeUpdateComment && (
|
||||
<div>
|
||||
<Forms.TextArea
|
||||
value={comment}
|
||||
placeholder={comment ? undefined : 'add comment'}
|
||||
onChange={e => setComment(e.currentTarget.value)}
|
||||
/>
|
||||
<div className={styles.buttonRow}>
|
||||
<Forms.Button
|
||||
onClick={e => {
|
||||
e.preventDefault();
|
||||
updateRichHistory(query.ts, 'comment', comment);
|
||||
toggleActiveUpdateComment();
|
||||
}}
|
||||
>
|
||||
Save
|
||||
</Forms.Button>
|
||||
<Forms.Button
|
||||
variant="secondary"
|
||||
className={css`
|
||||
margin-left: 8px;
|
||||
`}
|
||||
onClick={() => {
|
||||
toggleActiveUpdateComment();
|
||||
setComment(query.comment);
|
||||
}}
|
||||
>
|
||||
Cancel
|
||||
</Forms.Button>
|
||||
<div className={styles.cardRow}>
|
||||
<div className={styles.datasourceContainer}>
|
||||
<img src={dsImg} aria-label="Data source icon" />
|
||||
<div aria-label="Data source name">
|
||||
{isRemoved ? 'Data source does not exist anymore' : query.datasourceName}
|
||||
</div>
|
||||
</div>
|
||||
{queryActionButtons}
|
||||
</div>
|
||||
<div className={cx(styles.cardRow)}>
|
||||
<div className={styles.queryContainer}>
|
||||
{query.queries.map((q, i) => {
|
||||
return (
|
||||
<div aria-label="Query text" key={`${q}-${i}`} className={styles.queryRow}>
|
||||
{q}
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
{!activeUpdateComment && query.comment && (
|
||||
<div aria-label="Query comment" className={styles.comment}>
|
||||
{query.comment}
|
||||
</div>
|
||||
)}
|
||||
{activeUpdateComment && updateComment}
|
||||
</div>
|
||||
{!activeUpdateComment && (
|
||||
<div className={styles.runButton}>
|
||||
<Forms.Button variant="secondary" onClick={onRunQuery} disabled={isRemoved}>
|
||||
{datasourceInstance?.name === query.datasourceName ? 'Run query' : 'Switch data source and run query'}
|
||||
</Forms.Button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className={styles.queryCardRight}>
|
||||
<i
|
||||
className="fa fa-fw fa-comment-o"
|
||||
onClick={() => {
|
||||
toggleActiveUpdateComment();
|
||||
}}
|
||||
title={query.comment?.length > 0 ? 'Edit comment' : 'Add comment'}
|
||||
></i>
|
||||
<i
|
||||
className="fa fa-fw fa-copy"
|
||||
onClick={() => {
|
||||
const queries = query.queries.join('\n\n');
|
||||
copyStringToClipboard(queries);
|
||||
appEvents.emit(AppEvents.alertSuccess, ['Query copied to clipboard']);
|
||||
}}
|
||||
title="Copy query to clipboard"
|
||||
></i>
|
||||
<i
|
||||
className="fa fa-fw fa-link"
|
||||
onClick={() => {
|
||||
const url = createUrlFromRichHistory(query);
|
||||
copyStringToClipboard(url);
|
||||
appEvents.emit(AppEvents.alertSuccess, ['Link copied to clipboard']);
|
||||
}}
|
||||
style={{ fontWeight: 'normal' }}
|
||||
title="Copy link to clipboard"
|
||||
></i>
|
||||
<i
|
||||
className={cx('fa fa-fw', starred ? 'fa-star starred' : 'fa-star-o')}
|
||||
onClick={() => {
|
||||
updateRichHistory(query.ts, 'starred');
|
||||
setStared(!starred);
|
||||
}}
|
||||
title={query.starred ? 'Unstar query' : 'Star query'}
|
||||
></i>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -203,10 +281,9 @@ function mapStateToProps(state: StoreState, { exploreId }: { exploreId: ExploreI
|
||||
}
|
||||
|
||||
const mapDispatchToProps = {
|
||||
changeQuery,
|
||||
changeDatasource,
|
||||
clearQueries,
|
||||
updateRichHistory,
|
||||
setQueries,
|
||||
};
|
||||
|
||||
export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(RichHistoryCard));
|
||||
|
||||
@@ -0,0 +1,41 @@
import React from 'react';
import { mount } from 'enzyme';
import { Resizable } from 're-resizable';

import { ExploreId } from '../../../types/explore';
import { RichHistoryContainer, Props } from './RichHistoryContainer';
import { Tabs } from './RichHistory';

jest.mock('../state/selectors', () => ({ getExploreDatasources: jest.fn() }));

const setup = (propOverrides?: Partial<Props>) => {
  const props: Props = {
    width: 500,
    exploreId: ExploreId.left,
    activeDatasourceInstance: 'Test datasource',
    richHistory: [],
    firstTab: Tabs.RichHistory,
    deleteRichHistory: jest.fn(),
    onClose: jest.fn(),
  };

  Object.assign(props, propOverrides);

  const wrapper = mount(<RichHistoryContainer {...props} />);
  return wrapper;
};

describe('RichHistoryContainer', () => {
  it('should render resizable component', () => {
    const wrapper = setup();
    expect(wrapper.find(Resizable)).toHaveLength(1);
  });
  it('should render component with correct width', () => {
    const wrapper = setup();
    expect(wrapper.getDOMNode().getAttribute('style')).toContain('width: 531.5px');
  });
  it('should render component with correct height', () => {
    const wrapper = setup();
    expect(wrapper.getDOMNode().getAttribute('style')).toContain('height: 400px');
  });
});
@@ -22,33 +22,33 @@ import { RichHistory, Tabs } from './RichHistory';
|
||||
import { deleteRichHistory } from '../state/actions';
|
||||
|
||||
const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
const bgColor = theme.isLight ? theme.colors.gray5 : theme.colors.gray15;
|
||||
const bg = theme.isLight ? theme.colors.gray7 : theme.colors.dark2;
|
||||
const borderColor = theme.isLight ? theme.colors.gray5 : theme.colors.dark6;
|
||||
const handleHover = theme.isLight ? theme.colors.gray10 : theme.colors.gray33;
|
||||
const containerBackground = theme.isLight ? theme.colors.gray7 : theme.colors.dark2;
|
||||
const containerBorderColor = theme.isLight ? theme.colors.gray5 : theme.colors.dark6;
|
||||
const handleBackground = theme.isLight ? theme.colors.gray5 : theme.colors.gray15;
|
||||
const handleDots = theme.isLight ? theme.colors.gray70 : theme.colors.gray33;
|
||||
const handleDotsHover = theme.isLight ? theme.colors.gray33 : theme.colors.dark7;
|
||||
const handleBackgroundHover = theme.isLight ? theme.colors.gray70 : theme.colors.gray33;
|
||||
const handleDotsHover = theme.isLight ? theme.colors.gray5 : theme.colors.dark7;
|
||||
|
||||
return {
|
||||
container: css`
|
||||
position: fixed !important;
|
||||
bottom: 0;
|
||||
background: ${bg};
|
||||
border-top: 1px solid ${borderColor};
|
||||
background: ${containerBackground};
|
||||
border-top: 1px solid ${containerBorderColor};
|
||||
margin: 0px;
|
||||
margin-right: -${theme.spacing.md};
|
||||
margin-left: -${theme.spacing.md};
|
||||
`,
|
||||
drawerActive: css`
|
||||
opacity: 1;
|
||||
transition: transform 0.3s ease-in;
|
||||
transition: transform 0.5s ease-in;
|
||||
`,
|
||||
drawerNotActive: css`
|
||||
opacity: 0;
|
||||
transform: translateY(150px);
|
||||
transform: translateY(400px);
|
||||
`,
|
||||
rzHandle: css`
|
||||
background: ${bgColor};
|
||||
background: ${handleBackground};
|
||||
transition: 0.3s background ease-in-out;
|
||||
position: relative;
|
||||
width: 200px !important;
|
||||
@@ -57,7 +57,7 @@ const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
border-radius: 4px;
|
||||
|
||||
&:hover {
|
||||
background-color: ${handleHover};
|
||||
background-color: ${handleBackgroundHover};
|
||||
|
||||
&:after {
|
||||
border-color: ${handleDotsHover};
|
||||
@@ -77,25 +77,27 @@ const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
};
|
||||
});
|
||||
|
||||
interface Props {
|
||||
export interface Props {
|
||||
width: number;
|
||||
exploreId: ExploreId;
|
||||
activeDatasourceInstance: string;
|
||||
richHistory: RichHistoryQuery[];
|
||||
firstTab: Tabs;
|
||||
deleteRichHistory: typeof deleteRichHistory;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
function RichHistoryContainer(props: Props) {
|
||||
export function RichHistoryContainer(props: Props) {
|
||||
const [visible, setVisible] = useState(false);
|
||||
const [height, setHeight] = useState(400);
|
||||
|
||||
/* To create sliding animation for rich history drawer */
|
||||
useEffect(() => {
|
||||
const timer = setTimeout(() => setVisible(true), 100);
|
||||
const timer = setTimeout(() => setVisible(true), 10);
|
||||
return () => clearTimeout(timer);
|
||||
}, []);
|
||||
|
||||
const { richHistory, width, firstTab, activeDatasourceInstance, exploreId, deleteRichHistory } = props;
|
||||
const { richHistory, width, firstTab, activeDatasourceInstance, exploreId, deleteRichHistory, onClose } = props;
|
||||
const theme = useTheme();
|
||||
const styles = getStyles(theme);
|
||||
const drawerWidth = `${width + 31.5}px`;
|
||||
@@ -118,6 +120,9 @@ function RichHistoryContainer(props: Props) {
|
||||
maxHeight="100vh"
|
||||
maxWidth={drawerWidth}
|
||||
minWidth={drawerWidth}
|
||||
onResize={(e, dir, ref) => {
|
||||
setHeight(Number(ref.style.height.slice(0, -2)));
|
||||
}}
|
||||
>
|
||||
<RichHistory
|
||||
richHistory={richHistory}
|
||||
@@ -125,6 +130,8 @@ function RichHistoryContainer(props: Props) {
|
||||
activeDatasourceInstance={activeDatasourceInstance}
|
||||
exploreId={exploreId}
|
||||
deleteRichHistory={deleteRichHistory}
|
||||
onClose={onClose}
|
||||
height={height}
|
||||
/>
|
||||
</Resizable>
|
||||
);
|
||||
|
||||
@@ -0,0 +1,69 @@
|
||||
import React from 'react';
|
||||
import { mount } from 'enzyme';
|
||||
import { ExploreId } from '../../../types/explore';
|
||||
import { SortOrder } from 'app/core/utils/explore';
|
||||
import { RichHistoryQueriesTab, Props } from './RichHistoryQueriesTab';
|
||||
import { Slider } from '@grafana/ui';
|
||||
|
||||
jest.mock('../state/selectors', () => ({ getExploreDatasources: jest.fn() }));
|
||||
|
||||
const setup = (propOverrides?: Partial<Props>) => {
|
||||
const props: Props = {
|
||||
queries: [],
|
||||
sortOrder: SortOrder.Ascending,
|
||||
activeDatasourceOnly: false,
|
||||
datasourceFilters: null,
|
||||
retentionPeriod: 14,
|
||||
height: 100,
|
||||
exploreId: ExploreId.left,
|
||||
onChangeSortOrder: jest.fn(),
|
||||
onSelectDatasourceFilters: jest.fn(),
|
||||
};
|
||||
|
||||
Object.assign(props, propOverrides);
|
||||
|
||||
const wrapper = mount(<RichHistoryQueriesTab {...props} />);
|
||||
return wrapper;
|
||||
};
|
||||
|
||||
describe('RichHistoryQueriesTab', () => {
|
||||
describe('slider', () => {
|
||||
it('should render slider', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find(Slider)).toHaveLength(1);
|
||||
});
|
||||
it('should render slider with correct timerange', () => {
|
||||
const wrapper = setup();
|
||||
expect(
|
||||
wrapper
|
||||
.find('.label-slider')
|
||||
.at(1)
|
||||
.text()
|
||||
).toEqual('today');
|
||||
expect(
|
||||
wrapper
|
||||
.find('.label-slider')
|
||||
.at(2)
|
||||
.text()
|
||||
).toEqual('two weeks ago');
|
||||
});
|
||||
});
|
||||
|
||||
describe('sort options', () => {
|
||||
it('should render sorter', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ 'aria-label': 'Sort queries' })).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('select datasource', () => {
|
||||
it('should render select datasource if activeDatasourceOnly is false', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ 'aria-label': 'Filter datasources' })).toHaveLength(1);
|
||||
});
|
||||
it('should not render select datasource if activeDatasourceOnly is true', () => {
|
||||
const wrapper = setup({ activeDatasourceOnly: true });
|
||||
expect(wrapper.find({ 'aria-label': 'Filter datasources' })).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -8,7 +8,6 @@ import { RichHistoryQuery, ExploreId } from 'app/types/explore';
|
||||
// Utils
|
||||
import { stylesFactory, useTheme } from '@grafana/ui';
|
||||
import { GrafanaTheme, SelectableValue } from '@grafana/data';
|
||||
import { getExploreDatasources } from '../state/selectors';
|
||||
|
||||
import { SortOrder } from 'app/core/utils/explore';
|
||||
import {
|
||||
@@ -16,6 +15,7 @@ import {
|
||||
mapNumbertoTimeInSlider,
|
||||
createRetentionPeriodBoundary,
|
||||
mapQueriesToHeadings,
|
||||
createDatasourcesList,
|
||||
} from 'app/core/utils/richHistory';
|
||||
|
||||
// Components
|
||||
@@ -23,22 +23,24 @@ import RichHistoryCard from './RichHistoryCard';
|
||||
import { sortOrderOptions } from './RichHistory';
|
||||
import { Select, Slider } from '@grafana/ui';
|
||||
|
||||
interface Props {
|
||||
export interface Props {
|
||||
queries: RichHistoryQuery[];
|
||||
sortOrder: SortOrder;
|
||||
activeDatasourceOnly: boolean;
|
||||
datasourceFilters: SelectableValue[] | null;
|
||||
retentionPeriod: number;
|
||||
exploreId: ExploreId;
|
||||
height: number;
|
||||
onChangeSortOrder: (sortOrder: SortOrder) => void;
|
||||
onSelectDatasourceFilters: (value: SelectableValue[] | null) => void;
|
||||
}
|
||||
|
||||
const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
const getStyles = stylesFactory((theme: GrafanaTheme, height: number) => {
|
||||
const bgColor = theme.isLight ? theme.colors.gray5 : theme.colors.dark4;
|
||||
|
||||
/* 134px is based on the width of the Query history tabs bar, so the content is aligned to right side of the tab */
|
||||
const cardWidth = '100% - 134px';
|
||||
const sliderHeight = `${height - 200}px`;
|
||||
return {
|
||||
container: css`
|
||||
display: flex;
|
||||
@@ -61,9 +63,9 @@ const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
margin-right: ${theme.spacing.sm};
|
||||
.slider {
|
||||
bottom: 10px;
|
||||
height: 200px;
|
||||
height: ${sliderHeight};
|
||||
width: 127px;
|
||||
padding: ${theme.spacing.xs} 0;
|
||||
padding: ${theme.spacing.sm} 0;
|
||||
}
|
||||
`,
|
||||
slider: css`
|
||||
@@ -127,20 +129,15 @@ export function RichHistoryQueriesTab(props: Props) {
|
||||
activeDatasourceOnly,
|
||||
retentionPeriod,
|
||||
exploreId,
|
||||
height,
|
||||
} = props;
|
||||
|
||||
const [sliderRetentionFilter, setSliderRetentionFilter] = useState<[number, number]>([0, retentionPeriod]);
|
||||
|
||||
const theme = useTheme();
|
||||
const styles = getStyles(theme);
|
||||
const listOfDsNamesWithQueries = uniqBy(queries, 'datasourceName').map(d => d.datasourceName);
|
||||
|
||||
/* Display only explore datasoources, that have saved queries */
|
||||
const datasources = getExploreDatasources()
|
||||
?.filter(ds => listOfDsNamesWithQueries.includes(ds.name))
|
||||
.map(d => {
|
||||
return { value: d.value!, label: d.value!, imgUrl: d.meta.info.logos.small };
|
||||
});
|
||||
const styles = getStyles(theme, height);
|
||||
const datasourcesRetrievedFromQueryHistory = uniqBy(queries, 'datasourceName').map(d => d.datasourceName);
|
||||
const listOfDatasources = createDatasourcesList(datasourcesRetrievedFromQueryHistory);
|
||||
|
||||
const listOfDatasourceFilters = datasourceFilters?.map(d => d.value);
|
||||
const filteredQueriesByDatasource = datasourceFilters
|
||||
@@ -187,18 +184,19 @@ export function RichHistoryQueriesTab(props: Props) {
|
||||
<div className={styles.containerContent}>
|
||||
<div className={styles.selectors}>
|
||||
{!activeDatasourceOnly && (
|
||||
<div className={styles.multiselect}>
|
||||
<div aria-label="Filter datasources" className={styles.multiselect}>
|
||||
<Select
|
||||
isMulti={true}
|
||||
options={datasources}
|
||||
options={listOfDatasources}
|
||||
value={datasourceFilters}
|
||||
placeholder="Filter queries for specific datasources(s)"
|
||||
placeholder="Filter queries for specific data sources(s)"
|
||||
onChange={onSelectDatasourceFilters}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<div className={styles.sort}>
|
||||
<div aria-label="Sort queries" className={styles.sort}>
|
||||
<Select
|
||||
value={sortOrderOptions.filter(order => order.value === sortOrder)}
|
||||
options={sortOrderOptions}
|
||||
placeholder="Sort queries by"
|
||||
onChange={e => onChangeSortOrder(e.value as SortOrder)}
|
||||
@@ -211,9 +209,18 @@ export function RichHistoryQueriesTab(props: Props) {
|
||||
<div className={styles.heading}>
|
||||
{heading} <span className={styles.queries}>{mappedQueriesToHeadings[heading].length} queries</span>
|
||||
</div>
|
||||
{mappedQueriesToHeadings[heading].map((q: RichHistoryQuery) => (
|
||||
<RichHistoryCard query={q} key={q.ts} exploreId={exploreId} />
|
||||
))}
|
||||
{mappedQueriesToHeadings[heading].map((q: RichHistoryQuery) => {
|
||||
const idx = listOfDatasources.findIndex(d => d.label === q.datasourceName);
|
||||
return (
|
||||
<RichHistoryCard
|
||||
query={q}
|
||||
key={q.ts}
|
||||
exploreId={exploreId}
|
||||
dsImg={listOfDatasources[idx].imgUrl}
|
||||
isRemoved={listOfDatasources[idx].isRemoved}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
|
||||
@@ -0,0 +1,46 @@
import React from 'react';
import { mount } from 'enzyme';
import { RichHistorySettings, RichHistorySettingsProps } from './RichHistorySettings';
import { Forms } from '@grafana/ui';

const setup = (propOverrides?: Partial<RichHistorySettingsProps>) => {
  const props: RichHistorySettingsProps = {
    retentionPeriod: 14,
    starredTabAsFirstTab: true,
    activeDatasourceOnly: false,
    onChangeRetentionPeriod: jest.fn(),
    toggleStarredTabAsFirstTab: jest.fn(),
    toggleactiveDatasourceOnly: jest.fn(),
    deleteRichHistory: jest.fn(),
  };

  Object.assign(props, propOverrides);

  const wrapper = mount(<RichHistorySettings {...props} />);
  return wrapper;
};

describe('RichHistorySettings', () => {
  it('should render component with correct retention period', () => {
    const wrapper = setup();
    expect(wrapper.find(Forms.Select).text()).toEqual('2 weeks');
  });
  it('should render component with correctly checked starredTabAsFirstTab settings', () => {
    const wrapper = setup();
    expect(
      wrapper
        .find(Forms.Switch)
        .at(0)
        .prop('value')
    ).toBe(true);
  });
  it('should render component with correctly not checked toggleactiveDatasourceOnly settings', () => {
    const wrapper = setup();
    expect(
      wrapper
        .find(Forms.Switch)
        .at(1)
        .prop('value')
    ).toBe(false);
  });
});
@@ -3,8 +3,9 @@ import { css } from 'emotion';
|
||||
import { stylesFactory, useTheme, Forms } from '@grafana/ui';
|
||||
import { GrafanaTheme, AppEvents } from '@grafana/data';
|
||||
import appEvents from 'app/core/app_events';
|
||||
import { CoreEvents } from 'app/types';
|
||||
|
||||
interface RichHistorySettingsProps {
|
||||
export interface RichHistorySettingsProps {
|
||||
retentionPeriod: number;
|
||||
starredTabAsFirstTab: boolean;
|
||||
activeDatasourceOnly: boolean;
|
||||
@@ -57,6 +58,19 @@ export function RichHistorySettings(props: RichHistorySettingsProps) {
|
||||
const styles = getStyles(theme);
|
||||
const selectedOption = retentionPeriodOptions.find(v => v.value === retentionPeriod);
|
||||
|
||||
const onDelete = () => {
|
||||
appEvents.emit(CoreEvents.showConfirmModal, {
|
||||
title: 'Delete',
|
||||
text: 'Are you sure you want to permanently delete your query history?',
|
||||
yesText: 'Delete',
|
||||
icon: 'fa-trash',
|
||||
onConfirm: () => {
|
||||
deleteRichHistory();
|
||||
appEvents.emit(AppEvents.alertSuccess, ['Query history deleted']);
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={styles.container}>
|
||||
<Forms.Field
|
||||
@@ -78,10 +92,10 @@ export function RichHistorySettings(props: RichHistorySettingsProps) {
|
||||
<div className={styles.label}>Change the default active tab from “Query history” to “Starred”</div>
|
||||
</div>
|
||||
</Forms.Field>
|
||||
<Forms.Field label="Datasource behaviour" description=" " className="space-between">
|
||||
<Forms.Field label="Data source behaviour" description=" " className="space-between">
|
||||
<div className={styles.switch}>
|
||||
<Forms.Switch value={activeDatasourceOnly} onChange={toggleactiveDatasourceOnly}></Forms.Switch>
|
||||
<div className={styles.label}>Only show queries for datasource currently active in Explore</div>
|
||||
<div className={styles.label}>Only show queries for data source currently active in Explore</div>
|
||||
</div>
|
||||
</Forms.Field>
|
||||
<div
|
||||
@@ -98,13 +112,7 @@ export function RichHistorySettings(props: RichHistorySettingsProps) {
|
||||
>
|
||||
Delete all of your query history, permanently.
|
||||
</div>
|
||||
<Forms.Button
|
||||
variant="destructive"
|
||||
onClick={() => {
|
||||
deleteRichHistory();
|
||||
appEvents.emit(AppEvents.alertSuccess, ['Query history deleted']);
|
||||
}}
|
||||
>
|
||||
<Forms.Button variant="destructive" onClick={onDelete}>
|
||||
Clear query history
|
||||
</Forms.Button>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,45 @@
|
||||
import React from 'react';
|
||||
import { mount } from 'enzyme';
|
||||
import { ExploreId } from '../../../types/explore';
|
||||
import { SortOrder } from 'app/core/utils/explore';
|
||||
import { RichHistoryStarredTab, Props } from './RichHistoryStarredTab';
|
||||
|
||||
jest.mock('../state/selectors', () => ({ getExploreDatasources: jest.fn() }));
|
||||
|
||||
const setup = (propOverrides?: Partial<Props>) => {
|
||||
const props: Props = {
|
||||
queries: [],
|
||||
sortOrder: SortOrder.Ascending,
|
||||
activeDatasourceOnly: false,
|
||||
datasourceFilters: null,
|
||||
exploreId: ExploreId.left,
|
||||
onChangeSortOrder: jest.fn(),
|
||||
onSelectDatasourceFilters: jest.fn(),
|
||||
};
|
||||
|
||||
Object.assign(props, propOverrides);
|
||||
|
||||
const wrapper = mount(<RichHistoryStarredTab {...props} />);
|
||||
return wrapper;
|
||||
};
|
||||
|
||||
describe('RichHistoryStarredTab', () => {
|
||||
describe('sorter', () => {
|
||||
it('should render sorter', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ 'aria-label': 'Sort queries' })).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('select datasource', () => {
|
||||
it('should render select datasource if activeDatasourceOnly is false', () => {
|
||||
const wrapper = setup();
|
||||
expect(wrapper.find({ 'aria-label': 'Filter datasources' })).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should not render select datasource if activeDatasourceOnly is true', () => {
|
||||
const wrapper = setup({ activeDatasourceOnly: true });
|
||||
expect(wrapper.find({ 'aria-label': 'Filter datasources' })).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -8,17 +8,16 @@ import { RichHistoryQuery, ExploreId } from 'app/types/explore';
|
||||
// Utils
|
||||
import { stylesFactory, useTheme } from '@grafana/ui';
|
||||
import { GrafanaTheme, SelectableValue } from '@grafana/data';
|
||||
import { getExploreDatasources } from '../state/selectors';
|
||||
|
||||
import { SortOrder } from '../../../core/utils/explore';
|
||||
import { sortQueries } from '../../../core/utils/richHistory';
|
||||
import { sortQueries, createDatasourcesList } from '../../../core/utils/richHistory';
|
||||
|
||||
// Components
|
||||
import RichHistoryCard from './RichHistoryCard';
|
||||
import { sortOrderOptions } from './RichHistory';
|
||||
import { Select } from '@grafana/ui';
|
||||
|
||||
interface Props {
|
||||
export interface Props {
|
||||
queries: RichHistoryQuery[];
|
||||
sortOrder: SortOrder;
|
||||
activeDatasourceOnly: boolean;
|
||||
@@ -33,17 +32,6 @@ const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
return {
|
||||
container: css`
|
||||
display: flex;
|
||||
.label-slider {
|
||||
font-size: ${theme.typography.size.sm};
|
||||
&:last-of-type {
|
||||
margin-top: ${theme.spacing.lg};
|
||||
}
|
||||
&:first-of-type {
|
||||
margin-top: ${theme.spacing.sm};
|
||||
font-weight: ${theme.typography.weight.semibold};
|
||||
margin-bottom: ${theme.spacing.xs};
|
||||
}
|
||||
}
|
||||
`,
|
||||
containerContent: css`
|
||||
width: 100%;
|
||||
@@ -63,19 +51,18 @@ const getStyles = stylesFactory((theme: GrafanaTheme) => {
|
||||
sort: css`
|
||||
width: 170px;
|
||||
`,
|
||||
sessionName: css`
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
justify-content: flex-start;
|
||||
feedback: css`
|
||||
height: 60px;
|
||||
margin-top: ${theme.spacing.lg};
|
||||
h4 {
|
||||
margin: 0 10px 0 0;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
font-weight: ${theme.typography.weight.light};
|
||||
font-size: ${theme.typography.size.sm};
|
||||
a {
|
||||
font-weight: ${theme.typography.weight.semibold};
|
||||
margin-left: ${theme.spacing.xxs};
|
||||
}
|
||||
`,
|
||||
heading: css`
|
||||
font-size: ${theme.typography.heading.h4};
|
||||
margin: ${theme.spacing.md} ${theme.spacing.xxs} ${theme.spacing.sm} ${theme.spacing.xxs};
|
||||
`,
|
||||
};
|
||||
});
|
||||
|
||||
@@ -92,18 +79,17 @@ export function RichHistoryStarredTab(props: Props) {
|
||||
|
||||
const theme = useTheme();
|
||||
const styles = getStyles(theme);
|
||||
const listOfDsNamesWithQueries = uniqBy(queries, 'datasourceName').map(d => d.datasourceName);
|
||||
const exploreDatasources = getExploreDatasources()
|
||||
?.filter(ds => listOfDsNamesWithQueries.includes(ds.name))
|
||||
.map(d => {
|
||||
return { value: d.value!, label: d.value!, imgUrl: d.meta.info.logos.small };
|
||||
});
|
||||
|
||||
const datasourcesRetrievedFromQueryHistory = uniqBy(queries, 'datasourceName').map(d => d.datasourceName);
|
||||
const listOfDatasources = createDatasourcesList(datasourcesRetrievedFromQueryHistory);
|
||||
|
||||
const listOfDatasourceFilters = datasourceFilters?.map(d => d.value);
|
||||
|
||||
const starredQueries = queries.filter(q => q.starred === true);
|
||||
const starredQueriesFilteredByDatasource = datasourceFilters
|
||||
? starredQueries?.filter(q => listOfDatasourceFilters?.includes(q.datasourceName))
|
||||
: starredQueries;
|
||||
|
||||
const sortedStarredQueries = sortQueries(starredQueriesFilteredByDatasource, sortOrder);
|
||||
|
||||
return (
|
||||
@@ -111,27 +97,41 @@ export function RichHistoryStarredTab(props: Props) {
|
||||
<div className={styles.containerContent}>
|
||||
<div className={styles.selectors}>
|
||||
{!activeDatasourceOnly && (
|
||||
<div className={styles.multiselect}>
|
||||
<div aria-label="Filter datasources" className={styles.multiselect}>
|
||||
<Select
|
||||
isMulti={true}
|
||||
options={exploreDatasources}
|
||||
options={listOfDatasources}
|
||||
value={datasourceFilters}
|
||||
placeholder="Filter queries for specific datasources(s)"
|
||||
placeholder="Filter queries for specific data sources(s)"
|
||||
onChange={onSelectDatasourceFilters}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<div className={styles.sort}>
|
||||
<div aria-label="Sort queries" className={styles.sort}>
|
||||
<Select
|
||||
options={sortOrderOptions}
|
||||
value={sortOrderOptions.filter(order => order.value === sortOrder)}
|
||||
placeholder="Sort queries by"
|
||||
onChange={e => onChangeSortOrder(e.value as SortOrder)}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
{sortedStarredQueries.map(q => {
|
||||
return <RichHistoryCard query={q} key={q.ts} exploreId={exploreId} />;
|
||||
const idx = listOfDatasources.findIndex(d => d.label === q.datasourceName);
|
||||
return (
|
||||
<RichHistoryCard
|
||||
query={q}
|
||||
key={q.ts}
|
||||
exploreId={exploreId}
|
||||
dsImg={listOfDatasources[idx].imgUrl}
|
||||
isRemoved={listOfDatasources[idx].isRemoved}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
<div className={styles.feedback}>
|
||||
Query history is a beta feature. The history is local to your browser and is not shared with others.
|
||||
<a href="https://github.com/grafana/grafana/issues/new/choose">Feedback?</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -43,6 +43,7 @@ import {
  deleteAllFromRichHistory,
  updateStarredInRichHistory,
  updateCommentInRichHistory,
  deleteQueryInRichHistory,
  getQueryDisplayText,
  getRichHistory,
} from 'app/core/utils/richHistory';
@@ -439,20 +440,21 @@ export const runQueries = (exploreId: ExploreId): ThunkResult<void> => {

    stopQueryState(querySubscription);

    const datasourceId = datasourceInstance.meta.id;

    const queryOptions: QueryOptions = {
      minInterval,
      // maxDataPoints is used in:
      // Loki - used for logs streaming for buffer size, with undefined it falls back to datasource config if it supports that.
      // Elastic - limits the number of datapoints for the counts query and for logs it has hardcoded limit.
      // Influx - used to correctly display logs in graph
      maxDataPoints: mode === ExploreMode.Logs && datasourceInstance.name === 'Loki' ? undefined : containerWidth,
      maxDataPoints: mode === ExploreMode.Logs && datasourceId === 'loki' ? undefined : containerWidth,
      liveStreaming: live,
      showingGraph,
      showingTable,
      mode,
    };

    const datasourceId = datasourceInstance.meta.id;
    const datasourceName = exploreItemState.requestedDatasourceName;

    const transaction = buildQueryTransaction(queries, queryOptions, range, scanning);
@@ -524,6 +526,9 @@ export const updateRichHistory = (ts: number, property: string, updatedProperty?
    if (property === 'comment') {
      nextRichHistory = updateCommentInRichHistory(getState().explore.richHistory, ts, updatedProperty);
    }
    if (property === 'delete') {
      nextRichHistory = deleteQueryInRichHistory(getState().explore.richHistory, ts);
    }
    dispatch(richHistoryUpdatedAction({ richHistory: nextRichHistory }));
  };
};
@@ -2,7 +2,7 @@ import AzureMonitorDatasource from '../datasource';
|
||||
import FakeSchemaData from './__mocks__/schema';
|
||||
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { KustoSchema } from '../types';
|
||||
import { KustoSchema, AzureLogsVariable } from '../types';
|
||||
import { toUtc } from '@grafana/data';
|
||||
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
|
||||
|
||||
@@ -283,53 +283,129 @@ describe('AzureLogAnalyticsDatasource', () => {
|
||||
});
|
||||
|
||||
describe('When performing metricFindQuery', () => {
|
||||
const tableResponseWithOneColumn = {
|
||||
tables: [
|
||||
{
|
||||
name: 'PrimaryResult',
|
||||
columns: [
|
||||
{
|
||||
name: 'Category',
|
||||
type: 'string',
|
||||
},
|
||||
],
|
||||
rows: [['Administrative'], ['Policy']],
|
||||
},
|
||||
],
|
||||
};
|
||||
let queryResults: AzureLogsVariable[];
|
||||
|
||||
const workspaceResponse = {
|
||||
const workspacesResponse = {
|
||||
value: [
|
||||
{
|
||||
name: 'aworkspace',
|
||||
name: 'workspace1',
|
||||
properties: {
|
||||
source: 'Azure',
|
||||
customerId: 'abc1b44e-3e57-4410-b027-6cc0ae6dee67',
|
||||
customerId: 'eeee4fde-1aaa-4d60-9974-eeee562ffaa1',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'workspace2',
|
||||
properties: {
|
||||
customerId: 'eeee4fde-1aaa-4d60-9974-eeee562ffaa2',
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let queryResults: any[];
|
||||
describe('and is the workspaces() macro', () => {
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
expect(options.url).toContain('xxx');
|
||||
return Promise.resolve({ data: workspacesResponse, status: 200 });
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > -1) {
|
||||
return Promise.resolve({ data: workspaceResponse, status: 200 });
|
||||
} else {
|
||||
return Promise.resolve({ data: tableResponseWithOneColumn, status: 200 });
|
||||
}
|
||||
queryResults = await ctx.ds.metricFindQuery('workspaces()');
|
||||
});
|
||||
|
||||
queryResults = await ctx.ds.metricFindQuery('workspace("aworkspace").AzureActivity | distinct Category');
|
||||
it('should return a list of workspaces', () => {
|
||||
expect(queryResults.length).toBe(2);
|
||||
expect(queryResults[0].text).toBe('workspace1');
|
||||
expect(queryResults[0].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa1');
|
||||
expect(queryResults[1].text).toBe('workspace2');
|
||||
expect(queryResults[1].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa2');
|
||||
});
|
||||
});
|
||||
|
||||
it('should return a list of categories in the correct format', () => {
|
||||
expect(queryResults.length).toBe(2);
|
||||
expect(queryResults[0].text).toBe('Administrative');
|
||||
expect(queryResults[0].value).toBe('Administrative');
|
||||
expect(queryResults[1].text).toBe('Policy');
|
||||
expect(queryResults[1].value).toBe('Policy');
|
||||
describe('and is the workspaces() macro with the subscription parameter', () => {
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
expect(options.url).toContain('11112222-eeee-4949-9b2d-9106972f9123');
|
||||
return Promise.resolve({ data: workspacesResponse, status: 200 });
|
||||
});
|
||||
|
||||
queryResults = await ctx.ds.metricFindQuery('workspaces(11112222-eeee-4949-9b2d-9106972f9123)');
|
||||
});
|
||||
|
||||
it('should return a list of workspaces', () => {
|
||||
expect(queryResults.length).toBe(2);
|
||||
expect(queryResults[0].text).toBe('workspace1');
|
||||
expect(queryResults[0].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa1');
|
||||
expect(queryResults[1].text).toBe('workspace2');
|
||||
expect(queryResults[1].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('and is the workspaces() macro with the subscription parameter quoted', () => {
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
expect(options.url).toContain('11112222-eeee-4949-9b2d-9106972f9123');
|
||||
return Promise.resolve({ data: workspacesResponse, status: 200 });
|
||||
});
|
||||
|
||||
queryResults = await ctx.ds.metricFindQuery('workspaces("11112222-eeee-4949-9b2d-9106972f9123")');
|
||||
});
|
||||
|
||||
it('should return a list of workspaces', () => {
|
||||
expect(queryResults.length).toBe(2);
|
||||
expect(queryResults[0].text).toBe('workspace1');
|
||||
expect(queryResults[0].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa1');
|
||||
expect(queryResults[1].text).toBe('workspace2');
|
||||
expect(queryResults[1].value).toBe('eeee4fde-1aaa-4d60-9974-eeee562ffaa2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('and is a custom query', () => {
|
||||
const tableResponseWithOneColumn = {
|
||||
tables: [
|
||||
{
|
||||
name: 'PrimaryResult',
|
||||
columns: [
|
||||
{
|
||||
name: 'Category',
|
||||
type: 'string',
|
||||
},
|
||||
],
|
||||
rows: [['Administrative'], ['Policy']],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const workspaceResponse = {
|
||||
value: [
|
||||
{
|
||||
name: 'aworkspace',
|
||||
properties: {
|
||||
source: 'Azure',
|
||||
customerId: 'abc1b44e-3e57-4410-b027-6cc0ae6dee67',
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > -1) {
|
||||
return Promise.resolve({ data: workspaceResponse, status: 200 });
|
||||
} else {
|
||||
return Promise.resolve({ data: tableResponseWithOneColumn, status: 200 });
|
||||
}
|
||||
});
|
||||
|
||||
queryResults = await ctx.ds.metricFindQuery('workspace("aworkspace").AzureActivity | distinct Category');
|
||||
});
|
||||
|
||||
it('should return a list of categories in the correct format', () => {
|
||||
expect(queryResults.length).toBe(2);
|
||||
expect(queryResults[0].text).toBe('Administrative');
|
||||
expect(queryResults[0].value).toBe('Administrative');
|
||||
expect(queryResults[1].text).toBe('Policy');
|
||||
expect(queryResults[1].value).toBe('Policy');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
import _ from 'lodash';
import LogAnalyticsQuerystringBuilder from '../log_analytics/querystring_builder';
import ResponseParser from './response_parser';
import { AzureMonitorQuery, AzureDataSourceJsonData } from '../types';
import { AzureMonitorQuery, AzureDataSourceJsonData, AzureLogsVariable } from '../types';
import { DataQueryRequest, DataSourceInstanceSettings } from '@grafana/data';
import { getBackendSrv } from '@grafana/runtime';
import { TemplateSrv } from 'app/features/templating/template_srv';
@@ -47,7 +47,7 @@ export default class AzureLogAnalyticsDatasource {
    }
  }

  getWorkspaces(subscription: string) {
  getWorkspaces(subscription: string): Promise<AzureLogsVariable[]> {
    const subscriptionId = this.templateSrv.replace(subscription || this.subscriptionId);

    const workspaceListUrl =
@@ -118,6 +118,16 @@ export default class AzureLogAnalyticsDatasource {
  }

  metricFindQuery(query: string) {
    const workspacesQuery = query.match(/^workspaces\(\)/i);
    if (workspacesQuery) {
      return this.getWorkspaces(this.subscriptionId);
    }

    const workspacesQueryWithSub = query.match(/^workspaces\(["']?([^\)]+?)["']?\)/i);
    if (workspacesQueryWithSub) {
      return this.getWorkspaces((workspacesQueryWithSub[1] || '').trim());
    }

    return this.getDefaultOrFirstWorkspace().then((workspace: any) => {
      const queries: any[] = this.buildQuery(query, null, workspace);
@@ -160,7 +160,7 @@ describe('AzureMonitorDatasource', () => {
    };

    beforeEach(() => {
      datasourceRequestMock.mockImplementation((options: { url: string }) => Promise.resolve(response));
      datasourceRequestMock.mockImplementation(() => Promise.resolve(response));
    });

    it('should return a list of subscriptions', () => {
@@ -5,6 +5,7 @@ import _ from 'lodash';
|
||||
import GraphiteQuery from './graphite_query';
|
||||
import { QueryCtrl } from 'app/plugins/sdk';
|
||||
import appEvents from 'app/core/app_events';
|
||||
import { promiseToDigest } from 'app/core/utils/promiseToDigest';
|
||||
import { auto } from 'angular';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { AppEvents } from '@grafana/data';
|
||||
@@ -59,7 +60,8 @@ export class GraphiteQueryCtrl extends QueryCtrl {
|
||||
});
|
||||
|
||||
const checkOtherSegmentsIndex = this.queryModel.checkOtherSegmentsIndex || 0;
|
||||
this.checkOtherSegments(checkOtherSegmentsIndex, modifyLastSegment);
|
||||
|
||||
promiseToDigest(this.$scope)(this.checkOtherSegments(checkOtherSegmentsIndex, modifyLastSegment));
|
||||
|
||||
if (this.queryModel.seriesByTagUsed) {
|
||||
this.fixTagSegments();
|
||||
@@ -207,20 +209,24 @@ export class GraphiteQueryCtrl extends QueryCtrl {
|
||||
const tag = removeTagPrefix(segment.value);
|
||||
this.pause();
|
||||
this.addSeriesByTagFunc(tag);
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
|
||||
if (segment.expandable) {
|
||||
return this.checkOtherSegments(segmentIndex + 1).then(() => {
|
||||
this.setSegmentFocus(segmentIndex + 1);
|
||||
this.targetChanged();
|
||||
});
|
||||
return promiseToDigest(this.$scope)(
|
||||
this.checkOtherSegments(segmentIndex + 1).then(() => {
|
||||
this.setSegmentFocus(segmentIndex + 1);
|
||||
this.targetChanged();
|
||||
})
|
||||
);
|
||||
} else {
|
||||
this.spliceSegments(segmentIndex + 1);
|
||||
}
|
||||
|
||||
this.setSegmentFocus(segmentIndex + 1);
|
||||
this.targetChanged();
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
spliceSegments(index: any) {
|
||||
|
||||
@@ -3,6 +3,12 @@ import gfunc from '../gfunc';
import { GraphiteQueryCtrl } from '../query_ctrl';
import { TemplateSrvStub } from 'test/specs/helpers';

jest.mock('app/core/utils/promiseToDigest', () => ({
  promiseToDigest: (scope: any) => {
    return (p: Promise<any>) => p;
  },
}));

describe('GraphiteQueryCtrl', () => {
  const ctx = {
    datasource: {
@@ -1,7 +1,7 @@
|
||||
import cloneDeep from 'lodash/cloneDeep';
|
||||
import groupBy from 'lodash/groupBy';
|
||||
import { from, of, Observable, merge } from 'rxjs';
|
||||
import { tap } from 'rxjs/operators';
|
||||
import { from, of, Observable, forkJoin } from 'rxjs';
|
||||
import { map, mergeMap, mergeAll } from 'rxjs/operators';
|
||||
|
||||
import {
|
||||
LoadingState,
|
||||
@@ -12,7 +12,6 @@ import {
|
||||
DataSourceInstanceSettings,
|
||||
} from '@grafana/data';
|
||||
import { getDataSourceSrv } from '@grafana/runtime';
|
||||
import { mergeMap, map } from 'rxjs/operators';
|
||||
|
||||
export const MIXED_DATASOURCE_NAME = '-- Mixed --';
|
||||
|
||||
@@ -51,64 +50,46 @@ export class MixedDatasource extends DataSourceApi<DataQuery> {
|
||||
}
|
||||
|
||||
batchQueries(mixed: BatchedQueries[], request: DataQueryRequest<DataQuery>): Observable<DataQueryResponse> {
|
||||
const observables: Array<Observable<DataQueryResponse>> = [];
|
||||
let runningSubRequests = 0;
|
||||
const runningQueries = mixed.filter(this.isQueryable).map((query, i) =>
|
||||
from(query.datasource).pipe(
|
||||
mergeMap((api: DataSourceApi) => {
|
||||
const dsRequest = cloneDeep(request);
|
||||
dsRequest.requestId = `mixed-${i}-${dsRequest.requestId || ''}`;
|
||||
dsRequest.targets = query.targets;
|
||||
|
||||
for (let i = 0; i < mixed.length; i++) {
|
||||
const query = mixed[i];
|
||||
if (!query.targets || !query.targets.length) {
|
||||
continue;
|
||||
}
|
||||
const observable = from(query.datasource).pipe(
|
||||
mergeMap((dataSourceApi: DataSourceApi) => {
|
||||
const datasourceRequest = cloneDeep(request);
|
||||
|
||||
datasourceRequest.requestId = `mixed-${i}-${datasourceRequest.requestId || ''}`;
|
||||
datasourceRequest.targets = query.targets;
|
||||
|
||||
runningSubRequests++;
|
||||
let hasCountedAsDone = false;
|
||||
|
||||
return from(dataSourceApi.query(datasourceRequest)).pipe(
|
||||
tap(
|
||||
(response: DataQueryResponse) => {
|
||||
if (
|
||||
hasCountedAsDone ||
|
||||
response.state === LoadingState.Streaming ||
|
||||
response.state === LoadingState.Loading
|
||||
) {
|
||||
return;
|
||||
}
|
||||
runningSubRequests--;
|
||||
hasCountedAsDone = true;
|
||||
},
|
||||
() => {
|
||||
if (hasCountedAsDone) {
|
||||
return;
|
||||
}
|
||||
hasCountedAsDone = true;
|
||||
runningSubRequests--;
|
||||
}
|
||||
),
|
||||
map((response: DataQueryResponse) => {
|
||||
return from(api.query(dsRequest)).pipe(
|
||||
map(response => {
|
||||
return {
|
||||
...response,
|
||||
data: response.data || [],
|
||||
state: runningSubRequests === 0 ? LoadingState.Done : LoadingState.Loading,
|
||||
state: LoadingState.Loading,
|
||||
key: `mixed-${i}-${response.key || ''}`,
|
||||
} as DataQueryResponse;
|
||||
})
|
||||
);
|
||||
})
|
||||
);
|
||||
)
|
||||
);
|
||||
|
||||
observables.push(observable);
|
||||
}
|
||||
|
||||
return merge(...observables);
|
||||
return forkJoin(runningQueries).pipe(map(this.markAsDone), mergeAll());
|
||||
}
|
||||
|
||||
testDatasource() {
|
||||
return Promise.resolve({});
|
||||
}
|
||||
|
||||
private isQueryable(query: BatchedQueries): boolean {
|
||||
return query && Array.isArray(query.targets) && query.targets.length > 0;
|
||||
}
|
||||
|
||||
private markAsDone(responses: DataQueryResponse[]): DataQueryResponse[] {
|
||||
const { length } = responses;
|
||||
|
||||
if (length === 0) {
|
||||
return responses;
|
||||
}
|
||||
|
||||
responses[length - 1].state = LoadingState.Done;
|
||||
return responses;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,4 @@
import DatasourceSrv from 'app/features/plugins/datasource_srv';
import { getDatasourceSrv } from 'app/features/plugins/datasource_srv';
import StackdriverDatasource from './datasource';
import { AuthType, authTypes } from './types';

export interface JWT {
@@ -21,10 +19,9 @@ export class StackdriverConfigCtrl {
  authenticationTypes: Array<{ key: AuthType; value: string }>;
  defaultAuthenticationType: string;
  name: string;
  gceError: string;

  /** @ngInject */
  constructor(datasourceSrv: DatasourceSrv, private $scope: any) {
  constructor(datasourceSrv: DatasourceSrv) {
    this.defaultAuthenticationType = AuthType.JWT;
    this.datasourceSrv = datasourceSrv;
    this.name = this.meta.name;
@@ -98,19 +95,4 @@ export class StackdriverConfigCtrl {
    this.current.secureJsonData = {};
    this.current.secureJsonFields = {};
  }

  async loadGCEDefaultAccount() {
    this.gceError = '';
    const ds = (await getDatasourceSrv().loadDatasource(this.name)) as StackdriverDatasource;
    try {
      const defaultProject = await ds.getGCEDefaultProject();
      this.$scope.$apply(() => {
        this.current.jsonData.gceDefaultProject = defaultProject;
      });
    } catch (error) {
      this.$scope.$apply(() => {
        this.gceError = error;
      });
    }
  }
}
@@ -22,6 +22,7 @@ export default class StackdriverDatasource extends DataSourceApi<StackdriverQuer
  authenticationType: string;
  queryPromise: Promise<any>;
  metricTypesCache: { [key: string]: MetricDescriptor[] };
  gceDefaultProject: string;

  /** @ngInject */
  constructor(
@@ -229,6 +230,7 @@ export default class StackdriverDatasource extends DataSourceApi<StackdriverQuer
    let status, message;
    const defaultErrorMessage = 'Cannot connect to Stackdriver API';
    try {
      await this.ensureGCEDefaultProject();
      const path = `v3/projects/${this.getDefaultProject()}/metricDescriptors`;
      const response = await this.doRequest(`${this.baseUrl}${path}`);
      if (response.status === 200) {
@@ -64,19 +64,6 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="gf-form-inline" ng-if="ctrl.current.jsonData.authenticationType !== ctrl.defaultAuthenticationType">
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-10">Project</span>
|
||||
<input class="gf-form-input width-40" disabled type="text" ng-model="ctrl.current.jsonData.gceDefaultProject" />
|
||||
</div>
|
||||
|
||||
<div class="gf-form width-18">
|
||||
<a class="btn btn-secondary gf-form-btn" style="margin-top: 2px;" href="#" ng-click="ctrl.loadGCEDefaultAccount()"
|
||||
>{{ctrl.current.jsonData.gceDefaultProject ? 'Reload project name' : 'Load project name'}}
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div
|
||||
ng-if="ctrl.current.jsonData.authenticationType === ctrl.defaultAuthenticationType && !ctrl.current.jsonData.clientEmail && !ctrl.inputDataValid"
|
||||
>
|
||||
@@ -146,20 +133,10 @@
|
||||
<i class="fa fa-save"></i> Do not forget to save your changes after uploading a file.
|
||||
</p>
|
||||
|
||||
<p
|
||||
class="gf-form-label"
|
||||
ng-show="ctrl.current.jsonData.authenticationType !== ctrl.defaultAuthenticationType && !ctrl.current.jsonData.gceDefaultProject"
|
||||
>
|
||||
<i class="fa fa-warning"></i> Before saving, load the Project name that is associated with the default service account
|
||||
</p>
|
||||
|
||||
<div class="gf-form" ng-if="ctrl.gceError">
|
||||
<pre class="gf-form-pre alert alert-error">{{ctrl.gceError}}</pre>
|
||||
</div>
|
||||
|
||||
<p
|
||||
class="gf-form-label"
|
||||
ng-show="ctrl.current.jsonData.authenticationType !== ctrl.defaultAuthenticationType && ctrl.current.jsonData.gceDefaultProject"
|
||||
>
|
||||
<p class="gf-form-label" ng-show="ctrl.current.jsonData.authenticationType !== ctrl.defaultAuthenticationType">
|
||||
<i class="fa fa-save"></i> Verify GCE default service account by clicking Save & Test
|
||||
</p>
|
||||
|
||||
@@ -102,7 +102,7 @@ class GraphElement {

    this.annotations = this.ctrl.annotations || [];
    this.buildFlotPairs(this.data);
    const graphHeight = this.elem.height();
    const graphHeight = this.ctrl.height;
    updateLegendValues(this.data, this.panel, graphHeight);

    if (!this.panel.legend.show) {

@@ -58,7 +58,7 @@
<div ng-if="style.type === 'number'">
  <div class="gf-form">
    <label class="gf-form-label width-10">Unit</label>
    <div class="gf-form-dropdown-typeahead width-16" ng-model="style.unit" dropdown-typeahead2="editor.unitFormats" dropdown-typeahead-on-select="editor.setUnitFormat(style, $subItem)"></div>
    <unit-picker onChange="editor.setUnitFormat(style)" value="style.unit" width="16"></unit-picker>
  </div>
  <div class="gf-form">
    <label class="gf-form-label width-10">Decimals</label>

@@ -73,9 +73,11 @@ export class ColumnOptionsCtrl {
    this.panelCtrl.render();
  }

  setUnitFormat(column: any, subItem: any) {
    column.unit = subItem.value;
    this.panelCtrl.render();
  setUnitFormat(column: any) {
    return (value: any) => {
      column.unit = value;
      this.panelCtrl.render();
    };
  }

  addColumnStyle() {
@@ -8,8 +8,8 @@ EXTRA_OPTS="$@"

# Right now we hack this into the publish script.
# Eventually we might want to keep a list of all previous releases somewhere.
_releaseNoteUrl="https://community.grafana.com/t/release-notes-v6-0-x/14010"
_whatsNewUrl="http://docs.grafana.org/guides/whats-new-in-v6-0/"
_releaseNoteUrl="https://community.grafana.com/t/release-notes-v6-7-x/27119/2"
_whatsNewUrl="https://grafana.com/docs/grafana/latest/guides/whats-new-in-v6-7/"

./scripts/build/release_publisher/release_publisher \
  --wn "${_whatsNewUrl}" \
89  vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go  (generated, vendored, new file)
@@ -0,0 +1,89 @@
|
||||
// Copyright 2017, The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE.md file.
|
||||
|
||||
// Package cmpopts provides common options for the cmp package.
|
||||
package cmpopts
|
||||
|
||||
import (
|
||||
"math"
|
||||
"reflect"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
)
|
||||
|
||||
func equateAlways(_, _ interface{}) bool { return true }
|
||||
|
||||
// EquateEmpty returns a Comparer option that determines all maps and slices
|
||||
// with a length of zero to be equal, regardless of whether they are nil.
|
||||
//
|
||||
// EquateEmpty can be used in conjunction with SortSlices and SortMaps.
|
||||
func EquateEmpty() cmp.Option {
|
||||
return cmp.FilterValues(isEmpty, cmp.Comparer(equateAlways))
|
||||
}
|
||||
|
||||
func isEmpty(x, y interface{}) bool {
|
||||
vx, vy := reflect.ValueOf(x), reflect.ValueOf(y)
|
||||
return (x != nil && y != nil && vx.Type() == vy.Type()) &&
|
||||
(vx.Kind() == reflect.Slice || vx.Kind() == reflect.Map) &&
|
||||
(vx.Len() == 0 && vy.Len() == 0)
|
||||
}
|
||||
|
||||
// EquateApprox returns a Comparer option that determines float32 or float64
|
||||
// values to be equal if they are within a relative fraction or absolute margin.
|
||||
// This option is not used when either x or y is NaN or infinite.
|
||||
//
|
||||
// The fraction determines that the difference of two values must be within the
|
||||
// smaller fraction of the two values, while the margin determines that the two
|
||||
// values must be within some absolute margin.
|
||||
// To express only a fraction or only a margin, use 0 for the other parameter.
|
||||
// The fraction and margin must be non-negative.
|
||||
//
|
||||
// The mathematical expression used is equivalent to:
|
||||
// |x-y| ≤ max(fraction*min(|x|, |y|), margin)
|
||||
//
|
||||
// EquateApprox can be used in conjunction with EquateNaNs.
|
||||
func EquateApprox(fraction, margin float64) cmp.Option {
|
||||
if margin < 0 || fraction < 0 || math.IsNaN(margin) || math.IsNaN(fraction) {
|
||||
panic("margin or fraction must be a non-negative number")
|
||||
}
|
||||
a := approximator{fraction, margin}
|
||||
return cmp.Options{
|
||||
cmp.FilterValues(areRealF64s, cmp.Comparer(a.compareF64)),
|
||||
cmp.FilterValues(areRealF32s, cmp.Comparer(a.compareF32)),
|
||||
}
|
||||
}
|
||||
|
||||
type approximator struct{ frac, marg float64 }
|
||||
|
||||
func areRealF64s(x, y float64) bool {
|
||||
return !math.IsNaN(x) && !math.IsNaN(y) && !math.IsInf(x, 0) && !math.IsInf(y, 0)
|
||||
}
|
||||
func areRealF32s(x, y float32) bool {
|
||||
return areRealF64s(float64(x), float64(y))
|
||||
}
|
||||
func (a approximator) compareF64(x, y float64) bool {
|
||||
relMarg := a.frac * math.Min(math.Abs(x), math.Abs(y))
|
||||
return math.Abs(x-y) <= math.Max(a.marg, relMarg)
|
||||
}
|
||||
func (a approximator) compareF32(x, y float32) bool {
|
||||
return a.compareF64(float64(x), float64(y))
|
||||
}
|
||||
|
||||
// EquateNaNs returns a Comparer option that determines float32 and float64
|
||||
// NaN values to be equal.
|
||||
//
|
||||
// EquateNaNs can be used in conjunction with EquateApprox.
|
||||
func EquateNaNs() cmp.Option {
|
||||
return cmp.Options{
|
||||
cmp.FilterValues(areNaNsF64s, cmp.Comparer(equateAlways)),
|
||||
cmp.FilterValues(areNaNsF32s, cmp.Comparer(equateAlways)),
|
||||
}
|
||||
}
|
||||
|
||||
func areNaNsF64s(x, y float64) bool {
|
||||
return math.IsNaN(x) && math.IsNaN(y)
|
||||
}
|
||||
func areNaNsF32s(x, y float32) bool {
|
||||
return areNaNsF64s(float64(x), float64(y))
|
||||
}
|
||||
207  vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go  (generated, vendored, new file)
@@ -0,0 +1,207 @@
|
||||
// Copyright 2017, The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE.md file.
|
||||
|
||||
package cmpopts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/google/go-cmp/cmp/internal/function"
|
||||
)
|
||||
|
||||
// IgnoreFields returns an Option that ignores exported fields of the
|
||||
// given names on a single struct type.
|
||||
// The struct type is specified by passing in a value of that type.
|
||||
//
|
||||
// The name may be a dot-delimited string (e.g., "Foo.Bar") to ignore a
|
||||
// specific sub-field that is embedded or nested within the parent struct.
|
||||
//
|
||||
// This does not handle unexported fields; use IgnoreUnexported instead.
|
||||
func IgnoreFields(typ interface{}, names ...string) cmp.Option {
|
||||
sf := newStructFilter(typ, names...)
|
||||
return cmp.FilterPath(sf.filter, cmp.Ignore())
|
||||
}
|
||||
|
||||
// IgnoreTypes returns an Option that ignores all values assignable to
|
||||
// certain types, which are specified by passing in a value of each type.
|
||||
func IgnoreTypes(typs ...interface{}) cmp.Option {
|
||||
tf := newTypeFilter(typs...)
|
||||
return cmp.FilterPath(tf.filter, cmp.Ignore())
|
||||
}
|
||||
|
||||
type typeFilter []reflect.Type
|
||||
|
||||
func newTypeFilter(typs ...interface{}) (tf typeFilter) {
|
||||
for _, typ := range typs {
|
||||
t := reflect.TypeOf(typ)
|
||||
if t == nil {
|
||||
// This occurs if someone tries to pass in sync.Locker(nil)
|
||||
panic("cannot determine type; consider using IgnoreInterfaces")
|
||||
}
|
||||
tf = append(tf, t)
|
||||
}
|
||||
return tf
|
||||
}
|
||||
func (tf typeFilter) filter(p cmp.Path) bool {
|
||||
if len(p) < 1 {
|
||||
return false
|
||||
}
|
||||
t := p.Last().Type()
|
||||
for _, ti := range tf {
|
||||
if t.AssignableTo(ti) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IgnoreInterfaces returns an Option that ignores all values or references of
|
||||
// values assignable to certain interface types. These interfaces are specified
|
||||
// by passing in an anonymous struct with the interface types embedded in it.
|
||||
// For example, to ignore sync.Locker, pass in struct{sync.Locker}{}.
|
||||
func IgnoreInterfaces(ifaces interface{}) cmp.Option {
|
||||
tf := newIfaceFilter(ifaces)
|
||||
return cmp.FilterPath(tf.filter, cmp.Ignore())
|
||||
}
|
||||
type ifaceFilter []reflect.Type

func newIfaceFilter(ifaces interface{}) (tf ifaceFilter) {
	t := reflect.TypeOf(ifaces)
	if ifaces == nil || t.Name() != "" || t.Kind() != reflect.Struct {
		panic("input must be an anonymous struct")
	}
	for i := 0; i < t.NumField(); i++ {
		fi := t.Field(i)
		switch {
		case !fi.Anonymous:
			panic("struct cannot have named fields")
		case fi.Type.Kind() != reflect.Interface:
			panic("embedded field must be an interface type")
		case fi.Type.NumMethod() == 0:
			// This matches everything; why would you ever want this?
			panic("cannot ignore empty interface")
		default:
			tf = append(tf, fi.Type)
		}
	}
	return tf
}
func (tf ifaceFilter) filter(p cmp.Path) bool {
	if len(p) < 1 {
		return false
	}
	t := p.Last().Type()
	for _, ti := range tf {
		if t.AssignableTo(ti) {
			return true
		}
		if t.Kind() != reflect.Ptr && reflect.PtrTo(t).AssignableTo(ti) {
			return true
		}
	}
	return false
}

// IgnoreUnexported returns an Option that only ignores the immediate unexported
// fields of a struct, including anonymous fields of unexported types.
// In particular, unexported fields within the struct's exported fields
// of struct types, including anonymous fields, will not be ignored unless the
// type of the field itself is also passed to IgnoreUnexported.
//
// Avoid ignoring unexported fields of a type which you do not control (i.e. a
// type from another repository), as changes to the implementation of such types
// may change how the comparison behaves. Prefer a custom Comparer instead.
func IgnoreUnexported(typs ...interface{}) cmp.Option {
	ux := newUnexportedFilter(typs...)
	return cmp.FilterPath(ux.filter, cmp.Ignore())
}
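A small sketch of the intended use (editorial addition; the user type is invented). Without the option, cmp.Equal would panic on the unexported field:

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

type user struct {
	Name  string
	cache map[string]bool // unexported, excluded from the comparison
}

func main() {
	x := user{Name: "ann", cache: map[string]bool{"a": true}}
	y := user{Name: "ann"}
	// Ignore the unexported fields of user only; nested struct types would
	// need to be listed separately.
	fmt.Println(cmp.Equal(x, y, cmpopts.IgnoreUnexported(user{}))) // true
}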
type unexportedFilter struct{ m map[reflect.Type]bool }

func newUnexportedFilter(typs ...interface{}) unexportedFilter {
	ux := unexportedFilter{m: make(map[reflect.Type]bool)}
	for _, typ := range typs {
		t := reflect.TypeOf(typ)
		if t == nil || t.Kind() != reflect.Struct {
			panic(fmt.Sprintf("invalid struct type: %T", typ))
		}
		ux.m[t] = true
	}
	return ux
}
func (xf unexportedFilter) filter(p cmp.Path) bool {
	sf, ok := p.Index(-1).(cmp.StructField)
	if !ok {
		return false
	}
	return xf.m[p.Index(-2).Type()] && !isExported(sf.Name())
}

// isExported reports whether the identifier is exported.
func isExported(id string) bool {
	r, _ := utf8.DecodeRuneInString(id)
	return unicode.IsUpper(r)
}

// IgnoreSliceElements returns an Option that ignores elements of []V.
// The discard function must be of the form "func(T) bool" which is used to
// ignore slice elements of type V, where V is assignable to T.
// Elements are ignored if the function reports true.
func IgnoreSliceElements(discardFunc interface{}) cmp.Option {
	vf := reflect.ValueOf(discardFunc)
	if !function.IsType(vf.Type(), function.ValuePredicate) || vf.IsNil() {
		panic(fmt.Sprintf("invalid discard function: %T", discardFunc))
	}
	return cmp.FilterPath(func(p cmp.Path) bool {
		si, ok := p.Index(-1).(cmp.SliceIndex)
		if !ok {
			return false
		}
		if !si.Type().AssignableTo(vf.Type().In(0)) {
			return false
		}
		vx, vy := si.Values()
		if vx.IsValid() && vf.Call([]reflect.Value{vx})[0].Bool() {
			return true
		}
		if vy.IsValid() && vf.Call([]reflect.Value{vy})[0].Bool() {
			return true
		}
		return false
	}, cmp.Ignore())
}
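An illustrative sketch (editorial addition): elements for which the discard function reports true are dropped from either slice before comparing, so slices of different lengths can still be reported as equal.

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	x := []int{1, 0, 2, 0, 3}
	y := []int{1, 2, 3}
	// Discard zero elements on both sides.
	opt := cmpopts.IgnoreSliceElements(func(v int) bool { return v == 0 })
	fmt.Println(cmp.Equal(x, y, opt)) // true
}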
// IgnoreMapEntries returns an Option that ignores entries of map[K]V.
// The discard function must be of the form "func(T, R) bool" which is used to
// ignore map entries of type K and V, where K and V are assignable to T and R.
// Entries are ignored if the function reports true.
func IgnoreMapEntries(discardFunc interface{}) cmp.Option {
	vf := reflect.ValueOf(discardFunc)
	if !function.IsType(vf.Type(), function.KeyValuePredicate) || vf.IsNil() {
		panic(fmt.Sprintf("invalid discard function: %T", discardFunc))
	}
	return cmp.FilterPath(func(p cmp.Path) bool {
		mi, ok := p.Index(-1).(cmp.MapIndex)
		if !ok {
			return false
		}
		if !mi.Key().Type().AssignableTo(vf.Type().In(0)) || !mi.Type().AssignableTo(vf.Type().In(1)) {
			return false
		}
		k := mi.Key()
		vx, vy := mi.Values()
		if vx.IsValid() && vf.Call([]reflect.Value{k, vx})[0].Bool() {
			return true
		}
		if vy.IsValid() && vf.Call([]reflect.Value{k, vy})[0].Bool() {
			return true
		}
		return false
	}, cmp.Ignore())
}
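An illustrative sketch (editorial addition): entries whose key/value pair satisfies the predicate on either side are excluded from the comparison.

package main

import (
	"fmt"
	"strings"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	x := map[string]int{"a": 1, "tmp_b": 9}
	y := map[string]int{"a": 1, "tmp_c": 7}
	// Discard entries whose key carries a "tmp_" prefix.
	opt := cmpopts.IgnoreMapEntries(func(k string, v int) bool {
		return strings.HasPrefix(k, "tmp_")
	})
	fmt.Println(cmp.Equal(x, y, opt)) // true
}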
147 vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go (generated, vendored, new file)
@@ -0,0 +1,147 @@
// Copyright 2017, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

package cmpopts

import (
	"fmt"
	"reflect"
	"sort"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/internal/function"
)

// SortSlices returns a Transformer option that sorts all []V.
// The less function must be of the form "func(T, T) bool" which is used to
// sort any slice with element type V that is assignable to T.
//
// The less function must be:
//	• Deterministic: less(x, y) == less(x, y)
//	• Irreflexive: !less(x, x)
//	• Transitive: if !less(x, y) and !less(y, z), then !less(x, z)
//
// The less function does not have to be "total". That is, if !less(x, y) and
// !less(y, x) for two elements x and y, their relative order is maintained.
//
// SortSlices can be used in conjunction with EquateEmpty.
func SortSlices(lessFunc interface{}) cmp.Option {
	vf := reflect.ValueOf(lessFunc)
	if !function.IsType(vf.Type(), function.Less) || vf.IsNil() {
		panic(fmt.Sprintf("invalid less function: %T", lessFunc))
	}
	ss := sliceSorter{vf.Type().In(0), vf}
	return cmp.FilterValues(ss.filter, cmp.Transformer("cmpopts.SortSlices", ss.sort))
}
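A usage sketch (editorial addition): sorting both slices with the supplied less function makes the comparison order-insensitive.

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	x := []int{3, 1, 2}
	y := []int{1, 2, 3}
	// Both slices are sorted by the transformer before being compared.
	opt := cmpopts.SortSlices(func(a, b int) bool { return a < b })
	fmt.Println(cmp.Equal(x, y, opt)) // true
}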
type sliceSorter struct {
	in  reflect.Type  // T
	fnc reflect.Value // func(T, T) bool
}

func (ss sliceSorter) filter(x, y interface{}) bool {
	vx, vy := reflect.ValueOf(x), reflect.ValueOf(y)
	if !(x != nil && y != nil && vx.Type() == vy.Type()) ||
		!(vx.Kind() == reflect.Slice && vx.Type().Elem().AssignableTo(ss.in)) ||
		(vx.Len() <= 1 && vy.Len() <= 1) {
		return false
	}
	// Check whether the slices are already sorted to avoid an infinite
	// recursion cycle applying the same transform to itself.
	ok1 := sort.SliceIsSorted(x, func(i, j int) bool { return ss.less(vx, i, j) })
	ok2 := sort.SliceIsSorted(y, func(i, j int) bool { return ss.less(vy, i, j) })
	return !ok1 || !ok2
}
func (ss sliceSorter) sort(x interface{}) interface{} {
	src := reflect.ValueOf(x)
	dst := reflect.MakeSlice(src.Type(), src.Len(), src.Len())
	for i := 0; i < src.Len(); i++ {
		dst.Index(i).Set(src.Index(i))
	}
	sort.SliceStable(dst.Interface(), func(i, j int) bool { return ss.less(dst, i, j) })
	ss.checkSort(dst)
	return dst.Interface()
}
func (ss sliceSorter) checkSort(v reflect.Value) {
	start := -1 // Start of a sequence of equal elements.
	for i := 1; i < v.Len(); i++ {
		if ss.less(v, i-1, i) {
			// Check that first and last elements in v[start:i] are equal.
			if start >= 0 && (ss.less(v, start, i-1) || ss.less(v, i-1, start)) {
				panic(fmt.Sprintf("incomparable values detected: want equal elements: %v", v.Slice(start, i)))
			}
			start = -1
		} else if start == -1 {
			start = i
		}
	}
}
func (ss sliceSorter) less(v reflect.Value, i, j int) bool {
	vx, vy := v.Index(i), v.Index(j)
	return ss.fnc.Call([]reflect.Value{vx, vy})[0].Bool()
}

// SortMaps returns a Transformer option that flattens map[K]V types to be a
// sorted []struct{K, V}. The less function must be of the form
// "func(T, T) bool" which is used to sort any map with key K that is
// assignable to T.
//
// Flattening the map into a slice has the property that cmp.Equal is able to
// use Comparers on K or the K.Equal method if it exists.
//
// The less function must be:
//	• Deterministic: less(x, y) == less(x, y)
//	• Irreflexive: !less(x, x)
//	• Transitive: if !less(x, y) and !less(y, z), then !less(x, z)
//	• Total: if x != y, then either less(x, y) or less(y, x)
//
// SortMaps can be used in conjunction with EquateEmpty.
func SortMaps(lessFunc interface{}) cmp.Option {
	vf := reflect.ValueOf(lessFunc)
	if !function.IsType(vf.Type(), function.Less) || vf.IsNil() {
		panic(fmt.Sprintf("invalid less function: %T", lessFunc))
	}
	ms := mapSorter{vf.Type().In(0), vf}
	return cmp.FilterValues(ms.filter, cmp.Transformer("cmpopts.SortMaps", ms.sort))
}
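A usage sketch (editorial addition): flattening each map into a key-sorted slice gives deterministic output and lets time.Time keys be compared through their Equal method.

package main

import (
	"fmt"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	t0 := time.Date(2020, 3, 1, 0, 0, 0, 0, time.UTC)
	x := map[time.Time]string{t0: "a", t0.Add(time.Hour): "b"}
	y := map[time.Time]string{t0: "a", t0.Add(time.Hour): "b"}
	// Keys are sorted with the less function; the flattened representation
	// is then compared field by field.
	opt := cmpopts.SortMaps(func(a, b time.Time) bool { return a.Before(b) })
	fmt.Println(cmp.Equal(x, y, opt)) // true
}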
type mapSorter struct {
	in  reflect.Type  // T
	fnc reflect.Value // func(T, T) bool
}

func (ms mapSorter) filter(x, y interface{}) bool {
	vx, vy := reflect.ValueOf(x), reflect.ValueOf(y)
	return (x != nil && y != nil && vx.Type() == vy.Type()) &&
		(vx.Kind() == reflect.Map && vx.Type().Key().AssignableTo(ms.in)) &&
		(vx.Len() != 0 || vy.Len() != 0)
}
func (ms mapSorter) sort(x interface{}) interface{} {
	src := reflect.ValueOf(x)
	outType := reflect.StructOf([]reflect.StructField{
		{Name: "K", Type: src.Type().Key()},
		{Name: "V", Type: src.Type().Elem()},
	})
	dst := reflect.MakeSlice(reflect.SliceOf(outType), src.Len(), src.Len())
	for i, k := range src.MapKeys() {
		v := reflect.New(outType).Elem()
		v.Field(0).Set(k)
		v.Field(1).Set(src.MapIndex(k))
		dst.Index(i).Set(v)
	}
	sort.Slice(dst.Interface(), func(i, j int) bool { return ms.less(dst, i, j) })
	ms.checkSort(dst)
	return dst.Interface()
}
func (ms mapSorter) checkSort(v reflect.Value) {
	for i := 1; i < v.Len(); i++ {
		if !ms.less(v, i-1, i) {
			panic(fmt.Sprintf("partial order detected: want %v < %v", v.Index(i-1), v.Index(i)))
		}
	}
}
func (ms mapSorter) less(v reflect.Value, i, j int) bool {
	vx, vy := v.Index(i).Field(0), v.Index(j).Field(0)
	return ms.fnc.Call([]reflect.Value{vx, vy})[0].Bool()
}
182 vendor/github.com/google/go-cmp/cmp/cmpopts/struct_filter.go (generated, vendored, new file)
@@ -0,0 +1,182 @@
// Copyright 2017, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

package cmpopts

import (
	"fmt"
	"reflect"
	"strings"

	"github.com/google/go-cmp/cmp"
)

// filterField returns a new Option where opt is only evaluated on paths that
// include a specific exported field on a single struct type.
// The struct type is specified by passing in a value of that type.
//
// The name may be a dot-delimited string (e.g., "Foo.Bar") to select a
// specific sub-field that is embedded or nested within the parent struct.
func filterField(typ interface{}, name string, opt cmp.Option) cmp.Option {
	// TODO: This is currently unexported over concerns of how helper filters
	// can be composed together easily.
	// TODO: Add tests for FilterField.

	sf := newStructFilter(typ, name)
	return cmp.FilterPath(sf.filter, opt)
}

type structFilter struct {
	t  reflect.Type // The root struct type to match on
	ft fieldTree    // Tree of fields to match on
}

func newStructFilter(typ interface{}, names ...string) structFilter {
	// TODO: Perhaps allow * as a special identifier to allow ignoring any
	// number of path steps until the next field match?
	// This could be useful when a concrete struct gets transformed into
	// an anonymous struct where it is not possible to specify that by type,
	// but the transformer happens to provide guarantees about the names of
	// the transformed fields.

	t := reflect.TypeOf(typ)
	if t == nil || t.Kind() != reflect.Struct {
		panic(fmt.Sprintf("%T must be a struct", typ))
	}
	var ft fieldTree
	for _, name := range names {
		cname, err := canonicalName(t, name)
		if err != nil {
			panic(fmt.Sprintf("%s: %v", strings.Join(cname, "."), err))
		}
		ft.insert(cname)
	}
	return structFilter{t, ft}
}

func (sf structFilter) filter(p cmp.Path) bool {
	for i, ps := range p {
		if ps.Type().AssignableTo(sf.t) && sf.ft.matchPrefix(p[i+1:]) {
			return true
		}
	}
	return false
}

// fieldTree represents a set of dot-separated identifiers.
//
// For example, inserting the following selectors:
//	Foo
//	Foo.Bar.Baz
//	Foo.Buzz
//	Nuka.Cola.Quantum
//
// Results in a tree of the form:
//	{sub: {
//		"Foo": {ok: true, sub: {
//			"Bar": {sub: {
//				"Baz": {ok: true},
//			}},
//			"Buzz": {ok: true},
//		}},
//		"Nuka": {sub: {
//			"Cola": {sub: {
//				"Quantum": {ok: true},
//			}},
//		}},
//	}}
type fieldTree struct {
	ok  bool                 // Whether this is a specified node
	sub map[string]fieldTree // The sub-tree of fields under this node
}

// insert inserts a sequence of field accesses into the tree.
func (ft *fieldTree) insert(cname []string) {
	if ft.sub == nil {
		ft.sub = make(map[string]fieldTree)
	}
	if len(cname) == 0 {
		ft.ok = true
		return
	}
	sub := ft.sub[cname[0]]
	sub.insert(cname[1:])
	ft.sub[cname[0]] = sub
}

// matchPrefix reports whether any selector in the fieldTree matches
// the start of path p.
func (ft fieldTree) matchPrefix(p cmp.Path) bool {
	for _, ps := range p {
		switch ps := ps.(type) {
		case cmp.StructField:
			ft = ft.sub[ps.Name()]
			if ft.ok {
				return true
			}
			if len(ft.sub) == 0 {
				return false
			}
		case cmp.Indirect:
		default:
			return false
		}
	}
	return false
}

// canonicalName returns a list of identifiers where any struct field access
// through an embedded field is expanded to include the names of the embedded
// types themselves.
//
// For example, suppose field "Foo" is not directly in the parent struct,
// but actually from an embedded struct of type "Bar". Then, the canonical name
// of "Foo" is actually "Bar.Foo".
//
// Suppose field "Foo" is not directly in the parent struct, but actually
// a field in two different embedded structs of types "Bar" and "Baz".
// Then the selector "Foo" causes a panic since it is ambiguous which one it
// refers to. The user must specify either "Bar.Foo" or "Baz.Foo".
func canonicalName(t reflect.Type, sel string) ([]string, error) {
	var name string
	sel = strings.TrimPrefix(sel, ".")
	if sel == "" {
		return nil, fmt.Errorf("name must not be empty")
	}
	if i := strings.IndexByte(sel, '.'); i < 0 {
		name, sel = sel, ""
	} else {
		name, sel = sel[:i], sel[i:]
	}

	// Type must be a struct or pointer to struct.
	if t.Kind() == reflect.Ptr {
		t = t.Elem()
	}
	if t.Kind() != reflect.Struct {
		return nil, fmt.Errorf("%v must be a struct", t)
	}

	// Find the canonical name for this current field name.
	// If the field exists in an embedded struct, then it will be expanded.
	if !isExported(name) {
		// Disallow unexported fields:
		//	* To discourage people from actually touching unexported fields
		//	* FieldByName is buggy (https://golang.org/issue/4876)
		return []string{name}, fmt.Errorf("name must be exported")
	}
	sf, ok := t.FieldByName(name)
	if !ok {
		return []string{name}, fmt.Errorf("does not exist")
	}
	var ss []string
	for i := range sf.Index {
		ss = append(ss, t.FieldByIndex(sf.Index[:i+1]).Name)
	}
	if sel == "" {
		return ss, nil
	}
	ssPost, err := canonicalName(sf.Type, sel)
	return append(ss, ssPost...), err
}
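To make the canonical-name expansion above concrete, here is an editorial sketch using the exported IgnoreFields option (the Base and Event types are invented): a field promoted from an embedded struct can be selected either by its promoted name or by its dotted canonical path.

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

type Base struct{ ID string }

type Event struct {
	Base
	Payload string
}

func main() {
	x := Event{Base: Base{ID: "1"}, Payload: "p"}
	y := Event{Base: Base{ID: "2"}, Payload: "p"}
	// "ID" is promoted from the embedded Base struct; canonicalName expands
	// the selector, so "ID" and "Base.ID" name the same field.
	fmt.Println(cmp.Equal(x, y, cmpopts.IgnoreFields(Event{}, "ID")))      // true
	fmt.Println(cmp.Equal(x, y, cmpopts.IgnoreFields(Event{}, "Base.ID"))) // true
}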
35 vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go (generated, vendored, new file)
@@ -0,0 +1,35 @@
// Copyright 2018, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

package cmpopts

import (
	"github.com/google/go-cmp/cmp"
)

type xformFilter struct{ xform cmp.Option }

func (xf xformFilter) filter(p cmp.Path) bool {
	for _, ps := range p {
		if t, ok := ps.(cmp.Transform); ok && t.Option() == xf.xform {
			return false
		}
	}
	return true
}

// AcyclicTransformer returns a Transformer with a filter applied that ensures
// that the transformer cannot be recursively applied upon its own output.
//
// An example use case is a transformer that splits a string by lines:
//	AcyclicTransformer("SplitLines", func(s string) []string{
//		return strings.Split(s, "\n")
//	})
//
// Had this been an unfiltered Transformer instead, this would result in an
// infinite cycle converting a string to []string to [][]string and so on.
func AcyclicTransformer(name string, xformFunc interface{}) cmp.Option {
	xf := xformFilter{cmp.Transformer(name, xformFunc)}
	return cmp.FilterPath(xf.filter, xf.xform)
}
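A usage sketch (editorial addition) mirroring the SplitLines example from the doc comment above: the filtered transformer compares the strings line by line without recursing into its own []string output.

package main

import (
	"fmt"
	"strings"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	x := "a\nb\nc"
	y := "a\nB\nc"
	// Each string is split into lines exactly once; the diff is then
	// reported per line rather than per character.
	opt := cmpopts.AcyclicTransformer("SplitLines", func(s string) []string {
		return strings.Split(s, "\n")
	})
	fmt.Println(cmp.Diff(x, y, opt))
}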
55 vendor/github.com/grafana/grafana-plugin-sdk-go/backend/plugin/setup.go (generated, vendored)
@@ -1,55 +0,0 @@
package plugin

import (
	"fmt"
	"net/http"
	"net/http/pprof"
	"os"

	hclog "github.com/hashicorp/go-hclog"
)

// SetupPluginEnvironment will read the environment variables and apply the
// standard environment behavior. As the SDK evolves, this will likely change!
func SetupPluginEnvironment(pluginID string) hclog.Logger {
	pluginLogger := hclog.New(&hclog.LoggerOptions{
		Name: pluginID,
		// TODO: How to make level configurable?
		Level:      hclog.LevelFromString("DEBUG"),
		JSONFormat: true,
		// Color: hclog.ColorOff, (when we use 0.12)
	})

	// Enable profiler
	profilerEnabled := false
	if value, ok := os.LookupEnv("GF_PLUGINS_PROFILER"); ok {
		// compare value to plugin name
		if value == pluginID {
			profilerEnabled = true
		}
	}
	pluginLogger.Info("Profiler", "enabled", profilerEnabled)
	if profilerEnabled {
		profilerPort := "6060"
		if value, ok := os.LookupEnv("GF_PLUGINS_PROFILER_PORT"); ok {
			profilerPort = value
		}

		pluginLogger.Info("Profiler", "port", profilerPort)
		portConfig := fmt.Sprintf(":%s", profilerPort)

		r := http.NewServeMux()
		r.HandleFunc("/debug/pprof/", pprof.Index)
		r.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline)
		r.HandleFunc("/debug/pprof/profile", pprof.Profile)
		r.HandleFunc("/debug/pprof/symbol", pprof.Symbol)
		r.HandleFunc("/debug/pprof/trace", pprof.Trace)

		go func() {
			if err := http.ListenAndServe(portConfig, r); err != nil {
				pluginLogger.Error("Error Running profiler: %s", err.Error())
			}
		}()
	}
	return pluginLogger
}
59 vendor/github.com/grafana/grafana-plugin-sdk-go/data/frame.go (generated, vendored)
@@ -11,6 +11,7 @@ import (
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

// Frame represents a columnar storage with optional labels.
@@ -45,6 +46,15 @@ func (f *Frame) AppendRow(vals ...interface{}) {
	}
}

// RowCopy returns an interface slice that contains the values of each Field for the given rowIdx.
func (f *Frame) RowCopy(rowIdx int) []interface{} {
	vals := make([]interface{}, len(f.Fields))
	for i := range f.Fields {
		vals[i] = f.CopyAt(i, rowIdx)
	}
	return vals
}

// AppendWarning adds warnings to the data frame.
func (f *Frame) AppendWarning(message string, details string) {
	f.Warnings = append(f.Warnings, Warning{Message: message, Details: details})
@@ -76,6 +86,44 @@ func (f *Frame) AppendRowSafe(vals ...interface{}) error {
	return nil
}

// FilterRowsByField returns a copy of frame f (as per EmptyCopy()) that includes rows
// where the filter returns true and no error. If filter returns an error, then an error is returned.
func (f *Frame) FilterRowsByField(fieldIdx int, filter func(i interface{}) (bool, error)) (*Frame, error) {
	filteredFrame := f.EmptyCopy()
	rowLen, err := f.RowLen()
	if err != nil {
		return nil, err
	}
	for inRowIdx := 0; inRowIdx < rowLen; inRowIdx++ {
		match, err := filter(f.At(fieldIdx, inRowIdx))
		if err != nil {
			return nil, err
		}
		if !match {
			continue
		}
		filteredFrame.AppendRow(f.RowCopy(inRowIdx)...)
	}
	return filteredFrame, nil
}
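An editorial sketch of how the new FilterRowsByField helper might be called. The data.NewFrame and data.NewField constructors and Field.Len are assumed to exist in this SDK version, and the status field is assumed to surface *float64 values to the filter; adjust to the actual field types in use.

package main

import (
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

func fptr(f float64) *float64 { return &f }

func main() {
	// Assumed constructors: a frame with a single nullable float64 field.
	frame := data.NewFrame("http",
		data.NewField("status", nil, []*float64{fptr(200), fptr(500), fptr(204)}),
	)
	// Keep only rows whose status value is present and below 300.
	ok2xx, err := frame.FilterRowsByField(0, func(v interface{}) (bool, error) {
		f, ok := v.(*float64)
		if !ok {
			return false, fmt.Errorf("unexpected type %T", v)
		}
		return f != nil && *f < 300, nil
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(ok2xx.Fields[0].Len()) // 2
}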
// EmptyCopy returns a copy of Frame f but with Fields of zero length, and no copy of the FieldConfigs, Metadata, or Warnings.
func (f *Frame) EmptyCopy() *Frame {
	newFrame := &Frame{
		Name:   f.Name,
		RefID:  f.RefID,
		Fields: make(Fields, 0, len(f.Fields)),
	}

	for _, field := range f.Fields {
		copy := NewFieldFromFieldType(field.Type(), 0)
		copy.Name = field.Name
		copy.Labels = field.Labels.Copy()
		newFrame.Fields = append(newFrame.Fields, copy)
	}
	return newFrame
}

// TypeIndices returns a slice of Field index positions for the given pTypes.
func (f *Frame) TypeIndices(pTypes ...FieldType) []int {
	indices := []int{}
@@ -319,6 +367,15 @@ func (l Labels) Equals(arg Labels) bool {
	return true
}

// Copy returns a copy of the labels.
func (l Labels) Copy() Labels {
	c := make(Labels, len(l))
	for k, v := range l {
		c[k] = v
	}
	return c
}

// Contains returns true if all k=v pairs of the argument are in the receiver.
func (l Labels) Contains(arg Labels) bool {
	if len(arg) > len(l) {
@@ -483,5 +540,5 @@ func FrameTestCompareOptions() []cmp.Option {
	})

	unexportedField := cmp.AllowUnexported(Field{})
	return []cmp.Option{confFloats, unexportedField}
	return []cmp.Option{confFloats, unexportedField, cmpopts.EquateEmpty()}
}
10 vendor/github.com/grafana/grafana-plugin-sdk-go/genproto/pluginv2/backend.pb.go (generated, vendored)
@@ -877,7 +877,7 @@ func (m *CheckHealthRequest) GetConfig() *PluginConfig {
type CheckHealthResponse struct {
	Status               CheckHealthResponse_HealthStatus `protobuf:"varint,1,opt,name=status,proto3,enum=pluginv2.CheckHealthResponse_HealthStatus" json:"status,omitempty"`
	Message              string                           `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
	JsonDetails          string                           `protobuf:"bytes,3,opt,name=jsonDetails,proto3" json:"jsonDetails,omitempty"`
	JsonDetails          []byte                           `protobuf:"bytes,3,opt,name=jsonDetails,proto3" json:"jsonDetails,omitempty"`
	XXX_NoUnkeyedLiteral struct{}                         `json:"-"`
	XXX_unrecognized     []byte                           `json:"-"`
	XXX_sizecache        int32                            `json:"-"`
@@ -922,11 +922,11 @@ func (m *CheckHealthResponse) GetMessage() string {
	return ""
}

func (m *CheckHealthResponse) GetJsonDetails() string {
func (m *CheckHealthResponse) GetJsonDetails() []byte {
	if m != nil {
		return m.JsonDetails
	}
	return ""
	return nil
}

func init() {
@@ -1016,7 +1016,7 @@ var fileDescriptor_5ab9ba5b8d8b2ba5 = []byte{
	0x33, 0xa7, 0xce, 0xd9, 0x2e, 0x25, 0x3e, 0x9f, 0xff, 0x47, 0xb2, 0xd9, 0x7f, 0x68, 0xb0, 0x59,
	0x4a, 0xb3, 0x6c, 0xa6, 0xc6, 0x38, 0xe1, 0xea, 0xe5, 0x8d, 0x92, 0x98, 0x5e, 0x0f, 0x1f, 0xa4,
	0xe6, 0x44, 0xde, 0xc0, 0xea, 0x26, 0xb2, 0x04, 0x20, 0x8c, 0x91, 0x59, 0x06, 0x70, 0x66, 0x0a,
	0x61, 0x90, 0x3f, 0x06, 0x28, 0x27, 0x9e, 0xcf, 0x94, 0xe0, 0x17, 0x5d, 0xf6, 0x40, 0xd2, 0x6d,
	0x61, 0x90, 0x3f, 0x06, 0x28, 0x27, 0x9e, 0xcf, 0x94, 0x24, 0x15, 0x5d, 0xf6, 0x40, 0xd2, 0x6d,
	0x99, 0x13, 0x35, 0xa1, 0x7e, 0x72, 0xb8, 0x7f, 0x78, 0xf4, 0xfd, 0x61, 0xfb, 0x23, 0x54, 0x03,
	0xfd, 0x68, 0xbf, 0xad, 0x21, 0x13, 0xaa, 0x63, 0x8c, 0x8f, 0x70, 0x5b, 0x1f, 0xfe, 0x00, 0x8d,
	0x4c, 0x07, 0xd1, 0x11, 0xb4, 0x8a, 0xba, 0x88, 0x1e, 0xdd, 0xf8, 0x87, 0xa0, 0xfb, 0xf8, 0x66,
@@ -1027,7 +1027,7 @@ var fileDescriptor_5ab9ba5b8d8b2ba5 = []byte{
	0xe9, 0xf0, 0x04, 0xcc, 0xb7, 0x31, 0x09, 0xd9, 0x69, 0x14, 0x07, 0x68, 0x17, 0xee, 0x2d, 0x8d,
	0xff, 0x87, 0xc3, 0x8f, 0x70, 0xbf, 0x94, 0x49, 0x80, 0xbf, 0x4d, 0x9c, 0xb3, 0xbb, 0x81, 0x79,
	0xbb, 0xf5, 0x0e, 0x06, 0x2f, 0xb3, 0xef, 0xd3, 0x9a, 0xfc, 0x87, 0xe5, 0xf9, 0x3f, 0x01, 0x00,
	0x00, 0xff, 0xff, 0x9f, 0xc1, 0xca, 0xb8, 0xc1, 0x0c, 0x00, 0x00,
	0x00, 0xff, 0xff, 0xfc, 0xf3, 0x6d, 0x5e, 0xc1, 0x0c, 0x00, 0x00,
}

// Reference imports to suppress errors if they are not otherwise used.
3 vendor/modules.txt (vendored)
@@ -134,6 +134,7 @@ github.com/golang/snappy
github.com/google/flatbuffers/go
# github.com/google/go-cmp v0.3.1
github.com/google/go-cmp/cmp
github.com/google/go-cmp/cmp/cmpopts
github.com/google/go-cmp/cmp/internal/diff
github.com/google/go-cmp/cmp/internal/flags
github.com/google/go-cmp/cmp/internal/function
@@ -147,7 +148,7 @@ github.com/gosimple/slug
# github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4
github.com/grafana/grafana-plugin-model/go/datasource
github.com/grafana/grafana-plugin-model/go/renderer
# github.com/grafana/grafana-plugin-sdk-go v0.26.0
# github.com/grafana/grafana-plugin-sdk-go v0.30.0
github.com/grafana/grafana-plugin-sdk-go/backend/plugin
github.com/grafana/grafana-plugin-sdk-go/data
github.com/grafana/grafana-plugin-sdk-go/genproto/pluginv2