Compare commits

...

54 Commits

Author SHA1 Message Date
Ryan McKinley
a3a9ce7f24 AppPlugin: give full control to page layout when navigation is missing (#26247)
(cherry picked from commit 5f8eb93db1)
2020-07-13 14:41:21 +02:00
Ryan McKinley
1e3265d047 Flux: use monaco query editor (#26179)
(cherry picked from commit 33acf4c056)
2020-07-13 14:41:21 +02:00
Alex Khomenko
88270a2bf8 Grafana UI: Make FileUpload button size customizable (#26013)
(cherry picked from commit c3d4e69a32)
2020-07-13 14:41:21 +02:00
Andrej Ocenas
f8bdda7adf Release v7.1.0-beta3 2020-07-13 14:41:21 +02:00
Ryan McKinley
7618fd36ac DataLinks: add internal flag in comments (#26215)
(cherry picked from commit 3d98641a45)
2020-07-13 14:41:21 +02:00
Tobias Skarhed
bca82d8814 Issue Template: @grafana/ui component request (#25981)
* Add first draft of issue template

* Remove feature request line

* Update PR feedback

* Minor tweaks

* Update .github/ISSUE_TEMPLATE/4-grafana_ui_component.md

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>

* Fix punctuation

Co-authored-by: Clarity-89 <homes89@ukr.net>
Co-authored-by: Alex Khomenko <Clarity-89@users.noreply.github.com>
Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
(cherry picked from commit 5a3eb413d9)
2020-07-13 14:41:21 +02:00
Andrej Ocenas
783e5d12c7 Explore: Unification of logs/metrics/traces user interface (#25890)
Removes "Metrics"/"Logs" mode switcher from Explore, allowing for both
metrics and logs queries at the same time.

Co-authored-by: kay delaney <kay@grafana.com>
(cherry picked from commit 64bc85963b)
2020-07-13 14:41:21 +02:00
Peter Holmberg
e39fef1649 Fix: Redirect to correct url after creating a folder (#26160)
(cherry picked from commit 9948e9298f)
2020-07-13 14:41:21 +02:00
kay delaney
2145633e9b Datasource/CloudWatch: More robust handling of different query modes (#25691)
* Datasource/CloudWatch: More robust handling of different query modes
A small refactor which changes how the CloudWatch datasource handles
multiple queries with different query modes. Groundwork for future
Logs/Metrics unification work.

(cherry picked from commit 2ac1bfcc79)
2020-07-13 14:41:21 +02:00
Andrej Ocenas
6503962013 Explore: Run query on splitOpen action (#26161)
(cherry picked from commit 53eb856d20)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
40a33c56c2 Templating: Fix recursive loop of template variable queries when changing ad-hoc-variable (#26191)
* Templating: Fix url sync issue with adhoc variables

* Update packages/grafana-ui/src/components/Segment/SegmentAsync.tsx

* Update packages/grafana-ui/src/components/Segment/SegmentAsync.tsx

Co-authored-by: Dominik Prokop <dominik.prokop@grafana.com>
(cherry picked from commit 0428f27194)
2020-07-13 14:41:21 +02:00
Ivana Huckova
42069bb254 Loki: Improve error message for unescaped \ and add LogQL link to docs (#26136)
* Add custom escaping error message

* Include regex escape error message

* Update docs, add logql link

* Refactor

(cherry picked from commit d3dcb19a5b)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
4684c7971c BarGauge: Fix space bug in single series mode (#26176)
(cherry picked from commit 7b80e300d9)
2020-07-13 14:41:21 +02:00
Kyle Brandt
903eccad20 Docs: Azure Monitor data source changes for 7.1 (#26096)
Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
(cherry picked from commit c6a3afb4b8)
2020-07-13 14:41:21 +02:00
kay delaney
2bc9374a9e Backend: use latest Go plugin SDK (#26162)
(cherry picked from commit fd29b952a6)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
47e162d6b2 Templating: Fixed recursive queries triggered when switching dashboard settings view (#26137)
* Templating: Fixed recursive queries triggered when going into dashboard settings

* Fixed unused import

* use locationUtil

(cherry picked from commit cace879c96)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
b8c5174461 Dashboard: Refresh intervals should not always add the server min refresh interval (#26150)
(cherry picked from commit 8ab5d2ddd9)
2020-07-13 14:41:21 +02:00
Marcus Andersson
eedf6e53de Transform: adding missing "table"-transform and "series to rows"-transform to Grafana v7-transforms. (#26042)
* Fixed so the merge for table values works as it did before.

* wip

* fixed tests.

* merge tests are green.

* removed unused code and simplify the seriesToRows.

* added time series to rows editor.

* using getFrameDisplayName for the metric value.

* updated description of transforms.

* updated docs.

* fixed according to feedback.

* changed from images to markdown tables for the examples.

* forgot to save :P

(cherry picked from commit 17d87071e6)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
5f539230da DataLinks: Fixed interpolation of repeated variables used in data links (#26147)
(cherry picked from commit 89b56782c6)
2020-07-13 14:41:21 +02:00
Ryan McKinley
c14ac88b8d CSV: force UTF-8 encoding for download (#26145)
(cherry picked from commit b26ef1db25)
2020-07-13 14:41:21 +02:00
Dominik Prokop
9417a0c277 Do not break dashboard settings UI when time intervals end with trailing comma (#26126)
(cherry picked from commit 30a682a4da)
2020-07-13 14:41:21 +02:00
Ryan McKinley
4c27708b7b DataSourceWithBackend: add internal comment for new method (#26139)
(cherry picked from commit 2b6833d0da)
2020-07-13 14:41:21 +02:00
Kyle Brandt
763d28ad96 Azure: Restore Insights Metrics alias feature (#26098)
also fix case sensitivity for azure monitor metrics

(cherry picked from commit 9164a35240)
2020-07-13 14:41:21 +02:00
Dominik Prokop
949988219f grafana/ui: Do not rename export for InlineFormLabel (#26118)
(cherry picked from commit eb4391a228)
2020-07-13 14:41:21 +02:00
Arve Knudsen
4aa1d28683 Upgrade build pipeline tool (#26112)
Signed-off-by: Arve Knudsen <arve.knudsen@gmail.com>
(cherry picked from commit e7e854ea33)
2020-07-13 14:41:21 +02:00
Dominik Prokop
a4846ee4fd Chore: Bump rxjs to latest (#26084)
(cherry picked from commit aa671c863e)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
c2403767a0 StatPanel: Fixes issue with name showing for single series / field results (#26070)
* StatPanel: Fix text mode auto logic

* Removed import

(cherry picked from commit c9f22b72e3)
2020-07-13 14:41:21 +02:00
Peter Holmberg
fce35e7aa9 Fix: Icon and Tooltip on Variables editor (#26086)
(cherry picked from commit 8f1115c6ac)
2020-07-13 14:41:21 +02:00
Marcus Olsson
0d47601175 Docs: Add guidelines for inclusive language (#25533)
* Docs: Add guidelines for inclusive language

* Fix review comments

* Fix review comments

* Update documentation-style-guide.md

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
(cherry picked from commit 0f41ca620b)
2020-07-13 14:41:21 +02:00
Sofia Papagiannaki
28e50ae4fd Auth: Fix POST request failures with anonymous access (#26049)
Macaron context.QueryBool() seems to modify the request context
that causes the POST and PUT requests to fail with:
"http: proxy error: net/http: HTTP/1.x transport connection broken: http: ContentLength=333 with Body length 0"

(cherry picked from commit 44dff6fdd0)
2020-07-13 14:41:21 +02:00
Andreas Opferkuch
fc96444b23 ThemeContext: Make useStyles type-aware (#26056)
PLUS:
Make it more consise
Add unit test

(cherry picked from commit 390c80d7f5)
2020-07-13 14:41:21 +02:00
annegies
9eb16756b1 Remove break from ldap, get all groups from all the group base searches specified (#25825)
Signed-off-by: Annegies van 't Zand <ace.vtzand@gmail.com>
(cherry picked from commit a2737c0896)
2020-07-13 14:41:21 +02:00
Ryan McKinley
cde6a2de68 panel Inspect: improve structure debugging (#26065)
(cherry picked from commit af5dff8a1b)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
111c238df2 TextPanel: Fixed issue with new react text panel (#26061)
(cherry picked from commit 8be735a6ec)
2020-07-13 14:41:21 +02:00
Carl Bergquist
1630f21aee Instrument dashboard versions and annotation count (#26044)
(cherry picked from commit 26852ca788)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
c2125493a7 InfluxDB: Fixed new group by dropdown now showing (#26031)
(cherry picked from commit 66460ae740)
2020-07-13 14:41:21 +02:00
Ryan McKinley
33eec937dd Table: JSON Cell should try to convert strings to JSON (#26024)
(cherry picked from commit 3acc2a6ac2)
2020-07-13 14:41:21 +02:00
Steven Vachon
073f46e289 @grafana/e2e: close options panel before interacting with the query form (#26036)
... it's logically better, but the real reason is to appease Cypress, which was causing a consistent request error for a single plugin (datadog-datasource). An error which could not be reproduced manually.

(cherry picked from commit 634d8d60d6)
2020-07-13 14:41:21 +02:00
Tobias Skarhed
7c1c0bc8c1 AdminUsersTable: Fix width (#26019)
(cherry picked from commit b06d2cf30f)
2020-07-13 14:41:21 +02:00
Josh Soref
a9fc5ff45d Graph panel: Move Stacking and null values before Hover tooltip options (#26037)
(cherry picked from commit dec76b4556)
2020-07-13 14:41:21 +02:00
Ivana Huckova
7d1f0d619d Elastic: Fix displaying of correct log message (#26020)
* Fix default field, remove redundant line field check

* Add comments

(cherry picked from commit 8b46655361)
2020-07-13 14:41:21 +02:00
Ryan McKinley
aa1cdf0a20 grafana/data: do not bundle rxjs (#26039)
(cherry picked from commit b7792de16d)
2020-07-13 14:41:21 +02:00
Alex Khomenko
0b109a1637 Forgot password: Fix styling (#26002)
(cherry picked from commit 9e47114c45)
2020-07-02 13:07:33 +03:00
Arve Knudsen
6a1f05d7ec CircleCI: Upgrade build pipeline tool (#26006)
Signed-off-by: Arve Knudsen <arve.knudsen@gmail.com>
(cherry picked from commit 3e9e2db384)
2020-07-02 13:07:33 +03:00
Ryan McKinley
1576b16219 Monaco: check suggestions against current word (#25992)
* trigger on current word

* proper index

* test suggestsions

* test suggestsions

* fix test

(cherry picked from commit 085b2f3dbf)
2020-07-02 13:07:33 +03:00
Sofia Papagiannaki
44ba5482f1 Release v7.1.0-beta2 2020-07-02 13:07:33 +03:00
Ryan McKinley
972e07bd2e Panel Loading: spin clockwise, not counter clockwise (#25998)
* spin clockwise

* spin clockwise

(cherry picked from commit 90a5a85eb1)
2020-07-02 13:07:33 +03:00
Sebastian Widmer
22211e5bdd Loki: Allow aliasing Loki queries in dashboard (#25706)
* Loki: Add Legend field to query editor

* Loki: Basic test for legend field

* Loki: Mention legend is only for metric queries

* Loki: Fix absolute timerange never updating

(cherry picked from commit 5789f80e14)
2020-07-02 13:07:33 +03:00
Ryan McKinley
d95c494d9d Value Mappings: remove unused operator property from interface (#25989)
(cherry picked from commit 73e82af4df)
2020-07-02 13:07:33 +03:00
Ivana Huckova
6e3a9d7927 Fix href to datasources for NoDataSourceCallToAction in Explore (#25991)
(cherry picked from commit c9751707c5)
2020-07-02 13:07:33 +03:00
Dan Cech
4a68ba7b23 provide license token directly via plugin environment (#25987)
* provide license token directly via plugin environment

(cherry picked from commit b5ca2381bc)
2020-07-02 13:07:33 +03:00
Sofia Papagiannaki
74ca7121eb Fix build-in plugins failing to load in windows (#25982)
(cherry picked from commit bcaa42fbb3)
2020-07-02 13:07:33 +03:00
Marcus Andersson
4d1ea72426 release 7.1.0-beta1 2020-07-01 12:13:45 +02:00
Marcus Andersson
11385c6cfe bumped version to beta1. 2020-07-01 12:11:50 +02:00
161 changed files with 2510 additions and 1637 deletions

View File

@@ -56,7 +56,7 @@ commands:
- run:
name: "Install Grafana build pipeline tool"
command: |
VERSION=0.4.17
VERSION=0.4.19
curl -fLO https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v${VERSION}/grabpl
chmod +x grabpl
mv grabpl /tmp

View File

@@ -0,0 +1,39 @@
---
name: '@grafana/ui component request'
about: Suggest a component for the @grafana/ui package
labels: 'area/grafana/ui'
---
<!--
By using this template you will make it easier for us to make sure that documentation and implementation stays up to date for every component in @grafana/ui
Thank you!
-->
**Why is this component needed**:
<!-- Explain your use case -->
___
- [ ] Is/could it be used in more than one place in Grafana?
**Where is/could it be used?**:
___
- [ ] Post screenshots if possible.
- [ ] It has a single use case.
- [ ] It is/could be used in multiple places.
**Implementation** (Checklist meant for the person implementing the component)
- [ ] Component has a story in Storybook.
- [ ] Props and naming follows [our style guide](https://github.com/grafana/grafana/blob/master/contribute/style-guides/frontend.md).
- [ ] It is extendable (rest props are spread, styles with className work, and so on).
- [ ] Uses [theme for spacing, colors, and so on](https://github.com/grafana/grafana/blob/master/contribute/style-guides/themes.md).
- [ ] Works with both light and dark theme.
**Documentation**
- [ ] Properties are documented.
- [ ] Use cases are described.
- [ ] Code examples for the different use cases.
- [ ] Dos and don'ts.
- [ ] Styling guidelines, specific color usage (if applicable).

View File

@@ -18,6 +18,32 @@ For all items not covered in this guide, refer to the [Microsoft Style Guide](ht
The [codespell](https://github.com/codespell-project/codespell) tool is run for every change to catch common misspellings.
## Inclusive language
This section provides guidelines on how to avoid using charged language in documentation.
### Allowing and blocking
Don't use "whitelist" or "blacklist" when referring to allowing or blocking content or traffic.
* When used as a noun, use "allowlist" or "blocklist".
* When used as a verb, use "allow" or "block".
Example: _To **allow** outgoing traffic, add the IP to the **allowlist**._
### Leader and follower
Don't use "master" or "slave" to describe relationships between nodes or processes.
* Use "leader", "main" or "primary," instead of "master."
* Use "follower" or "secondary," instead of "slave."
### Exceptions
When referring to a configuration or settings used by third-party libraries or technologies outside the Grafana project, prefer the original name to avoid confusion.
For example, use "master" when referring to the default Git branch.
## Grafana-specific style
The following sections provide general guidelines on topics specific to Grafana documentation. Note that for the most part, these are *guidelines*, not rigid rules. If you have questions, ask in the #docs channel of Grafana Slack.
@@ -31,7 +57,7 @@ The following sections provide general guidelines on topics specific to Grafana
* Write in present tense.
- Not: The panel will open.
- Use: The panel opens. Grafana opens the panel.
* Do not use an ampersand (&) as an abbreviation for "and."
* Do not use an ampersand (&) as an abbreviation for "and."
- **Exceptions:** If an ampersand is used in the Grafana UI, then match the UI.
* Avoid using internal slang and jargon in technical documentation.
@@ -156,7 +182,7 @@ One word, not two.
#### open source, open-source
Do not hyphenate when used as an adjective unless the lack of hyphen would cause confusion. For example: _Open source software design is the most open open-source system I can imagine._
Do not hyphenate when used as an adjective unless the lack of hyphen would cause confusion. For example: _Open source software design is the most open open-source system I can imagine._
Do not hyphenate when it is used as a noun. For example: _Open source is the best way to develop software._

View File

@@ -21,9 +21,9 @@ The Azure Monitor data source supports multiple services in the Azure cloud:
- **[Azure Monitor]({{< relref "#querying-the-azure-monitor-service" >}})** is the platform service that provides a single source for monitoring Azure resources.
- **[Application Insights]({{< relref "#querying-the-application-insights-service" >}})** is an extensible Application Performance Management (APM) service for web developers on multiple platforms and can be used to monitor your live web application - it will automatically detect performance anomalies.
- **[Azure Log Analytics]({{< relref "#querying-the-azure-log-analytics-service" >}})** (or Azure Logs) gives you access to log data collected by Azure Monitor.
- **[Application Insights Analytics]({{< relref "#writing-analytics-queries-for-the-application-insights-service" >}})** allows you to query [Application Insights data](https://docs.microsoft.com/en-us/azure/azure-monitor/app/analytics) using the same query language used for Azure Log Analytics.
- **[Application Insights Analytics]({{< relref "#query-the-application-insights-analytics-service" >}})** allows you to query [Application Insights data](https://docs.microsoft.com/en-us/azure/azure-monitor/app/analytics) using the same query language used for Azure Log Analytics.
## Adding the data source
## Add the data source
The data source can access metrics from four different services. You can configure access to the services that you use. It is also possible to use the same credentials for multiple services if that is how you have set it up in Azure AD.
@@ -76,10 +76,13 @@ In the query editor for a panel, after choosing your Azure Monitor data source,
- `Azure Monitor`
- `Application Insights`
- `Azure Log Analytics`
- `Insights Analytics`
The query editor will change depending on which one you pick. Azure Monitor is the default.
The query editor changes depending on which one you pick. Azure Monitor is the default.
## Querying the Azure Monitor service
Starting in Grafana 7.1, Insights Analytics replaced the former edit mode from within Application Insights.
## Query the Azure Monitor service
The Azure Monitor service provides metrics for all the Azure services that you have running. It helps you understand how your applications on Azure are performing and to proactively find issues affecting your applications.
@@ -93,29 +96,34 @@ Examples of metrics that you can get from the service are:
{{< docs-imagebox img="/img/docs/v60/azuremonitor-service-query-editor.png" class="docs-image--no-shadow" caption="Azure Monitor Query Editor" >}}
### Formatting legend keys with aliases for Azure Monitor
As of Grafana 7.1, the query editor allows you to query multiple dimensions for metrics that support them. Metrics that support multiple dimensions are those listed in the [Azure Monitor supported Metrics List](https://docs.microsoft.com/en-us/azure/azure-monitor/platform/metrics-supported) that have one or more values listed in the "Dimension" column for the metric.
### Format legend keys with aliases for Azure Monitor
The default legend formatting for the Azure Monitor API is:
`resourceName{dimensionValue=dimensionName}.metricName`
`metricName{dimensionName=dimensionValue,dimensionTwoName=DimensionTwoValue}`
These can be quite long but this formatting can be changed using aliases. In the Legend Format field, the aliases which are defined below can be combined any way you want.
> **Note:** Before Grafana 7.1, the formatting included the resource name in the default: `resourceName{dimensionName=dimensionValue}.metricName`. As of Grafana 7.1, the resource name has been removed from the default legend.
Azure Monitor Examples:
These can be quite long, but this formatting can be changed by using aliases. In the **Legend Format** field, you can combine the aliases defined below any way you want.
- `dimension: {{dimensionvalue}}`
- `{{resourcegroup}} - {{resourcename}}`
Azure Monitor examples:
- `Blob Type: {{ blobtype }}`
- `{{ resourcegroup }} - {{ resourcename }}`
### Alias patterns for Azure Monitor
- `{{resourcegroup}}` = replaced with the value of the Resource Group
- `{{namespace}}` = replaced with the value of the Namespace (e.g. Microsoft.Compute/virtualMachines)
- `{{resourcename}}` = replaced with the value of the Resource Name
- `{{metric}}` = replaced with metric name (e.g. Percentage CPU)
- `{{dimensionname}}` = replaced with dimension key/label (e.g. blobtype)
- `{{dimensionvalue}}` = replaced with dimension value (e.g. BlockBlob)
- `{{ resourcegroup }}` = replaced with the value of the Resource Group
- `{{ namespace }}` = replaced with the value of the Namespace (e.g. Microsoft.Compute/virtualMachines)
- `{{ resourcename }}` = replaced with the value of the Resource Name
- `{{ metric }}` = replaced with metric name (e.g. Percentage CPU)
- `{{ dimensionname }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with the first dimension's key/label (as sorted by the key/label) (e.g. blobtype)
- `{{ dimensionvalue }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with first dimension's value (as sorted by the key/label) (e.g. BlockBlob)
- `{{ arbitraryDim }}` = *Available in 7.1+* replaced with the value of the corresponding dimension. (e.g. `{{ blobtype }}` becomes BlockBlob)
### Templating with variables for Azure Monitor
### Create template variables for Azure Monitor
Instead of hard-coding things like server, application and sensor name in your metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data being displayed in your dashboard.
@@ -159,29 +167,31 @@ Grafana alerting is supported for the Azure Monitor service. This is not Azure A
{{< docs-imagebox img="/img/docs/v60/azuremonitor-alerting.png" class="docs-image--no-shadow" caption="Azure Monitor Alerting" >}}
## Querying the Application Insights Service
## Query the Application Insights Service
{{< docs-imagebox img="/img/docs/v60/appinsights-service-query-editor.png" class="docs-image--no-shadow" caption="Application Insights Query Editor" >}}
{{< docs-imagebox img="/img/docs/azuremonitor/insights_metrics_multi-dim.png" class="docs-image--no-shadow" caption="Application Insights Query Editor" >}}
As of Grafana 7.1, you can select more than one group by dimension.
### Formatting legend keys with aliases for Application Insights
The default legend formatting is:
`metric/name{group/by="groupbyvalue"}`
`metricName{dimensionName=dimensionValue,dimensionTwoName=DimensionTwoValue}`
In the Legend Format field, the aliases which are defined below can be combined any way you want.
Application Insights Examples:
Application Insights examples:
- `server: {{groupbyvalue}}`
- `city: {{groupbyvalue}}`
- `{{groupbyname}}: {{groupbyvalue}}`
- `city: {{ client/city }}`
- `{{ metric }} [Location: {{ client/countryOrRegion }}, {{ client/city }}]`
### Alias patterns for Application Insights
- `{{groupbyvalue}}` = replaced with the value of the group by
- `{{groupbyname}}` = replaced with the name/label of the group by
- `{{metric}}` = replaced with metric name (e.g. requests/count)
- `{{ groupbyvalue }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with the first dimension's key/label (as sorted by the key/label)
- `{{ groupbyname }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with first dimension's value (as sorted by the key/label) (e.g. BlockBlob)
- `{{ metric }}` = replaced with metric name (e.g. requests/count)
- `{{ arbitraryDim }}` = *Available in 7.1+* replaced with the value of the corresponding dimension. (e.g. `{{ client/city }}` becomes Chicago)
### Filter expressions for Application Insights
@@ -222,30 +232,55 @@ Grafana alerting is supported for Application Insights. This is not Azure Alerts
## Querying the Azure Log Analytics service
Queries are written in the new [Azure Log Analytics (or KustoDB) Query Language](https://docs.loganalytics.io/index). A Log Analytics Query can be formatted as Time Series data or as Table data.
Queries are written in the new [Azure Log Analytics (or KustoDB) Query Language](https://docs.loganalytics.io/index). A Log Analytics query can be formatted as time series data or as table data.
Time Series queries are for the Graph Panel (and other panels like the Single Stat panel) and must contain a datetime column, a metric name column and a value column. Here is an example query that returns the aggregated count grouped by the Category column and grouped by hour:
If your credentials give you access to multiple subscriptions, then choose the appropriate subscription before entering queries.
```
AzureActivity
### Time series queries
Time series queries are for the Graph panel and other panels like the SingleStat panel. Each query must contain at least a datetime column and a numeric value column. The result must also be sorted in ascending order by the datetime column.
Here is an example query that returns the aggregated count grouped by hour:
```kusto
Perf
| where $__timeFilter(TimeGenerated)
| summarize count() by Category, bin(TimeGenerated, 1h)
| summarize count() by bin(TimeGenerated, 1h)
| order by TimeGenerated asc
```
Table queries are mainly used in the Table panel and row a list of columns and rows. This example query returns rows with the 6 specified columns:
A query can also have one or more non-numeric/non-datetime columns, and those columns are considered dimensions and become labels in the response. For example, a query that returns the aggregated count grouped by hour, Computer, and the CounterName:
```kusto
Perf
| where $__timeFilter(TimeGenerated)
| summarize count() by bin(TimeGenerated, 1h), Computer, CounterName
| order by TimeGenerated asc
```
You can also select additional number value columns (with, or without multiple dimensions). For example, getting a count and average value by hour, Computer, CounterName, and InstanceName:
```kusto
Perf
| where $__timeFilter(TimeGenerated)
| summarize Samples=count(), AvgValue=avg(CounterValue)
by bin(TimeGenerated, $__interval), Computer, CounterName, InstanceName
| order by TimeGenerated asc
```
{{< docs-imagebox img="/img/docs/azuremonitor/logs_multi-value_multi-dim.png" class="docs-image--no-shadow" caption="Azure Logs query with multiple values and multiple dimensions" >}}
### Table queries
Table queries are mainly used in the Table panel and show a list of columns and rows. This example query returns rows with the six specified columns:
```kusto
AzureActivity
| where $__timeFilter()
| project TimeGenerated, ResourceGroup, Category, OperationName, ActivityStatus, Caller
| order by TimeGenerated desc
```
If your credentials give you access to multiple subscriptions then choose the appropriate subscription first.
{{< docs-imagebox img="/img/docs/v60/azureloganalytics-service-query-editor.png" class="docs-image--no-shadow" caption="Azure Log Analytics Query Editor" >}}
### Azure Log Analytics macros
To make writing queries easier there are several Grafana macros that can be used in the where clause of a query:
@@ -304,7 +339,7 @@ Example variable queries:
Example of a time series query using variables:
```
```kusto
Perf
| where ObjectName == "$object" and CounterName == "$metric"
| where TimeGenerated >= $__timeFrom() and TimeGenerated <= $__timeTo()
@@ -331,21 +366,11 @@ If you're not currently logged in to the Azure Portal, then the link opens the l
Grafana alerting is supported for Application Insights. This is not Azure Alerts support. Read more about how alerting in Grafana works in [Alerting rules]({{< relref "../../alerting/alerts-overview.md" >}}).
### Writing analytics queries For the Application Insights service
## Query the Application Insights Analytics service
If you change the service type to "Application Insights", the menu icon to the right adds another option, "Toggle Edit Mode". Once clicked, the query edit mode changes to give you a full text area in which to write log analytics queries. (This is identical to how the InfluxDB data source lets you write raw queries.)
If you change the service type to **Insights Analytics**, then a similar editor to the Log Analytics service is available. This service also uses the Kusto language, so the instructions for querying data are identical to [querying the log analytics service]({{< relref "#querying-the-azure-log-analytics-service" >}}), except that you query Application Insights Analytics data instead.
Once a query is written, the column names are automatically parsed out of the response data. You can then select them in the "X-axis", "Y-axis", and "Split On" dropdown menus, or just type them out.
There are some important caveats to remember:
- You'll want to order your y-axis in the query, eg. `order by timestamp asc`. The graph may come out looking bizarre otherwise. It's better to have Microsoft sort it on their side where it's faster, than to implement this in the plugin.
- If you copy a log analytics query, typically they'll end with a render instruction, like `render barchart`. This is unnecessary, but harmless.
- Currently, four default dashboard variables are supported: `$__timeFilter()`, `$__from`, `$__to`, and `$__interval`. If you're searching in timestamped data, replace the beginning of your where clause to `where $__timeFilter()`. Dashboard changes by time region are handled as you'd expect, as long as you leave the name of the `timestamp` column alone. Likewise, `$__interval` will automatically change based on the dashboard's time region _and_ the width of the chart being displayed. Use it in bins, so `bin(timestamp,$__interval)` changes into something like `bin(timestamp,1s)`. Use `$__from` and `$__to` if you just want the formatted dates to be inserted.
- Templated dashboard variables are not yet supported! They will come in a future version.
{{< docs-imagebox img="/img/docs/azuremonitor/insights_analytics_multi-dim.png" class="docs-image--no-shadow" caption="Azure Application Insights Analytics query with multiple dimensions" >}}
## Configure the data source with provisioning

View File

@@ -59,7 +59,7 @@ The new field with the link shown in log details:
## Querying Logs
Querying and displaying log data from Loki is available via [Explore]({{< relref "../explore" >}}), and with the [logs panel]({{< relref "../../panels/visualizations/logs-panel.md" >}}) in dashboards. Select the Loki data source, and then enter a log query to display your logs.
Querying and displaying log data from Loki is available via [Explore]({{< relref "../explore" >}}), and with the [logs panel]({{< relref "../../panels/visualizations/logs-panel.md" >}}) in dashboards. Select the Loki data source, and then enter a [LogQL](https://github.com/grafana/loki/blob/master/docs/logql.md) query to display your logs.
### Log Queries

View File

@@ -11,7 +11,7 @@ weight = 300
This page explains what transformations in Grafana are and how to use them.
> **Note:** This documentation refers to a Grafana 7.0 beta feature. This documentation will be frequently updated to reflect updates to the feature, and it will probably be broken into smaller sections when the feature moves out of beta.
> **Note:** This documentation refers to a Grafana 7.0 feature. This documentation will be frequently updated to reflect updates to the feature, and it will probably be broken into smaller sections when the feature moves out of beta.
Transformations process the result set before it's passed to the visualization. You access transformations in the Transform tab of the Grafana panel editor.
@@ -74,6 +74,7 @@ Grafana comes with the following transformations:
- [Join by field (outer join)](#join-by-field-outer-join)
- [Add field from calculation](#add-field-from-calculation)
- [Labels to fields](#labels-to-fields)
- [Series to rows](#series-to-rows)
- [Debug transformations](#debug-transformations)
Keep reading for detailed descriptions of each type of transformation and the options available for each, as well as suggestions on how to use them.
@@ -96,25 +97,33 @@ After I apply the transformation, there is no time value and each column has bee
### Merge
Use this transformation to combine the result from multiple queries into one single result based on the time field. This is helpful when using the table panel visualization.
> **Note:** This documentation refers to a Grafana 7.1 feature.
In the example below, we are visualizing multiple queries returning table data before applying the transformation.
Use this transformation to combine the result from multiple queries into one single result. This is helpful when using the table panel visualization. Values that can be merged are combined into the same row. Values are mergeable if the shared fields contain the same data.
{{< docs-imagebox img="/img/docs/transformations/table-data-before-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
In the example below, we have two queries returning table data. It is visualized as two separate tables before applying the transformation.
Here is the same example after applying the merge transformation.
Query A:
{{< docs-imagebox img="/img/docs/transformations/table-data-after-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
| Time | Job | Uptime |
|---------------------|---------|-----------|
| 2020-07-07 11:34:20 | node | 25260122 |
| 2020-07-07 11:24:20 | postgre | 123001233 |
If any of the queries return time series data, then a `Metric` column containing the name of the query is added. You can customize this value by defining `Label` on the source query.
Query B:
In the example below, we are visualizing multiple queries returning time series data before applying the transformation.
| Time | Job | Errors |
|---------------------|---------|--------|
| 2020-07-07 11:34:20 | node | 15 |
| 2020-07-07 11:24:20 | postgre | 5 |
{{< docs-imagebox img="/img/docs/transformations/time-series-before-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
Here is the result after applying the `Merge` transformation.
Here is the same example after applying the merge transformation.
| Time | Job | Errors | Uptime |
|---------------------|---------|--------|-----------|
| 2020-07-07 11:34:20 | node | 15 | 25260122 |
| 2020-07-07 11:24:20 | postgre | 5 | 123001233 |
{{< docs-imagebox img="/img/docs/transformations/time-series-after-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
### Filter by name
@@ -213,6 +222,43 @@ After I apply the transformation, my labels appear in the table as fields.
{{< docs-imagebox img="/img/docs/transformations/labels-to-fields-after-7-0.png" class="docs-image--no-shadow" max-width= "1100px" >}}
## Series to rows
> **Note:** This documentation refers to a Grafana 7.1 feature.
Use this transformation to combine the result from multiple time series data queries into one single result. This is helpful when using the table panel visualization.
The result from this transformation will contain three columns: `Time`, `Metric`, and `Value`. The `Metric` column is added so you can easily see which query the metric originates from. Customize this value by defining `Label` on the source query.
In the example below, we have two queries returning time series data. It is visualized as two separate tables before applying the transformation.
Query A:
| Time | Temperature |
|---------------------|-------------|
| 2020-07-07 11:34:20 | 25 |
| 2020-07-07 10:31:22 | 22 |
| 2020-07-07 09:30:05 | 19 |
Query B:
| Time | Humidity |
|---------------------|----------|
| 2020-07-07 11:34:20 | 24 |
| 2020-07-07 10:32:20 | 29 |
| 2020-07-07 09:30:57 | 33 |
Here is the result after applying the `Series to rows` transformation.
| Time | Metric | Value |
|---------------------|-------------|-------|
| 2020-07-07 11:34:20 | Temperature | 25 |
| 2020-07-07 11:34:20 | Humidity    | 24    |
| 2020-07-07 10:32:20 | Humidity | 29 |
| 2020-07-07 10:31:22 | Temperature | 22 |
| 2020-07-07 09:30:57 | Humidity | 33 |
| 2020-07-07 09:30:05 | Temperature | 19 |
## Debug transformations
To see the input and the output result sets of the transformation, click the bug icon on the right side of the transformation row.

View File

@@ -32,6 +32,17 @@ Use these settings to refine your visualization.
- **Points -** Display points for values.
- **Point radius -** Controls how large the points are.
### Stacking and null value
- **Stack -** Each series is stacked on top of another.
- **Percent -** Available when **Stack** is selected. Each series is drawn as a percentage of the total of all series.
- **Null value -** How null values are displayed. _This is a very important setting._ See note below.
- **connected -** If there is a gap in the series, meaning a null value or values, then the line will skip the gap and connect to the next non-null value.
- **null -** (default) If there is a gap in the series, meaning a null value, then the line in the graph will be broken and show the gap.
- **null as zero -** If there is a gap in the series, meaning a null value, then it will be displayed as a zero value in the graph panel.
> **Note:** If you are monitoring a server's CPU load and the load reaches 100%, then the server will lock up and the agent sending statistics will not be able to collect the load statistic. This leads to a gap in the metrics and having the default as _null_ means Grafana will show the gaps and indicate that something is wrong. If this is set to _connected_, then it would be easy to miss this signal.
### Hover tooltip
Use these settings to change the appearance of the tooltip that appears when you hover your cursor over the graph visualization.
@@ -44,17 +55,6 @@ Use these settings to change the appearance of the tooltip that appears when you
- **Increasing -** The series in the hover tooltip are sorted by value and in increasing order, with the lowest value at the top of the list.
- **Decreasing -** The series in the hover tooltip are sorted by value and in decreasing order, with the highest value at the top of the list.
### Stacking and null value
- **Stack -** Each series is stacked on top of another.
- **Percent -** Available when **Stack** is selected. Each series is drawn as a percentage of the total of all series.
- **Null value -** How null values are displayed. _This is a very important setting._ See note below.
- **connected -** If there is a gap in the series, meaning a null value or values, then the line will skip the gap and connect to the next non-null value.
- **null -** (default) If there is a gap in the series, meaning a null value, then the line in the graph will be broken and show the gap.
- **null as zero -** If there is a gap in the series, meaning a null value, then it will be displayed as a zero value in the graph panel.
> **Note:** If you are monitoring a server's CPU load and the load reaches 100%, then the server will lock up and the agent sending statistics will not be able to collect the load statistic. This leads to a gap in the metrics and having the default as _null_ means Grafana will show the gaps and indicate that something is wrong. If this is set to _connected_, then it would be easy to miss this signal.
## Series overrides
Series overrides allow a series in a graph panel to be rendered differently from the others. You can customize display options on a per-series basis or by using regex rules. For example, one series can have a thicker line width to make it stand out or be moved to the right Y-axis.

2
go.mod
View File

@@ -30,7 +30,7 @@ require (
github.com/gorilla/websocket v1.4.1
github.com/gosimple/slug v1.4.2
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4
github.com/grafana/grafana-plugin-sdk-go v0.70.0
github.com/grafana/grafana-plugin-sdk-go v0.71.0
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd
github.com/hashicorp/go-plugin v1.2.2
github.com/hashicorp/go-version v1.1.0

4
go.sum
View File

@@ -148,8 +148,8 @@ github.com/gosimple/slug v1.4.2 h1:jDmprx3q/9Lfk4FkGZtvzDQ9Cj9eAmsjzeQGp24PeiQ=
github.com/gosimple/slug v1.4.2/go.mod h1:ER78kgg1Mv0NQGlXiDe57DpCyfbNywXXZ9mIorhxAf0=
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4 h1:SPdxCL9BChFTlyi0Khv64vdCW4TMna8+sxL7+Chx+Ag=
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4/go.mod h1:nc0XxBzjeGcrMltCDw269LoWF9S8ibhgxolCdA1R8To=
github.com/grafana/grafana-plugin-sdk-go v0.70.0 h1:tbwf0KMp8QEQQYF3bDBOOv/npegD6YP8T90OWbLr7n4=
github.com/grafana/grafana-plugin-sdk-go v0.70.0/go.mod h1:NvxLzGkVhnoBKwzkst6CFfpMFKwAdIUZ1q8ssuLeF60=
github.com/grafana/grafana-plugin-sdk-go v0.71.0 h1:dF2H1O03aTekFujss+iU/dcrvdDMsk16URbyExNJxqY=
github.com/grafana/grafana-plugin-sdk-go v0.71.0/go.mod h1:NvxLzGkVhnoBKwzkst6CFfpMFKwAdIUZ1q8ssuLeF60=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0 h1:0IKlLyQ3Hs9nDaiK5cSHAGmcQEIC8l2Ts1u6x5Dfrqg=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0/go.mod h1:mJzapYve32yjrKlk9GbyCZHuPgZsrbyIbyKhSzOpg6s=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=

View File

@@ -1,6 +1,8 @@
{
"npmClient": "yarn",
"useWorkspaces": true,
"packages": ["packages/*"],
"version": "7.1.0-pre.0"
"packages": [
"packages/*"
],
"version": "7.1.0-beta.3"
}

View File

@@ -3,7 +3,7 @@
"license": "Apache-2.0",
"private": true,
"name": "grafana",
"version": "7.1.0-pre",
"version": "7.1.0-beta3",
"repository": "github:grafana/grafana",
"scripts": {
"api-tests": "jest --notify --watch --config=devenv/e2e-api-tests/jest.js",
@@ -267,7 +267,7 @@
"regenerator-runtime": "0.13.3",
"reselect": "4.0.0",
"rst2html": "github:thoward/rst2html#990cb89",
"rxjs": "6.5.5",
"rxjs": "6.6.0",
"search-query-parser": "1.5.4",
"slate": "0.47.8",
"slate-plain-serializer": "0.7.10",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/data",
"version": "7.1.0-pre.0",
"version": "7.1.0-beta.3",
"description": "Grafana Data Library",
"keywords": [
"typescript"
@@ -26,7 +26,7 @@
"@braintree/sanitize-url": "4.0.0",
"apache-arrow": "0.16.0",
"lodash": "4.17.15",
"rxjs": "6.5.5",
"rxjs": "6.6.0",
"xss": "1.0.6"
},
"devDependencies": {

View File

@@ -21,7 +21,7 @@ const buildCjsPackage = ({ env }) => {
globals: {},
},
],
external: ['lodash', 'apache-arrow'], // Use Lodash & arrow from grafana
external: ['lodash', 'rxjs', 'apache-arrow'], // Use Lodash, rxjs & arrow from grafana
plugins: [
json({
include: ['../../node_modules/moment-timezone/data/packed/latest.json'],

View File

@@ -0,0 +1,12 @@
import { DataFrame, FieldType } from '../types/dataFrame';
/**
 * A frame counts as a single time series when it has at most two fields
 * and at least one of them is a time field.
 */
export const isTimeSerie = (frame: DataFrame): boolean => {
  return frame.fields.length <= 2 && frame.fields.some(field => field.type === FieldType.time);
};
/**
 * True when every frame in the result set is a time series.
 */
export const isTimeSeries = (data: DataFrame[]): boolean => {
  return data.every(frame => isTimeSerie(frame));
};

View File

@@ -152,8 +152,8 @@ describe('Format value', () => {
it('should return formatted value if there are no matching value mappings', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
{ id: 0, text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 1, text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
];
const value = '10';
const instance = getDisplayProcessorFromConfig({ decimals: 1, mappings: valueMappings });
@@ -186,8 +186,8 @@ describe('Format value', () => {
it('should return mapped value if there are matching value mappings', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = '11';
const instance = getDisplayProcessorFromConfig({ decimals: 1, mappings: valueMappings });
@@ -196,9 +196,7 @@ describe('Format value', () => {
});
it('should return mapped value and leave numeric value in tact if value mapping maps to empty string', () => {
const valueMappings: ValueMapping[] = [
{ id: 1, operator: '', text: '', type: MappingType.ValueToText, value: '1' },
];
const valueMappings: ValueMapping[] = [{ id: 1, text: '', type: MappingType.ValueToText, value: '1' }];
const value = '1';
const instance = getDisplayProcessorFromConfig({ decimals: 1, mappings: valueMappings });

View File

@@ -545,7 +545,7 @@ describe('getLinksSupplier', () => {
expect.objectContaining({
title: 'testDS',
href:
'/explore?left={"datasource":"testDS","queries":["12345"],"mode":"Metrics","ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}',
'/explore?left={"datasource":"testDS","queries":["12345"],"ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}',
onClick: undefined,
})
);

View File

@@ -8,10 +8,11 @@ import { filterFramesByRefIdTransformer } from './transformers/filterByRefId';
import { orderFieldsTransformer } from './transformers/order';
import { organizeFieldsTransformer } from './transformers/organize';
import { seriesToColumnsTransformer } from './transformers/seriesToColumns';
import { seriesToRowsTransformer } from './transformers/seriesToRows';
import { renameFieldsTransformer } from './transformers/rename';
import { labelsToFieldsTransformer } from './transformers/labelsToFields';
import { ensureColumnsTransformer } from './transformers/ensureColumns';
import { mergeTransformer } from './transformers/merge/merge';
import { mergeTransformer } from './transformers/merge';
export const standardTransformers = {
noopTransformer,
@@ -25,6 +26,7 @@ export const standardTransformers = {
reduceTransformer,
calculateFieldTransformer,
seriesToColumnsTransformer,
seriesToRowsTransformer,
renameFieldsTransformer,
labelsToFieldsTransformer,
ensureColumnsTransformer,

View File

@@ -8,6 +8,7 @@ export enum DataTransformerID {
rename = 'rename',
calculateField = 'calculateField',
seriesToColumns = 'seriesToColumns',
seriesToRows = 'seriesToRows',
merge = 'merge',
labelsToFields = 'labelsToFields',
filterFields = 'filterFields',

View File

@@ -1,9 +1,9 @@
import { mockTransformationsRegistry } from '../../../utils/tests/mockTransformationsRegistry';
import { DataTransformerConfig, Field, FieldType } from '../../../types';
import { DataTransformerID } from '../ids';
import { toDataFrame } from '../../../dataframe';
import { transformDataFrame } from '../../transformDataFrame';
import { ArrayVector } from '../../../vector';
import { mockTransformationsRegistry } from '../../utils/tests/mockTransformationsRegistry';
import { DataTransformerConfig, Field, FieldType } from '../../types';
import { DataTransformerID } from './ids';
import { toDataFrame } from '../../dataframe';
import { transformDataFrame } from '../transformDataFrame';
import { ArrayVector } from '../../vector';
import { mergeTransformer, MergeTransformerOptions } from './merge';
describe('Merge multipe to single', () => {
@@ -35,12 +35,11 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [seriesA, seriesB]);
const expected: Field[] = [
createField('Time', FieldType.time, [1000, 2000]),
createField('Metric', FieldType.string, ['A', 'B']),
createField('Value', FieldType.number, [1, -1]),
createField('Time', FieldType.time, [2000, 1000]),
createField('Temp', FieldType.number, [-1, 1]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine two series with multiple values into one', () => {
@@ -67,12 +66,11 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [seriesA, seriesB]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5]),
createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine three series into one', () => {
@@ -107,12 +105,11 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [seriesA, seriesB, seriesC]);
const expected: Field[] = [
createField('Time', FieldType.time, [500, 1000, 2000]),
createField('Metric', FieldType.string, ['C', 'A', 'B']),
createField('Value', FieldType.number, [2, 1, -1]),
createField('Time', FieldType.time, [2000, 1000, 500]),
createField('Temp', FieldType.number, [-1, 1, 2]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine one serie and two tables into one table', () => {
@@ -149,13 +146,12 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [tableA, seriesB, tableB]);
const expected: Field[] = [
createField('Time', FieldType.time, [500, 1000, 1000]),
createField('Metric', FieldType.string, ['C', 'A', 'B']),
createField('Temp', FieldType.number, [2, 1, -1]),
createField('Humidity', FieldType.number, [5, 10, null]),
createField('Time', FieldType.time, [1000, 1000, 500]),
createField('Temp', FieldType.number, [1, -1, 2]),
createField('Humidity', FieldType.number, [10, null, 5]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine one serie and two tables with ISO dates into one table', () => {
@@ -192,13 +188,12 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [tableA, seriesB, tableC]);
const expected: Field[] = [
createField('Time', FieldType.time, ['2019-09-01T11:10:23Z', '2019-10-01T11:10:23Z', '2019-11-01T11:10:23Z']),
createField('Metric', FieldType.string, ['B', 'A', 'C']),
createField('Temp', FieldType.number, [-1, 1, 2]),
createField('Humidity', FieldType.number, [null, 10, 5]),
createField('Time', FieldType.time, ['2019-11-01T11:10:23Z', '2019-10-01T11:10:23Z', '2019-09-01T11:10:23Z']),
createField('Temp', FieldType.number, [2, 1, -1]),
createField('Humidity', FieldType.number, [5, 10, null]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine three tables with multiple values into one', () => {
@@ -235,14 +230,15 @@ describe('Merge multipe to single', () => {
});
const result = transformDataFrame([cfg], [tableA, tableB, tableC]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 100, 124, 125, 126, 149, 150, 200]),
createField('Temp', FieldType.number, [1, -1, 1, 4, 2, 3, 5, 4, 5]),
createField('Humidity', FieldType.number, [10, null, 22, 25, null, null, 30, 14, 55]),
createField('Enabled', FieldType.boolean, [null, true, null, null, false, true, null, null, null]),
createField('Time', FieldType.time, [200, 150, 149, 126, 125, 124, 100, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 5, 3, 2, 4, 1, -1, 1]),
createField('Humidity', FieldType.number, [55, 14, 30, null, null, 25, 10, null, 22]),
createField('Enabled', FieldType.boolean, [null, null, null, true, false, null, null, true, null]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine two time series, where first serie fields has displayName, into one', () => {
@@ -269,13 +265,14 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [serieA, serieB]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5]),
createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1]),
];
expect(result[0].fields[2].config).toEqual({});
expect(result[0].fields).toMatchObject(expected);
const fields = unwrap(result[0].fields);
expect(fields[1].config).toEqual({});
expect(fields).toEqual(expected);
});
it('combine two time series, where first serie fields has units, into one', () => {
@@ -302,13 +299,14 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [serieA, serieB]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5], { units: 'celsius' }),
createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1], { units: 'celsius' }),
];
expect(result[0].fields[2].config).toEqual({ units: 'celsius' });
expect(result[0].fields).toMatchObject(expected);
const fields = unwrap(result[0].fields);
expect(fields[1].config).toEqual({ units: 'celsius' });
expect(fields).toEqual(expected);
});
it('combine two time series, where second serie fields has units, into one', () => {
@@ -335,16 +333,28 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [serieA, serieB]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5]),
createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1]),
];
expect(result[0].fields[2].config).toEqual({});
expect(result[0].fields).toMatchObject(expected);
const fields = unwrap(result[0].fields);
expect(fields[1].config).toEqual({});
expect(fields).toEqual(expected);
});
});
// Builds a Field with its values wrapped in an ArrayVector.
const createField = (name: string, type: FieldType, values: any[], config = {}): Field => ({
  name,
  type,
  values: new ArrayVector(values),
  config,
  labels: undefined,
});

// Normalizes fields for comparison: re-creates each field from its unpacked
// values so the concrete vector implementation does not affect equality.
const unwrap = (fields: Field[]): Field[] => {
  const result: Field[] = [];
  for (const field of fields) {
    const values = field.values.toArray().map((value: any) => value);
    result.push(createField(field.name, field.type, values, field.config));
  }
  return result;
};

View File

@@ -0,0 +1,216 @@
import { DataTransformerID } from './ids';
import { DataTransformerInfo } from '../../types/transformations';
import { DataFrame, Field, FieldType } from '../../types/dataFrame';
import { omit } from 'lodash';
import { ArrayVector } from '../../vector/ArrayVector';
import { MutableDataFrame, sortDataFrame } from '../../dataframe';
// Builds the extra key suffix used to disambiguate two rows that collide on
// the primary key but cannot be merged.
type MergeDetailsKeyFactory = (existing: Record<string, any>, value: Record<string, any>) => string;

export interface MergeTransformerOptions {}

export const mergeTransformer: DataTransformerInfo<MergeTransformerOptions> = {
  id: DataTransformerID.merge,
  name: 'Merge series/tables',
  description: 'Merges multiple series/tables into a single serie/table',
  defaultOptions: {},

  /**
   * Combines multiple frames into one. Rows are identified by the values of
   * the fields shared by ALL frames (the key fields); rows with the same key
   * are merged when their remaining values do not conflict.
   */
  transformer: (options: MergeTransformerOptions) => {
    return (data: DataFrame[]) => {
      // Nothing to merge with zero or one frame.
      if (!Array.isArray(data) || data.length <= 1) {
        return data;
      }

      const fieldByName = new Set<string>();
      const fieldIndexByName: Record<string, Record<number, number>> = {};
      const fieldNamesForKey: string[] = [];
      const dataFrame = new MutableDataFrame();

      // First pass: build the output field structure (one field per distinct
      // name) and record where each field lives in each frame.
      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) {
          const field = frame.fields[fieldIndex];

          if (!fieldByName.has(field.name)) {
            dataFrame.addField(copyFieldStructure(field));
            fieldByName.add(field.name);
          }

          fieldIndexByName[field.name] = fieldIndexByName[field.name] || {};
          fieldIndexByName[field.name][frameIndex] = fieldIndex;

          // Only after the last frame do we know which fields exist everywhere.
          if (data.length - 1 !== frameIndex) {
            continue;
          }

          // A field present in every frame becomes part of the merge key.
          if (Object.keys(fieldIndexByName[field.name]).length === data.length) {
            fieldNamesForKey.push(field.name);
          }
        }
      }

      // Without any shared field there is nothing to merge rows on.
      if (fieldNamesForKey.length === 0) {
        return data;
      }

      const dataFrameIndexByKey: Record<string, number> = {};
      const keyFactory = createKeyFactory(data, fieldIndexByName, fieldNamesForKey);
      const detailsKeyFactory = createDetailsKeyFactory(fieldByName, fieldNamesForKey);
      const valueMapper = createValueMapper(data, fieldByName, fieldIndexByName);

      // Second pass: merge (or append) every row into the combined frame.
      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let valueIndex = 0; valueIndex < frame.length; valueIndex++) {
          const key = keyFactory(frameIndex, valueIndex);
          const value = valueMapper(frameIndex, valueIndex);
          mergeOrAdd(key, value, dataFrame, dataFrameIndexByKey, detailsKeyFactory);
        }
      }

      const timeIndex = dataFrame.fields.findIndex(field => field.type === FieldType.time);

      // BUGFIX: findIndex always returns a number (-1 when nothing matches),
      // so the previous `typeof timeIndex === 'number'` check was always true
      // and frames without a time field were sorted on index -1. Only sort
      // when a time field actually exists.
      if (timeIndex > -1) {
        return [sortDataFrame(dataFrame, timeIndex, true)];
      }

      return [dataFrame];
    };
  },
};
// Clones a field's structure (name, type, etc.) without its data: values
// start out empty and any per-field displayName override is dropped so the
// merged frame does not inherit a single query's label.
const copyFieldStructure = (field: Field): Field => {
  const structure = omit(field, ['values', 'state', 'labels', 'config']);
  const config = omit(field.config, 'displayName');

  return {
    ...structure,
    values: new ArrayVector(),
    config: { ...config },
  };
};
// Returns a function that computes the merge key for a given row of a given
// frame by concatenating the values of all key fields at that row.
const createKeyFactory = (
  data: DataFrame[],
  fieldPointerByName: Record<string, Record<string, number>>,
  keyFieldNames: string[]
) => {
  // Per frame index, the list of field indexes that make up the key.
  const factoryIndex: Record<string, number[]> = {};

  for (const fieldName of keyFieldNames) {
    for (const frameIndex of Object.keys(fieldPointerByName[fieldName])) {
      const fieldIndexes = (factoryIndex[frameIndex] = factoryIndex[frameIndex] || []);
      fieldIndexes.push(fieldPointerByName[fieldName][frameIndex]);
    }
  }

  return (frameIndex: number, valueIndex: number): string => {
    let key = '';
    for (const fieldIndex of factoryIndex[frameIndex]) {
      key += data[frameIndex].fields[fieldIndex].values.get(valueIndex);
    }
    return key;
  };
};
// Returns a function producing the key suffix used to disambiguate two rows
// that collide on the primary key. Key fields are already part of that key,
// so only the remaining fields can tell the rows apart: any field present in
// both rows with differing values contributes its new value to the suffix.
const createDetailsKeyFactory = (fieldByName: Set<string>, fieldNamesForKey: string[]): MergeDetailsKeyFactory => {
  const excluded = new Set(fieldNamesForKey);
  const checkOrder = Array.from(fieldByName).filter(fieldName => !excluded.has(fieldName));

  return (existing: Record<string, any>, value: Record<string, any>) => {
    let key = '';

    for (const fieldName of checkOrder) {
      const current = existing[fieldName];
      const incoming = value[fieldName];

      // Fields absent from either row, or equal in both, do not discriminate.
      if (typeof current === 'undefined' || typeof incoming === 'undefined' || current === incoming) {
        continue;
      }
      key += incoming;
    }

    return key;
  };
};
// Returns a function mapping one row of one frame to a { fieldName: value }
// record keyed by the merged frame's field names. Fields the frame does not
// carry (or cannot safely read) are simply skipped.
const createValueMapper = (
  data: DataFrame[],
  fieldByName: Set<string>,
  fieldIndexByName: Record<string, Record<number, number>>
) => {
  return (frameIndex: number, valueIndex: number) => {
    const value: Record<string, any> = {};

    for (const fieldName of Array.from(fieldByName)) {
      const fieldIndexByFrameIndex = fieldIndexByName[fieldName];
      if (!fieldIndexByFrameIndex) {
        continue;
      }

      const fieldIndex = fieldIndexByFrameIndex[frameIndex];
      if (typeof fieldIndex !== 'number') {
        continue;
      }

      // Defensive reads: tolerate sparse/malformed frames rather than throw.
      const frame = data[frameIndex];
      const fields = frame && frame.fields;
      if (!fields) {
        continue;
      }

      const field = fields[fieldIndex];
      if (!field || !field.values) {
        continue;
      }

      value[fieldName] = field.values.get(valueIndex);
    }

    return value;
  };
};
// Two rows can be merged when no field that is present (non-null) in the
// existing row holds a conflicting value in the incoming row.
const isMergable = (existing: Record<string, any>, value: Record<string, any>): boolean => {
  for (const prop of Object.keys(value)) {
    const current = existing[prop];

    // Missing or null slots in the existing row never conflict.
    if (typeof current === 'undefined' || current === null) {
      continue;
    }

    if (current !== value[prop]) {
      return false;
    }
  }
  return true;
};
/**
 * Inserts `value` into the frame under `key`. If a row already exists for the
 * key it is merged in place when compatible; otherwise the key is extended
 * with the conflicting detail values and the insertion is retried until a
 * free or mergeable slot is found.
 */
const mergeOrAdd = (
  key: string,
  value: Record<string, any>,
  dataFrame: MutableDataFrame,
  dataFrameIndexByKey: Record<string, number>,
  detailsKeyFactory: MergeDetailsKeyFactory
) => {
  let currentKey = key;

  for (;;) {
    if (typeof dataFrameIndexByKey[currentKey] === 'undefined') {
      // First row for this key: append and remember where it lives.
      dataFrame.add(value);
      dataFrameIndexByKey[currentKey] = dataFrame.length - 1;
      return;
    }

    const rowIndex = dataFrameIndexByKey[currentKey];
    const existing = dataFrame.get(rowIndex);

    if (isMergable(existing, value)) {
      // Incoming values win over (null/absent) existing ones.
      dataFrame.set(rowIndex, { ...existing, ...value });
      return;
    }

    // Conflict: derive a more specific key and try again.
    currentKey += detailsKeyFactory(existing, value);
  }
};

View File

@@ -1,135 +0,0 @@
import { MutableDataFrame } from '../../../dataframe';
import {
DataFrame,
FieldType,
Field,
TIME_SERIES_TIME_FIELD_NAME,
TIME_SERIES_VALUE_FIELD_NAME,
} from '../../../types/dataFrame';
import { ArrayVector } from '../../../vector';
import { omit } from 'lodash';
import { getFrameDisplayName } from '../../../field';
/**
 * Result of DataFrameBuilder.build(): the combined (still empty) frame with
 * its field structure in place, plus a mapper that turns one row of a source
 * frame into a record keyed by the combined frame's field names.
 */
interface DataFrameBuilderResult {
  dataFrame: MutableDataFrame;
  valueMapper: ValueMapper;
}

// Maps the value at valueIndex of `frame` (whose time field sits at
// timeIndex) to a { fieldName: value } record for the combined frame.
type ValueMapper = (frame: DataFrame, valueIndex: number, timeIndex: number) => Record<string, any>;

// Name of the synthetic column identifying which frame a row came from.
const TIME_SERIES_METRIC_FIELD_NAME = 'Metric';

/**
 * Accumulates the field structure of several frames and builds a single
 * MutableDataFrame able to hold all their rows.
 *
 * When every input frame is a plain time series (at most two fields), the
 * output uses the standard Time/Metric/Value columns; otherwise value fields
 * keep their original names.
 */
export class DataFrameBuilder {
  // True while every frame seen so far has at most two fields.
  private isOnlyTimeSeries: boolean;
  // True once a two-field frame was seen; triggers the Metric column so rows
  // can be traced back to their source frame.
  private displayMetricField: boolean;
  // Value (non-time) field structures, keyed by field name.
  private valueFields: Record<string, Field>;
  // Structure of the shared time field (first one encountered wins).
  private timeField: Field | null;

  constructor() {
    this.isOnlyTimeSeries = true;
    this.displayMetricField = false;
    this.valueFields = {};
    this.timeField = null;
  }

  /**
   * Registers the fields of `frame` (time field at `timeIndex`) into the
   * structure being built. Call once per source frame before build().
   */
  addFields(frame: DataFrame, timeIndex: number): void {
    if (frame.fields.length > 2) {
      this.isOnlyTimeSeries = false;
    }
    if (frame.fields.length === 2) {
      this.displayMetricField = true;
    }
    for (let index = 0; index < frame.fields.length; index++) {
      const field = frame.fields[index];
      if (index === timeIndex) {
        // All frames share one output time column; keep the first structure.
        if (!this.timeField) {
          this.timeField = this.copyStructure(field, TIME_SERIES_TIME_FIELD_NAME);
        }
        continue;
      }
      if (!this.valueFields[field.name]) {
        this.valueFields[field.name] = this.copyStructure(field, field.name);
      }
    }
  }

  // Produces the (empty) combined frame and the matching row mapper.
  build(): DataFrameBuilderResult {
    return {
      dataFrame: this.createDataFrame(),
      valueMapper: this.createValueMapper(),
    };
  }

  // Creates the function that translates a source row into an output record.
  private createValueMapper(): ValueMapper {
    return (frame: DataFrame, valueIndex: number, timeIndex: number) => {
      return frame.fields.reduce((values: Record<string, any>, field, index) => {
        const value = field.values.get(valueIndex);
        if (index === timeIndex) {
          values[TIME_SERIES_TIME_FIELD_NAME] = value;
          // The Metric column carries the source frame's display name.
          if (this.displayMetricField) {
            values[TIME_SERIES_METRIC_FIELD_NAME] = getFrameDisplayName(frame);
          }
          return values;
        }
        if (this.isOnlyTimeSeries) {
          // Pure time series collapse into a single shared Value column.
          values[TIME_SERIES_VALUE_FIELD_NAME] = value;
          return values;
        }
        values[field.name] = value;
        return values;
      }, {});
    };
  }

  // Assembles the empty output frame from the recorded field structures.
  private createDataFrame(): MutableDataFrame {
    const dataFrame = new MutableDataFrame();
    if (this.timeField) {
      dataFrame.addField(this.timeField);
      if (this.displayMetricField) {
        dataFrame.addField({
          name: TIME_SERIES_METRIC_FIELD_NAME,
          type: FieldType.string,
        });
      }
    }
    const valueFields = Object.values(this.valueFields);
    if (this.isOnlyTimeSeries) {
      // All series share one Value column; reuse the first field's structure.
      if (valueFields.length > 0) {
        dataFrame.addField({
          ...valueFields[0],
          name: TIME_SERIES_VALUE_FIELD_NAME,
        });
      }
      return dataFrame;
    }
    for (const field of valueFields) {
      dataFrame.addField(field);
    }
    return dataFrame;
  }

  // Copies a field's structure (without data/state/labels) under a new name,
  // dropping any displayName override from its config.
  private copyStructure(field: Field, name: string): Field {
    return {
      ...omit(field, ['values', 'name', 'state', 'labels', 'config']),
      name,
      values: new ArrayVector(),
      config: {
        ...omit(field.config, 'displayName'),
      },
    };
  }
}

View File

@@ -1,74 +0,0 @@
import { DataFrame } from '../../../types/dataFrame';
import { timeComparer } from '../../../field/fieldComparers';
import { sortDataFrame } from '../../../dataframe';
import { TimeFieldsByFrame } from './TimeFieldsByFrame';
// One value popped off the stack: the frame it came from, the row index
// within that frame, and the index of that frame's time field.
interface DataFrameStackValue {
  valueIndex: number;
  timeIndex: number;
  frame: DataFrame;
}

/**
 * Yields rows from several frames in global time order. Frames are pushed in,
 * each gets sorted by its time field on the first pop, and pop() repeatedly
 * returns the earliest not-yet-consumed row across all frames.
 */
export class DataFramesStackedByTime {
  // Next unread row index per frame (keyed by frame position).
  private valuesPointerByFrame: Record<number, number>;
  private dataFrames: DataFrame[];
  // Frames are lazily sorted on the first pop().
  private isSorted: boolean;

  constructor(private timeFields: TimeFieldsByFrame) {
    this.valuesPointerByFrame = {};
    this.dataFrames = [];
    this.isSorted = false;
  }

  // Adds a frame to the stack and returns its index.
  push(frame: DataFrame): number {
    const index = this.dataFrames.length;
    this.valuesPointerByFrame[index] = 0;
    this.dataFrames.push(frame);
    return index;
  }

  /**
   * Returns the frame/row with the smallest pending time value and advances
   * that frame's pointer. Callers are expected not to pop more than
   * getLength() times.
   */
  pop(): DataFrameStackValue {
    if (!this.isSorted) {
      this.sortByTime();
      this.isSorted = true;
    }
    // Pick the frame whose next unread value has the earliest timestamp.
    const frameIndex = this.dataFrames.reduce((champion, frame, index) => {
      const championTime = this.peekTimeValueForFrame(champion);
      const contenderTime = this.peekTimeValueForFrame(index);
      return timeComparer(contenderTime, championTime) >= 0 ? champion : index;
    }, 0);
    const previousPointer = this.movePointerForward(frameIndex);
    return {
      frame: this.dataFrames[frameIndex],
      valueIndex: previousPointer,
      timeIndex: this.timeFields.getFieldIndex(frameIndex),
    };
  }

  // Total number of rows across all pushed frames.
  getLength(): number {
    const frames = Object.values(this.dataFrames);
    return frames.reduce((length: number, frame) => (length += frame.length), 0);
  }

  // Time value the given frame would yield on its next pop.
  private peekTimeValueForFrame(frameIndex: number): any {
    const timeField = this.timeFields.getField(frameIndex);
    const valuePointer = this.valuesPointerByFrame[frameIndex];
    return timeField.values.get(valuePointer);
  }

  // Advances the frame's pointer and returns its previous position.
  private movePointerForward(frameIndex: number): number {
    const currentPointer = this.valuesPointerByFrame[frameIndex];
    this.valuesPointerByFrame[frameIndex] = currentPointer + 1;
    return currentPointer;
  }

  // Sorts each frame by its own time field so per-frame pointers advance in
  // chronological order.
  private sortByTime() {
    this.dataFrames = this.dataFrames.map((frame, index) => {
      const timeFieldIndex = this.timeFields.getFieldIndex(index);
      return sortDataFrame(frame, timeFieldIndex);
    });
  }
}

View File

@@ -1,39 +0,0 @@
import { isNumber } from 'lodash';
import { Field, DataFrame } from '../../../types/dataFrame';
import { getTimeField } from '../../../dataframe';
export class TimeFieldsByFrame {
private timeIndexByFrameIndex: Record<number, number>;
private timeFieldByFrameIndex: Record<number, Field>;
constructor() {
this.timeIndexByFrameIndex = {};
this.timeFieldByFrameIndex = {};
}
add(frameIndex: number, frame: DataFrame): void {
const fieldDescription = getTimeField(frame);
const timeIndex = fieldDescription?.timeIndex;
const timeField = fieldDescription?.timeField;
if (isNumber(timeIndex)) {
this.timeIndexByFrameIndex[frameIndex] = timeIndex;
}
if (timeField) {
this.timeFieldByFrameIndex[frameIndex] = timeField;
}
}
getField(frameIndex: number): Field {
return this.timeFieldByFrameIndex[frameIndex];
}
getFieldIndex(frameIndex: number): number {
return this.timeIndexByFrameIndex[frameIndex];
}
getLength() {
return Object.keys(this.timeIndexByFrameIndex).length;
}
}

View File

@@ -1,47 +0,0 @@
import { DataTransformerID } from '../ids';
import { DataTransformerInfo } from '../../../types/transformations';
import { DataFrame } from '../../../types/dataFrame';
import { DataFrameBuilder } from './DataFrameBuilder';
import { TimeFieldsByFrame } from './TimeFieldsByFrame';
import { DataFramesStackedByTime } from './DataFramesStackedByTime';
export interface MergeTransformerOptions {}
export const mergeTransformer: DataTransformerInfo<MergeTransformerOptions> = {
id: DataTransformerID.merge,
name: 'Merge series/tables',
description: 'Merges multiple series/tables by time into a single serie/table',
defaultOptions: {},
transformer: (options: MergeTransformerOptions) => {
return (data: DataFrame[]) => {
if (!Array.isArray(data) || data.length <= 1) {
return data;
}
const timeFields = new TimeFieldsByFrame();
const framesStack = new DataFramesStackedByTime(timeFields);
const dataFrameBuilder = new DataFrameBuilder();
for (const frame of data) {
const frameIndex = framesStack.push(frame);
timeFields.add(frameIndex, frame);
const timeIndex = timeFields.getFieldIndex(frameIndex);
dataFrameBuilder.addFields(frame, timeIndex);
}
if (data.length !== timeFields.getLength()) {
return data;
}
const { dataFrame, valueMapper } = dataFrameBuilder.build();
for (let index = 0; index < framesStack.getLength(); index++) {
const { frame, valueIndex, timeIndex } = framesStack.pop();
dataFrame.add(valueMapper(frame, valueIndex, timeIndex));
}
return [dataFrame];
};
},
};

View File

@@ -0,0 +1,237 @@
import { mockTransformationsRegistry } from '../../utils/tests/mockTransformationsRegistry';
import { DataTransformerConfig, Field, FieldType } from '../../types';
import { DataTransformerID } from './ids';
import { toDataFrame } from '../../dataframe';
import { transformDataFrame } from '../transformDataFrame';
import { ArrayVector } from '../../vector';
import { seriesToRowsTransformer, SeriesToRowsTransformerOptions } from './seriesToRows';
// Tests for the seriesToRows transform. The expectations encode two invariants:
// rows are sorted by time DESCENDING, and the Metric column carries the name
// of the frame each row came from. On equal timestamps the earlier input frame
// comes first (stable sort).
describe('Series to rows', () => {
  beforeAll(() => {
    mockTransformationsRegistry([seriesToRowsTransformer]);
  });

  it('combine two series into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const seriesA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [1000] },
        { name: 'Temp', type: FieldType.number, values: [1] },
      ],
    });

    const seriesB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [2000] },
        { name: 'Temp', type: FieldType.number, values: [-1] },
      ],
    });

    const result = transformDataFrame([cfg], [seriesA, seriesB]);

    // B's row (t=2000) sorts before A's row (t=1000).
    const expected: Field[] = [
      createField('Time', FieldType.time, [2000, 1000]),
      createField('Metric', FieldType.string, ['B', 'A']),
      createField('Value', FieldType.number, [-1, 1]),
    ];

    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine two series with multiple values into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const seriesA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200] },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5] },
      ],
    });

    const seriesB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3] },
      ],
    });

    const result = transformDataFrame([cfg], [seriesA, seriesB]);

    // Rows from both frames interleave by time; the t=100 tie keeps A before B.
    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine three series into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const seriesA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [1000] },
        { name: 'Temp', type: FieldType.number, values: [1] },
      ],
    });

    const seriesB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [2000] },
        { name: 'Temp', type: FieldType.number, values: [-1] },
      ],
    });

    const seriesC = toDataFrame({
      name: 'C',
      fields: [
        { name: 'Time', type: FieldType.time, values: [500] },
        { name: 'Temp', type: FieldType.number, values: [2] },
      ],
    });

    const result = transformDataFrame([cfg], [seriesA, seriesB, seriesC]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [2000, 1000, 500]),
      createField('Metric', FieldType.string, ['B', 'A', 'C']),
      createField('Value', FieldType.number, [-1, 1, 2]),
    ];

    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine two time series, where first serie fields has displayName, into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const serieA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200], config: { displayName: 'Random time' } },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5], config: { displayName: 'Temp' } },
      ],
    });

    const serieB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3] },
      ],
    });

    const result = transformDataFrame([cfg], [serieA, serieB]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    const fields = unwrap(result[0].fields);

    // displayName must NOT be copied from the source field into the merged Value column.
    expect(fields[2].config).toEqual({});
    expect(fields).toEqual(expected);
  });

  it('combine two time series, where first serie fields has units, into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const serieA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200] },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5], config: { units: 'celsius' } },
      ],
    });

    const serieB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3] },
      ],
    });

    const result = transformDataFrame([cfg], [serieA, serieB]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1], { units: 'celsius' }),
    ];

    const fields = unwrap(result[0].fields);

    // Other config (units) from the FIRST frame's value field is preserved.
    expect(fields[2].config).toEqual({ units: 'celsius' });
    expect(fields).toEqual(expected);
  });

  it('combine two time series, where second serie fields has units, into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const serieA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200] },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5] },
      ],
    });

    const serieB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3], config: { units: 'celsius' } },
      ],
    });

    const result = transformDataFrame([cfg], [serieA, serieB]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    const fields = unwrap(result[0].fields);

    // Only the first frame's value-field config is used; the second frame's units are dropped.
    expect(fields[2].config).toEqual({});
    expect(fields).toEqual(expected);
  });
});
/**
 * Builds a Field fixture backed by an ArrayVector. `labels` is set to
 * undefined explicitly so the fixture deep-equals transformer output.
 */
const createField = (name: string, type: FieldType, values: any[], config = {}): Field => ({
  name,
  type,
  values: new ArrayVector(values),
  config,
  labels: undefined,
});
/**
 * Normalizes fields for comparison: rebuilds each field via createField with
 * its values copied into a plain array, so vector-implementation differences
 * do not affect deep equality.
 */
const unwrap = (fields: Field[]): Field[] => {
  // A shallow copy replaces the previous identity `.map(value => value)` idiom.
  return fields.map(field => createField(field.name, field.type, [...field.values.toArray()], field.config));
};

View File

@@ -0,0 +1,97 @@
import { omit } from 'lodash';
import { DataTransformerID } from './ids';
import { DataTransformerInfo } from '../../types/transformations';
import {
DataFrame,
Field,
FieldType,
TIME_SERIES_TIME_FIELD_NAME,
TIME_SERIES_VALUE_FIELD_NAME,
TIME_SERIES_METRIC_FIELD_NAME,
} from '../../types/dataFrame';
import { isTimeSeries } from '../../dataframe/utils';
import { MutableDataFrame, sortDataFrame } from '../../dataframe';
import { ArrayVector } from '../../vector';
import { getFrameDisplayName } from '../../field/fieldState';
export interface SeriesToRowsTransformerOptions {}
/**
 * Flattens multiple single-value time series into one frame with three
 * columns — Time, Metric (the source frame's display name) and Value —
 * sorted by time descending. Non-time-series input, or fewer than two
 * frames, is returned unchanged.
 */
export const seriesToRowsTransformer: DataTransformerInfo<SeriesToRowsTransformerOptions> = {
  id: DataTransformerID.seriesToRows,
  name: 'Series to rows',
  description: 'Combines multiple series into a single serie and appends a column with metric name per value.',
  defaultOptions: {},
  transformer: (options: SeriesToRowsTransformerOptions) => {
    return (data: DataFrame[]) => {
      if (!Array.isArray(data) || data.length <= 1) {
        return data;
      }

      if (!isTimeSeries(data)) {
        return data;
      }

      const timeFieldByIndex: Record<number, number> = {};
      const targetFields = new Set<string>();
      const dataFrame = new MutableDataFrame();
      const metricField: Field = {
        name: TIME_SERIES_METRIC_FIELD_NAME,
        values: new ArrayVector(),
        config: {},
        type: FieldType.string,
      };

      // Pass 1: note where each frame keeps its time field and create the
      // output columns exactly once (Time + Metric from the first time field
      // seen, Value from the first non-time field seen).
      data.forEach((frame, frameIndex) => {
        frame.fields.forEach((field, fieldIndex) => {
          if (field.type === FieldType.time) {
            timeFieldByIndex[frameIndex] = fieldIndex;

            if (!targetFields.has(TIME_SERIES_TIME_FIELD_NAME)) {
              dataFrame.addField(copyFieldStructure(field, TIME_SERIES_TIME_FIELD_NAME));
              dataFrame.addField(metricField);
              targetFields.add(TIME_SERIES_TIME_FIELD_NAME);
            }
            return;
          }

          if (!targetFields.has(TIME_SERIES_VALUE_FIELD_NAME)) {
            dataFrame.addField(copyFieldStructure(field, TIME_SERIES_VALUE_FIELD_NAME));
            targetFields.add(TIME_SERIES_VALUE_FIELD_NAME);
          }
        });
      });

      // Pass 2: emit one output row per source row. Each frame is assumed to
      // hold exactly one time and one value field (guaranteed by isTimeSeries).
      data.forEach((frame, frameIndex) => {
        const timeFieldIndex = timeFieldByIndex[frameIndex];
        const valueFieldIndex = timeFieldIndex === 0 ? 1 : 0;

        for (let valueIndex = 0; valueIndex < frame.length; valueIndex++) {
          dataFrame.add({
            [TIME_SERIES_TIME_FIELD_NAME]: frame.fields[timeFieldIndex].values.get(valueIndex),
            [TIME_SERIES_METRIC_FIELD_NAME]: getFrameDisplayName(frame),
            [TIME_SERIES_VALUE_FIELD_NAME]: frame.fields[valueFieldIndex].values.get(valueIndex),
          });
        }
      });

      // Column 0 is Time; sort the combined rows by time descending.
      return [sortDataFrame(dataFrame, 0, true)];
    };
  },
};
/**
 * Copies a field's structure under a new name with an empty values vector.
 * Drops per-frame state, labels and the old name, and strips any displayName
 * override so the combined column does not inherit one source's display name.
 */
const copyFieldStructure = (field: Field, name: string): Field => {
  const structure = omit(field, ['values', 'state', 'labels', 'config', 'name']);
  const config = omit(field.config, 'displayName');
  return {
    ...structure,
    name: name,
    values: new ArrayVector(),
    config: { ...config },
  };
};

View File

@@ -42,6 +42,10 @@ export class AppPlugin<T = KeyValue> extends GrafanaPlugin<AppPluginMeta<T>> {
/**
* Set the component displayed under:
* /a/${plugin-id}/*
*
* If the NavModel is configured, the page will have a managed frame, otherwise it has full control.
*
* NOTE: this structure will change in 7.2+ so that it is managed with a normal react router
*/
setRootPage(root: ComponentClass<AppRootProps<T>>, rootNav?: NavModel) {
this.root = root;

View File

@@ -15,7 +15,7 @@ export enum LoadingState {
Error = 'Error',
}
export type PreferredVisualisationType = 'graph' | 'table';
export type PreferredVisualisationType = 'graph' | 'table' | 'logs' | 'trace';
export interface QueryResultMeta {
/** DataSource Specific Values */
@@ -47,6 +47,7 @@ export interface QueryResultMeta {
searchWords?: string[]; // used by log models and loki
limit?: number; // used by log models and loki
json?: boolean; // used to keep track of old json doc values
instant?: boolean;
}
export interface QueryResultMetaStat extends FieldConfig {

View File

@@ -150,3 +150,4 @@ export interface FieldCalcs extends Record<string, any> {}
export const TIME_SERIES_VALUE_FIELD_NAME = 'Value';
export const TIME_SERIES_TIME_FIELD_NAME = 'Time';
export const TIME_SERIES_METRIC_FIELD_NAME = 'Metric';

View File

@@ -35,6 +35,7 @@ export interface DataLink<T extends DataQuery = any> {
// If dataLink represents internal link this has to be filled. Internal link is defined as a query in a particular
// data source that we want to show to the user. Usually this results in a link to explore but can also lead to
// more custom onClick behaviour if needed.
// @internal and subject to change in future releases
internal?: {
query: T;
datasourceUid: string;

View File

@@ -310,7 +310,6 @@ export interface QueryEditorProps<
* Contains query response filtered by refId of QueryResultBase and possible query error
*/
data?: PanelData;
exploreMode?: ExploreMode;
exploreId?: any;
history?: HistoryItem[];
}
@@ -334,13 +333,11 @@ export interface ExploreQueryFieldProps<
history: any[];
onBlur?: () => void;
absoluteRange?: AbsoluteTimeRange;
exploreMode?: ExploreMode;
exploreId?: any;
}
export interface ExploreStartPageProps {
datasource?: DataSourceApi;
exploreMode: ExploreMode;
onClickExample: (query: DataQuery) => void;
exploreId?: any;
}

View File

@@ -1,4 +1,3 @@
import { ExploreMode } from './datasource';
import { RawTimeRange } from './time';
import { LogsDedupStrategy } from './logs';
@@ -6,7 +5,6 @@ import { LogsDedupStrategy } from './logs';
export interface ExploreUrlState {
datasource: string;
queries: any[]; // Should be a DataQuery, but we're going to strip refIds, so typing makes less sense
mode: ExploreMode;
range: RawTimeRange;
ui: ExploreUIState;
originPanelId?: number;

View File

@@ -4,9 +4,8 @@ export enum MappingType {
}
interface BaseMap {
id: number;
operator: string;
text: string;
id: number; // this could/should just be the array index
text: string; // the final display value
type: MappingType;
}

View File

@@ -31,7 +31,7 @@ describe('mapInternalLinkToExplore', () => {
expect.objectContaining({
title: 'testDS',
href:
'/explore?left={"datasource":"testDS","queries":[{"query":"12344"}],"mode":"Metrics","ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}',
'/explore?left={"datasource":"testDS","queries":[{"query":"12344"}],"ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}',
onClick: undefined,
})
);

View File

@@ -2,7 +2,6 @@ import {
DataLink,
DataQuery,
DataSourceInstanceSettings,
ExploreMode,
Field,
InterpolateFunction,
LinkModel,
@@ -82,7 +81,6 @@ function generateInternalHref<T extends DataQuery = any>(datasourceName: string,
queries: [query],
// This should get overwritten if datasource does not support that mode and we do not know what mode is
// preferred anyway.
mode: ExploreMode.Metrics,
ui: {
showingGraph: true,
showingTable: true,

View File

@@ -139,7 +139,6 @@ export function serializeStateToUrlParam(urlState: ExploreUrlState, compact?: bo
urlState.range.to,
urlState.datasource,
...urlState.queries,
{ mode: urlState.mode },
{
ui: [
!!urlState.ui.showingGraph,

View File

@@ -11,8 +11,8 @@ describe('Format value with value mappings', () => {
it('should return undefined with no matching valuemappings', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
{ id: 0, text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 1, text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
];
const value = '10';
@@ -21,8 +21,8 @@ describe('Format value with value mappings', () => {
it('should return first matching mapping with lowest id', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: 'tio', type: MappingType.ValueToText, value: '10' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: 'tio', type: MappingType.ValueToText, value: '10' },
];
const value = '10';
@@ -31,8 +31,8 @@ describe('Format value with value mappings', () => {
it('should return if value is null and value to text mapping value is null', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: '<NULL>', type: MappingType.ValueToText, value: 'null' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: '<NULL>', type: MappingType.ValueToText, value: 'null' },
];
const value = null;
@@ -41,8 +41,8 @@ describe('Format value with value mappings', () => {
it('should return if value is null and range to text mapping from and to is null', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '<NULL>', type: MappingType.RangeToText, from: 'null', to: 'null' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '<NULL>', type: MappingType.RangeToText, from: 'null', to: 'null' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = null;
@@ -51,8 +51,8 @@ describe('Format value with value mappings', () => {
it('should return rangeToText mapping where value equals to', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-10', type: MappingType.RangeToText, from: '1', to: '10' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '1-10', type: MappingType.RangeToText, from: '1', to: '10' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = '10';
@@ -61,8 +61,8 @@ describe('Format value with value mappings', () => {
it('should return rangeToText mapping where value equals from', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '10-20', type: MappingType.RangeToText, from: '10', to: '20' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '10-20', type: MappingType.RangeToText, from: '10', to: '20' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = '10';
@@ -71,8 +71,8 @@ describe('Format value with value mappings', () => {
it('should return rangeToText mapping where value is between from and to', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = '10';
@@ -81,8 +81,8 @@ describe('Format value with value mappings', () => {
it('should map value text to mapping', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: 'ELVA', type: MappingType.ValueToText, value: 'elva' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: 'ELVA', type: MappingType.ValueToText, value: 'elva' },
];
const value = 'elva';

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/e2e-selectors",
"version": "7.1.0-pre.0",
"version": "7.1.0-beta.3",
"description": "Grafana End-to-End Test Selectors Library",
"keywords": [
"cli",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/e2e",
"version": "7.1.0-pre.0",
"version": "7.1.0-beta.3",
"description": "Grafana End-to-End Test Library",
"keywords": [
"cli",
@@ -45,7 +45,7 @@
"types": "src/index.ts",
"dependencies": {
"@cypress/webpack-preprocessor": "4.1.3",
"@grafana/e2e-selectors": "7.1.0-pre.0",
"@grafana/e2e-selectors": "7.1.0-beta.3",
"@grafana/tsconfig": "^1.0.0-rc1",
"@mochajs/json-file-reporter": "^1.2.0",
"blink-diff": "1.0.13",

View File

@@ -68,6 +68,8 @@ export const addPanel = (config?: Partial<AddPanelConfig>): any =>
.click();
closeOptionsGroup('type');
closeOptions();
queriesForm(fullConfig);
e2e().wait('@chartData');
@@ -77,8 +79,6 @@ export const addPanel = (config?: Partial<AddPanelConfig>): any =>
//e2e.components.Panels.Panel.containerByTitle(panelTitle).find('.panel-content').contains('No data');
//e2e.components.QueryEditorRow.actionButton('Disable/enable query').click();
closeOptions();
e2e()
.get('button[title="Apply changes and go back to dashboard"]')
.click();

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/runtime",
"version": "7.1.0-pre.0",
"version": "7.1.0-beta.3",
"description": "Grafana Runtime Library",
"keywords": [
"grafana",
@@ -23,8 +23,8 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@grafana/data": "7.1.0-pre.0",
"@grafana/ui": "7.1.0-pre.0",
"@grafana/data": "7.1.0-beta.3",
"@grafana/ui": "7.1.0-beta.3",
"systemjs": "0.20.19",
"systemjs-plugin-css": "0.1.37"
},
@@ -32,9 +32,9 @@
"@grafana/tsconfig": "^1.0.0-rc1",
"@rollup/plugin-commonjs": "11.0.2",
"@rollup/plugin-node-resolve": "7.1.1",
"@types/jest": "23.3.14",
"@types/rollup-plugin-visualizer": "2.6.0",
"@types/systemjs": "^0.20.6",
"@types/jest": "23.3.14",
"lodash": "4.17.15",
"pretty-format": "25.1.0",
"rollup": "2.0.6",

View File

@@ -129,6 +129,11 @@ export class DataSourceWithBackend<
/**
* Optionally augment the response before returning the results to the
*
* NOTE: this was added in 7.1 for azure, and will be removed in 7.2
* when the entire response pipeline is Observable
*
* @internal
*/
processResponse?(res: DataQueryResponse): Promise<DataQueryResponse>;

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/toolkit",
"version": "7.1.0-pre.0",
"version": "7.1.0-beta.3",
"description": "Grafana Toolkit",
"keywords": [
"grafana",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/ui",
"version": "7.1.0-pre.0",
"version": "7.1.0-beta.3",
"description": "Grafana Components Library",
"keywords": [
"grafana",
@@ -28,8 +28,8 @@
},
"dependencies": {
"@emotion/core": "^10.0.27",
"@grafana/data": "7.1.0-pre.0",
"@grafana/e2e-selectors": "7.1.0-pre.0",
"@grafana/data": "7.1.0-beta.3",
"@grafana/e2e-selectors": "7.1.0-beta.3",
"@grafana/slate-react": "0.22.9-grafana",
"@grafana/tsconfig": "^1.0.0-rc1",
"@iconscout/react-unicons": "^1.0.0",
@@ -47,9 +47,8 @@
"immutable": "3.8.2",
"jquery": "3.5.1",
"lodash": "4.17.15",
"monaco-editor": "0.20.0",
"react-monaco-editor": "0.36.0",
"moment": "2.24.0",
"monaco-editor": "0.20.0",
"papaparse": "4.6.3",
"rc-cascader": "1.0.1",
"rc-drawer": "3.1.3",
@@ -63,6 +62,7 @@
"react-dom": "16.12.0",
"react-highlight-words": "0.16.0",
"react-hook-form": "5.1.3",
"react-monaco-editor": "0.36.0",
"react-popper": "1.3.3",
"react-storybook-addon-props-combinations": "1.1.0",
"react-table": "7.0.0",

View File

@@ -55,6 +55,12 @@ export interface Props extends Themeable {
justifyMode?: BigValueJustifyMode;
alignmentFactors?: DisplayValueAlignmentFactors;
textMode?: BigValueTextMode;
/**
* If part of a series of stat panes, this is the total number.
* Used by BigValueTextMode.Auto text mode.
*/
count?: number;
}
export class BigValue extends PureComponent<Props> {

View File

@@ -463,12 +463,18 @@ export interface BigValueTextValues extends DisplayValue {
}
function getTextValues(props: Props): BigValueTextValues {
const { textMode: nameAndValue, value, alignmentFactors } = props;
const { value, alignmentFactors, count } = props;
let { textMode } = props;
const titleToAlignTo = alignmentFactors ? alignmentFactors.title : value.title;
const valueToAlignTo = formattedValueToString(alignmentFactors ? alignmentFactors : value);
switch (nameAndValue) {
// In the auto case we only show title if this big value is part of more panes (count > 1)
if (textMode === BigValueTextMode.Auto && (count ?? 1) === 1) {
textMode = BigValueTextMode.Value;
}
switch (textMode) {
case BigValueTextMode.Name:
return {
...value,
@@ -498,6 +504,7 @@ function getTextValues(props: Props): BigValueTextValues {
valueToAlignTo: '1',
tooltip: `Name: ${value.title}\nValue: ${formattedValueToString(value)}`,
};
case BigValueTextMode.ValueAndName:
default:
return {
...value,

View File

@@ -2,6 +2,8 @@ import React from 'react';
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
import { FileUpload } from './FileUpload';
import mdx from './FileUpload.mdx';
import { useSize } from '../../utils/storybook/useSize';
import { ComponentSize } from '../../types/size';
export default {
title: 'Forms/FileUpload',
@@ -15,8 +17,10 @@ export default {
};
export const single = () => {
const size = useSize();
return (
<FileUpload
size={size as ComponentSize}
onFileUpload={({ currentTarget }) => console.log('file', currentTarget?.files && currentTarget.files[0])}
/>
);

View File

@@ -3,12 +3,14 @@ import { GrafanaTheme } from '@grafana/data';
import { css, cx } from 'emotion';
import { getFormStyles, Icon } from '../index';
import { stylesFactory, useTheme } from '../../themes';
import { ComponentSize } from '../../types/size';
export interface Props {
onFileUpload: (event: FormEvent<HTMLInputElement>) => void;
/** Accepted file extensions */
accept?: string;
className?: string;
size?: ComponentSize;
}
function trimFileName(fileName: string) {
@@ -24,9 +26,15 @@ function trimFileName(fileName: string) {
return `${file.substring(0, nameLength)}...${extension}`;
}
export const FileUpload: FC<Props> = ({ onFileUpload, className, children = 'Upload file', accept = '*' }) => {
export const FileUpload: FC<Props> = ({
onFileUpload,
className,
children = 'Upload file',
accept = '*',
size = 'md',
}) => {
const theme = useTheme();
const style = getStyles(theme);
const style = getStyles(theme, size);
const [fileName, setFileName] = useState('');
const onChange = useCallback((event: FormEvent<HTMLInputElement>) => {
@@ -60,8 +68,8 @@ export const FileUpload: FC<Props> = ({ onFileUpload, className, children = 'Upl
);
};
const getStyles = stylesFactory((theme: GrafanaTheme) => {
const buttonFormStyle = getFormStyles(theme, { variant: 'primary', invalid: false, size: 'md' }).button.button;
const getStyles = stylesFactory((theme: GrafanaTheme, size: ComponentSize) => {
const buttonFormStyle = getFormStyles(theme, { variant: 'primary', invalid: false, size }).button.button;
return {
fileUpload: css`
display: none;

View File

@@ -34,10 +34,12 @@ export const FormLabel: FunctionComponent<Props> = ({
{tooltip && (
<Tooltip placement="top" content={tooltip} theme={'info'}>
<div className="gf-form-help-icon gf-form-help-icon--right-normal">
<Icon name="info-circle" size="xs" style={{ marginLeft: '10px' }} />
<Icon name="info-circle" size="sm" style={{ marginLeft: '10px' }} />
</div>
</Tooltip>
)}
</label>
);
};
export const InlineFormLabel = FormLabel;

View File

@@ -2,6 +2,7 @@ import React, { PureComponent } from 'react';
import uniqueId from 'lodash/uniqueId';
import { Tooltip } from '../../../Tooltip/Tooltip';
import * as PopperJS from 'popper.js';
import { Icon } from '../../..';
export interface Props {
label: string;
@@ -54,7 +55,7 @@ export class Switch extends PureComponent<Props, State> {
{tooltip && (
<Tooltip placement={tooltipPlacement ? tooltipPlacement : 'auto'} content={tooltip} theme={'info'}>
<div className="gf-form-help-icon gf-form-help-icon--right-normal">
<i className="fa fa-info-circle" />
<Icon name="info-circle" size="sm" style={{ marginLeft: '10px' }} />
</div>
</Tooltip>
)}

View File

@@ -0,0 +1,38 @@
import { findInsertIndex } from './suggestions';
// Tests for findInsertIndex: the returned index is where a completion should
// be inserted, and prefix is the partial word typed so far.
describe('Check suggestion index', () => {
  it('find last $ sign', () => {
    // A `$` starts a template variable; insertion begins at the `$` itself.
    const line = ' hello $123';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.indexOf('$'));
    expect(prefix).toEqual('$123');
  });

  it('insert into empty line', () => {
    const line = '';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(0);
    expect(prefix).toEqual('');
  });

  it('insert new word', () => {
    // Trailing space means a brand-new word with an empty prefix.
    const line = 'this is a new ';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.length);
    expect(prefix).toEqual('');
  });

  it('complete a simple word', () => {
    const line = 'SELECT * FROM tab';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.lastIndexOf(' ') + 1);
    expect(prefix).toEqual('tab');
  });

  it('complete a quoted word', () => {
    // Quotes delimit words, so the prefix starts after the last quote.
    const line = 'SELECT "hello", "wo';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.lastIndexOf('"') + 1);
    expect(prefix).toEqual('wo');
  });
});

View File

@@ -2,6 +2,33 @@ import * as monaco from 'monaco-editor/esm/vs/editor/editor.api';
import { CodeEditorSuggestionItem, CodeEditorSuggestionItemKind, CodeEditorSuggestionProvider } from './types';
/**
 * Finds where a completion suggestion should be inserted in `line`, along
 * with the prefix typed so far.
 *
 * Scans backwards from the end of the line:
 *  - a `$` starts a template-variable reference, so insertion begins at the
 *    `$` itself and the prefix includes it;
 *  - whitespace and quote characters delimit words, so insertion begins just
 *    after them.
 * If no delimiter is found, the whole line is the prefix.
 *
 * @internal -- only exported for tests
 */
export function findInsertIndex(line: string): { index: number; prefix: string } {
  // Scan down to index 0 inclusive. The previous bound (`i > 0`) never
  // inspected the first column, so a separator or quote there was wrongly
  // swallowed into the prefix (e.g. ' x' returned index 0, prefix ' x').
  for (let i = line.length - 1; i >= 0; i--) {
    const ch = line.charAt(i);
    if (ch === '$') {
      return {
        index: i,
        prefix: line.substring(i),
      };
    }

    // Keep these separators: insert just after whitespace or a quote
    if (ch === ' ' || ch === '\t' || ch === '"' || ch === "'") {
      return {
        index: i + 1,
        prefix: line.substring(i + 1),
      };
    }
  }

  return {
    index: 0,
    prefix: line,
  };
}
function getCompletionItems(
prefix: string,
suggestions: CodeEditorSuggestionItem[],
@@ -53,51 +80,39 @@ export function registerSuggestions(
triggerCharacters: ['$'],
provideCompletionItems: (model, position, context) => {
const range = {
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: position.column,
endColumn: position.column,
};
// Simple check if this was triggered by pressing `$`
if (context.triggerCharacter === '$') {
const range = {
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: position.column - 1,
endColumn: position.column,
};
range.startColumn = position.column - 1;
return {
suggestions: getCompletionItems('$', getSuggestions(), range),
};
}
// find out if we are completing a property in the 'dependencies' object.
const lineText = model.getValueInRange({
// Find the replacement region
const currentLine = model.getValueInRange({
startLineNumber: position.lineNumber,
startColumn: 1,
endLineNumber: position.lineNumber,
endColumn: position.column,
});
const idx = lineText.lastIndexOf('$');
if (idx >= 0) {
const range = {
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: idx, // the last $ we found
endColumn: position.column,
};
return {
suggestions: getCompletionItems(lineText.substr(idx), getSuggestions(), range),
};
const { index, prefix } = findInsertIndex(currentLine);
range.startColumn = index + 1;
const suggestions = getCompletionItems(prefix, getSuggestions(), range);
if (suggestions.length) {
// NOTE, this will replace any language provided suggestions
return { suggestions };
}
// Empty line that asked for suggestion
if (lineText.trim().length < 1) {
return {
suggestions: getCompletionItems('', getSuggestions(), {
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: position.column,
endColumn: position.column,
}),
};
}
// console.log('complete?', lineText, context);
// Default language suggestions
return undefined;
},
});

View File

@@ -3,6 +3,7 @@ import { css, cx } from 'emotion';
import { TableCellProps } from './types';
import { Tooltip } from '../Tooltip/Tooltip';
import { JSONFormatter } from '../JSONFormatter/JSONFormatter';
import { isString } from 'lodash';
export const JSONViewCell: FC<TableCellProps> = props => {
const { field, cell, tableStyles } = props;
@@ -16,8 +17,16 @@ export const JSONViewCell: FC<TableCellProps> = props => {
font-family: monospace;
`;
const displayValue = JSON.stringify(cell.value);
const content = <JSONTooltip value={cell.value} />;
let value = cell.value;
let displayValue = value;
if (isString(value)) {
try {
value = JSON.parse(value);
} catch {} // ignore errors
} else {
displayValue = JSON.stringify(value);
}
const content = <JSONTooltip value={value} />;
return (
<div className={cx(txt, tableStyles.tableCell)}>
<Tooltip placement="auto" content={content} theme={'info'}>

View File

@@ -12,8 +12,8 @@ const setup = (spy?: any, propOverrides?: object) => {
}
},
valueMappings: [
{ id: 1, operator: '', type: MappingType.ValueToText, value: '20', text: 'Ok' },
{ id: 2, operator: '', type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
{ id: 1, type: MappingType.ValueToText, value: '20', text: 'Ok' },
{ id: 2, type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
],
};
@@ -35,9 +35,7 @@ describe('On remove mapping', () => {
const remove = wrapper.find('button[aria-label="ValueMappingsEditor remove button"]');
remove.at(0).simulate('click');
expect(onChangeSpy).toBeCalledWith([
{ id: 2, operator: '', type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
]);
expect(onChangeSpy).toBeCalledWith([{ id: 2, type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' }]);
});
it('should remove mapping at index 1', () => {
@@ -47,9 +45,7 @@ describe('On remove mapping', () => {
const remove = wrapper.find('button[aria-label="ValueMappingsEditor remove button"]');
remove.at(1).simulate('click');
expect(onChangeSpy).toBeCalledWith([
{ id: 1, operator: '', type: MappingType.ValueToText, value: '20', text: 'Ok' },
]);
expect(onChangeSpy).toBeCalledWith([{ id: 1, type: MappingType.ValueToText, value: '20', text: 'Ok' }]);
});
});
@@ -62,9 +58,9 @@ describe('Next id to add', () => {
add.at(0).simulate('click');
expect(onChangeSpy).toBeCalledWith([
{ id: 1, operator: '', type: MappingType.ValueToText, value: '20', text: 'Ok' },
{ id: 2, operator: '', type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
{ id: 3, operator: '', type: MappingType.ValueToText, from: '', to: '', text: '' },
{ id: 1, type: MappingType.ValueToText, value: '20', text: 'Ok' },
{ id: 2, type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
{ id: 3, type: MappingType.ValueToText, from: '', to: '', text: '' },
]);
});
@@ -73,8 +69,6 @@ describe('Next id to add', () => {
const wrapper = setup(onChangeSpy, { valueMappings: [] });
const add = wrapper.find('*[aria-label="ValueMappingsEditor add mapping button"]');
add.at(0).simulate('click');
expect(onChangeSpy).toBeCalledWith([
{ id: 0, operator: '', type: MappingType.ValueToText, from: '', to: '', text: '' },
]);
expect(onChangeSpy).toBeCalledWith([{ id: 0, type: MappingType.ValueToText, from: '', to: '', text: '' }]);
});
});

View File

@@ -15,7 +15,6 @@ export const ValueMappingsEditor: React.FC<Props> = ({ valueMappings, onChange,
type: MappingType.ValueToText,
from: '',
to: '',
operator: '',
text: '',
};
const id = update && update.length > 0 ? Math.max(...update.map(v => v.id)) + 1 : 0;

View File

@@ -163,7 +163,7 @@ export { FileUpload } from './FileUpload/FileUpload';
// Legacy forms
// Export this until we've figured out a good approach to inline form styles.
export { FormLabel as InlineFormLabel } from './FormLabel/FormLabel';
export { InlineFormLabel } from './FormLabel/FormLabel';
// Select
import { Select, AsyncSelect } from './Forms/Legacy/Select/Select';

View File

@@ -0,0 +1,27 @@
import React from 'react';
import { config } from '@grafana/runtime';
import { css } from 'emotion';
import { mount } from 'enzyme';
import { useStyles } from './ThemeContext';
describe('useStyles', () => {
  it('passes in theme and returns style object', () => {
    // Probe component: the assertions execute while it renders.
    const Probe: React.FC = () => {
      const styles = useStyles(theme => {
        // The hook must hand the callback the global theme from config.
        expect(theme).toEqual(config.theme);
        return {
          someStyle: css`
            color: ${theme?.palette.critical};
          `,
        };
      });
      // emotion's css`` yields a class-name string.
      expect(typeof styles.someStyle).toBe('string');
      return <div>dummy</div>;
    };
    mount(<Probe />);
  });
});

View File

@@ -38,12 +38,11 @@ export const withTheme = <P extends Themeable, S extends {} = {}>(Component: Rea
/** Hook that returns the active GrafanaTheme; ThemeContextMock, when set, takes precedence over ThemeContext. */
export function useTheme(): GrafanaTheme {
  return useContext(ThemeContextMock || ThemeContext);
}
/** Hook for using memoized styles with access to the theme. */
export const useStyles = (getStyles: (theme?: GrafanaTheme) => any) => {
const currentTheme = useTheme();
const callback = stylesFactory(stylesTheme => getStyles(stylesTheme));
return callback(currentTheme);
};
/** Hook for using memoized styles with access to the theme; memoization comes from stylesFactory. */
export function useStyles<T>(getStyles: (theme: GrafanaTheme) => T) {
  return stylesFactory(getStyles)(useTheme());
}
/**
* Enables theme context mocking

View File

@@ -155,6 +155,7 @@ export const getStandardFieldConfigs = () => {
id: 'mappings',
path: 'mappings',
name: 'Value mappings',
description: 'Modify the display text based on input value',
editor: standardEditorsRegistry.get('mappings').editor as any,
override: standardEditorsRegistry.get('mappings').editor as any,

View File

@@ -0,0 +1,7 @@
import { select } from '@storybook/addon-knobs';
import { ComponentSize } from '../../types/size';
/**
 * Storybook knob helper: exposes a 'Size' select knob with the standard
 * component sizes, defaulting to the given size ('md' when omitted).
 */
export function useSize(size: ComponentSize = 'md') {
  return select('Size', ['xs', 'sm', 'md', 'lg'], size);
}

View File

@@ -1,6 +1,6 @@
{
"name": "@jaegertracing/jaeger-ui-components",
"version": "7.1.0-pre.0",
"version": "7.1.0-beta.3",
"main": "src/index.ts",
"types": "src/index.ts",
"license": "Apache-2.0",
@@ -14,7 +14,7 @@
"typescript": "3.9.3"
},
"dependencies": {
"@grafana/data": "7.1.0-pre.0",
"@grafana/data": "7.1.0-beta.3",
"@types/classnames": "^2.2.7",
"@types/deep-freeze": "^0.1.1",
"@types/hoist-non-react-statics": "^3.3.1",

View File

@@ -159,6 +159,12 @@ var (
// StatsTotalDataSources is a metric total number of defined datasources, labeled by pluginId
StatsTotalDataSources *prometheus.GaugeVec
// StatsTotalAnnotations is a metric of total number of annotations stored in Grafana.
StatsTotalAnnotations prometheus.Gauge
// StatsTotalDashboardVersions is a metric of total number of dashboard versions stored in Grafana.
StatsTotalDashboardVersions prometheus.Gauge
// grafanaBuildVersion is a metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built
grafanaBuildVersion *prometheus.GaugeVec
@@ -483,6 +489,18 @@ func init() {
Help: "A metric with a constant '1' value labeled by pluginId, pluginType and version from which Grafana plugin was built",
Namespace: ExporterName,
}, []string{"plugin_id", "plugin_type", "version"})
StatsTotalDashboardVersions = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "stat_totals_dashboard_versions",
Help: "total amount of dashboard versions in the database",
Namespace: ExporterName,
})
StatsTotalAnnotations = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "stat_totals_annotations",
Help: "total amount of annotations in the database",
Namespace: ExporterName,
})
}
// SetBuildInformation sets the build information for this binary
@@ -550,6 +568,8 @@ func initMetricVars() {
StatsTotalDataSources,
grafanaBuildVersion,
grafanaPluginBuildInfoDesc,
StatsTotalDashboardVersions,
StatsTotalAnnotations,
)
}

View File

@@ -61,6 +61,8 @@ func (uss *UsageStatsService) sendUsageStats(oauthProviders map[string]bool) {
metrics["stats.snapshots.count"] = statsQuery.Result.Snapshots
metrics["stats.teams.count"] = statsQuery.Result.Teams
metrics["stats.total_auth_token.count"] = statsQuery.Result.AuthTokens
metrics["stats.dashboard_versions.count"] = statsQuery.Result.DashboardVersions
metrics["stats.annotations.count"] = statsQuery.Result.Annotations
metrics["stats.valid_license.count"] = getValidLicenseCount(uss.License.HasValidLicense())
metrics["stats.edition.oss.count"] = getOssEditionCount()
metrics["stats.edition.enterprise.count"] = getEnterpriseEditionCount()
@@ -212,6 +214,8 @@ func (uss *UsageStatsService) updateTotalStats() {
metrics.StatsTotalActiveEditors.Set(float64(statsQuery.Result.ActiveEditors))
metrics.StatsTotalAdmins.Set(float64(statsQuery.Result.Admins))
metrics.StatsTotalActiveAdmins.Set(float64(statsQuery.Result.ActiveAdmins))
metrics.StatsTotalDashboardVersions.Set(float64(statsQuery.Result.DashboardVersions))
metrics.StatsTotalAnnotations.Set(float64(statsQuery.Result.Annotations))
dsStats := models.GetDataSourceStatsQuery{}
if err := uss.Bus.Dispatch(&dsStats); err != nil {

View File

@@ -50,6 +50,8 @@ func TestMetrics(t *testing.T) {
Snapshots: 13,
Teams: 14,
AuthTokens: 15,
DashboardVersions: 16,
Annotations: 17,
}
getSystemStatsQuery = query
return nil
@@ -238,6 +240,8 @@ func TestMetrics(t *testing.T) {
So(metrics.Get("stats.teams.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Teams)
So(metrics.Get("stats.total_auth_token.count").MustInt64(), ShouldEqual, 15)
So(metrics.Get("stats.avg_auth_token_per_user.count").MustInt64(), ShouldEqual, 5)
So(metrics.Get("stats.dashboard_versions.count").MustInt64(), ShouldEqual, 16)
So(metrics.Get("stats.annotations.count").MustInt64(), ShouldEqual, 17)
So(metrics.Get("stats.ds."+models.DS_ES+".count").MustInt(), ShouldEqual, 9)
So(metrics.Get("stats.ds."+models.DS_PROMETHEUS+".count").MustInt(), ShouldEqual, 10)

View File

@@ -2,6 +2,7 @@ package middleware
import (
"net/url"
"strconv"
"strings"
macaron "gopkg.in/macaron.v1"
@@ -87,7 +88,13 @@ func RoleAuth(roles ...models.RoleType) macaron.Handler {
func Auth(options *AuthOptions) macaron.Handler {
return func(c *models.ReqContext) {
forceLogin := c.AllowAnonymous && c.QueryBool("forceLogin")
forceLogin := false
if c.AllowAnonymous {
forceLoginParam, err := strconv.ParseBool(c.Req.URL.Query().Get("forceLogin"))
if err == nil {
forceLogin = forceLoginParam
}
}
requireLogin := !c.AllowAnonymous || forceLogin
if !c.IsSignedIn && options.ReqSignedIn && requireLogin {
notAuthorized(c)

View File

@@ -16,4 +16,6 @@ type Licensing interface {
LicenseURL(user *SignedInUser) string
StateInfo() string
TokenRaw() string
}

View File

@@ -16,6 +16,8 @@ type SystemStats struct {
Folders int64
ProvisionedDashboards int64
AuthTokens int64
DashboardVersions int64
Annotations int64
Admins int
Editors int

View File

@@ -100,7 +100,11 @@ func (m *manager) Register(pluginID string, factory PluginFactoryFunc) error {
}
if m.License.HasLicense() {
hostEnv = append(hostEnv, fmt.Sprintf("GF_ENTERPRISE_LICENSE_PATH=%s", m.Cfg.EnterpriseLicensePath))
hostEnv = append(
hostEnv,
fmt.Sprintf("GF_ENTERPRISE_LICENSE_PATH=%s", m.Cfg.EnterpriseLicensePath),
fmt.Sprintf("GF_ENTERPRISE_LICENSE_TEXT=%s", m.License.TokenRaw()),
)
}
env := pluginSettings.ToEnv("GF_PLUGIN", hostEnv)

View File

@@ -251,6 +251,7 @@ func TestManager(t *testing.T) {
t.Run("Plugin registration scenario when Grafana is licensed", func(t *testing.T) {
ctx.license.edition = "Enterprise"
ctx.license.hasLicense = true
ctx.license.tokenRaw = "testtoken"
ctx.cfg.BuildVersion = "7.0.0"
ctx.cfg.EnterpriseLicensePath = "/license.txt"
@@ -258,8 +259,8 @@ func TestManager(t *testing.T) {
require.NoError(t, err)
t.Run("Should provide expected host environment variables", func(t *testing.T) {
require.Len(t, ctx.env, 3)
require.EqualValues(t, []string{"GF_VERSION=7.0.0", "GF_EDITION=Enterprise", "GF_ENTERPRISE_LICENSE_PATH=/license.txt"}, ctx.env)
require.Len(t, ctx.env, 4)
require.EqualValues(t, []string{"GF_VERSION=7.0.0", "GF_EDITION=Enterprise", "GF_ENTERPRISE_LICENSE_PATH=/license.txt", "GF_ENTERPRISE_LICENSE_TEXT=testtoken"}, ctx.env)
})
})
})
@@ -383,6 +384,7 @@ func (tp *testPlugin) CallResource(ctx context.Context, req *backend.CallResourc
type testLicensingService struct {
edition string
hasLicense bool
tokenRaw string
}
func (t *testLicensingService) HasLicense() bool {
@@ -408,3 +410,7 @@ func (t *testLicensingService) LicenseURL(user *models.SignedInUser) string {
func (t *testLicensingService) HasValidLicense() bool {
return false
}
// TokenRaw returns the stubbed raw license token configured on the test service.
func (t *testLicensingService) TokenRaw() string {
	return t.tokenRaw
}

View File

@@ -3,6 +3,7 @@ package plugins
import (
"net/url"
"path"
"path/filepath"
"strings"
"github.com/grafana/grafana/pkg/setting"
@@ -63,7 +64,9 @@ func (fp *FrontendPluginBase) handleModuleDefaults() {
// Previously there was an assumption that the plugin directory
// should be public/app/plugins/<plugin type>/<plugin id>
// However this can be an issue if the plugin directory should be renamed to something else
currentDir := path.Base(fp.PluginDir)
currentDir := filepath.Base(fp.PluginDir)
// use path package for the following statements
// because these are not file paths
fp.Module = path.Join("app/plugins", fp.Type, currentDir, "module")
fp.BaseUrl = path.Join("public/app/plugins", fp.Type, currentDir)
}

View File

@@ -538,7 +538,6 @@ func (server *Server) requestMemberOf(entry *ldap.Entry) ([]string, error) {
getAttribute(groupIDAttribute, group),
)
}
break
}
}

View File

@@ -56,3 +56,7 @@ func (l *OSSLicensingService) Init() error {
func (*OSSLicensingService) HasValidLicense() bool {
return false
}
// TokenRaw implements the Licensing interface; OSS builds carry no license
// token, so this always returns the empty string.
func (*OSSLicensingService) TokenRaw() string {
	return ""
}

View File

@@ -75,6 +75,8 @@ func GetSystemStats(query *models.GetSystemStatsQuery) error {
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_provisioning") + `) AS provisioned_dashboards,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_snapshot") + `) AS snapshots,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_version") + `) AS dashboard_versions,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("annotation") + `) AS annotations,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("team") + `) AS teams,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("user_auth_token") + `) AS auth_tokens,`)

View File

@@ -9,6 +9,7 @@ import (
"net/http"
"net/url"
"path"
"sort"
"strings"
"time"
@@ -193,6 +194,9 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
queryResult.Error = err
return queryResult, nil
}
applyInsightsMetricAlias(frame, query.Alias)
queryResult.Dataframes = tsdb.NewDecodedDataFrames(data.Frames{frame})
return queryResult, nil
}
@@ -250,3 +254,62 @@ func (e *ApplicationInsightsDatasource) getPluginRoute(plugin *plugins.DataSourc
return pluginRoute, pluginRouteName, nil
}
// formatApplicationInsightsLegendKey builds the legend key or timeseries name
// Alias patterns like {{metric}} are replaced with the appropriate data values.
func formatApplicationInsightsLegendKey(alias string, metricName string, labels data.Labels) string {
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
keys = sort.StringSlice(keys)
result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
switch metaPartName {
case "metric":
return []byte(metricName)
case "dimensionname", "groupbyname":
return []byte(keys[0])
case "dimensionvalue", "groupbyvalue":
return []byte(lowerLabels[keys[0]])
}
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in
})
return string(result)
}
// applyInsightsMetricAlias sets a display name on every non-time field of the
// frame, derived from the alias pattern. An empty alias is a no-op.
func applyInsightsMetricAlias(frame *data.Frame, alias string) {
	if alias == "" {
		return
	}
	for _, field := range frame.Fields {
		fieldType := field.Type()
		if fieldType == data.FieldTypeTime || fieldType == data.FieldTypeNullableTime {
			// Time fields keep their default name.
			continue
		}
		if field.Config == nil {
			field.Config = &data.FieldConfig{}
		}
		field.Config.DisplayName = formatApplicationInsightsLegendKey(alias, field.Name, field.Labels)
	}
}

View File

@@ -18,6 +18,7 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
name string
testFile string
metric string
alias string
agg string
dimensions []string
expectedFrame func() *data.Frame
@@ -99,6 +100,49 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
}),
)
return frame
},
},
{
name: "segmented series with alias",
testFile: "applicationinsights/4-application-insights-response-metrics-multi-segmented.json",
metric: "traces/count",
alias: "{{ metric }}: Country,City: {{ client/countryOrRegion }},{{ client/city }}",
agg: "sum",
dimensions: []string{"client/countryOrRegion", "client/city"},
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("StartTime", nil, []time.Time{
time.Date(2020, 6, 25, 16, 15, 32, 14e7, time.UTC),
time.Date(2020, 6, 25, 16, 16, 0, 0, time.UTC),
}),
data.NewField("traces/count", data.Labels{"client/city": "Washington", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
nil,
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Washington"}),
data.NewField("traces/count", data.Labels{"client/city": "Des Moines", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
pointer.Float64(1),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Des Moines"}),
data.NewField("traces/count", data.Labels{"client/city": "", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(11),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,"}),
data.NewField("traces/count", data.Labels{"client/city": "Chicago", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(3),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Chicago"}),
data.NewField("traces/count", data.Labels{"client/city": "Tokyo", "client/countryOrRegion": "Japan"}, []*float64{
nil,
pointer.Float64(1),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: Japan,Tokyo"}),
)
return frame
},
},
@@ -110,6 +154,9 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
frame, err := InsightsMetricsResultToFrame(res, tt.metric, tt.agg, tt.dimensions)
require.NoError(t, err)
applyInsightsMetricAlias(frame, tt.alias)
if diff := cmp.Diff(tt.expectedFrame(), frame, data.FrameTestCompareOptions()...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}

View File

@@ -338,6 +338,17 @@ func formatAzureMonitorLegendKey(alias string, resourceName string, metricName s
endIndex := strings.Index(seriesID, "/providers")
resourceGroup := seriesID[startIndex:endIndex]
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
keys = sort.StringSlice(keys)
result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
@@ -359,23 +370,15 @@ func formatAzureMonitorLegendKey(alias string, resourceName string, metricName s
return []byte(metricName)
}
keys := make([]string, 0, len(labels))
if metaPartName == "dimensionname" || metaPartName == "dimensionvalue" {
for k := range labels {
keys = append(keys, k)
}
keys = sort.StringSlice(keys)
}
if metaPartName == "dimensionname" {
return []byte(keys[0])
}
if metaPartName == "dimensionvalue" {
return []byte(labels[keys[0]])
return []byte(lowerLabels[keys[0]])
}
if v, ok := labels[metaPartName]; ok {
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in

View File

@@ -374,7 +374,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
name: "multiple dimension time series response with label alias",
responseFile: "7-azure-monitor-response-multi-dimension.json",
mockQuery: &AzureMonitorQuery{
Alias: "{{resourcegroup}} {Blob Type={{blobtype}}, Tier={{tier}}}",
Alias: "{{resourcegroup}} {Blob Type={{blobtype}}, Tier={{Tier}}}",
UrlComponents: map[string]string{
"resourceName": "grafana",
},

View File

@@ -45,6 +45,14 @@ func (e *CloudWatchExecutor) executeLogActions(ctx context.Context, queryContext
return nil
}
if dataframe.Meta != nil {
dataframe.Meta.PreferredVisualization = "logs"
} else {
dataframe.Meta = &data.FrameMeta{
PreferredVisualization: "logs",
}
}
resultChan <- &tsdb.QueryResult{RefId: query.RefId, Dataframes: tsdb.NewDecodedDataFrames(data.Frames{dataframe})}
return nil
})

View File

@@ -1,6 +1,6 @@
{
"name": "@grafana-plugins/input-datasource",
"version": "7.1.0-pre.0",
"version": "7.1.0-beta.3",
"description": "Input Datasource",
"private": true,
"repository": {
@@ -16,9 +16,9 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"devDependencies": {
"@grafana/data": "7.1.0-pre.0",
"@grafana/toolkit": "7.1.0-pre.0",
"@grafana/ui": "7.1.0-pre.0"
"@grafana/data": "7.1.0-beta.3",
"@grafana/toolkit": "7.1.0-beta.3",
"@grafana/ui": "7.1.0-beta.3"
},
"volta": {
"node": "12.16.2"

View File

@@ -45,7 +45,7 @@ export const ForgottenPassword: FC = () => {
<Legend>Reset password</Legend>
<Field
label="User"
description="Enter your informaton to get a reset link sent to you"
description="Enter your information to get a reset link sent to you"
invalid={!!errors.userOrEmail}
error={errors?.userOrEmail?.message}
>

View File

@@ -49,10 +49,11 @@ export const getLoginStyles = (theme: GrafanaTheme) => {
min-height: 100vh;
background-position: center;
background-repeat: no-repeat;
background-color: ${theme.palette.black};
min-width: 100%;
margin-left: 0;
background-color: $black;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
`,
@@ -76,7 +77,7 @@ export const getLoginStyles = (theme: GrafanaTheme) => {
text-align: center;
`,
mainTitle: css`
font-size: '32px';
font-size: 32px;
`,
subTitle: css`
font-size: ${theme.typography.size.md};

View File

@@ -1,6 +1,6 @@
import React from 'react';
import { DataTransformerID, standardTransformers, TransformerRegistyItem, TransformerUIProps } from '@grafana/data';
import { MergeTransformerOptions } from '@grafana/data/src/transformations/transformers/merge/merge';
import { MergeTransformerOptions } from '@grafana/data/src/transformations/transformers/merge';
export const MergeTransformerEditor: React.FC<TransformerUIProps<MergeTransformerOptions>> = ({
input,
@@ -14,7 +14,7 @@ export const mergeTransformerRegistryItem: TransformerRegistyItem<MergeTransform
id: DataTransformerID.merge,
editor: MergeTransformerEditor,
transformation: standardTransformers.mergeTransformer,
name: 'Merge on time',
description: `Merge series/tables by time and return a single table with values as rows.
Useful for showing multiple time series, tables or a combination of both visualized in a table.`,
name: 'Merge',
description: `Merge many series/tables and return a single table where mergeable values will be combined into the same row.
Useful for showing multiple series, tables or a combination of both visualized in a table.`,
};

View File

@@ -0,0 +1,20 @@
import React from 'react';
import { DataTransformerID, standardTransformers, TransformerRegistyItem, TransformerUIProps } from '@grafana/data';
import { SeriesToRowsTransformerOptions } from '@grafana/data/src/transformations/transformers/seriesToRows';
// The 'Series to rows' transform has no configurable options, so its editor
// renders nothing (the incoming props are intentionally unused).
export const SeriesToRowsTransformerEditor: React.FC<TransformerUIProps<SeriesToRowsTransformerOptions>> = () => null;
// Registry entry wiring the 'Series to rows' transform (id, UI editor,
// transformation implementation, and user-facing name/description) into the
// transformations registry.
export const seriesToRowsTransformerRegistryItem: TransformerRegistyItem<SeriesToRowsTransformerOptions> = {
  id: DataTransformerID.seriesToRows,
  editor: SeriesToRowsTransformerEditor,
  transformation: standardTransformers.seriesToRowsTransformer,
  name: 'Series to rows',
  description: `Merge many series and return a single series with time, metric and value as columns.
Useful for showing multiple time series visualized in a table.`,
};

View File

@@ -32,7 +32,6 @@ import {
import { getThemeColor } from 'app/core/utils/colors';
import { sortInAscendingOrder, deduplicateLogRowsById } from 'app/core/utils/explore';
import { getGraphSeriesModel } from 'app/plugins/panel/graph2/getGraphSeriesModel';
import { decimalSIPrefix } from '@grafana/data/src/valueFormats/symbolFormatters';
export const LogLevelColor = {
@@ -143,19 +142,23 @@ export function makeSeriesForLogs(sortedRows: LogRowModel[], bucketSize: number,
const fieldCache = new FieldCache(data);
const timeField = fieldCache.getFirstFieldOfType(FieldType.time);
timeField.display = getDisplayProcessor({
field: timeField,
timeZone,
});
if (timeField) {
timeField.display = getDisplayProcessor({
field: timeField,
timeZone,
});
}
const valueField = fieldCache.getFirstFieldOfType(FieldType.number);
valueField.config = {
...valueField.config,
color: series.color,
};
valueField.name = series.alias;
const fieldDisplayProcessor = getDisplayProcessor({ field: valueField, timeZone });
valueField.display = (value: any) => ({ ...fieldDisplayProcessor(value), color: series.color });
if (valueField) {
valueField.config = {
...valueField.config,
color: series.color,
};
valueField.name = series.alias;
const fieldDisplayProcessor = getDisplayProcessor({ field: valueField, timeZone });
valueField.display = (value: any) => ({ ...fieldDisplayProcessor(value), color: series.color });
}
const points = getFlotPairs({
xField: timeField,
@@ -201,35 +204,21 @@ export function dataFrameToLogsModel(
timeZone: TimeZone,
absoluteRange?: AbsoluteTimeRange
): LogsModel {
const { logSeries, metricSeries } = separateLogsAndMetrics(dataFrame);
const { logSeries } = separateLogsAndMetrics(dataFrame);
const logsModel = logSeriesToLogsModel(logSeries);
// unification: Removed logic for using metrics data in LogsModel as with the unification changes this would result
// in the incorrect data being used. Instead logs series are always derived from logs.
if (logsModel) {
if (metricSeries.length === 0) {
// Create histogram metrics from logs using the interval as bucket size for the line count
if (intervalMs && logsModel.rows.length > 0) {
const sortedRows = logsModel.rows.sort(sortInAscendingOrder);
const { visibleRange, bucketSize } = getSeriesProperties(sortedRows, intervalMs, absoluteRange);
logsModel.visibleRange = visibleRange;
logsModel.series = makeSeriesForLogs(sortedRows, bucketSize, timeZone);
} else {
logsModel.series = [];
}
// Create histogram metrics from logs using the interval as bucket size for the line count
if (intervalMs && logsModel.rows.length > 0) {
const sortedRows = logsModel.rows.sort(sortInAscendingOrder);
const { visibleRange, bucketSize } = getSeriesProperties(sortedRows, intervalMs, absoluteRange);
logsModel.visibleRange = visibleRange;
logsModel.series = makeSeriesForLogs(sortedRows, bucketSize, timeZone);
} else {
// We got metrics in the dataFrame so process those
logsModel.series = getGraphSeriesModel(
metricSeries,
timeZone,
{},
{ showBars: true, showLines: false, showPoints: false },
{
asTable: false,
isVisible: true,
placement: 'under',
}
);
logsModel.series = [];
}
return logsModel;
}
@@ -319,10 +308,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
// Find the fields we care about and collect all labels
const allSeries: LogFields[] = logSeries.map(series => {
const fieldCache = new FieldCache(series);
const stringField = fieldCache.hasFieldNamed('line')
? fieldCache.getFieldByName('line')
: fieldCache.getFirstFieldOfType(FieldType.string);
const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
if (stringField?.labels) {
allLabels.push(stringField.labels);
}
@@ -434,8 +420,8 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
// Stats are per query, keeping track by refId
const { refId } = series;
if (refId && !queriesVisited[refId]) {
if (totalBytesKey && series.meta.stats) {
const byteStat = series.meta.stats.find(stat => stat.displayName === totalBytesKey);
if (totalBytesKey && series.meta?.stats) {
const byteStat = series.meta?.stats.find(stat => stat.displayName === totalBytesKey);
if (byteStat) {
totalBytes += byteStat.value;
}

View File

@@ -40,4 +40,37 @@ describe('when checking template variables', () => {
expect(findTemplateVarChanges(b, a)).toBeUndefined();
expect(findTemplateVarChanges(a, b)).toBeUndefined();
});
it('then should ignore empty array values', () => {
const a: UrlQueryMap = {
'var-adhoc': [],
};
const b: UrlQueryMap = {};
expect(findTemplateVarChanges(b, a)).toBeUndefined();
expect(findTemplateVarChanges(a, b)).toBeUndefined();
});
it('Should handle array values with one value same as just value', () => {
const a: UrlQueryMap = {
'var-test': ['test'],
};
const b: UrlQueryMap = {
'var-test': 'test',
};
expect(findTemplateVarChanges(b, a)).toBeUndefined();
expect(findTemplateVarChanges(a, b)).toBeUndefined();
});
it('Should detect change in array value and return array with single value', () => {
const a: UrlQueryMap = {
'var-test': ['test'],
};
const b: UrlQueryMap = {
'var-test': 'asd',
};
expect(findTemplateVarChanges(a, b)['var-test']).toEqual(['test']);
});
});

View File

@@ -8,6 +8,7 @@ import { GrafanaRootScope } from 'app/routes/GrafanaCtrl';
import { locationUtil, UrlQueryMap } from '@grafana/data';
import { getDashboardSrv } from 'app/features/dashboard/services/DashboardSrv';
import { templateVarsChangedInUrl } from 'app/features/variables/state/actions';
import { isArray, isEqual } from 'lodash';
// Services that handles angular -> redux store sync & other react <-> angular sync
export class BridgeSrv {
@@ -15,6 +16,7 @@ export class BridgeSrv {
private lastQuery: UrlQueryMap = {};
private lastPath = '';
private angularUrl: string;
private lastUrl: string | null = null;
/** @ngInject */
constructor(
@@ -62,6 +64,11 @@ export class BridgeSrv {
const state = store.getState();
const url = state.location.url;
// No url change ignore redux store change
if (url === this.lastUrl) {
return;
}
if (this.angularUrl !== url) {
// store angular url right away as otherwise we end up syncing multiple times
this.angularUrl = url;
@@ -86,11 +93,11 @@ export class BridgeSrv {
dispatch(templateVarsChangedInUrl(changes));
}
}
this.lastQuery = state.location.query;
} else {
this.lastQuery = {};
}
this.lastQuery = state.location.query;
this.lastPath = state.location.path;
this.lastUrl = state.location.url;
});
appEvents.on(CoreEvents.locationChange, payload => {
@@ -108,22 +115,48 @@ export class BridgeSrv {
}
}
function getUrlValueForComparison(value: any): any {
if (isArray(value)) {
if (value.length === 0) {
value = undefined;
} else if (value.length === 1) {
value = value[0];
}
}
return value;
}
export function findTemplateVarChanges(query: UrlQueryMap, old: UrlQueryMap): UrlQueryMap | undefined {
let count = 0;
const changes: UrlQueryMap = {};
for (const key in query) {
if (!key.startsWith('var-')) {
continue;
}
if (query[key] !== old[key]) {
let oldValue = getUrlValueForComparison(old[key]);
let newValue = getUrlValueForComparison(query[key]);
if (!isEqual(newValue, oldValue)) {
changes[key] = query[key];
count++;
}
}
for (const key in old) {
if (!key.startsWith('var-')) {
continue;
}
const value = old[key];
// ignore empty array values
if (isArray(value) && value.length === 0) {
continue;
}
if (!query.hasOwnProperty(key)) {
changes[key] = ''; // removed
count++;

View File

@@ -18,7 +18,6 @@ import store from 'app/core/store';
import {
DataQueryError,
dateTime,
ExploreMode,
LogLevel,
LogRowModel,
LogsDedupStrategy,
@@ -33,7 +32,6 @@ const DEFAULT_EXPLORE_STATE: ExploreUrlState = {
datasource: '',
queries: [],
range: DEFAULT_RANGE,
mode: ExploreMode.Metrics,
ui: {
showingGraph: true,
showingTable: true,
@@ -101,7 +99,6 @@ describe('state functions', () => {
expect(serializeStateToUrlParam(state)).toBe(
'{"datasource":"foo","queries":[{"expr":"metric{test=\\"a/b\\"}"},' +
'{"expr":"super{foo=\\"x/z\\"}"}],"range":{"from":"now-5h","to":"now"},' +
'"mode":"Metrics",' +
'"ui":{"showingGraph":true,"showingTable":true,"showingLogs":true,"dedupStrategy":"none"}}'
);
});
@@ -124,7 +121,7 @@ describe('state functions', () => {
},
};
expect(serializeStateToUrlParam(state, true)).toBe(
'["now-5h","now","foo",{"expr":"metric{test=\\"a/b\\"}"},{"expr":"super{foo=\\"x/z\\"}"},{"mode":"Metrics"},{"ui":[true,true,true,"none"]}]'
'["now-5h","now","foo",{"expr":"metric{test=\\"a/b\\"}"},{"expr":"super{foo=\\"x/z\\"}"},{"ui":[true,true,true,"none"]}]'
);
});
});

View File

@@ -10,7 +10,6 @@ import {
DataSourceApi,
dateMath,
DefaultTimeZone,
ExploreMode,
HistoryItem,
IntervalValues,
LogRowModel,
@@ -248,9 +247,6 @@ export function parseUrlState(initial: string | undefined): ExploreUrlState {
const metricProperties = ['expr', 'expression', 'target', 'datasource', 'query'];
const queries = parsedSegments.filter(segment => isSegment(segment, ...metricProperties));
const modeObj = parsedSegments.filter(segment => isSegment(segment, 'mode'))[0];
const mode = modeObj ? modeObj.mode : ExploreMode.Metrics;
const uiState = parsedSegments.filter(segment => isSegment(segment, 'ui'))[0];
const ui = uiState
? {
@@ -262,7 +258,7 @@ export function parseUrlState(initial: string | undefined): ExploreUrlState {
: DEFAULT_UI_STATE;
const originPanelId = parsedSegments.filter(segment => isSegment(segment, 'originPanelId'))[0];
return { datasource, queries, range, ui, mode, originPanelId };
return { datasource, queries, range, ui, originPanelId };
}
export function generateKey(index = 0): string {

View File

@@ -2,15 +2,7 @@
import _ from 'lodash';
// Services & Utils
import {
DataQuery,
DataSourceApi,
ExploreMode,
dateTimeFormat,
AppEvents,
urlUtil,
ExploreUrlState,
} from '@grafana/data';
import { DataQuery, DataSourceApi, dateTimeFormat, AppEvents, urlUtil, ExploreUrlState } from '@grafana/data';
import appEvents from 'app/core/app_events';
import store from 'app/core/store';
import { SortOrder } from './explore';
@@ -187,15 +179,6 @@ export const createUrlFromRichHistory = (query: RichHistoryQuery) => {
range: { from: 'now-1h', to: 'now' },
datasource: query.datasourceName,
queries: query.queries,
/* Default mode is metrics. Exceptions are Loki (logs) and Jaeger (tracing) data sources.
* In the future, we can remove this as we are working on metrics & logs logic.
**/
mode:
query.datasourceId === 'loki'
? ExploreMode.Logs
: query.datasourceId === 'jaeger'
? ExploreMode.Tracing
: ExploreMode.Metrics,
ui: {
showingGraph: true,
showingLogs: true,

View File

@@ -7,6 +7,7 @@ import { seriesToFieldsTransformerRegistryItem } from '../components/Transformer
import { calculateFieldTransformRegistryItem } from '../components/TransformersUI/CalculateFieldTransformerEditor';
import { labelsToFieldsTransformerRegistryItem } from '../components/TransformersUI/LabelsToFieldsTransformerEditor';
import { mergeTransformerRegistryItem } from '../components/TransformersUI/MergeTransformerEditor';
import { seriesToRowsTransformerRegistryItem } from '../components/TransformersUI/SeriesToRowsTransformerEditor';
export const getStandardTransformers = (): Array<TransformerRegistyItem<any>> => {
return [
@@ -15,6 +16,7 @@ export const getStandardTransformers = (): Array<TransformerRegistyItem<any>> =>
filterFramesByRefIdTransformRegistryItem,
organizeFieldsTransformRegistryItem,
seriesToFieldsTransformerRegistryItem,
seriesToRowsTransformerRegistryItem,
calculateFieldTransformRegistryItem,
labelsToFieldsTransformerRegistryItem,
mergeTransformerRegistryItem,

View File

@@ -99,14 +99,20 @@ const renderUser = (user: UserDTO) => {
<img className="filter-table__avatar" src={user.avatarUrl} />
</a>
</td>
<td className="link-td">
<a href={editUrl}>{user.login}</a>
<td className="link-td max-width-10">
<a className="ellipsis" href={editUrl} title={user.login}>
{user.login}
</a>
</td>
<td className="link-td">
<a href={editUrl}>{user.email}</a>
<td className="link-td max-width-10">
<a className="ellipsis" href={editUrl} title={user.email}>
{user.email}
</a>
</td>
<td className="link-td">
<a href={editUrl}>{user.name}</a>
<td className="link-td max-width-10">
<a className="ellipsis" href={editUrl} title={user.name}>
{user.name}
</a>
</td>
<td className="link-td">{user.lastSeenAtAge && <a href={editUrl}>{user.lastSeenAtAge}</a>}</td>
<td className="link-td">

View File

@@ -1,16 +1,14 @@
import $ from 'jquery';
import _ from 'lodash';
import angular, { ILocationService, IScope } from 'angular';
import { selectors } from '@grafana/e2e-selectors';
import { appEvents, contextSrv, coreModule } from 'app/core/core';
import { DashboardModel } from '../../state/DashboardModel';
import { getConfig } from 'app/core/config';
import { backendSrv } from 'app/core/services/backend_srv';
import { DashboardSrv } from '../../services/DashboardSrv';
import { CoreEvents } from 'app/types';
import { GrafanaRootScope } from 'app/routes/GrafanaCtrl';
import { AppEvents, locationUtil, TimeZone } from '@grafana/data';
import { AppEvents, locationUtil, TimeZone, urlUtil } from '@grafana/data';
import { promiseToDigest } from '../../../../core/utils/promiseToDigest';
export class SettingsCtrl {
@@ -121,8 +119,7 @@ export class SettingsCtrl {
const url = this.$location.path();
for (const section of this.sections) {
const sectionParams = _.defaults({ editview: section.id }, params);
section.url = getConfig().appSubUrl + url + '?' + $.param(sectionParams);
section.url = locationUtil.assureBaseUrl(urlUtil.renderUrl(url, { ...params, editview: section.id }));
}
}
@@ -255,7 +252,7 @@ export class SettingsCtrl {
};
onRefreshIntervalChange = (intervals: string[]) => {
this.dashboard.timepicker.refresh_intervals = intervals;
this.dashboard.timepicker.refresh_intervals = intervals.filter(i => i.trim() !== '');
};
onNowDelayChange = (nowDelay: string) => {

View File

@@ -112,7 +112,7 @@ export class TimePickerSettings extends PureComponent<Props, State> {
<div className="gf-form">
<span className="gf-form-label width-7">Auto-refresh</span>
<Input width={60} value={this.getRefreshIntervals()} onChange={this.onRefreshIntervalChange} />
<Input width={60} defaultValue={this.getRefreshIntervals()} onBlur={this.onRefreshIntervalChange} />
</div>
<div className="gf-form">
<span className="gf-form-label width-7">Now delay now-</span>

View File

@@ -87,8 +87,8 @@ export class InspectDataTab extends PureComponent<Props, State> {
const dataFrameCsv = toCSV([dataFrame]);
const blob = new Blob([dataFrameCsv], {
type: 'application/csv;charset=utf-8',
const blob = new Blob([String.fromCharCode(0xfeff), dataFrameCsv], {
type: 'text/csv;charset=utf-8',
});
const transformation = transformId !== DataTransformerID.noop ? '-as-' + transformId.toLocaleLowerCase() : '';
const fileName = `${panel.title}-data${transformation}-${dateTimeFormat(new Date())}.csv`;

View File

@@ -74,16 +74,16 @@ export class InspectJSONTab extends PureComponent<Props, State> {
return { note: 'Missing Response Data' };
}
return this.props.data.series.map(frame => {
const fields = frame.fields.map(field => {
return chain(field)
.omit('values')
.omit('calcs')
.omit('display')
.value();
});
const { table, fields, ...rest } = frame as any; // remove 'table' from arrow response
return {
...frame,
fields,
...rest,
fields: frame.fields.map(field => {
return chain(field)
.omit('values')
.omit('state')
.omit('display')
.value();
}),
};
});
}

View File

@@ -95,7 +95,7 @@ export class PanelHeader extends Component<Props, State> {
return (
<div className="panel-loading" onClick={this.onCancelQuery}>
<Tooltip content="Cancel query">
<Icon className="panel-loading__spinner spin-counter-clock" name="sync" />
<Icon className="panel-loading__spinner spin-clockwise" name="sync" />
</Tooltip>
</div>
);

View File

@@ -79,12 +79,7 @@ export class TimeSrv {
return intervals;
}
const validIntervals = intervals.filter(str => str !== '').filter(this.contextSrv.isAllowedInterval);
if (validIntervals.indexOf(this.contextSrv.minRefreshInterval) === -1) {
validIntervals.unshift(this.contextSrv.minRefreshInterval);
}
return validIntervals;
return intervals.filter(str => str !== '').filter(this.contextSrv.isAllowedInterval);
}
private parseTime() {

View File

@@ -1,23 +1,12 @@
import React from 'react';
import {
DataSourceApi,
LoadingState,
ExploreMode,
toUtc,
DataQueryError,
DataQueryRequest,
CoreApp,
MutableDataFrame,
} from '@grafana/data';
import { DataSourceApi, LoadingState, toUtc, DataQueryError, DataQueryRequest, CoreApp } from '@grafana/data';
import { getFirstNonQueryRowSpecificError } from 'app/core/utils/explore';
import { ExploreId } from 'app/types/explore';
import { shallow } from 'enzyme';
import AutoSizer from 'react-virtualized-auto-sizer';
import { Explore, ExploreProps } from './Explore';
import { scanStopAction } from './state/actionTypes';
import { toggleGraph } from './state/actions';
import { SecondaryActions } from './SecondaryActions';
import { TraceView } from './TraceView/TraceView';
import { getTheme } from '@grafana/ui';
const dummyProps: ExploreProps = {
@@ -64,7 +53,6 @@ const dummyProps: ExploreProps = {
to: 'now',
},
},
mode: ExploreMode.Metrics,
initialUI: {
showingTable: false,
showingGraph: false,
@@ -119,6 +107,10 @@ const dummyProps: ExploreProps = {
originPanelId: 1,
addQueryRow: jest.fn(),
theme: getTheme(),
showMetrics: true,
showLogs: true,
showTable: true,
showTrace: true,
};
const setupErrors = (hasRefId?: boolean) => {
@@ -144,34 +136,6 @@ describe('Explore', () => {
expect(wrapper.find(SecondaryActions).props().addQueryRowButtonHidden).toBe(false);
});
it('does not show add row button if mode is tracing', () => {
const wrapper = shallow(<Explore {...{ ...dummyProps, mode: ExploreMode.Tracing }} />);
expect(wrapper.find(SecondaryActions).props().addQueryRowButtonHidden).toBe(true);
});
it('renders TraceView if tracing mode', () => {
const wrapper = shallow(
<Explore
{...{
...dummyProps,
mode: ExploreMode.Tracing,
queryResponse: {
...dummyProps.queryResponse,
state: LoadingState.Done,
series: [new MutableDataFrame({ fields: [{ name: 'trace', values: [{}] }] })],
},
}}
/>
);
const autoSizer = shallow(
wrapper
.find(AutoSizer)
.props()
.children({ width: 100, height: 100 }) as React.ReactElement
);
expect(autoSizer.find(TraceView).length).toBe(1);
});
it('should filter out a query-row-specific error when looking for non-query-row-specific errors', async () => {
const queryErrors = setupErrors(true);
const queryError = getFirstNonQueryRowSpecificError(queryErrors);

View File

@@ -11,7 +11,6 @@ import {
AbsoluteTimeRange,
DataQuery,
DataSourceApi,
ExploreMode,
GrafanaTheme,
GraphSeriesXY,
LoadingState,
@@ -21,6 +20,7 @@ import {
TimeZone,
ExploreUIState,
ExploreUrlState,
LogsModel,
} from '@grafana/data';
import store from 'app/core/store';
@@ -58,6 +58,7 @@ import { getTimeZone } from '../profile/state/selectors';
import { ErrorContainer } from './ErrorContainer';
import { scanStopAction } from './state/actionTypes';
import { ExploreGraphPanel } from './ExploreGraphPanel';
//TODO:unification
import { TraceView } from './TraceView/TraceView';
import { SecondaryActions } from './SecondaryActions';
import { FILTER_FOR_OPERATOR, FILTER_OUT_OPERATOR, FilterItem } from '@grafana/ui/src/components/Table/types';
@@ -104,12 +105,12 @@ export interface ExploreProps {
initialDatasource: string;
initialQueries: DataQuery[];
initialRange: TimeRange;
mode: ExploreMode;
initialUI: ExploreUIState;
isLive: boolean;
syncedTimes: boolean;
updateTimeRange: typeof updateTimeRange;
graphResult?: GraphSeriesXY[];
logsResult?: LogsModel;
loading?: boolean;
absoluteRange: AbsoluteTimeRange;
showingGraph?: boolean;
@@ -121,6 +122,10 @@ export interface ExploreProps {
originPanelId: number;
addQueryRow: typeof addQueryRow;
theme: GrafanaTheme;
showMetrics: boolean;
showTable: boolean;
showLogs: boolean;
showTrace: boolean;
}
interface ExploreState {
@@ -170,7 +175,6 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
initialDatasource,
initialQueries,
initialRange,
mode,
initialUI,
originPanelId,
} = this.props;
@@ -183,7 +187,6 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
initialDatasource,
initialQueries,
initialRange,
mode,
width,
this.exploreEvents,
initialUI,
@@ -301,7 +304,6 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
exploreId,
split,
queryKeys,
mode,
graphResult,
loading,
absoluteRange,
@@ -312,6 +314,10 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
syncedTimes,
isLive,
theme,
showMetrics,
showTable,
showLogs,
showTrace,
} = this.props;
const { showRichHistory } = this.state;
const exploreClass = split ? 'explore explore-split' : 'explore';
@@ -334,7 +340,8 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
<SecondaryActions
addQueryRowButtonDisabled={isLive}
// We cannot show multiple traces at the same time right now so we do not show add query button.
addQueryRowButtonHidden={mode === ExploreMode.Tracing}
//TODO:unification
addQueryRowButtonHidden={false}
richHistoryButtonActive={showRichHistory}
onClickAddQueryRowButton={this.onClickAddQueryRowButton}
onClickRichHistoryButton={this.toggleShowRichHistory}
@@ -355,14 +362,13 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
<StartPage
onClickExample={this.onClickExample}
datasource={datasourceInstance}
exploreMode={mode}
exploreId={exploreId}
/>
</div>
)}
{!showStartPage && (
<>
{mode === ExploreMode.Metrics && (
{showMetrics && (
<ExploreGraphPanel
series={graphResult}
width={width}
@@ -379,7 +385,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
showLines={true}
/>
)}
{mode === ExploreMode.Metrics && (
{showTable && (
<TableContainer
width={width}
exploreId={exploreId}
@@ -388,7 +394,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
}
/>
)}
{mode === ExploreMode.Logs && (
{showLogs && (
<LogsContainer
width={width}
exploreId={exploreId}
@@ -399,7 +405,8 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
onStopScanning={this.onStopScanning}
/>
)}
{mode === ExploreMode.Tracing &&
{/* TODO:unification */}
{showTrace &&
// We expect only one trace at the moment to be in the dataframe
// If there is not data (like 404) we show a separate error so no need to show anything here
queryResponse.series[0] && (
@@ -442,9 +449,12 @@ function mapStateToProps(state: StoreState, { exploreId }: ExploreProps): Partia
urlState,
update,
isLive,
supportedModes,
mode,
graphResult,
logsResult,
showLogs,
showMetrics,
showTable,
showTrace,
loading,
showingGraph,
showingTable,
@@ -452,31 +462,13 @@ function mapStateToProps(state: StoreState, { exploreId }: ExploreProps): Partia
queryResponse,
} = item;
const { datasource, queries, range: urlRange, mode: urlMode, ui, originPanelId } = (urlState ||
{}) as ExploreUrlState;
const { datasource, queries, range: urlRange, ui, originPanelId } = (urlState || {}) as ExploreUrlState;
const initialDatasource = datasource || store.get(lastUsedDatasourceKeyForOrgId(state.user.orgId));
const initialQueries: DataQuery[] = ensureQueriesMemoized(queries);
const initialRange = urlRange
? getTimeRangeFromUrlMemoized(urlRange, timeZone)
: getTimeRange(timeZone, DEFAULT_RANGE);
let newMode: ExploreMode | undefined;
if (supportedModes.length) {
const urlModeIsValid = supportedModes.includes(urlMode);
const modeStateIsValid = supportedModes.includes(mode);
if (modeStateIsValid) {
newMode = mode;
} else if (urlModeIsValid) {
newMode = urlMode;
} else {
newMode = supportedModes[0];
}
} else {
newMode = [ExploreMode.Metrics, ExploreMode.Logs, ExploreMode.Tracing].includes(urlMode) ? urlMode : undefined;
}
const initialUI = ui || DEFAULT_UI_STATE;
return {
@@ -489,10 +481,10 @@ function mapStateToProps(state: StoreState, { exploreId }: ExploreProps): Partia
initialDatasource,
initialQueries,
initialRange,
mode: newMode,
initialUI,
isLive,
graphResult,
graphResult: graphResult ?? undefined,
logsResult: logsResult ?? undefined,
loading,
showingGraph,
showingTable,
@@ -501,6 +493,10 @@ function mapStateToProps(state: StoreState, { exploreId }: ExploreProps): Partia
originPanelId,
syncedTimes,
timeZone,
showLogs,
showMetrics,
showTable,
showTrace,
};
}

View File

@@ -1,69 +0,0 @@
import React from 'react';
import { shallow, ShallowWrapper } from 'enzyme';
import { UnConnectedExploreToolbar } from './ExploreToolbar';
import { ExploreMode } from '@grafana/data';
import { ExploreId } from '../../types';
import { ToggleButtonGroup } from '@grafana/ui';
jest.mock('./state/selectors', () => {
return {
__esModule: true,
getExploreDatasources: () => [] as any,
};
});
describe('ExploreToolbar', () => {
it('displays correct modes', () => {
let wrapper = shallow(createToolbar([ExploreMode.Tracing, ExploreMode.Logs]));
checkModes(wrapper, ['Logs', 'Tracing']);
wrapper = shallow(createToolbar([ExploreMode.Logs]));
checkModes(wrapper, []);
wrapper = shallow(createToolbar([ExploreMode.Logs, ExploreMode.Tracing, ExploreMode.Metrics]));
checkModes(wrapper, ['Metrics', 'Logs', 'Tracing']);
});
});
function checkModes(wrapper: ShallowWrapper, modes: string[]) {
expect(
wrapper
.find(ToggleButtonGroup)
.children()
.map(node => node.children().text())
).toEqual(modes);
}
function createToolbar(supportedModes: ExploreMode[]) {
return (
<UnConnectedExploreToolbar
datasourceMissing={false}
loading={false}
range={{} as any}
timeZone={'UTC'}
splitted={false}
syncedTimes={false}
supportedModes={supportedModes}
selectedMode={ExploreMode.Tracing}
hasLiveOption={false}
isLive={false}
isPaused={false}
queries={[]}
containerWidth={0}
changeDatasource={(() => {}) as any}
clearAll={(() => {}) as any}
cancelQueries={(() => {}) as any}
runQueries={(() => {}) as any}
closeSplit={(() => {}) as any}
split={(() => {}) as any}
syncTimes={(() => {}) as any}
changeRefreshInterval={(() => {}) as any}
changeMode={(() => {}) as any}
updateLocation={(() => {}) as any}
setDashboardQueriesToUpdateOnLoad={(() => {}) as any}
exploreId={ExploreId.left}
onChangeTime={(() => {}) as any}
onChangeTimeZone={(() => {}) as any}
/>
);
}

Some files were not shown because too many files have changed in this diff Show More