Compare commits

...

84 Commits

Author SHA1 Message Date
Ryan McKinley
8101355285 Influx: fix flux panic. cherry picks for #26329 (#26331)
* influx fix for 7.1.0

* don't upgrade sdk

* revert go mod changes
2020-07-16 13:04:17 +02:00
Dominik Prokop
226cba97e1 Release v7.1.0 2020-07-16 13:04:17 +02:00
Marcus Efraimsson
7d7f2d9262 Provisioning: Fix bug when provision app plugins using Enterprise edition (#26340)
In OSS provisioning service init after plugin registration, but in
Enterprise it's the opposite order and installed app plugin check
fails. This adjusts service registry init priority to make sure plugins
are registered before provisioning inits.

Which issue(s) this PR fixes:
Fixes #26336

(cherry picked from commit b97d1f4170)
2020-07-16 13:04:17 +02:00
Kyle Brandt
7ac9057f71 Azure: Insights Analytics, fix possible panics and return error body (#26361)
missing return statements can result in panic

(cherry picked from commit c980f37509)
2020-07-16 13:04:17 +02:00
Ivana Huckova
8673bd4974 Update bg color based on theme (#26359)
(cherry picked from commit 21971a4df8)
2020-07-16 13:04:17 +02:00
Sofia Papagiannaki
f870bea288 Cloud monitoring: Add query parameter (#26354)
(cherry picked from commit 9d8ae39108)
2020-07-16 13:04:17 +02:00
Kyle Brandt
1ab8109892 Azure: Handle real type nan/inf values in Log/Insights Analytics (#26342)
Before this, if the user were to divide by 0.0, "Infinity" would be returned in the result and the user would get an error: "unexpected type, expected json.Number but got string". Now these values are properly set as Inf values (and also made sure to handle NaN as well).

(cherry picked from commit 590702c497)
2020-07-16 13:04:17 +02:00
Ivana Huckova
3655752bae Elasticsearch: Create Raw Doc metric to render raw JSON docs in columns in the new table panel (#26233)
* test

* WIP: Create v2 version

* Update tests, remove console logs, refactor

* Remove incorrect types

* Update type

* Rename legacy and new metrics

* Update

* Run request when Raw Data to Raw Document switch

* Fix size updating

* Remove _source field from table results as we are showing each source field as column

* Remove _source just for metrics, not logs

* Revert "Remove _source just for metrics, not logs"

This reverts commit 611b6922f7.

* Revert "Remove _source field from table results as we are showing each source field as column"

This reverts commit 31a9d5f81b.

* Add vis preference for logs

* Update visualisation to logs

* Revert "Revert "Remove _source just for metrics""

This reverts commit a102ab2894.

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>
(cherry picked from commit 3fd810417f)
2020-07-16 13:04:17 +02:00
Ivana Huckova
d44da8786f Docs: Add derived field datasourceUid information (#26285)
* Add datasourceUid info

* Revert "Add datasourceUid info"

This reverts commit 84dce1dcfd.

* Add datasourceUid info

* Update docs/sources/features/datasources/loki.md

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>

* Update derived fields info

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
(cherry picked from commit 5b2ff4498e)
2020-07-16 13:04:17 +02:00
kay delaney
2df8fe30da Docs: Add info about logs/metrics unification to What's New docs (#26325)
* Docs: Add info about logs/metrics unification to What's New docs

* Apply suggestions from code review

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
(cherry picked from commit 1de810b26a)
2020-07-16 13:04:17 +02:00
Alex Khomenko
562e811179 Admin: Update org list after delete (#26277)
(cherry picked from commit 208f95a76c)
2020-07-16 13:04:17 +02:00
Dominik Prokop
2747607805 DashboardLinks: do not over-query search endpoint (#26311)
* DashboardLinks: WIP fix for dashboard links issue

* Make the dashboard links update on change(hacky)

* Replace dashboard links with new array when updating/adding dash links

* Update snaps

* Deep clone dashboard links on save

Co-authored-by: Dominik Prokop <dominik.prokop@grafana.com>
(cherry picked from commit 23e93175d1)
2020-07-16 13:04:17 +02:00
Andreas Opferkuch
10a9031b1b ThemeContext: Fix useStyles memoization (#26200)
(cherry picked from commit 72cd9a3222)
2020-07-16 13:04:17 +02:00
Dominik Prokop
a74d03b807 SignIn button - use correct url (#26239)
* SignIn button - use correct url

* Fix SignIn test (#26266)

* Don't use absolute URL

* post review

* Fix snap

Co-authored-by: Sofia Papagiannaki <papagian@users.noreply.github.com>
(cherry picked from commit 187612ca8d)
2020-07-16 13:04:17 +02:00
kay delaney
e621f54fe3 CloudWatch Logs: Fixes grouping of results by numeric field (#26298)
* CloudWatch Logs: Fixes grouping of results by numeric field
Closes #25721

(cherry picked from commit 54ad5b869e)
2020-07-16 13:04:17 +02:00
Zoltán Bedi
05a0019fcb PluginsListPage: More plugins button should open in new window (#26305)
Fixes #24622

(cherry picked from commit 7bbfb57dd1)
2020-07-16 13:04:17 +02:00
Mitsuhiro Tanda
dfb2119afa Implicit import symbol-observable to avoid inconsistent symbol (#26288)
(cherry picked from commit 077aec465c)
2020-07-16 13:04:17 +02:00
Kyle Brandt
e931df8ab9 Docs: Flux support in influxdb data source in 7.1 (#25551)
Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
(cherry picked from commit d28b62740b)
2020-07-16 13:04:17 +02:00
Harrison Shoff
1e35109d3e AdminUsers: reset page to zero on query change (#26293)
(cherry picked from commit e63028e8c6)
2020-07-16 13:04:17 +02:00
Peter Holmberg
ee35db1b8b Fix: Make the JSON in JSONCell Tooltip more visible (#26048)
* add white background to tooltip component

* remove faulty background

* new theme for tooltip

* correct colors, remove custom box-shadow

* rename theme and add documentation

* use useStyles for style memo

(cherry picked from commit 7cdbae4ae6)
2020-07-16 13:04:17 +02:00
Zoltán Bedi
236ac5c394 Prometheus: Fix prom links in mixed mode (#26244)
* Prometheus: Fix prom links in mixed mode

* Modify PromLink with code style changes

(cherry picked from commit 0dd2bc8953)
2020-07-16 13:04:17 +02:00
Torkel Ödegaard
81014a717d BarGauge: Fixed width of unfilled region not being correct when value is close to 100% (#26193)
* BarGauge: Fixed width of unfilled region not being correct when value is close to 100%

* removed accidental change

* updated snapshot

(cherry picked from commit b1e48f429f)
2020-07-16 13:04:17 +02:00
Ivana Huckova
fab8bea6f7 Chore: Fix failing master due to strictNullCheck error (#26283)
* Fix typecheck error on master

* Fix typecheck error on master

(cherry picked from commit 95bf064776)
2020-07-16 13:04:17 +02:00
Ivana Huckova
ea469bf48f Jaeger/Zipkin: URL-encode service names and trace ids for API calls (#26115)
* Encode services and id

* Encode URL for Zipkin API calls

* Add test coverage

(cherry picked from commit 73468e1481)
2020-07-16 13:04:17 +02:00
Kamal Galrani
85ca9c933b removes erroneous $ (#26257)
(cherry picked from commit c136f7da39)
2020-07-16 13:04:17 +02:00
Kamal Galrani
051c7a7c8e fixes footer inconsistency (#26255)
(cherry picked from commit f4b6abb05e)
2020-07-16 13:04:17 +02:00
Kyle Brandt
28978db293 backend: use latest go plugin sdk (0.74.0) to sort wide frames (#26207)
This makes it so results are more stable between refreshes of data and fixes sorting issues with Azure Application Insights service as well as the two Azure Analytics services.

fixes #22937

(cherry picked from commit c1ede4fc71)
2020-07-16 13:04:17 +02:00
Ivana Huckova
56a0b90559 Elastic: Fix error "e.buckets[Symbol.iterator] is not a function" when using filter (#26217)
* Add bucket to array if it is not in array

* Fix issue one level above

* Rename variable

* Move array check to processAggregationDocs

(cherry picked from commit 6d8545da63)
2020-07-16 13:04:17 +02:00
Ivana Huckova
461fc94103 Explore/Loki: Escape \ in labels for show context queries (#26116)
* Hot fix, keep the file

* Add comment

* Update public/app/plugins/datasource/loki/datasource.ts

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>

* Remove script.go file

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>
(cherry picked from commit 52f12d10c5)
2020-07-16 13:04:17 +02:00
Marcus Efraimsson
ee21c1be81 Docker: Make sure to create default plugin provisioning directory (#26017)
(cherry picked from commit e25c6db7e9)
2020-07-16 13:04:17 +02:00
Ryan McKinley
a3a9ce7f24 AppPlugin: give full control to page layout when navigation is missing (#26247)
(cherry picked from commit 5f8eb93db1)
2020-07-13 14:41:21 +02:00
Ryan McKinley
1e3265d047 Flux: use monaco query editor (#26179)
(cherry picked from commit 33acf4c056)
2020-07-13 14:41:21 +02:00
Alex Khomenko
88270a2bf8 Grafana UI: Make FileUpload button size customizable (#26013)
(cherry picked from commit c3d4e69a32)
2020-07-13 14:41:21 +02:00
Andrej Ocenas
f8bdda7adf Release v7.1.0-beta3 2020-07-13 14:41:21 +02:00
Ryan McKinley
7618fd36ac DataLinks: add internal flag in comments (#26215)
(cherry picked from commit 3d98641a45)
2020-07-13 14:41:21 +02:00
Tobias Skarhed
bca82d8814 Issue Template: @grafana/ui component request (#25981)
* Add first draft of issue template

* Remove feature request line

* Update PR feedback

* Minor tweaks

* Update .github/ISSUE_TEMPLATE/4-grafana_ui_component.md

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>

* Fix punctuation

Co-authored-by: Clarity-89 <homes89@ukr.net>
Co-authored-by: Alex Khomenko <Clarity-89@users.noreply.github.com>
Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
(cherry picked from commit 5a3eb413d9)
2020-07-13 14:41:21 +02:00
Andrej Ocenas
783e5d12c7 Explore: Unification of logs/metrics/traces user interface (#25890)
Removes "Metrics"/"Logs" mode switcher from Explore, allowing for both
metrics and logs queries at the same time.

Co-authored-by: kay delaney <kay@grafana.com>
(cherry picked from commit 64bc85963b)
2020-07-13 14:41:21 +02:00
Peter Holmberg
e39fef1649 Fix: Redirect to correct url after creating a folder (#26160)
(cherry picked from commit 9948e9298f)
2020-07-13 14:41:21 +02:00
kay delaney
2145633e9b Datasource/CloudWatch: More robust handling of different query modes (#25691)
* Datasource/CloudWatch: More robust handling of different query modes
A small refactor which changes how the CloudWatch datasource handles
multiple queries with different query modes. Groundwork for future
Logs/Metrics unification work.

(cherry picked from commit 2ac1bfcc79)
2020-07-13 14:41:21 +02:00
Andrej Ocenas
6503962013 Explore: Run query on splitOpen action (#26161)
(cherry picked from commit 53eb856d20)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
40a33c56c2 Templating: Fix recursive loop of template variable queries when changing ad-hoc-variable (#26191)
* Templating: Fix url sync issue with adhoc variables

* Update packages/grafana-ui/src/components/Segment/SegmentAsync.tsx

* Update packages/grafana-ui/src/components/Segment/SegmentAsync.tsx

Co-authored-by: Dominik Prokop <dominik.prokop@grafana.com>
(cherry picked from commit 0428f27194)
2020-07-13 14:41:21 +02:00
Ivana Huckova
42069bb254 Loki: Improve error message for unescaped \ and add LogQL link to docs (#26136)
* Add custom escaping error message

* Include regex escape error message

* Update docs, add logql link

* Refactor

(cherry picked from commit d3dcb19a5b)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
4684c7971c BarGauge: Fix space bug in single series mode (#26176)
(cherry picked from commit 7b80e300d9)
2020-07-13 14:41:21 +02:00
Kyle Brandt
903eccad20 Docs: Azure Monitor data source changes for 7.1 (#26096)
Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
(cherry picked from commit c6a3afb4b8)
2020-07-13 14:41:21 +02:00
kay delaney
2bc9374a9e Backend: use latest Go plugin SDK (#26162)
(cherry picked from commit fd29b952a6)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
47e162d6b2 Templating: Fixed recursive queries triggered when switching dashboard settings view (#26137)
* Templating: Fixed recursive queries triggered when going into dashboard settings

* Fixed unused import

* use locationUtil

(cherry picked from commit cace879c96)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
b8c5174461 Dashboard: Refresh intervals should not always add the server min refresh interval (#26150)
(cherry picked from commit 8ab5d2ddd9)
2020-07-13 14:41:21 +02:00
Marcus Andersson
eedf6e53de Transform: adding missing "table"-transform and "series to rows"-transform to Grafana v7-transforms. (#26042)
* Fixed so the merge for table values works as it did before.

* wip

* fixed tests.

* merge tests are green.

* removed unused code and simplify the seriesToRows.

* added time series to rows editor.

* using getFrameDisplayName for the metric value.

* updated description of transforms.

* updated docs.

* fixed according to feedback.

* changed from images to markdown tables for the examples.

* forgot to save :P

(cherry picked from commit 17d87071e6)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
5f539230da DataLinks: Fixed interpolation of repeated variables used in data links (#26147)
(cherry picked from commit 89b56782c6)
2020-07-13 14:41:21 +02:00
Ryan McKinley
c14ac88b8d CSV: force UTF-8 encoding for download (#26145)
(cherry picked from commit b26ef1db25)
2020-07-13 14:41:21 +02:00
Dominik Prokop
9417a0c277 Do not break dashboard settings UI when time intervals end with trailing comma (#26126)
(cherry picked from commit 30a682a4da)
2020-07-13 14:41:21 +02:00
Ryan McKinley
4c27708b7b DataSourceWithBackend: add internal comment for new method (#26139)
(cherry picked from commit 2b6833d0da)
2020-07-13 14:41:21 +02:00
Kyle Brandt
763d28ad96 Azure: Restore Insights Metrics alias feature (#26098)
also fix case sensitivity for azure monitor metrics

(cherry picked from commit 9164a35240)
2020-07-13 14:41:21 +02:00
Dominik Prokop
949988219f grafana/ui: Do not rename export for InlineFormLabel (#26118)
(cherry picked from commit eb4391a228)
2020-07-13 14:41:21 +02:00
Arve Knudsen
4aa1d28683 Upgrade build pipeline tool (#26112)
Signed-off-by: Arve Knudsen <arve.knudsen@gmail.com>
(cherry picked from commit e7e854ea33)
2020-07-13 14:41:21 +02:00
Dominik Prokop
a4846ee4fd Chore: Bump rxjs to latest (#26084)
(cherry picked from commit aa671c863e)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
c2403767a0 StatPanel: Fixes issue with name showing for single series / field results (#26070)
* StatPanel: Fix text mode auto logic

* Removed import

(cherry picked from commit c9f22b72e3)
2020-07-13 14:41:21 +02:00
Peter Holmberg
fce35e7aa9 Fix: Icon and Tooltip on Variables editor (#26086)
(cherry picked from commit 8f1115c6ac)
2020-07-13 14:41:21 +02:00
Marcus Olsson
0d47601175 Docs: Add guidelines for inclusive language (#25533)
* Docs: Add guidelines for inclusive language

* Fix review comments

* Fix review comments

* Update documentation-style-guide.md

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
(cherry picked from commit 0f41ca620b)
2020-07-13 14:41:21 +02:00
Sofia Papagiannaki
28e50ae4fd Auth: Fix POST request failures with anonymous access (#26049)
Macaron context.QueryBool() seems to modify the request context
that causes the POST and PUT requests to fail with:
"http: proxy error: net/http: HTTP/1.x transport connection broken: http: ContentLength=333 with Body length 0"

(cherry picked from commit 44dff6fdd0)
2020-07-13 14:41:21 +02:00
Andreas Opferkuch
fc96444b23 ThemeContext: Make useStyles type-aware (#26056)
PLUS:
Make it more concise
Add unit test

(cherry picked from commit 390c80d7f5)
2020-07-13 14:41:21 +02:00
annegies
9eb16756b1 Remove break from ldap, get all groups from all the group base searches specified (#25825)
Signed-off-by: Annegies van 't Zand <ace.vtzand@gmail.com>
(cherry picked from commit a2737c0896)
2020-07-13 14:41:21 +02:00
Ryan McKinley
cde6a2de68 panel Inspect: improve structure debugging (#26065)
(cherry picked from commit af5dff8a1b)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
111c238df2 TextPanel: Fixed issue with new react text panel (#26061)
(cherry picked from commit 8be735a6ec)
2020-07-13 14:41:21 +02:00
Carl Bergquist
1630f21aee Instrument dashboard versions and annotation count (#26044)
(cherry picked from commit 26852ca788)
2020-07-13 14:41:21 +02:00
Torkel Ödegaard
c2125493a7 InfluxDB: Fixed new group by dropdown now showing (#26031)
(cherry picked from commit 66460ae740)
2020-07-13 14:41:21 +02:00
Ryan McKinley
33eec937dd Table: JSON Cell should try to convert strings to JSON (#26024)
(cherry picked from commit 3acc2a6ac2)
2020-07-13 14:41:21 +02:00
Steven Vachon
073f46e289 @grafana/e2e: close options panel before interacting with the query form (#26036)
... it's logically better, but the real reason is to appease Cypress, which was causing a consistent request error for a single plugin (datadog-datasource). An error which could not be reproduced manually.

(cherry picked from commit 634d8d60d6)
2020-07-13 14:41:21 +02:00
Tobias Skarhed
7c1c0bc8c1 AdminUsersTable: Fix width (#26019)
(cherry picked from commit b06d2cf30f)
2020-07-13 14:41:21 +02:00
Josh Soref
a9fc5ff45d Graph panel: Move Stacking and null values before Hover tooltip options (#26037)
(cherry picked from commit dec76b4556)
2020-07-13 14:41:21 +02:00
Ivana Huckova
7d1f0d619d Elastic: Fix displaying of correct log message (#26020)
* Fix default field, remove redundant line field check

* Add comments

(cherry picked from commit 8b46655361)
2020-07-13 14:41:21 +02:00
Ryan McKinley
aa1cdf0a20 grafana/data: do not bundle rxjs (#26039)
(cherry picked from commit b7792de16d)
2020-07-13 14:41:21 +02:00
Alex Khomenko
0b109a1637 Forgot password: Fix styling (#26002)
(cherry picked from commit 9e47114c45)
2020-07-02 13:07:33 +03:00
Arve Knudsen
6a1f05d7ec CircleCI: Upgrade build pipeline tool (#26006)
Signed-off-by: Arve Knudsen <arve.knudsen@gmail.com>
(cherry picked from commit 3e9e2db384)
2020-07-02 13:07:33 +03:00
Ryan McKinley
1576b16219 Monaco: check suggestions against current word (#25992)
* trigger on current word

* proper index

* test suggestions

* test suggestions

* fix test

(cherry picked from commit 085b2f3dbf)
2020-07-02 13:07:33 +03:00
Sofia Papagiannaki
44ba5482f1 Release v7.1.0-beta2 2020-07-02 13:07:33 +03:00
Ryan McKinley
972e07bd2e Panel Loading: spin clockwise, not counter clockwise (#25998)
* spin clockwise

* spin clockwise

(cherry picked from commit 90a5a85eb1)
2020-07-02 13:07:33 +03:00
Sebastian Widmer
22211e5bdd Loki: Allow aliasing Loki queries in dashboard (#25706)
* Loki: Add Legend field to query editor

* Loki: Basic test for legend field

* Loki: Mention legend is only for metric queries

* Loki: Fix absolute timerange never updating

(cherry picked from commit 5789f80e14)
2020-07-02 13:07:33 +03:00
Ryan McKinley
d95c494d9d Value Mappings: remove unused operator property from interface (#25989)
(cherry picked from commit 73e82af4df)
2020-07-02 13:07:33 +03:00
Ivana Huckova
6e3a9d7927 Fix href to datasources for NoDataSourceCallToAction in Explore (#25991)
(cherry picked from commit c9751707c5)
2020-07-02 13:07:33 +03:00
Dan Cech
4a68ba7b23 provide license token directly via plugin environment (#25987)
* provide license token directly via plugin environment

(cherry picked from commit b5ca2381bc)
2020-07-02 13:07:33 +03:00
Sofia Papagiannaki
74ca7121eb Fix build-in plugins failing to load in windows (#25982)
(cherry picked from commit bcaa42fbb3)
2020-07-02 13:07:33 +03:00
Marcus Andersson
4d1ea72426 release 7.1.0-beta1 2020-07-01 12:13:45 +02:00
Marcus Andersson
11385c6cfe bumped version to beta1. 2020-07-01 12:11:50 +02:00
225 changed files with 3577 additions and 1858 deletions

View File

@@ -56,7 +56,7 @@ commands:
- run:
name: "Install Grafana build pipeline tool"
command: |
VERSION=0.4.17
VERSION=0.4.19
curl -fLO https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v${VERSION}/grabpl
chmod +x grabpl
mv grabpl /tmp

View File

@@ -0,0 +1,39 @@
---
name: '@grafana/ui component request'
about: Suggest a component for the @grafana/ui package
labels: 'area/grafana/ui'
---
<!--
By using this template you will make it easier for us to make sure that documentation and implementation stays up to date for every component in @grafana/ui
Thank you!
-->
**Why is this component needed**:
<!-- Explain your use case -->
___
- [ ] Is/could it be used in more than one place in Grafana?
**Where is/could it be used?**:
___
- [ ] Post screenshots if possible.
- [ ] It has a single use case.
- [ ] It is/could be used in multiple places.
**Implementation** (Checklist meant for the person implementing the component)
- [ ] Component has a story in Storybook.
- [ ] Props and naming follows [our style guide](https://github.com/grafana/grafana/blob/master/contribute/style-guides/frontend.md).
- [ ] It is extendable (rest props are spread, styles with className work, and so on).
- [ ] Uses [theme for spacing, colors, and so on](https://github.com/grafana/grafana/blob/master/contribute/style-guides/themes.md).
- [ ] Works with both light and dark theme.
**Documentation**
- [ ] Properties are documented.
- [ ] Use cases are described.
- [ ] Code examples for the different use cases.
- [ ] Dos and don'ts.
- [ ] Styling guidelines, specific color usage (if applicable).

View File

@@ -18,6 +18,32 @@ For all items not covered in this guide, refer to the [Microsoft Style Guide](ht
The [codespell](https://github.com/codespell-project/codespell) tool is run for every change to catch common misspellings.
## Inclusive language
This section provides guidelines on how to avoid using charged language in documentation.
### Allowing and blocking
Don't use "whitelist" or "blacklist" when referring to allowing or blocking content or traffic.
* When used as a noun, use "allowlist" or "blocklist".
* When used as a verb, use "allow" or "block"
Example: _To **allow** outgoing traffic, add the IP to the **allowlist**._
### Leader and follower
Don't use "master" or "slave" to describe relationships between nodes or processes.
* Use "leader", "main" or "primary," instead of "master."
* Use "follower" or "secondary," instead of "slave."
### Exceptions
When referring to a configuration or settings used by third-party libraries and technologies outside the Grafana project, prefer the original name to avoid confusion.
For example, use "master" when referring to the default Git branch.
## Grafana-specific style
The following sections provide general guidelines on topics specific to Grafana documentation. Note that for the most part, these are *guidelines*, not rigid rules. If you have questions, ask in the #docs channel of Grafana Slack.
@@ -31,7 +57,7 @@ The following sections provide general guidelines on topics specific to Grafana
* Write in present tense.
- Not: The panel will open.
- Use: The panel opens. Grafana opens the panel.
* Do not use an ampersand (&) as an abbreviation for "and."
* Do not use an ampersand (&) as an abbreviation for "and."
- **Exceptions:** If an ampersand is used in the Grafana UI, then match the UI.
* Avoid using internal slang and jargon in technical documentation.
@@ -156,7 +182,7 @@ One word, not two.
#### open source, open-source
Do not hyphenate when used as an adjective unless the lack of hyphen would cause confusion. For example: _Open source software design is the most open open-source system I can imagine._
Do not hyphenate when used as an adjective unless the lack of hyphen would cause confusion. For example: _Open source software design is the most open open-source system I can imagine._
Do not hyphenate when it is used as a noun. For example: _Open source is the best way to develop software._

View File

@@ -21,9 +21,9 @@ The Azure Monitor data source supports multiple services in the Azure cloud:
- **[Azure Monitor]({{< relref "#querying-the-azure-monitor-service" >}})** is the platform service that provides a single source for monitoring Azure resources.
- **[Application Insights]({{< relref "#querying-the-application-insights-service" >}})** is an extensible Application Performance Management (APM) service for web developers on multiple platforms and can be used to monitor your live web application - it will automatically detect performance anomalies.
- **[Azure Log Analytics]({{< relref "#querying-the-azure-log-analytics-service" >}})** (or Azure Logs) gives you access to log data collected by Azure Monitor.
- **[Application Insights Analytics]({{< relref "#writing-analytics-queries-for-the-application-insights-service" >}})** allows you to query [Application Insights data](https://docs.microsoft.com/en-us/azure/azure-monitor/app/analytics) using the same query language used for Azure Log Analytics.
- **[Application Insights Analytics]({{< relref "#query-the-application-insights-analytics-service" >}})** allows you to query [Application Insights data](https://docs.microsoft.com/en-us/azure/azure-monitor/app/analytics) using the same query language used for Azure Log Analytics.
## Adding the data source
## Add the data source
The data source can access metrics from four different services. You can configure access to the services that you use. It is also possible to use the same credentials for multiple services if that is how you have set it up in Azure AD.
@@ -76,10 +76,13 @@ In the query editor for a panel, after choosing your Azure Monitor data source,
- `Azure Monitor`
- `Application Insights`
- `Azure Log Analytics`
- `Insights Analytics`
The query editor will change depending on which one you pick. Azure Monitor is the default.
The query editor changes depending on which one you pick. Azure Monitor is the default.
## Querying the Azure Monitor service
Starting in Grafana 7.1, Insights Analytics replaced the former edit mode from within Application Insights.
## Query the Azure Monitor service
The Azure Monitor service provides metrics for all the Azure services that you have running. It helps you understand how your applications on Azure are performing and to proactively find issues affecting your applications.
@@ -93,29 +96,34 @@ Examples of metrics that you can get from the service are:
{{< docs-imagebox img="/img/docs/v60/azuremonitor-service-query-editor.png" class="docs-image--no-shadow" caption="Azure Monitor Query Editor" >}}
### Formatting legend keys with aliases for Azure Monitor
As of Grafana 7.1, the query editor allows you to query multiple dimensions for metrics that support them. Metrics that support multiple dimensions are those listed in the [Azure Monitor supported Metrics List](https://docs.microsoft.com/en-us/azure/azure-monitor/platform/metrics-supported) that have one or more values listed in the "Dimension" column for the metric.
### Format legend keys with aliases for Azure Monitor
The default legend formatting for the Azure Monitor API is:
`resourceName{dimensionValue=dimensionName}.metricName`
`metricName{dimensionName=dimensionValue,dimensionTwoName=DimensionTwoValue}`
These can be quite long but this formatting can be changed using aliases. In the Legend Format field, the aliases which are defined below can be combined any way you want.
> **Note:** Before Grafana 7.1, the formatting included the resource name in the default: `resourceName{dimensionName=dimensionValue}.metricName`. As of Grafana 7.1, the resource name has been removed from the default legend.
Azure Monitor Examples:
These can be quite long, but this formatting can be changed by using aliases. In the **Legend Format** field, you can combine the aliases defined below any way you want.
- `dimension: {{dimensionvalue}}`
- `{{resourcegroup}} - {{resourcename}}`
Azure Monitor examples:
- `Blob Type: {{ blobtype }}`
- `{{ resourcegroup }} - {{ resourcename }}`
### Alias patterns for Azure Monitor
- `{{resourcegroup}}` = replaced with the value of the Resource Group
- `{{namespace}}` = replaced with the value of the Namespace (e.g. Microsoft.Compute/virtualMachines)
- `{{resourcename}}` = replaced with the value of the Resource Name
- `{{metric}}` = replaced with metric name (e.g. Percentage CPU)
- `{{dimensionname}}` = replaced with dimension key/label (e.g. blobtype)
- `{{dimensionvalue}}` = replaced with dimension value (e.g. BlockBlob)
- `{{ resourcegroup }}` = replaced with the value of the Resource Group
- `{{ namespace }}` = replaced with the value of the Namespace (e.g. Microsoft.Compute/virtualMachines)
- `{{ resourcename }}` = replaced with the value of the Resource Name
- `{{ metric }}` = replaced with metric name (e.g. Percentage CPU)
- `{{ dimensionname }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with the first dimension's key/label (as sorted by the key/label) (e.g. blobtype)
- `{{ dimensionvalue }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with first dimension's value (as sorted by the key/label) (e.g. BlockBlob)
- `{{ arbitraryDim }}` = *Available in 7.1+* replaced with the value of the corresponding dimension. (e.g. `{{ blobtype }}` becomes BlockBlob)
### Templating with variables for Azure Monitor
### Create template variables for Azure Monitor
Instead of hard-coding things like server, application and sensor name in your metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data being displayed in your dashboard.
@@ -159,29 +167,31 @@ Grafana alerting is supported for the Azure Monitor service. This is not Azure A
{{< docs-imagebox img="/img/docs/v60/azuremonitor-alerting.png" class="docs-image--no-shadow" caption="Azure Monitor Alerting" >}}
## Querying the Application Insights Service
## Query the Application Insights Service
{{< docs-imagebox img="/img/docs/v60/appinsights-service-query-editor.png" class="docs-image--no-shadow" caption="Application Insights Query Editor" >}}
{{< docs-imagebox img="/img/docs/azuremonitor/insights_metrics_multi-dim.png" class="docs-image--no-shadow" caption="Application Insights Query Editor" >}}
As of Grafana 7.1, you can select more than one group by dimension.
### Formatting legend keys with aliases for Application Insights
The default legend formatting is:
`metric/name{group/by="groupbyvalue"}`
`metricName{dimensionName=dimensionValue,dimensionTwoName=DimensionTwoValue}`
In the Legend Format field, the aliases which are defined below can be combined any way you want.
Application Insights Examples:
Application Insights examples:
- `server: {{groupbyvalue}}`
- `city: {{groupbyvalue}}`
- `{{groupbyname}}: {{groupbyvalue}}`
- `city: {{ client/city }}`
- `{{ metric }} [Location: {{ client/countryOrRegion }}, {{ client/city }}]`
### Alias patterns for Application Insights
- `{{groupbyvalue}}` = replaced with the value of the group by
- `{{groupbyname}}` = replaced with the name/label of the group by
- `{{metric}}` = replaced with metric name (e.g. requests/count)
- `{{ groupbyvalue }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with the first dimension's key/label (as sorted by the key/label)
- `{{ groupbyname }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with first dimension's value (as sorted by the key/label) (e.g. BlockBlob)
- `{{ metric }}` = replaced with metric name (e.g. requests/count)
- `{{ arbitraryDim }}` = *Available in 7.1+* replaced with the value of the corresponding dimension. (e.g. `{{ client/city }}` becomes Chicago)
### Filter expressions for Application Insights
@@ -222,30 +232,55 @@ Grafana alerting is supported for Application Insights. This is not Azure Alerts
## Querying the Azure Log Analytics service
Queries are written in the new [Azure Log Analytics (or KustoDB) Query Language](https://docs.loganalytics.io/index). A Log Analytics Query can be formatted as Time Series data or as Table data.
Queries are written in the new [Azure Log Analytics (or KustoDB) Query Language](https://docs.loganalytics.io/index). A Log Analytics query can be formatted as time series data or as table data.
Time Series queries are for the Graph Panel (and other panels like the Single Stat panel) and must contain a datetime column, a metric name column and a value column. Here is an example query that returns the aggregated count grouped by the Category column and grouped by hour:
If your credentials give you access to multiple subscriptions, then choose the appropriate subscription before entering queries.
```
AzureActivity
### Time series queries
Time series queries are for the Graph panel and other panels like the SingleStat panel. Each query must contain at least a datetime column and a numeric value column. The result must also be sorted in ascending order by the datetime column.
Here is an example query that returns the aggregated count grouped by hour:
```kusto
Perf
| where $__timeFilter(TimeGenerated)
| summarize count() by Category, bin(TimeGenerated, 1h)
| summarize count() by bin(TimeGenerated, 1h)
| order by TimeGenerated asc
```
Table queries are mainly used in the Table panel and show a list of columns and rows. This example query returns rows with the 6 specified columns:
A query can also have one or more non-numeric/non-datetime columns, and those columns are considered dimensions and become labels in the response. For example, a query that returns the aggregated count grouped by hour, Computer, and the CounterName:
```kusto
Perf
| where $__timeFilter(TimeGenerated)
| summarize count() by bin(TimeGenerated, 1h), Computer, CounterName
| order by TimeGenerated asc
```
You can also select additional number value columns (with, or without multiple dimensions). For example, getting a count and average value by hour, Computer, CounterName, and InstanceName:
```kusto
Perf
| where $__timeFilter(TimeGenerated)
| summarize Samples=count(), AvgValue=avg(CounterValue)
by bin(TimeGenerated, $__interval), Computer, CounterName, InstanceName
| order by TimeGenerated asc
```
{{< docs-imagebox img="/img/docs/azuremonitor/logs_multi-value_multi-dim.png" class="docs-image--no-shadow" caption="Azure Logs query with multiple values and multiple dimensions" >}}
### Table queries
Table queries are mainly used in the Table panel and show a list of columns and rows. This example query returns rows with the six specified columns:
```kusto
AzureActivity
| where $__timeFilter()
| project TimeGenerated, ResourceGroup, Category, OperationName, ActivityStatus, Caller
| order by TimeGenerated desc
```
If your credentials give you access to multiple subscriptions then choose the appropriate subscription first.
{{< docs-imagebox img="/img/docs/v60/azureloganalytics-service-query-editor.png" class="docs-image--no-shadow" caption="Azure Log Analytics Query Editor" >}}
### Azure Log Analytics macros
To make writing queries easier there are several Grafana macros that can be used in the where clause of a query:
@@ -304,7 +339,7 @@ Example variable queries:
Example of a time series query using variables:
```
```kusto
Perf
| where ObjectName == "$object" and CounterName == "$metric"
| where TimeGenerated >= $__timeFrom() and TimeGenerated <= $__timeTo()
@@ -331,21 +366,11 @@ If you're not currently logged in to the Azure Portal, then the link opens the l
Grafana alerting is supported for Application Insights. This is not Azure Alerts support. Read more about how alerting in Grafana works in [Alerting rules]({{< relref "../../alerting/alerts-overview.md" >}}).
### Writing analytics queries For the Application Insights service
## Query the Application Insights Analytics service
If you change the service type to "Application Insights", the menu icon to the right adds another option, "Toggle Edit Mode". Once clicked, the query edit mode changes to give you a full text area in which to write log analytics queries. (This is identical to how the InfluxDB data source lets you write raw queries.)
If you change the service type to **Insights Analytics**, then a similar editor to the Log Analytics service is available. This service also uses the Kusto language, so the instructions for querying data are identical to [querying the log analytics service]({{< relref "#querying-the-azure-log-analytics-service" >}}), except that you query Application Insights Analytics data instead.
Once a query is written, the column names are automatically parsed out of the response data. You can then select them in the "X-axis", "Y-axis", and "Split On" dropdown menus, or just type them out.
There are some important caveats to remember:
- You'll want to order your y-axis in the query, eg. `order by timestamp asc`. The graph may come out looking bizarre otherwise. It's better to have Microsoft sort it on their side where it's faster, than to implement this in the plugin.
- If you copy a log analytics query, typically they'll end with a render instruction, like `render barchart`. This is unnecessary, but harmless.
- Currently, four default dashboard variables are supported: `$__timeFilter()`, `$__from`, `$__to`, and `$__interval`. If you're searching in timestamped data, replace the beginning of your where clause to `where $__timeFilter()`. Dashboard changes by time region are handled as you'd expect, as long as you leave the name of the `timestamp` column alone. Likewise, `$__interval` will automatically change based on the dashboard's time region _and_ the width of the chart being displayed. Use it in bins, so `bin(timestamp,$__interval)` changes into something like `bin(timestamp,1s)`. Use `$__from` and `$__to` if you just want the formatted dates to be inserted.
- Templated dashboard variables are not yet supported! They will come in a future version.
{{< docs-imagebox img="/img/docs/azuremonitor/insights_analytics_multi-dim.png" class="docs-image--no-shadow" caption="Azure Application Insights Analytics query with multiple dimensions" >}}
## Configure the data source with provisioning

View File

@@ -1,7 +1,7 @@
+++
title = "Using InfluxDB in Grafana"
description = "Guide for using InfluxDB in Grafana"
keywords = ["grafana", "influxdb", "guide"]
keywords = ["grafana", "influxdb", "guide", "flux"]
type = "docs"
aliases = ["/docs/grafana/latest/datasources/influxdb"]
[menu.docs]
@@ -20,14 +20,18 @@ Grafana ships with a feature-rich data source plugin for InfluxDB. The plugin in
2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`.
3. Click the `+ Add data source` button in the top header.
4. Select *InfluxDB* from the *Type* dropdown.
5. Select *InfluxQL* or *Flux* from the `Query Language` list.
> NOTE: If you're not seeing the `Data Sources` link in your side menu it means that your current user does not have the `Admin` role for the current organization.
> **Note:** If you're not seeing the `Data Sources` link in your side menu it means that your current user does not have the `Admin` role for the current organization.
### InfluxQL (classic InfluxDB query)
Name | Description
------------ | -------------
*Name* | The data source name. This is how you refer to the data source in panels and queries.
*Default* | Default data source means that it will be pre-selected for new panels.
*Url* | The HTTP protocol, IP address and port of your InfluxDB API (InfluxDB API port is by default 8086)
*URL* | The HTTP protocol, IP address and port of your InfluxDB API (InfluxDB API port is by default 8086)
*Access* | Server (default) = URL needs to be accessible from the Grafana backend/server, Browser = URL needs to be accessible from the browser.
*Database* | Name of your InfluxDB database
*User* | Name of your database user
@@ -117,6 +121,61 @@ You can switch to raw query mode by clicking hamburger icon and then `Switch edi
You can remove the group by time by clicking on the `time` part and then the `x` icon. You can
change the option `Format As` to `Table` if you want to show raw data in the `Table` panel.
## Flux support
> Starting in v7.1, Grafana can execute Flux queries.
The client supports Flux running on InfluxDB 1.8+. See [1.8 compatibility](https://github.com/influxdata/influxdb-client-go/#influxdb-18-api-compatibility) for more information and connection details.
Name | Description
------------ | -------------
*URL* | The HTTP protocol, IP address and port of your InfluxDB API (InfluxDB 2.0 API port is by default 9999)
*Organization* | The [Influx organization](https://v2.docs.influxdata.com/v2.0/organizations/) that will be used for Flux queries. This is also used to for the `v.organization` query macro
*Token* | The authentication token used for Flux queries. With Influx 2.0, use the [influx authentication token to function](https://v2.docs.influxdata.com/v2.0/security/tokens/create-token/). For influx 1.8, the token is `username:password`
*Default Bucket* | The [Influx bucket](https://v2.docs.influxdata.com/v2.0/organizations/buckets/) that will be used for the `v.defaultBucket` macro in Flux queries
You can use the [Flux query and scripting language](https://www.influxdata.com/products/flux/). Grafana's Flux query editor is a text editor for raw Flux queries with Macro support.
### Supported macros
The macros support copying and pasting from [Chronograph](https://www.influxdata.com/time-series-platform/chronograf/).
Macro example | Description
------------ | -------------
*`v.timeRangeStart`* | Will be replaced by the start of the currently active time selection. For example, *2020-06-11T13:31:00Z*
*`v.timeRangeStop`* | Will be replaced by the end of the currently active time selection. For example, *2020-06-11T14:31:00Z*
*`v.windowPeriod`* | Will be replaced with an interval string compatible with Flux that corresponds to Grafana's calculated interval based on the time range of the active time selection. For example, *5s*
*`v.defaultBucket`* | Will be replaced with the data source configuration's "Default Bucket" setting
*`v.organization`* | Will be replaced with the data source configuration's "Organization" setting
For example, the following query will be interpolated as the query that follows it, with interval and time period values changing according to the active time selection:
Grafana Flux query:
```flux
from(bucket: v.defaultBucket)
|> range(start: v.timeRangeStart, stop: v.timeRangeStop)
|> filter(fn: (r) => r["_measurement"] == "cpu" or r["_measurement"] == "swap")
|> filter(fn: (r) => r["_field"] == "usage_system" or r["_field"] == "free")
|> aggregateWindow(every: v.windowPeriod, fn: mean)
|> yield(name: "mean")
```
Interpolated query sent to Influx:
```flux
from(bucket: "grafana")
|> range(start: 2020-06-11T13:59:07Z, stop: 2020-06-11T14:59:07Z)
|> filter(fn: (r) => r["_measurement"] == "cpu" or r["_measurement"] == "swap")
|> filter(fn: (r) => r["_field"] == "usage_system" or r["_field"] == "free")
|> aggregateWindow(every: 2s, fn: mean)
|> yield(name: "mean")
```
You can view the interpolated version of a query with the Query Inspector.
## Querying Logs (BETA)
> Only available in Grafana v6.3+.

View File

@@ -47,9 +47,10 @@ You can use this functionality to link to your tracing backend directly from you
{{< docs-imagebox img="/img/docs/v65/loki_derived_fields.png" class="docs-image--no-shadow" caption="Screenshot of the derived fields configuration" >}}
Each derived field consists of:
- **Name:** Shown in the log details as a label.
- **Regex:** A Regex pattern that runs on the log message and captures part of it as the value of the new field. Can only contain a single capture group.
- **URL**: A URL template used to construct a link next to the field value in log details. Use special `${__value.raw}` value in your template to interpolate the real field value into your URL template.
- **Name -** Shown in the log details as a label.
- **Regex -** A Regex pattern that runs on the log message and captures part of it as the value of the new field. Can only contain a single capture group.
- **URL/query -** If the link is external, then enter the full link URL. If the link is internal link, then this input serves as query for the target data source. In both cases, you can interpolate the value from the field with `${__value.raw }` macro.
- **Internal link -** Select if the link is internal or external. In case of internal link, a data source selector allows you to select the target data source. Only tracing data sources are supported.
You can use a debug section to see what your fields extract and how the URL is interpolated. Click **Show example log message** to show the text area where you can enter a log message.
{{< docs-imagebox img="/img/docs/v65/loki_derived_fields_debug.png" class="docs-image--no-shadow" caption="Screenshot of the derived fields debugging" >}}
@@ -59,7 +60,7 @@ The new field with the link shown in log details:
## Querying Logs
Querying and displaying log data from Loki is available via [Explore]({{< relref "../explore" >}}), and with the [logs panel]({{< relref "../../panels/visualizations/logs-panel.md" >}}) in dashboards. Select the Loki data source, and then enter a log query to display your logs.
Querying and displaying log data from Loki is available via [Explore]({{< relref "../explore" >}}), and with the [logs panel]({{< relref "../../panels/visualizations/logs-panel.md" >}}) in dashboards. Select the Loki data source, and then enter a [LogQL](https://github.com/grafana/loki/blob/master/docs/logql.md) query to display your logs.
### Log Queries
@@ -193,6 +194,7 @@ datasources:
derivedFields:
# Field with internal link pointing to data source in Grafana.
# Right now, Grafana supports only Jaeger and Zipkin data sources as link targets.
# datasourceUid value can be anything, but it should be unique across all defined data source uids.
- datasourceUid: my_jaeger_uid
matcherRegex: "traceID=(\\w+)"
name: TraceID

View File

@@ -0,0 +1,101 @@
+++
title = "What's New in Grafana v7.1"
description = "Feature and improvement highlights for Grafana v7.1"
keywords = ["grafana", "new", "documentation", "7.1", "release notes"]
type = "docs"
[menu.docs]
name = "Version 7.1"
identifier = "v7.1"
parent = "whatsnew"
weight = -16
+++
# What's new in Grafana v7.1
This topic includes the release notes for the Grafana v7.1, which is currently in beta. For all details, read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md).
The main highlights are:
- [**Query history search**]({{< relref "#query-history-search" >}})
- [**Provisioning of apps**]({{< relref "#provisioning-of-apps" >}})
- [**Azure Monitor Datasource**]({{< relref "#azure-monitor-datasource" >}})
- [**Influx Datasource**]({{< relref "#influx-datasource" >}})
- [**Deep linking for Google Cloud Monitoring (formerly named Google Stackdriver) datasource**]({{< relref "#deep-linking-for-google-cloud-monitoring-formerly-named-google-stackdriver-datasource" >}})
- [**Transforms**]({{< relref "#transforms" >}})
- [**Stat panel text mode**]({{< relref "#stat-panel-text-mode" >}})
- [**Unification of Explore modes**]({{< relref "#explore-modes-unified" >}})
- [**Grafana Enterprise features**]({{< relref "#grafana-enterprise-features" >}})
- [**Support for HashiCorp Vault**]({{< relref "#support-for-hashicorp-vault" >}})
- [**Internal links for Elastic**]({{< relref "#internal-links-for-elastic" >}})
## Query history search
In Grafana v7.1 we are introducing search functionality in Query history. You can search across queries and your comments. It is especially useful in combination with a time filter and data source filter. Read more about Query history [here]({{<relref "../features/explore/index.md#query-history" >}}).
{{< docs-imagebox img="/img/docs/v71/query_history_search.gif" max-width="800px" caption="Query history search" >}}
## Provisioning of apps
Grafana v7.1 adds support for provisioning of app plugins. This allows app plugins to be configured and enabled/disabled using configuration files. Read more about provisioning of app plugins [here]({{< relref "../administration/provisioning.md#plugins" >}}).
## Azure Monitor Datasource
Support for multiple dimensions has been added to all services in the Azure Monitor datasource. This means you can now group by more than one dimension with time series queries. With the Kusto based services, Log Analytics and Application Insights Analytics, you can also select multiple metrics as well as multiple dimensions.
Additionally, the “Raw Edit” mode for Application Insights Analytics has been replaced with a new service in the drop down for the datasource and is called “Insights Analytics”. The new query editor behaves in the same way as Log Analytics.
## Influx Datasource
Support for Flux and Influx v2 has been added.
## Deep linking for Google Cloud Monitoring (formerly named Google Stackdriver) datasource
A new feature in Grafana 7.1 is [deep linking from Grafana panels to the Metrics Explorer in Google Cloud Console]({{<relref "../features/datasources/cloudmonitoring.md#deep-linking-from-grafana-panels-to-the-metrics-explorer-in-google-cloud-console">}}). Click on a time series in the panel to see a context menu with a link to View in Metrics explorer in Google Cloud Console. Clicking that link opens the Metrics explorer in the Monitoring Google Cloud Console and runs the query from the Grafana panel there.
## Internal links for Elastic
You can now create links in Elastic configuration that point to another datasource similar to existing feature in
Loki. This allows you to link traceID from your logs to tracing data source in Grafana.
## Transformations
We have added a new **Merge on time** transform that can combine many time series or table results. Unlike the join transform, this combines the result into one table even when the time values do not align or match.
## Stat panel text mode
The [stat panel]({{<relref "../panels/visualizations/stat-panel.md#text-mode" >}}) has a new **Text mode** option to control what text to show.
By default, the Stat panel displays:
- Just the value for a single series or field.
- Both the value and name for multiple series or fields.
You can use the Text mode option to control what text the panel renders. If the value is not important, only name and color is, then change the `Text mode` to **Name**. The value will still be used to determine color and is displayed in a tooltip.
{{< docs-imagebox img="/img/docs/v71/stat-panel-text-modes.png" max-width="1025px" caption="Stat panel" >}}
## Explore modes unified
Grafana 7.1 includes a major change to Explore: it removes the query mode selector.
Many data sources tell Grafana whether a response contains time series data or logs data. Using this information, Explore chooses which visualization to use for that data. This means that you don't need to switch back and forth between Logs and Metrics modes depending on the type of query that you want to make.
## Grafana Enterprise features
General features are included in the Grafana Enterprise edition software.
### Support for HashiCorp Vault
You can now use HashiCorp Vault to get secrets for configuration and provisioning of Grafana Enterprise. Learn more about this feature [here]({{<relref "../enterprise/vault.md">}}).
### Support for monthly in reports
With Grafana Enterprise 7.1 you can configure reports to be generated on a [monthly schedule]({{<relref "../enterprise/reporting.md#scheduling">}}).
## Upgrading
See [upgrade notes]({{<relref "../installation/upgrading.md">}}).
## Changelog
Check out [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) for a complete list of new features, changes, and bug fixes.

View File

@@ -11,7 +11,7 @@ weight = 300
This page explains what transformations in Grafana are and how to use them.
> **Note:** This documentation refers to a Grafana 7.0 beta feature. This documentation will be frequently updated to reflect updates to the feature, and it will probably be broken into smaller sections when the feature moves out of beta.
> **Note:** This documentation refers to a Grafana 7.0 feature. This documentation will be frequently updated to reflect updates to the feature, and it will probably be broken into smaller sections when the feature moves out of beta.
Transformations process the result set before its passed to the visualization. You access transformations in the Transform tab of the Grafana panel editor.
@@ -74,6 +74,7 @@ Grafana comes with the following transformations:
- [Join by field (outer join)](#join-by-field-outer-join)
- [Add field from calculation](#add-field-from-calculation)
- [Labels to fields](#labels-to-fields)
- [Series to rows](#series-to-rows)
- [Debug transformations](#debug-transformations)
Keep reading for detailed descriptions of each type of transformation and the options available for each, as well as suggestions on how to use them.
@@ -96,25 +97,33 @@ After I apply the transformation, there is no time value and each column has bee
### Merge
Use this transformation to combine the result from multiple queries into one single result based on the time field. This is helpful when using the table panel visualization.
> **Note:** This documentation refers to a Grafana 7.1 feature.
In the example below, we are visualizing multiple queries returning table data before applying the transformation.
Use this transformation to combine the result from multiple queries into one single result. This is helpful when using the table panel visualization. Values that can be merged are combined into the same row. Values are mergeable if the shared fields contain the same data.
{{< docs-imagebox img="/img/docs/transformations/table-data-before-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
In the example below, we have two queries returning table data. It is visualized as two separate tables before applying the transformation.
Here is the same example after applying the merge transformation.
Query A:
{{< docs-imagebox img="/img/docs/transformations/table-data-after-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
| Time | Job | Uptime |
|---------------------|---------|-----------|
| 2020-07-07 11:34:20 | node | 25260122 |
| 2020-07-07 11:24:20 | postgre | 123001233 |
If any of the queries return time series data, then a `Metric` column containing the name of the query is added. You can customize this value by defining `Label` on the source query.
Query B:
In the example below, we are visualizing multiple queries returning time series data before applying the transformation.
| Time | Job | Errors |
|---------------------|---------|--------|
| 2020-07-07 11:34:20 | node | 15 |
| 2020-07-07 11:24:20 | postgre | 5 |
{{< docs-imagebox img="/img/docs/transformations/time-series-before-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
Here is the result after applying the `Merge` transformation.
Here is the same example after applying the merge transformation.
| Time | Job | Errors | Uptime |
|---------------------|---------|--------|-----------|
| 2020-07-07 11:34:20 | node | 15 | 25260122 |
| 2020-07-07 11:24:20 | postgre | 5 | 123001233 |
{{< docs-imagebox img="/img/docs/transformations/time-series-after-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
### Filter by name
@@ -213,6 +222,43 @@ After I apply the transformation, my labels appear in the table as fields.
{{< docs-imagebox img="/img/docs/transformations/labels-to-fields-after-7-0.png" class="docs-image--no-shadow" max-width= "1100px" >}}
## Series to rows
> **Note:** This documentation refers to a Grafana 7.1 feature.
Use this transformation to combine the result from multiple time series data queries into one single result. This is helpful when using the table panel visualization.
The result from this transformation will contain three columns: `Time`, `Metric`, and `Value`. The `Metric` column is added so you easily can see from which query the metric originates from. Customize this value by defining `Label` on the source query.
In the example below, we have two queries returning time series data. It is visualized as two separate tables before applying the transformation.
Query A:
| Time | Temperature |
|---------------------|-------------|
| 2020-07-07 11:34:20 | 25 |
| 2020-07-07 10:31:22 | 22 |
| 2020-07-07 09:30:05 | 19 |
Query B:
| Time | Humidity |
|---------------------|----------|
| 2020-07-07 11:34:20 | 24 |
| 2020-07-07 10:32:20 | 29 |
| 2020-07-07 09:30:57 | 33 |
Here is the result after applying the `Series to rows` transformation.
| Time | Metric | Value |
|---------------------|-------------|-------|
| 2020-07-07 11:34:20 | Temperature | 25 |
| 2020-07-07 11:34:20 | Humidity    | 24    |
| 2020-07-07 10:32:20 | Humidity | 29 |
| 2020-07-07 10:31:22 | Temperature | 22 |
| 2020-07-07 09:30:57 | Humidity | 33 |
| 2020-07-07 09:30:05 | Temperature | 19 |
## Debug transformations
To see the input and the output result sets of the transformation, click the bug icon on the right side of the transformation row.

View File

@@ -32,6 +32,17 @@ Use these settings to refine your visualization.
- **Points -** Display points for values.
- **Point radius -** Controls how large the points are.
### Stacking and null value
- **Stack -** Each series is stacked on top of another.
- **Percent -** Available when **Stack** is selected. Each series is drawn as a percentage of the total of all series.
- **Null value -** How null values are displayed. _This is a very important setting._ See note below.
- **connected -** If there is a gap in the series, meaning a null value or values, then the line will skip the gap and connect to the next non-null value.
- **null -** (default) If there is a gap in the series, meaning a null value, then the line in the graph will be broken and show the gap.
- **null as zero -** If there is a gap in the series, meaning a null value, then it will be displayed as a zero value in the graph panel.
> **Note:** If you are monitoring a server's CPU load and the load reaches 100%, then the server will lock up and the agent sending statistics will not be able to collect the load statistic. This leads to a gap in the metrics and having the default as _null_ means Grafana will show the gaps and indicate that something is wrong. If this is set to _connected_, then it would be easy to miss this signal.
### Hover tooltip
Use these settings to change the appearance of the tooltip that appears when you hover your cursor over the graph visualization.
@@ -44,17 +55,6 @@ Use these settings to change the appearance of the tooltip that appears when you
- **Increasing -** The series in the hover tooltip are sorted by value and in increasing order, with the lowest value at the top of the list.
- **Decreasing -** The series in the hover tooltip are sorted by value and in decreasing order, with the highest value at the top of the list.
### Stacking and null value
- **Stack -** Each series is stacked on top of another.
- **Percent -** Available when **Stack** is selected. Each series is drawn as a percentage of the total of all series.
- **Null value -** How null values are displayed. _This is a very important setting._ See note below.
- **connected -** If there is a gap in the series, meaning a null value or values, then the line will skip the gap and connect to the next non-null value.
- **null -** (default) If there is a gap in the series, meaning a null value, then the line in the graph will be broken and show the gap.
- **null as zero -** If there is a gap in the series, meaning a null value, then it will be displayed as a zero value in the graph panel.
> **Note:** If you are monitoring a server's CPU load and the load reaches 100%, then the server will lock up and the agent sending statistics will not be able to collect the load statistic. This leads to a gap in the metrics and having the default as _null_ means Grafana will show the gaps and indicate that something is wrong. If this is set to _connected_, then it would be easy to miss this signal.
## Series overrides
Series overrides allow a series in a graph panel to be rendered differently from the others. You can customize display options on a per-series bases or by using regex rules. For example, one series can have a thicker line width to make it stand out or be moved to the right Y-axis.

2
go.mod
View File

@@ -30,7 +30,7 @@ require (
github.com/gorilla/websocket v1.4.1
github.com/gosimple/slug v1.4.2
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4
github.com/grafana/grafana-plugin-sdk-go v0.70.0
github.com/grafana/grafana-plugin-sdk-go v0.75.0
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd
github.com/hashicorp/go-plugin v1.2.2
github.com/hashicorp/go-version v1.1.0

4
go.sum
View File

@@ -148,8 +148,8 @@ github.com/gosimple/slug v1.4.2 h1:jDmprx3q/9Lfk4FkGZtvzDQ9Cj9eAmsjzeQGp24PeiQ=
github.com/gosimple/slug v1.4.2/go.mod h1:ER78kgg1Mv0NQGlXiDe57DpCyfbNywXXZ9mIorhxAf0=
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4 h1:SPdxCL9BChFTlyi0Khv64vdCW4TMna8+sxL7+Chx+Ag=
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4/go.mod h1:nc0XxBzjeGcrMltCDw269LoWF9S8ibhgxolCdA1R8To=
github.com/grafana/grafana-plugin-sdk-go v0.70.0 h1:tbwf0KMp8QEQQYF3bDBOOv/npegD6YP8T90OWbLr7n4=
github.com/grafana/grafana-plugin-sdk-go v0.70.0/go.mod h1:NvxLzGkVhnoBKwzkst6CFfpMFKwAdIUZ1q8ssuLeF60=
github.com/grafana/grafana-plugin-sdk-go v0.75.0 h1:b0Ugpn88VNx17Q8MA1MsWkD3ddBEbRvFxzid+Nka3F0=
github.com/grafana/grafana-plugin-sdk-go v0.75.0/go.mod h1:NvxLzGkVhnoBKwzkst6CFfpMFKwAdIUZ1q8ssuLeF60=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0 h1:0IKlLyQ3Hs9nDaiK5cSHAGmcQEIC8l2Ts1u6x5Dfrqg=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0/go.mod h1:mJzapYve32yjrKlk9GbyCZHuPgZsrbyIbyKhSzOpg6s=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=

View File

@@ -2,5 +2,5 @@
"npmClient": "yarn",
"useWorkspaces": true,
"packages": ["packages/*"],
"version": "7.1.0-pre.0"
"version": "7.1.0"
}

View File

@@ -3,7 +3,7 @@
"license": "Apache-2.0",
"private": true,
"name": "grafana",
"version": "7.1.0-pre",
"version": "7.1.0",
"repository": "github:grafana/grafana",
"scripts": {
"api-tests": "jest --notify --watch --config=devenv/e2e-api-tests/jest.js",
@@ -267,7 +267,7 @@
"regenerator-runtime": "0.13.3",
"reselect": "4.0.0",
"rst2html": "github:thoward/rst2html#990cb89",
"rxjs": "6.5.5",
"rxjs": "6.6.0",
"search-query-parser": "1.5.4",
"slate": "0.47.8",
"slate-plain-serializer": "0.7.10",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/data",
"version": "7.1.0-pre.0",
"version": "7.1.0",
"description": "Grafana Data Library",
"keywords": [
"typescript"
@@ -26,7 +26,7 @@
"@braintree/sanitize-url": "4.0.0",
"apache-arrow": "0.16.0",
"lodash": "4.17.15",
"rxjs": "6.5.5",
"rxjs": "6.6.0",
"xss": "1.0.6"
},
"devDependencies": {

View File

@@ -21,7 +21,7 @@ const buildCjsPackage = ({ env }) => {
globals: {},
},
],
external: ['lodash', 'apache-arrow'], // Use Lodash & arrow from grafana
external: ['lodash', 'rxjs', 'apache-arrow'], // Use Lodash, rxjs & arrow from grafana
plugins: [
json({
include: ['../../node_modules/moment-timezone/data/packed/latest.json'],

View File

@@ -0,0 +1,12 @@
import { DataFrame, FieldType } from '../types/dataFrame';
/**
 * Returns true if a single frame looks like a time series:
 * at most two fields, one of which is a time field.
 *
 * Note: a frame with more than two fields is rejected outright,
 * even if it contains a time field.
 */
export const isTimeSerie = (frame: DataFrame): boolean => {
  if (frame.fields.length > 2) {
    return false;
  }
  // some() is the idiomatic boolean existence check (vs. !!find()).
  return frame.fields.some(field => field.type === FieldType.time);
};
// Returns true only when every frame in the collection is a time series.
export const isTimeSeries = (data: DataFrame[]): boolean => {
  return data.every(frame => isTimeSerie(frame));
};

View File

@@ -152,8 +152,8 @@ describe('Format value', () => {
it('should return formatted value if there are no matching value mappings', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
{ id: 0, text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 1, text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
];
const value = '10';
const instance = getDisplayProcessorFromConfig({ decimals: 1, mappings: valueMappings });
@@ -186,8 +186,8 @@ describe('Format value', () => {
it('should return mapped value if there are matching value mappings', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = '11';
const instance = getDisplayProcessorFromConfig({ decimals: 1, mappings: valueMappings });
@@ -196,9 +196,7 @@ describe('Format value', () => {
});
it('should return mapped value and leave numeric value in tact if value mapping maps to empty string', () => {
const valueMappings: ValueMapping[] = [
{ id: 1, operator: '', text: '', type: MappingType.ValueToText, value: '1' },
];
const valueMappings: ValueMapping[] = [{ id: 1, text: '', type: MappingType.ValueToText, value: '1' }];
const value = '1';
const instance = getDisplayProcessorFromConfig({ decimals: 1, mappings: valueMappings });

View File

@@ -545,7 +545,7 @@ describe('getLinksSupplier', () => {
expect.objectContaining({
title: 'testDS',
href:
'/explore?left={"datasource":"testDS","queries":["12345"],"mode":"Metrics","ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}',
'/explore?left={"datasource":"testDS","queries":["12345"],"ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}',
onClick: undefined,
})
);

View File

@@ -8,10 +8,11 @@ import { filterFramesByRefIdTransformer } from './transformers/filterByRefId';
import { orderFieldsTransformer } from './transformers/order';
import { organizeFieldsTransformer } from './transformers/organize';
import { seriesToColumnsTransformer } from './transformers/seriesToColumns';
import { seriesToRowsTransformer } from './transformers/seriesToRows';
import { renameFieldsTransformer } from './transformers/rename';
import { labelsToFieldsTransformer } from './transformers/labelsToFields';
import { ensureColumnsTransformer } from './transformers/ensureColumns';
import { mergeTransformer } from './transformers/merge/merge';
import { mergeTransformer } from './transformers/merge';
export const standardTransformers = {
noopTransformer,
@@ -25,6 +26,7 @@ export const standardTransformers = {
reduceTransformer,
calculateFieldTransformer,
seriesToColumnsTransformer,
seriesToRowsTransformer,
renameFieldsTransformer,
labelsToFieldsTransformer,
ensureColumnsTransformer,

View File

@@ -8,6 +8,7 @@ export enum DataTransformerID {
rename = 'rename',
calculateField = 'calculateField',
seriesToColumns = 'seriesToColumns',
seriesToRows = 'seriesToRows',
merge = 'merge',
labelsToFields = 'labelsToFields',
filterFields = 'filterFields',

View File

@@ -1,9 +1,9 @@
import { mockTransformationsRegistry } from '../../../utils/tests/mockTransformationsRegistry';
import { DataTransformerConfig, Field, FieldType } from '../../../types';
import { DataTransformerID } from '../ids';
import { toDataFrame } from '../../../dataframe';
import { transformDataFrame } from '../../transformDataFrame';
import { ArrayVector } from '../../../vector';
import { mockTransformationsRegistry } from '../../utils/tests/mockTransformationsRegistry';
import { DataTransformerConfig, Field, FieldType } from '../../types';
import { DataTransformerID } from './ids';
import { toDataFrame } from '../../dataframe';
import { transformDataFrame } from '../transformDataFrame';
import { ArrayVector } from '../../vector';
import { mergeTransformer, MergeTransformerOptions } from './merge';
describe('Merge multipe to single', () => {
@@ -35,12 +35,11 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [seriesA, seriesB]);
const expected: Field[] = [
createField('Time', FieldType.time, [1000, 2000]),
createField('Metric', FieldType.string, ['A', 'B']),
createField('Value', FieldType.number, [1, -1]),
createField('Time', FieldType.time, [2000, 1000]),
createField('Temp', FieldType.number, [-1, 1]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine two series with multiple values into one', () => {
@@ -67,12 +66,11 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [seriesA, seriesB]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5]),
createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine three series into one', () => {
@@ -107,12 +105,11 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [seriesA, seriesB, seriesC]);
const expected: Field[] = [
createField('Time', FieldType.time, [500, 1000, 2000]),
createField('Metric', FieldType.string, ['C', 'A', 'B']),
createField('Value', FieldType.number, [2, 1, -1]),
createField('Time', FieldType.time, [2000, 1000, 500]),
createField('Temp', FieldType.number, [-1, 1, 2]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine one serie and two tables into one table', () => {
@@ -149,13 +146,12 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [tableA, seriesB, tableB]);
const expected: Field[] = [
createField('Time', FieldType.time, [500, 1000, 1000]),
createField('Metric', FieldType.string, ['C', 'A', 'B']),
createField('Temp', FieldType.number, [2, 1, -1]),
createField('Humidity', FieldType.number, [5, 10, null]),
createField('Time', FieldType.time, [1000, 1000, 500]),
createField('Temp', FieldType.number, [1, -1, 2]),
createField('Humidity', FieldType.number, [10, null, 5]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine one serie and two tables with ISO dates into one table', () => {
@@ -192,13 +188,12 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [tableA, seriesB, tableC]);
const expected: Field[] = [
createField('Time', FieldType.time, ['2019-09-01T11:10:23Z', '2019-10-01T11:10:23Z', '2019-11-01T11:10:23Z']),
createField('Metric', FieldType.string, ['B', 'A', 'C']),
createField('Temp', FieldType.number, [-1, 1, 2]),
createField('Humidity', FieldType.number, [null, 10, 5]),
createField('Time', FieldType.time, ['2019-11-01T11:10:23Z', '2019-10-01T11:10:23Z', '2019-09-01T11:10:23Z']),
createField('Temp', FieldType.number, [2, 1, -1]),
createField('Humidity', FieldType.number, [5, 10, null]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine three tables with multiple values into one', () => {
@@ -235,14 +230,15 @@ describe('Merge multipe to single', () => {
});
const result = transformDataFrame([cfg], [tableA, tableB, tableC]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 100, 124, 125, 126, 149, 150, 200]),
createField('Temp', FieldType.number, [1, -1, 1, 4, 2, 3, 5, 4, 5]),
createField('Humidity', FieldType.number, [10, null, 22, 25, null, null, 30, 14, 55]),
createField('Enabled', FieldType.boolean, [null, true, null, null, false, true, null, null, null]),
createField('Time', FieldType.time, [200, 150, 149, 126, 125, 124, 100, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 5, 3, 2, 4, 1, -1, 1]),
createField('Humidity', FieldType.number, [55, 14, 30, null, null, 25, 10, null, 22]),
createField('Enabled', FieldType.boolean, [null, null, null, true, false, null, null, true, null]),
];
expect(result[0].fields).toMatchObject(expected);
expect(unwrap(result[0].fields)).toEqual(expected);
});
it('combine two time series, where first serie fields has displayName, into one', () => {
@@ -269,13 +265,14 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [serieA, serieB]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5]),
createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1]),
];
expect(result[0].fields[2].config).toEqual({});
expect(result[0].fields).toMatchObject(expected);
const fields = unwrap(result[0].fields);
expect(fields[1].config).toEqual({});
expect(fields).toEqual(expected);
});
it('combine two time series, where first serie fields has units, into one', () => {
@@ -302,13 +299,14 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [serieA, serieB]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5], { units: 'celsius' }),
createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1], { units: 'celsius' }),
];
expect(result[0].fields[2].config).toEqual({ units: 'celsius' });
expect(result[0].fields).toMatchObject(expected);
const fields = unwrap(result[0].fields);
expect(fields[1].config).toEqual({ units: 'celsius' });
expect(fields).toEqual(expected);
});
it('combine two time series, where second serie fields has units, into one', () => {
@@ -335,16 +333,28 @@ describe('Merge multipe to single', () => {
const result = transformDataFrame([cfg], [serieA, serieB]);
const expected: Field[] = [
createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5]),
createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1]),
];
expect(result[0].fields[2].config).toEqual({});
expect(result[0].fields).toMatchObject(expected);
const fields = unwrap(result[0].fields);
expect(fields[1].config).toEqual({});
expect(fields).toEqual(expected);
});
});
const createField = (name: string, type: FieldType, values: any[], config = {}): Field => {
return { name, type, values: new ArrayVector(values), config, labels: undefined };
};
const unwrap = (fields: Field[]): Field[] => {
return fields.map(field =>
createField(
field.name,
field.type,
field.values.toArray().map((value: any) => value),
field.config
)
);
};

View File

@@ -0,0 +1,216 @@
import { DataTransformerID } from './ids';
import { DataTransformerInfo } from '../../types/transformations';
import { DataFrame, Field, FieldType } from '../../types/dataFrame';
import { omit } from 'lodash';
import { ArrayVector } from '../../vector/ArrayVector';
import { MutableDataFrame, sortDataFrame } from '../../dataframe';
// Builds a disambiguating key suffix from the fields on which two rows disagree;
// an empty string means the rows cannot be told apart.
type MergeDetailsKeyFactory = (existing: Record<string, any>, value: Record<string, any>) => string;

// The merge transformer currently exposes no configurable options.
export interface MergeTransformerOptions {}
/**
 * Merges multiple series/tables into a single frame by joining rows on the
 * set of field names shared by ALL input frames. Rows with the same key are
 * merged unless their remaining values conflict, in which case the conflicting
 * row is kept separately. If the result has a time field, it is sorted by
 * time in descending order.
 */
export const mergeTransformer: DataTransformerInfo<MergeTransformerOptions> = {
  id: DataTransformerID.merge,
  name: 'Merge series/tables',
  description: 'Merges multiple series/tables into a single serie/table',
  defaultOptions: {},
  transformer: (options: MergeTransformerOptions) => {
    return (data: DataFrame[]) => {
      // Nothing to merge for zero or one frame.
      if (!Array.isArray(data) || data.length <= 1) {
        return data;
      }

      const fieldByName = new Set<string>();
      // fieldIndexByName[fieldName][frameIndex] -> field index within that frame.
      const fieldIndexByName: Record<string, Record<number, number>> = {};
      // Names of fields present in every frame; they form the join key.
      const fieldNamesForKey: string[] = [];
      const dataFrame = new MutableDataFrame();

      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) {
          const field = frame.fields[fieldIndex];

          // First time we see this field name: add its structure to the target frame.
          if (!fieldByName.has(field.name)) {
            dataFrame.addField(copyFieldStructure(field));
            fieldByName.add(field.name);
          }

          fieldIndexByName[field.name] = fieldIndexByName[field.name] || {};
          fieldIndexByName[field.name][frameIndex] = fieldIndex;

          // Key fields can only be determined once all frames have been scanned,
          // i.e. while processing the last frame.
          if (data.length - 1 !== frameIndex) {
            continue;
          }

          if (Object.keys(fieldIndexByName[field.name]).length === data.length) {
            fieldNamesForKey.push(field.name);
          }
        }
      }

      // Without a shared field there is nothing to join on.
      if (fieldNamesForKey.length === 0) {
        return data;
      }

      const dataFrameIndexByKey: Record<string, number> = {};
      const keyFactory = createKeyFactory(data, fieldIndexByName, fieldNamesForKey);
      const detailsKeyFactory = createDetailsKeyFactory(fieldByName, fieldNamesForKey);
      const valueMapper = createValueMapper(data, fieldByName, fieldIndexByName);

      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let valueIndex = 0; valueIndex < frame.length; valueIndex++) {
          const key = keyFactory(frameIndex, valueIndex);
          const value = valueMapper(frameIndex, valueIndex);
          mergeOrAdd(key, value, dataFrame, dataFrameIndexByKey, detailsKeyFactory);
        }
      }

      const timeIndex = dataFrame.fields.findIndex(field => field.type === FieldType.time);
      // BUG FIX: findIndex returns -1 (still a number) when no time field exists,
      // so the previous `typeof timeIndex === 'number'` guard was always true and
      // could call sortDataFrame with an invalid index. Only sort when found.
      if (timeIndex >= 0) {
        return [sortDataFrame(dataFrame, timeIndex, true)];
      }
      return [dataFrame];
    };
  },
};
// Clones a field's structure (name, type, ...) while dropping its data and
// per-frame state: the copy gets an empty value vector and a config stripped
// of any frame-specific displayName override.
const copyFieldStructure = (field: Field): Field => {
  const structure = omit(field, ['values', 'state', 'labels', 'config']);
  const config = omit(field.config, 'displayName');

  return {
    ...structure,
    config: { ...config },
    values: new ArrayVector(),
  };
};
// Returns a function that builds the join key for a given row by concatenating
// the values of all key fields of that frame. The per-frame list of key field
// indices is pre-computed once up front.
const createKeyFactory = (
  data: DataFrame[],
  fieldPointerByName: Record<string, Record<string, number>>,
  keyFieldNames: string[]
) => {
  const factoryIndex: Record<string, number[]> = {};

  for (const fieldName of keyFieldNames) {
    for (const frameIndex of Object.keys(fieldPointerByName[fieldName])) {
      if (!factoryIndex[frameIndex]) {
        factoryIndex[frameIndex] = [];
      }
      factoryIndex[frameIndex].push(fieldPointerByName[fieldName][frameIndex]);
    }
  }

  return (frameIndex: number, valueIndex: number): string => {
    let key = '';
    for (const fieldIndex of factoryIndex[frameIndex]) {
      key += data[frameIndex].fields[fieldIndex].values.get(valueIndex);
    }
    return key;
  };
};
// Returns a function that derives a key suffix from the non-key fields on
// which two rows disagree. Fields missing from either row, or with equal
// values, contribute nothing; an empty result means the rows are identical
// as far as the known fields can tell.
const createDetailsKeyFactory = (fieldByName: Set<string>, fieldNamesForKey: string[]): MergeDetailsKeyFactory => {
  const keyNames = new Set<string>(fieldNamesForKey);
  const checkOrder: string[] = [];

  for (const fieldName of fieldByName) {
    if (!keyNames.has(fieldName)) {
      checkOrder.push(fieldName);
    }
  }

  return (existing: Record<string, any>, value: Record<string, any>) => {
    let key = '';
    for (const fieldName of checkOrder) {
      if (typeof existing[fieldName] === 'undefined') {
        continue;
      }
      if (typeof value[fieldName] === 'undefined') {
        continue;
      }
      if (existing[fieldName] === value[fieldName]) {
        continue;
      }
      key += value[fieldName];
    }
    return key;
  };
};
// Returns a function that converts one row of one frame into a plain
// { fieldName: value } record, silently skipping field names that do not
// exist in that particular frame.
const createValueMapper = (
  data: DataFrame[],
  fieldByName: Set<string>,
  fieldIndexByName: Record<string, Record<number, number>>
) => {
  return (frameIndex: number, valueIndex: number) => {
    const value: Record<string, any> = {};

    for (const fieldName of fieldByName) {
      const frameToFieldIndex = fieldIndexByName[fieldName];
      if (!frameToFieldIndex) {
        continue;
      }

      const fieldIndex = frameToFieldIndex[frameIndex];
      if (typeof fieldIndex !== 'number') {
        continue;
      }

      const frame = data[frameIndex];
      if (!frame?.fields) {
        continue;
      }

      const field = frame.fields[fieldIndex];
      if (!field?.values) {
        continue;
      }

      value[fieldName] = field.values.get(valueIndex);
    }

    return value;
  };
};
// Two rows can be merged when they do not disagree on any shared field.
// Fields that are undefined or null on the existing row never block a merge;
// only a defined, non-null value that differs does.
const isMergable = (existing: Record<string, any>, value: Record<string, any>): boolean => {
  for (const prop in value) {
    const current = existing[prop];
    if (typeof current === 'undefined' || current === null) {
      continue;
    }
    if (current !== value[prop]) {
      return false;
    }
  }
  return true;
};
// Inserts the given row into the frame, or merges it into an existing row that
// shares the same key. When the values conflict with the existing row, the key
// is extended with a details suffix and the insert is retried recursively so
// the conflicting row is stored separately.
const mergeOrAdd = (
  key: string,
  value: Record<string, any>,
  dataFrame: MutableDataFrame,
  dataFrameIndexByKey: Record<string, number>,
  detailsKeyFactory: MergeDetailsKeyFactory
) => {
  // First time we see this key: append a new row and remember where it lives.
  if (typeof dataFrameIndexByKey[key] === 'undefined') {
    dataFrame.add(value);
    dataFrameIndexByKey[key] = dataFrame.length - 1;
    return;
  }

  const dataFrameIndex = dataFrameIndexByKey[key];
  const existing = dataFrame.get(dataFrameIndex);

  // No conflicting values: fill in the gaps of the existing row in place.
  if (isMergable(existing, value)) {
    const merged = { ...existing, ...value };
    dataFrame.set(dataFrameIndex, merged);
    return;
  }

  // Conflict: derive a more specific key from the differing values and retry.
  // NOTE(review): if detailsKeyFactory returns '' (all differing fields are
  // non-key and equal/missing), nextKey === key and this would recurse forever;
  // isMergable failing implies at least one differing defined value, so the
  // suffix is presumably non-empty — verify against createDetailsKeyFactory.
  const nextKey = key + detailsKeyFactory(existing, value);
  mergeOrAdd(nextKey, value, dataFrame, dataFrameIndexByKey, detailsKeyFactory);
};

View File

@@ -1,135 +0,0 @@
import { MutableDataFrame } from '../../../dataframe';
import {
DataFrame,
FieldType,
Field,
TIME_SERIES_TIME_FIELD_NAME,
TIME_SERIES_VALUE_FIELD_NAME,
} from '../../../types/dataFrame';
import { ArrayVector } from '../../../vector';
import { omit } from 'lodash';
import { getFrameDisplayName } from '../../../field';
// Result of DataFrameBuilder.build(): the target frame plus a mapper that
// converts a source row into a record keyed by target field names.
interface DataFrameBuilderResult {
  dataFrame: MutableDataFrame;
  valueMapper: ValueMapper;
}

// Maps one row (valueIndex) of a source frame to a { fieldName: value } record.
type ValueMapper = (frame: DataFrame, valueIndex: number, timeIndex: number) => Record<string, any>;

const TIME_SERIES_METRIC_FIELD_NAME = 'Metric';

// Incrementally collects field structure from multiple frames and builds the
// single output frame (plus a row mapper) used when merging frames by time.
export class DataFrameBuilder {
  // True while every frame seen so far has at most two fields (a plain time series).
  private isOnlyTimeSeries: boolean;
  // True once any frame has exactly two fields, i.e. rows need a Metric column.
  private displayMetricField: boolean;
  // Structure-only copies of all non-time fields, keyed by field name.
  private valueFields: Record<string, Field>;
  // Structure-only copy of the first time field encountered; shared by all frames.
  private timeField: Field | null;

  constructor() {
    this.isOnlyTimeSeries = true;
    this.displayMetricField = false;
    this.valueFields = {};
    this.timeField = null;
  }

  // Registers the fields of one source frame. timeIndex is the index of that
  // frame's time field; all other fields are collected as value fields.
  addFields(frame: DataFrame, timeIndex: number): void {
    if (frame.fields.length > 2) {
      this.isOnlyTimeSeries = false;
    }

    if (frame.fields.length === 2) {
      this.displayMetricField = true;
    }

    for (let index = 0; index < frame.fields.length; index++) {
      const field = frame.fields[index];

      if (index === timeIndex) {
        // Keep only the first time field's structure; all frames share it.
        if (!this.timeField) {
          this.timeField = this.copyStructure(field, TIME_SERIES_TIME_FIELD_NAME);
        }
        continue;
      }

      if (!this.valueFields[field.name]) {
        this.valueFields[field.name] = this.copyStructure(field, field.name);
      }
    }
  }

  // Produces the empty target frame and a mapper matching its field layout.
  build(): DataFrameBuilderResult {
    return {
      dataFrame: this.createDataFrame(),
      valueMapper: this.createValueMapper(),
    };
  }

  private createValueMapper(): ValueMapper {
    return (frame: DataFrame, valueIndex: number, timeIndex: number) => {
      return frame.fields.reduce((values: Record<string, any>, field, index) => {
        const value = field.values.get(valueIndex);

        if (index === timeIndex) {
          values[TIME_SERIES_TIME_FIELD_NAME] = value;
          // Record the originating serie's name alongside each time value.
          if (this.displayMetricField) {
            values[TIME_SERIES_METRIC_FIELD_NAME] = getFrameDisplayName(frame);
          }
          return values;
        }

        // Pure time series collapse into a single shared Value column.
        if (this.isOnlyTimeSeries) {
          values[TIME_SERIES_VALUE_FIELD_NAME] = value;
          return values;
        }

        values[field.name] = value;
        return values;
      }, {});
    };
  }

  // Builds the target frame: Time [+ Metric] followed by the value fields,
  // or Time/Metric/Value when all inputs are plain time series.
  private createDataFrame(): MutableDataFrame {
    const dataFrame = new MutableDataFrame();

    if (this.timeField) {
      dataFrame.addField(this.timeField);

      if (this.displayMetricField) {
        dataFrame.addField({
          name: TIME_SERIES_METRIC_FIELD_NAME,
          type: FieldType.string,
        });
      }
    }

    const valueFields = Object.values(this.valueFields);

    if (this.isOnlyTimeSeries) {
      // One shared Value column reusing the structure of the first value field.
      if (valueFields.length > 0) {
        dataFrame.addField({
          ...valueFields[0],
          name: TIME_SERIES_VALUE_FIELD_NAME,
        });
      }
      return dataFrame;
    }

    for (const field of valueFields) {
      dataFrame.addField(field);
    }
    return dataFrame;
  }

  // Copies a field's structure under a new name, dropping its data, state,
  // labels, and any frame-specific displayName override.
  private copyStructure(field: Field, name: string): Field {
    return {
      ...omit(field, ['values', 'name', 'state', 'labels', 'config']),
      name,
      values: new ArrayVector(),
      config: {
        ...omit(field.config, 'displayName'),
      },
    };
  }
}

View File

@@ -1,74 +0,0 @@
import { DataFrame } from '../../../types/dataFrame';
import { timeComparer } from '../../../field/fieldComparers';
import { sortDataFrame } from '../../../dataframe';
import { TimeFieldsByFrame } from './TimeFieldsByFrame';
// One value popped off the stack: which frame it came from, the row index
// within that frame, and the index of that frame's time field.
interface DataFrameStackValue {
  valueIndex: number;
  timeIndex: number;
  frame: DataFrame;
}

// Treats a set of frames as a single stream of rows ordered by time: each
// pop() returns the not-yet-consumed row with the smallest time value across
// all frames. Frames are lazily sorted by time on the first pop.
export class DataFramesStackedByTime {
  // Per frame: index of the next row that has not been popped yet.
  private valuesPointerByFrame: Record<number, number>;
  private dataFrames: DataFrame[];
  private isSorted: boolean;

  constructor(private timeFields: TimeFieldsByFrame) {
    this.valuesPointerByFrame = {};
    this.dataFrames = [];
    this.isSorted = false;
  }

  // Adds a frame and returns its index within the stack.
  push(frame: DataFrame): number {
    const index = this.dataFrames.length;
    this.valuesPointerByFrame[index] = 0;
    this.dataFrames.push(frame);
    return index;
  }

  // Returns the next row in time order and advances that frame's pointer.
  pop(): DataFrameStackValue {
    if (!this.isSorted) {
      this.sortByTime();
      this.isSorted = true;
    }

    // Pick the frame whose next unconsumed time value is the earliest;
    // ties keep the earlier (lower-index) frame.
    const frameIndex = this.dataFrames.reduce((champion, frame, index) => {
      const championTime = this.peekTimeValueForFrame(champion);
      const contenderTime = this.peekTimeValueForFrame(index);
      return timeComparer(contenderTime, championTime) >= 0 ? champion : index;
    }, 0);

    const previousPointer = this.movePointerForward(frameIndex);

    return {
      frame: this.dataFrames[frameIndex],
      valueIndex: previousPointer,
      timeIndex: this.timeFields.getFieldIndex(frameIndex),
    };
  }

  // Total number of rows across all stacked frames.
  getLength(): number {
    const frames = Object.values(this.dataFrames);
    return frames.reduce((length: number, frame) => (length += frame.length), 0);
  }

  // Time value of the next unconsumed row in the given frame.
  // NOTE(review): reads past the end once a frame is exhausted (values.get of
  // an out-of-range index) — presumably safe only while pop() is called at
  // most getLength() times; verify against the caller.
  private peekTimeValueForFrame(frameIndex: number): any {
    const timeField = this.timeFields.getField(frameIndex);
    const valuePointer = this.valuesPointerByFrame[frameIndex];
    return timeField.values.get(valuePointer);
  }

  // Advances the frame's pointer and returns its previous position.
  private movePointerForward(frameIndex: number): number {
    const currentPointer = this.valuesPointerByFrame[frameIndex];
    this.valuesPointerByFrame[frameIndex] = currentPointer + 1;
    return currentPointer;
  }

  // Sorts every frame by its time field so pointers advance in time order.
  private sortByTime() {
    this.dataFrames = this.dataFrames.map((frame, index) => {
      const timeFieldIndex = this.timeFields.getFieldIndex(index);
      return sortDataFrame(frame, timeFieldIndex);
    });
  }
}

View File

@@ -1,39 +0,0 @@
import { isNumber } from 'lodash';
import { Field, DataFrame } from '../../../types/dataFrame';
import { getTimeField } from '../../../dataframe';
// Lookup table mapping a frame index to that frame's time field and the
// field's index within the frame. Frames without a time field are simply
// not recorded, so getLength() doubles as "number of frames with time".
export class TimeFieldsByFrame {
  private timeIndexByFrameIndex: Record<number, number>;
  private timeFieldByFrameIndex: Record<number, Field>;

  constructor() {
    this.timeIndexByFrameIndex = {};
    this.timeFieldByFrameIndex = {};
  }

  // Records the time field (if any) of the given frame.
  add(frameIndex: number, frame: DataFrame): void {
    const fieldDescription = getTimeField(frame);
    const timeIndex = fieldDescription?.timeIndex;
    const timeField = fieldDescription?.timeField;

    if (isNumber(timeIndex)) {
      this.timeIndexByFrameIndex[frameIndex] = timeIndex;
    }

    if (timeField) {
      this.timeFieldByFrameIndex[frameIndex] = timeField;
    }
  }

  // Time field of the given frame; undefined if the frame had none.
  getField(frameIndex: number): Field {
    return this.timeFieldByFrameIndex[frameIndex];
  }

  // Index of the time field within the given frame; undefined if none.
  getFieldIndex(frameIndex: number): number {
    return this.timeIndexByFrameIndex[frameIndex];
  }

  // Number of frames for which a time field index was recorded.
  getLength() {
    return Object.keys(this.timeIndexByFrameIndex).length;
  }
}

View File

@@ -1,47 +0,0 @@
import { DataTransformerID } from '../ids';
import { DataTransformerInfo } from '../../../types/transformations';
import { DataFrame } from '../../../types/dataFrame';
import { DataFrameBuilder } from './DataFrameBuilder';
import { TimeFieldsByFrame } from './TimeFieldsByFrame';
import { DataFramesStackedByTime } from './DataFramesStackedByTime';
// The merge transformer currently exposes no configurable options.
export interface MergeTransformerOptions {}

// Merges multiple frames into a single frame ordered by time. Every input
// frame must have a time field; otherwise the data is returned untouched.
export const mergeTransformer: DataTransformerInfo<MergeTransformerOptions> = {
  id: DataTransformerID.merge,
  name: 'Merge series/tables',
  description: 'Merges multiple series/tables by time into a single serie/table',
  defaultOptions: {},
  transformer: (options: MergeTransformerOptions) => {
    return (data: DataFrame[]) => {
      // Nothing to merge for zero or one frame.
      if (!Array.isArray(data) || data.length <= 1) {
        return data;
      }

      const timeFields = new TimeFieldsByFrame();
      const framesStack = new DataFramesStackedByTime(timeFields);
      const dataFrameBuilder = new DataFrameBuilder();

      // Register every frame's fields and its time field.
      for (const frame of data) {
        const frameIndex = framesStack.push(frame);
        timeFields.add(frameIndex, frame);

        const timeIndex = timeFields.getFieldIndex(frameIndex);
        dataFrameBuilder.addFields(frame, timeIndex);
      }

      // Bail out unless every frame contributed a time field.
      if (data.length !== timeFields.getLength()) {
        return data;
      }

      // Drain all rows in time order into the combined frame.
      const { dataFrame, valueMapper } = dataFrameBuilder.build();
      for (let index = 0; index < framesStack.getLength(); index++) {
        const { frame, valueIndex, timeIndex } = framesStack.pop();
        dataFrame.add(valueMapper(frame, valueIndex, timeIndex));
      }

      return [dataFrame];
    };
  },
};

View File

@@ -0,0 +1,237 @@
import { mockTransformationsRegistry } from '../../utils/tests/mockTransformationsRegistry';
import { DataTransformerConfig, Field, FieldType } from '../../types';
import { DataTransformerID } from './ids';
import { toDataFrame } from '../../dataframe';
import { transformDataFrame } from '../transformDataFrame';
import { ArrayVector } from '../../vector';
import { seriesToRowsTransformer, SeriesToRowsTransformerOptions } from './seriesToRows';
// Unit tests for the seriesToRows transformer: multiple time series are
// combined into a single frame with Time/Metric/Value columns, rows ordered
// by time descending; per-field configs survive only when the input series
// agree on them (displayName is always dropped).
describe('Series to rows', () => {
  beforeAll(() => {
    mockTransformationsRegistry([seriesToRowsTransformer]);
  });

  it('combine two series into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const seriesA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [1000] },
        { name: 'Temp', type: FieldType.number, values: [1] },
      ],
    });

    const seriesB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [2000] },
        { name: 'Temp', type: FieldType.number, values: [-1] },
      ],
    });

    const result = transformDataFrame([cfg], [seriesA, seriesB]);

    // Rows sorted by time descending; Metric carries the source serie name.
    const expected: Field[] = [
      createField('Time', FieldType.time, [2000, 1000]),
      createField('Metric', FieldType.string, ['B', 'A']),
      createField('Value', FieldType.number, [-1, 1]),
    ];

    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine two series with multiple values into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const seriesA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200] },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5] },
      ],
    });

    const seriesB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3] },
      ],
    });

    const result = transformDataFrame([cfg], [seriesA, seriesB]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine three series into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const seriesA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [1000] },
        { name: 'Temp', type: FieldType.number, values: [1] },
      ],
    });

    const seriesB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [2000] },
        { name: 'Temp', type: FieldType.number, values: [-1] },
      ],
    });

    const seriesC = toDataFrame({
      name: 'C',
      fields: [
        { name: 'Time', type: FieldType.time, values: [500] },
        { name: 'Temp', type: FieldType.number, values: [2] },
      ],
    });

    const result = transformDataFrame([cfg], [seriesA, seriesB, seriesC]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [2000, 1000, 500]),
      createField('Metric', FieldType.string, ['B', 'A', 'C']),
      createField('Value', FieldType.number, [-1, 1, 2]),
    ];

    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine two time series, where first serie fields has displayName, into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const serieA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200], config: { displayName: 'Random time' } },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5], config: { displayName: 'Temp' } },
      ],
    });

    const serieB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3] },
      ],
    });

    const result = transformDataFrame([cfg], [serieA, serieB]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    // displayName is always stripped from the combined Value field's config.
    const fields = unwrap(result[0].fields);
    expect(fields[2].config).toEqual({});
    expect(fields).toEqual(expected);
  });

  it('combine two time series, where first serie fields has units, into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const serieA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200] },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5], config: { units: 'celsius' } },
      ],
    });

    const serieB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3] },
      ],
    });

    const result = transformDataFrame([cfg], [serieA, serieB]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1], { units: 'celsius' }),
    ];

    // The first serie's config wins for the combined Value field.
    const fields = unwrap(result[0].fields);
    expect(fields[2].config).toEqual({ units: 'celsius' });
    expect(fields).toEqual(expected);
  });

  it('combine two time series, where second serie fields has units, into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const serieA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200] },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5] },
      ],
    });

    const serieB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3], config: { units: 'celsius' } },
      ],
    });

    const result = transformDataFrame([cfg], [serieA, serieB]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    // Config from a later serie does not propagate to the combined field.
    const fields = unwrap(result[0].fields);
    expect(fields[2].config).toEqual({});
    expect(fields).toEqual(expected);
  });
});
// Test helper: builds a Field with the given name, type, values, and optional config.
const createField = (name: string, type: FieldType, values: any[], config = {}): Field => ({
  name,
  type,
  config,
  labels: undefined,
  values: new ArrayVector(values),
});
// Test helper: re-creates each field with a plain copy of its values so that
// deep-equality comparisons against expected fields succeed.
const unwrap = (fields: Field[]): Field[] => {
  return fields.map(field => {
    const plainValues = [...field.values.toArray()];
    return createField(field.name, field.type, plainValues, field.config);
  });
};

View File

@@ -0,0 +1,97 @@
import { omit } from 'lodash';
import { DataTransformerID } from './ids';
import { DataTransformerInfo } from '../../types/transformations';
import {
DataFrame,
Field,
FieldType,
TIME_SERIES_TIME_FIELD_NAME,
TIME_SERIES_VALUE_FIELD_NAME,
TIME_SERIES_METRIC_FIELD_NAME,
} from '../../types/dataFrame';
import { isTimeSeries } from '../../dataframe/utils';
import { MutableDataFrame, sortDataFrame } from '../../dataframe';
import { ArrayVector } from '../../vector';
import { getFrameDisplayName } from '../../field/fieldState';
// Options for the series-to-rows transformer (currently none).
export interface SeriesToRowsTransformerOptions {}

/**
 * Merges multiple time series frames into a single frame with
 * Time / Metric / Value columns — one row per original data point,
 * sorted by time (descending), where Metric is the source frame name.
 *
 * NOTE(review): the row-emission pass assumes each input frame has exactly
 * two fields (one time field plus one value field) — see valueFieldIndex
 * below; confirm upstream guarantees this shape.
 */
export const seriesToRowsTransformer: DataTransformerInfo<SeriesToRowsTransformerOptions> = {
  id: DataTransformerID.seriesToRows,
  name: 'Series to rows',
  description: 'Combines multiple series into a single serie and appends a column with metric name per value.',
  defaultOptions: {},
  transformer: (options: SeriesToRowsTransformerOptions) => {
    return (data: DataFrame[]) => {
      // Nothing to merge for zero or one frame.
      if (!Array.isArray(data) || data.length <= 1) {
        return data;
      }

      // Only applies to time series input; other shapes pass through untouched.
      if (!isTimeSeries(data)) {
        return data;
      }

      // Index of the time field within each source frame, keyed by frame index.
      const timeFieldByIndex: Record<number, number> = {};
      // Output field names already added to the target frame.
      const targetFields = new Set<string>();
      const dataFrame = new MutableDataFrame();
      // Shared output column holding each row's source-frame display name.
      const metricField: Field = {
        name: TIME_SERIES_METRIC_FIELD_NAME,
        values: new ArrayVector(),
        config: {},
        type: FieldType.string,
      };

      // First pass: build the output schema (Time, Metric, Value) from the
      // structure of the incoming frames.
      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) {
          const field = frame.fields[fieldIndex];

          if (field.type === FieldType.time) {
            timeFieldByIndex[frameIndex] = fieldIndex;

            if (!targetFields.has(TIME_SERIES_TIME_FIELD_NAME)) {
              // Time column copies the structure of the first time field seen;
              // the Metric column is inserted immediately after it.
              dataFrame.addField(copyFieldStructure(field, TIME_SERIES_TIME_FIELD_NAME));
              dataFrame.addField(metricField);
              targetFields.add(TIME_SERIES_TIME_FIELD_NAME);
            }
            continue;
          }

          if (!targetFields.has(TIME_SERIES_VALUE_FIELD_NAME)) {
            // Value column copies the structure of the first value field seen;
            // config (e.g. units) from later frames is intentionally dropped.
            dataFrame.addField(copyFieldStructure(field, TIME_SERIES_VALUE_FIELD_NAME));
            targetFields.add(TIME_SERIES_VALUE_FIELD_NAME);
          }
        }
      }

      // Second pass: emit one row per source data point.
      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let valueIndex = 0; valueIndex < frame.length; valueIndex++) {
          const timeFieldIndex = timeFieldByIndex[frameIndex];
          // Assumes a two-field frame: whichever index is not time is the value.
          const valueFieldIndex = timeFieldIndex === 0 ? 1 : 0;

          dataFrame.add({
            [TIME_SERIES_TIME_FIELD_NAME]: frame.fields[timeFieldIndex].values.get(valueIndex),
            [TIME_SERIES_METRIC_FIELD_NAME]: getFrameDisplayName(frame),
            [TIME_SERIES_VALUE_FIELD_NAME]: frame.fields[valueFieldIndex].values.get(valueIndex),
          });
        }
      }

      // Sort rows by the Time column (field index 0), descending.
      return [sortDataFrame(dataFrame, 0, true)];
    };
  },
};
/**
 * Clones a field's structural properties under a new name, with an empty
 * value vector and a config stripped of any displayName override.
 * values/state/labels are never carried over.
 */
const copyFieldStructure = (field: Field, name: string): Field => {
  const structure = omit(field, ['values', 'state', 'labels', 'config', 'name']);
  const config = omit(field.config, 'displayName');

  return {
    ...structure,
    name,
    values: new ArrayVector(),
    config: { ...config },
  };
};

View File

@@ -42,6 +42,10 @@ export class AppPlugin<T = KeyValue> extends GrafanaPlugin<AppPluginMeta<T>> {
/**
* Set the component displayed under:
* /a/${plugin-id}/*
*
* If the NavModel is configured, the page will have a managed frame, otheriwse it has full control.
*
* NOTE: this structure will change in 7.2+ so that it is managed with a normal react router
*/
setRootPage(root: ComponentClass<AppRootProps<T>>, rootNav?: NavModel) {
this.root = root;

View File

@@ -15,7 +15,7 @@ export enum LoadingState {
Error = 'Error',
}
export type PreferredVisualisationType = 'graph' | 'table';
export type PreferredVisualisationType = 'graph' | 'table' | 'logs' | 'trace';
export interface QueryResultMeta {
/** DatasSource Specific Values */
@@ -47,6 +47,7 @@ export interface QueryResultMeta {
searchWords?: string[]; // used by log models and loki
limit?: number; // used by log models and loki
json?: boolean; // used to keep track of old json doc values
instant?: boolean;
}
export interface QueryResultMetaStat extends FieldConfig {

View File

@@ -150,3 +150,4 @@ export interface FieldCalcs extends Record<string, any> {}
export const TIME_SERIES_VALUE_FIELD_NAME = 'Value';
export const TIME_SERIES_TIME_FIELD_NAME = 'Time';
export const TIME_SERIES_METRIC_FIELD_NAME = 'Metric';

View File

@@ -35,6 +35,7 @@ export interface DataLink<T extends DataQuery = any> {
// If dataLink represents internal link this has to be filled. Internal link is defined as a query in a particular
// datas ource that we want to show to the user. Usually this results in a link to explore but can also lead to
// more custom onClick behaviour if needed.
// @internal and subject to change in future releases
internal?: {
query: T;
datasourceUid: string;

View File

@@ -310,7 +310,6 @@ export interface QueryEditorProps<
* Contains query response filtered by refId of QueryResultBase and possible query error
*/
data?: PanelData;
exploreMode?: ExploreMode;
exploreId?: any;
history?: HistoryItem[];
}
@@ -334,13 +333,11 @@ export interface ExploreQueryFieldProps<
history: any[];
onBlur?: () => void;
absoluteRange?: AbsoluteTimeRange;
exploreMode?: ExploreMode;
exploreId?: any;
}
export interface ExploreStartPageProps {
datasource?: DataSourceApi;
exploreMode: ExploreMode;
onClickExample: (query: DataQuery) => void;
exploreId?: any;
}

View File

@@ -1,4 +1,3 @@
import { ExploreMode } from './datasource';
import { RawTimeRange } from './time';
import { LogsDedupStrategy } from './logs';
@@ -6,7 +5,6 @@ import { LogsDedupStrategy } from './logs';
export interface ExploreUrlState {
datasource: string;
queries: any[]; // Should be a DataQuery, but we're going to strip refIds, so typing makes less sense
mode: ExploreMode;
range: RawTimeRange;
ui: ExploreUIState;
originPanelId?: number;

View File

@@ -4,9 +4,8 @@ export enum MappingType {
}
interface BaseMap {
id: number;
operator: string;
text: string;
id: number; // this could/should just be the array index
text: string; // the final display value
type: MappingType;
}

View File

@@ -31,7 +31,7 @@ describe('mapInternalLinkToExplore', () => {
expect.objectContaining({
title: 'testDS',
href:
'/explore?left={"datasource":"testDS","queries":[{"query":"12344"}],"mode":"Metrics","ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}',
'/explore?left={"datasource":"testDS","queries":[{"query":"12344"}],"ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}',
onClick: undefined,
})
);

View File

@@ -2,7 +2,6 @@ import {
DataLink,
DataQuery,
DataSourceInstanceSettings,
ExploreMode,
Field,
InterpolateFunction,
LinkModel,
@@ -82,7 +81,6 @@ function generateInternalHref<T extends DataQuery = any>(datasourceName: string,
queries: [query],
// This should get overwritten if datasource does not support that mode and we do not know what mode is
// preferred anyway.
mode: ExploreMode.Metrics,
ui: {
showingGraph: true,
showingTable: true,

View File

@@ -139,7 +139,6 @@ export function serializeStateToUrlParam(urlState: ExploreUrlState, compact?: bo
urlState.range.to,
urlState.datasource,
...urlState.queries,
{ mode: urlState.mode },
{
ui: [
!!urlState.ui.showingGraph,

View File

@@ -11,8 +11,8 @@ describe('Format value with value mappings', () => {
it('should return undefined with no matching valuemappings', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
{ id: 0, text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 1, text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
];
const value = '10';
@@ -21,8 +21,8 @@ describe('Format value with value mappings', () => {
it('should return first matching mapping with lowest id', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: 'tio', type: MappingType.ValueToText, value: '10' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: 'tio', type: MappingType.ValueToText, value: '10' },
];
const value = '10';
@@ -31,8 +31,8 @@ describe('Format value with value mappings', () => {
it('should return if value is null and value to text mapping value is null', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: '<NULL>', type: MappingType.ValueToText, value: 'null' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: '<NULL>', type: MappingType.ValueToText, value: 'null' },
];
const value = null;
@@ -41,8 +41,8 @@ describe('Format value with value mappings', () => {
it('should return if value is null and range to text mapping from and to is null', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '<NULL>', type: MappingType.RangeToText, from: 'null', to: 'null' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '<NULL>', type: MappingType.RangeToText, from: 'null', to: 'null' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = null;
@@ -51,8 +51,8 @@ describe('Format value with value mappings', () => {
it('should return rangeToText mapping where value equals to', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-10', type: MappingType.RangeToText, from: '1', to: '10' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '1-10', type: MappingType.RangeToText, from: '1', to: '10' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = '10';
@@ -61,8 +61,8 @@ describe('Format value with value mappings', () => {
it('should return rangeToText mapping where value equals from', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '10-20', type: MappingType.RangeToText, from: '10', to: '20' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '10-20', type: MappingType.RangeToText, from: '10', to: '20' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = '10';
@@ -71,8 +71,8 @@ describe('Format value with value mappings', () => {
it('should return rangeToText mapping where value is between from and to', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: 'elva', type: MappingType.ValueToText, value: '11' },
];
const value = '10';
@@ -81,8 +81,8 @@ describe('Format value with value mappings', () => {
it('should map value text to mapping', () => {
const valueMappings: ValueMapping[] = [
{ id: 0, operator: '', text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, operator: '', text: 'ELVA', type: MappingType.ValueToText, value: 'elva' },
{ id: 0, text: '1-20', type: MappingType.RangeToText, from: '1', to: '20' },
{ id: 1, text: 'ELVA', type: MappingType.ValueToText, value: 'elva' },
];
const value = 'elva';

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/e2e-selectors",
"version": "7.1.0-pre.0",
"version": "7.1.0",
"description": "Grafana End-to-End Test Selectors Library",
"keywords": [
"cli",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/e2e",
"version": "7.1.0-pre.0",
"version": "7.1.0",
"description": "Grafana End-to-End Test Library",
"keywords": [
"cli",
@@ -45,7 +45,7 @@
"types": "src/index.ts",
"dependencies": {
"@cypress/webpack-preprocessor": "4.1.3",
"@grafana/e2e-selectors": "7.1.0-pre.0",
"@grafana/e2e-selectors": "7.1.0",
"@grafana/tsconfig": "^1.0.0-rc1",
"@mochajs/json-file-reporter": "^1.2.0",
"blink-diff": "1.0.13",

View File

@@ -68,6 +68,8 @@ export const addPanel = (config?: Partial<AddPanelConfig>): any =>
.click();
closeOptionsGroup('type');
closeOptions();
queriesForm(fullConfig);
e2e().wait('@chartData');
@@ -77,8 +79,6 @@ export const addPanel = (config?: Partial<AddPanelConfig>): any =>
//e2e.components.Panels.Panel.containerByTitle(panelTitle).find('.panel-content').contains('No data');
//e2e.components.QueryEditorRow.actionButton('Disable/enable query').click();
closeOptions();
e2e()
.get('button[title="Apply changes and go back to dashboard"]')
.click();

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/runtime",
"version": "7.1.0-pre.0",
"version": "7.1.0",
"description": "Grafana Runtime Library",
"keywords": [
"grafana",
@@ -23,8 +23,8 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@grafana/data": "7.1.0-pre.0",
"@grafana/ui": "7.1.0-pre.0",
"@grafana/data": "7.1.0",
"@grafana/ui": "7.1.0",
"systemjs": "0.20.19",
"systemjs-plugin-css": "0.1.37"
},
@@ -32,9 +32,9 @@
"@grafana/tsconfig": "^1.0.0-rc1",
"@rollup/plugin-commonjs": "11.0.2",
"@rollup/plugin-node-resolve": "7.1.1",
"@types/jest": "23.3.14",
"@types/rollup-plugin-visualizer": "2.6.0",
"@types/systemjs": "^0.20.6",
"@types/jest": "23.3.14",
"lodash": "4.17.15",
"pretty-format": "25.1.0",
"rollup": "2.0.6",

View File

@@ -129,6 +129,11 @@ export class DataSourceWithBackend<
/**
* Optionally augment the response before returning the results to the
*
* NOTE: this was added in 7.1 for azure, and will be removed in 7.2
* when the entire response pipeline is Observable
*
* @internal
*/
processResponse?(res: DataQueryResponse): Promise<DataQueryResponse>;

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/toolkit",
"version": "7.1.0-pre.0",
"version": "7.1.0",
"description": "Grafana Toolkit",
"keywords": [
"grafana",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/ui",
"version": "7.1.0-pre.0",
"version": "7.1.0",
"description": "Grafana Components Library",
"keywords": [
"grafana",
@@ -28,8 +28,8 @@
},
"dependencies": {
"@emotion/core": "^10.0.27",
"@grafana/data": "7.1.0-pre.0",
"@grafana/e2e-selectors": "7.1.0-pre.0",
"@grafana/data": "7.1.0",
"@grafana/e2e-selectors": "7.1.0",
"@grafana/slate-react": "0.22.9-grafana",
"@grafana/tsconfig": "^1.0.0-rc1",
"@iconscout/react-unicons": "^1.0.0",
@@ -47,9 +47,8 @@
"immutable": "3.8.2",
"jquery": "3.5.1",
"lodash": "4.17.15",
"monaco-editor": "0.20.0",
"react-monaco-editor": "0.36.0",
"moment": "2.24.0",
"monaco-editor": "0.20.0",
"papaparse": "4.6.3",
"rc-cascader": "1.0.1",
"rc-drawer": "3.1.3",
@@ -63,6 +62,7 @@
"react-dom": "16.12.0",
"react-highlight-words": "0.16.0",
"react-hook-form": "5.1.3",
"react-monaco-editor": "0.36.0",
"react-popper": "1.3.3",
"react-storybook-addon-props-combinations": "1.1.0",
"react-table": "7.0.0",

View File

@@ -605,9 +605,6 @@ function getValueStyles(
styles.paddingRight = `${VALUE_LEFT_PADDING}px`;
// Need to remove the left padding from the text width constraints
textWidth -= VALUE_LEFT_PADDING;
// adjust width of title box
styles.width = measureText(formattedValueString, styles.fontSize).width + VALUE_LEFT_PADDING * 2;
}
return styles;

View File

@@ -35,7 +35,7 @@ exports[`BarGauge Render with basic options should render 1`] = `
"lineHeight": 1,
"paddingLeft": "10px",
"paddingRight": "10px",
"width": 22,
"width": "60px",
}
}
value={

View File

@@ -55,6 +55,12 @@ export interface Props extends Themeable {
justifyMode?: BigValueJustifyMode;
alignmentFactors?: DisplayValueAlignmentFactors;
textMode?: BigValueTextMode;
/**
* If part of a series of stat panes, this is the total number.
* Used by BigValueTextMode.Auto text mode.
*/
count?: number;
}
export class BigValue extends PureComponent<Props> {

View File

@@ -463,12 +463,18 @@ export interface BigValueTextValues extends DisplayValue {
}
function getTextValues(props: Props): BigValueTextValues {
const { textMode: nameAndValue, value, alignmentFactors } = props;
const { value, alignmentFactors, count } = props;
let { textMode } = props;
const titleToAlignTo = alignmentFactors ? alignmentFactors.title : value.title;
const valueToAlignTo = formattedValueToString(alignmentFactors ? alignmentFactors : value);
switch (nameAndValue) {
// In the auto case we only show title if this big value is part of more panes (count > 1)
if (textMode === BigValueTextMode.Auto && (count ?? 1) === 1) {
textMode = BigValueTextMode.Value;
}
switch (textMode) {
case BigValueTextMode.Name:
return {
...value,
@@ -498,6 +504,7 @@ function getTextValues(props: Props): BigValueTextValues {
valueToAlignTo: '1',
tooltip: `Name: ${value.title}\nValue: ${formattedValueToString(value)}`,
};
case BigValueTextMode.ValueAndName:
default:
return {
...value,

View File

@@ -2,6 +2,8 @@ import React from 'react';
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
import { FileUpload } from './FileUpload';
import mdx from './FileUpload.mdx';
import { useSize } from '../../utils/storybook/useSize';
import { ComponentSize } from '../../types/size';
export default {
title: 'Forms/FileUpload',
@@ -15,8 +17,10 @@ export default {
};
export const single = () => {
const size = useSize();
return (
<FileUpload
size={size as ComponentSize}
onFileUpload={({ currentTarget }) => console.log('file', currentTarget?.files && currentTarget.files[0])}
/>
);

View File

@@ -3,12 +3,14 @@ import { GrafanaTheme } from '@grafana/data';
import { css, cx } from 'emotion';
import { getFormStyles, Icon } from '../index';
import { stylesFactory, useTheme } from '../../themes';
import { ComponentSize } from '../../types/size';
export interface Props {
onFileUpload: (event: FormEvent<HTMLInputElement>) => void;
/** Accepted file extensions */
accept?: string;
className?: string;
size?: ComponentSize;
}
function trimFileName(fileName: string) {
@@ -24,9 +26,15 @@ function trimFileName(fileName: string) {
return `${file.substring(0, nameLength)}...${extension}`;
}
export const FileUpload: FC<Props> = ({ onFileUpload, className, children = 'Upload file', accept = '*' }) => {
export const FileUpload: FC<Props> = ({
onFileUpload,
className,
children = 'Upload file',
accept = '*',
size = 'md',
}) => {
const theme = useTheme();
const style = getStyles(theme);
const style = getStyles(theme, size);
const [fileName, setFileName] = useState('');
const onChange = useCallback((event: FormEvent<HTMLInputElement>) => {
@@ -60,8 +68,8 @@ export const FileUpload: FC<Props> = ({ onFileUpload, className, children = 'Upl
);
};
const getStyles = stylesFactory((theme: GrafanaTheme) => {
const buttonFormStyle = getFormStyles(theme, { variant: 'primary', invalid: false, size: 'md' }).button.button;
const getStyles = stylesFactory((theme: GrafanaTheme, size: ComponentSize) => {
const buttonFormStyle = getFormStyles(theme, { variant: 'primary', invalid: false, size }).button.button;
return {
fileUpload: css`
display: none;

View File

@@ -34,10 +34,12 @@ export const FormLabel: FunctionComponent<Props> = ({
{tooltip && (
<Tooltip placement="top" content={tooltip} theme={'info'}>
<div className="gf-form-help-icon gf-form-help-icon--right-normal">
<Icon name="info-circle" size="xs" style={{ marginLeft: '10px' }} />
<Icon name="info-circle" size="sm" style={{ marginLeft: '10px' }} />
</div>
</Tooltip>
)}
</label>
);
};
export const InlineFormLabel = FormLabel;

View File

@@ -2,6 +2,7 @@ import React, { PureComponent } from 'react';
import uniqueId from 'lodash/uniqueId';
import { Tooltip } from '../../../Tooltip/Tooltip';
import * as PopperJS from 'popper.js';
import { Icon } from '../../..';
export interface Props {
label: string;
@@ -54,7 +55,7 @@ export class Switch extends PureComponent<Props, State> {
{tooltip && (
<Tooltip placement={tooltipPlacement ? tooltipPlacement : 'auto'} content={tooltip} theme={'info'}>
<div className="gf-form-help-icon gf-form-help-icon--right-normal">
<i className="fa fa-info-circle" />
<Icon name="info-circle" size="sm" style={{ marginLeft: '10px' }} />
</div>
</Tooltip>
)}

View File

@@ -0,0 +1,38 @@
import { findInsertIndex } from './suggestions';
// Unit tests for findInsertIndex: given the text to the left of the cursor,
// it must report where a completion should be inserted and the prefix typed
// so far. (Fixes the typo "complte" in a test description.)
describe('Check suggestion index', () => {
  it('find last $ sign', () => {
    const line = ' hello $123';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.indexOf('$'));
    expect(prefix).toEqual('$123');
  });

  it('insert into empty line', () => {
    const line = '';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(0);
    expect(prefix).toEqual('');
  });

  it('insert new word', () => {
    const line = 'this is a new ';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.length);
    expect(prefix).toEqual('');
  });

  it('complete a simple word', () => {
    const line = 'SELECT * FROM tab';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.lastIndexOf(' ') + 1);
    expect(prefix).toEqual('tab');
  });

  it('complete a quoted word', () => {
    const line = 'SELECT "hello", "wo';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.lastIndexOf('"') + 1);
    expect(prefix).toEqual('wo');
  });
});

View File

@@ -2,6 +2,33 @@ import * as monaco from 'monaco-editor/esm/vs/editor/editor.api';
import { CodeEditorSuggestionItem, CodeEditorSuggestionItemKind, CodeEditorSuggestionProvider } from './types';
/**
* @internal -- only exported for tests
*/
/**
 * Scans backwards from the end of `line` to find where a completion should
 * be inserted, and the prefix the user has typed so far.
 *
 * Rules:
 *  - a '$' starts a template-variable token: insert at the '$' and keep it
 *    in the prefix;
 *  - whitespace and quote characters are separators: insert just after them,
 *    excluding the separator from the prefix;
 *  - otherwise the whole line is the prefix (insert at column 0).
 *
 * @internal -- only exported for tests
 */
export function findInsertIndex(line: string): { index: number; prefix: string } {
  // i >= 0 (was i > 0) so a separator or '$' in the FIRST column is honored
  // too: previously a line like '"wo' wrongly kept the quote in the prefix.
  // ('$' at index 0 returns { 0, line } either way, so that case is unchanged.)
  for (let i = line.length - 1; i >= 0; i--) {
    const ch = line.charAt(i);
    if (ch === '$') {
      return {
        index: i,
        prefix: line.substring(i),
      };
    }

    // Keep these separators out of the prefix; insert right after them.
    if (ch === ' ' || ch === '\t' || ch === '"' || ch === "'") {
      return {
        index: i + 1,
        prefix: line.substring(i + 1),
      };
    }
  }

  // No separator or '$' found: the entire line is the prefix.
  return {
    index: 0,
    prefix: line,
  };
}
function getCompletionItems(
prefix: string,
suggestions: CodeEditorSuggestionItem[],
@@ -53,51 +80,39 @@ export function registerSuggestions(
triggerCharacters: ['$'],
provideCompletionItems: (model, position, context) => {
const range = {
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: position.column,
endColumn: position.column,
};
// Simple check if this was triggered by pressing `$`
if (context.triggerCharacter === '$') {
const range = {
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: position.column - 1,
endColumn: position.column,
};
range.startColumn = position.column - 1;
return {
suggestions: getCompletionItems('$', getSuggestions(), range),
};
}
// find out if we are completing a property in the 'dependencies' object.
const lineText = model.getValueInRange({
// Find the replacement region
const currentLine = model.getValueInRange({
startLineNumber: position.lineNumber,
startColumn: 1,
endLineNumber: position.lineNumber,
endColumn: position.column,
});
const idx = lineText.lastIndexOf('$');
if (idx >= 0) {
const range = {
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: idx, // the last $ we found
endColumn: position.column,
};
return {
suggestions: getCompletionItems(lineText.substr(idx), getSuggestions(), range),
};
const { index, prefix } = findInsertIndex(currentLine);
range.startColumn = index + 1;
const suggestions = getCompletionItems(prefix, getSuggestions(), range);
if (suggestions.length) {
// NOTE, this will replace any language provided suggestions
return { suggestions };
}
// Empty line that asked for suggestion
if (lineText.trim().length < 1) {
return {
suggestions: getCompletionItems('', getSuggestions(), {
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: position.column,
endColumn: position.column,
}),
};
}
// console.log('complete?', lineText, context);
// Default language suggestions
return undefined;
},
});

View File

@@ -1,8 +1,11 @@
import React, { FC } from 'react';
import { css, cx } from 'emotion';
import { TableCellProps } from './types';
import { isString } from 'lodash';
import { Tooltip } from '../Tooltip/Tooltip';
import { JSONFormatter } from '../JSONFormatter/JSONFormatter';
import { useStyles } from '../../themes';
import { TableCellProps } from './types';
import { GrafanaTheme } from '@grafana/data';
export const JSONViewCell: FC<TableCellProps> = props => {
const { field, cell, tableStyles } = props;
@@ -16,11 +19,19 @@ export const JSONViewCell: FC<TableCellProps> = props => {
font-family: monospace;
`;
const displayValue = JSON.stringify(cell.value);
const content = <JSONTooltip value={cell.value} />;
let value = cell.value;
let displayValue = value;
if (isString(value)) {
try {
value = JSON.parse(value);
} catch {} // ignore errors
} else {
displayValue = JSON.stringify(value);
}
const content = <JSONTooltip value={value} />;
return (
<div className={cx(txt, tableStyles.tableCell)}>
<Tooltip placement="auto" content={content} theme={'info'}>
<Tooltip placement="auto" content={content} theme="info-alt">
<div className={tableStyles.overflow}>{displayValue}</div>
</Tooltip>
</div>
@@ -32,12 +43,19 @@ interface PopupProps {
}
const JSONTooltip: FC<PopupProps> = props => {
const clazz = css`
padding: 10px;
`;
const styles = useStyles((theme: GrafanaTheme) => {
return {
container: css`
padding: ${theme.spacing.xs};
`,
};
});
return (
<div className={clazz}>
<JSONFormatter json={props.value} open={4} />
<div className={styles.container}>
<div>
<JSONFormatter json={props.value} open={4} />
</div>
</div>
);
};

View File

@@ -0,0 +1,22 @@
import { Props } from '@storybook/addon-docs/blocks';
import { Tooltip } from './Tooltip';
# Tooltip
## Theme
There are currently three themes available for the Tooltip:
- Info
- Error
- Info-alt (alternative)
### Info
This is the default theme, usually used in forms to show more information.
### Error
Tooltip with a red background.
### Info alternative
We added this to be able to add a `<JSONFormatter />` in the tooltip.
<Props of={Tooltip} />

View File

@@ -0,0 +1,28 @@
import React from 'react';
import { select } from '@storybook/addon-knobs';
import { Tooltip } from './Tooltip';
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
import { Button } from '../Button';
import mdx from '../Tooltip/Tooltip.mdx';
export default {
title: 'Overlays/Tooltip',
component: Tooltip,
decorators: [withCenteredStory],
parameters: {
docs: {
page: mdx,
},
},
};
// Interactive story: the tooltip theme is selectable through a knob in the
// "Visual options" group.
export const basic = () => {
  const visualGroup = 'Visual options';
  const theme = select('Theme', ['info', 'error', 'info-alt'], 'info', visualGroup);

  return (
    <Tooltip content="This is a tooltip" theme={theme}>
      <Button>Hover me for Tooltip </Button>
    </Tooltip>
  );
};

View File

@@ -1,10 +1,10 @@
import React, { createRef } from 'react';
import React, { createRef, FC } from 'react';
import * as PopperJS from 'popper.js';
import { Popover } from './Popover';
import { PopoverController, UsingPopperProps } from './PopoverController';
export interface TooltipProps extends UsingPopperProps {
theme?: 'info' | 'error';
theme?: 'info' | 'error' | 'info-alt';
}
export interface PopoverContentProps {
@@ -13,7 +13,7 @@ export interface PopoverContentProps {
export type PopoverContent = string | React.ReactElement<any> | ((props: PopoverContentProps) => JSX.Element);
export const Tooltip = ({ children, theme, ...controllerProps }: TooltipProps) => {
export const Tooltip: FC<TooltipProps> = ({ children, theme, ...controllerProps }: TooltipProps) => {
const tooltipTriggerRef = createRef<PopperJS.ReferenceObject>();
const popperBackgroundClassName = 'popper__background' + (theme ? ' popper__background--' + theme : '');

View File

@@ -47,6 +47,10 @@ $popper-margin-from-ref: 5px;
&.popper__background--info {
@include popper-theme($popover-help-bg, $popover-help-color);
}
&.popper__background--info-alt {
@include popper-theme($popover-code-bg, $text-color);
}
}
.popper__arrow {

View File

@@ -12,8 +12,8 @@ const setup = (spy?: any, propOverrides?: object) => {
}
},
valueMappings: [
{ id: 1, operator: '', type: MappingType.ValueToText, value: '20', text: 'Ok' },
{ id: 2, operator: '', type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
{ id: 1, type: MappingType.ValueToText, value: '20', text: 'Ok' },
{ id: 2, type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
],
};
@@ -35,9 +35,7 @@ describe('On remove mapping', () => {
const remove = wrapper.find('button[aria-label="ValueMappingsEditor remove button"]');
remove.at(0).simulate('click');
expect(onChangeSpy).toBeCalledWith([
{ id: 2, operator: '', type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
]);
expect(onChangeSpy).toBeCalledWith([{ id: 2, type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' }]);
});
it('should remove mapping at index 1', () => {
@@ -47,9 +45,7 @@ describe('On remove mapping', () => {
const remove = wrapper.find('button[aria-label="ValueMappingsEditor remove button"]');
remove.at(1).simulate('click');
expect(onChangeSpy).toBeCalledWith([
{ id: 1, operator: '', type: MappingType.ValueToText, value: '20', text: 'Ok' },
]);
expect(onChangeSpy).toBeCalledWith([{ id: 1, type: MappingType.ValueToText, value: '20', text: 'Ok' }]);
});
});
@@ -62,9 +58,9 @@ describe('Next id to add', () => {
add.at(0).simulate('click');
expect(onChangeSpy).toBeCalledWith([
{ id: 1, operator: '', type: MappingType.ValueToText, value: '20', text: 'Ok' },
{ id: 2, operator: '', type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
{ id: 3, operator: '', type: MappingType.ValueToText, from: '', to: '', text: '' },
{ id: 1, type: MappingType.ValueToText, value: '20', text: 'Ok' },
{ id: 2, type: MappingType.RangeToText, from: '21', to: '30', text: 'Meh' },
{ id: 3, type: MappingType.ValueToText, from: '', to: '', text: '' },
]);
});
@@ -73,8 +69,6 @@ describe('Next id to add', () => {
const wrapper = setup(onChangeSpy, { valueMappings: [] });
const add = wrapper.find('*[aria-label="ValueMappingsEditor add mapping button"]');
add.at(0).simulate('click');
expect(onChangeSpy).toBeCalledWith([
{ id: 0, operator: '', type: MappingType.ValueToText, from: '', to: '', text: '' },
]);
expect(onChangeSpy).toBeCalledWith([{ id: 0, type: MappingType.ValueToText, from: '', to: '', text: '' }]);
});
});

View File

@@ -15,7 +15,6 @@ export const ValueMappingsEditor: React.FC<Props> = ({ valueMappings, onChange,
type: MappingType.ValueToText,
from: '',
to: '',
operator: '',
text: '',
};
const id = update && update.length > 0 ? Math.max(...update.map(v => v.id)) + 1 : 0;

View File

@@ -163,7 +163,7 @@ export { FileUpload } from './FileUpload/FileUpload';
// Legacy forms
// Export this until we've figured out a good approach to inline form styles.
export { FormLabel as InlineFormLabel } from './FormLabel/FormLabel';
export { InlineFormLabel } from './FormLabel/FormLabel';
// Select
import { Select, AsyncSelect } from './Forms/Legacy/Select/Select';

View File

@@ -0,0 +1,73 @@
import React from 'react';
import { config } from '@grafana/runtime';
import { renderHook } from '@testing-library/react-hooks';
import { css } from 'emotion';
import { mount } from 'enzyme';
import { memoizedStyleCreators, mockThemeContext, useStyles } from './ThemeContext';
describe('useStyles', () => {
it('memoizes the passed in function correctly', () => {
const stylesCreator = () => ({});
const { rerender, result } = renderHook(() => useStyles(stylesCreator));
const storedReference = result.current;
rerender();
expect(storedReference).toBe(result.current);
});
it('does not memoize if the passed in function changes every time', () => {
const { rerender, result } = renderHook(() => useStyles(() => ({})));
const storedReference = result.current;
rerender();
expect(storedReference).not.toBe(result.current);
});
it('updates the memoized function when the theme changes', () => {
const stylesCreator = () => ({});
const { rerender, result } = renderHook(() => useStyles(stylesCreator));
const storedReference = result.current;
const restoreThemeContext = mockThemeContext({});
rerender();
expect(storedReference).not.toBe(result.current);
restoreThemeContext();
});
it('cleans up memoized functions whenever a new one comes along or the component unmounts', () => {
const styleCreators: Function[] = [];
const { rerender, unmount } = renderHook(() => {
const styleCreator = () => ({});
styleCreators.push(styleCreator);
return useStyles(styleCreator);
});
expect(typeof memoizedStyleCreators.get(styleCreators[0])).toBe('function');
rerender();
expect(memoizedStyleCreators.get(styleCreators[0])).toBeUndefined();
expect(typeof memoizedStyleCreators.get(styleCreators[1])).toBe('function');
unmount();
expect(memoizedStyleCreators.get(styleCreators[0])).toBeUndefined();
expect(memoizedStyleCreators.get(styleCreators[1])).toBeUndefined();
});
it('passes in theme and returns style object', done => {
const Dummy: React.FC = function() {
const styles = useStyles(theme => {
expect(theme).toEqual(config.theme);
return {
someStyle: css`
color: ${theme.palette.critical};
`,
};
});
expect(typeof styles.someStyle).toBe('string');
done();
return <div>dummy</div>;
};
mount(<Dummy />);
});
});

View File

@@ -1,9 +1,8 @@
import React, { useContext } from 'react';
import hoistNonReactStatics from 'hoist-non-react-statics';
import { getTheme } from './getTheme';
import { Themeable } from '../types/theme';
import { GrafanaTheme, GrafanaThemeType } from '@grafana/data';
import hoistNonReactStatics from 'hoist-non-react-statics';
import React, { useContext, useEffect } from 'react';
import { Themeable } from '../types/theme';
import { getTheme } from './getTheme';
import { stylesFactory } from './stylesFactory';
type Omit<T, K> = Pick<T, Exclude<keyof T, K>>;
@@ -14,6 +13,9 @@ type Subtract<T, K> = Omit<T, keyof K>;
*/
let ThemeContextMock: React.Context<GrafanaTheme> | null = null;
// Used by useStyles()
export const memoizedStyleCreators = new WeakMap();
// Use Grafana Dark theme by default
export const ThemeContext = React.createContext(getTheme(GrafanaThemeType.Dark));
ThemeContext.displayName = 'ThemeContext';
@@ -38,12 +40,31 @@ export const withTheme = <P extends Themeable, S extends {} = {}>(Component: Rea
export function useTheme(): GrafanaTheme {
return useContext(ThemeContextMock || ThemeContext);
}
/** Hook for using memoized styles with access to the theme. */
export const useStyles = (getStyles: (theme?: GrafanaTheme) => any) => {
const currentTheme = useTheme();
const callback = stylesFactory(stylesTheme => getStyles(stylesTheme));
return callback(currentTheme);
};
/**
* Hook for using memoized styles with access to the theme.
*
* NOTE: For memoization to work, you need to ensure that the function
* you pass in doesn't change, or only if it needs to. (i.e. declare
* your style creator outside of a function component or use `useCallback()`.)
* */
export function useStyles<T>(getStyles: (theme: GrafanaTheme) => T) {
const theme = useTheme();
let memoizedStyleCreator = memoizedStyleCreators.get(getStyles);
if (!memoizedStyleCreator) {
memoizedStyleCreator = stylesFactory(getStyles);
memoizedStyleCreators.set(getStyles, memoizedStyleCreator);
}
useEffect(() => {
return () => {
memoizedStyleCreators.delete(getStyles);
};
}, [getStyles]);
return memoizedStyleCreator(theme);
}
/**
* Enables theme context mocking

View File

@@ -300,6 +300,9 @@ $popover-help-bg: $tooltipBackground;
$popover-help-color: $text-color;
$popover-error-bg: $btn-danger-bg;
$popover-code-bg: $popover-bg;
$popover-code-boxshadow: $tooltipShadow;
// images
$checkboxImageUrl: '../img/checkbox.png';

View File

@@ -293,6 +293,9 @@ $popover-error-bg: $btn-danger-bg;
$popover-help-bg: $tooltipBackground;
$popover-help-color: $tooltipColor;
$popover-code-bg: ${theme.colors.bg1};
$popover-code-boxshadow: 0 0 5px $gray60;
// images
$checkboxImageUrl: '../img/checkbox_white.png';

View File

@@ -155,6 +155,7 @@ export const getStandardFieldConfigs = () => {
id: 'mappings',
path: 'mappings',
name: 'Value mappings',
description: 'Modify the display text based on input value',
editor: standardEditorsRegistry.get('mappings').editor as any,
override: standardEditorsRegistry.get('mappings').editor as any,

View File

@@ -0,0 +1,7 @@
import { select } from '@storybook/addon-knobs';
import { ComponentSize } from '../../types/size';
export function useSize(size: ComponentSize = 'md') {
const sizes = ['xs', 'sm', 'md', 'lg'];
return select('Size', sizes, size);
}

View File

@@ -1,6 +1,6 @@
{
"name": "@jaegertracing/jaeger-ui-components",
"version": "7.1.0-pre.0",
"version": "7.1.0",
"main": "src/index.ts",
"types": "src/index.ts",
"license": "Apache-2.0",
@@ -14,7 +14,7 @@
"typescript": "3.9.3"
},
"dependencies": {
"@grafana/data": "7.1.0-pre.0",
"@grafana/data": "7.1.0",
"@types/classnames": "^2.2.7",
"@types/deep-freeze": "^0.1.1",
"@types/hoist-non-react-statics": "^3.3.1",

View File

@@ -49,6 +49,7 @@ RUN mkdir -p "$GF_PATHS_HOME/.aws" && \
mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
"$GF_PATHS_PROVISIONING/dashboards" \
"$GF_PATHS_PROVISIONING/notifiers" \
"$GF_PATHS_PROVISIONING/plugins" \
"$GF_PATHS_LOGS" \
"$GF_PATHS_PLUGINS" \
"$GF_PATHS_DATA" && \

View File

@@ -39,6 +39,7 @@ RUN mkdir -p "$GF_PATHS_HOME/.aws" && \
mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
"$GF_PATHS_PROVISIONING/dashboards" \
"$GF_PATHS_PROVISIONING/notifiers" \
"$GF_PATHS_PROVISIONING/plugins" \
"$GF_PATHS_LOGS" \
"$GF_PATHS_PLUGINS" \
"$GF_PATHS_DATA" && \

View File

@@ -159,6 +159,12 @@ var (
// StatsTotalDataSources is a metric total number of defined datasources, labeled by pluginId
StatsTotalDataSources *prometheus.GaugeVec
// StatsTotalAnnotations is a metric of total number of annotations stored in Grafana.
StatsTotalAnnotations prometheus.Gauge
// StatsTotalDashboardVersions is a metric of total number of dashboard versions stored in Grafana.
StatsTotalDashboardVersions prometheus.Gauge
// grafanaBuildVersion is a metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built
grafanaBuildVersion *prometheus.GaugeVec
@@ -483,6 +489,18 @@ func init() {
Help: "A metric with a constant '1' value labeled by pluginId, pluginType and version from which Grafana plugin was built",
Namespace: ExporterName,
}, []string{"plugin_id", "plugin_type", "version"})
StatsTotalDashboardVersions = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "stat_totals_dashboard_versions",
Help: "total amount of dashboard versions in the database",
Namespace: ExporterName,
})
StatsTotalAnnotations = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "stat_totals_annotations",
Help: "total amount of annotations in the database",
Namespace: ExporterName,
})
}
// SetBuildInformation sets the build information for this binary
@@ -550,6 +568,8 @@ func initMetricVars() {
StatsTotalDataSources,
grafanaBuildVersion,
grafanaPluginBuildInfoDesc,
StatsTotalDashboardVersions,
StatsTotalAnnotations,
)
}

View File

@@ -61,6 +61,8 @@ func (uss *UsageStatsService) sendUsageStats(oauthProviders map[string]bool) {
metrics["stats.snapshots.count"] = statsQuery.Result.Snapshots
metrics["stats.teams.count"] = statsQuery.Result.Teams
metrics["stats.total_auth_token.count"] = statsQuery.Result.AuthTokens
metrics["stats.dashboard_versions.count"] = statsQuery.Result.DashboardVersions
metrics["stats.annotations.count"] = statsQuery.Result.Annotations
metrics["stats.valid_license.count"] = getValidLicenseCount(uss.License.HasValidLicense())
metrics["stats.edition.oss.count"] = getOssEditionCount()
metrics["stats.edition.enterprise.count"] = getEnterpriseEditionCount()
@@ -212,6 +214,8 @@ func (uss *UsageStatsService) updateTotalStats() {
metrics.StatsTotalActiveEditors.Set(float64(statsQuery.Result.ActiveEditors))
metrics.StatsTotalAdmins.Set(float64(statsQuery.Result.Admins))
metrics.StatsTotalActiveAdmins.Set(float64(statsQuery.Result.ActiveAdmins))
metrics.StatsTotalDashboardVersions.Set(float64(statsQuery.Result.DashboardVersions))
metrics.StatsTotalAnnotations.Set(float64(statsQuery.Result.Annotations))
dsStats := models.GetDataSourceStatsQuery{}
if err := uss.Bus.Dispatch(&dsStats); err != nil {

View File

@@ -50,6 +50,8 @@ func TestMetrics(t *testing.T) {
Snapshots: 13,
Teams: 14,
AuthTokens: 15,
DashboardVersions: 16,
Annotations: 17,
}
getSystemStatsQuery = query
return nil
@@ -238,6 +240,8 @@ func TestMetrics(t *testing.T) {
So(metrics.Get("stats.teams.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Teams)
So(metrics.Get("stats.total_auth_token.count").MustInt64(), ShouldEqual, 15)
So(metrics.Get("stats.avg_auth_token_per_user.count").MustInt64(), ShouldEqual, 5)
So(metrics.Get("stats.dashboard_versions.count").MustInt64(), ShouldEqual, 16)
So(metrics.Get("stats.annotations.count").MustInt64(), ShouldEqual, 17)
So(metrics.Get("stats.ds."+models.DS_ES+".count").MustInt(), ShouldEqual, 9)
So(metrics.Get("stats.ds."+models.DS_PROMETHEUS+".count").MustInt(), ShouldEqual, 10)

View File

@@ -2,6 +2,7 @@ package middleware
import (
"net/url"
"strconv"
"strings"
macaron "gopkg.in/macaron.v1"
@@ -87,7 +88,13 @@ func RoleAuth(roles ...models.RoleType) macaron.Handler {
func Auth(options *AuthOptions) macaron.Handler {
return func(c *models.ReqContext) {
forceLogin := c.AllowAnonymous && c.QueryBool("forceLogin")
forceLogin := false
if c.AllowAnonymous {
forceLoginParam, err := strconv.ParseBool(c.Req.URL.Query().Get("forceLogin"))
if err == nil {
forceLogin = forceLoginParam
}
}
requireLogin := !c.AllowAnonymous || forceLogin
if !c.IsSignedIn && options.ReqSignedIn && requireLogin {
notAuthorized(c)

View File

@@ -16,4 +16,6 @@ type Licensing interface {
LicenseURL(user *SignedInUser) string
StateInfo() string
TokenRaw() string
}

View File

@@ -16,6 +16,8 @@ type SystemStats struct {
Folders int64
ProvisionedDashboards int64
AuthTokens int64
DashboardVersions int64
Annotations int64
Admins int
Editors int

View File

@@ -34,11 +34,7 @@ var (
)
func init() {
registry.Register(&registry.Descriptor{
Name: "BackendPluginManager",
Instance: &manager{},
InitPriority: registry.Low,
})
registry.RegisterService(&manager{})
}
// Manager manages backend plugins.
@@ -100,7 +96,11 @@ func (m *manager) Register(pluginID string, factory PluginFactoryFunc) error {
}
if m.License.HasLicense() {
hostEnv = append(hostEnv, fmt.Sprintf("GF_ENTERPRISE_LICENSE_PATH=%s", m.Cfg.EnterpriseLicensePath))
hostEnv = append(
hostEnv,
fmt.Sprintf("GF_ENTERPRISE_LICENSE_PATH=%s", m.Cfg.EnterpriseLicensePath),
fmt.Sprintf("GF_ENTERPRISE_LICENSE_TEXT=%s", m.License.TokenRaw()),
)
}
env := pluginSettings.ToEnv("GF_PLUGIN", hostEnv)

View File

@@ -251,6 +251,7 @@ func TestManager(t *testing.T) {
t.Run("Plugin registration scenario when Grafana is licensed", func(t *testing.T) {
ctx.license.edition = "Enterprise"
ctx.license.hasLicense = true
ctx.license.tokenRaw = "testtoken"
ctx.cfg.BuildVersion = "7.0.0"
ctx.cfg.EnterpriseLicensePath = "/license.txt"
@@ -258,8 +259,8 @@ func TestManager(t *testing.T) {
require.NoError(t, err)
t.Run("Should provide expected host environment variables", func(t *testing.T) {
require.Len(t, ctx.env, 3)
require.EqualValues(t, []string{"GF_VERSION=7.0.0", "GF_EDITION=Enterprise", "GF_ENTERPRISE_LICENSE_PATH=/license.txt"}, ctx.env)
require.Len(t, ctx.env, 4)
require.EqualValues(t, []string{"GF_VERSION=7.0.0", "GF_EDITION=Enterprise", "GF_ENTERPRISE_LICENSE_PATH=/license.txt", "GF_ENTERPRISE_LICENSE_TEXT=testtoken"}, ctx.env)
})
})
})
@@ -383,6 +384,7 @@ func (tp *testPlugin) CallResource(ctx context.Context, req *backend.CallResourc
type testLicensingService struct {
edition string
hasLicense bool
tokenRaw string
}
func (t *testLicensingService) HasLicense() bool {
@@ -408,3 +410,7 @@ func (t *testLicensingService) LicenseURL(user *models.SignedInUser) string {
func (t *testLicensingService) HasValidLicense() bool {
return false
}
func (t *testLicensingService) TokenRaw() string {
return t.tokenRaw
}

View File

@@ -3,6 +3,7 @@ package plugins
import (
"net/url"
"path"
"path/filepath"
"strings"
"github.com/grafana/grafana/pkg/setting"
@@ -63,7 +64,9 @@ func (fp *FrontendPluginBase) handleModuleDefaults() {
// Previously there was an assumption that the plugin directory
// should be public/app/plugins/<plugin type>/<plugin id>
// However this can be an issue if the plugin directory should be renamed to something else
currentDir := path.Base(fp.PluginDir)
currentDir := filepath.Base(fp.PluginDir)
// use path package for the following statements
// because these are not file paths
fp.Module = path.Join("app/plugins", fp.Type, currentDir, "module")
fp.BaseUrl = path.Join("public/app/plugins", fp.Type, currentDir)
}

View File

@@ -20,7 +20,7 @@ func RegisterService(instance Service) {
services = append(services, &Descriptor{
Name: reflect.TypeOf(instance).Elem().Name(),
Instance: instance,
InitPriority: Low,
InitPriority: Medium,
})
}
@@ -114,6 +114,7 @@ func IsDisabled(srv Service) bool {
type Priority int
const (
High Priority = 100
Low Priority = 0
High Priority = 100
Medium Priority = 50
Low Priority = 0
)

View File

@@ -538,7 +538,6 @@ func (server *Server) requestMemberOf(entry *ldap.Entry) ([]string, error) {
getAttribute(groupIDAttribute, group),
)
}
break
}
}

View File

@@ -56,3 +56,7 @@ func (l *OSSLicensingService) Init() error {
func (*OSSLicensingService) HasValidLicense() bool {
return false
}
func (*OSSLicensingService) TokenRaw() string {
return ""
}

View File

@@ -25,14 +25,18 @@ type ProvisioningService interface {
}
func init() {
registry.RegisterService(NewProvisioningServiceImpl(
func(path string) (dashboards.DashboardProvisioner, error) {
return dashboards.New(path)
},
notifiers.Provision,
datasources.Provision,
plugins.Provision,
))
registry.Register(&registry.Descriptor{
Name: "ProvisioningService",
Instance: NewProvisioningServiceImpl(
func(path string) (dashboards.DashboardProvisioner, error) {
return dashboards.New(path)
},
notifiers.Provision,
datasources.Provision,
plugins.Provision,
),
InitPriority: registry.Low,
})
}
func NewProvisioningServiceImpl(

View File

@@ -11,11 +11,7 @@ import (
)
func init() {
registry.Register(&registry.Descriptor{
Name: "SearchService",
Instance: &SearchService{},
InitPriority: 20,
})
registry.RegisterService(&SearchService{})
}
type Query struct {

View File

@@ -75,6 +75,8 @@ func GetSystemStats(query *models.GetSystemStatsQuery) error {
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_provisioning") + `) AS provisioned_dashboards,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_snapshot") + `) AS snapshots,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_version") + `) AS dashboard_versions,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("annotation") + `) AS annotations,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("team") + `) AS teams,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("user_auth_token") + `) AS auth_tokens,`)

View File

@@ -9,6 +9,7 @@ import (
"net/http"
"net/url"
"path"
"sort"
"strings"
"time"
@@ -193,6 +194,9 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
queryResult.Error = err
return queryResult, nil
}
applyInsightsMetricAlias(frame, query.Alias)
queryResult.Dataframes = tsdb.NewDecodedDataFrames(data.Frames{frame})
return queryResult, nil
}
@@ -250,3 +254,62 @@ func (e *ApplicationInsightsDatasource) getPluginRoute(plugin *plugins.DataSourc
return pluginRoute, pluginRouteName, nil
}
// formatApplicationInsightsLegendKey builds the legend key or timeseries name
// Alias patterns like {{metric}} are replaced with the appropriate data values.
func formatApplicationInsightsLegendKey(alias string, metricName string, labels data.Labels) string {
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
keys = sort.StringSlice(keys)
result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
switch metaPartName {
case "metric":
return []byte(metricName)
case "dimensionname", "groupbyname":
return []byte(keys[0])
case "dimensionvalue", "groupbyvalue":
return []byte(lowerLabels[keys[0]])
}
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in
})
return string(result)
}
func applyInsightsMetricAlias(frame *data.Frame, alias string) {
if alias == "" {
return
}
for _, field := range frame.Fields {
if field.Type() == data.FieldTypeTime || field.Type() == data.FieldTypeNullableTime {
continue
}
displayName := formatApplicationInsightsLegendKey(alias, field.Name, field.Labels)
if field.Config == nil {
field.Config = &data.FieldConfig{}
}
field.Config.DisplayName = displayName
}
}

View File

@@ -106,6 +106,11 @@ func InsightsMetricsResultToFrame(mr MetricsResult, metric, agg string, dimensio
}
rowCounter++
}
if err := data.SortWideFrameFields(frame, dimensions...); err != nil {
return nil, err
}
return frame, nil
}

View File

@@ -18,6 +18,7 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
name string
testFile string
metric string
alias string
agg string
dimensions []string
expectedFrame func() *data.Frame
@@ -66,7 +67,7 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
},
},
{
name: "segmented series",
name: "multi segmented series",
testFile: "applicationinsights/4-application-insights-response-metrics-multi-segmented.json",
metric: "traces/count",
agg: "sum",
@@ -77,12 +78,8 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
time.Date(2020, 6, 25, 16, 15, 32, 14e7, time.UTC),
time.Date(2020, 6, 25, 16, 16, 0, 0, time.UTC),
}),
data.NewField("traces/count", data.Labels{"client/city": "Washington", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
data.NewField("traces/count", data.Labels{"client/city": "Tokyo", "client/countryOrRegion": "Japan"}, []*float64{
nil,
}),
data.NewField("traces/count", data.Labels{"client/city": "Des Moines", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
pointer.Float64(1),
}),
data.NewField("traces/count", data.Labels{"client/city": "", "client/countryOrRegion": "United States"}, []*float64{
@@ -93,10 +90,54 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
nil,
pointer.Float64(3),
}),
data.NewField("traces/count", data.Labels{"client/city": "Des Moines", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
pointer.Float64(1),
}),
data.NewField("traces/count", data.Labels{"client/city": "Washington", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
nil,
}),
)
return frame
},
},
{
name: "segmented series with alias",
testFile: "applicationinsights/4-application-insights-response-metrics-multi-segmented.json",
metric: "traces/count",
alias: "{{ metric }}: Country,City: {{ client/countryOrRegion }},{{ client/city }}",
agg: "sum",
dimensions: []string{"client/countryOrRegion", "client/city"},
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("StartTime", nil, []time.Time{
time.Date(2020, 6, 25, 16, 15, 32, 14e7, time.UTC),
time.Date(2020, 6, 25, 16, 16, 0, 0, time.UTC),
}),
data.NewField("traces/count", data.Labels{"client/city": "Tokyo", "client/countryOrRegion": "Japan"}, []*float64{
nil,
pointer.Float64(1),
}),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: Japan,Tokyo"}),
data.NewField("traces/count", data.Labels{"client/city": "", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(11),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,"}),
data.NewField("traces/count", data.Labels{"client/city": "Chicago", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(3),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Chicago"}),
data.NewField("traces/count", data.Labels{"client/city": "Des Moines", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
pointer.Float64(1),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Des Moines"}),
data.NewField("traces/count", data.Labels{"client/city": "Washington", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
nil,
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Washington"}),
)
return frame
@@ -110,6 +151,9 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
frame, err := InsightsMetricsResultToFrame(res, tt.metric, tt.agg, tt.dimensions)
require.NoError(t, err)
applyInsightsMetricAlias(frame, tt.alias)
if diff := cmp.Diff(tt.expectedFrame(), frame, data.FrameTestCompareOptions()...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}

View File

@@ -3,6 +3,7 @@ package azuremonitor
import (
"encoding/json"
"fmt"
"math"
"strconv"
"time"
@@ -115,7 +116,21 @@ var realConverter = data.FieldConverter{
}
jN, ok := v.(json.Number)
if !ok {
return nil, fmt.Errorf("unexpected type, expected json.Number but got %T", v)
s, sOk := v.(string)
if sOk {
switch s {
case "Infinity":
f := math.Inf(0)
return &f, nil
case "-Infinity":
f := math.Inf(-1)
return &f, nil
case "NaN":
f := math.NaN()
return &f, nil
}
}
return nil, fmt.Errorf("unexpected type, expected json.Number but got type %T for value %v", v, v)
}
f, err := jN.Float64()
if err != nil {

View File

@@ -2,6 +2,7 @@ package azuremonitor
import (
"encoding/json"
"math"
"os"
"path/filepath"
"testing"
@@ -119,6 +120,21 @@ func TestLogTableToFrame(t *testing.T) {
return frame
},
},
{
name: "nan and infinity in real response",
testFile: "loganalytics/8-log-analytics-response-nan-inf.json",
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("XInf", nil, []*float64{pointer.Float64(math.Inf(0))}),
data.NewField("XInfNeg", nil, []*float64{pointer.Float64(math.Inf(-2))}),
data.NewField("XNan", nil, []*float64{pointer.Float64(math.NaN())}),
)
frame.Meta = &data.FrameMeta{
Custom: &LogAnalyticsMeta{ColumnTypes: []string{"real", "real", "real"}},
}
return frame
},
},
}
for _, tt := range tests {

View File

@@ -338,6 +338,17 @@ func formatAzureMonitorLegendKey(alias string, resourceName string, metricName s
endIndex := strings.Index(seriesID, "/providers")
resourceGroup := seriesID[startIndex:endIndex]
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
keys = sort.StringSlice(keys)
result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
@@ -359,23 +370,15 @@ func formatAzureMonitorLegendKey(alias string, resourceName string, metricName s
return []byte(metricName)
}
keys := make([]string, 0, len(labels))
if metaPartName == "dimensionname" || metaPartName == "dimensionvalue" {
for k := range labels {
keys = append(keys, k)
}
keys = sort.StringSlice(keys)
}
if metaPartName == "dimensionname" {
return []byte(keys[0])
}
if metaPartName == "dimensionvalue" {
return []byte(labels[keys[0]])
return []byte(lowerLabels[keys[0]])
}
if v, ok := labels[metaPartName]; ok {
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in

View File

@@ -374,7 +374,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
name: "multiple dimension time series response with label alias",
responseFile: "7-azure-monitor-response-multi-dimension.json",
mockQuery: &AzureMonitorQuery{
Alias: "{{resourcegroup}} {Blob Type={{blobtype}}, Tier={{tier}}}",
Alias: "{{resourcegroup}} {Blob Type={{blobtype}}, Tier={{Tier}}}",
UrlComponents: map[string]string{
"resourceName": "grafana",
},

View File

@@ -105,7 +105,7 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
req, err := e.createRequest(ctx, e.dsInfo)
if err != nil {
queryResultError(err)
return queryResultError(err)
}
req.URL.Path = path.Join(req.URL.Path, "query")
req.URL.RawQuery = query.Params.Encode()
@@ -129,30 +129,30 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
res, err := ctxhttp.Do(ctx, e.httpClient, req)
if err != nil {
queryResultError(err)
return queryResultError(err)
}
body, err := ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
queryResultError(err)
return queryResultError(err)
}
if res.StatusCode/100 != 2 {
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
queryResultError(fmt.Errorf("Request failed status: %v", res.Status))
return queryResultError(fmt.Errorf("Request failed status: %v %w", res.Status, fmt.Errorf(string(body))))
}
var logResponse AzureLogAnalyticsResponse
d := json.NewDecoder(bytes.NewReader(body))
d.UseNumber()
err = d.Decode(&logResponse)
if err != nil {
queryResultError(err)
return queryResultError(err)
}
t, err := logResponse.GetPrimaryResultTable()
if err != nil {
queryResultError(err)
return queryResultError(err)
}
frame, err := LogTableToFrame(t)

View File

@@ -0,0 +1,29 @@
{
"tables": [
{
"name": "PrimaryResult",
"columns": [
{
"name": "XInf",
"type": "real"
},
{
"name": "XInfNeg",
"type": "real"
},
{
"name": "XNan",
"type": "real"
}
],
"rows": [
[
"Infinity",
"-Infinity",
"NaN"
]
]
}
]
}

View File

@@ -139,6 +139,7 @@ func (query *cloudMonitoringQuery) buildDeepLink() string {
q := u.Query()
q.Set("project", query.ProjectName)
q.Set("Grafana_deeplink", "true")
pageState := map[string]interface{}{
"xyChart": map[string]interface{}{

View File

@@ -1019,6 +1019,9 @@ func verifyDeepLink(dl string, expectedTimeSelection map[string]string, expected
params, err = url.ParseQuery(u.RawQuery)
So(err, ShouldBeNil)
deepLinkParam := params.Get("Grafana_deeplink")
So(deepLinkParam, ShouldNotBeEmpty)
pageStateStr := params.Get("pageState")
So(pageStateStr, ShouldNotBeEmpty)

Some files were not shown because too many files have changed in this diff Show More