Compare commits

...

87 Commits

Author SHA1 Message Date
Sofia Papagiannaki
3fa63cfc34 Fix linting issues 2020-02-20 14:03:49 +02:00
Arve Knudsen
f416c57743 Docker: Downgrade to 18.04 LTS (#22313)
Downgrade Ubuntu Docker images to 18.04 LTS since 18.10 has reached EOL.
2020-02-20 14:03:49 +02:00
Carl Bergquist
4127f8374c Metrics: Add gauge for requests currently in flight (#22168)
Add gauge for requests currently in flight.

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>
(cherry picked from commit b0b46991ec)
2020-02-20 14:03:49 +02:00
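The gauge above tracks how many HTTP requests Grafana is serving at any moment. Below is a minimal sketch of the idea using the standard prometheus/client_golang API; the metric name and middleware wiring are illustrative, not Grafana's actual code.

package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

// inFlightRequests is incremented when a request starts and decremented when
// it finishes, so its current value is the number of requests in flight.
var inFlightRequests = promauto.NewGauge(prometheus.GaugeOpts{
	Namespace: "grafana",
	Name:      "http_request_in_flight", // illustrative name
	Help:      "Requests currently being served.",
})

// instrument wraps a handler so every request updates the gauge.
func instrument(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		inFlightRequests.Inc()
		defer inFlightRequests.Dec()
		next.ServeHTTP(w, r)
	})
}

func main() {
	mux := http.NewServeMux()
	mux.Handle("/metrics", promhttp.Handler())
	http.ListenAndServe(":3000", instrument(mux))
}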
Arve Knudsen
7163726493 OAuth: Enforce auto_assign_org_id setting when role mapping enabled using Generic OAuth (#22268)
* OAuth: Make use of auto_assign_org_id setting

(cherry picked from commit 8d1bef3769)
2020-02-20 14:03:49 +02:00
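The behaviour described above comes down to a single decision: when auto_assign_org is enabled and auto_assign_org_id is set, the role mapped from Generic OAuth lands in that org instead of the hard-coded org 1. The helper below is a hedged illustration of that rule, not Grafana's actual function (the real change appears in the OAuthLogin diff further down).

package login

// pickOrgID returns the org that a mapped OAuth role should be applied to:
// the configured auto-assign org when enabled, otherwise the default org 1.
// Illustrative only; parameter names mirror the auto_assign_org settings.
func pickOrgID(autoAssignOrg bool, autoAssignOrgID int) int64 {
	if autoAssignOrg && autoAssignOrgID > 0 {
		return int64(autoAssignOrgID)
	}
	return 1
}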
Dominik Prokop
daa8035c48 Circle: Introduce es-check to branches & pr workflow (#21677)
* Transpile selected es6 node deps

* Add es-check to build-fast-frontend

* change es-check is used

* transpile react-hook-form

(cherry picked from commit 814020c05c)
2020-02-20 14:03:49 +02:00
Andrej Ocenas
448cf2a74d Elastic: Replace range as number not string (#22173)
(cherry picked from commit 9c55500cc0)
2020-02-20 14:03:49 +02:00
Sofia Papagiannaki
b7f77459c4 release 6.6.2 2020-02-20 14:03:49 +02:00
Dominik Prokop
4d02406d6d Make Explore panel link work when Grafana is served from a sub url (#22202)
* Make explore url work when Grafana is served from a sub url

* Update public/app/core/utils/explore.ts

(cherry picked from commit fa34a7692e)
2020-02-20 14:03:49 +02:00
Marcus Efraimsson
b2034d693d API: Improve recovery middleware when response already been written (#22256)
Suppresses stacktrace in recovery middleware if the error is
http.ErrAbortHandler.
Skips writing a response error in recovery middleware if the
response has already been written.
Skips trying to rotate the auth token if the response has already
been written.
Skips adding default response headers if the response has
already been written.

Fixes #15728
Ref #18082

Co-Authored-By: Arve Knudsen <arve.knudsen@gmail.com>
(cherry picked from commit fd52570b7f)
2020-02-20 14:03:49 +02:00
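The recovery-middleware behaviour described above hinges on two checks: the special http.ErrAbortHandler panic value (a client that went away) and whether anything has already been written to the response. Below is a rough sketch of that pattern; the response-tracking wrapper and names are assumptions, not Grafana's implementation.

package middleware

import (
	"log"
	"net/http"
	"runtime/debug"
)

// statusRecorder remembers whether anything was written to the response,
// so the recovery handler can avoid writing a second, conflicting response.
type statusRecorder struct {
	http.ResponseWriter
	written bool
}

func (w *statusRecorder) WriteHeader(code int) {
	w.written = true
	w.ResponseWriter.WriteHeader(code)
}

func (w *statusRecorder) Write(b []byte) (int, error) {
	w.written = true
	return w.ResponseWriter.Write(b)
}

// Recovery converts panics into 500 responses, stays quiet for
// http.ErrAbortHandler, and never writes to an already-written response.
func Recovery(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		rec := &statusRecorder{ResponseWriter: w}
		defer func() {
			if err := recover(); err != nil {
				if err == http.ErrAbortHandler {
					// Client aborted the request; log without a stack trace.
					log.Printf("client aborted request to %s", r.URL.Path)
					return
				}
				log.Printf("panic serving %s: %v\n%s", r.URL.Path, err, debug.Stack())
				if !rec.written {
					http.Error(rec, "internal server error", http.StatusInternalServerError)
				}
			}
		}()
		next.ServeHTTP(rec, r)
	})
}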
Adrian Coveney
95e304b277 Reorder cipher suites for better security (#22101)
Puts the cipher suites with Forward Secrecy at or near the top, keeping any TLS v1.3 suites first, and follows best-practice guides for the ordering of the rest. There is no change to the selection of suites, only to their ordering.

(cherry picked from commit 39e0ce305b)
2020-02-20 14:03:49 +02:00
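For context, the ordering principle above maps directly onto Go's crypto/tls configuration: ECDHE (forward-secret) suites are listed ahead of the legacy RSA key-exchange suites, while Go negotiates TLS 1.3 suites on its own. The snippet below is a standalone illustration, not the exact list Grafana uses (that list is visible in the server diff further down).

package tlsconfig

import "crypto/tls"

// preferredCipherSuites puts ECDHE (forward-secret) suites ahead of the
// legacy RSA key-exchange suites. TLS 1.3 suites are not listed because Go
// does not allow configuring them via CipherSuites; it negotiates them itself.
var preferredCipherSuites = []uint16{
	tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
	tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
	tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
	tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
	tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
	tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
	// Non-PFS fallbacks last.
	tls.TLS_RSA_WITH_AES_128_GCM_SHA256,
	tls.TLS_RSA_WITH_AES_128_CBC_SHA,
}

// newTLSConfig is a minimal server-side TLS config using that ordering.
func newTLSConfig() *tls.Config {
	return &tls.Config{
		MinVersion:               tls.VersionTLS12,
		PreferServerCipherSuites: true,
		CipherSuites:             preferredCipherSuites,
	}
}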
Carl Bergquist
1fb7155a58 Data proxy: Log proxy errors using Grafana logger (#22174)
Any errors logged by http.ReverseProxy are now forwarded to
Grafana's logger and include more contextual information such as
level (error), user id, org id, username, proxy path, referer and
IP address.

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>
(cherry picked from commit ff9556229a)
2020-02-20 14:03:49 +02:00
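The data-proxy change above routes http.ReverseProxy errors into Grafana's own logger together with request context. A minimal sketch using the standard library's ErrorHandler hook follows; the logger and the fields pulled from the request are placeholders, not Grafana's actual types.

package proxy

import (
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
)

// newDataProxy builds a reverse proxy whose errors are logged with request
// context instead of going to Go's default logger.
func newDataProxy(target *url.URL) *httputil.ReverseProxy {
	p := httputil.NewSingleHostReverseProxy(target)
	p.ErrorHandler = func(w http.ResponseWriter, r *http.Request, err error) {
		// In Grafana this would be the structured logger with user/org ids;
		// here we just log the fields we can read from the request.
		log.Printf("data proxy error: err=%v path=%s referer=%s remote=%s",
			err, r.URL.Path, r.Referer(), r.RemoteAddr)
		w.WriteHeader(http.StatusBadGateway)
	}
	return p
}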
Marcus Andersson
7656e72df9 TimePicker: fixing weird behavior with calendar when switching between months/years (#22253)
* fixed issues with timepicker closing when month is changed.

* removed console.logs.

* fixed so calendar isn't changing size when selecting several dates.

* Prevent calendar from closing when changing year.

* fixed according to PR feedback.

(cherry picked from commit 08bf2a5452)
2020-02-20 14:03:49 +02:00
Carl Bergquist
61ac3085de Auth: Don't rotate auth token when requests are cancelled by client (#22106)
If the client closes the connection we should not
rotate the token, since the client will never receive the
new token.

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>
(cherry picked from commit fe16028e02)
2020-02-20 14:03:49 +02:00
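The fix above avoids rotating the session token when the client has already gone away; the request context is cancelled once the client closes the connection, so the check is cheap. A small illustrative sketch (rotateToken is a stand-in, not Grafana's API):

package auth

import (
	"context"
	"errors"
	"net/http"
)

// maybeRotateToken rotates the auth token only if the client is still
// connected; otherwise the new token could never reach the client and the
// old cookie would be invalidated for nothing.
func maybeRotateToken(r *http.Request, rotateToken func(ctx context.Context) error) error {
	if err := r.Context().Err(); errors.Is(err, context.Canceled) {
		// Client closed the connection; skip rotation.
		return nil
	}
	return rotateToken(r.Context())
}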
Andrej Ocenas
d016a08303 Elastic: Map level field based on config. (#22182)
* Map level field based on config.

* Fix type

(cherry picked from commit 934d93ad94)
2020-02-20 14:03:49 +02:00
Brian Gann
d2ab56961f API: Fix redirect issue when configured to use a subpath (#21652)
* request uri will contain the subpath

(cherry picked from commit 0e2d874ecf)
2020-02-20 14:03:49 +02:00
kay delaney
c0cfcc2b32 Datasource/Loki: Fixes issue where live tailing displayed date as invalid (#22128)
Closes #21929

(cherry picked from commit 003fb4a3d0)
2020-02-20 14:03:49 +02:00
Ivana Huckova
90e7909cef Loki, Prometheus: Fix PromQL and LogQL syntax highlighting (#21944)
* Loki, Prometheus: Fix syntax to not highlight # in quotes as a comment in queryField

* Loki, Prometheus: Fix syntax to not highlight # in quotes as a comment in queryField

* Fix regex

* PromQL, LogQL: Update syntax

* LogQL, PromQL highlighting: Add tests

(cherry picked from commit 1448767c08)
2020-02-20 14:03:49 +02:00
Dominik Prokop
23f4477cfd Links: Assure base url when single stat, panel and data links are built (#21956)
* Assure base url when single stat, panel and data links are built

* Update public/app/features/dashboard/dashgrid/PanelHeader/PanelHeaderCorner.tsx

* Update public/app/features/panel/panellinks/link_srv.ts

* Update public/app/features/panel/panellinks/link_srv.ts

* Update public/app/features/panel/panellinks/link_srv.ts

* Update public/app/features/panel/panellinks/link_srv.ts

* Review updates

* Remove unnecessary code

(cherry picked from commit dd37d003ef)
2020-02-20 14:03:49 +02:00
Marcus Efraimsson
5f42c258a5 Chore: Resolve random failure with golangci-lint (#21970)
(cherry picked from commit 285ebd3413)
2020-02-20 14:03:49 +02:00
Lukas Siatka
5f786b0824 Datasource: updates PromExploreQueryEditor to prevent it from throwing error on edit (#21605)
* Datasource: updates PromExploreQueryEditor - rewrite to functional component

* Datasource: updates PromQueryField - moves an extra field from children to the separate prop

* Datasource: adds PromExploreExtraField

* Datasource: updates PromExploreQueryEditor - fixes typo

* Datasource: updates prometheus explore editor snapshots

* Datasource: updates PromExploreExtraField export

* Datasource: removes unnecessary div from PromExploreQueryEditor

* Datasource: adds basic PromExploreExtraField snapshot test

* Datasource: adds basic PromExploreQueryEditor test

* Datasource: updates PromExploreQueryEditor snapshot to fix timezone issues

* Datasource: updates PromExploreQueryEditor - onChangeQueryStep cleanup

* Datasource: updates PromExploreQueryEditor test to check ExtraFieldElement render

* Datasource: simplified PromExploreQueryEditor onStepChange method

* Datasource: updates Prometheus module import

* Datasource: updates PromExploreQueryEditor test

* Datasource: updates PromExploreQueryEditor tests

* Datasource: fixes PromExploreQueryEditor error on empty interval init

* Datasource: adds a tooltip to PromExploreExtraField mounted in PromExploreQueryEditor

* Datasource: updates PromExploreQueryEditor snapshots

(cherry picked from commit 2d3c5064e1)
2020-02-20 14:03:49 +02:00
Ivana Huckova
5071be541b @grafana/ui: Fix displaying of bars in React Graph (#21968)
(cherry picked from commit 9b9f1ad1b9)
2020-02-20 14:03:49 +02:00
Ivana Huckova
b6564b85b1 Elastic: Limit the number of datapoints for the counts query (#21937)
(cherry picked from commit 89d1ab37de)
2020-02-20 14:03:49 +02:00
Hugo Häggmark
4770126073 Chore: Removes Cypress record (#21782)
(cherry picked from commit 9e5eb05769)
2020-02-07 09:48:37 +01:00
Hugo Häggmark
21bf8b71bc Chore: Fixes test based on master branch 2020-02-06 12:53:10 +03:00
Alexander Zobnin
dcb8beecb1 release 6.6.1 2020-02-06 12:53:10 +03:00
Leonard Gram
64568a1938 Quota: Makes sure we provide the request context to the quota service (#21949)
It was missing for ldap_login, which meant that the first signup failed
for users with LDAP+quota enabled. There are also potential cases where we
can't provide a request context (background jobs); these are also covered,
but need a refactoring.

(cherry picked from commit 59530e4758)
2020-02-06 12:53:10 +03:00
Sofia Papagiannaki
9b3241a629 Annotations: Change indices and rewrites annotation find query to improve database query performance (#21915)
Drops old indices, creates new ones, and rewrites the annotation find query
to address performance issues when querying the annotation table
with a large number of rows.

Fixes #21902

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>
Co-authored-by: Kyle Brandt <kyle@kbrandt.com>
(cherry picked from commit 5ae95190ed)
2020-02-06 12:53:10 +03:00
Hugo Häggmark
4bc6bf5e54 Prometheus: Fixes default step value for annotation query (#21934)
Fixes #21914

(cherry picked from commit 26d71c90f5)
2020-02-06 12:53:10 +03:00
Dominik Prokop
828ba74674 Dashboard edit: Fix 404 when making dashboard editable
(cherry picked from commit 90d415861d)
2020-02-06 12:53:10 +03:00
Mark Carey
568bbf4ff7 Metrics: Adds back missing summary quantiles (#21858)
Adds back missing summary quantiles which were mistakenly
removed in v6.6.0.

Fixes #21857

(cherry picked from commit 28230bbf52)
2020-02-06 12:53:10 +03:00
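For reference on the commit above: Prometheus summaries only expose quantile series when Objectives is set, so dropping them is what "missing summary quantiles" looks like. A hedged example with the client library (metric name and labels are illustrative):

package metrics

import "github.com/prometheus/client_golang/prometheus"

// httpRequestDuration is a summary that exports the 0.5, 0.9 and 0.99
// quantiles. Leaving Objectives empty produces a summary without any
// quantile series.
var httpRequestDuration = prometheus.NewSummaryVec(
	prometheus.SummaryOpts{
		Namespace:  "grafana",
		Name:       "http_request_duration_seconds", // illustrative
		Help:       "HTTP request latencies in seconds.",
		Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
	},
	[]string{"handler", "method"},
)

func init() {
	prometheus.MustRegister(httpRequestDuration)
}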
Ivana Huckova
17fc5251e1 grafana/ui: Fix displaying of bars in React Graph (#21922)
(cherry picked from commit 88226672f1)
2020-02-06 12:53:10 +03:00
Erik Sundell
b1d3fec9a8 Fix formatting (#21894)
(cherry picked from commit 78b1ab8360)
2020-02-06 12:53:10 +03:00
Edgar Orendain
12d3576666 Graph Panel: Fixed typo in thresholds form (#21903)
(cherry picked from commit bb8e15ceab)
2020-02-06 12:53:10 +03:00
Tobias Skarhed
476f9b6224 Disable logging in button (#21900)
(cherry picked from commit 959c49f6d8)
2020-02-06 12:53:10 +03:00
Jorge Luis Betancourt
29c6fa4114 Datasource: Show access (Browser/Server) select on the Prometheus datasource (#21833)
* Datasource: Show access (Browser/Server) select on the Prometheus datasource configuration editor

* Trigger build

(cherry picked from commit 96099636dc)
2020-02-06 12:53:10 +03:00
Shavonn Brown
248f73a00f deps so can mock in tests (#21827)
(cherry picked from commit c4e3110034)
2020-02-06 12:53:10 +03:00
Peter Holmberg
4fa2e9b90a Fix: Reimplement HideFromTabs in Tabs component (#21863)
* reimplement hidefromtabs

* remove console log

* going with option b instead

* less explicit

(cherry picked from commit 93195facba)
2020-02-06 12:53:10 +03:00
Marcus Efraimsson
04c2e41733 Image Rendering: Fix render of graph panel legend aligned to the right using Grafana image renderer plugin/service (#21854)
Don't render the body--phantomjs class on the body element when
the PhantomJS renderer is not in use.

Fixes #21830

(cherry picked from commit 6e80315531)
2020-02-06 12:53:10 +03:00
Dominik Prokop
4c21a1e016 grafana/toolkit: Fix failing linter when there were lint issues (#21849)
(cherry picked from commit f8654a3a2f)
2020-02-06 12:53:10 +03:00
Torkel Ödegaard
604a603e82 DatasourceSettings: Fixed issue navigating away from data source settings page (#21841)
(cherry picked from commit b7faa9023e)
2020-02-06 12:53:10 +03:00
Dominik Prokop
9dd964f503 AppPageCtrl: Fix digest issue with app page initialisation (#21847)
(cherry picked from commit 050d902ed1)
2020-02-06 12:53:10 +03:00
Dominik Prokop
338c2b738e Fix digest issue with query part editor's actions menu (#21834)
(cherry picked from commit 1ef91e3fc4)
2020-02-06 12:53:10 +03:00
Torkel Ödegaard
5bc6a3505d Graphite: Fixed issue where functions with multiple required params and no defaults caused params that could not be edited (groupByNodes, groupByTags) (#21814)
* Graphite: Fixed issue with functions with multiple required params and no defaults

* removed some prev changes

* Update public/app/plugins/datasource/graphite/func_editor.ts

Co-authored-by: Dominik Prokop <dominik.prokop@grafana.com>
(cherry picked from commit 0fd088c757)
2020-02-06 12:53:10 +03:00
Evgeny Bibko
3b5efdbc84 TimePicker: Should display in kiosk mode (#21816)
* Timepicker class fixed

* Missed arrow in dashboard title

(cherry picked from commit 7638156666)
2020-02-06 12:53:10 +03:00
Torkel Ödegaard
a3cea78f40 StatPanels: Fixed migration from old singlestat and default min & max being copied even when gauge was disabled (#21820)
(cherry picked from commit 13948c0b76)
2020-02-06 12:53:10 +03:00
Marcus Andersson
36f02aaef7 Fixed strict errors (#21823)
(cherry picked from commit ffe0a1f975)
2020-02-06 12:53:10 +03:00
Marcus Andersson
2aefb73876 Fix: prevents the BarGauge from exploding when the datasource returns empty result. (#21791)
* Fixed issue where gauge threw an error on empty result.

* Some refactorings to improve the code.

* Added some tests to make sure this doesn't happen again.

(cherry picked from commit cab082438e)
2020-02-06 12:53:10 +03:00
Shavonn Brown
94c374d187 Azure Monitor: Fix Application Insights API key field to allow input (#21738)
* Fix update api key input

* update snapshot

(cherry picked from commit 0fa20cb231)
2020-02-06 12:53:10 +03:00
Andrej Ocenas
6eb60b943a Influxdb: Fix cascader when doing log query in explore (#21787)
* Fix cascader options and add tests

* Add comment

* Fix typo

(cherry picked from commit 85dad73e9d)
2020-02-06 12:53:10 +03:00
Leonard Gram
8c14a6e070 MSI: License for Enterprise (#21794)
(cherry picked from commit 20e96a9241)
2020-02-06 12:53:10 +03:00
Shavonn Brown
a069b5d639 Make importDataSourcePlugin cancelable (#21430)
* make importDataSourcePlugin cancelable

* fix imported plugin assignment

* init datasource plugin to redux

* remove commented

* testDataSource to redux

* add err console log

* isTesting is never used

* tests, loadError type

* more tests, testingStatus obj

(cherry picked from commit b3d5e678f4)
2020-02-06 12:53:10 +03:00
Marcus Efraimsson
49255fbb6a OpenTSDB: Add back missing ngInject (#21796)
Adds back the missing ngInject on the datasource constructor
to make it work again.

Fixes #21770

(cherry picked from commit b75412d6ae)
2020-02-06 12:53:10 +03:00
Emil Tullstedt
0c843ae8d2 Config: add meta feature toggle (#21786)
(cherry picked from commit e95bcc4ba2)
2020-02-06 12:53:10 +03:00
Ivana Huckova
52a5645c85 Logs panel: Rename labels to unique labels (#21783)
(cherry picked from commit b3bcbcccce)
2020-02-06 12:53:10 +03:00
Ryan McKinley
9ad66b7fed grafana/data: Add type for secure json in DataSourceAPI (#21772)
(cherry picked from commit 67c5531961)
2020-02-06 12:53:10 +03:00
kay delaney
af10ba3f1f Explore/Loki: Fix handling of legacy log row context request (#21767)
Closes #21695

(cherry picked from commit 7569a8608a)
2020-02-06 12:53:10 +03:00
Leonard Gram
5c11bbdfb4 release 6.6.0 2020-01-27 13:32:03 +01:00
Emil Tullstedt
872bc2d973 Footer: Display Grafana edition (#21717)
Co-authored-by: Torkel Ödegaard <torkel@grafana.com>
(cherry picked from commit 3fabbbff4d)
2020-01-27 13:32:03 +01:00
Andrej Ocenas
cbace87b56 Explore: Fix context view in logs, where some rows may have been filtered out. (#21729)
* Fix timestamp formats and use uid to filter context rows

* Remove timestamps from tests

(cherry picked from commit 0fda3c4f44)
2020-01-27 13:32:03 +01:00
Ryan McKinley
f59b9b6545 Toolkit: add canvas-mock to test setup (#21739)
(cherry picked from commit ed140346a7)
2020-01-27 13:32:03 +01:00
Tobias Skarhed
3ac81e50d7 TablePanel: Sanitize column link (#21735)
(cherry picked from commit 751eb2c8bb)
2020-01-27 13:32:03 +01:00
Tobias Skarhed
0378c66dcd Template vars: Add error message for failed query var (#21731)
(cherry picked from commit 4c41d7e7fb)
2020-01-27 13:32:03 +01:00
Torkel Ödegaard
79de911d0a Devenv: Fixed devenv dashboard template var datasource (#21715)
(cherry picked from commit b28eac2626)
2020-01-27 13:32:03 +01:00
Torkel Ödegaard
eecd09d1c8 Footer: added back missing footer to login page (#21720)
(cherry picked from commit 198f561541)
2020-01-27 13:32:03 +01:00
Marcus Efraimsson
14ae363aaa Admin: Viewer should not see link to teams in side menu (#21716)
Fixes the side menu so that viewers don't see a link to teams when the
editors_can_admin setting is enabled.

(cherry picked from commit 63a912629d)
2020-01-27 13:32:03 +01:00
Dominik Prokop
18a92cc540 Annotations: Fix issue with annotation queries editors (#21712)
(cherry picked from commit d9e1cb44c8)
2020-01-27 13:32:03 +01:00
Dominik Prokop
cfb8912200 grafana/ui: Remove path import from grafana-data (#21707)
(cherry picked from commit 5e87af8b2a)
2020-01-27 13:32:03 +01:00
Ivana Huckova
47c57a1b9d Loki: Fix Loki with repeated panels and interpolation for Explore (#21685)
(cherry picked from commit e75840737e)
2020-01-27 13:32:03 +01:00
Torkel Ödegaard
ce3f43c6d0 StatPanels: Fixed possible migration issue (#21681)
(cherry picked from commit 8266959681)
2020-01-27 13:32:03 +01:00
Dominik Prokop
6717d43921 PhantomJS: Fix rendering of panels using Prometheus datasource
In 043bb59 a usage of URLSearchParams was introduced, which is not supported by PhantomJS. @babel/polyfill (deprecated) does not contain a polyfill for URLSearchParams, hence the code (and Prometheus graph rendering) was failing in the PhantomJS environment.

The solution is to add https://www.npmjs.com/package/url-search-params-polyfill, which takes care of URLSearchParams support.

(cherry picked from commit cdfac32dfd)
2020-01-27 13:32:03 +01:00
Torkel Ödegaard
a951bab782 StatPanel: minor height tweak (#21663)
(cherry picked from commit a734cd3640)
2020-01-27 13:32:03 +01:00
Erik Sundell
841e140f5b Run query when region, namespace and metric changes (#21633)
(cherry picked from commit 296af36a6f)
2020-01-27 13:32:03 +01:00
kay delaney
bbd2014e9d Explore: Fixes some LogDetailsRow markup (#21671)
- Moves filter titles to icons rather than table cell
- Increases colspan of ad-hoc stats cell instead of
rendering empty cells for parsed fields

(cherry picked from commit a115729c55)
2020-01-27 13:32:03 +01:00
Sofia Papagiannaki
8ce48b98dc SQLStore: Fix PostgreSQL failure to create organisation for first time (#21648)
* Fix PostgreSQL failure to create organisation for first time

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>
(cherry picked from commit 2283ceec09)
2020-01-27 13:32:03 +01:00
Erik Sundell
60419f7e72 CloudWatch: Auto period snap to next higher period (#21659)
* Snap to next higher period instead of closest

* Adjust period calc

(cherry picked from commit 685c9043a8)
2020-01-27 13:32:03 +01:00
Torkel Ödegaard
38e4db88d1 Login: Better auto sizing of login logo (#21645)
(cherry picked from commit 741e1bb7e9)
2020-01-27 13:32:03 +01:00
Torkel Ödegaard
d619c529f0 Alert: Minor tweak to work with license warnings (#21654)
(cherry picked from commit c228cde2b6)
2020-01-27 13:32:03 +01:00
Ryan McKinley
a8643d89be Toolkit: copyIfNonExistent order swapped (#21653)
(cherry picked from commit aee07949a3)
2020-01-27 13:32:03 +01:00
Ivana Huckova
a7c52c7dc8 Explore: Fix log level color and add tests (#21646)
(cherry picked from commit 6feb4a3221)
2020-01-27 13:32:03 +01:00
Torkel Ödegaard
7ad14532a3 Templating: A way to support object syntax for global vars (#21634)
(cherry picked from commit 92ef8644c5)
2020-01-27 13:32:03 +01:00
kenju
23f977f000 CloudWatch: Add DynamoDB Accelerator (DAX) metrics & dimensions (#21644)
Closes #10494

(cherry picked from commit 935d447c6a)
2020-01-27 13:32:03 +01:00
Emil Hessman
c172fe8915 Plugins: Apply adhoc filter in Elasticsearch logs query (#21346)
Fixes #21086

(cherry picked from commit 25e2f1c2dd)
2020-01-27 13:32:03 +01:00
Ryan McKinley
ddeee1820d TestData: allow negative values for random_walk parameters (#21627)
(cherry picked from commit 5f14d62c0d)
2020-01-27 13:32:03 +01:00
Sofia Papagiannaki
f28fd41c3b Update musl checksums (#21621)
(cherry picked from commit 2021a2df74)
2020-01-27 13:32:03 +01:00
Erik Sundell
57fb967fec CloudWatch: Expand dimension value in alias correctly (#21626)
* Make sure dimension value is being returned, and not just label

* Fix typo

(cherry picked from commit a1733bb412)
2020-01-27 13:32:03 +01:00
Leonard Gram
9046263122 Build: adds missing filters required to build oss msi (#21618)
(cherry picked from commit 7e0890d57d)
2020-01-20 14:39:44 +01:00
Leonard Gram
2306826cff release 6.6.0-beta1 2020-01-20 13:34:39 +01:00
169 changed files with 3944 additions and 994 deletions

View File

@@ -125,7 +125,7 @@ jobs:
- node_modules
- run:
name: run end-to-end tests
command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests:ci'
command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests'
no_output_timeout: 5m
- store_artifacts:
path: public/e2e-tests/screenShots/theTruth
@@ -167,7 +167,7 @@ jobs:
- node_modules
- run:
name: run end-to-end tests
command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests:ci'
command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests'
no_output_timeout: 5m
- store_artifacts:
path: public/e2e-tests/screenShots/theTruth
@@ -419,6 +419,12 @@ jobs:
- run:
name: build grafana frontend
command: './scripts/build/build.sh --fast --frontend-only'
- run:
name: es-check install
command: 'yarn global add es-check'
- run:
name: es-check run
command: 'es-check es5 ./public/build/*.js'
- save_cache:
key: frontend-dependency-cache-{{ checksum "yarn.lock" }}
paths:
@@ -1211,6 +1217,7 @@ workflows:
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-master
- build-ee-msi:
requires:
- build-all-enterprise
@@ -1323,6 +1330,7 @@ workflows:
- shellcheck
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-release
- build-ee-msi:
requires:
- build-all-enterprise

View File

@@ -27,14 +27,14 @@ COPY packages packages
RUN yarn install --pure-lockfile
COPY Gruntfile.js tsconfig.json tslint.json .browserslistrc ./
COPY public public
COPY public public
COPY scripts scripts
COPY emails emails
ENV NODE_ENV production
RUN ./node_modules/.bin/grunt build
FROM ubuntu:18.10
FROM ubuntu:18.04
LABEL maintainer="Grafana team <hello@grafana.com>"
EXPOSE 3000

View File

@@ -66,7 +66,7 @@
{
"targetBlank": false,
"title": "Drill it down",
"url": "http://localhost:3000/d/wfTJJL5Wz/datalinks-source?var-seriesName=${__series.name}&var-labelDatacenter=${__series.labels.datacenter}&var-labelDatacenterRegion=${__series.labels[\"datacenter.region\"]}&var-valueTime=${__value.time}&var-valueNumeric=${__value.numeric}&var-valueText=${__value.text}"
"url": "/d/wfTJJL5Wz/datalinks-source?var-seriesName=${__series.name}&var-labelDatacenter=${__series.labels.datacenter}&var-labelDatacenterRegion=${__series.labels[\"datacenter.region\"]}&var-valueTime=${__value.time}&var-valueNumeric=${__value.numeric}&var-valueText=${__value.text}"
}
]
},
@@ -164,7 +164,7 @@
{
"targetBlank": false,
"title": "Drill it down",
"url": "http://localhost:3000/d/wfTJJL5Wz/datalinks-source?var-seriesName=${__series.name}&var-valueTime=${__value.time}&var-valueNumeric=${__value.numeric}&var-valueText=${__value.text}&var-fieldName=${__field.name}"
"url": "/d/wfTJJL5Wz/datalinks-source?var-seriesName=${__series.name}&var-valueTime=${__value.time}&var-valueNumeric=${__value.numeric}&var-valueText=${__value.text}&var-fieldName=${__field.name}"
}
]
},
@@ -246,7 +246,7 @@
{
"targetBlank": true,
"title": "Drill it down!",
"url": "http://localhost:3000/d/wfTJJL5Wz/datalinks-source\n?var-fieldName=${__field.name}\n&var-labelDatacenter=${__series.labels.datacenter}\n&var-labelDatacenterRegion=${__series.labels[\"datacenter.region\"]}\n&var-valueNumeric=${__value.numeric}\n&var-valueText=${__value.text}\n&var-valueCalc=${__value.calc}"
"url": "/d/wfTJJL5Wz/datalinks-source\n?var-fieldName=${__field.name}\n&var-labelDatacenter=${__series.labels.datacenter}\n&var-labelDatacenterRegion=${__series.labels[\"datacenter.region\"]}\n&var-valueNumeric=${__value.numeric}\n&var-valueText=${__value.text}\n&var-valueCalc=${__value.calc}"
}
],
"mappings": [
@@ -307,7 +307,7 @@
"links": [
{
"title": "Drill it down",
"url": "http://localhost:3000/d/wfTJJL5Wz/datalinks-source?var-fieldName=${__field.name}&var-labelDatacenter=${__series.labels.datacenter}&var-labelDatacenterRegion=${__series.labels[\"datacenter.region\"]}&var-valueNumeric=${__value.numeric}&var-valueText=${__value.text}&var-valueCalc=${__value.calc}"
"url": "/d/wfTJJL5Wz/datalinks-source?var-fieldName=${__field.name}&var-labelDatacenter=${__series.labels.datacenter}&var-labelDatacenterRegion=${__series.labels[\"datacenter.region\"]}&var-valueNumeric=${__value.numeric}&var-valueText=${__value.text}&var-valueCalc=${__value.calc}"
}
],
"mappings": [],

View File

@@ -222,7 +222,7 @@
"text": "A",
"value": ["A"]
},
"datasource": "TestData DB-1",
"datasource": "gdev-testdata",
"definition": "*",
"hide": 0,
"includeAll": true,
@@ -247,7 +247,7 @@
"text": "AA",
"value": ["AA"]
},
"datasource": "TestData DB-1",
"datasource": "gdev-testdata",
"definition": "$datacenter.*",
"hide": 0,
"includeAll": true,

View File

@@ -0,0 +1,71 @@
import { sleep, check, group } from 'k6';
import { createClient, createBasicAuthClient } from './modules/client.js';
import { createTestOrgIfNotExists, createTestdataDatasourceIfNotExists } from './modules/util.js';
export let options = {
noCookiesReset: true
};
let endpoint = __ENV.URL || 'http://localhost:3000';
const client = createClient(endpoint);
export const setup = () => {
const basicAuthClient = createBasicAuthClient(endpoint, 'admin', 'admin');
const orgId = createTestOrgIfNotExists(basicAuthClient);
const datasourceId = createTestdataDatasourceIfNotExists(basicAuthClient);
client.withOrgId(orgId);
return {
orgId: orgId,
datasourceId: datasourceId,
};
}
export default (data) => {
group("annotation by tag test", () => {
if (__ITER === 0) {
group("user authenticates thru ui with username and password", () => {
let res = client.ui.login('admin', 'admin');
check(res, {
'response status is 200': (r) => r.status === 200,
'response has cookie \'grafana_session\' with 32 characters': (r) => r.cookies.grafana_session[0].value.length === 32,
});
});
}
if (__ITER !== 0) {
group("batch tsdb requests with annotations by tag", () => {
const batchCount = 20;
const requests = [];
const payload = {
from: '1547765247624',
to: '1547768847624',
queries: [{
refId: 'A',
scenarioId: 'random_walk',
intervalMs: 10000,
maxDataPoints: 433,
datasourceId: data.datasourceId,
}]
};
requests.push({ method: 'GET', url: '/api/annotations?from=1580825186534&to=1580846786535' });
for (let n = 0; n < batchCount; n++) {
requests.push({ method: 'POST', url: '/api/tsdb/query', body: payload });
}
let responses = client.batch(requests);
for (let n = 0; n < batchCount; n++) {
check(responses[n], {
'response status is 200': (r) => r.status === 200,
});
}
});
}
});
sleep(5)
}
export const teardown = (data) => {}

View File

@@ -2,5 +2,5 @@
"npmClient": "yarn",
"useWorkspaces": true,
"packages": ["packages/*"],
"version": "6.6.0-pre"
"version": "6.6.2"
}

View File

@@ -3,7 +3,7 @@
"license": "Apache-2.0",
"private": true,
"name": "grafana",
"version": "6.6.0-pre",
"version": "6.6.2",
"repository": {
"type": "git",
"url": "http://github.com/grafana/grafana.git"
@@ -163,7 +163,6 @@
"jest-ci": "mkdir -p reports/junit && export JEST_JUNIT_OUTPUT_DIR=reports/junit && jest --ci --reporters=default --reporters=jest-junit --maxWorkers 2",
"e2e": "cd packages/grafana-e2e && yarn start --env BASE_URL=$BASE_URL,CIRCLE_SHA1=$CIRCLE_SHA1,SLOWMO=$SLOWMO --config integrationFolder=../../public/e2e-tests/integration,screenshotsFolder=../../public/e2e-tests/screenShots,videosFolder=../../public/e2e-tests/videos,fileServerFolder=./cypress,viewportWidth=1920,viewportHeight=1080,trashAssetsBeforeRuns=false",
"e2e-tests": "yarn e2e",
"e2e-tests:ci": "yarn e2e --record",
"e2e-tests:debug": "SLOWMO=1 yarn e2e --headed --no-exit",
"api-tests": "jest --notify --watch --config=devenv/e2e-api-tests/jest.js",
"storybook": "cd packages/grafana-ui && yarn storybook --ci",
@@ -266,6 +265,7 @@
"tether-drop": "https://github.com/torkelo/drop/tarball/master",
"tinycolor2": "1.4.1",
"tti-polyfill": "0.2.2",
"url-search-params-polyfill": "7.0.1",
"xss": "1.0.3"
},
"resolutions": {

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/data",
"version": "6.6.0-pre",
"version": "6.6.2",
"description": "Grafana Data Library",
"keywords": [
"typescript"

View File

@@ -1,5 +1,6 @@
import { Vector } from '../types/vector';
import { DataFrame } from '../types/dataFrame';
import { DisplayProcessor } from '../types';
/**
* This abstraction will present the contents of a DataFrame as if
@@ -55,6 +56,20 @@ export class DataFrameView<T = any> implements Vector<T> {
return this.data.length;
}
getFieldDisplayProcessor(colIndex: number): DisplayProcessor | null {
if (!this.dataFrame || !this.dataFrame.fields) {
return null;
}
const field = this.dataFrame.fields[colIndex];
if (!field || !field.display) {
return null;
}
return field.display;
}
get(idx: number) {
this.index = idx;
return this.obj;

View File

@@ -33,15 +33,16 @@ export type DataSourceOptionsType<DSType extends DataSourceApi<any, any>> = DSTy
export class DataSourcePlugin<
DSType extends DataSourceApi<TQuery, TOptions>,
TQuery extends DataQuery = DataSourceQueryType<DSType>,
TOptions extends DataSourceJsonData = DataSourceOptionsType<DSType>
TOptions extends DataSourceJsonData = DataSourceOptionsType<DSType>,
TSecureOptions = {}
> extends GrafanaPlugin<DataSourcePluginMeta<TOptions>> {
components: DataSourcePluginComponents<DSType, TQuery, TOptions> = {};
components: DataSourcePluginComponents<DSType, TQuery, TOptions, TSecureOptions> = {};
constructor(public DataSourceClass: DataSourceConstructor<DSType, TQuery, TOptions>) {
super();
}
setConfigEditor(editor: ComponentType<DataSourcePluginOptionsEditorProps<TOptions>>) {
setConfigEditor(editor: ComponentType<DataSourcePluginOptionsEditorProps<TOptions, TSecureOptions>>) {
this.components.ConfigEditor = editor;
return this;
}
@@ -131,7 +132,8 @@ interface PluginMetaQueryOptions {
export interface DataSourcePluginComponents<
DSType extends DataSourceApi<TQuery, TOptions>,
TQuery extends DataQuery = DataQuery,
TOptions extends DataSourceJsonData = DataSourceJsonData
TOptions extends DataSourceJsonData = DataSourceJsonData,
TSecureOptions = {}
> {
QueryCtrl?: any;
AnnotationsQueryCtrl?: any;
@@ -141,7 +143,7 @@ export interface DataSourcePluginComponents<
ExploreMetricsQueryField?: ComponentType<ExploreQueryFieldProps<DSType, TQuery, TOptions>>;
ExploreLogsQueryField?: ComponentType<ExploreQueryFieldProps<DSType, TQuery, TOptions>>;
ExploreStartPage?: ComponentType<ExploreStartPageProps>;
ConfigEditor?: ComponentType<DataSourcePluginOptionsEditorProps<TOptions>>;
ConfigEditor?: ComponentType<DataSourcePluginOptionsEditorProps<TOptions, TSecureOptions>>;
MetadataInspector?: ComponentType<MetadataInspectorProps<DSType, TQuery, TOptions>>;
}
@@ -276,7 +278,7 @@ export abstract class DataSourceApi<
*/
annotationQuery?(options: AnnotationQueryRequest<TQuery>): Promise<AnnotationEvent[]>;
interpolateVariablesInQueries?(queries: TQuery[]): TQuery[];
interpolateVariablesInQueries?(queries: TQuery[], scopedVars: ScopedVars | {}): TQuery[];
}
export interface MetadataInspectorProps<

View File

@@ -56,7 +56,6 @@ export interface LogRowModel {
logLevel: LogLevel;
raw: string;
searchWords?: string[];
timestamp: string; // ISO with nanosec precision
timeFromNow: string;
timeEpochMs: number;
timeLocal: string;

View File

@@ -52,6 +52,7 @@ export interface PanelModel<TOptions = any> {
id: number;
options: TOptions;
pluginVersion?: string;
scopedVars?: ScopedVars;
}
/**

View File

@@ -6,6 +6,7 @@ import {
getParser,
LogsParsers,
calculateStats,
getLogLevelFromKey,
} from './logs';
describe('getLoglevel()', () => {
@@ -23,6 +24,10 @@ describe('getLoglevel()', () => {
expect(getLogLevel('[Warn]')).toBe('warning');
});
it('returns correct log level when level is capitalized', () => {
expect(getLogLevel('WARN')).toBe(LogLevel.warn);
});
it('returns log level on line contains a log level', () => {
expect(getLogLevel('warn: it is looking bad')).toBe(LogLevel.warn);
expect(getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad')).toBe(LogLevel.warn);
@@ -33,6 +38,15 @@ describe('getLoglevel()', () => {
});
});
describe('getLogLevelFromKey()', () => {
it('returns correct log level', () => {
expect(getLogLevelFromKey('info')).toBe(LogLevel.info);
});
it('returns correct log level when level is capitalized', () => {
expect(getLogLevelFromKey('INFO')).toBe(LogLevel.info);
});
});
describe('calculateLogsLabelStats()', () => {
test('should return no stats for empty rows', () => {
expect(calculateLogsLabelStats([], '')).toEqual([]);

View File

@@ -33,7 +33,7 @@ export function getLogLevel(line: string): LogLevel {
}
export function getLogLevelFromKey(key: string): LogLevel {
const level = (LogLevel as any)[key];
const level = (LogLevel as any)[key.toLowerCase()];
if (level) {
return level;
}

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/e2e",
"version": "6.4.0-pre",
"version": "6.6.2",
"description": "Grafana End to End Test Library",
"keywords": [
"grafana",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/runtime",
"version": "6.6.0-pre",
"version": "6.6.2",
"description": "Grafana Runtime Library",
"keywords": [
"grafana",
@@ -21,8 +21,8 @@
"build": "grafana-toolkit package:build --scope=runtime"
},
"dependencies": {
"@grafana/data": "^6.6.0-pre",
"@grafana/ui": "^6.6.0-pre",
"@grafana/data": "6.6.2",
"@grafana/ui": "6.6.2",
"systemjs": "0.20.19",
"systemjs-plugin-css": "0.1.37"
},

View File

@@ -7,6 +7,7 @@ export interface BuildInfo {
commit: string;
isEnterprise: boolean; // deprecated: use licenseInfo.hasLicense instead
env: string;
edition: string;
latestVersion: string;
hasUpdate: boolean;
}
@@ -16,11 +17,14 @@ interface FeatureToggles {
inspect: boolean;
expressions: boolean;
newEdit: boolean;
meta: boolean;
}
interface LicenseInfo {
hasLicense: boolean;
expiry: number;
licenseUrl: string;
stateInfo: string;
}
export class GrafanaBootConfig {
@@ -60,8 +64,10 @@ export class GrafanaBootConfig {
inspect: false,
expressions: false,
newEdit: false,
meta: false,
};
licenseInfo: LicenseInfo = {} as LicenseInfo;
phantomJSRenderer = false;
constructor(options: GrafanaBootConfig) {
this.theme = options.bootData.user.lightTheme ? getTheme(GrafanaThemeType.Light) : getTheme(GrafanaThemeType.Dark);

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/toolkit",
"version": "6.6.0-pre",
"version": "6.6.2",
"description": "Grafana Toolkit",
"keywords": [
"grafana",
@@ -28,8 +28,8 @@
"dependencies": {
"@babel/core": "7.6.4",
"@babel/preset-env": "7.6.3",
"@grafana/data": "^6.6.0-pre",
"@grafana/ui": "^6.6.0-pre",
"@grafana/data": "6.6.2",
"@grafana/ui": "6.6.2",
"@types/command-exists": "^1.2.0",
"@types/execa": "^0.9.0",
"@types/expect-puppeteer": "3.3.1",
@@ -62,9 +62,12 @@
"html-webpack-plugin": "^3.2.0",
"inquirer": "^6.3.1",
"jest": "24.8.0",
"jest-canvas-mock": "2.1.2",
"jest-cli": "^24.8.0",
"jest-coverage-badges": "^1.1.2",
"jest-junit": "^6.4.0",
"less": "^3.10.3",
"less-loader": "^5.0.0",
"lodash": "4.17.15",
"md5-file": "^4.0.0",
"mini-css-extract-plugin": "^0.7.0",
@@ -95,9 +98,7 @@
"tslint-config-prettier": "^1.18.0",
"typescript": "3.7.2",
"url-loader": "^2.0.1",
"webpack": "4.35.0",
"less": "^3.10.3",
"less-loader": "^5.0.0"
"webpack": "4.35.0"
},
"_moduleAliases": {
"puppeteer": "node_modules/puppeteer-core"

View File

@@ -40,13 +40,13 @@ export const prepare = useSpinner<void>('Preparing', async () => {
await Promise.all([
// Copy only if local tsconfig does not exist. Otherwise this will work, but have odd behavior
copyIfNonExistent(
resolvePath(process.cwd(), 'tsconfig.json'),
resolvePath(__dirname, '../../config/tsconfig.plugin.local.json')
resolvePath(__dirname, '../../config/tsconfig.plugin.local.json'),
resolvePath(process.cwd(), 'tsconfig.json')
),
// Copy only if local prettierrc does not exist. Otherwise this will work, but have odd behavior
copyIfNonExistent(
resolvePath(process.cwd(), '.prettierrc.js'),
resolvePath(__dirname, '../../config/prettier.plugin.rc.js')
resolvePath(__dirname, '../../config/prettier.plugin.rc.js'),
resolvePath(process.cwd(), '.prettierrc.js')
),
]);
@@ -149,7 +149,9 @@ export const lintPlugin = useSpinner<Fixable>('Linting', async ({ fix }) => {
if (lintResults.length > 0) {
console.log('\n');
const failures: RuleFailure[] = lintResults.flat();
const failures = lintResults.reduce<RuleFailure[]>((failures, result) => {
return [...failures, ...result.failures];
}, []);
failures.forEach(f => {
// tslint:disable-next-line
console.log(

View File

@@ -50,7 +50,7 @@ export const jestConfig = (baseDir: string = process.cwd()) => {
const setupFile = getSetupFile(setupFilePath);
const shimsFile = getSetupFile(shimsFilePath);
const setupFiles = [setupFile, shimsFile].filter(f => f);
const setupFiles = [setupFile, shimsFile, 'jest-canvas-mock'].filter(f => f);
const defaultJestConfig = {
preset: 'ts-jest',
verbose: false,

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/ui",
"version": "6.6.0-pre",
"version": "6.6.2",
"description": "Grafana Components Library",
"keywords": [
"grafana",
@@ -25,7 +25,7 @@
"build": "grafana-toolkit package:build --scope=ui"
},
"dependencies": {
"@grafana/data": "^6.6.0-pre",
"@grafana/data": "6.6.2",
"@grafana/slate-react": "0.22.9-grafana",
"@torkelo/react-select": "2.1.1",
"@types/react-color": "2.17.0",
@@ -45,7 +45,7 @@
"rc-drawer": "3.0.2",
"rc-time-picker": "^3.7.2",
"react": "16.12.0",
"react-calendar": "2.18.1",
"react-calendar": "2.19.2",
"react-color": "2.17.0",
"react-custom-scrollbars": "4.2.1",
"react-dom": "16.12.0",

View File

@@ -54,6 +54,7 @@
background: none;
display: flex;
align-items: center;
.fa {
align-self: flex-end;
font-size: 21px;
@@ -78,6 +79,11 @@
.alert-body {
flex-grow: 1;
a {
color: $white;
text-decoration: underline;
}
}
.alert-icon-on-top {

View File

@@ -131,7 +131,8 @@ export class BarGauge extends PureComponent<Props> {
};
}
const color = display(positionValue).color;
const color = display ? display(positionValue).color : null;
if (color) {
// if we are past real value the cell is not "on"
if (value === null || (positionValue !== null && positionValue > value.numeric)) {

View File

@@ -363,8 +363,9 @@ export class StackedWithChartLayout extends BigValueLayout {
// make title fontsize it's a bit smaller than valueFontSize
this.titleFontSize = Math.min(this.valueFontSize * 0.7, this.titleFontSize);
// make chart take up onused space
this.chartHeight = height - this.titleFontSize * LINE_HEIGHT - this.valueFontSize * LINE_HEIGHT + height * 0.05;
this.chartHeight = height - this.titleFontSize * LINE_HEIGHT - this.valueFontSize * LINE_HEIGHT;
}
getValueAndTitleContainerStyles() {

View File

@@ -29,9 +29,11 @@ interface CascaderState {
export interface CascaderOption {
value: any;
label: string;
// Items will be just flattened into the main list of items recursively.
items?: CascaderOption[];
disabled?: boolean;
title?: string;
// Children will be shown in a submenu.
children?: CascaderOption[];
}

View File

@@ -20,7 +20,6 @@ const setup = (propOverrides?: Partial<Props>, rowOverrides?: Partial<LogRowMode
hasAnsi: false,
entry: '',
raw: '',
timestamp: '',
uid: '0',
labels: {},
...(rowOverrides || {}),

View File

@@ -92,17 +92,28 @@ class UnThemedLogDetailsRow extends PureComponent<Props, State> {
return (
<tr className={cx(style.logDetailsValue, { [styles.noHoverBackground]: showFieldsStats })}>
{/* Action buttons - show stats/filter results */}
<td title="Ad-hoc statistics" onClick={this.showStats} className={style.logsDetailsIcon}>
<i className={`fa fa-signal ${styles.hoverCursor}`} />
<td className={style.logsDetailsIcon} colSpan={isLabel ? undefined : 3}>
<i title="Ad-hoc statistics" className={`fa fa-signal ${styles.hoverCursor}`} onClick={this.showStats} />
</td>
<td title="Filter for value" onClick={() => isLabel && this.filterLabel()} className={style.logsDetailsIcon}>
{isLabel && <i className={`fa fa-search-plus ${styles.hoverCursor}`} />}
</td>
<td title="Filter out value" onClick={() => isLabel && this.filterOutLabel()} className={style.logsDetailsIcon}>
{isLabel && <i className={`fa fa-search-minus ${styles.hoverCursor}`} />}
</td>
{isLabel && (
<>
<td className={style.logsDetailsIcon}>
<i
title="Filter for value"
className={`fa fa-search-plus ${styles.hoverCursor}`}
onClick={this.filterLabel}
/>
</td>
<td className={style.logsDetailsIcon}>
<i
title="Filter out value"
className={`fa fa-search-minus ${styles.hoverCursor}`}
onClick={this.filterOutLabel}
/>
</td>
</>
)}
{/* Key - value columns */}
<td className={style.logDetailsLabel}>{parsedKey}</td>

View File

@@ -3,7 +3,38 @@ import { getRowContexts } from './LogRowContextProvider';
describe('getRowContexts', () => {
describe('when called with a DataFrame and results are returned', () => {
it('then the result should be in correct format', async () => {
it('then the result should be in correct format and filtered', async () => {
const firstResult = new MutableDataFrame({
refId: 'B',
fields: [
{ name: 'ts', type: FieldType.time, values: [3, 2, 1] },
{ name: 'line', type: FieldType.string, values: ['3', '2', '1'], labels: {} },
{ name: 'id', type: FieldType.string, values: ['3', '2', '1'], labels: {} },
],
});
const secondResult = new MutableDataFrame({
refId: 'B',
fields: [
{ name: 'ts', type: FieldType.time, values: [6, 5, 4] },
{ name: 'line', type: FieldType.string, values: ['6', '5', '4'], labels: {} },
{ name: 'id', type: FieldType.string, values: ['6', '5', '4'], labels: {} },
],
});
let called = false;
const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => {
if (!called) {
called = true;
return Promise.resolve({ data: [firstResult] });
}
return Promise.resolve({ data: [secondResult] });
};
const result = await getRowContexts(getRowContextMock, row, 10);
expect(result).toEqual({ data: [[['3', '2']], [['6', '5', '4']]], errors: ['', ''] });
});
it('then the result should be in correct format and filtered without uid', async () => {
const firstResult = new MutableDataFrame({
refId: 'B',
fields: [
@@ -18,23 +49,6 @@ describe('getRowContexts', () => {
{ name: 'line', type: FieldType.string, values: ['6', '5', '4'], labels: {} },
],
});
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
dataFrame: new MutableDataFrame(),
entry: '4',
labels: (null as any) as Labels,
hasAnsi: false,
raw: '4',
logLevel: LogLevel.info,
timeEpochMs: 4,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
timestamp: '4',
uid: '1',
};
let called = false;
const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => {
if (!called) {
@@ -46,7 +60,7 @@ describe('getRowContexts', () => {
const result = await getRowContexts(getRowContextMock, row, 10);
expect(result).toEqual({ data: [[['3', '2', '1']], [['6', '5', '4']]], errors: ['', ''] });
expect(result).toEqual({ data: [[['3', '2', '1']], [['6', '5']]], errors: ['', ''] });
});
});
@@ -54,23 +68,6 @@ describe('getRowContexts', () => {
it('then the result should be in correct format', async () => {
const firstError = new Error('Error 1');
const secondError = new Error('Error 2');
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
dataFrame: new MutableDataFrame(),
entry: '4',
labels: (null as any) as Labels,
hasAnsi: false,
raw: '4',
logLevel: LogLevel.info,
timeEpochMs: 4,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
timestamp: '4',
uid: '1',
};
let called = false;
const getRowContextMock = (row: LogRowModel, options?: any): Promise<DataQueryResponse> => {
if (!called) {
@@ -86,3 +83,19 @@ describe('getRowContexts', () => {
});
});
});
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
dataFrame: new MutableDataFrame(),
entry: '4',
labels: (null as any) as Labels,
hasAnsi: false,
raw: '4',
logLevel: LogLevel.info,
timeEpochMs: 4,
timeFromNow: '',
timeLocal: '',
timeUtc: '',
uid: '1',
};

View File

@@ -1,5 +1,5 @@
import { LogRowModel, toDataFrame, Field } from '@grafana/data';
import { useState, useEffect } from 'react';
import { LogRowModel, toDataFrame, Field, FieldCache } from '@grafana/data';
import React, { useState, useEffect } from 'react';
import flatten from 'lodash/flatten';
import useAsync from 'react-use/lib/useAsync';
@@ -45,7 +45,8 @@ export const getRowContexts = async (
limit,
}),
getRowContext(row, {
limit: limit + 1, // Lets add one more to the limit as we're filtering out one row see comment below
// The start time is inclusive so we will get the one row we are using as context entry
limit: limit + 1,
direction: 'FORWARD',
}),
];
@@ -62,16 +63,33 @@ export const getRowContexts = async (
const data: any[] = [];
for (let index = 0; index < dataResult.data.length; index++) {
const dataFrame = toDataFrame(dataResult.data[index]);
const timestampField: Field<string> = dataFrame.fields.filter(field => field.name === 'ts')[0];
const fieldCache = new FieldCache(dataFrame);
const timestampField: Field<string> = fieldCache.getFieldByName('ts')!;
const idField: Field<string> | undefined = fieldCache.getFieldByName('id');
for (let fieldIndex = 0; fieldIndex < timestampField.values.length; fieldIndex++) {
const timestamp = timestampField.values.get(fieldIndex);
// TODO: this filtering is datasource dependant so it will make sense to move it there so the API is
// to return correct list of lines handling inclusive ranges or how to filter the correct line on the
// datasource.
// We need to filter out the row we're basing our search from because of how start/end params work in Loki API
// see https://github.com/grafana/loki/issues/597#issuecomment-506408980
// the alternative to create our own add 1 nanosecond method to the a timestamp string would be quite complex
if (timestamp === row.timestamp) {
continue;
// Filter out the row that is the one used as a focal point for the context as we will get it in one of the
// requests.
if (idField) {
// For Loki this means we filter only the one row. Issue is we could have other rows logged at the same
// ns which came before but they come in the response that search for logs after. This means right now
// we will show those as if they came after. This is not strictly correct but seems better than loosing them
// and making this correct would mean quite a bit of complexity to shuffle things around and messing up
//counts.
if (idField.values.get(fieldIndex) === row.uid) {
continue;
}
} else {
// Fallback to timestamp. This should not happen right now as this feature is implemented only for loki
// and that has ID. Later this branch could be used in other DS but mind that this could also filter out
// logs which were logged in the same timestamp and that can be a problem depending on the precision.
if (parseInt(timestampField.values.get(fieldIndex), 10) === row.timeEpochMs) {
continue;
}
}
const lineField: Field<string> = dataFrame.fields.filter(field => field.name === 'line')[0];

View File

@@ -109,7 +109,6 @@ const makeLog = (overrides: Partial<LogRowModel>): LogRowModel => {
hasAnsi: false,
labels: {},
raw: entry,
timestamp: '',
timeFromNow: '',
timeEpochMs: 1,
timeLocal: '',

View File

@@ -86,7 +86,7 @@ export function sharedSingleStatPanelChangedHandler(
defaults.mappings = mappings;
}
if (panel.gauge) {
if (panel.gauge && panel.gauge.show) {
defaults.min = panel.gauge.minValue;
defaults.max = panel.gauge.maxValue;
}
@@ -151,11 +151,10 @@ export function sharedSingleStatMigrationHandler(panel: PanelModel<SingleStatBas
// Migrate color from simple string to a mode
const { defaults } = fieldOptions;
if (defaults.color) {
const old = defaults.color;
if (defaults.color && typeof defaults.color === 'string') {
defaults.color = {
mode: FieldColorMode.Fixed,
fixedColor: old,
fixedColor: defaults.color,
};
}

View File

@@ -158,7 +158,7 @@ export class UnthemedTimePicker extends PureComponent<Props, State> {
const hasAbsolute = isDateTime(value.raw.from) || isDateTime(value.raw.to);
const syncedTimePicker = timeSyncButton && isSynced;
const timePickerIconClass = cx('fa fa-clock-o fa-fw', { ['icon-brand-gradient']: syncedTimePicker });
const timePickerButtonClass = cx('btn navbar-button navbar-button--zoom', {
const timePickerButtonClass = cx('btn navbar-button navbar-button--tight', {
[`btn--radius-right-0 ${styles.noRightBorderStyle}`]: !!timeSyncButton,
[`explore-active-button-glow ${styles.syncedTimePicker}`]: syncedTimePicker,
});

View File

@@ -1,4 +1,4 @@
import React, { memo } from 'react';
import React, { memo, useState, useEffect } from 'react';
import { css, cx } from 'emotion';
import Calendar from 'react-calendar/dist/entry.nostyle';
import { GrafanaTheme, dateTime, TIME_FORMAT } from '@grafana/data';
@@ -84,7 +84,6 @@ const getBodyStyles = stylesFactory((theme: GrafanaTheme) => {
title: css`
color: ${theme.colors.text}
background-color: ${colors.background};
line-height: 21px;
font-size: ${theme.typography.size.md};
border: 1px solid transparent;
@@ -129,6 +128,7 @@ const getBodyStyles = stylesFactory((theme: GrafanaTheme) => {
.react-calendar__tile--now {
margin-bottom: 4px;
background-color: inherit;
height: 26px;
}
.react-calendar__navigation__label,
@@ -158,7 +158,8 @@ const getBodyStyles = stylesFactory((theme: GrafanaTheme) => {
background-color: ${theme.colors.blue77};
border-radius: 100px;
display: block;
padding: 2px 7px 3px;
padding-top: 2px;
height: 26px;
}
}
@@ -202,6 +203,8 @@ interface Props {
isFullscreen: boolean;
}
const stopPropagation = (event: React.MouseEvent<HTMLDivElement>) => event.stopPropagation();
export const TimePickerCalendar = memo<Props>(props => {
const theme = useTheme();
const styles = getStyles(theme);
@@ -214,7 +217,7 @@ export const TimePickerCalendar = memo<Props>(props => {
if (isFullscreen) {
return (
<ClickOutsideWrapper onClick={props.onClose}>
<div className={styles.container}>
<div className={styles.container} onClick={stopPropagation}>
<Body {...props} />
</div>
</ClickOutsideWrapper>
@@ -223,14 +226,14 @@ export const TimePickerCalendar = memo<Props>(props => {
return (
<Portal>
<div className={styles.modal} onClick={event => event.stopPropagation()}>
<div className={styles.modal} onClick={stopPropagation}>
<div className={styles.content}>
<Header {...props} />
<Body {...props} />
<Footer {...props} />
</div>
</div>
<div className={styles.backdrop} onClick={event => event.stopPropagation()} />
<div className={styles.backdrop} onClick={stopPropagation} />
</Portal>
);
});
@@ -247,10 +250,14 @@ const Header = memo<Props>(({ onClose }) => {
);
});
const Body = memo<Props>(props => {
const Body = memo<Props>(({ onChange, from, to }) => {
const [value, setValue] = useState<Date[]>();
const theme = useTheme();
const styles = getBodyStyles(theme);
const { from, to, onChange } = props;
useEffect(() => {
setValue(inputToValue(from, to));
}, []);
return (
<Calendar
@@ -259,7 +266,7 @@ const Body = memo<Props>(props => {
prev2Label={null}
className={styles.body}
tileClassName={styles.title}
value={inputToValue(from, to)}
value={value}
nextLabel={<span className="fa fa-angle-right" />}
prevLabel={<span className="fa fa-angle-left" />}
onChange={value => valueToInput(value, onChange)}

View File

@@ -10,7 +10,6 @@ import {
TIME_FORMAT,
} from '@grafana/data';
import { stringToDateTimeType } from '../time';
import { isMathString } from '@grafana/data/src/datetime/datemath';
export const mapOptionToTimeRange = (option: TimeOption, timeZone?: TimeZone): TimeRange => {
return {
@@ -41,7 +40,7 @@ export const mapStringsToTimeRange = (from: string, to: string, roundup?: boolea
const fromDate = stringToDateTimeType(from, roundup, timeZone);
const toDate = stringToDateTimeType(to, roundup, timeZone);
if (isMathString(from) || isMathString(to)) {
if (dateMath.isMathString(from) || dateMath.isMathString(to)) {
return {
from: fromDate,
to: toDate,

View File

@@ -35,7 +35,7 @@ exports[`TimePicker renders buttons correctly 1`] = `
>
<button
aria-label="TimePicker Open Button"
className="btn navbar-button navbar-button--zoom"
className="btn navbar-button navbar-button--tight"
onClick={[Function]}
>
<i
@@ -342,7 +342,7 @@ exports[`TimePicker renders content correctly after beeing open 1`] = `
>
<button
aria-label="TimePicker Open Button"
className="btn navbar-button navbar-button--zoom"
className="btn navbar-button navbar-button--tight"
onClick={[Function]}
>
<i

View File

@@ -14,3 +14,4 @@
@import 'TimePicker/TimeOfDayPicker';
@import 'Tooltip/Tooltip';
@import 'ValueMappingsEditor/ValueMappingsEditor';
@import 'Alert/Alert';

View File

@@ -64,7 +64,7 @@ docker_build () {
else
libc=""
dockerfile="ubuntu.Dockerfile"
base_image="${base_arch}ubuntu:18.10"
base_image="${base_arch}ubuntu:18.04"
fi
grafana_tgz="grafana-latest.linux-${arch}${libc}.tar.gz"

View File

@@ -1,4 +1,4 @@
ARG BASE_IMAGE=ubuntu:18.10
ARG BASE_IMAGE=ubuntu:18.04
FROM ${BASE_IMAGE} AS grafana-builder
ARG GRAFANA_TGZ="grafana-latest.linux-x64.tar.gz"

View File

@@ -3,6 +3,8 @@ package api
import (
"strconv"
"github.com/grafana/grafana/pkg/services/rendering"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/util"
@@ -194,6 +196,7 @@ func (hs *HTTPServer) getFrontendSettingsMap(c *m.ReqContext) (map[string]interf
"version": setting.BuildVersion,
"commit": setting.BuildCommit,
"buildstamp": setting.BuildStamp,
"edition": hs.License.Edition(),
"latestVersion": plugins.GrafanaLatestVersion,
"hasUpdate": plugins.GrafanaHasUpdate,
"env": setting.Env,
@@ -202,8 +205,11 @@ func (hs *HTTPServer) getFrontendSettingsMap(c *m.ReqContext) (map[string]interf
"licenseInfo": map[string]interface{}{
"hasLicense": hs.License.HasLicense(),
"expiry": hs.License.Expiry(),
"stateInfo": hs.License.StateInfo(),
"licenseUrl": hs.License.LicenseURL(c.SignedInUser),
},
"featureToggles": hs.Cfg.FeatureToggles,
"featureToggles": hs.Cfg.FeatureToggles,
"phantomJSRenderer": rendering.IsPhantomJSEnabled,
}
return jsonObj, nil

View File

@@ -190,18 +190,18 @@ func (hs *HTTPServer) configureHttps() error {
MinVersion: tls.VersionTLS12,
PreferServerCipherSuites: true,
CipherSuites: []uint16{
tls.TLS_RSA_WITH_AES_128_CBC_SHA,
tls.TLS_RSA_WITH_AES_256_CBC_SHA,
tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
tls.TLS_RSA_WITH_AES_128_GCM_SHA256,
tls.TLS_RSA_WITH_AES_256_GCM_SHA384,
tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
tls.TLS_RSA_WITH_AES_128_CBC_SHA,
tls.TLS_RSA_WITH_AES_256_CBC_SHA,
},
}
@@ -235,12 +235,12 @@ func (hs *HTTPServer) configureHttp2() error {
tls.TLS_CHACHA20_POLY1305_SHA256,
tls.TLS_AES_128_GCM_SHA256,
tls.TLS_AES_256_GCM_SHA384,
tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,
tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,
tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,
},
NextProtos: []string{"h2", "http/1.1"},
}

View File

@@ -275,7 +275,7 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
})
}
if c.OrgRole == m.ROLE_ADMIN || hs.Cfg.EditorsCanAdmin {
if c.OrgRole == m.ROLE_ADMIN || (hs.Cfg.EditorsCanAdmin && c.OrgRole == m.ROLE_EDITOR) {
configNodes = append(configNodes, &dtos.NavLink{
Text: "Teams",
Id: "teams",
@@ -357,7 +357,7 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
Children: []*dtos.NavLink{},
})
hs.HooksService.RunIndexDataHooks(&data)
hs.HooksService.RunIndexDataHooks(&data, c)
sort.SliceStable(data.NavTree, func(i, j int) bool {
return data.NavTree[i].SortWeight < data.NavTree[j].SortWeight

View File

@@ -216,6 +216,7 @@ func (server *HTTPServer) PostSyncUserWithLDAP(c *models.ReqContext) Response {
}
upsertCmd := &models.UpsertUserCommand{
ReqContext: c,
ExternalUser: user,
SignupAllowed: setting.LDAPAllowSignup,
}

View File

@@ -35,7 +35,9 @@ func (hs *HTTPServer) validateRedirectTo(redirectTo string) error {
if to.IsAbs() {
return login.ErrAbsoluteRedirectTo
}
if hs.Cfg.AppSubUrl != "" && !strings.HasPrefix(to.Path, "/"+hs.Cfg.AppSubUrl) {
// when using a subUrl, the redirect_to should have a relative or absolute path that includes the subUrl, otherwise the redirect
// will send the user to the wrong location
if hs.Cfg.AppSubUrl != "" && !strings.HasPrefix(to.Path, hs.Cfg.AppSubUrl) && !strings.HasPrefix(to.Path, "/"+hs.Cfg.AppSubUrl) {
return login.ErrInvalidRedirectTo
}
return nil
@@ -177,6 +179,11 @@ func (hs *HTTPServer) LoginPost(c *models.ReqContext, cmd dtos.LoginCommand) Res
if redirectTo, _ := url.QueryUnescape(c.GetCookie("redirect_to")); len(redirectTo) > 0 {
if err := hs.validateRedirectTo(redirectTo); err == nil {
// remove subpath if it exists at the beginning of the redirect_to
// LoginCtrl.tsx is already prepending the redirectUrl with the subpath
if setting.AppSubUrl != "" && strings.Index(redirectTo, setting.AppSubUrl) == 0 {
redirectTo = strings.Replace(redirectTo, setting.AppSubUrl, "", 1)
}
result["redirectUrl"] = redirectTo
} else {
log.Info("Ignored invalid redirect_to cookie value: %v", redirectTo)

View File
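
The extra prefix check above makes the validator accept redirect paths written both with and without a leading slash before the sub-URL, so a cookie set by the frontend as "grafana/profile" is no longer rejected. A small self-contained sketch of the same validation rule, using an illustrative appSubUrl value rather than Grafana's settings object:

package main

import (
	"errors"
	"fmt"
	"net/url"
	"strings"
)

var (
	errAbsoluteRedirect = errors.New("absolute redirect not allowed")
	errInvalidRedirect  = errors.New("redirect outside sub url")
)

func validateRedirect(redirectTo, appSubUrl string) error {
	to, err := url.Parse(redirectTo)
	if err != nil {
		return errInvalidRedirect
	}
	if to.IsAbs() {
		return errAbsoluteRedirect
	}
	// Accept both "grafana/profile" and "/grafana/profile" when appSubUrl is "grafana".
	if appSubUrl != "" && !strings.HasPrefix(to.Path, appSubUrl) && !strings.HasPrefix(to.Path, "/"+appSubUrl) {
		return errInvalidRedirect
	}
	return nil
}

func main() {
	fmt.Println(validateRedirect("/grafana/profile", "grafana")) // <nil>
	fmt.Println(validateRedirect("/profile", "grafana"))         // redirect outside sub url
}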

@@ -186,7 +186,13 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
if userInfo.Role != "" {
rt := m.RoleType(userInfo.Role)
if rt.IsValid() {
extUser.OrgRoles[1] = rt
var orgID int64
if setting.AutoAssignOrg && setting.AutoAssignOrgId > 0 {
orgID = int64(setting.AutoAssignOrgId)
} else {
orgID = int64(1)
}
extUser.OrgRoles[orgID] = rt
}
}

View File
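
The change above stops hard-coding org ID 1 when mapping an OAuth role: if auto_assign_org is enabled with an explicit auto_assign_org_id, the role is attached to that org instead. A minimal sketch of the org-resolution logic in isolation (the field names mirror the settings in the diff; the struct itself is illustrative only):

package main

import "fmt"

type orgSettings struct {
	AutoAssignOrg   bool
	AutoAssignOrgId int
}

// targetOrgID returns the org an OAuth-mapped role should be assigned to.
func targetOrgID(s orgSettings) int64 {
	if s.AutoAssignOrg && s.AutoAssignOrgId > 0 {
		return int64(s.AutoAssignOrgId)
	}
	return 1 // fall back to the default org
}

func main() {
	fmt.Println(targetOrgID(orgSettings{AutoAssignOrg: true, AutoAssignOrgId: 3})) // 3
	fmt.Println(targetOrgID(orgSettings{}))                                        // 1
}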

@@ -72,6 +72,7 @@ type redirectCase struct {
err error
appURL string
appSubURL string
path string
}
func TestLoginErrorCookieApiEndpoint(t *testing.T) {
@@ -154,6 +155,7 @@ func TestLoginViewRedirect(t *testing.T) {
desc: "grafana relative url without subpath",
url: "/profile",
appURL: "http://localhost:3000",
path: "/",
status: 302,
},
{
@@ -161,6 +163,15 @@ func TestLoginViewRedirect(t *testing.T) {
url: "/grafana/profile",
appURL: "http://localhost:3000",
appSubURL: "grafana",
path: "grafana/",
status: 302,
},
{
desc: "grafana slashed relative url with subpath",
url: "/grafana/profile",
appURL: "http://localhost:3000",
appSubURL: "grafana",
path: "/grafana/",
status: 302,
},
{
@@ -168,13 +179,23 @@ func TestLoginViewRedirect(t *testing.T) {
url: "/profile",
appURL: "http://localhost:3000",
appSubURL: "grafana",
path: "grafana/",
status: 200,
err: login.ErrInvalidRedirectTo,
},
{
desc: "grafana subpath absolute url",
url: "http://localhost:3000/grafana/profile",
appURL: "http://localhost:3000",
appSubURL: "grafana",
path: "/grafana/profile",
status: 200,
},
{
desc: "grafana absolute url",
url: "http://localhost:3000/profile",
appURL: "http://localhost:3000",
path: "/",
status: 200,
err: login.ErrAbsoluteRedirectTo,
},
@@ -182,6 +203,7 @@ func TestLoginViewRedirect(t *testing.T) {
desc: "non grafana absolute url",
url: "http://example.com",
appURL: "http://localhost:3000",
path: "/",
status: 200,
err: login.ErrAbsoluteRedirectTo,
},
@@ -189,6 +211,7 @@ func TestLoginViewRedirect(t *testing.T) {
desc: "invalid url",
url: ":foo",
appURL: "http://localhost:3000",
path: "/",
status: 200,
err: login.ErrInvalidRedirectTo,
},
@@ -203,7 +226,7 @@ func TestLoginViewRedirect(t *testing.T) {
MaxAge: 60,
Value: c.url,
HttpOnly: true,
Path: hs.Cfg.AppSubUrl + "/",
Path: c.path,
Secure: hs.Cfg.CookieSecure,
SameSite: hs.Cfg.CookieSameSiteMode,
}
@@ -219,7 +242,7 @@ func TestLoginViewRedirect(t *testing.T) {
assert.True(t, ok, "Set-Cookie exists")
assert.Greater(t, len(setCookie), 0)
var redirectToCookieFound bool
expCookieValue := fmt.Sprintf("redirect_to=%v; Path=%v; Max-Age=60; HttpOnly; Secure", c.url, hs.Cfg.AppSubUrl+"/")
expCookieValue := fmt.Sprintf("redirect_to=%v; Path=%v; Max-Age=60; HttpOnly; Secure", c.url, c.path)
for _, cookieValue := range setCookie {
if cookieValue == expCookieValue {
redirectToCookieFound = true
@@ -281,6 +304,12 @@ func TestLoginPostRedirect(t *testing.T) {
appURL: "https://localhost:3000",
appSubURL: "grafana",
},
{
desc: "grafana no slash relative url with subpath",
url: "grafana/profile",
appURL: "https://localhost:3000",
appSubURL: "grafana",
},
{
desc: "relative url with missing subpath",
url: "/profile",

View File

@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"io/ioutil"
"log"
"net"
"net/http"
"net/http/httputil"
@@ -17,7 +18,7 @@ import (
"golang.org/x/oauth2"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/infra/log"
glog "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/login/social"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
@@ -26,7 +27,7 @@ import (
)
var (
logger = log.New("data-proxy-log")
logger = glog.New("data-proxy-log")
client = newHTTPClient()
)
@@ -57,6 +58,18 @@ type httpClient interface {
Do(req *http.Request) (*http.Response, error)
}
type logWrapper struct {
logger glog.Logger
}
// Write writes log messages as bytes from proxy
func (lw *logWrapper) Write(p []byte) (n int, err error) {
withoutNewline := strings.TrimSuffix(string(p), "\n")
lw.logger.Error("Data proxy error", "error", withoutNewline)
return len(p), nil
}
// NewDataSourceProxy creates a new Datasource proxy
func NewDataSourceProxy(ds *m.DataSource, plugin *plugins.DataSourcePlugin, ctx *m.ReqContext, proxyPath string, cfg *setting.Cfg) *DataSourceProxy {
targetURL, _ := url.Parse(ds.Url)
@@ -83,9 +96,12 @@ func (proxy *DataSourceProxy) HandleRequest() {
return
}
proxyErrorLogger := logger.New("userId", proxy.ctx.UserId, "orgId", proxy.ctx.OrgId, "uname", proxy.ctx.Login, "path", proxy.ctx.Req.URL.Path, "remote_addr", proxy.ctx.RemoteAddr(), "referer", proxy.ctx.Req.Referer())
reverseProxy := &httputil.ReverseProxy{
Director: proxy.getDirector(),
FlushInterval: time.Millisecond * 200,
ErrorLog: log.New(&logWrapper{logger: proxyErrorLogger}, "", 0),
}
transport, err := proxy.ds.GetHttpTransport()

View File
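
httputil.ReverseProxy only accepts a *log.Logger for its ErrorLog field, so the diff bridges it with an io.Writer adapter that trims the trailing newline and forwards each line to Grafana's structured logger. A generic sketch of the same adapter pattern, with a stand-in sink in place of the real logger and a placeholder upstream URL:

package main

import (
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
	"strings"
)

// errorWriter forwards each log line written by ReverseProxy to a custom sink.
type errorWriter struct {
	sink func(msg string)
}

func (w errorWriter) Write(p []byte) (int, error) {
	w.sink(strings.TrimSuffix(string(p), "\n"))
	return len(p), nil
}

func main() {
	target, _ := url.Parse("http://localhost:9090") // placeholder upstream
	proxy := httputil.NewSingleHostReverseProxy(target)
	// Wrap the adapter in a stdlib *log.Logger; prefix and flags stay empty
	// because the sink is expected to add its own context.
	proxy.ErrorLog = log.New(errorWriter{sink: func(msg string) {
		log.Printf("data proxy error: %s", msg)
	}}, "", 0)
	http.ListenAndServe(":3000", proxy)
}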

@@ -154,6 +154,8 @@ var (
func init() {
httpStatusCodes := []string{"200", "404", "500", "unknown"}
objectiveMap := map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}
MInstanceStart = prometheus.NewCounter(prometheus.CounterOpts{
Name: "instance_start_total",
Help: "counter for started instances",
@@ -191,8 +193,9 @@ func init() {
MHttpRequestSummary = prometheus.NewSummaryVec(
prometheus.SummaryOpts{
Name: "http_request_duration_milliseconds",
Help: "http request summary",
Name: "http_request_duration_milliseconds",
Help: "http request summary",
Objectives: objectiveMap,
},
[]string{"handler", "statuscode", "method"},
)
@@ -216,21 +219,24 @@ func init() {
})
MApiDashboardSave = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dashboard_save_milliseconds",
Help: "summary for dashboard save duration",
Namespace: exporterName,
Name: "api_dashboard_save_milliseconds",
Help: "summary for dashboard save duration",
Objectives: objectiveMap,
Namespace: exporterName,
})
MApiDashboardGet = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dashboard_get_milliseconds",
Help: "summary for dashboard get duration",
Namespace: exporterName,
Name: "api_dashboard_get_milliseconds",
Help: "summary for dashboard get duration",
Objectives: objectiveMap,
Namespace: exporterName,
})
MApiDashboardSearch = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dashboard_search_milliseconds",
Help: "summary for dashboard search duration",
Namespace: exporterName,
Name: "api_dashboard_search_milliseconds",
Help: "summary for dashboard search duration",
Objectives: objectiveMap,
Namespace: exporterName,
})
MApiAdminUserCreate = newCounterStartingAtZero(prometheus.CounterOpts{
@@ -330,21 +336,24 @@ func init() {
})
LDAPUsersSyncExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "ldap_users_sync_execution_time",
Help: "summary for LDAP users sync execution duration",
Namespace: exporterName,
Name: "ldap_users_sync_execution_time",
Help: "summary for LDAP users sync execution duration",
Objectives: objectiveMap,
Namespace: exporterName,
})
MDataSourceProxyReqTimer = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dataproxy_request_all_milliseconds",
Help: "summary for dataproxy request duration",
Namespace: exporterName,
Name: "api_dataproxy_request_all_milliseconds",
Help: "summary for dataproxy request duration",
Objectives: objectiveMap,
Namespace: exporterName,
})
MAlertingExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "alerting_execution_time_milliseconds",
Help: "summary of alert exeuction duration",
Namespace: exporterName,
Name: "alerting_execution_time_milliseconds",
Help: "summary of alert exeuction duration",
Objectives: objectiveMap,
Namespace: exporterName,
})
MAlertingActiveAlerts = prometheus.NewGauge(prometheus.GaugeOpts{

View File
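
The repeated change in this file threads objectiveMap into every Summary because, in client_golang 1.x, a Summary created without Objectives exports only the _sum and _count series and no quantiles. A minimal sketch of the difference, assuming the default registry:

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	objectives := map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}

	saveDuration := prometheus.NewSummary(prometheus.SummaryOpts{
		Name:       "api_dashboard_save_milliseconds",
		Help:       "summary for dashboard save duration",
		Objectives: objectives, // without this, only _sum and _count are exported
	})
	prometheus.MustRegister(saveDuration)

	saveDuration.Observe(42)
	fmt.Println("observed one save")
}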

@@ -51,6 +51,7 @@ var loginUsingLDAP = func(query *models.LoginUserQuery) (bool, error) {
}
upsert := &models.UpsertUserCommand{
ReqContext: query.ReqContext,
ExternalUser: externalUser,
SignupAllowed: setting.LDAPAllowSignup,
}

View File

@@ -47,7 +47,7 @@ func notAuthorized(c *m.ReqContext) {
return
}
WriteCookie(c.Resp, "redirect_to", url.QueryEscape(setting.AppSubUrl+c.Req.RequestURI), 0, newCookieOptions)
WriteCookie(c.Resp, "redirect_to", url.QueryEscape(c.Req.RequestURI), 0, newCookieOptions)
c.Redirect(setting.AppSubUrl + "/login")
}

View File

@@ -1,6 +1,7 @@
package middleware
import (
"context"
"fmt"
"net/url"
"strconv"
@@ -228,7 +229,24 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.
// Rotate the token just before we write response headers to ensure there is no delay between
// the new token being generated and the client receiving it.
ctx.Resp.Before(func(w macaron.ResponseWriter) {
ctx.Resp.Before(rotateEndOfRequestFunc(ctx, authTokenService, token))
return true
}
func rotateEndOfRequestFunc(ctx *models.ReqContext, authTokenService models.UserTokenService, token *models.UserToken) macaron.BeforeFunc {
return func(w macaron.ResponseWriter) {
// if response has already been written, skip.
if w.Written() {
return
}
// if the request is cancelled by the client we should not try
// to rotate the token since the client would not accept any result.
if ctx.Context.Req.Context().Err() == context.Canceled {
return
}
rotated, err := authTokenService.TryRotateToken(ctx.Req.Context(), token, ctx.RemoteAddr(), ctx.Req.UserAgent())
if err != nil {
ctx.Logger.Error("Failed to rotate token", "error", err)
@@ -238,9 +256,7 @@ func initContextWithToken(authTokenService models.UserTokenService, ctx *models.
if rotated {
WriteSessionCookie(ctx, token.UnhashedToken, setting.LoginMaxLifetimeDays)
}
})
return true
}
}
func WriteSessionCookie(ctx *models.ReqContext, value string, maxLifetimeDays int) {
@@ -262,6 +278,11 @@ func WriteSessionCookie(ctx *models.ReqContext, value string, maxLifetimeDays in
func AddDefaultResponseHeaders() macaron.Handler {
return func(ctx *macaron.Context) {
ctx.Resp.Before(func(w macaron.ResponseWriter) {
// if response has already been written, skip.
if w.Written() {
return
}
if !strings.HasPrefix(ctx.Req.URL.Path, "/api/datasources/proxy/") {
AddNoCacheHeaders(ctx.Resp)
}

View File
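
The extracted rotateEndOfRequestFunc bails out in two situations: the response has already been written (for example by the recovery middleware), and the client has gone away. The context check is the generic Go pattern for the second case; a standalone sketch of that guard, independent of macaron and of Grafana's token service:

package main

import (
	"context"
	"fmt"
	"net/http"
)

// shouldRotate reports whether it is still worth rotating the auth token for
// this request: skip if the response is done or the client cancelled.
func shouldRotate(req *http.Request, responseWritten bool) bool {
	if responseWritten {
		return false
	}
	if req.Context().Err() == context.Canceled {
		return false
	}
	return true
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	req, _ := http.NewRequestWithContext(ctx, http.MethodGet, "http://example.com", nil)
	cancel()
	fmt.Println(shouldRotate(req, false)) // false: client cancelled the request
}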

@@ -6,16 +6,19 @@ import (
"errors"
"fmt"
"net/http"
"net/http/httptest"
"path/filepath"
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gopkg.in/macaron.v1"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/remotecache"
authproxy "github.com/grafana/grafana/pkg/middleware/auth_proxy"
"github.com/grafana/grafana/pkg/models"
@@ -541,7 +544,8 @@ func middlewareScenario(t *testing.T, desc string, fn scenarioFunc) {
sc := &scenarioContext{}
viewsPath, _ := filepath.Abs("../../public/views")
viewsPath, err := filepath.Abs("../../public/views")
require.NoError(t, err)
sc.m = macaron.New()
sc.m.Use(AddDefaultResponseHeaders())
@@ -571,3 +575,88 @@ func middlewareScenario(t *testing.T, desc string, fn scenarioFunc) {
fn(sc)
})
}
func TestDontRotateTokensOnCancelledRequests(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
reqContext, _, err := initTokenRotationTest(ctx)
require.NoError(t, err)
tryRotateCallCount := 0
uts := &auth.FakeUserAuthTokenService{
TryRotateTokenProvider: func(ctx context.Context, token *models.UserToken, clientIP, userAgent string) (bool, error) {
tryRotateCallCount++
return false, nil
},
}
token := &models.UserToken{AuthToken: "oldtoken"}
fn := rotateEndOfRequestFunc(reqContext, uts, token)
cancel()
fn(reqContext.Resp)
assert.Equal(t, 0, tryRotateCallCount, "Token rotation was attempted")
}
func TestTokenRotationAtEndOfRequest(t *testing.T) {
reqContext, rr, err := initTokenRotationTest(context.Background())
require.NoError(t, err)
uts := &auth.FakeUserAuthTokenService{
TryRotateTokenProvider: func(ctx context.Context, token *models.UserToken, clientIP, userAgent string) (bool, error) {
newToken, err := util.RandomHex(16)
require.NoError(t, err)
token.AuthToken = newToken
return true, nil
},
}
token := &models.UserToken{AuthToken: "oldtoken"}
rotateEndOfRequestFunc(reqContext, uts, token)(reqContext.Resp)
foundLoginCookie := false
for _, c := range rr.Result().Cookies() {
if c.Name == "login_token" {
foundLoginCookie = true
require.NotEqual(t, token.AuthToken, c.Value, "Auth token is still the same")
}
}
assert.True(t, foundLoginCookie, "Could not find cookie")
}
func initTokenRotationTest(ctx context.Context) (*models.ReqContext, *httptest.ResponseRecorder, error) {
setting.LoginCookieName = "login_token"
setting.LoginMaxLifetimeDays = 7
rr := httptest.NewRecorder()
req, err := http.NewRequestWithContext(ctx, "", "", nil)
if err != nil {
return nil, nil, err
}
reqContext := &models.ReqContext{
Context: &macaron.Context{
Req: macaron.Request{
Request: req,
},
},
Logger: log.New("testlogger"),
}
mw := mockWriter{rr}
reqContext.Resp = mw
return reqContext, rr, nil
}
type mockWriter struct {
*httptest.ResponseRecorder
}
func (mw mockWriter) Flush() {}
func (mw mockWriter) Status() int { return 0 }
func (mw mockWriter) Size() int { return 0 }
func (mw mockWriter) Written() bool { return false }
func (mw mockWriter) Before(macaron.BeforeFunc) {}

View File

@@ -19,6 +19,7 @@ import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"runtime"
"gopkg.in/macaron.v1"
@@ -102,8 +103,6 @@ func Recovery() macaron.Handler {
return func(c *macaron.Context) {
defer func() {
if err := recover(); err != nil {
stack := stack(3)
panicLogger := log.Root
// try to get request logger
if ctx, ok := c.Data["ctx"]; ok {
@@ -111,8 +110,22 @@ func Recovery() macaron.Handler {
panicLogger = ctxTyped.Logger
}
// http.ErrAbortHandler is suppressed by default in the http package
// and used as a signal for aborting requests. Suppresses stacktrace
// since it doesn't add any important information.
if err == http.ErrAbortHandler {
panicLogger.Error("Request error", "error", err)
return
}
stack := stack(3)
panicLogger.Error("Request error", "error", err, "stack", string(stack))
// if response has already been written, skip.
if c.Written() {
return
}
c.Data["Title"] = "Server Error"
c.Data["AppSubUrl"] = setting.AppSubUrl
c.Data["Theme"] = setting.DefaultTheme

View File
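
http.ErrAbortHandler is the sentinel value the net/http machinery panics with to abort a handler (ReverseProxy uses it when the client disconnects mid-response), so a stack trace adds nothing in that case. A minimal sketch of the same recover logic outside macaron, using plain net/http middleware:

package main

import (
	"log"
	"net/http"
	"runtime/debug"
)

func withRecovery(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		defer func() {
			if err := recover(); err != nil {
				// Aborted requests are expected; log without the (useless) stack trace.
				if err == http.ErrAbortHandler {
					log.Printf("request aborted: %v", err)
					return
				}
				log.Printf("request panic: %v\n%s", err, debug.Stack())
				http.Error(w, "Server Error", http.StatusInternalServerError)
			}
		}()
		next.ServeHTTP(w, r)
	})
}

func main() {
	http.ListenAndServe(":8080", withRecovery(http.NotFoundHandler()))
}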

@@ -7,13 +7,32 @@ import (
"time"
"github.com/grafana/grafana/pkg/infra/metrics"
"github.com/prometheus/client_golang/prometheus"
"gopkg.in/macaron.v1"
)
var (
httpRequestsInFlight prometheus.Gauge
)
func init() {
httpRequestsInFlight = prometheus.NewGauge(
prometheus.GaugeOpts{
Name: "http_request_in_flight",
Help: "A gauge of requests currently being served by Grafana.",
},
)
prometheus.MustRegister(httpRequestsInFlight)
}
// RequestMetrics is a middleware handler that instruments the request
func RequestMetrics(handler string) macaron.Handler {
return func(res http.ResponseWriter, req *http.Request, c *macaron.Context) {
rw := res.(macaron.ResponseWriter)
now := time.Now()
httpRequestsInFlight.Inc()
defer httpRequestsInFlight.Dec()
c.Next()
status := rw.Status()

View File
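
The gauge is incremented before the handler chain runs and decremented when it returns, so its instantaneous value is the number of requests the server is serving right now. The same pattern in plain net/http middleware, as a sketch (client_golang also ships a ready-made promhttp.InstrumentHandlerInFlight helper for exactly this):

package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

var inFlight = prometheus.NewGauge(prometheus.GaugeOpts{
	Name: "http_request_in_flight",
	Help: "A gauge of requests currently being served.",
})

func instrument(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		inFlight.Inc()
		defer inFlight.Dec()
		next.ServeHTTP(w, r)
	})
}

func main() {
	prometheus.MustRegister(inFlight)
	http.Handle("/metrics", promhttp.Handler())
	http.Handle("/", instrument(http.NotFoundHandler()))
	http.ListenAndServe(":8080", nil)
}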

@@ -9,4 +9,11 @@ type Licensing interface {
// Expiry returns the unix epoch timestamp when the license expires, or 0 if no valid license is provided
Expiry() int64
// Return edition
Edition() string
LicenseURL(user *SignedInUser) string
StateInfo() string
}

View File

@@ -2,10 +2,11 @@ package hooks
import (
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/registry"
)
type IndexDataHook func(indexData *dtos.IndexViewData)
type IndexDataHook func(indexData *dtos.IndexViewData, req *models.ReqContext)
type HooksService struct {
indexDataHooks []IndexDataHook
@@ -23,8 +24,8 @@ func (srv *HooksService) AddIndexDataHook(hook IndexDataHook) {
srv.indexDataHooks = append(srv.indexDataHooks, hook)
}
func (srv *HooksService) RunIndexDataHooks(indexData *dtos.IndexViewData) {
func (srv *HooksService) RunIndexDataHooks(indexData *dtos.IndexViewData, req *models.ReqContext) {
for _, hook := range srv.indexDataHooks {
hook(indexData)
hook(indexData, req)
}
}

View File

@@ -2,6 +2,7 @@ package licensing
import (
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/hooks"
"github.com/grafana/grafana/pkg/setting"
)
@@ -19,14 +20,30 @@ func (*OSSLicensingService) Expiry() int64 {
return 0
}
func (*OSSLicensingService) Edition() string {
return "Open Source"
}
func (*OSSLicensingService) StateInfo() string {
return ""
}
func (l *OSSLicensingService) LicenseURL(user *models.SignedInUser) string {
if user.IsGrafanaAdmin {
return l.Cfg.AppSubUrl + "/admin/upgrading"
}
return "https://grafana.com/products/enterprise/?utm_source=grafana_footer"
}
func (l *OSSLicensingService) Init() error {
l.HooksService.AddIndexDataHook(func(indexData *dtos.IndexViewData) {
l.HooksService.AddIndexDataHook(func(indexData *dtos.IndexViewData, req *models.ReqContext) {
for _, node := range indexData.NavTree {
if node.Id == "admin" {
node.Children = append(node.Children, &dtos.NavLink{
Text: "Upgrade",
Id: "upgrading",
Url: l.Cfg.AppSubUrl + "/admin/upgrading",
Url: l.LicenseURL(req.SignedInUser),
Icon: "fa fa-fw fa-unlock-alt",
})
}

View File

@@ -27,6 +27,7 @@ func (ls *LoginService) Init() error {
return nil
}
// UpsertUser updates an existing user, or if it doesn't exist, inserts a new one.
func (ls *LoginService) UpsertUser(cmd *models.UpsertUserCommand) error {
extUser := cmd.ExternalUser
@@ -38,12 +39,10 @@ func (ls *LoginService) UpsertUser(cmd *models.UpsertUserCommand) error {
Login: extUser.Login,
}
err := bus.Dispatch(userQuery)
if err != models.ErrUserNotFound && err != nil {
return err
}
if err != nil {
if err := bus.Dispatch(userQuery); err != nil {
if err != models.ErrUserNotFound {
return err
}
if !cmd.SignupAllowed {
log.Warn("Not allowing %s login, user not found in internal user database and allow signup = false", extUser.AuthModule)
return ErrInvalidCredentials
@@ -74,7 +73,6 @@ func (ls *LoginService) UpsertUser(cmd *models.UpsertUserCommand) error {
return err
}
}
} else {
cmd.Result = userQuery.Result
@@ -99,9 +97,7 @@ func (ls *LoginService) UpsertUser(cmd *models.UpsertUserCommand) error {
}
}
err = syncOrgRoles(cmd.Result, extUser)
if err != nil {
if err := syncOrgRoles(cmd.Result, extUser); err != nil {
return err
}
@@ -112,16 +108,15 @@ func (ls *LoginService) UpsertUser(cmd *models.UpsertUserCommand) error {
}
}
err = ls.Bus.Dispatch(&models.SyncTeamsCommand{
err := ls.Bus.Dispatch(&models.SyncTeamsCommand{
User: cmd.Result,
ExternalUser: extUser,
})
if err == bus.ErrHandlerNotFound {
return nil
if err != nil && err != bus.ErrHandlerNotFound {
return err
}
return err
return nil
}
func createUser(extUser *models.ExternalUserInfo) (*models.User, error) {

View File

@@ -446,3 +446,76 @@ func TestMultiLDAP(t *testing.T) {
})
})
}
// mockLDAP represents testing struct for ldap testing
type mockLDAP struct {
dialCalledTimes int
loginCalledTimes int
closeCalledTimes int
usersCalledTimes int
bindCalledTimes int
dialErrReturn error
loginErrReturn error
loginReturn *models.ExternalUserInfo
bindErrReturn error
usersErrReturn error
usersFirstReturn []*models.ExternalUserInfo
usersRestReturn []*models.ExternalUserInfo
}
// Login test fn
func (mock *mockLDAP) Login(*models.LoginUserQuery) (*models.ExternalUserInfo, error) {
mock.loginCalledTimes = mock.loginCalledTimes + 1
return mock.loginReturn, mock.loginErrReturn
}
// Users test fn
func (mock *mockLDAP) Users([]string) ([]*models.ExternalUserInfo, error) {
mock.usersCalledTimes = mock.usersCalledTimes + 1
if mock.usersCalledTimes == 1 {
return mock.usersFirstReturn, mock.usersErrReturn
}
return mock.usersRestReturn, mock.usersErrReturn
}
// UserBind test fn
func (mock *mockLDAP) UserBind(string, string) error {
return nil
}
// Dial test fn
func (mock *mockLDAP) Dial() error {
mock.dialCalledTimes = mock.dialCalledTimes + 1
return mock.dialErrReturn
}
// Close test fn
func (mock *mockLDAP) Close() {
mock.closeCalledTimes = mock.closeCalledTimes + 1
}
func (mock *mockLDAP) Bind() error {
mock.bindCalledTimes++
return mock.bindErrReturn
}
func setup() *mockLDAP {
mock := &mockLDAP{}
newLDAP = func(config *ldap.ServerConfig) ldap.IServer {
return mock
}
return mock
}
func teardown() {
newLDAP = ldap.New
}

View File

@@ -1,119 +0,0 @@
package multildap
import (
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/ldap"
)
// MockLDAP represents testing struct for ldap testing
type MockLDAP struct {
dialCalledTimes int
loginCalledTimes int
closeCalledTimes int
usersCalledTimes int
bindCalledTimes int
dialErrReturn error
loginErrReturn error
loginReturn *models.ExternalUserInfo
bindErrReturn error
usersErrReturn error
usersFirstReturn []*models.ExternalUserInfo
usersRestReturn []*models.ExternalUserInfo
}
// Login test fn
func (mock *MockLDAP) Login(*models.LoginUserQuery) (*models.ExternalUserInfo, error) {
mock.loginCalledTimes = mock.loginCalledTimes + 1
return mock.loginReturn, mock.loginErrReturn
}
// Users test fn
func (mock *MockLDAP) Users([]string) ([]*models.ExternalUserInfo, error) {
mock.usersCalledTimes = mock.usersCalledTimes + 1
if mock.usersCalledTimes == 1 {
return mock.usersFirstReturn, mock.usersErrReturn
}
return mock.usersRestReturn, mock.usersErrReturn
}
// UserBind test fn
func (mock *MockLDAP) UserBind(string, string) error {
return nil
}
// Dial test fn
func (mock *MockLDAP) Dial() error {
mock.dialCalledTimes = mock.dialCalledTimes + 1
return mock.dialErrReturn
}
// Close test fn
func (mock *MockLDAP) Close() {
mock.closeCalledTimes = mock.closeCalledTimes + 1
}
func (mock *MockLDAP) Bind() error {
mock.bindCalledTimes++
return mock.bindErrReturn
}
// MockMultiLDAP represents testing struct for multildap testing
type MockMultiLDAP struct {
LoginCalledTimes int
UsersCalledTimes int
UserCalledTimes int
PingCalledTimes int
UsersResult []*models.ExternalUserInfo
}
func (mock *MockMultiLDAP) Ping() ([]*ServerStatus, error) {
mock.PingCalledTimes = mock.PingCalledTimes + 1
return nil, nil
}
// Login test fn
func (mock *MockMultiLDAP) Login(query *models.LoginUserQuery) (
*models.ExternalUserInfo, error,
) {
mock.LoginCalledTimes = mock.LoginCalledTimes + 1
return nil, nil
}
// Users test fn
func (mock *MockMultiLDAP) Users(logins []string) (
[]*models.ExternalUserInfo, error,
) {
mock.UsersCalledTimes = mock.UsersCalledTimes + 1
return mock.UsersResult, nil
}
// User test fn
func (mock *MockMultiLDAP) User(login string) (
*models.ExternalUserInfo, ldap.ServerConfig, error,
) {
mock.UserCalledTimes = mock.UserCalledTimes + 1
return nil, ldap.ServerConfig{}, nil
}
func setup() *MockLDAP {
mock := &MockLDAP{}
newLDAP = func(config *ldap.ServerConfig) ldap.IServer {
return mock
}
return mock
}
func teardown() {
newLDAP = ldap.New
}

View File

@@ -23,7 +23,12 @@ func (qs *QuotaService) QuotaReached(c *m.ReqContext, target string) (bool, erro
if !setting.Quota.Enabled {
return false, nil
}
// No request context means this is a background service, like LDAP Background Sync.
// TODO: we should replace the req context with a more limited interface or struct,
// something that we could easily provide from background jobs.
if c == nil {
return false, nil
}
// get the list of scopes that this target is valid for. Org, User, Global
scopes, err := m.GetQuotaScopes(target)
if err != nil {

View File

@@ -20,6 +20,8 @@ func init() {
registry.RegisterService(&RenderingService{})
}
var IsPhantomJSEnabled = false
type RenderingService struct {
log log.Logger
pluginInfo *plugins.RendererPlugin
@@ -68,6 +70,7 @@ func (rs *RenderingService) Run(ctx context.Context) error {
rs.log.Warn("phantomJS is deprecated and will be removed in a future release. " +
"You should consider migrating from phantomJS to grafana-image-renderer plugin.")
rs.renderAction = rs.renderViaPhantomJS
IsPhantomJSEnabled = true
<-ctx.Done()
return nil
}

View File

@@ -144,46 +144,48 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I
FROM annotation
LEFT OUTER JOIN ` + dialect.Quote("user") + ` as usr on usr.id = annotation.user_id
LEFT OUTER JOIN alert on alert.id = annotation.alert_id
INNER JOIN (
SELECT a.id from annotation a
`)
sql.WriteString(`WHERE annotation.org_id = ?`)
sql.WriteString(`WHERE a.org_id = ?`)
params = append(params, query.OrgId)
if query.AnnotationId != 0 {
// fmt.Print("annotation query")
sql.WriteString(` AND annotation.id = ?`)
sql.WriteString(` AND a.id = ?`)
params = append(params, query.AnnotationId)
}
if query.AlertId != 0 {
sql.WriteString(` AND annotation.alert_id = ?`)
sql.WriteString(` AND a.alert_id = ?`)
params = append(params, query.AlertId)
}
if query.DashboardId != 0 {
sql.WriteString(` AND annotation.dashboard_id = ?`)
sql.WriteString(` AND a.dashboard_id = ?`)
params = append(params, query.DashboardId)
}
if query.PanelId != 0 {
sql.WriteString(` AND annotation.panel_id = ?`)
sql.WriteString(` AND a.panel_id = ?`)
params = append(params, query.PanelId)
}
if query.UserId != 0 {
sql.WriteString(` AND annotation.user_id = ?`)
sql.WriteString(` AND a.user_id = ?`)
params = append(params, query.UserId)
}
if query.From > 0 && query.To > 0 {
sql.WriteString(` AND annotation.epoch <= ? AND annotation.epoch_end >= ?`)
sql.WriteString(` AND a.epoch <= ? AND a.epoch_end >= ?`)
params = append(params, query.To, query.From)
}
if query.Type == "alert" {
sql.WriteString(` AND annotation.alert_id > 0`)
sql.WriteString(` AND a.alert_id > 0`)
} else if query.Type == "annotation" {
sql.WriteString(` AND annotation.alert_id = 0`)
sql.WriteString(` AND a.alert_id = 0`)
}
if len(query.Tags) > 0 {
@@ -204,7 +206,7 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I
tagsSubQuery := fmt.Sprintf(`
SELECT SUM(1) FROM annotation_tag at
INNER JOIN tag on tag.id = at.tag_id
WHERE at.annotation_id = annotation.id
WHERE at.annotation_id = a.id
AND (
%s
)
@@ -223,7 +225,8 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I
query.Limit = 100
}
sql.WriteString(" ORDER BY epoch DESC" + dialect.Limit(query.Limit))
// order of ORDER BY arguments match the order of a sql index for performance
sql.WriteString(" ORDER BY a.org_id, a.epoch_end DESC, a.epoch DESC" + dialect.Limit(query.Limit) + " ) dt on dt.id = annotation.id")
items := make([]*annotations.ItemDTO, 0)

View File

@@ -123,7 +123,28 @@ func addAnnotationMig(mg *Migrator) {
mg.AddMigration("Make epoch_end the same as epoch", NewRawSqlMigration("UPDATE annotation SET epoch_end = epoch"))
mg.AddMigration("Move region to single row", &AddMakeRegionSingleRowMigration{})
// TODO! drop region_id column?
//
// 6.6.1: Optimize annotation queries
//
mg.AddMigration("Remove index org_id_epoch from annotation table", NewDropIndexMigration(table, &Index{
Cols: []string{"org_id", "epoch"}, Type: IndexType,
}))
mg.AddMigration("Remove index org_id_dashboard_id_panel_id_epoch from annotation table", NewDropIndexMigration(table, &Index{
Cols: []string{"org_id", "dashboard_id", "panel_id", "epoch"}, Type: IndexType,
}))
mg.AddMigration("Add index for org_id_dashboard_id_epoch_end_epoch on annotation table", NewAddIndexMigration(table, &Index{
Cols: []string{"org_id", "dashboard_id", "epoch_end", "epoch"}, Type: IndexType,
}))
mg.AddMigration("Add index for org_id_epoch_end_epoch on annotation table", NewAddIndexMigration(table, &Index{
Cols: []string{"org_id", "epoch_end", "epoch"}, Type: IndexType,
}))
mg.AddMigration("Remove index org_id_epoch_epoch_end from annotation table", NewDropIndexMigration(table, &Index{
Cols: []string{"org_id", "epoch", "epoch_end"}, Type: IndexType,
}))
}
type AddMakeRegionSingleRowMigration struct {

View File

@@ -6,6 +6,7 @@ import (
"strings"
"github.com/go-xorm/xorm"
"github.com/grafana/grafana/pkg/util/errutil"
"github.com/lib/pq"
)
@@ -155,3 +156,15 @@ func (db *Postgres) IsUniqueConstraintViolation(err error) bool {
func (db *Postgres) IsDeadlock(err error) bool {
return db.isThisError(err, "40P01")
}
func (db *Postgres) PostInsertId(table string, sess *xorm.Session) error {
if table != "org" {
return nil
}
// sync primary key sequence of org table
if _, err := sess.Exec("SELECT setval('org_id_seq', (SELECT max(id) FROM org));"); err != nil {
return errutil.Wrapf(err, "failed to sync primary key for org table")
}
return nil
}

View File
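
When rows are inserted into the org table with explicit IDs, the Postgres sequence behind the id column is left behind and the next implicitly generated ID collides; the hook above resynchronizes the sequence after such inserts. A standalone sketch of the same fix using database/sql, with a placeholder DSN (the setval statement matches the one in the diff):

package main

import (
	"database/sql"
	"log"

	_ "github.com/lib/pq"
)

// syncOrgSequence bumps the org id sequence to the current max(id) so the next
// implicit insert does not collide with rows written using explicit IDs.
func syncOrgSequence(db *sql.DB) error {
	_, err := db.Exec("SELECT setval('org_id_seq', (SELECT max(id) FROM org));")
	return err
}

func main() {
	// Placeholder connection string; adjust for a real database.
	db, err := sql.Open("postgres", "postgres://grafana:grafana@localhost/grafana?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	if err := syncOrgSequence(db); err != nil {
		log.Fatal(err)
	}
}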

@@ -36,11 +36,10 @@ const (
)
const (
DEV = "development"
PROD = "production"
TEST = "test"
APP_NAME = "Grafana"
APP_NAME_ENTERPRISE = "Grafana Enterprise"
DEV = "development"
PROD = "production"
TEST = "test"
APP_NAME = "Grafana"
)
var (
@@ -619,9 +618,6 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
Raw = cfg.Raw
ApplicationName = APP_NAME
if IsEnterprise {
ApplicationName = APP_NAME_ENTERPRISE
}
Env, err = valueAsString(iniFile.Section(""), "app_mode", "development")
if err != nil {

View File

@@ -58,6 +58,7 @@ func init() {
"AWS/DMS": {"CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCIncomingChanges", "CDCLatencySource", "CDCLatencyTarget", "CDCThroughputBandwidthSource", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CPUUtilization", "FreeStorageSpace", "FreeableMemory", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "ReadIOPS", "ReadLatency", "ReadThroughput", "SwapUsage", "WriteIOPS", "WriteLatency", "WriteThroughput"},
"AWS/DocDB": {"BackupRetentionPeriodStorageUsed", "BufferCacheHitRatio", "CPUUtilization", "DatabaseConnections", "DBInstanceReplicaLag", "DBClusterReplicaLagMaximum", "DBClusterReplicaLagMinimum", "DiskQueueDepth", "EngineUptime", "FreeableMemory", "FreeLocalStorage", "NetworkReceiveThroughput", "NetworkThroughput", "NetworkTransmitThroughput", "ReadIOPS", "ReadLatency", "ReadThroughput", "SnapshotStorageUsed", "SwapUsage", "TotalBackupStorageBilled", "VolumeBytesUsed", "VolumeReadIOPs", "VolumeWriteIOPs", "WriteIOPS", "WriteLatency", "WriteThroughput"},
"AWS/DX": {"ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelRx", "ConnectionLightLevelTx", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionState"},
"AWS/DAX": {"CPUUtilization", "NetworkPacketsIn", "NetworkPacketsOut", "GetItemRequestCount", "BatchGetItemRequestCount", "BatchWriteItemRequestCount", "DeleteItemRequestCount", "PutItemRequestCount", "UpdateItemRequestCount", "TransactWriteItemsCount", "TransactGetItemsCount", "ItemCacheHits", "ItemCacheMisses", "QueryCacheHits", "QueryCacheMisses", "ScanCacheHits", "ScanCacheMisses", "TotalRequestCount", "ErrorRequestCount", "FaultRequestCount", "FailedRequestCount", "QueryRequestCount", "ScanRequestCount", "ClientConnections", "EstimatedDbSize", "EvictedSize"},
"AWS/DynamoDB": {"ConditionalCheckFailedRequests", "ConsumedReadCapacityUnits", "ConsumedWriteCapacityUnits", "OnlineIndexConsumedWriteCapacity", "OnlineIndexPercentageProgress", "OnlineIndexThrottleEvents", "PendingReplicationCount", "ProvisionedReadCapacityUnits", "ProvisionedWriteCapacityUnits", "ReadThrottleEvents", "ReplicationLatency", "ReturnedBytes", "ReturnedItemCount", "ReturnedRecordsCount", "SuccessfulRequestLatency", "SystemErrors", "ThrottledRequests", "TimeToLiveDeletedItemCount", "UserErrors", "WriteThrottleEvents"},
"AWS/EBS": {"BurstBalance", "VolumeConsumedReadWriteOps", "VolumeIdleTime", "VolumeQueueLength", "VolumeReadBytes", "VolumeReadOps", "VolumeThroughputPercentage", "VolumeTotalReadTime", "VolumeTotalWriteTime", "VolumeWriteBytes", "VolumeWriteOps"},
"AWS/EC2": {"CPUCreditBalance", "CPUCreditUsage", "CPUSurplusCreditBalance", "CPUSurplusCreditsCharged", "CPUUtilization", "DiskReadBytes", "DiskReadOps", "DiskWriteBytes", "DiskWriteOps", "EBSByteBalance%", "EBSIOBalance%", "EBSReadBytes", "EBSReadOps", "EBSWriteBytes", "EBSWriteOps", "NetworkIn", "NetworkOut", "NetworkPacketsIn", "NetworkPacketsOut", "StatusCheckFailed", "StatusCheckFailed_Instance", "StatusCheckFailed_System"},
@@ -140,6 +141,7 @@ func init() {
"AWS/DMS": {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
"AWS/DocDB": {"DBClusterIdentifier"},
"AWS/DX": {"ConnectionId"},
"AWS/DAX": {"Account", "ClusterId", "NodeId"},
"AWS/DynamoDB": {"GlobalSecondaryIndexName", "Operation", "ReceivingRegion", "StreamLabel", "TableName"},
"AWS/EBS": {"VolumeId"},
"AWS/EC2": {"AutoScalingGroupName", "ImageId", "InstanceId", "InstanceType"},

View File

@@ -100,7 +100,7 @@ func (e *CloudWatchExecutor) transformQueryResponseToQueryResult(cloudwatchRespo
}
if partialData {
queryResult.ErrorString = "Cloudwatch GetMetricData error: Too many datapoints requested - your search have been limited. Please try to reduce the time range"
queryResult.ErrorString = "Cloudwatch GetMetricData error: Too many datapoints requested - your search has been limited. Please try to reduce the time range"
}
queryResult.Series = append(queryResult.Series, timeSeries...)

View File

@@ -68,8 +68,15 @@ func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time
var period int
if strings.ToLower(p) == "auto" || p == "" {
deltaInSeconds := endTime.Sub(startTime).Seconds()
periods := []int{60, 300, 900, 3600, 21600}
period = closest(periods, int(math.Ceil(deltaInSeconds/2000)))
periods := []int{60, 300, 900, 3600, 21600, 86400}
datapoints := int(math.Ceil(deltaInSeconds / 2000))
period = periods[len(periods)-1]
for _, value := range periods {
if datapoints <= value {
period = value
break
}
}
} else {
if regexp.MustCompile(`^\d+$`).Match([]byte(p)) {
period, err = strconv.Atoi(p)
@@ -158,25 +165,3 @@ func sortDimensions(dimensions map[string][]string) map[string][]string {
}
return sortedDimensions
}
func closest(array []int, num int) int {
minDiff := array[len(array)-1]
var closest int
if num <= array[0] {
return array[0]
}
if num >= array[len(array)-1] {
return array[len(array)-1]
}
for _, value := range array {
var m = int(math.Abs(float64(num - value)))
if m <= minDiff {
minDiff = m
closest = value
}
}
return closest
}

View File
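
The rewritten auto-period logic divides the requested range by a budget of roughly 2000 datapoints and then rounds up to the next allowed CloudWatch period, falling back to one day for very long ranges. A worked sketch of the same selection with a couple of example ranges (the 2000-datapoint budget and the period steps mirror the diff):

package main

import (
	"fmt"
	"math"
	"time"
)

var periods = []int{60, 300, 900, 3600, 21600, 86400} // allowed periods in seconds

func autoPeriod(rangeDur time.Duration) int {
	// Aim for at most ~2000 datapoints over the range.
	minPeriod := int(math.Ceil(rangeDur.Seconds() / 2000))
	period := periods[len(periods)-1]
	for _, p := range periods {
		if minPeriod <= p {
			period = p
			break
		}
	}
	return period
}

func main() {
	fmt.Println(autoPeriod(5 * time.Minute))          // 60
	fmt.Println(autoPeriod(7 * 24 * time.Hour))       // 900 (604800s / 2000 ≈ 303s → next step is 900)
	fmt.Println(autoPeriod(2 * 365 * 24 * time.Hour)) // 86400
}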

@@ -2,6 +2,7 @@ package cloudwatch
import (
"testing"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
@@ -127,66 +128,86 @@ func TestRequestParser(t *testing.T) {
"period": "auto",
})
Convey("when time range is short", func() {
Convey("when time range is 5 minutes", func() {
query.Set("period", "auto")
timeRange := tsdb.NewTimeRange("now-2h", "now-1h")
from, _ := timeRange.ParseFrom()
to, _ := timeRange.ParseTo()
to := time.Now()
from := to.Local().Add(time.Minute * time.Duration(5))
res, err := parseRequestQuery(query, "ref1", from, to)
So(err, ShouldBeNil)
So(res.Period, ShouldEqual, 60)
})
Convey("when time range is 5y", func() {
timeRange := tsdb.NewTimeRange("now-5y", "now")
from, _ := timeRange.ParseFrom()
to, _ := timeRange.ParseTo()
Convey("when time range is 1 day", func() {
query.Set("period", "auto")
to := time.Now()
from := to.AddDate(0, 0, -1)
res, err := parseRequestQuery(query, "ref1", from, to)
So(err, ShouldBeNil)
So(res.Period, ShouldEqual, 60)
})
Convey("when time range is 2 days", func() {
query.Set("period", "auto")
to := time.Now()
from := to.AddDate(0, 0, -2)
res, err := parseRequestQuery(query, "ref1", from, to)
So(err, ShouldBeNil)
So(res.Period, ShouldEqual, 300)
})
Convey("when time range is 7 days", func() {
query.Set("period", "auto")
to := time.Now()
from := to.AddDate(0, 0, -7)
res, err := parseRequestQuery(query, "ref1", from, to)
So(err, ShouldBeNil)
So(res.Period, ShouldEqual, 900)
})
Convey("when time range is 30 days", func() {
query.Set("period", "auto")
to := time.Now()
from := to.AddDate(0, 0, -30)
res, err := parseRequestQuery(query, "ref1", from, to)
So(err, ShouldBeNil)
So(res.Period, ShouldEqual, 3600)
})
Convey("when time range is 90 days", func() {
query.Set("period", "auto")
to := time.Now()
from := to.AddDate(0, 0, -90)
res, err := parseRequestQuery(query, "ref1", from, to)
So(err, ShouldBeNil)
So(res.Period, ShouldEqual, 21600)
})
})
Convey("closest works as expected", func() {
periods := []int{60, 300, 900, 3600, 21600}
Convey("and input is lower than 60", func() {
So(closest(periods, 6), ShouldEqual, 60)
Convey("when time range is 1 year", func() {
query.Set("period", "auto")
to := time.Now()
from := to.AddDate(-1, 0, 0)
res, err := parseRequestQuery(query, "ref1", from, to)
So(err, ShouldBeNil)
So(res.Period, ShouldEqual, 21600)
})
Convey("and input is exactly 60", func() {
So(closest(periods, 60), ShouldEqual, 60)
})
Convey("when time range is 2 years", func() {
query.Set("period", "auto")
to := time.Now()
from := to.AddDate(-2, 0, 0)
Convey("and input is exactly between two steps", func() {
So(closest(periods, 180), ShouldEqual, 300)
})
Convey("and input is exactly 2000", func() {
So(closest(periods, 2000), ShouldEqual, 900)
})
Convey("and input is exactly 5000", func() {
So(closest(periods, 5000), ShouldEqual, 3600)
})
Convey("and input is exactly 50000", func() {
So(closest(periods, 50000), ShouldEqual, 21600)
})
Convey("and period isn't shorter than min retension for 15 days", func() {
So(closest(periods, (60*60*24*15)+1/2000), ShouldBeGreaterThanOrEqualTo, 300)
})
Convey("and period isn't shorter than min retension for 63 days", func() {
So(closest(periods, (60*60*24*63)+1/2000), ShouldBeGreaterThanOrEqualTo, 3600)
})
Convey("and period isn't shorter than min retension for 455 days", func() {
So(closest(periods, (60*60*24*455)+1/2000), ShouldBeGreaterThanOrEqualTo, 21600)
res, err := parseRequestQuery(query, "ref1", from, to)
So(err, ShouldBeNil)
So(res.Period, ShouldEqual, 86400)
})
})
})
})
}

View File

@@ -100,8 +100,10 @@ func parseGetMetricDataTimeSeries(metricDataResults map[string]*cloudwatch.Metri
series.Tags[key] = values[0]
} else {
for _, value := range values {
if value == label || value == "*" || strings.Contains(label, value) {
if value == label || value == "*" {
series.Tags[key] = label
} else if strings.Contains(label, value) {
series.Tags[key] = value
}
}
}

View File

@@ -53,7 +53,7 @@ func TestCloudWatchResponseParser(t *testing.T) {
Namespace: "AWS/ApplicationELB",
MetricName: "TargetResponseTime",
Dimensions: map[string][]string{
"LoadBalancer": {"lb2"},
"LoadBalancer": {"lb1", "lb2"},
"TargetGroup": {"tg"},
},
Stats: "Average",
@@ -65,8 +65,12 @@ func TestCloudWatchResponseParser(t *testing.T) {
So(err, ShouldBeNil)
So(partialData, ShouldBeFalse)
So(timeSeries.Name, ShouldEqual, "lb2 Expanded")
So(timeSeries.Tags["LoadBalancer"], ShouldEqual, "lb2")
So(timeSeries.Name, ShouldEqual, "lb1 Expanded")
So(timeSeries.Tags["LoadBalancer"], ShouldEqual, "lb1")
timeSeries2 := (*series)[1]
So(timeSeries2.Name, ShouldEqual, "lb2 Expanded")
So(timeSeries2.Tags["LoadBalancer"], ShouldEqual, "lb2")
})
Convey("can expand dimension value using substring", func() {
@@ -110,7 +114,7 @@ func TestCloudWatchResponseParser(t *testing.T) {
Namespace: "AWS/ApplicationELB",
MetricName: "TargetResponseTime",
Dimensions: map[string][]string{
"LoadBalancer": {"lb1"},
"LoadBalancer": {"lb1", "lb2"},
"TargetGroup": {"tg"},
},
Stats: "Average",
@@ -119,11 +123,14 @@ func TestCloudWatchResponseParser(t *testing.T) {
}
series, partialData, err := parseGetMetricDataTimeSeries(resp, query)
timeSeries := (*series)[0]
So(err, ShouldBeNil)
So(partialData, ShouldBeFalse)
So(timeSeries.Name, ShouldEqual, "lb1 Expanded")
So(timeSeries.Tags["LoadBalancer"], ShouldEqual, "lb1")
timeSeries2 := (*series)[1]
So(timeSeries2.Name, ShouldEqual, "lb2 Expanded")
So(timeSeries2.Tags["LoadBalancer"], ShouldEqual, "lb2")
})
Convey("can expand dimension value using wildcard", func() {

View File

@@ -542,8 +542,11 @@ func getRandomWalk(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery, index int) *tsd
startValue := query.Model.Get("startValue").MustFloat64(rand.Float64() * 100)
spread := query.Model.Get("spread").MustFloat64(1)
noise := query.Model.Get("noise").MustFloat64(0)
min, hasMin := query.Model.Get("min").Float64()
max, hasMax := query.Model.Get("max").Float64()
min, err := query.Model.Get("min").Float64()
hasMin := err == nil
max, err := query.Model.Get("max").Float64()
hasMax := err == nil
points := make(tsdb.TimeSeriesPoints, 0)
walker := startValue
@@ -551,12 +554,12 @@ func getRandomWalk(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery, index int) *tsd
for i := int64(0); i < 10000 && timeWalkerMs < to; i++ {
nextValue := walker + (rand.Float64() * noise)
if hasMin == nil && nextValue < min {
if hasMin && nextValue < min {
nextValue = min
walker = min
}
if hasMax == nil && nextValue > max {
if hasMax && nextValue > max {
nextValue = max
walker = max
}

View File

@@ -1,4 +1,5 @@
import '@babel/polyfill';
import 'url-search-params-polyfill'; // fetch polyfill needed for PhantomJs rendering
import 'file-saver';
import 'lodash';
import 'jquery';

View File

@@ -7,9 +7,13 @@ export interface BrandComponentProps {
}
export const LoginLogo: FC<BrandComponentProps> = ({ className }) => {
const maxSize = css`
max-width: 150px;
`;
return (
<>
<img className={className} src="public/img/grafana_icon.svg" alt="Grafana" />
<img className={cx(className, maxSize)} src="public/img/grafana_icon.svg" alt="Grafana" />
<div className="logo-wordmark" />
</>
);

View File

@@ -5,7 +5,7 @@ export interface FooterLink {
text: string;
icon?: string;
url?: string;
target: string;
target?: string;
}
export let getFooterLinks = (): FooterLink[] => {
@@ -17,7 +17,7 @@ export let getFooterLinks = (): FooterLink[] => {
target: '_blank',
},
{
text: 'Support & Enterprise',
text: 'Support',
icon: 'fa fa-support',
url: 'https://grafana.com/products/enterprise/?utm_source=grafana_footer',
target: '_blank',
@@ -32,15 +32,12 @@ export let getFooterLinks = (): FooterLink[] => {
};
export let getVersionLinks = (): FooterLink[] => {
const { buildInfo } = config;
const { buildInfo, licenseInfo } = config;
const links: FooterLink[] = [];
const stateInfo = licenseInfo.stateInfo ? ` (${licenseInfo.stateInfo})` : '';
const links: FooterLink[] = [
{
text: `Grafana v${buildInfo.version} (commit: ${buildInfo.commit})`,
url: 'https://grafana.com',
target: '_blank',
},
];
links.push({ text: `${buildInfo.edition}${stateInfo}`, url: licenseInfo.licenseUrl });
links.push({ text: `v${buildInfo.version} (${buildInfo.commit})` });
if (buildInfo.hasUpdate) {
links.push({

View File

@@ -103,7 +103,7 @@ export class LoginForm extends PureComponent<Props, State> {
Log In
</button>
) : (
<button type="submit" className="btn btn-large p-x-2 btn-inverse btn-loading">
<button type="submit" disabled className="btn btn-large p-x-2 btn-inverse btn-loading">
Logging In<span>.</span>
<span>.</span>
<span>.</span>

View File

@@ -9,13 +9,14 @@ import LoginCtrl from './LoginCtrl';
import { LoginForm } from './LoginForm';
import { ChangePassword } from './ChangePassword';
import { Branding } from 'app/core/components/Branding/Branding';
import { Footer } from 'app/core/components/Footer/Footer';
export const LoginPage: FC = () => {
return (
<Branding.LoginBackground className="login container">
<div className="login-content">
<div className="login-branding">
<Branding.LoginLogo className="logo-icon" />
<Branding.LoginLogo className="login-logo" />
</div>
<LoginCtrl>
{({
@@ -62,6 +63,7 @@ export const LoginPage: FC = () => {
<div className="clearfix" />
</div>
<Footer />
</Branding.LoginBackground>
);
};

View File

@@ -60,13 +60,15 @@ const Navigation = ({ main }: { main: NavModelItem }) => {
<TabsBar className="page-header__tabs" hideBorder={true}>
{main.children.map((child, index) => {
return (
<Tab
label={child.text}
active={child.active}
key={`${child.url}-${index}`}
icon={child.icon}
onChangeTab={() => goToUrl(index)}
/>
!child.hideFromTabs && (
<Tab
label={child.text}
active={child.active}
key={`${child.url}-${index}`}
icon={child.icon}
onChangeTab={() => goToUrl(index)}
/>
)
);
})}
</TabsBar>

View File

@@ -1,6 +1,7 @@
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module';
import { promiseToDigest } from '../../utils/promiseToDigest';
const template = `
<div class="dropdown cascade-open">
@@ -138,9 +139,11 @@ export function queryPartEditorDirective(templateSrv: any) {
}
$scope.showActionsMenu = () => {
$scope.handleEvent({ $event: { name: 'get-part-actions' } }).then((res: any) => {
$scope.partActions = res;
});
promiseToDigest($scope)(
$scope.handleEvent({ $event: { name: 'get-part-actions' } }).then((res: any) => {
$scope.partActions = res;
})
);
};
$scope.triggerPartAction = (action: string) => {

View File

@@ -172,7 +172,7 @@ export function sqlPartEditorDirective(templateSrv: any) {
}
const paramValue = templateSrv.highlightVariablesAsHtml(part.params[index]);
const $paramLink = $('<a class="graphite-func-param-link pointer">' + paramValue + '</a>');
const $paramLink = $('<a class="query-part__link">' + paramValue + '</a>');
const $input = $(paramTemplate);
$paramLink.appendTo($paramsContainer);

View File

@@ -223,7 +223,6 @@ describe('dataFrameToLogsModel', () => {
expect(logsModel.rows).toHaveLength(2);
expect(logsModel.rows).toMatchObject([
{
timestamp: '2019-04-26T09:28:11.352440161Z',
entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'info',
@@ -231,7 +230,6 @@ describe('dataFrameToLogsModel', () => {
uid: 'foo',
},
{
timestamp: '2019-04-26T14:42:50.991981292Z',
entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'error',

View File

@@ -312,7 +312,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
let logLevel = LogLevel.unknown;
if (logLevelField) {
if (logLevelField && logLevelField.values.get(j)) {
logLevel = getLogLevelFromKey(logLevelField.values.get(j));
} else if (seriesLogLevel) {
logLevel = seriesLogLevel;
@@ -328,14 +328,13 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
timeFromNow: time.fromNow(),
timeEpochMs: time.valueOf(),
timeLocal: time.format(logTimeFormat),
timeUtc: toUtc(ts).format(logTimeFormat),
timeUtc: toUtc(time.valueOf()).format(logTimeFormat),
uniqueLabels,
hasAnsi,
searchWords,
entry: hasAnsi ? ansicolor.strip(message) : message,
raw: message,
labels: stringField.labels,
timestamp: ts,
uid: idField ? idField.values.get(j) : j.toString(),
});
}

View File

@@ -2,7 +2,7 @@ import locationUtil from 'app/core/utils/location_util';
jest.mock('app/core/config', () => {
return {
appSubUrl: '/subUrl',
getConfig: () => ({ appSubUrl: '/subUrl' }),
};
});

View File

@@ -15,7 +15,15 @@ import {
} from './explore';
import { ExploreUrlState, ExploreMode } from 'app/types/explore';
import store from 'app/core/store';
import { DataQueryError, LogsDedupStrategy, LogsModel, LogLevel, dateTime, MutableDataFrame } from '@grafana/data';
import {
DataQueryError,
LogsDedupStrategy,
LogsModel,
LogLevel,
dateTime,
MutableDataFrame,
LogRowModel,
} from '@grafana/data';
import { RefreshPicker } from '@grafana/ui';
const DEFAULT_EXPLORE_STATE: ExploreUrlState = {
@@ -372,11 +380,10 @@ describe('refreshIntervalToSortOrder', () => {
});
describe('sortLogsResult', () => {
const firstRow = {
const firstRow: LogRowModel = {
rowIndex: 0,
entryFieldIndex: 0,
dataFrame: new MutableDataFrame(),
timestamp: '2019-01-01T21:00:0.0000000Z',
entry: '',
hasAnsi: false,
labels: {},
@@ -389,17 +396,16 @@ describe('sortLogsResult', () => {
uid: '1',
};
const sameAsFirstRow = firstRow;
const secondRow = {
const secondRow: LogRowModel = {
rowIndex: 1,
entryFieldIndex: 0,
dataFrame: new MutableDataFrame(),
timestamp: '2019-01-01T22:00:0.0000000Z',
entry: '',
hasAnsi: false,
labels: {},
logLevel: LogLevel.info,
raw: '',
timeEpochMs: 0,
timeEpochMs: 10,
timeFromNow: '',
timeLocal: '',
timeUtc: '',

View File

@@ -88,11 +88,12 @@ export async function getExploreUrl(args: GetExploreUrlArguments) {
const range = timeSrv.timeRangeForUrl();
let state: Partial<ExploreUrlState> = { range };
if (exploreDatasource.interpolateVariablesInQueries) {
const scopedVars = panel.scopedVars || {};
state = {
...state,
datasource: exploreDatasource.name,
context: 'explore',
queries: exploreDatasource.interpolateVariablesInQueries(exploreTargets),
queries: exploreDatasource.interpolateVariablesInQueries(exploreTargets, scopedVars),
};
} else {
state = {
@@ -106,8 +107,7 @@ export async function getExploreUrl(args: GetExploreUrlArguments) {
const exploreState = JSON.stringify({ ...state, originPanelId: panel.id });
url = renderUrl('/explore', { left: exploreState });
}
const finalUrl = config.appSubUrl + url;
return finalUrl;
return url;
}
export function buildQueryTransaction(
@@ -473,11 +473,11 @@ export const getRefIds = (value: any): string[] => {
};
export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
if (a.timestamp < b.timestamp) {
if (a.timeEpochMs < b.timeEpochMs) {
return -1;
}
if (a.timestamp > b.timestamp) {
if (a.timeEpochMs > b.timeEpochMs) {
return 1;
}
@@ -485,11 +485,11 @@ export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
};
const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => {
if (a.timestamp > b.timestamp) {
if (a.timeEpochMs > b.timeEpochMs) {
return -1;
}
if (a.timestamp < b.timestamp) {
if (a.timeEpochMs < b.timeEpochMs) {
return 1;
}

View File

@@ -1,7 +1,7 @@
import config from 'app/core/config';
import { getConfig } from 'app/core/config';
export const stripBaseFromUrl = (url: string): string => {
const appSubUrl = config.appSubUrl;
const appSubUrl = getConfig().appSubUrl;
const stripExtraChars = appSubUrl.endsWith('/') ? 1 : 0;
const urlWithoutBase =
url.length > 0 && url.indexOf(appSubUrl) === 0 ? url.slice(appSubUrl.length - stripExtraChars) : url;
@@ -9,4 +9,11 @@ export const stripBaseFromUrl = (url: string): string => {
return urlWithoutBase;
};
export default { stripBaseFromUrl };
export const assureBaseUrl = (url: string) => {
if (url.startsWith('/')) {
return `${getConfig().appSubUrl}${stripBaseFromUrl(url)}`;
}
return url;
};
export default { stripBaseFromUrl, assureBaseUrl };

View File

@@ -57,7 +57,6 @@ export const LicenseChrome: React.FC<Props> = ({ header, editionNotice, subheade
}}
>
<img
className="logo-icon"
src="/public/img/grafana_icon.svg"
alt="Grafana"
width="80px"

View File

@@ -180,7 +180,7 @@ exports[`ServerStats Should render table with stats 1`] = `
className="fa fa-support"
/>
Support & Enterprise
Support
</a>
</li>
<li>
@@ -198,13 +198,22 @@ exports[`ServerStats Should render table with stats 1`] = `
</li>
<li>
<a
href="https://grafana.com"
rel="noopener"
target="_blank"
>
<i />
Grafana vv1.0 (commit: 1)
undefined
</a>
</li>
<li>
<a
rel="noopener"
target="_blank"
>
<i />
vv1.0 (1)
</a>
</li>
</ul>

View File

@@ -10,6 +10,7 @@ import { CoreEvents } from 'app/types';
import { GrafanaRootScope } from 'app/routes/GrafanaCtrl';
import { AppEvents } from '@grafana/data';
import { e2e } from '@grafana/e2e';
import locationUtil from 'app/core/utils/location_util';
export class SettingsCtrl {
dashboard: DashboardModel;
@@ -183,7 +184,7 @@ export class SettingsCtrl {
this.buildSectionList();
const currentSection: any = _.find(this.sections, { id: this.viewId } as any);
this.$location.url(currentSection.url);
this.$location.url(locationUtil.stripBaseFromUrl(currentSection.url));
}
deleteDashboard() {

View File

@@ -2,11 +2,11 @@ import React, { Component } from 'react';
import { renderMarkdown, LinkModelSupplier, ScopedVars } from '@grafana/data';
import { Tooltip, PopoverContent } from '@grafana/ui';
import { getLocationSrv } from '@grafana/runtime';
import { PanelModel } from 'app/features/dashboard/state/PanelModel';
import templateSrv from 'app/features/templating/template_srv';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { getLocationSrv } from '@grafana/runtime';
enum InfoMode {
Error = 'Error',

View File

@@ -24,6 +24,8 @@ const setup = (propOverrides?: object) => {
loadDataSource: jest.fn(),
setDataSourceName,
updateDataSource: jest.fn(),
initDataSourceSettings: jest.fn(),
testDataSource: jest.fn(),
setIsDefault,
dataSourceLoaded,
query: {},

View File

@@ -1,7 +1,6 @@
// Libraries
import React, { PureComponent } from 'react';
import { hot } from 'react-hot-loader';
import { connect } from 'react-redux';
import isString from 'lodash/isString';
import { e2e } from '@grafana/e2e';
// Components
@@ -11,11 +10,15 @@ import BasicSettings from './BasicSettings';
import ButtonRow from './ButtonRow';
// Services & Utils
import appEvents from 'app/core/app_events';
import { getBackendSrv } from 'app/core/services/backend_srv';
import { getDatasourceSrv } from 'app/features/plugins/datasource_srv';
// Actions & selectors
import { getDataSource, getDataSourceMeta } from '../state/selectors';
import { deleteDataSource, loadDataSource, updateDataSource } from '../state/actions';
import {
deleteDataSource,
loadDataSource,
updateDataSource,
initDataSourceSettings,
testDataSource,
} from '../state/actions';
import { getNavModel } from 'app/core/selectors/navModel';
import { getRouteParamsId } from 'app/core/selectors/location';
// Types
@@ -24,8 +27,8 @@ import { UrlQueryMap } from '@grafana/runtime';
import { DataSourcePluginMeta, DataSourceSettings, NavModel } from '@grafana/data';
import { getDataSourceLoadingNav } from '../state/navModel';
import PluginStateinfo from 'app/features/plugins/PluginStateInfo';
import { importDataSourcePlugin } from 'app/features/plugins/plugin_loader';
import { dataSourceLoaded, setDataSourceName, setIsDefault } from '../state/reducers';
import { connectWithCleanUp } from 'app/core/components/connectWithCleanUp';
export interface Props {
navModel: NavModel;
@@ -38,55 +41,22 @@ export interface Props {
updateDataSource: typeof updateDataSource;
setIsDefault: typeof setIsDefault;
dataSourceLoaded: typeof dataSourceLoaded;
initDataSourceSettings: typeof initDataSourceSettings;
testDataSource: typeof testDataSource;
plugin?: GenericDataSourcePlugin;
query: UrlQueryMap;
page?: string;
testingStatus?: {
message?: string;
status?: string;
};
loadError?: Error | string;
}
interface State {
plugin?: GenericDataSourcePlugin;
isTesting?: boolean;
testingMessage?: string;
testingStatus?: string;
loadError?: any;
}
export class DataSourceSettingsPage extends PureComponent<Props, State> {
constructor(props: Props) {
super(props);
this.state = {
plugin: props.plugin,
};
}
async loadPlugin(pluginId?: string) {
const { dataSourceMeta } = this.props;
let importedPlugin: GenericDataSourcePlugin;
try {
importedPlugin = await importDataSourcePlugin(dataSourceMeta);
} catch (e) {
console.log('Failed to import plugin module', e);
}
this.setState({ plugin: importedPlugin });
}
async componentDidMount() {
const { loadDataSource, pageId } = this.props;
if (isNaN(pageId)) {
this.setState({ loadError: 'Invalid ID' });
return;
}
try {
await loadDataSource(pageId);
if (!this.state.plugin) {
await this.loadPlugin();
}
} catch (err) {
this.setState({ loadError: err });
}
export class DataSourceSettingsPage extends PureComponent<Props> {
componentDidMount() {
const { initDataSourceSettings, pageId } = this.props;
initDataSourceSettings(pageId);
}
onSubmit = async (evt: React.FormEvent<HTMLFormElement>) => {
@@ -136,40 +106,9 @@ export class DataSourceSettingsPage extends PureComponent<Props, State> {
);
}
async testDataSource() {
const dsApi = await getDatasourceSrv().get(this.props.dataSource.name);
if (!dsApi.testDatasource) {
return;
}
this.setState({ isTesting: true, testingMessage: 'Testing...', testingStatus: 'info' });
getBackendSrv().withNoBackendCache(async () => {
try {
const result = await dsApi.testDatasource();
this.setState({
isTesting: false,
testingStatus: result.status,
testingMessage: result.message,
});
} catch (err) {
let message = '';
if (err.statusText) {
message = 'HTTP Error ' + err.statusText;
} else {
message = err.message;
}
this.setState({
isTesting: false,
testingStatus: 'error',
testingMessage: message,
});
}
});
testDataSource() {
const { dataSource, testDataSource } = this.props;
testDataSource(dataSource.name);
}
get hasDataSource() {
@@ -218,7 +157,7 @@ export class DataSourceSettingsPage extends PureComponent<Props, State> {
}
renderConfigPageBody(page: string) {
const { plugin } = this.state;
const { plugin } = this.props;
if (!plugin || !plugin.configPages) {
return null; // still loading
}
@@ -233,8 +172,7 @@ export class DataSourceSettingsPage extends PureComponent<Props, State> {
}
renderSettings() {
const { dataSourceMeta, setDataSourceName, setIsDefault, dataSource } = this.props;
const { testingMessage, testingStatus, plugin } = this.state;
const { dataSourceMeta, setDataSourceName, setIsDefault, dataSource, testingStatus, plugin } = this.props;
return (
<form onSubmit={this.onSubmit}>
@@ -265,10 +203,10 @@ export class DataSourceSettingsPage extends PureComponent<Props, State> {
)}
<div className="gf-form-group">
{testingMessage && (
<div className={`alert-${testingStatus} alert`} aria-label={e2e.pages.DataSource.selectors.alert}>
{testingStatus && testingStatus.message && (
<div className={`alert-${testingStatus.status} alert`} aria-label={e2e.pages.DataSource.selectors.alert}>
<div className="alert-icon">
{testingStatus === 'error' ? (
{testingStatus.status === 'error' ? (
<i className="fa fa-exclamation-triangle" />
) : (
<i className="fa fa-check" />
@@ -276,7 +214,7 @@ export class DataSourceSettingsPage extends PureComponent<Props, State> {
</div>
<div className="alert-body">
<div className="alert-title" aria-label={e2e.pages.DataSource.selectors.alertMessage}>
{testingMessage}
{testingStatus.message}
</div>
</div>
</div>
@@ -294,8 +232,7 @@ export class DataSourceSettingsPage extends PureComponent<Props, State> {
}
render() {
const { navModel, page } = this.props;
const { loadError } = this.state;
const { navModel, page, loadError } = this.props;
if (loadError) {
return this.renderLoadError(loadError);
@@ -315,6 +252,7 @@ function mapStateToProps(state: StoreState) {
const pageId = getRouteParamsId(state.location);
const dataSource = getDataSource(state.dataSources, pageId);
const page = state.location.query.page as string;
const { plugin, loadError, testingStatus } = state.dataSourceSettings;
return {
navModel: getNavModel(
@@ -327,6 +265,9 @@ function mapStateToProps(state: StoreState) {
pageId: pageId,
query: state.location.query,
page,
plugin,
loadError,
testingStatus,
};
}
@@ -337,6 +278,10 @@ const mapDispatchToProps = {
updateDataSource,
setIsDefault,
dataSourceLoaded,
initDataSourceSettings,
testDataSource,
};
export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(DataSourceSettingsPage));
export default hot(module)(
connectWithCleanUp(mapStateToProps, mapDispatchToProps, state => state.dataSourceSettings)(DataSourceSettingsPage)
);
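The page now reads plugin, loadError and testingStatus from the new dataSourceSettings slice, so the default export is wrapped with connectWithCleanUp to reset that slice when the component unmounts. The wrapper's implementation is not part of this diff; below is a minimal sketch of the idea, assuming react-redux hooks are available and using a hypothetical cleanUpAction handled by a root reducer (both names are assumptions, not the actual Grafana code):

import React, { ComponentType, useEffect } from 'react';
import { connect, useDispatch } from 'react-redux';

type StateSelector = (state: any) => any;

// Hypothetical action creator: tells a root "clean up" reducer which slice to reset.
const cleanUpAction = (payload: { stateSelector: StateSelector }) => ({
  type: 'core/cleanUpState' as const,
  payload,
});

export const connectWithCleanUp = (
  mapStateToProps: any,
  mapDispatchToProps: any,
  stateSelector: StateSelector
) => (Component: ComponentType<any>) => {
  const Connected = connect(mapStateToProps, mapDispatchToProps)(Component);

  return (props: any) => {
    const dispatch = useDispatch();
    useEffect(() => {
      // Reset the selected slice on unmount so stale testingStatus / loadError
      // from one data source does not leak into the next page visit.
      return () => {
        dispatch(cleanUpAction({ stateSelector }));
      };
    }, [dispatch, stateSelector]);
    return <Connected {...props} />;
  };
};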


@@ -1,5 +1,33 @@
import { findNewName, nameExits } from './actions';
import {
findNewName,
nameExits,
InitDataSourceSettingDependencies,
testDataSource,
TestDataSourceDependencies,
} from './actions';
import { getMockPlugin, getMockPlugins } from '../../plugins/__mocks__/pluginMocks';
import { thunkTester } from 'test/core/thunk/thunkTester';
import {
initDataSourceSettingsSucceeded,
initDataSourceSettingsFailed,
testDataSourceStarting,
testDataSourceSucceeded,
testDataSourceFailed,
} from './reducers';
import { initDataSourceSettings } from '../state/actions';
import { ThunkResult, ThunkDispatch } from 'app/types';
import { GenericDataSourcePlugin } from '../settings/PluginSettings';
const getBackendSrvMock = () =>
({
get: jest.fn().mockReturnValue({
testDatasource: jest.fn().mockReturnValue({
status: '',
message: '',
}),
}),
withNoBackendCache: jest.fn().mockImplementationOnce(cb => cb()),
} as any);
describe('Name exists', () => {
const plugins = getMockPlugins(5);
@@ -42,3 +70,131 @@ describe('Find new name', () => {
expect(findNewName(plugins, name)).toEqual('pretty cool plugin-');
});
});
describe('initDataSourceSettings', () => {
describe('when pageId is not a number', () => {
it('then initDataSourceSettingsFailed should be dispatched', async () => {
const dispatchedActions = await thunkTester({})
.givenThunk(initDataSourceSettings)
.whenThunkIsDispatched('some page');
expect(dispatchedActions).toEqual([initDataSourceSettingsFailed(new Error('Invalid ID'))]);
});
});
describe('when pageId is a number', () => {
it('then initDataSourceSettingsSucceeded should be dispatched', async () => {
const thunkMock = (): ThunkResult<void> => (dispatch: ThunkDispatch, getState) => {};
const dataSource = { type: 'app' };
const dataSourceMeta = { id: 'some id' };
const dependencies: InitDataSourceSettingDependencies = {
loadDataSource: jest.fn(thunkMock),
getDataSource: jest.fn().mockReturnValue(dataSource),
getDataSourceMeta: jest.fn().mockReturnValue(dataSourceMeta),
importDataSourcePlugin: jest.fn().mockReturnValue({} as GenericDataSourcePlugin),
};
const state = {
dataSourceSettings: {},
dataSources: {},
};
const dispatchedActions = await thunkTester(state)
.givenThunk(initDataSourceSettings)
.whenThunkIsDispatched(256, dependencies);
expect(dispatchedActions).toEqual([initDataSourceSettingsSucceeded({} as GenericDataSourcePlugin)]);
expect(dependencies.loadDataSource).toHaveBeenCalledTimes(1);
expect(dependencies.loadDataSource).toHaveBeenCalledWith(256);
expect(dependencies.getDataSource).toHaveBeenCalledTimes(1);
expect(dependencies.getDataSource).toHaveBeenCalledWith({}, 256);
expect(dependencies.getDataSourceMeta).toHaveBeenCalledTimes(1);
expect(dependencies.getDataSourceMeta).toHaveBeenCalledWith({}, 'app');
expect(dependencies.importDataSourcePlugin).toHaveBeenCalledTimes(1);
expect(dependencies.importDataSourcePlugin).toHaveBeenCalledWith(dataSourceMeta);
});
});
describe('when plugin loading fails', () => {
it('then initDataSourceSettingsFailed should be dispatched', async () => {
const dependencies: InitDataSourceSettingDependencies = {
loadDataSource: jest.fn().mockImplementation(() => {
throw new Error('Error loading plugin');
}),
getDataSource: jest.fn(),
getDataSourceMeta: jest.fn(),
importDataSourcePlugin: jest.fn(),
};
const state = {
dataSourceSettings: {},
dataSources: {},
};
const dispatchedActions = await thunkTester(state)
.givenThunk(initDataSourceSettings)
.whenThunkIsDispatched(301, dependencies);
expect(dispatchedActions).toEqual([initDataSourceSettingsFailed(new Error('Error loading plugin'))]);
expect(dependencies.loadDataSource).toHaveBeenCalledTimes(1);
expect(dependencies.loadDataSource).toHaveBeenCalledWith(301);
});
});
});
describe('testDataSource', () => {
describe('when a datasource is tested', () => {
it('then testDataSourceStarting and testDataSourceSucceeded should be dispatched', async () => {
const dependencies: TestDataSourceDependencies = {
getDatasourceSrv: () =>
({
get: jest.fn().mockReturnValue({
testDatasource: jest.fn().mockReturnValue({
status: '',
message: '',
}),
}),
} as any),
getBackendSrv: getBackendSrvMock,
};
const state = {
testingStatus: {
status: '',
message: '',
},
};
const dispatchedActions = await thunkTester(state)
.givenThunk(testDataSource)
.whenThunkIsDispatched('Azure Monitor', dependencies);
expect(dispatchedActions).toEqual([testDataSourceStarting(), testDataSourceSucceeded(state.testingStatus)]);
});
it('then testDataSourceFailed should be dispatched', async () => {
const dependencies: TestDataSourceDependencies = {
getDatasourceSrv: () =>
({
get: jest.fn().mockReturnValue({
testDatasource: jest.fn().mockImplementation(() => {
throw new Error('Error testing datasource');
}),
}),
} as any),
getBackendSrv: getBackendSrvMock,
};
const result = {
message: 'Error testing datasource',
};
const state = {
testingStatus: {
message: '',
status: '',
},
};
const dispatchedActions = await thunkTester(state)
.givenThunk(testDataSource)
.whenThunkIsDispatched('Azure Monitor', dependencies);
expect(dispatchedActions).toEqual([testDataSourceStarting(), testDataSourceFailed(result)]);
});
});
});


@@ -1,10 +1,10 @@
import config from '../../../core/config';
import { getBackendSrv } from '@grafana/runtime';
import { getBackendSrv } from 'app/core/services/backend_srv';
import { getDatasourceSrv } from 'app/features/plugins/datasource_srv';
import { updateLocation, updateNavIndex } from 'app/core/actions';
import { buildNavModel } from './navModel';
import { DataSourcePluginMeta, DataSourceSettings } from '@grafana/data';
import { DataSourcePluginCategory, ThunkResult } from 'app/types';
import { DataSourcePluginCategory, ThunkResult, ThunkDispatch } from 'app/types';
import { getPluginSettings } from 'app/features/plugins/PluginSettingsCache';
import { importDataSourcePlugin } from 'app/features/plugins/plugin_loader';
import {
@@ -13,14 +13,102 @@ import {
dataSourcePluginsLoad,
dataSourcePluginsLoaded,
dataSourcesLoaded,
initDataSourceSettingsFailed,
initDataSourceSettingsSucceeded,
testDataSourceStarting,
testDataSourceSucceeded,
testDataSourceFailed,
} from './reducers';
import { buildCategories } from './buildCategories';
import { getDataSource, getDataSourceMeta } from './selectors';
import { getDataSourceSrv } from '@grafana/runtime';
export interface DataSourceTypesLoadedPayload {
plugins: DataSourcePluginMeta[];
categories: DataSourcePluginCategory[];
}
export interface InitDataSourceSettingDependencies {
loadDataSource: typeof loadDataSource;
getDataSource: typeof getDataSource;
getDataSourceMeta: typeof getDataSourceMeta;
importDataSourcePlugin: typeof importDataSourcePlugin;
}
export interface TestDataSourceDependencies {
getDatasourceSrv: typeof getDataSourceSrv;
getBackendSrv: typeof getBackendSrv;
}
export const initDataSourceSettings = (
pageId: number,
dependencies: InitDataSourceSettingDependencies = {
loadDataSource,
getDataSource,
getDataSourceMeta,
importDataSourcePlugin,
}
): ThunkResult<void> => {
return async (dispatch: ThunkDispatch, getState) => {
if (isNaN(pageId)) {
dispatch(initDataSourceSettingsFailed(new Error('Invalid ID')));
return;
}
try {
await dispatch(dependencies.loadDataSource(pageId));
if (getState().dataSourceSettings.plugin) {
return;
}
const dataSource = dependencies.getDataSource(getState().dataSources, pageId);
const dataSourceMeta = dependencies.getDataSourceMeta(getState().dataSources, dataSource.type);
const importedPlugin = await dependencies.importDataSourcePlugin(dataSourceMeta);
dispatch(initDataSourceSettingsSucceeded(importedPlugin));
} catch (err) {
console.log('Failed to import plugin module', err);
dispatch(initDataSourceSettingsFailed(err));
}
};
};
export const testDataSource = (
dataSourceName: string,
dependencies: TestDataSourceDependencies = {
getDatasourceSrv,
getBackendSrv,
}
): ThunkResult<void> => {
return async (dispatch: ThunkDispatch, getState) => {
const dsApi = await dependencies.getDatasourceSrv().get(dataSourceName);
if (!dsApi.testDatasource) {
return;
}
dispatch(testDataSourceStarting());
dependencies.getBackendSrv().withNoBackendCache(async () => {
try {
const result = await dsApi.testDatasource();
dispatch(testDataSourceSucceeded(result));
} catch (err) {
let message = '';
if (err.statusText) {
message = 'HTTP Error ' + err.statusText;
} else {
message = err.message;
}
dispatch(testDataSourceFailed({ message }));
}
});
};
};
export function loadDataSources(): ThunkResult<void> {
return async dispatch => {
const response = await getBackendSrv().get('/api/datasources');
@@ -123,7 +211,7 @@ export function findNewName(dataSources: ItemWithName[], name: string) {
function updateFrontendSettings() {
return getBackendSrv()
.get('/api/frontend/settings')
.then(settings => {
.then((settings: any) => {
config.datasources = settings.datasources;
config.defaultDatasource = settings.defaultDatasource;
getDatasourceSrv().init();


@@ -12,11 +12,16 @@ import {
setDataSourcesSearchQuery,
setDataSourceTypeSearchQuery,
setIsDefault,
dataSourceSettingsReducer,
initialDataSourceSettingsState,
initDataSourceSettingsSucceeded,
initDataSourceSettingsFailed,
} from './reducers';
import { getMockDataSource, getMockDataSources } from '../__mocks__/dataSourcesMocks';
import { LayoutModes } from 'app/core/components/LayoutSelector/LayoutSelector';
import { DataSourcesState } from 'app/types';
import { DataSourcesState, DataSourceSettingsState } from 'app/types';
import { PluginMeta, PluginMetaInfo, PluginType } from '@grafana/data';
import { GenericDataSourcePlugin } from '../settings/PluginSettings';
const mockPlugin = () =>
({
@@ -136,3 +141,34 @@ describe('dataSourcesReducer', () => {
});
});
});
describe('dataSourceSettingsReducer', () => {
describe('when initDataSourceSettingsSucceeded is dispatched', () => {
it('then state should be correct', () => {
reducerTester<DataSourceSettingsState>()
.givenReducer(dataSourceSettingsReducer, { ...initialDataSourceSettingsState })
.whenActionIsDispatched(initDataSourceSettingsSucceeded({} as GenericDataSourcePlugin))
.thenStateShouldEqual({
...initialDataSourceSettingsState,
plugin: {} as GenericDataSourcePlugin,
loadError: null,
});
});
});
describe('when initDataSourceSettingsFailed is dispatched', () => {
it('then state should be correct', () => {
reducerTester<DataSourceSettingsState>()
.givenReducer(dataSourceSettingsReducer, {
...initialDataSourceSettingsState,
plugin: {} as GenericDataSourcePlugin,
})
.whenActionIsDispatched(initDataSourceSettingsFailed(new Error('Some error')))
.thenStatePredicateShouldEqual(resultingState => {
expect(resultingState.plugin).toEqual(null);
expect(resultingState.loadError).toEqual('Some error');
return true;
});
});
});
});


@@ -1,9 +1,10 @@
import { AnyAction, createAction } from '@reduxjs/toolkit';
import { DataSourcePluginMeta, DataSourceSettings } from '@grafana/data';
import { DataSourcesState } from 'app/types';
import { DataSourcesState, DataSourceSettingsState } from 'app/types';
import { LayoutMode, LayoutModes } from 'app/core/components/LayoutSelector/LayoutSelector';
import { DataSourceTypesLoadedPayload } from './actions';
import { GenericDataSourcePlugin } from '../settings/PluginSettings';
export const initialState: DataSourcesState = {
dataSources: [],
@@ -94,6 +95,76 @@ export const dataSourcesReducer = (state: DataSourcesState = initialState, actio
return state;
};
export const initialDataSourceSettingsState: DataSourceSettingsState = {
testingStatus: {
status: null,
message: null,
},
loadError: null,
plugin: null,
};
export const initDataSourceSettingsSucceeded = createAction<GenericDataSourcePlugin>(
'dataSourceSettings/initDataSourceSettingsSucceeded'
);
export const initDataSourceSettingsFailed = createAction<Error>('dataSourceSettings/initDataSourceSettingsFailed');
export const testDataSourceStarting = createAction<undefined>('dataSourceSettings/testDataSourceStarting');
export const testDataSourceSucceeded = createAction<{
status: string;
message: string;
}>('dataSourceSettings/testDataSourceSucceeded');
export const testDataSourceFailed = createAction<{ message: string }>('dataSourceSettings/testDataSourceFailed');
export const dataSourceSettingsReducer = (
state: DataSourceSettingsState = initialDataSourceSettingsState,
action: AnyAction
): DataSourceSettingsState => {
if (initDataSourceSettingsSucceeded.match(action)) {
return { ...state, plugin: action.payload, loadError: null };
}
if (initDataSourceSettingsFailed.match(action)) {
return { ...state, plugin: null, loadError: action.payload.message };
}
if (testDataSourceStarting.match(action)) {
return {
...state,
testingStatus: {
message: 'Testing...',
status: 'info',
},
};
}
if (testDataSourceSucceeded.match(action)) {
return {
...state,
testingStatus: {
status: action.payload.status,
message: action.payload.message,
},
};
}
if (testDataSourceFailed.match(action)) {
return {
...state,
testingStatus: {
status: 'error',
message: action.payload.message,
},
};
}
return state;
};
export default {
dataSources: dataSourcesReducer,
dataSourceSettings: dataSourceSettingsReducer,
};
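The reducer above is a small state machine around testingStatus. A hand-driven walkthrough, illustrative only and not part of the diff, using nothing but the action creators and reducer defined in this file:

import {
  dataSourceSettingsReducer,
  initialDataSourceSettingsState,
  testDataSourceStarting,
  testDataSourceFailed,
} from './reducers';

// Starting a test switches testingStatus to the in-progress message.
let state = dataSourceSettingsReducer(initialDataSourceSettingsState, testDataSourceStarting());
// state.testingStatus -> { status: 'info', message: 'Testing...' }

// A failed test records the error message with status 'error'.
state = dataSourceSettingsReducer(state, testDataSourceFailed({ message: 'HTTP Error Bad Gateway' }));
// state.testingStatus -> { status: 'error', message: 'HTTP Error Bad Gateway' }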


@@ -70,7 +70,6 @@ const makeLog = (overides: Partial<LogRowModel>): LogRowModel => {
hasAnsi: false,
labels: {},
raw: entry,
timestamp: '',
timeFromNow: '',
timeEpochMs: 1,
timeLocal: '',


@@ -428,9 +428,11 @@ export const runQueries = (exploreId: ExploreId): ThunkResult<void> => {
const queryOptions: QueryOptions = {
minInterval,
// This is used for logs streaming for buffer size, with undefined it falls back to datasource config if it
// supports that.
maxDataPoints: mode === ExploreMode.Logs ? undefined : containerWidth,
// maxDataPoints is used in:
// Loki - used for logs streaming for buffer size, with undefined it falls back to datasource config if it supports that.
// Elastic - limits the number of datapoints for the counts query and for logs it has hardcoded limit.
// Influx - used to correctly display logs in graph
maxDataPoints: mode === ExploreMode.Logs && datasourceInstance.name === 'Loki' ? undefined : containerWidth,
liveStreaming: live,
showingGraph,
showingTable,


@@ -187,7 +187,6 @@ describe('ResultProcessor', () => {
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 300,
uid: '2',
uniqueLabels: {},
},
@@ -205,7 +204,6 @@ describe('ResultProcessor', () => {
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 200,
uid: '1',
uniqueLabels: {},
},
@@ -223,7 +221,6 @@ describe('ResultProcessor', () => {
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 100,
uid: '0',
uniqueLabels: {},
},


@@ -5,6 +5,7 @@ import coreModule from 'app/core/core_module';
import { appendQueryToUrl, toUrlParams } from 'app/core/utils/url';
import { sanitizeUrl } from 'app/core/utils/text';
import { getConfig } from 'app/core/config';
import locationUtil from 'app/core/utils/location_util';
import { VariableSuggestion, VariableOrigin, DataLinkBuiltInVars } from '@grafana/ui';
import {
DataLink,
@@ -216,7 +217,7 @@ export class LinkSrv implements LinkService {
constructor(private templateSrv: TemplateSrv, private timeSrv: TimeSrv) {}
getLinkUrl(link: any) {
const url = this.templateSrv.replace(link.url || '');
let url = locationUtil.assureBaseUrl(this.templateSrv.replace(link.url || ''));
const params: { [key: string]: any } = {};
if (link.keepTime) {
@@ -229,7 +230,8 @@ export class LinkSrv implements LinkService {
this.templateSrv.fillVariableValuesForUrl(params);
}
return appendQueryToUrl(url, toUrlParams(params));
url = appendQueryToUrl(url, toUrlParams(params));
return getConfig().disableSanitizeHtml ? url : sanitizeUrl(url);
}
getAnchorInfo(link: any) {
@@ -266,7 +268,7 @@ export class LinkSrv implements LinkService {
}
const info: LinkModel<T> = {
href: href.replace(/\s|\n/g, ''),
href: locationUtil.assureBaseUrl(href.replace(/\s|\n/g, '')),
title: this.templateSrv.replace(link.title || '', scopedVars),
target: link.targetBlank ? '_blank' : '_self',
origin,
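The link helpers now run every generated URL through locationUtil.assureBaseUrl so data links and explore links keep working when Grafana is served from a sub URL. The helper itself is not shown in this diff; roughly, it needs to behave like the sketch below (an assumption for illustration, with '/grafana' standing in for the configured sub path):

// Not the actual locationUtil implementation, only a sketch of the expected behaviour.
const assureBaseUrl = (url: string, appSubUrl = '/grafana'): string => {
  // Prefix root-relative URLs that are missing the configured sub path.
  if (url.startsWith('/') && !url.startsWith(appSubUrl)) {
    return `${appSubUrl}${url}`;
  }
  return url;
};

// assureBaseUrl('/explore?left=...')           -> '/grafana/explore?left=...'
// assureBaseUrl('/grafana/d/abc/my-dashboard') -> unchanged
// assureBaseUrl('https://example.com/page')    -> unchanged (absolute URLs pass through)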
