Compare commits

...

47 Commits

Author SHA1 Message Date
Torkel Ödegaard
d812109ebf Merge pull request #14485 from grafana/cp-v5.4.2
Cherry picks for v5.4.2
2018-12-13 13:49:23 +01:00
Torkel Ödegaard
f44a006cb9 updated version 2018-12-13 13:48:54 +01:00
Johannes Schill
43c0405ae1 Filter tags select box on text input #14437
(cherry picked from commit 6bb9415b0e)
2018-12-13 13:31:07 +01:00
Marcus Efraimsson
ed05e9de72 add oauth_auto_login setting to defaults file
Making the setting configurable through an environment variable

(cherry picked from commit b9e91cab0e)
2018-12-13 13:31:04 +01:00
Peter Holmberg
e43f13bc03 Check with lowercase
(cherry picked from commit 29bcdef104)
2018-12-13 13:30:58 +01:00
Torkel Ödegaard
308c818cd7 Merge pull request #14431 from grafana/v5.4.1-cherry-picks
v5.4.1 cherry picks
2018-12-10 15:08:32 +01:00
Marcus Efraimsson
d49d8bf13d fix time regions bugs
(cherry picked from commit 8f26fe0fbb)
2018-12-10 14:54:04 +01:00
Torkel Ödegaard
3701f22d66 bumped version number to v5.4.1 2018-12-10 14:44:52 +01:00
Torkel Ödegaard
4c60ef398a fixed issue with colorpicker position above window, fixes #14412
(cherry picked from commit 0c5fd21327)
2018-12-10 14:39:50 +01:00
Torkel Ödegaard
ec98666de1 fixed issue with singlestat and repeated scopedVars: it only worked for time series data sources, and only if there were any series; now scoped vars are always set, fixes #14367
(cherry picked from commit 976d25d6ae)
2018-12-10 14:39:08 +01:00
Torkel Ödegaard
7fe10e2eef fix search tag issues, fixes #14391
(cherry picked from commit e05f6c5397)
2018-12-10 14:38:37 +01:00
Scott Glajch
4a8cd4c023 Add the AWS/SES CloudWatch metrics BounceRate and ComplaintRate. Pull request #14399
(cherry picked from commit 62a5cd27ba)
2018-12-10 14:38:16 +01:00
moznion
174be1abab Put issue number into test code
(cherry picked from commit 4397ee61d0)
2018-12-10 14:37:52 +01:00
moznion
4c13e02aef Fix bug where updating user quota doesn't work
The reason is the same as in 061e06c226

(cherry picked from commit d1e1cde00e)
2018-12-10 14:37:36 +01:00
moznion
4a8a3d40e7 Fix bug where updating org quota doesn't work
3c330c8e4c/pkg/services/sqlstore/quota.go (L106)

In the real use case, the `has` value returned by the code above is always `false`, because the lookup includes `Updated` in the query.

This commit fixes that issue.

(cherry picked from commit 061e06c226)
2018-12-10 14:37:26 +01:00
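
The patched lookup works because of how xorm builds queries: sess.Get derives its WHERE clause from every non-zero field of the struct it is given, so pre-filling `Updated` with time.Now() added an "updated = <now>" condition that never matched an existing row. A minimal sketch of that behaviour; the `conditions` helper below only mimics xorm's non-zero-field rule and does not call xorm itself:

package main

import (
	"fmt"
	"time"
)

// Stand-in for m.Quota; xorm's sess.Get builds its WHERE clause
// from every non-zero field of a struct like this one.
type Quota struct {
	OrgId   int64
	Target  string
	Limit   int64
	Updated time.Time
}

// conditions mimics xorm's non-zero-field rule (illustrative only).
func conditions(q Quota) []string {
	conds := []string{
		fmt.Sprintf("org_id = %d", q.OrgId),
		fmt.Sprintf("target = %q", q.Target),
	}
	if !q.Updated.IsZero() {
		// This extra condition is why the existing row was never found.
		conds = append(conds, "updated = "+q.Updated.Format(time.RFC3339))
	}
	return conds
}

func main() {
	// Buggy lookup: Updated pre-filled, so "updated = <now>" never matches.
	buggy := Quota{OrgId: 1, Target: "org_user", Updated: time.Now()}
	fmt.Println(conditions(buggy))

	// Fixed lookup: query by OrgId/Target only and stamp Updated afterwards,
	// as the patched UpdateOrgQuota/UpdateUserQuota now do.
	fixed := Quota{OrgId: 1, Target: "org_user"}
	fmt.Println(conditions(fixed))
	fixed.Updated = time.Now()
}
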
Johannes Schill
01c4b71cfb If user login equals user email, only show the email once #14341
(cherry picked from commit 02b14d33a6)
2018-12-10 14:35:42 +01:00
Johannes Schill
e7cd39a543 UserPicker and TeamPicker should use min-width instead of fixed widths to avoid overflowing form buttons. #14341
(cherry picked from commit 114a264da4)
2018-12-10 14:35:31 +01:00
Torkel Ödegaard
6f241a4bac fix for panel-initialized event not being called
(cherry picked from commit 757cada4a5)
2018-12-10 14:34:46 +01:00
Brian Gann
0a19581c48 redact value for plugin proxy routes
(cherry picked from commit 02365514f9)
2018-12-10 14:34:25 +01:00
Torkel Ödegaard
96cb4df83a fix for panel embedding: solo panel height was not correctly set. Renamed the panel--solo class to panel-solo; in the develop branch we have removed the need for the panel class
(cherry picked from commit e31490ac68)
2018-12-10 14:34:07 +01:00
Tom Nitti
7820775a53 added support for influxdb cumulative_sum function in tsdb
(cherry picked from commit 1e53c12921)
2018-12-10 14:33:41 +01:00
Dominik Henneke
9699133501 Use buildTableConstraint instead of buildSchemaConstraint to find the datatype of a column if using a table from a different database schema
(cherry picked from commit b450b778cb)
2018-12-10 14:32:55 +01:00
flopp999
80ecd8ea8e fixedUnit for Flow: l/min and mL/min
got strange results with decimalSIPrefix
(cherry picked from commit 8caeb13026)
2018-12-10 14:32:31 +01:00
Marcus Efraimsson
2ab2259091 fix time regions using zero hours
(cherry picked from commit ad33cd5c5c)
2018-12-10 14:32:08 +01:00
Kornelijus Survila
deb305b95f dataproxy: Override incoming Authorization header
(cherry picked from commit 0cafd9a663)
2018-12-10 14:31:28 +01:00
Leonard Gram
d42c17efad build: update latest when pushing docker.
(cherry picked from commit 9a771555f3)
2018-12-04 11:17:50 +01:00
Leonard Gram
972aaef2a6 build: always test publisher.
(cherry picked from commit cb0d58c6f1)
2018-12-03 15:18:10 +01:00
Leonard Gram
ce3982d406 build: packages linked to dl.grafana.com.
(cherry picked from commit ff0730ca1a)
2018-12-03 15:18:10 +01:00
Marcus Efraimsson
69c5191926 Merge pull request #14212 from grafana/cp-5.4.0
Cherry picks for v5.4.0
2018-12-03 02:17:42 -08:00
Marcus Efraimsson
99ee3bbe5a release v5.4.0 2018-12-03 10:53:20 +01:00
Marcus Efraimsson
01840cbd70 let each sql datasource handle timeFrom and timeTo macros
(cherry picked from commit 624e5e5b3d)
2018-11-30 16:27:36 +01:00
Matthew Coltman
23b19543bd Add AWS/CodeBuild namespace for CloudWatch datasource
(cherry picked from commit bbd0ec3a8b)
2018-11-30 16:27:07 +01:00
Ryan McKinley
bb4e5934fb check for null with toLocaleString (#14208)
(cherry picked from commit b3e6da0cbd)
2018-11-28 10:23:43 +01:00
Torkel Ödegaard
fd3821d2f1 Fix elastic ng-inject (build issue) (#14195)
fix elastic ng-inject issue in query editor

(cherry picked from commit 2faf8c722f)
2018-11-28 10:23:22 +01:00
Leonard Gram
8b1d0b14b6 docker: Upgrades base packages in the images.
Related to #14182

(cherry picked from commit bccce9922a)
2018-11-28 10:22:39 +01:00
Marcus Efraimsson
23c6bea21b return actual error if failing to update alert data
(cherry picked from commit 36aec52c08)
2018-11-28 10:21:26 +01:00
Marcus Efraimsson
e3abefa19f fix handling of elasticsearch 6.0+ versions
(cherry picked from commit a022284cb0)
2018-11-28 10:20:41 +01:00
flopp999
4ee92bd59c Fix abbreviations of Litre/min and milliLitre/min (#14114)
(cherry picked from commit 91d97ab5b5)
2018-11-28 10:20:04 +01:00
Marcus Efraimsson
780e5153d0 cloudwatch: handle invalid time ranges
(cherry picked from commit 3534762f49)
2018-11-28 10:00:23 +01:00
Marcus Efraimsson
be9058d7ef cloudwatch: recover/handle panics when executing queries
(cherry picked from commit 879aed7d06)
2018-11-28 10:00:16 +01:00
bergquist
3301f96811 updates time range options for alert queries
adds some new options for the 'to' value.
removes the '1s' option for 'from' since that's unreasonably low

closes #12134

(cherry picked from commit 14688766ef)
2018-11-28 09:59:52 +01:00
bergquist
1c59669da0 format: remove </input> and align tabs
(cherry picked from commit 60c291c8dc)
2018-11-28 09:59:45 +01:00
Torkel Ödegaard
1ad60be47b fixed failing graph tests
(cherry picked from commit 0731b8635b)
2018-11-28 09:59:10 +01:00
Torkel Ödegaard
9ec0af73ec fixed issue with new legend not checking if panel.legend.show
(cherry picked from commit cab92f88af)
2018-11-28 09:59:00 +01:00
Leonard Gram
8190d10827 build: docker build for ge.
(cherry picked from commit 6c267cb592)
2018-11-21 09:28:15 +01:00
Marcus Efraimsson
18b5f630f7 update release publish script links
(cherry picked from commit e421c387ea)
2018-11-20 16:24:59 +01:00
Marcus Efraimsson
9df26af3db release v5.4.0-beta1 2018-11-20 14:08:27 +01:00
63 changed files with 446 additions and 176 deletions

View File

@@ -162,8 +162,8 @@ jobs:
           name: Build Grafana.com master publisher
           command: 'go build -o scripts/publish scripts/build/publish.go'
       - run:
-          name: Build Grafana.com release publisher
-          command: 'cd scripts/build/release_publisher && go build -o release_publisher .'
+          name: Test and build Grafana.com release publisher
+          command: 'cd scripts/build/release_publisher && go test . && go build -o release_publisher .'
       - persist_to_workspace:
           root: .
           paths:
@@ -191,6 +191,9 @@ jobs:
       - run:
           name: sha-sum packages
           command: 'go run build.go sha-dist'
+      - run:
+          name: Test Grafana.com release publisher
+          command: 'cd scripts/build/release_publisher && go test .'
       - persist_to_workspace:
           root: .
           paths:
@@ -510,6 +513,7 @@ workflows:
       - grafana-docker-release:
           requires:
             - build-all
+            - build-all-enterprise
             - test-backend
             - test-frontend
             - codespell

View File

@@ -50,7 +50,8 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi
 WORKDIR $GF_PATHS_HOME
-RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates && \
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get install -qq -y libfontconfig ca-certificates && \
     apt-get autoremove -y && \
     rm -rf /var/lib/apt/lists/*

View File

@@ -246,6 +246,10 @@ disable_signout_menu = false
 # URL to redirect the user to after sign out
 signout_redirect_url =
 
+# Set to true to attempt login with OAuth automatically, skipping the login screen.
+# This setting is ignored if multiple OAuth providers are configured.
+oauth_auto_login = false
+
 #################################### Anonymous Auth ######################
 [auth.anonymous]
 # enable anonymous access

View File

@@ -223,6 +223,10 @@ log_queries =
 # URL to redirect the user to after sign out
 ;signout_redirect_url =
 
+# Set to true to attempt login with OAuth automatically, skipping the login screen.
+# This setting is ignored if multiple OAuth providers are configured.
+;oauth_auto_login = false
+
 #################################### Anonymous Auth ##########################
 [auth.anonymous]
 # enable anonymous access

View File

@@ -133,9 +133,9 @@ Macro example | Description
 ------------ | -------------
 *$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
 *$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
-*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
-*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
-*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
+*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN FROM_UNIXTIME(1494410783) AND FROM_UNIXTIME(1494410983)*
+*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *FROM_UNIXTIME(1494410783)*
+*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *FROM_UNIXTIME(1494410983)*
 *$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
 *$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value.
 *$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points.

View File

@@ -4,7 +4,7 @@
     "company": "Grafana Labs"
   },
   "name": "grafana",
-  "version": "5.4.0-pre1",
+  "version": "5.4.2",
   "repository": {
     "type": "git",
     "url": "http://github.com/grafana/grafana.git"

View File

@@ -25,7 +25,8 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi
 WORKDIR $GF_PATHS_HOME
-RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates curl && \
+RUN apt-get update && apt-get -y upgrade && \
+    apt-get install -qq -y libfontconfig ca-certificates curl && \
     apt-get autoremove -y && \
     rm -rf /var/lib/apt/lists/*

View File

@@ -1,12 +1,25 @@
 #!/bin/sh
 set -e
 
-_grafana_tag=$1
+_raw_grafana_tag=$1
 _docker_repo=${2:-grafana/grafana-enterprise}
 
+if echo "$_raw_grafana_tag" | grep -q "^v"; then
+	_grafana_tag=$(echo "${_raw_grafana_tag}" | cut -d "v" -f 2)
+else
+	_grafana_tag="${_raw_grafana_tag}"
+fi
+
+echo "Building and deploying ${_docker_repo}:${_grafana_tag}"
+
 docker build \
 	--tag "${_docker_repo}:${_grafana_tag}"\
 	--no-cache=true \
 	.
 
 docker push "${_docker_repo}:${_grafana_tag}"
+
+if echo "$_raw_grafana_tag" | grep -q "^v" && echo "$_raw_grafana_tag" | grep -qv "beta"; then
+	docker tag "${_docker_repo}:${_grafana_tag}" "${_docker_repo}:latest"
+	docker push "${_docker_repo}:latest"
+fi

View File

@@ -277,10 +277,6 @@ func PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand) Response {
 		return Error(500, "Failed to save dashboard", err)
 	}
 
-	if err == m.ErrDashboardFailedToUpdateAlertData {
-		return Error(500, "Invalid alert data. Cannot save dashboard", err)
-	}
-
 	c.TimeRequest(metrics.M_Api_Dashboard_Save)
 	return JSON(200, util.DynMap{
 		"status": "success",

View File

@@ -727,7 +727,6 @@ func TestDashboardApiEndpoint(t *testing.T) {
 			{SaveError: m.ErrDashboardTitleEmpty, ExpectedStatusCode: 400},
 			{SaveError: m.ErrDashboardFolderCannotHaveParent, ExpectedStatusCode: 400},
 			{SaveError: alerting.ValidationError{Reason: "Mu"}, ExpectedStatusCode: 422},
-			{SaveError: m.ErrDashboardFailedToUpdateAlertData, ExpectedStatusCode: 500},
 			{SaveError: m.ErrDashboardFailedGenerateUniqueUid, ExpectedStatusCode: 500},
 			{SaveError: m.ErrDashboardTypeMismatch, ExpectedStatusCode: 400},
 			{SaveError: m.ErrDashboardFolderWithSameNameAsDashboard, ExpectedStatusCode: 400},

View File

@@ -51,7 +51,7 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
 			if token, err := tokenProvider.getAccessToken(data); err != nil {
 				logger.Error("Failed to get access token", "error", err)
 			} else {
-				req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
 			}
 		}
 
@@ -60,7 +60,7 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
 			if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil {
 				logger.Error("Failed to get access token", "error", err)
 			} else {
-				req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
 			}
 		}
 
@@ -73,7 +73,7 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
 			if err != nil {
 				logger.Error("Failed to get default access token from meta data server", "error", err)
 			} else {
-				req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token.AccessToken))
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token.AccessToken))
 			}
 		}
 	}
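
The switch from Header.Add to Header.Set is what makes the override effective: with Go's standard net/http, Add appends a second Authorization value alongside whatever the incoming request already carried, while Set replaces it. A minimal sketch of the difference:

package main

import (
	"fmt"
	"net/http"
)

func main() {
	req, _ := http.NewRequest("GET", "http://example.com", nil)

	// Incoming request already carries a user-supplied credential.
	req.Header.Add("Authorization", "Bearer user-token")

	// Add keeps both values, so the upstream would see two Authorization headers.
	req.Header.Add("Authorization", "Bearer datasource-token")
	fmt.Println(req.Header["Authorization"]) // [Bearer user-token Bearer datasource-token]

	// Set discards the incoming value, which is the behaviour this fix wants.
	req.Header.Set("Authorization", "Bearer datasource-token")
	fmt.Println(req.Header["Authorization"]) // [Bearer datasource-token]
}
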

View File

@@ -87,7 +87,7 @@ func NewApiPluginProxy(ctx *m.ReqContext, proxyPath string, route *plugins.AppPl
 		}
 
 		for key, value := range headers {
-			log.Trace("setting key %v value %v", key, value[0])
+			log.Trace("setting key %v value <redacted>", key)
 			req.Header.Set(key, value[0])
 		}
 	}

View File

@@ -21,7 +21,6 @@ var (
 	ErrDashboardVersionMismatch         = errors.New("The dashboard has been changed by someone else")
 	ErrDashboardTitleEmpty              = errors.New("Dashboard title cannot be empty")
 	ErrDashboardFolderCannotHaveParent  = errors.New("A Dashboard Folder cannot be added to another folder")
-	ErrDashboardFailedToUpdateAlertData = errors.New("Failed to save alert data")
 	ErrDashboardsWithSameSlugExists     = errors.New("Multiple dashboards with the same slug exists")
 	ErrDashboardFailedGenerateUniqueUid = errors.New("Failed to generate unique dashboard id")
 	ErrDashboardTypeMismatch            = errors.New("Dashboard cannot be changed to a folder")

View File

@@ -165,7 +165,7 @@ func (dr *dashboardServiceImpl) updateAlerting(cmd *models.SaveDashboardCommand,
 	}
 
 	if err := bus.Dispatch(&alertCmd); err != nil {
-		return models.ErrDashboardFailedToUpdateAlertData
+		return err
 	}
 
 	return nil

View File

@@ -99,14 +99,14 @@ func UpdateOrgQuota(cmd *m.UpdateOrgQuotaCmd) error {
 	return inTransaction(func(sess *DBSession) error {
 		//Check if quota is already defined in the DB
 		quota := m.Quota{
 			Target: cmd.Target,
 			OrgId:  cmd.OrgId,
-			Updated: time.Now(),
 		}
 		has, err := sess.Get(&quota)
 		if err != nil {
 			return err
 		}
+		quota.Updated = time.Now()
 		quota.Limit = cmd.Limit
 		if !has {
 			quota.Created = time.Now()
@@ -201,14 +201,14 @@ func UpdateUserQuota(cmd *m.UpdateUserQuotaCmd) error {
 	return inTransaction(func(sess *DBSession) error {
 		//Check if quota is already defined in the DB
 		quota := m.Quota{
 			Target: cmd.Target,
 			UserId: cmd.UserId,
-			Updated: time.Now(),
 		}
 		has, err := sess.Get(&quota)
 		if err != nil {
 			return err
 		}
+		quota.Updated = time.Now()
 		quota.Limit = cmd.Limit
 		if !has {
 			quota.Created = time.Now()

View File

@@ -2,6 +2,7 @@ package sqlstore
 
 import (
 	"testing"
+	"time"
 
 	m "github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/setting"
@@ -168,5 +169,69 @@ func TestQuotaCommandsAndQueries(t *testing.T) {
 			So(query.Result.Limit, ShouldEqual, 5)
 			So(query.Result.Used, ShouldEqual, 1)
 		})
+
+		// related: https://github.com/grafana/grafana/issues/14342
+		Convey("Should org quota updating is successful even if it called multiple time", func() {
+			orgCmd := m.UpdateOrgQuotaCmd{
+				OrgId:  orgId,
+				Target: "org_user",
+				Limit:  5,
+			}
+			err := UpdateOrgQuota(&orgCmd)
+			So(err, ShouldBeNil)
+
+			query := m.GetOrgQuotaByTargetQuery{OrgId: orgId, Target: "org_user", Default: 1}
+			err = GetOrgQuotaByTarget(&query)
+			So(err, ShouldBeNil)
+			So(query.Result.Limit, ShouldEqual, 5)
+
+			// XXX: resolution of `Updated` column is 1sec, so this makes delay
+			time.Sleep(1 * time.Second)
+
+			orgCmd = m.UpdateOrgQuotaCmd{
+				OrgId:  orgId,
+				Target: "org_user",
+				Limit:  10,
+			}
+			err = UpdateOrgQuota(&orgCmd)
+			So(err, ShouldBeNil)
+
+			query = m.GetOrgQuotaByTargetQuery{OrgId: orgId, Target: "org_user", Default: 1}
+			err = GetOrgQuotaByTarget(&query)
+			So(err, ShouldBeNil)
+			So(query.Result.Limit, ShouldEqual, 10)
+		})
+
+		// related: https://github.com/grafana/grafana/issues/14342
+		Convey("Should user quota updating is successful even if it called multiple time", func() {
+			userQuotaCmd := m.UpdateUserQuotaCmd{
+				UserId: userId,
+				Target: "org_user",
+				Limit:  5,
+			}
+			err := UpdateUserQuota(&userQuotaCmd)
+			So(err, ShouldBeNil)
+
+			query := m.GetUserQuotaByTargetQuery{UserId: userId, Target: "org_user", Default: 1}
+			err = GetUserQuotaByTarget(&query)
+			So(err, ShouldBeNil)
+			So(query.Result.Limit, ShouldEqual, 5)
+
+			// XXX: resolution of `Updated` column is 1sec, so this makes delay
+			time.Sleep(1 * time.Second)
+
+			userQuotaCmd = m.UpdateUserQuotaCmd{
+				UserId: userId,
+				Target: "org_user",
+				Limit:  10,
+			}
+			err = UpdateUserQuota(&userQuotaCmd)
+			So(err, ShouldBeNil)
+
+			query = m.GetUserQuotaByTargetQuery{UserId: userId, Target: "org_user", Default: 1}
+			err = GetUserQuotaByTarget(&query)
+			So(err, ShouldBeNil)
+			So(query.Result.Limit, ShouldEqual, 10)
+		})
 	})
 }

View File

@@ -126,6 +126,18 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
 		}
 
 		eg.Go(func() error {
+			defer func() {
+				if err := recover(); err != nil {
+					plog.Error("Execute Query Panic", "error", err, "stack", log.Stack(1))
+					if theErr, ok := err.(error); ok {
+						resultChan <- &tsdb.QueryResult{
+							RefId: query.RefId,
+							Error: theErr,
+						}
+					}
+				}
+			}()
+
 			queryRes, err := e.executeQuery(ectx, query, queryContext)
 			if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" {
 				return err
@@ -146,6 +158,17 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
 	for region, getMetricDataQuery := range getMetricDataQueries {
 		q := getMetricDataQuery
 		eg.Go(func() error {
+			defer func() {
+				if err := recover(); err != nil {
+					plog.Error("Execute Get Metric Data Query Panic", "error", err, "stack", log.Stack(1))
+					if theErr, ok := err.(error); ok {
+						resultChan <- &tsdb.QueryResult{
+							Error: theErr,
+						}
+					}
+				}
+			}()
+
 			queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext)
 			if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" {
 				return err
@@ -188,8 +211,8 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatch
 		return nil, err
 	}
 
-	if endTime.Before(startTime) {
-		return nil, fmt.Errorf("Invalid time range: End time can't be before start time")
+	if !startTime.Before(endTime) {
+		return nil, fmt.Errorf("Invalid time range: Start time must be before end time")
 	}
 
 	params := &cloudwatch.GetMetricStatisticsInput{
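
The deferred recover turns a panic inside one query goroutine into a per-query error instead of taking down the whole process. A minimal sketch of the same pattern with errgroup and a buffered result channel; the queryResult type and "A" ref id here are illustrative, not the executor's actual types:

package main

import (
	"fmt"

	"golang.org/x/sync/errgroup"
)

type queryResult struct {
	RefID string
	Err   error
}

func main() {
	results := make(chan queryResult, 1)

	var eg errgroup.Group
	eg.Go(func() error {
		defer func() {
			// recover converts the panic into a normal per-query error.
			if r := recover(); r != nil {
				if err, ok := r.(error); ok {
					results <- queryResult{RefID: "A", Err: err}
				}
			}
		}()
		panic(fmt.Errorf("simulated datasource panic"))
	})

	_ = eg.Wait() // returns nil: the panic never escaped the goroutine
	close(results)
	for res := range results {
		fmt.Printf("query %s failed: %v\n", res.RefID, res.Err)
	}
}
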

View File

@@ -1,9 +1,13 @@
 package cloudwatch
 
 import (
+	"context"
 	"testing"
 	"time"
 
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/tsdb"
+
 	"github.com/aws/aws-sdk-go/aws"
 	"github.com/aws/aws-sdk-go/service/cloudwatch"
 	"github.com/grafana/grafana/pkg/components/null"
@@ -14,6 +18,24 @@ import (
 
 func TestCloudWatch(t *testing.T) {
 	Convey("CloudWatch", t, func() {
+		Convey("executeQuery", func() {
+			e := &CloudWatchExecutor{
+				DataSource: &models.DataSource{
+					JsonData: simplejson.New(),
+				},
+			}
+
+			Convey("End time before start time should result in error", func() {
+				_, err := e.executeQuery(context.Background(), &CloudWatchQuery{}, &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("now-1h", "now-2h")})
+				So(err.Error(), ShouldEqual, "Invalid time range: Start time must be before end time")
+			})
+
+			Convey("End time equals start time should result in error", func() {
+				_, err := e.executeQuery(context.Background(), &CloudWatchQuery{}, &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("now-1h", "now-1h")})
+				So(err.Error(), ShouldEqual, "Invalid time range: Start time must be before end time")
+			})
+		})
+
 		Convey("can parse cloudwatch json model", func() {
 			json := `
 			{

View File

@@ -47,6 +47,7 @@ func init() {
 		"AWS/CloudFront":  {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},
 		"AWS/CloudSearch": {"SuccessfulRequests", "SearchableDocuments", "IndexUtilization", "Partitions"},
 		"AWS/CloudHSM":    {"HsmUnhealthy", "HsmTemperature", "HsmKeysSessionOccupied", "HsmKeysTokenOccupied", "HsmSslCtxsOccupied", "HsmSessionCount", "HsmUsersAvailable", "HsmUsersMax", "InterfaceEth2OctetsInput", "InterfaceEth2OctetsOutput"},
+		"AWS/CodeBuild":   {"BuildDuration", "Builds", "DownloadSourceDuration", "Duration", "FailedBuilds", "FinalizingDuration", "InstallDuration", "PostBuildDuration", "PreBuildDuration", "ProvisioningDuration", "QueuedDuration", "SubmittedDuration", "SucceededBuilds", "UploadArtifactsDuration"},
 		"AWS/Connect":     {"CallsBreachingConcurrencyQuota", "CallBackNotDialableNumber", "CallRecordingUploadError", "CallsPerInterval", "ConcurrentCalls", "ConcurrentCallsPercentage", "ContactFlowErrors", "ContactFlowFatalErrors", "LongestQueueWaitTime", "MissedCalls", "MisconfiguredPhoneNumbers", "PublicSigningKeyUsage", "QueueCapacityExceededError", "QueueSize", "ThrottledCalls", "ToInstancePacketLossRate"},
 		"AWS/DMS":         {"FreeableMemory", "WriteIOPS", "ReadIOPS", "WriteThroughput", "ReadThroughput", "WriteLatency", "ReadLatency", "SwapUsage", "NetworkTransmitThroughput", "NetworkReceiveThroughput", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "CDCIncomingChanges", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CDCLatencySource", "CDCLatencyTarget"},
 		"AWS/DX":          {"ConnectionState", "ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelTx", "ConnectionLightLevelRx"},
@@ -100,7 +101,7 @@ func init() {
 		"AWS/RDS":     {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "BurstBalance", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"},
 		"AWS/Route53": {"ChildHealthCheckHealthyCount", "HealthCheckStatus", "HealthCheckPercentageHealthy", "ConnectionTime", "SSLHandshakeTime", "TimeToFirstByte"},
 		"AWS/S3":      {"BucketSizeBytes", "NumberOfObjects", "AllRequests", "GetRequests", "PutRequests", "DeleteRequests", "HeadRequests", "PostRequests", "ListRequests", "BytesDownloaded", "BytesUploaded", "4xxErrors", "5xxErrors", "FirstByteLatency", "TotalRequestLatency"},
-		"AWS/SES":     {"Bounce", "Complaint", "Delivery", "Reject", "Send"},
+		"AWS/SES":     {"Bounce", "Complaint", "Delivery", "Reject", "Send", "Reputation.BounceRate", "Reputation.ComplaintRate"},
 		"AWS/SNS":     {"NumberOfMessagesPublished", "PublishSize", "NumberOfNotificationsDelivered", "NumberOfNotificationsFailed"},
 		"AWS/SQS":     {"NumberOfMessagesSent", "SentMessageSize", "NumberOfMessagesReceived", "NumberOfEmptyReceives", "NumberOfMessagesDeleted", "ApproximateAgeOfOldestMessage", "ApproximateNumberOfMessagesDelayed", "ApproximateNumberOfMessagesVisible", "ApproximateNumberOfMessagesNotVisible"},
 		"AWS/States":  {"ExecutionTime", "ExecutionThrottled", "ExecutionsAborted", "ExecutionsFailed", "ExecutionsStarted", "ExecutionsSucceeded", "ExecutionsTimedOut", "ActivityRunTime", "ActivityScheduleTime", "ActivityTime", "ActivitiesFailed", "ActivitiesHeartbeatTimedOut", "ActivitiesScheduled", "ActivitiesScheduled", "ActivitiesSucceeded", "ActivitiesTimedOut", "LambdaFunctionRunTime", "LambdaFunctionScheduleTime", "LambdaFunctionTime", "LambdaFunctionsFailed", "LambdaFunctionsHeartbeatTimedOut", "LambdaFunctionsScheduled", "LambdaFunctionsStarted", "LambdaFunctionsSucceeded", "LambdaFunctionsTimedOut"},
@@ -123,6 +124,7 @@ func init() {
 		"AWS/CloudFront":  {"DistributionId", "Region"},
 		"AWS/CloudSearch": {},
 		"AWS/CloudHSM":    {"Region", "ClusterId", "HsmId"},
+		"AWS/CodeBuild":   {"ProjectName"},
 		"AWS/Connect":     {"InstanceId", "MetricGroup", "Participant", "QueueName", "Stream Type", "Type of Connection"},
 		"AWS/DMS":         {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
 		"AWS/DX":          {"ConnectionId"},

View File

@@ -65,7 +65,7 @@ var NewClient = func(ctx context.Context, ds *models.DataSource, timeRange *tsdb
 	clientLog.Debug("Creating new client", "version", version, "timeField", timeField, "indices", strings.Join(indices, ", "))
 
 	switch version {
-	case 2, 5, 56:
+	case 2, 5, 56, 60:
 		return &baseClientImpl{
 			ctx: ctx,
 			ds:  ds,

View File

@@ -90,6 +90,19 @@ func TestClient(t *testing.T) {
 			So(err, ShouldBeNil)
 			So(c.GetVersion(), ShouldEqual, 56)
 		})
+
+		Convey("When version 60 should return v6.0 client", func() {
+			ds := &models.DataSource{
+				JsonData: simplejson.NewFromAny(map[string]interface{}{
+					"esVersion": 60,
+					"timeField": "@timestamp",
+				}),
+			}
+
+			c, err := NewClient(context.Background(), ds, nil)
+			So(err, ShouldBeNil)
+			So(c.GetVersion(), ShouldEqual, 60)
+		})
 	})
 
 	Convey("Given a fake http client", func() {
@@ -153,8 +166,6 @@ func TestClient(t *testing.T) {
 			jBody, err := simplejson.NewJson(bodyBytes)
 			So(err, ShouldBeNil)
 
-			fmt.Println("body", string(headerBytes))
-
 			So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
 			So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
 			So(jHeader.Get("search_type").MustString(), ShouldEqual, "count")
@@ -209,8 +220,6 @@ func TestClient(t *testing.T) {
 			jBody, err := simplejson.NewJson(bodyBytes)
 			So(err, ShouldBeNil)
 
-			fmt.Println("body", string(headerBytes))
-
 			So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
 			So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
 			So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")
@@ -265,8 +274,6 @@ func TestClient(t *testing.T) {
 			jBody, err := simplejson.NewJson(bodyBytes)
 			So(err, ShouldBeNil)
 
-			fmt.Println("body", string(headerBytes))
-
 			So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
 			So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
 			So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")

View File

@@ -32,6 +32,7 @@ func init() {
 	renders["median"] = QueryDefinition{Renderer: functionRenderer}
 	renders["sum"] = QueryDefinition{Renderer: functionRenderer}
 	renders["mode"] = QueryDefinition{Renderer: functionRenderer}
+	renders["cumulative_sum"] = QueryDefinition{Renderer: functionRenderer}
 
 	renders["holt_winters"] = QueryDefinition{
 		Renderer: functionRenderer,

View File

@@ -23,6 +23,7 @@ func TestInfluxdbQueryPart(t *testing.T) {
 		{mode: "alias", params: []string{"test"}, input: "mean(value)", expected: `mean(value) AS "test"`},
 		{mode: "count", params: []string{}, input: "distinct(value)", expected: `count(distinct(value))`},
 		{mode: "mode", params: []string{}, input: "value", expected: `mode(value)`},
+		{mode: "cumulative_sum", params: []string{}, input: "mean(value)", expected: `cumulative_sum(mean(value))`},
 	}
 
 	queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("5m", "now")}

View File

@@ -66,6 +66,10 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		}
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeFrom":
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeTo":
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)

View File

@@ -52,6 +52,20 @@ func TestMacroEngine(t *testing.T) {
 			So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 		})
 
+		Convey("interpolate __timeFrom function", func() {
+			sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+			So(err, ShouldBeNil)
+
+			So(sql, ShouldEqual, "select '2018-04-12T18:00:00Z'")
+		})
+
+		Convey("interpolate __timeTo function", func() {
+			sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+			So(err, ShouldBeNil)
+
+			So(sql, ShouldEqual, "select '2018-04-12T18:05:00Z'")
+		})
+
 		Convey("interpolate __timeGroup function", func() {
 			sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 			So(err, ShouldBeNil)

View File

@@ -61,6 +61,10 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
 		}
 		return fmt.Sprintf("%s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", args[0], m.timeRange.GetFromAsSecondsEpoch(), m.timeRange.GetToAsSecondsEpoch()), nil
+	case "__timeFrom":
+		return fmt.Sprintf("FROM_UNIXTIME(%d)", m.timeRange.GetFromAsSecondsEpoch()), nil
+	case "__timeTo":
+		return fmt.Sprintf("FROM_UNIXTIME(%d)", m.timeRange.GetToAsSecondsEpoch()), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval", name)

View File

@@ -63,6 +63,20 @@ func TestMacroEngine(t *testing.T) {
 			So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
 		})
 
+		Convey("interpolate __timeFrom function", func() {
+			sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+			So(err, ShouldBeNil)
+
+			So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+		})
+
+		Convey("interpolate __timeTo function", func() {
+			sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+			So(err, ShouldBeNil)
+
+			So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+		})
+
 		Convey("interpolate __unixEpochFilter function", func() {
 			sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
 			So(err, ShouldBeNil)

View File

@@ -761,7 +761,7 @@ func TestMySQL(t *testing.T) {
 				{
 					DataSource: &models.DataSource{JsonData: simplejson.New()},
 					Model: simplejson.NewFromAny(map[string]interface{}{
-						"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+						"rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeTo() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
 						"format": "time_series",
 					}),
 					RefId: "A",
@@ -773,7 +773,7 @@
 			So(err, ShouldBeNil)
 
 			queryResult := resp.Results["A"]
 			So(queryResult.Error, ShouldBeNil)
-			So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+			So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > FROM_UNIXTIME(1521118500) OR time < FROM_UNIXTIME(1521118800) OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
 		})

View File

@@ -87,6 +87,10 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
 		}
 		return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeFrom":
+		return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+	case "__timeTo":
+		return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
 	case "__timeGroup":
 		if len(args) < 2 {
 			return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)

View File

@@ -44,6 +44,20 @@ func TestMacroEngine(t *testing.T) {
 			So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 		})
 
+		Convey("interpolate __timeFrom function", func() {
+			sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+			So(err, ShouldBeNil)
+
+			So(sql, ShouldEqual, "select '2018-04-12T18:00:00Z'")
+		})
+
+		Convey("interpolate __timeTo function", func() {
+			sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+			So(err, ShouldBeNil)
+
+			So(sql, ShouldEqual, "select '2018-04-12T18:05:00Z'")
+		})
+
 		Convey("interpolate __timeGroup function pre 5.3 compatibility", func() {
 			sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value")

View File

@@ -196,8 +196,6 @@ var Interpolate = func(query *Query, timeRange *TimeRange, sql string) (string,
 	sql = strings.Replace(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1)
 	sql = strings.Replace(sql, "$__interval", interval.Text, -1)
-	sql = strings.Replace(sql, "$__timeFrom()", fmt.Sprintf("'%s'", timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), -1)
-	sql = strings.Replace(sql, "$__timeTo()", fmt.Sprintf("'%s'", timeRange.GetToAsTimeUTC().Format(time.RFC3339)), -1)
 	sql = strings.Replace(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.GetFromAsSecondsEpoch()), -1)
 	sql = strings.Replace(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.GetToAsSecondsEpoch()), -1)

View File

@@ -44,20 +44,6 @@ func TestSqlEngine(t *testing.T) {
 			So(sql, ShouldEqual, "select 60000 ")
 		})
 
-		Convey("interpolate __timeFrom function", func() {
-			sql, err := Interpolate(query, timeRange, "select $__timeFrom()")
-			So(err, ShouldBeNil)
-
-			So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-		})
-
-		Convey("interpolate __timeTo function", func() {
-			sql, err := Interpolate(query, timeRange, "select $__timeTo()")
-			So(err, ShouldBeNil)
-
-			So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-		})
-
 		Convey("interpolate __unixEpochFrom function", func() {
 			sql, err := Interpolate(query, timeRange, "select $__unixEpochFrom()")
 			So(err, ShouldBeNil)

View File

@@ -16,7 +16,7 @@ export function registerAngularDirectives() {
   react2AngularDirective('searchResult', SearchResult, []);
   react2AngularDirective('tagFilter', TagFilter, [
     'tags',
-    ['onSelect', { watchDepth: 'reference' }],
+    ['onChange', { watchDepth: 'reference' }],
     ['tagOptions', { watchDepth: 'reference' }],
   ]);
 }

View File

@@ -84,7 +84,7 @@ class AddPermissions extends Component<Props, NewDashboardAclItem> {
   render() {
     const { onCancel } = this.props;
     const newItem = this.state;
-    const pickerClassName = 'width-20';
+    const pickerClassName = 'min-width-20';
     const isValid = this.isValid();
     return (
       <div className="gf-form-inline cta-form">

View File

@@ -40,7 +40,7 @@ export class UserPicker extends Component<Props, State> {
       .then(result => {
         return result.map(user => ({
           id: user.userId,
-          label: `${user.login} - ${user.email}`,
+          label: user.login === user.email ? user.login : `${user.login} - ${user.email}`,
           avatarUrl: user.avatarUrl,
           login: user.login,
         }));

View File

@@ -10,7 +10,7 @@ import ResetStyles from 'app/core/components/Picker/ResetStyles';
 export interface Props {
   tags: string[];
   tagOptions: () => any;
-  onSelect: (tag: string) => void;
+  onChange: (tags: string[]) => void;
 }
 
 export class TagFilter extends React.Component<Props, any> {
@@ -18,12 +18,9 @@ export class TagFilter extends React.Component<Props, any> {
   constructor(props) {
     super(props);
-
-    this.searchTags = this.searchTags.bind(this);
-    this.onChange = this.onChange.bind(this);
   }
 
-  searchTags(query) {
+  onLoadOptions = query => {
     return this.props.tagOptions().then(options => {
       return options.map(option => ({
         value: option.term,
@@ -31,18 +28,20 @@ export class TagFilter extends React.Component<Props, any> {
         count: option.count,
       }));
     });
-  }
+  };
 
-  onChange(newTags) {
-    this.props.onSelect(newTags);
-  }
+  onChange = (newTags: any[]) => {
+    this.props.onChange(newTags.map(tag => tag.value));
+  };
 
   render() {
+    const tags = this.props.tags.map(tag => ({ value: tag, label: tag, count: 0 }));
+
     const selectOptions = {
       classNamePrefix: 'gf-form-select-box',
       isMulti: true,
       defaultOptions: true,
-      loadOptions: this.searchTags,
+      loadOptions: this.onLoadOptions,
       onChange: this.onChange,
       className: 'gf-form-input gf-form-input--form-dropdown',
       placeholder: 'Tags',
@@ -50,8 +49,12 @@ export class TagFilter extends React.Component<Props, any> {
       noOptionsMessage: () => 'No tags found',
       getOptionValue: i => i.value,
       getOptionLabel: i => i.label,
-      value: this.props.tags,
+      value: tags,
       styles: ResetStyles,
+      filterOption: (option, searchQuery) => {
+        const regex = RegExp(searchQuery, 'i');
+        return regex.test(option.value);
+      },
       components: {
         Option: TagOption,
         IndicatorsContainer,

View File

@@ -44,7 +44,7 @@ export class SeriesColorPicker extends React.Component<SeriesColorPickerProps> {
     const drop = new Drop({
       target: this.pickerElem,
       content: dropContentElem,
-      position: 'top center',
+      position: 'bottom center',
       classes: 'drop-popover',
      openOn: 'hover',
      hoverCloseDelay: 200,

View File

@@ -41,7 +41,7 @@
       </a>
     </div>
 
-    <tag-filter tags="ctrl.query.tag" tagOptions="ctrl.getTags" onSelect="ctrl.onTagSelect">
+    <tag-filter tags="ctrl.query.tag" tagOptions="ctrl.getTags" onChange="ctrl.onTagFiltersChanged">
     </tag-filter>
   </div>

View File

@@ -25,8 +25,6 @@ export class SearchCtrl {
     appEvents.on('hide-dash-search', this.closeSearch.bind(this), $scope);
 
     this.initialFolderFilterTitle = 'All';
-    this.getTags = this.getTags.bind(this);
-    this.onTagSelect = this.onTagSelect.bind(this);
     this.isEditor = contextSrv.isEditor;
     this.hasEditPermissionInFolders = contextSrv.hasEditPermissionInFolders;
   }
@@ -162,7 +160,7 @@ export class SearchCtrl {
     const localSearchId = this.currentSearchId;
     const query = {
       ...this.query,
-      tag: this.query.tag.map(i => i.value),
+      tag: this.query.tag,
     };
 
     return this.searchSrv.search(query).then(results => {
@@ -195,14 +193,14 @@ export class SearchCtrl {
     evt.preventDefault();
   }
 
-  getTags() {
+  getTags = () => {
     return this.searchSrv.getDashboardTags();
-  }
+  };
 
-  onTagSelect(newTags) {
-    this.query.tag = newTags;
+  onTagFiltersChanged = (tags: string[]) => {
+    this.query.tag = tags;
     this.search();
-  }
+  };
 
   clearSearchFilter() {
     this.query.tag = [];

View File

@@ -428,10 +428,16 @@ kbn.valueFormats.hex0x = (value, decimals) => {
 };
 
 kbn.valueFormats.sci = (value, decimals) => {
+  if (value == null) {
+    return '';
+  }
   return value.toExponential(decimals);
 };
 
 kbn.valueFormats.locale = (value, decimals) => {
+  if (value == null) {
+    return '';
+  }
   return value.toLocaleString(undefined, { maximumFractionDigits: decimals });
 };
@@ -584,8 +590,8 @@ kbn.valueFormats.flowcms = kbn.formatBuilders.fixedUnit('cms');
 kbn.valueFormats.flowcfs = kbn.formatBuilders.fixedUnit('cfs');
 kbn.valueFormats.flowcfm = kbn.formatBuilders.fixedUnit('cfm');
 kbn.valueFormats.litreh = kbn.formatBuilders.fixedUnit('l/h');
-kbn.valueFormats.flowlpm = kbn.formatBuilders.decimalSIPrefix('L');
-kbn.valueFormats.flowmlpm = kbn.formatBuilders.decimalSIPrefix('L', -1);
+kbn.valueFormats.flowlpm = kbn.formatBuilders.fixedUnit('l/min');
+kbn.valueFormats.flowmlpm = kbn.formatBuilders.fixedUnit('mL/min');
 
 // Angle
 kbn.valueFormats.degree = kbn.formatBuilders.fixedUnit('°');

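Both formatters now short-circuit on missing input instead of throwing on value.toExponential(...). The guard uses loose equality (value == null), which matches both null and undefined but not 0. The same pattern as a standalone function, without the kbn registry around it:

    const sci = (value: number | null | undefined, decimals: number): string => {
      if (value == null) {
        return ''; // render nothing rather than throw on missing data points
      }
      return value.toExponential(decimals);
    };

    sci(null, 2);      // ''
    sci(undefined, 2); // ''
    sci(0, 2);         // '0.00e+0'
    sci(12345, 2);     // '1.23e+4'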

@@ -64,9 +64,9 @@
     </div>
     <div class="gf-form">
       <metric-segment-model property="conditionModel.evaluator.type" options="ctrl.evalFunctions" custom="false" css-class="query-keyword" on-change="ctrl.evaluatorTypeChanged(conditionModel.evaluator)"></metric-segment-model>
-      <input class="gf-form-input max-width-9" type="number" step="any" ng-hide="conditionModel.evaluator.params.length === 0" ng-model="conditionModel.evaluator.params[0]" ng-change="ctrl.evaluatorParamsChanged()"></input>
+      <input class="gf-form-input max-width-9" type="number" step="any" ng-hide="conditionModel.evaluator.params.length === 0" ng-model="conditionModel.evaluator.params[0]" ng-change="ctrl.evaluatorParamsChanged()">
       <label class="gf-form-label query-keyword" ng-show="conditionModel.evaluator.params.length === 2">TO</label>
-      <input class="gf-form-input max-width-9" type="number" step="any" ng-if="conditionModel.evaluator.params.length === 2" ng-model="conditionModel.evaluator.params[1]" ng-change="ctrl.evaluatorParamsChanged()"></input>
+      <input class="gf-form-input max-width-9" type="number" step="any" ng-if="conditionModel.evaluator.params.length === 2" ng-model="conditionModel.evaluator.params[1]" ng-change="ctrl.evaluatorParamsChanged()">
     </div>
     <div class="gf-form">
       <label class="gf-form-label">


@@ -8,9 +8,9 @@ const alertQueryDef = new QueryPartDef({
     {
       name: 'from',
       type: 'string',
-      options: ['1s', '10s', '1m', '5m', '10m', '15m', '1h', '24h', '48h'],
+      options: ['10s', '1m', '5m', '10m', '15m', '1h', '24h', '48h'],
     },
-    { name: 'to', type: 'string', options: ['now'] },
+    { name: 'to', type: 'string', options: ['now', 'now-1m', 'now-5m', 'now-10m', 'now-1h'] },
   ],
   defaultParams: ['#A', '15m', 'now', 'avg'],
 });


@@ -223,6 +223,8 @@ export class DashboardModel {
   }
   panelInitialized(panel: PanelModel) {
+    panel.initialized();
     if (!this.otherPanelInFullscreen(panel)) {
       panel.refresh();
     }


@@ -132,7 +132,7 @@ export class PanelModel {
     }
   }
-  panelInitialized() {
+  initialized() {
     this.events.emit('panel-initialized');
   }

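Paired with the DashboardModel change above, the rename makes the dashboard call panel.initialized() explicitly, so the panel-initialized event is emitted before the first refresh instead of never firing. A minimal sketch of that emit-then-refresh ordering, using Node's EventEmitter in place of Grafana's own emitter:

    import { EventEmitter } from 'events';

    class Panel {
      events = new EventEmitter();

      initialized() {
        this.events.emit('panel-initialized');
      }

      refresh() {
        console.log('refreshing');
      }
    }

    const panel = new Panel();
    panel.events.on('panel-initialized', () => console.log('init hook ran'));
    // Dashboard side: emit the lifecycle event first, then refresh.
    panel.initialized();
    panel.refresh(); // -> 'init hook ran', then 'refreshing'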

@@ -148,7 +148,7 @@ export function loadDataSourceTypes(): ThunkResult<void> {
 export function nameExits(dataSources, name) {
   return (
     dataSources.filter(dataSource => {
-      return dataSource.name === name;
+      return dataSource.name.toLowerCase() === name.toLowerCase();
     }).length > 0
   );
 }

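With the lowercased comparison, the duplicate-name check no longer depends on how the user capitalizes the name (note the misspelled identifier nameExits is kept as-is from the source). For example, with hypothetical data:

    const dataSources = [{ name: 'Prometheus' }, { name: 'Graphite' }];
    nameExits(dataSources, 'prometheus'); // true after this change, false before
    nameExits(dataSources, 'InfluxDB');   // false either way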

@@ -1,5 +1,4 @@
-<div class="panel panel--solo" ng-if="panel" style="width: 100%">
+<div class="panel-solo" ng-if="panel">
   <plugin-component type="panel">
   </plugin-component>
 </div>
-<div class="clearfix"></div>


@@ -115,7 +115,7 @@ export class TeamMembers extends PureComponent<Props, State> {
       </button>
       <h5>Add Team Member</h5>
       <div className="gf-form-inline">
-        <UserPicker onSelected={this.onUserSelected} className="width-30" />
+        <UserPicker onSelected={this.onUserSelected} className="min-width-30" />
         {this.state.newTeamMember && (
           <button className="btn btn-success gf-form-btn" type="submit" onClick={this.onAddUserToTeam}>
             Add to team


@@ -58,7 +58,7 @@ exports[`Render should render component 1`] = `
   className="gf-form-inline"
 >
   <UserPicker
-    className="width-30"
+    className="min-width-30"
     onSelected={[Function]}
   />
 </div>
@@ -152,7 +152,7 @@ exports[`Render should render team members 1`] = `
   className="gf-form-inline"
 >
   <UserPicker
-    className="width-30"
+    className="min-width-30"
     onSelected={[Function]}
   />
 </div>
@@ -372,7 +372,7 @@ exports[`Render should render team members when sync enabled 1`] = `
   className="gf-form-inline"
 >
   <UserPicker
-    className="width-30"
+    className="min-width-30"
     onSelected={[Function]}
   />
 </div>


@@ -2,22 +2,8 @@ import coreModule from 'app/core/core_module';
 import _ from 'lodash';
 import * as queryDef from './query_def';
-export function elasticBucketAgg() {
-  return {
-    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html',
-    controller: 'ElasticBucketAggCtrl',
-    restrict: 'E',
-    scope: {
-      target: '=',
-      index: '=',
-      onChange: '&',
-      getFields: '&',
-    },
-  };
-}
 export class ElasticBucketAggCtrl {
-  /** @nginject */
+  /** @ngInject */
   constructor($scope, uiSegmentSrv, $q, $rootScope) {
     const bucketAggs = $scope.target.bucketAggs;
@@ -226,5 +212,18 @@ export class ElasticBucketAggCtrl {
   }
 }
+export function elasticBucketAgg() {
+  return {
+    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html',
+    controller: ElasticBucketAggCtrl,
+    restrict: 'E',
+    scope: {
+      target: '=',
+      index: '=',
+      onChange: '&',
+      getFields: '&',
+    },
+  };
+}
 coreModule.directive('elasticBucketAgg', elasticBucketAgg);
-coreModule.controller('ElasticBucketAggCtrl', ElasticBucketAggCtrl);


@@ -2,22 +2,8 @@ import coreModule from 'app/core/core_module';
 import _ from 'lodash';
 import * as queryDef from './query_def';
-export function elasticMetricAgg() {
-  return {
-    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/metric_agg.html',
-    controller: 'ElasticMetricAggCtrl',
-    restrict: 'E',
-    scope: {
-      target: '=',
-      index: '=',
-      onChange: '&',
-      getFields: '&',
-      esVersion: '=',
-    },
-  };
-}
 export class ElasticMetricAggCtrl {
+  /** @ngInject */
   constructor($scope, uiSegmentSrv, $q, $rootScope) {
     const metricAggs = $scope.target.metrics;
     $scope.metricAggTypes = queryDef.getMetricAggTypes($scope.esVersion);
@@ -209,5 +195,19 @@ export class ElasticMetricAggCtrl {
   }
 }
+export function elasticMetricAgg() {
+  return {
+    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/metric_agg.html',
+    controller: ElasticMetricAggCtrl,
+    restrict: 'E',
+    scope: {
+      target: '=',
+      index: '=',
+      onChange: '&',
+      getFields: '&',
+      esVersion: '=',
+    },
+  };
+}
 coreModule.directive('elasticMetricAgg', elasticMetricAgg);
-coreModule.controller('ElasticMetricAggCtrl', ElasticMetricAggCtrl);

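Both Elasticsearch files get the same treatment: the directive factory moves below the controller class and references it directly (controller: ElasticBucketAggCtrl / ElasticMetricAggCtrl) instead of a registered name string, which lets the separate coreModule.controller(...) registration be dropped. A reduced sketch of the wiring, with abbreviated names standing in for the real module:

    class MyAggCtrl {
      /** @ngInject */
      constructor(private $scope: any) {}
    }

    function myAgg() {
      return {
        restrict: 'E',
        controller: MyAggCtrl, // direct class reference, no string lookup
        scope: { target: '=', onChange: '&' },
      };
    }

    // One registration is enough once the directive holds the class itself:
    // coreModule.directive('myAgg', myAgg);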

@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
 Macros:
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
-- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__timeFilter(column) -&gt; column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877)
 - $__unixEpochFilter(column) -&gt; time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt; '2017-04-21T05:01:17Z'
+- $__timeFrom() -&gt; FROM_UNIXTIME(1492750877)
-- $__timeTo() -&gt; '2017-04-21T05:01:17Z'
+- $__timeTo() -&gt; FROM_UNIXTIME(1492750877)
 - $__unixEpochFrom() -&gt; 1492750877
 - $__unixEpochTo() -&gt; 1492750877
 </pre>


@@ -151,7 +151,7 @@ Table:
 Macros:
 - $__time(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
 - $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
-- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__timeFilter(column) -&gt; column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877)
 - $__unixEpochFilter(column) -&gt; time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
 - $__timeGroup(column,'5m'[, fillvalue]) -&gt; cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
 by setting fillvalue grafana will fill in missing values according to the interval
@@ -169,8 +169,8 @@
 ORDER BY 1
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -&gt; '2017-04-21T05:01:17Z'
+- $__timeFrom() -&gt; FROM_UNIXTIME(1492750877)
-- $__timeTo() -&gt; '2017-04-21T05:01:17Z'
+- $__timeTo() -&gt; FROM_UNIXTIME(1492750877)
 - $__unixEpochFrom() -&gt; 1492750877
 - $__unixEpochTo() -&gt; 1492750877
 </pre>

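As a concrete illustration of the corrected help text, a time series query using these macros could look like the following (table and column names are invented for the example):

    SELECT
      UNIX_TIMESTAMP(created_at) as time_sec,
      value,
      'requests' as metric
    FROM request_stats
    WHERE $__timeFilter(created_at)
    ORDER BY created_at

At query time $__timeFilter(created_at) expands to created_at BETWEEN FROM_UNIXTIME(...) AND FROM_UNIXTIME(...), as the updated help text above now correctly shows.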

@@ -151,8 +151,7 @@ table_schema IN (
   buildDatatypeQuery(column: string) {
     let query = 'SELECT udt_name FROM information_schema.columns WHERE ';
-    query += this.buildSchemaConstraint();
-    query += ' AND table_name = ' + this.quoteIdentAsLiteral(this.target.table);
+    query += this.buildTableConstraint(this.target.table);
     query += ' AND column_name = ' + this.quoteIdentAsLiteral(column);
     return query;
   }


@@ -58,15 +58,7 @@ class GraphElement {
   // panel events
   this.ctrl.events.on('panel-teardown', this.onPanelTeardown.bind(this));
-  /**
-   * Split graph rendering into two parts.
-   * First, calculate series stats in buildFlotPairs() function. Then legend rendering started
-   * (see ctrl.events.on('render') in legend.ts).
-   * When legend is rendered it emits 'legend-rendering-complete' and graph rendered.
-   */
   this.ctrl.events.on('render', this.onRender.bind(this));
-  this.ctrl.events.on('legend-rendering-complete', this.onLegendRenderingComplete.bind(this));
   // global events
   appEvents.on('graph-hover', this.onGraphHover.bind(this), scope);
@@ -85,11 +77,20 @@ class GraphElement {
   if (!this.data) {
     return;
   }
   this.annotations = this.ctrl.annotations || [];
   this.buildFlotPairs(this.data);
   const graphHeight = this.elem.height();
   updateLegendValues(this.data, this.panel, graphHeight);
+  if (!this.panel.legend.show) {
+    if (this.legendElem.hasChildNodes()) {
+      ReactDOM.unmountComponentAtNode(this.legendElem);
+    }
+    this.renderPanel();
+    return;
+  }
   const { values, min, max, avg, current, total } = this.panel.legend;
   const { alignAsTable, rightSide, sideWidth, sort, sortDesc, hideEmpty, hideZero } = this.panel.legend;
   const legendOptions = { alignAsTable, rightSide, sideWidth, sort, sortDesc, hideEmpty, hideZero };
@@ -104,12 +105,9 @@ class GraphElement {
     onColorChange: this.ctrl.onColorChange,
     onToggleAxis: this.ctrl.onToggleAxis,
   };
-  const legendReactElem = React.createElement(Legend, legendProps);
-  ReactDOM.render(legendReactElem, this.legendElem, () => this.onLegendRenderingComplete());
-}
-onLegendRenderingComplete() {
-  this.render_panel();
+  const legendReactElem = React.createElement(Legend, legendProps);
+  ReactDOM.render(legendReactElem, this.legendElem, () => this.renderPanel());
 }
 onGraphHover(evt) {
@@ -281,7 +279,7 @@ class GraphElement {
   }
   // Function for rendering panel
-  render_panel() {
+  renderPanel() {
     this.panelWidth = this.elem.width();
     if (this.shouldAbortRender()) {
       return;

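The net effect is that rendering no longer round-trips through a legend-rendering-complete event: the graph draws in ReactDOM.render's completion callback, or immediately when the legend is hidden. A stripped-down sketch of that callback chaining (element and function names are placeholders, not the GraphElement API):

    import React from 'react';
    import ReactDOM from 'react-dom';

    function renderLegendThenPanel(legendElem: HTMLElement, showLegend: boolean, renderPanel: () => void) {
      if (!showLegend) {
        // Tear down any previously mounted legend, then draw the graph directly.
        if (legendElem.hasChildNodes()) {
          ReactDOM.unmountComponentAtNode(legendElem);
        }
        renderPanel();
        return;
      }
      const legend = React.createElement('div', null, 'legend');
      // The third argument runs after React commits the legend to the DOM,
      // so the panel can lay itself out against the legend's final size.
      ReactDOM.render(legend, legendElem, () => renderPanel());
    }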

@@ -125,7 +125,7 @@ describe('grafanaGraph', () => {
   //Emulate functions called by event listeners
   link.buildFlotPairs(link.data);
-  link.render_panel();
+  link.renderPanel();
   ctx.plotData = ctrl.plot.mock.calls[0][1];
   ctx.plotOptions = ctrl.plot.mock.calls[0][2];


@@ -130,6 +130,33 @@ describe('TimeRegionManager', () => {
     });
   });
+  plotOptionsScenario('for time from/to region', ctx => {
+    const regions = [{ from: '00:00', to: '05:00', fill: true, colorMode: 'red' }];
+    const from = moment('2018-12-01T00:00+01:00');
+    const to = moment('2018-12-03T23:59+01:00');
+    ctx.setup(regions, from, to);
+    it('should add 3 markings', () => {
+      expect(ctx.options.grid.markings.length).toBe(3);
+    });
+    it('should add one fill between 00:00 and 05:00 each day', () => {
+      const markings = ctx.options.grid.markings;
+      expect(moment(markings[0].xaxis.from).format()).toBe(moment('2018-12-01T01:00:00+01:00').format());
+      expect(moment(markings[0].xaxis.to).format()).toBe(moment('2018-12-01T06:00:00+01:00').format());
+      expect(markings[0].color).toBe(colorModes.red.color.fill);
+      expect(moment(markings[1].xaxis.from).format()).toBe(moment('2018-12-02T01:00:00+01:00').format());
+      expect(moment(markings[1].xaxis.to).format()).toBe(moment('2018-12-02T06:00:00+01:00').format());
+      expect(markings[1].color).toBe(colorModes.red.color.fill);
+      expect(moment(markings[2].xaxis.from).format()).toBe(moment('2018-12-03T01:00:00+01:00').format());
+      expect(moment(markings[2].xaxis.to).format()).toBe(moment('2018-12-03T06:00:00+01:00').format());
+      expect(markings[2].color).toBe(colorModes.red.color.fill);
+    });
+  });
   plotOptionsScenario('for day of week from/to region', ctx => {
     const regions = [{ fromDayOfWeek: 7, toDayOfWeek: 7, fill: true, colorMode: 'red' }];
     const from = moment('2018-01-01T18:45:05+01:00');
@@ -211,6 +238,42 @@ describe('TimeRegionManager', () => {
     });
   });
+  plotOptionsScenario('for day of week from/to time region', ctx => {
+    const regions = [{ fromDayOfWeek: 7, from: '23:00', toDayOfWeek: 1, to: '01:40', fill: true, colorMode: 'red' }];
+    const from = moment('2018-12-07T12:51:19+01:00');
+    const to = moment('2018-12-10T13:51:29+01:00');
+    ctx.setup(regions, from, to);
+    it('should add 1 marking', () => {
+      expect(ctx.options.grid.markings.length).toBe(1);
+    });
+    it('should add one fill between sunday 23:00 and monday 01:40', () => {
+      const markings = ctx.options.grid.markings;
+      expect(moment(markings[0].xaxis.from).format()).toBe(moment('2018-12-10T00:00:00+01:00').format());
+      expect(moment(markings[0].xaxis.to).format()).toBe(moment('2018-12-10T02:40:00+01:00').format());
+    });
+  });
+  plotOptionsScenario('for day of week from/to time region', ctx => {
+    const regions = [{ fromDayOfWeek: 6, from: '03:00', toDayOfWeek: 7, to: '02:00', fill: true, colorMode: 'red' }];
+    const from = moment('2018-12-07T12:51:19+01:00');
+    const to = moment('2018-12-10T13:51:29+01:00');
+    ctx.setup(regions, from, to);
+    it('should add 1 marking', () => {
+      expect(ctx.options.grid.markings.length).toBe(1);
+    });
+    it('should add one fill between saturday 03:00 and sunday 02:00', () => {
+      const markings = ctx.options.grid.markings;
+      expect(moment(markings[0].xaxis.from).format()).toBe(moment('2018-12-08T04:00:00+01:00').format());
+      expect(moment(markings[0].xaxis.to).format()).toBe(moment('2018-12-09T03:00:00+01:00').format());
+    });
+  });
   plotOptionsScenario('for day of week from/to time region with daylight saving time', ctx => {
     const regions = [{ fromDayOfWeek: 7, from: '20:00', toDayOfWeek: 7, to: '23:00', fill: true, colorMode: 'red' }];
     const from = moment('2018-03-17T06:00:00+01:00');


@@ -87,6 +87,14 @@ export class TimeRegionManager {
       continue;
     }
+    if (timeRegion.from && !timeRegion.to) {
+      timeRegion.to = timeRegion.from;
+    }
+    if (!timeRegion.from && timeRegion.to) {
+      timeRegion.from = timeRegion.to;
+    }
     hRange = {
       from: this.parseTimeRange(timeRegion.from),
       to: this.parseTimeRange(timeRegion.to),
@@ -108,21 +116,13 @@ export class TimeRegionManager {
       hRange.to.dayOfWeek = Number(timeRegion.toDayOfWeek);
     }
-    if (!hRange.from.h && hRange.to.h) {
-      hRange.from = hRange.to;
-    }
-    if (hRange.from.h && !hRange.to.h) {
-      hRange.to = hRange.from;
-    }
-    if (hRange.from.dayOfWeek && !hRange.from.h && !hRange.from.m) {
+    if (hRange.from.dayOfWeek && hRange.from.h === null && hRange.from.m === null) {
       hRange.from.h = 0;
       hRange.from.m = 0;
      hRange.from.s = 0;
     }
-    if (hRange.to.dayOfWeek && !hRange.to.h && !hRange.to.m) {
+    if (hRange.to.dayOfWeek && hRange.to.h === null && hRange.to.m === null) {
       hRange.to.h = 23;
       hRange.to.m = 59;
       hRange.to.s = 59;
@@ -169,8 +169,16 @@ export class TimeRegionManager {
       fromEnd.add(hRange.to.h - hRange.from.h, 'hours');
     } else if (hRange.from.h + hRange.to.h < 23) {
       fromEnd.add(hRange.to.h, 'hours');
+      while (fromEnd.hour() !== hRange.to.h) {
+        fromEnd.add(-1, 'hours');
+      }
     } else {
       fromEnd.add(24 - hRange.from.h, 'hours');
+      while (fromEnd.hour() !== hRange.to.h) {
+        fromEnd.add(1, 'hours');
+      }
     }
     fromEnd.set('minute', hRange.to.m);

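Two fixes are visible here: a region with only from or only to is mirrored into a full range before parsing, and the hour/minute checks now compare against null explicitly, so midnight (h === 0) is no longer mistaken for an unset time. The truthiness bug in miniature, with illustrative values rather than Grafana code:

    interface ParsedTime { h: number | null; m: number | null; }

    const from: ParsedTime = { h: 0, m: 0 }; // midnight

    if (!from.h && !from.m) {
      // old-style check: 0 is falsy, so midnight wrongly looks unset
    }
    if (from.h === null && from.m === null) {
      // new-style check: only a genuinely unparsed time matches
    }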

@@ -107,7 +107,10 @@ class SingleStatCtrl extends MetricsPanelCtrl {
   }
   onDataReceived(dataList) {
-    const data: any = {};
+    const data: any = {
+      scopedVars: _.extend({}, this.panel.scopedVars),
+    };
     if (dataList.length > 0 && dataList[0].type === 'table') {
       this.dataType = 'table';
       const tableData = dataList.map(this.tableHandler.bind(this));
@@ -117,6 +120,7 @@ class SingleStatCtrl extends MetricsPanelCtrl {
       this.series = dataList.map(this.seriesHandler.bind(this));
       this.setValues(data);
     }
     this.data = data;
     this.render();
   }
@@ -320,7 +324,6 @@ class SingleStatCtrl extends MetricsPanelCtrl {
     }
     // Add $__name variable for using in prefix or postfix
-    data.scopedVars = _.extend({}, this.panel.scopedVars);
     data.scopedVars['__name'] = { value: this.series[0].label };
   }
   this.setValueMapping(data);

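Moving the scopedVars copy into the data initializer sets it on every data path, where previously it was only assigned in the series-handling branch (the removed line further down), so table data, empty results and repeated panels lost their scoped variables. The shape of the fix in isolation, a sketch with panel standing in for the controller state:

    import _ from 'lodash';

    function onDataReceived(panel: { scopedVars?: object }, dataList: any[]) {
      // scopedVars is populated up front, so every code path below
      // sees the panel's repeat variables.
      const data: any = {
        scopedVars: _.extend({}, panel.scopedVars),
      };
      if (dataList.length === 0) {
        return data; // previously data.scopedVars would be undefined here
      }
      // ... series/table handling sets values on data here ...
      return data;
    }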

@@ -199,7 +199,6 @@ small,
 mark,
 .mark {
-  padding: 0.2em;
   background: $alert-warning-bg;
 }


@@ -19,16 +19,23 @@ div.flot-text {
 .panel {
   height: 100%;
+}
-  &--solo {
+.panel-solo {
   position: fixed;
   bottom: 0;
   right: 0;
   margin: 0;
+  left: 0;
+  top: 0;
+  z-index: $zindex-sidemenu + 1;
   .panel-container {
     border: none;
   }
+  .panel-menu-toggle,
+  .panel-menu {
+    display: none;
+  }
 }
-}


@@ -19,6 +19,12 @@
   }
 }
+@for $i from 1 through 30 {
+  .min-width-#{$i} {
+    min-width: ($spacer * $i) - $gf-form-margin !important;
+  }
+}
 @for $i from 1 through 30 {
   .offset-width-#{$i} {
     margin-left: ($spacer * $i) !important;

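The new loop mirrors the offset-width helper directly below it, but subtracts the form margin so inline form buttons are not pushed out of line. Assuming, purely for illustration, $spacer: 8px and $gf-form-margin: 4px (the real values are defined elsewhere in the theme), .min-width-30 would compile to min-width: 236px !important, i.e. (8px * 30) - 4px.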

@@ -6,8 +6,8 @@ EXTRA_OPTS="$@"
 # Right now we hack this in into the publish script.
 # Eventually we might want to keep a list of all previous releases somewhere.
-_releaseNoteUrl="https://community.grafana.com/t/release-notes-v5-3-x/10244"
+_releaseNoteUrl="https://community.grafana.com/t/release-notes-v5-4-x/12215"
-_whatsNewUrl="http://docs.grafana.org/guides/whats-new-in-v5-3/"
+_whatsNewUrl="http://docs.grafana.org/guides/whats-new-in-v5-4/"
 ./scripts/build/release_publisher/release_publisher \
   --wn ${_whatsNewUrl} \


@@ -41,12 +41,12 @@ func main() {
   var builder releaseBuilder
   var product string
-  archiveProviderRoot := "https://s3-us-west-2.amazonaws.com"
+  archiveProviderRoot := "https://dl.grafana.com"
   buildArtifacts := completeBuildArtifactConfigurations
   if enterprise {
     product = "grafana-enterprise"
-    baseUrl = createBaseUrl(archiveProviderRoot, "grafana-enterprise-releases", product, nightly)
+    baseUrl = createBaseUrl(archiveProviderRoot, "enterprise", product, nightly)
     var err error
     buildArtifacts, err = filterBuildArtifacts([]artifactFilter{
       {os: "deb", arch: "amd64"},
@@ -61,7 +61,7 @@ func main() {
   } else {
     product = "grafana"
-    baseUrl = createBaseUrl(archiveProviderRoot, "grafana-releases", product, nightly)
+    baseUrl = createBaseUrl(archiveProviderRoot, "oss", product, nightly)
   }
   if fromLocal {