Compare commits: sriram/pos...v5.1.0 (grafana/grafana, 27 commits)
Commits in this range (SHA1):
844bdc53a2, ebffcc21cf, 61de54be5a, 08963b1414, 70f4797a03, 9df8c4fe86, 9149a0c655, cb83ec8945, cadca93d93, fa1c1274db, b58dc6cd49, 66938e80c9, ac22f85d37, 3147bcccdd, cb8d436ea3, 8e7147a7da, f72c4bc0e0, 06d52adf4e, 790fd99676, 3908571baf, 0e8f05e6d5, 111839bdcc, a46d0204d9, 3e93fd1372, a07f525686, 6a2d9e21dc, 9dfc53d5a4
@@ -117,7 +117,7 @@ jobs:
       - image: circleci/python:2.7-stretch
     steps:
       - attach_workspace:
-          at: dist
+          at: .
       - run:
           name: install awscli
           command: 'sudo pip install awscli'
@@ -4,7 +4,7 @@
     "company": "Grafana Labs"
   },
   "name": "grafana",
-  "version": "5.1.0-pre1",
+  "version": "5.1.0",
   "repository": {
     "type": "git",
     "url": "http://github.com/grafana/grafana.git"
@@ -69,6 +69,7 @@ type DashboardAclInfoDTO struct {
   Slug      string `json:"slug"`
   IsFolder  bool   `json:"isFolder"`
   Url       string `json:"url"`
+  Inherited bool   `json:"inherited"`
 }

 func (dto *DashboardAclInfoDTO) hasSameRoleAs(other *DashboardAclInfoDTO) bool {
@@ -154,12 +154,7 @@ func (g *dashboardGuardianImpl) CheckPermissionBeforeUpdate(permission m.Permiss
   // validate overridden permissions to be higher
   for _, a := range acl {
     for _, existingPerm := range existingPermissions {
-      // handle default permissions
-      if existingPerm.DashboardId == -1 {
-        existingPerm.DashboardId = g.dashId
-      }
-
-      if a.DashboardId == existingPerm.DashboardId {
+      if !existingPerm.Inherited {
         continue
       }

@@ -187,13 +182,6 @@ func (g *dashboardGuardianImpl) GetAcl() ([]*m.DashboardAclInfoDTO, error) {
     return nil, err
   }

-  for _, a := range query.Result {
-    // handle default permissions
-    if a.DashboardId == -1 {
-      a.DashboardId = g.dashId
-    }
-  }
-
   g.acl = query.Result
   return g.acl, nil
 }
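The two guardian hunks above work because every ACL entry now carries the Inherited flag computed by the store, so the guardian no longer rewrites default permission ids itself. Below is a minimal, self-contained sketch of the validation idea; the local type, permission values and ids are assumptions for illustration, not the actual Grafana code.

```go
package main

import "fmt"

// aclEntry is a local stand-in (assumption) carrying only what the check needs.
type aclEntry struct {
  Permission int64
  Inherited  bool
}

// canOverride sketches the rule after this change: only permissions inherited
// from the parent folder constrain what may be set on the dashboard; entries
// stored on the dashboard itself are about to be replaced anyway.
func canOverride(requested int64, existing []aclEntry) bool {
  for _, e := range existing {
    if !e.Inherited {
      continue
    }
    if requested < e.Permission {
      return false
    }
  }
  return true
}

func main() {
  existing := []aclEntry{{Permission: 2, Inherited: true}, {Permission: 4, Inherited: false}}
  fmt.Println(canOverride(1, existing)) // false: lower than an inherited permission
  fmt.Println(canOverride(2, existing)) // true
}
```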
@@ -217,13 +217,13 @@ func (sc *scenarioContext) parentFolderPermissionScenario(pt permissionType, per

   switch pt {
   case USER:
-    folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, UserId: userID, Permission: permission}}
+    folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, UserId: userID, Permission: permission, Inherited: true}}
   case TEAM:
-    folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, TeamId: teamID, Permission: permission}}
+    folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, TeamId: teamID, Permission: permission, Inherited: true}}
   case EDITOR:
-    folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, Role: &editorRole, Permission: permission}}
+    folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, Role: &editorRole, Permission: permission, Inherited: true}}
   case VIEWER:
-    folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, Role: &viewerRole, Permission: permission}}
+    folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, Role: &viewerRole, Permission: permission, Inherited: true}}
   }

   permissionScenario(fmt.Sprintf("and parent folder has %s with permission to %s", pt.String(), permission.String()), childDashboardID, sc, folderPermissionList, func(sc *scenarioContext) {
@@ -67,7 +67,8 @@ func GetDashboardAclInfoList(query *m.GetDashboardAclInfoListQuery) error {
       '' as title,
       '' as slug,
       '' as uid,` +
-      falseStr + ` AS is_folder
+      falseStr + ` AS is_folder,` +
+      falseStr + ` AS inherited
     FROM dashboard_acl as da
     WHERE da.dashboard_id = -1`
   query.Result = make([]*m.DashboardAclInfoDTO, 0)
@@ -94,7 +95,8 @@ func GetDashboardAclInfoList(query *m.GetDashboardAclInfoListQuery) error {
       d.title,
       d.slug,
       d.uid,
-      d.is_folder
+      d.is_folder,
+      CASE WHEN (da.dashboard_id = -1 AND d.folder_id > 0) OR da.dashboard_id = d.folder_id THEN ` + dialect.BooleanStr(true) + ` ELSE ` + falseStr + ` END AS inherited
     FROM dashboard as d
       LEFT JOIN dashboard folder on folder.id = d.folder_id
       LEFT JOIN dashboard_acl AS da ON
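The CASE expression added above is what fills the new inherited column: default permissions (dashboard_id = -1) count as inherited only when the dashboard actually lives in a folder, and an ACL row attached to the parent folder is inherited by the dashboard. Here is a small Go sketch of the same rule, with the two ids passed in as plain arguments (an assumption made for illustration):

```go
package main

import "fmt"

// isInherited mirrors the CASE expression above: aclDashboardID stands for
// da.dashboard_id and folderID for d.folder_id.
func isInherited(aclDashboardID, folderID int64) bool {
  return (aclDashboardID == -1 && folderID > 0) || aclDashboardID == folderID
}

func main() {
  fmt.Println(isInherited(-1, 7)) // default permission, dashboard inside a folder: inherited
  fmt.Println(isInherited(7, 7))  // permission stored on the parent folder: inherited
  fmt.Println(isInherited(-1, 0)) // default permission, dashboard not in a folder: not inherited
  fmt.Println(isInherited(3, 7))  // permission stored on the dashboard itself: not inherited
}
```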
@@ -26,6 +26,22 @@ func TestDashboardAclDataAccess(t *testing.T) {
     })

     Convey("Given dashboard folder with default permissions", func() {
+      Convey("When reading folder acl should include default acl", func() {
+        query := m.GetDashboardAclInfoListQuery{DashboardId: savedFolder.Id, OrgId: 1}
+
+        err := GetDashboardAclInfoList(&query)
+        So(err, ShouldBeNil)
+
+        So(len(query.Result), ShouldEqual, 2)
+        defaultPermissionsId := -1
+        So(query.Result[0].DashboardId, ShouldEqual, defaultPermissionsId)
+        So(*query.Result[0].Role, ShouldEqual, m.ROLE_VIEWER)
+        So(query.Result[0].Inherited, ShouldBeFalse)
+        So(query.Result[1].DashboardId, ShouldEqual, defaultPermissionsId)
+        So(*query.Result[1].Role, ShouldEqual, m.ROLE_EDITOR)
+        So(query.Result[1].Inherited, ShouldBeFalse)
+      })
+
       Convey("When reading dashboard acl should include acl for parent folder", func() {
         query := m.GetDashboardAclInfoListQuery{DashboardId: childDash.Id, OrgId: 1}

@@ -36,8 +52,10 @@ func TestDashboardAclDataAccess(t *testing.T) {
         defaultPermissionsId := -1
         So(query.Result[0].DashboardId, ShouldEqual, defaultPermissionsId)
         So(*query.Result[0].Role, ShouldEqual, m.ROLE_VIEWER)
+        So(query.Result[0].Inherited, ShouldBeTrue)
         So(query.Result[1].DashboardId, ShouldEqual, defaultPermissionsId)
         So(*query.Result[1].Role, ShouldEqual, m.ROLE_EDITOR)
+        So(query.Result[1].Inherited, ShouldBeTrue)
       })
     })

@@ -94,7 +112,9 @@ func TestDashboardAclDataAccess(t *testing.T) {

         So(len(query.Result), ShouldEqual, 2)
         So(query.Result[0].DashboardId, ShouldEqual, savedFolder.Id)
+        So(query.Result[0].Inherited, ShouldBeTrue)
         So(query.Result[1].DashboardId, ShouldEqual, childDash.Id)
+        So(query.Result[1].Inherited, ShouldBeFalse)
       })
     })
   })
@@ -118,9 +138,12 @@ func TestDashboardAclDataAccess(t *testing.T) {
         So(len(query.Result), ShouldEqual, 3)
         So(query.Result[0].DashboardId, ShouldEqual, defaultPermissionsId)
         So(*query.Result[0].Role, ShouldEqual, m.ROLE_VIEWER)
+        So(query.Result[0].Inherited, ShouldBeTrue)
         So(query.Result[1].DashboardId, ShouldEqual, defaultPermissionsId)
         So(*query.Result[1].Role, ShouldEqual, m.ROLE_EDITOR)
+        So(query.Result[1].Inherited, ShouldBeTrue)
         So(query.Result[2].DashboardId, ShouldEqual, childDash.Id)
+        So(query.Result[2].Inherited, ShouldBeFalse)
       })
     })

@@ -209,8 +232,10 @@ func TestDashboardAclDataAccess(t *testing.T) {
         defaultPermissionsId := -1
         So(query.Result[0].DashboardId, ShouldEqual, defaultPermissionsId)
         So(*query.Result[0].Role, ShouldEqual, m.ROLE_VIEWER)
+        So(query.Result[0].Inherited, ShouldBeFalse)
         So(query.Result[1].DashboardId, ShouldEqual, defaultPermissionsId)
         So(*query.Result[1].Role, ShouldEqual, m.ROLE_EDITOR)
+        So(query.Result[1].Inherited, ShouldBeFalse)
       })
     })
   })
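For orientation, the assertions added above boil down to the following expected shape for a dashboard that sits in a folder carrying only the default permissions plus one permission of its own; the struct and ids below are illustrative stand-ins, not the real fixtures:

```go
package main

import "fmt"

// aclEntry is a stand-in (assumption) for the fields the assertions inspect.
type aclEntry struct {
  DashboardId int64
  Inherited   bool
}

func main() {
  expected := []aclEntry{
    {DashboardId: -1, Inherited: true}, // default Viewer permission, inherited via the folder
    {DashboardId: -1, Inherited: true}, // default Editor permission, inherited via the folder
    {DashboardId: 2, Inherited: false}, // permission stored on the dashboard itself (id assumed)
  }
  fmt.Println(len(expected)) // 3, matching So(len(query.Result), ShouldEqual, 3)
}
```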
@@ -32,7 +32,7 @@ func TestUserAuth(t *testing.T) {
   So(err, ShouldBeNil)
   _, err = x.Exec("DELETE FROM org WHERE 1=1")
   So(err, ShouldBeNil)
-  _, err = x.Exec("DELETE FROM user WHERE 1=1")
+  _, err = x.Exec("DELETE FROM " + dialect.Quote("user") + " WHERE 1=1")
   So(err, ShouldBeNil)
   _, err = x.Exec("DELETE FROM user_auth WHERE 1=1")
   So(err, ShouldBeNil)
@@ -117,7 +117,7 @@ func TestUserAuth(t *testing.T) {
   So(query.Result.Login, ShouldEqual, "loginuser1")

   // remove user
-  _, err = x.Exec("DELETE FROM user WHERE id=?", query.Result.Id)
+  _, err = x.Exec("DELETE FROM "+dialect.Quote("user")+" WHERE id=?", query.Result.Id)
   So(err, ShouldBeNil)

   // get via user_auth for deleted user
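user is a reserved word in PostgreSQL, and each engine quotes identifiers differently, which is why the raw DELETE statements above now go through dialect.Quote. A rough sketch of the idea, with the quote characters assumed per engine rather than read from the xorm dialects:

```go
package main

import "fmt"

// quoteIdent sketches per-dialect identifier quoting (quote characters assumed).
func quoteIdent(driver, ident string) string {
  if driver == "mysql" {
    return "`" + ident + "`" // MySQL quotes identifiers with backticks
  }
  return `"` + ident + `"` // Postgres and SQLite use double quotes
}

func main() {
  // "user" is reserved in Postgres, so the DELETE must quote it to hit the table.
  fmt.Println("DELETE FROM " + quoteIdent("postgres", "user") + " WHERE 1=1")
}
```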
@@ -256,16 +256,10 @@ func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.
         continue
       }

-      switch columnValue := values[i].(type) {
-      case int64:
-        value = null.FloatFrom(float64(columnValue))
-      case float64:
-        value = null.FloatFrom(columnValue)
-      case nil:
-        value.Valid = false
-      default:
-        return fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", col, columnValue, columnValue)
+      if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
+        return err
       }

       if metricIndex == -1 {
         metric = col
       }
@@ -374,12 +374,12 @@ func TestMSSQL(t *testing.T) {
       _, err = sess.InsertMulti(series)
       So(err, ShouldBeNil)

-      Convey("When doing a metric query using epoch (int64) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT TOP 1 timeInt64 as time, valueOne FROM metric_values ORDER BY time`,
+                "rawSql": `SELECT TOP 1 timeInt64 as time, timeInt64 FROM metric_values ORDER BY time`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -396,12 +396,12 @@ func TestMSSQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int64 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT TOP 1 timeInt64Nullable as time, valueOne FROM metric_values ORDER BY time`,
+                "rawSql": `SELECT TOP 1 timeInt64Nullable as time, timeInt64Nullable FROM metric_values ORDER BY time`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -418,12 +418,12 @@ func TestMSSQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (float64) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float64) as time column and value column (float64) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT TOP 1 timeFloat64 as time, valueOne FROM metric_values ORDER BY time`,
+                "rawSql": `SELECT TOP 1 timeFloat64 as time, timeFloat64 FROM metric_values ORDER BY time`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -440,12 +440,12 @@ func TestMSSQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (float64 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT TOP 1 timeFloat64Nullable as time, valueOne FROM metric_values ORDER BY time`,
+                "rawSql": `SELECT TOP 1 timeFloat64Nullable as time, timeFloat64Nullable FROM metric_values ORDER BY time`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -462,12 +462,12 @@ func TestMSSQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int32) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int32) as time column and value column (int32) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT TOP 1 timeInt32 as time, valueOne FROM metric_values ORDER BY time`,
+                "rawSql": `SELECT TOP 1 timeInt32 as time, timeInt32 FROM metric_values ORDER BY time`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -484,12 +484,12 @@ func TestMSSQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int32 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT TOP 1 timeInt32Nullable as time, valueOne FROM metric_values ORDER BY time`,
+                "rawSql": `SELECT TOP 1 timeInt32Nullable as time, timeInt32Nullable FROM metric_values ORDER BY time`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -506,12 +506,12 @@ func TestMSSQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (float32) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float32) as time column and value column (float32) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT TOP 1 timeFloat32 as time, valueOne FROM metric_values ORDER BY time`,
+                "rawSql": `SELECT TOP 1 timeFloat32 as time, timeFloat32 FROM metric_values ORDER BY time`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -528,12 +528,12 @@ func TestMSSQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
      })

-      Convey("When doing a metric query using epoch (float32 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT TOP 1 timeFloat32Nullable as time, valueOne FROM metric_values ORDER BY time`,
+                "rawSql": `SELECT TOP 1 timeFloat32Nullable as time, timeFloat32Nullable FROM metric_values ORDER BY time`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -265,16 +265,10 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.
         continue
       }

-      switch columnValue := values[i].(type) {
-      case int64:
-        value = null.FloatFrom(float64(columnValue))
-      case float64:
-        value = null.FloatFrom(columnValue)
-      case nil:
-        value.Valid = false
-      default:
-        return fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", col, columnValue, columnValue)
+      if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
+        return err
       }

       if metricIndex == -1 {
         metric = col
       }
@@ -420,12 +420,12 @@ func TestMySQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int64) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT timeInt64 as time, valueOne FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT timeInt64 as time, timeInt64 FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -442,12 +442,12 @@ func TestMySQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int64 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT timeInt64Nullable as time, valueOne FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT timeInt64Nullable as time, timeInt64Nullable FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -464,12 +464,12 @@ func TestMySQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (float64) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float64) as time column and value column (float64) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT timeFloat64 as time, valueOne FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT timeFloat64 as time, timeFloat64 FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -486,12 +486,12 @@ func TestMySQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (float64 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT timeFloat64Nullable as time, valueOne FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT timeFloat64Nullable as time, timeFloat64Nullable FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -508,12 +508,12 @@ func TestMySQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int32) as time column should return metric with time in milliseconds", func() {
+      FocusConvey("When doing a metric query using epoch (int32) as time column and value column (int32) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT timeInt32 as time, valueOne FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT timeInt32 as time, timeInt32 FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -530,12 +530,12 @@ func TestMySQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int32 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT timeInt32Nullable as time, valueOne FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT timeInt32Nullable as time, timeInt32Nullable FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -552,12 +552,12 @@ func TestMySQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (float32) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float32) as time column and value column (float32) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT timeFloat32 as time, valueOne FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT timeFloat32 as time, timeFloat32 FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -574,12 +574,12 @@ func TestMySQL(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
      })

-      Convey("When doing a metric query using epoch (float32 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT timeFloat32Nullable as time, valueOne FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT timeFloat32Nullable as time, timeFloat32Nullable FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -245,16 +245,10 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co
         continue
       }

-      switch columnValue := values[i].(type) {
-      case int64:
-        value = null.FloatFrom(float64(columnValue))
-      case float64:
-        value = null.FloatFrom(columnValue)
-      case nil:
-        value.Valid = false
-      default:
-        return fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", col, columnValue, columnValue)
+      if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
+        return err
       }

       if metricIndex == -1 {
         metric = col
       }
@@ -353,12 +353,12 @@ func TestPostgres(t *testing.T) {
      _, err = sess.InsertMulti(series)
      So(err, ShouldBeNil)

-      Convey("When doing a metric query using epoch (int64) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT "timeInt64" as time, "valueOne" FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT "timeInt64" as time, "timeInt64" FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -375,12 +375,12 @@ func TestPostgres(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int64 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT "timeInt64Nullable" as time, "valueOne" FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT "timeInt64Nullable" as time, "timeInt64Nullable" FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -397,12 +397,12 @@ func TestPostgres(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (float64) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float64) as time column and value column (float64) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT "timeFloat64" as time, "valueOne" FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT "timeFloat64" as time, "timeFloat64" FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -419,12 +419,12 @@ func TestPostgres(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (float64 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT "timeFloat64Nullable" as time, "valueOne" FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT "timeFloat64Nullable" as time, "timeFloat64Nullable" FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -441,12 +441,12 @@ func TestPostgres(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int32) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int32) as time column and value column (int32) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT "timeInt32" as time, "valueOne" FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT "timeInt32" as time, "timeInt32" FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -463,12 +463,12 @@ func TestPostgres(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (int32 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT "timeInt32Nullable" as time, "valueOne" FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT "timeInt32Nullable" as time, "timeInt32Nullable" FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -485,12 +485,12 @@ func TestPostgres(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6))
      })

-      Convey("When doing a metric query using epoch (float32) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float32) as time column and value column (float32) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT "timeFloat32" as time, "valueOne" FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT "timeFloat32" as time, "timeFloat32" FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -507,12 +507,12 @@ func TestPostgres(t *testing.T) {
        So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
      })

-      Convey("When doing a metric query using epoch (float32 nullable) as time column should return metric with time in milliseconds", func() {
+      Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
        query := &tsdb.TsdbQuery{
          Queries: []*tsdb.Query{
            {
              Model: simplejson.NewFromAny(map[string]interface{}{
-                "rawSql": `SELECT "timeFloat32Nullable" as time, "valueOne" FROM metric_values ORDER BY time LIMIT 1`,
+                "rawSql": `SELECT "timeFloat32Nullable" as time, "timeFloat32Nullable" FROM metric_values ORDER BY time LIMIT 1`,
                "format": "time_series",
              }),
              RefId: "A",
@@ -2,9 +2,12 @@ package tsdb

 import (
   "context"
+  "fmt"
   "sync"
   "time"

+  "github.com/grafana/grafana/pkg/components/null"
+
   "github.com/go-xorm/core"
   "github.com/go-xorm/xorm"
   "github.com/grafana/grafana/pkg/components/simplejson"
@@ -185,3 +188,109 @@ func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) {
     }
   }
 }
+
+// ConvertSqlValueColumnToFloat converts timeseries value column to float.
+func ConvertSqlValueColumnToFloat(columnName string, columnValue interface{}) (null.Float, error) {
+  var value null.Float
+
+  switch typedValue := columnValue.(type) {
+  case int:
+    value = null.FloatFrom(float64(typedValue))
+  case *int:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case int64:
+    value = null.FloatFrom(float64(typedValue))
+  case *int64:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case int32:
+    value = null.FloatFrom(float64(typedValue))
+  case *int32:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case int16:
+    value = null.FloatFrom(float64(typedValue))
+  case *int16:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case int8:
+    value = null.FloatFrom(float64(typedValue))
+  case *int8:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case uint:
+    value = null.FloatFrom(float64(typedValue))
+  case *uint:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case uint64:
+    value = null.FloatFrom(float64(typedValue))
+  case *uint64:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case uint32:
+    value = null.FloatFrom(float64(typedValue))
+  case *uint32:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case uint16:
+    value = null.FloatFrom(float64(typedValue))
+  case *uint16:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case uint8:
+    value = null.FloatFrom(float64(typedValue))
+  case *uint8:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case float64:
+    value = null.FloatFrom(typedValue)
+  case *float64:
+    value = null.FloatFromPtr(typedValue)
+  case float32:
+    value = null.FloatFrom(float64(typedValue))
+  case *float32:
+    if typedValue == nil {
+      value.Valid = false
+    } else {
+      value = null.FloatFrom(float64(*typedValue))
+    }
+  case nil:
+    value.Valid = false
+  default:
+    return null.NewFloat(0, false), fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", columnName, typedValue, typedValue)
+  }
+
+  return value, nil
+}
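This helper is what the MSSQL, MySQL and Postgres endpoints earlier in the diff now call for every value column. A small usage sketch follows; the inputs are chosen for illustration and the import path is assumed from the package name above.

```go
package main

import (
  "fmt"

  "github.com/grafana/grafana/pkg/tsdb" // import path assumed from the package name above
)

func main() {
  // A plain numeric value becomes a valid float.
  v, err := tsdb.ConvertSqlValueColumnToFloat("valueOne", int64(42))
  fmt.Println(v.Valid, v.Float64, err) // true 42 <nil>

  // A nil pointer becomes an invalid (NULL) float rather than an error.
  var f *float64
  v, err = tsdb.ConvertSqlValueColumnToFloat("valueOne", f)
  fmt.Println(v.Valid, err) // false <nil>

  // Anything non-numeric is rejected with an error naming the column.
  _, err = tsdb.ConvertSqlValueColumnToFloat("valueOne", "not a number")
  fmt.Println(err != nil) // true
}
```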
@@ -1,10 +1,11 @@
 package tsdb

 import (
-  "fmt"
   "testing"
   "time"

+  "github.com/grafana/grafana/pkg/components/null"
+
   . "github.com/smartystreets/goconvey/convey"
 )

@@ -156,8 +157,6 @@ func TestSqlEngine(t *testing.T) {
        So(fixtures[1].(float64), ShouldEqual, tMilliseconds)
        So(fixtures[2].(float64), ShouldEqual, tMilliseconds)
        So(fixtures[3].(float64), ShouldEqual, tMilliseconds)
-        fmt.Println(fixtures[4].(float64))
-        fmt.Println(tMilliseconds)
        So(fixtures[4].(float64), ShouldEqual, tMilliseconds)
        So(fixtures[5].(float64), ShouldEqual, tMilliseconds)
        So(fixtures[6], ShouldBeNil)
@@ -183,5 +182,101 @@ func TestSqlEngine(t *testing.T) {
        So(fixtures[2], ShouldBeNil)
      })
    })
+
+    Convey("Given row with value columns", func() {
+      intValue := 1
+      int64Value := int64(1)
+      int32Value := int32(1)
+      int16Value := int16(1)
+      int8Value := int8(1)
+      float64Value := float64(1)
+      float32Value := float32(1)
+      uintValue := uint(1)
+      uint64Value := uint64(1)
+      uint32Value := uint32(1)
+      uint16Value := uint16(1)
+      uint8Value := uint8(1)
+
+      fixtures := make([]interface{}, 24)
+      fixtures[0] = intValue
+      fixtures[1] = &intValue
+      fixtures[2] = int64Value
+      fixtures[3] = &int64Value
+      fixtures[4] = int32Value
+      fixtures[5] = &int32Value
+      fixtures[6] = int16Value
+      fixtures[7] = &int16Value
+      fixtures[8] = int8Value
+      fixtures[9] = &int8Value
+      fixtures[10] = float64Value
+      fixtures[11] = &float64Value
+      fixtures[12] = float32Value
+      fixtures[13] = &float32Value
+      fixtures[14] = uintValue
+      fixtures[15] = &uintValue
+      fixtures[16] = uint64Value
+      fixtures[17] = &uint64Value
+      fixtures[18] = uint32Value
+      fixtures[19] = &uint32Value
+      fixtures[20] = uint16Value
+      fixtures[21] = &uint16Value
+      fixtures[22] = uint8Value
+      fixtures[23] = &uint8Value
+
+      var intNilPointer *int
+      var int64NilPointer *int64
+      var int32NilPointer *int32
+      var int16NilPointer *int16
+      var int8NilPointer *int8
+      var float64NilPointer *float64
+      var float32NilPointer *float32
+      var uintNilPointer *uint
+      var uint64NilPointer *uint64
+      var uint32NilPointer *uint32
+      var uint16NilPointer *uint16
+      var uint8NilPointer *uint8
+
+      nilPointerFixtures := make([]interface{}, 12)
+      nilPointerFixtures[0] = intNilPointer
+      nilPointerFixtures[1] = int64NilPointer
+      nilPointerFixtures[2] = int32NilPointer
+      nilPointerFixtures[3] = int16NilPointer
+      nilPointerFixtures[4] = int8NilPointer
+      nilPointerFixtures[5] = float64NilPointer
+      nilPointerFixtures[6] = float32NilPointer
+      nilPointerFixtures[7] = uintNilPointer
+      nilPointerFixtures[8] = uint64NilPointer
+      nilPointerFixtures[9] = uint32NilPointer
+      nilPointerFixtures[10] = uint16NilPointer
+      nilPointerFixtures[11] = uint8NilPointer
+
+      Convey("When converting values to float should return expected value", func() {
+        for _, f := range fixtures {
+          value, _ := ConvertSqlValueColumnToFloat("col", f)
+
+          if !value.Valid {
+            t.Fatalf("Failed to convert %T value, expected a valid float value", f)
+          }
+
+          if value.Float64 != null.FloatFrom(1).Float64 {
+            t.Fatalf("Failed to convert %T value, expected a float value of 1.000, but got %v", f, value)
+          }
+        }
+      })
+
+      Convey("When converting nil pointer values to float should return expected value", func() {
+        for _, f := range nilPointerFixtures {
+          value, err := ConvertSqlValueColumnToFloat("col", f)
+
+          if err != nil {
+            t.Fatalf("Failed to convert %T value, expected a non nil error, but got %v", f, err)
+          }
+
+          if value.Valid {
+            t.Fatalf("Failed to convert %T value, expected an invalid float value", f)
+          }
+        }
+      })
+    })
  })
 }
@@ -41,7 +41,7 @@ export default observer(({ item, removeItem, permissionChanged, itemIndex, folde
     permissionChanged(itemIndex, permissionOption.value, permissionOption.label);
   };

-  const inheritedFromRoot = item.dashboardId === -1 && folderInfo && folderInfo.id === 0;
+  const inheritedFromRoot = item.dashboardId === -1 && !item.inherited;

   return (
     <tr className={setClassNameHelper(item.inherited)}>
@@ -7,7 +7,11 @@ export class Analytics {
   constructor(private $rootScope, private $location) {}

   gaInit() {
-    $.getScript('https://www.google-analytics.com/analytics.js'); // jQuery shortcut
+    $.ajax({
+      url: 'https://www.google-analytics.com/analytics.js',
+      dataType: 'script',
+      cache: true,
+    });
     var ga = ((<any>window).ga =
       (<any>window).ga ||
       function() {
@@ -170,7 +170,9 @@ export class BackendSrv {

     return this.$http(options)
       .then(response => {
-        appEvents.emit('ds-request-response', response);
+        if (!options.silent) {
+          appEvents.emit('ds-request-response', response);
+        }
         return response;
       })
       .catch(err => {
@@ -201,8 +203,9 @@ export class BackendSrv {
         if (err.data && !err.data.message && _.isString(err.data.error)) {
           err.data.message = err.data.error;
         }
+        if (!options.silent) {
         appEvents.emit('ds-request-error', err);
+        }
         throw err;
       })
       .finally(() => {
@@ -4,7 +4,6 @@ import { PanelModel } from '../panel_model';
 import { PanelContainer } from './PanelContainer';
 import templateSrv from 'app/features/templating/template_srv';
 import appEvents from 'app/core/app_events';
-import config from 'app/core/config';

 export interface DashboardRowProps {
   panel: PanelModel;
@@ -95,7 +94,7 @@ export class DashboardRow extends React.Component<DashboardRowProps, any> {
           {title}
           <span className="dashboard-row__panel_count">({hiddenPanels} hidden panels)</span>
         </a>
-        {config.bootData.user.orgRole !== 'Viewer' && (
+        {this.dashboard.meta.canEdit === true && (
           <div className="dashboard-row__actions">
             <a className="pointer" onClick={this.openSettings}>
               <i className="fa fa-cog" />
@@ -2,17 +2,15 @@ import React from 'react';
|
|||||||
import { shallow } from 'enzyme';
|
import { shallow } from 'enzyme';
|
||||||
import { DashboardRow } from '../dashgrid/DashboardRow';
|
import { DashboardRow } from '../dashgrid/DashboardRow';
|
||||||
import { PanelModel } from '../panel_model';
|
import { PanelModel } from '../panel_model';
|
||||||
import config from '../../../core/config';
|
|
||||||
|
|
||||||
describe('DashboardRow', () => {
|
describe('DashboardRow', () => {
|
||||||
let wrapper, panel, getPanelContainer, dashboardMock;
|
let wrapper, panel, getPanelContainer, dashboardMock;
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
dashboardMock = { toggleRow: jest.fn() };
|
dashboardMock = {
|
||||||
|
toggleRow: jest.fn(),
|
||||||
config.bootData = {
|
meta: {
|
||||||
user: {
|
canEdit: true,
|
||||||
orgRole: 'Admin',
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -41,8 +39,8 @@ describe('DashboardRow', () => {
|
|||||||
expect(wrapper.find('.dashboard-row__actions .pointer')).toHaveLength(2);
|
expect(wrapper.find('.dashboard-row__actions .pointer')).toHaveLength(2);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should have zero actions as viewer', () => {
|
it('should have zero actions when cannot edit', () => {
|
||||||
config.bootData.user.orgRole = 'Viewer';
|
dashboardMock.meta.canEdit = false;
|
||||||
panel = new PanelModel({ collapsed: false });
|
panel = new PanelModel({ collapsed: false });
|
||||||
wrapper = shallow(<DashboardRow panel={panel} getPanelContainer={getPanelContainer} />);
|
wrapper = shallow(<DashboardRow panel={panel} getPanelContainer={getPanelContainer} />);
|
||||||
expect(wrapper.find('.dashboard-row__actions .pointer')).toHaveLength(0);
|
expect(wrapper.find('.dashboard-row__actions .pointer')).toHaveLength(0);
|
||||||
|
@@ -10,6 +10,7 @@ export class VariableEditorCtrl {
     $scope.ctrl = {};
     $scope.namePattern = /^(?!__).*$/;
     $scope._ = _;
+    $scope.optionsLimit = 20;

     $scope.refreshOptions = [
       { value: 0, text: 'Never' },
@@ -96,6 +97,7 @@ export class VariableEditorCtrl {
     };

     $scope.runQuery = function() {
+      $scope.optionsLimit = 20;
       return variableSrv.updateOptions($scope.current).catch(err => {
         if (err.data && err.data.message) {
           err.message = err.data.message;
@@ -165,6 +167,10 @@ export class VariableEditorCtrl {
     $scope.removeVariable = function(variable) {
       variableSrv.removeVariable(variable);
     };
+
+    $scope.showMoreOptions = function() {
+      $scope.optionsLimit += 20;
+    };
   }
 }
@@ -280,11 +280,14 @@
       </div>

       <div class="gf-form-group" ng-show="current.options.length">
-        <h5>Preview of values (shows max 20)</h5>
+        <h5>Preview of values</h5>
         <div class="gf-form-inline">
-          <div class="gf-form" ng-repeat="option in current.options | limitTo: 20">
+          <div class="gf-form" ng-repeat="option in current.options | limitTo: optionsLimit">
             <span class="gf-form-label">{{option.text}}</span>
           </div>
+          <div class="gf-form" ng-if="current.options.length > optionsLimit">
+            <a class="gf-form-label btn-secondary" ng-click="showMoreOptions()">Show more</a>
+          </div>
         </div>
       </div>
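Together, the controller and template changes above implement a limit-and-grow preview: at most `optionsLimit` values are rendered, the limit resets to 20 whenever the query is re-run, and the Show more link raises it by 20. The same pattern, sketched framework-free in TypeScript (all names here are illustrative):

// Minimal sketch of the limit-and-grow preview pattern.
class OptionsPreview {
  optionsLimit = 20;

  constructor(private options: string[]) {}

  // values actually shown in the preview
  get visible(): string[] {
    return this.options.slice(0, this.optionsLimit);
  }

  // bound to the "Show more" link
  showMore() {
    this.optionsLimit += 20;
  }

  // mirrors runQuery(): new results reset the limit
  reset(options: string[]) {
    this.options = options;
    this.optionsLimit = 20;
  }
}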
@@ -5,6 +5,7 @@ import kbn from 'app/core/utils/kbn';
 import * as dateMath from 'app/core/utils/datemath';
 import PrometheusMetricFindQuery from './metric_find_query';
 import { ResultTransformer } from './result_transformer';
+import { BackendSrv } from 'app/core/services/backend_srv';

 export function prometheusRegularEscape(value) {
   return value.replace(/'/g, "\\\\'");
@@ -29,7 +30,7 @@ export class PrometheusDatasource {
   resultTransformer: ResultTransformer;

   /** @ngInject */
-  constructor(instanceSettings, private $q, private backendSrv, private templateSrv, private timeSrv) {
+  constructor(instanceSettings, private $q, private backendSrv: BackendSrv, private templateSrv, private timeSrv) {
     this.type = 'prometheus';
     this.editorSrc = 'app/features/prometheus/partials/query.editor.html';
     this.name = instanceSettings.name;
@@ -43,13 +44,13 @@ export class PrometheusDatasource {
     this.resultTransformer = new ResultTransformer(templateSrv);
   }

-  _request(method, url, data?, requestId?) {
+  _request(url, data?, options?: any) {
     var options: any = {
       url: this.url + url,
-      method: method,
-      requestId: requestId,
+      method: this.httpMethod,
+      ...options,
     };
-    if (method === 'GET') {
+    if (options.method === 'GET') {
       if (!_.isEmpty(data)) {
         options.url =
           options.url +
@@ -81,6 +82,11 @@ export class PrometheusDatasource {
     return this.backendSrv.datasourceRequest(options);
   }

+  // Use this for tab completion features, wont publish response to other components
+  metadataRequest(url) {
+    return this._request(url, null, { method: 'GET', silent: true });
+  }
+
   interpolateQueryExpr(value, variable, defaultFormatFn) {
     // if no multi or include all do not regexEscape
     if (!variable.multi && !variable.includeAll) {
@@ -206,7 +212,7 @@ export class PrometheusDatasource {
       end: end,
       step: query.step,
     };
-    return this._request(this.httpMethod, url, data, query.requestId);
+    return this._request(url, data, { requestId: query.requestId });
   }

   performInstantQuery(query, time) {
@@ -215,7 +221,7 @@ export class PrometheusDatasource {
       query: query.expr,
       time: time,
     };
-    return this._request(this.httpMethod, url, data, query.requestId);
+    return this._request(url, data, { requestId: query.requestId });
   }

   performSuggestQuery(query, cache = false) {
@@ -229,7 +235,7 @@ export class PrometheusDatasource {
       );
     }

-    return this._request('GET', url).then(result => {
+    return this.metadataRequest(url).then(result => {
       this.metricsNameCache = {
         data: result.data.data,
         expire: Date.now() + 60 * 1000,
@@ -323,4 +329,8 @@ export class PrometheusDatasource {
     }
     return Math.ceil(date.valueOf() / 1000);
   }
+
+  getOriginalMetricName(labelData) {
+    return this.resultTransformer.getOriginalMetricName(labelData);
+  }
 }
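After this refactor, `_request(url, data?, options?)` defaults to the data source's configured HTTP method and merges per-call overrides, while `metadataRequest(url)` always issues a silent GET for editor and tab-completion traffic. A rough usage sketch; the wrapper function, import path, and literal values are illustrative only:

import { PrometheusDatasource } from 'app/plugins/datasource/prometheus/datasource';

// Illustrative: which helper a caller should reach for after the refactor.
export function exampleRequests(ds: PrometheusDatasource) {
  const end = Math.floor(Date.now() / 1000);
  const start = end - 3600;

  // Query traffic: method follows the data source's httpMethod setting,
  // and requestId lets an in-flight request be cancelled on refresh.
  ds._request('/api/v1/query_range', { query: 'up', start, end, step: 15 }, { requestId: 'panel-2' });

  // Metadata traffic: always a silent GET, never broadcast to other components.
  return ds.metadataRequest('/api/v1/label/__name__/values').then(res => res.data.data);
}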
@@ -46,7 +46,7 @@ export default class PrometheusMetricFindQuery {
       // return label values globally
       url = '/api/v1/label/' + label + '/values';

-      return this.datasource._request('GET', url).then(function(result) {
+      return this.datasource.metadataRequest(url).then(function(result) {
         return _.map(result.data.data, function(value) {
           return { text: value };
         });
@@ -56,7 +56,7 @@ export default class PrometheusMetricFindQuery {
       var end = this.datasource.getPrometheusTime(this.range.to, true);
       url = '/api/v1/series?match[]=' + encodeURIComponent(metric) + '&start=' + start + '&end=' + end;

-      return this.datasource._request('GET', url).then(function(result) {
+      return this.datasource.metadataRequest(url).then(function(result) {
         var _labels = _.map(result.data.data, function(metric) {
           return metric[label] || '';
         }).filter(function(label) {
@@ -76,7 +76,7 @@ export default class PrometheusMetricFindQuery {
   metricNameQuery(metricFilterPattern) {
     var url = '/api/v1/label/__name__/values';

-    return this.datasource._request('GET', url).then(function(result) {
+    return this.datasource.metadataRequest(url).then(function(result) {
       return _.chain(result.data.data)
         .filter(function(metricName) {
           var r = new RegExp(metricFilterPattern);
@@ -120,8 +120,8 @@ export default class PrometheusMetricFindQuery {
     var url = '/api/v1/series?match[]=' + encodeURIComponent(query) + '&start=' + start + '&end=' + end;

     var self = this;
-    return this.datasource._request('GET', url).then(function(result) {
-      return _.map(result.data.data, function(metric) {
+    return this.datasource.metadataRequest(url).then(function(result) {
+      return _.map(result.data.data, metric => {
         return {
           text: self.datasource.getOriginalMetricName(metric),
           expandable: true,
@@ -14,6 +14,7 @@ describe('PrometheusDatasource', () => {
   };

   ctx.backendSrvMock = {};
+
   ctx.templateSrvMock = {
     replace: a => a,
   };
@@ -23,6 +24,45 @@ describe('PrometheusDatasource', () => {
     ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock);
   });

+  describe('Datasource metadata requests', () => {
+    it('should perform a GET request with the default config', () => {
+      ctx.backendSrvMock.datasourceRequest = jest.fn();
+      ctx.ds.metadataRequest('/foo');
+      expect(ctx.backendSrvMock.datasourceRequest.mock.calls.length).toBe(1);
+      expect(ctx.backendSrvMock.datasourceRequest.mock.calls[0][0].method).toBe('GET');
+    });
+
+    it('should still perform a GET request with the DS HTTP method set to POST', () => {
+      ctx.backendSrvMock.datasourceRequest = jest.fn();
+      const postSettings = _.cloneDeep(instanceSettings);
+      postSettings.jsonData.httpMethod = 'POST';
+      const ds = new PrometheusDatasource(postSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock);
+      ds.metadataRequest('/foo');
+      expect(ctx.backendSrvMock.datasourceRequest.mock.calls.length).toBe(1);
+      expect(ctx.backendSrvMock.datasourceRequest.mock.calls[0][0].method).toBe('GET');
+    });
+  });
+
+  describe('When performing performSuggestQuery', () => {
+    it('should cache response', async () => {
+      ctx.backendSrvMock.datasourceRequest.mockReturnValue(
+        Promise.resolve({
+          status: 'success',
+          data: { data: ['value1', 'value2', 'value3'] },
+        })
+      );
+
+      let results = await ctx.ds.performSuggestQuery('value', true);
+
+      expect(results).toHaveLength(3);
+
+      ctx.backendSrvMock.datasourceRequest.mockReset();
+      results = await ctx.ds.performSuggestQuery('value', true);
+
+      expect(results).toHaveLength(3);
+    });
+  });
+
   describe('When converting prometheus histogram to heatmap format', () => {
     beforeEach(() => {
       ctx.query = {
@@ -0,0 +1,205 @@
+import moment from 'moment';
+import { PrometheusDatasource } from '../datasource';
+import PrometheusMetricFindQuery from '../metric_find_query';
+import q from 'q';
+
+describe('PrometheusMetricFindQuery', function() {
+  let instanceSettings = {
+    url: 'proxied',
+    directUrl: 'direct',
+    user: 'test',
+    password: 'mupp',
+    jsonData: { httpMethod: 'GET' },
+  };
+  const raw = {
+    from: moment.utc('2018-04-25 10:00'),
+    to: moment.utc('2018-04-25 11:00'),
+  };
+  let ctx: any = {
+    backendSrvMock: {
+      datasourceRequest: jest.fn(() => Promise.resolve({})),
+    },
+    templateSrvMock: {
+      replace: a => a,
+    },
+    timeSrvMock: {
+      timeRange: () => ({
+        from: raw.from,
+        to: raw.to,
+        raw: raw,
+      }),
+    },
+  };
+
+  ctx.setupMetricFindQuery = (data: any) => {
+    ctx.backendSrvMock.datasourceRequest.mockReturnValue(Promise.resolve({ status: 'success', data: data.response }));
+    return new PrometheusMetricFindQuery(ctx.ds, data.query, ctx.timeSrvMock);
+  };
+
+  beforeEach(() => {
+    ctx.backendSrvMock.datasourceRequest.mockReset();
+    ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock);
+  });
+
+  describe('When performing metricFindQuery', () => {
+    it('label_values(resource) should generate label search query', async () => {
+      const query = ctx.setupMetricFindQuery({
+        query: 'label_values(resource)',
+        response: {
+          data: ['value1', 'value2', 'value3'],
+        },
+      });
+      const results = await query.process();
+
+      expect(results).toHaveLength(3);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({
+        method: 'GET',
+        url: 'proxied/api/v1/label/resource/values',
+        silent: true,
+      });
+    });
+
+    it('label_values(metric, resource) should generate series query with correct time', async () => {
+      const query = ctx.setupMetricFindQuery({
+        query: 'label_values(metric, resource)',
+        response: {
+          data: [
+            { __name__: 'metric', resource: 'value1' },
+            { __name__: 'metric', resource: 'value2' },
+            { __name__: 'metric', resource: 'value3' },
+          ],
+        },
+      });
+      const results = await query.process();
+
+      expect(results).toHaveLength(3);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({
+        method: 'GET',
+        url: `proxied/api/v1/series?match[]=metric&start=${raw.from.unix()}&end=${raw.to.unix()}`,
+        silent: true,
+      });
+    });
+
+    it('label_values(metric{label1="foo", label2="bar", label3="baz"}, resource) should generate series query with correct time', async () => {
+      const query = ctx.setupMetricFindQuery({
+        query: 'label_values(metric{label1="foo", label2="bar", label3="baz"}, resource)',
+        response: {
+          data: [
+            { __name__: 'metric', resource: 'value1' },
+            { __name__: 'metric', resource: 'value2' },
+            { __name__: 'metric', resource: 'value3' },
+          ],
+        },
+      });
+      const results = await query.process();
+
+      expect(results).toHaveLength(3);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({
+        method: 'GET',
+        url: `proxied/api/v1/series?match[]=${encodeURIComponent(
+          'metric{label1="foo", label2="bar", label3="baz"}'
+        )}&start=${raw.from.unix()}&end=${raw.to.unix()}`,
+        silent: true,
+      });
+    });
+
+    it('label_values(metric, resource) result should not contain empty string', async () => {
+      const query = ctx.setupMetricFindQuery({
+        query: 'label_values(metric, resource)',
+        response: {
+          data: [
+            { __name__: 'metric', resource: 'value1' },
+            { __name__: 'metric', resource: 'value2' },
+            { __name__: 'metric', resource: '' },
+          ],
+        },
+      });
+      const results = await query.process();
+
+      expect(results).toHaveLength(2);
+      expect(results[0].text).toBe('value1');
+      expect(results[1].text).toBe('value2');
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({
+        method: 'GET',
+        url: `proxied/api/v1/series?match[]=metric&start=${raw.from.unix()}&end=${raw.to.unix()}`,
+        silent: true,
+      });
+    });
+
+    it('metrics(metric.*) should generate metric name query', async () => {
+      const query = ctx.setupMetricFindQuery({
+        query: 'metrics(metric.*)',
+        response: {
+          data: ['metric1', 'metric2', 'metric3', 'nomatch'],
+        },
+      });
+      const results = await query.process();
+
+      expect(results).toHaveLength(3);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({
+        method: 'GET',
+        url: 'proxied/api/v1/label/__name__/values',
+        silent: true,
+      });
+    });
+
+    it('query_result(metric) should generate metric name query', async () => {
+      const query = ctx.setupMetricFindQuery({
+        query: 'query_result(metric)',
+        response: {
+          data: {
+            resultType: 'vector',
+            result: [
+              {
+                metric: { __name__: 'metric', job: 'testjob' },
+                value: [1443454528.0, '3846'],
+              },
+            ],
+          },
+        },
+      });
+      const results = await query.process();
+
+      expect(results).toHaveLength(1);
+      expect(results[0].text).toBe('metric{job="testjob"} 3846 1443454528000');
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({
+        method: 'GET',
+        url: `proxied/api/v1/query?query=metric&time=${raw.to.unix()}`,
+        requestId: undefined,
+      });
+    });
+
+    it('up{job="job1"} should fallback using generate series query', async () => {
+      const query = ctx.setupMetricFindQuery({
+        query: 'up{job="job1"}',
+        response: {
+          data: [
+            { __name__: 'up', instance: '127.0.0.1:1234', job: 'job1' },
+            { __name__: 'up', instance: '127.0.0.1:5678', job: 'job1' },
+            { __name__: 'up', instance: '127.0.0.1:9102', job: 'job1' },
+          ],
+        },
+      });
+      const results = await query.process();
+
+      expect(results).toHaveLength(3);
+      expect(results[0].text).toBe('up{instance="127.0.0.1:1234",job="job1"}');
+      expect(results[1].text).toBe('up{instance="127.0.0.1:5678",job="job1"}');
+      expect(results[2].text).toBe('up{instance="127.0.0.1:9102",job="job1"}');
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1);
+      expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({
+        method: 'GET',
+        url: `proxied/api/v1/series?match[]=${encodeURIComponent(
+          'up{job="job1"}'
+        )}&start=${raw.from.unix()}&end=${raw.to.unix()}`,
+        silent: true,
+      });
+    });
+  });
+});
@@ -1,181 +0,0 @@
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
-
-import moment from 'moment';
-import helpers from 'test/specs/helpers';
-import { PrometheusDatasource } from '../datasource';
-import PrometheusMetricFindQuery from '../metric_find_query';
-
-describe('PrometheusMetricFindQuery', function() {
-  var ctx = new helpers.ServiceTestContext();
-  var instanceSettings = {
-    url: 'proxied',
-    directUrl: 'direct',
-    user: 'test',
-    password: 'mupp',
-    jsonData: { httpMethod: 'GET' },
-  };
-
-  beforeEach(angularMocks.module('grafana.core'));
-  beforeEach(angularMocks.module('grafana.services'));
-  beforeEach(
-    angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
-      ctx.$q = $q;
-      ctx.$httpBackend = $httpBackend;
-      ctx.$rootScope = $rootScope;
-      ctx.ds = $injector.instantiate(PrometheusDatasource, {
-        instanceSettings: instanceSettings,
-      });
-      $httpBackend.when('GET', /\.html$/).respond('');
-    })
-  );
-
-  describe('When performing metricFindQuery', function() {
-    var results;
-    var response;
-    it('label_values(resource) should generate label search query', function() {
-      response = {
-        status: 'success',
-        data: ['value1', 'value2', 'value3'],
-      };
-      ctx.$httpBackend.expect('GET', 'proxied/api/v1/label/resource/values').respond(response);
-      var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(resource)', ctx.timeSrv);
-      pm.process().then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-      ctx.$rootScope.$apply();
-      expect(results.length).to.be(3);
-    });
-    it('label_values(metric, resource) should generate series query', function() {
-      response = {
-        status: 'success',
-        data: [
-          { __name__: 'metric', resource: 'value1' },
-          { __name__: 'metric', resource: 'value2' },
-          { __name__: 'metric', resource: 'value3' },
-        ],
-      };
-      ctx.$httpBackend.expect('GET', /proxied\/api\/v1\/series\?match\[\]=metric&start=.*&end=.*/).respond(response);
-      var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(metric, resource)', ctx.timeSrv);
-      pm.process().then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-      ctx.$rootScope.$apply();
-      expect(results.length).to.be(3);
-    });
-    it('label_values(metric, resource) should pass correct time', function() {
-      ctx.timeSrv.setTime({
-        from: moment.utc('2011-01-01'),
-        to: moment.utc('2015-01-01'),
-      });
-      ctx.$httpBackend
-        .expect('GET', /proxied\/api\/v1\/series\?match\[\]=metric&start=1293840000&end=1420070400/)
-        .respond(response);
-      var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(metric, resource)', ctx.timeSrv);
-      pm.process().then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-      ctx.$rootScope.$apply();
-    });
-    it('label_values(metric{label1="foo", label2="bar", label3="baz"}, resource) should generate series query', function() {
-      response = {
-        status: 'success',
-        data: [
-          { __name__: 'metric', resource: 'value1' },
-          { __name__: 'metric', resource: 'value2' },
-          { __name__: 'metric', resource: 'value3' },
-        ],
-      };
-      ctx.$httpBackend.expect('GET', /proxied\/api\/v1\/series\?match\[\]=metric&start=.*&end=.*/).respond(response);
-      var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(metric, resource)', ctx.timeSrv);
-      pm.process().then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-      ctx.$rootScope.$apply();
-      expect(results.length).to.be(3);
-    });
-    it('label_values(metric, resource) result should not contain empty string', function() {
-      response = {
-        status: 'success',
-        data: [
-          { __name__: 'metric', resource: 'value1' },
-          { __name__: 'metric', resource: 'value2' },
-          { __name__: 'metric', resource: '' },
-        ],
-      };
-      ctx.$httpBackend.expect('GET', /proxied\/api\/v1\/series\?match\[\]=metric&start=.*&end=.*/).respond(response);
-      var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(metric, resource)', ctx.timeSrv);
-      pm.process().then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-      ctx.$rootScope.$apply();
-      expect(results.length).to.be(2);
-      expect(results[0].text).to.be('value1');
-      expect(results[1].text).to.be('value2');
-    });
-    it('metrics(metric.*) should generate metric name query', function() {
-      response = {
-        status: 'success',
-        data: ['metric1', 'metric2', 'metric3', 'nomatch'],
-      };
-      ctx.$httpBackend.expect('GET', 'proxied/api/v1/label/__name__/values').respond(response);
-      var pm = new PrometheusMetricFindQuery(ctx.ds, 'metrics(metric.*)', ctx.timeSrv);
-      pm.process().then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-      ctx.$rootScope.$apply();
-      expect(results.length).to.be(3);
-    });
-    it('query_result(metric) should generate metric name query', function() {
-      response = {
-        status: 'success',
-        data: {
-          resultType: 'vector',
-          result: [
-            {
-              metric: { __name__: 'metric', job: 'testjob' },
-              value: [1443454528.0, '3846'],
-            },
-          ],
-        },
-      };
-      ctx.$httpBackend.expect('GET', /proxied\/api\/v1\/query\?query=metric&time=.*/).respond(response);
-      var pm = new PrometheusMetricFindQuery(ctx.ds, 'query_result(metric)', ctx.timeSrv);
-      pm.process().then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-      ctx.$rootScope.$apply();
-      expect(results.length).to.be(1);
-      expect(results[0].text).to.be('metric{job="testjob"} 3846 1443454528000');
-    });
-  });
-
-  describe('When performing performSuggestQuery', function() {
-    var results;
-    var response;
-    it('cache response', function() {
-      response = {
-        status: 'success',
-        data: ['value1', 'value2', 'value3'],
-      };
-      ctx.$httpBackend.expect('GET', 'proxied/api/v1/label/__name__/values').respond(response);
-      ctx.ds.performSuggestQuery('value', true).then(function(data) {
-        results = data;
-      });
-      ctx.$httpBackend.flush();
-      ctx.$rootScope.$apply();
-      expect(results.length).to.be(3);
-      ctx.ds.performSuggestQuery('value', true).then(function(data) {
-        // get from cache, no need to flush
-        results = data;
-        expect(results.length).to.be(3);
-      });
-    });
-  });
-});
@@ -16,6 +16,7 @@ describe('PermissionsStore', () => {
       permissionName: 'View',
       teamId: 1,
       team: 'MyTestTeam',
+      inherited: true,
     },
     {
       id: 5,
@@ -224,8 +224,6 @@ const prepareServerResponse = (response, dashboardId: number, isFolder: boolean,
 };

 const prepareItem = (item, dashboardId: number, isFolder: boolean, isInRoot: boolean) => {
-  item.inherited = !isFolder && !isInRoot && dashboardId !== item.dashboardId;
-
   item.sortRank = 0;
   if (item.userId > 0) {
     item.name = item.userLogin;
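With the matching test change above, the store no longer derives inheritance by comparing dashboard ids; the ACL items returned by the API are expected to carry an `inherited` flag themselves. A hedged sketch of a consumer in TypeScript (the interface and helper are illustrative, not the store's actual types):

// Illustrative: permission items now arrive with `inherited` set by the backend.
interface AclItem {
  userId?: number;
  teamId?: number;
  role?: string;
  permission: number;
  inherited?: boolean;
}

export function splitByInheritance(items: AclItem[]) {
  return {
    inherited: items.filter(item => item.inherited),
    local: items.filter(item => !item.inherited),
  };
}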
@@ -256,17 +256,15 @@

 // Caret to indicate there is a submenu
 .dropdown-submenu > a::after {
-  display: block;
-  content: ' ';
-  float: right;
-  width: 0;
-  height: 0;
-  border-color: transparent;
-  border-style: solid;
-  border-width: 5px 0 5px 5px;
-  border-left-color: $text-color-weak;
-  margin-top: 5px;
-  margin-right: -4px;
+  position: absolute;
+  top: 35%;
+  right: $input-padding-x;
+  background-color: transparent;
+  color: $text-color-weak;
+  font: normal normal normal $font-size-sm/1 FontAwesome;
+  content: '\f0da';
+  pointer-events: none;
+  font-size: 11px;
 }
 .dropdown-submenu:hover > a::after {
   border-left-color: $dropdownLinkColorHover;
@@ -341,19 +341,19 @@ $input-border: 1px solid $input-border-color;
   margin-right: $gf-form-margin;
   position: relative;
   background-color: $input-bg;
-  padding-right: $input-padding-x;
   border: $input-border;
   border-radius: $input-border-radius;

   &::after {
     position: absolute;
     top: 35%;
-    right: $input-padding-x/2;
+    right: $input-padding-x;
     background-color: transparent;
     color: $input-color;
     font: normal normal normal $font-size-sm/1 FontAwesome;
     content: '\f0d7';
     pointer-events: none;
+    font-size: 11px;
   }

   .gf-form-input {
@@ -33,7 +33,6 @@ div.flot-text {
   border: $panel-border;
   position: relative;
   border-radius: 3px;
-  height: 100%;

   &.panel-transparent {
     background-color: transparent;
@@ -1,4 +1,4 @@
 %_signature gpg
-%_gpg_path /home/ubuntu/.gnupg
+%_gpg_path /root/.gnupg
 %_gpg_name Grafana
 %_gpgbin /usr/bin/gpg