Mirror of https://github.com/grafana/grafana.git (synced 2025-12-22 04:34:27 +08:00)

Compare commits: sriram/pos...v5.4.0 (19 commits)
Commits (SHA1 only; the author and date columns were empty in this view):

69c5191926
99ee3bbe5a
01840cbd70
23b19543bd
bb4e5934fb
fd3821d2f1
8b1d0b14b6
23c6bea21b
e3abefa19f
4ee92bd59c
780e5153d0
be9058d7ef
3301f96811
1c59669da0
1ad60be47b
9ec0af73ec
8190d10827
18b5f630f7
9df26af3db
@@ -510,6 +510,7 @@ workflows:
       - grafana-docker-release:
           requires:
             - build-all
+            - build-all-enterprise
             - test-backend
             - test-frontend
             - codespell
@@ -50,7 +50,8 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi
 
 WORKDIR $GF_PATHS_HOME
 
-RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates && \
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get install -qq -y libfontconfig ca-certificates && \
     apt-get autoremove -y && \
     rm -rf /var/lib/apt/lists/*
 
@@ -133,9 +133,9 @@ Macro example | Description
 ------------ | -------------
 *$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
 *$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
-*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
-*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
-*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
+*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN FROM_UNIXTIME(1494410783) AND FROM_UNIXTIME(1494410983)*
+*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *FROM_UNIXTIME(1494410783)*
+*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *FROM_UNIXTIME(1494410983)*
 *$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
 *$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value.
 *$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points.
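Note: the documented example values change because the MySQL macro engine now emits FROM_UNIXTIME(...) expressions for $__timeFilter(), $__timeFrom(), and $__timeTo() instead of quoted RFC3339 literals (see the mySqlMacroEngine hunk further down); the docs are updated to match the real output.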
@@ -4,7 +4,7 @@
     "company": "Grafana Labs"
   },
   "name": "grafana",
-  "version": "5.4.0-pre1",
+  "version": "5.4.0",
   "repository": {
     "type": "git",
     "url": "http://github.com/grafana/grafana.git"
@@ -25,7 +25,8 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi
 
 WORKDIR $GF_PATHS_HOME
 
-RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates curl && \
+RUN apt-get update && apt-get -y upgrade && \
+    apt-get install -qq -y libfontconfig ca-certificates curl && \
    apt-get autoremove -y && \
    rm -rf /var/lib/apt/lists/*
 
@@ -1,9 +1,17 @@
 #!/bin/sh
 set -e
 
-_grafana_tag=$1
+_raw_grafana_tag=$1
 _docker_repo=${2:-grafana/grafana-enterprise}
 
+if echo "$_raw_grafana_tag" | grep -q "^v"; then
+  _grafana_tag=$(echo "${_raw_grafana_tag}" | cut -d "v" -f 2)
+else
+  _grafana_tag="${_raw_grafana_tag}"
+fi
+
+echo "Building and deploying ${_docker_repo}:${_grafana_tag}"
+
 docker build \
   --tag "${_docker_repo}:${_grafana_tag}"\
   --no-cache=true \
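With the normalization above, passing either v5.4.0 or 5.4.0 as the first argument yields the same image tag (grafana/grafana-enterprise:5.4.0 for the default repo), and the new echo makes the resolved tag visible in build logs.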
@@ -277,10 +277,6 @@ func PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand) Response {
         return Error(500, "Failed to save dashboard", err)
     }
 
-    if err == m.ErrDashboardFailedToUpdateAlertData {
-        return Error(500, "Invalid alert data. Cannot save dashboard", err)
-    }
-
     c.TimeRequest(metrics.M_Api_Dashboard_Save)
     return JSON(200, util.DynMap{
         "status": "success",
@@ -727,7 +727,6 @@ func TestDashboardApiEndpoint(t *testing.T) {
     {SaveError: m.ErrDashboardTitleEmpty, ExpectedStatusCode: 400},
     {SaveError: m.ErrDashboardFolderCannotHaveParent, ExpectedStatusCode: 400},
     {SaveError: alerting.ValidationError{Reason: "Mu"}, ExpectedStatusCode: 422},
-    {SaveError: m.ErrDashboardFailedToUpdateAlertData, ExpectedStatusCode: 500},
     {SaveError: m.ErrDashboardFailedGenerateUniqueUid, ExpectedStatusCode: 500},
     {SaveError: m.ErrDashboardTypeMismatch, ExpectedStatusCode: 400},
     {SaveError: m.ErrDashboardFolderWithSameNameAsDashboard, ExpectedStatusCode: 400},
@@ -21,7 +21,6 @@ var (
     ErrDashboardVersionMismatch = errors.New("The dashboard has been changed by someone else")
     ErrDashboardTitleEmpty = errors.New("Dashboard title cannot be empty")
     ErrDashboardFolderCannotHaveParent = errors.New("A Dashboard Folder cannot be added to another folder")
-    ErrDashboardFailedToUpdateAlertData = errors.New("Failed to save alert data")
     ErrDashboardsWithSameSlugExists = errors.New("Multiple dashboards with the same slug exists")
     ErrDashboardFailedGenerateUniqueUid = errors.New("Failed to generate unique dashboard id")
     ErrDashboardTypeMismatch = errors.New("Dashboard cannot be changed to a folder")
@@ -165,7 +165,7 @@ func (dr *dashboardServiceImpl) updateAlerting(cmd *models.SaveDashboardCommand,
     }
 
     if err := bus.Dispatch(&alertCmd); err != nil {
-        return models.ErrDashboardFailedToUpdateAlertData
+        return err
     }
 
     return nil
@@ -126,6 +126,18 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
     }
 
     eg.Go(func() error {
+        defer func() {
+            if err := recover(); err != nil {
+                plog.Error("Execute Query Panic", "error", err, "stack", log.Stack(1))
+                if theErr, ok := err.(error); ok {
+                    resultChan <- &tsdb.QueryResult{
+                        RefId: query.RefId,
+                        Error: theErr,
+                    }
+                }
+            }
+        }()
+
         queryRes, err := e.executeQuery(ectx, query, queryContext)
         if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" {
             return err
@@ -146,6 +158,17 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
     for region, getMetricDataQuery := range getMetricDataQueries {
         q := getMetricDataQuery
         eg.Go(func() error {
+            defer func() {
+                if err := recover(); err != nil {
+                    plog.Error("Execute Get Metric Data Query Panic", "error", err, "stack", log.Stack(1))
+                    if theErr, ok := err.(error); ok {
+                        resultChan <- &tsdb.QueryResult{
+                            Error: theErr,
+                        }
+                    }
+                }
+            }()
+
             queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext)
             if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" {
                 return err
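The two recover blocks above share one shape. A minimal, self-contained sketch of the pattern (not Grafana code; names here are illustrative) shows why it matters: an unrecovered panic inside an errgroup goroutine would crash the whole process, while recovering lets the panic surface as an ordinary per-query error:

package main

import (
	"fmt"

	"golang.org/x/sync/errgroup"
)

func main() {
	// Buffered so the recovering goroutine can send without a reader in place.
	resultChan := make(chan error, 1)
	var eg errgroup.Group

	eg.Go(func() error {
		defer func() {
			if r := recover(); r != nil {
				// Forward the panic value as an error instead of crashing.
				if err, ok := r.(error); ok {
					resultChan <- err
				}
			}
		}()
		panic(fmt.Errorf("simulated executeQuery panic"))
	})

	_ = eg.Wait()
	fmt.Println(<-resultChan) // simulated executeQuery panic
}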
@@ -188,8 +211,8 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatch
         return nil, err
     }
 
-    if endTime.Before(startTime) {
-        return nil, fmt.Errorf("Invalid time range: End time can't be before start time")
+    if !startTime.Before(endTime) {
+        return nil, fmt.Errorf("Invalid time range: Start time must be before end time")
     }
 
     params := &cloudwatch.GetMetricStatisticsInput{
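Note the check also became stricter: endTime.Before(startTime) accepted equal start and end times, while !startTime.Before(endTime) rejects them, which the new "End time equals start time" test below covers explicitly.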
@@ -1,9 +1,13 @@
 package cloudwatch
 
 import (
+    "context"
     "testing"
     "time"
 
+    "github.com/grafana/grafana/pkg/models"
+    "github.com/grafana/grafana/pkg/tsdb"
+
     "github.com/aws/aws-sdk-go/aws"
     "github.com/aws/aws-sdk-go/service/cloudwatch"
     "github.com/grafana/grafana/pkg/components/null"
@@ -14,6 +18,24 @@ import (
 func TestCloudWatch(t *testing.T) {
     Convey("CloudWatch", t, func() {
 
+        Convey("executeQuery", func() {
+            e := &CloudWatchExecutor{
+                DataSource: &models.DataSource{
+                    JsonData: simplejson.New(),
+                },
+            }
+
+            Convey("End time before start time should result in error", func() {
+                _, err := e.executeQuery(context.Background(), &CloudWatchQuery{}, &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("now-1h", "now-2h")})
+                So(err.Error(), ShouldEqual, "Invalid time range: Start time must be before end time")
+            })
+
+            Convey("End time equals start time should result in error", func() {
+                _, err := e.executeQuery(context.Background(), &CloudWatchQuery{}, &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("now-1h", "now-1h")})
+                So(err.Error(), ShouldEqual, "Invalid time range: Start time must be before end time")
+            })
+        })
+
         Convey("can parse cloudwatch json model", func() {
             json := `
             {
@@ -47,6 +47,7 @@ func init() {
     "AWS/CloudFront": {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},
     "AWS/CloudSearch": {"SuccessfulRequests", "SearchableDocuments", "IndexUtilization", "Partitions"},
     "AWS/CloudHSM": {"HsmUnhealthy", "HsmTemperature", "HsmKeysSessionOccupied", "HsmKeysTokenOccupied", "HsmSslCtxsOccupied", "HsmSessionCount", "HsmUsersAvailable", "HsmUsersMax", "InterfaceEth2OctetsInput", "InterfaceEth2OctetsOutput"},
+    "AWS/CodeBuild": {"BuildDuration", "Builds", "DownloadSourceDuration", "Duration", "FailedBuilds", "FinalizingDuration", "InstallDuration", "PostBuildDuration", "PreBuildDuration", "ProvisioningDuration", "QueuedDuration", "SubmittedDuration", "SucceededBuilds", "UploadArtifactsDuration"},
     "AWS/Connect": {"CallsBreachingConcurrencyQuota", "CallBackNotDialableNumber", "CallRecordingUploadError", "CallsPerInterval", "ConcurrentCalls", "ConcurrentCallsPercentage", "ContactFlowErrors", "ContactFlowFatalErrors", "LongestQueueWaitTime", "MissedCalls", "MisconfiguredPhoneNumbers", "PublicSigningKeyUsage", "QueueCapacityExceededError", "QueueSize", "ThrottledCalls", "ToInstancePacketLossRate"},
     "AWS/DMS": {"FreeableMemory", "WriteIOPS", "ReadIOPS", "WriteThroughput", "ReadThroughput", "WriteLatency", "ReadLatency", "SwapUsage", "NetworkTransmitThroughput", "NetworkReceiveThroughput", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "CDCIncomingChanges", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CDCLatencySource", "CDCLatencyTarget"},
     "AWS/DX": {"ConnectionState", "ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelTx", "ConnectionLightLevelRx"},
@@ -123,6 +124,7 @@ func init() {
     "AWS/CloudFront": {"DistributionId", "Region"},
     "AWS/CloudSearch": {},
     "AWS/CloudHSM": {"Region", "ClusterId", "HsmId"},
+    "AWS/CodeBuild": {"ProjectName"},
     "AWS/Connect": {"InstanceId", "MetricGroup", "Participant", "QueueName", "Stream Type", "Type of Connection"},
     "AWS/DMS": {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
     "AWS/DX": {"ConnectionId"},
@@ -65,7 +65,7 @@ var NewClient = func(ctx context.Context, ds *models.DataSource, timeRange *tsdb
     clientLog.Debug("Creating new client", "version", version, "timeField", timeField, "indices", strings.Join(indices, ", "))
 
     switch version {
-    case 2, 5, 56:
+    case 2, 5, 56, 60:
         return &baseClientImpl{
             ctx: ctx,
             ds: ds,
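Adding 60 to the case list means Elasticsearch 6.0 data sources are served by the same baseClientImpl as versions 2, 5, and 5.6; the new version-60 test below pins that behavior.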
@@ -90,6 +90,19 @@ func TestClient(t *testing.T) {
             So(err, ShouldBeNil)
             So(c.GetVersion(), ShouldEqual, 56)
         })
+
+        Convey("When version 60 should return v6.0 client", func() {
+            ds := &models.DataSource{
+                JsonData: simplejson.NewFromAny(map[string]interface{}{
+                    "esVersion": 60,
+                    "timeField": "@timestamp",
+                }),
+            }
+
+            c, err := NewClient(context.Background(), ds, nil)
+            So(err, ShouldBeNil)
+            So(c.GetVersion(), ShouldEqual, 60)
+        })
     })
 
     Convey("Given a fake http client", func() {
@@ -153,8 +166,6 @@ func TestClient(t *testing.T) {
     jBody, err := simplejson.NewJson(bodyBytes)
     So(err, ShouldBeNil)
 
-    fmt.Println("body", string(headerBytes))
-
     So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
     So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
     So(jHeader.Get("search_type").MustString(), ShouldEqual, "count")
@@ -209,8 +220,6 @@ func TestClient(t *testing.T) {
     jBody, err := simplejson.NewJson(bodyBytes)
     So(err, ShouldBeNil)
 
-    fmt.Println("body", string(headerBytes))
-
     So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
     So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
     So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")
@@ -265,8 +274,6 @@ func TestClient(t *testing.T) {
     jBody, err := simplejson.NewJson(bodyBytes)
     So(err, ShouldBeNil)
 
-    fmt.Println("body", string(headerBytes))
-
     So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
     So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
     So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")
@@ -66,6 +66,10 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
         }
 
         return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+    case "__timeFrom":
+        return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+    case "__timeTo":
+        return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
     case "__timeGroup":
         if len(args) < 2 {
             return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -52,6 +52,20 @@ func TestMacroEngine(t *testing.T) {
         So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
     })
 
+    Convey("interpolate __timeFrom function", func() {
+        sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+        So(err, ShouldBeNil)
+
+        So(sql, ShouldEqual, "select '2018-04-12T18:00:00Z'")
+    })
+
+    Convey("interpolate __timeTo function", func() {
+        sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+        So(err, ShouldBeNil)
+
+        So(sql, ShouldEqual, "select '2018-04-12T18:05:00Z'")
+    })
+
     Convey("interpolate __timeGroup function", func() {
         sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
         So(err, ShouldBeNil)
@@ -61,6 +61,10 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
         }
 
         return fmt.Sprintf("%s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", args[0], m.timeRange.GetFromAsSecondsEpoch(), m.timeRange.GetToAsSecondsEpoch()), nil
+    case "__timeFrom":
+        return fmt.Sprintf("FROM_UNIXTIME(%d)", m.timeRange.GetFromAsSecondsEpoch()), nil
+    case "__timeTo":
+        return fmt.Sprintf("FROM_UNIXTIME(%d)", m.timeRange.GetToAsSecondsEpoch()), nil
     case "__timeGroup":
         if len(args) < 2 {
             return "", fmt.Errorf("macro %v needs time column and interval", name)
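A standalone sketch (not Grafana code) of what the new MySQL cases produce, using the epoch values from the docs hunk above; FROM_UNIXTIME converts the epoch in MySQL's session time zone, presumably the motivation for moving away from fixed UTC string literals:

package main

import "fmt"

// mysqlTimeMacro mirrors the fmt.Sprintf calls added in the hunk above,
// with the time range reduced to plain epoch seconds for the sketch.
func mysqlTimeMacro(name string, fromEpoch, toEpoch int64) string {
	switch name {
	case "__timeFrom":
		return fmt.Sprintf("FROM_UNIXTIME(%d)", fromEpoch)
	case "__timeTo":
		return fmt.Sprintf("FROM_UNIXTIME(%d)", toEpoch)
	}
	return ""
}

func main() {
	fmt.Println(mysqlTimeMacro("__timeFrom", 1494410783, 1494410983)) // FROM_UNIXTIME(1494410783)
	fmt.Println(mysqlTimeMacro("__timeTo", 1494410783, 1494410983))   // FROM_UNIXTIME(1494410983)
}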
@@ -63,6 +63,20 @@ func TestMacroEngine(t *testing.T) {
         So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
     })
 
+    Convey("interpolate __timeFrom function", func() {
+        sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+        So(err, ShouldBeNil)
+
+        So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+    })
+
+    Convey("interpolate __timeTo function", func() {
+        sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+        So(err, ShouldBeNil)
+
+        So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+    })
+
     Convey("interpolate __unixEpochFilter function", func() {
         sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
         So(err, ShouldBeNil)
@@ -761,7 +761,7 @@ func TestMySQL(t *testing.T) {
     {
         DataSource: &models.DataSource{JsonData: simplejson.New()},
         Model: simplejson.NewFromAny(map[string]interface{}{
-            "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+            "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeTo() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
             "format": "time_series",
         }),
         RefId: "A",
@@ -773,7 +773,7 @@ func TestMySQL(t *testing.T) {
     So(err, ShouldBeNil)
     queryResult := resp.Results["A"]
     So(queryResult.Error, ShouldBeNil)
-    So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+    So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > FROM_UNIXTIME(1521118500) OR time < FROM_UNIXTIME(1521118800) OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
 })
 
@@ -87,6 +87,10 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
         }
 
         return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
+    case "__timeFrom":
+        return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
+    case "__timeTo":
+        return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
     case "__timeGroup":
         if len(args) < 2 {
             return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
@@ -44,6 +44,20 @@ func TestMacroEngine(t *testing.T) {
         So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
     })
 
+    Convey("interpolate __timeFrom function", func() {
+        sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom()")
+        So(err, ShouldBeNil)
+
+        So(sql, ShouldEqual, "select '2018-04-12T18:00:00Z'")
+    })
+
+    Convey("interpolate __timeTo function", func() {
+        sql, err := engine.Interpolate(query, timeRange, "select $__timeTo()")
+        So(err, ShouldBeNil)
+
+        So(sql, ShouldEqual, "select '2018-04-12T18:05:00Z'")
+    })
+
     Convey("interpolate __timeGroup function pre 5.3 compatibility", func() {
 
         sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value")
@@ -196,8 +196,6 @@ var Interpolate = func(query *Query, timeRange *TimeRange, sql string) (string,
 
     sql = strings.Replace(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1)
     sql = strings.Replace(sql, "$__interval", interval.Text, -1)
-    sql = strings.Replace(sql, "$__timeFrom()", fmt.Sprintf("'%s'", timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), -1)
-    sql = strings.Replace(sql, "$__timeTo()", fmt.Sprintf("'%s'", timeRange.GetToAsTimeUTC().Format(time.RFC3339)), -1)
    sql = strings.Replace(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.GetFromAsSecondsEpoch()), -1)
    sql = strings.Replace(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.GetToAsSecondsEpoch()), -1)
 
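Taken together with the per-dialect cases added above, this completes the move of $__timeFrom()/$__timeTo() out of the shared engine: MSSQL and Postgres keep emitting quoted RFC3339 literals from their own evaluateMacro, MySQL emits FROM_UNIXTIME(...), and the now-redundant shared-engine tests are deleted in the next hunk. A hypothetical mini-version of that split (values taken from the mysql_test hunk above):

package main

import "fmt"

// macroEngine stands in for a per-dialect evaluateMacro: each engine owns
// its own __timeFrom expansion now that the shared Interpolate no longer does.
type macroEngine func(fromEpoch int64, fromRFC3339 string) string

func main() {
	mysql := macroEngine(func(epoch int64, _ string) string {
		return fmt.Sprintf("FROM_UNIXTIME(%d)", epoch)
	})
	postgres := macroEngine(func(_ int64, rfc string) string {
		return fmt.Sprintf("'%s'", rfc)
	})

	fmt.Println(mysql(1521118500, "2018-03-15T12:55:00Z"))    // FROM_UNIXTIME(1521118500)
	fmt.Println(postgres(1521118500, "2018-03-15T12:55:00Z")) // '2018-03-15T12:55:00Z'
}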
@@ -44,20 +44,6 @@ func TestSqlEngine(t *testing.T) {
         So(sql, ShouldEqual, "select 60000 ")
     })
 
-    Convey("interpolate __timeFrom function", func() {
-        sql, err := Interpolate(query, timeRange, "select $__timeFrom()")
-        So(err, ShouldBeNil)
-
-        So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
-    })
-
-    Convey("interpolate __timeTo function", func() {
-        sql, err := Interpolate(query, timeRange, "select $__timeTo()")
-        So(err, ShouldBeNil)
-
-        So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
-    })
-
     Convey("interpolate __unixEpochFrom function", func() {
         sql, err := Interpolate(query, timeRange, "select $__unixEpochFrom()")
         So(err, ShouldBeNil)
@@ -428,10 +428,16 @@ kbn.valueFormats.hex0x = (value, decimals) => {
 };
 
 kbn.valueFormats.sci = (value, decimals) => {
+  if (value == null) {
+    return '';
+  }
   return value.toExponential(decimals);
 };
 
 kbn.valueFormats.locale = (value, decimals) => {
+  if (value == null) {
+    return '';
+  }
   return value.toLocaleString(undefined, { maximumFractionDigits: decimals });
 };
 
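The guards make kbn.valueFormats.sci and kbn.valueFormats.locale return an empty string for null (and, via the loose == comparison, undefined) values instead of throwing on value.toExponential or value.toLocaleString.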
@@ -584,8 +590,8 @@ kbn.valueFormats.flowcms = kbn.formatBuilders.fixedUnit('cms');
 kbn.valueFormats.flowcfs = kbn.formatBuilders.fixedUnit('cfs');
 kbn.valueFormats.flowcfm = kbn.formatBuilders.fixedUnit('cfm');
 kbn.valueFormats.litreh = kbn.formatBuilders.fixedUnit('l/h');
-kbn.valueFormats.flowlpm = kbn.formatBuilders.decimalSIPrefix('L');
-kbn.valueFormats.flowmlpm = kbn.formatBuilders.decimalSIPrefix('L', -1);
+kbn.valueFormats.flowlpm = kbn.formatBuilders.decimalSIPrefix('l/min');
+kbn.valueFormats.flowmlpm = kbn.formatBuilders.decimalSIPrefix('mL/min', -1);
 
 // Angle
 kbn.valueFormats.degree = kbn.formatBuilders.fixedUnit('°');
@@ -64,9 +64,9 @@
 </div>
 <div class="gf-form">
   <metric-segment-model property="conditionModel.evaluator.type" options="ctrl.evalFunctions" custom="false" css-class="query-keyword" on-change="ctrl.evaluatorTypeChanged(conditionModel.evaluator)"></metric-segment-model>
-  <input class="gf-form-input max-width-9" type="number" step="any" ng-hide="conditionModel.evaluator.params.length === 0" ng-model="conditionModel.evaluator.params[0]" ng-change="ctrl.evaluatorParamsChanged()"></input>
+  <input class="gf-form-input max-width-9" type="number" step="any" ng-hide="conditionModel.evaluator.params.length === 0" ng-model="conditionModel.evaluator.params[0]" ng-change="ctrl.evaluatorParamsChanged()">
   <label class="gf-form-label query-keyword" ng-show="conditionModel.evaluator.params.length === 2">TO</label>
-  <input class="gf-form-input max-width-9" type="number" step="any" ng-if="conditionModel.evaluator.params.length === 2" ng-model="conditionModel.evaluator.params[1]" ng-change="ctrl.evaluatorParamsChanged()"></input>
+  <input class="gf-form-input max-width-9" type="number" step="any" ng-if="conditionModel.evaluator.params.length === 2" ng-model="conditionModel.evaluator.params[1]" ng-change="ctrl.evaluatorParamsChanged()">
 </div>
 <div class="gf-form">
   <label class="gf-form-label">
@@ -8,9 +8,9 @@ const alertQueryDef = new QueryPartDef({
     {
       name: 'from',
       type: 'string',
-      options: ['1s', '10s', '1m', '5m', '10m', '15m', '1h', '24h', '48h'],
+      options: ['10s', '1m', '5m', '10m', '15m', '1h', '24h', '48h'],
     },
-    { name: 'to', type: 'string', options: ['now'] },
+    { name: 'to', type: 'string', options: ['now', 'now-1m', 'now-5m', 'now-10m', 'now-1h'] },
   ],
   defaultParams: ['#A', '15m', 'now', 'avg'],
 });
@@ -2,22 +2,8 @@ import coreModule from 'app/core/core_module';
 import _ from 'lodash';
 import * as queryDef from './query_def';
 
-export function elasticBucketAgg() {
-  return {
-    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html',
-    controller: 'ElasticBucketAggCtrl',
-    restrict: 'E',
-    scope: {
-      target: '=',
-      index: '=',
-      onChange: '&',
-      getFields: '&',
-    },
-  };
-}
-
 export class ElasticBucketAggCtrl {
-  /** @nginject */
+  /** @ngInject */
   constructor($scope, uiSegmentSrv, $q, $rootScope) {
     const bucketAggs = $scope.target.bucketAggs;
 
@@ -226,5 +212,18 @@ export class ElasticBucketAggCtrl {
   }
 }
 
+export function elasticBucketAgg() {
+  return {
+    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/bucket_agg.html',
+    controller: ElasticBucketAggCtrl,
+    restrict: 'E',
+    scope: {
+      target: '=',
+      index: '=',
+      onChange: '&',
+      getFields: '&',
+    },
+  };
+}
+
 coreModule.directive('elasticBucketAgg', elasticBucketAgg);
-coreModule.controller('ElasticBucketAggCtrl', ElasticBucketAggCtrl);
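Moving the factory below the class lets the directive reference ElasticBucketAggCtrl directly rather than by registered name, so the separate coreModule.controller registration can be dropped; fixing /** @nginject */ to /** @ngInject */ matters because dependency-annotation tooling matches the exact @ngInject spelling. The metric_agg module below gets the same treatment.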
@@ -2,22 +2,8 @@ import coreModule from 'app/core/core_module';
 import _ from 'lodash';
 import * as queryDef from './query_def';
 
-export function elasticMetricAgg() {
-  return {
-    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/metric_agg.html',
-    controller: 'ElasticMetricAggCtrl',
-    restrict: 'E',
-    scope: {
-      target: '=',
-      index: '=',
-      onChange: '&',
-      getFields: '&',
-      esVersion: '=',
-    },
-  };
-}
-
 export class ElasticMetricAggCtrl {
+  /** @ngInject */
   constructor($scope, uiSegmentSrv, $q, $rootScope) {
     const metricAggs = $scope.target.metrics;
     $scope.metricAggTypes = queryDef.getMetricAggTypes($scope.esVersion);
@@ -209,5 +195,19 @@ export class ElasticMetricAggCtrl {
   }
 }
 
+export function elasticMetricAgg() {
+  return {
+    templateUrl: 'public/app/plugins/datasource/elasticsearch/partials/metric_agg.html',
+    controller: ElasticMetricAggCtrl,
+    restrict: 'E',
+    scope: {
+      target: '=',
+      index: '=',
+      onChange: '&',
+      getFields: '&',
+      esVersion: '=',
+    },
+  };
+}
+
 coreModule.directive('elasticMetricAgg', elasticMetricAgg);
-coreModule.controller('ElasticMetricAggCtrl', ElasticMetricAggCtrl);
@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
 Macros:
 - $__time(column) -> UNIX_TIMESTAMP(column) as time (or as time_sec)
 - $__timeEpoch(column) -> UNIX_TIMESTAMP(column) as time (or as time_sec)
-- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__timeFilter(column) -> column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877)
 - $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -> '2017-04-21T05:01:17Z'
-- $__timeTo() -> '2017-04-21T05:01:17Z'
+- $__timeFrom() -> FROM_UNIXTIME(1492750877)
+- $__timeTo() -> FROM_UNIXTIME(1492750877)
 - $__unixEpochFrom() -> 1492750877
 - $__unixEpochTo() -> 1492750877
 </pre>
@@ -151,7 +151,7 @@ Table:
 Macros:
 - $__time(column) -> UNIX_TIMESTAMP(column) as time_sec
 - $__timeEpoch(column) -> UNIX_TIMESTAMP(column) as time_sec
-- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
+- $__timeFilter(column) -> column BETWEEN FROM_UNIXTIME(1492750877) AND FROM_UNIXTIME(1492750877)
 - $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877
 - $__timeGroup(column,'5m'[, fillvalue]) -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
      by setting fillvalue grafana will fill in missing values according to the interval
@@ -169,8 +169,8 @@ GROUP BY 1
 ORDER BY 1
 
 Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -> '2017-04-21T05:01:17Z'
-- $__timeTo() -> '2017-04-21T05:01:17Z'
+- $__timeFrom() -> FROM_UNIXTIME(1492750877)
+- $__timeTo() -> FROM_UNIXTIME(1492750877)
 - $__unixEpochFrom() -> 1492750877
 - $__unixEpochTo() -> 1492750877
 </pre>
@@ -58,15 +58,7 @@ class GraphElement {
 
     // panel events
     this.ctrl.events.on('panel-teardown', this.onPanelTeardown.bind(this));
-
-    /**
-     * Split graph rendering into two parts.
-     * First, calculate series stats in buildFlotPairs() function. Then legend rendering started
-     * (see ctrl.events.on('render') in legend.ts).
-     * When legend is rendered it emits 'legend-rendering-complete' and graph rendered.
-     */
     this.ctrl.events.on('render', this.onRender.bind(this));
-    this.ctrl.events.on('legend-rendering-complete', this.onLegendRenderingComplete.bind(this));
 
     // global events
     appEvents.on('graph-hover', this.onGraphHover.bind(this), scope);
@@ -85,11 +77,20 @@ class GraphElement {
     if (!this.data) {
       return;
     }
 
     this.annotations = this.ctrl.annotations || [];
     this.buildFlotPairs(this.data);
     const graphHeight = this.elem.height();
     updateLegendValues(this.data, this.panel, graphHeight);
+
+    if (!this.panel.legend.show) {
+      if (this.legendElem.hasChildNodes()) {
+        ReactDOM.unmountComponentAtNode(this.legendElem);
+      }
+      this.renderPanel();
+      return;
+    }
+
     const { values, min, max, avg, current, total } = this.panel.legend;
     const { alignAsTable, rightSide, sideWidth, sort, sortDesc, hideEmpty, hideZero } = this.panel.legend;
     const legendOptions = { alignAsTable, rightSide, sideWidth, sort, sortDesc, hideEmpty, hideZero };
@@ -104,12 +105,9 @@ class GraphElement {
       onColorChange: this.ctrl.onColorChange,
       onToggleAxis: this.ctrl.onToggleAxis,
     };
-    const legendReactElem = React.createElement(Legend, legendProps);
-    ReactDOM.render(legendReactElem, this.legendElem, () => this.onLegendRenderingComplete());
-  }
 
-  onLegendRenderingComplete() {
-    this.render_panel();
+    const legendReactElem = React.createElement(Legend, legendProps);
+    ReactDOM.render(legendReactElem, this.legendElem, () => this.renderPanel());
   }
 
   onGraphHover(evt) {
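Net effect on the render flow: the 'legend-rendering-complete' round trip is gone, the legend's ReactDOM.render callback calls renderPanel directly, and when the legend is hidden any previously mounted legend component is unmounted before the panel renders.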
@@ -281,7 +279,7 @@ class GraphElement {
   }
 
   // Function for rendering panel
-  render_panel() {
+  renderPanel() {
     this.panelWidth = this.elem.width();
     if (this.shouldAbortRender()) {
       return;
@@ -125,7 +125,7 @@ describe('grafanaGraph', () => {
 
     //Emulate functions called by event listeners
     link.buildFlotPairs(link.data);
-    link.render_panel();
+    link.renderPanel();
     ctx.plotData = ctrl.plot.mock.calls[0][1];
 
     ctx.plotOptions = ctrl.plot.mock.calls[0][2];
@@ -6,8 +6,8 @@ EXTRA_OPTS="$@"
 
 # Right now we hack this in into the publish script.
 # Eventually we might want to keep a list of all previous releases somewhere.
-_releaseNoteUrl="https://community.grafana.com/t/release-notes-v5-3-x/10244"
-_whatsNewUrl="http://docs.grafana.org/guides/whats-new-in-v5-3/"
+_releaseNoteUrl="https://community.grafana.com/t/release-notes-v5-4-x/12215"
+_whatsNewUrl="http://docs.grafana.org/guides/whats-new-in-v5-4/"
 
 ./scripts/build/release_publisher/release_publisher \
   --wn ${_whatsNewUrl} \