Compare commits

...

34 Commits

Author SHA1 Message Date
Carl Bergquist
c613a317a1 Merge pull request #13589 from grafana/cp_v5.3.0
Cherry-picks for v5.3.0 stable
2018-10-10 12:20:21 +02:00
Torkel Ödegaard
8d01075223 fix: minor ux fixes in v5.3 2018-10-10 12:00:55 +02:00
Mitsuhiro Tanda
4e2607b8e7 fix id validation
(cherry picked from commit 6e32c9bb3f)
2018-10-10 11:57:37 +02:00
bergquist
6611aefea4 release 5.3.0 2018-10-10 10:54:47 +02:00
Marcus Efraimsson
04ba06ccad cloudwatch: return early if execute query returns error
This will stop a segfault from happening

(cherry picked from commit 0612ce9b75)
2018-10-10 10:50:16 +02:00
Mitsuhiro Tanda
002da27e98 add test for automatically unit set
(cherry picked from commit f0fb8123ae)
2018-10-10 10:50:16 +02:00
Mitsuhiro Tanda
55712d61f4 fix crach bug
(cherry picked from commit 37e749f6da)
2018-10-10 10:50:16 +02:00
Mitsuhiro Tanda
cc57377f03 set unit for CloudWatch GetMetricStatistics result
(cherry picked from commit 6ed1cbd5bb)
2018-10-10 10:50:16 +02:00
Daniel Lee
5250c84ca7 stackdriver metric name fix. Fixes #13562
Sets metric name even when the metric does not have a displayName field. Closes #13562.

(cherry picked from commit 6fce178ec7)
2018-10-10 10:42:35 +02:00
Erik Sundell
b67e69bc52 stackdriver: improve filter docs for wildcards and regular expressions
(cherry picked from commit 11b9f9691c)
2018-10-10 10:39:07 +02:00
Erik Sundell
0d0df00b8e stackdriver: always use regex full match for =~ and !=~operator
(cherry picked from commit 8d53799bcd)
2018-10-10 10:39:07 +02:00
Erik Sundell
25f255f560 stackdriver: add tests from regex matching
(cherry picked from commit 7e6a5c0a74)
2018-10-10 10:39:07 +02:00
Erik Sundell
a109c53cea stackdriver: always use regex full match for =~ and !=~operator
(cherry picked from commit 46ca306c2f)
2018-10-10 10:39:07 +02:00
Erik Sundell
322535a2b7 stackdriver: test build filter string
(cherry picked from commit a3122a4b85)
2018-10-10 10:39:07 +02:00
Erik Sundell
93fb427310 stackdriver: test that no interpolation is done when there are no wildcards
(cherry picked from commit 5f7795aa1f)
2018-10-10 10:39:07 +02:00
Erik Sundell
84094b5051 stackdriver: remove debug logging
(cherry picked from commit 2a0d7a8803)
2018-10-10 10:39:06 +02:00
Erik Sundell
0ef06d467a stackdriver: add more tests
(cherry picked from commit 035be6cbbe)
2018-10-10 10:39:06 +02:00
Erik Sundell
dee26f3d2f stackdriver: fix broken substring. also adds tests
(cherry picked from commit 68332c5951)
2018-10-10 10:39:06 +02:00
Erik Sundell
897cf51e75 stackdriver: remove not necessary helper functions
(cherry picked from commit 2e665fba0f)
2018-10-10 10:39:06 +02:00
Erik Sundell
a4e148e300 stackdriver: interpolate stackdriver filter wildcards when asterix is used in filter
(cherry picked from commit 4d8f594d31)
2018-10-10 10:39:06 +02:00
Torkel Ödegaard
56c32963d6 ux: minor update to look of stackdriver query help
(cherry picked from commit 3fa83d2755)
2018-10-10 10:34:24 +02:00
Mitsuhiro Tanda
221341b3e8 add test
(cherry picked from commit c2c0cdb49c)
2018-10-10 10:33:25 +02:00
Mitsuhiro Tanda
464e0cf540 stackdriver heatmap support
(cherry picked from commit 6770f2e940)
2018-10-10 10:33:14 +02:00
bergquist
d275ea05a5 build: fix for invalid pathing for release publisher
(cherry picked from commit 96a0c9c56d)
2018-10-03 16:54:42 +02:00
Carl Bergquist
69b4bf8125 Merge pull request #13510 from bergquist/cp_v5.3.x
Cherry-pick 5.3.0-beta3 fixes
2018-10-03 16:06:49 +02:00
Daniel Lee
62f85c3772 stackdriver: adds missing nginject attribute
(cherry picked from commit 6d8a3ce1a3)
2018-10-03 15:26:57 +02:00
bergquist
2e0165e80a release v5.3.0-beta3 2018-10-03 14:22:28 +02:00
Johannes Schill
272840e0cb Fix issue with updating role permissions #13507
(cherry picked from commit 97802f30ae)
2018-10-03 14:20:30 +02:00
bergquist
694c738b6d build: automatically publish releases to grafana.com.
(cherry picked from commit add6cee742)
2018-10-03 14:19:02 +02:00
Marcus Efraimsson
a493a773a2 Merge branch 'master' into v5.3.x 2018-10-02 16:03:30 +02:00
Torkel Ödegaard
c6f7ae4e02 Merge pull request #13472 from grafana/v5.3.0-beta2
V5.3.0 beta2
2018-10-01 12:12:21 +02:00
Leonard Gram
a049b22cb0 release v5.3.0-beta2. 2018-10-01 11:40:43 +02:00
Leonard Gram
c026e6f320 Merge remote-tracking branch 'origin/master' into v5.3.0-beta2
* origin/master: (397 commits)
  stackdriver: set default view parameter to FULL
  stackdriver: no tags for annotations (yet)
  stackdriver: add help section for annotations
  stackdriver: revert an accidental commit for text template variable
  Added test for url state in Explore
  Make Explore a pure component
  stackdriver: remove metric.category alias pattern
  stackdriver: remove commented code
  stackdriver: unit test group by and aggregation dropdown changes
  stackdriver: make it impossible to select no aggregation when a group by is selected
  Explore: Store UI state in URL
  stackdriver: add relevant error message for when a user tries to create a template variable
  stackdriver: make sure labels are loaded when service is changed in dropdown
  stackdriver: change info logging to debug logging
  stackdriver: change pattern for annotation to metric.value
  stackdriver: add support for bool values
  stackdriver: add support for int64 values
  stackdriver: use correct default value for alignment period
  stackdriver: fix reducer names
  stackdriver: fix froamt annotation text for value
  ...
2018-10-01 11:20:52 +02:00
Marcus Efraimsson
e05033a693 v5.3.0-beta1 release 2018-09-06 14:38:22 +02:00
31 changed files with 921 additions and 94 deletions

View File

@@ -158,14 +158,18 @@ jobs:
name: sha-sum packages
command: 'go run build.go sha-dist'
- run:
name: Build Grafana.com publisher
name: Build Grafana.com master publisher
command: 'go build -o scripts/publish scripts/build/publish.go'
- run:
name: Build Grafana.com release publisher
command: 'cd scripts/build/release_publisher && go build -o release_publisher .'
- persist_to_workspace:
root: .
paths:
- dist/grafana*
- scripts/*.sh
- scripts/publish
- scripts/build/release_publisher/release_publisher
build:
docker:
@@ -299,8 +303,8 @@ jobs:
name: deploy to s3
command: 'aws s3 sync ./dist s3://$BUCKET_NAME/release'
- run:
name: Trigger Windows build
command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} release'
name: Deploy to Grafana.com
command: './scripts/build/publish.sh'
workflows:
version: 2

2
.gitignore vendored
View File

@@ -73,3 +73,5 @@ debug.test
/devenv/bulk-dashboards/*.json
/devenv/bulk_alerting_dashboards/*.json
/scripts/build/release_publisher/release_publisher

View File

@@ -74,7 +74,17 @@ Click on the links above and click the `Enable` button:
Choose a metric from the `Metric` dropdown.
To add a filter, click the plus icon and choose a field to filter by and enter a filter value e.g. `instance_name = grafana-1`
### Filter
To add a filter, click the plus icon and choose a field to filter by and enter a filter value e.g. `instance_name = grafana-1`. You can remove the filter by clicking on the filter name and select `--remove filter--`.
#### Simple wildcards
When the operator is set to `=` or `!=` it is possible to add wildcards to the filter value field. E.g. `us-*` will capture all values that start with "us-", and `*central-a` will capture all values that end with "central-a". `*-central-*` captures all values that contain the substring "-central-". Simple wildcards are less expensive than regular expressions.
#### Regular expressions
When the operator is set to `=~` or `!=~` it is possible to add regular expressions to the filter value field. E.g. `us-central[1-3]-[af]` would match all values that start with "us-central", are followed by a number in the range of 1 to 3, then a dash, and then either an "a" or an "f". Leading and trailing slashes are not needed when creating regular expressions.
### Aggregation
@@ -105,20 +115,20 @@ The Alias By field allows you to control the format of the legend keys. The defa
#### Metric Type Patterns
Alias Pattern | Description | Example Result
----------------- | ---------------------------- | -------------
`{{metric.type}}` | returns the full Metric Type | `compute.googleapis.com/instance/cpu/utilization`
`{{metric.name}}` | returns the metric name part | `instance/cpu/utilization`
`{{metric.service}}` | returns the service part | `compute`
| Alias Pattern | Description | Example Result |
| -------------------- | ---------------------------- | ------------------------------------------------- |
| `{{metric.type}}` | returns the full Metric Type | `compute.googleapis.com/instance/cpu/utilization` |
| `{{metric.name}}` | returns the metric name part | `instance/cpu/utilization` |
| `{{metric.service}}` | returns the service part | `compute` |
#### Label Patterns
In the Group By dropdown, you can see a list of metric and resource labels for a metric. These can be included in the legend key using alias patterns.
Alias Pattern Format | Description | Alias Pattern Example | Example Result
---------------------- | ---------------------------------- | ---------------------------- | -------------
`{{metric.label.xxx}}` | returns the metric label value | `{{metric.label.instance_name}}` | `grafana-1-prod`
`{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}` | `us-east1-b`
| Alias Pattern Format | Description | Alias Pattern Example | Example Result |
| ------------------------ | -------------------------------- | -------------------------------- | ---------------- |
| `{{metric.label.xxx}}` | returns the metric label value | `{{metric.label.instance_name}}` | `grafana-1-prod` |
| `{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}` | `us-east1-b` |
Example Alias By: `{{metric.type}} - {{metric.labels.instance_name}}`

View File

@@ -4,7 +4,7 @@
"company": "Grafana Labs"
},
"name": "grafana",
"version": "5.3.0-pre1",
"version": "5.3.0",
"repository": {
"type": "git",
"url": "http://github.com/grafana/grafana.git"

View File

@@ -129,10 +129,13 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" {
return err
}
result.Results[queryRes.RefId] = queryRes
if err != nil {
result.Results[queryRes.RefId].Error = err
result.Results[query.RefId] = &tsdb.QueryResult{
Error: err,
}
return nil
}
result.Results[queryRes.RefId] = queryRes
return nil
})
}
@@ -269,7 +272,7 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi
for _, query := range queries {
// 1 minutes resolution metrics is stored for 15 days, 15 * 24 * 60 = 21600
if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) {
return nil, errors.New("too long query period")
return queryResponses, errors.New("too long query period")
}
mdq := &cloudwatch.MetricDataQuery{
@@ -362,6 +365,7 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi
}
queryRes.Series = append(queryRes.Series, &series)
queryRes.Meta = simplejson.New()
queryResponses = append(queryResponses, queryRes)
}
@@ -565,6 +569,12 @@ func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatch
}
queryRes.Series = append(queryRes.Series, &series)
queryRes.Meta = simplejson.New()
if len(resp.Datapoints) > 0 && resp.Datapoints[0].Unit != nil {
if unit, ok := cloudwatchUnitMappings[*resp.Datapoints[0].Unit]; ok {
queryRes.Meta.Set("unit", unit)
}
}
}
return queryRes, nil

View File

@@ -71,6 +71,7 @@ func TestCloudWatch(t *testing.T) {
"p50.00": aws.Float64(30.0),
"p90.00": aws.Float64(40.0),
},
Unit: aws.String("Seconds"),
},
},
}
@@ -103,6 +104,7 @@ func TestCloudWatch(t *testing.T) {
So(queryRes.Series[1].Points[0][0].String(), ShouldEqual, null.FloatFrom(20.0).String())
So(queryRes.Series[2].Points[0][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
So(queryRes.Series[3].Points[0][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
So(queryRes.Meta.Get("unit").MustString(), ShouldEqual, "s")
})
Convey("terminate gap of data points", func() {
@@ -118,6 +120,7 @@ func TestCloudWatch(t *testing.T) {
"p50.00": aws.Float64(30.0),
"p90.00": aws.Float64(40.0),
},
Unit: aws.String("Seconds"),
},
{
Timestamp: aws.Time(timestamp.Add(60 * time.Second)),
@@ -127,6 +130,7 @@ func TestCloudWatch(t *testing.T) {
"p50.00": aws.Float64(40.0),
"p90.00": aws.Float64(50.0),
},
Unit: aws.String("Seconds"),
},
{
Timestamp: aws.Time(timestamp.Add(180 * time.Second)),
@@ -136,6 +140,7 @@ func TestCloudWatch(t *testing.T) {
"p50.00": aws.Float64(50.0),
"p90.00": aws.Float64(60.0),
},
Unit: aws.String("Seconds"),
},
},
}

View File

@@ -0,0 +1,30 @@
package cloudwatch
// cloudwatchUnitMappings translates the unit reported on a CloudWatch
// datapoint into the unit identifier Grafana uses for axis formatting.
// CloudWatch units with no obvious Grafana counterpart are kept here,
// commented out, so the full upstream unit list stays visible.
var cloudwatchUnitMappings = map[string]string{
	// durations
	"Seconds":      "s",
	"Microseconds": "µs",
	"Milliseconds": "ms",
	// sizes (bytes)
	"Bytes":     "bytes",
	"Kilobytes": "kbytes",
	"Megabytes": "mbytes",
	"Gigabytes": "gbytes",
	//"Terabytes": "",
	// sizes (bits)
	"Bits": "bits",
	//"Kilobits": "",
	//"Megabits": "",
	//"Gigabits": "",
	//"Terabits": "",
	// ratios and counts
	"Percent": "percent",
	//"Count": "",
	// throughput (bytes per second)
	"Bytes/Second":     "Bps",
	"Kilobytes/Second": "KBs",
	"Megabytes/Second": "MBs",
	"Gigabytes/Second": "GBs",
	//"Terabytes/Second": "",
	// throughput (bits per second)
	"Bits/Second":     "bps",
	"Kilobits/Second": "Kbits",
	"Megabits/Second": "Mbits",
	"Gigabits/Second": "Gbits",
	//"Terabits/Second": "",
	//"Count/Second": "",
}

View File

@@ -159,6 +159,39 @@ func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*Stackd
return stackdriverQueries, nil
}
// reverse returns s with its runes in reverse order. Working on runes
// rather than bytes keeps multi-byte UTF-8 sequences intact.
func reverse(s string) string {
	src := []rune(s)
	out := make([]rune, len(src))
	for i, r := range src {
		out[len(src)-1-i] = r
	}
	return string(out)
}
// wildcardEscapeRe matches the regex metacharacters that must be escaped
// when a wildcard filter value is rewritten as a regular expression.
// Compiled once at package level instead of on every call.
var wildcardEscapeRe = regexp.MustCompile(`[-\/^$+?.()|[\]{}]`)

// interpolateFilterWildcards rewrites a filter value containing "*"
// wildcards into the equivalent Stackdriver filter function call:
//
//	*foo*  -> has_substring("foo")
//	*foo   -> ends_with("foo")
//	foo*   -> starts_with("foo")
//
// Any other wildcard placement falls back to monitoring.regex.full_match
// with metacharacters escaped and each "*" expanded to ".*". Values
// without wildcards are returned unchanged.
func interpolateFilterWildcards(value string) string {
	// strings.Count replaces the original throwaway regexp that was
	// compiled on every call just to count asterisks.
	stars := strings.Count(value, "*")
	hasPrefix := strings.HasPrefix(value, "*")
	hasSuffix := strings.HasSuffix(value, "*")

	switch {
	case stars == 2 && hasPrefix && hasSuffix:
		return fmt.Sprintf(`has_substring("%s")`, strings.Replace(value, "*", "", -1))
	case stars == 1 && hasPrefix:
		return fmt.Sprintf(`ends_with("%s")`, strings.Replace(value, "*", "", 1))
	case stars == 1 && hasSuffix:
		// Exactly one "*" exists here, so a single Replace removes the
		// trailing one — no need for the old reverse/replace/reverse trick.
		return fmt.Sprintf(`starts_with("%s")`, strings.Replace(value, "*", "", 1))
	case stars != 0:
		// Escape metacharacters with a doubled backslash (the value is
		// embedded inside a quoted filter expression), then expand
		// wildcards and escape embedded quotes.
		escaped := wildcardEscapeRe.ReplaceAllStringFunc(value, func(m string) string {
			return `\\` + m
		})
		escaped = strings.Replace(escaped, "*", ".*", -1)
		escaped = strings.Replace(escaped, `"`, `\\"`, -1)
		return fmt.Sprintf(`monitoring.regex.full_match("^%s$")`, escaped)
	default:
		return value
	}
}
func buildFilterString(metricType string, filterParts []interface{}) string {
filterString := ""
for i, part := range filterParts {
@@ -166,7 +199,15 @@ func buildFilterString(metricType string, filterParts []interface{}) string {
if part == "AND" {
filterString += " "
} else if mod == 2 {
filterString += fmt.Sprintf(`"%s"`, part)
operator := filterParts[i-1]
if operator == "=~" || operator == "!=~" {
filterString = reverse(strings.Replace(reverse(filterString), "~", "", 1))
filterString += fmt.Sprintf(`monitoring.regex.full_match("%s")`, part)
} else if strings.Contains(part.(string), "*") {
filterString += interpolateFilterWildcards(part.(string))
} else {
filterString += fmt.Sprintf(`"%s"`, part)
}
} else {
filterString += part.(string)
}
@@ -300,29 +341,6 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
for _, series := range data.TimeSeries {
points := make([]tsdb.TimePoint, 0)
// reverse the order to be ascending
for i := len(series.Points) - 1; i >= 0; i-- {
point := series.Points[i]
value := point.Value.DoubleValue
if series.ValueType == "INT64" {
parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64)
if err == nil {
value = parsedValue
}
}
if series.ValueType == "BOOL" {
if point.Value.BoolValue {
value = 1
} else {
value = 0
}
}
points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
}
defaultMetricName := series.Metric.Type
for key, value := range series.Metric.Labels {
@@ -338,18 +356,87 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
if !containsLabel(resourceLabels[key], value) {
resourceLabels[key] = append(resourceLabels[key], value)
}
if containsLabel(query.GroupBys, "resource.label."+key) {
defaultMetricName += " " + value
}
}
metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, query)
// reverse the order to be ascending
if series.ValueType != "DISTRIBUTION" {
for i := len(series.Points) - 1; i >= 0; i-- {
point := series.Points[i]
value := point.Value.DoubleValue
queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
Name: metricName,
Points: points,
})
if series.ValueType == "INT64" {
parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64)
if err == nil {
value = parsedValue
}
}
if series.ValueType == "BOOL" {
if point.Value.BoolValue {
value = 1
} else {
value = 0
}
}
points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
}
metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, make(map[string]string), query)
queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
Name: metricName,
Points: points,
})
} else {
buckets := make(map[int]*tsdb.TimeSeries)
for i := len(series.Points) - 1; i >= 0; i-- {
point := series.Points[i]
if len(point.Value.DistributionValue.BucketCounts) == 0 {
continue
}
maxKey := 0
for i := 0; i < len(point.Value.DistributionValue.BucketCounts); i++ {
value, err := strconv.ParseFloat(point.Value.DistributionValue.BucketCounts[i], 64)
if err != nil {
continue
}
if _, ok := buckets[i]; !ok {
// set lower bounds
// https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
additionalLabels := map[string]string{"bucket": bucketBound}
buckets[i] = &tsdb.TimeSeries{
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
Points: make([]tsdb.TimePoint, 0),
}
if maxKey < i {
maxKey = i
}
}
buckets[i].Points = append(buckets[i].Points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
}
// fill empty bucket
for i := 0; i < maxKey; i++ {
if _, ok := buckets[i]; !ok {
bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
additionalLabels := map[string]string{"bucket": bucketBound}
buckets[i] = &tsdb.TimeSeries{
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
Points: make([]tsdb.TimePoint, 0),
}
}
}
}
for i := 0; i < len(buckets); i++ {
queryRes.Series = append(queryRes.Series, buckets[i])
}
}
}
queryRes.Meta.Set("resourceLabels", resourceLabels)
@@ -368,7 +455,7 @@ func containsLabel(labels []string, newLabel string) bool {
return false
}
func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, query *StackdriverQuery) string {
func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, additionalLabels map[string]string, query *StackdriverQuery) string {
if query.AliasBy == "" {
return defaultMetricName
}
@@ -400,6 +487,10 @@ func formatLegendKeys(metricType string, defaultMetricName string, metricLabels
return []byte(val)
}
if val, exists := additionalLabels[metaPartName]; exists {
return []byte(val)
}
return in
})
@@ -425,6 +516,22 @@ func replaceWithMetricPart(metaPartName string, metricType string) []byte {
return nil
}
// calcBucketBound returns the lower bound of distribution bucket n,
// formatted as a string. Bucket 0 is the underflow bucket, whose lower
// bound is always "0"; the same default is returned when no bucket
// layout is set.
// See https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
func calcBucketBound(bucketOptions StackdriverBucketOptions, n int) string {
	if n == 0 {
		return "0"
	}
	switch {
	case bucketOptions.LinearBuckets != nil:
		lb := bucketOptions.LinearBuckets
		return strconv.FormatInt(lb.Offset+lb.Width*int64(n-1), 10)
	case bucketOptions.ExponentialBuckets != nil:
		eb := bucketOptions.ExponentialBuckets
		return strconv.FormatInt(int64(eb.Scale*math.Pow(eb.GrowthFactor, float64(n-1))), 10)
	case bucketOptions.ExplicitBuckets != nil:
		return strconv.FormatInt(bucketOptions.ExplicitBuckets.Bounds[n-1], 10)
	default:
		return "0"
	}
}
func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "render")

View File

@@ -4,6 +4,8 @@ import (
"encoding/json"
"fmt"
"io/ioutil"
"math"
"strconv"
"testing"
"time"
@@ -341,6 +343,137 @@ func TestStackdriver(t *testing.T) {
})
})
})
Convey("when data from query is distribution", func() {
data, err := loadTestFile("./test-data/3-series-response-distribution.json")
So(err, ShouldBeNil)
So(len(data.TimeSeries), ShouldEqual, 1)
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
query := &StackdriverQuery{AliasBy: "{{bucket}}"}
err = executor.parseResponse(res, data, query)
So(err, ShouldBeNil)
So(len(res.Series), ShouldEqual, 11)
for i := 0; i < 11; i++ {
if i == 0 {
So(res.Series[i].Name, ShouldEqual, "0")
} else {
So(res.Series[i].Name, ShouldEqual, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10))
}
So(len(res.Series[i].Points), ShouldEqual, 3)
}
Convey("timestamps should be in ascending order", func() {
So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536668940000)
So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536669000000)
So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536669060000)
})
Convey("value should be correct", func() {
So(res.Series[8].Points[0][0].Float64, ShouldEqual, 1)
So(res.Series[9].Points[0][0].Float64, ShouldEqual, 1)
So(res.Series[10].Points[0][0].Float64, ShouldEqual, 1)
So(res.Series[8].Points[1][0].Float64, ShouldEqual, 0)
So(res.Series[9].Points[1][0].Float64, ShouldEqual, 0)
So(res.Series[10].Points[1][0].Float64, ShouldEqual, 1)
So(res.Series[8].Points[2][0].Float64, ShouldEqual, 0)
So(res.Series[9].Points[2][0].Float64, ShouldEqual, 1)
So(res.Series[10].Points[2][0].Float64, ShouldEqual, 0)
})
})
})
Convey("when interpolating filter wildcards", func() {
Convey("and wildcard is used in the beginning and the end of the word", func() {
Convey("and theres no wildcard in the middle of the word", func() {
value := interpolateFilterWildcards("*-central1*")
So(value, ShouldEqual, `has_substring("-central1")`)
})
Convey("and there is a wildcard in the middle of the word", func() {
value := interpolateFilterWildcards("*-cent*ral1*")
So(value, ShouldNotStartWith, `has_substring`)
})
})
Convey("and wildcard is used in the beginning of the word", func() {
Convey("and there is not a wildcard elsewhere in the word", func() {
value := interpolateFilterWildcards("*-central1")
So(value, ShouldEqual, `ends_with("-central1")`)
})
Convey("and there is a wildcard elsewhere in the word", func() {
value := interpolateFilterWildcards("*-cent*al1")
So(value, ShouldNotStartWith, `ends_with`)
})
})
Convey("and wildcard is used at the end of the word", func() {
Convey("and there is not a wildcard elsewhere in the word", func() {
value := interpolateFilterWildcards("us-central*")
So(value, ShouldEqual, `starts_with("us-central")`)
})
Convey("and there is a wildcard elsewhere in the word", func() {
value := interpolateFilterWildcards("*us-central*")
So(value, ShouldNotStartWith, `starts_with`)
})
})
Convey("and wildcard is used in the middle of the word", func() {
Convey("and there is only one wildcard", func() {
value := interpolateFilterWildcards("us-ce*tral1-b")
So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-b$")`)
})
Convey("and there is more than one wildcard", func() {
value := interpolateFilterWildcards("us-ce*tra*1-b")
So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tra.*1\\-b$")`)
})
})
Convey("and wildcard is used in the middle of the word and in the beginning of the word", func() {
value := interpolateFilterWildcards("*s-ce*tral1-b")
So(value, ShouldEqual, `monitoring.regex.full_match("^.*s\\-ce.*tral1\\-b$")`)
})
Convey("and wildcard is used in the middle of the word and in the ending of the word", func() {
value := interpolateFilterWildcards("us-ce*tral1-*")
So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-.*$")`)
})
Convey("and no wildcard is used", func() {
value := interpolateFilterWildcards("us-central1-a}")
So(value, ShouldEqual, `us-central1-a}`)
})
})
Convey("when building filter string", func() {
Convey("and theres no regex operator", func() {
Convey("and there are wildcards in a filter value", func() {
filterParts := []interface{}{"zone", "=", "*-central1*"}
value := buildFilterString("somemetrictype", filterParts)
So(value, ShouldEqual, `metric.type="somemetrictype" zone=has_substring("-central1")`)
})
Convey("and there are no wildcards in any filter value", func() {
filterParts := []interface{}{"zone", "!=", "us-central1-a"}
value := buildFilterString("somemetrictype", filterParts)
So(value, ShouldEqual, `metric.type="somemetrictype" zone!="us-central1-a"`)
})
})
Convey("and there is a regex operator", func() {
filterParts := []interface{}{"zone", "=~", "us-central1-a~"}
value := buildFilterString("somemetrictype", filterParts)
Convey("it should remove the ~ character from the operator that belongs to the value", func() {
So(value, ShouldNotContainSubstring, `=~`)
So(value, ShouldContainSubstring, `zone=`)
})
Convey("it should insert monitoring.regex.full_match before filter value", func() {
So(value, ShouldContainSubstring, `zone=monitoring.regex.full_match("us-central1-a~")`)
})
})
})
})
}

View File

@@ -0,0 +1,112 @@
{
"timeSeries": [
{
"metric": {
"type": "loadbalancing.googleapis.com\/https\/backend_latencies"
},
"resource": {
"type": "https_lb_rule",
"labels": {
"project_id": "grafana-prod"
}
},
"metricKind": "DELTA",
"valueType": "DISTRIBUTION",
"points": [
{
"interval": {
"startTime": "2018-09-11T12:30:00Z",
"endTime": "2018-09-11T12:31:00Z"
},
"value": {
"distributionValue": {
"count": "1",
"bucketOptions": {
"exponentialBuckets": {
"numFiniteBuckets": 10,
"growthFactor": 2,
"scale": 1
}
},
"bucketCounts": [
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"1",
"0"
]
}
}
},
{
"interval": {
"startTime": "2018-09-11T12:29:00Z",
"endTime": "2018-09-11T12:30:00Z"
},
"value": {
"distributionValue": {
"count": "1",
"bucketOptions": {
"exponentialBuckets": {
"numFiniteBuckets": 10,
"growthFactor": 2,
"scale": 1
}
},
"bucketCounts": [
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"1"
]
}
}
},
{
"interval": {
"startTime": "2018-09-11T12:28:00Z",
"endTime": "2018-09-11T12:29:00Z"
},
"value": {
"distributionValue": {
"count": "3",
"bucketOptions": {
"exponentialBuckets": {
"numFiniteBuckets": 10,
"growthFactor": 2,
"scale": 1
}
},
"bucketCounts": [
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"0",
"1",
"1",
"1"
]
}
}
}
]
}
]
}

View File

@@ -14,6 +14,22 @@ type StackdriverQuery struct {
AliasBy string
}
// StackdriverBucketOptions mirrors the bucketOptions object of the
// Stackdriver Distribution value type; the layouts are presumably mutually
// exclusive (only one non-nil per response) — see
// https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
type StackdriverBucketOptions struct {
	// LinearBuckets describes NumFiniteBuckets buckets of equal Width,
	// with the first finite bucket starting at Offset.
	LinearBuckets *struct {
		NumFiniteBuckets int64 `json:"numFiniteBuckets"`
		Width            int64 `json:"width"`
		Offset           int64 `json:"offset"`
	} `json:"linearBuckets"`
	// ExponentialBuckets describes buckets whose bounds grow by
	// GrowthFactor each step, scaled by Scale.
	ExponentialBuckets *struct {
		NumFiniteBuckets int64   `json:"numFiniteBuckets"`
		GrowthFactor     float64 `json:"growthFactor"`
		Scale            float64 `json:"scale"`
	} `json:"exponentialBuckets"`
	// ExplicitBuckets lists the bucket boundaries directly.
	ExplicitBuckets *struct {
		Bounds []int64 `json:"bounds"`
	} `json:"explicitBuckets"`
}
// StackdriverResponse is the data returned from the external Google Stackdriver API
type StackdriverResponse struct {
TimeSeries []struct {
@@ -33,10 +49,26 @@ type StackdriverResponse struct {
EndTime time.Time `json:"endTime"`
} `json:"interval"`
Value struct {
DoubleValue float64 `json:"doubleValue"`
StringValue string `json:"stringValue"`
BoolValue bool `json:"boolValue"`
IntValue string `json:"int64Value"`
DoubleValue float64 `json:"doubleValue"`
StringValue string `json:"stringValue"`
BoolValue bool `json:"boolValue"`
IntValue string `json:"int64Value"`
DistributionValue struct {
Count string `json:"count"`
Mean float64 `json:"mean"`
SumOfSquaredDeviation float64 `json:"sumOfSquaredDeviation"`
Range struct {
Min int `json:"min"`
Max int `json:"max"`
} `json:"range"`
BucketOptions StackdriverBucketOptions `json:"bucketOptions"`
BucketCounts []string `json:"bucketCounts"`
Examplars []struct {
Value float64 `json:"value"`
Timestamp string `json:"timestamp"`
// attachments
} `json:"examplars"`
} `json:"distributionValue"`
} `json:"value"`
} `json:"points"`
} `json:"timeSeries"`

View File

@@ -58,7 +58,7 @@ export function updateDashboardPermission(
continue;
}
const updated = toUpdateItem(itemToUpdate);
const updated = toUpdateItem(item);
// if this is the item we want to update, update its permission
if (itemToUpdate === item) {

View File

@@ -39,7 +39,6 @@ export class DataSourcesActionBar extends PureComponent<Props> {
</div>
<div className="page-action-bar__spacer" />
<a className="page-header__cta btn btn-success" href="datasources/new">
<i className="fa fa-plus" />
Add data source
</a>
</div>

View File

@@ -33,9 +33,6 @@ exports[`Render should render component 1`] = `
className="page-header__cta btn btn-success"
href="datasources/new"
>
<i
className="fa fa-plus"
/>
Add data source
</a>
</div>

View File

@@ -110,7 +110,7 @@ export function updateFolderPermission(itemToUpdate: DashboardAcl, level: Permis
continue;
}
const updated = toUpdateItem(itemToUpdate);
const updated = toUpdateItem(item);
// if this is the item we want to update, update its permission
if (itemToUpdate === item) {

View File

@@ -103,7 +103,7 @@ export class TeamList extends PureComponent<Props, any> {
<div className="page-action-bar__spacer" />
<a className="btn btn-success" href="org/teams/new">
<i className="fa fa-plus" /> New team
New team
</a>
</div>

View File

@@ -62,10 +62,7 @@ exports[`Render should render teams table 1`] = `
className="btn btn-success"
href="org/teams/new"
>
<i
className="fa fa-plus"
/>
New team
New team
</a>
</div>
<div

View File

@@ -131,7 +131,7 @@ export default class CloudWatchDatasource {
if (res.results) {
_.forEach(res.results, queryRes => {
_.forEach(queryRes.series, series => {
data.push({ target: series.name, datapoints: series.points });
data.push({ target: series.name, datapoints: series.points, unit: queryRes.meta.unit || 'none' });
});
});
}

View File

@@ -37,8 +37,7 @@
Id
<info-popover mode="right-normal ">Id can include numbers, letters, and underscore, and must start with a lowercase letter.</info-popover>
</label>
<input type="text " class="gf-form-input " ng-model="target.id " spellcheck='false' ng-pattern='/^[a-z][A-Z0-9_]*/' ng-model-onblur
ng-change="onChange() ">
<input type="text " class="gf-form-input " ng-model="target.id " spellcheck='false' ng-pattern='/^[a-z][a-zA-Z0-9_]*$/' ng-model-onblur ng-change="onChange() ">
</div>
<div class="gf-form max-width-30 ">
<label class="gf-form-label query-keyword width-7 ">Expression</label>

View File

@@ -60,6 +60,7 @@ describe('CloudWatchDatasource', () => {
A: {
error: '',
refId: 'A',
meta: {},
series: [
{
name: 'CPUUtilization_Average',
@@ -221,6 +222,7 @@ describe('CloudWatchDatasource', () => {
A: {
error: '',
refId: 'A',
meta: {},
series: [
{
name: 'TargetResponseTime_p90.00',

View File

@@ -19,7 +19,7 @@ export const alignOptions = [
{
text: 'delta',
value: 'ALIGN_DELTA',
valueTypes: [ValueTypes.INT64, ValueTypes.DOUBLE, ValueTypes.MONEY],
valueTypes: [ValueTypes.INT64, ValueTypes.DOUBLE, ValueTypes.MONEY, ValueTypes.DISTRIBUTION],
metricKinds: [MetricKind.CUMULATIVE, MetricKind.DELTA],
},
{

View File

@@ -241,7 +241,17 @@ export default class StackdriverDatasource {
try {
const metricsApiPath = `v3/projects/${projectId}/metricDescriptors`;
const { data } = await this.doRequest(`${this.baseUrl}${metricsApiPath}`);
return data.metricDescriptors;
const metrics = data.metricDescriptors.map(m => {
const [service] = m.type.split('/');
const [serviceShortName] = service.split('.');
m.service = service;
m.serviceShortName = serviceShortName;
m.displayName = m.displayName || m.type;
return m;
});
return metrics;
} catch (error) {
console.log(error);
}

View File

@@ -40,21 +40,33 @@
<div class="gf-form" ng-show="ctrl.showLastQuery">
<pre class="gf-form-pre">{{ctrl.lastQueryMeta.rawQueryString}}</pre>
</div>
<div class="gf-form grafana-info-box" style="padding: 0" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info" style="margin-right: 0"><h5>Alias Patterns</h5>Format the legend keys any way you want by using alias patterns.
<div class="grafana-info-box m-t-2 markdown-html" ng-show="ctrl.showHelp">
<h5>Alias Patterns</h5>
<label>Example: </label><code ng-non-bindable>{{metric.name}} - {{metric.label.instance_name}}</code>
Format the legend keys any way you want by using alias patterns.<br /> <br />
<label>Result: </label><code ng-non-bindable>cpu/usage_time - server1-europe-west-1</code>
Example: <code ng-non-bindable>{{metric.name}} - {{metric.label.instance_name}}</code><br />
Result: &nbsp;&nbsp;<code ng-non-bindable>cpu/usage_time - server1-europe-west-1</code><br /><br />
<label>Patterns:</label>
<code ng-non-bindable>{{metric.type}}</code> = metric type e.g. compute.googleapis.com/instance/cpu/usage_time
<code ng-non-bindable>{{metric.name}}</code> = name part of metric e.g. instance/cpu/usage_time
<code ng-non-bindable>{{metric.service}}</code> = service part of metric e.g. compute
<code ng-non-bindable>{{metric.label.label_name}}</code> = Metric label metadata e.g. metric.label.instance_name
<code ng-non-bindable>{{resource.label.label_name}}</code> = Resource label metadata e.g. resource.label.zone
</pre>
<strong>Patterns</strong><br />
<ul>
<li>
<code ng-non-bindable>{{metric.type}}</code> = metric type e.g. compute.googleapis.com/instance/cpu/usage_time
</li>
<li>
<code ng-non-bindable>{{metric.name}}</code> = name part of metric e.g. instance/cpu/usage_time
</li>
<li>
<code ng-non-bindable>{{metric.service}}</code> = service part of metric e.g. compute
</li>
<li>
<code ng-non-bindable>{{metric.label.label_name}}</code> = Metric label metadata e.g.
metric.label.instance_name
</li>
<li>
<code ng-non-bindable>{{resource.label.label_name}}</code> = Resource label metadata e.g. resource.label.zone
</li>
</ul>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryError">
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>

View File

@@ -24,6 +24,7 @@ export class StackdriverAggregationCtrl {
alignOptions: any[];
target: any;
/** @ngInject */
constructor(private $scope) {
this.$scope.ctrl = this;
this.target = $scope.target;

View File

@@ -4,6 +4,7 @@ import { FilterSegments, DefaultRemoveFilterValue } from './filter_segments';
import appEvents from 'app/core/app_events';
export class StackdriverFilter {
/** @ngInject */
constructor() {
return {
templateUrl: 'public/app/plugins/datasource/stackdriver/partials/query.filter.html',
@@ -95,11 +96,9 @@ export class StackdriverFilterCtrl {
getServicesList() {
const defaultValue = { value: this.$scope.defaultServiceValue, text: this.$scope.defaultServiceValue };
const services = this.metricDescriptors.map(m => {
const [service] = m.type.split('/');
const [serviceShortName] = service.split('.');
return {
value: service,
text: serviceShortName,
value: m.service,
text: m.serviceShortName,
};
});
@@ -112,12 +111,10 @@ export class StackdriverFilterCtrl {
getMetricsList() {
const metrics = this.metricDescriptors.map(m => {
const [service] = m.type.split('/');
const [serviceShortName] = service.split('.');
return {
service,
service: m.service,
value: m.type,
serviceShortName,
serviceShortName: m.serviceShortName,
text: m.displayName,
title: m.description,
};

View File

@@ -164,11 +164,11 @@ describe('StackdriverDataSource', () => {
metricDescriptors: [
{
displayName: 'test metric name 1',
type: 'test metric type 1',
type: 'compute.googleapis.com/instance/cpu/test-metric-type-1',
description: 'A description',
},
{
displayName: 'test metric name 2',
type: 'test metric type 2',
type: 'logging.googleapis.com/user/logbased-metric-with-no-display-name',
},
],
},
@@ -180,8 +180,13 @@ describe('StackdriverDataSource', () => {
});
it('should return successfully', () => {
expect(result.length).toBe(2);
expect(result[0].type).toBe('test metric type 1');
expect(result[0].service).toBe('compute.googleapis.com');
expect(result[0].serviceShortName).toBe('compute');
expect(result[0].type).toBe('compute.googleapis.com/instance/cpu/test-metric-type-1');
expect(result[0].displayName).toBe('test metric name 1');
expect(result[0].description).toBe('A description');
expect(result[1].type).toBe('logging.googleapis.com/user/logbased-metric-with-no-display-name');
expect(result[1].displayName).toBe('logging.googleapis.com/user/logbased-metric-with-no-display-name');
});
});

View File

@@ -115,8 +115,8 @@ $tight-form-func-bg: #333334;
$tight-form-func-highlight-bg: #444445;
$modal-backdrop-bg: #353c42;
$code-tag-bg: $gray-1;
$code-tag-border: lighten($code-tag-bg, 2%);
$code-tag-bg: $dark-1;
$code-tag-border: $dark-4;
// cards
$card-background: linear-gradient(135deg, #2f2f32, #262628);

14
scripts/build/publish.sh Executable file
View File

@@ -0,0 +1,14 @@
#!/bin/sh
# Publishes the release tagged by CIRCLE_TAG to grafana.com via the
# release_publisher binary. No relation to publish.go.
# Right now we hack this in into the publish script.
# Eventually we might want to keep a list of all previous releases somewhere.
_releaseNoteUrl="https://community.grafana.com/t/release-notes-v5-3-x/10244"
_whatsNewUrl="http://docs.grafana.org/guides/whats-new-in-v5-3/"

# Quote every expansion: CIRCLE_TAG / GRAFANA_COM_API_KEY come from the CI
# environment and must not be word-split if empty or unusual.
./scripts/build/release_publisher/release_publisher \
    --wn "${_whatsNewUrl}" \
    --rn "${_releaseNoteUrl}" \
    --version "${CIRCLE_TAG}" \
    --apikey "${GRAFANA_COM_API_KEY}"

View File

@@ -0,0 +1,40 @@
package main
import (
"flag"
"fmt"
"log"
"os"
)
var baseUri string = "https://grafana.com/api"
// main parses release parameters from the command line and publishes the
// given Grafana version (metadata plus per-package build entries) to
// grafana.com. With --dry-run the payloads are only logged.
func main() {
	var version string
	var whatsNewUrl string
	var releaseNotesUrl string
	var dryRun bool
	var apiKey string

	flag.StringVar(&version, "version", "", "Grafana version (ex: --version v5.2.0-beta1)")
	flag.StringVar(&whatsNewUrl, "wn", "", "What's new url (ex: --wn http://docs.grafana.org/guides/whats-new-in-v5-2/)")
	flag.StringVar(&releaseNotesUrl, "rn", "", "Release notes url (ex: --rn https://community.grafana.com/t/release-notes-v5-2-x/7894)")
	flag.StringVar(&apiKey, "apikey", "", "Grafana.com API key (ex: --apikey ABCDEF)")
	flag.BoolVar(&dryRun, "dry-run", false, "--dry-run")
	flag.Parse()

	// Called with no arguments at all: print usage and bail out.
	if len(os.Args) == 1 {
		fmt.Println("Usage: go run publisher.go main.go --version <v> --wn <what's new url> --rn <release notes url> --apikey <api key> --dry-run false")
		fmt.Println("example: go run publisher.go main.go --version v5.2.0-beta2 --wn http://docs.grafana.org/guides/whats-new-in-v5-2/ --rn https://community.grafana.com/t/release-notes-v5-2-x/7894 --apikey ASDF123 --dry-run true")
		os.Exit(1)
	}

	if dryRun {
		log.Println("Dry-run has been enabled.")
	}

	p := publisher{apiKey: apiKey}
	if err := p.doRelease(version, whatsNewUrl, releaseNotesUrl, dryRun); err != nil {
		log.Fatalf("error: %v", err)
	}
}

View File

@@ -0,0 +1,266 @@
package main
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"log"
"net/http"
"strings"
"time"
)
// publisher posts release and build metadata to the grafana.com API,
// authenticating every request with apiKey as a bearer token.
type publisher struct {
	apiKey string
}
// doRelease assembles the release metadata for version (fetching each
// package's sha256 checksum over HTTP) and either publishes it to
// grafana.com, or — when dryRun is set — only logs the JSON payloads that
// would have been posted.
func (p *publisher) doRelease(version string, whatsNewUrl string, releaseNotesUrl string, dryRun bool) error {
	rel, err := newRelease(version, whatsNewUrl, releaseNotesUrl, buildArtifactConfigurations, getHttpContents{})
	if err != nil {
		return err
	}

	if !dryRun {
		return p.postRelease(rel)
	}

	// Dry run: show the release payload followed by one payload per build.
	relJson, err := json.Marshal(rel)
	if err != nil {
		return err
	}
	log.Println(string(relJson))

	for _, b := range rel.Builds {
		buildJson, err := json.Marshal(b)
		if err != nil {
			return err
		}
		log.Println(string(buildJson))
	}
	return nil
}
// postRelease publishes the release and each of its build artifacts to the
// grafana.com API. Every resource is posted twice — once to the create
// endpoint and once to the update endpoint — so the call succeeds whether or
// not the resource already exists (postRequest tolerates "already exists").
func (p *publisher) postRelease(r *release) error {
	if err := p.postRequest("/grafana/versions", r, fmt.Sprintf("Create Release %s", r.Version)); err != nil {
		return err
	}
	if err := p.postRequest("/grafana/versions/"+r.Version, r, fmt.Sprintf("Update Release %s", r.Version)); err != nil {
		return err
	}

	for _, b := range r.Builds {
		createPath := fmt.Sprintf("/grafana/versions/%s/packages", r.Version)
		if err := p.postRequest(createPath, b, fmt.Sprintf("Create Build %s %s", b.Os, b.Arch)); err != nil {
			return err
		}
		updatePath := fmt.Sprintf("/grafana/versions/%s/packages/%s/%s", r.Version, b.Arch, b.Os)
		if err := p.postRequest(updatePath, b, fmt.Sprintf("Update Build %s %s", b.Os, b.Arch)); err != nil {
			return err
		}
	}
	return nil
}
// baseArchiveUrl is the root location of published Grafana release archives.
// (Renamed from the misspelled baseArhiveUrl; only used by getUrl below.)
const baseArchiveUrl = "https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana"

// buildArtifact describes one downloadable package of a release: the target
// os / package format, the cpu architecture, and the filename suffix the
// archive uses.
type buildArtifact struct {
	os         string
	arch       string
	urlPostfix string
}

// getUrl returns the full download url of this artifact for the given
// version. Debian packages join the version with "_" instead of "-", and
// stable (non-beta) rpm packages carry a "-1" package-release suffix.
func (t buildArtifact) getUrl(version string, isBeta bool) string {
	separator := "-"
	rhelReleaseExtra := ""
	if t.os == "deb" {
		separator = "_"
	}
	if !isBeta && t.os == "rhel" {
		rhelReleaseExtra = "-1"
	}
	return strings.Join([]string{baseArchiveUrl, separator, version, rhelReleaseExtra, t.urlPostfix}, "")
}
// buildArtifactConfigurations lists every package published for a release:
// deb, rpm and tar.gz for arm64, armv7 and amd64, plus the darwin tarball
// and the windows zip. newRelease turns each entry into one build record.
var buildArtifactConfigurations = []buildArtifact{
	{
		os:         "deb",
		arch:       "arm64",
		urlPostfix: "_arm64.deb",
	},
	{
		os:         "rhel",
		arch:       "arm64",
		urlPostfix: ".aarch64.rpm",
	},
	{
		os:         "linux",
		arch:       "arm64",
		urlPostfix: ".linux-arm64.tar.gz",
	},
	{
		os:         "deb",
		arch:       "armv7",
		urlPostfix: "_armhf.deb",
	},
	{
		os:         "rhel",
		arch:       "armv7",
		urlPostfix: ".armhfp.rpm",
	},
	{
		os:         "linux",
		arch:       "armv7",
		urlPostfix: ".linux-armv7.tar.gz",
	},
	{
		os:         "darwin",
		arch:       "amd64",
		urlPostfix: ".darwin-amd64.tar.gz",
	},
	{
		os:         "deb",
		arch:       "amd64",
		urlPostfix: "_amd64.deb",
	},
	{
		os:         "rhel",
		arch:       "amd64",
		urlPostfix: ".x86_64.rpm",
	},
	{
		os:         "linux",
		arch:       "amd64",
		urlPostfix: ".linux-amd64.tar.gz",
	},
	{
		os:         "win",
		arch:       "amd64",
		urlPostfix: ".windows-amd64.zip",
	},
}
// newRelease builds the release metadata for rawVersion (a "v5.3.0"-style
// tag; the leading "v" is optional). For every artifact configuration it
// fetches the published sha256 checksum through getter and records one build.
// Returns an error if any checksum download fails.
func newRelease(rawVersion string, whatsNewUrl string, releaseNotesUrl string, artifactConfigurations []buildArtifact, getter urlGetter) (*release, error) {
	// TrimPrefix instead of rawVersion[1:]: the latter panics on an empty
	// string and silently mangles versions passed without the "v" prefix.
	version := strings.TrimPrefix(rawVersion, "v")
	now := time.Now()
	isBeta := strings.Contains(version, "beta")

	builds := make([]build, 0, len(artifactConfigurations))
	for _, ba := range artifactConfigurations {
		sha256, err := getter.getContents(fmt.Sprintf("%s.sha256", ba.getUrl(version, isBeta)))
		if err != nil {
			return nil, err
		}
		builds = append(builds, newBuild(ba, version, isBeta, sha256))
	}

	r := release{
		Version:         version,
		ReleaseDate:     time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, time.Local),
		Stable:          !isBeta,
		Beta:            isBeta,
		Nightly:         false,
		WhatsNewUrl:     whatsNewUrl,
		ReleaseNotesUrl: releaseNotesUrl,
		Builds:          builds,
	}
	return &r, nil
}
// newBuild assembles the API payload for one package of the release, pairing
// the artifact's computed download url with its sha256 checksum.
func newBuild(ba buildArtifact, version string, isBeta bool, sha256 string) build {
	var b build
	b.Os = ba.os
	b.Arch = ba.arch
	b.Url = ba.getUrl(version, isBeta)
	b.Sha256 = sha256
	return b
}
// postRequest POSTs obj as JSON to the grafana.com API endpoint url, logging
// the outcome under desc. A non-200 response whose body says the resource
// "already exists" or there is "Nothing to update" is treated as success, so
// create/update pairs can be issued unconditionally; any other non-200
// response is returned as an error (the original called log.Fatalf here,
// which skipped deferred cleanup — callers already propagate errors to main,
// which exits).
func (p *publisher) postRequest(url string, obj interface{}, desc string) error {
	jsonBytes, err := json.Marshal(obj)
	if err != nil {
		return err
	}
	req, err := http.NewRequest(http.MethodPost, baseUri+url, bytes.NewReader(jsonBytes))
	if err != nil {
		return err
	}
	req.Header.Add("Authorization", "Bearer "+p.apiKey)
	req.Header.Add("Content-Type", "application/json")

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	// Close on every path: the 200 branch previously returned without
	// closing, leaking the connection.
	defer res.Body.Close()

	if res.StatusCode == http.StatusOK {
		log.Printf("Action: %s \t OK", desc)
		return nil
	}

	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return err
	}
	if strings.Contains(string(body), "already exists") || strings.Contains(string(body), "Nothing to update") {
		log.Printf("Action: %s \t Already exists", desc)
		return nil
	}

	log.Printf("Action: %s \t Failed - Status: %v", desc, res.Status)
	log.Printf("Resp: %s", body)
	return fmt.Errorf("action %q failed with status %v", desc, res.Status)
}
// release is the version payload posted to the grafana.com versions API.
type release struct {
	Version         string    `json:"version"`
	ReleaseDate     time.Time `json:"releaseDate"`
	Stable          bool      `json:"stable"`
	Beta            bool      `json:"beta"`
	Nightly         bool      `json:"nightly"`
	WhatsNewUrl     string    `json:"whatsNewUrl"`
	ReleaseNotesUrl string    `json:"releaseNotesUrl"`
	// Builds is excluded from the release JSON; each build is posted
	// separately to the packages endpoint by postRelease.
	Builds []build `json:"-"`
}
// build is the per-package payload posted to the grafana.com packages API:
// one downloadable archive identified by os/arch, with its url and checksum.
type build struct {
	Os     string `json:"os"`
	Url    string `json:"url"`
	Sha256 string `json:"sha256"`
	Arch   string `json:"arch"`
}
// urlGetter abstracts fetching the contents behind a url so that checksum
// downloads can be mocked in tests.
type urlGetter interface {
	getContents(url string) (string, error)
}

// getHttpContents implements urlGetter with a plain HTTP GET.
type getHttpContents struct{}

// getContents downloads url and returns the response body as a string.
// Non-200 responses are reported as errors rather than returned as content —
// otherwise e.g. an S3 404 error page would be treated as a valid checksum.
func (getHttpContents) getContents(url string) (string, error) {
	response, err := http.Get(url)
	if err != nil {
		return "", err
	}
	defer response.Body.Close()

	if response.StatusCode != http.StatusOK {
		return "", fmt.Errorf("fetching %s: unexpected status %v", url, response.Status)
	}

	all, err := ioutil.ReadAll(response.Body)
	if err != nil {
		return "", err
	}
	return string(all), nil
}

View File

@@ -0,0 +1,43 @@
package main
import "testing"
// TestNewRelease verifies that newRelease derives the version string, the
// beta/stable flags and the build list correctly from a tagged version.
func TestNewRelease(t *testing.T) {
	versionIn := "v5.2.0-beta1"
	expectedVersion := "5.2.0-beta1"
	whatsNewUrl := "https://whatsnews.foo/"
	relNotesUrl := "https://relnotes.foo/"
	expectedArch := "amd64"
	expectedOs := "linux"
	buildArtifacts := []buildArtifact{{expectedOs, expectedArch, ".linux-amd64.tar.gz"}}

	rel, err := newRelease(versionIn, whatsNewUrl, relNotesUrl, buildArtifacts, mockHttpGetter{})
	if err != nil {
		t.Fatalf("newRelease returned an unexpected error: %v", err)
	}

	if !rel.Beta || rel.Stable {
		t.Errorf("%s should have been tagged as beta (not stable), but wasn't .", versionIn)
	}
	if rel.Version != expectedVersion {
		t.Errorf("Expected version to be %s, but it was %s.", expectedVersion, rel.Version)
	}
	expectedBuilds := len(buildArtifacts)
	if len(rel.Builds) != expectedBuilds {
		t.Errorf("Expected %v builds, but got %v.", expectedBuilds, len(rel.Builds))
	}

	// Named firstBuild to avoid shadowing the package-level build type.
	firstBuild := rel.Builds[0]
	if firstBuild.Arch != expectedArch {
		t.Errorf("Expected arch to be %v, but it was %v", expectedArch, firstBuild.Arch)
	}
	if firstBuild.Os != expectedOs {
		t.Errorf("Expected os to be %v, but it was %v", expectedOs, firstBuild.Os)
	}
}
// mockHttpGetter satisfies urlGetter in tests by echoing the requested url
// back as the content, avoiding any network access.
type mockHttpGetter struct{}

func (mockHttpGetter) getContents(url string) (string, error) {
	return url, nil
}