2019-10-23 17:09:04 +00:00
|
|
|
package pkger
|
|
|
|
|
|
|
|
import (
|
2019-12-21 23:57:41 +00:00
|
|
|
"errors"
|
2020-02-06 20:26:10 +00:00
|
|
|
"fmt"
|
2019-11-07 00:45:00 +00:00
|
|
|
"path/filepath"
|
2019-11-14 00:24:05 +00:00
|
|
|
"strconv"
|
2019-11-07 00:45:00 +00:00
|
|
|
"strings"
|
2019-10-23 17:09:04 +00:00
|
|
|
"testing"
|
|
|
|
"time"
|
|
|
|
|
2019-10-30 21:13:42 +00:00
|
|
|
"github.com/influxdata/influxdb"
|
2019-12-18 01:57:44 +00:00
|
|
|
"github.com/influxdata/influxdb/notification"
|
|
|
|
icheck "github.com/influxdata/influxdb/notification/check"
|
2019-12-06 07:05:32 +00:00
|
|
|
"github.com/influxdata/influxdb/notification/endpoint"
|
2019-10-23 17:09:04 +00:00
|
|
|
"github.com/stretchr/testify/assert"
|
|
|
|
"github.com/stretchr/testify/require"
|
|
|
|
)
|
|
|
|
|
|
|
|
func TestParse(t *testing.T) {
|
2019-11-06 22:41:06 +00:00
|
|
|
t.Run("pkg with a bucket", func(t *testing.T) {
|
2019-10-24 23:59:01 +00:00
|
|
|
t.Run("with valid bucket pkg should be valid", func(t *testing.T) {
|
2019-10-23 17:09:04 +00:00
|
|
|
testfileRunner(t, "testdata/bucket", func(t *testing.T, pkg *Pkg) {
|
2019-12-06 00:53:00 +00:00
|
|
|
buckets := pkg.Summary().Buckets
|
2020-03-16 18:25:39 +00:00
|
|
|
require.Len(t, buckets, 2)
|
2019-10-23 17:09:04 +00:00
|
|
|
|
|
|
|
actual := buckets[0]
|
2019-12-06 00:53:00 +00:00
|
|
|
expectedBucket := SummaryBucket{
|
2020-03-16 18:25:39 +00:00
|
|
|
Name: "display name",
|
|
|
|
Description: "bucket 2 description",
|
|
|
|
LabelAssociations: []SummaryLabel{},
|
|
|
|
}
|
|
|
|
assert.Equal(t, expectedBucket, actual)
|
|
|
|
|
|
|
|
actual = buckets[1]
|
|
|
|
expectedBucket = SummaryBucket{
|
2020-01-12 02:25:19 +00:00
|
|
|
Name: "rucket_11",
|
|
|
|
Description: "bucket 1 description",
|
|
|
|
RetentionPeriod: time.Hour,
|
|
|
|
LabelAssociations: []SummaryLabel{},
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
2019-12-06 00:53:00 +00:00
|
|
|
assert.Equal(t, expectedBucket, actual)
|
2019-10-23 17:09:04 +00:00
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
t.Run("handles bad config", func(t *testing.T) {
|
|
|
|
tests := []testPkgResourceError{
|
2019-10-23 17:09:04 +00:00
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "missing name",
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
2019-10-23 17:09:04 +00:00
|
|
|
spec:
|
|
|
|
`,
|
2019-11-06 22:41:06 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "mixed valid and missing name",
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_11
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
2019-10-23 17:09:04 +00:00
|
|
|
spec:
|
|
|
|
`,
|
2019-11-06 22:41:06 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "mixed valid and multiple bad names",
|
|
|
|
resourceErrs: 2,
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_11
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
spec:
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
2019-10-23 17:09:04 +00:00
|
|
|
spec:
|
|
|
|
`,
|
|
|
|
},
|
2019-11-06 22:41:06 +00:00
|
|
|
{
|
|
|
|
name: "duplicate bucket names",
|
|
|
|
resourceErrs: 1,
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: valid name
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: valid name
|
2020-03-16 18:25:39 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2020-03-16 23:45:25 +00:00
|
|
|
name: "duplicate meta name and spec name",
|
2020-03-16 18:25:39 +00:00
|
|
|
resourceErrs: 1,
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldSpec, fieldName},
|
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: valid name
|
|
|
|
spec:
|
|
|
|
name: rucket_1
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2020-03-16 23:45:25 +00:00
|
|
|
name: "spec name too short",
|
2020-03-16 18:25:39 +00:00
|
|
|
resourceErrs: 1,
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldSpec, fieldName},
|
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: invalid name
|
|
|
|
spec:
|
|
|
|
name: f
|
2019-11-06 22:41:06 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
2019-10-23 17:09:04 +00:00
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
for _, tt := range tests {
|
2019-11-08 19:33:41 +00:00
|
|
|
testPkgErrors(t, KindBucket, tt)
|
2019-11-06 22:41:06 +00:00
|
|
|
}
|
2019-10-23 17:09:04 +00:00
|
|
|
})
|
|
|
|
})
|
2019-10-24 23:59:01 +00:00
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
t.Run("pkg with a label", func(t *testing.T) {
|
2019-10-24 23:59:01 +00:00
|
|
|
t.Run("with valid label pkg should be valid", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/label", func(t *testing.T, pkg *Pkg) {
|
2020-03-16 22:17:24 +00:00
|
|
|
labels := pkg.Summary().Labels
|
|
|
|
require.Len(t, labels, 3)
|
2019-10-24 23:59:01 +00:00
|
|
|
|
2020-03-16 22:17:24 +00:00
|
|
|
expectedLabel0 := SummaryLabel{
|
|
|
|
Name: "display name",
|
|
|
|
Properties: struct {
|
|
|
|
Color string `json:"color"`
|
|
|
|
Description string `json:"description"`
|
|
|
|
}{
|
|
|
|
Description: "label 3 description",
|
|
|
|
},
|
2019-10-24 23:59:01 +00:00
|
|
|
}
|
2020-03-16 22:17:24 +00:00
|
|
|
assert.Equal(t, expectedLabel0, labels[0])
|
2019-10-24 23:59:01 +00:00
|
|
|
|
2020-03-16 22:17:24 +00:00
|
|
|
expectedLabel1 := SummaryLabel{
|
|
|
|
Name: "label_1",
|
|
|
|
Properties: struct {
|
|
|
|
Color string `json:"color"`
|
|
|
|
Description string `json:"description"`
|
|
|
|
}{
|
|
|
|
Color: "#FFFFFF",
|
|
|
|
Description: "label 1 description",
|
|
|
|
},
|
2019-10-24 23:59:01 +00:00
|
|
|
}
|
2020-03-16 22:17:24 +00:00
|
|
|
assert.Equal(t, expectedLabel1, labels[1])
|
|
|
|
|
|
|
|
expectedLabel2 := SummaryLabel{
|
|
|
|
Name: "label_2",
|
|
|
|
Properties: struct {
|
|
|
|
Color string `json:"color"`
|
|
|
|
Description string `json:"description"`
|
|
|
|
}{
|
|
|
|
Color: "#000000",
|
|
|
|
Description: "label 2 description",
|
|
|
|
},
|
|
|
|
}
|
|
|
|
assert.Equal(t, expectedLabel2, labels[2])
|
2019-10-24 23:59:01 +00:00
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("with missing label name should error", func(t *testing.T) {
|
2019-11-06 22:41:06 +00:00
|
|
|
tests := []testPkgResourceError{
|
2019-10-24 23:59:01 +00:00
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "missing name",
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
2019-10-24 23:59:01 +00:00
|
|
|
spec:
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "mixed valid and missing name",
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: valid name
|
|
|
|
spec:
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: a
|
2019-10-24 23:59:01 +00:00
|
|
|
spec:
|
2020-03-16 22:17:24 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "duplicate names",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: valid name
|
|
|
|
spec:
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: valid name
|
|
|
|
spec:
|
2019-10-24 23:59:01 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "multiple labels with missing name",
|
|
|
|
resourceErrs: 2,
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
|
2020-03-16 22:17:24 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2020-03-16 23:45:25 +00:00
|
|
|
name: "duplicate meta name and spec name",
|
2020-03-16 22:17:24 +00:00
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldSpec, fieldName},
|
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: valid name
|
|
|
|
spec:
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
spec:
|
|
|
|
name: valid name
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2020-03-16 23:45:25 +00:00
|
|
|
name: "spec name to short",
|
2020-03-16 22:17:24 +00:00
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldSpec, fieldName},
|
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: valid name
|
|
|
|
spec:
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
spec:
|
|
|
|
name: a
|
2019-10-24 23:59:01 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
2019-11-08 19:33:41 +00:00
|
|
|
testPkgErrors(t, KindLabel, tt)
|
2019-10-24 23:59:01 +00:00
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
2019-10-26 02:11:47 +00:00
|
|
|
|
|
|
|
t.Run("pkg with buckets and labels associated", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/bucket_associates_label", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Labels, 2)
|
2019-10-26 02:11:47 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
bkts := sum.Buckets
|
|
|
|
require.Len(t, bkts, 3)
|
2019-10-26 02:11:47 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
expectedLabels := []struct {
|
|
|
|
bktName string
|
|
|
|
labels []string
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
bktName: "rucket_1",
|
|
|
|
labels: []string{"label_1"},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
bktName: "rucket_2",
|
|
|
|
labels: []string{"label_2"},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
bktName: "rucket_3",
|
|
|
|
labels: []string{"label_1", "label_2"},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
for i, expected := range expectedLabels {
|
|
|
|
bkt := bkts[i]
|
|
|
|
require.Len(t, bkt.LabelAssociations, len(expected.labels))
|
2019-10-26 02:11:47 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
for j, label := range expected.labels {
|
|
|
|
assert.Equal(t, label, bkt.LabelAssociations[j].Name)
|
|
|
|
}
|
2019-10-26 02:11:47 +00:00
|
|
|
}
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
expectedMappings := []SummaryLabelMapping{
|
|
|
|
{
|
|
|
|
ResourceName: "rucket_1",
|
|
|
|
LabelName: "label_1",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
ResourceName: "rucket_2",
|
|
|
|
LabelName: "label_2",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
ResourceName: "rucket_3",
|
|
|
|
LabelName: "label_1",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
ResourceName: "rucket_3",
|
|
|
|
LabelName: "label_2",
|
|
|
|
},
|
|
|
|
}
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
require.Len(t, sum.LabelMappings, len(expectedMappings))
|
|
|
|
for i, expected := range expectedMappings {
|
|
|
|
expected.ResourceType = influxdb.BucketsResourceType
|
|
|
|
assert.Equal(t, expected, sum.LabelMappings[i])
|
|
|
|
}
|
|
|
|
})
|
2019-10-26 02:11:47 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("association doesn't exist then provides an error", func(t *testing.T) {
|
2019-11-06 22:41:06 +00:00
|
|
|
tests := []testPkgResourceError{
|
2019-10-26 02:11:47 +00:00
|
|
|
{
|
|
|
|
name: "no labels provided",
|
2019-11-06 22:41:06 +00:00
|
|
|
assErrs: 1,
|
|
|
|
assIdxs: []int{0},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_1
|
2019-10-26 02:11:47 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-10-26 02:11:47 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "mixed found and not found",
|
2019-11-06 22:41:06 +00:00
|
|
|
assErrs: 1,
|
|
|
|
assIdxs: []int{1},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_3
|
|
|
|
spec:
|
|
|
|
associations:
|
2019-10-26 02:11:47 +00:00
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2020-01-13 19:13:37 +00:00
|
|
|
- kind: Label
|
|
|
|
name: NOT TO BE FOUND
|
2019-10-26 02:11:47 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "multiple not found",
|
2019-11-06 22:41:06 +00:00
|
|
|
assErrs: 1,
|
|
|
|
assIdxs: []int{0, 1},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_3
|
2019-10-26 02:11:47 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
associations:
|
2019-10-26 02:11:47 +00:00
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2020-01-13 19:13:37 +00:00
|
|
|
- kind: Label
|
|
|
|
name: label_2
|
2019-10-26 02:11:47 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "duplicate valid nested labels",
|
2019-11-06 22:41:06 +00:00
|
|
|
assErrs: 1,
|
|
|
|
assIdxs: []int{1},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_3
|
|
|
|
spec:
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-10-26 02:11:47 +00:00
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
2019-11-08 19:33:41 +00:00
|
|
|
testPkgErrors(t, KindBucket, tt)
|
2019-10-26 02:11:47 +00:00
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2019-12-18 01:57:44 +00:00
|
|
|
t.Run("pkg with checks", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/checks", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Checks, 2)
|
|
|
|
|
|
|
|
check1 := sum.Checks[0]
|
|
|
|
thresholdCheck, ok := check1.Check.(*icheck.Threshold)
|
|
|
|
require.Truef(t, ok, "got: %#v", check1)
|
|
|
|
|
|
|
|
expectedBase := icheck.Base{
|
|
|
|
Name: "check_0",
|
|
|
|
Description: "desc_0",
|
|
|
|
Every: mustDuration(t, time.Minute),
|
|
|
|
Offset: mustDuration(t, 15*time.Second),
|
|
|
|
StatusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }",
|
|
|
|
Tags: []influxdb.Tag{
|
|
|
|
{Key: "tag_1", Value: "val_1"},
|
|
|
|
{Key: "tag_2", Value: "val_2"},
|
2019-12-18 01:57:44 +00:00
|
|
|
},
|
2020-01-13 19:13:37 +00:00
|
|
|
}
|
|
|
|
expectedBase.Query.Text = "from(bucket: \"rucket_1\")\n |> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n |> filter(fn: (r) => r._measurement == \"cpu\")\n |> filter(fn: (r) => r._field == \"usage_idle\")\n |> aggregateWindow(every: 1m, fn: mean)\n |> yield(name: \"mean\")"
|
|
|
|
assert.Equal(t, expectedBase, thresholdCheck.Base)
|
|
|
|
|
|
|
|
expectedThresholds := []icheck.ThresholdConfig{
|
|
|
|
icheck.Greater{
|
|
|
|
ThresholdConfigBase: icheck.ThresholdConfigBase{
|
|
|
|
AllValues: true,
|
|
|
|
Level: notification.Critical,
|
|
|
|
},
|
|
|
|
Value: 50.0,
|
|
|
|
},
|
|
|
|
icheck.Lesser{
|
|
|
|
ThresholdConfigBase: icheck.ThresholdConfigBase{Level: notification.Warn},
|
|
|
|
Value: 49.9,
|
|
|
|
},
|
|
|
|
icheck.Range{
|
|
|
|
ThresholdConfigBase: icheck.ThresholdConfigBase{Level: notification.Info},
|
|
|
|
Within: true,
|
|
|
|
Min: 30.0,
|
|
|
|
Max: 45.0,
|
|
|
|
},
|
|
|
|
icheck.Range{
|
|
|
|
ThresholdConfigBase: icheck.ThresholdConfigBase{Level: notification.Ok},
|
|
|
|
Min: 30.0,
|
|
|
|
Max: 35.0,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
assert.Equal(t, expectedThresholds, thresholdCheck.Thresholds)
|
|
|
|
assert.Equal(t, influxdb.Inactive, check1.Status)
|
|
|
|
assert.Len(t, check1.LabelAssociations, 1)
|
|
|
|
|
|
|
|
check2 := sum.Checks[1]
|
|
|
|
deadmanCheck, ok := check2.Check.(*icheck.Deadman)
|
|
|
|
require.Truef(t, ok, "got: %#v", check2)
|
|
|
|
|
|
|
|
expectedBase = icheck.Base{
|
2020-03-16 23:45:25 +00:00
|
|
|
Name: "display name",
|
2020-01-13 19:13:37 +00:00
|
|
|
Description: "desc_1",
|
|
|
|
Every: mustDuration(t, 5*time.Minute),
|
|
|
|
Offset: mustDuration(t, 10*time.Second),
|
|
|
|
StatusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }",
|
|
|
|
Tags: []influxdb.Tag{
|
|
|
|
{Key: "tag_1", Value: "val_1"},
|
|
|
|
{Key: "tag_2", Value: "val_2"},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
expectedBase.Query.Text = "from(bucket: \"rucket_1\")\n |> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n |> filter(fn: (r) => r._measurement == \"cpu\")\n |> filter(fn: (r) => r._field == \"usage_idle\")\n |> aggregateWindow(every: 1m, fn: mean)\n |> yield(name: \"mean\")"
|
|
|
|
assert.Equal(t, expectedBase, deadmanCheck.Base)
|
|
|
|
assert.Equal(t, influxdb.Active, check2.Status)
|
|
|
|
assert.Equal(t, mustDuration(t, 10*time.Minute), deadmanCheck.StaleTime)
|
|
|
|
assert.Equal(t, mustDuration(t, 90*time.Second), deadmanCheck.TimeSince)
|
|
|
|
assert.True(t, deadmanCheck.ReportZero)
|
|
|
|
assert.Len(t, check2.LabelAssociations, 1)
|
|
|
|
|
|
|
|
containsLabelMappings(t, sum.LabelMappings,
|
|
|
|
labelMapping{
|
|
|
|
labelName: "label_1",
|
|
|
|
resName: "check_0",
|
|
|
|
resType: influxdb.ChecksResourceType,
|
|
|
|
},
|
|
|
|
labelMapping{
|
|
|
|
labelName: "label_1",
|
2020-03-16 23:45:25 +00:00
|
|
|
resName: "display name",
|
2020-01-13 19:13:37 +00:00
|
|
|
resType: influxdb.ChecksResourceType,
|
|
|
|
},
|
|
|
|
)
|
|
|
|
})
|
2019-12-18 01:57:44 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles bad config", func(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
kind Kind
|
|
|
|
resErr testPkgResourceError
|
|
|
|
}{
|
|
|
|
{
|
2020-01-13 19:13:37 +00:00
|
|
|
kind: KindCheckDeadman,
|
2019-12-18 01:57:44 +00:00
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "duplicate name",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckDeadman
|
|
|
|
metadata:
|
|
|
|
name: check_1
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 5m
|
|
|
|
level: cRiT
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckDeadman
|
|
|
|
metadata:
|
|
|
|
name: check_1
|
|
|
|
spec:
|
|
|
|
every: 5m
|
|
|
|
level: cRiT
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckThreshold,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing every duration",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldEvery},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckThreshold
|
|
|
|
metadata:
|
|
|
|
name: check_0
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
thresholds:
|
|
|
|
- type: outside_range
|
|
|
|
level: ok
|
|
|
|
min: 30.0
|
|
|
|
max: 35.0
|
|
|
|
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckThreshold,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "invalid threshold value provided",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldLevel},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckThreshold
|
|
|
|
metadata:
|
|
|
|
name: check_0
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 1m
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
thresholds:
|
|
|
|
- type: greater
|
|
|
|
level: RANDO
|
|
|
|
value: 50.0
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckThreshold,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "invalid threshold type provided",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldType},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckThreshold
|
|
|
|
metadata:
|
|
|
|
name: check_0
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 1m
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
thresholds:
|
|
|
|
- type: RANDO_TYPE
|
|
|
|
level: CRIT
|
|
|
|
value: 50.0
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckThreshold,
|
|
|
|
resErr: testPkgResourceError{
|
2020-01-13 19:13:37 +00:00
|
|
|
name: "invalid min for inside range",
|
2019-12-18 01:57:44 +00:00
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldMin},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckThreshold
|
|
|
|
metadata:
|
|
|
|
name: check_0
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 1m
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
thresholds:
|
|
|
|
- type: inside_range
|
|
|
|
level: INfO
|
|
|
|
min: 45.0
|
|
|
|
max: 30.0
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckThreshold,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "no threshold values provided",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldCheckThresholds},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckThreshold
|
|
|
|
metadata:
|
|
|
|
name: check_0
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 1m
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
thresholds:
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckThreshold,
|
|
|
|
resErr: testPkgResourceError{
|
2020-01-13 19:13:37 +00:00
|
|
|
name: "threshold missing query",
|
2019-12-18 01:57:44 +00:00
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldQuery},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckThreshold
|
|
|
|
metadata:
|
|
|
|
name: check_0
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 1m
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
thresholds:
|
|
|
|
- type: greater
|
|
|
|
level: CRIT
|
|
|
|
value: 50.0
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckThreshold,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "invalid status provided",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldStatus},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckThreshold
|
|
|
|
metadata:
|
|
|
|
name: check_0
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 1m
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
status: RANDO STATUS
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
thresholds:
|
|
|
|
- type: greater
|
|
|
|
level: CRIT
|
|
|
|
value: 50.0
|
|
|
|
allValues: true
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckThreshold,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing status message template",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldCheckStatusMessageTemplate},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckThreshold
|
|
|
|
metadata:
|
|
|
|
name: check_0
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 1m
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1")
|
|
|
|
thresholds:
|
|
|
|
- type: greater
|
|
|
|
level: CRIT
|
|
|
|
value: 50.0
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckDeadman,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing every",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldEvery},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckDeadman
|
|
|
|
metadata:
|
|
|
|
name: check_1
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
level: cRiT
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
timeSince: 90s
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckDeadman,
|
|
|
|
resErr: testPkgResourceError{
|
2020-01-13 19:13:37 +00:00
|
|
|
name: "deadman missing every",
|
2019-12-18 01:57:44 +00:00
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldQuery},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckDeadman
|
|
|
|
metadata:
|
|
|
|
name: check_1
|
2019-12-18 01:57:44 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 5m
|
|
|
|
level: cRiT
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
timeSince: 90s
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckDeadman,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing association label",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldAssociations},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckDeadman
|
|
|
|
metadata:
|
|
|
|
name: check_1
|
|
|
|
spec:
|
|
|
|
every: 5m
|
|
|
|
level: cRiT
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
timeSince: 90s
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckDeadman,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "duplicate association labels",
|
|
|
|
validationErrs: 1,
|
2020-03-16 23:45:25 +00:00
|
|
|
valFields: []string{fieldSpec, fieldAssociations},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckDeadman
|
|
|
|
metadata:
|
|
|
|
name: check_1
|
|
|
|
spec:
|
|
|
|
every: 5m
|
|
|
|
level: cRiT
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
timeSince: 90s
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-12-18 01:57:44 +00:00
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2020-03-16 23:45:25 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindCheckDeadman,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "duplicate meta name and spec name",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldSpec, fieldAssociations},
|
|
|
|
pkgStr: `
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckDeadman
|
|
|
|
metadata:
|
|
|
|
name: check_1
|
|
|
|
spec:
|
|
|
|
every: 5m
|
|
|
|
level: cRiT
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
timeSince: 90s
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: CheckDeadman
|
|
|
|
metadata:
|
|
|
|
name: valid name
|
|
|
|
spec:
|
|
|
|
name: check_1
|
|
|
|
every: 5m
|
|
|
|
level: cRiT
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }"
|
|
|
|
timeSince: 90s
|
2019-12-18 01:57:44 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, tt.kind, tt.resErr)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2019-11-01 18:11:42 +00:00
|
|
|
t.Run("pkg with single dashboard and single chart", func(t *testing.T) {
|
2020-03-04 19:11:55 +00:00
|
|
|
t.Run("gauge chart", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/dashboard_gauge", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
2019-11-12 18:06:53 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dash_1", actual.Name)
|
|
|
|
assert.Equal(t, "desc1", actual.Description)
|
|
|
|
|
|
|
|
require.Len(t, actual.Charts, 1)
|
|
|
|
actualChart := actual.Charts[0]
|
|
|
|
assert.Equal(t, 3, actualChart.Height)
|
|
|
|
assert.Equal(t, 6, actualChart.Width)
|
|
|
|
assert.Equal(t, 1, actualChart.XPosition)
|
|
|
|
assert.Equal(t, 2, actualChart.YPosition)
|
|
|
|
|
|
|
|
props, ok := actualChart.Properties.(influxdb.GaugeViewProperties)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, "gauge", props.GetType())
|
|
|
|
assert.Equal(t, "gauge note", props.Note)
|
|
|
|
assert.True(t, props.ShowNoteWhenEmpty)
|
|
|
|
|
|
|
|
require.Len(t, props.Queries, 1)
|
|
|
|
q := props.Queries[0]
|
|
|
|
queryText := `from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")`
|
|
|
|
assert.Equal(t, queryText, q.Text)
|
|
|
|
assert.Equal(t, "advanced", q.EditMode)
|
|
|
|
|
|
|
|
require.Len(t, props.ViewColors, 3)
|
|
|
|
c := props.ViewColors[0]
|
|
|
|
assert.Equal(t, "laser", c.Name)
|
|
|
|
assert.Equal(t, "min", c.Type)
|
|
|
|
assert.Equal(t, "#8F8AF4", c.Hex)
|
|
|
|
assert.Equal(t, 0.0, c.Value)
|
|
|
|
})
|
2019-11-12 18:06:53 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles invalid config", func(t *testing.T) {
|
|
|
|
tests := []testPkgResourceError{
|
|
|
|
{
|
|
|
|
name: "color mixing a hex value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].colors[0].hex"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: gauge
|
|
|
|
name: gauge
|
|
|
|
note: gauge note
|
|
|
|
noteOnEmpty: true
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: min
|
|
|
|
value: 0
|
|
|
|
- name: laser
|
|
|
|
type: threshold
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 700
|
|
|
|
- name: laser
|
|
|
|
type: max
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 5000
|
2019-11-12 18:06:53 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "missing a query value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries[0].query"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: gauge
|
|
|
|
name: gauge
|
|
|
|
note: gauge note
|
|
|
|
noteOnEmpty: true
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query:
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: min
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 0
|
|
|
|
- name: laser
|
|
|
|
type: threshold
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 700
|
|
|
|
- name: laser
|
|
|
|
type: max
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 5000
|
2019-11-12 20:09:13 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, KindDashboard, tt)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2020-03-04 19:11:55 +00:00
|
|
|
t.Run("heatmap chart", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/dashboard_heatmap", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
2019-11-12 20:09:13 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dashboard w/ single heatmap chart", actual.Name)
|
|
|
|
assert.Equal(t, "a dashboard w/ heatmap chart", actual.Description)
|
|
|
|
|
|
|
|
require.Len(t, actual.Charts, 1)
|
|
|
|
actualChart := actual.Charts[0]
|
|
|
|
assert.Equal(t, 3, actualChart.Height)
|
|
|
|
assert.Equal(t, 6, actualChart.Width)
|
|
|
|
assert.Equal(t, 1, actualChart.XPosition)
|
|
|
|
assert.Equal(t, 2, actualChart.YPosition)
|
|
|
|
|
|
|
|
props, ok := actualChart.Properties.(influxdb.HeatmapViewProperties)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, "heatmap", props.GetType())
|
|
|
|
assert.Equal(t, "heatmap note", props.Note)
|
|
|
|
assert.Equal(t, int32(10), props.BinSize)
|
|
|
|
assert.True(t, props.ShowNoteWhenEmpty)
|
|
|
|
|
|
|
|
assert.Equal(t, []float64{0, 10}, props.XDomain)
|
|
|
|
assert.Equal(t, []float64{0, 100}, props.YDomain)
|
|
|
|
|
|
|
|
require.Len(t, props.Queries, 1)
|
|
|
|
q := props.Queries[0]
|
|
|
|
queryText := `from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")`
|
|
|
|
assert.Equal(t, queryText, q.Text)
|
|
|
|
assert.Equal(t, "advanced", q.EditMode)
|
|
|
|
|
|
|
|
require.Len(t, props.ViewColors, 12)
|
|
|
|
c := props.ViewColors[0]
|
|
|
|
assert.Equal(t, "#000004", c)
|
|
|
|
})
|
2019-11-12 20:09:13 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles invalid config", func(t *testing.T) {
|
|
|
|
tests := []testPkgResourceError{
|
|
|
|
{
|
|
|
|
name: "a color is missing a hex value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].colors[2].hex"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single heatmap chart
|
|
|
|
spec:
|
|
|
|
charts:
|
|
|
|
- kind: heatmap
|
|
|
|
name: heatmap
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
binSize: 10
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- hex: "#fbb61a"
|
|
|
|
- hex: "#f4df53"
|
|
|
|
- hex: ""
|
|
|
|
axes:
|
|
|
|
- name: "x"
|
|
|
|
label: "x_label"
|
|
|
|
prefix: "x_prefix"
|
|
|
|
suffix: "x_suffix"
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 10
|
|
|
|
- name: "y"
|
|
|
|
label: "y_label"
|
|
|
|
prefix: "y_prefix"
|
|
|
|
suffix: "y_suffix"
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 100
|
|
|
|
`,
|
2019-11-12 20:09:13 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "missing axes",
|
|
|
|
validationErrs: 1,
|
2020-01-13 19:13:37 +00:00
|
|
|
valFields: []string{"charts[0].axes"},
|
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single heatmap chart
|
|
|
|
spec:
|
|
|
|
charts:
|
|
|
|
- kind: heatmap
|
|
|
|
name: heatmap
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
binSize: 10
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- hex: "#000004"
|
|
|
|
`,
|
2019-11-12 20:09:13 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "missing a query value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries[0].query"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single heatmap chart
|
|
|
|
spec:
|
|
|
|
charts:
|
|
|
|
- kind: heatmap
|
|
|
|
name: heatmap
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
binSize: 10
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
queries:
|
|
|
|
- query:
|
|
|
|
colors:
|
|
|
|
- hex: "#000004"
|
|
|
|
axes:
|
|
|
|
- name: "x"
|
|
|
|
label: "x_label"
|
|
|
|
prefix: "x_prefix"
|
|
|
|
suffix: "x_suffix"
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 10
|
|
|
|
- name: "y"
|
|
|
|
label: "y_label"
|
|
|
|
prefix: "y_prefix"
|
|
|
|
suffix: "y_suffix"
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 100
|
2019-11-16 20:14:46 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, KindDashboard, tt)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2020-03-04 19:11:55 +00:00
|
|
|
t.Run("histogram chart", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/dashboard_histogram", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
2019-11-16 20:14:46 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dashboard w/ single histogram chart", actual.Name)
|
|
|
|
assert.Equal(t, "a dashboard w/ single histogram chart", actual.Description)
|
|
|
|
|
|
|
|
require.Len(t, actual.Charts, 1)
|
|
|
|
actualChart := actual.Charts[0]
|
|
|
|
assert.Equal(t, 3, actualChart.Height)
|
|
|
|
assert.Equal(t, 6, actualChart.Width)
|
|
|
|
|
|
|
|
props, ok := actualChart.Properties.(influxdb.HistogramViewProperties)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, "histogram", props.GetType())
|
|
|
|
assert.Equal(t, "histogram note", props.Note)
|
|
|
|
assert.Equal(t, 30, props.BinCount)
|
|
|
|
assert.True(t, props.ShowNoteWhenEmpty)
|
|
|
|
assert.Equal(t, []float64{0, 10}, props.XDomain)
|
|
|
|
assert.Equal(t, []string{}, props.FillColumns)
|
|
|
|
require.Len(t, props.Queries, 1)
|
|
|
|
q := props.Queries[0]
|
|
|
|
queryText := `from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_reads_total") |> filter(fn: (r) => r._field == "counter")`
|
|
|
|
assert.Equal(t, queryText, q.Text)
|
|
|
|
assert.Equal(t, "advanced", q.EditMode)
|
|
|
|
|
|
|
|
require.Len(t, props.ViewColors, 3)
|
|
|
|
assert.Equal(t, "#8F8AF4", props.ViewColors[0].Hex)
|
|
|
|
assert.Equal(t, "#F4CF31", props.ViewColors[1].Hex)
|
|
|
|
assert.Equal(t, "#FFFFFF", props.ViewColors[2].Hex)
|
|
|
|
})
|
2019-11-16 20:14:46 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles invalid config", func(t *testing.T) {
|
|
|
|
tests := []testPkgResourceError{
|
|
|
|
{
|
|
|
|
name: "missing x-axis",
|
|
|
|
validationErrs: 1,
|
2020-01-13 19:13:37 +00:00
|
|
|
valFields: []string{"charts[0].axes"},
|
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single histogram chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single histogram chart
|
|
|
|
charts:
|
|
|
|
- kind: Histogram
|
|
|
|
name: histogram chart
|
|
|
|
xCol: _value
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
binCount: 30
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_reads_total") |> filter(fn: (r) => r._field == "counter")
|
|
|
|
colors:
|
|
|
|
- hex: "#8F8AF4"
|
|
|
|
type: scale
|
|
|
|
value: 0
|
|
|
|
name: mycolor
|
|
|
|
axes:
|
|
|
|
`,
|
2019-11-16 20:14:46 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "missing a query value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries[0].query"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single histogram chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single histogram chart
|
|
|
|
charts:
|
|
|
|
- kind: Histogram
|
|
|
|
name: histogram chart
|
|
|
|
xCol: _value
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
binCount: 30
|
|
|
|
queries:
|
|
|
|
- query:
|
|
|
|
colors:
|
|
|
|
- hex: "#8F8AF4"
|
|
|
|
type: scale
|
|
|
|
value: 0
|
|
|
|
name: mycolor
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 10
|
2019-11-12 18:06:53 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, KindDashboard, tt)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2020-03-04 19:11:55 +00:00
|
|
|
t.Run("markdown chart", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/dashboard_markdown", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
2019-11-13 21:30:52 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dashboard w/ single markdown chart", actual.Name)
|
|
|
|
assert.Equal(t, "a dashboard w/ single markdown chart", actual.Description)
|
2019-11-13 21:30:52 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
require.Len(t, actual.Charts, 1)
|
|
|
|
actualChart := actual.Charts[0]
|
2019-11-13 21:30:52 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
props, ok := actualChart.Properties.(influxdb.MarkdownViewProperties)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, "markdown", props.GetType())
|
|
|
|
assert.Equal(t, "## markdown note", props.Note)
|
|
|
|
})
|
2019-11-13 21:30:52 +00:00
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2020-03-04 19:11:55 +00:00
|
|
|
t.Run("scatter chart", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/dashboard_scatter", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
2019-11-12 18:06:53 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dashboard w/ single scatter chart", actual.Name)
|
|
|
|
assert.Equal(t, "a dashboard w/ single scatter chart", actual.Description)
|
|
|
|
|
|
|
|
require.Len(t, actual.Charts, 1)
|
|
|
|
actualChart := actual.Charts[0]
|
|
|
|
assert.Equal(t, 3, actualChart.Height)
|
|
|
|
assert.Equal(t, 6, actualChart.Width)
|
|
|
|
assert.Equal(t, 1, actualChart.XPosition)
|
|
|
|
assert.Equal(t, 2, actualChart.YPosition)
|
|
|
|
|
|
|
|
props, ok := actualChart.Properties.(influxdb.ScatterViewProperties)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, "scatter note", props.Note)
|
|
|
|
assert.True(t, props.ShowNoteWhenEmpty)
|
|
|
|
|
|
|
|
require.Len(t, props.Queries, 1)
|
|
|
|
q := props.Queries[0]
|
|
|
|
expectedQuery := `from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")`
|
|
|
|
assert.Equal(t, expectedQuery, q.Text)
|
|
|
|
assert.Equal(t, "advanced", q.EditMode)
|
|
|
|
|
|
|
|
assert.Equal(t, []float64{0, 10}, props.XDomain)
|
|
|
|
assert.Equal(t, []float64{0, 100}, props.YDomain)
|
|
|
|
assert.Equal(t, "x_label", props.XAxisLabel)
|
|
|
|
assert.Equal(t, "y_label", props.YAxisLabel)
|
|
|
|
assert.Equal(t, "x_prefix", props.XPrefix)
|
|
|
|
assert.Equal(t, "y_prefix", props.YPrefix)
|
|
|
|
assert.Equal(t, "x_suffix", props.XSuffix)
|
|
|
|
assert.Equal(t, "y_suffix", props.YSuffix)
|
|
|
|
})
|
2019-11-12 18:06:53 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles invalid config", func(t *testing.T) {
|
|
|
|
tests := []testPkgResourceError{
|
|
|
|
{
|
|
|
|
name: "missing axes",
|
|
|
|
validationErrs: 1,
|
2020-01-13 19:13:37 +00:00
|
|
|
valFields: []string{"charts[0].axes"},
|
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single scatter chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single scatter chart
|
|
|
|
charts:
|
|
|
|
- kind: Scatter
|
|
|
|
name: scatter chart
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- hex: "#8F8AF4"
|
|
|
|
- hex: "#F4CF31"
|
|
|
|
`,
|
2019-11-12 18:06:53 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "missing query value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries[0].query"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single scatter chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single scatter chart
|
|
|
|
charts:
|
|
|
|
- kind: Scatter
|
|
|
|
name: scatter chart
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query:
|
|
|
|
colors:
|
|
|
|
- hex: "#8F8AF4"
|
|
|
|
- hex: "#F4CF31"
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 10
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 100
|
|
|
|
`,
|
2019-11-12 18:06:53 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "no queries provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single scatter chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single scatter chart
|
|
|
|
charts:
|
|
|
|
- kind: Scatter
|
|
|
|
name: scatter chart
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
colors:
|
|
|
|
- hex: "#8F8AF4"
|
|
|
|
- hex: "#F4CF31"
|
|
|
|
- hex: "#FFFFFF"
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 10
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 100
|
|
|
|
`,
|
2019-11-12 18:06:53 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "no width provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].width"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single scatter chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single scatter chart
|
|
|
|
charts:
|
|
|
|
- kind: Scatter
|
|
|
|
name: scatter chart
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- hex: "#8F8AF4"
|
|
|
|
- hex: "#F4CF31"
|
|
|
|
- hex: "#FFFFFF"
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 10
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 100
|
|
|
|
`,
|
2019-11-12 18:06:53 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "no height provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].height"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single scatter chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single scatter chart
|
|
|
|
charts:
|
|
|
|
- kind: Scatter
|
|
|
|
name: scatter chart
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
width: 6
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- hex: "#8F8AF4"
|
|
|
|
- hex: "#F4CF31"
|
|
|
|
- hex: "#FFFFFF"
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 10
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 100
|
2019-11-12 18:06:53 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "missing hex color",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].colors[0].hex"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single scatter chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single scatter chart
|
|
|
|
charts:
|
|
|
|
- kind: Scatter
|
|
|
|
name: scatter chart
|
|
|
|
note: scatter note
|
|
|
|
noteOnEmpty: true
|
|
|
|
prefix: sumtin
|
|
|
|
suffix: days
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- hex: ""
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 10
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 100
|
2019-11-12 18:06:53 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "missing x axis",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].axes"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single scatter chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single scatter chart
|
|
|
|
charts:
|
|
|
|
- kind: Scatter
|
|
|
|
name: scatter chart
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- hex: "#8F8AF4"
|
|
|
|
- hex: "#F4CF31"
|
|
|
|
- hex: "#FFFFFF"
|
|
|
|
axes:
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 100
|
|
|
|
`,
|
2019-11-12 18:06:53 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "missing y axis",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].axes"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dashboard w/ single scatter chart
|
|
|
|
spec:
|
|
|
|
description: a dashboard w/ single scatter chart
|
|
|
|
charts:
|
|
|
|
- kind: Scatter
|
|
|
|
name: scatter chart
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
xCol: _time
|
|
|
|
yCol: _value
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- hex: "#8F8AF4"
|
|
|
|
- hex: "#F4CF31"
|
|
|
|
- hex: "#FFFFFF"
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
domain:
|
|
|
|
- 0
|
|
|
|
- 10
|
2019-11-12 18:06:53 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, KindDashboard, tt)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2019-11-01 18:11:42 +00:00
|
|
|
t.Run("single stat chart", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/dashboard", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
2019-11-01 18:11:42 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dash_1", actual.Name)
|
|
|
|
assert.Equal(t, "desc1", actual.Description)
|
|
|
|
|
|
|
|
require.Len(t, actual.Charts, 1)
|
|
|
|
actualChart := actual.Charts[0]
|
|
|
|
assert.Equal(t, 3, actualChart.Height)
|
|
|
|
assert.Equal(t, 6, actualChart.Width)
|
|
|
|
assert.Equal(t, 1, actualChart.XPosition)
|
|
|
|
assert.Equal(t, 2, actualChart.YPosition)
|
|
|
|
|
|
|
|
props, ok := actualChart.Properties.(influxdb.SingleStatViewProperties)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, "single-stat", props.GetType())
|
|
|
|
assert.Equal(t, "single stat note", props.Note)
|
|
|
|
assert.True(t, props.ShowNoteWhenEmpty)
|
|
|
|
assert.True(t, props.DecimalPlaces.IsEnforced)
|
|
|
|
assert.Equal(t, int32(1), props.DecimalPlaces.Digits)
|
|
|
|
assert.Equal(t, "days", props.Suffix)
|
2020-01-16 21:16:45 +00:00
|
|
|
assert.Equal(t, "true", props.TickSuffix)
|
2020-01-13 19:13:37 +00:00
|
|
|
assert.Equal(t, "sumtin", props.Prefix)
|
2020-01-16 21:16:45 +00:00
|
|
|
assert.Equal(t, "true", props.TickPrefix)
|
2020-01-13 19:13:37 +00:00
|
|
|
|
|
|
|
require.Len(t, props.Queries, 1)
|
|
|
|
q := props.Queries[0]
|
|
|
|
queryText := `from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "processes") |> filter(fn: (r) => r._field == "running" or r._field == "blocked") |> aggregateWindow(every: v.windowPeriod, fn: max) |> yield(name: "max")`
|
|
|
|
assert.Equal(t, queryText, q.Text)
|
|
|
|
assert.Equal(t, "advanced", q.EditMode)
|
|
|
|
|
|
|
|
require.Len(t, props.ViewColors, 1)
|
|
|
|
c := props.ViewColors[0]
|
|
|
|
assert.Equal(t, "laser", c.Name)
|
|
|
|
assert.Equal(t, "text", c.Type)
|
|
|
|
assert.Equal(t, "#8F8AF4", c.Hex)
|
|
|
|
assert.Equal(t, 3.0, c.Value)
|
|
|
|
})
|
2019-11-01 18:11:42 +00:00
|
|
|
})
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2019-11-01 18:11:42 +00:00
|
|
|
t.Run("handles invalid config", func(t *testing.T) {
|
2019-11-06 22:41:06 +00:00
|
|
|
tests := []testPkgResourceError{
|
2019-11-01 18:11:42 +00:00
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "color missing hex value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].colors[0].hex"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat
|
|
|
|
name: single stat
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
decimalPlaces: 1
|
|
|
|
shade: true
|
|
|
|
queries:
|
|
|
|
- query: "from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == \"processes\") |> filter(fn: (r) => r._field == \"running\" or r._field == \"blocked\") |> aggregateWindow(every: v.windowPeriod, fn: max) |> yield(name: \"max\")"
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
value: 3
|
2019-11-01 18:11:42 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "query missing text value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries[0].query"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat
|
|
|
|
name: single stat
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query:
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
2019-11-01 18:11:42 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "no queries provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat
|
|
|
|
name: single stat
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
2019-11-01 18:11:42 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "no width provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].width"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat
|
|
|
|
name: single stat
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query: "from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == \"processes\") |> filter(fn: (r) => r._field == \"running\" or r._field == \"blocked\") |> aggregateWindow(every: v.windowPeriod, fn: max) |> yield(name: \"max\")"
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
2019-11-01 18:11:42 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "no height provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].height"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat
|
|
|
|
name: single stat
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 3
|
|
|
|
queries:
|
|
|
|
- query: "from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == \"processes\") |> filter(fn: (r) => r._field == \"running\" or r._field == \"blocked\") |> aggregateWindow(every: v.windowPeriod, fn: max) |> yield(name: \"max\")"
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
2019-11-01 18:11:42 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
2019-11-08 19:33:41 +00:00
|
|
|
testPkgErrors(t, KindDashboard, tt)
|
2019-11-04 19:16:32 +00:00
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("single stat plus line chart", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/dashboard_single_stat_plus_line", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
2019-11-04 19:16:32 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dash_1", actual.Name)
|
|
|
|
assert.Equal(t, "desc1", actual.Description)
|
|
|
|
|
|
|
|
require.Len(t, actual.Charts, 1)
|
|
|
|
actualChart := actual.Charts[0]
|
|
|
|
assert.Equal(t, 3, actualChart.Height)
|
|
|
|
assert.Equal(t, 6, actualChart.Width)
|
|
|
|
assert.Equal(t, 1, actualChart.XPosition)
|
|
|
|
assert.Equal(t, 2, actualChart.YPosition)
|
|
|
|
|
|
|
|
props, ok := actualChart.Properties.(influxdb.LinePlusSingleStatProperties)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, "single stat plus line note", props.Note)
|
|
|
|
assert.True(t, props.ShowNoteWhenEmpty)
|
|
|
|
assert.True(t, props.DecimalPlaces.IsEnforced)
|
|
|
|
assert.Equal(t, int32(1), props.DecimalPlaces.Digits)
|
|
|
|
assert.Equal(t, "days", props.Suffix)
|
|
|
|
assert.Equal(t, "sumtin", props.Prefix)
|
|
|
|
assert.Equal(t, "overlaid", props.Position)
|
|
|
|
assert.Equal(t, "leg_type", props.Legend.Type)
|
|
|
|
assert.Equal(t, "horizontal", props.Legend.Orientation)
|
|
|
|
|
|
|
|
require.Len(t, props.Queries, 1)
|
|
|
|
q := props.Queries[0]
|
|
|
|
expectedQuery := `from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")`
|
|
|
|
assert.Equal(t, expectedQuery, q.Text)
|
|
|
|
assert.Equal(t, "advanced", q.EditMode)
|
|
|
|
|
|
|
|
for _, key := range []string{"x", "y"} {
|
|
|
|
xAxis, ok := props.Axes[key]
|
|
|
|
require.True(t, ok, "key="+key)
|
|
|
|
assert.Equal(t, "10", xAxis.Base, "key="+key)
|
|
|
|
assert.Equal(t, key+"_label", xAxis.Label, "key="+key)
|
|
|
|
assert.Equal(t, key+"_prefix", xAxis.Prefix, "key="+key)
|
|
|
|
assert.Equal(t, "linear", xAxis.Scale, "key="+key)
|
|
|
|
assert.Equal(t, key+"_suffix", xAxis.Suffix, "key="+key)
|
|
|
|
}
|
2019-11-04 19:16:32 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
require.Len(t, props.ViewColors, 2)
|
|
|
|
c := props.ViewColors[0]
|
|
|
|
assert.Equal(t, "laser", c.Name)
|
|
|
|
assert.Equal(t, "text", c.Type)
|
|
|
|
assert.Equal(t, "#8F8AF4", c.Hex)
|
|
|
|
assert.Equal(t, 3.0, c.Value)
|
|
|
|
|
|
|
|
c = props.ViewColors[1]
|
|
|
|
assert.Equal(t, "android", c.Name)
|
|
|
|
assert.Equal(t, "scale", c.Type)
|
|
|
|
assert.Equal(t, "#F4CF31", c.Hex)
|
|
|
|
assert.Equal(t, 1.0, c.Value)
|
|
|
|
})
|
2019-11-04 19:16:32 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles invalid config", func(t *testing.T) {
|
2019-11-06 22:41:06 +00:00
|
|
|
tests := []testPkgResourceError{
|
2019-11-04 19:16:32 +00:00
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "color missing hex value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].colors[0].hex"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat_Plus_Line
|
|
|
|
name: single stat plus line
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
position: overlaid
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
value: 3
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
2019-11-04 19:16:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "missing query value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries[0].query"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat_Plus_Line
|
|
|
|
name: single stat plus line
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
position: overlaid
|
|
|
|
queries:
|
|
|
|
- query:
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 3
|
|
|
|
- name: android
|
|
|
|
type: scale
|
|
|
|
hex: "#F4CF31"
|
|
|
|
value: 1
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
2019-11-04 19:16:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "no queries provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat_Plus_Line
|
|
|
|
name: single stat plus line
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
position: overlaid
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 3
|
|
|
|
- name: android
|
|
|
|
type: scale
|
|
|
|
hex: "#F4CF31"
|
|
|
|
value: 1
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
`,
|
2019-11-04 19:16:32 +00:00
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "no width provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].width"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat_Plus_Line
|
|
|
|
name: single stat plus line
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
height: 3
|
|
|
|
shade: true
|
|
|
|
position: overlaid
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 3
|
|
|
|
- name: android
|
|
|
|
type: scale
|
|
|
|
hex: "#F4CF31"
|
|
|
|
value: 1
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
`,
|
2019-11-04 19:16:32 +00:00
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "no height provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].height"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat_Plus_Line
|
|
|
|
name: single stat plus line
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
position: overlaid
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 3
|
|
|
|
- name: android
|
|
|
|
type: scale
|
|
|
|
hex: "#F4CF31"
|
|
|
|
value: 1
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
`,
|
2019-11-04 19:16:32 +00:00
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "missing x axis",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].axes"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat_Plus_Line
|
|
|
|
name: single stat plus line
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
position: overlaid
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 3
|
|
|
|
- name: android
|
|
|
|
type: scale
|
|
|
|
hex: "#F4CF31"
|
|
|
|
value: 1
|
|
|
|
axes:
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
`,
|
2019-11-04 19:16:32 +00:00
|
|
|
},
|
|
|
|
{
|
2019-11-06 22:41:06 +00:00
|
|
|
name: "missing y axis",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].axes"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Single_Stat_Plus_Line
|
|
|
|
name: single stat plus line
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
position: overlaid
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: text
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 3
|
|
|
|
- name: android
|
|
|
|
type: scale
|
|
|
|
hex: "#F4CF31"
|
|
|
|
value: 1
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
`,
|
2019-11-04 19:16:32 +00:00
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
2019-11-08 19:33:41 +00:00
|
|
|
testPkgErrors(t, KindDashboard, tt)
|
2019-11-01 18:11:42 +00:00
|
|
|
}
|
|
|
|
})
|
2019-10-30 21:13:42 +00:00
|
|
|
})
|
2019-11-05 19:19:25 +00:00
|
|
|
|
2020-03-04 19:11:55 +00:00
|
|
|
t.Run("table chart", func(t *testing.T) {
|
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/dashboard_table", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
|
|
|
|
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dash_1", actual.Name)
|
|
|
|
assert.Equal(t, "desc1", actual.Description)
|
|
|
|
|
|
|
|
require.Len(t, actual.Charts, 1)
|
|
|
|
actualChart := actual.Charts[0]
|
|
|
|
assert.Equal(t, 3, actualChart.Height)
|
|
|
|
assert.Equal(t, 6, actualChart.Width)
|
|
|
|
assert.Equal(t, 1, actualChart.XPosition)
|
|
|
|
assert.Equal(t, 2, actualChart.YPosition)
|
|
|
|
|
|
|
|
props, ok := actualChart.Properties.(influxdb.TableViewProperties)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, "table note", props.Note)
|
|
|
|
assert.True(t, props.ShowNoteWhenEmpty)
|
|
|
|
assert.True(t, props.DecimalPlaces.IsEnforced)
|
|
|
|
assert.Equal(t, int32(1), props.DecimalPlaces.Digits)
|
|
|
|
assert.Equal(t, "YYYY:MMMM:DD", props.TimeFormat)
|
|
|
|
|
|
|
|
require.Len(t, props.Queries, 1)
|
|
|
|
q := props.Queries[0]
|
|
|
|
expectedQuery := `from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")`
|
|
|
|
assert.Equal(t, expectedQuery, q.Text)
|
|
|
|
assert.Equal(t, "advanced", q.EditMode)
|
|
|
|
|
|
|
|
require.Len(t, props.ViewColors, 1)
|
|
|
|
c := props.ViewColors[0]
|
|
|
|
assert.Equal(t, "laser", c.Name)
|
|
|
|
assert.Equal(t, "min", c.Type)
|
|
|
|
assert.Equal(t, "#8F8AF4", c.Hex)
|
|
|
|
assert.Equal(t, 3.0, c.Value)
|
|
|
|
|
|
|
|
tableOpts := props.TableOptions
|
|
|
|
assert.True(t, tableOpts.VerticalTimeAxis)
|
|
|
|
assert.Equal(t, "_time", tableOpts.SortBy.InternalName)
|
|
|
|
assert.Equal(t, "truncate", tableOpts.Wrapping)
|
|
|
|
assert.True(t, tableOpts.FixFirstColumn)
|
|
|
|
|
|
|
|
require.Len(t, props.FieldOptions, 2)
|
|
|
|
expectedField := influxdb.RenamableField{
|
|
|
|
InternalName: "_time",
|
|
|
|
DisplayName: "time (ms)",
|
|
|
|
Visible: true,
|
|
|
|
}
|
|
|
|
assert.Equal(t, expectedField, props.FieldOptions[0])
|
|
|
|
expectedField = influxdb.RenamableField{
|
|
|
|
InternalName: "_value",
|
|
|
|
DisplayName: "MB",
|
|
|
|
Visible: true,
|
|
|
|
}
|
|
|
|
assert.Equal(t, expectedField, props.FieldOptions[1])
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles invalid config", func(t *testing.T) {
|
|
|
|
tests := []testPkgResourceError{
|
|
|
|
{
|
|
|
|
name: "color missing hex value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].colors[0].hex"},
|
|
|
|
pkgStr: `
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Table
|
|
|
|
name: table
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: min
|
|
|
|
hex:
|
|
|
|
value: 3.0`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "missing query value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries[0].query"},
|
|
|
|
pkgStr: `
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Table
|
|
|
|
name: table
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query:
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: min
|
|
|
|
hex: peru
|
|
|
|
value: 3.0`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "no queries provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].queries"},
|
|
|
|
pkgStr: `
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Table
|
|
|
|
name: table
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: min
|
|
|
|
hex: peru
|
|
|
|
value: 3.0`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "no width provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].width"},
|
|
|
|
pkgStr: `
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Table
|
|
|
|
name: table
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
height: 3
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: min
|
|
|
|
hex: peru
|
|
|
|
value: 3.0`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "no height provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].height"},
|
|
|
|
pkgStr: `
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Table
|
|
|
|
name: table
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: min
|
|
|
|
hex: peru
|
|
|
|
value: 3.0`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "invalid wrapping table option",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].tableOptions.wrapping"},
|
|
|
|
pkgStr: `
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: Table
|
|
|
|
name: table
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
tableOptions:
|
|
|
|
sortBy: _time
|
|
|
|
wrapping: WRONGO wrapping
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: min
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 3.0
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, KindDashboard, tt)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("xy chart", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
2019-11-06 22:41:06 +00:00
|
|
|
testfileRunner(t, "testdata/dashboard_xy", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
2019-11-05 19:19:25 +00:00
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dash_1", actual.Name)
|
|
|
|
assert.Equal(t, "desc1", actual.Description)
|
2019-11-05 19:19:25 +00:00
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
require.Len(t, actual.Charts, 1)
|
|
|
|
actualChart := actual.Charts[0]
|
|
|
|
assert.Equal(t, 3, actualChart.Height)
|
|
|
|
assert.Equal(t, 6, actualChart.Width)
|
|
|
|
assert.Equal(t, 1, actualChart.XPosition)
|
|
|
|
assert.Equal(t, 2, actualChart.YPosition)
|
2019-11-05 19:19:25 +00:00
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
props, ok := actualChart.Properties.(influxdb.XYViewProperties)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, "xy", props.GetType())
|
|
|
|
assert.Equal(t, true, props.ShadeBelow)
|
|
|
|
assert.Equal(t, "xy chart note", props.Note)
|
|
|
|
assert.True(t, props.ShowNoteWhenEmpty)
|
2019-12-03 22:59:07 +00:00
|
|
|
assert.Equal(t, "stacked", props.Position)
|
2019-11-06 22:41:06 +00:00
|
|
|
|
|
|
|
require.Len(t, props.Queries, 1)
|
|
|
|
q := props.Queries[0]
|
|
|
|
queryText := `from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")`
|
|
|
|
assert.Equal(t, queryText, q.Text)
|
|
|
|
assert.Equal(t, "advanced", q.EditMode)
|
|
|
|
|
|
|
|
require.Len(t, props.ViewColors, 1)
|
|
|
|
c := props.ViewColors[0]
|
|
|
|
assert.Equal(t, "laser", c.Name)
|
|
|
|
assert.Equal(t, "scale", c.Type)
|
|
|
|
assert.Equal(t, "#8F8AF4", c.Hex)
|
|
|
|
assert.Equal(t, 3.0, c.Value)
|
|
|
|
})
|
2019-11-05 19:19:25 +00:00
|
|
|
})
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
t.Run("handles invalid config", func(t *testing.T) {
|
|
|
|
tests := []testPkgResourceError{
|
|
|
|
{
|
|
|
|
name: "color missing hex value",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].colors[0].hex"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: XY
|
|
|
|
name: xy chart
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
geom: line
|
|
|
|
position: stacked
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: scale
|
|
|
|
value: 3
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
2019-11-05 19:19:25 +00:00
|
|
|
`,
|
2019-11-06 22:41:06 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "invalid geom flag",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"charts[0].geom"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
description: desc1
|
|
|
|
charts:
|
|
|
|
- kind: XY
|
|
|
|
name: xy chart
|
|
|
|
xPos: 1
|
|
|
|
yPos: 2
|
|
|
|
width: 6
|
|
|
|
height: 3
|
|
|
|
position: stacked
|
|
|
|
legend:
|
|
|
|
queries:
|
|
|
|
- query: >
|
|
|
|
from(bucket: v.bucket) |> range(start: v.timeRangeStart, stop: v.timeRangeStop) |> filter(fn: (r) => r._measurement == "boltdb_writes_total") |> filter(fn: (r) => r._field == "counter")
|
|
|
|
colors:
|
|
|
|
- name: laser
|
|
|
|
type: scale
|
|
|
|
hex: "#8F8AF4"
|
|
|
|
value: 3
|
|
|
|
axes:
|
|
|
|
- name : "x"
|
|
|
|
label: x_label
|
|
|
|
prefix: x_prefix
|
|
|
|
suffix: x_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
|
|
|
- name: "y"
|
|
|
|
label: y_label
|
|
|
|
prefix: y_prefix
|
|
|
|
suffix: y_suffix
|
|
|
|
base: 10
|
|
|
|
scale: linear
|
2019-11-05 19:19:25 +00:00
|
|
|
`,
|
2019-11-06 22:41:06 +00:00
|
|
|
},
|
|
|
|
}
|
2019-11-05 19:19:25 +00:00
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
for _, tt := range tests {
|
2019-11-08 19:33:41 +00:00
|
|
|
testPkgErrors(t, KindDashboard, tt)
|
2019-11-05 19:19:25 +00:00
|
|
|
}
|
2019-11-06 22:41:06 +00:00
|
|
|
})
|
2019-11-05 19:19:25 +00:00
|
|
|
})
|
2019-11-06 19:22:36 +00:00
|
|
|
})
|
|
|
|
|
2019-10-30 21:13:42 +00:00
|
|
|
t.Run("pkg with dashboard and labels associated", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/dashboard_associates_label", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Dashboards, 1)
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
actual := sum.Dashboards[0]
|
|
|
|
assert.Equal(t, "dash_1", actual.Name)
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
require.Len(t, actual.LabelAssociations, 1)
|
|
|
|
actualLabel := actual.LabelAssociations[0]
|
|
|
|
assert.Equal(t, "label_1", actualLabel.Name)
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
expectedMappings := []SummaryLabelMapping{
|
|
|
|
{
|
|
|
|
ResourceName: "dash_1",
|
|
|
|
LabelName: "label_1",
|
|
|
|
},
|
|
|
|
}
|
|
|
|
require.Len(t, sum.LabelMappings, len(expectedMappings))
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
for i, expected := range expectedMappings {
|
|
|
|
expected.ResourceType = influxdb.DashboardsResourceType
|
|
|
|
assert.Equal(t, expected, sum.LabelMappings[i])
|
|
|
|
}
|
|
|
|
})
|
2019-10-30 21:13:42 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("association doesn't exist then provides an error", func(t *testing.T) {
|
2019-11-06 22:41:06 +00:00
|
|
|
tests := []testPkgResourceError{
|
2019-10-30 21:13:42 +00:00
|
|
|
{
|
|
|
|
name: "no labels provided",
|
2019-11-06 22:41:06 +00:00
|
|
|
assErrs: 1,
|
|
|
|
assIdxs: []int{0},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
2019-10-30 21:13:42 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-10-30 21:13:42 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "mixed found and not found",
|
2019-11-06 22:41:06 +00:00
|
|
|
assErrs: 1,
|
|
|
|
assIdxs: []int{1},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
associations:
|
2019-10-30 21:13:42 +00:00
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2020-01-13 19:13:37 +00:00
|
|
|
- kind: Label
|
|
|
|
name: unfound label
|
2019-10-30 21:13:42 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "multiple not found",
|
2019-11-06 22:41:06 +00:00
|
|
|
assErrs: 1,
|
|
|
|
assIdxs: []int{0, 1},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
associations:
|
2019-10-30 21:13:42 +00:00
|
|
|
- kind: Label
|
2020-01-13 19:13:37 +00:00
|
|
|
name: not found 1
|
|
|
|
- kind: Label
|
|
|
|
name: unfound label
|
2019-10-30 21:13:42 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "duplicate valid nested labels",
|
2019-11-06 22:41:06 +00:00
|
|
|
assErrs: 1,
|
|
|
|
assIdxs: []int{1},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Dashboard
|
|
|
|
metadata:
|
|
|
|
name: dash_1
|
|
|
|
spec:
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-10-30 21:13:42 +00:00
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
2019-11-08 19:33:41 +00:00
|
|
|
testPkgErrors(t, KindDashboard, tt)
|
2019-11-06 22:41:06 +00:00
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2019-12-06 07:05:32 +00:00
|
|
|
t.Run("pkg with notification endpoints and labels associated", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/notification_endpoint", func(t *testing.T, pkg *Pkg) {
|
|
|
|
expectedEndpoints := []SummaryNotificationEndpoint{
|
|
|
|
{
|
|
|
|
NotificationEndpoint: &endpoint.HTTP{
|
|
|
|
Base: endpoint.Base{
|
|
|
|
Name: "http_basic_auth_notification_endpoint",
|
|
|
|
Description: "http basic auth desc",
|
|
|
|
Status: influxdb.TaskStatusInactive,
|
|
|
|
},
|
|
|
|
URL: "https://www.example.com/endpoint/basicauth",
|
|
|
|
AuthMethod: "basic",
|
|
|
|
Method: "POST",
|
|
|
|
Username: influxdb.SecretField{Value: strPtr("secret username")},
|
|
|
|
Password: influxdb.SecretField{Value: strPtr("secret password")},
|
2019-12-06 07:05:32 +00:00
|
|
|
},
|
|
|
|
},
|
2020-01-13 19:13:37 +00:00
|
|
|
{
|
|
|
|
NotificationEndpoint: &endpoint.HTTP{
|
|
|
|
Base: endpoint.Base{
|
|
|
|
Name: "http_bearer_auth_notification_endpoint",
|
|
|
|
Description: "http bearer auth desc",
|
|
|
|
Status: influxdb.TaskStatusActive,
|
|
|
|
},
|
|
|
|
URL: "https://www.example.com/endpoint/bearerauth",
|
|
|
|
AuthMethod: "bearer",
|
|
|
|
Method: "PUT",
|
|
|
|
Token: influxdb.SecretField{Value: strPtr("secret token")},
|
2019-12-06 07:05:32 +00:00
|
|
|
},
|
|
|
|
},
|
2020-01-13 19:13:37 +00:00
|
|
|
{
|
|
|
|
NotificationEndpoint: &endpoint.HTTP{
|
|
|
|
Base: endpoint.Base{
|
|
|
|
Name: "http_none_auth_notification_endpoint",
|
|
|
|
Description: "http none auth desc",
|
|
|
|
Status: influxdb.TaskStatusActive,
|
|
|
|
},
|
|
|
|
URL: "https://www.example.com/endpoint/noneauth",
|
|
|
|
AuthMethod: "none",
|
|
|
|
Method: "GET",
|
2019-12-06 07:05:32 +00:00
|
|
|
},
|
|
|
|
},
|
2020-01-13 19:13:37 +00:00
|
|
|
{
|
|
|
|
NotificationEndpoint: &endpoint.PagerDuty{
|
|
|
|
Base: endpoint.Base{
|
|
|
|
Name: "pager_duty_notification_endpoint",
|
|
|
|
Description: "pager duty desc",
|
|
|
|
Status: influxdb.TaskStatusActive,
|
|
|
|
},
|
|
|
|
ClientURL: "http://localhost:8080/orgs/7167eb6719fa34e5/alert-history",
|
|
|
|
RoutingKey: influxdb.SecretField{Value: strPtr("secret routing-key")},
|
2019-12-06 07:05:32 +00:00
|
|
|
},
|
|
|
|
},
|
2020-01-13 19:13:37 +00:00
|
|
|
{
|
|
|
|
NotificationEndpoint: &endpoint.Slack{
|
|
|
|
Base: endpoint.Base{
|
|
|
|
Name: "slack_notification_endpoint",
|
|
|
|
Description: "slack desc",
|
|
|
|
Status: influxdb.TaskStatusActive,
|
|
|
|
},
|
|
|
|
URL: "https://hooks.slack.com/services/bip/piddy/boppidy",
|
|
|
|
Token: influxdb.SecretField{Value: strPtr("tokenval")},
|
2019-12-06 07:05:32 +00:00
|
|
|
},
|
|
|
|
},
|
2020-01-13 19:13:37 +00:00
|
|
|
}
|
2019-12-06 07:05:32 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
sum := pkg.Summary()
|
|
|
|
endpoints := sum.NotificationEndpoints
|
|
|
|
require.Len(t, endpoints, len(expectedEndpoints))
|
|
|
|
require.Len(t, sum.LabelMappings, len(expectedEndpoints))
|
|
|
|
|
|
|
|
for i := range expectedEndpoints {
|
|
|
|
expected, actual := expectedEndpoints[i], endpoints[i]
|
|
|
|
assert.Equalf(t, expected.NotificationEndpoint, actual.NotificationEndpoint, "index=%d", i)
|
|
|
|
require.Len(t, actual.LabelAssociations, 1)
|
|
|
|
assert.Equal(t, "label_1", actual.LabelAssociations[0].Name)
|
|
|
|
|
|
|
|
containsLabelMappings(t, sum.LabelMappings, labelMapping{
|
|
|
|
labelName: "label_1",
|
|
|
|
resName: expected.NotificationEndpoint.GetName(),
|
|
|
|
resType: influxdb.NotificationEndpointResourceType,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
})
|
2019-12-06 07:05:32 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles bad config", func(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
kind Kind
|
|
|
|
resErr testPkgResourceError
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointSlack,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing slack url",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldNotificationEndpointURL},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointSlack
|
|
|
|
metadata:
|
|
|
|
name: slack_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointPagerDuty,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing pager duty url",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldNotificationEndpointURL},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointPagerDuty
|
|
|
|
metadata:
|
|
|
|
name: pager_duty_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointHTTP,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing http url",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldNotificationEndpointURL},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointHTTP
|
|
|
|
metadata:
|
|
|
|
name: http_none_auth_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
type: none
|
|
|
|
method: get
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointHTTP,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "bad url",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldNotificationEndpointURL},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointHTTP
|
|
|
|
metadata:
|
|
|
|
name: http_none_auth_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
type: none
|
|
|
|
method: get
|
|
|
|
url: d_____-_8**(*https://www.examples.coms
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointHTTP,
|
|
|
|
resErr: testPkgResourceError{
|
2019-12-10 22:51:11 +00:00
|
|
|
name: "missing http method",
|
2019-12-06 07:05:32 +00:00
|
|
|
validationErrs: 1,
|
2019-12-10 22:51:11 +00:00
|
|
|
valFields: []string{fieldNotificationEndpointHTTPMethod},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointHTTP
|
|
|
|
metadata:
|
|
|
|
name: http_none_auth_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
type: none
|
|
|
|
url: https://www.example.com/endpoint/noneauth
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointHTTP,
|
|
|
|
resErr: testPkgResourceError{
|
2019-12-10 22:51:11 +00:00
|
|
|
name: "invalid http method",
|
2019-12-06 07:05:32 +00:00
|
|
|
validationErrs: 1,
|
2019-12-10 22:51:11 +00:00
|
|
|
valFields: []string{fieldNotificationEndpointHTTPMethod},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointHTTP
|
|
|
|
metadata:
|
|
|
|
name: http_none_auth_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
type: none
|
|
|
|
description: http none auth desc
|
|
|
|
method: GHOST
|
|
|
|
url: https://www.example.com/endpoint/noneauth
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointHTTP,
|
|
|
|
resErr: testPkgResourceError{
|
2019-12-10 22:51:11 +00:00
|
|
|
name: "missing basic username",
|
2019-12-06 07:05:32 +00:00
|
|
|
validationErrs: 1,
|
2019-12-10 22:51:11 +00:00
|
|
|
valFields: []string{fieldNotificationEndpointUsername},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointHTTP
|
|
|
|
metadata:
|
|
|
|
name: http_basic_auth_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
type: basic
|
|
|
|
method: POST
|
|
|
|
url: https://www.example.com/endpoint/basicauth
|
|
|
|
password: "secret password"
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointHTTP,
|
|
|
|
resErr: testPkgResourceError{
|
2019-12-10 22:51:11 +00:00
|
|
|
name: "missing basic password",
|
2019-12-06 07:05:32 +00:00
|
|
|
validationErrs: 1,
|
2019-12-10 22:51:11 +00:00
|
|
|
valFields: []string{fieldNotificationEndpointPassword},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointHTTP
|
|
|
|
metadata:
|
|
|
|
name: http_basic_auth_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
type: basic
|
|
|
|
method: POST
|
|
|
|
url: https://www.example.com/endpoint/basicauth
|
|
|
|
username: username
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointHTTP,
|
|
|
|
resErr: testPkgResourceError{
|
2019-12-10 22:51:11 +00:00
|
|
|
name: "missing basic password and username",
|
2019-12-06 07:05:32 +00:00
|
|
|
validationErrs: 1,
|
2019-12-10 22:51:11 +00:00
|
|
|
valFields: []string{fieldNotificationEndpointPassword, fieldNotificationEndpointUsername},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointHTTP
|
|
|
|
metadata:
|
|
|
|
name: http_basic_auth_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: http basic auth desc
|
|
|
|
type: basic
|
|
|
|
method: pOsT
|
|
|
|
url: https://www.example.com/endpoint/basicauth
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointHTTP,
|
|
|
|
resErr: testPkgResourceError{
|
2019-12-10 22:51:11 +00:00
|
|
|
name: "missing bearer token",
|
2019-12-06 07:05:32 +00:00
|
|
|
validationErrs: 1,
|
2019-12-10 22:51:11 +00:00
|
|
|
valFields: []string{fieldNotificationEndpointToken},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointHTTP
|
|
|
|
metadata:
|
|
|
|
name: http_bearer_auth_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: http bearer auth desc
|
|
|
|
type: bearer
|
|
|
|
method: puT
|
|
|
|
url: https://www.example.com/endpoint/bearerauth
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointHTTP,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "invalid http type",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldType},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointHTTP
|
|
|
|
metadata:
|
|
|
|
name: http_none_auth_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
type: RANDOM WRONG TYPE
|
|
|
|
description: http none auth desc
|
|
|
|
method: get
|
|
|
|
url: https://www.example.com/endpoint/noneauth
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointSlack,
|
|
|
|
resErr: testPkgResourceError{
|
2020-01-13 19:13:37 +00:00
|
|
|
name: "duplicate endpoints",
|
2019-12-06 07:05:32 +00:00
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointSlack
|
|
|
|
metadata:
|
|
|
|
name: slack_notification_endpoint
|
|
|
|
spec:
|
|
|
|
url: https://hooks.slack.com/services/bip/piddy/boppidy
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointSlack
|
|
|
|
metadata:
|
|
|
|
name: slack_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
url: https://hooks.slack.com/services/bip/piddy/boppidy
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationEndpointSlack,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "invalid status",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldStatus},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationEndpointSlack
|
|
|
|
metadata:
|
|
|
|
name: slack_notification_endpoint
|
2019-12-06 07:05:32 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: slack desc
|
|
|
|
url: https://hooks.slack.com/services/bip/piddy/boppidy
|
|
|
|
status: RANDO STATUS
|
2019-12-06 07:05:32 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, tt.kind, tt.resErr)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2019-12-19 19:56:03 +00:00
|
|
|
t.Run("pkg with notification rules", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/notification_rule", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
rules := sum.NotificationRules
|
|
|
|
require.Len(t, rules, 1)
|
|
|
|
|
|
|
|
rule := rules[0]
|
|
|
|
assert.Equal(t, "rule_0", rule.Name)
|
|
|
|
assert.Equal(t, "endpoint_0", rule.EndpointName)
|
|
|
|
assert.Equal(t, "desc_0", rule.Description)
|
|
|
|
assert.Equal(t, (10 * time.Minute).String(), rule.Every)
|
|
|
|
assert.Equal(t, (30 * time.Second).String(), rule.Offset)
|
|
|
|
expectedMsgTempl := "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
assert.Equal(t, expectedMsgTempl, rule.MessageTemplate)
|
|
|
|
assert.Equal(t, influxdb.Active, rule.Status)
|
|
|
|
|
|
|
|
expectedStatusRules := []SummaryStatusRule{
|
|
|
|
{CurrentLevel: "CRIT", PreviousLevel: "OK"},
|
|
|
|
{CurrentLevel: "WARN"},
|
|
|
|
}
|
|
|
|
assert.Equal(t, expectedStatusRules, rule.StatusRules)
|
2019-12-19 19:56:03 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
expectedTagRules := []SummaryTagRule{
|
|
|
|
{Key: "k1", Value: "v1", Operator: "equal"},
|
|
|
|
{Key: "k1", Value: "v2", Operator: "equal"},
|
|
|
|
}
|
|
|
|
assert.Equal(t, expectedTagRules, rule.TagRules)
|
2019-12-19 19:56:03 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
require.Len(t, sum.Labels, 1)
|
|
|
|
require.Len(t, rule.LabelAssociations, 1)
|
|
|
|
})
|
2019-12-19 19:56:03 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles bad config", func(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
kind Kind
|
|
|
|
resErr testPkgResourceError
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing name",
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
2019-12-19 19:56:03 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
endpointName: endpoint_0
|
|
|
|
every: 10m
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
statusRules:
|
|
|
|
- currentLevel: WARN
|
2019-12-19 19:56:03 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing endpoint name",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldNotificationRuleEndpointName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
|
|
|
name: rule_0
|
2019-12-19 19:56:03 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 10m
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
statusRules:
|
|
|
|
- currentLevel: WARN
|
2019-12-19 19:56:03 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
2020-01-13 19:13:37 +00:00
|
|
|
name: "missing every",
|
2019-12-19 19:56:03 +00:00
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldEvery},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
|
|
|
name: rule_0
|
2019-12-19 19:56:03 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
endpointName: endpoint_0
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
statusRules:
|
|
|
|
- currentLevel: WARN
|
2019-12-19 19:56:03 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing status rules",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldNotificationRuleStatusRules},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
|
|
|
name: rule_0
|
2019-12-19 19:56:03 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 10m
|
|
|
|
endpointName: 10m
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
2019-12-19 19:56:03 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "bad current status rule level",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldNotificationRuleStatusRules},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
|
|
|
name: rule_0
|
2019-12-19 19:56:03 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
every: 10m
|
|
|
|
endpointName: 10m
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
statusRules:
|
|
|
|
- currentLevel: WRONGO
|
2019-12-19 19:56:03 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "bad previous status rule level",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldNotificationRuleStatusRules},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
|
|
|
name: rule_0
|
2019-12-19 19:56:03 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
endpointName: endpoint_0
|
|
|
|
every: 10m
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
statusRules:
|
|
|
|
- currentLevel: CRIT
|
|
|
|
previousLevel: WRONG
|
2019-12-19 19:56:03 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "bad tag rule operator",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldNotificationRuleTagRules},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
|
|
|
name: rule_0
|
2019-12-19 19:56:03 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
endpointName: endpoint_0
|
|
|
|
every: 10m
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
statusRules:
|
|
|
|
- currentLevel: WARN
|
|
|
|
tagRules:
|
|
|
|
- key: k1
|
|
|
|
value: v2
|
|
|
|
operator: WRONG
|
2019-12-19 19:56:03 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "bad status provided",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldStatus},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
|
|
|
name: rule_0
|
2019-12-19 19:56:03 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
endpointName: endpoint_0
|
|
|
|
every: 10m
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
status: RANDO STATUS
|
|
|
|
statusRules:
|
|
|
|
- currentLevel: WARN
|
2019-12-19 19:56:03 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "label association does not exist",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldAssociations},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
|
|
|
name: rule_0
|
|
|
|
spec:
|
|
|
|
endpointName: endpoint_0
|
|
|
|
every: 10m
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
statusRules:
|
|
|
|
- currentLevel: WARN
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-12-19 19:56:03 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindNotificationRule,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "label association dupe",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldAssociations},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: NotificationRule
|
|
|
|
metadata:
|
|
|
|
name: rule_0
|
|
|
|
spec:
|
|
|
|
endpointName: endpoint_0
|
|
|
|
every: 10m
|
|
|
|
messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }"
|
|
|
|
statusRules:
|
|
|
|
- currentLevel: WARN
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-12-19 19:56:03 +00:00
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, tt.kind, tt.resErr)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2019-12-23 08:22:48 +00:00
|
|
|
t.Run("pkg with tasks", func(t *testing.T) {
|
2020-01-13 19:13:37 +00:00
|
|
|
t.Run("happy path", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/tasks", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
tasks := sum.Tasks
|
|
|
|
require.Len(t, tasks, 2)
|
2019-12-23 08:22:48 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
baseEqual := func(t *testing.T, i int, status influxdb.Status, actual SummaryTask) {
|
|
|
|
t.Helper()
|
2019-12-23 08:22:48 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
assert.Equal(t, "task_"+strconv.Itoa(i), actual.Name)
|
|
|
|
assert.Equal(t, "desc_"+strconv.Itoa(i), actual.Description)
|
|
|
|
assert.Equal(t, status, actual.Status)
|
2019-12-23 08:22:48 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
expectedQuery := "from(bucket: \"rucket_1\")\n |> range(start: -5d, stop: -1h)\n |> filter(fn: (r) => r._measurement == \"cpu\")\n |> filter(fn: (r) => r._field == \"usage_idle\")\n |> aggregateWindow(every: 1m, fn: mean)\n |> yield(name: \"mean\")"
|
|
|
|
assert.Equal(t, expectedQuery, actual.Query)
|
2019-12-23 08:22:48 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
require.Len(t, actual.LabelAssociations, 1)
|
|
|
|
assert.Equal(t, "label_1", actual.LabelAssociations[0].Name)
|
|
|
|
}
|
2019-12-23 08:22:48 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
require.Len(t, sum.Labels, 1)
|
2019-12-23 08:22:48 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
task1 := tasks[0]
|
|
|
|
baseEqual(t, 0, influxdb.Inactive, task1)
|
|
|
|
assert.Equal(t, (10 * time.Minute).String(), task1.Every)
|
|
|
|
assert.Equal(t, (15 * time.Second).String(), task1.Offset)
|
2019-12-23 08:22:48 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
task2 := tasks[1]
|
|
|
|
baseEqual(t, 1, influxdb.Active, task2)
|
|
|
|
assert.Equal(t, "15 * * * *", task2.Cron)
|
|
|
|
})
|
2019-12-23 08:22:48 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles bad config", func(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
kind Kind
|
|
|
|
resErr testPkgResourceError
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
kind: KindTask,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing name",
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Task
|
|
|
|
metadata:
|
2019-12-23 08:22:48 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: desc_1
|
|
|
|
cron: 15 * * * *
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
2019-12-23 08:22:48 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindTask,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "invalid status",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldStatus},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Task
|
|
|
|
metadata:
|
|
|
|
name: task_0
|
2019-12-23 08:22:48 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
cron: 15 * * * *
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
status: RANDO WRONGO
|
2019-12-23 08:22:48 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindTask,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing query",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldQuery},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Task
|
|
|
|
metadata:
|
|
|
|
name: task_0
|
2019-12-23 08:22:48 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: desc_0
|
|
|
|
every: 10m
|
|
|
|
offset: 15s
|
2019-12-23 08:22:48 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindTask,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "missing every and cron fields",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldEvery, fieldTaskCron},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Task
|
|
|
|
metadata:
|
|
|
|
name: task_0
|
2019-12-23 08:22:48 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: desc_0
|
|
|
|
offset: 15s
|
2019-12-23 08:22:48 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindTask,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "invalid association",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldAssociations},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Task
|
|
|
|
metadata:
|
|
|
|
name: task_1
|
|
|
|
spec:
|
|
|
|
cron: 15 * * * *
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-12-23 08:22:48 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
kind: KindTask,
|
|
|
|
resErr: testPkgResourceError{
|
|
|
|
name: "duplicate association",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{fieldAssociations},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_1
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Task
|
|
|
|
metadata:
|
|
|
|
name: task_0
|
|
|
|
spec:
|
|
|
|
every: 10m
|
|
|
|
offset: 15s
|
|
|
|
query: >
|
|
|
|
from(bucket: "rucket_1") |> yield(name: "mean")
|
|
|
|
status: inactive
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
2019-12-23 08:22:48 +00:00
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, tt.kind, tt.resErr)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2019-12-03 18:22:59 +00:00
|
|
|
t.Run("pkg with telegraf and label associations", func(t *testing.T) {
|
|
|
|
t.Run("with valid fields", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/telegraf", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.TelegrafConfigs, 1)
|
|
|
|
|
|
|
|
actual := sum.TelegrafConfigs[0]
|
2019-12-12 19:09:32 +00:00
|
|
|
assert.Equal(t, "first_tele_config", actual.TelegrafConfig.Name)
|
|
|
|
assert.Equal(t, "desc", actual.TelegrafConfig.Description)
|
2019-12-03 18:22:59 +00:00
|
|
|
|
|
|
|
require.Len(t, actual.LabelAssociations, 1)
|
|
|
|
assert.Equal(t, "label_1", actual.LabelAssociations[0].Name)
|
2019-12-06 00:53:00 +00:00
|
|
|
|
|
|
|
require.Len(t, sum.LabelMappings, 1)
|
|
|
|
expectedMapping := SummaryLabelMapping{
|
|
|
|
ResourceName: "first_tele_config",
|
|
|
|
LabelName: "label_1",
|
2019-12-12 19:09:32 +00:00
|
|
|
ResourceType: influxdb.TelegrafsResourceType,
|
2019-12-06 00:53:00 +00:00
|
|
|
}
|
|
|
|
assert.Equal(t, expectedMapping, sum.LabelMappings[0])
|
2019-12-03 18:22:59 +00:00
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles bad config", func(t *testing.T) {
|
|
|
|
tests := []testPkgResourceError{
|
|
|
|
{
|
|
|
|
name: "config missing",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"config"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Telegraf
|
|
|
|
metadata:
|
|
|
|
name: first_tele_config
|
2019-12-03 18:22:59 +00:00
|
|
|
spec:
|
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
testPkgErrors(t, KindTelegraf, tt)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
t.Run("pkg with a variable", func(t *testing.T) {
|
|
|
|
t.Run("with valid fields should produce summary", func(t *testing.T) {
|
|
|
|
testfileRunner(t, "testdata/variables", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
require.Len(t, sum.Variables, 4)
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
varEquals := func(t *testing.T, name, vType string, vals interface{}, v SummaryVariable) {
|
|
|
|
t.Helper()
|
|
|
|
|
|
|
|
assert.Equal(t, name, v.Name)
|
|
|
|
assert.Equal(t, name+" desc", v.Description)
|
|
|
|
require.NotNil(t, v.Arguments)
|
|
|
|
assert.Equal(t, vType, v.Arguments.Type)
|
|
|
|
assert.Equal(t, vals, v.Arguments.Values)
|
2019-10-30 21:13:42 +00:00
|
|
|
}
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
// validates we support all known variable types
|
|
|
|
varEquals(t,
|
2019-12-07 00:23:09 +00:00
|
|
|
"var_const_3",
|
2019-11-06 22:41:06 +00:00
|
|
|
"constant",
|
|
|
|
influxdb.VariableConstantValues([]string{"first val"}),
|
|
|
|
sum.Variables[0],
|
|
|
|
)
|
|
|
|
|
|
|
|
varEquals(t,
|
2019-12-07 00:23:09 +00:00
|
|
|
"var_map_4",
|
2019-11-06 22:41:06 +00:00
|
|
|
"map",
|
|
|
|
influxdb.VariableMapValues{"k1": "v1"},
|
|
|
|
sum.Variables[1],
|
|
|
|
)
|
|
|
|
|
|
|
|
varEquals(t,
|
|
|
|
"var_query_1",
|
|
|
|
"query",
|
|
|
|
influxdb.VariableQueryValues{
|
|
|
|
Query: `buckets() |> filter(fn: (r) => r.name !~ /^_/) |> rename(columns: {name: "_value"}) |> keep(columns: ["_value"])`,
|
|
|
|
Language: "flux",
|
|
|
|
},
|
|
|
|
sum.Variables[2],
|
|
|
|
)
|
|
|
|
|
|
|
|
varEquals(t,
|
|
|
|
"var_query_2",
|
|
|
|
"query",
|
|
|
|
influxdb.VariableQueryValues{
|
|
|
|
Query: "an influxql query of sorts",
|
|
|
|
Language: "influxql",
|
|
|
|
},
|
|
|
|
sum.Variables[3],
|
|
|
|
)
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("handles bad config", func(t *testing.T) {
|
|
|
|
tests := []testPkgResourceError{
|
|
|
|
{
|
|
|
|
name: "name missing",
|
|
|
|
validationErrs: 1,
|
2020-03-16 18:25:39 +00:00
|
|
|
valFields: []string{fieldMetadata, fieldName},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Variable
|
|
|
|
metadata:
|
2019-11-06 22:41:06 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: var_map_4 desc
|
|
|
|
type: map
|
|
|
|
values:
|
|
|
|
k1: v1
|
2019-11-06 22:41:06 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "map var missing values",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"values"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Variable
|
|
|
|
metadata:
|
|
|
|
name: var_map_4
|
2019-11-06 22:41:06 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: var_map_4 desc
|
|
|
|
type: map
|
2019-11-06 22:41:06 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "const var missing values",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"values"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Variable
|
|
|
|
metadata:
|
|
|
|
name: var_const_3
|
2019-11-06 22:41:06 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: var_const_3 desc
|
|
|
|
type: constant
|
2019-11-06 22:41:06 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query var missing query",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"query"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Variable
|
|
|
|
metadata:
|
|
|
|
name: var_query_2
|
2019-11-06 22:41:06 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: var_query_2 desc
|
|
|
|
type: query
|
|
|
|
language: influxql
|
2019-11-06 22:41:06 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query var missing query language",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"language"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Variable
|
|
|
|
metadata:
|
|
|
|
name: var_query_2
|
2019-11-06 22:41:06 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: var_query_2 desc
|
|
|
|
type: query
|
|
|
|
query: an influxql query of sorts
|
2019-11-06 22:41:06 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query var provides incorrect query language",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"language"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Variable
|
|
|
|
metadata:
|
|
|
|
name: var_query_2
|
2019-11-06 22:41:06 +00:00
|
|
|
spec:
|
2020-01-13 19:13:37 +00:00
|
|
|
description: var_query_2 desc
|
|
|
|
type: query
|
|
|
|
query: an influxql query of sorts
|
|
|
|
language: wrong Language
|
2019-11-06 22:41:06 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "duplicate var names",
|
|
|
|
validationErrs: 1,
|
|
|
|
valFields: []string{"name"},
|
2020-01-13 19:13:37 +00:00
|
|
|
pkgStr: `apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Variable
|
|
|
|
metadata:
|
|
|
|
name: var_query_2
|
|
|
|
spec:
|
|
|
|
description: var_query_2 desc
|
|
|
|
type: query
|
|
|
|
query: an influxql query of sorts
|
|
|
|
language: influxql
|
|
|
|
---
|
|
|
|
apiVersion: influxdata.com/v2alpha1
|
|
|
|
kind: Variable
|
|
|
|
metadata:
|
|
|
|
name: var_query_2
|
|
|
|
spec:
|
|
|
|
description: var_query_2 desc
|
|
|
|
type: query
|
|
|
|
query: an influxql query of sorts
|
|
|
|
language: influxql
|
2019-11-06 22:41:06 +00:00
|
|
|
`,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
2019-11-08 19:33:41 +00:00
|
|
|
testPkgErrors(t, KindVariable, tt)
|
2019-10-30 21:13:42 +00:00
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
2019-11-07 00:45:00 +00:00
|
|
|
|
|
|
|
t.Run("pkg with variable and labels associated", func(t *testing.T) {
|
2019-12-06 00:53:00 +00:00
|
|
|
testfileRunner(t, "testdata/variable_associates_label.yml", func(t *testing.T, pkg *Pkg) {
|
2019-11-07 00:45:00 +00:00
|
|
|
sum := pkg.Summary()
|
|
|
|
require.Len(t, sum.Labels, 1)
|
|
|
|
|
|
|
|
vars := sum.Variables
|
|
|
|
require.Len(t, vars, 1)
|
|
|
|
|
|
|
|
expectedLabelMappings := []struct {
|
|
|
|
varName string
|
|
|
|
labels []string
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
varName: "var_1",
|
|
|
|
labels: []string{"label_1"},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
for i, expected := range expectedLabelMappings {
|
|
|
|
v := vars[i]
|
|
|
|
require.Len(t, v.LabelAssociations, len(expected.labels))
|
|
|
|
|
|
|
|
for j, label := range expected.labels {
|
|
|
|
assert.Equal(t, label, v.LabelAssociations[j].Name)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
expectedMappings := []SummaryLabelMapping{
|
|
|
|
{
|
|
|
|
ResourceName: "var_1",
|
|
|
|
LabelName: "label_1",
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
require.Len(t, sum.LabelMappings, len(expectedMappings))
|
|
|
|
for i, expected := range expectedMappings {
|
2019-12-12 19:09:32 +00:00
|
|
|
expected.ResourceType = influxdb.VariablesResourceType
|
2019-11-07 00:45:00 +00:00
|
|
|
assert.Equal(t, expected, sum.LabelMappings[i])
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
2019-12-16 17:39:55 +00:00
|
|
|
|
|
|
|
t.Run("referencing secrets", func(t *testing.T) {
|
2020-02-05 00:15:20 +00:00
|
|
|
hasSecret := func(t *testing.T, refs map[string]bool, key string) {
|
|
|
|
t.Helper()
|
|
|
|
b, ok := refs[key]
|
|
|
|
assert.True(t, ok)
|
|
|
|
assert.False(t, b)
|
|
|
|
}
|
|
|
|
|
2019-12-16 17:39:55 +00:00
|
|
|
testfileRunner(t, "testdata/notification_endpoint_secrets.yml", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
|
|
|
|
endpoints := sum.NotificationEndpoints
|
|
|
|
require.Len(t, endpoints, 1)
|
|
|
|
|
|
|
|
expected := &endpoint.PagerDuty{
|
|
|
|
Base: endpoint.Base{
|
|
|
|
Name: "pager_duty_notification_endpoint",
|
|
|
|
Status: influxdb.TaskStatusActive,
|
|
|
|
},
|
|
|
|
ClientURL: "http://localhost:8080/orgs/7167eb6719fa34e5/alert-history",
|
2020-02-05 00:15:20 +00:00
|
|
|
RoutingKey: influxdb.SecretField{Key: "-routing-key", Value: strPtr("not empty")},
|
2019-12-16 17:39:55 +00:00
|
|
|
}
|
|
|
|
actual, ok := endpoints[0].NotificationEndpoint.(*endpoint.PagerDuty)
|
|
|
|
require.True(t, ok)
|
|
|
|
assert.Equal(t, expected.Base.Name, actual.Name)
|
|
|
|
require.Nil(t, actual.RoutingKey.Value)
|
|
|
|
assert.Equal(t, "routing-key", actual.RoutingKey.Key)
|
|
|
|
|
2020-02-05 00:15:20 +00:00
|
|
|
hasSecret(t, pkg.mSecrets, "routing-key")
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
t.Run("referencing env", func(t *testing.T) {
|
2020-02-06 17:28:04 +00:00
|
|
|
hasEnv := func(t *testing.T, refs map[string]bool, key string) {
|
2020-02-05 00:15:20 +00:00
|
|
|
t.Helper()
|
|
|
|
_, ok := refs[key]
|
|
|
|
assert.True(t, ok)
|
|
|
|
}
|
|
|
|
|
|
|
|
testfileRunner(t, "testdata/env_refs.yml", func(t *testing.T, pkg *Pkg) {
|
|
|
|
sum := pkg.Summary()
|
|
|
|
|
2020-02-05 01:23:28 +00:00
|
|
|
require.Len(t, sum.Buckets, 1)
|
|
|
|
assert.Equal(t, "$bkt-1-name-ref", sum.Buckets[0].Name)
|
2020-02-05 17:33:45 +00:00
|
|
|
assert.Len(t, sum.Buckets[0].LabelAssociations, 1)
|
2020-02-05 00:15:20 +00:00
|
|
|
hasEnv(t, pkg.mEnv, "bkt-1-name-ref")
|
|
|
|
|
2020-02-05 01:23:28 +00:00
|
|
|
require.Len(t, sum.Checks, 1)
|
|
|
|
assert.Equal(t, "$check-1-name-ref", sum.Checks[0].Check.GetName())
|
2020-02-05 17:33:45 +00:00
|
|
|
assert.Len(t, sum.Checks[0].LabelAssociations, 1)
|
2020-02-05 01:23:28 +00:00
|
|
|
hasEnv(t, pkg.mEnv, "check-1-name-ref")
|
2020-02-05 00:15:20 +00:00
|
|
|
|
2020-02-05 01:23:28 +00:00
|
|
|
require.Len(t, sum.Dashboards, 1)
|
|
|
|
assert.Equal(t, "$dash-1-name-ref", sum.Dashboards[0].Name)
|
2020-02-05 17:33:45 +00:00
|
|
|
assert.Len(t, sum.Dashboards[0].LabelAssociations, 1)
|
2020-02-05 01:23:28 +00:00
|
|
|
hasEnv(t, pkg.mEnv, "dash-1-name-ref")
|
|
|
|
|
|
|
|
require.Len(t, sum.NotificationEndpoints, 1)
|
|
|
|
assert.Equal(t, "$endpoint-1-name-ref", sum.NotificationEndpoints[0].NotificationEndpoint.GetName())
|
|
|
|
hasEnv(t, pkg.mEnv, "endpoint-1-name-ref")
|
|
|
|
|
|
|
|
require.Len(t, sum.Labels, 1)
|
|
|
|
assert.Equal(t, "$label-1-name-ref", sum.Labels[0].Name)
|
|
|
|
hasEnv(t, pkg.mEnv, "label-1-name-ref")
|
|
|
|
|
|
|
|
require.Len(t, sum.NotificationRules, 1)
|
|
|
|
assert.Equal(t, "$rule-1-name-ref", sum.NotificationRules[0].Name)
|
|
|
|
assert.Equal(t, "$endpoint-1-name-ref", sum.NotificationRules[0].EndpointName)
|
|
|
|
hasEnv(t, pkg.mEnv, "rule-1-name-ref")
|
|
|
|
|
|
|
|
require.Len(t, sum.Tasks, 1)
|
|
|
|
assert.Equal(t, "$task-1-name-ref", sum.Tasks[0].Name)
|
|
|
|
hasEnv(t, pkg.mEnv, "task-1-name-ref")
|
|
|
|
|
|
|
|
require.Len(t, sum.TelegrafConfigs, 1)
|
|
|
|
assert.Equal(t, "$telegraf-1-name-ref", sum.TelegrafConfigs[0].TelegrafConfig.Name)
|
|
|
|
hasEnv(t, pkg.mEnv, "telegraf-1-name-ref")
|
2020-02-05 00:15:20 +00:00
|
|
|
|
2020-02-05 01:23:28 +00:00
|
|
|
require.Len(t, sum.Variables, 1)
|
|
|
|
assert.Equal(t, "$var-1-name-ref", sum.Variables[0].Name)
|
|
|
|
hasEnv(t, pkg.mEnv, "var-1-name-ref")
|
2020-02-06 05:42:01 +00:00
|
|
|
|
|
|
|
t.Log("applying env vars should populate env fields")
|
|
|
|
{
|
2020-02-06 17:28:04 +00:00
|
|
|
err := pkg.applyEnvRefs(map[string]string{
|
2020-02-06 05:42:01 +00:00
|
|
|
"bkt-1-name-ref": "bucket-1",
|
|
|
|
"label-1-name-ref": "label-1",
|
|
|
|
})
|
2020-02-06 17:28:04 +00:00
|
|
|
require.NoError(t, err)
|
2020-02-06 05:42:01 +00:00
|
|
|
|
|
|
|
sum := pkg.Summary()
|
|
|
|
|
|
|
|
require.Len(t, sum.Buckets, 1)
|
|
|
|
assert.Equal(t, "bucket-1", sum.Buckets[0].Name)
|
|
|
|
assert.Len(t, sum.Buckets[0].LabelAssociations, 1)
|
|
|
|
hasEnv(t, pkg.mEnv, "bkt-1-name-ref")
|
|
|
|
|
|
|
|
require.Len(t, sum.Labels, 1)
|
|
|
|
assert.Equal(t, "label-1", sum.Labels[0].Name)
|
|
|
|
hasEnv(t, pkg.mEnv, "label-1-name-ref")
|
|
|
|
}
|
2019-12-16 17:39:55 +00:00
|
|
|
})
|
|
|
|
})
|
2020-01-12 02:49:55 +00:00
|
|
|
|
|
|
|
t.Run("jsonnet support", func(t *testing.T) {
|
2020-02-06 20:26:10 +00:00
|
|
|
pkg := validParsedPkgFromFile(t, "testdata/bucket_associates_labels.jsonnet", EncodingJsonnet)
|
2020-01-12 02:49:55 +00:00
|
|
|
|
|
|
|
sum := pkg.Summary()
|
|
|
|
|
|
|
|
labels := []SummaryLabel{
|
|
|
|
{
|
|
|
|
Name: "label_1",
|
|
|
|
Properties: struct {
|
|
|
|
Color string `json:"color"`
|
|
|
|
Description string `json:"description"`
|
|
|
|
}{Color: "#eee888", Description: "desc_1"},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
assert.Equal(t, labels, sum.Labels)
|
|
|
|
|
|
|
|
bkts := []SummaryBucket{
|
|
|
|
{
|
|
|
|
Name: "rucket_1",
|
|
|
|
Description: "desc_1",
|
|
|
|
RetentionPeriod: 10000 * time.Second,
|
|
|
|
LabelAssociations: labels,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
Name: "rucket_2",
|
|
|
|
Description: "desc_2",
|
|
|
|
RetentionPeriod: 20000 * time.Second,
|
|
|
|
LabelAssociations: labels,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
Name: "rucket_3",
|
|
|
|
Description: "desc_3",
|
|
|
|
RetentionPeriod: 30000 * time.Second,
|
|
|
|
LabelAssociations: labels,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
assert.Equal(t, bkts, sum.Buckets)
|
|
|
|
})
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
2020-02-06 20:26:10 +00:00
|
|
|
func TestCombine(t *testing.T) {
|
|
|
|
newPkgFromYmlStr := func(t *testing.T, pkgStr string) *Pkg {
|
|
|
|
t.Helper()
|
|
|
|
return newParsedPkg(t, FromString(pkgStr), EncodingYAML, ValidSkipParseError())
|
|
|
|
}
|
|
|
|
|
|
|
|
associationsEqual := func(t *testing.T, summaryLabels []SummaryLabel, names ...string) {
|
|
|
|
t.Helper()
|
|
|
|
|
|
|
|
require.Len(t, summaryLabels, len(names))
|
|
|
|
|
|
|
|
m := make(map[string]bool)
|
|
|
|
for _, n := range names {
|
|
|
|
m[n] = true
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, l := range summaryLabels {
|
|
|
|
if !m[l.Name] {
|
|
|
|
assert.Fail(t, "did not find label: "+l.Name)
|
|
|
|
}
|
|
|
|
delete(m, l.Name)
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(m) > 0 {
|
|
|
|
var unexpectedLabels []string
|
|
|
|
for name := range m {
|
|
|
|
unexpectedLabels = append(unexpectedLabels, name)
|
|
|
|
}
|
|
|
|
assert.Failf(t, "additional labels found", "got: %v", unexpectedLabels)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
t.Run("multiple pkgs with associations across files", func(t *testing.T) {
|
|
|
|
var pkgs []*Pkg
|
|
|
|
numLabels := 5
|
|
|
|
for i := 0; i < numLabels; i++ {
|
|
|
|
pkg := newPkgFromYmlStr(t, fmt.Sprintf(`
|
|
|
|
apiVersion: %[1]s
|
|
|
|
kind: Label
|
|
|
|
metadata:
|
|
|
|
name: label_%d
|
|
|
|
`, APIVersion, i))
|
|
|
|
pkgs = append(pkgs, pkg)
|
|
|
|
}
|
|
|
|
|
|
|
|
pkgs = append(pkgs, newPkgFromYmlStr(t, fmt.Sprintf(`
|
|
|
|
apiVersion: %[1]s
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_1
|
|
|
|
spec:
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
|
|
|
`, APIVersion)))
|
|
|
|
|
|
|
|
pkgs = append(pkgs, newPkgFromYmlStr(t, fmt.Sprintf(`
|
|
|
|
apiVersion: %[1]s
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_2
|
|
|
|
spec:
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_2
|
|
|
|
`, APIVersion)))
|
|
|
|
|
|
|
|
pkgs = append(pkgs, newPkgFromYmlStr(t, fmt.Sprintf(`
|
|
|
|
apiVersion: %[1]s
|
|
|
|
kind: Bucket
|
|
|
|
metadata:
|
|
|
|
name: rucket_3
|
|
|
|
spec:
|
|
|
|
associations:
|
|
|
|
- kind: Label
|
|
|
|
name: label_1
|
|
|
|
- kind: Label
|
|
|
|
name: label_2
|
|
|
|
`, APIVersion)))
|
|
|
|
|
|
|
|
combinedPkg, err := Combine(pkgs...)
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
sum := combinedPkg.Summary()
|
|
|
|
|
|
|
|
require.Len(t, sum.Labels, numLabels)
|
|
|
|
for i := 0; i < numLabels; i++ {
|
|
|
|
assert.Equal(t, fmt.Sprintf("label_%d", i), sum.Labels[i].Name)
|
|
|
|
}
|
|
|
|
|
|
|
|
require.Len(t, sum.Labels, numLabels)
|
|
|
|
for i := 0; i < numLabels; i++ {
|
|
|
|
assert.Equal(t, fmt.Sprintf("label_%d", i), sum.Labels[i].Name)
|
|
|
|
}
|
|
|
|
|
|
|
|
require.Len(t, sum.Buckets, 3)
|
|
|
|
assert.Equal(t, "rucket_1", sum.Buckets[0].Name)
|
|
|
|
associationsEqual(t, sum.Buckets[0].LabelAssociations, "label_1")
|
|
|
|
assert.Equal(t, "rucket_2", sum.Buckets[1].Name)
|
|
|
|
associationsEqual(t, sum.Buckets[1].LabelAssociations, "label_2")
|
|
|
|
assert.Equal(t, "rucket_3", sum.Buckets[2].Name)
|
|
|
|
associationsEqual(t, sum.Buckets[2].LabelAssociations, "label_1", "label_2")
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2019-12-21 23:57:41 +00:00
|
|
|
func Test_IsParseError(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
err error
|
|
|
|
expected bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
name: "base case",
|
|
|
|
err: &parseErr{},
|
|
|
|
expected: true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "wrapped by influxdb error",
|
|
|
|
err: &influxdb.Error{
|
|
|
|
Err: &parseErr{},
|
|
|
|
},
|
|
|
|
expected: true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "deeply nested in influxdb error",
|
|
|
|
err: &influxdb.Error{
|
|
|
|
Err: &influxdb.Error{
|
|
|
|
Err: &influxdb.Error{
|
|
|
|
Err: &influxdb.Error{
|
|
|
|
Err: &parseErr{},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
expected: true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "influxdb error without nested parse err",
|
|
|
|
err: &influxdb.Error{
|
|
|
|
Err: errors.New("nope"),
|
|
|
|
},
|
|
|
|
expected: false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "plain error",
|
|
|
|
err: errors.New("nope"),
|
|
|
|
expected: false,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
fn := func(t *testing.T) {
|
|
|
|
isParseErr := IsParseErr(tt.err)
|
|
|
|
assert.Equal(t, tt.expected, isParseErr)
|
|
|
|
}
|
|
|
|
t.Run(tt.name, fn)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
func Test_PkgValidationErr(t *testing.T) {
|
|
|
|
iPtr := func(i int) *int {
|
|
|
|
return &i
|
|
|
|
}
|
|
|
|
|
|
|
|
compIntSlcs := func(t *testing.T, expected []int, actuals []*int) {
|
|
|
|
t.Helper()
|
|
|
|
|
|
|
|
if len(expected) >= len(actuals) {
|
|
|
|
require.FailNow(t, "expected array is larger than actuals")
|
|
|
|
}
|
|
|
|
|
|
|
|
for i, actual := range actuals {
|
|
|
|
if i == len(expected) {
|
|
|
|
assert.Nil(t, actual)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
assert.Equal(t, expected[i], *actual)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pErr := &parseErr{
|
|
|
|
Resources: []resourceErr{
|
|
|
|
{
|
|
|
|
Kind: KindDashboard.String(),
|
|
|
|
Idx: intPtr(0),
|
|
|
|
ValidationErrs: []validationErr{
|
|
|
|
{
|
|
|
|
Field: "charts",
|
|
|
|
Index: iPtr(1),
|
|
|
|
Nested: []validationErr{
|
|
|
|
{
|
|
|
|
Field: "colors",
|
|
|
|
Index: iPtr(0),
|
|
|
|
Nested: []validationErr{
|
|
|
|
{
|
|
|
|
Field: "hex",
|
|
|
|
Msg: "hex value required",
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
Field: "kind",
|
|
|
|
Msg: "chart kind must be provided",
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
errs := pErr.ValidationErrs()
|
|
|
|
require.Len(t, errs, 2)
|
|
|
|
assert.Equal(t, KindDashboard.String(), errs[0].Kind)
|
2020-01-24 19:25:03 +00:00
|
|
|
assert.Equal(t, []string{"root", "charts", "colors", "hex"}, errs[0].Fields)
|
2019-11-22 01:07:12 +00:00
|
|
|
compIntSlcs(t, []int{0, 1, 0}, errs[0].Indexes)
|
|
|
|
assert.Equal(t, "hex value required", errs[0].Reason)
|
|
|
|
|
|
|
|
assert.Equal(t, KindDashboard.String(), errs[1].Kind)
|
2020-01-24 19:25:03 +00:00
|
|
|
assert.Equal(t, []string{"root", "charts", "kind"}, errs[1].Fields)
|
2019-11-22 01:07:12 +00:00
|
|
|
compIntSlcs(t, []int{0, 1}, errs[1].Indexes)
|
|
|
|
assert.Equal(t, "chart kind must be provided", errs[1].Reason)
|
|
|
|
}
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
// testPkgResourceError describes a pkg fixture that is expected to fail
// parsing, along with the expected shape of the failure.
type testPkgResourceError struct {
	name           string   // subtest name
	encoding       Encoding // defaults to EncodingYAML when left unset
	pkgStr         string   // raw pkg document to parse
	resourceErrs   int      // expected number of resource errs; defaults to 1 when 0
	validationErrs int
	valFields      []string // dotted field paths expected among the validation errs
	assErrs        int      // when > 0, association errs are checked as well
	assIdxs        []int
}
|
|
|
|
|
|
|
|
// defaults to yaml encoding if encoding not provided
|
|
|
|
// defaults num resources to 1 if resource errs not provided.
|
2019-11-08 19:33:41 +00:00
|
|
|
func testPkgErrors(t *testing.T, k Kind, tt testPkgResourceError) {
|
2019-11-06 22:41:06 +00:00
|
|
|
t.Helper()
|
|
|
|
encoding := EncodingYAML
|
|
|
|
if tt.encoding != EncodingUnknown {
|
|
|
|
encoding = tt.encoding
|
|
|
|
}
|
|
|
|
|
|
|
|
resErrs := 1
|
|
|
|
if tt.resourceErrs > 0 {
|
|
|
|
resErrs = tt.resourceErrs
|
|
|
|
}
|
|
|
|
|
|
|
|
fn := func(t *testing.T) {
|
|
|
|
t.Helper()
|
|
|
|
|
|
|
|
_, err := Parse(encoding, FromString(tt.pkgStr))
|
|
|
|
require.Error(t, err)
|
|
|
|
|
2019-11-14 00:43:28 +00:00
|
|
|
require.True(t, IsParseErr(err), err)
|
2019-11-06 22:41:06 +00:00
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
pErr := err.(*parseErr)
|
2019-11-06 22:41:06 +00:00
|
|
|
require.Len(t, pErr.Resources, resErrs)
|
|
|
|
|
2020-03-04 19:11:55 +00:00
|
|
|
defer func() {
|
|
|
|
if t.Failed() {
|
|
|
|
t.Logf("recieved unexpected err: %s", pErr)
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
resErr := pErr.Resources[0]
|
|
|
|
assert.Equal(t, k.String(), resErr.Kind)
|
|
|
|
|
2019-11-14 00:24:05 +00:00
|
|
|
for i, vFail := range resErr.ValidationErrs {
|
|
|
|
if len(tt.valFields) == i {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
expectedField := tt.valFields[i]
|
|
|
|
findErr(t, expectedField, vFail)
|
2019-11-06 22:41:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if tt.assErrs == 0 {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2019-11-14 00:24:05 +00:00
|
|
|
assFails := pErr.Resources[0].AssociationErrs
|
|
|
|
for i, assFail := range assFails {
|
|
|
|
if len(tt.valFields) == i {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
expectedField := tt.valFields[i]
|
|
|
|
findErr(t, expectedField, assFail)
|
2019-11-06 22:41:06 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
t.Run(tt.name, fn)
|
|
|
|
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
func findErr(t *testing.T, expectedField string, vErr validationErr) validationErr {
|
2019-11-14 00:24:05 +00:00
|
|
|
t.Helper()
|
|
|
|
|
|
|
|
fields := strings.Split(expectedField, ".")
|
|
|
|
if len(fields) == 1 {
|
|
|
|
require.Equal(t, expectedField, vErr.Field)
|
|
|
|
return vErr
|
|
|
|
}
|
|
|
|
|
|
|
|
currentFieldName, idx := nextField(t, fields[0])
|
|
|
|
if idx > -1 {
|
|
|
|
require.NotNil(t, vErr.Index)
|
|
|
|
require.Equal(t, idx, *vErr.Index)
|
|
|
|
}
|
|
|
|
require.Equal(t, currentFieldName, vErr.Field)
|
|
|
|
|
|
|
|
next := strings.Join(fields[1:], ".")
|
|
|
|
nestedField, _ := nextField(t, next)
|
|
|
|
for _, n := range vErr.Nested {
|
|
|
|
if n.Field == nestedField {
|
|
|
|
return findErr(t, next, n)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
assert.Fail(t, "did not find field: "+expectedField)
|
|
|
|
|
|
|
|
return vErr
|
|
|
|
}
|
|
|
|
|
|
|
|
func nextField(t *testing.T, field string) (string, int) {
|
|
|
|
t.Helper()
|
|
|
|
|
|
|
|
fields := strings.Split(field, ".")
|
|
|
|
if len(fields) == 1 && !strings.HasSuffix(fields[0], "]") {
|
|
|
|
return field, -1
|
|
|
|
}
|
|
|
|
parts := strings.Split(fields[0], "[")
|
|
|
|
if len(parts) == 1 {
|
2020-03-04 19:11:55 +00:00
|
|
|
return parts[0], -1
|
2019-11-14 00:24:05 +00:00
|
|
|
}
|
|
|
|
fieldName := parts[0]
|
|
|
|
|
|
|
|
if strIdx := strings.Index(parts[1], "]"); strIdx > -1 {
|
|
|
|
idx, err := strconv.Atoi(parts[1][:strIdx])
|
|
|
|
require.NoError(t, err)
|
|
|
|
return fieldName, idx
|
|
|
|
}
|
|
|
|
return "", -1
|
|
|
|
}
|
|
|
|
|
2020-02-06 20:26:10 +00:00
|
|
|
func validParsedPkgFromFile(t *testing.T, path string, encoding Encoding) *Pkg {
|
|
|
|
t.Helper()
|
|
|
|
return newParsedPkg(t, FromFile(path), encoding)
|
|
|
|
}
|
|
|
|
|
|
|
|
func newParsedPkg(t *testing.T, fn ReaderFn, encoding Encoding, opts ...ValidateOptFn) *Pkg {
|
2019-10-23 17:09:04 +00:00
|
|
|
t.Helper()
|
|
|
|
|
2020-02-06 20:26:10 +00:00
|
|
|
pkg, err := Parse(encoding, fn, opts...)
|
2019-10-23 17:09:04 +00:00
|
|
|
require.NoError(t, err)
|
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
for _, k := range pkg.Objects {
|
|
|
|
require.Equal(t, APIVersion, k.APIVersion)
|
|
|
|
}
|
2019-10-23 17:09:04 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
require.True(t, pkg.isParsed)
|
2019-10-23 17:09:04 +00:00
|
|
|
return pkg
|
|
|
|
}
|
|
|
|
|
|
|
|
func testfileRunner(t *testing.T, path string, testFn func(t *testing.T, pkg *Pkg)) {
|
|
|
|
t.Helper()
|
|
|
|
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
extension string
|
|
|
|
encoding Encoding
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
name: "yaml",
|
2019-11-07 00:45:00 +00:00
|
|
|
extension: ".yml",
|
2019-10-23 17:09:04 +00:00
|
|
|
encoding: EncodingYAML,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "json",
|
2019-11-07 00:45:00 +00:00
|
|
|
extension: ".json",
|
2019-10-23 17:09:04 +00:00
|
|
|
encoding: EncodingJSON,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2019-11-07 00:45:00 +00:00
|
|
|
ext := filepath.Ext(path)
|
|
|
|
switch ext {
|
|
|
|
case ".yml":
|
|
|
|
tests = tests[:1]
|
|
|
|
case ".json":
|
|
|
|
tests = tests[1:]
|
|
|
|
}
|
|
|
|
|
|
|
|
path = strings.TrimSuffix(path, ext)
|
|
|
|
|
2019-10-23 17:09:04 +00:00
|
|
|
for _, tt := range tests {
|
|
|
|
fn := func(t *testing.T) {
|
|
|
|
t.Helper()
|
|
|
|
|
2020-02-06 20:26:10 +00:00
|
|
|
pkg := validParsedPkgFromFile(t, path+tt.extension, tt.encoding)
|
2019-10-23 17:09:04 +00:00
|
|
|
if testFn != nil {
|
|
|
|
testFn(t, pkg)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
t.Run(tt.name, fn)
|
|
|
|
}
|
|
|
|
}
|
2019-12-10 22:51:11 +00:00
|
|
|
|
2019-12-18 01:57:44 +00:00
|
|
|
// labelMapping names a label and the resource it is expected to be
// associated with in a pkg summary.
type labelMapping struct {
	labelName string
	resName   string
	resType   influxdb.ResourceType
}
|
|
|
|
|
|
|
|
func containsLabelMappings(t *testing.T, labelMappings []SummaryLabelMapping, matches ...labelMapping) {
|
|
|
|
t.Helper()
|
|
|
|
|
|
|
|
for _, expected := range matches {
|
|
|
|
expectedMapping := SummaryLabelMapping{
|
|
|
|
ResourceName: expected.resName,
|
|
|
|
LabelName: expected.labelName,
|
|
|
|
ResourceType: expected.resType,
|
|
|
|
}
|
|
|
|
assert.Contains(t, labelMappings, expectedMapping)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-10 22:51:11 +00:00
|
|
|
// strPtr returns a pointer to a copy of s.
func strPtr(s string) *string {
	out := s
	return &out
}
|
2019-12-18 01:57:44 +00:00
|
|
|
|
|
|
|
func mustDuration(t *testing.T, d time.Duration) *notification.Duration {
|
|
|
|
t.Helper()
|
|
|
|
dur, err := notification.FromTimeDuration(d)
|
|
|
|
require.NoError(t, err)
|
|
|
|
return &dur
|
|
|
|
}
|