feat(pkger): move bucket pkger model to reflect HTTP API's retention rules in place of retention period

pull/16032/head
Johnny Steenbergen 2019-11-22 10:41:08 -08:00 committed by Johnny Steenbergen
parent 2b4e6283ce
commit 888baa2db6
9 changed files with 136 additions and 55 deletions
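The change swaps the pkg bucket's single retention_period duration for the retentionRules list the HTTP API already uses (type "expire" plus everySeconds). The standalone sketch below uses made-up names (ruleFromDuration, durationFromRule) rather than the pkger code itself; it only illustrates the duration-to-seconds round trip that the diff implements with newRetentionRule and retentionRules.RP().

package main

import (
    "fmt"
    "time"
)

// Stand-in for the rule shape used by the HTTP API: a type plus everySeconds.
type retentionRule struct {
    Type    string `json:"type" yaml:"type"`
    Seconds int    `json:"everySeconds" yaml:"everySeconds"`
}

// duration -> rule, mirroring what newRetentionRule does in the diff below.
func ruleFromDuration(d time.Duration) retentionRule {
    return retentionRule{Type: "expire", Seconds: int(d.Round(time.Second) / time.Second)}
}

// rule -> duration, mirroring retentionRules.RP(), which reads the first rule.
func durationFromRule(r retentionRule) time.Duration {
    return time.Duration(r.Seconds) * time.Second
}

func main() {
    r := ruleFromDuration(time.Hour)
    fmt.Println(r.Seconds)           // 3600
    fmt.Println(durationFromRule(r)) // 1h0m0s
}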

View File

@@ -63,7 +63,7 @@ func bucketToResource(bkt influxdb.Bucket, name string) Resource {
         r[fieldDescription] = bkt.Description
     }
     if bkt.RetentionPeriod != 0 {
-        r[fieldBucketRetentionPeriod] = bkt.RetentionPeriod.String()
+        r[fieldBucketRetentionRules] = retentionRules{newRetentionRule(bkt.RetentionPeriod)}
     }
     return r
 }
@@ -351,7 +351,7 @@ func variableToResource(v influxdb.Variable, name string) Resource {
     case fieldArgTypeQuery:
         vals, ok := args.Values.(influxdb.VariableQueryValues)
         if ok {
-            r[fieldVarLanguage] = vals.Language
+            r[fieldLanguage] = vals.Language
             r[fieldQuery] = vals.Query
         }
     }

View File

@@ -3,6 +3,7 @@ package pkger
 import (
     "errors"
     "fmt"
+    "strconv"
     "strings"
     "time"
@@ -151,7 +152,7 @@ func newDiffBucket(b *bucket, i influxdb.Bucket) DiffBucket {
         OldDesc: i.Description,
         NewDesc: b.Description,
         OldRetention: i.RetentionPeriod,
-        NewRetention: b.RetentionPeriod,
+        NewRetention: b.RetentionRules.RP(),
     }
 }
@@ -366,6 +367,7 @@ const (
     fieldAssociations = "associations"
     fieldDescription = "description"
     fieldKind = "kind"
+    fieldLanguage = "language"
     fieldName = "name"
     fieldPrefix = "prefix"
     fieldQuery = "query"
@@ -376,16 +378,16 @@ const (
 )
 
 const (
-    fieldBucketRetentionPeriod = "retention_period"
+    fieldBucketRetentionRules = "retentionRules"
 )
 
 type bucket struct {
     id influxdb.ID
     OrgID influxdb.ID
     Description string
     Name string
-    RetentionPeriod time.Duration
+    RetentionRules retentionRules
     labels []*label
 
     // existing provides context for a resource that already
     // exists in the platform. If a resource already exists
@@ -415,17 +417,84 @@ func (b *bucket) summarize() SummaryBucket {
             OrgID: b.OrgID,
             Name: b.Name,
             Description: b.Description,
-            RetentionPeriod: b.RetentionPeriod,
+            RetentionPeriod: b.RetentionRules.RP(),
         },
         LabelAssociations: toInfluxLabels(b.labels...),
     }
 }
 
+func (b *bucket) valid() []validationErr {
+    return b.RetentionRules.valid()
+}
+
 func (b *bucket) shouldApply() bool {
     return b.existing == nil ||
         b.Description != b.existing.Description ||
         b.Name != b.existing.Name ||
-        b.RetentionPeriod != b.existing.RetentionPeriod
+        b.RetentionRules.RP() != b.existing.RetentionPeriod
 }
 
+const (
+    retentionRuleTypeExpire = "expire"
+)
+
+type retentionRule struct {
+    Type string `json:"type" yaml:"type"`
+    Seconds int `json:"everySeconds" yaml:"everySeconds"`
+}
+
+func newRetentionRule(d time.Duration) retentionRule {
+    return retentionRule{
+        Type: retentionRuleTypeExpire,
+        Seconds: int(d.Round(time.Second) / time.Second),
+    }
+}
+
+func (r retentionRule) valid() []validationErr {
+    const hour = 3600
+    var ff []validationErr
+    if r.Seconds < hour {
+        ff = append(ff, validationErr{
+            Field: fieldRetentionRulesEverySeconds,
+            Msg: "seconds must be a minimum of " + strconv.Itoa(hour),
+        })
+    }
+    if r.Type != retentionRuleTypeExpire {
+        ff = append(ff, validationErr{
+            Field: fieldType,
+            Msg: `type must be "expire"`,
+        })
+    }
+    return ff
+}
+
+const (
+    fieldRetentionRulesEverySeconds = "everySeconds"
+)
+
+type retentionRules []retentionRule
+
+func (r retentionRules) RP() time.Duration {
+    // TODO: this feels very odd to me, will need to follow up with
+    // team to better understand this
+    for _, rule := range r {
+        return time.Duration(rule.Seconds) * time.Second
+    }
+    return 0
+}
+
+func (r retentionRules) valid() []validationErr {
+    var failures []validationErr
+    for i, rule := range r {
+        if ff := rule.valid(); len(ff) > 0 {
+            failures = append(failures, validationErr{
+                Field: fieldBucketRetentionRules,
+                Index: intPtr(i),
+                Nested: ff,
+            })
+        }
+    }
+    return failures
+}
 
 type assocMapKey struct {
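For context on the validation added above: retentionRules.valid() nests each bad rule's failures under the retentionRules field with its index. A self-contained sketch of that failure shape, using local stand-ins for the unexported validationErr and intPtr, for a rule below the 3600-second minimum:

package main

import "fmt"

// Local stand-ins for pkger's unexported validationErr and intPtr, used only
// to illustrate the nested failure shape built by retentionRules.valid().
type validationErr struct {
    Field  string
    Msg    string
    Index  *int
    Nested []validationErr
}

func intPtr(i int) *int { return &i }

func main() {
    // retentionRule.valid() rejects anything under 3600 seconds, so a rule of
    // 600 seconds surfaces as a nested failure at index 0 of retentionRules.
    failure := validationErr{
        Field: "retentionRules",
        Index: intPtr(0),
        Nested: []validationErr{{
            Field: "everySeconds",
            Msg:   "seconds must be a minimum of 3600",
        }},
    }
    fmt.Printf("%+v\n", failure)
}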
@@ -615,7 +684,6 @@ const (
     fieldArgTypeConstant = "constant"
     fieldArgTypeMap = "map"
     fieldArgTypeQuery = "query"
-    fieldVarLanguage = "language"
 )
 
 type variable struct {
@@ -693,27 +761,27 @@ func (v *variable) valid() []validationErr {
     case "map":
         if len(v.MapValues) == 0 {
             failures = append(failures, validationErr{
-                Field: "values",
+                Field: fieldValues,
                 Msg: "map variable must have at least 1 key/val pair",
             })
         }
     case "constant":
         if len(v.ConstValues) == 0 {
             failures = append(failures, validationErr{
-                Field: "values",
+                Field: fieldValues,
                 Msg: "constant variable must have a least 1 value provided",
             })
         }
     case "query":
         if v.Query == "" {
             failures = append(failures, validationErr{
-                Field: "query",
+                Field: fieldQuery,
                 Msg: "query variable must provide a query string",
             })
         }
         if v.Language != "influxql" && v.Language != "flux" {
             failures = append(failures, validationErr{
-                Field: "language",
+                Field: fieldLanguage,
                 Msg: fmt.Sprintf(`query variable language must be either "influxql" or "flux"; got %q`, v.Language),
             })
         }
@@ -990,7 +1058,7 @@ func validGeometry(geom string) []validationErr {
         msg = "type provided is not supported"
     }
     return []validationErr{{
-        Field: "geom",
+        Field: fieldChartGeom,
         Msg: fmt.Sprintf("%s: %q", msg, geom),
     }}
 }
@@ -1002,14 +1070,14 @@ func (c chart) validBaseProps() []validationErr {
     var fails []validationErr
     if c.Width <= 0 {
         fails = append(fails, validationErr{
-            Field: "width",
+            Field: fieldChartWidth,
             Msg: "must be greater than 0",
         })
     }
 
     if c.Height <= 0 {
         fails = append(fails, validationErr{
-            Field: "height",
+            Field: fieldChartHeight,
             Msg: "must be greater than 0",
         })
     }
@@ -1103,12 +1171,12 @@ func (c colors) valid() []validationErr {
     var fails []validationErr
     for i, cc := range c {
         cErr := validationErr{
-            Field: "colors",
+            Field: fieldChartColors,
             Index: intPtr(i),
         }
         if cc.Hex == "" {
             cErr.Nested = append(cErr.Nested, validationErr{
-                Field: "hex",
+                Field: fieldColorHex,
                 Msg: "a color must have a hex value provided",
             })
         }
@@ -1144,19 +1212,19 @@ func (q queries) valid() []validationErr {
     var fails []validationErr
     if len(q) == 0 {
         fails = append(fails, validationErr{
-            Field: "queries",
+            Field: fieldChartQueries,
             Msg: "at least 1 query must be provided",
         })
     }
 
     for i, qq := range q {
         qErr := validationErr{
-            Field: "queries",
+            Field: fieldChartQueries,
             Index: intPtr(i),
         }
         if qq.Query == "" {
             qErr.Nested = append(fails, validationErr{
-                Field: "query",
+                Field: fieldQuery,
                 Msg: "a query must be provided",
             })
         }
@@ -1220,7 +1288,7 @@ func (a axes) hasAxes(expectedAxes ...string) []validationErr {
     for _, expected := range expectedAxes {
         if !mAxes[expected] {
             failures = append(failures, validationErr{
-                Field: "axes",
+                Field: fieldChartAxes,
                 Msg: fmt.Sprintf("axis not found: %q", expected),
             })
         }
@@ -1230,7 +1298,6 @@ func (a axes) hasAxes(expectedAxes ...string) []validationErr {
 }
 
 const (
-    fieldLegendLanguage = "language"
     fieldLegendOrientation = "orientation"
 )

View File

@@ -16,18 +16,18 @@ func TestPkg(t *testing.T) {
     pkg := Pkg{
         mBuckets: map[string]*bucket{
             "buck_2": {
                 id: influxdb.ID(2),
                 OrgID: influxdb.ID(100),
                 Description: "desc2",
                 Name: "name2",
-                RetentionPeriod: 2 * time.Hour,
+                RetentionRules: retentionRules{newRetentionRule(2 * time.Hour)},
             },
             "buck_1": {
                 id: influxdb.ID(1),
                 OrgID: influxdb.ID(100),
                 Name: "name1",
                 Description: "desc1",
-                RetentionPeriod: time.Hour,
+                RetentionRules: retentionRules{newRetentionRule(time.Hour)},
             },
         },
     }

View File

@@ -424,9 +424,18 @@ func (p *Pkg) graphBuckets() error {
         }
 
         bkt := &bucket{
             Name: r.Name(),
             Description: r.stringShort(fieldDescription),
-            RetentionPeriod: r.duration(fieldBucketRetentionPeriod),
         }
+        if rules, ok := r[fieldBucketRetentionRules].(retentionRules); ok {
+            bkt.RetentionRules = rules
+        } else {
+            for _, r := range r.slcResource(fieldBucketRetentionRules) {
+                bkt.RetentionRules = append(bkt.RetentionRules, retentionRule{
+                    Type: r.stringShort(fieldType),
+                    Seconds: r.intShort(fieldRetentionRulesEverySeconds),
+                })
+            }
+        }
 
         failures := p.parseNestedLabels(r, func(l *label) error {
@@ -440,7 +449,7 @@
         p.mBuckets[r.Name()] = bkt
 
-        return failures
+        return append(failures, bkt.valid()...)
     })
 }
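graphBuckets now accepts retention rules in two shapes: an already-typed retentionRules value, or the generic slice that a decoded YAML/JSON pkg produces, which the else branch walks via slcResource/stringShort/intShort. A rough standalone sketch of that fallback, assuming the decoder hands back []map[string]interface{} (the real code goes through the Resource helpers instead):

package main

import "fmt"

// Local stand-in for the rule produced by the fallback branch in graphBuckets.
type retentionRule struct {
    Type    string
    Seconds int
}

func rulesFromRaw(raw []map[string]interface{}) []retentionRule {
    var rules []retentionRule
    for _, m := range raw {
        typ, _ := m["type"].(string)
        // YAML/JSON decoders commonly hand numbers back as int or float64,
        // so accept either here.
        var secs int
        switch v := m["everySeconds"].(type) {
        case int:
            secs = v
        case float64:
            secs = int(v)
        }
        rules = append(rules, retentionRule{Type: typ, Seconds: secs})
    }
    return rules
}

func main() {
    raw := []map[string]interface{}{{"type": "expire", "everySeconds": 3600}}
    fmt.Printf("%+v\n", rulesFromRaw(raw))
}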
@@ -535,7 +544,7 @@ func (p *Pkg) graphVariables() error {
             Description: r.stringShort(fieldDescription),
             Type: strings.ToLower(r.stringShort(fieldType)),
             Query: strings.TrimSpace(r.stringShort(fieldQuery)),
-            Language: strings.ToLower(strings.TrimSpace(r.stringShort(fieldLegendLanguage))),
+            Language: strings.ToLower(strings.TrimSpace(r.stringShort(fieldLanguage))),
             ConstValues: r.slcStr(fieldValues),
             MapValues: r.mapStrStr(fieldValues),
         }
@@ -809,11 +818,6 @@ func (r Resource) boolShort(key string) bool {
     return b
 }
 
-func (r Resource) duration(key string) time.Duration {
-    dur, _ := time.ParseDuration(r.stringShort(key))
-    return dur
-}
-
 func (r Resource) float64(key string) (float64, bool) {
     f, ok := r[key].(float64)
     if ok {

View File

@@ -117,9 +117,9 @@ spec:
         actual := buckets[0]
         expectedBucket := bucket{
             Name: "rucket_11",
             Description: "bucket 1 description",
-            RetentionPeriod: time.Hour,
+            RetentionRules: retentionRules{newRetentionRule(time.Hour)},
         }
         assert.Equal(t, expectedBucket, *actual)
     })

View File

@@ -829,9 +829,10 @@ func (s *Service) rollbackBuckets(buckets []*bucket) error {
             continue
         }
 
+        rp := b.RetentionRules.RP()
         _, err := s.bucketSVC.UpdateBucket(context.Background(), b.ID(), influxdb.BucketUpdate{
             Description: &b.Description,
-            RetentionPeriod: &b.RetentionPeriod,
+            RetentionPeriod: &rp,
         })
         if err != nil {
             errs = append(errs, b.ID().String())
@@ -847,10 +848,11 @@ func (s *Service) rollbackBuckets(buckets []*bucket) error {
 }
 
 func (s *Service) applyBucket(ctx context.Context, b *bucket) (influxdb.Bucket, error) {
+    rp := b.RetentionRules.RP()
     if b.existing != nil {
         influxBucket, err := s.bucketSVC.UpdateBucket(ctx, b.ID(), influxdb.BucketUpdate{
             Description: &b.Description,
-            RetentionPeriod: &b.RetentionPeriod,
+            RetentionPeriod: &rp,
         })
         if err != nil {
             return influxdb.Bucket{}, err
@@ -862,7 +864,7 @@ func (s *Service) applyBucket(ctx context.Context, b *bucket) (influxdb.Bucket, error) {
         OrgID: b.OrgID,
         Description: b.Description,
         Name: b.Name,
-        RetentionPeriod: b.RetentionPeriod,
+        RetentionPeriod: rp,
     }
     err := s.bucketSVC.CreateBucket(ctx, &influxBucket)
     if err != nil {
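Both the rollback and apply paths collapse the rule list back to a single duration because the bucket service still speaks RetentionPeriod; UpdateBucket takes a pointer to it, which is why rp is computed into a local first. A minimal sketch of that shape, with a stand-in type rather than the influxdb package:

package main

import (
    "fmt"
    "time"
)

// Stand-in for influxdb.BucketUpdate: the service-side update carries a
// single optional RetentionPeriod rather than a rule list.
type bucketUpdate struct {
    Description     *string
    RetentionPeriod *time.Duration
}

func main() {
    desc := "bucket 1 description"
    // What RetentionRules.RP() yields for a single rule with everySeconds: 3600.
    rp := 3600 * time.Second

    upd := bucketUpdate{Description: &desc, RetentionPeriod: &rp}
    fmt.Println(*upd.RetentionPeriod) // 1h0m0s
}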

View File

@@ -234,7 +234,7 @@ func TestService(t *testing.T) {
             OrgID: orgID,
             Name: pkgBkt.Name,
             Description: pkgBkt.Description,
-            RetentionPeriod: pkgBkt.RetentionPeriod,
+            RetentionPeriod: pkgBkt.RetentionRules.RP(),
         }
 
         fakeBktSVC := mock.NewBucketService()

View File

@@ -11,6 +11,12 @@
       {
         "kind": "Bucket",
         "name": "rucket_11",
+        "retentionRules": [
+          {
+            "type": "expire",
+            "everySeconds": 3600
+          }
+        ],
         "retention_period": "1h",
         "description": "bucket 1 description"
       }

View File

@@ -8,5 +8,7 @@ spec:
   resources:
     - kind: Bucket
       name: rucket_11
-      retention_period: 1h
       description: bucket 1 description
+      retentionRules:
+        - type: expire
+          everySeconds: 3600