2019-10-23 17:09:04 +00:00
|
|
|
package pkger
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"encoding/json"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"io/ioutil"
|
2020-01-12 02:25:19 +00:00
|
|
|
"net/http"
|
2019-10-23 17:09:04 +00:00
|
|
|
"sort"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
"time"
|
|
|
|
|
2019-11-01 18:11:42 +00:00
|
|
|
"github.com/influxdata/influxdb"
|
2020-01-12 02:49:55 +00:00
|
|
|
"github.com/influxdata/influxdb/pkg/jsonnet"
|
2019-10-23 17:09:04 +00:00
|
|
|
"gopkg.in/yaml.v3"
|
|
|
|
)
|
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
type (
	// ReaderFn is used for functional inputs to abstract the individual
	// entrypoints for the reader itself.
	ReaderFn func() (io.Reader, error)

	// Encoder encodes a value into its serialized form.
	Encoder interface {
		Encode(v interface{}) error
	}

	// Encoding describes the encoding for the raw package data. The
	// encoding determines how the raw data is parsed.
	Encoding int
)
|
2019-10-23 17:09:04 +00:00
|
|
|
|
|
|
|
// encoding types
const (
	// EncodingUnknown is the zero value; Parse rejects it with ErrInvalidEncoding.
	EncodingUnknown Encoding = iota
	EncodingJSON
	EncodingJsonnet
	EncodingSource // EncodingSource draws the encoding type by inferring it from the source.
	EncodingYAML
)
|
|
|
|
|
2019-11-05 01:40:42 +00:00
|
|
|
// String provides the string representation of the encoding.
|
|
|
|
func (e Encoding) String() string {
|
|
|
|
switch e {
|
|
|
|
case EncodingJSON:
|
|
|
|
return "json"
|
2020-01-12 02:49:55 +00:00
|
|
|
case EncodingJsonnet:
|
|
|
|
return "jsonnet"
|
2020-01-12 02:25:19 +00:00
|
|
|
case EncodingSource:
|
|
|
|
return "source"
|
2019-11-05 01:40:42 +00:00
|
|
|
case EncodingYAML:
|
|
|
|
return "yaml"
|
|
|
|
default:
|
|
|
|
return "unknown"
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-30 17:55:13 +00:00
|
|
|
// ErrInvalidEncoding indicates the provided encoding is not one the
// parser (or encoder) supports.
var ErrInvalidEncoding = errors.New("invalid encoding provided")
|
|
|
|
|
2019-10-23 17:09:04 +00:00
|
|
|
// Parse parses a pkg defined by the encoding and readerFns. As of writing this
|
2020-01-24 19:25:03 +00:00
|
|
|
// we can parse both a YAML, JSON, and Jsonnet formats of the Pkg model.
|
2019-11-18 18:50:45 +00:00
|
|
|
func Parse(encoding Encoding, readerFn ReaderFn, opts ...ValidateOptFn) (*Pkg, error) {
|
2019-10-23 17:09:04 +00:00
|
|
|
r, err := readerFn()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
switch encoding {
|
|
|
|
case EncodingJSON:
|
2019-11-18 18:50:45 +00:00
|
|
|
return parseJSON(r, opts...)
|
2020-01-12 02:49:55 +00:00
|
|
|
case EncodingJsonnet:
|
|
|
|
return parseJsonnet(r, opts...)
|
2020-01-12 02:25:19 +00:00
|
|
|
case EncodingSource:
|
|
|
|
return parseSource(r, opts...)
|
|
|
|
case EncodingYAML:
|
|
|
|
return parseYAML(r, opts...)
|
2019-10-23 17:09:04 +00:00
|
|
|
default:
|
2019-10-30 17:55:13 +00:00
|
|
|
return nil, ErrInvalidEncoding
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// FromFile reads a file from disk and provides a reader from it.
|
|
|
|
func FromFile(filePath string) ReaderFn {
|
|
|
|
return func() (io.Reader, error) {
|
|
|
|
// not using os.Open to avoid having to deal with closing the file in here
|
|
|
|
b, err := ioutil.ReadFile(filePath)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return bytes.NewBuffer(b), nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// FromReader simply passes the reader along. Useful when consuming
|
|
|
|
// this from an HTTP request body. There are a number of other useful
|
|
|
|
// places for this functional input.
|
|
|
|
func FromReader(r io.Reader) ReaderFn {
|
|
|
|
return func() (io.Reader, error) {
|
|
|
|
return r, nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// FromString parses a pkg from a raw string value. This is very useful
|
|
|
|
// in tests.
|
|
|
|
func FromString(s string) ReaderFn {
|
|
|
|
return func() (io.Reader, error) {
|
|
|
|
return strings.NewReader(s), nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-01-12 02:25:19 +00:00
|
|
|
// FromHTTPRequest parses a pkg from the request body of a HTTP request. This is
|
|
|
|
// very useful when using packages that are hosted..
|
|
|
|
func FromHTTPRequest(addr string) ReaderFn {
|
|
|
|
return func() (io.Reader, error) {
|
|
|
|
client := http.Client{Timeout: 5 * time.Minute}
|
|
|
|
resp, err := client.Get(addr)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
var buf bytes.Buffer
|
|
|
|
if _, err := io.Copy(&buf, resp.Body); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return &buf, nil
|
|
|
|
}
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
2019-11-18 18:50:45 +00:00
|
|
|
func parseJSON(r io.Reader, opts ...ValidateOptFn) (*Pkg, error) {
|
|
|
|
return parse(json.NewDecoder(r), opts...)
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
2020-01-12 02:49:55 +00:00
|
|
|
func parseJsonnet(r io.Reader, opts ...ValidateOptFn) (*Pkg, error) {
|
|
|
|
return parse(jsonnet.NewDecoder(r), opts...)
|
|
|
|
}
|
|
|
|
|
2020-01-12 02:25:19 +00:00
|
|
|
func parseSource(r io.Reader, opts ...ValidateOptFn) (*Pkg, error) {
|
|
|
|
var b []byte
|
|
|
|
if byter, ok := r.(interface{ Bytes() []byte }); ok {
|
|
|
|
b = byter.Bytes()
|
|
|
|
} else {
|
|
|
|
bb, err := ioutil.ReadAll(r)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("failed to decode pkg source: %s", err)
|
|
|
|
}
|
|
|
|
b = bb
|
|
|
|
}
|
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
contentType := http.DetectContentType(b[:512])
|
2020-01-12 02:25:19 +00:00
|
|
|
switch {
|
2020-01-12 02:49:55 +00:00
|
|
|
case strings.Contains(contentType, "jsonnet"):
|
|
|
|
// highly unlikely to fall in here with supported content type detection as is
|
|
|
|
return parseJsonnet(bytes.NewReader(b), opts...)
|
2020-01-12 02:25:19 +00:00
|
|
|
case strings.Contains(contentType, "json"):
|
|
|
|
return parseJSON(bytes.NewReader(b), opts...)
|
2020-01-12 02:49:55 +00:00
|
|
|
case strings.Contains(contentType, "yaml"),
|
|
|
|
strings.Contains(contentType, "yml"):
|
2020-01-12 02:25:19 +00:00
|
|
|
return parseYAML(bytes.NewReader(b), opts...)
|
2020-01-12 02:49:55 +00:00
|
|
|
default:
|
2020-01-14 22:23:47 +00:00
|
|
|
return parseYAML(bytes.NewReader(b), opts...)
|
2020-01-12 02:25:19 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func parseYAML(r io.Reader, opts ...ValidateOptFn) (*Pkg, error) {
|
2020-01-13 19:13:37 +00:00
|
|
|
dec := yaml.NewDecoder(r)
|
|
|
|
|
|
|
|
var pkg Pkg
|
|
|
|
for {
|
|
|
|
// forced to use this for loop b/c the yaml dependency does not
|
|
|
|
// decode multi documents.
|
|
|
|
var k Object
|
|
|
|
err := dec.Decode(&k)
|
|
|
|
if err == io.EOF {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
pkg.Objects = append(pkg.Objects, k)
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := pkg.Validate(opts...); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return &pkg, nil
|
2020-01-12 02:25:19 +00:00
|
|
|
}
|
|
|
|
|
2019-10-23 17:09:04 +00:00
|
|
|
// decoder is the shared behavior of the json/jsonnet decoders consumed
// by parse.
type decoder interface {
	Decode(interface{}) error
}
|
|
|
|
|
2019-11-18 18:50:45 +00:00
|
|
|
func parse(dec decoder, opts ...ValidateOptFn) (*Pkg, error) {
|
2019-10-23 17:09:04 +00:00
|
|
|
var pkg Pkg
|
2020-01-13 19:13:37 +00:00
|
|
|
if err := dec.Decode(&pkg.Objects); err != nil {
|
2019-10-23 17:09:04 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2019-11-18 18:50:45 +00:00
|
|
|
if err := pkg.Validate(opts...); err != nil {
|
2019-11-05 01:40:42 +00:00
|
|
|
return nil, err
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return &pkg, nil
|
|
|
|
}
|
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
// Object describes the metadata and raw spec for an entity of a package kind.
type Object struct {
	APIVersion string   `json:"apiVersion" yaml:"apiVersion"`
	Type       Kind     `json:"kind" yaml:"kind"`
	Metadata   Resource `json:"metadata" yaml:"metadata"`
	Spec       Resource `json:"spec" yaml:"spec"`
}
|
|
|
|
|
|
|
|
// Name returns the object's name as recorded in its metadata.
func (k Object) Name() string {
	return k.Metadata.references(fieldName).String()
}
|
|
|
|
|
2019-10-23 17:09:04 +00:00
|
|
|
// Pkg is the model for a package. The resources are more generic that one might
// expect at first glance. This was done on purpose. The way json/yaml/toml or
// w/e scripting you want to use, can have very different ways of parsing. The
// different parsers are limited for the parsers that do not come from the std
// lib (looking at you yaml/v2). This allows us to parse it and leave the matching
// to another power, the graphing of the package is handled within itself.
type Pkg struct {
	// Objects is the raw decoded object list; graphing populates the maps below.
	Objects []Object `json:"-" yaml:"-"`

	// graphed resources, populated by graphResources and its helpers
	mLabels                map[string]*label
	mBuckets               map[string]*bucket
	mChecks                map[string]*check
	mDashboards            []*dashboard
	mNotificationEndpoints map[string]*notificationEndpoint
	mNotificationRules     []*notificationRule
	mTasks                 []*task
	mTelegrafs             []*telegraf
	mVariables             map[string]*variable

	// mEnv tracks env references seen while graphing; value reports whether
	// a concrete value has been matched to the reference (see missingEnvRefs).
	mEnv map[string]bool
	// mEnvVals holds concrete env values supplied via applyEnvRefs.
	mEnvVals map[string]string
	// mSecrets maps secret reference -> found-in-platform (see missingSecrets).
	mSecrets map[string]bool

	isVerified bool // dry run has verified pkg resources with existing resources
	isParsed   bool // indicates the pkg has been parsed and all resources graphed accordingly
}
|
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
// Encode is a helper for encoding the pkg correctly.
|
|
|
|
func (p *Pkg) Encode(encoding Encoding) ([]byte, error) {
|
|
|
|
var (
|
|
|
|
buf bytes.Buffer
|
|
|
|
err error
|
|
|
|
)
|
|
|
|
switch encoding {
|
|
|
|
case EncodingJSON, EncodingJsonnet:
|
|
|
|
enc := json.NewEncoder(&buf)
|
|
|
|
enc.SetIndent("", "\t")
|
|
|
|
err = enc.Encode(p.Objects)
|
|
|
|
case EncodingYAML:
|
|
|
|
enc := yaml.NewEncoder(&buf)
|
|
|
|
for _, k := range p.Objects {
|
|
|
|
if err = enc.Encode(k); err != nil {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
default:
|
|
|
|
return nil, ErrInvalidEncoding
|
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return buf.Bytes(), nil
|
|
|
|
}
|
|
|
|
|
2019-10-30 21:13:42 +00:00
|
|
|
// Summary returns a package Summary that describes all the resources and
|
2019-10-23 17:09:04 +00:00
|
|
|
// associations the pkg contains. It is very useful for informing users of
|
|
|
|
// the changes that will take place when this pkg would be applied.
|
2019-10-26 02:11:47 +00:00
|
|
|
func (p *Pkg) Summary() Summary {
|
2020-01-12 02:25:19 +00:00
|
|
|
// ensure zero values for arrays aren't returned, but instead
|
|
|
|
// we always returning an initialized slice.
|
|
|
|
sum := Summary{
|
|
|
|
Buckets: []SummaryBucket{},
|
|
|
|
Checks: []SummaryCheck{},
|
|
|
|
Dashboards: []SummaryDashboard{},
|
|
|
|
NotificationEndpoints: []SummaryNotificationEndpoint{},
|
|
|
|
NotificationRules: []SummaryNotificationRule{},
|
|
|
|
Labels: []SummaryLabel{},
|
2020-02-05 00:15:20 +00:00
|
|
|
MissingEnvs: p.missingEnvRefs(),
|
2020-01-12 02:25:19 +00:00
|
|
|
MissingSecrets: []string{},
|
|
|
|
Tasks: []SummaryTask{},
|
|
|
|
TelegrafConfigs: []SummaryTelegraf{},
|
|
|
|
Variables: []SummaryVariable{},
|
|
|
|
}
|
2019-10-23 17:09:04 +00:00
|
|
|
|
2019-12-27 19:22:05 +00:00
|
|
|
// only add this after dry run has been completed
|
|
|
|
if p.isVerified {
|
|
|
|
sum.MissingSecrets = p.missingSecrets()
|
|
|
|
}
|
|
|
|
|
2019-10-30 21:13:42 +00:00
|
|
|
for _, b := range p.buckets() {
|
2019-10-28 22:23:40 +00:00
|
|
|
sum.Buckets = append(sum.Buckets, b.summarize())
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
2019-10-30 21:13:42 +00:00
|
|
|
|
2019-12-18 01:57:44 +00:00
|
|
|
for _, c := range p.checks() {
|
|
|
|
sum.Checks = append(sum.Checks, c.summarize())
|
|
|
|
}
|
|
|
|
|
2019-10-30 21:13:42 +00:00
|
|
|
for _, d := range p.dashboards() {
|
|
|
|
sum.Dashboards = append(sum.Dashboards, d.summarize())
|
|
|
|
}
|
2019-10-23 17:09:04 +00:00
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
for _, l := range p.labels() {
|
|
|
|
sum.Labels = append(sum.Labels, l.summarize())
|
|
|
|
}
|
|
|
|
|
2019-12-12 19:09:32 +00:00
|
|
|
sum.LabelMappings = p.labelMappings()
|
2019-10-28 22:23:40 +00:00
|
|
|
|
2019-12-06 07:05:32 +00:00
|
|
|
for _, n := range p.notificationEndpoints() {
|
|
|
|
sum.NotificationEndpoints = append(sum.NotificationEndpoints, n.summarize())
|
|
|
|
}
|
|
|
|
|
2019-12-19 19:56:03 +00:00
|
|
|
for _, r := range p.notificationRules() {
|
|
|
|
sum.NotificationRules = append(sum.NotificationRules, r.summarize())
|
|
|
|
}
|
|
|
|
|
2019-12-23 08:22:48 +00:00
|
|
|
for _, t := range p.tasks() {
|
|
|
|
sum.Tasks = append(sum.Tasks, t.summarize())
|
|
|
|
}
|
|
|
|
|
2019-12-03 18:22:59 +00:00
|
|
|
for _, t := range p.telegrafs() {
|
|
|
|
sum.TelegrafConfigs = append(sum.TelegrafConfigs, t.summarize())
|
|
|
|
}
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
for _, v := range p.variables() {
|
|
|
|
sum.Variables = append(sum.Variables, v.summarize())
|
|
|
|
}
|
|
|
|
|
2019-10-23 17:09:04 +00:00
|
|
|
return sum
|
|
|
|
}
|
|
|
|
|
2020-02-06 17:28:04 +00:00
|
|
|
func (p *Pkg) applyEnvRefs(envRefs map[string]string) error {
|
|
|
|
if len(envRefs) == 0 {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-02-06 05:42:01 +00:00
|
|
|
if p.mEnvVals == nil {
|
|
|
|
p.mEnvVals = make(map[string]string)
|
|
|
|
}
|
|
|
|
|
2020-02-05 00:15:20 +00:00
|
|
|
for k, v := range envRefs {
|
2020-02-06 05:42:01 +00:00
|
|
|
p.mEnvVals[k] = v
|
2020-02-05 00:15:20 +00:00
|
|
|
}
|
2020-02-06 17:28:04 +00:00
|
|
|
|
|
|
|
return p.Validate()
|
2020-02-05 00:15:20 +00:00
|
|
|
}
|
|
|
|
|
2019-12-27 19:22:05 +00:00
|
|
|
func (p *Pkg) applySecrets(secrets map[string]string) {
|
|
|
|
for k := range secrets {
|
|
|
|
p.mSecrets[k] = true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-02-06 20:26:10 +00:00
|
|
|
// Combine combines pkgs together. Is useful when you want to take multiple disparate pkgs
|
|
|
|
// and compile them into one to take advantage of the parser and service guarantees.
|
|
|
|
func Combine(pkgs ...*Pkg) (*Pkg, error) {
|
|
|
|
newPkg := new(Pkg)
|
|
|
|
for _, p := range pkgs {
|
|
|
|
newPkg.Objects = append(newPkg.Objects, p.Objects...)
|
|
|
|
}
|
|
|
|
|
|
|
|
return newPkg, newPkg.Validate()
|
|
|
|
}
|
|
|
|
|
2019-11-09 02:12:48 +00:00
|
|
|
type (
	// validateOpt carries the toggles that ValidateOptFn functions mutate.
	validateOpt struct {
		minResources bool // require at least one object in the pkg
		skipValidate bool // collect parse errors but do not fail on them
	}

	// ValidateOptFn provides a means to disable desired validation checks.
	ValidateOptFn func(*validateOpt)
)
|
|
|
|
|
|
|
|
// ValidWithoutResources ignores the validation check for minimum number
|
|
|
|
// of resources. This is useful for the service Create to ignore this and
|
|
|
|
// allow the creation of a pkg without resources.
|
|
|
|
func ValidWithoutResources() ValidateOptFn {
|
|
|
|
return func(opt *validateOpt) {
|
|
|
|
opt.minResources = false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-23 22:31:56 +00:00
|
|
|
// ValidSkipParseError ignores the validation check from the of resources. This
|
|
|
|
// is useful for the service Create to ignore this and allow the creation of a
|
|
|
|
// pkg without resources.
|
|
|
|
func ValidSkipParseError() ValidateOptFn {
|
|
|
|
return func(opt *validateOpt) {
|
|
|
|
opt.skipValidate = true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-05 01:40:42 +00:00
|
|
|
// Validate will graph all resources and validate every thing is in a useful form.
|
2019-11-09 02:12:48 +00:00
|
|
|
func (p *Pkg) Validate(opts ...ValidateOptFn) error {
|
|
|
|
opt := &validateOpt{minResources: true}
|
|
|
|
for _, o := range opts {
|
|
|
|
o(opt)
|
|
|
|
}
|
2020-01-13 19:13:37 +00:00
|
|
|
|
|
|
|
var setupFns []func() error
|
2019-11-09 02:12:48 +00:00
|
|
|
if opt.minResources {
|
|
|
|
setupFns = append(setupFns, p.validResources)
|
|
|
|
}
|
|
|
|
setupFns = append(setupFns, p.graphResources)
|
2019-11-05 01:40:42 +00:00
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
var pErr parseErr
|
2019-11-05 01:40:42 +00:00
|
|
|
for _, fn := range setupFns {
|
|
|
|
if err := fn(); err != nil {
|
2019-11-22 01:07:12 +00:00
|
|
|
if IsParseErr(err) {
|
|
|
|
pErr.append(err.(*parseErr).Resources...)
|
|
|
|
continue
|
|
|
|
}
|
2019-11-05 01:40:42 +00:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
2019-11-06 18:02:45 +00:00
|
|
|
|
2019-12-23 22:31:56 +00:00
|
|
|
if len(pErr.Resources) > 0 && !opt.skipValidate {
|
2019-11-22 01:07:12 +00:00
|
|
|
return &pErr
|
|
|
|
}
|
|
|
|
|
2019-11-06 18:02:45 +00:00
|
|
|
p.isParsed = true
|
2019-11-05 01:40:42 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-10-26 02:11:47 +00:00
|
|
|
func (p *Pkg) buckets() []*bucket {
|
|
|
|
buckets := make([]*bucket, 0, len(p.mBuckets))
|
|
|
|
for _, b := range p.mBuckets {
|
2019-10-23 17:09:04 +00:00
|
|
|
buckets = append(buckets, b)
|
|
|
|
}
|
|
|
|
|
2020-03-16 18:25:39 +00:00
|
|
|
sort.Slice(buckets, func(i, j int) bool { return buckets[i].Name() < buckets[j].Name() })
|
2019-10-23 17:09:04 +00:00
|
|
|
|
|
|
|
return buckets
|
|
|
|
}
|
|
|
|
|
2019-12-18 01:57:44 +00:00
|
|
|
func (p *Pkg) checks() []*check {
|
|
|
|
checks := make([]*check, 0, len(p.mChecks))
|
|
|
|
for _, c := range p.mChecks {
|
|
|
|
checks = append(checks, c)
|
|
|
|
}
|
|
|
|
|
|
|
|
sort.Slice(checks, func(i, j int) bool { return checks[i].Name() < checks[j].Name() })
|
|
|
|
|
|
|
|
return checks
|
|
|
|
}
|
|
|
|
|
2019-10-26 02:11:47 +00:00
|
|
|
func (p *Pkg) labels() []*label {
|
2019-12-06 00:53:00 +00:00
|
|
|
labels := make(sortedLabels, 0, len(p.mLabels))
|
2019-10-26 02:11:47 +00:00
|
|
|
for _, b := range p.mLabels {
|
2019-10-24 23:59:01 +00:00
|
|
|
labels = append(labels, b)
|
|
|
|
}
|
|
|
|
|
2019-12-03 02:05:10 +00:00
|
|
|
sort.Sort(labels)
|
2019-10-24 23:59:01 +00:00
|
|
|
|
|
|
|
return labels
|
|
|
|
}
|
|
|
|
|
2019-10-30 21:13:42 +00:00
|
|
|
func (p *Pkg) dashboards() []*dashboard {
|
2019-12-03 02:05:10 +00:00
|
|
|
dashes := p.mDashboards[:]
|
2020-02-05 01:23:28 +00:00
|
|
|
sort.Slice(dashes, func(i, j int) bool { return dashes[i].Name() < dashes[j].Name() })
|
2019-10-30 21:13:42 +00:00
|
|
|
return dashes
|
|
|
|
}
|
|
|
|
|
2019-12-06 07:05:32 +00:00
|
|
|
func (p *Pkg) notificationEndpoints() []*notificationEndpoint {
|
|
|
|
endpoints := make([]*notificationEndpoint, 0, len(p.mNotificationEndpoints))
|
|
|
|
for _, e := range p.mNotificationEndpoints {
|
|
|
|
endpoints = append(endpoints, e)
|
|
|
|
}
|
|
|
|
sort.Slice(endpoints, func(i, j int) bool {
|
|
|
|
ei, ej := endpoints[i], endpoints[j]
|
|
|
|
if ei.kind == ej.kind {
|
|
|
|
return ei.Name() < ej.Name()
|
|
|
|
}
|
|
|
|
return ei.kind < ej.kind
|
|
|
|
})
|
|
|
|
return endpoints
|
|
|
|
}
|
|
|
|
|
2019-12-19 19:56:03 +00:00
|
|
|
func (p *Pkg) notificationRules() []*notificationRule {
|
|
|
|
rules := p.mNotificationRules[:]
|
2020-02-05 01:23:28 +00:00
|
|
|
sort.Slice(rules, func(i, j int) bool { return rules[i].Name() < rules[j].Name() })
|
2019-12-19 19:56:03 +00:00
|
|
|
return rules
|
|
|
|
}
|
|
|
|
|
2020-02-05 00:15:20 +00:00
|
|
|
func (p *Pkg) missingEnvRefs() []string {
|
|
|
|
envRefs := make([]string, 0)
|
2020-02-06 17:28:04 +00:00
|
|
|
for envRef, matching := range p.mEnv {
|
|
|
|
if !matching {
|
2020-02-05 00:15:20 +00:00
|
|
|
envRefs = append(envRefs, envRef)
|
|
|
|
}
|
|
|
|
}
|
2020-02-05 01:23:28 +00:00
|
|
|
sort.Strings(envRefs)
|
2020-02-05 00:15:20 +00:00
|
|
|
return envRefs
|
|
|
|
}
|
|
|
|
|
2019-12-27 19:22:05 +00:00
|
|
|
func (p *Pkg) missingSecrets() []string {
|
|
|
|
secrets := make([]string, 0, len(p.mSecrets))
|
|
|
|
for secret, foundInPlatform := range p.mSecrets {
|
|
|
|
if foundInPlatform {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
secrets = append(secrets, secret)
|
2019-12-16 17:39:55 +00:00
|
|
|
}
|
|
|
|
return secrets
|
|
|
|
}
|
|
|
|
|
2019-12-23 08:22:48 +00:00
|
|
|
func (p *Pkg) tasks() []*task {
|
2019-12-23 18:55:55 +00:00
|
|
|
tasks := p.mTasks[:]
|
2019-12-23 08:22:48 +00:00
|
|
|
|
|
|
|
sort.Slice(tasks, func(i, j int) bool { return tasks[i].Name() < tasks[j].Name() })
|
|
|
|
|
|
|
|
return tasks
|
|
|
|
}
|
|
|
|
|
2019-12-03 18:22:59 +00:00
|
|
|
func (p *Pkg) telegrafs() []*telegraf {
|
2020-02-05 01:23:28 +00:00
|
|
|
teles := make([]*telegraf, 0, len(p.mTelegrafs))
|
|
|
|
for _, t := range p.mTelegrafs {
|
|
|
|
t.config.Name = t.Name()
|
|
|
|
teles = append(teles, t)
|
|
|
|
}
|
2019-12-03 18:22:59 +00:00
|
|
|
sort.Slice(teles, func(i, j int) bool { return teles[i].Name() < teles[j].Name() })
|
|
|
|
return teles
|
|
|
|
}
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
func (p *Pkg) variables() []*variable {
|
|
|
|
vars := make([]*variable, 0, len(p.mVariables))
|
|
|
|
for _, v := range p.mVariables {
|
|
|
|
vars = append(vars, v)
|
|
|
|
}
|
|
|
|
|
2020-02-05 01:23:28 +00:00
|
|
|
sort.Slice(vars, func(i, j int) bool { return vars[i].Name() < vars[j].Name() })
|
2019-11-06 22:41:06 +00:00
|
|
|
|
|
|
|
return vars
|
|
|
|
}
|
|
|
|
|
2019-10-26 02:11:47 +00:00
|
|
|
// labelMappings returns the mappings that will be created for
|
|
|
|
// valid pairs of labels and resources of which all have IDs.
|
|
|
|
// If a resource does not exist yet, a label mapping will not
|
|
|
|
// be returned for it.
|
2019-10-30 17:55:13 +00:00
|
|
|
func (p *Pkg) labelMappings() []SummaryLabelMapping {
|
2020-01-12 02:25:19 +00:00
|
|
|
labels := p.mLabels
|
|
|
|
mappings := make([]SummaryLabelMapping, 0, len(labels))
|
|
|
|
for _, l := range labels {
|
2019-10-28 22:23:40 +00:00
|
|
|
mappings = append(mappings, l.mappingSummary()...)
|
2019-10-26 02:11:47 +00:00
|
|
|
}
|
|
|
|
|
2019-10-30 21:13:42 +00:00
|
|
|
// sort by res type ASC, then res name ASC, then label name ASC
|
|
|
|
sort.Slice(mappings, func(i, j int) bool {
|
|
|
|
n, m := mappings[i], mappings[j]
|
|
|
|
if n.ResourceType < m.ResourceType {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
if n.ResourceType > m.ResourceType {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
if n.ResourceName < m.ResourceName {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
if n.ResourceName > m.ResourceName {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
return n.LabelName < m.LabelName
|
|
|
|
})
|
|
|
|
|
2019-10-26 02:11:47 +00:00
|
|
|
return mappings
|
|
|
|
}
|
|
|
|
|
2019-10-30 17:55:13 +00:00
|
|
|
func (p *Pkg) validResources() error {
|
2020-01-13 19:13:37 +00:00
|
|
|
if len(p.Objects) > 0 {
|
2019-10-30 17:55:13 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
res := resourceErr{
|
2020-01-13 19:13:37 +00:00
|
|
|
Kind: KindPackage.String(),
|
2019-11-22 01:07:12 +00:00
|
|
|
RootErrs: []validationErr{{
|
|
|
|
Field: "resources",
|
2020-01-13 19:13:37 +00:00
|
|
|
Msg: "at least 1 kind must be provided",
|
2019-11-22 01:07:12 +00:00
|
|
|
}},
|
2019-10-30 17:55:13 +00:00
|
|
|
}
|
2019-11-22 01:07:12 +00:00
|
|
|
var err parseErr
|
2019-10-30 17:55:13 +00:00
|
|
|
err.append(res)
|
|
|
|
return &err
|
|
|
|
}
|
|
|
|
|
2019-10-26 02:11:47 +00:00
|
|
|
func (p *Pkg) graphResources() error {
|
2020-02-06 17:28:04 +00:00
|
|
|
p.mEnv = make(map[string]bool)
|
2019-12-27 19:22:05 +00:00
|
|
|
p.mSecrets = make(map[string]bool)
|
2019-12-16 17:39:55 +00:00
|
|
|
|
2019-12-06 07:05:32 +00:00
|
|
|
graphFns := []func() *parseErr{
|
|
|
|
// labels are first, this is to validate associations with other resources
|
2019-10-26 02:11:47 +00:00
|
|
|
p.graphLabels,
|
2019-11-06 22:41:06 +00:00
|
|
|
p.graphVariables,
|
2019-10-26 02:11:47 +00:00
|
|
|
p.graphBuckets,
|
2019-12-18 01:57:44 +00:00
|
|
|
p.graphChecks,
|
2019-10-30 21:13:42 +00:00
|
|
|
p.graphDashboards,
|
2019-12-06 07:05:32 +00:00
|
|
|
p.graphNotificationEndpoints,
|
2019-12-19 19:56:03 +00:00
|
|
|
p.graphNotificationRules,
|
2019-12-23 08:22:48 +00:00
|
|
|
p.graphTasks,
|
2019-12-03 18:22:59 +00:00
|
|
|
p.graphTelegrafs,
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
var pErr parseErr
|
2019-10-23 17:09:04 +00:00
|
|
|
for _, fn := range graphFns {
|
|
|
|
if err := fn(); err != nil {
|
2019-12-06 07:05:32 +00:00
|
|
|
pErr.append(err.Resources...)
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
if len(pErr.Resources) > 0 {
|
|
|
|
sort.Slice(pErr.Resources, func(i, j int) bool {
|
|
|
|
ir, jr := pErr.Resources[i], pErr.Resources[j]
|
|
|
|
return *ir.Idx < *jr.Idx
|
2019-11-14 00:24:05 +00:00
|
|
|
})
|
2019-11-22 01:07:12 +00:00
|
|
|
return &pErr
|
2019-11-14 00:24:05 +00:00
|
|
|
}
|
|
|
|
|
2019-10-23 17:09:04 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-12-06 07:05:32 +00:00
|
|
|
// graphBuckets walks every KindBucket object, building the mBuckets map and
// wiring up label mappings. Duplicate names (either the metadata pkg name or
// the effective display name) are rejected as validation errors.
func (p *Pkg) graphBuckets() *parseErr {
	p.mBuckets = make(map[string]*bucket)
	// tracks effective (display) names to reject duplicates across objects
	uniqNames := make(map[string]bool)
	return p.eachResource(KindBucket, bucketNameMinLength, func(o Object) []validationErr {
		nameRef := p.getRefWithKnownEnvs(o.Metadata, fieldName)
		// duplicate pkg name in metadata
		if _, ok := p.mBuckets[nameRef.String()]; ok {
			return []validationErr{
				objectValidationErr(fieldMetadata, validationErr{
					Field: fieldName,
					Msg:   "duplicate name: " + nameRef.String(),
				}),
			}
		}

		// an optional display name in the spec overrides the metadata name
		displayNameRef := p.getRefWithKnownEnvs(o.Spec, fieldName)

		name := nameRef.String()
		if displayName := displayNameRef.String(); displayName != "" {
			name = displayName
		}
		// duplicate effective name across objects
		if uniqNames[name] {
			return []validationErr{
				objectValidationErr(fieldSpec, validationErr{
					Field: fieldName,
					Msg:   "duplicate name: " + nameRef.String(),
				}),
			}
		}
		uniqNames[name] = true

		bkt := &bucket{
			name:        nameRef,
			displayName: displayNameRef,
			Description: o.Spec.stringShort(fieldDescription),
		}
		// retention rules may arrive pre-typed or as a generic resource slice
		if rules, ok := o.Spec[fieldBucketRetentionRules].(retentionRules); ok {
			bkt.RetentionRules = rules
		} else {
			for _, r := range o.Spec.slcResource(fieldBucketRetentionRules) {
				bkt.RetentionRules = append(bkt.RetentionRules, retentionRule{
					Type:    r.stringShort(fieldType),
					Seconds: r.intShort(fieldRetentionRulesEverySeconds),
				})
			}
		}
		// register the env/name references with the pkg
		p.setRefs(bkt.name, bkt.displayName)

		// associate nested labels with this bucket; labels were graphed first
		failures := p.parseNestedLabels(o.Spec, func(l *label) error {
			bkt.labels = append(bkt.labels, l)
			p.mLabels[l.PkgName()].setMapping(bkt, false)
			return nil
		})
		sort.Sort(bkt.labels)

		p.mBuckets[bkt.PkgName()] = bkt

		return append(failures, bkt.valid()...)
	})
}
|
|
|
|
|
2019-12-06 07:05:32 +00:00
|
|
|
// graphLabels walks every KindLabel object, building the mLabels map that
// later graphing passes (buckets, dashboards, etc) use to resolve label
// associations. Duplicate names (metadata pkg name or effective display
// name) are rejected as validation errors.
func (p *Pkg) graphLabels() *parseErr {
	p.mLabels = make(map[string]*label)
	// tracks effective (display) names to reject duplicates across objects
	uniqNames := make(map[string]bool)
	return p.eachResource(KindLabel, labelNameMinLength, func(o Object) []validationErr {
		nameRef := p.getRefWithKnownEnvs(o.Metadata, fieldName)
		// duplicate pkg name in metadata
		if _, ok := p.mLabels[nameRef.String()]; ok {
			return []validationErr{
				objectValidationErr(fieldMetadata, validationErr{
					Field: fieldName,
					Msg:   "duplicate name: " + nameRef.String(),
				}),
			}
		}

		// an optional display name in the spec overrides the metadata name
		displayNameRef := p.getRefWithKnownEnvs(o.Spec, fieldName)

		name := nameRef.String()
		if displayName := displayNameRef.String(); displayName != "" {
			name = displayName
		}
		// duplicate effective name across objects
		if uniqNames[name] {
			return []validationErr{
				objectValidationErr(fieldSpec, validationErr{
					Field: fieldName,
					Msg:   "duplicate name: " + nameRef.String(),
				}),
			}
		}
		uniqNames[name] = true

		l := &label{
			name:        nameRef,
			displayName: displayNameRef,
			Color:       o.Spec.stringShort(fieldLabelColor),
			Description: o.Spec.stringShort(fieldDescription),
		}
		p.mLabels[l.PkgName()] = l
		// register the env/name references with the pkg
		p.setRefs(nameRef, displayNameRef)

		return l.valid()
	})
}
|
|
|
|
|
2019-12-18 01:57:44 +00:00
|
|
|
// graphChecks parses every check object in the pkg (threshold and deadman
// kinds) into p.mChecks, wiring nested label mappings and registering
// env/secret references. It returns a *parseErr collecting all validation
// failures, or nil when every check is valid.
func (p *Pkg) graphChecks() *parseErr {
	p.mChecks = make(map[string]*check)

	// Map each externally visible object Kind to the internal checkKind
	// discriminator; both kinds are walked with the same parse logic.
	checkKinds := []struct {
		kind      Kind
		checkKind checkKind
	}{
		{kind: KindCheckThreshold, checkKind: checkKindThreshold},
		{kind: KindCheckDeadman, checkKind: checkKindDeadman},
	}
	var pErr parseErr
	for _, checkKind := range checkKinds { // NOTE(review): loop var shadows the checkKind type
		err := p.eachResource(checkKind.kind, 1, func(o Object) []validationErr {
			// Resolve the metadata name, substituting any known env values.
			nameRef := p.getRefWithKnownEnvs(o.Metadata, fieldName)
			// Reject duplicates keyed by the resolved name.
			if _, ok := p.mChecks[nameRef.String()]; ok {
				return []validationErr{{
					Field: fieldName,
					Msg:   "duplicate name: " + o.Name(),
				}}
			}

			ch := &check{
				kind:          checkKind.checkKind,
				name:          nameRef,
				description:   o.Spec.stringShort(fieldDescription),
				every:         o.Spec.durationShort(fieldEvery),
				level:         o.Spec.stringShort(fieldLevel),
				offset:        o.Spec.durationShort(fieldOffset),
				query:         strings.TrimSpace(o.Spec.stringShort(fieldQuery)),
				reportZero:    o.Spec.boolShort(fieldCheckReportZero),
				staleTime:     o.Spec.durationShort(fieldCheckStaleTime),
				status:        normStr(o.Spec.stringShort(fieldStatus)),
				statusMessage: o.Spec.stringShort(fieldCheckStatusMessageTemplate),
				timeSince:     o.Spec.durationShort(fieldCheckTimeSince),
			}
			// Collect tag key/value pairs attached to the check.
			for _, tagRes := range o.Spec.slcResource(fieldCheckTags) {
				ch.tags = append(ch.tags, struct{ k, v string }{
					k: tagRes.stringShort(fieldKey),
					v: tagRes.stringShort(fieldValue),
				})
			}
			// Collect thresholds; level is normalized to upper case.
			for _, th := range o.Spec.slcResource(fieldCheckThresholds) {
				ch.thresholds = append(ch.thresholds, threshold{
					threshType: thresholdType(normStr(th.stringShort(fieldType))),
					allVals:    th.boolShort(fieldCheckAllValues),
					level:      strings.TrimSpace(strings.ToUpper(th.stringShort(fieldLevel))),
					max:        th.float64Short(fieldMax),
					min:        th.float64Short(fieldMin),
					val:        th.float64Short(fieldValue),
				})
			}

			// Associate nested labels and record the reverse mapping on the
			// label side so exports/summaries can find it.
			failures := p.parseNestedLabels(o.Spec, func(l *label) error {
				ch.labels = append(ch.labels, l)
				p.mLabels[l.PkgName()].setMapping(ch, false)
				return nil
			})
			sort.Sort(ch.labels) // deterministic label ordering

			p.mChecks[ch.Name()] = ch
			p.setRefs(nameRef)
			return append(failures, ch.valid()...)
		})
		if err != nil {
			pErr.append(err.Resources...)
		}
	}
	if len(pErr.Resources) > 0 {
		return &pErr
	}
	return nil
}
|
|
|
|
|
2019-12-06 07:05:32 +00:00
|
|
|
// graphDashboards parses every dashboard object (min name length 2) into
// p.mDashboards, including nested labels and per-chart parsing. Chart parse
// failures are attached as nested validation errors indexed by chart
// position rather than aborting the dashboard.
func (p *Pkg) graphDashboards() *parseErr {
	p.mDashboards = make([]*dashboard, 0)
	return p.eachResource(KindDashboard, 2, func(o Object) []validationErr {
		// Resolve the metadata name, substituting any known env values.
		nameRef := p.getRefWithKnownEnvs(o.Metadata, fieldName)
		dash := &dashboard{
			name:        nameRef,
			Description: o.Spec.stringShort(fieldDescription),
		}

		// Associate nested labels; the label keeps a back-reference.
		failures := p.parseNestedLabels(o.Spec, func(l *label) error {
			dash.labels = append(dash.labels, l)
			p.mLabels[l.PkgName()].setMapping(dash, false)
			return nil
		})
		sort.Sort(dash.labels) // deterministic label ordering

		// Parse each chart; a bad chart is reported (with its index) and
		// skipped so the remaining charts still parse.
		for i, cr := range o.Spec.slcResource(fieldDashCharts) {
			ch, fails := parseChart(cr)
			if fails != nil {
				failures = append(failures, validationErr{
					Field:  fieldDashCharts,
					Index:  intPtr(i),
					Nested: fails,
				})
				continue
			}
			dash.Charts = append(dash.Charts, ch)
		}

		p.mDashboards = append(p.mDashboards, dash)
		p.setRefs(nameRef)

		return failures
	})
}
|
|
|
|
|
2019-12-06 07:05:32 +00:00
|
|
|
// graphNotificationEndpoints parses every notification endpoint object
// (HTTP, PagerDuty, and Slack kinds) into p.mNotificationEndpoints. All
// secret-bearing fields (password, routing key, token, username) are
// captured as references and registered via setRefs.
func (p *Pkg) graphNotificationEndpoints() *parseErr {
	p.mNotificationEndpoints = make(map[string]*notificationEndpoint)

	// Map each object Kind to the internal notificationKind discriminator.
	notificationKinds := []struct {
		kind             Kind
		notificationKind notificationKind
	}{
		{
			kind:             KindNotificationEndpointHTTP,
			notificationKind: notificationKindHTTP,
		},
		{
			kind:             KindNotificationEndpointPagerDuty,
			notificationKind: notificationKindPagerDuty,
		},
		{
			kind:             KindNotificationEndpointSlack,
			notificationKind: notificationKindSlack,
		},
	}

	var pErr parseErr
	for _, nk := range notificationKinds {
		err := p.eachResource(nk.kind, 1, func(o Object) []validationErr {
			// Resolve the metadata name, substituting any known env values.
			nameRef := p.getRefWithKnownEnvs(o.Metadata, fieldName)
			// Duplicates are detected across all endpoint kinds since they
			// share one map.
			if _, ok := p.mNotificationEndpoints[nameRef.String()]; ok {
				return []validationErr{{
					Field: fieldName,
					Msg:   "duplicate name: " + o.Name(),
				}}
			}

			endpoint := &notificationEndpoint{
				kind:        nk.notificationKind,
				name:        nameRef,
				description: o.Spec.stringShort(fieldDescription),
				method:      strings.TrimSpace(strings.ToUpper(o.Spec.stringShort(fieldNotificationEndpointHTTPMethod))),
				httpType:    normStr(o.Spec.stringShort(fieldType)),
				password:    o.Spec.references(fieldNotificationEndpointPassword),
				routingKey:  o.Spec.references(fieldNotificationEndpointRoutingKey),
				status:      normStr(o.Spec.stringShort(fieldStatus)),
				token:       o.Spec.references(fieldNotificationEndpointToken),
				url:         o.Spec.stringShort(fieldNotificationEndpointURL),
				username:    o.Spec.references(fieldNotificationEndpointUsername),
			}
			// Associate nested labels; the label keeps a back-reference.
			failures := p.parseNestedLabels(o.Spec, func(l *label) error {
				endpoint.labels = append(endpoint.labels, l)
				p.mLabels[l.PkgName()].setMapping(endpoint, false)
				return nil
			})
			sort.Sort(endpoint.labels) // deterministic label ordering

			// Register the name plus every secret-capable reference.
			p.setRefs(nameRef, endpoint.password, endpoint.routingKey, endpoint.token, endpoint.username)

			p.mNotificationEndpoints[endpoint.Name()] = endpoint
			return append(failures, endpoint.valid()...)
		})
		if err != nil {
			pErr.append(err.Resources...)
		}
	}
	if len(pErr.Resources) > 0 {
		return &pErr
	}
	return nil
}
|
|
|
|
|
2019-12-19 19:56:03 +00:00
|
|
|
// graphNotificationRules parses every notification rule object into
// p.mNotificationRules, including its status rules, tag rules, and nested
// labels. The endpoint is referenced by name (env-resolvable) and validated
// later by the rule's valid() method.
func (p *Pkg) graphNotificationRules() *parseErr {
	p.mNotificationRules = make([]*notificationRule, 0)
	return p.eachResource(KindNotificationRule, 1, func(o Object) []validationErr {
		rule := &notificationRule{
			name:         p.getRefWithKnownEnvs(o.Metadata, fieldName),
			endpointName: p.getRefWithKnownEnvs(o.Spec, fieldNotificationRuleEndpointName),
			description:  o.Spec.stringShort(fieldDescription),
			channel:      o.Spec.stringShort(fieldNotificationRuleChannel),
			every:        o.Spec.durationShort(fieldEvery),
			msgTemplate:  o.Spec.stringShort(fieldNotificationRuleMessageTemplate),
			offset:       o.Spec.durationShort(fieldOffset),
			status:       normStr(o.Spec.stringShort(fieldStatus)),
		}

		// Status rules: current/previous levels normalized to upper case.
		for _, sRule := range o.Spec.slcResource(fieldNotificationRuleStatusRules) {
			rule.statusRules = append(rule.statusRules, struct{ curLvl, prevLvl string }{
				curLvl:  strings.TrimSpace(strings.ToUpper(sRule.stringShort(fieldNotificationRuleCurrentLevel))),
				prevLvl: strings.TrimSpace(strings.ToUpper(sRule.stringShort(fieldNotificationRulePreviousLevel))),
			})
		}

		// Tag rules: key/value with a normalized comparison operator.
		for _, tRule := range o.Spec.slcResource(fieldNotificationRuleTagRules) {
			rule.tagRules = append(rule.tagRules, struct{ k, v, op string }{
				k:  tRule.stringShort(fieldKey),
				v:  tRule.stringShort(fieldValue),
				op: normStr(tRule.stringShort(fieldOperator)),
			})
		}

		// Associate nested labels; the label keeps a back-reference.
		failures := p.parseNestedLabels(o.Spec, func(l *label) error {
			rule.labels = append(rule.labels, l)
			p.mLabels[l.PkgName()].setMapping(rule, false)
			return nil
		})
		sort.Sort(rule.labels) // deterministic label ordering

		p.mNotificationRules = append(p.mNotificationRules, rule)
		p.setRefs(rule.name, rule.endpointName)
		return append(failures, rule.valid()...)
	})
}
|
|
|
|
|
2019-12-23 08:22:48 +00:00
|
|
|
// graphTasks parses every task object into p.mTasks, including nested
// labels. Scheduling may be expressed as a cron expression or an
// every/offset pair; both are captured and validated by t.valid().
func (p *Pkg) graphTasks() *parseErr {
	p.mTasks = make([]*task, 0)
	return p.eachResource(KindTask, 1, func(o Object) []validationErr {
		t := &task{
			name:        p.getRefWithKnownEnvs(o.Metadata, fieldName),
			cron:        o.Spec.stringShort(fieldTaskCron),
			description: o.Spec.stringShort(fieldDescription),
			every:       o.Spec.durationShort(fieldEvery),
			offset:      o.Spec.durationShort(fieldOffset),
			query:       strings.TrimSpace(o.Spec.stringShort(fieldQuery)),
			status:      normStr(o.Spec.stringShort(fieldStatus)),
		}

		// Associate nested labels; the label keeps a back-reference.
		failures := p.parseNestedLabels(o.Spec, func(l *label) error {
			t.labels = append(t.labels, l)
			p.mLabels[l.PkgName()].setMapping(t, false)
			return nil
		})
		sort.Sort(t.labels) // deterministic label ordering

		p.mTasks = append(p.mTasks, t)
		p.setRefs(t.name)
		return append(failures, t.valid()...)
	})
}
|
|
|
|
|
2019-12-06 07:05:32 +00:00
|
|
|
// graphTelegrafs parses every telegraf object into p.mTelegrafs. Note the
// min name length passed to eachResource is 0, so unnamed telegraf configs
// are accepted; an empty config body, however, is a validation failure.
func (p *Pkg) graphTelegrafs() *parseErr {
	p.mTelegrafs = make([]*telegraf, 0)
	return p.eachResource(KindTelegraf, 0, func(o Object) []validationErr {
		tele := &telegraf{
			name: p.getRefWithKnownEnvs(o.Metadata, fieldName),
		}
		tele.config.Description = o.Spec.stringShort(fieldDescription)

		// Associate nested labels; the label keeps a back-reference.
		failures := p.parseNestedLabels(o.Spec, func(l *label) error {
			tele.labels = append(tele.labels, l)
			p.mLabels[l.PkgName()].setMapping(tele, false)
			return nil
		})
		sort.Sort(tele.labels) // deterministic label ordering

		// The raw telegraf config is required; report rather than drop the
		// resource so the user sees which telegraf is incomplete.
		tele.config.Config = o.Spec.stringShort(fieldTelegrafConfig)
		if tele.config.Config == "" {
			failures = append(failures, validationErr{
				Field: fieldTelegrafConfig,
				Msg:   "no config provided",
			})
		}

		p.mTelegrafs = append(p.mTelegrafs, tele)
		p.setRefs(tele.name)

		return failures
	})
}
|
|
|
|
|
2019-12-23 08:22:48 +00:00
|
|
|
func (p *Pkg) graphVariables() *parseErr {
|
|
|
|
p.mVariables = make(map[string]*variable)
|
2020-02-06 17:28:04 +00:00
|
|
|
return p.eachResource(KindVariable, 1, func(o Object) []validationErr {
|
|
|
|
nameRef := p.getRefWithKnownEnvs(o.Metadata, fieldName)
|
2020-02-05 01:23:28 +00:00
|
|
|
if _, ok := p.mVariables[nameRef.String()]; ok {
|
2019-12-23 08:22:48 +00:00
|
|
|
return []validationErr{{
|
2020-02-06 17:28:04 +00:00
|
|
|
Field: fieldName,
|
2020-02-05 01:23:28 +00:00
|
|
|
Msg: "duplicate name: " + nameRef.String(),
|
2019-12-23 08:22:48 +00:00
|
|
|
}}
|
|
|
|
}
|
|
|
|
|
|
|
|
newVar := &variable{
|
2020-02-05 01:23:28 +00:00
|
|
|
name: nameRef,
|
2020-02-06 05:42:01 +00:00
|
|
|
Description: o.Spec.stringShort(fieldDescription),
|
|
|
|
Type: normStr(o.Spec.stringShort(fieldType)),
|
|
|
|
Query: strings.TrimSpace(o.Spec.stringShort(fieldQuery)),
|
|
|
|
Language: normStr(o.Spec.stringShort(fieldLanguage)),
|
|
|
|
ConstValues: o.Spec.slcStr(fieldValues),
|
|
|
|
MapValues: o.Spec.mapStrStr(fieldValues),
|
2019-12-23 08:22:48 +00:00
|
|
|
}
|
|
|
|
|
2020-02-06 05:42:01 +00:00
|
|
|
failures := p.parseNestedLabels(o.Spec, func(l *label) error {
|
2019-12-23 08:22:48 +00:00
|
|
|
newVar.labels = append(newVar.labels, l)
|
2020-03-16 22:17:24 +00:00
|
|
|
p.mLabels[l.PkgName()].setMapping(newVar, false)
|
2019-12-23 08:22:48 +00:00
|
|
|
return nil
|
|
|
|
})
|
|
|
|
sort.Sort(newVar.labels)
|
|
|
|
|
2020-02-06 05:42:01 +00:00
|
|
|
p.mVariables[o.Name()] = newVar
|
2020-02-06 17:28:04 +00:00
|
|
|
p.setRefs(newVar.name)
|
2019-12-23 08:22:48 +00:00
|
|
|
|
|
|
|
return append(failures, newVar.valid()...)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2020-02-06 17:28:04 +00:00
|
|
|
// eachResource walks p.Objects and invokes fn for every object whose kind
// matches resourceKind. Before fn runs, each object is screened for an
// unknown kind, a mismatched API version, and a name shorter than
// minNameLen; screening failures are recorded and the object skipped.
// Validation errors returned by fn are bucketed into association errors
// (Field == "associations") vs. plain validation errors. Returns a
// *parseErr when any resource failed, nil otherwise.
func (p *Pkg) eachResource(resourceKind Kind, minNameLen int, fn func(o Object) []validationErr) *parseErr {
	var pErr parseErr
	for i, k := range p.Objects {
		// Unknown/invalid kinds are reported once here regardless of the
		// kind being filtered for.
		if err := k.Type.OK(); err != nil {
			pErr.append(resourceErr{
				Kind: k.Type.String(),
				Idx:  intPtr(i),
				ValidationErrs: []validationErr{
					{
						Field: fieldKind,
						Msg:   err.Error(),
					},
				},
			})
			continue
		}
		// Only the requested kind is processed by this pass.
		if !k.Type.is(resourceKind) {
			continue
		}

		if k.APIVersion != APIVersion {
			pErr.append(resourceErr{
				Kind: k.Type.String(),
				Idx:  intPtr(i),
				ValidationErrs: []validationErr{
					{
						Field: fieldAPIVersion,
						Msg:   fmt.Sprintf("invalid API version provided %q; must be 1 in [%s]", k.APIVersion, APIVersion),
					},
				},
			})
			continue
		}

		// Enforce the caller-specified minimum name length (0 disables).
		if len(k.Name()) < minNameLen {
			pErr.append(resourceErr{
				Kind: k.Type.String(),
				Idx:  intPtr(i),
				ValidationErrs: []validationErr{
					objectValidationErr(fieldMetadata, validationErr{
						Field: fieldName,
						Msg:   fmt.Sprintf("must be a string of at least %d chars in length", minNameLen),
					}),
				},
			})
			continue
		}

		if failures := fn(k); failures != nil {
			err := resourceErr{
				Kind: resourceKind.String(),
				Idx:  intPtr(i),
			}
			for _, f := range failures {
				vErr := validationErr{
					Field:  f.Field,
					Msg:    f.Msg,
					Index:  f.Index,
					Nested: f.Nested,
				}
				// Association (label) failures are reported separately from
				// the resource's own field failures.
				if vErr.Field == "associations" {
					err.AssociationErrs = append(err.AssociationErrs, vErr)
					continue
				}
				err.ValidationErrs = append(err.ValidationErrs, vErr)
			}
			pErr.append(err)
		}
	}

	if len(pErr.Resources) > 0 {
		return &pErr
	}
	return nil
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
func (p *Pkg) parseNestedLabels(r Resource, fn func(lb *label) error) []validationErr {
|
2019-10-30 21:13:42 +00:00
|
|
|
nestedLabels := make(map[string]*label)
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
var failures []validationErr
|
2019-11-08 19:33:41 +00:00
|
|
|
for i, nr := range r.slcResource(fieldAssociations) {
|
2019-11-14 00:24:05 +00:00
|
|
|
fail := p.parseNestedLabel(nr, func(l *label) error {
|
2019-12-03 02:05:10 +00:00
|
|
|
if _, ok := nestedLabels[l.Name()]; ok {
|
|
|
|
return fmt.Errorf("duplicate nested label: %q", l.Name())
|
2019-10-30 21:13:42 +00:00
|
|
|
}
|
2019-12-03 02:05:10 +00:00
|
|
|
nestedLabels[l.Name()] = l
|
2019-10-30 21:13:42 +00:00
|
|
|
|
|
|
|
return fn(l)
|
|
|
|
})
|
|
|
|
if fail != nil {
|
2019-11-14 00:24:05 +00:00
|
|
|
fail.Index = intPtr(i)
|
2019-10-30 21:13:42 +00:00
|
|
|
failures = append(failures, *fail)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return failures
|
|
|
|
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
// parseNestedLabel resolves a single association entry. Non-label
// associations are ignored (nil return). For a label association, the label
// must already exist in p.mLabels (labels are graphed first — TODO confirm
// ordering is guaranteed by the caller); fn is then invoked with it. Any
// failure is returned as a *validationErr on the associations field.
func (p *Pkg) parseNestedLabel(nr Resource, fn func(lb *label) error) *validationErr {
	k, err := nr.kind()
	if err != nil {
		// The association's kind itself is malformed.
		return &validationErr{
			Field: fieldAssociations,
			Nested: []validationErr{
				{
					Field: fieldKind,
					Msg:   err.Error(),
				},
			},
		}
	}
	// Only label associations are handled here; others are not an error.
	if !k.is(KindLabel) {
		return nil
	}

	// Look the label up by its env-resolved name.
	nameRef := p.getRefWithKnownEnvs(nr, fieldName)
	lb, found := p.mLabels[nameRef.String()]
	if !found {
		return &validationErr{
			Field: fieldAssociations,
			Msg:   fmt.Sprintf("label %q does not exist in pkg", nr.Name()),
		}
	}

	if err := fn(lb); err != nil {
		return &validationErr{
			Field: fieldAssociations,
			Msg:   err.Error(),
		}
	}
	return nil
}
|
|
|
|
|
2020-02-06 17:28:04 +00:00
|
|
|
func (p *Pkg) getRefWithKnownEnvs(r Resource, field string) *references {
|
|
|
|
nameRef := r.references(field)
|
2020-02-06 05:42:01 +00:00
|
|
|
if v, ok := p.mEnvVals[nameRef.EnvRef]; ok {
|
|
|
|
nameRef.val = v
|
|
|
|
}
|
|
|
|
return nameRef
|
|
|
|
}
|
|
|
|
|
2020-02-06 17:28:04 +00:00
|
|
|
func (p *Pkg) setRefs(refs ...*references) {
|
2020-02-05 00:15:20 +00:00
|
|
|
for _, ref := range refs {
|
|
|
|
if ref.Secret != "" {
|
|
|
|
p.mSecrets[ref.Secret] = false
|
|
|
|
}
|
|
|
|
if ref.EnvRef != "" {
|
2020-02-06 17:28:04 +00:00
|
|
|
p.mEnv[ref.EnvRef] = p.mEnvVals[ref.EnvRef] != ""
|
2020-02-05 00:15:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
// parseChart builds a chart from a raw dashboard-chart resource. Scalar
// fields map directly; composite fields (legend, queries, colors, axes)
// accept either an already-typed value (from programmatic construction) or
// a raw decoded resource. Returns the chart, or a non-empty failure list
// (in which case the zero chart is returned).
func parseChart(r Resource) (chart, []validationErr) {
	ck, err := r.chartKind()
	if err != nil {
		return chart{}, []validationErr{{
			Field: "kind",
			Msg:   err.Error(),
		}}
	}

	// Direct scalar field mappings.
	c := chart{
		Kind:        ck,
		Name:        r.Name(),
		BinSize:     r.intShort(fieldChartBinSize),
		BinCount:    r.intShort(fieldChartBinCount),
		Geom:        r.stringShort(fieldChartGeom),
		Height:      r.intShort(fieldChartHeight),
		Note:        r.stringShort(fieldChartNote),
		NoteOnEmpty: r.boolShort(fieldChartNoteOnEmpty),
		Position:    r.stringShort(fieldChartPosition),
		Prefix:      r.stringShort(fieldPrefix),
		Shade:       r.boolShort(fieldChartShade),
		Suffix:      r.stringShort(fieldSuffix),
		TickPrefix:  r.stringShort(fieldChartTickPrefix),
		TickSuffix:  r.stringShort(fieldChartTickSuffix),
		TimeFormat:  r.stringShort(fieldChartTimeFormat),
		Width:       r.intShort(fieldChartWidth),
		XCol:        r.stringShort(fieldChartXCol),
		YCol:        r.stringShort(fieldChartYCol),
		XPos:        r.intShort(fieldChartXPos),
		YPos:        r.intShort(fieldChartYPos),
	}

	// Legend: typed value wins; otherwise decode from a raw resource.
	if presLeg, ok := r[fieldChartLegend].(legend); ok {
		c.Legend = presLeg
	} else {
		if leg, ok := ifaceToResource(r[fieldChartLegend]); ok {
			c.Legend.Type = leg.stringShort(fieldType)
			c.Legend.Orientation = leg.stringShort(fieldLegendOrientation)
		}
	}

	// Decimal places: presence of the field also enables enforcement.
	if dp, ok := r.int(fieldChartDecimalPlaces); ok {
		c.EnforceDecimals = true
		c.DecimalPlaces = dp
	}

	var failures []validationErr
	// Queries: typed value wins; otherwise decode each raw query.
	if presentQueries, ok := r[fieldChartQueries].(queries); ok {
		c.Queries = presentQueries
	} else {
		for _, rq := range r.slcResource(fieldChartQueries) {
			c.Queries = append(c.Queries, query{
				Query: strings.TrimSpace(rq.stringShort(fieldQuery)),
			})
		}
	}

	// Colors: typed value wins; otherwise decode each raw color.
	if presentColors, ok := r[fieldChartColors].(colors); ok {
		c.Colors = presentColors
	} else {
		for _, rc := range r.slcResource(fieldChartColors) {
			c.Colors = append(c.Colors, &color{
				Name:  rc.Name(),
				Type:  rc.stringShort(fieldType),
				Hex:   rc.stringShort(fieldColorHex),
				Value: flt64Ptr(rc.float64Short(fieldValue)),
			})
		}
	}

	// Axes: typed value wins; otherwise decode each raw axis, parsing its
	// optional domain as floats.
	if presAxes, ok := r[fieldChartAxes].(axes); ok {
		c.Axes = presAxes
	} else {
		for _, ra := range r.slcResource(fieldChartAxes) {
			domain := []float64{}

			if _, ok := ra[fieldChartDomain]; ok {
				for _, str := range ra.slcStr(fieldChartDomain) {
					val, err := strconv.ParseFloat(str, 64)
					if err != nil {
						// Unparsable entries are reported but a zero value
						// is still appended to preserve positions.
						failures = append(failures, validationErr{
							Field: "axes",
							Msg:   err.Error(),
						})
					}
					domain = append(domain, val)
				}
			}

			c.Axes = append(c.Axes, axis{
				Base:   ra.stringShort(fieldAxisBase),
				Label:  ra.stringShort(fieldAxisLabel),
				Name:   ra.Name(),
				Prefix: ra.stringShort(fieldPrefix),
				Scale:  ra.stringShort(fieldAxisScale),
				Suffix: ra.stringShort(fieldSuffix),
				Domain: domain,
			})
		}
	}

	// Table-specific display options (table charts only).
	if tableOptsRes, ok := ifaceToResource(r[fieldChartTableOptions]); ok {
		c.TableOptions = tableOptions{
			VerticalTimeAxis: tableOptsRes.boolShort(fieldChartTableOptionVerticalTimeAxis),
			SortByField:      tableOptsRes.stringShort(fieldChartTableOptionSortBy),
			Wrapping:         tableOptsRes.stringShort(fieldChartTableOptionWrapping),
			FixFirstColumn:   tableOptsRes.boolShort(fieldChartTableOptionFixFirstColumn),
		}
	}

	// Per-field display options.
	for _, fieldOptRes := range r.slcResource(fieldChartFieldOptions) {
		c.FieldOptions = append(c.FieldOptions, fieldOption{
			FieldName:   fieldOptRes.stringShort(fieldChartFieldOptionFieldName),
			DisplayName: fieldOptRes.stringShort(fieldChartFieldOptionDisplayName),
			Visible:     fieldOptRes.boolShort(fieldChartFieldOptionVisible),
		})
	}

	// Any accumulated failure (including property validation) voids the chart.
	if failures = append(failures, c.validProperties()...); len(failures) > 0 {
		return chart{}, failures
	}

	return c, nil
}
|
|
|
|
|
2019-10-23 17:09:04 +00:00
|
|
|
// Resource is a pkger resource kind: the raw, schema-less body of a single
// resource as decoded from JSON/YAML. It can be any of the supported kinds.
// The typed accessors below (string, int, bool, ...) pull fields out
// defensively, tolerating the differing concrete value types produced by
// the various decoders.
type Resource map[string]interface{}
|
|
|
|
|
2019-11-08 19:33:41 +00:00
|
|
|
// Name returns the name of the resource.
|
2019-11-01 18:11:42 +00:00
|
|
|
func (r Resource) Name() string {
|
2019-11-08 19:33:41 +00:00
|
|
|
return strings.TrimSpace(r.stringShort(fieldName))
|
2019-11-01 18:11:42 +00:00
|
|
|
}
|
|
|
|
|
2019-11-08 19:33:41 +00:00
|
|
|
func (r Resource) kind() (Kind, error) {
|
2019-11-21 00:38:12 +00:00
|
|
|
if k, ok := r[fieldKind].(Kind); ok {
|
|
|
|
return k, k.OK()
|
|
|
|
}
|
|
|
|
|
2019-11-08 19:33:41 +00:00
|
|
|
resKind, ok := r.string(fieldKind)
|
2019-10-23 17:09:04 +00:00
|
|
|
if !ok {
|
2019-11-08 19:33:41 +00:00
|
|
|
return KindUnknown, errors.New("no kind provided")
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
k := Kind(resKind)
|
2019-11-21 00:38:12 +00:00
|
|
|
return k, k.OK()
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
2019-11-05 22:08:30 +00:00
|
|
|
func (r Resource) chartKind() (chartKind, error) {
|
2019-11-01 18:11:42 +00:00
|
|
|
ck, _ := r.kind()
|
2020-01-13 19:13:37 +00:00
|
|
|
chartKind := chartKind(normStr(string(ck)))
|
2019-11-01 18:11:42 +00:00
|
|
|
if !chartKind.ok() {
|
2019-11-05 22:08:30 +00:00
|
|
|
return chartKindUnknown, errors.New("invalid chart kind provided: " + string(chartKind))
|
2019-11-01 18:11:42 +00:00
|
|
|
}
|
|
|
|
return chartKind, nil
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
2019-11-01 18:11:42 +00:00
|
|
|
func (r Resource) bool(key string) (bool, bool) {
|
|
|
|
b, ok := r[key].(bool)
|
|
|
|
return b, ok
|
|
|
|
}
|
|
|
|
|
|
|
|
func (r Resource) boolShort(key string) bool {
|
|
|
|
b, _ := r.bool(key)
|
|
|
|
return b
|
|
|
|
}
|
|
|
|
|
2019-12-18 01:57:44 +00:00
|
|
|
func (r Resource) duration(key string) (time.Duration, bool) {
|
|
|
|
dur, err := time.ParseDuration(r.stringShort(key))
|
|
|
|
return dur, err == nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (r Resource) durationShort(key string) time.Duration {
|
|
|
|
dur, _ := r.duration(key)
|
|
|
|
return dur
|
|
|
|
}
|
|
|
|
|
2019-11-01 18:11:42 +00:00
|
|
|
func (r Resource) float64(key string) (float64, bool) {
|
|
|
|
f, ok := r[key].(float64)
|
|
|
|
if ok {
|
|
|
|
return f, true
|
|
|
|
}
|
|
|
|
|
|
|
|
i, ok := r[key].(int)
|
|
|
|
if ok {
|
|
|
|
return float64(i), true
|
|
|
|
}
|
|
|
|
return 0, false
|
|
|
|
}
|
|
|
|
|
|
|
|
func (r Resource) float64Short(key string) float64 {
|
|
|
|
f, _ := r.float64(key)
|
|
|
|
return f
|
|
|
|
}
|
|
|
|
|
|
|
|
func (r Resource) int(key string) (int, bool) {
|
|
|
|
i, ok := r[key].(int)
|
|
|
|
if ok {
|
|
|
|
return i, true
|
|
|
|
}
|
|
|
|
|
|
|
|
f, ok := r[key].(float64)
|
|
|
|
if ok {
|
|
|
|
return int(f), true
|
|
|
|
}
|
|
|
|
return 0, false
|
|
|
|
}
|
|
|
|
|
|
|
|
func (r Resource) intShort(key string) int {
|
|
|
|
i, _ := r.int(key)
|
|
|
|
return i
|
|
|
|
}
|
|
|
|
|
2020-02-05 00:15:20 +00:00
|
|
|
func (r Resource) references(key string) *references {
|
2019-12-16 17:39:55 +00:00
|
|
|
v, ok := r[key]
|
|
|
|
if !ok {
|
2020-02-05 00:15:20 +00:00
|
|
|
return &references{}
|
2019-12-16 17:39:55 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
var ref references
|
2020-02-05 00:15:20 +00:00
|
|
|
for _, f := range []string{fieldReferencesSecret, fieldReferencesEnv} {
|
2019-12-16 17:39:55 +00:00
|
|
|
resBody, ok := ifaceToResource(v)
|
|
|
|
if !ok {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if keyRes, ok := ifaceToResource(resBody[f]); ok {
|
2020-02-05 00:15:20 +00:00
|
|
|
switch f {
|
|
|
|
case fieldReferencesEnv:
|
|
|
|
ref.EnvRef = keyRes.stringShort(fieldKey)
|
|
|
|
case fieldReferencesSecret:
|
|
|
|
ref.Secret = keyRes.stringShort(fieldKey)
|
|
|
|
}
|
2019-12-16 17:39:55 +00:00
|
|
|
}
|
|
|
|
}
|
2020-02-05 00:15:20 +00:00
|
|
|
if ref.hasValue() {
|
|
|
|
return &ref
|
2019-12-16 17:39:55 +00:00
|
|
|
}
|
|
|
|
|
2020-02-05 00:15:20 +00:00
|
|
|
return &references{val: v}
|
2019-12-16 17:39:55 +00:00
|
|
|
}
|
|
|
|
|
2019-10-23 17:09:04 +00:00
|
|
|
func (r Resource) string(key string) (string, bool) {
|
2019-11-06 22:41:06 +00:00
|
|
|
return ifaceToStr(r[key])
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func (r Resource) stringShort(key string) string {
|
|
|
|
s, _ := r.string(key)
|
|
|
|
return s
|
|
|
|
}
|
|
|
|
|
2019-11-01 18:11:42 +00:00
|
|
|
func (r Resource) slcResource(key string) []Resource {
|
|
|
|
v, ok := r[key]
|
|
|
|
if !ok {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-11-08 19:33:41 +00:00
|
|
|
if resources, ok := v.([]Resource); ok {
|
|
|
|
return resources
|
|
|
|
}
|
|
|
|
|
2019-11-01 18:11:42 +00:00
|
|
|
iFaceSlc, ok := v.([]interface{})
|
|
|
|
if !ok {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
var newResources []Resource
|
|
|
|
for _, iFace := range iFaceSlc {
|
2019-11-06 22:41:06 +00:00
|
|
|
r, ok := ifaceToResource(iFace)
|
2019-11-01 18:11:42 +00:00
|
|
|
if !ok {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
newResources = append(newResources, r)
|
|
|
|
}
|
|
|
|
|
|
|
|
return newResources
|
|
|
|
}
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
func (r Resource) slcStr(key string) []string {
|
|
|
|
v, ok := r[key]
|
|
|
|
if !ok {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-11-08 19:33:41 +00:00
|
|
|
if strSlc, ok := v.([]string); ok {
|
|
|
|
return strSlc
|
|
|
|
}
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
iFaceSlc, ok := v.([]interface{})
|
|
|
|
if !ok {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
var out []string
|
|
|
|
for _, iface := range iFaceSlc {
|
|
|
|
s, ok := ifaceToStr(iface)
|
|
|
|
if !ok {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
out = append(out, s)
|
|
|
|
}
|
|
|
|
|
|
|
|
return out
|
|
|
|
}
|
|
|
|
|
|
|
|
func (r Resource) mapStrStr(key string) map[string]string {
|
2019-11-08 19:33:41 +00:00
|
|
|
v, ok := r[key]
|
|
|
|
if !ok {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
if m, ok := v.(map[string]string); ok {
|
|
|
|
return m
|
|
|
|
}
|
|
|
|
|
|
|
|
res, ok := ifaceToResource(v)
|
2019-11-06 22:41:06 +00:00
|
|
|
if !ok {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
m := make(map[string]string)
|
|
|
|
for k, v := range res {
|
|
|
|
s, ok := ifaceToStr(v)
|
|
|
|
if !ok {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
m[k] = s
|
|
|
|
}
|
|
|
|
return m
|
|
|
|
}
|
|
|
|
|
|
|
|
func ifaceToResource(i interface{}) (Resource, bool) {
|
|
|
|
if i == nil {
|
|
|
|
return nil, false
|
|
|
|
}
|
|
|
|
|
2019-11-08 19:33:41 +00:00
|
|
|
if res, ok := i.(Resource); ok {
|
2019-10-26 02:11:47 +00:00
|
|
|
return res, true
|
|
|
|
}
|
|
|
|
|
|
|
|
if m, ok := i.(map[string]interface{}); ok {
|
|
|
|
return m, true
|
|
|
|
}
|
|
|
|
|
2019-10-23 17:09:04 +00:00
|
|
|
m, ok := i.(map[interface{}]interface{})
|
|
|
|
if !ok {
|
|
|
|
return nil, false
|
|
|
|
}
|
|
|
|
|
|
|
|
newRes := make(Resource)
|
|
|
|
for k, v := range m {
|
|
|
|
s, ok := k.(string)
|
|
|
|
if !ok {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
newRes[s] = v
|
|
|
|
}
|
|
|
|
return newRes, true
|
|
|
|
}
|
|
|
|
|
2019-11-06 22:41:06 +00:00
|
|
|
// ifaceToStr attempts to coerce an arbitrary decoded value into a
// string. Strings pass through, ints and float64s are formatted
// decimally; anything else (including nil) reports false.
func ifaceToStr(v interface{}) (string, bool) {
	// A single type switch replaces the original chain of one-off type
	// assertions; the default arm covers the nil case as well.
	switch val := v.(type) {
	case string:
		return val, true
	case int:
		return strconv.Itoa(val), true
	case float64:
		// 'f' with precision -1 uses the fewest digits that round-trip.
		return strconv.FormatFloat(val, 'f', -1, 64), true
	default:
		return "", false
	}
}
|
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
func uniqResources(kinds []Object) []Object {
|
2019-11-12 20:29:50 +00:00
|
|
|
type key struct {
|
|
|
|
kind Kind
|
|
|
|
name string
|
|
|
|
}
|
2019-12-20 20:51:27 +00:00
|
|
|
|
|
|
|
// these 2 maps are used to eliminate duplicates that come
|
2020-01-13 19:13:37 +00:00
|
|
|
// from dependencies while keeping the Object that has any
|
|
|
|
// associations. If there are no associations, then the kinds
|
2019-12-20 20:51:27 +00:00
|
|
|
// are no different from one another.
|
2019-11-12 20:29:50 +00:00
|
|
|
m := make(map[key]bool)
|
2020-01-13 19:13:37 +00:00
|
|
|
res := make(map[key]Object)
|
2019-11-12 20:29:50 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
out := make([]Object, 0, len(kinds))
|
|
|
|
for _, k := range kinds {
|
|
|
|
if err := k.Type.OK(); err != nil {
|
2019-11-12 20:29:50 +00:00
|
|
|
continue
|
|
|
|
}
|
2019-12-20 20:51:27 +00:00
|
|
|
|
2020-01-13 19:13:37 +00:00
|
|
|
if kindsUniqByName[k.Type] {
|
|
|
|
rKey := key{kind: k.Type, name: k.Name()}
|
2019-12-20 20:51:27 +00:00
|
|
|
if hasAssociations, ok := m[rKey]; ok && hasAssociations {
|
2019-11-21 00:38:12 +00:00
|
|
|
continue
|
|
|
|
}
|
2020-01-13 19:13:37 +00:00
|
|
|
_, hasAssociations := k.Spec[fieldAssociations]
|
2019-12-20 20:51:27 +00:00
|
|
|
m[rKey] = hasAssociations
|
2020-01-13 19:13:37 +00:00
|
|
|
res[rKey] = k
|
2019-12-20 20:51:27 +00:00
|
|
|
continue
|
2019-11-21 00:38:12 +00:00
|
|
|
}
|
2020-01-13 19:13:37 +00:00
|
|
|
out = append(out, k)
|
2019-12-20 20:51:27 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
for _, r := range res {
|
|
|
|
out = append(out, r)
|
2019-11-12 20:29:50 +00:00
|
|
|
}
|
|
|
|
return out
|
|
|
|
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
// ParseError is the error from parsing the given package. The ParseError
// behavior provides a list of resources that failed and all validations
// that failed for that resource. A resource can have multiple errors, and
// a parseErr can have multiple resources which themselves can have
// multiple validation failures.
type ParseError interface {
	ValidationErrs() []ValidationErr
}
|
|
|
|
|
2019-12-12 19:09:32 +00:00
|
|
|
// NewParseError creates a new parse error from existing validation errors.
|
|
|
|
func NewParseError(errs ...ValidationErr) error {
|
|
|
|
if len(errs) == 0 {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
return &parseErr{rawErrs: errs}
|
|
|
|
}
|
|
|
|
|
2019-11-14 00:24:05 +00:00
|
|
|
type (
	// parseErr is the concrete implementation behind ParseError. It
	// collects per-resource failures alongside raw validation errors
	// that are not tied to a specific resource.
	parseErr struct {
		Resources []resourceErr
		rawErrs   []ValidationErr
	}

	// resourceErr describes the error for a particular resource, which
	// may have numerous root, validation, and association errors.
	resourceErr struct {
		Kind            string
		Idx             *int
		RootErrs        []validationErr
		AssociationErrs []validationErr
		ValidationErrs  []validationErr
	}

	// validationErr is one node in a tree of validation failures;
	// Nested holds failures one field level deeper.
	validationErr struct {
		Field string
		Msg   string
		Index *int

		Nested []validationErr
	}
)
|
2019-10-23 17:09:04 +00:00
|
|
|
|
|
|
|
// Error implements the error interface.
|
2019-11-22 01:07:12 +00:00
|
|
|
func (e *parseErr) Error() string {
|
2019-10-23 17:09:04 +00:00
|
|
|
var errMsg []string
|
2019-12-12 19:09:32 +00:00
|
|
|
for _, ve := range append(e.ValidationErrs(), e.rawErrs...) {
|
2019-11-22 01:07:12 +00:00
|
|
|
errMsg = append(errMsg, ve.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
return strings.Join(errMsg, "\n\t")
|
|
|
|
}
|
|
|
|
|
|
|
|
// ValidationErrs flattens the parse error into a deduplicated list of
// ValidationErrs. It combines the raw errors with every resource's
// root, validation, and association errors, recording each error's
// field path and index path from the resource root down.
func (e *parseErr) ValidationErrs() []ValidationErr {
	// NOTE(review): this aliases e.rawErrs' backing array, and the
	// appends below may write into its spare capacity — confirm rawErrs
	// is never re-extended elsewhere.
	errs := e.rawErrs[:]
	for _, r := range e.Resources {
		rootErr := ValidationErr{
			Kind: r.Kind,
		}
		// Root errors carry neither the "root" field prefix nor the
		// resource index.
		for _, v := range r.RootErrs {
			errs = append(errs, traverseErrs(rootErr, v)...)
		}

		// Validation and association errors are anchored at the
		// resource's index under a synthetic "root" field.
		rootErr.Indexes = []*int{r.Idx}
		rootErr.Fields = []string{"root"}
		for _, v := range append(r.ValidationErrs, r.AssociationErrs...) {
			errs = append(errs, traverseErrs(rootErr, v)...)
		}
	}

	// used to provide a means to == or != in the map lookup
	// to remove duplicate errors
	type key struct {
		kind    string
		fields  string
		indexes string
		reason  string
	}

	m := make(map[key]bool)
	var out []ValidationErr
	for _, verr := range errs {
		k := key{
			kind:   verr.Kind,
			fields: strings.Join(verr.Fields, ":"),
			reason: verr.Reason,
		}
		// Collapse the index path into a comparable string; nil
		// indexes are skipped.
		var indexes []string
		for _, idx := range verr.Indexes {
			if idx == nil {
				continue
			}
			indexes = append(indexes, strconv.Itoa(*idx))
		}
		k.indexes = strings.Join(indexes, ":")
		// Keep only the first occurrence of each (kind, fields,
		// indexes, reason) tuple.
		if m[k] {
			continue
		}
		m[k] = true
		out = append(out, verr)
	}

	return out
}
|
|
|
|
|
|
|
|
// ValidationErr represents an error during the parsing of a package.
// Fields and Indexes run parallel to one another, together describing
// the path from the resource root to the offending field.
type ValidationErr struct {
	Kind    string   `json:"kind" yaml:"kind"`
	Fields  []string `json:"fields" yaml:"fields"`
	Indexes []*int   `json:"idxs" yaml:"idxs"`
	Reason  string   `json:"reason" yaml:"reason"`
}
|
2019-10-23 17:09:04 +00:00
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
func (v ValidationErr) Error() string {
|
|
|
|
fieldPairs := make([]string, 0, len(v.Fields))
|
|
|
|
for i, idx := range v.Indexes {
|
|
|
|
field := v.Fields[i]
|
|
|
|
if idx == nil || *idx == -1 {
|
|
|
|
fieldPairs = append(fieldPairs, field)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
fieldPairs = append(fieldPairs, fmt.Sprintf("%s[%d]", field, *idx))
|
|
|
|
}
|
|
|
|
|
|
|
|
return fmt.Sprintf("kind=%s field=%s reason=%q", v.Kind, strings.Join(fieldPairs, "."), v.Reason)
|
|
|
|
}
|
|
|
|
|
|
|
|
func traverseErrs(root ValidationErr, vErr validationErr) []ValidationErr {
|
|
|
|
root.Fields = append(root.Fields, vErr.Field)
|
|
|
|
root.Indexes = append(root.Indexes, vErr.Index)
|
|
|
|
if len(vErr.Nested) == 0 {
|
|
|
|
root.Reason = vErr.Msg
|
|
|
|
return []ValidationErr{root}
|
|
|
|
}
|
|
|
|
|
|
|
|
var errs []ValidationErr
|
|
|
|
for _, n := range vErr.Nested {
|
|
|
|
errs = append(errs, traverseErrs(root, n)...)
|
|
|
|
}
|
|
|
|
return errs
|
2019-10-23 17:09:04 +00:00
|
|
|
}
|
|
|
|
|
2019-11-22 01:07:12 +00:00
|
|
|
// append records additional resource errors on the parse error.
func (e *parseErr) append(errs ...resourceErr) {
	e.Resources = append(e.Resources, errs...)
}
|
|
|
|
|
|
|
|
// IsParseErr inspects a given error to determine if it is
// a parseErr. It unwraps nested influxdb.Errors so a parseErr
// anywhere in the chain is detected, reporting true when one
// is found and false otherwise.
func IsParseErr(err error) bool {
	if _, ok := err.(*parseErr); ok {
		return true
	}

	iErr, ok := err.(*influxdb.Error)
	if !ok {
		return false
	}
	return IsParseErr(iErr.Err)
}
|
2019-12-10 22:51:11 +00:00
|
|
|
|
2020-03-16 18:25:39 +00:00
|
|
|
func objectValidationErr(field string, vErrs ...validationErr) validationErr {
|
|
|
|
return validationErr{
|
|
|
|
Field: field,
|
|
|
|
Nested: vErrs,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-10 22:51:11 +00:00
|
|
|
// normStr canonicalizes a string for comparison by lowercasing it and
// stripping surrounding whitespace.
func normStr(s string) string {
	lowered := strings.ToLower(s)
	return strings.TrimSpace(lowered)
}
|