feat(pkger): add ability to export all and defined list of resources via influx cli to a package

refers: #15921
pull/16013/head
Johnny Steenbergen 2019-11-20 16:38:12 -08:00 committed by Johnny Steenbergen
parent a92b632e05
commit 9c525ad413
10 changed files with 963 additions and 145 deletions

View File

@ -19,6 +19,11 @@ import (
const maxTCPConnections = 128
type httpClientOpts struct {
token, addr string
skipVerify bool
}
func main() {
influxCmd := influxCmd()
if err := influxCmd.Execute(); err != nil {

View File

@ -6,6 +6,7 @@ import (
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
@ -16,6 +17,7 @@ import (
"github.com/fatih/color"
"github.com/influxdata/influxdb"
"github.com/influxdata/influxdb/http"
ierror "github.com/influxdata/influxdb/kit/errors"
"github.com/influxdata/influxdb/pkger"
"github.com/olekukonko/tablewriter"
"github.com/spf13/cobra"
@ -28,7 +30,8 @@ type pkgSVCFn func(cliReq httpClientOpts) (pkger.SVC, error)
func pkgCmd(newSVCFn pkgSVCFn) *cobra.Command {
cmd := pkgApplyCmd(newSVCFn)
cmd.AddCommand(
pkgCreateCmd(newSVCFn),
pkgNewCmd(newSVCFn),
pkgExportCmd(newSVCFn),
)
return cmd
@ -105,36 +108,32 @@ func pkgApplyRunEFn(newSVCFn pkgSVCFn, orgID, path *string, hasColor, hasTableBo
}
}
func pkgCreateCmd(newSVCFn pkgSVCFn) *cobra.Command {
func pkgNewCmd(newSVCFn pkgSVCFn) *cobra.Command {
cmd := &cobra.Command{
Use: "new",
Short: "Create a reusable pkg to create resources in a declarative manner",
}
wd, err := os.Getwd()
if err != nil {
panic(err)
}
var opts pkgCreateRunOpts
cmd.Flags().StringVarP(&opts.outPath, "out", "o", filepath.Join(wd, "pkg.json"), "output file for created pkg; defaults to pkg.json; the extension of provided file (.yml/.json) will dictate encoding")
var opts pkgNewOpts
cmd.Flags().StringVarP(&opts.outPath, "file", "f", "", "output file for created pkg; defaults to std out if no file provided; the extension of provided file (.yml/.json) will dictate encoding")
cmd.Flags().BoolVarP(&opts.quiet, "quiet", "q", false, "skip interactive mode")
cmd.Flags().StringVarP(&opts.meta.Name, "name", "n", "", "name for new pkg")
cmd.Flags().StringVarP(&opts.meta.Description, "description", "d", "", "description for new pkg")
cmd.Flags().StringVarP(&opts.meta.Version, "version", "v", "", "version for new pkg")
cmd.RunE = pkgCreateRunEFn(newSVCFn, &opts)
cmd.RunE = pkgNewRunEFn(newSVCFn, &opts)
return cmd
}
type pkgCreateRunOpts struct {
type pkgNewOpts struct {
quiet bool
outPath string
orgID string
meta pkger.Metadata
}
func pkgCreateRunEFn(newSVCFn pkgSVCFn, opt *pkgCreateRunOpts) func(*cobra.Command, []string) error {
func pkgNewRunEFn(newSVCFn pkgSVCFn, opt *pkgNewOpts) func(*cobra.Command, []string) error {
return func(cmd *cobra.Command, args []string) error {
if !opt.quiet {
ui := &input.UI{
@ -143,13 +142,13 @@ func pkgCreateRunEFn(newSVCFn pkgSVCFn, opt *pkgCreateRunOpts) func(*cobra.Comma
}
if opt.meta.Name == "" {
opt.meta.Name = getInput(ui, "pkg name:", "")
opt.meta.Name = getInput(ui, "pkg name", "")
}
if opt.meta.Description == "" {
opt.meta.Description = getInput(ui, "pkg description:", opt.meta.Description)
opt.meta.Description = getInput(ui, "pkg description", opt.meta.Description)
}
if opt.meta.Version == "" {
opt.meta.Version = getInput(ui, "pkg version:", opt.meta.Version)
opt.meta.Version = getInput(ui, "pkg version", opt.meta.Version)
}
}
@ -158,37 +157,233 @@ func pkgCreateRunEFn(newSVCFn pkgSVCFn, opt *pkgCreateRunOpts) func(*cobra.Comma
return err
}
newPkg, err := pkgSVC.CreatePkg(context.Background(), pkger.CreateWithMetadata(opt.meta))
return writePkg(cmd.OutOrStdout(), pkgSVC, opt.outPath, pkger.CreateWithMetadata(opt.meta))
}
}
type pkgExportOpts struct {
outPath string
meta pkger.Metadata
resourceType string
buckets string
dashboards string
labels string
variables string
}
func pkgExportCmd(newSVCFn pkgSVCFn) *cobra.Command {
cmd := &cobra.Command{
Use: "export",
Short: "Export existing resources as a package",
}
cmd.AddCommand(pkgExportAllCmd(newSVCFn))
var opts pkgExportOpts
cmd.Flags().StringVarP(&opts.outPath, "file", "f", "", "output file for created pkg; defaults to std out if no file provided; the extension of provided file (.yml/.json) will dictate encoding")
cmd.Flags().StringVarP(&opts.meta.Name, "name", "n", "", "name for new pkg")
cmd.Flags().StringVarP(&opts.meta.Description, "description", "d", "", "description for new pkg")
cmd.Flags().StringVarP(&opts.meta.Version, "version", "v", "", "version for new pkg")
cmd.Flags().StringVar(&opts.resourceType, "resource-type", "", "The resource type provided will be associated with all IDs via stdin.")
cmd.Flags().StringVar(&opts.buckets, "buckets", "", "List of bucket ids comma separated")
cmd.Flags().StringVar(&opts.dashboards, "dashboards", "", "List of dashboard ids comma separated")
cmd.Flags().StringVar(&opts.labels, "labels", "", "List of label ids comma separated")
cmd.Flags().StringVar(&opts.variables, "variables", "", "List of variable ids comma separated")
cmd.RunE = pkgExportRunEFn(newSVCFn, &opts)
return cmd
}
func pkgExportRunEFn(newSVCFn pkgSVCFn, cmdOpts *pkgExportOpts) func(*cobra.Command, []string) error {
return func(cmd *cobra.Command, args []string) error {
pkgSVC, err := newSVCFn(flags.httpClientOpts())
if err != nil {
return err
}
var (
buf bytes.Buffer
enc interface {
Encode(interface{}) error
}
)
opts := []pkger.CreatePkgSetFn{pkger.CreateWithMetadata(cmdOpts.meta)}
switch ext := filepath.Ext(opt.outPath); ext {
case ".yml":
enc = yaml.NewEncoder(&buf)
default:
jsonEnc := json.NewEncoder(&buf)
jsonEnc.SetIndent("", "\t")
enc = jsonEnc
resTypes := []struct {
kind pkger.Kind
idStrs []string
}{
{kind: pkger.KindBucket, idStrs: strings.Split(cmdOpts.buckets, ",")},
{kind: pkger.KindDashboard, idStrs: strings.Split(cmdOpts.dashboards, ",")},
{kind: pkger.KindLabel, idStrs: strings.Split(cmdOpts.labels, ",")},
{kind: pkger.KindVariable, idStrs: strings.Split(cmdOpts.variables, ",")},
}
if err := enc.Encode(newPkg); err != nil {
for _, rt := range resTypes {
newOpt, err := getResourcesToClone(rt.kind, rt.idStrs)
if err != nil {
return ierror.Wrap(err, rt.kind.String())
}
opts = append(opts, newOpt)
}
if cmdOpts.resourceType == "" {
return writePkg(cmd.OutOrStdout(), pkgSVC, cmdOpts.outPath, opts...)
}
kind := pkger.NewKind(cmdOpts.resourceType)
if err := kind.OK(); err != nil {
return errors.New("resource type must be one of bucket|dashboard|label|variable; got: " + cmdOpts.resourceType)
}
stdinInpt, _ := readStdInFull()
if len(stdinInpt) > 0 {
args = stdinInpt
}
resTypeOpt, err := getResourcesToClone(kind, args)
if err != nil {
return err
}
return ioutil.WriteFile(opt.outPath, buf.Bytes(), os.ModePerm)
return writePkg(cmd.OutOrStdout(), pkgSVC, cmdOpts.outPath, append(opts, resTypeOpt)...)
}
}
type httpClientOpts struct {
token, addr string
skipVerify bool
// getResourcesToClone parses the raw ID strings and returns a create option
// that clones each identified resource as the provided kind. An error from
// ID parsing is returned as-is so the caller can annotate it with the kind.
func getResourcesToClone(kind pkger.Kind, idStrs []string) (pkger.CreatePkgSetFn, error) {
	ids, err := toInfluxIDs(idStrs)
	if err != nil {
		return nil, err
	}

	resources := make([]pkger.ResourceToClone, 0, len(ids))
	for _, id := range ids {
		resources = append(resources, pkger.ResourceToClone{
			Kind: kind,
			ID:   id,
		})
	}
	return pkger.CreateWithExistingResources(resources...), nil
}
// readStdInFull reads all input piped to the process via stdin.
//
// If stdin is attached to a terminal (a character device) there is nothing
// piped in and nil is returned without reading. Otherwise stdin is read to
// EOF and split into non-empty, whitespace-trimmed lines.
func readStdInFull() ([]string, error) {
	info, err := os.Stdin.Stat()
	if err != nil {
		return nil, err
	}

	// A character device means an interactive terminal; no piped input.
	if (info.Mode() & os.ModeCharDevice) == os.ModeCharDevice {
		return nil, nil
	}

	// Fix: the read error was previously discarded (b, _ := ...), which could
	// silently yield a truncated ID list.
	b, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		return nil, err
	}

	var stdinInput []string
	for _, line := range bytes.Split(b, []byte("\n")) {
		trimmed := bytes.TrimSpace(line)
		if len(trimmed) == 0 {
			continue
		}
		stdinInput = append(stdinInput, string(trimmed))
	}
	return stdinInput, nil
}
// toInfluxIDs parses each argument into an influxdb.ID. Blank entries are
// skipped; every invalid entry is collected so all parse failures are
// reported together in a single error.
func toInfluxIDs(args []string) ([]influxdb.ID, error) {
	var (
		ids      []influxdb.ID
		failures []string
	)

	for _, raw := range args {
		cleaned := normStr(raw)
		if cleaned == "" {
			continue
		}

		id, err := influxdb.IDFromString(cleaned)
		if err != nil {
			failures = append(failures, "arg must provide a valid 16 length ID; got: "+raw)
			continue
		}
		ids = append(ids, *id)
	}

	if len(failures) > 0 {
		return nil, errors.New(strings.Join(failures, "\n\t"))
	}
	return ids, nil
}
// pkgExportAllCmd returns the `pkg export all` cobra command, which exports
// every resource owned by an organization as a single package.
func pkgExportAllCmd(newSVCFn pkgSVCFn) *cobra.Command {
	cmd := &cobra.Command{
		Use:   "all",
		Short: "Export all existing resources for an organization as a package",
	}

	// Reuses pkgNewOpts: the flag surface (output file + pkg metadata) is the
	// same as `pkg new`, plus the org id that scopes the export.
	var opts pkgNewOpts
	cmd.Flags().StringVarP(&opts.outPath, "file", "f", "", "output file for created pkg; defaults to std out if no file provided; the extension of provided file (.yml/.json) will dictate encoding")
	cmd.Flags().StringVarP(&opts.orgID, "org-id", "o", "", "organization id")
	cmd.Flags().StringVarP(&opts.meta.Name, "name", "n", "", "name for new pkg")
	cmd.Flags().StringVarP(&opts.meta.Description, "description", "d", "", "description for new pkg")
	cmd.Flags().StringVarP(&opts.meta.Version, "version", "v", "", "version for new pkg")
	cmd.RunE = pkgExportAllRunEFn(newSVCFn, &opts)

	return cmd
}
// pkgExportAllRunEFn returns the cobra RunE for `pkg export all`. It builds
// a pkg containing every resource of the org identified by --org-id and
// writes it to the output file, or stdout when no file is given.
func pkgExportAllRunEFn(newSVCFn pkgSVCFn, opt *pkgNewOpts) func(*cobra.Command, []string) error {
	return func(cmd *cobra.Command, args []string) error {
		pkgSVC, err := newSVCFn(flags.httpClientOpts())
		if err != nil {
			return err
		}

		opts := []pkger.CreatePkgSetFn{pkger.CreateWithMetadata(opt.meta)}

		// --org-id is effectively required: an empty or malformed value fails
		// here before any service call is made.
		orgID, err := influxdb.IDFromString(opt.orgID)
		if err != nil {
			return err
		}
		opts = append(opts, pkger.CreateWithAllOrgResources(*orgID))

		return writePkg(cmd.OutOrStdout(), pkgSVC, opt.outPath, opts...)
	}
}
// normStr normalizes user-provided input by trimming surrounding whitespace
// and lowercasing the remainder.
func normStr(in string) string {
	trimmed := strings.TrimSpace(in)
	return strings.ToLower(trimmed)
}
// writePkg creates a pkg from the given create options, encodes it (format
// chosen from outPath's extension), and writes it to outPath. When outPath
// is empty the encoded pkg is streamed to w (typically stdout) instead.
func writePkg(w io.Writer, pkgSVC pkger.SVC, outPath string, opts ...pkger.CreatePkgSetFn) error {
	pkg, err := pkgSVC.CreatePkg(context.Background(), opts...)
	if err != nil {
		return err
	}

	buf, err := createPkgBuf(pkg, outPath)
	if err != nil {
		return err
	}

	if outPath == "" {
		_, err := io.Copy(w, buf)
		return err
	}

	// Fix: previously os.ModePerm (0777). An exported package is a plain data
	// file and should not be executable or world-writable.
	return ioutil.WriteFile(outPath, buf.Bytes(), 0644)
}
// createPkgBuf encodes pkg into an in-memory buffer. The extension of
// outPath selects the encoding: ".json" yields tab-indented JSON; anything
// else, including an empty path (stdout), yields YAML.
func createPkgBuf(pkg *pkger.Pkg, outPath string) (*bytes.Buffer, error) {
	var buf bytes.Buffer

	var enc interface {
		Encode(interface{}) error
	}
	if filepath.Ext(outPath) == ".json" {
		jsonEnc := json.NewEncoder(&buf)
		jsonEnc.SetIndent("", "\t")
		enc = jsonEnc
	} else {
		enc = yaml.NewEncoder(&buf)
	}

	if err := enc.Encode(pkg); err != nil {
		return nil, err
	}
	return &buf, nil
}
func newPkgerSVC(cliReqOpts httpClientOpts) (pkger.SVC, error) {

View File

@ -1,12 +1,20 @@
package main
import (
"bytes"
"context"
"io/ioutil"
"os"
"path"
"path/filepath"
"strconv"
"strings"
"testing"
"github.com/influxdata/influxdb"
"github.com/influxdata/influxdb/kit/errors"
"github.com/influxdata/influxdb/pkger"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -18,22 +26,30 @@ func Test_Pkg(t *testing.T) {
}
}
hasNotZeroDefault := func(t *testing.T, expected, actual string) {
t.Helper()
if expected == "" {
assert.NotZero(t, actual)
return
}
assert.Equal(t, expected, actual)
}
t.Run("new", func(t *testing.T) {
tests := []struct {
name string
encoding pkger.Encoding
filename string
flags []struct{ name, val string }
expectedMeta pkger.Metadata
args pkgFileArgs
}{
{
name: "yaml out",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
flags: []struct{ name, val string }{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
args: pkgFileArgs{
name: "yaml out",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
flags: []flagArg{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
},
},
expectedMeta: pkger.Metadata{
Name: "new name",
@ -42,13 +58,15 @@ func Test_Pkg(t *testing.T) {
},
},
{
name: "json out",
encoding: pkger.EncodingJSON,
filename: "pkg_1.json",
flags: []struct{ name, val string }{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
args: pkgFileArgs{
name: "json out",
encoding: pkger.EncodingJSON,
filename: "pkg_1.json",
flags: []flagArg{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
},
},
expectedMeta: pkger.Metadata{
Name: "new name",
@ -57,50 +75,369 @@ func Test_Pkg(t *testing.T) {
},
},
{
name: "quiet mode",
encoding: pkger.EncodingJSON,
filename: "pkg_1.json",
flags: []struct{ name, val string }{
{name: "quiet", val: "true"},
args: pkgFileArgs{
name: "quiet mode",
encoding: pkger.EncodingJSON,
filename: "pkg_1.json",
flags: []flagArg{
{name: "quiet", val: "true"},
},
},
},
}
cmdFn := func() *cobra.Command {
return pkgNewCmd(fakeSVCFn(pkger.NewService()))
}
for _, tt := range tests {
fn := func(t *testing.T) {
tempDir := newTempDir(t)
defer os.RemoveAll(tempDir)
pathToFile := filepath.Join(tempDir, tt.filename)
cmd := pkgCreateCmd(fakeSVCFn(pkger.NewService()))
require.NoError(t, cmd.Flags().Set("out", pathToFile))
for _, f := range tt.flags {
require.NoError(t, cmd.Flags().Set(f.name, f.val))
}
require.NoError(t, cmd.Execute())
pkg, err := pkger.Parse(tt.encoding, pkger.FromFile(pathToFile), pkger.ValidWithoutResources())
require.NoError(t, err)
assert.Equal(t, pkger.KindPackage.String(), pkg.Kind)
testPkgWrites(t, cmdFn, tt.args, func(t *testing.T, pkg *pkger.Pkg) {
assert.Equal(t, tt.expectedMeta.Description, pkg.Metadata.Description)
hasNotZeroDefault := func(t *testing.T, expected, actual string) {
t.Helper()
if expected == "" {
assert.NotZero(t, actual)
return
}
assert.Equal(t, expected, actual)
}
hasNotZeroDefault(t, tt.expectedMeta.Name, pkg.Metadata.Name)
hasNotZeroDefault(t, tt.expectedMeta.Version, pkg.Metadata.Version)
}
t.Run(tt.name, fn)
})
}
})
t.Run("export all", func(t *testing.T) {
expectedOrgID := influxdb.ID(9000)
tests := []struct {
pkgFileArgs
expectedMeta pkger.Metadata
}{
{
pkgFileArgs: pkgFileArgs{
name: "yaml out",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
flags: []flagArg{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
{name: "org-id", val: expectedOrgID.String()},
},
},
expectedMeta: pkger.Metadata{
Name: "new name",
Description: "new desc",
Version: "new version",
},
},
}
cmdFn := func() *cobra.Command {
pkgSVC := &fakePkgSVC{
createFn: func(_ context.Context, opts ...pkger.CreatePkgSetFn) (*pkger.Pkg, error) {
opt := pkger.CreateOpt{}
for _, o := range opts {
if err := o(&opt); err != nil {
return nil, err
}
}
if !opt.OrgIDs[expectedOrgID] {
return nil, errors.New("did not provide expected orgID")
}
pkg := pkger.Pkg{
APIVersion: pkger.APIVersion,
Kind: pkger.KindPackage,
Metadata: opt.Metadata,
}
pkg.Spec.Resources = append(pkg.Spec.Resources, pkger.Resource{
"name": "bucket1",
"kind": pkger.KindBucket.String(),
})
return &pkg, nil
},
}
return pkgExportAllCmd(fakeSVCFn(pkgSVC))
}
for _, tt := range tests {
testPkgWrites(t, cmdFn, tt.pkgFileArgs, func(t *testing.T, pkg *pkger.Pkg) {
assert.Equal(t, tt.expectedMeta.Description, pkg.Metadata.Description)
hasNotZeroDefault(t, tt.expectedMeta.Name, pkg.Metadata.Name)
hasNotZeroDefault(t, tt.expectedMeta.Version, pkg.Metadata.Version)
sum := pkg.Summary()
require.Len(t, sum.Buckets, 1)
assert.Equal(t, "bucket1", sum.Buckets[0].Name)
})
}
})
t.Run("export resources", func(t *testing.T) {
tests := []struct {
name string
pkgFileArgs
bucketIDs []influxdb.ID
dashIDs []influxdb.ID
labelIDs []influxdb.ID
varIDs []influxdb.ID
expectedMeta pkger.Metadata
}{
{
pkgFileArgs: pkgFileArgs{
name: "buckets",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
flags: []flagArg{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
{name: "version", val: "new version"},
},
},
bucketIDs: []influxdb.ID{1, 2},
expectedMeta: pkger.Metadata{
Name: "new name",
Description: "new desc",
Version: "new version",
},
},
{
pkgFileArgs: pkgFileArgs{
name: "dashboards",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
flags: []flagArg{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
{name: "version", val: "new version"},
},
},
dashIDs: []influxdb.ID{1, 2},
expectedMeta: pkger.Metadata{
Name: "new name",
Description: "new desc",
Version: "new version",
},
},
{
pkgFileArgs: pkgFileArgs{
name: "labels",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
flags: []flagArg{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
{name: "version", val: "new version"},
},
},
labelIDs: []influxdb.ID{1, 2},
expectedMeta: pkger.Metadata{
Name: "new name",
Description: "new desc",
Version: "new version",
},
},
{
pkgFileArgs: pkgFileArgs{
name: "variables",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
flags: []flagArg{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
{name: "version", val: "new version"},
},
},
varIDs: []influxdb.ID{1, 2},
expectedMeta: pkger.Metadata{
Name: "new name",
Description: "new desc",
Version: "new version",
},
},
{
pkgFileArgs: pkgFileArgs{
name: "mixed",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
flags: []flagArg{
{name: "name", val: "new name"},
{name: "description", val: "new desc"},
{name: "version", val: "new version"},
{name: "version", val: "new version"},
},
},
bucketIDs: []influxdb.ID{1, 2},
dashIDs: []influxdb.ID{3, 4},
labelIDs: []influxdb.ID{5, 6},
varIDs: []influxdb.ID{7, 8},
expectedMeta: pkger.Metadata{
Name: "new name",
Description: "new desc",
Version: "new version",
},
},
}
cmdFn := func() *cobra.Command {
pkgSVC := &fakePkgSVC{
createFn: func(_ context.Context, opts ...pkger.CreatePkgSetFn) (*pkger.Pkg, error) {
opt := pkger.CreateOpt{}
for _, o := range opts {
if err := o(&opt); err != nil {
return nil, err
}
}
pkg := pkger.Pkg{
APIVersion: pkger.APIVersion,
Kind: pkger.KindPackage,
Metadata: opt.Metadata,
}
for _, rc := range opt.Resources {
name := rc.Kind.String() + strconv.Itoa(int(rc.ID))
pkg.Spec.Resources = append(pkg.Spec.Resources, pkger.Resource{
"kind": rc.Kind,
"name": name,
})
}
return &pkg, nil
},
}
return pkgExportCmd(fakeSVCFn(pkgSVC))
}
for _, tt := range tests {
tt.flags = append(tt.flags,
flagArg{"buckets", idsStr(tt.bucketIDs...)},
flagArg{"dashboards", idsStr(tt.dashIDs...)},
flagArg{"labels", idsStr(tt.labelIDs...)},
flagArg{"variables", idsStr(tt.varIDs...)},
)
testPkgWrites(t, cmdFn, tt.pkgFileArgs, func(t *testing.T, pkg *pkger.Pkg) {
assert.Equal(t, tt.expectedMeta.Description, pkg.Metadata.Description)
hasNotZeroDefault(t, tt.expectedMeta.Name, pkg.Metadata.Name)
hasNotZeroDefault(t, tt.expectedMeta.Version, pkg.Metadata.Version)
sum := pkg.Summary()
require.Len(t, sum.Buckets, len(tt.bucketIDs))
for i, id := range tt.bucketIDs {
actual := sum.Buckets[i]
assert.Equal(t, "bucket"+strconv.Itoa(int(id)), actual.Name)
}
require.Len(t, sum.Dashboards, len(tt.dashIDs))
for i, id := range tt.dashIDs {
actual := sum.Dashboards[i]
assert.Equal(t, "dashboard"+strconv.Itoa(int(id)), actual.Name)
}
require.Len(t, sum.Labels, len(tt.labelIDs))
for i, id := range tt.labelIDs {
actual := sum.Labels[i]
assert.Equal(t, "label"+strconv.Itoa(int(id)), actual.Name)
}
require.Len(t, sum.Variables, len(tt.varIDs))
for i, id := range tt.varIDs {
actual := sum.Variables[i]
assert.Equal(t, "variable"+strconv.Itoa(int(id)), actual.Name)
}
})
}
})
}
// flagArg is a name/value pair applied to a cobra command's flag set in tests.
type flagArg struct{ name, val string }

// pkgFileArgs describes a table-test case for commands that write a pkg:
// the test name, the output filename, the encoding the file is expected to
// parse as, and the flags to set before executing the command.
type pkgFileArgs struct {
	name     string
	filename string
	encoding pkger.Encoding
	flags    []flagArg
}
// testPkgWrites runs the command under test twice — once writing the pkg to
// a temp file and once writing to an in-memory buffer — and applies assertFn
// to the parsed pkg in both cases.
func testPkgWrites(t *testing.T, newCmdFn func() *cobra.Command, args pkgFileArgs, assertFn func(t *testing.T, pkg *pkger.Pkg)) {
	wrappedCmdFn := func() *cobra.Command {
		cmd := newCmdFn()
		// Clear args so cobra does not pick up the test binary's os.Args.
		cmd.SetArgs([]string{})
		return cmd
	}

	t.Run(path.Join(args.name, "file"), testPkgWritesFile(wrappedCmdFn, args, assertFn))
	t.Run(path.Join(args.name, "buffer"), testPkgWritesToBuffer(wrappedCmdFn, args, assertFn))
}
// testPkgWritesFile returns a subtest that executes the command with --file
// pointed at a temp path, then parses the written file with the expected
// encoding and hands the result to assertFn.
func testPkgWritesFile(newCmdFn func() *cobra.Command, args pkgFileArgs, assertFn func(t *testing.T, pkg *pkger.Pkg)) func(t *testing.T) {
	return func(t *testing.T) {
		t.Helper()

		tempDir := newTempDir(t)
		defer os.RemoveAll(tempDir)

		pathToFile := filepath.Join(tempDir, args.filename)

		cmd := newCmdFn()
		require.NoError(t, cmd.Flags().Set("file", pathToFile))
		for _, f := range args.flags {
			require.NoError(t, cmd.Flags().Set(f.name, f.val))
		}

		require.NoError(t, cmd.Execute())

		// ValidWithoutResources: these commands may legitimately emit a pkg
		// with an empty resources list.
		pkg, err := pkger.Parse(args.encoding, pkger.FromFile(pathToFile), pkger.ValidWithoutResources())
		require.NoError(t, err)

		require.Equal(t, pkger.KindPackage, pkg.Kind)

		assertFn(t, pkg)
	}
}
// testPkgWritesToBuffer returns a subtest that executes the command without
// a --file flag, capturing the pkg from the command's output stream and
// parsing it as YAML (the default encoding when no file is given).
func testPkgWritesToBuffer(newCmdFn func() *cobra.Command, args pkgFileArgs, assertFn func(t *testing.T, pkg *pkger.Pkg)) func(t *testing.T) {
	return func(t *testing.T) {
		t.Helper()

		var buf bytes.Buffer
		cmd := newCmdFn()
		cmd.SetOutput(&buf)
		for _, f := range args.flags {
			require.NoError(t, cmd.Flags().Set(f.name, f.val))
		}

		require.NoError(t, cmd.Execute())

		pkg, err := pkger.Parse(pkger.EncodingYAML, pkger.FromReader(&buf), pkger.ValidWithoutResources())
		require.NoError(t, err)

		require.Equal(t, pkger.KindPackage, pkg.Kind)

		assertFn(t, pkg)
	}
}
// fakePkgSVC is a test double for pkger.SVC. Each method delegates to the
// corresponding function field when set and panics otherwise, so a test
// only stubs the calls it expects to occur.
type fakePkgSVC struct {
	createFn func(ctx context.Context, setters ...pkger.CreatePkgSetFn) (*pkger.Pkg, error)
	dryRunFn func(ctx context.Context, orgID influxdb.ID, pkg *pkger.Pkg) (pkger.Summary, pkger.Diff, error)
	applyFn  func(ctx context.Context, orgID influxdb.ID, pkg *pkger.Pkg) (pkger.Summary, error)
}

// CreatePkg delegates to createFn; panics if the test did not stub it.
func (f *fakePkgSVC) CreatePkg(ctx context.Context, setters ...pkger.CreatePkgSetFn) (*pkger.Pkg, error) {
	if f.createFn != nil {
		return f.createFn(ctx, setters...)
	}
	panic("not implemented")
}

// DryRun delegates to dryRunFn; panics if the test did not stub it.
func (f *fakePkgSVC) DryRun(ctx context.Context, orgID influxdb.ID, pkg *pkger.Pkg) (pkger.Summary, pkger.Diff, error) {
	if f.dryRunFn != nil {
		return f.dryRunFn(ctx, orgID, pkg)
	}
	panic("not implemented")
}

// Apply delegates to applyFn; panics if the test did not stub it.
func (f *fakePkgSVC) Apply(ctx context.Context, orgID influxdb.ID, pkg *pkger.Pkg) (pkger.Summary, error) {
	if f.applyFn != nil {
		return f.applyFn(ctx, orgID, pkg)
	}
	panic("not implemented")
}
func newTempDir(t *testing.T) string {
@ -110,3 +447,11 @@ func newTempDir(t *testing.T) string {
require.NoError(t, err)
return tempDir
}
// idsStr renders the given IDs as a comma-separated string, the format the
// export command's list flags expect.
func idsStr(ids ...influxdb.ID) string {
	strs := make([]string, 0, len(ids))
	for _, id := range ids {
		strs = append(strs, id.String())
	}
	return strings.Join(strs, ",")
}

View File

@ -56,7 +56,7 @@ func TestPkgerHTTPServer(t *testing.T) {
pkg := resp.Pkg
require.NoError(t, pkg.Validate())
assert.Equal(t, pkger.APIVersion, pkg.APIVersion)
assert.Equal(t, "package", pkg.Kind)
assert.Equal(t, pkger.KindPackage, pkg.Kind)
meta := pkg.Metadata
assert.Equal(t, "name1", meta.Name)

View File

@ -25,16 +25,47 @@ func (r ResourceToClone) OK() error {
return nil
}
// uniqResourcesToClone dedupes resources by (kind, id). When duplicates
// collide, an entry carrying an explicit rename (non-empty Name) wins over
// one without, so a user-provided name is never dropped. Output order is
// not deterministic (map iteration order).
func uniqResourcesToClone(resources []ResourceToClone) []ResourceToClone {
	type key struct {
		kind Kind
		id   influxdb.ID
	}
	m := make(map[key]ResourceToClone)

	for i := range resources {
		r := resources[i]
		rKey := key{kind: r.Kind, id: r.ID}
		kr, ok := m[rKey]
		switch {
		case ok && kr.Name == r.Name:
			// exact duplicate; keep the existing entry
		case ok && kr.Name != "" && r.Name == "":
			// existing entry is named and the new one is not; keep the named one
		default:
			m[rKey] = r
		}
	}

	out := make([]ResourceToClone, 0, len(resources))
	for _, r := range m {
		out = append(out, r)
	}
	return out
}
func bucketToResource(bkt influxdb.Bucket, name string) Resource {
if name == "" {
name = bkt.Name
}
return Resource{
fieldKind: KindBucket.title(),
fieldName: name,
fieldDescription: bkt.Description,
fieldBucketRetentionPeriod: bkt.RetentionPeriod.String(),
r := Resource{
fieldKind: KindBucket.title(),
fieldName: name,
}
if bkt.Description != "" {
r[fieldDescription] = bkt.Description
}
if bkt.RetentionPeriod != 0 {
r[fieldBucketRetentionPeriod] = bkt.RetentionPeriod.String()
}
return r
}
type cellView struct {
@ -274,12 +305,17 @@ func labelToResource(l influxdb.Label, name string) Resource {
if name == "" {
name = l.Name
}
return Resource{
fieldKind: KindLabel.title(),
fieldName: name,
fieldLabelColor: l.Properties["color"],
fieldDescription: l.Properties["description"],
r := Resource{
fieldKind: KindLabel.title(),
fieldName: name,
}
if desc := l.Properties["description"]; desc != "" {
r[fieldDescription] = desc
}
if color := l.Properties["color"]; color != "" {
r[fieldLabelColor] = color
}
return r
}
func variableToResource(v influxdb.Variable, name string) Resource {
@ -288,10 +324,13 @@ func variableToResource(v influxdb.Variable, name string) Resource {
}
r := Resource{
fieldKind: KindVariable.title(),
fieldName: name,
fieldDescription: v.Description,
fieldKind: KindVariable.title(),
fieldName: name,
}
if v.Description != "" {
r[fieldDescription] = v.Description
}
args := v.Arguments
if args == nil {
return r

View File

@ -30,7 +30,8 @@ var kinds = map[Kind]bool{
// Kind is a resource kind.
type Kind string
func newKind(s string) Kind {
// NewKind returns the kind parsed from the provided string.
func NewKind(s string) Kind {
	normalized := strings.ToLower(strings.TrimSpace(s))
	return Kind(normalized)
}

View File

@ -126,7 +126,7 @@ func parse(dec decoder, opts ...ValidateOptFn) (*Pkg, error) {
// to another power, the graphing of the package is handled within itself.
type Pkg struct {
APIVersion string `yaml:"apiVersion" json:"apiVersion"`
Kind string `yaml:"kind" json:"kind"`
Kind Kind `yaml:"kind" json:"kind"`
Metadata Metadata `yaml:"meta" json:"meta"`
Spec struct {
Resources []Resource `yaml:"resources" json:"resources"`
@ -308,7 +308,7 @@ func (p *Pkg) validMetadata() error {
})
}
if mKind := newKind(p.Kind); mKind != KindPackage {
if !p.Kind.is(KindPackage) {
failures = append(failures, ValidationErr{
Field: "kind",
Msg: `must be of kind "Package"`,
@ -473,13 +473,6 @@ func (p *Pkg) graphDashboards() error {
}}
}
if _, ok := p.mDashboards[r.Name()]; ok {
return []ValidationErr{{
Field: "name",
Msg: "duplicate name: " + r.Name(),
}}
}
dash := &dashboard{
Name: r.Name(),
Description: r.stringShort(fieldDescription),
@ -777,17 +770,17 @@ func (r Resource) Name() string {
}
func (r Resource) kind() (Kind, error) {
if k, ok := r[fieldKind].(Kind); ok {
return k, k.OK()
}
resKind, ok := r.string(fieldKind)
if !ok {
return KindUnknown, errors.New("no kind provided")
}
k := newKind(resKind)
if err := k.OK(); err != nil {
return k, err
}
return k, nil
k := NewKind(resKind)
return k, k.OK()
}
func (r Resource) chartKind() (chartKind, error) {
@ -1001,12 +994,21 @@ func uniqResources(resources []Resource) []Resource {
if err != nil {
continue
}
rKey := key{kind: k, name: r.Name()}
if m[rKey] {
if err := k.OK(); err != nil {
continue
}
m[rKey] = true
out = append(out, r)
switch k {
// these 3 kinds are unique, have existing state identifiable by name
case KindBucket, KindLabel, KindVariable:
rKey := key{kind: k, name: r.Name()}
if m[rKey] {
continue
}
m[rKey] = true
fallthrough
default:
out = append(out, r)
}
}
return out
}

View File

@ -3050,7 +3050,7 @@ func nextField(t *testing.T, field string) (string, int) {
type baseAsserts struct {
version string
kind string
kind Kind
description string
metaName string
metaVersion string
@ -3063,7 +3063,7 @@ func validParsedPkg(t *testing.T, path string, encoding Encoding, expected baseA
require.NoError(t, err)
require.Equal(t, expected.version, pkg.APIVersion)
require.Equal(t, expected.kind, pkg.Kind)
require.True(t, pkg.Kind.is(expected.kind))
require.Equal(t, expected.description, pkg.Metadata.Description)
require.Equal(t, expected.metaName, pkg.Metadata.Name)
require.Equal(t, expected.metaVersion, pkg.Metadata.Version)
@ -3108,7 +3108,7 @@ func testfileRunner(t *testing.T, path string, testFn func(t *testing.T, pkg *Pk
pkg := validParsedPkg(t, path+tt.extension, tt.encoding, baseAsserts{
version: "0.1.0",
kind: "Package",
kind: KindPackage,
description: "pack description",
metaName: "pkg_name",
metaVersion: "1",

View File

@ -10,6 +10,7 @@ import (
"time"
"github.com/influxdata/influxdb"
ierrors "github.com/influxdata/influxdb/kit/errors"
"go.uber.org/zap"
)
@ -100,38 +101,55 @@ func NewService(opts ...ServiceSetterFn) *Service {
}
// CreatePkgSetFn is a functional input for setting the pkg fields.
type CreatePkgSetFn func(opt *createOpt) error
type CreatePkgSetFn func(opt *CreateOpt) error
type createOpt struct {
metadata Metadata
resources []ResourceToClone
// CreateOpt are the options for creating a new package.
type CreateOpt struct {
Metadata Metadata
OrgIDs map[influxdb.ID]bool
Resources []ResourceToClone
}
// CreateWithMetadata sets the metadata on the pkg in a CreatePkg call.
func CreateWithMetadata(meta Metadata) CreatePkgSetFn {
return func(opt *createOpt) error {
opt.metadata = meta
return func(opt *CreateOpt) error {
opt.Metadata = meta
return nil
}
}
// CreateWithExistingResources allows the create method to clone existing resources.
func CreateWithExistingResources(resources ...ResourceToClone) CreatePkgSetFn {
return func(opt *createOpt) error {
return func(opt *CreateOpt) error {
for _, r := range resources {
if err := r.OK(); err != nil {
return err
}
r.Kind = newKind(string(r.Kind))
r.Kind = NewKind(string(r.Kind))
}
opt.resources = append(opt.resources, resources...)
opt.Resources = append(opt.Resources, resources...)
return nil
}
}
// CreateWithAllOrgResources allows the create method to clone all existing resources
// for the given organization.
func CreateWithAllOrgResources(orgID influxdb.ID) CreatePkgSetFn {
	return func(opt *CreateOpt) error {
		// A zero ID can never identify a real org; fail fast here rather than
		// during the export.
		if orgID == 0 {
			return errors.New("orgID provided must not be zero")
		}
		// Lazily initialize the set; several of these options may be applied.
		if opt.OrgIDs == nil {
			opt.OrgIDs = make(map[influxdb.ID]bool)
		}
		opt.OrgIDs[orgID] = true
		return nil
	}
}
// CreatePkg will produce a pkg from the parameters provided.
func (s *Service) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (*Pkg, error) {
opt := new(createOpt)
opt := new(CreateOpt)
for _, setter := range setters {
if err := setter(opt); err != nil {
return nil, err
@ -140,12 +158,12 @@ func (s *Service) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (*Pk
pkg := &Pkg{
APIVersion: APIVersion,
Kind: KindPackage.String(),
Metadata: opt.metadata,
Kind: KindPackage,
Metadata: opt.Metadata,
Spec: struct {
Resources []Resource `yaml:"resources" json:"resources"`
}{
Resources: make([]Resource, 0, len(opt.resources)),
Resources: make([]Resource, 0, len(opt.Resources)),
},
}
if pkg.Metadata.Name == "" {
@ -158,7 +176,15 @@ func (s *Service) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (*Pk
}
cloneAssFn := s.resourceCloneAssociationsGen()
for _, r := range opt.resources {
for orgID := range opt.OrgIDs {
resourcesToClone, err := s.cloneOrgResources(ctx, orgID)
if err != nil {
return nil, err
}
opt.Resources = append(opt.Resources, resourcesToClone...)
}
for _, r := range uniqResourcesToClone(opt.Resources) {
newResources, err := s.resourceCloneToResource(ctx, r, cloneAssFn)
if err != nil {
return nil, err
@ -193,6 +219,117 @@ func (s *Service) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (*Pk
return pkg, nil
}
// cloneOrgResources gathers every supported resource type (buckets,
// dashboards, labels, variables) owned by the org into a flat list of
// ResourceToClone entries for CreatePkg to materialize.
func (s *Service) cloneOrgResources(ctx context.Context, orgID influxdb.ID) ([]ResourceToClone, error) {
	// Table of per-type clone functions; resType is used only to prefix the
	// wrapped error with the resource type that failed.
	resourceTypeGens := []struct {
		resType influxdb.ResourceType
		cloneFn func(context.Context, influxdb.ID) ([]ResourceToClone, error)
	}{
		{
			resType: influxdb.BucketsResourceType,
			cloneFn: s.cloneOrgBuckets,
		},
		{
			resType: influxdb.DashboardsResourceType,
			cloneFn: s.cloneOrgDashboards,
		},
		{
			resType: influxdb.LabelsResourceType,
			cloneFn: s.cloneOrgLabels,
		},
		{
			resType: influxdb.VariablesResourceType,
			cloneFn: s.cloneOrgVariables,
		},
	}

	var resources []ResourceToClone
	for _, resGen := range resourceTypeGens {
		existingResources, err := resGen.cloneFn(ctx, orgID)
		if err != nil {
			return nil, ierrors.Wrap(err, "finding "+string(resGen.resType))
		}
		resources = append(resources, existingResources...)
	}
	return resources, nil
}
// cloneOrgBuckets lists the org's buckets and returns them as clone targets,
// skipping system buckets (e.g. _tasks/_monitoring), which are managed by
// the platform and should not be exported.
func (s *Service) cloneOrgBuckets(ctx context.Context, orgID influxdb.ID) ([]ResourceToClone, error) {
	buckets, _, err := s.bucketSVC.FindBuckets(ctx, influxdb.BucketFilter{
		OrganizationID: &orgID,
	})
	if err != nil {
		return nil, err
	}

	resources := make([]ResourceToClone, 0, len(buckets))
	for _, b := range buckets {
		if b.Type == influxdb.BucketTypeSystem {
			continue
		}
		resources = append(resources, ResourceToClone{
			Kind: KindBucket,
			ID:   b.ID,
		})
	}
	return resources, nil
}
// cloneOrgDashboards returns clone descriptors for dashboards in the org.
//
// NOTE(review): the lookup is capped at 100 results with no pagination, so an
// org with more than 100 dashboards would have the remainder silently
// omitted — confirm whether paging is needed here (the label/variable
// listings use a 10000 limit).
func (s *Service) cloneOrgDashboards(ctx context.Context, orgID influxdb.ID) ([]ResourceToClone, error) {
	dashboards, _, err := s.dashSVC.FindDashboards(ctx, influxdb.DashboardFilter{
		OrganizationID: &orgID,
	}, influxdb.FindOptions{Limit: 100})
	if err != nil {
		return nil, err
	}

	resources := make([]ResourceToClone, 0, len(dashboards))
	for i := range dashboards {
		resources = append(resources, ResourceToClone{
			Kind: KindDashboard,
			ID:   dashboards[i].ID,
		})
	}
	return resources, nil
}
// cloneOrgLabels returns clone descriptors for every label in the org.
func (s *Service) cloneOrgLabels(ctx context.Context, orgID influxdb.ID) ([]ResourceToClone, error) {
	labels, err := s.labelSVC.FindLabels(ctx, influxdb.LabelFilter{
		OrgID: &orgID,
	}, influxdb.FindOptions{Limit: 10000})
	if err != nil {
		// No extra wrapping here: cloneOrgResources already wraps each
		// per-type error with "finding <resource type>", so the previous
		// inner wrap surfaced a duplicated "finding labels: finding labels"
		// prefix. This also matches the bucket/dashboard/variable siblings,
		// which return the raw error.
		return nil, err
	}

	resources := make([]ResourceToClone, 0, len(labels))
	for _, l := range labels {
		resources = append(resources, ResourceToClone{
			Kind: KindLabel,
			ID:   l.ID,
		})
	}
	return resources, nil
}
// cloneOrgVariables returns clone descriptors for every variable in the org.
func (s *Service) cloneOrgVariables(ctx context.Context, orgID influxdb.ID) ([]ResourceToClone, error) {
	variables, err := s.varSVC.FindVariables(ctx, influxdb.VariableFilter{
		OrganizationID: &orgID,
	}, influxdb.FindOptions{Limit: 10000})
	if err != nil {
		return nil, err
	}

	resources := make([]ResourceToClone, 0, len(variables))
	for i := range variables {
		resources = append(resources, ResourceToClone{
			Kind: KindVariable,
			ID:   variables[i].ID,
		})
	}
	return resources, nil
}
func (s *Service) resourceCloneToResource(ctx context.Context, r ResourceToClone, cFn cloneAssociationsFn) ([]Resource, error) {
var newResource Resource
switch {
@ -591,14 +728,6 @@ func (s *Service) dryRunResourceLabelMapping(ctx context.Context, la labelAssoci
return nil
}
// labelSlcToMap indexes the given labels by name. If two labels share a
// name, the later entry in the slice wins.
func labelSlcToMap(labels []*label) map[string]*label {
	m := make(map[string]*label, len(labels))
	for _, l := range labels {
		m[l.Name] = l
	}
	return m
}
// Apply will apply all the resources identified in the provided pkg. The entire pkg will be applied
// in its entirety. If a failure happens midway then the entire pkg will be rolled back to the state
// from before the pkg were applied.
@ -1141,3 +1270,11 @@ func (a applyErrs) toError(resType, msg string) error {
}
return errors.New(errMsg)
}
// labelSlcToMap indexes the given labels by name. If two labels share a
// name, the later entry in the slice wins.
func labelSlcToMap(labels []*label) map[string]*label {
	m := make(map[string]*label)
	for i := range labels {
		m[labels[i].Name] = labels[i]
	}
	return m
}

View File

@ -1307,5 +1307,99 @@ func TestService(t *testing.T) {
})
})
})
// Exercises CreatePkg with CreateWithAllOrgResources: each resource type
// (bucket, dashboard, label, variable) in the org should be discovered via
// the org-scoped find calls and then cloned by ID into the resulting pkg.
t.Run("with org id", func(t *testing.T) {
	orgID := influxdb.ID(9000)

	// Each mock's list fn asserts the org filter was propagated and serves a
	// single resource; the ByID fns assert the clone step looks up exactly
	// the ID returned by the listing.
	bktSVC := mock.NewBucketService()
	bktSVC.FindBucketsFn = func(_ context.Context, f influxdb.BucketFilter, opts ...influxdb.FindOptions) ([]*influxdb.Bucket, int, error) {
		if f.OrganizationID == nil || *f.OrganizationID != orgID {
			return nil, 0, errors.New("not suppose to get here")
		}
		return []*influxdb.Bucket{{ID: 1, Name: "bucket"}}, 1, nil
	}
	bktSVC.FindBucketByIDFn = func(_ context.Context, id influxdb.ID) (*influxdb.Bucket, error) {
		if id != 1 {
			return nil, errors.New("wrong id")
		}
		return &influxdb.Bucket{ID: 1, Name: "bucket"}, nil
	}

	dashSVC := mock.NewDashboardService()
	dashSVC.FindDashboardsF = func(_ context.Context, f influxdb.DashboardFilter, _ influxdb.FindOptions) ([]*influxdb.Dashboard, int, error) {
		if f.OrganizationID == nil || *f.OrganizationID != orgID {
			return nil, 0, errors.New("not suppose to get here")
		}
		return []*influxdb.Dashboard{{
			ID:    2,
			Name:  "dashboard",
			Cells: []*influxdb.Cell{},
		}}, 1, nil
	}
	dashSVC.FindDashboardByIDF = func(_ context.Context, id influxdb.ID) (*influxdb.Dashboard, error) {
		if id != 2 {
			return nil, errors.New("wrong id")
		}
		return &influxdb.Dashboard{
			ID:    2,
			Name:  "dashboard",
			Cells: []*influxdb.Cell{},
		}, nil
	}

	labelSVC := mock.NewLabelService()
	labelSVC.FindLabelsFn = func(_ context.Context, f influxdb.LabelFilter) ([]*influxdb.Label, error) {
		if f.OrgID == nil || *f.OrgID != orgID {
			return nil, errors.New("not suppose to get here")
		}
		return []*influxdb.Label{{ID: 3, Name: "label"}}, nil
	}
	labelSVC.FindLabelByIDFn = func(_ context.Context, id influxdb.ID) (*influxdb.Label, error) {
		if id != 3 {
			return nil, errors.New("wrong id")
		}
		return &influxdb.Label{ID: 3, Name: "label"}, nil
	}

	varSVC := mock.NewVariableService()
	varSVC.FindVariablesF = func(_ context.Context, f influxdb.VariableFilter, _ ...influxdb.FindOptions) ([]*influxdb.Variable, error) {
		if f.OrganizationID == nil || *f.OrganizationID != orgID {
			return nil, errors.New("not suppose to get here")
		}
		return []*influxdb.Variable{{ID: 4, Name: "variable"}}, nil
	}
	varSVC.FindVariableByIDF = func(_ context.Context, id influxdb.ID) (*influxdb.Variable, error) {
		if id != 4 {
			return nil, errors.New("wrong id")
		}
		return &influxdb.Variable{ID: 4, Name: "variable"}, nil
	}

	svc := NewService(
		WithBucketSVC(bktSVC),
		WithDashboardSVC(dashSVC),
		WithLabelSVC(labelSVC),
		WithVariableSVC(varSVC),
	)

	pkg, err := svc.CreatePkg(context.TODO(), CreateWithAllOrgResources(orgID))
	require.NoError(t, err)

	// One of each resource type should land in the pkg summary.
	bkts := pkg.Summary().Buckets
	require.Len(t, bkts, 1)
	assert.Equal(t, "bucket", bkts[0].Name)

	dashs := pkg.Summary().Dashboards
	require.Len(t, dashs, 1)
	assert.Equal(t, "dashboard", dashs[0].Name)

	labels := pkg.Summary().Labels
	require.Len(t, labels, 1)
	assert.Equal(t, "label", labels[0].Name)

	vars := pkg.Summary().Variables
	require.Len(t, vars, 1)
	assert.Equal(t, "variable", vars[0].Name)
})
})
}