chore(pkger): refactor return values for Apply/DryRun to enable extending it

references: #17997
pull/18277/head
Johnny Steenbergen 2020-05-28 15:09:30 -07:00 committed by Johnny Steenbergen
parent f9ed497abf
commit b25b4021c3
12 changed files with 261 additions and 174 deletions
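In brief: DryRun and Apply previously returned (Summary, Diff, error); they now return a single PkgImpactSummary value alongside the error, so the result can grow new fields without every signature, middleware, and test fake changing again. The sketch below is illustrative only — toy Summary and Diff types and hypothetical applyOld/applyNew helpers stand in for the real pkger code — and simply shows the call-site migration the hunks below perform.

package main

import "fmt"

// Toy stand-ins for the pkger package's Summary and Diff types (illustrative only).
type Summary struct{ MissingSecrets []string }
type Diff struct{ NewResources int }

// PkgImpactSummary mirrors the struct added in the service hunk below:
// the dry-run/apply outputs bundled behind a single value.
type PkgImpactSummary struct {
	Diff    Diff
	Summary Summary
}

// applyOld models the pre-refactor shape: three return values.
func applyOld() (Summary, Diff, error) {
	return Summary{MissingSecrets: []string{"routing-key"}}, Diff{NewResources: 1}, nil
}

// applyNew models the post-refactor shape: one impact value plus the error.
func applyNew() (PkgImpactSummary, error) {
	return PkgImpactSummary{
		Diff:    Diff{NewResources: 1},
		Summary: Summary{MissingSecrets: []string{"routing-key"}},
	}, nil
}

func main() {
	// Before: callers destructured two values (and often discarded one).
	sum, diff, _ := applyOld()
	fmt.Println(sum.MissingSecrets, diff.NewResources)

	// After: callers read fields off the single impact value, as the CLI,
	// HTTP handler, and test hunks below now do.
	impact, _ := applyNew()
	fmt.Println(impact.Summary.MissingSecrets, impact.Diff.NewResources)
}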

View File

@@ -153,12 +153,12 @@ func (b *cmdPkgBuilder) pkgApplyRunEFn(cmd *cobra.Command, args []string) error
pkger.ApplyWithStackID(stackID),
}
-drySum, diff, err := svc.DryRun(context.Background(), influxOrgID, 0, pkg, opts...)
+dryRunImpact, err := svc.DryRun(context.Background(), influxOrgID, 0, pkg, opts...)
if err != nil {
return err
}
-providedSecrets := mapKeys(drySum.MissingSecrets, b.applyOpts.secrets)
+providedSecrets := mapKeys(dryRunImpact.Summary.MissingSecrets, b.applyOpts.secrets)
if !isTTY {
const skipDefault = "$$skip-this-key$$"
for _, secretKey := range missingValKeys(providedSecrets) {
@@ -170,7 +170,7 @@ func (b *cmdPkgBuilder) pkgApplyRunEFn(cmd *cobra.Command, args []string) error
}
}
-if err := b.printPkgDiff(diff); err != nil {
+if err := b.printPkgDiff(dryRunImpact.Diff); err != nil {
return err
}
@@ -183,18 +183,18 @@ func (b *cmdPkgBuilder) pkgApplyRunEFn(cmd *cobra.Command, args []string) error
}
}
-if b.applyOpts.force != "conflict" && isTTY && diff.HasConflicts() {
+if b.applyOpts.force != "conflict" && isTTY && dryRunImpact.Diff.HasConflicts() {
return errors.New("package has conflicts with existing resources and cannot safely apply")
}
opts = append(opts, pkger.ApplyWithSecrets(providedSecrets))
-summary, _, err := svc.Apply(context.Background(), influxOrgID, 0, pkg, opts...)
+impact, err := svc.Apply(context.Background(), influxOrgID, 0, pkg, opts...)
if err != nil {
return err
}
-b.printPkgSummary(summary)
+b.printPkgSummary(impact.Summary)
return nil
}

View File

@@ -708,8 +708,8 @@ func testPkgWritesToBuffer(newCmdFn func(w io.Writer) *cobra.Command, args pkgFi
type fakePkgSVC struct {
initStackFn func(ctx context.Context, userID influxdb.ID, stack pkger.Stack) (pkger.Stack, error)
createFn func(ctx context.Context, setters ...pkger.CreatePkgSetFn) (*pkger.Pkg, error)
-dryRunFn func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg) (pkger.Summary, pkger.Diff, error)
-applyFn func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error)
+dryRunFn func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg) (pkger.PkgImpactSummary, error)
+applyFn func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error)
}
var _ pkger.SVC = (*fakePkgSVC)(nil)
@@ -736,14 +736,14 @@ func (f *fakePkgSVC) CreatePkg(ctx context.Context, setters ...pkger.CreatePkgSe
panic("not implemented")
}
-func (f *fakePkgSVC) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) {
+func (f *fakePkgSVC) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error) {
if f.dryRunFn != nil {
return f.dryRunFn(ctx, orgID, userID, pkg)
}
panic("not implemented")
}
-func (f *fakePkgSVC) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) {
+func (f *fakePkgSVC) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error) {
if f.applyFn != nil {
return f.applyFn(ctx, orgID, userID, pkg, opts...)
}

View File

@@ -264,9 +264,11 @@ func TestLauncher_Pkger(t *testing.T) {
newVariableObject("non-existent-var", "", ""),
)
-sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, allResourcesPkg, pkger.ApplyWithStackID(newStack.ID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, allResourcesPkg, pkger.ApplyWithStackID(newStack.ID))
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Buckets, 1)
assert.NotZero(t, sum.Buckets[0].ID)
require.Len(t, sum.Checks, 1)
@@ -400,22 +402,24 @@ func TestLauncher_Pkger(t *testing.T) {
assert.Equal(t, "bucket-1", sum.Buckets[1].Name)
}
-sum, _, err := svc.DryRun(ctx, l.Org.ID, l.User.ID, nil, pkger.ApplyWithStackID(newStack.ID))
+impact, err := svc.DryRun(ctx, l.Org.ID, l.User.ID, nil, pkger.ApplyWithStackID(newStack.ID))
require.NoError(t, err)
-sumEquals(t, sum)
+sumEquals(t, impact.Summary)
-sum, _, err = svc.Apply(ctx, l.Org.ID, l.User.ID, nil, pkger.ApplyWithStackID(newStack.ID))
+impact, err = svc.Apply(ctx, l.Org.ID, l.User.ID, nil, pkger.ApplyWithStackID(newStack.ID))
require.NoError(t, err)
-sumEquals(t, sum)
+sumEquals(t, impact.Summary)
})
t.Run("apply a pkg with a stack and associations", func(t *testing.T) {
testLabelMappingFn := func(t *testing.T, stackID influxdb.ID, pkg *pkger.Pkg, assertAssociatedLabelsFn func(pkger.Summary, []*influxdb.Label, influxdb.ResourceType)) pkger.Summary {
t.Helper()
-sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Buckets, 1)
assert.NotZero(t, sum.Buckets[0].ID)
assert.Equal(t, "bucket", sum.Buckets[0].Name)
@@ -550,7 +554,7 @@ func TestLauncher_Pkger(t *testing.T) {
svc = pkger.MWLogging(logger)(svc)
pkg := newPkg(append(newObjectsFn(), labelObj)...)
-_, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stack.ID))
+_, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stack.ID))
require.Error(t, err)
resources := []struct {
@@ -626,9 +630,11 @@ func TestLauncher_Pkger(t *testing.T) {
newVariableObject(initialVariablePkgName, "var char", "init desc"),
)
-summary, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, initialPkg, pkger.ApplyWithStackID(stack.ID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, initialPkg, pkger.ApplyWithStackID(stack.ID))
require.NoError(t, err)
+summary := impact.Summary
require.Len(t, summary.Buckets, 1)
assert.NotZero(t, summary.Buckets[0].ID)
assert.Equal(t, "display name", summary.Buckets[0].Name)
@@ -743,9 +749,11 @@ func TestLauncher_Pkger(t *testing.T) {
newTelegrafObject(initialSum.TelegrafConfigs[0].PkgName, updateTelegrafName, ""),
newVariableObject(initialSum.Variables[0].PkgName, updateVariableName, ""),
)
-sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, updatedPkg, pkger.ApplyWithStackID(stack.ID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, updatedPkg, pkger.ApplyWithStackID(stack.ID))
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Buckets, 1)
assert.Equal(t, initialSum.Buckets[0].ID, sum.Buckets[0].ID)
assert.Equal(t, updateBucketName, sum.Buckets[0].Name)
@@ -855,7 +863,7 @@ func TestLauncher_Pkger(t *testing.T) {
newTelegrafObject("z-telegraf-rolls-back", "", ""),
newVariableObject("z-var-rolls-back", "", ""),
)
-_, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkgWithDelete, pkger.ApplyWithStackID(stack.ID))
+_, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkgWithDelete, pkger.ApplyWithStackID(stack.ID))
require.Error(t, err)
t.Log("validate all resources are rolled back")
@@ -936,9 +944,11 @@ func TestLauncher_Pkger(t *testing.T) {
newTelegrafObject("non-existent-tele", "", ""),
newVariableObject("non-existent-var", "", ""),
)
-sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, allNewResourcesPkg, pkger.ApplyWithStackID(stack.ID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, allNewResourcesPkg, pkger.ApplyWithStackID(stack.ID))
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Buckets, 1)
assert.NotEqual(t, initialSum.Buckets[0].ID, sum.Buckets[0].ID)
assert.NotZero(t, sum.Buckets[0].ID)
@@ -1051,15 +1061,15 @@ func TestLauncher_Pkger(t *testing.T) {
}
}()
-initialSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stack.ID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stack.ID))
require.NoError(t, err)
-return stack.ID, cleanup, initialSum
+return stack.ID, cleanup, impact.Summary
}
testValidRemoval := func(t *testing.T, stackID influxdb.ID) {
t.Helper()
-_, _, err := svc.Apply(
+_, err := svc.Apply(
ctx,
l.Org.ID,
l.User.ID,
@@ -1087,9 +1097,11 @@ func TestLauncher_Pkger(t *testing.T) {
t.Run("should create new resource when attempting to update", func(t *testing.T) {
testUserDeletedVariable(t, func(t *testing.T, stackID influxdb.ID, initialVarObj pkger.Object, initialSum pkger.Summary) {
pkg := newPkg(initialVarObj)
-updateSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+updateSum := impact.Summary
require.Len(t, updateSum.Variables, 1)
initVar, updateVar := initialSum.Variables[0], updateSum.Variables[0]
assert.NotEqual(t, initVar.ID, updateVar.ID)
@@ -1123,9 +1135,11 @@ func TestLauncher_Pkger(t *testing.T) {
t.Run("should create new resource when attempting to update", func(t *testing.T) {
testUserDeletedBucket(t, func(t *testing.T, stackID influxdb.ID, initialObj pkger.Object, initialSum pkger.Summary) {
pkg := newPkg(initialObj)
-updateSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+updateSum := impact.Summary
require.Len(t, updateSum.Buckets, 1)
intial, updated := initialSum.Buckets[0], updateSum.Buckets[0]
assert.NotEqual(t, intial.ID, updated.ID)
@@ -1159,9 +1173,11 @@ func TestLauncher_Pkger(t *testing.T) {
t.Run("should create new resource when attempting to update", func(t *testing.T) {
testUserDeletedCheck(t, func(t *testing.T, stackID influxdb.ID, initialObj pkger.Object, initialSum pkger.Summary) {
pkg := newPkg(initialObj)
-updateSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+updateSum := impact.Summary
require.Len(t, updateSum.Checks, 1)
intial, updated := initialSum.Checks[0].Check, updateSum.Checks[0].Check
assert.NotEqual(t, intial.GetID(), updated.GetID())
@@ -1196,9 +1212,11 @@ func TestLauncher_Pkger(t *testing.T) {
t.Run("should create new resource when attempting to update", func(t *testing.T) {
testUserDeletedDashboard(t, func(t *testing.T, stackID influxdb.ID, initialObj pkger.Object, initialSum pkger.Summary) {
pkg := newPkg(initialObj)
-updateSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+updateSum := impact.Summary
require.Len(t, updateSum.Dashboards, 1)
initial, updated := initialSum.Dashboards[0], updateSum.Dashboards[0]
assert.NotEqual(t, initial.ID, updated.ID)
@@ -1232,9 +1250,11 @@ func TestLauncher_Pkger(t *testing.T) {
t.Run("should create new resource when attempting to update", func(t *testing.T) {
testUserDeletedLabel(t, func(t *testing.T, stackID influxdb.ID, initialObj pkger.Object, initialSum pkger.Summary) {
pkg := newPkg(initialObj)
-updateSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+updateSum := impact.Summary
require.Len(t, updateSum.Labels, 1)
initial, updated := initialSum.Labels[0], updateSum.Labels[0]
assert.NotEqual(t, initial.ID, updated.ID)
@@ -1268,9 +1288,11 @@ func TestLauncher_Pkger(t *testing.T) {
t.Run("should create new resource when attempting to update", func(t *testing.T) {
testUserDeletedEndpoint(t, func(t *testing.T, stackID influxdb.ID, initialObj pkger.Object, initialSum pkger.Summary) {
pkg := newPkg(initialObj)
-updateSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+updateSum := impact.Summary
require.Len(t, updateSum.NotificationEndpoints, 1)
initial, updated := initialSum.NotificationEndpoints[0].NotificationEndpoint, updateSum.NotificationEndpoints[0].NotificationEndpoint
assert.NotEqual(t, initial.GetID(), updated.GetID())
@@ -1308,9 +1330,11 @@ func TestLauncher_Pkger(t *testing.T) {
t.Run("should create new resource when attempting to update", func(t *testing.T) {
testUserDeletedRule(t, func(t *testing.T, stackID influxdb.ID, initialObjects []pkger.Object, initialSum pkger.Summary) {
pkg := newPkg(initialObjects...)
-updateSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+updateSum := impact.Summary
require.Len(t, updateSum.NotificationRules, 1)
initial, updated := initialSum.NotificationRules[0], updateSum.NotificationRules[0]
assert.NotEqual(t, initial.ID, updated.ID)
@@ -1344,9 +1368,11 @@ func TestLauncher_Pkger(t *testing.T) {
t.Run("should create new resource when attempting to update", func(t *testing.T) {
testUserDeletedTask(t, func(t *testing.T, stackID influxdb.ID, initialObj pkger.Object, initialSum pkger.Summary) {
pkg := newPkg(initialObj)
-updateSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+updateSum := impact.Summary
require.Len(t, updateSum.Tasks, 1)
initial, updated := initialSum.Tasks[0], updateSum.Tasks[0]
assert.NotEqual(t, initial.ID, updated.ID)
@@ -1380,9 +1406,11 @@ func TestLauncher_Pkger(t *testing.T) {
t.Run("should create new resource when attempting to update", func(t *testing.T) {
testUserDeletedTelegraf(t, func(t *testing.T, stackID influxdb.ID, initialObj pkger.Object, initialSum pkger.Summary) {
pkg := newPkg(initialObj)
-updateSum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithStackID(stackID))
require.NoError(t, err)
+updateSum := impact.Summary
require.Len(t, updateSum.TelegrafConfigs, 1)
initial, updated := initialSum.TelegrafConfigs[0].TelegrafConfig, updateSum.TelegrafConfigs[0].TelegrafConfig
assert.NotEqual(t, initial.ID, updated.ID)
@@ -1416,7 +1444,7 @@ func TestLauncher_Pkger(t *testing.T) {
pkger.WithVariableSVC(l.VariableService(t)),
)
-_, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, newPkg(t))
+_, err := svc.Apply(ctx, l.Org.ID, l.User.ID, newPkg(t))
require.Error(t, err)
bkts, _, err := l.BucketService(t).FindBuckets(ctx, influxdb.BucketFilter{OrganizationID: &l.Org.ID})
@@ -1487,9 +1515,11 @@ func TestLauncher_Pkger(t *testing.T) {
}
t.Run("dry run a package with no existing resources", func(t *testing.T) {
-sum, diff, err := svc.DryRun(ctx, l.Org.ID, l.User.ID, newPkg(t))
+impact, err := svc.DryRun(ctx, l.Org.ID, l.User.ID, newPkg(t))
require.NoError(t, err)
+sum, diff := impact.Summary, impact.Diff
require.Len(t, diff.Buckets, 1)
assert.True(t, diff.Buckets[0].IsNew())
@@ -1596,12 +1626,14 @@ spec:
pkg, err := pkger.Parse(pkger.EncodingYAML, pkger.FromString(pkgStr))
require.NoError(t, err)
-sum, _, err := svc.DryRun(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithEnvRefs(map[string]string{
+impact, err := svc.DryRun(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithEnvRefs(map[string]string{
"bkt-1-name-ref": "new-bkt-name",
"label-1-name-ref": "new-label-name",
}))
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Buckets, 1)
assert.Equal(t, "new-bkt-name", sum.Buckets[0].Name)
@@ -1611,9 +1643,11 @@ spec:
t.Run("apply a package of all new resources", func(t *testing.T) {
// this initial test is also setup for the sub tests
-sum1, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, newPkg(t))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, newPkg(t))
require.NoError(t, err)
+sum1 := impact.Summary
labels := sum1.Labels
require.Len(t, labels, 2)
assert.NotZero(t, labels[0].ID)
@@ -1929,9 +1963,11 @@ spec:
t.Run("pkg with same bkt-var-label does nto create new resources for them", func(t *testing.T) {
// validate the new package doesn't create new resources for bkts/labels/vars
// since names collide.
-sum2, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, newPkg(t))
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, newPkg(t))
require.NoError(t, err)
+sum2 := impact.Summary
require.Equal(t, sum1.Buckets, sum2.Buckets)
require.Equal(t, sum1.Labels, sum2.Labels)
require.Equal(t, sum1.NotificationEndpoints, sum2.NotificationEndpoints)
@@ -1947,9 +1983,9 @@ spec:
pkg, err := pkger.Parse(pkger.EncodingYAML, pkger.FromString(pkgStr))
require.NoError(t, err)
-sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg)
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg)
require.NoError(t, err)
-return sum
+return impact.Summary
}
pkgWithSecretRaw := fmt.Sprintf(`
@@ -2143,7 +2179,7 @@ spec:
pkger.WithVariableSVC(l.VariableService(t)),
)
-_, _, err = svc.Apply(ctx, l.Org.ID, 0, updatePkg)
+_, err = svc.Apply(ctx, l.Org.ID, 0, updatePkg)
require.Error(t, err)
bkt, err := l.BucketService(t).FindBucketByID(ctx, influxdb.ID(sum1Bkts[0].ID))
@@ -2224,10 +2260,10 @@ spec:
pkg, err := pkger.Parse(pkger.EncodingYAML, pkger.FromString(pkgStr))
require.NoError(t, err)
-sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg)
+impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkg)
require.NoError(t, err)
-require.Len(t, sum.Tasks, 1)
+require.Len(t, impact.Summary.Tasks, 1)
})
t.Run("apply a package with env refs", func(t *testing.T) {
@@ -2334,9 +2370,11 @@ spec:
pkg, err := pkger.Parse(pkger.EncodingYAML, pkger.FromString(pkgStr))
require.NoError(t, err)
-sum, _, err := svc.DryRun(ctx, l.Org.ID, l.User.ID, pkg)
+impact, err := svc.DryRun(ctx, l.Org.ID, l.User.ID, pkg)
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Buckets, 1)
assert.Equal(t, "env-bkt-1-name-ref", sum.Buckets[0].Name)
assert.Len(t, sum.Buckets[0].LabelAssociations, 1)
@@ -2370,7 +2408,7 @@ spec:
}
assert.Equal(t, expectedMissingEnvs, sum.MissingEnvs)
-sum, _, err = svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithEnvRefs(map[string]string{
+impact, err = svc.Apply(ctx, l.Org.ID, l.User.ID, pkg, pkger.ApplyWithEnvRefs(map[string]string{
"bkt-1-name-ref": "rucket_threeve",
"check-1-name-ref": "check_threeve",
"dash-1-name-ref": "dash_threeve",
@@ -2383,6 +2421,8 @@ spec:
}))
require.NoError(t, err)
+sum = impact.Summary
assert.Equal(t, "rucket_threeve", sum.Buckets[0].Name)
assert.Equal(t, "check_threeve", sum.Checks[0].Check.GetName())
assert.Equal(t, "dash_threeve", sum.Dashboards[0].Name)

View File

@@ -133,25 +133,25 @@ func (s *HTTPRemoteService) CreatePkg(ctx context.Context, setters ...CreatePkgS
// DryRun provides a dry run of the pkg application. The pkg will be marked verified
// for later calls to Apply. This func will be run on an Apply if it has not been run
// already.
-func (s *HTTPRemoteService) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (Summary, Diff, error) {
+func (s *HTTPRemoteService) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error) {
return s.apply(ctx, orgID, pkg, true, opts...)
}
// Apply will apply all the resources identified in the provided pkg. The entire pkg will be applied
// in its entirety. If a failure happens midway then the entire pkg will be rolled back to the state
// from before the pkg was applied.
-func (s *HTTPRemoteService) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (Summary, Diff, error) {
+func (s *HTTPRemoteService) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error) {
return s.apply(ctx, orgID, pkg, false, opts...)
}
-func (s *HTTPRemoteService) apply(ctx context.Context, orgID influxdb.ID, pkg *Pkg, dryRun bool, opts ...ApplyOptFn) (Summary, Diff, error) {
+func (s *HTTPRemoteService) apply(ctx context.Context, orgID influxdb.ID, pkg *Pkg, dryRun bool, opts ...ApplyOptFn) (PkgImpactSummary, error) {
opt := applyOptFromOptFns(opts...)
var rawPkg []byte
if pkg != nil {
b, err := pkg.Encode(EncodingJSON)
if err != nil {
-return Summary{}, Diff{}, err
+return PkgImpactSummary{}, err
}
rawPkg = b
}
@@ -174,8 +174,11 @@ func (s *HTTPRemoteService) apply(ctx context.Context, orgID influxdb.ID, pkg *P
DecodeJSON(&resp).
Do(ctx)
if err != nil {
-return Summary{}, Diff{}, err
+return PkgImpactSummary{}, err
}
-return resp.Summary, resp.Diff, NewParseError(resp.Errors...)
+return PkgImpactSummary{
+Diff: resp.Diff,
+Summary: resp.Summary,
+}, NewParseError(resp.Errors...)
}

View File

@@ -478,11 +478,11 @@ func (s *HTTPServer) applyPkg(w http.ResponseWriter, r *http.Request) {
}
if reqBody.DryRun {
-sum, diff, err := s.svc.DryRun(r.Context(), *orgID, userID, parsedPkg, applyOpts...)
+impact, err := s.svc.DryRun(r.Context(), *orgID, userID, parsedPkg, applyOpts...)
if IsParseErr(err) {
s.api.Respond(w, r, http.StatusUnprocessableEntity, RespApplyPkg{
-Diff: diff,
-Summary: sum,
+Diff: impact.Diff,
+Summary: impact.Summary,
Errors: convertParseErr(err),
})
return
@@ -493,23 +493,23 @@ func (s *HTTPServer) applyPkg(w http.ResponseWriter, r *http.Request) {
}
s.api.Respond(w, r, http.StatusOK, RespApplyPkg{
-Diff: diff,
-Summary: sum,
+Diff: impact.Diff,
+Summary: impact.Summary,
})
return
}
applyOpts = append(applyOpts, ApplyWithSecrets(reqBody.Secrets))
-sum, diff, err := s.svc.Apply(r.Context(), *orgID, userID, parsedPkg, applyOpts...)
+impact, err := s.svc.Apply(r.Context(), *orgID, userID, parsedPkg, applyOpts...)
if err != nil && !IsParseErr(err) {
s.api.Err(w, r, err)
return
}
s.api.Respond(w, r, http.StatusCreated, RespApplyPkg{
-Diff: diff,
-Summary: sum,
+Diff: impact.Diff,
+Summary: impact.Summary,
Errors: convertParseErr(err),
})
}

View File

@@ -153,9 +153,9 @@ func TestPkgerHTTPServer(t *testing.T) {
for _, tt := range tests {
fn := func(t *testing.T) {
svc := &fakeSVC{
-dryRunFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) {
+dryRunFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error) {
if err := pkg.Validate(); err != nil {
-return pkger.Summary{}, pkger.Diff{}, err
+return pkger.PkgImpactSummary{}, err
}
sum := pkg.Summary()
var diff pkger.Diff
@@ -166,7 +166,10 @@ func TestPkgerHTTPServer(t *testing.T) {
},
})
}
-return sum, diff, nil
+return pkger.PkgImpactSummary{
+Summary: sum,
+Diff: diff,
+}, nil
},
}
@@ -209,9 +212,9 @@ func TestPkgerHTTPServer(t *testing.T) {
for _, tt := range tests {
fn := func(t *testing.T) {
svc := &fakeSVC{
-dryRunFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) {
+dryRunFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error) {
if err := pkg.Validate(); err != nil {
-return pkger.Summary{}, pkger.Diff{}, err
+return pkger.PkgImpactSummary{}, err
}
sum := pkg.Summary()
var diff pkger.Diff
@@ -222,7 +225,10 @@ func TestPkgerHTTPServer(t *testing.T) {
},
})
}
-return sum, diff, nil
+return pkger.PkgImpactSummary{
+Diff: diff,
+Summary: sum,
+}, nil
},
}
@@ -313,9 +319,9 @@ func TestPkgerHTTPServer(t *testing.T) {
for _, tt := range tests {
fn := func(t *testing.T) {
svc := &fakeSVC{
-dryRunFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) {
+dryRunFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error) {
if err := pkg.Validate(); err != nil {
-return pkger.Summary{}, pkger.Diff{}, err
+return pkger.PkgImpactSummary{}, err
}
sum := pkg.Summary()
var diff pkger.Diff
@@ -326,7 +332,11 @@ func TestPkgerHTTPServer(t *testing.T) {
},
})
}
-return sum, diff, nil
+return pkger.PkgImpactSummary{
+Diff: diff,
+Summary: sum,
+}, nil
},
}
@@ -385,8 +395,10 @@ func TestPkgerHTTPServer(t *testing.T) {
for _, tt := range tests {
fn := func(t *testing.T) {
svc := &fakeSVC{
-dryRunFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) {
-return pkg.Summary(), pkger.Diff{}, nil
+dryRunFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error) {
+return pkger.PkgImpactSummary{
+Summary: pkg.Summary(),
+}, nil
},
}
@@ -407,7 +419,7 @@ func TestPkgerHTTPServer(t *testing.T) {
t.Run("apply a pkg", func(t *testing.T) {
svc := &fakeSVC{
-applyFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) {
+applyFn: func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error) {
var opt pkger.ApplyOpt
for _, o := range opts {
o(&opt)
@@ -425,7 +437,11 @@ func TestPkgerHTTPServer(t *testing.T) {
for key := range opt.MissingSecrets {
sum.MissingSecrets = append(sum.MissingSecrets, key)
}
-return sum, diff, nil
+return pkger.PkgImpactSummary{
+Diff: diff,
+Summary: sum,
+}, nil
},
}
@@ -824,8 +840,8 @@ func decodeBody(t *testing.T, r io.Reader, v interface{}) {
type fakeSVC struct {
initStack func(ctx context.Context, userID influxdb.ID, stack pkger.Stack) (pkger.Stack, error)
listStacksFn func(ctx context.Context, orgID influxdb.ID, filter pkger.ListFilter) ([]pkger.Stack, error)
-dryRunFn func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error)
-applyFn func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error)
+dryRunFn func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error)
+applyFn func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error)
}
var _ pkger.SVC = (*fakeSVC)(nil)
@@ -852,7 +868,7 @@ func (f *fakeSVC) CreatePkg(ctx context.Context, setters ...pkger.CreatePkgSetFn
panic("not implemented")
}
-func (f *fakeSVC) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) {
+func (f *fakeSVC) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error) {
if f.dryRunFn == nil {
panic("not implemented")
}
@@ -860,7 +876,7 @@ func (f *fakeSVC) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *pk
return f.dryRunFn(ctx, orgID, userID, pkg, opts...)
}
-func (f *fakeSVC) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) {
+func (f *fakeSVC) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.PkgImpactSummary, error) {
if f.applyFn == nil {
panic("not implemented")
}

View File

@@ -64,8 +64,8 @@ type SVC interface {
ListStacks(ctx context.Context, orgID influxdb.ID, filter ListFilter) ([]Stack, error)
CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (*Pkg, error)
-DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (Summary, Diff, error)
-Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (Summary, Diff, error)
+DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error)
+Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error)
}
// SVCMiddleware is a service middleware func.
@@ -690,28 +690,38 @@ func (s *Service) filterOrgResourceKinds(resourceKindFilters []Kind) []struct {
return resourceTypeGens
}
+// PkgImpactSummary represents the impact the application of a pkg will have on the system.
+type PkgImpactSummary struct {
+Diff Diff
+Summary Summary
+}
// DryRun provides a dry run of the pkg application. The pkg will be marked verified
// for later calls to Apply. This func will be run on an Apply if it has not been run
// already.
-func (s *Service) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (Summary, Diff, error) {
+func (s *Service) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error) {
opt := applyOptFromOptFns(opts...)
if opt.StackID != 0 {
remotePkgs, err := s.getStackRemotePackages(ctx, opt.StackID)
if err != nil {
-return Summary{}, Diff{}, err
+return PkgImpactSummary{}, err
}
pkg, err = Combine(append(remotePkgs, pkg), ValidWithoutResources())
if err != nil {
-return Summary{}, Diff{}, err
+return PkgImpactSummary{}, err
}
}
state, err := s.dryRun(ctx, orgID, pkg, opt)
if err != nil {
-return Summary{}, Diff{}, err
+return PkgImpactSummary{}, err
}
-return newSummaryFromStatePkg(state, pkg), state.diff(), nil
+return PkgImpactSummary{
+Diff: state.diff(),
+Summary: newSummaryFromStatePkg(state, pkg),
+}, nil
}
func (s *Service) dryRun(ctx context.Context, orgID influxdb.ID, pkg *Pkg, opt ApplyOpt) (*stateCoordinator, error) {
@@ -1177,32 +1187,32 @@ func applyOptFromOptFns(opts ...ApplyOptFn) ApplyOpt {
// Apply will apply all the resources identified in the provided pkg. The entire pkg will be applied
// in its entirety. If a failure happens midway then the entire pkg will be rolled back to the state
// from before the pkg were applied.
-func (s *Service) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (sum Summary, diff Diff, e error) {
+func (s *Service) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (impact PkgImpactSummary, e error) {
opt := applyOptFromOptFns(opts...)
if opt.StackID != 0 {
remotePkgs, err := s.getStackRemotePackages(ctx, opt.StackID)
if err != nil {
-return Summary{}, Diff{}, err
+return PkgImpactSummary{}, err
}
pkg, err = Combine(append(remotePkgs, pkg), ValidWithoutResources())
if err != nil {
-return Summary{}, Diff{}, err
+return PkgImpactSummary{}, err
}
}
if err := pkg.Validate(ValidWithoutResources()); err != nil {
-return Summary{}, Diff{}, failedValidationErr(err)
+return PkgImpactSummary{}, failedValidationErr(err)
}
if err := pkg.applyEnvRefs(opt.EnvRefs); err != nil {
-return Summary{}, Diff{}, failedValidationErr(err)
+return PkgImpactSummary{}, failedValidationErr(err)
}
state, err := s.dryRun(ctx, orgID, pkg, opt)
if err != nil {
-return Summary{}, Diff{}, err
+return PkgImpactSummary{}, err
}
defer func(stackID influxdb.ID) {
@@ -1224,12 +1234,15 @@ func (s *Service) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg
err = s.applyState(ctx, coordinator, orgID, userID, state, opt.MissingSecrets)
if err != nil {
-return Summary{}, Diff{}, err
+return PkgImpactSummary{}, err
}
pkg.applySecrets(opt.MissingSecrets)
-return newSummaryFromStatePkg(state, pkg), state.diff(), err
+return PkgImpactSummary{
+Diff: state.diff(),
+Summary: newSummaryFromStatePkg(state, pkg),
+}, nil
}
func (s *Service) applyState(ctx context.Context, coordinator *rollbackCoordinator, orgID, userID influxdb.ID, state *stateCoordinator, missingSecrets map[string]string) (e error) {

View File

@@ -56,10 +56,10 @@ func (s *authMW) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (*Pkg
return s.next.CreatePkg(ctx, setters...)
}
-func (s *authMW) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (Summary, Diff, error) {
+func (s *authMW) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error) {
return s.next.DryRun(ctx, orgID, userID, pkg, opts...)
}
-func (s *authMW) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (Summary, Diff, error) {
+func (s *authMW) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error) {
return s.next.Apply(ctx, orgID, userID, pkg, opts...)
}

View File

@@ -96,7 +96,7 @@ func (s *loggingMW) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (p
return s.next.CreatePkg(ctx, setters...)
}
-func (s *loggingMW) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (sum Summary, diff Diff, err error) {
+func (s *loggingMW) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (impact PkgImpactSummary, err error) {
defer func(start time.Time) {
dur := zap.Duration("took", time.Since(start))
if err != nil {
@@ -114,7 +114,7 @@ func (s *loggingMW) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *
o(&opt)
}
-fields := s.summaryLogFields(sum)
+fields := s.summaryLogFields(impact.Summary)
if opt.StackID != 0 {
fields = append(fields, zap.Stringer("stackID", opt.StackID))
}
@@ -124,7 +124,7 @@ func (s *loggingMW) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *
return s.next.DryRun(ctx, orgID, userID, pkg, opts...)
}
-func (s *loggingMW) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (sum Summary, diff Diff, err error) {
+func (s *loggingMW) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (impact PkgImpactSummary, err error) {
defer func(start time.Time) {
dur := zap.Duration("took", time.Since(start))
if err != nil {
@@ -137,7 +137,7 @@ func (s *loggingMW) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *P
return
}
-fields := s.summaryLogFields(sum)
+fields := s.summaryLogFields(impact.Summary)
opt := applyOptFromOptFns(opts...)
if opt.StackID != 0 {

View File

@@ -50,14 +50,14 @@ func (s *mwMetrics) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (*
return pkg, rec(err)
}
-func (s *mwMetrics) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (Summary, Diff, error) {
+func (s *mwMetrics) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error) {
rec := s.rec.Record("dry_run")
-sum, diff, err := s.next.DryRun(ctx, orgID, userID, pkg, opts...)
-return sum, diff, rec(err)
+impact, err := s.next.DryRun(ctx, orgID, userID, pkg, opts...)
+return impact, rec(err)
}
-func (s *mwMetrics) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (Summary, Diff, error) {
+func (s *mwMetrics) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error) {
rec := s.rec.Record("apply")
-sum, diff, err := s.next.Apply(ctx, orgID, userID, pkg, opts...)
-return sum, diff, rec(err)
+impact, err := s.next.Apply(ctx, orgID, userID, pkg, opts...)
+return impact, rec(err)
}

View File

@@ -78,10 +78,10 @@ func TestService(t *testing.T) {
}
svc := newTestService(WithBucketSVC(fakeBktSVC))
-_, diff, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
+impact, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
require.NoError(t, err)
-require.Len(t, diff.Buckets, 2)
+require.Len(t, impact.Diff.Buckets, 2)
expected := DiffBucket{
DiffIdentifier: DiffIdentifier{
@@ -101,7 +101,7 @@ func TestService(t *testing.T) {
RetentionRules: retentionRules{newRetentionRule(time.Hour)},
},
}
-assert.Contains(t, diff.Buckets, expected)
+assert.Contains(t, impact.Diff.Buckets, expected)
})
})
@@ -113,10 +113,10 @@ func TestService(t *testing.T) {
}
svc := newTestService(WithBucketSVC(fakeBktSVC))
-_, diff, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
+impact, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
require.NoError(t, err)
-require.Len(t, diff.Buckets, 2)
+require.Len(t, impact.Diff.Buckets, 2)
expected := DiffBucket{
DiffIdentifier: DiffIdentifier{
@@ -129,7 +129,7 @@ func TestService(t *testing.T) {
RetentionRules: retentionRules{newRetentionRule(time.Hour)},
},
}
-assert.Contains(t, diff.Buckets, expected)
+assert.Contains(t, impact.Diff.Buckets, expected)
})
})
})
@@ -154,10 +154,10 @@ func TestService(t *testing.T) {
svc := newTestService(WithCheckSVC(fakeCheckSVC))
-_, diff, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
+impact, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
require.NoError(t, err)
-checks := diff.Checks
+checks := impact.Diff.Checks
require.Len(t, checks, 2)
check0 := checks[0]
assert.True(t, check0.IsNew())
@@ -192,10 +192,10 @@ func TestService(t *testing.T) {
}
svc := newTestService(WithLabelSVC(fakeLabelSVC))
-_, diff, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
+impact, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
require.NoError(t, err)
-require.Len(t, diff.Labels, 3)
+require.Len(t, impact.Diff.Labels, 3)
expected := DiffLabel{
DiffIdentifier: DiffIdentifier{
@@ -214,14 +214,14 @@ func TestService(t *testing.T) {
Description: "label 1 description",
},
}
-assert.Contains(t, diff.Labels, expected)
+assert.Contains(t, impact.Diff.Labels, expected)
expected.PkgName = "label-2"
expected.New.Name = "label-2"
expected.New.Color = "#000000"
expected.New.Description = "label 2 description"
expected.Old.Name = "label-2"
-assert.Contains(t, diff.Labels, expected)
+assert.Contains(t, impact.Diff.Labels, expected)
})
})
@@ -233,10 +233,11 @@ func TestService(t *testing.T) {
}
svc := newTestService(WithLabelSVC(fakeLabelSVC))
-_, diff, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
+impact, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
require.NoError(t, err)
-require.Len(t, diff.Labels, 3)
+labels := impact.Diff.Labels
+require.Len(t, labels, 3)
expected := DiffLabel{
DiffIdentifier: DiffIdentifier{
@@ -249,13 +250,13 @@ func TestService(t *testing.T) {
Description: "label 1 description",
},
}
-assert.Contains(t, diff.Labels, expected)
+assert.Contains(t, labels, expected)
expected.PkgName = "label-2"
expected.New.Name = "label-2"
expected.New.Color = "#000000"
expected.New.Description = "label 2 description"
-assert.Contains(t, diff.Labels, expected)
+assert.Contains(t, labels, expected)
})
})
})
@@ -281,16 +282,16 @@ func TestService(t *testing.T) {
svc := newTestService(WithNotificationEndpointSVC(fakeEndpointSVC))
-_, diff, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
+impact, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
require.NoError(t, err)
-require.Len(t, diff.NotificationEndpoints, 5)
+require.Len(t, impact.Diff.NotificationEndpoints, 5)
var (
newEndpoints []DiffNotificationEndpoint
existingEndpoints []DiffNotificationEndpoint
)
-for _, e := range diff.NotificationEndpoints {
+for _, e := range impact.Diff.NotificationEndpoints {
if e.Old != nil {
existingEndpoints = append(existingEndpoints, e)
continue
@@ -349,12 +350,12 @@ func TestService(t *testing.T) {
svc := newTestService(WithNotificationEndpointSVC(fakeEndpointSVC))
-_, diff, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
+impact, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
require.NoError(t, err)
-require.Len(t, diff.NotificationRules, 1)
-actual := diff.NotificationRules[0].New
+require.Len(t, impact.Diff.NotificationRules, 1)
+actual := impact.Diff.NotificationRules[0].New
assert.Equal(t, "rule_0", actual.Name)
assert.Equal(t, "desc_0", actual.Description)
assert.Equal(t, "slack", actual.EndpointType)
@@ -385,10 +386,10 @@ func TestService(t *testing.T) {
}
svc := newTestService(WithSecretSVC(fakeSecretSVC))
-sum, _, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
+impact, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
require.NoError(t, err)
-assert.Equal(t, []string{"routing-key"}, sum.MissingSecrets)
+assert.Equal(t, []string{"routing-key"}, impact.Summary.MissingSecrets)
})
})
@@ -406,10 +407,11 @@ func TestService(t *testing.T) {
}
svc := newTestService(WithVariableSVC(fakeVarSVC))
-_, diff, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
+impact, err := svc.DryRun(context.TODO(), influxdb.ID(100), 0, pkg)
require.NoError(t, err)
-require.Len(t, diff.Variables, 4)
+variables := impact.Diff.Variables
+require.Len(t, variables, 4)
expected := DiffVariable{
DiffIdentifier: DiffIdentifier{
@@ -430,7 +432,7 @@ func TestService(t *testing.T) {
},
},
}
-assert.Equal(t, expected, diff.Variables[0])
+assert.Equal(t, expected, variables[0])
expected = DiffVariable{
DiffIdentifier: DiffIdentifier{
@@ -447,7 +449,7 @@ func TestService(t *testing.T) {
},
},
}
-assert.Equal(t, expected, diff.Variables[1])
+assert.Equal(t, expected, variables[1])
})
})
})
@@ -473,9 +475,10 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000)
-sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Buckets, 2)
expected := SummaryBucket{
@@ -523,9 +526,10 @@ func TestService(t *testing.T) {
svc := newTestService(WithBucketSVC(fakeBktSVC))
-sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Buckets, 2)
expected := SummaryBucket{
@@ -561,7 +565,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000)
-_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+_, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err)
assert.GreaterOrEqual(t, fakeBktSVC.DeleteBucketCalls.Count(), 1)
@@ -582,9 +586,10 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000)
-sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Checks, 2)
containsWithID := func(t *testing.T, name string) {
@@ -623,7 +628,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000)
-_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+_, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err)
assert.GreaterOrEqual(t, fakeCheckSVC.DeleteCheckCalls.Count(), 1)
@@ -648,9 +653,10 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000)
-sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Labels, 3)
assert.Contains(t, sum.Labels, SummaryLabel{
@@ -698,7 +704,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000)
-_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+_, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err)
assert.GreaterOrEqual(t, fakeLabelSVC.DeleteLabelCalls.Count(), 1)
@@ -754,9 +760,10 @@ func TestService(t *testing.T) {
svc := newTestService(WithLabelSVC(fakeLabelSVC))
-sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Labels, 3)
assert.Contains(t, sum.Labels, SummaryLabel{
@@ -808,9 +815,11 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000)
-sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err)
+sum := impact.Summary
require.Len(t, sum.Dashboards, 2)
dash1 := sum.Dashboards[0]
assert.NotZero(t, dash1.ID)
@@ -848,7 +857,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000)
-_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
+_, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err)
assert.True(t, deletedDashs[1]) assert.True(t, deletedDashs[1])
@ -857,7 +866,7 @@ func TestService(t *testing.T) {
}) })
t.Run("label mapping", func(t *testing.T) { t.Run("label mapping", func(t *testing.T) {
testLabelMappingV2ApplyFn := func(t *testing.T, filename string, numExpected int, settersFn func() []ServiceSetterFn) { testLabelMappingApplyFn := func(t *testing.T, filename string, numExpected int, settersFn func() []ServiceSetterFn) {
t.Helper() t.Helper()
testfileRunner(t, filename, func(t *testing.T, pkg *Pkg) { testfileRunner(t, filename, func(t *testing.T, pkg *Pkg) {
t.Helper() t.Helper()
@ -883,14 +892,14 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, numExpected, fakeLabelSVC.CreateLabelMappingCalls.Count()) assert.Equal(t, numExpected, fakeLabelSVC.CreateLabelMappingCalls.Count())
}) })
} }
testLabelMappingV2RollbackFn := func(t *testing.T, filename string, killCount int, settersFn func() []ServiceSetterFn) { testLabelMappingRollbackFn := func(t *testing.T, filename string, killCount int, settersFn func() []ServiceSetterFn) {
t.Helper() t.Helper()
testfileRunner(t, filename, func(t *testing.T, pkg *Pkg) { testfileRunner(t, filename, func(t *testing.T, pkg *Pkg) {
t.Helper() t.Helper()
@ -919,7 +928,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err) require.Error(t, err)
assert.GreaterOrEqual(t, fakeLabelSVC.DeleteLabelMappingCalls.Count(), killCount) assert.GreaterOrEqual(t, fakeLabelSVC.DeleteLabelMappingCalls.Count(), killCount)
@ -941,11 +950,11 @@ func TestService(t *testing.T) {
} }
t.Run("applies successfully", func(t *testing.T) { t.Run("applies successfully", func(t *testing.T) {
testLabelMappingV2ApplyFn(t, "testdata/bucket_associates_label.yml", 4, bktOpt) testLabelMappingApplyFn(t, "testdata/bucket_associates_label.yml", 4, bktOpt)
}) })
t.Run("deletes new label mappings on error", func(t *testing.T) { t.Run("deletes new label mappings on error", func(t *testing.T) {
testLabelMappingV2RollbackFn(t, "testdata/bucket_associates_label.yml", 2, bktOpt) testLabelMappingRollbackFn(t, "testdata/bucket_associates_label.yml", 2, bktOpt)
}) })
}) })
@ -964,11 +973,11 @@ func TestService(t *testing.T) {
} }
t.Run("applies successfully", func(t *testing.T) { t.Run("applies successfully", func(t *testing.T) {
testLabelMappingV2ApplyFn(t, "testdata/checks.yml", 2, opts) testLabelMappingApplyFn(t, "testdata/checks.yml", 2, opts)
}) })
t.Run("deletes new label mappings on error", func(t *testing.T) { t.Run("deletes new label mappings on error", func(t *testing.T) {
testLabelMappingV2RollbackFn(t, "testdata/checks.yml", 1, opts) testLabelMappingRollbackFn(t, "testdata/checks.yml", 1, opts)
}) })
}) })
@ -983,11 +992,11 @@ func TestService(t *testing.T) {
} }
t.Run("applies successfully", func(t *testing.T) { t.Run("applies successfully", func(t *testing.T) {
testLabelMappingV2ApplyFn(t, "testdata/dashboard_associates_label.yml", 2, opts) testLabelMappingApplyFn(t, "testdata/dashboard_associates_label.yml", 2, opts)
}) })
t.Run("deletes new label mappings on error", func(t *testing.T) { t.Run("deletes new label mappings on error", func(t *testing.T) {
testLabelMappingV2RollbackFn(t, "testdata/dashboard_associates_label.yml", 1, opts) testLabelMappingRollbackFn(t, "testdata/dashboard_associates_label.yml", 1, opts)
}) })
}) })
@ -1002,11 +1011,11 @@ func TestService(t *testing.T) {
} }
t.Run("applies successfully", func(t *testing.T) { t.Run("applies successfully", func(t *testing.T) {
testLabelMappingV2ApplyFn(t, "testdata/notification_endpoint.yml", 5, opts) testLabelMappingApplyFn(t, "testdata/notification_endpoint.yml", 5, opts)
}) })
t.Run("deletes new label mappings on error", func(t *testing.T) { t.Run("deletes new label mappings on error", func(t *testing.T) {
testLabelMappingV2RollbackFn(t, "testdata/notification_endpoint.yml", 3, opts) testLabelMappingRollbackFn(t, "testdata/notification_endpoint.yml", 3, opts)
}) })
}) })
@ -1023,11 +1032,11 @@ func TestService(t *testing.T) {
} }
t.Run("applies successfully", func(t *testing.T) { t.Run("applies successfully", func(t *testing.T) {
testLabelMappingV2ApplyFn(t, "testdata/notification_rule.yml", 2, opts) testLabelMappingApplyFn(t, "testdata/notification_rule.yml", 2, opts)
}) })
t.Run("deletes new label mappings on error", func(t *testing.T) { t.Run("deletes new label mappings on error", func(t *testing.T) {
testLabelMappingV2RollbackFn(t, "testdata/notification_rule.yml", 1, opts) testLabelMappingRollbackFn(t, "testdata/notification_rule.yml", 1, opts)
}) })
}) })
@ -1055,11 +1064,11 @@ func TestService(t *testing.T) {
} }
t.Run("applies successfully", func(t *testing.T) { t.Run("applies successfully", func(t *testing.T) {
testLabelMappingV2ApplyFn(t, "testdata/tasks.yml", 2, opts) testLabelMappingApplyFn(t, "testdata/tasks.yml", 2, opts)
}) })
t.Run("deletes new label mappings on error", func(t *testing.T) { t.Run("deletes new label mappings on error", func(t *testing.T) {
testLabelMappingV2RollbackFn(t, "testdata/tasks.yml", 1, opts) testLabelMappingRollbackFn(t, "testdata/tasks.yml", 1, opts)
}) })
}) })
@ -1074,11 +1083,11 @@ func TestService(t *testing.T) {
} }
t.Run("applies successfully", func(t *testing.T) { t.Run("applies successfully", func(t *testing.T) {
testLabelMappingV2ApplyFn(t, "testdata/telegraf.yml", 2, opts) testLabelMappingApplyFn(t, "testdata/telegraf.yml", 2, opts)
}) })
t.Run("deletes new label mappings on error", func(t *testing.T) { t.Run("deletes new label mappings on error", func(t *testing.T) {
testLabelMappingV2RollbackFn(t, "testdata/telegraf.yml", 1, opts) testLabelMappingRollbackFn(t, "testdata/telegraf.yml", 1, opts)
}) })
}) })
@ -1093,11 +1102,11 @@ func TestService(t *testing.T) {
} }
t.Run("applies successfully", func(t *testing.T) { t.Run("applies successfully", func(t *testing.T) {
testLabelMappingV2ApplyFn(t, "testdata/variable_associates_label.yml", 1, opt) testLabelMappingApplyFn(t, "testdata/variable_associates_label.yml", 1, opt)
}) })
t.Run("deletes new label mappings on error", func(t *testing.T) { t.Run("deletes new label mappings on error", func(t *testing.T) {
testLabelMappingV2RollbackFn(t, "testdata/variable_associates_label.yml", 0, opt) testLabelMappingRollbackFn(t, "testdata/variable_associates_label.yml", 0, opt)
}) })
}) })
}) })
@ -1115,9 +1124,10 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err) require.NoError(t, err)
sum := impact.Summary
require.Len(t, sum.NotificationEndpoints, 5) require.Len(t, sum.NotificationEndpoints, 5)
containsWithID := func(t *testing.T, name string) { containsWithID := func(t *testing.T, name string) {
@ -1163,7 +1173,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err) require.Error(t, err)
assert.GreaterOrEqual(t, fakeEndpointSVC.DeleteNotificationEndpointCalls.Count(), 3) assert.GreaterOrEqual(t, fakeEndpointSVC.DeleteNotificationEndpointCalls.Count(), 3)
@ -1192,9 +1202,10 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err) require.NoError(t, err)
sum := impact.Summary
require.Len(t, sum.NotificationRules, 1) require.Len(t, sum.NotificationRules, 1)
assert.Equal(t, "rule-uuid", sum.NotificationRules[0].PkgName) assert.Equal(t, "rule-uuid", sum.NotificationRules[0].PkgName)
assert.Equal(t, "rule_0", sum.NotificationRules[0].Name) assert.Equal(t, "rule_0", sum.NotificationRules[0].Name)
@ -1234,7 +1245,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err) require.Error(t, err)
assert.Equal(t, 1, fakeRuleStore.DeleteNotificationRuleCalls.Count()) assert.Equal(t, 1, fakeRuleStore.DeleteNotificationRuleCalls.Count())
@ -1268,9 +1279,10 @@ func TestService(t *testing.T) {
svc := newTestService(WithTaskSVC(fakeTaskSVC)) svc := newTestService(WithTaskSVC(fakeTaskSVC))
sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err) require.NoError(t, err)
sum := impact.Summary
require.Len(t, sum.Tasks, 2) require.Len(t, sum.Tasks, 2)
assert.NotZero(t, sum.Tasks[0].ID) assert.NotZero(t, sum.Tasks[0].ID)
assert.Equal(t, "task-1", sum.Tasks[0].PkgName) assert.Equal(t, "task-1", sum.Tasks[0].PkgName)
@ -1300,7 +1312,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err) require.Error(t, err)
assert.Equal(t, 1, fakeTaskSVC.DeleteTaskCalls.Count()) assert.Equal(t, 1, fakeTaskSVC.DeleteTaskCalls.Count())
@ -1321,9 +1333,10 @@ func TestService(t *testing.T) {
svc := newTestService(WithTelegrafSVC(fakeTeleSVC)) svc := newTestService(WithTelegrafSVC(fakeTeleSVC))
sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err) require.NoError(t, err)
sum := impact.Summary
require.Len(t, sum.TelegrafConfigs, 2) require.Len(t, sum.TelegrafConfigs, 2)
assert.Equal(t, "display name", sum.TelegrafConfigs[0].TelegrafConfig.Name) assert.Equal(t, "display name", sum.TelegrafConfigs[0].TelegrafConfig.Name)
assert.Equal(t, "desc", sum.TelegrafConfigs[0].TelegrafConfig.Description) assert.Equal(t, "desc", sum.TelegrafConfigs[0].TelegrafConfig.Description)
@ -1353,7 +1366,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err) require.Error(t, err)
assert.Equal(t, 1, fakeTeleSVC.DeleteTelegrafConfigCalls.Count()) assert.Equal(t, 1, fakeTeleSVC.DeleteTelegrafConfigCalls.Count())
@ -1374,9 +1387,10 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err) require.NoError(t, err)
sum := impact.Summary
require.Len(t, sum.Variables, 4) require.Len(t, sum.Variables, 4)
expected := sum.Variables[0] expected := sum.Variables[0]
@ -1408,7 +1422,7 @@ func TestService(t *testing.T) {
orgID := influxdb.ID(9000) orgID := influxdb.ID(9000)
_, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) _, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.Error(t, err) require.Error(t, err)
assert.GreaterOrEqual(t, fakeVarSVC.DeleteVariableCalls.Count(), 1) assert.GreaterOrEqual(t, fakeVarSVC.DeleteVariableCalls.Count(), 1)
@ -1449,9 +1463,10 @@ func TestService(t *testing.T) {
svc := newTestService(WithVariableSVC(fakeVarSVC)) svc := newTestService(WithVariableSVC(fakeVarSVC))
sum, _, err := svc.Apply(context.TODO(), orgID, 0, pkg) impact, err := svc.Apply(context.TODO(), orgID, 0, pkg)
require.NoError(t, err) require.NoError(t, err)
sum := impact.Summary
require.Len(t, sum.Variables, 4) require.Len(t, sum.Variables, 4)
expected := sum.Variables[0] expected := sum.Variables[0]
assert.Equal(t, SafeID(1), expected.ID) assert.Equal(t, SafeID(1), expected.ID)
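
The hunks above all migrate the same pattern: tests stop destructuring (sum, diff, err) from DryRun/Apply and instead read impact.Summary and impact.Diff off a single returned value. Below is a minimal sketch of the shape this implies, inferred only from the call sites in this diff — the exact field set of PkgImpactSummary, the helper name applyWithPreview, and the import path are assumptions, not the canonical definitions.

package pkger

import (
	"context"

	"github.com/influxdata/influxdb/v2" // assumed module path; adjust to the repo's actual one
)

// PkgImpactSummary (sketch): bundles what DryRun and Apply previously returned
// as separate (Summary, Diff) values, so the result can grow new fields later
// without another signature change across every call site and middleware.
type PkgImpactSummary struct {
	Diff    Diff
	Summary Summary
}

// applyWithPreview is a hypothetical caller showing the migration: one impact
// value per service call instead of two return values plus an error.
func applyWithPreview(ctx context.Context, svc SVC, orgID, userID influxdb.ID, pkg *Pkg) (Summary, error) {
	dry, err := svc.DryRun(ctx, orgID, userID, pkg)
	if err != nil {
		return Summary{}, err
	}
	_ = dry.Diff // previously the second return value; now read off the impact struct

	applied, err := svc.Apply(ctx, orgID, userID, pkg)
	if err != nil {
		return Summary{}, err
	}
	return applied.Summary, nil
}

Consolidating the returns keeps the SVC interface and its middlewares (tracing, metrics, auth) stable when new result data is added; only the struct grows, as the unchanged pass-through bodies in the tracing middleware below illustrate.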

View File

@ -51,14 +51,14 @@ func (s *traceMW) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (pkg
return s.next.CreatePkg(ctx, setters...) return s.next.CreatePkg(ctx, setters...)
} }
func (s *traceMW) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (sum Summary, diff Diff, err error) { func (s *traceMW) DryRun(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error) {
span, ctx := tracing.StartSpanFromContextWithOperationName(ctx, "DryRun") span, ctx := tracing.StartSpanFromContextWithOperationName(ctx, "DryRun")
span.LogKV("orgID", orgID.String(), "userID", userID.String()) span.LogKV("orgID", orgID.String(), "userID", userID.String())
defer span.Finish() defer span.Finish()
return s.next.DryRun(ctx, orgID, userID, pkg, opts...) return s.next.DryRun(ctx, orgID, userID, pkg, opts...)
} }
func (s *traceMW) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (sum Summary, diff Diff, err error) { func (s *traceMW) Apply(ctx context.Context, orgID, userID influxdb.ID, pkg *Pkg, opts ...ApplyOptFn) (PkgImpactSummary, error) {
span, ctx := tracing.StartSpanFromContextWithOperationName(ctx, "Apply") span, ctx := tracing.StartSpanFromContextWithOperationName(ctx, "Apply")
span.LogKV("orgID", orgID.String(), "userID", userID.String()) span.LogKV("orgID", orgID.String(), "userID", userID.String())
defer span.Finish() defer span.Finish()