From 7be7328c3bd307f779c69c809b24d30808f72b12 Mon Sep 17 00:00:00 2001 From: Alex Boatwright Date: Thu, 30 Apr 2020 16:44:09 -0700 Subject: [PATCH 1/9] feat: frontend consumption of feature flags (#17926) --- ui/src/Logout.tsx | 21 +- ui/src/index.tsx | 471 ++++++++++++++------------ ui/src/mockState.tsx | 1 + ui/src/shared/actions/flags.ts | 38 ++- ui/src/shared/containers/GetFlags.tsx | 53 +++ ui/src/shared/reducers/flags.ts | 18 +- 6 files changed, 372 insertions(+), 230 deletions(-) create mode 100644 ui/src/shared/containers/GetFlags.tsx diff --git a/ui/src/Logout.tsx b/ui/src/Logout.tsx index 1f510d8d72..39b86e9a94 100644 --- a/ui/src/Logout.tsx +++ b/ui/src/Logout.tsx @@ -1,5 +1,6 @@ // Libraries import {FC, useEffect} from 'react' +import {connect} from 'react-redux' import {withRouter, WithRouterProps} from 'react-router' // APIs @@ -10,8 +11,14 @@ import {CLOUD, CLOUD_URL, CLOUD_LOGOUT_PATH} from 'src/shared/constants' // Components import {ErrorHandling} from 'src/shared/decorators/errors' +import {reset} from 'src/shared/actions/flags' -const Logout: FC = ({router}) => { +interface DispatchProps { + resetFeatureFlags: typeof reset +} + +type Props = DispatchProps & WithRouterProps +const Logout: FC = ({router, resetFeatureFlags}) => { const handleSignOut = async () => { if (CLOUD) { window.location.href = `${CLOUD_URL}${CLOUD_LOGOUT_PATH}` @@ -28,9 +35,19 @@ const Logout: FC = ({router}) => { } useEffect(() => { + resetFeatureFlags() handleSignOut() }, []) return null } -export default ErrorHandling(withRouter(Logout)) +const mdtp = { + resetFeatureFlags: reset, +} + +export default ErrorHandling( + connect<{}, DispatchProps>( + null, + mdtp + )(withRouter(Logout)) +) diff --git a/ui/src/index.tsx b/ui/src/index.tsx index 92aca08e9c..cba9886f25 100644 --- a/ui/src/index.tsx +++ b/ui/src/index.tsx @@ -38,6 +38,7 @@ import {MePage} from 'src/me' import NotFound from 'src/shared/components/NotFound' import GetLinks from 'src/shared/containers/GetLinks' 
import GetMe from 'src/shared/containers/GetMe' +import GetFlags from 'src/shared/containers/GetFlags' import UnauthenticatedApp from 'src/shared/containers/UnauthenticatedApp' import TaskExportOverlay from 'src/tasks/components/TaskExportOverlay' import TaskImportOverlay from 'src/tasks/components/TaskImportOverlay' @@ -201,258 +202,278 @@ class Root extends PureComponent { - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - + + + + + + + {!CLOUD && ( + + )} - - - - - - - - - - - - - {!CLOUD && ( - - )} diff --git a/ui/src/mockState.tsx b/ui/src/mockState.tsx index 673d449961..e952852417 100644 --- a/ui/src/mockState.tsx +++ b/ui/src/mockState.tsx @@ -26,6 +26,7 @@ export const localState: LocalStorage = { }, }, flags: { + status: RemoteDataState.Done, original: {}, override: {}, }, diff --git a/ui/src/shared/actions/flags.ts b/ui/src/shared/actions/flags.ts index 34c15f0ffc..ae4cb7c7bf 100644 --- a/ui/src/shared/actions/flags.ts +++ b/ui/src/shared/actions/flags.ts @@ -1,19 +1,33 @@ +import {Dispatch} from 'redux' +import {getFlags as getFlagsRequest} from 'src/client' +import {FlagMap} from 'src/shared/reducers/flags' +import {RemoteDataState} from 'src/types' export const SET_FEATURE_FLAGS = 'SET_FEATURE_FLAGS' +export const RESET_FEATURE_FLAGS = 'RESET_FEATURE_FLAGS' export const CLEAR_FEATURE_FLAG_OVERRIDES = 'CLEAR_FEATURE_FLAG_OVERRIDES' export const SET_FEATURE_FLAG_OVERRIDE = 'SET_FEATURE_FLAG_OVERRIDE' export type Actions = | ReturnType + | ReturnType | ReturnType | ReturnType // NOTE: this doesnt have a type as it will be determined // by the backend at a later time and keeping the format // open for transformations in a bit -export const setFlags = flags => +export const setFlags = 
(status: RemoteDataState, flags?: FlagMap) => ({ type: SET_FEATURE_FLAGS, - payload: flags, + payload: { + status, + flags, + }, + } as const) + +export const reset = () => + ({ + type: RESET_FEATURE_FLAGS, } as const) export const clearOverrides = () => @@ -28,3 +42,23 @@ export const setOverride = (flag: string, value: string | boolean) => [flag]: value, }, } as const) + +export const getFlags = () => async ( + dispatch: Dispatch +): Promise => { + try { + dispatch(setFlags(RemoteDataState.Loading)) + const resp = await getFlagsRequest({}) + + if (resp.status !== 200) { + throw new Error(resp.data.message) + } + + dispatch(setFlags(RemoteDataState.Done, resp.data)) + + return resp.data + } catch (error) { + console.error(error) + dispatch(setFlags(RemoteDataState.Error, null)) + } +} diff --git a/ui/src/shared/containers/GetFlags.tsx b/ui/src/shared/containers/GetFlags.tsx new file mode 100644 index 0000000000..4cc226b46b --- /dev/null +++ b/ui/src/shared/containers/GetFlags.tsx @@ -0,0 +1,53 @@ +// Libraries +import React, {useEffect, FunctionComponent} from 'react' +import {connect} from 'react-redux' + +// Components +import {SpinnerContainer, TechnoSpinner} from '@influxdata/clockface' + +// Types +import {RemoteDataState, AppState} from 'src/types' + +// Actions +import {getFlags as getFlagsAction} from 'src/shared/actions/flags' + +interface PassedInProps { + children: React.ReactElement +} + +interface DispatchProps { + getFlags: typeof getFlagsAction +} + +interface StateProps { + status: RemoteDataState +} + +type Props = StateProps & DispatchProps & PassedInProps + +const GetFlags: FunctionComponent = ({status, getFlags, children}) => { + useEffect(() => { + if (status === RemoteDataState.NotStarted) { + getFlags() + } + }, []) + + return ( + }> + {children && React.cloneElement(children)} + + ) +} + +const mdtp = { + getFlags: getFlagsAction, +} + +const mstp = (state: AppState): StateProps => ({ + status: state.flags.status || 
RemoteDataState.NotStarted, +}) + +export default connect( + mstp, + mdtp +)(GetFlags) diff --git a/ui/src/shared/reducers/flags.ts b/ui/src/shared/reducers/flags.ts index 6fce9525ad..74abae816a 100644 --- a/ui/src/shared/reducers/flags.ts +++ b/ui/src/shared/reducers/flags.ts @@ -1,20 +1,24 @@ import { Actions, SET_FEATURE_FLAGS, + RESET_FEATURE_FLAGS, CLEAR_FEATURE_FLAG_OVERRIDES, SET_FEATURE_FLAG_OVERRIDE, } from 'src/shared/actions/flags' +import {RemoteDataState} from 'src/types' export interface FlagMap { [key: string]: string | boolean } export interface FlagState { + status: RemoteDataState original: FlagMap override: FlagMap } const defaultState: FlagState = { + status: RemoteDataState.NotStarted, original: {}, override: {}, } @@ -22,9 +26,21 @@ const defaultState: FlagState = { export default (state = defaultState, action: Actions): FlagState => { switch (action.type) { case SET_FEATURE_FLAGS: + // just setting the loading state + if (!action.payload.flags) { + return { + ...state, + status: action.payload.status, + } + } return { ...state, - original: action.payload, + status: action.payload.status, + original: action.payload.flags, + } + case RESET_FEATURE_FLAGS: + return { + ...defaultState, } case CLEAR_FEATURE_FLAG_OVERRIDES: return { From 0f09f4d2ab27fbef88cb49d890d9bec6ee6ca675 Mon Sep 17 00:00:00 2001 From: Alex Boatwright Date: Fri, 1 May 2020 10:28:07 -0700 Subject: [PATCH 2/9] fix: safari rendering bug in checks (#17929) --- ui/src/timeMachine/components/Queries.scss | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ui/src/timeMachine/components/Queries.scss b/ui/src/timeMachine/components/Queries.scss index a4c7eaf9f3..fe130552c2 100644 --- a/ui/src/timeMachine/components/Queries.scss +++ b/ui/src/timeMachine/components/Queries.scss @@ -24,7 +24,7 @@ margin-bottom: $cf-marg-a; position: relative; flex: 1 1 auto; - + &::-webkit-scrollbar { height: 0; width: 0; @@ -48,6 +48,7 @@ .time-machine-queries--body { flex-grow: 1; 
position: relative; + height: 100%; } .time-machine-influxql-editor { From 369186d339389cfd79ab3e303687521d640c0ba2 Mon Sep 17 00:00:00 2001 From: Alex Boatwright Date: Fri, 1 May 2020 11:26:36 -0700 Subject: [PATCH 3/9] fix: show buckets in alerting (#17930) --- ui/src/alerting/components/AlertingIndex.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/src/alerting/components/AlertingIndex.tsx b/ui/src/alerting/components/AlertingIndex.tsx index f551cca371..3ec3aeb3fe 100644 --- a/ui/src/alerting/components/AlertingIndex.tsx +++ b/ui/src/alerting/components/AlertingIndex.tsx @@ -55,7 +55,7 @@ const AlertingIndex: FunctionComponent = ({ scrollable={false} className={pageContentsClassName} > - + Date: Fri, 1 May 2020 14:40:32 -0700 Subject: [PATCH 4/9] feat(pkger): add the ability to remove a stack and all its associated resources closes: #17554 --- CHANGELOG.md | 2 + cmd/influx/pkg_test.go | 6 ++ cmd/influxd/launcher/pkger_test.go | 133 ++++++++++++++++++++++++++++- http/swagger.yml | 28 ++++++ pkger/http_remote_service.go | 7 ++ pkger/http_server.go | 60 +++++++++++++ pkger/http_server_test.go | 4 + pkger/service.go | 36 +++++++- pkger/service_auth.go | 8 ++ pkger/service_logging.go | 18 ++++ pkger/service_metrics.go | 5 ++ pkger/service_tracing.go | 6 ++ 12 files changed, 309 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a3c17fdcc5..c95e5c272b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ ### Features +1. [17934](https://github.com/influxdata/influxdb/pull/17934): Add ability to delete a stack and all the resources associated with it + ### Bug Fixes 1. 
[17906](https://github.com/influxdata/influxdb/pull/17906): Ensure UpdateUser cleans up the index when updating names diff --git a/cmd/influx/pkg_test.go b/cmd/influx/pkg_test.go index 1f4ed77ada..b2ba7c27b0 100644 --- a/cmd/influx/pkg_test.go +++ b/cmd/influx/pkg_test.go @@ -708,6 +708,8 @@ type fakePkgSVC struct { applyFn func(ctx context.Context, orgID, userID influxdb.ID, pkg *pkger.Pkg, opts ...pkger.ApplyOptFn) (pkger.Summary, pkger.Diff, error) } +var _ pkger.SVC = (*fakePkgSVC)(nil) + func (f *fakePkgSVC) InitStack(ctx context.Context, userID influxdb.ID, stack pkger.Stack) (pkger.Stack, error) { if f.initStackFn != nil { return f.initStackFn(ctx, userID, stack) @@ -719,6 +721,10 @@ func (f *fakePkgSVC) ListStacks(ctx context.Context, orgID influxdb.ID, filter p panic("not implemented") } +func (f *fakePkgSVC) DeleteStack(ctx context.Context, identifiers struct{ OrgID, UserID, StackID influxdb.ID }) error { + panic("not implemented") +} + func (f *fakePkgSVC) CreatePkg(ctx context.Context, setters ...pkger.CreatePkgSetFn) (*pkger.Pkg, error) { if f.createFn != nil { return f.createFn(ctx, setters...) 
diff --git a/cmd/influxd/launcher/pkger_test.go b/cmd/influxd/launcher/pkger_test.go index c82cc37c9a..8d95e78077 100644 --- a/cmd/influxd/launcher/pkger_test.go +++ b/cmd/influxd/launcher/pkger_test.go @@ -229,6 +229,122 @@ func TestLauncher_Pkger(t *testing.T) { }) }) + t.Run("delete a stack", func(t *testing.T) { + t.Run("should delete the stack and all resources associated with it", func(t *testing.T) { + newStack, err := svc.InitStack(ctx, l.User.ID, pkger.Stack{ + OrgID: l.Org.ID, + }) + require.NoError(t, err) + + newEndpointPkgName := "non_existent_endpoint" + allResourcesPkg := newPkg( + newBucketObject("non_existent_bucket", "", ""), + newCheckDeadmanObject(t, "non_existent_check", "", time.Minute), + newDashObject("non_existent_dash", "", ""), + newEndpointHTTP(newEndpointPkgName, "", ""), + newLabelObject("non_existent_label", "", "", ""), + newRuleObject(t, "non_existent_rule", "", newEndpointPkgName, ""), + newTaskObject("non_existent_task", "", ""), + newTelegrafObject("non_existent_tele", "", ""), + newVariableObject("non_existent_var", "", ""), + ) + + sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, allResourcesPkg, pkger.ApplyWithStackID(newStack.ID)) + require.NoError(t, err) + + require.Len(t, sum.Buckets, 1) + assert.NotZero(t, sum.Buckets[0].ID) + require.Len(t, sum.Checks, 1) + assert.NotZero(t, sum.Checks[0].Check.GetID()) + require.Len(t, sum.Dashboards, 1) + assert.NotZero(t, sum.Dashboards[0].ID) + require.Len(t, sum.Labels, 1) + assert.NotZero(t, sum.Labels[0].ID) + require.Len(t, sum.NotificationEndpoints, 1) + assert.NotZero(t, sum.NotificationEndpoints[0].NotificationEndpoint.GetID()) + require.Len(t, sum.NotificationRules, 1) + assert.NotZero(t, sum.NotificationRules[0].ID) + require.Len(t, sum.Tasks, 1) + assert.NotZero(t, sum.Tasks[0].ID) + require.Len(t, sum.TelegrafConfigs, 1) + assert.NotZero(t, sum.TelegrafConfigs[0].TelegrafConfig.ID) + require.Len(t, sum.Variables, 1) + assert.NotZero(t, sum.Variables[0].ID) + + err = 
svc.DeleteStack(ctx, struct{ OrgID, UserID, StackID influxdb.ID }{ + OrgID: l.Org.ID, + UserID: l.User.ID, + StackID: newStack.ID, + }) + require.NoError(t, err) + + matchingStacks, err := svc.ListStacks(ctx, l.Org.ID, pkger.ListFilter{ + StackIDs: []influxdb.ID{newStack.ID}, + }) + require.NoError(t, err) + require.Empty(t, matchingStacks) + + _, err = resourceCheck.getBucket(t, byID(influxdb.ID(sum.Buckets[0].ID))) + assert.Error(t, err) + + _, err = resourceCheck.getCheck(t, byID(sum.Checks[0].Check.GetID())) + assert.Error(t, err) + + _, err = resourceCheck.getDashboard(t, byID(influxdb.ID(sum.Dashboards[0].ID))) + assert.Error(t, err) + + _, err = resourceCheck.getLabel(t, byID(influxdb.ID(sum.Labels[0].ID))) + assert.Error(t, err) + + _, err = resourceCheck.getEndpoint(t, byID(sum.NotificationEndpoints[0].NotificationEndpoint.GetID())) + assert.Error(t, err) + + _, err = resourceCheck.getRule(t, byID(influxdb.ID(sum.NotificationRules[0].ID))) + assert.Error(t, err) + + _, err = resourceCheck.getTask(t, byID(influxdb.ID(sum.Tasks[0].ID))) + assert.Error(t, err) + + _, err = resourceCheck.getTelegrafConfig(t, byID(sum.TelegrafConfigs[0].TelegrafConfig.ID)) + assert.Error(t, err) + + _, err = resourceCheck.getVariable(t, byID(influxdb.ID(sum.Variables[0].ID))) + assert.Error(t, err) + }) + + t.Run("that has been deleted should be successful", func(t *testing.T) { + newStack, err := svc.InitStack(ctx, l.User.ID, pkger.Stack{ + OrgID: l.Org.ID, + }) + require.NoError(t, err) + + err = svc.DeleteStack(ctx, struct{ OrgID, UserID, StackID influxdb.ID }{ + OrgID: l.Org.ID, + UserID: l.User.ID, + StackID: newStack.ID, + }) + require.NoError(t, err) + + // delete same stack + err = svc.DeleteStack(ctx, struct{ OrgID, UserID, StackID influxdb.ID }{ + OrgID: l.Org.ID, + UserID: l.User.ID, + StackID: newStack.ID, + }) + require.NoError(t, err) + }) + + t.Run("that doesn't exist should be successful", func(t *testing.T) { + // delete stack that doesn't exist + err := 
svc.DeleteStack(ctx, struct{ OrgID, UserID, StackID influxdb.ID }{ + OrgID: l.Org.ID, + UserID: l.User.ID, + StackID: 9000, + }) + require.NoError(t, err) + }) + }) + t.Run("apply with only a stackID succeeds when stack has URLs", func(t *testing.T) { svr := httptest.NewServer(nethttp.HandlerFunc(func(w nethttp.ResponseWriter, r *nethttp.Request) { pkg := newPkg(newBucketObject("bucket_0", "", "")) @@ -2296,6 +2412,12 @@ type ( getResourceOptFn func() getResourceOpt ) +func byID(id influxdb.ID) getResourceOptFn { + return func() getResourceOpt { + return getResourceOpt{id: id} + } +} + func byName(name string) getResourceOptFn { return func() getResourceOpt { return getResourceOpt{name: name} @@ -2507,8 +2629,11 @@ func (r resourceChecker) getLabel(t *testing.T, getOpt getResourceOptFn) (influx default: require.Fail(t, "did not provide any get option") } + if err != nil { + return influxdb.Label{}, err + } - return *label, err + return *label, nil } func (r resourceChecker) mustGetLabel(t *testing.T, getOpt getResourceOptFn) influxdb.Label { @@ -2700,8 +2825,10 @@ func (r resourceChecker) getVariable(t *testing.T, getOpt getResourceOptFn) (inf default: require.Fail(t, "did not provide any get option") } - - return *variable, err + if err != nil { + return influxdb.Variable{}, err + } + return *variable, nil } func (r resourceChecker) mustGetVariable(t *testing.T, getOpt getResourceOptFn) influxdb.Variable { diff --git a/http/swagger.yml b/http/swagger.yml index d21de512a8..a481596f80 100644 --- a/http/swagger.yml +++ b/http/swagger.yml @@ -4510,6 +4510,34 @@ paths: application/json: schema: $ref: "#/components/schemas/Error" + /packages/stacks/{stack_id}: + delete: + operationId: DeleteStack + tags: + - InfluxPackages + summary: Delete a stack and remove all its associated resources + parameters: + - in: path + name: stack_id + required: true + schema: + type: string + description: The stack id to be removed + - in: query + name: orgID + required: true + schema: + 
type: string + description: The organization id of the user + responses: + '204': + description: Stack and all its associated resources are deleted + default: + description: Unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" /tasks: get: operationId: GetTasks diff --git a/pkger/http_remote_service.go b/pkger/http_remote_service.go index 8161d85f0e..67f151aabc 100644 --- a/pkger/http_remote_service.go +++ b/pkger/http_remote_service.go @@ -55,6 +55,13 @@ func (s *HTTPRemoteService) InitStack(ctx context.Context, userID influxdb.ID, s return newStack, nil } +func (s *HTTPRemoteService) DeleteStack(ctx context.Context, identifiers struct{ OrgID, UserID, StackID influxdb.ID }) error { + return s.Client. + Delete(RoutePrefix, "stacks", identifiers.StackID.String()). + QueryParams([2]string{"orgID", identifiers.OrgID.String()}). + Do(ctx) +} + func (s *HTTPRemoteService) ListStacks(ctx context.Context, orgID influxdb.ID, f ListFilter) ([]Stack, error) { queryParams := [][2]string{{"orgID", orgID.String()}} for _, name := range f.Names { diff --git a/pkger/http_server.go b/pkger/http_server.go index 7be0749a24..785ac86513 100644 --- a/pkger/http_server.go +++ b/pkger/http_server.go @@ -48,11 +48,14 @@ func NewHTTPServer(log *zap.Logger, svc SVC) *HTTPServer { { r.With(middleware.AllowContentType("text/yml", "application/x-yaml", "application/json")). Post("/", svr.createPkg) + r.With(middleware.SetHeader("Content-Type", "application/json; charset=utf-8")). 
Post("/apply", svr.applyPkg) + r.Route("/stacks", func(r chi.Router) { r.Post("/", svr.createStack) r.Get("/", svr.listStacks) + r.Delete("/{stack_id}", svr.deleteStack) }) } @@ -203,6 +206,63 @@ func (s *HTTPServer) createStack(w http.ResponseWriter, r *http.Request) { }) } +func (s *HTTPServer) deleteStack(w http.ResponseWriter, r *http.Request) { + orgID, err := getRequiredOrgIDFromQuery(r.URL.Query()) + if err != nil { + s.api.Err(w, err) + return + } + + stackID, err := influxdb.IDFromString(chi.URLParam(r, "stack_id")) + if err != nil { + s.api.Err(w, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "the stack id provided in the path was invalid", + Err: err, + }) + return + } + + auth, err := pctx.GetAuthorizer(r.Context()) + if err != nil { + s.api.Err(w, err) + return + } + userID := auth.GetUserID() + + err = s.svc.DeleteStack(r.Context(), struct{ OrgID, UserID, StackID influxdb.ID }{ + OrgID: orgID, + UserID: userID, + StackID: *stackID, + }) + if err != nil { + s.api.Err(w, err) + return + } + + s.api.Respond(w, http.StatusNoContent, nil) +} + +func getRequiredOrgIDFromQuery(q url.Values) (influxdb.ID, error) { + orgIDRaw := q.Get("orgID") + if orgIDRaw == "" { + return 0, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "the orgID query param is required", + } + } + + orgID, err := influxdb.IDFromString(orgIDRaw) + if err != nil { + return 0, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "the orgID query param was invalid", + Err: err, + } + } + return *orgID, nil +} + // ReqCreateOrgIDOpt provides options to export resources by organization id. 
type ReqCreateOrgIDOpt struct { OrgID string `json:"orgID"` diff --git a/pkger/http_server_test.go b/pkger/http_server_test.go index a396ee15c9..aa7f685cbe 100644 --- a/pkger/http_server_test.go +++ b/pkger/http_server_test.go @@ -837,6 +837,10 @@ func (f *fakeSVC) InitStack(ctx context.Context, userID influxdb.ID, stack pkger return f.initStack(ctx, userID, stack) } +func (f *fakeSVC) DeleteStack(ctx context.Context, identifiers struct{ OrgID, UserID, StackID influxdb.ID }) error { + panic("not implemented yet") +} + func (f *fakeSVC) ListStacks(ctx context.Context, orgID influxdb.ID, filter pkger.ListFilter) ([]pkger.Stack, error) { if f.listStacksFn == nil { panic("not implemented") diff --git a/pkger/service.go b/pkger/service.go index a6939c283e..06b99bbbfd 100644 --- a/pkger/service.go +++ b/pkger/service.go @@ -59,6 +59,7 @@ const ResourceTypeStack influxdb.ResourceType = "stack" // SVC is the packages service interface. type SVC interface { InitStack(ctx context.Context, userID influxdb.ID, stack Stack) (Stack, error) + DeleteStack(ctx context.Context, identifiers struct{ OrgID, UserID, StackID influxdb.ID }) error ListStacks(ctx context.Context, orgID influxdb.ID, filter ListFilter) ([]Stack, error) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (*Pkg, error) @@ -298,6 +299,39 @@ func (s *Service) InitStack(ctx context.Context, userID influxdb.ID, stack Stack return stack, nil } +// DeleteStack removes a stack and all the resources that have are associated with the stack. 
+func (s *Service) DeleteStack(ctx context.Context, identifiers struct{ OrgID, UserID, StackID influxdb.ID }) (e error) { + stack, err := s.store.ReadStackByID(ctx, identifiers.StackID) + if influxdb.ErrorCode(err) == influxdb.ENotFound { + return nil + } + if err != nil { + return err + } + if stack.OrgID != identifiers.OrgID { + return &influxdb.Error{ + Code: influxdb.EConflict, + Msg: "you do not have access to given stack ID", + } + } + + // providing empty Pkg will remove all applied resources + state, err := s.dryRun(ctx, identifiers.OrgID, new(Pkg), applyOptFromOptFns(ApplyWithStackID(identifiers.StackID))) + if err != nil { + return err + } + + coordinator := &rollbackCoordinator{sem: make(chan struct{}, s.applyReqLimit)} + defer coordinator.rollback(s.log, &e, identifiers.OrgID) + + err = s.applyState(ctx, coordinator, identifiers.OrgID, identifiers.UserID, state, nil) + if err != nil { + return err + } + + return s.store.DeleteStack(ctx, identifiers.StackID) +} + // ListFilter are filter options for filtering stacks from being returned. type ListFilter struct { StackIDs []influxdb.ID @@ -685,7 +719,7 @@ func (s *Service) dryRun(ctx context.Context, orgID influxdb.ID, pkg *Pkg, opt A // will be skipped, and won't bleed into the dry run here. We can now return // a error (parseErr) and valid diff/summary. 
var parseErr error - err := pkg.Validate() + err := pkg.Validate(ValidWithoutResources()) if err != nil && !IsParseErr(err) { return nil, internalErr(err) } diff --git a/pkger/service_auth.go b/pkger/service_auth.go index f871672a4a..38f2200951 100644 --- a/pkger/service_auth.go +++ b/pkger/service_auth.go @@ -36,6 +36,14 @@ func (s *authMW) InitStack(ctx context.Context, userID influxdb.ID, newStack Sta return s.next.InitStack(ctx, userID, newStack) } +func (s *authMW) DeleteStack(ctx context.Context, identifiers struct{ OrgID, UserID, StackID influxdb.ID }) error { + err := s.authAgent.IsWritable(ctx, identifiers.OrgID, ResourceTypeStack) + if err != nil { + return err + } + return s.next.DeleteStack(ctx, identifiers) +} + func (s *authMW) ListStacks(ctx context.Context, orgID influxdb.ID, f ListFilter) ([]Stack, error) { err := s.authAgent.OrgPermissions(ctx, orgID, influxdb.ReadAction) if err != nil { diff --git a/pkger/service_logging.go b/pkger/service_logging.go index 65cb1edfec..8acfd332de 100644 --- a/pkger/service_logging.go +++ b/pkger/service_logging.go @@ -43,6 +43,24 @@ func (s *loggingMW) InitStack(ctx context.Context, userID influxdb.ID, newStack return s.next.InitStack(ctx, userID, newStack) } +func (s *loggingMW) DeleteStack(ctx context.Context, identifiers struct{ OrgID, UserID, StackID influxdb.ID }) (err error) { + defer func(start time.Time) { + if err == nil { + return + } + + s.logger.Error( + "failed to delete stack", + zap.Error(err), + zap.Stringer("orgID", identifiers.OrgID), + zap.Stringer("userID", identifiers.OrgID), + zap.Stringer("stackID", identifiers.StackID), + zap.Duration("took", time.Since(start)), + ) + }(time.Now()) + return s.next.DeleteStack(ctx, identifiers) +} + func (s *loggingMW) ListStacks(ctx context.Context, orgID influxdb.ID, f ListFilter) (stacks []Stack, err error) { defer func(start time.Time) { if err == nil { diff --git a/pkger/service_metrics.go b/pkger/service_metrics.go index e573b6ed59..e4a9a66a37 100644 
--- a/pkger/service_metrics.go +++ b/pkger/service_metrics.go @@ -33,6 +33,11 @@ func (s *mwMetrics) InitStack(ctx context.Context, userID influxdb.ID, newStack return stack, rec(err) } +func (s *mwMetrics) DeleteStack(ctx context.Context, identifiers struct{ OrgID, UserID, StackID influxdb.ID }) error { + rec := s.rec.Record("delete_stack") + return rec(s.next.DeleteStack(ctx, identifiers)) +} + func (s *mwMetrics) ListStacks(ctx context.Context, orgID influxdb.ID, f ListFilter) ([]Stack, error) { rec := s.rec.Record("list_stacks") stacks, err := s.next.ListStacks(ctx, orgID, f) diff --git a/pkger/service_tracing.go b/pkger/service_tracing.go index c57fc8aa32..51de1104f6 100644 --- a/pkger/service_tracing.go +++ b/pkger/service_tracing.go @@ -27,6 +27,12 @@ func (s *traceMW) InitStack(ctx context.Context, userID influxdb.ID, newStack St return s.next.InitStack(ctx, userID, newStack) } +func (s *traceMW) DeleteStack(ctx context.Context, identifiers struct{ OrgID, UserID, StackID influxdb.ID }) error { + span, ctx := tracing.StartSpanFromContextWithOperationName(ctx, "DeleteStack") + defer span.Finish() + return s.next.DeleteStack(ctx, identifiers) +} + func (s *traceMW) ListStacks(ctx context.Context, orgID influxdb.ID, f ListFilter) ([]Stack, error) { span, ctx := tracing.StartSpanFromContextWithOperationName(ctx, "ListStacks") defer span.Finish() From 35ed5734bb2ca7bf625028930de8b38268077143 Mon Sep 17 00:00:00 2001 From: Johnny Steenbergen Date: Fri, 1 May 2020 17:19:57 -0700 Subject: [PATCH 5/9] chore(httpc): refactor inputs to eliminate required path the base address should be enough to make a request. All requests are now valid without a path provided. This will be true as long as the address is valid. 
--- pkg/httpc/client.go | 43 +++++++++++++++++++++---------------------- 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/pkg/httpc/client.go b/pkg/httpc/client.go index a28123f73a..c4f94e1a3d 100644 --- a/pkg/httpc/client.go +++ b/pkg/httpc/client.go @@ -76,51 +76,51 @@ func New(opts ...ClientOptFn) (*Client, error) { } // Delete generates a DELETE request. -func (c *Client) Delete(urlPath string, rest ...string) *Req { - return c.Req(http.MethodDelete, nil, urlPath, rest...) +func (c *Client) Delete(urlPath ...string) *Req { + return c.Req(http.MethodDelete, nil, urlPath...) } // Get generates a GET request. -func (c *Client) Get(urlPath string, rest ...string) *Req { - return c.Req(http.MethodGet, nil, urlPath, rest...) +func (c *Client) Get(urlPath ...string) *Req { + return c.Req(http.MethodGet, nil, urlPath...) } // Patch generates a PATCH request. -func (c *Client) Patch(bFn BodyFn, urlPath string, rest ...string) *Req { - return c.Req(http.MethodPatch, bFn, urlPath, rest...) +func (c *Client) Patch(bFn BodyFn, urlPath ...string) *Req { + return c.Req(http.MethodPatch, bFn, urlPath...) } // PatchJSON generates a PATCH request. This is to be used with value or pointer to value type. // Providing a stream/reader will result in disappointment. -func (c *Client) PatchJSON(v interface{}, urlPath string, rest ...string) *Req { - return c.Patch(BodyJSON(v), urlPath, rest...) +func (c *Client) PatchJSON(v interface{}, urlPath ...string) *Req { + return c.Patch(BodyJSON(v), urlPath...) } // Post generates a POST request. -func (c *Client) Post(bFn BodyFn, urlPath string, rest ...string) *Req { - return c.Req(http.MethodPost, bFn, urlPath, rest...) +func (c *Client) Post(bFn BodyFn, urlPath ...string) *Req { + return c.Req(http.MethodPost, bFn, urlPath...) } // PostJSON generates a POST request and json encodes the body. This is to be // used with value or pointer to value type. Providing a stream/reader will result // in disappointment. 
-func (c *Client) PostJSON(v interface{}, urlPath string, rest ...string) *Req { - return c.Post(BodyJSON(v), urlPath, rest...) +func (c *Client) PostJSON(v interface{}, urlPath ...string) *Req { + return c.Post(BodyJSON(v), urlPath...) } // Put generates a PUT request. -func (c *Client) Put(bFn BodyFn, urlPath string, rest ...string) *Req { - return c.Req(http.MethodPut, bFn, urlPath, rest...) +func (c *Client) Put(bFn BodyFn, urlPath ...string) *Req { + return c.Req(http.MethodPut, bFn, urlPath...) } // PutJSON generates a PUT request. This is to be used with value or pointer to value type. // Providing a stream/reader will result in disappointment. -func (c *Client) PutJSON(v interface{}, urlPath string, rest ...string) *Req { - return c.Put(BodyJSON(v), urlPath, rest...) +func (c *Client) PutJSON(v interface{}, urlPath ...string) *Req { + return c.Put(BodyJSON(v), urlPath...) } // Req constructs a request. -func (c *Client) Req(method string, bFn BodyFn, urlPath string, rest ...string) *Req { +func (c *Client) Req(method string, bFn BodyFn, urlPath ...string) *Req { bodyF := BodyEmpty if bFn != nil { bodyF = bFn @@ -165,7 +165,7 @@ func (c *Client) Req(method string, bFn BodyFn, urlPath string, rest ...string) body = &buf } - req, err := http.NewRequest(method, c.buildURL(urlPath, rest...), body) + req, err := http.NewRequest(method, c.buildURL(urlPath...), body) if err != nil { return &Req{err: err} } @@ -205,11 +205,10 @@ func (c *Client) Clone(opts ...ClientOptFn) (*Client, error) { return New(append(existingOpts, opts...)...) 
} -func (c *Client) buildURL(urlPath string, rest ...string) string { +func (c *Client) buildURL(urlPath ...string) string { u := c.addr - u.Path = path.Join(u.Path, urlPath) - if len(rest) > 0 { - u.Path = path.Join(u.Path, path.Join(rest...)) + if len(urlPath) > 0 { + u.Path = path.Join(u.Path, path.Join(urlPath...)) } return u.String() } From 2eb70ee041953be3b65ec5eb570edca8dc6e4c92 Mon Sep 17 00:00:00 2001 From: Ariel Salem Date: Mon, 4 May 2020 05:00:28 -0700 Subject: [PATCH 6/9] fix(zero_value_checks): checks can now have a value set to 0 (#17933) --- CHANGELOG.md | 1 + notification/check/threshold.go | 4 ++-- ui/cypress/e2e/checks.test.ts | 22 +++++++++++++++++++ .../components/builder/AlertBuilder.scss | 17 +++++++++++--- 4 files changed, 39 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c95e5c272b..814b35ead4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ ### Bug Fixes 1. [17906](https://github.com/influxdata/influxdb/pull/17906): Ensure UpdateUser cleans up the index when updating names +1. [17933](https://github.com/influxdata/influxdb/pull/17933): Ensure Checks can be set for zero values ### UI Improvements diff --git a/notification/check/threshold.go b/notification/check/threshold.go index b669d146e7..ab68b9f2c9 100644 --- a/notification/check/threshold.go +++ b/notification/check/threshold.go @@ -380,7 +380,7 @@ func (b ThresholdConfigBase) GetLevel() notification.CheckLevel { // Lesser threshold type. type Lesser struct { ThresholdConfigBase - Value float64 `json:"value,omitempty"` + Value float64 `json:"value"` } // Type of the threshold config. @@ -404,7 +404,7 @@ func (td Lesser) MarshalJSON() ([]byte, error) { // Greater threshold type. type Greater struct { ThresholdConfigBase - Value float64 `json:"value,omitempty"` + Value float64 `json:"value"` } // Type of the threshold config. 
diff --git a/ui/cypress/e2e/checks.test.ts b/ui/cypress/e2e/checks.test.ts index b369be66ed..54bfc42723 100644 --- a/ui/cypress/e2e/checks.test.ts +++ b/ui/cypress/e2e/checks.test.ts @@ -80,6 +80,9 @@ describe('Checks', () => { cy.getByTestID('save-cell--button').should('be.disabled') cy.getByTestID('checkeo--header alerting-tab').click() cy.getByTestID('add-threshold-condition-WARN').click() + cy.getByTestID('input-field') + .clear() + .type('0') cy.getByTestID('save-cell--button').click() cy.getByTestID('check-card').should('have.length', 1) cy.getByTestID('notification-error').should('not.exist') @@ -104,6 +107,25 @@ describe('Checks', () => { }) }) + it('should allow created checks edited checks to persist changes (especially if the value is 0)', () => { + const checkName = 'Check it out!' + // Selects the check to edit + cy.getByTestID('check-card--name').should('have.length', 1) + cy.getByTestID('check-card--name').click() + // ensures that the check WARN value is set to 0 + cy.getByTestID('input-field') + .should('have.value', '0') + .clear() + .type('7') + // renames the check + cy.getByTestID('page-title') + .contains('Name this Check') + .type(checkName) + cy.getByTestID('save-cell--button').click() + // checks that the values persisted + cy.getByTestID('check-card--name').contains(checkName) + }) + it('can edit the check card', () => { // toggle on / off cy.get('.cf-resource-card__disabled').should('not.exist') diff --git a/ui/src/alerting/components/builder/AlertBuilder.scss b/ui/src/alerting/components/builder/AlertBuilder.scss index ef23a176e4..a855f9bee6 100644 --- a/ui/src/alerting/components/builder/AlertBuilder.scss +++ b/ui/src/alerting/components/builder/AlertBuilder.scss @@ -25,13 +25,24 @@ } &.alert-builder--meta-card { - flex: 1 0 280px !important; + flex: 1 0 140px !important; } &.alert-builder--message-card { - flex: 3 0 320px !important; + flex: 3 0 140px !important; } &.alert-builder--conditions-card { - flex: 2 0 320px !important; + 
flex: 2 0 200px !important; + } + @media screen and (min-width: $cf-grid--breakpoint-md) { + &.alert-builder--meta-card { + flex: 1 0 280px !important; + } + &.alert-builder--message-card { + flex: 3 0 320px !important; + } + &.alert-builder--conditions-card { + flex: 2 0 320px !important; + } } } From 784b222d4dc4df96f84fa3a29e7511a501dc5f00 Mon Sep 17 00:00:00 2001 From: Johnny Steenbergen Date: Sun, 3 May 2020 10:34:24 -0700 Subject: [PATCH 7/9] feat(pkger): enforce metadata.name dns name compliance this PR includes a lot of small changes to names in existing test pkgs. the tests are updated to follow suit. closes: #17940 --- CHANGELOG.md | 1 + cmd/influx/pkg_test.go | 34 +- cmd/influxd/launcher/pkger_test.go | 317 ++++----- pkger/clone_resource.go | 6 +- pkger/http_server_test.go | 8 +- pkger/internal/wordplay/wordplay.go | 2 +- pkger/models_test.go | 4 +- pkger/parser.go | 62 +- pkger/parser_models.go | 19 +- pkger/parser_test.go | 622 +++++++++--------- pkger/service_test.go | 147 ++--- pkger/testdata/bucket.json | 4 +- pkger/testdata/bucket.yml | 4 +- pkger/testdata/bucket_associates_label.json | 18 +- pkger/testdata/bucket_associates_label.yml | 18 +- .../testdata/bucket_associates_labels.jsonnet | 10 +- pkger/testdata/checks.json | 10 +- pkger/testdata/checks.yml | 10 +- pkger/testdata/dashboard.json | 4 +- pkger/testdata/dashboard.yml | 4 +- .../testdata/dashboard_associates_label.json | 10 +- pkger/testdata/dashboard_associates_label.yml | 10 +- pkger/testdata/dashboard_gauge.json | 2 +- pkger/testdata/dashboard_gauge.yml | 2 +- pkger/testdata/dashboard_heatmap.json | 2 +- pkger/testdata/dashboard_heatmap.yml | 2 +- pkger/testdata/dashboard_histogram.json | 2 +- pkger/testdata/dashboard_histogram.yml | 2 +- pkger/testdata/dashboard_markdown.json | 2 +- pkger/testdata/dashboard_markdown.yml | 2 +- pkger/testdata/dashboard_scatter.json | 2 +- pkger/testdata/dashboard_scatter.yml | 2 +- .../dashboard_single_stat_plus_line.json | 2 +- 
.../dashboard_single_stat_plus_line.yml | 2 +- pkger/testdata/dashboard_table.json | 2 +- pkger/testdata/dashboard_table.yml | 2 +- pkger/testdata/dashboard_xy.json | 2 +- pkger/testdata/dashboard_xy.yml | 2 +- pkger/testdata/label.json | 6 +- pkger/testdata/label.yml | 6 +- pkger/testdata/notification_endpoint.json | 22 +- pkger/testdata/notification_endpoint.yml | 22 +- .../notification_endpoint_secrets.yml | 2 +- pkger/testdata/notification_rule.json | 14 +- pkger/testdata/notification_rule.yml | 14 +- pkger/testdata/remote_bucket.json | 2 +- pkger/testdata/tasks.json | 12 +- pkger/testdata/tasks.yml | 12 +- pkger/testdata/telegraf.json | 12 +- pkger/testdata/telegraf.yml | 12 +- pkger/testdata/variable_associates_label.yml | 6 +- pkger/testdata/variables.json | 14 +- pkger/testdata/variables.yml | 14 +- 53 files changed, 790 insertions(+), 736 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 814b35ead4..3f1135c8fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ ### Features 1. [17934](https://github.com/influxdata/influxdb/pull/17934): Add ability to delete a stack and all the resources associated with it +1. 
[17941](https://github.com/influxdata/influxdb/pull/17941): Enforce DNS name compliance on all pkger resources' metadata.name field ### Bug Fixes diff --git a/cmd/influx/pkg_test.go b/cmd/influx/pkg_test.go index b2ba7c27b0..75bea893a3 100644 --- a/cmd/influx/pkg_test.go +++ b/cmd/influx/pkg_test.go @@ -135,7 +135,7 @@ func TestCmdPkg(t *testing.T) { sum := pkg.Summary() require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "Dashboard", sum.Dashboards[0].Name) + assert.Equal(t, "dashboard", sum.Dashboards[0].Name) }, }, { @@ -153,9 +153,9 @@ func TestCmdPkg(t *testing.T) { sum := pkg.Summary() require.Len(t, sum.Buckets, 1) - assert.Equal(t, "Bucket", sum.Buckets[0].Name) + assert.Equal(t, "bucket", sum.Buckets[0].Name) require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "Dashboard", sum.Dashboards[0].Name) + assert.Equal(t, "dashboard", sum.Dashboards[0].Name) }, }, { @@ -176,9 +176,9 @@ func TestCmdPkg(t *testing.T) { require.Len(t, sum.Labels, 1) assert.Equal(t, "foo", sum.Labels[0].Name) require.Len(t, sum.Buckets, 1) - assert.Equal(t, "Bucket", sum.Buckets[0].Name) + assert.Equal(t, "bucket", sum.Buckets[0].Name) require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "Dashboard", sum.Dashboards[0].Name) + assert.Equal(t, "dashboard", sum.Dashboards[0].Name) }, }, } @@ -212,7 +212,7 @@ func TestCmdPkg(t *testing.T) { APIVersion: pkger.APIVersion, Kind: k, Metadata: pkger.Resource{ - "name": k.String(), + "name": strings.ToLower(k.String()), }, }) } @@ -335,7 +335,7 @@ func TestCmdPkg(t *testing.T) { if rc.Kind == pkger.KindNotificationEndpoint { rc.Kind = pkger.KindNotificationEndpointHTTP } - name := rc.Kind.String() + strconv.Itoa(int(rc.ID)) + name := strings.ToLower(rc.Kind.String()) + strconv.Itoa(int(rc.ID)) pkg.Objects = append(pkg.Objects, pkger.Object{ APIVersion: pkger.APIVersion, Kind: rc.Kind, @@ -366,45 +366,49 @@ func TestCmdPkg(t *testing.T) { testPkgWrites(t, cmdFn, tt.pkgFileArgs, func(t *testing.T, pkg *pkger.Pkg) { sum := pkg.Summary() + 
kindToName := func(k pkger.Kind, id influxdb.ID) string { + return strings.ToLower(k.String()) + strconv.Itoa(int(id)) + } + require.Len(t, sum.Buckets, len(tt.bucketIDs)) for i, id := range tt.bucketIDs { actual := sum.Buckets[i] - assert.Equal(t, pkger.KindBucket.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindBucket, id), actual.Name) } require.Len(t, sum.Dashboards, len(tt.dashIDs)) for i, id := range tt.dashIDs { actual := sum.Dashboards[i] - assert.Equal(t, pkger.KindDashboard.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindDashboard, id), actual.Name) } require.Len(t, sum.NotificationEndpoints, len(tt.endpointIDs)) for i, id := range tt.endpointIDs { actual := sum.NotificationEndpoints[i] - assert.Equal(t, pkger.KindNotificationEndpointHTTP.String()+strconv.Itoa(int(id)), actual.NotificationEndpoint.GetName()) + assert.Equal(t, kindToName(pkger.KindNotificationEndpointHTTP, id), actual.NotificationEndpoint.GetName()) } require.Len(t, sum.Labels, len(tt.labelIDs)) for i, id := range tt.labelIDs { actual := sum.Labels[i] - assert.Equal(t, pkger.KindLabel.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindLabel, id), actual.Name) } require.Len(t, sum.NotificationRules, len(tt.ruleIDs)) for i, id := range tt.ruleIDs { actual := sum.NotificationRules[i] - assert.Equal(t, pkger.KindNotificationRule.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindNotificationRule, id), actual.Name) } require.Len(t, sum.Tasks, len(tt.taskIDs)) for i, id := range tt.taskIDs { actual := sum.Tasks[i] - assert.Equal(t, pkger.KindTask.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindTask, id), actual.Name) } require.Len(t, sum.TelegrafConfigs, len(tt.telegrafIDs)) for i, id := range tt.telegrafIDs { actual := sum.TelegrafConfigs[i] - assert.Equal(t, pkger.KindTelegraf.String()+strconv.Itoa(int(id)), 
actual.TelegrafConfig.Name) + assert.Equal(t, kindToName(pkger.KindTelegraf, id), actual.TelegrafConfig.Name) } require.Len(t, sum.Variables, len(tt.varIDs)) for i, id := range tt.varIDs { actual := sum.Variables[i] - assert.Equal(t, pkger.KindVariable.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindVariable, id), actual.Name) } }) } diff --git a/cmd/influxd/launcher/pkger_test.go b/cmd/influxd/launcher/pkger_test.go index 8d95e78077..03617ca5ba 100644 --- a/cmd/influxd/launcher/pkger_test.go +++ b/cmd/influxd/launcher/pkger_test.go @@ -236,17 +236,17 @@ func TestLauncher_Pkger(t *testing.T) { }) require.NoError(t, err) - newEndpointPkgName := "non_existent_endpoint" + newEndpointPkgName := "non-existent-endpoint" allResourcesPkg := newPkg( - newBucketObject("non_existent_bucket", "", ""), - newCheckDeadmanObject(t, "non_existent_check", "", time.Minute), - newDashObject("non_existent_dash", "", ""), + newBucketObject("non-existent-bucket", "", ""), + newCheckDeadmanObject(t, "non-existent-check", "", time.Minute), + newDashObject("non-existent-dash", "", ""), newEndpointHTTP(newEndpointPkgName, "", ""), - newLabelObject("non_existent_label", "", "", ""), - newRuleObject(t, "non_existent_rule", "", newEndpointPkgName, ""), - newTaskObject("non_existent_task", "", ""), - newTelegrafObject("non_existent_tele", "", ""), - newVariableObject("non_existent_var", "", ""), + newLabelObject("non-existent-label", "", "", ""), + newRuleObject(t, "non-existent-rule", "", newEndpointPkgName, ""), + newTaskObject("non-existent-task", "", ""), + newTelegrafObject("non-existent-tele", "", ""), + newVariableObject("non-existent-var", "", ""), ) sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, allResourcesPkg, pkger.ApplyWithStackID(newStack.ID)) @@ -347,7 +347,7 @@ func TestLauncher_Pkger(t *testing.T) { t.Run("apply with only a stackID succeeds when stack has URLs", func(t *testing.T) { svr := httptest.NewServer(nethttp.HandlerFunc(func(w 
nethttp.ResponseWriter, r *nethttp.Request) { - pkg := newPkg(newBucketObject("bucket_0", "", "")) + pkg := newPkg(newBucketObject("bucket-0", "", "")) b, err := pkg.Encode(pkger.EncodingJSON) if err != nil { w.WriteHeader(nethttp.StatusInternalServerError) @@ -361,7 +361,7 @@ func TestLauncher_Pkger(t *testing.T) { require.NoError(t, err) defer f.Close() - pkg := newPkg(newBucketObject("bucket_1", "", "")) + pkg := newPkg(newBucketObject("bucket-1", "", "")) b, err := pkg.Encode(pkger.EncodingYAML) require.NoError(t, err) f.Write(b) @@ -387,10 +387,10 @@ func TestLauncher_Pkger(t *testing.T) { sumEquals := func(t *testing.T, sum pkger.Summary) { t.Helper() require.Len(t, sum.Buckets, 2) - assert.Equal(t, "bucket_0", sum.Buckets[0].PkgName) - assert.Equal(t, "bucket_0", sum.Buckets[0].Name) - assert.Equal(t, "bucket_1", sum.Buckets[1].PkgName) - assert.Equal(t, "bucket_1", sum.Buckets[1].Name) + assert.Equal(t, "bucket-0", sum.Buckets[0].PkgName) + assert.Equal(t, "bucket-0", sum.Buckets[0].Name) + assert.Equal(t, "bucket-1", sum.Buckets[1].PkgName) + assert.Equal(t, "bucket-1", sum.Buckets[1].Name) } sum, _, err := svc.DryRun(ctx, l.Org.ID, l.User.ID, nil, pkger.ApplyWithStackID(newStack.ID)) @@ -414,23 +414,23 @@ func TestLauncher_Pkger(t *testing.T) { require.Len(t, sum.Buckets, 1) assert.NotZero(t, sum.Buckets[0].ID) - assert.Equal(t, "bucket_0", sum.Buckets[0].Name) + assert.Equal(t, "bucket", sum.Buckets[0].Name) require.Len(t, sum.Checks, 1) assert.NotZero(t, sum.Checks[0].Check.GetID()) - assert.Equal(t, "check_0", sum.Checks[0].Check.GetName()) + assert.Equal(t, "check-0", sum.Checks[0].Check.GetName()) require.Len(t, sum.Dashboards, 1) assert.NotZero(t, sum.Dashboards[0].ID) - assert.Equal(t, "dash_0", sum.Dashboards[0].Name) + assert.Equal(t, "dash-0", sum.Dashboards[0].Name) require.Len(t, sum.NotificationEndpoints, 1) assert.NotZero(t, sum.NotificationEndpoints[0].NotificationEndpoint.GetID()) - assert.Equal(t, "endpoint_0", 
sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) + assert.Equal(t, "endpoint-0", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) require.Len(t, sum.NotificationRules, 1) assert.NotZero(t, sum.NotificationRules[0].ID) - assert.Equal(t, "rule_0", sum.NotificationRules[0].Name) + assert.Equal(t, "rule-0", sum.NotificationRules[0].Name) require.Len(t, sum.Labels, 1) assert.NotZero(t, sum.Labels[0].ID) @@ -438,11 +438,11 @@ func TestLauncher_Pkger(t *testing.T) { require.Len(t, sum.Tasks, 1) assert.NotZero(t, sum.Tasks[0].ID) - assert.Equal(t, "task_0", sum.Tasks[0].Name) + assert.Equal(t, "task-0", sum.Tasks[0].Name) require.Len(t, sum.TelegrafConfigs, 1) assert.NotZero(t, sum.TelegrafConfigs[0].TelegrafConfig.ID) - assert.Equal(t, "tele_0", sum.TelegrafConfigs[0].TelegrafConfig.Name) + assert.Equal(t, "tele-0", sum.TelegrafConfigs[0].TelegrafConfig.Name) resources := []struct { resID influxdb.ID @@ -471,17 +471,17 @@ func TestLauncher_Pkger(t *testing.T) { newObjectsFn := func() []pkger.Object { return []pkger.Object{ - newBucketObject("bucket", "bucket_0", ""), - newCheckDeadmanObject(t, "check_0", "", time.Hour), - newDashObject("dash_0", "", ""), - newEndpointHTTP("endpoint_0", "", ""), - newRuleObject(t, "rule_0", "", "endpoint_0", ""), - newTaskObject("task_0", "", ""), - newTelegrafObject("tele_0", "", ""), - newVariableObject("var_0", "", ""), + newBucketObject("bucket", "", ""), + newCheckDeadmanObject(t, "check-0", "", time.Hour), + newDashObject("dash-0", "", ""), + newEndpointHTTP("endpoint-0", "", ""), + newRuleObject(t, "rule-0", "", "endpoint-0", ""), + newTaskObject("task-0", "", ""), + newTelegrafObject("tele-0", "", ""), + newVariableObject("var-0", "", ""), } } - labelObj := newLabelObject("label_1", "label 1", "", "") + labelObj := newLabelObject("label-1", "label 1", "", "") stack, err := svc.InitStack(ctx, l.User.ID, pkger.Stack{ OrgID: l.Org.ID, @@ -628,15 +628,15 @@ func TestLauncher_Pkger(t *testing.T) { applyOpt := 
pkger.ApplyWithStackID(stack.ID) var ( - initialBucketPkgName = "rucketeer_1" + initialBucketPkgName = "rucketeer-1" initialCheckPkgName = "checkers" - initialDashPkgName = "dash_of_salt" + initialDashPkgName = "dash-of-salt" initialEndpointPkgName = "endzo" initialLabelPkgName = "labelino" - initialRulePkgName = "oh_doyle_rules" + initialRulePkgName = "oh-doyle-rules" initialTaskPkgName = "tap" initialTelegrafPkgName = "teletype" - initialVariablePkgName = "laces out dan" + initialVariablePkgName = "laces-out-dan" ) initialPkg := newPkg( newBucketObject(initialBucketPkgName, "display name", "init desc"), @@ -851,21 +851,21 @@ func TestLauncher_Pkger(t *testing.T) { ) svc = pkger.MWLogging(logger)(svc) - endpointPkgName := "z_endpoint_rolls_back" + endpointPkgName := "z-endpoint-rolls-back" pkgWithDelete := newPkg( - newBucketObject("z_roll_me_back", "", ""), - newBucketObject("z_rolls_back_too", "", ""), - newDashObject("z_rolls_dash", "", ""), - newLabelObject("z_label_roller", "", "", ""), - newCheckDeadmanObject(t, "z_check", "", time.Hour), + newBucketObject("z-roll-me-back", "", ""), + newBucketObject("z-rolls-back-too", "", ""), + newDashObject("z-rolls-dash", "", ""), + newLabelObject("z-label-roller", "", "", ""), + newCheckDeadmanObject(t, "z-check", "", time.Hour), newEndpointHTTP(endpointPkgName, "", ""), - newRuleObject(t, "z_rules_back", "", endpointPkgName, ""), - newRuleObject(t, "z_rules_back_2", "", endpointPkgName, ""), - newRuleObject(t, "z_rules_back_3", "", endpointPkgName, ""), - newTaskObject("z_task_rolls_back", "", ""), - newTelegrafObject("z_telegraf_rolls_back", "", ""), - newVariableObject("z_var_rolls_back", "", ""), + newRuleObject(t, "z-rules-back", "", endpointPkgName, ""), + newRuleObject(t, "z-rules-back-2", "", endpointPkgName, ""), + newRuleObject(t, "z-rules-back-3", "", endpointPkgName, ""), + newTaskObject("z-task-rolls-back", "", ""), + newTelegrafObject("z-telegraf-rolls-back", "", ""), + 
newVariableObject("z-var-rolls-back", "", ""), ) _, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkgWithDelete, applyOpt) require.Error(t, err) @@ -902,48 +902,48 @@ func TestLauncher_Pkger(t *testing.T) { t.Log("validate all changes do not persist") { - for _, name := range []string{"z_roll_me_back", "z_rolls_back_too"} { + for _, name := range []string{"z-roll-me-back", "z-rolls-back-too"} { _, err := resourceCheck.getBucket(t, byName(name)) assert.Error(t, err) } - for _, name := range []string{"z_rules_back", "z_rules_back_2", "z_rules_back_3"} { + for _, name := range []string{"z-rules-back", "z-rules-back-2", "z-rules-back-3"} { _, err = resourceCheck.getRule(t, byName(name)) assert.Error(t, err) } - _, err := resourceCheck.getCheck(t, byName("z_check")) + _, err := resourceCheck.getCheck(t, byName("z-check")) assert.Error(t, err) - _, err = resourceCheck.getDashboard(t, byName("z_rolls_dash")) + _, err = resourceCheck.getDashboard(t, byName("z-rolls-dash")) assert.Error(t, err) - _, err = resourceCheck.getEndpoint(t, byName("z_endpoint_rolls_back")) + _, err = resourceCheck.getEndpoint(t, byName("z-endpoint-rolls-back")) assert.Error(t, err) - _, err = resourceCheck.getLabel(t, byName("z_label_roller")) + _, err = resourceCheck.getLabel(t, byName("z-label-roller")) assert.Error(t, err) - _, err = resourceCheck.getTelegrafConfig(t, byName("z_telegraf_rolls_back")) + _, err = resourceCheck.getTelegrafConfig(t, byName("z-telegraf-rolls-back")) assert.Error(t, err) - _, err = resourceCheck.getVariable(t, byName("z_var_rolls_back")) + _, err = resourceCheck.getVariable(t, byName("z-var-rolls-back")) assert.Error(t, err) } }) t.Run("apply pkg with stack id where resources have been removed since last run", func(t *testing.T) { - newEndpointPkgName := "non_existent_endpoint" + newEndpointPkgName := "non-existent-endpoint" allNewResourcesPkg := newPkg( - newBucketObject("non_existent_bucket", "", ""), - newCheckDeadmanObject(t, "non_existent_check", "", 
time.Minute), - newDashObject("non_existent_dash", "", ""), + newBucketObject("non-existent-bucket", "", ""), + newCheckDeadmanObject(t, "non-existent-check", "", time.Minute), + newDashObject("non-existent-dash", "", ""), newEndpointHTTP(newEndpointPkgName, "", ""), - newLabelObject("non_existent_label", "", "", ""), - newRuleObject(t, "non_existent_rule", "", newEndpointPkgName, ""), - newTaskObject("non_existent_task", "", ""), - newTelegrafObject("non_existent_tele", "", ""), - newVariableObject("non_existent_var", "", ""), + newLabelObject("non-existent-label", "", "", ""), + newRuleObject(t, "non-existent-rule", "", newEndpointPkgName, ""), + newTaskObject("non-existent-task", "", ""), + newTelegrafObject("non-existent-tele", "", ""), + newVariableObject("non-existent-var", "", ""), ) sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, allNewResourcesPkg, applyOpt) require.NoError(t, err) @@ -952,19 +952,19 @@ func TestLauncher_Pkger(t *testing.T) { assert.NotEqual(t, initialSum.Buckets[0].ID, sum.Buckets[0].ID) assert.NotZero(t, sum.Buckets[0].ID) defer resourceCheck.mustDeleteBucket(t, influxdb.ID(sum.Buckets[0].ID)) - assert.Equal(t, "non_existent_bucket", sum.Buckets[0].Name) + assert.Equal(t, "non-existent-bucket", sum.Buckets[0].Name) require.Len(t, sum.Checks, 1) assert.NotEqual(t, initialSum.Checks[0].Check.GetID(), sum.Checks[0].Check.GetID()) assert.NotZero(t, sum.Checks[0].Check.GetID()) defer resourceCheck.mustDeleteCheck(t, sum.Checks[0].Check.GetID()) - assert.Equal(t, "non_existent_check", sum.Checks[0].Check.GetName()) + assert.Equal(t, "non-existent-check", sum.Checks[0].Check.GetName()) require.Len(t, sum.Dashboards, 1) assert.NotEqual(t, initialSum.Dashboards[0].ID, sum.Dashboards[0].ID) assert.NotZero(t, sum.Dashboards[0].ID) defer resourceCheck.mustDeleteDashboard(t, influxdb.ID(sum.Dashboards[0].ID)) - assert.Equal(t, "non_existent_dash", sum.Dashboards[0].Name) + assert.Equal(t, "non-existent-dash", sum.Dashboards[0].Name) require.Len(t, 
sum.NotificationEndpoints, 1) sumEndpoint := sum.NotificationEndpoints[0].NotificationEndpoint @@ -978,57 +978,57 @@ func TestLauncher_Pkger(t *testing.T) { assert.NotEqual(t, initialSum.NotificationRules[0].ID, sumRule.ID) assert.NotZero(t, sumRule.ID) defer resourceCheck.mustDeleteRule(t, influxdb.ID(sumRule.ID)) - assert.Equal(t, "non_existent_rule", sumRule.Name) + assert.Equal(t, "non-existent-rule", sumRule.Name) require.Len(t, sum.Labels, 1) assert.NotEqual(t, initialSum.Labels[0].ID, sum.Labels[0].ID) assert.NotZero(t, sum.Labels[0].ID) defer resourceCheck.mustDeleteLabel(t, influxdb.ID(sum.Labels[0].ID)) - assert.Equal(t, "non_existent_label", sum.Labels[0].Name) + assert.Equal(t, "non-existent-label", sum.Labels[0].Name) require.Len(t, sum.Tasks, 1) assert.NotEqual(t, initialSum.Tasks[0].ID, sum.Tasks[0].ID) assert.NotZero(t, sum.Tasks[0].ID) defer resourceCheck.mustDeleteTask(t, influxdb.ID(sum.Tasks[0].ID)) - assert.Equal(t, "non_existent_task", sum.Tasks[0].Name) + assert.Equal(t, "non-existent-task", sum.Tasks[0].Name) require.Len(t, sum.TelegrafConfigs, 1) newTele := sum.TelegrafConfigs[0].TelegrafConfig assert.NotEqual(t, initialSum.TelegrafConfigs[0].TelegrafConfig.ID, newTele.ID) assert.NotZero(t, newTele.ID) defer resourceCheck.mustDeleteTelegrafConfig(t, newTele.ID) - assert.Equal(t, "non_existent_tele", newTele.Name) + assert.Equal(t, "non-existent-tele", newTele.Name) require.Len(t, sum.Variables, 1) assert.NotEqual(t, initialSum.Variables[0].ID, sum.Variables[0].ID) assert.NotZero(t, sum.Variables[0].ID) defer resourceCheck.mustDeleteVariable(t, influxdb.ID(sum.Variables[0].ID)) - assert.Equal(t, "non_existent_var", sum.Variables[0].Name) + assert.Equal(t, "non-existent-var", sum.Variables[0].Name) t.Log("\tvalidate all resources are created") { - bkt := resourceCheck.mustGetBucket(t, byName("non_existent_bucket")) + bkt := resourceCheck.mustGetBucket(t, byName("non-existent-bucket")) assert.Equal(t, pkger.SafeID(bkt.ID), sum.Buckets[0].ID) - 
chk := resourceCheck.mustGetCheck(t, byName("non_existent_check")) + chk := resourceCheck.mustGetCheck(t, byName("non-existent-check")) assert.Equal(t, chk.GetID(), sum.Checks[0].Check.GetID()) endpoint := resourceCheck.mustGetEndpoint(t, byName(newEndpointPkgName)) assert.Equal(t, endpoint.GetID(), sum.NotificationEndpoints[0].NotificationEndpoint.GetID()) - label := resourceCheck.mustGetLabel(t, byName("non_existent_label")) + label := resourceCheck.mustGetLabel(t, byName("non-existent-label")) assert.Equal(t, pkger.SafeID(label.ID), sum.Labels[0].ID) - actualRule := resourceCheck.mustGetRule(t, byName("non_existent_rule")) + actualRule := resourceCheck.mustGetRule(t, byName("non-existent-rule")) assert.Equal(t, pkger.SafeID(actualRule.GetID()), sum.NotificationRules[0].ID) - task := resourceCheck.mustGetTask(t, byName("non_existent_task")) + task := resourceCheck.mustGetTask(t, byName("non-existent-task")) assert.Equal(t, pkger.SafeID(task.ID), sum.Tasks[0].ID) - tele := resourceCheck.mustGetTelegrafConfig(t, byName("non_existent_tele")) + tele := resourceCheck.mustGetTelegrafConfig(t, byName("non-existent-tele")) assert.Equal(t, tele.ID, sum.TelegrafConfigs[0].TelegrafConfig.ID) - variable := resourceCheck.mustGetVariable(t, byName("non_existent_var")) + variable := resourceCheck.mustGetVariable(t, byName("non-existent-var")) assert.Equal(t, pkger.SafeID(variable.ID), sum.Variables[0].ID) } @@ -1175,50 +1175,50 @@ func TestLauncher_Pkger(t *testing.T) { labels := sum.Labels require.Len(t, labels, 2) - assert.Equal(t, "label_1", labels[0].Name) + assert.Equal(t, "label-1", labels[0].Name) assert.Equal(t, "the 2nd label", labels[1].Name) bkts := sum.Buckets require.Len(t, bkts, 1) assert.Equal(t, "rucketeer", bkts[0].Name) - hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label-1", "the 2nd label") checks := sum.Checks require.Len(t, checks, 2) assert.Equal(t, "check 0 
name", checks[0].Check.GetName()) - hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label_1") - assert.Equal(t, "check_1", checks[1].Check.GetName()) - hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label-1") + assert.Equal(t, "check-1", checks[1].Check.GetName()) + hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label-1") dashs := sum.Dashboards require.Len(t, dashs, 1) assert.Equal(t, "dash_1", dashs[0].Name) assert.Equal(t, "desc1", dashs[0].Description) - hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label-1", "the 2nd label") endpoints := sum.NotificationEndpoints require.Len(t, endpoints, 1) assert.Equal(t, "no auth endpoint", endpoints[0].NotificationEndpoint.GetName()) assert.Equal(t, "http none auth desc", endpoints[0].NotificationEndpoint.GetDescription()) - hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label-1") require.Len(t, sum.Tasks, 1) task := sum.Tasks[0] assert.Equal(t, "task_1", task.Name) assert.Equal(t, "desc_1", task.Description) assert.Equal(t, "15 * * * *", task.Cron) - hasLabelAssociations(t, task.LabelAssociations, 1, "label_1") + hasLabelAssociations(t, task.LabelAssociations, 1, "label-1") teles := sum.TelegrafConfigs require.Len(t, teles, 1) assert.Equal(t, "first tele config", teles[0].TelegrafConfig.Name) assert.Equal(t, "desc", teles[0].TelegrafConfig.Description) - hasLabelAssociations(t, teles[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, teles[0].LabelAssociations, 1, "label-1") vars := sum.Variables require.Len(t, vars, 1) assert.Equal(t, "query var", vars[0].Name) - hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label-1") varArgs := vars[0].Arguments 
require.NotNil(t, varArgs) assert.Equal(t, "query", varArgs.Type) @@ -1276,7 +1276,7 @@ spec: labels := sum1.Labels require.Len(t, labels, 2) assert.NotZero(t, labels[0].ID) - assert.Equal(t, "label_1", labels[0].Name) + assert.Equal(t, "label-1", labels[0].Name) assert.Equal(t, "the 2nd label", labels[1].Name) bkts := sum1.Buckets @@ -1284,14 +1284,14 @@ spec: assert.NotZero(t, bkts[0].ID) assert.NotEmpty(t, bkts[0].PkgName) assert.Equal(t, "rucketeer", bkts[0].Name) - hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label-1", "the 2nd label") checks := sum1.Checks require.Len(t, checks, 2) assert.Equal(t, "check 0 name", checks[0].Check.GetName()) - hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label_1") - assert.Equal(t, "check_1", checks[1].Check.GetName()) - hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label-1") + assert.Equal(t, "check-1", checks[1].Check.GetName()) + hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label-1") for _, ch := range checks { assert.NotZero(t, ch.Check.GetID()) } @@ -1302,7 +1302,7 @@ spec: assert.NotEmpty(t, dashs[0].Name) assert.Equal(t, "dash_1", dashs[0].Name) assert.Equal(t, "desc1", dashs[0].Description) - hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label-1", "the 2nd label") require.Len(t, dashs[0].Charts, 1) assert.Equal(t, influxdb.ViewPropertyTypeSingleStat, dashs[0].Charts[0].Properties.GetType()) @@ -1312,14 +1312,14 @@ spec: assert.Equal(t, "no auth endpoint", endpoints[0].NotificationEndpoint.GetName()) assert.Equal(t, "http none auth desc", endpoints[0].NotificationEndpoint.GetDescription()) assert.Equal(t, influxdb.TaskStatusInactive, string(endpoints[0].NotificationEndpoint.GetStatus())) - hasLabelAssociations(t, 
endpoints[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label-1") require.Len(t, sum1.NotificationRules, 1) rule := sum1.NotificationRules[0] assert.NotZero(t, rule.ID) assert.Equal(t, "rule_0", rule.Name) assert.Equal(t, pkger.SafeID(endpoints[0].NotificationEndpoint.GetID()), rule.EndpointID) - assert.Equal(t, "http_none_auth_notification_endpoint", rule.EndpointPkgName) + assert.Equal(t, "http-none-auth-notification-endpoint", rule.EndpointPkgName) assert.Equalf(t, "http", rule.EndpointType, "rule: %+v", rule) require.Len(t, sum1.Tasks, 1) @@ -1340,7 +1340,7 @@ spec: require.Len(t, vars, 1) assert.NotZero(t, vars[0].ID) assert.Equal(t, "query var", vars[0].Name) - hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label-1") varArgs := vars[0].Arguments require.NotNil(t, varArgs) assert.Equal(t, "query", varArgs.Type) @@ -1405,28 +1405,28 @@ spec: labels := sum.Labels require.Len(t, labels, 2) - assert.Equal(t, "label_1", labels[0].Name) + assert.Equal(t, "label-1", labels[0].Name) assert.Equal(t, "the 2nd label", labels[1].Name) bkts := sum.Buckets require.Len(t, bkts, 1) assert.NotEmpty(t, bkts[0].PkgName) assert.Equal(t, "rucketeer", bkts[0].Name) - hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label-1", "the 2nd label") checks := sum.Checks require.Len(t, checks, 2) assert.Equal(t, "check 0 name", checks[0].Check.GetName()) - hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label_1") - assert.Equal(t, "check_1", checks[1].Check.GetName()) - hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label-1") + assert.Equal(t, "check-1", checks[1].Check.GetName()) + hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label-1") dashs := sum.Dashboards require.Len(t, 
dashs, 1) assert.NotEmpty(t, dashs[0].Name) assert.Equal(t, "dash_1", dashs[0].Name) assert.Equal(t, "desc1", dashs[0].Description) - hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label-1", "the 2nd label") require.Len(t, dashs[0].Charts, 1) assert.Equal(t, influxdb.ViewPropertyTypeSingleStat, dashs[0].Charts[0].Properties.GetType()) @@ -1435,7 +1435,7 @@ spec: assert.Equal(t, "no auth endpoint", endpoints[0].NotificationEndpoint.GetName()) assert.Equal(t, "http none auth desc", endpoints[0].NotificationEndpoint.GetDescription()) assert.Equal(t, influxdb.TaskStatusInactive, string(endpoints[0].NotificationEndpoint.GetStatus())) - hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label-1") require.Len(t, sum.NotificationRules, 1) rule := sum.NotificationRules[0] @@ -1457,7 +1457,7 @@ spec: vars := sum.Variables require.Len(t, vars, 1) assert.Equal(t, "query var", vars[0].Name) - hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label-1") varArgs := vars[0].Arguments require.NotNil(t, varArgs) assert.Equal(t, "query", varArgs.Type) @@ -1613,7 +1613,7 @@ spec: apiVersion: %[1]s kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: url: http://localhost:8080/orgs/7167eb6719fa34e5/alert-history routingKey: secret-sauce @@ -1634,7 +1634,7 @@ spec: apiVersion: %[1]s kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: url: http://localhost:8080/orgs/7167eb6719fa34e5/alert-history routingKey: @@ -1711,7 +1711,7 @@ spec: labels := newSum.Labels require.Len(t, labels, 2) assert.Zero(t, labels[0].ID) - assert.Equal(t, "label_1", labels[0].Name) + assert.Equal(t, "label-1", 
labels[0].Name) assert.Zero(t, labels[1].ID) assert.Equal(t, "the 2nd label", labels[1].Name) @@ -1719,21 +1719,21 @@ spec: require.Len(t, bkts, 1) assert.Zero(t, bkts[0].ID) assert.Equal(t, "rucketeer", bkts[0].Name) - hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label-1", "the 2nd label") checks := newSum.Checks require.Len(t, checks, 2) assert.Equal(t, "check 0 name", checks[0].Check.GetName()) - hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label_1") - assert.Equal(t, "check_1", checks[1].Check.GetName()) - hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label-1") + assert.Equal(t, "check-1", checks[1].Check.GetName()) + hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label-1") dashs := newSum.Dashboards require.Len(t, dashs, 1) assert.Zero(t, dashs[0].ID) assert.Equal(t, "dash_1", dashs[0].Name) assert.Equal(t, "desc1", dashs[0].Description) - hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label-1", "the 2nd label") require.Len(t, dashs[0].Charts, 1) assert.Equal(t, influxdb.ViewPropertyTypeSingleStat, dashs[0].Charts[0].Properties.GetType()) @@ -1741,14 +1741,14 @@ spec: require.Len(t, newEndpoints, 1) assert.Equal(t, sum1Endpoints[0].NotificationEndpoint.GetName(), newEndpoints[0].NotificationEndpoint.GetName()) assert.Equal(t, sum1Endpoints[0].NotificationEndpoint.GetDescription(), newEndpoints[0].NotificationEndpoint.GetDescription()) - hasLabelAssociations(t, newEndpoints[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, newEndpoints[0].LabelAssociations, 1, "label-1") require.Len(t, newSum.NotificationRules, 1) newRule := newSum.NotificationRules[0] assert.Equal(t, "new rule name", newRule.Name) assert.Zero(t, newRule.EndpointID) assert.NotEmpty(t, 
newRule.EndpointPkgName) - hasLabelAssociations(t, newRule.LabelAssociations, 1, "label_1") + hasLabelAssociations(t, newRule.LabelAssociations, 1, "label-1") require.Len(t, newSum.Tasks, 1) newTask := newSum.Tasks[0] @@ -1763,13 +1763,13 @@ spec: require.Len(t, newSum.TelegrafConfigs, 1) assert.Equal(t, sum1Teles[0].TelegrafConfig.Name, newSum.TelegrafConfigs[0].TelegrafConfig.Name) assert.Equal(t, sum1Teles[0].TelegrafConfig.Description, newSum.TelegrafConfigs[0].TelegrafConfig.Description) - hasLabelAssociations(t, newSum.TelegrafConfigs[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, newSum.TelegrafConfigs[0].LabelAssociations, 1, "label-1") vars := newSum.Variables require.Len(t, vars, 1) assert.Zero(t, vars[0].ID) assert.Equal(t, "new name", vars[0].Name) // new name - hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label-1") varArgs := vars[0].Arguments require.NotNil(t, varArgs) assert.Equal(t, "query", varArgs.Type) @@ -1837,8 +1837,9 @@ spec: apiVersion: %[1]s kind: Task metadata: - name: Http.POST Synthetic (POST) + name: http-post-synthetic spec: + name: Http.POST Synthetic (POST) every: 5m query: |- import "strings" @@ -1992,24 +1993,24 @@ spec: require.NoError(t, err) require.Len(t, sum.Buckets, 1) - assert.Equal(t, "$bkt-1-name-ref", sum.Buckets[0].Name) + assert.Equal(t, "env-bkt-1-name-ref", sum.Buckets[0].Name) assert.Len(t, sum.Buckets[0].LabelAssociations, 1) require.Len(t, sum.Checks, 1) - assert.Equal(t, "$check-1-name-ref", sum.Checks[0].Check.GetName()) + assert.Equal(t, "env-check-1-name-ref", sum.Checks[0].Check.GetName()) require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "$dash-1-name-ref", sum.Dashboards[0].Name) + assert.Equal(t, "env-dash-1-name-ref", sum.Dashboards[0].Name) require.Len(t, sum.Labels, 1) - assert.Equal(t, "$label-1-name-ref", sum.Labels[0].Name) + assert.Equal(t, "env-label-1-name-ref", sum.Labels[0].Name) require.Len(t, 
sum.NotificationEndpoints, 1) - assert.Equal(t, "$endpoint-1-name-ref", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) + assert.Equal(t, "env-endpoint-1-name-ref", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) require.Len(t, sum.NotificationRules, 1) - assert.Equal(t, "$rule-1-name-ref", sum.NotificationRules[0].Name) + assert.Equal(t, "env-rule-1-name-ref", sum.NotificationRules[0].Name) require.Len(t, sum.TelegrafConfigs, 1) - assert.Equal(t, "$task-1-name-ref", sum.Tasks[0].Name) + assert.Equal(t, "env-task-1-name-ref", sum.Tasks[0].Name) require.Len(t, sum.TelegrafConfigs, 1) - assert.Equal(t, "$telegraf-1-name-ref", sum.TelegrafConfigs[0].TelegrafConfig.Name) + assert.Equal(t, "env-telegraf-1-name-ref", sum.TelegrafConfigs[0].TelegrafConfig.Name) require.Len(t, sum.Variables, 1) - assert.Equal(t, "$var-1-name-ref", sum.Variables[0].Name) + assert.Equal(t, "env-var-1-name-ref", sum.Variables[0].Name) expectedMissingEnvs := []string{ "bkt-1-name-ref", @@ -2078,31 +2079,31 @@ var pkgYMLStr = fmt.Sprintf(` apiVersion: %[1]s kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: Label metadata: - name: the 2nd label + name: the-2nd-label spec: name: the 2nd label --- apiVersion: %[1]s kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: name: rucketeer associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: the 2nd label + name: the-2nd-label --- apiVersion: %[1]s kind: Dashboard metadata: - name: dash_UUID + name: dash-uuid spec: name: dash_1 description: desc1 @@ -2122,14 +2123,14 @@ spec: hex: "#8F8AF4" associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: the 2nd label + name: the-2nd-label --- apiVersion: %[1]s kind: Variable metadata: - name: var_query_1 + name: var-query-1 spec: name: query var description: var_query_1 desc @@ -2139,24 +2140,24 @@ spec: buckets() |> filter(fn: (r) => r.name !~ /^_/) |> rename(columns: {name: "_value"}) |> 
keep(columns: ["_value"]) associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: Telegraf metadata: - name: first_tele_config + name: first-tele-config spec: name: first tele config description: desc associations: - kind: Label - name: label_1 + name: label-1 config: %+q --- apiVersion: %[1]s kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint # on export of resource created from this, will not be same name as this + name: http-none-auth-notification-endpoint # on export of resource created from this, will not be same name as this spec: name: no auth endpoint type: none @@ -2166,12 +2167,12 @@ spec: status: inactive associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: name: check 0 name every: 1m @@ -2203,12 +2204,12 @@ spec: val: 30 associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: description: desc_1 every: 5m @@ -2227,16 +2228,16 @@ spec: timeSince: 90s associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: NotificationRule metadata: - name: rule_UUID + name: rule-uuid spec: name: rule_0 description: desc_0 - endpointName: http_none_auth_notification_endpoint + endpointName: http-none-auth-notification-endpoint every: 10m offset: 30s messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" @@ -2254,12 +2255,12 @@ spec: operator: eQuAl associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: Task metadata: - name: task_UUID + name: task-uuid spec: name: task_1 description: desc_1 @@ -2269,31 +2270,31 @@ spec: |> yield() associations: - kind: Label - name: label_1 + name: label-1 `, pkger.APIVersion, telegrafCfg) var updatePkgYMLStr = fmt.Sprintf(` apiVersion: %[1]s kind: 
Label metadata: - name: label_1 + name: label-1 spec: descriptin: new desc --- apiVersion: %[1]s kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: descriptin: new desc associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: Variable metadata: - name: var_query_1 + name: var-query-1 spec: description: new desc type: query @@ -2302,12 +2303,12 @@ spec: buckets() |> filter(fn: (r) => r.name !~ /^_/) |> rename(columns: {name: "_value"}) |> keep(columns: ["_value"]) associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: name: no auth endpoint type: none @@ -2319,7 +2320,7 @@ spec: apiVersion: %[1]s kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > diff --git a/pkger/clone_resource.go b/pkger/clone_resource.go index cb71dcc701..b0b6ac1789 100644 --- a/pkger/clone_resource.go +++ b/pkger/clone_resource.go @@ -377,9 +377,9 @@ func (ex *resourceExporter) findDashboardByIDFull(ctx context.Context, id influx } func (ex *resourceExporter) uniqName() string { - uuid := idGenerator.ID().String() + uuid := strings.ToLower(idGenerator.ID().String()) for i := 1; i < 250; i++ { - name := fmt.Sprintf("%s_%s", ex.nameGen(), uuid[10:]) + name := fmt.Sprintf("%s-%s", ex.nameGen(), uuid[10:]) if !ex.mPkgNames[name] { return name } @@ -992,7 +992,7 @@ func newObject(kind Kind, name string) Object { // this timestamp is added to make the resource unique. Should also indicate // to the end user that this is machine readable and the spec.name field is // the one they want to edit when a name change is desired. 
- fieldName: idGenerator.ID().String(), + fieldName: strings.ToLower(idGenerator.ID().String()), }, Spec: Resource{ fieldName: name, diff --git a/pkger/http_server_test.go b/pkger/http_server_test.go index aa7f685cbe..e17da30fcf 100644 --- a/pkger/http_server_test.go +++ b/pkger/http_server_test.go @@ -292,7 +292,7 @@ func TestPkgerHTTPServer(t *testing.T) { newBktPkg(t, "bkt3"), }, }, - expectedBkts: []string{"bkt1", "bkt2", "bkt3", "rucket_11"}, + expectedBkts: []string{"bkt1", "bkt2", "bkt3", "rucket-11"}, }, { name: "retrieves packages from raw single and list", @@ -763,7 +763,7 @@ local Bucket(name, desc) = { }; [ - Bucket(name="rucket_1", desc="bucket 1 description"), + Bucket(name="rucket-1", desc="bucket 1 description"), ] ` case pkger.EncodingJSON: @@ -772,7 +772,7 @@ local Bucket(name, desc) = { "apiVersion": "%[1]s", "kind": "Bucket", "metadata": { - "name": "rucket_11" + "name": "rucket-11" }, "spec": { "description": "bucket 1 description" @@ -784,7 +784,7 @@ local Bucket(name, desc) = { pkgStr = `apiVersion: %[1]s kind: Bucket metadata: - name: rucket_11 + name: rucket-11 spec: description: bucket 1 description ` diff --git a/pkger/internal/wordplay/wordplay.go b/pkger/internal/wordplay/wordplay.go index 385f37e593..c2f15f53b7 100644 --- a/pkger/internal/wordplay/wordplay.go +++ b/pkger/internal/wordplay/wordplay.go @@ -878,5 +878,5 @@ var ( // formatted as "adjective_surname". For example 'focused_turing'. 
If retry is non-zero, a random // integer between 0 and 10 will be added to the end of the name, e.g `focused_turing3` func GetRandomName() string { - return fmt.Sprintf("%s_%s", left[rand.Intn(len(left))], right[rand.Intn(len(right))]) + return fmt.Sprintf("%s-%s", left[rand.Intn(len(left))], right[rand.Intn(len(right))]) } diff --git a/pkger/models_test.go b/pkger/models_test.go index 20e68de5a6..00248496a4 100644 --- a/pkger/models_test.go +++ b/pkger/models_test.go @@ -479,12 +479,12 @@ func TestPkg(t *testing.T) { { pkgFile: "testdata/label.yml", kind: KindLabel, - validName: "label_1", + validName: "label-1", }, { pkgFile: "testdata/notification_rule.yml", kind: KindNotificationRule, - validName: "rule_UUID", + validName: "rule-uuid", }, } diff --git a/pkger/parser.go b/pkger/parser.go index 42c4eb3166..42e63f8d46 100644 --- a/pkger/parser.go +++ b/pkger/parser.go @@ -8,6 +8,7 @@ import ( "io" "io/ioutil" "net/http" + "regexp" "sort" "strconv" "strings" @@ -732,7 +733,7 @@ func (p *Pkg) graphResources() error { func (p *Pkg) graphBuckets() *parseErr { p.mBuckets = make(map[string]*bucket) tracker := p.trackNames(true) - return p.eachResource(KindBucket, bucketNameMinLength, func(o Object) []validationErr { + return p.eachResource(KindBucket, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -770,7 +771,7 @@ func (p *Pkg) graphBuckets() *parseErr { func (p *Pkg) graphLabels() *parseErr { p.mLabels = make(map[string]*label) tracker := p.trackNames(true) - return p.eachResource(KindLabel, labelNameMinLength, func(o Object) []validationErr { + return p.eachResource(KindLabel, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -801,7 +802,7 @@ func (p *Pkg) graphChecks() *parseErr { } var pErr parseErr for _, checkKind := range checkKinds { - err := p.eachResource(checkKind.kind, checkNameMinLength, func(o Object) []validationErr { + err := p.eachResource(checkKind.kind, func(o 
Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -862,7 +863,7 @@ func (p *Pkg) graphChecks() *parseErr { func (p *Pkg) graphDashboards() *parseErr { p.mDashboards = make(map[string]*dashboard) tracker := p.trackNames(false) - return p.eachResource(KindDashboard, dashboardNameMinLength, func(o Object) []validationErr { + return p.eachResource(KindDashboard, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -926,7 +927,7 @@ func (p *Pkg) graphNotificationEndpoints() *parseErr { var pErr parseErr for _, nk := range notificationKinds { - err := p.eachResource(nk.kind, 1, func(o Object) []validationErr { + err := p.eachResource(nk.kind, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -977,7 +978,7 @@ func (p *Pkg) graphNotificationEndpoints() *parseErr { func (p *Pkg) graphNotificationRules() *parseErr { p.mNotificationRules = make(map[string]*notificationRule) tracker := p.trackNames(false) - return p.eachResource(KindNotificationRule, 1, func(o Object) []validationErr { + return p.eachResource(KindNotificationRule, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -1027,7 +1028,7 @@ func (p *Pkg) graphNotificationRules() *parseErr { func (p *Pkg) graphTasks() *parseErr { p.mTasks = make(map[string]*task) tracker := p.trackNames(false) - return p.eachResource(KindTask, 1, func(o Object) []validationErr { + return p.eachResource(KindTask, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -1059,7 +1060,7 @@ func (p *Pkg) graphTasks() *parseErr { func (p *Pkg) graphTelegrafs() *parseErr { p.mTelegrafs = make(map[string]*telegraf) tracker := p.trackNames(false) - return p.eachResource(KindTelegraf, 0, func(o Object) []validationErr { + return p.eachResource(KindTelegraf, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ 
-1088,7 +1089,7 @@ func (p *Pkg) graphTelegrafs() *parseErr { func (p *Pkg) graphVariables() *parseErr { p.mVariables = make(map[string]*variable) tracker := p.trackNames(true) - return p.eachResource(KindVariable, 1, func(o Object) []validationErr { + return p.eachResource(KindVariable, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -1118,7 +1119,7 @@ func (p *Pkg) graphVariables() *parseErr { }) } -func (p *Pkg) eachResource(resourceKind Kind, minNameLen int, fn func(o Object) []validationErr) *parseErr { +func (p *Pkg) eachResource(resourceKind Kind, fn func(o Object) []validationErr) *parseErr { var pErr parseErr for i, k := range p.Objects { if err := k.Kind.OK(); err != nil { @@ -1152,14 +1153,14 @@ func (p *Pkg) eachResource(resourceKind Kind, minNameLen int, fn func(o Object) continue } - if len(k.Name()) < minNameLen { + if errs := isDNS1123Label(k.Name()); len(errs) > 0 { pErr.append(resourceErr{ Kind: k.Kind.String(), Idx: intPtr(i), ValidationErrs: []validationErr{ objectValidationErr(fieldMetadata, validationErr{ Field: fieldName, - Msg: fmt.Sprintf("must be a string of at least %d chars in length", minNameLen), + Msg: fmt.Sprintf("name %q is invalid; %s", k.Name(), strings.Join(errs, "; ")), }), }, }) @@ -1434,6 +1435,43 @@ func parseChart(r Resource) (chart, []validationErr) { return c, nil } +// dns1123LabelMaxLength is a label's max length in DNS (RFC 1123) +const dns1123LabelMaxLength int = 63 + +const dns1123LabelFmt string = "[a-z0-9]([-a-z0-9]*[a-z0-9])?" +const dns1123LabelErrMsg string = "a DNS-1123 label must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character" + +var dns1123LabelRegexp = regexp.MustCompile("^" + dns1123LabelFmt + "$") + +// isDNS1123Label tests for a string that conforms to the definition of a label in +// DNS (RFC 1123). 
+func isDNS1123Label(value string) []string { + var errs []string + if len(value) > dns1123LabelMaxLength { + errs = append(errs, fmt.Sprintf("must be no more than %d characters", dns1123LabelMaxLength)) + } + if !dns1123LabelRegexp.MatchString(value) { + errs = append(errs, regexError(dns1123LabelErrMsg, dns1123LabelFmt, "my-name", "123-abc")) + } + return errs +} + +// regexError returns a string explanation of a regex validation failure. +func regexError(msg string, fmt string, examples ...string) string { + if len(examples) == 0 { + return msg + " (regex used for validation is '" + fmt + "')" + } + msg += " (e.g. " + for i := range examples { + if i > 0 { + msg += " or " + } + msg += "'" + examples[i] + "', " + } + msg += "regex used for validation is '" + fmt + "')" + return msg +} + // Resource is a pkger Resource kind. It can be one of any of // available kinds that are supported. type Resource map[string]interface{} diff --git a/pkger/parser_models.go b/pkger/parser_models.go index 439da2c37a..01dd54bd93 100644 --- a/pkger/parser_models.go +++ b/pkger/parser_models.go @@ -1335,6 +1335,10 @@ var validEndpointHTTPMethods = map[string]bool{ func (n *notificationEndpoint) valid() []validationErr { var failures []validationErr + if err, ok := isValidName(n.Name(), 1); !ok { + failures = append(failures, err) + } + if _, err := url.Parse(n.url); err != nil || n.url == "" { failures = append(failures, validationErr{ Field: fieldNotificationEndpointURL, @@ -1532,6 +1536,9 @@ func (r *notificationRule) toInfluxRule() influxdb.NotificationRule { func (r *notificationRule) valid() []validationErr { var vErrs []validationErr + if err, ok := isValidName(r.Name(), 1); !ok { + vErrs = append(vErrs, err) + } if !r.endpointName.hasValue() { vErrs = append(vErrs, validationErr{ Field: fieldNotificationRuleEndpointName, @@ -1714,6 +1721,9 @@ func (t *task) summarize() SummaryTask { func (t *task) valid() []validationErr { var vErrs []validationErr + if err, ok := 
isValidName(t.Name(), 1); !ok { + vErrs = append(vErrs, err) + } if t.cron == "" && t.every == 0 { vErrs = append(vErrs, validationErr{ @@ -1838,6 +1848,9 @@ func (t *telegraf) summarize() SummaryTelegraf { func (t *telegraf) valid() []validationErr { var vErrs []validationErr + if err, ok := isValidName(t.Name(), 1); !ok { + vErrs = append(vErrs, err) + } if t.config.Config == "" { vErrs = append(vErrs, validationErr{ Field: fieldTelegrafConfig, @@ -1918,6 +1931,10 @@ func (v *variable) influxVarArgs() *influxdb.VariableArguments { func (v *variable) valid() []validationErr { var failures []validationErr + if err, ok := isValidName(v.Name(), 1); !ok { + failures = append(failures, err) + } + switch v.Type { case "map": if len(v.MapValues) == 0 { @@ -1979,7 +1996,7 @@ func (r *references) String() string { return v } if r.EnvRef != "" { - return "$" + r.EnvRef + return "env-" + r.EnvRef } return "" } diff --git a/pkger/parser_test.go b/pkger/parser_test.go index 2b73a41b5c..a92a6c2cd5 100644 --- a/pkger/parser_test.go +++ b/pkger/parser_test.go @@ -4,6 +4,7 @@ import ( "errors" "fmt" "path/filepath" + "sort" "strconv" "strings" "testing" @@ -26,7 +27,7 @@ func TestParse(t *testing.T) { actual := buckets[0] expectedBucket := SummaryBucket{ - PkgName: "rucket_22", + PkgName: "rucket-22", Name: "display name", Description: "bucket 2 description", LabelAssociations: []SummaryLabel{}, @@ -35,8 +36,8 @@ func TestParse(t *testing.T) { actual = buckets[1] expectedBucket = SummaryBucket{ - PkgName: "rucket_11", - Name: "rucket_11", + PkgName: "rucket-11", + Name: "rucket-11", Description: "bucket 1 description", RetentionPeriod: time.Hour, LabelAssociations: []SummaryLabel{}, @@ -64,7 +65,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_11 + name: rucket-11 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket @@ -80,7 +81,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_11 + name: 
rucket-11 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket @@ -101,12 +102,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: valid name + name: valid-name --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: valid name + name: valid-name `, }, { @@ -117,14 +118,14 @@ metadata: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_1 + name: rucket-1 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: valid name + name: valid-name spec: - name: rucket_1 + name: rucket-1 `, }, { @@ -135,12 +136,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_1 + name: rucket-1 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: invalid name + name: invalid-name spec: name: f `, @@ -160,7 +161,7 @@ spec: require.Len(t, labels, 3) expectedLabel0 := SummaryLabel{ - PkgName: "label_3", + PkgName: "label-3", Name: "display name", Properties: struct { Color string `json:"color"` @@ -172,8 +173,8 @@ spec: assert.Equal(t, expectedLabel0, labels[0]) expectedLabel1 := SummaryLabel{ - PkgName: "label_1", - Name: "label_1", + PkgName: "label-1", + Name: "label-1", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -185,8 +186,8 @@ spec: assert.Equal(t, expectedLabel1, labels[1]) expectedLabel2 := SummaryLabel{ - PkgName: "label_2", - Name: "label_2", + PkgName: "label-2", + Name: "label-2", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -218,13 +219,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: valid name + name: valid-name spec: --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: a spec: `, }, @@ -235,13 +235,13 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: valid name + name: valid-name spec: --- apiVersion: influxdata.com/v2alpha1 kind: Label 
metadata: - name: valid name + name: valid-name spec: `, }, @@ -255,7 +255,6 @@ kind: Label --- apiVersion: influxdata.com/v2alpha1 kind: Label - `, }, { @@ -265,15 +264,15 @@ kind: Label pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: valid name + name: valid-name spec: --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 spec: - name: valid name + name: valid-name `, }, { @@ -283,13 +282,13 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: valid name + name: valid-name spec: --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 spec: name: a `, @@ -316,16 +315,16 @@ spec: labels []string }{ { - bktName: "rucket_1", - labels: []string{"label_1"}, + bktName: "rucket-1", + labels: []string{"label-1"}, }, { - bktName: "rucket_2", - labels: []string{"label_2"}, + bktName: "rucket-2", + labels: []string{"label-2"}, }, { - bktName: "rucket_3", - labels: []string{"label_1", "label_2"}, + bktName: "rucket-3", + labels: []string{"label-1", "label-2"}, }, } for i, expected := range expectedLabels { @@ -339,28 +338,28 @@ spec: expectedMappings := []SummaryLabelMapping{ { - ResourcePkgName: "rucket_1", - ResourceName: "rucket_1", - LabelPkgName: "label_1", - LabelName: "label_1", + ResourcePkgName: "rucket-1", + ResourceName: "rucket-1", + LabelPkgName: "label-1", + LabelName: "label-1", }, { - ResourcePkgName: "rucket_2", - ResourceName: "rucket_2", - LabelPkgName: "label_2", - LabelName: "label_2", + ResourcePkgName: "rucket-2", + ResourceName: "rucket-2", + LabelPkgName: "label-2", + LabelName: "label-2", }, { - ResourcePkgName: "rucket_3", - ResourceName: "rucket_3", - LabelPkgName: "label_1", - LabelName: "label_1", + ResourcePkgName: "rucket-3", + ResourceName: "rucket-3", + LabelPkgName: "label-1", + LabelName: "label-1", }, { - ResourcePkgName: "rucket_3", - ResourceName: "rucket_3", - LabelPkgName: "label_2", - LabelName: 
"label_2", + ResourcePkgName: "rucket-3", + ResourceName: "rucket-3", + LabelPkgName: "label-2", + LabelName: "label-2", }, } @@ -381,11 +380,11 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: associations: - kind: Label - name: label_1 + name: label-1 `, }, { @@ -395,16 +394,16 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label name: NOT TO BE FOUND `, @@ -416,13 +415,13 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 `, }, { @@ -432,18 +431,18 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `, }, } @@ -465,7 +464,7 @@ spec: require.Truef(t, ok, "got: %#v", check1) expectedBase := icheck.Base{ - Name: "check_0", + Name: "check-0", Description: "desc_0", Every: mustDuration(t, time.Minute), Offset: mustDuration(t, 15*time.Second), @@ -531,15 +530,15 @@ spec: expectedMappings := []SummaryLabelMapping{ { - LabelPkgName: "label_1", - LabelName: "label_1", - ResourcePkgName: "check_0", - ResourceName: "check_0", + LabelPkgName: "label-1", + LabelName: "label-1", + ResourcePkgName: "check-0", + ResourceName: "check-0", }, { - LabelPkgName: "label_1", - LabelName: "label_1", - ResourcePkgName: "check_1", + LabelPkgName: "label-1", + LabelName: "label-1", + ResourcePkgName: "check-1", ResourceName: "display name", }, } @@ -565,7 +564,7 @@ 
spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -576,7 +575,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -595,7 +594,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: query: > from(bucket: "rucket_1") |> yield(name: "mean") @@ -618,7 +617,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -640,7 +639,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -662,7 +661,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -686,7 +685,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -705,7 +704,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }" @@ -726,7 +725,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -750,7 +749,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -771,7 +770,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: level: cRiT query: > @@ -790,7 +789,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -808,7 +807,7 @@ spec: pkgStr: 
`apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -818,7 +817,7 @@ spec: timeSince: 90s associations: - kind: Label - name: label_1 + name: label-1 `, }, }, @@ -831,12 +830,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -846,9 +845,9 @@ spec: timeSince: 90s associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `, }, }, @@ -862,7 +861,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -874,9 +873,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: valid name + name: valid-name spec: - name: check_1 + name: check-1 every: 5m level: cRiT query: > @@ -902,7 +901,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.Name) + assert.Equal(t, "dash-1", actual.Name) assert.Equal(t, "desc1", actual.Description) require.Len(t, actual.Charts, 1) @@ -942,7 +941,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -978,7 +977,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1022,7 +1021,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dashboard w/ single heatmap chart", actual.Name) + assert.Equal(t, "dash-0", actual.Name) assert.Equal(t, "a dashboard w/ heatmap chart", actual.Description) require.Len(t, actual.Charts, 1) @@ -1063,7 +1062,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single heatmap 
chart + name: dash-0 spec: charts: - kind: heatmap @@ -1106,7 +1105,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single heatmap chart + name: dash-0 spec: charts: - kind: heatmap @@ -1132,7 +1131,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single heatmap chart + name: dash-0 spec: charts: - kind: heatmap @@ -1180,7 +1179,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dashboard w/ single histogram chart", actual.Name) + assert.Equal(t, "dash-0", actual.Name) assert.Equal(t, "a dashboard w/ single histogram chart", actual.Description) require.Len(t, actual.Charts, 1) @@ -1218,7 +1217,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single histogram chart + name: dash-0 spec: description: a dashboard w/ single histogram chart charts: @@ -1246,7 +1245,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single histogram chart + name: dash-0 spec: description: a dashboard w/ single histogram chart charts: @@ -1286,7 +1285,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dashboard w/ single markdown chart", actual.Name) + assert.Equal(t, "dash-0", actual.Name) assert.Equal(t, "a dashboard w/ single markdown chart", actual.Description) require.Len(t, actual.Charts, 1) @@ -1307,7 +1306,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dashboard w/ single scatter chart", actual.Name) + assert.Equal(t, "dash-0", actual.Name) assert.Equal(t, "a dashboard w/ single scatter chart", actual.Description) require.Len(t, actual.Charts, 1) @@ -1348,7 +1347,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single 
scatter chart charts: @@ -1375,7 +1374,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1416,7 +1415,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1456,7 +1455,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1498,7 +1497,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1540,7 +1539,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1585,7 +1584,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1621,7 +1620,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1665,7 +1664,7 @@ spec: require.Len(t, sum.Dashboards, 2) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.PkgName) + assert.Equal(t, "dash-1", actual.PkgName) assert.Equal(t, "display name", actual.Name) assert.Equal(t, "desc1", actual.Description) @@ -1702,8 +1701,8 @@ spec: assert.Equal(t, 3.0, c.Value) actual2 := sum.Dashboards[1] - assert.Equal(t, "dash_2", actual2.PkgName) - assert.Equal(t, "dash_2", actual2.Name) + 
assert.Equal(t, "dash-2", actual2.PkgName) + assert.Equal(t, "dash-2", actual2.Name) assert.Equal(t, "desc", actual2.Description) }) }) @@ -1717,7 +1716,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1744,7 +1743,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1769,7 +1768,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1792,7 +1791,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1816,7 +1815,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1841,13 +1840,13 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: `, }, @@ -1860,7 +1859,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: name: d `, @@ -1880,7 +1879,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.Name) + assert.Equal(t, "dash-1", actual.Name) assert.Equal(t, "desc1", actual.Description) require.Len(t, actual.Charts, 1) @@ -1942,7 +1941,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1982,7 +1981,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2026,7 +2025,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: 
dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2068,7 +2067,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2113,7 +2112,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2157,7 +2156,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2196,7 +2195,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2243,7 +2242,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.Name) + assert.Equal(t, "dash-1", actual.Name) assert.Equal(t, "desc1", actual.Description) require.Len(t, actual.Charts, 1) @@ -2303,7 +2302,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2330,7 +2329,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2356,7 +2355,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2380,7 +2379,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2406,7 +2405,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2432,7 +2431,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2470,7 +2469,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", 
actual.Name) + assert.Equal(t, "dash-1", actual.Name) assert.Equal(t, "desc1", actual.Description) require.Len(t, actual.Charts, 1) @@ -2512,7 +2511,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2553,7 +2552,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2604,28 +2603,28 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.Name) + assert.Equal(t, "dash-1", actual.Name) require.Len(t, actual.LabelAssociations, 2) - assert.Equal(t, "label_1", actual.LabelAssociations[0].Name) - assert.Equal(t, "label_2", actual.LabelAssociations[1].Name) + assert.Equal(t, "label-1", actual.LabelAssociations[0].Name) + assert.Equal(t, "label-2", actual.LabelAssociations[1].Name) expectedMappings := []SummaryLabelMapping{ { Status: StateStatusNew, ResourceType: influxdb.DashboardsResourceType, - ResourcePkgName: "dash_1", - ResourceName: "dash_1", - LabelPkgName: "label_1", - LabelName: "label_1", + ResourcePkgName: "dash-1", + ResourceName: "dash-1", + LabelPkgName: "label-1", + LabelName: "label-1", }, { Status: StateStatusNew, ResourceType: influxdb.DashboardsResourceType, - ResourcePkgName: "dash_1", - ResourceName: "dash_1", - LabelPkgName: "label_2", - LabelName: "label_2", + ResourcePkgName: "dash-1", + ResourceName: "dash-1", + LabelPkgName: "label-2", + LabelName: "label-2", }, } @@ -2644,11 +2643,11 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - kind: Label - name: label_1 + name: label-1 `, }, { @@ -2658,16 +2657,16 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - 
kind: Label - name: label_1 + name: label-1 - kind: Label name: unfound label `, @@ -2679,12 +2678,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - kind: Label @@ -2700,18 +2699,18 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `, }, } @@ -2727,7 +2726,7 @@ spec: testfileRunner(t, "testdata/notification_endpoint", func(t *testing.T, pkg *Pkg) { expectedEndpoints := []SummaryNotificationEndpoint{ { - PkgName: "http_basic_auth_notification_endpoint", + PkgName: "http-basic-auth-notification-endpoint", NotificationEndpoint: &endpoint.HTTP{ Base: endpoint.Base{ Name: "basic endpoint name", @@ -2742,10 +2741,10 @@ spec: }, }, { - PkgName: "http_bearer_auth_notification_endpoint", + PkgName: "http-bearer-auth-notification-endpoint", NotificationEndpoint: &endpoint.HTTP{ Base: endpoint.Base{ - Name: "http_bearer_auth_notification_endpoint", + Name: "http-bearer-auth-notification-endpoint", Description: "http bearer auth desc", Status: influxdb.TaskStatusActive, }, @@ -2756,10 +2755,10 @@ spec: }, }, { - PkgName: "http_none_auth_notification_endpoint", + PkgName: "http-none-auth-notification-endpoint", NotificationEndpoint: &endpoint.HTTP{ Base: endpoint.Base{ - Name: "http_none_auth_notification_endpoint", + Name: "http-none-auth-notification-endpoint", Description: "http none auth desc", Status: influxdb.TaskStatusActive, }, @@ -2769,7 +2768,7 @@ spec: }, }, { - PkgName: "pager_duty_notification_endpoint", + PkgName: "pager-duty-notification-endpoint", NotificationEndpoint: &endpoint.PagerDuty{ Base: endpoint.Base{ Name: "pager 
duty name", @@ -2781,7 +2780,7 @@ spec: }, }, { - PkgName: "slack_notification_endpoint", + PkgName: "slack-notification-endpoint", NotificationEndpoint: &endpoint.Slack{ Base: endpoint.Base{ Name: "slack name", @@ -2803,15 +2802,15 @@ spec: expected, actual := expectedEndpoints[i], endpoints[i] assert.Equalf(t, expected.NotificationEndpoint, actual.NotificationEndpoint, "index=%d", i) require.Len(t, actual.LabelAssociations, 1) - assert.Equal(t, "label_1", actual.LabelAssociations[0].Name) + assert.Equal(t, "label-1", actual.LabelAssociations[0].Name) assert.Contains(t, sum.LabelMappings, SummaryLabelMapping{ Status: StateStatusNew, ResourceType: influxdb.NotificationEndpointResourceType, ResourcePkgName: expected.PkgName, ResourceName: expected.NotificationEndpoint.GetName(), - LabelPkgName: "label_1", - LabelName: "label_1", + LabelPkgName: "label-1", + LabelName: "label-1", }) } }) @@ -2831,7 +2830,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: slack-notification-endpoint spec: `, }, @@ -2845,7 +2844,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: `, }, @@ -2859,7 +2858,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: type: none method: get @@ -2875,7 +2874,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: type: none method: get @@ -2892,7 +2891,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: type: none 
url: https://www.example.com/endpoint/noneauth @@ -2908,7 +2907,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: type: none description: http none auth desc @@ -2926,7 +2925,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_basic_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: type: basic method: POST @@ -2944,7 +2943,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_basic_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: type: basic method: POST @@ -2962,7 +2961,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_basic_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: description: http basic auth desc type: basic @@ -2980,7 +2979,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_bearer_auth_notification_endpoint + name: http-bearer-auth-notification-endpoint spec: description: http bearer auth desc type: bearer @@ -2998,7 +2997,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: type: RANDOM WRONG TYPE description: http none auth desc @@ -3016,7 +3015,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: slack-notification-endpoint spec: url: https://hooks.slack.com/services/bip/piddy/boppidy --- @@ -3038,7 +3037,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: 
slack-notification-endpoint spec: description: slack desc url: https://hooks.slack.com/services/bip/piddy/boppidy @@ -3063,7 +3062,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: slack-notification-endpoint spec: name: slack description: slack desc @@ -3088,7 +3087,7 @@ spec: rule := rules[0] assert.Equal(t, "rule_0", rule.Name) - assert.Equal(t, "endpoint_0", rule.EndpointPkgName) + assert.Equal(t, "endpoint-0", rule.EndpointPkgName) assert.Equal(t, "desc_0", rule.Description) assert.Equal(t, (10 * time.Minute).String(), rule.Every) assert.Equal(t, (30 * time.Second).String(), rule.Offset) @@ -3110,8 +3109,8 @@ spec: require.Len(t, sum.Labels, 2) require.Len(t, rule.LabelAssociations, 2) - assert.Equal(t, "label_1", rule.LabelAssociations[0].PkgName) - assert.Equal(t, "label_2", rule.LabelAssociations[1].PkgName) + assert.Equal(t, "label-1", rule.LabelAssociations[0].PkgName) + assert.Equal(t, "label-2", rule.LabelAssociations[1].PkgName) }) }) @@ -3121,7 +3120,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: endpoint_0 + name: endpoint-0 spec: url: https://hooks.slack.com/services/bip/piddy/boppidy --- @@ -3142,7 +3141,7 @@ spec: kind: NotificationRule metadata: spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3158,7 +3157,7 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" @@ -3175,9 +3174,9 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - 
endpointName: endpoint_0 + endpointName: endpoint-0 messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: - currentLevel: WARN @@ -3192,10 +3191,10 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: every: 10m - endpointName: endpoint_0 + endpointName: endpoint-0 messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" `), }, @@ -3208,10 +3207,10 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: every: 10m - endpointName: endpoint_0 + endpointName: endpoint-0 messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: - currentLevel: WRONGO @@ -3226,9 +3225,9 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3245,9 +3244,9 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3267,9 +3266,9 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by 
check: ${ r._check_name }: ${ r._message }" status: RANDO STATUS @@ -3286,16 +3285,16 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: - currentLevel: WARN associations: - kind: Label - name: label_1 + name: label-1 `), }, }, @@ -3307,23 +3306,23 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: - currentLevel: WARN associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `), }, }, @@ -3336,9 +3335,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3347,9 +3346,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3366,7 +3365,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: endpointName: RANDO_ENDPOINT_NAME every: 10m @@ -3390,11 +3389,14 @@ 
spec: sum := pkg.Summary() tasks := sum.Tasks require.Len(t, tasks, 2) + sort.Slice(tasks, func(i, j int) bool { + return tasks[i].PkgName < tasks[j].PkgName + }) baseEqual := func(t *testing.T, i int, status influxdb.Status, actual SummaryTask) { t.Helper() - assert.Equal(t, "task_"+strconv.Itoa(i), actual.Name) + assert.Equal(t, "task-"+strconv.Itoa(i), actual.Name) assert.Equal(t, "desc_"+strconv.Itoa(i), actual.Description) assert.Equal(t, status, actual.Status) @@ -3402,19 +3404,19 @@ spec: assert.Equal(t, expectedQuery, actual.Query) require.Len(t, actual.LabelAssociations, 1) - assert.Equal(t, "label_1", actual.LabelAssociations[0].Name) + assert.Equal(t, "label-1", actual.LabelAssociations[0].Name) } require.Len(t, sum.Labels, 1) task0 := tasks[0] - baseEqual(t, 0, influxdb.Inactive, task0) - assert.Equal(t, (10 * time.Minute).String(), task0.Every) - assert.Equal(t, (15 * time.Second).String(), task0.Offset) + baseEqual(t, 1, influxdb.Active, task0) + assert.Equal(t, "15 * * * *", task0.Cron) task1 := tasks[1] - baseEqual(t, 1, influxdb.Active, task1) - assert.Equal(t, "15 * * * *", task1.Cron) + baseEqual(t, 0, influxdb.Inactive, task1) + assert.Equal(t, (10 * time.Minute).String(), task1.Every) + assert.Equal(t, (15 * time.Second).String(), task1.Offset) }) }) @@ -3449,7 +3451,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: cron: 15 * * * * query: > @@ -3467,7 +3469,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: description: desc_0 every: 10m @@ -3484,7 +3486,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: description: desc_0 offset: 15s @@ -3500,14 +3502,14 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_1 + name: task-1 spec: cron: 15 * * * * query: > from(bucket: "rucket_1") |> yield(name: "mean") associations: - kind: 
Label - name: label_1 + name: label-1 `, }, }, @@ -3521,12 +3523,12 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: every: 10m offset: 15s @@ -3535,9 +3537,9 @@ spec: status: inactive associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `, }, }, @@ -3551,7 +3553,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: every: 10m query: > @@ -3560,7 +3562,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: every: 10m query: > @@ -3587,25 +3589,25 @@ spec: assert.Equal(t, "desc", actual.TelegrafConfig.Description) require.Len(t, actual.LabelAssociations, 2) - assert.Equal(t, "label_1", actual.LabelAssociations[0].Name) - assert.Equal(t, "label_2", actual.LabelAssociations[1].Name) + assert.Equal(t, "label-1", actual.LabelAssociations[0].Name) + assert.Equal(t, "label-2", actual.LabelAssociations[1].Name) actual = sum.TelegrafConfigs[1] - assert.Equal(t, "tele_2", actual.TelegrafConfig.Name) + assert.Equal(t, "tele-2", actual.TelegrafConfig.Name) assert.Empty(t, actual.LabelAssociations) require.Len(t, sum.LabelMappings, 2) expectedMapping := SummaryLabelMapping{ Status: StateStatusNew, - ResourcePkgName: "first_tele_config", + ResourcePkgName: "first-tele-config", ResourceName: "display name", - LabelPkgName: "label_1", - LabelName: "label_1", + LabelPkgName: "label-1", + LabelName: "label-1", ResourceType: influxdb.TelegrafsResourceType, } assert.Equal(t, expectedMapping, sum.LabelMappings[0]) - expectedMapping.LabelPkgName = "label_2" - expectedMapping.LabelName = "label_2" + expectedMapping.LabelPkgName = "label-2" + expectedMapping.LabelName = "label-2" assert.Equal(t, expectedMapping, sum.LabelMappings[1]) }) }) @@ -3619,7 +3621,7 @@ spec: pkgStr: `apiVersion: 
influxdata.com/v2alpha1 kind: Telegraf metadata: - name: first_tele_config + name: first-tele-config spec: `, }, @@ -3630,14 +3632,14 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Telegraf metadata: - name: tele_0 + name: tele-0 spec: config: fake tele config --- apiVersion: influxdata.com/v2alpha1 kind: Telegraf metadata: - name: tele_0 + name: tele-0 spec: config: fake tele config `, @@ -3679,21 +3681,21 @@ spec: // validates we support all known variable types varEquals(t, - "var_const_3", + "var-const-3", "constant", influxdb.VariableConstantValues([]string{"first val"}), sum.Variables[1], ) varEquals(t, - "var_map_4", + "var-map-4", "map", influxdb.VariableMapValues{"k1": "v1"}, sum.Variables[2], ) varEquals(t, - "var_query_2", + "var-query-2", "query", influxdb.VariableQueryValues{ Query: "an influxql query of sorts", @@ -3714,7 +3716,7 @@ spec: kind: Variable metadata: spec: - description: var_map_4 desc + description: var-map-4 desc type: map values: k1: v1 @@ -3727,9 +3729,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_map_4 + name: var-map-4 spec: - description: var_map_4 desc + description: var-map-4 desc type: map `, }, @@ -3740,9 +3742,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_const_3 + name: var-const-3 spec: - description: var_const_3 desc + description: var-const-3 desc type: constant `, }, @@ -3753,9 +3755,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query language: influxql `, @@ -3767,9 +3769,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts `, @@ -3781,9 +3783,9 @@ spec: pkgStr: `apiVersion: 
influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: wrong Language @@ -3796,9 +3798,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -3806,9 +3808,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -3821,9 +3823,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -3831,10 +3833,10 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: valid_query + name: valid-query spec: - name: var_query_2 - description: var_query_2 desc + name: var-query-2 + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -3861,8 +3863,8 @@ spec: labels []string }{ { - varName: "var_1", - labels: []string{"label_1"}, + varName: "var-1", + labels: []string{"label-1"}, }, } for i, expected := range expectedLabelMappings { @@ -3877,10 +3879,10 @@ spec: expectedMappings := []SummaryLabelMapping{ { Status: StateStatusNew, - ResourcePkgName: "var_1", - ResourceName: "var_1", - LabelPkgName: "label_1", - LabelName: "label_1", + ResourcePkgName: "var-1", + ResourceName: "var-1", + LabelPkgName: "label-1", + LabelName: "label-1", }, } @@ -3908,7 +3910,7 @@ spec: expected := &endpoint.PagerDuty{ Base: endpoint.Base{ - Name: "pager_duty_notification_endpoint", + Name: 
"pager-duty-notification-endpoint", Status: influxdb.TaskStatusActive, }, ClientURL: "http://localhost:8080/orgs/7167eb6719fa34e5/alert-history", @@ -3935,43 +3937,43 @@ spec: sum := pkg.Summary() require.Len(t, sum.Buckets, 1) - assert.Equal(t, "$bkt-1-name-ref", sum.Buckets[0].Name) + assert.Equal(t, "env-bkt-1-name-ref", sum.Buckets[0].Name) assert.Len(t, sum.Buckets[0].LabelAssociations, 1) hasEnv(t, pkg.mEnv, "bkt-1-name-ref") require.Len(t, sum.Checks, 1) - assert.Equal(t, "$check-1-name-ref", sum.Checks[0].Check.GetName()) + assert.Equal(t, "env-check-1-name-ref", sum.Checks[0].Check.GetName()) assert.Len(t, sum.Checks[0].LabelAssociations, 1) hasEnv(t, pkg.mEnv, "check-1-name-ref") require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "$dash-1-name-ref", sum.Dashboards[0].Name) + assert.Equal(t, "env-dash-1-name-ref", sum.Dashboards[0].Name) assert.Len(t, sum.Dashboards[0].LabelAssociations, 1) hasEnv(t, pkg.mEnv, "dash-1-name-ref") require.Len(t, sum.NotificationEndpoints, 1) - assert.Equal(t, "$endpoint-1-name-ref", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) + assert.Equal(t, "env-endpoint-1-name-ref", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) hasEnv(t, pkg.mEnv, "endpoint-1-name-ref") require.Len(t, sum.Labels, 1) - assert.Equal(t, "$label-1-name-ref", sum.Labels[0].Name) + assert.Equal(t, "env-label-1-name-ref", sum.Labels[0].Name) hasEnv(t, pkg.mEnv, "label-1-name-ref") require.Len(t, sum.NotificationRules, 1) - assert.Equal(t, "$rule-1-name-ref", sum.NotificationRules[0].Name) - assert.Equal(t, "$endpoint-1-name-ref", sum.NotificationRules[0].EndpointPkgName) + assert.Equal(t, "env-rule-1-name-ref", sum.NotificationRules[0].Name) + assert.Equal(t, "env-endpoint-1-name-ref", sum.NotificationRules[0].EndpointPkgName) hasEnv(t, pkg.mEnv, "rule-1-name-ref") require.Len(t, sum.Tasks, 1) - assert.Equal(t, "$task-1-name-ref", sum.Tasks[0].Name) + assert.Equal(t, "env-task-1-name-ref", sum.Tasks[0].Name) hasEnv(t, pkg.mEnv, 
"task-1-name-ref") require.Len(t, sum.TelegrafConfigs, 1) - assert.Equal(t, "$telegraf-1-name-ref", sum.TelegrafConfigs[0].TelegrafConfig.Name) + assert.Equal(t, "env-telegraf-1-name-ref", sum.TelegrafConfigs[0].TelegrafConfig.Name) hasEnv(t, pkg.mEnv, "telegraf-1-name-ref") require.Len(t, sum.Variables, 1) - assert.Equal(t, "$var-1-name-ref", sum.Variables[0].Name) + assert.Equal(t, "env-var-1-name-ref", sum.Variables[0].Name) hasEnv(t, pkg.mEnv, "var-1-name-ref") t.Log("applying env vars should populate env fields") @@ -4003,8 +4005,8 @@ spec: labels := []SummaryLabel{ { - PkgName: "label_1", - Name: "label_1", + PkgName: "label-1", + Name: "label-1", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -4015,22 +4017,22 @@ spec: bkts := []SummaryBucket{ { - PkgName: "rucket_1", - Name: "rucket_1", + PkgName: "rucket-1", + Name: "rucket-1", Description: "desc_1", RetentionPeriod: 10000 * time.Second, LabelAssociations: labels, }, { - PkgName: "rucket_2", - Name: "rucket_2", - Description: "desc_2", + PkgName: "rucket-2", + Name: "rucket-2", + Description: "desc-2", RetentionPeriod: 20000 * time.Second, LabelAssociations: labels, }, { - PkgName: "rucket_3", - Name: "rucket_3", + PkgName: "rucket-3", + Name: "rucket-3", Description: "desc_3", RetentionPeriod: 30000 * time.Second, LabelAssociations: labels, @@ -4080,7 +4082,7 @@ func TestCombine(t *testing.T) { apiVersion: %[1]s kind: Label metadata: - name: label_%d + name: label-%d `, APIVersion, i)) pkgs = append(pkgs, pkg) } @@ -4089,35 +4091,35 @@ metadata: apiVersion: %[1]s kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: associations: - kind: Label - name: label_1 + name: label-1 `, APIVersion))) pkgs = append(pkgs, newPkgFromYmlStr(t, fmt.Sprintf(` apiVersion: %[1]s kind: Bucket metadata: - name: rucket_2 + name: rucket-2 spec: associations: - kind: Label - name: label_2 + name: label-2 `, APIVersion))) pkgs = append(pkgs, newPkgFromYmlStr(t, fmt.Sprintf(` 
apiVersion: %[1]s kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 `, APIVersion))) combinedPkg, err := Combine(pkgs) @@ -4127,21 +4129,21 @@ spec: require.Len(t, sum.Labels, numLabels) for i := 0; i < numLabels; i++ { - assert.Equal(t, fmt.Sprintf("label_%d", i), sum.Labels[i].Name) + assert.Equal(t, fmt.Sprintf("label-%d", i), sum.Labels[i].Name) } require.Len(t, sum.Labels, numLabels) for i := 0; i < numLabels; i++ { - assert.Equal(t, fmt.Sprintf("label_%d", i), sum.Labels[i].Name) + assert.Equal(t, fmt.Sprintf("label-%d", i), sum.Labels[i].Name) } require.Len(t, sum.Buckets, 3) - assert.Equal(t, "rucket_1", sum.Buckets[0].Name) - associationsEqual(t, sum.Buckets[0].LabelAssociations, "label_1") - assert.Equal(t, "rucket_2", sum.Buckets[1].Name) - associationsEqual(t, sum.Buckets[1].LabelAssociations, "label_2") - assert.Equal(t, "rucket_3", sum.Buckets[2].Name) - associationsEqual(t, sum.Buckets[2].LabelAssociations, "label_1", "label_2") + assert.Equal(t, "rucket-1", sum.Buckets[0].Name) + associationsEqual(t, sum.Buckets[0].LabelAssociations, "label-1") + assert.Equal(t, "rucket-2", sum.Buckets[1].Name) + associationsEqual(t, sum.Buckets[1].LabelAssociations, "label-2") + assert.Equal(t, "rucket-3", sum.Buckets[2].Name) + associationsEqual(t, sum.Buckets[2].LabelAssociations, "label-1", "label-2") }) } diff --git a/pkger/service_test.go b/pkger/service_test.go index 2d98354e1b..c2c208e4fa 100644 --- a/pkger/service_test.go +++ b/pkger/service_test.go @@ -64,7 +64,7 @@ func TestService(t *testing.T) { testfileRunner(t, "testdata/bucket.yml", func(t *testing.T, pkg *Pkg) { fakeBktSVC := mock.NewBucketService() fakeBktSVC.FindBucketByNameFn = func(_ context.Context, orgID influxdb.ID, name string) (*influxdb.Bucket, error) { - if name != "rucket_11" { + if name != "rucket-11" { return nil, errors.New("not found") } return &influxdb.Bucket{ @@ 
-86,16 +86,16 @@ func TestService(t *testing.T) { DiffIdentifier: DiffIdentifier{ ID: SafeID(1), StateStatus: StateStatusExists, - PkgName: "rucket_11", + PkgName: "rucket-11", }, Old: &DiffBucketValues{ - Name: "rucket_11", + Name: "rucket-11", Description: "old desc", RetentionRules: retentionRules{newRetentionRule(30 * time.Hour)}, }, New: DiffBucketValues{ - Name: "rucket_11", + Name: "rucket-11", Description: "bucket 1 description", RetentionRules: retentionRules{newRetentionRule(time.Hour)}, }, @@ -119,11 +119,11 @@ func TestService(t *testing.T) { expected := DiffBucket{ DiffIdentifier: DiffIdentifier{ - PkgName: "rucket_11", + PkgName: "rucket-11", StateStatus: StateStatusNew, }, New: DiffBucketValues{ - Name: "rucket_11", + Name: "rucket-11", Description: "bucket 1 description", RetentionRules: retentionRules{newRetentionRule(time.Hour)}, }, @@ -160,13 +160,13 @@ func TestService(t *testing.T) { require.Len(t, checks, 2) check0 := checks[0] assert.True(t, check0.IsNew()) - assert.Equal(t, "check_0", check0.PkgName) + assert.Equal(t, "check-0", check0.PkgName) assert.Zero(t, check0.ID) assert.Nil(t, check0.Old) check1 := checks[1] assert.False(t, check1.IsNew()) - assert.Equal(t, "check_1", check1.PkgName) + assert.Equal(t, "check-1", check1.PkgName) assert.Equal(t, "display name", check1.New.GetName()) assert.NotZero(t, check1.ID) assert.Equal(t, existing, check1.Old.Check) @@ -200,26 +200,26 @@ func TestService(t *testing.T) { DiffIdentifier: DiffIdentifier{ ID: SafeID(1), StateStatus: StateStatusExists, - PkgName: "label_1", + PkgName: "label-1", }, Old: &DiffLabelValues{ - Name: "label_1", + Name: "label-1", Color: "old color", Description: "old description", }, New: DiffLabelValues{ - Name: "label_1", + Name: "label-1", Color: "#FFFFFF", Description: "label 1 description", }, } assert.Contains(t, diff.Labels, expected) - expected.PkgName = "label_2" - expected.New.Name = "label_2" + expected.PkgName = "label-2" + expected.New.Name = "label-2" 
expected.New.Color = "#000000" expected.New.Description = "label 2 description" - expected.Old.Name = "label_2" + expected.Old.Name = "label-2" assert.Contains(t, diff.Labels, expected) }) }) @@ -239,19 +239,19 @@ func TestService(t *testing.T) { expected := DiffLabel{ DiffIdentifier: DiffIdentifier{ - PkgName: "label_1", + PkgName: "label-1", StateStatus: StateStatusNew, }, New: DiffLabelValues{ - Name: "label_1", + Name: "label-1", Color: "#FFFFFF", Description: "label 1 description", }, } assert.Contains(t, diff.Labels, expected) - expected.PkgName = "label_2" - expected.New.Name = "label_2" + expected.PkgName = "label-2" + expected.New.Name = "label-2" expected.New.Color = "#000000" expected.New.Description = "label 2 description" assert.Contains(t, diff.Labels, expected) @@ -266,7 +266,7 @@ func TestService(t *testing.T) { existing := &endpoint.HTTP{ Base: endpoint.Base{ ID: &id, - Name: "http_none_auth_notification_endpoint", + Name: "http-none-auth-notification-endpoint", Description: "old desc", Status: influxdb.TaskStatusInactive, }, @@ -302,7 +302,7 @@ func TestService(t *testing.T) { expected := DiffNotificationEndpoint{ DiffIdentifier: DiffIdentifier{ ID: 1, - PkgName: "http_none_auth_notification_endpoint", + PkgName: "http-none-auth-notification-endpoint", StateStatus: StateStatusExists, }, Old: &DiffNotificationEndpointValues{ @@ -312,7 +312,7 @@ func TestService(t *testing.T) { NotificationEndpoint: &endpoint.HTTP{ Base: endpoint.Base{ ID: &id, - Name: "http_none_auth_notification_endpoint", + Name: "http-none-auth-notification-endpoint", Description: "http none auth desc", Status: influxdb.TaskStatusActive, }, @@ -334,7 +334,7 @@ func TestService(t *testing.T) { Base: endpoint.Base{ ID: &id, // This name here matches the endpoint identified in the pkg notification rule - Name: "endpoint_0", + Name: "endpoint-0", Description: "old desc", Status: influxdb.TaskStatusInactive, }, @@ -392,13 +392,13 @@ func TestService(t *testing.T) { }) 
t.Run("variables", func(t *testing.T) { - testfileRunner(t, "testdata/variables", func(t *testing.T, pkg *Pkg) { + testfileRunner(t, "testdata/variables.json", func(t *testing.T, pkg *Pkg) { fakeVarSVC := mock.NewVariableService() fakeVarSVC.FindVariablesF = func(_ context.Context, filter influxdb.VariableFilter, opts ...influxdb.FindOptions) ([]*influxdb.Variable, error) { return []*influxdb.Variable{ { ID: influxdb.ID(1), - Name: "var_const_3", + Name: "var-const-3", Description: "old desc", }, }, nil @@ -413,16 +413,16 @@ func TestService(t *testing.T) { expected := DiffVariable{ DiffIdentifier: DiffIdentifier{ ID: 1, - PkgName: "var_const_3", + PkgName: "var-const-3", StateStatus: StateStatusExists, }, Old: &DiffVariableValues{ - Name: "var_const_3", + Name: "var-const-3", Description: "old desc", }, New: DiffVariableValues{ - Name: "var_const_3", - Description: "var_const_3 desc", + Name: "var-const-3", + Description: "var-const-3 desc", Args: &influxdb.VariableArguments{ Type: "constant", Values: influxdb.VariableConstantValues{"first val"}, @@ -434,12 +434,12 @@ func TestService(t *testing.T) { expected = DiffVariable{ DiffIdentifier: DiffIdentifier{ // no ID here since this one would be new - PkgName: "var_map_4", + PkgName: "var-map-4", StateStatus: StateStatusNew, }, New: DiffVariableValues{ - Name: "var_map_4", - Description: "var_map_4 desc", + Name: "var-map-4", + Description: "var-map-4 desc", Args: &influxdb.VariableArguments{ Type: "map", Values: influxdb.VariableMapValues{"k1": "v1"}, @@ -480,8 +480,8 @@ func TestService(t *testing.T) { expected := SummaryBucket{ ID: SafeID(time.Hour), OrgID: SafeID(orgID), - PkgName: "rucket_11", - Name: "rucket_11", + PkgName: "rucket-11", + Name: "rucket-11", Description: "bucket 1 description", RetentionPeriod: time.Hour, LabelAssociations: []SummaryLabel{}, @@ -503,7 +503,7 @@ func TestService(t *testing.T) { id := influxdb.ID(3) if name == "display name" { id = 4 - name = "rucket_22" + name = "rucket-22" } if 
bkt, ok := pkg.mBuckets[name]; ok { return &influxdb.Bucket{ @@ -530,8 +530,8 @@ func TestService(t *testing.T) { expected := SummaryBucket{ ID: SafeID(3), OrgID: SafeID(orgID), - PkgName: "rucket_11", - Name: "rucket_11", + PkgName: "rucket-11", + Name: "rucket-11", Description: "bucket 1 description", RetentionPeriod: time.Hour, LabelAssociations: []SummaryLabel{}, @@ -543,7 +543,7 @@ func TestService(t *testing.T) { }) t.Run("rolls back all created buckets on an error", func(t *testing.T) { - testfileRunner(t, "testdata/bucket", func(t *testing.T, pkg *Pkg) { + testfileRunner(t, "testdata/bucket.yml", func(t *testing.T, pkg *Pkg) { fakeBktSVC := mock.NewBucketService() fakeBktSVC.FindBucketByNameFn = func(_ context.Context, id influxdb.ID, s string) (*influxdb.Bucket, error) { // forces the bucket to be created a new @@ -556,9 +556,6 @@ func TestService(t *testing.T) { return nil } - pkg.mBuckets["copybuck1"] = pkg.mBuckets["rucket_11"] - pkg.mBuckets["copybuck2"] = pkg.mBuckets["rucket_11"] - svc := newTestService(WithBucketSVC(fakeBktSVC)) orgID := influxdb.ID(9000) @@ -604,7 +601,7 @@ func TestService(t *testing.T) { assert.Fail(t, "did not find notification by name: "+name) } - for _, expectedName := range []string{"check_0", "display name"} { + for _, expectedName := range []string{"check-0", "display name"} { containsWithID(t, expectedName) } }) @@ -621,11 +618,6 @@ func TestService(t *testing.T) { return nil } - // create some dupes - for name, c := range pkg.mChecks { - pkg.mChecks["copy"+name] = c - } - svc := newTestService(WithCheckSVC(fakeCheckSVC)) orgID := influxdb.ID(9000) @@ -663,8 +655,8 @@ func TestService(t *testing.T) { assert.Contains(t, sum.Labels, SummaryLabel{ ID: 1, OrgID: SafeID(orgID), - PkgName: "label_1", - Name: "label_1", + PkgName: "label-1", + Name: "label-1", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -677,8 +669,8 @@ func TestService(t *testing.T) { assert.Contains(t, sum.Labels, 
SummaryLabel{ ID: 2, OrgID: SafeID(orgID), - PkgName: "label_2", - Name: "label_2", + PkgName: "label-2", + Name: "label-2", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -701,9 +693,6 @@ func TestService(t *testing.T) { return nil } - pkg.mLabels["copy1"] = pkg.mLabels["label_1"] - pkg.mLabels["copy2"] = pkg.mLabels["label_2"] - svc := newTestService(WithLabelSVC(fakeLabelSVC)) orgID := influxdb.ID(9000) @@ -721,6 +710,7 @@ func TestService(t *testing.T) { stubExisting := func(name string, id influxdb.ID) *influxdb.Label { pkgLabel := pkg.mLabels[name] + fmt.Println(name, pkgLabel) return &influxdb.Label{ // makes all pkg changes same as they are on the existing ID: id, @@ -732,24 +722,24 @@ func TestService(t *testing.T) { }, } } - stubExisting("label_1", 1) - stubExisting("label_3", 3) + stubExisting("label-1", 1) + stubExisting("label-3", 3) fakeLabelSVC := mock.NewLabelService() fakeLabelSVC.FindLabelsFn = func(ctx context.Context, f influxdb.LabelFilter) ([]*influxdb.Label, error) { - if f.Name != "label_1" && f.Name != "display name" { + if f.Name != "label-1" && f.Name != "display name" { return nil, nil } id := influxdb.ID(1) name := f.Name if f.Name == "display name" { id = 3 - name = "label_3" + name = "label-3" } return []*influxdb.Label{stubExisting(name, id)}, nil } fakeLabelSVC.CreateLabelFn = func(_ context.Context, l *influxdb.Label) error { - if l.Name == "label_2" { + if l.Name == "label-2" { l.ID = 2 } return nil @@ -771,8 +761,8 @@ func TestService(t *testing.T) { assert.Contains(t, sum.Labels, SummaryLabel{ ID: 1, OrgID: SafeID(orgID), - PkgName: "label_1", - Name: "label_1", + PkgName: "label-1", + Name: "label-1", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -785,8 +775,8 @@ func TestService(t *testing.T) { assert.Contains(t, sum.Labels, SummaryLabel{ ID: 2, OrgID: SafeID(orgID), - PkgName: "label_2", - Name: "label_2", + PkgName: "label-2", + Name: 
"label-2", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -824,14 +814,14 @@ func TestService(t *testing.T) { dash1 := sum.Dashboards[0] assert.NotZero(t, dash1.ID) assert.NotZero(t, dash1.OrgID) - assert.Equal(t, "dash_1", dash1.PkgName) + assert.Equal(t, "dash-1", dash1.PkgName) assert.Equal(t, "display name", dash1.Name) require.Len(t, dash1.Charts, 1) dash2 := sum.Dashboards[1] assert.NotZero(t, dash2.ID) - assert.Equal(t, "dash_2", dash2.PkgName) - assert.Equal(t, "dash_2", dash2.Name) + assert.Equal(t, "dash-2", dash2.PkgName) + assert.Equal(t, "dash-2", dash2.Name) require.Empty(t, dash2.Charts) }) }) @@ -1146,8 +1136,8 @@ func TestService(t *testing.T) { expectedNames := []string{ "basic endpoint name", - "http_bearer_auth_notification_endpoint", - "http_none_auth_notification_endpoint", + "http-bearer-auth-notification-endpoint", + "http-none-auth-notification-endpoint", "pager duty name", "slack name", } @@ -1181,7 +1171,7 @@ func TestService(t *testing.T) { }) t.Run("notification rules", func(t *testing.T) { - t.Run("successfuly creates", func(t *testing.T) { + t.Run("successfully creates", func(t *testing.T) { testfileRunner(t, "testdata/notification_rule.yml", func(t *testing.T, pkg *Pkg) { fakeEndpointSVC := mock.NewNotificationEndpointService() fakeEndpointSVC.CreateNotificationEndpointF = func(ctx context.Context, nr influxdb.NotificationEndpoint, userID influxdb.ID) error { @@ -1205,10 +1195,11 @@ func TestService(t *testing.T) { require.NoError(t, err) require.Len(t, sum.NotificationRules, 1) + assert.Equal(t, "rule-uuid", sum.NotificationRules[0].PkgName) assert.Equal(t, "rule_0", sum.NotificationRules[0].Name) assert.Equal(t, "desc_0", sum.NotificationRules[0].Description) assert.Equal(t, SafeID(1), sum.NotificationRules[0].EndpointID) - assert.Equal(t, "endpoint_0", sum.NotificationRules[0].EndpointPkgName) + assert.Equal(t, "endpoint-0", sum.NotificationRules[0].EndpointPkgName) assert.Equal(t, 
"slack", sum.NotificationRules[0].EndpointType) }) }) @@ -1281,13 +1272,13 @@ func TestService(t *testing.T) { require.Len(t, sum.Tasks, 2) assert.NotZero(t, sum.Tasks[0].ID) - assert.Equal(t, "task_1", sum.Tasks[0].PkgName) - assert.Equal(t, "task_1", sum.Tasks[0].Name) + assert.Equal(t, "task-1", sum.Tasks[0].PkgName) + assert.Equal(t, "task-1", sum.Tasks[0].Name) assert.Equal(t, "desc_1", sum.Tasks[0].Description) assert.NotZero(t, sum.Tasks[1].ID) - assert.Equal(t, "task_UUID", sum.Tasks[1].PkgName) - assert.Equal(t, "task_0", sum.Tasks[1].Name) + assert.Equal(t, "task-uuid", sum.Tasks[1].PkgName) + assert.Equal(t, "task-0", sum.Tasks[1].Name) assert.Equal(t, "desc_0", sum.Tasks[1].Description) }) }) @@ -1335,7 +1326,7 @@ func TestService(t *testing.T) { require.Len(t, sum.TelegrafConfigs, 2) assert.Equal(t, "display name", sum.TelegrafConfigs[0].TelegrafConfig.Name) assert.Equal(t, "desc", sum.TelegrafConfigs[0].TelegrafConfig.Description) - assert.Equal(t, "tele_2", sum.TelegrafConfigs[1].TelegrafConfig.Name) + assert.Equal(t, "tele-2", sum.TelegrafConfigs[1].TelegrafConfig.Name) }) }) @@ -1390,8 +1381,8 @@ func TestService(t *testing.T) { expected := sum.Variables[0] assert.True(t, expected.ID > 0 && expected.ID < 5) assert.Equal(t, SafeID(orgID), expected.OrgID) - assert.Equal(t, "var_const_3", expected.Name) - assert.Equal(t, "var_const_3 desc", expected.Description) + assert.Equal(t, "var-const-3", expected.Name) + assert.Equal(t, "var-const-3 desc", expected.Description) require.NotNil(t, expected.Arguments) assert.Equal(t, influxdb.VariableConstantValues{"first val"}, expected.Arguments.Values) @@ -1434,7 +1425,7 @@ func TestService(t *testing.T) { // makes all pkg changes same as they are on the existing ID: influxdb.ID(1), OrganizationID: orgID, - Name: pkg.mVariables["var_const_3"].Name(), + Name: pkg.mVariables["var-const-3"].Name(), Arguments: &influxdb.VariableArguments{ Type: "constant", Values: influxdb.VariableConstantValues{"first val"}, @@ 
-1463,7 +1454,7 @@ func TestService(t *testing.T) { require.Len(t, sum.Variables, 4) expected := sum.Variables[0] assert.Equal(t, SafeID(1), expected.ID) - assert.Equal(t, "var_const_3", expected.Name) + assert.Equal(t, "var-const-3", expected.Name) assert.Equal(t, 3, fakeVarSVC.CreateVariableCalls.Count()) // only called for last 3 labels }) diff --git a/pkger/testdata/bucket.json b/pkger/testdata/bucket.json index 4d87a13080..e00dac33dd 100644 --- a/pkger/testdata/bucket.json +++ b/pkger/testdata/bucket.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_11" + "name": "rucket-11" }, "spec": { "description": "bucket 1 description", @@ -19,7 +19,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_22" + "name": "rucket-22" }, "spec": { "name": "display name", diff --git a/pkger/testdata/bucket.yml b/pkger/testdata/bucket.yml index b05f4e1514..3ea9da8cef 100644 --- a/pkger/testdata/bucket.yml +++ b/pkger/testdata/bucket.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_11 + name: rucket-11 spec: description: bucket 1 description retentionRules: @@ -11,7 +11,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_22 + name: rucket-22 spec: name: display name description: bucket 2 description diff --git a/pkger/testdata/bucket_associates_label.json b/pkger/testdata/bucket_associates_label.json index dead1da125..1e8f097585 100644 --- a/pkger/testdata/bucket_associates_label.json +++ b/pkger/testdata/bucket_associates_label.json @@ -3,27 +3,27 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": "label-2" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_1" + "name": "rucket-1" }, 
"spec": { "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -32,13 +32,13 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_2" + "name": "rucket-2" }, "spec": { "associations": [ { "kind": "Label", - "name": "label_2" + "name": "label-2" } ] } @@ -47,17 +47,17 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_3" + "name": "rucket-3" }, "spec": { "associations": [ { "kind": "Label", - "name": "label_2" + "name": "label-2" }, { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } diff --git a/pkger/testdata/bucket_associates_label.yml b/pkger/testdata/bucket_associates_label.yml index c1cd73151f..4793735030 100644 --- a/pkger/testdata/bucket_associates_label.yml +++ b/pkger/testdata/bucket_associates_label.yml @@ -1,38 +1,38 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_2 + name: rucket-2 spec: associations: - kind: Label - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 diff --git a/pkger/testdata/bucket_associates_labels.jsonnet b/pkger/testdata/bucket_associates_labels.jsonnet index 050fb42908..043f4516d2 100644 --- a/pkger/testdata/bucket_associates_labels.jsonnet +++ b/pkger/testdata/bucket_associates_labels.jsonnet @@ -15,7 +15,7 @@ local LabelAssociations(names=[]) = [ for name in names ]; -local Bucket(name, desc, secs, associations=LabelAssociations(['label_1'])) = 
{ +local Bucket(name, desc, secs, associations=LabelAssociations(['label-1'])) = { apiVersion: 'influxdata.com/v2alpha1', kind: 'Bucket', metadata: { @@ -31,8 +31,8 @@ local Bucket(name, desc, secs, associations=LabelAssociations(['label_1'])) = { }; [ - Label("label_1",desc="desc_1", color='#eee888'), - Bucket(name="rucket_1", desc="desc_1", secs=10000), - Bucket("rucket_2", "desc_2", 20000), - Bucket("rucket_3", "desc_3", 30000), + Label("label-1",desc="desc_1", color='#eee888'), + Bucket(name="rucket-1", desc="desc_1", secs=10000), + Bucket("rucket-2", "desc-2", 20000), + Bucket("rucket-3", "desc_3", 30000), ] diff --git a/pkger/testdata/checks.json b/pkger/testdata/checks.json index 0d3f362d46..0b67dca5d5 100644 --- a/pkger/testdata/checks.json +++ b/pkger/testdata/checks.json @@ -3,14 +3,14 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "CheckThreshold", "metadata": { - "name": "check_0" + "name": "check-0" }, "spec": { "description": "desc_0", @@ -57,7 +57,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -66,7 +66,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "CheckDeadman", "metadata": { - "name": "check_1" + "name": "check-1" }, "spec": { "name": "display name", @@ -91,7 +91,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } diff --git a/pkger/testdata/checks.yml b/pkger/testdata/checks.yml index 6f2f67b414..5b5a2004b3 100644 --- a/pkger/testdata/checks.yml +++ b/pkger/testdata/checks.yml @@ -2,12 +2,12 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: description: desc_0 every: 1m @@ -44,12 +44,12 @@ spec: max: 35.0 associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: 
influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: name: display name description: desc_1 @@ -74,4 +74,4 @@ spec: timeSince: 90s associations: - kind: Label - name: label_1 + name: label-1 diff --git a/pkger/testdata/dashboard.json b/pkger/testdata/dashboard.json index a8b05557e7..9118290fce 100644 --- a/pkger/testdata/dashboard.json +++ b/pkger/testdata/dashboard.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "name": "display name", @@ -47,7 +47,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_2" + "name": "dash-2" }, "spec": { "description": "desc" diff --git a/pkger/testdata/dashboard.yml b/pkger/testdata/dashboard.yml index 5513d0d469..b213d75655 100644 --- a/pkger/testdata/dashboard.yml +++ b/pkger/testdata/dashboard.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: name: display name description: desc1 @@ -31,6 +31,6 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_2 + name: dash-2 spec: description: desc diff --git a/pkger/testdata/dashboard_associates_label.json b/pkger/testdata/dashboard_associates_label.json index b0b8352f09..40cc51ae8f 100644 --- a/pkger/testdata/dashboard_associates_label.json +++ b/pkger/testdata/dashboard_associates_label.json @@ -3,31 +3,31 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": "label-2" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" }, { "kind": "Label", - "name": "label_2" + "name": "label-2" } ] } 
diff --git a/pkger/testdata/dashboard_associates_label.yml b/pkger/testdata/dashboard_associates_label.yml index 369cffd0ef..63a8e3c493 100644 --- a/pkger/testdata/dashboard_associates_label.yml +++ b/pkger/testdata/dashboard_associates_label.yml @@ -1,20 +1,20 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 diff --git a/pkger/testdata/dashboard_gauge.json b/pkger/testdata/dashboard_gauge.json index e66156e317..f44f101679 100644 --- a/pkger/testdata/dashboard_gauge.json +++ b/pkger/testdata/dashboard_gauge.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "description": "desc1", diff --git a/pkger/testdata/dashboard_gauge.yml b/pkger/testdata/dashboard_gauge.yml index 9fc152032e..277f48ffda 100644 --- a/pkger/testdata/dashboard_gauge.yml +++ b/pkger/testdata/dashboard_gauge.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: diff --git a/pkger/testdata/dashboard_heatmap.json b/pkger/testdata/dashboard_heatmap.json index a4e66f9e6c..f6e9c9e9fb 100644 --- a/pkger/testdata/dashboard_heatmap.json +++ b/pkger/testdata/dashboard_heatmap.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dashboard w/ single heatmap chart" + "name": "dash-0" }, "spec": { "description": "a dashboard w/ heatmap chart", diff --git a/pkger/testdata/dashboard_heatmap.yml b/pkger/testdata/dashboard_heatmap.yml index cf04a46420..fc45c907a1 100644 --- a/pkger/testdata/dashboard_heatmap.yml +++ 
b/pkger/testdata/dashboard_heatmap.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single heatmap chart + name: dash-0 spec: description: a dashboard w/ heatmap chart charts: diff --git a/pkger/testdata/dashboard_histogram.json b/pkger/testdata/dashboard_histogram.json index 2e475c63a3..dd6dd0d007 100644 --- a/pkger/testdata/dashboard_histogram.json +++ b/pkger/testdata/dashboard_histogram.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dashboard w/ single histogram chart" + "name": "dash-0" }, "spec": { "description": "a dashboard w/ single histogram chart", diff --git a/pkger/testdata/dashboard_histogram.yml b/pkger/testdata/dashboard_histogram.yml index 28d539b57c..31d904a6b0 100644 --- a/pkger/testdata/dashboard_histogram.yml +++ b/pkger/testdata/dashboard_histogram.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single histogram chart + name: dash-0 spec: description: a dashboard w/ single histogram chart charts: diff --git a/pkger/testdata/dashboard_markdown.json b/pkger/testdata/dashboard_markdown.json index c2d1bca281..54613a52dc 100644 --- a/pkger/testdata/dashboard_markdown.json +++ b/pkger/testdata/dashboard_markdown.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dashboard w/ single markdown chart" + "name": "dash-0" }, "spec": { "description": "a dashboard w/ single markdown chart", diff --git a/pkger/testdata/dashboard_markdown.yml b/pkger/testdata/dashboard_markdown.yml index 44ec954fbd..afd568299f 100644 --- a/pkger/testdata/dashboard_markdown.yml +++ b/pkger/testdata/dashboard_markdown.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single markdown chart + name: dash-0 spec: description: a dashboard w/ single markdown chart charts: diff --git 
a/pkger/testdata/dashboard_scatter.json b/pkger/testdata/dashboard_scatter.json index b6eae9b64c..63fb14303a 100644 --- a/pkger/testdata/dashboard_scatter.json +++ b/pkger/testdata/dashboard_scatter.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dashboard w/ single scatter chart" + "name": "dash-0" }, "spec": { "description": "a dashboard w/ single scatter chart", diff --git a/pkger/testdata/dashboard_scatter.yml b/pkger/testdata/dashboard_scatter.yml index 6fa321b27e..173358b887 100644 --- a/pkger/testdata/dashboard_scatter.yml +++ b/pkger/testdata/dashboard_scatter.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: diff --git a/pkger/testdata/dashboard_single_stat_plus_line.json b/pkger/testdata/dashboard_single_stat_plus_line.json index d6a8ade1a1..d42133db08 100644 --- a/pkger/testdata/dashboard_single_stat_plus_line.json +++ b/pkger/testdata/dashboard_single_stat_plus_line.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "description": "desc1", diff --git a/pkger/testdata/dashboard_single_stat_plus_line.yml b/pkger/testdata/dashboard_single_stat_plus_line.yml index f8f11486b3..06836603ae 100644 --- a/pkger/testdata/dashboard_single_stat_plus_line.yml +++ b/pkger/testdata/dashboard_single_stat_plus_line.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: diff --git a/pkger/testdata/dashboard_table.json b/pkger/testdata/dashboard_table.json index 6c8e3a3059..ddb51dc533 100644 --- a/pkger/testdata/dashboard_table.json +++ b/pkger/testdata/dashboard_table.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": 
"dash_1" + "name": "dash-1" }, "spec": { "description": "desc1", diff --git a/pkger/testdata/dashboard_table.yml b/pkger/testdata/dashboard_table.yml index 1aea424ec2..e332146027 100644 --- a/pkger/testdata/dashboard_table.yml +++ b/pkger/testdata/dashboard_table.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: diff --git a/pkger/testdata/dashboard_xy.json b/pkger/testdata/dashboard_xy.json index 22f1f503a7..01e8510eec 100644 --- a/pkger/testdata/dashboard_xy.json +++ b/pkger/testdata/dashboard_xy.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "description": "desc1", diff --git a/pkger/testdata/dashboard_xy.yml b/pkger/testdata/dashboard_xy.yml index 430f0d8b0b..ee0b4571b8 100644 --- a/pkger/testdata/dashboard_xy.yml +++ b/pkger/testdata/dashboard_xy.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: diff --git a/pkger/testdata/label.json b/pkger/testdata/label.json index 55f472ac51..5038f7e815 100644 --- a/pkger/testdata/label.json +++ b/pkger/testdata/label.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": "label-2" }, "spec": { "color": "#000000", @@ -14,7 +14,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" }, "spec": { "color": "#FFFFFF", @@ -25,7 +25,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_3" + "name": "label-3" }, "spec": { "name": "display name", diff --git a/pkger/testdata/label.yml b/pkger/testdata/label.yml index 7f010080f8..80a98cb17f 100644 --- a/pkger/testdata/label.yml +++ b/pkger/testdata/label.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - 
name: label_2 + name: label-2 spec: color: "#000000" description: label 2 description @@ -9,7 +9,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 spec: color: "#FFFFFF" description: label 1 description @@ -17,7 +17,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_3 + name: label-3 spec: name: display name description: label 3 description diff --git a/pkger/testdata/notification_endpoint.json b/pkger/testdata/notification_endpoint.json index fb5441cf6a..d1cadee1f4 100644 --- a/pkger/testdata/notification_endpoint.json +++ b/pkger/testdata/notification_endpoint.json @@ -3,14 +3,14 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointSlack", "metadata": { - "name": "slack_notification_endpoint" + "name": "slack-notification-endpoint" }, "spec":{ "name": "slack name", @@ -21,7 +21,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -30,7 +30,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointHTTP", "metadata": { - "name": "http_none_auth_notification_endpoint" + "name": "http-none-auth-notification-endpoint" }, "spec":{ "description": "http none auth desc", @@ -41,7 +41,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -50,7 +50,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointHTTP", "metadata": { - "name": "http_basic_auth_notification_endpoint" + "name": "http-basic-auth-notification-endpoint" }, "spec":{ "name": "basic endpoint name", @@ -64,7 +64,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -73,7 +73,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointHTTP", "metadata": { - "name": "http_bearer_auth_notification_endpoint" + "name": 
"http-bearer-auth-notification-endpoint" }, "spec":{ "description": "http bearer auth desc", @@ -84,7 +84,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -93,7 +93,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointPagerDuty", "metadata": { - "name": "pager_duty_notification_endpoint" + "name": "pager-duty-notification-endpoint" }, "spec":{ "name": "pager duty name", @@ -104,7 +104,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } diff --git a/pkger/testdata/notification_endpoint.yml b/pkger/testdata/notification_endpoint.yml index 3d976fa734..c56f2f428b 100644 --- a/pkger/testdata/notification_endpoint.yml +++ b/pkger/testdata/notification_endpoint.yml @@ -1,12 +1,12 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: slack-notification-endpoint spec: name: slack name description: slack desc @@ -15,12 +15,12 @@ spec: token: tokenval associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: type: none description: http none auth desc @@ -29,12 +29,12 @@ spec: status: active associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_basic_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: name: basic endpoint name description: http basic auth desc @@ -46,12 +46,12 @@ spec: status: inactive associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_bearer_auth_notification_endpoint + name: 
http-bearer-auth-notification-endpoint spec: description: http bearer auth desc type: bearer @@ -60,12 +60,12 @@ spec: token: "secret token" associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: name: pager duty name description: pager duty desc @@ -74,4 +74,4 @@ spec: status: active associations: - kind: Label - name: label_1 + name: label-1 diff --git a/pkger/testdata/notification_endpoint_secrets.yml b/pkger/testdata/notification_endpoint_secrets.yml index df75a6c70b..301a910273 100644 --- a/pkger/testdata/notification_endpoint_secrets.yml +++ b/pkger/testdata/notification_endpoint_secrets.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: description: pager duty desc url: http://localhost:8080/orgs/7167eb6719fa34e5/alert-history diff --git a/pkger/testdata/notification_rule.json b/pkger/testdata/notification_rule.json index bc36cca269..94f97ad6fb 100644 --- a/pkger/testdata/notification_rule.json +++ b/pkger/testdata/notification_rule.json @@ -3,27 +3,27 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": "label-2" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationRule", "metadata": { - "name": "rule_UUID" + "name": "rule-uuid" }, "spec": { "name": "rule_0", "description": "desc_0", "channel": "#two-fer-one", - "endpointName": "endpoint_0", + "endpointName": "endpoint-0", "every": "10m", "offset": "30s", "messageTemplate": "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }", @@ -52,12 +52,12 @@ 
"associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" }, { "kind": "Label", - "name": "label_2" + "name": "label-2" } ] } @@ -66,7 +66,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointSlack", "metadata": { - "name": "endpoint_0" + "name": "endpoint-0" }, "spec": { "url": "https://hooks.slack.com/services/bip/piddy/boppidy" diff --git a/pkger/testdata/notification_rule.yml b/pkger/testdata/notification_rule.yml index 4803b29da5..bf494ab9b3 100644 --- a/pkger/testdata/notification_rule.yml +++ b/pkger/testdata/notification_rule.yml @@ -2,22 +2,22 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_UUID + name: rule-uuid spec: name: rule_0 description: desc_0 channel: "#two-fer-one" - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m offset: 30s messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" @@ -35,13 +35,13 @@ spec: operator: eQuAl associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: endpoint_0 + name: endpoint-0 spec: url: https://hooks.slack.com/services/bip/piddy/boppidy diff --git a/pkger/testdata/remote_bucket.json b/pkger/testdata/remote_bucket.json index ddc40c78a6..6fd9a25ba3 100644 --- a/pkger/testdata/remote_bucket.json +++ b/pkger/testdata/remote_bucket.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_11" + "name": "rucket-11" }, "spec": { "description": "bucket 1 description" diff --git a/pkger/testdata/tasks.json b/pkger/testdata/tasks.json index daf84fa13a..c9489c1d03 100644 --- 
a/pkger/testdata/tasks.json +++ b/pkger/testdata/tasks.json @@ -3,17 +3,17 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Task", "metadata": { - "name": "task_UUID" + "name": "task-uuid" }, "spec": { - "name": "task_0", + "name": "task-0", "description": "desc_0", "every": "10m", "offset": "15s", @@ -22,7 +22,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -31,7 +31,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Task", "metadata": { - "name": "task_1" + "name": "task-1" }, "spec": { "description": "desc_1", @@ -40,7 +40,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } diff --git a/pkger/testdata/tasks.yml b/pkger/testdata/tasks.yml index 5a65512970..460180c901 100644 --- a/pkger/testdata/tasks.yml +++ b/pkger/testdata/tasks.yml @@ -1,14 +1,14 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_UUID + name: task-uuid spec: - name: task_0 + name: task-0 description: desc_0 every: 10m offset: 15s @@ -22,12 +22,12 @@ spec: status: inactive associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_1 + name: task-1 spec: description: desc_1 cron: 15 * * * * @@ -40,4 +40,4 @@ spec: |> yield(name: "mean") associations: - kind: Label - name: label_1 + name: label-1 diff --git a/pkger/testdata/telegraf.json b/pkger/testdata/telegraf.json index 77a579c0e7..12d3012dda 100644 --- a/pkger/testdata/telegraf.json +++ b/pkger/testdata/telegraf.json @@ -3,21 +3,21 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": 
"label-2" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Telegraf", "metadata": { - "name": "first_tele_config" + "name": "first-tele-config" }, "spec": { "name": "display name", @@ -25,11 +25,11 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" }, { "kind": "Label", - "name": "label_2" + "name": "label-2" } ], "config": "# Configuration for telegraf agent\n [agent]\n ## Default data collection interval for all inputs\n interval = \"10s\"\n ## Rounds collection interval to 'interval'\n ## ie, if interval=\"10s\" then always collect on :00, :10, :20, etc.\n round_interval = true\n\n ## Telegraf will send metrics to outputs in batches of at most\n ## metric_batch_size metrics.\n ## This controls the size of writes that Telegraf sends to output plugins.\n metric_batch_size = 1000\n\n ## For failed writes, telegraf will cache metric_buffer_limit metrics for each\n ## output, and will flush this buffer on a successful write. Oldest metrics\n ## are dropped first when this buffer fills.\n ## This buffer only fills when writes fail to output plugin(s).\n metric_buffer_limit = 10000\n\n ## Collection jitter is used to jitter the collection by a random amount.\n ## Each plugin will sleep for a random time within jitter before collecting.\n ## This can be used to avoid many plugins querying things like sysfs at the\n ## same time, which can have a measurable effect on the system.\n collection_jitter = \"0s\"\n\n ## Default flushing interval for all outputs. Maximum flush_interval will be\n ## flush_interval + flush_jitter\n flush_interval = \"10s\"\n ## Jitter the flush interval by a random amount. 
This is primarily to avoid\n ## large write spikes for users running a large number of telegraf instances.\n ## ie, a jitter of 5s and interval 10s means flushes will happen every 10-15s\n flush_jitter = \"0s\"\n\n ## By default or when set to \"0s\", precision will be set to the same\n ## timestamp order as the collection interval, with the maximum being 1s.\n ## ie, when interval = \"10s\", precision will be \"1s\"\n ## when interval = \"250ms\", precision will be \"1ms\"\n ## Precision will NOT be used for service inputs. It is up to each individual\n ## service input to set the timestamp at the appropriate precision.\n ## Valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\".\n precision = \"\"\n\n ## Logging configuration:\n ## Run telegraf with debug log messages.\n debug = false\n ## Run telegraf in quiet mode (error log messages only).\n quiet = false\n ## Specify the log file name. The empty string means to log to stderr.\n logfile = \"\"\n\n ## Override default hostname, if empty use os.Hostname()\n hostname = \"\"\n ## If set to true, do no set the \"host\" tag in the telegraf agent.\n omit_hostname = false\n [[outputs.influxdb_v2]]\n ## The URLs of the InfluxDB cluster nodes.\n ##\n ## Multiple URLs can be specified for a single cluster, only ONE of the\n ## urls will be written to each interval.\n ## urls exp: http://127.0.0.1:9999\n urls = [\"http://localhost:9999\"]\n\n ## Token for authentication.\n token = \"$INFLUX_TOKEN\"\n\n ## Organization is the name of the organization you wish to write to; must exist.\n organization = \"rg\"\n\n ## Destination bucket to write into.\n bucket = \"rucket_3\"\n [[inputs.cpu]]\n ## Whether to report per-cpu stats or not\n percpu = true\n ## Whether to report total system cpu stats or not\n totalcpu = true\n ## If true, collect raw CPU time metrics.\n collect_cpu_time = false\n ## If true, compute and report the sum of all non-idle CPU states.\n report_active = false\n [[inputs.disk]]\n ## By default 
stats will be gathered for all mount points.\n ## Set mount_points will restrict the stats to only the specified mount points.\n # mount_points = [\"/\"]\n ## Ignore mount points by filesystem type.\n ignore_fs = [\"tmpfs\", \"devtmpfs\", \"devfs\", \"overlay\", \"aufs\", \"squashfs\"]\n [[inputs.diskio]]\n [[inputs.mem]]\n [[inputs.net]]\n [[inputs.processes]]\n [[inputs.swap]]\n [[inputs.system]]" @@ -39,7 +39,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Telegraf", "metadata": { - "name": "tele_2" + "name": "tele-2" }, "spec": { "config": "# Configuration for telegraf agent\n [agent]\n ## Default data collection interval for all inputs\n interval = \"10s\"\n ## Rounds collection interval to 'interval'\n ## ie, if interval=\"10s\" then always collect on :00, :10, :20, etc.\n round_interval = true\n\n ## Telegraf will send metrics to outputs in batches of at most\n ## metric_batch_size metrics.\n ## This controls the size of writes that Telegraf sends to output plugins.\n metric_batch_size = 1000\n\n ## For failed writes, telegraf will cache metric_buffer_limit metrics for each\n ## output, and will flush this buffer on a successful write. Oldest metrics\n ## are dropped first when this buffer fills.\n ## This buffer only fills when writes fail to output plugin(s).\n metric_buffer_limit = 10000\n\n ## Collection jitter is used to jitter the collection by a random amount.\n ## Each plugin will sleep for a random time within jitter before collecting.\n ## This can be used to avoid many plugins querying things like sysfs at the\n ## same time, which can have a measurable effect on the system.\n collection_jitter = \"0s\"\n\n ## Default flushing interval for all outputs. Maximum flush_interval will be\n ## flush_interval + flush_jitter\n flush_interval = \"10s\"\n ## Jitter the flush interval by a random amount. 
This is primarily to avoid\n ## large write spikes for users running a large number of telegraf instances.\n ## ie, a jitter of 5s and interval 10s means flushes will happen every 10-15s\n flush_jitter = \"0s\"\n\n ## By default or when set to \"0s\", precision will be set to the same\n ## timestamp order as the collection interval, with the maximum being 1s.\n ## ie, when interval = \"10s\", precision will be \"1s\"\n ## when interval = \"250ms\", precision will be \"1ms\"\n ## Precision will NOT be used for service inputs. It is up to each individual\n ## service input to set the timestamp at the appropriate precision.\n ## Valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\".\n precision = \"\"\n\n ## Logging configuration:\n ## Run telegraf with debug log messages.\n debug = false\n ## Run telegraf in quiet mode (error log messages only).\n quiet = false\n ## Specify the log file name. The empty string means to log to stderr.\n logfile = \"\"\n\n ## Override default hostname, if empty use os.Hostname()\n hostname = \"\"\n ## If set to true, do no set the \"host\" tag in the telegraf agent.\n omit_hostname = false\n [[outputs.influxdb_v2]]\n ## The URLs of the InfluxDB cluster nodes.\n ##\n ## Multiple URLs can be specified for a single cluster, only ONE of the\n ## urls will be written to each interval.\n ## urls exp: http://127.0.0.1:9999\n urls = [\"http://localhost:9999\"]\n\n ## Token for authentication.\n token = \"$INFLUX_TOKEN\"\n\n ## Organization is the name of the organization you wish to write to; must exist.\n organization = \"rg\"\n\n ## Destination bucket to write into.\n bucket = \"rucket_3\"\n [[inputs.cpu]]\n ## Whether to report per-cpu stats or not\n percpu = true\n ## Whether to report total system cpu stats or not\n totalcpu = true\n ## If true, collect raw CPU time metrics.\n collect_cpu_time = false\n ## If true, compute and report the sum of all non-idle CPU states.\n report_active = false\n [[inputs.disk]]\n ## By default 
stats will be gathered for all mount points.\n ## Set mount_points will restrict the stats to only the specified mount points.\n # mount_points = [\"/\"]\n ## Ignore mount points by filesystem type.\n ignore_fs = [\"tmpfs\", \"devtmpfs\", \"devfs\", \"overlay\", \"aufs\", \"squashfs\"]\n [[inputs.diskio]]\n [[inputs.mem]]\n [[inputs.net]]\n [[inputs.processes]]\n [[inputs.swap]]\n [[inputs.system]]" diff --git a/pkger/testdata/telegraf.yml b/pkger/testdata/telegraf.yml index 6eb56e28af..84543f3a50 100644 --- a/pkger/testdata/telegraf.yml +++ b/pkger/testdata/telegraf.yml @@ -1,25 +1,25 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: Telegraf metadata: - name: first_tele_config + name: first-tele-config spec: name: display name description: desc associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 config: | # Configuration for telegraf agent [agent] @@ -118,7 +118,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Telegraf metadata: - name: tele_2 + name: tele-2 spec: config: | # Configuration for telegraf agent diff --git a/pkger/testdata/variable_associates_label.yml b/pkger/testdata/variable_associates_label.yml index 283e97114a..e707eeca87 100644 --- a/pkger/testdata/variable_associates_label.yml +++ b/pkger/testdata/variable_associates_label.yml @@ -1,15 +1,15 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_1 + name: var-1 spec: type: constant values: [first val] associations: - kind: Label - name: label_1 + name: label-1 diff --git a/pkger/testdata/variables.json b/pkger/testdata/variables.json index 8d736f73ad..0d8038b417 100644 --- a/pkger/testdata/variables.json +++ b/pkger/testdata/variables.json @@ 
-3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Variable", "metadata": { - "name": "var_query_1" + "name": "var-query-1" }, "spec": { "name": "query var", @@ -17,10 +17,10 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Variable", "metadata": { - "name": "var_query_2" + "name": "var-query-2" }, "spec": { - "description": "var_query_2 desc", + "description": "var-query-2 desc", "type": "query", "query": "an influxql query of sorts", "language": "influxql" @@ -30,10 +30,10 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Variable", "metadata": { - "name": "var_const_3" + "name": "var-const-3" }, "spec": { - "description": "var_const_3 desc", + "description": "var-const-3 desc", "type": "constant", "values": ["first val"] } @@ -42,10 +42,10 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Variable", "metadata": { - "name": "var_map_4" + "name": "var-map-4" }, "spec": { - "description": "var_map_4 desc", + "description": "var-map-4 desc", "type": "map", "values": { "k1": "v1" diff --git a/pkger/testdata/variables.yml b/pkger/testdata/variables.yml index ff60038826..6d4896d662 100644 --- a/pkger/testdata/variables.yml +++ b/pkger/testdata/variables.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_1 + name: var-query-1 spec: name: query var description: query var desc @@ -13,9 +13,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -23,9 +23,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_const_3 + name: var-const-3 spec: - description: var_const_3 desc + description: var-const-3 desc type: constant values: - first val @@ -33,9 +33,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_map_4 + name: var-map-4 spec: - description: 
var_map_4 desc + description: var-map-4 desc type: map values: k1: v1 From 9bc7a37c0b3c48a66ff2b41474f531ad38878b11 Mon Sep 17 00:00:00 2001 From: Deniz Kusefoglu Date: Mon, 4 May 2020 14:50:03 -0700 Subject: [PATCH 8/9] feat(demodata): Optimize demodata lookup (#17947) * feat(demodata): Optimize demodata lookup * feat(demodata): style demodata dropdown --- ui/src/buckets/components/DemoDataDropdown.scss | 10 ++++++++++ ui/src/buckets/components/DemoDataDropdown.tsx | 16 ++++++---------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/ui/src/buckets/components/DemoDataDropdown.scss b/ui/src/buckets/components/DemoDataDropdown.scss index fa40103931..d33872a2ba 100644 --- a/ui/src/buckets/components/DemoDataDropdown.scss +++ b/ui/src/buckets/components/DemoDataDropdown.scss @@ -1,3 +1,7 @@ +.demodata-dropdown { + margin-right: $cf-marg-b; +} + .demodata-dropdown--item-contents { display: inline-flex; align-items: center; @@ -8,6 +12,12 @@ opacity: 0; } +.demodata-dropdown--item, +.demodata-dropdown--item__added { + padding-left: 6px; + padding-right: 6px; +} + .demodata-dropdown--item__added, .demodata-dropdown--item__added:hover { .demodata-dropdown--item-icon { diff --git a/ui/src/buckets/components/DemoDataDropdown.tsx b/ui/src/buckets/components/DemoDataDropdown.tsx index a0abcef929..163c845506 100644 --- a/ui/src/buckets/components/DemoDataDropdown.tsx +++ b/ui/src/buckets/components/DemoDataDropdown.tsx @@ -3,9 +3,6 @@ import React, {FC, useEffect} from 'react' import {connect} from 'react-redux' import {get, sortBy} from 'lodash' -// Utils -import {getAll} from 'src/resources/selectors' - // Actions import { getDemoDataBucketMembership as getDemoDataBucketMembershipAction, @@ -19,7 +16,7 @@ import {ComponentColor, Dropdown, Icon, IconFont} from '@influxdata/clockface' import {AppState, Bucket, ResourceType} from 'src/types' interface StateProps { - ownBuckets: Bucket[] + ownBucketsByID: {[id: string]: Bucket} demoDataBuckets: Bucket[] } @@ 
-31,7 +28,7 @@ interface DispatchProps { type Props = DispatchProps & StateProps const DemoDataDropdown: FC = ({ - ownBuckets, + ownBucketsByID, demoDataBuckets, getDemoDataBucketMembership, getDemoDataBuckets, @@ -44,14 +41,12 @@ const DemoDataDropdown: FC = ({ return null } - const ownBucketNames = ownBuckets.map(o => o.name.toLocaleLowerCase()) - const sortedBuckets = sortBy(demoDataBuckets, d => { return d.name.toLocaleLowerCase() }) const dropdownItems = sortedBuckets.map(b => { - if (ownBucketNames.includes(b.name.toLocaleLowerCase())) { + if (ownBucketsByID[b.id]) { return ( = ({ return ( ( = ({ } const mstp = (state: AppState): StateProps => ({ - ownBuckets: getAll(state, ResourceType.Buckets), + ownBucketsByID: state.resources[ResourceType.Buckets].byID, demoDataBuckets: get(state, 'cloud.demoData.buckets', []) as Bucket[], }) From 02c8e02c58af97daafec1b582409c4af332e43a1 Mon Sep 17 00:00:00 2001 From: jlapacik Date: Tue, 5 May 2020 09:46:19 -0700 Subject: [PATCH 9/9] chore: update flux to latest revision --- go.mod | 4 ++-- go.sum | 12 ++++++++++-- query/promql/internal/promqltests/go.mod | 4 +--- query/promql/internal/promqltests/go.sum | 12 ++++++++---- 4 files changed, 21 insertions(+), 11 deletions(-) diff --git a/go.mod b/go.mod index 65d11142ad..2197a66de9 100644 --- a/go.mod +++ b/go.mod @@ -42,7 +42,7 @@ require ( github.com/hashicorp/raft v1.0.0 // indirect github.com/hashicorp/vault/api v1.0.2 github.com/influxdata/cron v0.0.0-20191203200038-ded12750aac6 - github.com/influxdata/flux v0.67.1-0.20200429154143-b42d4177e03e + github.com/influxdata/flux v0.67.1-0.20200504203345-528f07c316a9 github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69 github.com/influxdata/influxql v0.0.0-20180925231337-1cbfca8e56b6 github.com/influxdata/pkg-config v0.2.0 @@ -94,7 +94,7 @@ require ( github.com/yudai/pp v2.0.1+incompatible // indirect go.uber.org/multierr v1.4.0 go.uber.org/zap v1.9.1 - golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5 + 
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59 golang.org/x/net v0.0.0-20190620200207-3b0461eec859 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 golang.org/x/sync v0.0.0-20190423024810-112230192c58 diff --git a/go.sum b/go.sum index f9b1dfcef1..d46e397078 100644 --- a/go.sum +++ b/go.sum @@ -182,6 +182,8 @@ github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OI github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.0.0 h1:b4Gk+7WdP/d3HZH8EJsZpvV7EtDOgaZLtnaNGIu1adA= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= @@ -244,8 +246,8 @@ github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NH github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/influxdata/cron v0.0.0-20191203200038-ded12750aac6 h1:OtjKkeWDjUbyMi82C7XXy7Tvm2LXMwiBBXyFIGNPaGA= github.com/influxdata/cron v0.0.0-20191203200038-ded12750aac6/go.mod h1:XabtPPW2qsCg0tl+kjaPU+cFS+CjQXEXbT1VJvHT4og= -github.com/influxdata/flux v0.67.1-0.20200429154143-b42d4177e03e h1:kcsFqnxlImwDTTqNPRJeePWfaCFteO0/gppYk5L5x+k= -github.com/influxdata/flux v0.67.1-0.20200429154143-b42d4177e03e/go.mod h1:4PVm7oUSOMJgXbEsEZgHmWZRjIPtB6gF0F9et2i3+3w= +github.com/influxdata/flux v0.67.1-0.20200504203345-528f07c316a9 h1:G7W9rZ8BIcZs3UfOis94NWEpHvsfxHeOewJt5HBJj+Y= +github.com/influxdata/flux v0.67.1-0.20200504203345-528f07c316a9/go.mod h1:AdzL5HnjdFlcBiNz0wE69rSTGRX9CQHqtJUF8ptiDeY= 
github.com/influxdata/goreleaser v0.97.0-influx h1:jT5OrcW7WfS0e2QxfwmTBjhLvpIC9CDLRhNgZJyhj8s= github.com/influxdata/goreleaser v0.97.0-influx/go.mod h1:MnjA0e0Uq6ISqjG1WxxMAl+3VS1QYjILSWVnMYDxasE= github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69 h1:WQsmW0fXO4ZE/lFGIE84G6rIV5SJN3P3sjIXAP1a8eU= @@ -381,6 +383,8 @@ github.com/philhofer/fwd v1.0.0 h1:UbZqGr5Y38ApvM/V/jEljVxwocdweyH+vmYvRPBnbqQ= github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4 h1:49lOXmGaUpV9Fz3gd7TFZY106KVlPVa5jcYD1gaQf98= +github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= github.com/pkg/errors v0.8.0 h1:WdK/asTD0HN+q6hsWO3/vpuAkAr+tw6aNJNDFFf0+qw= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= @@ -427,6 +431,8 @@ github.com/smartystreets/assertions v1.0.1/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUr github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337 h1:WN9BUFbdyOsSH/XohnWpXOlq9NBD5sGAB2FciQMUEe8= github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/snowflakedb/gosnowflake v1.3.4 h1:Gyoi6g4lMHsilEwW9+KV+bgYkJTgf5pVfvL7Utus920= +github.com/snowflakedb/gosnowflake v1.3.4/go.mod h1:NsRq2QeiMUuoNUJhp5Q6xGC4uBrsS9g6LwZVEkTWgsE= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod 
h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= @@ -503,6 +509,8 @@ golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529 h1:iMGN4xG0cnqj3t+zOM8wUB golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5 h1:58fnuSXlxZmFdJyvtTFVmVhcMLU6v5fEb/ok4wyqtNU= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59 h1:3zb4D3T4G8jdExgVU/95+vQXfpEPiMdCaZgmGVxjNHM= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4 h1:c2HOrn5iMezYjSlGPncknSEr/8x5LELb/ilJbXi9DEA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= diff --git a/query/promql/internal/promqltests/go.mod b/query/promql/internal/promqltests/go.mod index 4c1795cfca..b7c67eb8f4 100644 --- a/query/promql/internal/promqltests/go.mod +++ b/query/promql/internal/promqltests/go.mod @@ -11,9 +11,8 @@ require ( github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31 // indirect github.com/go-kit/kit v0.10.0 // indirect github.com/google/go-cmp v0.4.0 - github.com/google/uuid v1.1.1 // indirect github.com/hashicorp/go-rootcerts v1.0.2 // indirect - github.com/influxdata/flux v0.67.1-0.20200429154143-b42d4177e03e + github.com/influxdata/flux v0.67.1-0.20200504203345-528f07c316a9 github.com/influxdata/influxdb/v2 v2.0.0-00010101000000-000000000000 github.com/influxdata/influxql v1.0.1 // indirect github.com/influxdata/promql/v2 v2.12.0 @@ -28,7 +27,6 @@ require ( github.com/spf13/afero v1.2.2 // indirect github.com/spf13/pflag v1.0.5 // indirect 
github.com/willf/bitset v1.1.10 // indirect - golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413 // indirect golang.org/x/net v0.0.0-20200301022130-244492dfa37a // indirect golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d // indirect golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 // indirect diff --git a/query/promql/internal/promqltests/go.sum b/query/promql/internal/promqltests/go.sum index 014258fb52..879003d66c 100644 --- a/query/promql/internal/promqltests/go.sum +++ b/query/promql/internal/promqltests/go.sum @@ -321,8 +321,8 @@ github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NH github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/influxdata/cron v0.0.0-20191203200038-ded12750aac6 h1:OtjKkeWDjUbyMi82C7XXy7Tvm2LXMwiBBXyFIGNPaGA= github.com/influxdata/cron v0.0.0-20191203200038-ded12750aac6/go.mod h1:XabtPPW2qsCg0tl+kjaPU+cFS+CjQXEXbT1VJvHT4og= -github.com/influxdata/flux v0.67.1-0.20200429154143-b42d4177e03e h1:kcsFqnxlImwDTTqNPRJeePWfaCFteO0/gppYk5L5x+k= -github.com/influxdata/flux v0.67.1-0.20200429154143-b42d4177e03e/go.mod h1:4PVm7oUSOMJgXbEsEZgHmWZRjIPtB6gF0F9et2i3+3w= +github.com/influxdata/flux v0.67.1-0.20200504203345-528f07c316a9 h1:G7W9rZ8BIcZs3UfOis94NWEpHvsfxHeOewJt5HBJj+Y= +github.com/influxdata/flux v0.67.1-0.20200504203345-528f07c316a9/go.mod h1:AdzL5HnjdFlcBiNz0wE69rSTGRX9CQHqtJUF8ptiDeY= github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69 h1:WQsmW0fXO4ZE/lFGIE84G6rIV5SJN3P3sjIXAP1a8eU= github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69/go.mod h1:pwymjR6SrP3gD3pRj9RJwdl1j5s3doEEV8gS4X9qSzA= github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= @@ -503,6 +503,8 @@ github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod 
h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4 h1:49lOXmGaUpV9Fz3gd7TFZY106KVlPVa5jcYD1gaQf98= +github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -579,6 +581,8 @@ github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337 h1:WN9BUFbd github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/snowflakedb/gosnowflake v1.3.4 h1:Gyoi6g4lMHsilEwW9+KV+bgYkJTgf5pVfvL7Utus920= +github.com/snowflakedb/gosnowflake v1.3.4/go.mod h1:NsRq2QeiMUuoNUJhp5Q6xGC4uBrsS9g6LwZVEkTWgsE= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ= @@ -682,8 +686,8 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5 h1:58fnuSXlxZmFdJyvtTFVmV golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod 
h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413 h1:ULYEB3JvPRE/IfO+9uO7vKV/xzVTO7XPAwm8xbf4w2g= -golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59 h1:3zb4D3T4G8jdExgVU/95+vQXfpEPiMdCaZgmGVxjNHM= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522 h1:OeRHuibLsmZkFj773W4LcfAGsSxJgfPONhr8cmO+eLA=