diff --git a/CHANGELOG.md b/CHANGELOG.md index bb8302fc97..c5b38e7977 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,12 @@ -## v2.0.0-alpha.7 [unreleased] +## v2.0.0-alpha.8 [unreleased] + +### Features + +### Bug Fixes + +### UI Improvements + +## v2.0.0-alpha.7 [2019-03-28] ### Features @@ -15,6 +23,7 @@ 1. [12888](https://github.com/influxdata/influxdb/pull/12888): Add the ability to delete a template 1. [12901](https://github.com/influxdata/influxdb/pull/12901): Save user preference for variable control bar visibility and default to visible 1. [12910](https://github.com/influxdata/influxdb/pull/12910): Add the ability to clone a template +1. [12958](https://github.com/influxdata/influxdb/pull/12958): Add the ability to import a variable ### Bug Fixes @@ -25,11 +34,14 @@ 1. [12793](https://github.com/influxdata/influxdb/pull/12793): Fix task creation error when switching schedule types. 1. [12805](https://github.com/influxdata/influxdb/pull/12805): Fix hidden horizonal scrollbars in flux raw data view 1. [12827](https://github.com/influxdata/influxdb/pull/12827): Fix screen tearing bug in Raw Data View +1. [12961](https://github.com/influxdata/influxdb/pull/12961): Fix scroll clipping in graph legends & dropdown menus +1. [12959](https://github.com/influxdata/influxdb/pull/12959): Fix routing loop ### UI Improvements 1. [12782](https://github.com/influxdata/influxdb/pull/12782): Move bucket selection in the query builder to the first card in the list 1. [12850](https://github.com/influxdata/influxdb/pull/12850): Ensure editor is automatically focused in note editor +1. [12915](https://github.com/influxdata/influxdb/pull/12915): Add ability to edit a template's name. ## v2.0.0-alpha.6 [2019-03-15] diff --git a/auth.go b/auth.go index fea4cfb3b5..3354a59c6d 100644 --- a/auth.go +++ b/auth.go @@ -27,6 +27,12 @@ type Authorization struct { Permissions []Permission `json:"permissions"` } +// AuthorizationUpdate is the authorization update request. +type AuthorizationUpdate struct { + Status *Status `json:"status,omitempty"` + Description *string `json:"description,omitempty"` +} + // Valid ensures that the authorization is valid. func (a *Authorization) Valid() error { for _, p := range a.Permissions { @@ -78,7 +84,7 @@ const ( OpFindAuthorizationByToken = "FindAuthorizationByToken" OpFindAuthorizations = "FindAuthorizations" OpCreateAuthorization = "CreateAuthorization" - OpSetAuthorizationStatus = "SetAuthorizationStatus" + OpUpdateAuthorization = "UpdateAuthorization" OpDeleteAuthorization = "DeleteAuthorization" ) @@ -97,9 +103,8 @@ type AuthorizationService interface { // Creates a new authorization and sets a.Token and a.UserID with the new identifier. CreateAuthorization(ctx context.Context, a *Authorization) error - // SetAuthorizationStatus updates the status of the authorization. Useful - // for setting an authorization to inactive or active. - SetAuthorizationStatus(ctx context.Context, id ID, status Status) error + // UpdateAuthorization updates the status and description if available. + UpdateAuthorization(ctx context.Context, id ID, udp *AuthorizationUpdate) error // Removes a authorization by token. 
DeleteAuthorization(ctx context.Context, id ID) error diff --git a/authorizer/auth.go b/authorizer/auth.go index 87e14f1ecb..f842e2d9fd 100644 --- a/authorizer/auth.go +++ b/authorizer/auth.go @@ -143,8 +143,8 @@ func VerifyPermissions(ctx context.Context, ps []influxdb.Permission) error { return nil } -// SetAuthorizationStatus checks to see if the authorizer on context has write access to the authorization provided. -func (s *AuthorizationService) SetAuthorizationStatus(ctx context.Context, id influxdb.ID, st influxdb.Status) error { +// UpdateAuthorization checks to see if the authorizer on context has write access to the authorization provided. +func (s *AuthorizationService) UpdateAuthorization(ctx context.Context, id influxdb.ID, upd *influxdb.AuthorizationUpdate) error { a, err := s.s.FindAuthorizationByID(ctx, id) if err != nil { return err @@ -154,7 +154,7 @@ func (s *AuthorizationService) SetAuthorizationStatus(ctx context.Context, id in return err } - return s.s.SetAuthorizationStatus(ctx, id, st) + return s.s.UpdateAuthorization(ctx, id, upd) } // DeleteAuthorization checks to see if the authorizer on context has write access to the authorization provided. diff --git a/authorizer/auth_test.go b/authorizer/auth_test.go index 72d7c7e1d3..8d6c007e32 100644 --- a/authorizer/auth_test.go +++ b/authorizer/auth_test.go @@ -244,7 +244,7 @@ func TestAuthorizationService_WriteAuthorization(t *testing.T) { m.DeleteAuthorizationFn = func(ctx context.Context, id influxdb.ID) error { return nil } - m.SetAuthorizationStatusFn = func(ctx context.Context, id influxdb.ID, s influxdb.Status) error { + m.UpdateAuthorizationFn = func(ctx context.Context, id influxdb.ID, upd *influxdb.AuthorizationUpdate) error { return nil } s := authorizer.NewAuthorizationService(m) @@ -257,8 +257,8 @@ func TestAuthorizationService_WriteAuthorization(t *testing.T) { influxdbtesting.ErrorsEqual(t, err, tt.wants.err) }) - t.Run("set authorization status", func(t *testing.T) { - err := s.SetAuthorizationStatus(ctx, 10, influxdb.Active) + t.Run("update authorization", func(t *testing.T) { + err := s.UpdateAuthorization(ctx, 10, &influxdb.AuthorizationUpdate{Status: influxdb.Active.Ptr()}) influxdbtesting.ErrorsEqual(t, err, tt.wants.err) }) diff --git a/bolt/authorization.go b/bolt/authorization.go index 63ef4ada01..938ef02192 100644 --- a/bolt/authorization.go +++ b/bolt/authorization.go @@ -4,7 +4,7 @@ import ( "context" "encoding/json" - "github.com/coreos/bbolt" + bolt "github.com/coreos/bbolt" platform "github.com/influxdata/influxdb" ) @@ -382,27 +382,32 @@ func (c *Client) deleteAuthorization(ctx context.Context, tx *bolt.Tx, id platfo return nil } -// SetAuthorizationStatus updates the status of the authorization. Useful -// for setting an authorization to inactive or active. -func (c *Client) SetAuthorizationStatus(ctx context.Context, id platform.ID, status platform.Status) error { +// UpdateAuthorization updates the status and description if available. 
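// A minimal, hypothetical caller-side sketch of the AuthorizationUpdate /
// UpdateAuthorization API introduced by this diff (the sketch itself is not part
// of the diff). `svc` may be any AuthorizationService implementation; the helper
// name and description text are made up. Nil fields are left unchanged.
func deactivateTokenSketch(ctx context.Context, svc platform.AuthorizationService, id platform.ID) error {
	desc := "deactivated pending rotation"
	return svc.UpdateAuthorization(ctx, id, &platform.AuthorizationUpdate{
		Status:      platform.Inactive.Ptr(), // flip the token to inactive
		Description: &desc,                   // optionally update the description as well
	})
}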
+func (c *Client) UpdateAuthorization(ctx context.Context, id platform.ID, upd *platform.AuthorizationUpdate) error { return c.db.Update(func(tx *bolt.Tx) error { - if pe := c.updateAuthorization(ctx, tx, id, status); pe != nil { + if pe := c.updateAuthorization(ctx, tx, id, upd); pe != nil { return &platform.Error{ Err: pe, - Op: platform.OpSetAuthorizationStatus, + Op: platform.OpUpdateAuthorization, } } return nil }) } -func (c *Client) updateAuthorization(ctx context.Context, tx *bolt.Tx, id platform.ID, status platform.Status) *platform.Error { +func (c *Client) updateAuthorization(ctx context.Context, tx *bolt.Tx, id platform.ID, upd *platform.AuthorizationUpdate) *platform.Error { a, pe := c.findAuthorizationByID(ctx, tx, id) if pe != nil { return pe } - a.Status = status + if upd.Status != nil { + a.Status = *upd.Status + } + if upd.Description != nil { + a.Description = *upd.Description + } + b, err := encodeAuthorization(a) if err != nil { return &platform.Error{ diff --git a/bolt/bbolt.go b/bolt/bbolt.go index 33ff2a5686..a172aae87c 100644 --- a/bolt/bbolt.go +++ b/bolt/bbolt.go @@ -133,11 +133,6 @@ func (c *Client) initialize(ctx context.Context) error { return err } - // Always create Views bucket. - if err := c.initializeViews(ctx, tx); err != nil { - return err - } - // Always create Variables bucket. if err := c.initializeVariables(ctx, tx); err != nil { return err diff --git a/bolt/dashboard_test.go b/bolt/dashboard_test.go index 3e6b84983b..fcf0eb6e2c 100644 --- a/bolt/dashboard_test.go +++ b/bolt/dashboard_test.go @@ -28,11 +28,6 @@ func initDashboardService(f platformtesting.DashboardFields, t *testing.T) (plat t.Fatalf("failed to populate dashboards") } } - for _, b := range f.Views { - if err := c.PutView(ctx, b); err != nil { - t.Fatalf("failed to populate views") - } - } return c, bolt.OpPrefix, func() { defer closeFn() for _, b := range f.Dashboards { @@ -40,11 +35,6 @@ func initDashboardService(f platformtesting.DashboardFields, t *testing.T) (plat t.Logf("failed to remove dashboard: %v", err) } } - for _, b := range f.Views { - if err := c.DeleteView(ctx, b.ID); err != nil { - t.Logf("failed to remove view: %v", err) - } - } } } diff --git a/bolt/view.go b/bolt/view.go deleted file mode 100644 index c3d540ade9..0000000000 --- a/bolt/view.go +++ /dev/null @@ -1,320 +0,0 @@ -package bolt - -import ( - "context" - "encoding/json" - "sync" - - bolt "github.com/coreos/bbolt" - platform "github.com/influxdata/influxdb" -) - -var ( - viewBucket = []byte("viewsv2") -) - -func (c *Client) initializeViews(ctx context.Context, tx *bolt.Tx) error { - if _, err := tx.CreateBucketIfNotExists([]byte(viewBucket)); err != nil { - return err - } - return nil -} - -// FindViewByID retrieves a view by id. 
-func (c *Client) FindViewByID(ctx context.Context, id platform.ID) (*platform.View, error) { - var d *platform.View - - err := c.db.View(func(tx *bolt.Tx) error { - dash, err := c.findViewByID(ctx, tx, id) - if err != nil { - return &platform.Error{ - Err: err, - Op: getOp(platform.OpFindViewByID), - } - } - d = dash - return nil - }) - - return d, err -} - -func (c *Client) findViewByID(ctx context.Context, tx *bolt.Tx, id platform.ID) (*platform.View, *platform.Error) { - var d platform.View - - encodedID, err := id.Encode() - if err != nil { - return nil, &platform.Error{ - Err: err, - } - } - - v := tx.Bucket(viewBucket).Get(encodedID) - if len(v) == 0 { - return nil, &platform.Error{ - Code: platform.ENotFound, - Msg: platform.ErrViewNotFound, - } - } - - if err := json.Unmarshal(v, &d); err != nil { - return nil, &platform.Error{ - Err: err, - } - } - - return &d, nil -} - -// FindView retrieves a view using an arbitrary view filter. -func (c *Client) FindView(ctx context.Context, filter platform.ViewFilter) (*platform.View, error) { - if filter.ID != nil { - return c.FindViewByID(ctx, *filter.ID) - } - - var d *platform.View - err := c.db.View(func(tx *bolt.Tx) error { - filterFn := filterViewsFn(filter) - return c.forEachView(ctx, tx, func(dash *platform.View) bool { - if filterFn(dash) { - d = dash - return false - } - return true - }) - }) - - if err != nil { - return nil, &platform.Error{ - Err: err, - } - } - - if d == nil { - return nil, &platform.Error{ - Code: platform.ENotFound, - Msg: platform.ErrViewNotFound, - } - } - - return d, nil -} - -func filterViewsFn(filter platform.ViewFilter) func(v *platform.View) bool { - if filter.ID != nil { - return func(v *platform.View) bool { - return v.ID == *filter.ID - } - } - - if len(filter.Types) > 0 { - var sm sync.Map - for _, t := range filter.Types { - sm.Store(t, true) - } - return func(v *platform.View) bool { - _, ok := sm.Load(v.Properties.GetType()) - return ok - } - } - - return func(v *platform.View) bool { return true } -} - -// FindViews retrives all views that match an arbitrary view filter. -func (c *Client) FindViews(ctx context.Context, filter platform.ViewFilter) ([]*platform.View, int, error) { - ds := []*platform.View{} - op := getOp(platform.OpFindViews) - if filter.ID != nil { - d, err := c.FindViewByID(ctx, *filter.ID) - if err != nil && platform.ErrorCode(err) != platform.ENotFound { - return nil, 0, &platform.Error{ - Err: err, - Op: op, - } - } - if d != nil { - ds = append(ds, d) - } - - return ds, 1, nil - } - - err := c.db.View(func(tx *bolt.Tx) error { - dashs, err := c.findViews(ctx, tx, filter) - if err != nil { - return &platform.Error{ - Err: err, - Op: op, - } - } - ds = dashs - return nil - }) - - return ds, len(ds), err -} - -func (c *Client) findViews(ctx context.Context, tx *bolt.Tx, filter platform.ViewFilter) ([]*platform.View, error) { - ds := []*platform.View{} - - filterFn := filterViewsFn(filter) - err := c.forEachView(ctx, tx, func(d *platform.View) bool { - if filterFn(d) { - ds = append(ds, d) - } - return true - }) - - if err != nil { - return nil, err - } - - return ds, nil -} - -// CreateView creates a platform view and sets d.ID. 
-func (c *Client) CreateView(ctx context.Context, d *platform.View) error { - return c.db.Update(func(tx *bolt.Tx) error { - if pe := c.createView(ctx, tx, d); pe != nil { - return &platform.Error{ - Op: getOp(platform.OpCreateView), - Err: pe, - } - } - return nil - }) -} - -func (c *Client) createView(ctx context.Context, tx *bolt.Tx, d *platform.View) *platform.Error { - d.ID = c.IDGenerator.ID() - return c.putView(ctx, tx, d) -} - -// PutView will put a view without setting an ID. -func (c *Client) PutView(ctx context.Context, d *platform.View) error { - return c.db.Update(func(tx *bolt.Tx) error { - if pe := c.putView(ctx, tx, d); pe != nil { - return pe - } - return nil - }) -} - -func (c *Client) putView(ctx context.Context, tx *bolt.Tx, d *platform.View) *platform.Error { - v, err := json.Marshal(d) - if err != nil { - return &platform.Error{ - Err: err, - } - } - encodedID, err := d.ID.Encode() - if err != nil { - return &platform.Error{ - Err: err, - } - } - if err := tx.Bucket(viewBucket).Put(encodedID, v); err != nil { - return &platform.Error{ - Err: err, - } - } - return nil -} - -// forEachView will iterate through all views while fn returns true. -func (c *Client) forEachView(ctx context.Context, tx *bolt.Tx, fn func(*platform.View) bool) error { - cur := tx.Bucket(viewBucket).Cursor() - for k, v := cur.First(); k != nil; k, v = cur.Next() { - d := &platform.View{} - if err := json.Unmarshal(v, d); err != nil { - return err - } - if !fn(d) { - break - } - } - - return nil -} - -// UpdateView updates a view according the parameters set on upd. -func (c *Client) UpdateView(ctx context.Context, id platform.ID, upd platform.ViewUpdate) (*platform.View, error) { - var d *platform.View - err := c.db.Update(func(tx *bolt.Tx) error { - dash, pe := c.updateView(ctx, tx, id, upd) - if pe != nil { - return &platform.Error{ - Err: pe, - Op: getOp(platform.OpUpdateView), - } - } - d = dash - return nil - }) - - return d, err -} - -func (c *Client) updateView(ctx context.Context, tx *bolt.Tx, id platform.ID, upd platform.ViewUpdate) (*platform.View, error) { - d, err := c.findViewByID(ctx, tx, id) - if err != nil { - return nil, err - } - - if upd.Name != nil { - d.Name = *upd.Name - } - - if upd.Properties != nil { - d.Properties = upd.Properties - } - - if err := c.putView(ctx, tx, d); err != nil { - return nil, err - } - - return d, nil -} - -// DeleteView deletes a view and prunes it from the index. 
-func (c *Client) DeleteView(ctx context.Context, id platform.ID) error { - return c.db.Update(func(tx *bolt.Tx) error { - if pe := c.deleteView(ctx, tx, id); pe != nil { - return &platform.Error{ - Err: pe, - Op: getOp(platform.OpDeleteView), - } - } - return nil - }) -} - -func (c *Client) deleteView(ctx context.Context, tx *bolt.Tx, id platform.ID) *platform.Error { - _, pe := c.findViewByID(ctx, tx, id) - if pe != nil { - return pe - } - encodedID, err := id.Encode() - if err != nil { - return &platform.Error{ - Err: err, - } - } - if err := tx.Bucket(viewBucket).Delete(encodedID); err != nil { - return &platform.Error{ - Err: err, - } - } - - if err := c.deleteUserResourceMappings(ctx, tx, platform.UserResourceMappingFilter{ - ResourceID: id, - ResourceType: platform.DashboardsResourceType, - }); err != nil { - return &platform.Error{ - Err: err, - } - } - - return nil -} diff --git a/bolt/view_test.go b/bolt/view_test.go deleted file mode 100644 index 758b278553..0000000000 --- a/bolt/view_test.go +++ /dev/null @@ -1,52 +0,0 @@ -package bolt_test - -import ( - "context" - "testing" - - platform "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/bolt" - platformtesting "github.com/influxdata/influxdb/testing" -) - -func initViewService(f platformtesting.ViewFields, t *testing.T) (platform.ViewService, string, func()) { - c, closeFn, err := NewTestClient() - if err != nil { - t.Fatalf("failed to create new bolt client: %v", err) - } - c.IDGenerator = f.IDGenerator - ctx := context.TODO() - for _, b := range f.Views { - if err := c.PutView(ctx, b); err != nil { - t.Fatalf("failed to populate cells") - } - } - return c, bolt.OpPrefix, func() { - defer closeFn() - for _, b := range f.Views { - if err := c.DeleteView(ctx, b.ID); err != nil { - t.Logf("failed to remove cell: %v", err) - } - } - } -} - -func TestViewService_CreateView(t *testing.T) { - platformtesting.CreateView(initViewService, t) -} - -func TestViewService_FindViewByID(t *testing.T) { - platformtesting.FindViewByID(initViewService, t) -} - -func TestViewService_FindViews(t *testing.T) { - platformtesting.FindViews(initViewService, t) -} - -func TestViewService_DeleteView(t *testing.T) { - platformtesting.DeleteView(initViewService, t) -} - -func TestViewService_UpdateView(t *testing.T) { - platformtesting.UpdateView(initViewService, t) -} diff --git a/cmd/influx/authorization.go b/cmd/influx/authorization.go index 01b11e8085..76463cc93a 100644 --- a/cmd/influx/authorization.go +++ b/cmd/influx/authorization.go @@ -5,10 +5,8 @@ import ( "os" platform "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/bolt" "github.com/influxdata/influxdb/cmd/influx/internal" "github.com/influxdata/influxdb/http" - "github.com/influxdata/influxdb/internal/fs" "github.com/spf13/cobra" ) @@ -289,17 +287,7 @@ func init() { func newAuthorizationService(f Flags) (platform.AuthorizationService, error) { if flags.local { - boltFile, err := fs.BoltFile() - if err != nil { - return nil, err - } - c := bolt.NewClient() - c.Path = boltFile - if err := c.Open(context.Background()); err != nil { - return nil, err - } - - return c, nil + return newLocalKVService() } return &http.AuthorizationService{ Addr: flags.host, @@ -473,7 +461,9 @@ func authorizationActiveF(cmd *cobra.Command, args []string) error { return err } - if err := s.SetAuthorizationStatus(context.Background(), id, platform.Active); err != nil { + if err := s.UpdateAuthorization(context.Background(), id, &platform.AuthorizationUpdate{ + Status: 
platform.Active.Ptr(), + }); err != nil { return err } @@ -542,7 +532,9 @@ func authorizationInactiveF(cmd *cobra.Command, args []string) error { return err } - if err := s.SetAuthorizationStatus(ctx, id, platform.Inactive); err != nil { + if err := s.UpdateAuthorization(context.Background(), id, &platform.AuthorizationUpdate{ + Status: platform.Inactive.Ptr(), + }); err != nil { return err } diff --git a/cmd/influx/bucket.go b/cmd/influx/bucket.go index c904724a4a..4beec59c77 100644 --- a/cmd/influx/bucket.go +++ b/cmd/influx/bucket.go @@ -7,10 +7,8 @@ import ( "time" platform "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/bolt" "github.com/influxdata/influxdb/cmd/influx/internal" "github.com/influxdata/influxdb/http" - "github.com/influxdata/influxdb/internal/fs" "github.com/spf13/cobra" ) @@ -53,17 +51,7 @@ func init() { func newBucketService(f Flags) (platform.BucketService, error) { if flags.local { - boltFile, err := fs.BoltFile() - if err != nil { - return nil, err - } - c := bolt.NewClient() - c.Path = boltFile - if err := c.Open(context.Background()); err != nil { - return nil, err - } - - return c, nil + return newLocalKVService() } return &http.BucketService{ Addr: flags.host, diff --git a/cmd/influx/main.go b/cmd/influx/main.go index 403d3986a4..2d7755bfed 100644 --- a/cmd/influx/main.go +++ b/cmd/influx/main.go @@ -8,9 +8,11 @@ import ( "path/filepath" "github.com/influxdata/influxdb" + "github.com/influxdata/influxdb/bolt" "github.com/influxdata/influxdb/cmd/influx/internal" "github.com/influxdata/influxdb/http" "github.com/influxdata/influxdb/internal/fs" + "github.com/influxdata/influxdb/kv" "github.com/spf13/cobra" "github.com/spf13/viper" ) @@ -164,3 +166,17 @@ func Execute() { os.Exit(1) } } + +func newLocalKVService() (*kv.Service, error) { + boltFile, err := fs.BoltFile() + if err != nil { + return nil, err + } + + store := bolt.NewKVStore(boltFile) + if err := store.Open(context.Background()); err != nil { + return nil, err + } + + return kv.NewService(store), nil +} diff --git a/cmd/influx/organization.go b/cmd/influx/organization.go index 80613d08eb..481d817fe2 100644 --- a/cmd/influx/organization.go +++ b/cmd/influx/organization.go @@ -6,10 +6,8 @@ import ( "os" platform "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/bolt" "github.com/influxdata/influxdb/cmd/influx/internal" "github.com/influxdata/influxdb/http" - "github.com/influxdata/influxdb/internal/fs" "github.com/spf13/cobra" ) @@ -47,17 +45,7 @@ func init() { func newOrganizationService(f Flags) (platform.OrganizationService, error) { if flags.local { - boltFile, err := fs.BoltFile() - if err != nil { - return nil, err - } - c := bolt.NewClient() - c.Path = boltFile - if err := c.Open(context.Background()); err != nil { - return nil, err - } - - return c, nil + return newLocalKVService() } return &http.OrganizationService{ Addr: flags.host, diff --git a/cmd/influx/user.go b/cmd/influx/user.go index bc69d55325..895a91458a 100644 --- a/cmd/influx/user.go +++ b/cmd/influx/user.go @@ -5,10 +5,8 @@ import ( "os" platform "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/bolt" "github.com/influxdata/influxdb/cmd/influx/internal" "github.com/influxdata/influxdb/http" - "github.com/influxdata/influxdb/internal/fs" "github.com/spf13/cobra" ) @@ -44,17 +42,7 @@ func init() { func newUserService(f Flags) (platform.UserService, error) { if flags.local { - boltFile, err := fs.BoltFile() - if err != nil { - return nil, err - } - c := bolt.NewClient() - c.Path = 
boltFile - if err := c.Open(context.Background()); err != nil { - return nil, err - } - - return c, nil + return newLocalKVService() } return &http.UserService{ Addr: flags.host, @@ -64,17 +52,7 @@ func newUserService(f Flags) (platform.UserService, error) { func newUserResourceMappingService(f Flags) (platform.UserResourceMappingService, error) { if flags.local { - boltFile, err := fs.BoltFile() - if err != nil { - return nil, err - } - c := bolt.NewClient() - c.Path = boltFile - if err := c.Open(context.Background()); err != nil { - return nil, err - } - - return c, nil + return newLocalKVService() } return &http.UserResourceMappingService{ Addr: flags.host, diff --git a/cmd/influxd/launcher/launcher.go b/cmd/influxd/launcher/launcher.go index f6dabcf162..27f207bce8 100644 --- a/cmd/influxd/launcher/launcher.go +++ b/cmd/influxd/launcher/launcher.go @@ -547,12 +547,13 @@ func (m *Launcher) run(ctx context.Context) (err error) { executor := taskexecutor.NewAsyncQueryServiceExecutor(m.logger.With(zap.String("service", "task-executor")), m.queryController, authSvc, store) lw := taskbackend.NewPointLogWriter(pointsWriter) - m.scheduler = taskbackend.NewScheduler(store, executor, lw, time.Now().UTC().Unix(), taskbackend.WithTicker(ctx, 100*time.Millisecond), taskbackend.WithLogger(m.logger)) + queryService := query.QueryServiceBridge{AsyncQueryService: m.queryController} + lr := taskbackend.NewQueryLogReader(queryService) + taskControlService := taskbackend.TaskControlAdaptor(store, lw, lr) + m.scheduler = taskbackend.NewScheduler(taskControlService, executor, time.Now().UTC().Unix(), taskbackend.WithTicker(ctx, 100*time.Millisecond), taskbackend.WithLogger(m.logger)) m.scheduler.Start(ctx) m.reg.MustRegister(m.scheduler.PrometheusCollectors()...) - queryService := query.QueryServiceBridge{AsyncQueryService: m.queryController} - lr := taskbackend.NewQueryLogReader(queryService) taskSvc = task.PlatformAdapter(coordinator.New(m.logger.With(zap.String("service", "task-coordinator")), m.scheduler, store), lr, m.scheduler, authSvc, userResourceSvc, orgSvc) taskSvc = task.NewValidator(m.logger.With(zap.String("service", "task-authz-validator")), taskSvc, bucketSvc) m.taskStore = store diff --git a/cmd/influxd/launcher/launcher_helpers.go b/cmd/influxd/launcher/launcher_helpers.go new file mode 100644 index 0000000000..1f7d40a6be --- /dev/null +++ b/cmd/influxd/launcher/launcher_helpers.go @@ -0,0 +1,381 @@ +package launcher + +import ( + "bytes" + "context" + "fmt" + "io" + "io/ioutil" + nethttp "net/http" + "os" + "path/filepath" + "reflect" + "sort" + "strings" + "testing" + + "github.com/influxdata/flux" + "github.com/influxdata/flux/lang" + platform "github.com/influxdata/influxdb" + "github.com/influxdata/influxdb/bolt" + "github.com/influxdata/influxdb/http" + "github.com/influxdata/influxdb/query" +) + +// TestLauncher is a test wrapper for launcher.Launcher. +type TestLauncher struct { + *Launcher + + // Root temporary directory for all data. + Path string + + // Initialized after calling the Setup() helper. + User *platform.User + Org *platform.Organization + Bucket *platform.Bucket + Auth *platform.Authorization + + // Standard in/out/err buffers. + Stdin bytes.Buffer + Stdout bytes.Buffer + Stderr bytes.Buffer +} + +// NewTestLauncher returns a new instance of TestLauncher. 
+func NewTestLauncher() *TestLauncher { + l := &TestLauncher{Launcher: NewLauncher()} + l.Launcher.Stdin = &l.Stdin + l.Launcher.Stdout = &l.Stdout + l.Launcher.Stderr = &l.Stderr + if testing.Verbose() { + l.Launcher.Stdout = io.MultiWriter(l.Launcher.Stdout, os.Stdout) + l.Launcher.Stderr = io.MultiWriter(l.Launcher.Stderr, os.Stderr) + } + + path, err := ioutil.TempDir("", "") + if err != nil { + panic(err) + } + l.Path = path + return l +} + +// RunLauncherOrFail initializes and starts the server. +func RunTestLauncherOrFail(tb testing.TB, ctx context.Context, args ...string) *TestLauncher { + tb.Helper() + l := NewTestLauncher() + if err := l.Run(ctx, args...); err != nil { + tb.Fatal(err) + } + return l +} + +// Run executes the program with additional arguments to set paths and ports. +func (tl *TestLauncher) Run(ctx context.Context, args ...string) error { + args = append(args, "--bolt-path", filepath.Join(tl.Path, "influxd.bolt")) + args = append(args, "--protos-path", filepath.Join(tl.Path, "protos")) + args = append(args, "--engine-path", filepath.Join(tl.Path, "engine")) + args = append(args, "--http-bind-address", "127.0.0.1:0") + args = append(args, "--log-level", "debug") + return tl.Launcher.Run(ctx, args...) +} + +// Shutdown stops the program and cleans up temporary paths. +func (tl *TestLauncher) Shutdown(ctx context.Context) error { + tl.Cancel() + tl.Launcher.Shutdown(ctx) + return os.RemoveAll(tl.Path) +} + +// ShutdownOrFail stops the program and cleans up temporary paths. Fail on error. +func (tl *TestLauncher) ShutdownOrFail(tb testing.TB, ctx context.Context) { + tb.Helper() + if err := tl.Shutdown(ctx); err != nil { + tb.Fatal(err) + } +} + +// SetupOrFail creates a new user, bucket, org, and auth token. Fail on error. +func (tl *TestLauncher) SetupOrFail(tb testing.TB) { + results := tl.OnBoardOrFail(tb, &platform.OnboardingRequest{ + User: "USER", + Password: "PASSWORD", + Org: "ORG", + Bucket: "BUCKET", + }) + + tl.User = results.User + tl.Org = results.Org + tl.Bucket = results.Bucket + tl.Auth = results.Auth +} + +// OnBoardOrFail attempts an on-boarding request or fails on error. +// The on-boarding status is also reset to allow multiple user/org/buckets to be created. +func (tl *TestLauncher) OnBoardOrFail(tb testing.TB, req *platform.OnboardingRequest) *platform.OnboardingResults { + tb.Helper() + res, err := tl.KeyValueService().Generate(context.Background(), req) + if err != nil { + tb.Fatal(err) + } + + err = tl.KeyValueService().PutOnboardingStatus(context.Background(), false) + if err != nil { + tb.Fatal(err) + } + + return res +} + +// WriteOrFail attempts a write to the organization and bucket identified by to or fails if there is an error. +func (tl *TestLauncher) WriteOrFail(tb testing.TB, to *platform.OnboardingResults, data string) { + tb.Helper() + resp, err := nethttp.DefaultClient.Do(tl.NewHTTPRequestOrFail(tb, "POST", fmt.Sprintf("/api/v2/write?org=%s&bucket=%s", to.Org.ID, to.Bucket.ID), to.Auth.Token, data)) + if err != nil { + tb.Fatal(err) + } + + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + tb.Fatal(err) + } + + if err := resp.Body.Close(); err != nil { + tb.Fatal(err) + } + + if resp.StatusCode != nethttp.StatusNoContent { + tb.Fatalf("unexpected status code: %d, body: %s, headers: %v", resp.StatusCode, body, resp.Header) + } +} + +// WriteOrFail attempts a write to the organization and bucket used during setup or fails if there is an error. 
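// A minimal, hypothetical test sketch showing how the TestLauncher helpers in
// this file fit together (not part of this diff); the test name and the
// line-protocol point are made up, and the launcher is assumed to start cleanly
// on the test machine.
func TestLauncherHarness_Sketch(t *testing.T) {
	l := RunTestLauncherOrFail(t, context.Background())
	l.SetupOrFail(t) // creates the USER/ORG/BUCKET fixtures and an auth token
	defer l.ShutdownOrFail(t, context.Background())

	// Write one point (no timestamp, so the server assigns one) into the bucket
	// created by SetupOrFail.
	l.WritePointsOrFail(t, `m,k=v f=1i`)
}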
+func (tl *TestLauncher) WritePointsOrFail(tb testing.TB, data string) { + tb.Helper() + resp, err := nethttp.DefaultClient.Do( + tl.NewHTTPRequestOrFail( + tb, + "POST", fmt.Sprintf("/api/v2/write?org=%s&bucket=%s", tl.Org.ID, tl.Bucket.ID), + tl.Auth.Token, + data)) + if err != nil { + tb.Fatal(err) + } + + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + tb.Fatal(err) + } + + if err := resp.Body.Close(); err != nil { + tb.Fatal(err) + } + + if resp.StatusCode != nethttp.StatusNoContent { + tb.Fatalf("unexpected status code: %d, body: %s, headers: %v", resp.StatusCode, body, resp.Header) + } +} + +// MustExecuteQuery executes the provided query panicking if an error is encountered. +// Callers of MustExecuteQuery must call Done on the returned QueryResults. +func (tl *TestLauncher) MustExecuteQuery(query string) *QueryResults { + results, err := tl.ExecuteQuery(query) + if err != nil { + panic(err) + } + return results +} + +// ExecuteQuery executes the provided query against the ith query node. +// Callers of ExecuteQuery must call Done on the returned QueryResults. +func (tl *TestLauncher) ExecuteQuery(q string) (*QueryResults, error) { + fq, err := tl.QueryController().Query(context.Background(), &query.Request{ + Authorization: tl.Auth, + OrganizationID: tl.Auth.OrgID, + Compiler: lang.FluxCompiler{ + Query: q, + }}) + if err != nil { + return nil, err + } + if err = fq.Err(); err != nil { + return nil, fq.Err() + } + return &QueryResults{ + Results: <-fq.Ready(), + Query: fq, + }, nil +} + +// QueryAndConsume queries InfluxDB using the request provided. It uses a function to consume the results obtained. +// It returns the first error encountered when requesting the query, consuming the results, or executing the query. +func (tl *TestLauncher) QueryAndConsume(ctx context.Context, req *query.Request, fn func(r flux.Result) error) error { + res, err := tl.FluxQueryService().Query(ctx, req) + if err != nil { + return err + } + // iterate over results to populate res.Err() + var gotErr error + for res.More() { + if err := fn(res.Next()); gotErr == nil { + gotErr = err + } + } + if gotErr != nil { + return gotErr + } + return res.Err() +} + +// QueryAndNopConsume does the same as QueryAndConsume but consumes results with a nop function. +func (tl *TestLauncher) QueryAndNopConsume(ctx context.Context, req *query.Request) error { + return tl.QueryAndConsume(ctx, req, func(r flux.Result) error { + return r.Tables().Do(func(table flux.Table) error { + return nil + }) + }) +} + +// FluxQueryOrFail performs a query to the specified organization and returns the results +// or fails if there is an error. +func (tl *TestLauncher) FluxQueryOrFail(tb testing.TB, org *platform.Organization, token string, query string) string { + tb.Helper() + + b, err := http.SimpleQuery(tl.URL(), query, org.Name, token) + if err != nil { + tb.Fatal(err) + } + + return string(b) +} + +// MustNewHTTPRequest returns a new nethttp.Request with base URL and auth attached. Fail on error. +func (tl *TestLauncher) MustNewHTTPRequest(method, rawurl, body string) *nethttp.Request { + req, err := nethttp.NewRequest(method, tl.URL()+rawurl, strings.NewReader(body)) + if err != nil { + panic(err) + } + + req.Header.Set("Authorization", "Token "+tl.Auth.Token) + return req +} + +// MustNewHTTPRequest returns a new nethttp.Request with base URL and auth attached. Fail on error. 
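// A hypothetical sketch of QueryAndConsume (not part of this diff): build a
// query.Request much as ExecuteQuery above does, then drain every table of every
// result. The function name and Flux source are made up; `tl` is assumed to have
// been set up already.
func drainQuerySketch(tl *TestLauncher) error {
	req := &query.Request{
		Authorization:  tl.Auth,
		OrganizationID: tl.Org.ID,
		Compiler:       lang.FluxCompiler{Query: `from(bucket:"BUCKET") |> range(start:-5m)`},
	}
	// QueryAndConsume returns the first error hit while requesting, consuming,
	// or executing the query.
	return tl.QueryAndConsume(context.Background(), req, func(r flux.Result) error {
		return r.Tables().Do(func(tbl flux.Table) error { return nil })
	})
}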
+func (tl *TestLauncher) NewHTTPRequestOrFail(tb testing.TB, method, rawurl, token string, body string) *nethttp.Request { + tb.Helper() + req, err := nethttp.NewRequest(method, tl.URL()+rawurl, strings.NewReader(body)) + if err != nil { + tb.Fatal(err) + } + + req.Header.Set("Authorization", "Token "+token) + return req +} + +// Services + +func (tl *TestLauncher) FluxService() *http.FluxService { + return &http.FluxService{Addr: tl.URL(), Token: tl.Auth.Token} +} + +func (tl *TestLauncher) FluxQueryService() *http.FluxQueryService { + return &http.FluxQueryService{Addr: tl.URL(), Token: tl.Auth.Token} +} + +func (tl *TestLauncher) BucketService() *http.BucketService { + return &http.BucketService{Addr: tl.URL(), Token: tl.Auth.Token, OpPrefix: bolt.OpPrefix} +} + +func (tl *TestLauncher) AuthorizationService() *http.AuthorizationService { + return &http.AuthorizationService{Addr: tl.URL(), Token: tl.Auth.Token} +} + +func (tl *TestLauncher) TaskService() *http.TaskService { + return &http.TaskService{Addr: tl.URL(), Token: tl.Auth.Token} +} + +// QueryResult wraps a single flux.Result with some helper methods. +type QueryResult struct { + t *testing.T + q flux.Result +} + +// HasTableWithCols checks if the desired number of tables and columns exist, +// ignoring any system columns. +// +// If the result is not as expected then the testing.T fails. +func (r *QueryResult) HasTablesWithCols(want []int) { + r.t.Helper() + + // _start, _stop, _time, _f + systemCols := 4 + got := []int{} + if err := r.q.Tables().Do(func(b flux.Table) error { + got = append(got, len(b.Cols())-systemCols) + b.Do(func(c flux.ColReader) error { return nil }) + return nil + }); err != nil { + r.t.Fatal(err) + } + + if !reflect.DeepEqual(got, want) { + r.t.Fatalf("got %v, expected %v", got, want) + } +} + +// TablesN returns the number of tables for the result. +func (r *QueryResult) TablesN() int { + var total int + r.q.Tables().Do(func(b flux.Table) error { + total++ + b.Do(func(c flux.ColReader) error { return nil }) + return nil + }) + return total +} + +// QueryResults wraps a set of query results with some helper methods. +type QueryResults struct { + Results map[string]flux.Result + Query flux.Query +} + +func (r *QueryResults) Done() { + r.Query.Done() +} + +// First returns the first QueryResult. When there are not exactly 1 table First +// will fail. +func (r *QueryResults) First(t *testing.T) *QueryResult { + r.HasTableCount(t, 1) + for _, result := range r.Results { + return &QueryResult{t: t, q: result} + } + return nil +} + +// HasTableCount asserts that there are n tables in the result. +func (r *QueryResults) HasTableCount(t *testing.T, n int) { + if got, exp := len(r.Results), n; got != exp { + t.Fatalf("result has %d tables, expected %d. Tables: %s", got, exp, r.Names()) + } +} + +// Names returns the sorted set of table names for the query results. +func (r *QueryResults) Names() []string { + if len(r.Results) == 0 { + return nil + } + names := make([]string, len(r.Results), 0) + for k := range r.Results { + names = append(names, k) + } + return names +} + +// SortedNames returns the sorted set of table names for the query results. 
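// A hypothetical sketch (not part of this diff) of the query and assertion
// helpers above: run a raw Flux query and check the shape of the result set. The
// function name, query, and expected column count are made up and assume
// matching data was written beforehand.
func assertShapeSketch(t *testing.T, tl *TestLauncher) {
	res := tl.MustExecuteQuery(`from(bucket:"BUCKET") |> range(start:-5m)`)
	defer res.Done() // callers of MustExecuteQuery must release the underlying flux.Query

	res.HasTableCount(t, 1)                  // exactly one result
	res.First(t).HasTablesWithCols([]int{1}) // with one table holding one non-system column
}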
+func (r *QueryResults) SortedNames() []string { + names := r.Names() + sort.Strings(names) + return names +} diff --git a/cmd/influxd/launcher/launcher_test.go b/cmd/influxd/launcher/launcher_test.go index 7d665069a4..d52efc8e09 100644 --- a/cmd/influxd/launcher/launcher_test.go +++ b/cmd/influxd/launcher/launcher_test.go @@ -1,15 +1,10 @@ package launcher_test import ( - "bytes" "context" "encoding/json" - "io" "io/ioutil" nethttp "net/http" - "os" - "path/filepath" - "strings" "testing" platform "github.com/influxdata/influxdb" @@ -22,7 +17,7 @@ import ( var ctx = context.Background() func TestLauncher_Setup(t *testing.T) { - l := NewLauncher() + l := launcher.NewTestLauncher() if err := l.Run(ctx); err != nil { t.Fatal(err) } @@ -50,7 +45,7 @@ func TestLauncher_Setup(t *testing.T) { // This is to mimic chronograf using cookies as sessions // rather than authorizations func TestLauncher_SetupWithUsers(t *testing.T) { - l := RunLauncherOrFail(t, ctx) + l := launcher.RunTestLauncherOrFail(t, ctx) l.SetupOrFail(t) defer l.ShutdownOrFail(t, ctx) @@ -144,147 +139,3 @@ func TestLauncher_SetupWithUsers(t *testing.T) { t.Fatalf("unexpected 2 users: %#+v", exp) } } - -// Launcher is a test wrapper for launcher.Launcher. -type Launcher struct { - *launcher.Launcher - - // Root temporary directory for all data. - Path string - - // Initialized after calling the Setup() helper. - User *platform.User - Org *platform.Organization - Bucket *platform.Bucket - Auth *platform.Authorization - - // Standard in/out/err buffers. - Stdin bytes.Buffer - Stdout bytes.Buffer - Stderr bytes.Buffer -} - -// NewLauncher returns a new instance of Launcher. -func NewLauncher() *Launcher { - l := &Launcher{Launcher: launcher.NewLauncher()} - l.Launcher.Stdin = &l.Stdin - l.Launcher.Stdout = &l.Stdout - l.Launcher.Stderr = &l.Stderr - if testing.Verbose() { - l.Launcher.Stdout = io.MultiWriter(l.Launcher.Stdout, os.Stdout) - l.Launcher.Stderr = io.MultiWriter(l.Launcher.Stderr, os.Stderr) - } - - path, err := ioutil.TempDir("", "") - if err != nil { - panic(err) - } - l.Path = path - return l -} - -// RunLauncherOrFail initializes and starts the server. -func RunLauncherOrFail(tb testing.TB, ctx context.Context, args ...string) *Launcher { - tb.Helper() - l := NewLauncher() - if err := l.Run(ctx, args...); err != nil { - tb.Fatal(err) - } - return l -} - -// Run executes the program with additional arguments to set paths and ports. -func (l *Launcher) Run(ctx context.Context, args ...string) error { - args = append(args, "--bolt-path", filepath.Join(l.Path, "influxd.bolt")) - args = append(args, "--protos-path", filepath.Join(l.Path, "protos")) - args = append(args, "--engine-path", filepath.Join(l.Path, "engine")) - args = append(args, "--http-bind-address", "127.0.0.1:0") - args = append(args, "--log-level", "debug") - return l.Launcher.Run(ctx, args...) -} - -// Shutdown stops the program and cleans up temporary paths. -func (l *Launcher) Shutdown(ctx context.Context) error { - l.Cancel() - l.Launcher.Shutdown(ctx) - return os.RemoveAll(l.Path) -} - -// ShutdownOrFail stops the program and cleans up temporary paths. Fail on error. -func (l *Launcher) ShutdownOrFail(tb testing.TB, ctx context.Context) { - tb.Helper() - if err := l.Shutdown(ctx); err != nil { - tb.Fatal(err) - } -} - -// SetupOrFail creates a new user, bucket, org, and auth token. Fail on error. 
-func (l *Launcher) SetupOrFail(tb testing.TB) { - results := l.OnBoardOrFail(tb, &platform.OnboardingRequest{ - User: "USER", - Password: "PASSWORD", - Org: "ORG", - Bucket: "BUCKET", - }) - - l.User = results.User - l.Org = results.Org - l.Bucket = results.Bucket - l.Auth = results.Auth -} - -// OnBoardOrFail attempts an on-boarding request or fails on error. -// The on-boarding status is also reset to allow multiple user/org/buckets to be created. -func (l *Launcher) OnBoardOrFail(tb testing.TB, req *platform.OnboardingRequest) *platform.OnboardingResults { - tb.Helper() - res, err := l.KeyValueService().Generate(context.Background(), req) - if err != nil { - tb.Fatal(err) - } - - err = l.KeyValueService().PutOnboardingStatus(context.Background(), false) - if err != nil { - tb.Fatal(err) - } - - return res -} - -func (l *Launcher) FluxService() *http.FluxService { - return &http.FluxService{Addr: l.URL(), Token: l.Auth.Token} -} - -func (l *Launcher) BucketService() *http.BucketService { - return &http.BucketService{Addr: l.URL(), Token: l.Auth.Token} -} - -func (l *Launcher) AuthorizationService() *http.AuthorizationService { - return &http.AuthorizationService{Addr: l.URL(), Token: l.Auth.Token} -} - -func (l *Launcher) TaskService() *http.TaskService { - return &http.TaskService{Addr: l.URL(), Token: l.Auth.Token} -} - -// MustNewHTTPRequest returns a new nethttp.Request with base URL and auth attached. Fail on error. -func (l *Launcher) MustNewHTTPRequest(method, rawurl, body string) *nethttp.Request { - req, err := nethttp.NewRequest(method, l.URL()+rawurl, strings.NewReader(body)) - if err != nil { - panic(err) - } - - req.Header.Set("Authorization", "Token "+l.Auth.Token) - return req -} - -// MustNewHTTPRequest returns a new nethttp.Request with base URL and auth attached. Fail on error. -func (l *Launcher) NewHTTPRequestOrFail(tb testing.TB, method, rawurl, token string, body string) *nethttp.Request { - tb.Helper() - req, err := nethttp.NewRequest(method, l.URL()+rawurl, strings.NewReader(body)) - if err != nil { - tb.Fatal(err) - } - - req.Header.Set("Authorization", "Token "+token) - return req -} diff --git a/cmd/influxd/launcher/query_test.go b/cmd/influxd/launcher/query_test.go index a5dcd346b3..30edcf126f 100644 --- a/cmd/influxd/launcher/query_test.go +++ b/cmd/influxd/launcher/query_test.go @@ -6,20 +6,19 @@ import ( "fmt" "io" nethttp "net/http" - "reflect" - "sort" + "strings" "testing" "time" "github.com/influxdata/flux" "github.com/influxdata/flux/lang" - platform "github.com/influxdata/influxdb" + "github.com/influxdata/influxdb/cmd/influxd/launcher" phttp "github.com/influxdata/influxdb/http" "github.com/influxdata/influxdb/query" ) func TestPipeline_Write_Query_FieldKey(t *testing.T) { - be := RunLauncherOrFail(t, ctx) + be := launcher.RunTestLauncherOrFail(t, ctx) be.SetupOrFail(t) defer be.ShutdownOrFail(t, ctx) @@ -55,7 +54,7 @@ mem,server=b value=45.2`)) // _measurement=cpu,region=west,server=a,_field=v0 // _measurement=cpu,region=west,server=b,_field=v0 // - results := be.MustExecuteQuery(be.Org.ID, rawQ, be.Auth) + results := be.MustExecuteQuery(rawQ) defer results.Done() results.First(t).HasTablesWithCols([]int{5, 4, 4}) } @@ -64,9 +63,7 @@ mem,server=b value=45.2`)) // and checks that the queried results contain the expected number of tables // and expected number of columns. 
func TestPipeline_WriteV2_Query(t *testing.T) { - t.Parallel() - - be := RunLauncherOrFail(t, ctx) + be := launcher.RunTestLauncherOrFail(t, ctx) be.SetupOrFail(t) defer be.ShutdownOrFail(t, ctx) @@ -97,127 +94,52 @@ func TestPipeline_WriteV2_Query(t *testing.T) { t.Fatalf("exp status %d; got %d, body: %s", nethttp.StatusNoContent, resp.StatusCode, buf.String()) } - res := be.MustExecuteQuery( - be.Org.ID, - fmt.Sprintf(`from(bucket:"%s") |> range(start:-5m)`, be.Bucket.Name), - be.Auth) + res := be.MustExecuteQuery(fmt.Sprintf(`from(bucket:"%s") |> range(start:-5m)`, be.Bucket.Name)) defer res.Done() res.HasTableCount(t, 1) } -// QueryResult wraps a single flux.Result with some helper methods. -type QueryResult struct { - t *testing.T - q flux.Result -} +// This test initializes a default launcher; writes some data; queries the data (success); +// sets memory limits to the same read query; checks that the query fails because limits are exceeded. +func TestPipeline_QueryMemoryLimits(t *testing.T) { + l := launcher.RunTestLauncherOrFail(t, ctx) + l.SetupOrFail(t) + defer l.ShutdownOrFail(t, ctx) -// HasTableWithCols checks if the desired number of tables and columns exist, -// ignoring any system columns. -// -// If the result is not as expected then the testing.T fails. -func (r *QueryResult) HasTablesWithCols(want []int) { - r.t.Helper() - - // _start, _stop, _time, _f - systemCols := 4 - got := []int{} - if err := r.q.Tables().Do(func(b flux.Table) error { - got = append(got, len(b.Cols())-systemCols) - b.Do(func(c flux.ColReader) error { return nil }) - return nil - }); err != nil { - r.t.Fatal(err) + // write some points + for i := 0; i < 100; i++ { + l.WritePointsOrFail(t, fmt.Sprintf(`m,k=v1 f=%di %d`, i*100, time.Now().UnixNano())) } - if !reflect.DeepEqual(got, want) { - r.t.Fatalf("got %v, expected %v", got, want) - } -} - -// TablesN returns the number of tables for the result. -func (r *QueryResult) TablesN() int { - var total int - r.q.Tables().Do(func(b flux.Table) error { - total++ - b.Do(func(c flux.ColReader) error { return nil }) - return nil - }) - return total -} - -// MustExecuteQuery executes the provided query panicking if an error is encountered. -// Callers of MustExecuteQuery must call Done on the returned QueryResults. -func (p *Launcher) MustExecuteQuery(orgID platform.ID, query string, auth *platform.Authorization) *QueryResults { - results, err := p.ExecuteQuery(orgID, query, auth) + // compile a from query and get the spec + spec, err := flux.Compile(context.Background(), fmt.Sprintf(`from(bucket:"%s") |> range(start:-5m)`, l.Bucket.Name), time.Now()) if err != nil { - panic(err) + t.Fatal(err) } - return results -} -// ExecuteQuery executes the provided query against the ith query node. -// Callers of ExecuteQuery must call Done on the returned QueryResults. 
-func (p *Launcher) ExecuteQuery(orgID platform.ID, q string, auth *platform.Authorization) (*QueryResults, error) { - fq, err := p.QueryController().Query(context.Background(), &query.Request{ - Authorization: auth, - OrganizationID: orgID, - Compiler: lang.FluxCompiler{ - Query: q, - }}) - if err != nil { - return nil, err + // we expect this request to succeed + req := &query.Request{ + Authorization: l.Auth, + OrganizationID: l.Org.ID, + Compiler: lang.SpecCompiler{ + Spec: spec, + }, } - if err = fq.Err(); err != nil { - return nil, fq.Err() + if err := l.QueryAndNopConsume(context.Background(), req); err != nil { + t.Fatal(err) } - return &QueryResults{ - Results: <-fq.Ready(), - Query: fq, - }, nil -} -// QueryResults wraps a set of query results with some helper methods. -type QueryResults struct { - Results map[string]flux.Result - Query flux.Query -} - -func (r *QueryResults) Done() { - r.Query.Done() -} - -// First returns the first QueryResult. When there are not exactly 1 table First -// will fail. -func (r *QueryResults) First(t *testing.T) *QueryResult { - r.HasTableCount(t, 1) - for _, result := range r.Results { - return &QueryResult{t: t, q: result} + // ok, the first request went well, let's add memory limits: + // this query should error. + spec.Resources = flux.ResourceManagement{ + MemoryBytesQuota: 100, } - return nil -} -// HasTableCount asserts that there are n tables in the result. -func (r *QueryResults) HasTableCount(t *testing.T, n int) { - if got, exp := len(r.Results), n; got != exp { - t.Fatalf("result has %d tables, expected %d. Tables: %s", got, exp, r.Names()) + if err := l.QueryAndNopConsume(context.Background(), req); err != nil { + if !strings.Contains(err.Error(), "allocation limit reached") { + t.Fatalf("query errored with unexpected error: %v", err) + } + } else { + t.Fatal("expected error, got successful query execution") } } - -// Names returns the sorted set of table names for the query results. -func (r *QueryResults) Names() []string { - if len(r.Results) == 0 { - return nil - } - names := make([]string, len(r.Results), 0) - for k := range r.Results { - names = append(names, k) - } - return names -} - -// SortedNames returns the sorted set of table names for the query results. 
-func (r *QueryResults) SortedNames() []string { - names := r.Names() - sort.Strings(names) - return names -} diff --git a/cmd/influxd/launcher/storage_test.go b/cmd/influxd/launcher/storage_test.go index ea34f3a659..59ec26be27 100644 --- a/cmd/influxd/launcher/storage_test.go +++ b/cmd/influxd/launcher/storage_test.go @@ -10,13 +10,14 @@ import ( "github.com/google/go-cmp/cmp" "github.com/influxdata/influxdb" + "github.com/influxdata/influxdb/cmd/influxd/launcher" "github.com/influxdata/influxdb/http" "github.com/influxdata/influxdb/toml" "github.com/influxdata/influxdb/tsdb/tsm1" ) func TestStorage_WriteAndQuery(t *testing.T) { - l := RunLauncherOrFail(t, ctx) + l := launcher.RunTestLauncherOrFail(t, ctx) org1 := l.OnBoardOrFail(t, &influxdb.OnboardingRequest{ User: "USER-1", @@ -53,7 +54,7 @@ func TestStorage_WriteAndQuery(t *testing.T) { } func TestLauncher_WriteAndQuery(t *testing.T) { - l := RunLauncherOrFail(t, ctx) + l := launcher.RunTestLauncherOrFail(t, ctx) l.SetupOrFail(t) defer l.ShutdownOrFail(t, ctx) @@ -91,7 +92,7 @@ func TestLauncher_WriteAndQuery(t *testing.T) { } func TestLauncher_BucketDelete(t *testing.T) { - l := RunLauncherOrFail(t, ctx) + l := launcher.RunTestLauncherOrFail(t, ctx) l.SetupOrFail(t) defer l.ShutdownOrFail(t, ctx) @@ -157,7 +158,7 @@ func TestLauncher_BucketDelete(t *testing.T) { } func TestStorage_CacheSnapshot_Size(t *testing.T) { - l := NewLauncher() + l := launcher.NewTestLauncher() l.StorageConfig.Engine.Cache.SnapshotMemorySize = 10 l.StorageConfig.Engine.Cache.SnapshotAgeDuration = toml.Duration(time.Hour) defer l.ShutdownOrFail(t, ctx) @@ -203,7 +204,7 @@ func TestStorage_CacheSnapshot_Size(t *testing.T) { } func TestStorage_CacheSnapshot_Age(t *testing.T) { - l := NewLauncher() + l := launcher.NewTestLauncher() l.StorageConfig.Engine.Cache.SnapshotAgeDuration = toml.Duration(time.Second) defer l.ShutdownOrFail(t, ctx) @@ -246,38 +247,3 @@ func TestStorage_CacheSnapshot_Age(t *testing.T) { t.Fatalf("got %d series in TSM files, expected %d", got, exp) } } - -// WriteOrFail attempts a write to the organization and bucket identified by to or fails if there is an error. -func (l *Launcher) WriteOrFail(tb testing.TB, to *influxdb.OnboardingResults, data string) { - tb.Helper() - resp, err := nethttp.DefaultClient.Do(l.NewHTTPRequestOrFail(tb, "POST", fmt.Sprintf("/api/v2/write?org=%s&bucket=%s", to.Org.ID, to.Bucket.ID), to.Auth.Token, data)) - if err != nil { - tb.Fatal(err) - } - - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - tb.Fatal(err) - } - - if err := resp.Body.Close(); err != nil { - tb.Fatal(err) - } - - if resp.StatusCode != nethttp.StatusNoContent { - tb.Fatalf("unexpected status code: %d, body: %s, headers: %v", resp.StatusCode, body, resp.Header) - } -} - -// FluxQueryOrFail performs a query to the specified organization and returns the results -// or fails if there is an error. 
-func (l *Launcher) FluxQueryOrFail(tb testing.TB, org *influxdb.Organization, token string, query string) string { - tb.Helper() - - b, err := http.SimpleQuery(l.URL(), query, org.Name, token) - if err != nil { - tb.Fatal(err) - } - - return string(b) -} diff --git a/cmd/influxd/launcher/tasks_test.go b/cmd/influxd/launcher/tasks_test.go index 9abd133d36..f1e03be48e 100644 --- a/cmd/influxd/launcher/tasks_test.go +++ b/cmd/influxd/launcher/tasks_test.go @@ -13,12 +13,13 @@ import ( "github.com/influxdata/flux" "github.com/influxdata/flux/execute/executetest" "github.com/influxdata/influxdb" + "github.com/influxdata/influxdb/cmd/influxd/launcher" pctx "github.com/influxdata/influxdb/context" "github.com/influxdata/influxdb/task/backend" ) func TestLauncher_Task(t *testing.T) { - be := RunLauncherOrFail(t, ctx) + be := launcher.RunTestLauncherOrFail(t, ctx) be.SetupOrFail(t) defer be.ShutdownOrFail(t, ctx) @@ -95,7 +96,7 @@ stuff f=-123.456,b=true,s="hello" } from(bucket:"my_bucket_in") |> range(start:-5m) |> to(bucket:"%s", org:"%s")`, bOut.Name, be.Org.Name), } - created, err := be.TaskService().CreateTask(pctx.SetAuthorizer(ctx, be.Auth), create) + created, err := be.TaskService().CreateTask(ctx, create) if err != nil { t.Fatal(err) } @@ -160,7 +161,7 @@ from(bucket:"my_bucket_in") |> range(start:-5m) |> to(bucket:"%s", org:"%s")`, b // Explicitly set the now option so want and got have the same _start and _end values. nowOpt := fmt.Sprintf("option now = () => %s\n", time.Unix(now, 0).UTC().Format(time.RFC3339)) - res := be.MustExecuteQuery(org.ID, nowOpt+`from(bucket:"my_bucket_in") |> range(start:-5m)`, be.Auth) + res := be.MustExecuteQuery(nowOpt + `from(bucket:"my_bucket_in") |> range(start:-5m)`) defer res.Done() if len(res.Results) < 1 { t.Fail() @@ -185,7 +186,7 @@ from(bucket:"my_bucket_in") |> range(start:-5m) |> to(bucket:"%s", org:"%s")`, b for _, w := range want { executetest.NormalizeTables(w) } - res = be.MustExecuteQuery(org.ID, nowOpt+`from(bucket:"my_bucket_out") |> range(start:-5m)`, be.Auth) + res = be.MustExecuteQuery(nowOpt + `from(bucket:"my_bucket_out") |> range(start:-5m)`) defer res.Done() got := make(map[string][]*executetest.Table) for k, v := range res.Results { diff --git a/dashboard.go b/dashboard.go index bcaf540d90..073da1bb87 100644 --- a/dashboard.go +++ b/dashboard.go @@ -2,6 +2,8 @@ package influxdb import ( "context" + "encoding/json" + "fmt" "net/url" "sort" "time" @@ -13,6 +15,9 @@ const ErrDashboardNotFound = "dashboard not found" // ErrCellNotFound is the error msg for a missing cell. const ErrCellNotFound = "cell not found" +// ErrViewNotFound is the error msg for a missing View. +const ErrViewNotFound = "view not found" + // ops for dashboard service. const ( OpFindDashboardByID = "FindDashboardByID" @@ -229,3 +234,503 @@ func (u CellUpdate) Valid() *Error { return nil } + +// ViewUpdate is a struct for updating Views. +type ViewUpdate struct { + ViewContentsUpdate + Properties ViewProperties +} + +// Valid validates the update struct. It expects minimal values to be set. +func (u ViewUpdate) Valid() *Error { + _, ok := u.Properties.(EmptyViewProperties) + if u.Name == nil && ok { + return &Error{ + Code: EInvalid, + Msg: "expected at least one attribute to be updated", + } + } + + return nil +} + +// Apply updates a view with the view updates properties. 
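// A hypothetical sketch (not part of this diff) of applying a ViewUpdate via the
// Apply method below: only non-nil fields are copied onto the View. The function
// name and new view name are made up.
func renameViewSketch(v *View) error {
	name := "CPU usage"
	upd := ViewUpdate{
		ViewContentsUpdate: ViewContentsUpdate{Name: &name},
		// Properties is left nil, so Apply leaves v.Properties untouched.
	}
	return upd.Apply(v) // Apply calls Valid() first, then copies the non-nil fields
}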
+func (u ViewUpdate) Apply(v *View) error { + if err := u.Valid(); err != nil { + return err + } + + if u.Name != nil { + v.Name = *u.Name + } + + if u.Properties != nil { + v.Properties = u.Properties + } + + return nil +} + +// ViewContentsUpdate is a struct for updating the non properties content of a View. +type ViewContentsUpdate struct { + Name *string `json:"name"` +} + +// ViewFilter represents a set of filter that restrict the returned results. +type ViewFilter struct { + ID *ID + Types []string +} + +// View holds positional and visual information for a View. +type View struct { + ViewContents + Properties ViewProperties +} + +// ViewContents is the id and name of a specific view. +type ViewContents struct { + ID ID `json:"id,omitempty"` + Name string `json:"name"` +} + +// ViewProperties is used to mark other structures as conforming to a View. +type ViewProperties interface { + viewProperties() + GetType() string +} + +// EmptyViewProperties is visualization that has no values +type EmptyViewProperties struct{} + +func (v EmptyViewProperties) viewProperties() {} + +func (v EmptyViewProperties) GetType() string { return "" } + +// UnmarshalViewPropertiesJSON unmarshals JSON bytes into a ViewProperties. +func UnmarshalViewPropertiesJSON(b []byte) (ViewProperties, error) { + var v struct { + B json.RawMessage `json:"properties"` + } + + if err := json.Unmarshal(b, &v); err != nil { + return nil, err + } + + if len(v.B) == 0 { + // Then there wasn't any visualization field, so there's no need unmarshal it + return EmptyViewProperties{}, nil + } + + var t struct { + Shape string `json:"shape"` + Type string `json:"type"` + } + + if err := json.Unmarshal(v.B, &t); err != nil { + return nil, err + } + + var vis ViewProperties + switch t.Shape { + case "chronograf-v2": + switch t.Type { + case "xy": + var xyv XYViewProperties + if err := json.Unmarshal(v.B, &xyv); err != nil { + return nil, err + } + vis = xyv + case "single-stat": + var ssv SingleStatViewProperties + if err := json.Unmarshal(v.B, &ssv); err != nil { + return nil, err + } + vis = ssv + case "gauge": + var gv GaugeViewProperties + if err := json.Unmarshal(v.B, &gv); err != nil { + return nil, err + } + vis = gv + case "table": + var tv TableViewProperties + if err := json.Unmarshal(v.B, &tv); err != nil { + return nil, err + } + vis = tv + case "markdown": + var mv MarkdownViewProperties + if err := json.Unmarshal(v.B, &mv); err != nil { + return nil, err + } + vis = mv + case "log-viewer": // happens in log viewer stays in log viewer. + var lv LogViewProperties + if err := json.Unmarshal(v.B, &lv); err != nil { + return nil, err + } + vis = lv + case "line-plus-single-stat": + var lv LinePlusSingleStatProperties + if err := json.Unmarshal(v.B, &lv); err != nil { + return nil, err + } + vis = lv + case "histogram": + var hv HistogramViewProperties + if err := json.Unmarshal(v.B, &hv); err != nil { + return nil, err + } + vis = hv + } + case "empty": + var ev EmptyViewProperties + if err := json.Unmarshal(v.B, &ev); err != nil { + return nil, err + } + vis = ev + default: + return nil, fmt.Errorf("unknown type %v", t.Shape) + } + + return vis, nil +} + +// MarshalViewPropertiesJSON encodes a view into JSON bytes. 
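// A hypothetical sketch (not part of this diff) of the UnmarshalViewPropertiesJSON
// dispatch above: a "chronograf-v2" shape selects the concrete properties type
// from the nested "type" field. The payload is made up.
func decodePropertiesSketch() (ViewProperties, error) {
	blob := []byte(`{"properties": {"shape": "chronograf-v2", "type": "xy", "geom": "line"}}`)
	props, err := UnmarshalViewPropertiesJSON(blob) // yields an XYViewProperties value here
	if err != nil {
		return nil, err
	}
	// A missing "properties" key would have produced EmptyViewProperties{} instead.
	return props, nil
}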
+func MarshalViewPropertiesJSON(v ViewProperties) ([]byte, error) { + var s interface{} + switch vis := v.(type) { + case SingleStatViewProperties: + s = struct { + Shape string `json:"shape"` + SingleStatViewProperties + }{ + Shape: "chronograf-v2", + + SingleStatViewProperties: vis, + } + case TableViewProperties: + s = struct { + Shape string `json:"shape"` + TableViewProperties + }{ + Shape: "chronograf-v2", + + TableViewProperties: vis, + } + case GaugeViewProperties: + s = struct { + Shape string `json:"shape"` + GaugeViewProperties + }{ + Shape: "chronograf-v2", + + GaugeViewProperties: vis, + } + case XYViewProperties: + s = struct { + Shape string `json:"shape"` + XYViewProperties + }{ + Shape: "chronograf-v2", + + XYViewProperties: vis, + } + case LinePlusSingleStatProperties: + s = struct { + Shape string `json:"shape"` + LinePlusSingleStatProperties + }{ + Shape: "chronograf-v2", + + LinePlusSingleStatProperties: vis, + } + case HistogramViewProperties: + s = struct { + Shape string `json:"shape"` + HistogramViewProperties + }{ + Shape: "chronograf-v2", + + HistogramViewProperties: vis, + } + case MarkdownViewProperties: + s = struct { + Shape string `json:"shape"` + MarkdownViewProperties + }{ + Shape: "chronograf-v2", + + MarkdownViewProperties: vis, + } + case LogViewProperties: + s = struct { + Shape string `json:"shape"` + LogViewProperties + }{ + Shape: "chronograf-v2", + LogViewProperties: vis, + } + default: + s = struct { + Shape string `json:"shape"` + EmptyViewProperties + }{ + Shape: "empty", + EmptyViewProperties: EmptyViewProperties{}, + } + } + return json.Marshal(s) +} + +// MarshalJSON encodes a view to JSON bytes. +func (c View) MarshalJSON() ([]byte, error) { + vis, err := MarshalViewPropertiesJSON(c.Properties) + if err != nil { + return nil, err + } + + return json.Marshal(struct { + ViewContents + ViewProperties json.RawMessage `json:"properties"` + }{ + ViewContents: c.ViewContents, + ViewProperties: vis, + }) +} + +// UnmarshalJSON decodes JSON bytes into the corresponding view type (those that implement ViewProperties). +func (c *View) UnmarshalJSON(b []byte) error { + if err := json.Unmarshal(b, &c.ViewContents); err != nil { + return err + } + + v, err := UnmarshalViewPropertiesJSON(b) + if err != nil { + return err + } + c.Properties = v + return nil +} + +// UnmarshalJSON decodes JSON bytes into the corresponding view update type (those that implement ViewProperties). +func (u *ViewUpdate) UnmarshalJSON(b []byte) error { + if err := json.Unmarshal(b, &u.ViewContentsUpdate); err != nil { + return err + } + + v, err := UnmarshalViewPropertiesJSON(b) + if err != nil { + return err + } + u.Properties = v + return nil +} + +// MarshalJSON encodes a view to JSON bytes. 
+func (u ViewUpdate) MarshalJSON() ([]byte, error) {
+	vis, err := MarshalViewPropertiesJSON(u.Properties)
+	if err != nil {
+		return nil, err
+	}
+
+	return json.Marshal(struct {
+		ViewContentsUpdate
+		ViewProperties json.RawMessage `json:"properties,omitempty"`
+	}{
+		ViewContentsUpdate: u.ViewContentsUpdate,
+		ViewProperties:     vis,
+	})
+}
+
+// LinePlusSingleStatProperties represents options for line plus single stat view in Chronograf
+type LinePlusSingleStatProperties struct {
+	Queries           []DashboardQuery `json:"queries"`
+	Axes              map[string]Axis  `json:"axes"`
+	Type              string           `json:"type"`
+	Legend            Legend           `json:"legend"`
+	ViewColors        []ViewColor      `json:"colors"`
+	Prefix            string           `json:"prefix"`
+	Suffix            string           `json:"suffix"`
+	DecimalPlaces     DecimalPlaces    `json:"decimalPlaces"`
+	Note              string           `json:"note"`
+	ShowNoteWhenEmpty bool             `json:"showNoteWhenEmpty"`
+}
+
+// XYViewProperties represents options for line, bar, step, or stacked view in Chronograf
+type XYViewProperties struct {
+	Queries           []DashboardQuery `json:"queries"`
+	Axes              map[string]Axis  `json:"axes"`
+	Type              string           `json:"type"`
+	Legend            Legend           `json:"legend"`
+	Geom              string           `json:"geom"` // Either "line", "step", "stacked", or "bar"
+	ViewColors        []ViewColor      `json:"colors"`
+	Note              string           `json:"note"`
+	ShowNoteWhenEmpty bool             `json:"showNoteWhenEmpty"`
+}
+
+// SingleStatViewProperties represents options for single stat view in Chronograf
+type SingleStatViewProperties struct {
+	Type              string           `json:"type"`
+	Queries           []DashboardQuery `json:"queries"`
+	Prefix            string           `json:"prefix"`
+	Suffix            string           `json:"suffix"`
+	ViewColors        []ViewColor      `json:"colors"`
+	DecimalPlaces     DecimalPlaces    `json:"decimalPlaces"`
+	Note              string           `json:"note"`
+	ShowNoteWhenEmpty bool             `json:"showNoteWhenEmpty"`
+}
+
+// HistogramViewProperties represents options for histogram view in Chronograf
+type HistogramViewProperties struct {
+	Type              string           `json:"type"`
+	Queries           []DashboardQuery `json:"queries"`
+	ViewColors        []ViewColor      `json:"colors"`
+	XColumn           string           `json:"xColumn"`
+	FillColumns       []string         `json:"fillColumns"`
+	XDomain           []float64        `json:"xDomain,omitempty"`
+	XAxisLabel        string           `json:"xAxisLabel"`
+	Position          string           `json:"position"`
+	BinCount          int              `json:"binCount"`
+	Note              string           `json:"note"`
+	ShowNoteWhenEmpty bool             `json:"showNoteWhenEmpty"`
+}
+
+// GaugeViewProperties represents options for gauge view in Chronograf
+type GaugeViewProperties struct {
+	Type              string           `json:"type"`
+	Queries           []DashboardQuery `json:"queries"`
+	Prefix            string           `json:"prefix"`
+	Suffix            string           `json:"suffix"`
+	ViewColors        []ViewColor      `json:"colors"`
+	DecimalPlaces     DecimalPlaces    `json:"decimalPlaces"`
+	Note              string           `json:"note"`
+	ShowNoteWhenEmpty bool             `json:"showNoteWhenEmpty"`
+}
+
+// TableViewProperties represents options for table view in Chronograf
+type TableViewProperties struct {
+	Type              string           `json:"type"`
+	Queries           []DashboardQuery `json:"queries"`
+	ViewColors        []ViewColor      `json:"colors"`
+	TableOptions      TableOptions     `json:"tableOptions"`
+	FieldOptions      []RenamableField `json:"fieldOptions"`
+	TimeFormat        string           `json:"timeFormat"`
+	DecimalPlaces     DecimalPlaces    `json:"decimalPlaces"`
+	Note              string           `json:"note"`
+	ShowNoteWhenEmpty bool             `json:"showNoteWhenEmpty"`
+}
+
+// MarkdownViewProperties represents options for a markdown view in Chronograf
+type MarkdownViewProperties struct {
+	Type string `json:"type"`
+	Note string `json:"note"`
+}
+
+// LogViewProperties represents options for log viewer in Chronograf.
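+//
+// Marshaled through MarshalViewPropertiesJSON, a log viewer view looks roughly like
+// {"shape":"chronograf-v2","type":"log-viewer","columns":[{"name":"severity","position":0,"settings":[]}]}
+// (illustrative example only; the "type" value is whatever is stored in Type).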
+type LogViewProperties struct { + Type string `json:"type"` + Columns []LogViewerColumn `json:"columns"` +} + +// LogViewerColumn represents a specific column in a Log Viewer. +type LogViewerColumn struct { + Name string `json:"name"` + Position int32 `json:"position"` + Settings []LogColumnSetting `json:"settings"` +} + +// LogColumnSetting represent the settings for a specific column of a Log Viewer. +type LogColumnSetting struct { + Type string `json:"type"` + Value string `json:"value"` + Name string `json:"name,omitempty"` +} + +func (XYViewProperties) viewProperties() {} +func (LinePlusSingleStatProperties) viewProperties() {} +func (SingleStatViewProperties) viewProperties() {} +func (HistogramViewProperties) viewProperties() {} +func (GaugeViewProperties) viewProperties() {} +func (TableViewProperties) viewProperties() {} +func (MarkdownViewProperties) viewProperties() {} +func (LogViewProperties) viewProperties() {} + +func (v XYViewProperties) GetType() string { return v.Type } +func (v LinePlusSingleStatProperties) GetType() string { return v.Type } +func (v SingleStatViewProperties) GetType() string { return v.Type } +func (v HistogramViewProperties) GetType() string { return v.Type } +func (v GaugeViewProperties) GetType() string { return v.Type } +func (v TableViewProperties) GetType() string { return v.Type } +func (v MarkdownViewProperties) GetType() string { return v.Type } +func (v LogViewProperties) GetType() string { return v.Type } + +///////////////////////////// +// Old Chronograf Types +///////////////////////////// + +// DashboardQuery represents a query used in a dashboard cell +type DashboardQuery struct { + Text string `json:"text"` + EditMode string `json:"editMode"` // Either "builder" or "advanced" + Name string `json:"name"` // Term or phrase that refers to the query + BuilderConfig BuilderConfig `json:"builderConfig"` +} + +type BuilderConfig struct { + Buckets []string `json:"buckets"` + Tags []struct { + Key string `json:"key"` + Values []string `json:"values"` + } `json:"tags"` + Functions []struct { + Name string `json:"name"` + } `json:"functions"` +} + +// Axis represents the visible extents of a visualization +type Axis struct { + Bounds []string `json:"bounds"` // bounds are an arbitrary list of client-defined strings that specify the viewport for a View + LegacyBounds [2]int64 `json:"-"` // legacy bounds are for testing a migration from an earlier version of axis + Label string `json:"label"` // label is a description of this Axis + Prefix string `json:"prefix"` // Prefix represents a label prefix for formatting axis values + Suffix string `json:"suffix"` // Suffix represents a label suffix for formatting axis values + Base string `json:"base"` // Base represents the radix for formatting axis values + Scale string `json:"scale"` // Scale is the axis formatting scale. Supported: "log", "linear" +} + +// ViewColor represents the encoding of data into visualizations +type ViewColor struct { + ID string `json:"id"` // ID is the unique id of the View color + Type string `json:"type"` // Type is how the color is used. 
Accepted (min,max,threshold) + Hex string `json:"hex"` // Hex is the hex number of the color + Name string `json:"name"` // Name is the user-facing name of the hex color + Value float64 `json:"value"` // Value is the data value mapped to this color +} + +// Legend represents the encoding of data into a legend +type Legend struct { + Type string `json:"type,omitempty"` + Orientation string `json:"orientation,omitempty"` +} + +// TableOptions is a type of options for a DashboardView with type Table +type TableOptions struct { + VerticalTimeAxis bool `json:"verticalTimeAxis"` + SortBy RenamableField `json:"sortBy"` + Wrapping string `json:"wrapping"` + FixFirstColumn bool `json:"fixFirstColumn"` +} + +// RenamableField is a column/row field in a DashboardView of type Table +type RenamableField struct { + InternalName string `json:"internalName"` + DisplayName string `json:"displayName"` + Visible bool `json:"visible"` +} + +// DecimalPlaces indicates whether decimal places should be enforced, and how many digits it should show. +type DecimalPlaces struct { + IsEnforced bool `json:"isEnforced"` + Digits int32 `json:"digits"` +} diff --git a/view_test.go b/dashboard_test.go similarity index 100% rename from view_test.go rename to dashboard_test.go diff --git a/http/api_handler.go b/http/api_handler.go index ce26429220..ce6c03878d 100644 --- a/http/api_handler.go +++ b/http/api_handler.go @@ -72,7 +72,6 @@ type APIBackend struct { ChronografService *server.Service ProtoService influxdb.ProtoService OrgLookupService authorizer.OrganizationService - ViewService influxdb.ViewService DocumentService influxdb.DocumentService } diff --git a/http/auth_service.go b/http/auth_service.go index 85f76c3eaa..b84875b23c 100644 --- a/http/auth_service.go +++ b/http/auth_service.go @@ -65,7 +65,7 @@ func NewAuthorizationHandler(b *AuthorizationBackend) *AuthorizationHandler { h.HandlerFunc("POST", "/api/v2/authorizations", h.handlePostAuthorization) h.HandlerFunc("GET", "/api/v2/authorizations", h.handleGetAuthorizations) h.HandlerFunc("GET", "/api/v2/authorizations/:id", h.handleGetAuthorization) - h.HandlerFunc("PATCH", "/api/v2/authorizations/:id", h.handleSetAuthorizationStatus) + h.HandlerFunc("PATCH", "/api/v2/authorizations/:id", h.handleUpdateAuthorization) h.HandlerFunc("DELETE", "/api/v2/authorizations/:id", h.handleDeleteAuthorization) return h } @@ -459,11 +459,11 @@ func decodeGetAuthorizationRequest(ctx context.Context, r *http.Request) (*getAu }, nil } -// handleSetAuthorizationStatus is the HTTP handler for the PATCH /api/v2/authorizations/:id route that updates the authorization's status. -func (h *AuthorizationHandler) handleSetAuthorizationStatus(w http.ResponseWriter, r *http.Request) { +// handleUpdateAuthorization is the HTTP handler for the PATCH /api/v2/authorizations/:id route that updates the authorization's status and desc. 
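+// The request body is decoded into a platform.AuthorizationUpdate, so a body of, for
+// example, {"status":"inactive","description":"revoked"} deactivates the token and
+// replaces its description; fields omitted from the body are not updated.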
+func (h *AuthorizationHandler) handleUpdateAuthorization(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - req, err := decodeSetAuthorizationStatusRequest(ctx, r) + req, err := decodeUpdateAuthorizationRequest(ctx, r) if err != nil { h.Logger.Info("failed to decode request", zap.String("handler", "updateAuthorization"), zap.Error(err)) EncodeError(ctx, err, w) @@ -476,12 +476,9 @@ func (h *AuthorizationHandler) handleSetAuthorizationStatus(w http.ResponseWrite return } - if req.Status != a.Status { - a.Status = req.Status - if err := h.AuthorizationService.SetAuthorizationStatus(ctx, a.ID, a.Status); err != nil { - EncodeError(ctx, err, w) - return - } + if err := h.AuthorizationService.UpdateAuthorization(ctx, a.ID, req.AuthorizationUpdate); err != nil { + EncodeError(ctx, err, w) + return } o, err := h.OrganizationService.FindOrganizationByID(ctx, a.OrgID) @@ -509,11 +506,11 @@ func (h *AuthorizationHandler) handleSetAuthorizationStatus(w http.ResponseWrite } type updateAuthorizationRequest struct { - ID platform.ID - Status platform.Status + ID platform.ID + *platform.AuthorizationUpdate } -func decodeSetAuthorizationStatusRequest(ctx context.Context, r *http.Request) (*updateAuthorizationRequest, error) { +func decodeUpdateAuthorizationRequest(ctx context.Context, r *http.Request) (*updateAuthorizationRequest, error) { params := httprouter.ParamsFromContext(ctx) id := params.ByName("id") if id == "" { @@ -528,14 +525,14 @@ func decodeSetAuthorizationStatusRequest(ctx context.Context, r *http.Request) ( return nil, err } - a := &setAuthorizationStatusRequest{} - if err := json.NewDecoder(r.Body).Decode(a); err != nil { + upd := &platform.AuthorizationUpdate{} + if err := json.NewDecoder(r.Body).Decode(upd); err != nil { return nil, err } return &updateAuthorizationRequest{ - ID: i, - Status: a.Status, + ID: i, + AuthorizationUpdate: upd, }, nil } @@ -745,20 +742,14 @@ func (s *AuthorizationService) CreateAuthorization(ctx context.Context, a *platf return nil } -type setAuthorizationStatusRequest struct { - Status platform.Status `json:"status"` -} - -// SetAuthorizationStatus updates an authorization's status. -func (s *AuthorizationService) SetAuthorizationStatus(ctx context.Context, id platform.ID, status platform.Status) error { +// UpdateAuthorization updates the status and description if available. 
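+// This client-side implementation JSON-encodes upd and sends it to authorizationIDPath(id),
+// i.e. the PATCH /api/v2/authorizations/:id route served by handleUpdateAuthorization above.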
+func (s *AuthorizationService) UpdateAuthorization(ctx context.Context, id platform.ID, upd *platform.AuthorizationUpdate) error { u, err := newURL(s.Addr, authorizationIDPath(id)) if err != nil { return err } - b, err := json.Marshal(setAuthorizationStatusRequest{ - Status: status, - }) + b, err := json.Marshal(upd) if err != nil { return err } diff --git a/http/auth_test.go b/http/auth_test.go index b9c57eb5d9..2e26116fd3 100644 --- a/http/auth_test.go +++ b/http/auth_test.go @@ -776,8 +776,8 @@ func TestAuthorizationService_DeleteAuthorization(t *testing.T) { platformtesting.DeleteAuthorization(initAuthorizationService, t) } -func TestAuthorizationService_UpdateAuthorizationStatus(t *testing.T) { - platformtesting.UpdateAuthorizationStatus(initAuthorizationService, t) +func TestAuthorizationService_UpdateAuthorization(t *testing.T) { + platformtesting.UpdateAuthorization(initAuthorizationService, t) } func MustMarshal(o interface{}) []byte { diff --git a/http/bucket_service.go b/http/bucket_service.go index 3b059f7389..460a3e53cf 100644 --- a/http/bucket_service.go +++ b/http/bucket_service.go @@ -120,7 +120,6 @@ func NewBucketHandler(b *BucketBackend) *BucketHandler { h.HandlerFunc("GET", bucketsIDLabelsPath, newGetLabelsHandler(labelBackend)) h.HandlerFunc("POST", bucketsIDLabelsPath, newPostLabelHandler(labelBackend)) h.HandlerFunc("DELETE", bucketsIDLabelsIDPath, newDeleteLabelHandler(labelBackend)) - h.HandlerFunc("PATCH", bucketsIDLabelsIDPath, newPatchLabelHandler(labelBackend)) return h } diff --git a/http/dashboard_service.go b/http/dashboard_service.go index f3633b6698..9d054f50fe 100644 --- a/http/dashboard_service.go +++ b/http/dashboard_service.go @@ -125,7 +125,6 @@ func NewDashboardHandler(b *DashboardBackend) *DashboardHandler { h.HandlerFunc("GET", dashboardsIDLabelsPath, newGetLabelsHandler(labelBackend)) h.HandlerFunc("POST", dashboardsIDLabelsPath, newPostLabelHandler(labelBackend)) h.HandlerFunc("DELETE", dashboardsIDLabelsIDPath, newDeleteLabelHandler(labelBackend)) - h.HandlerFunc("PATCH", dashboardsIDLabelsIDPath, newPatchLabelHandler(labelBackend)) return h } @@ -230,8 +229,34 @@ func newDashboardCellsResponse(dashboardID platform.ID, cs []*platform.Cell) das return res } -func newDashboardCellViewResponse(dashID, cellID platform.ID, v *platform.View) viewResponse { - return viewResponse{ +type viewLinks struct { + Self string `json:"self"` +} + +type dashboardCellViewResponse struct { + platform.View + Links viewLinks `json:"links"` +} + +func (r dashboardCellViewResponse) MarshalJSON() ([]byte, error) { + props, err := platform.MarshalViewPropertiesJSON(r.Properties) + if err != nil { + return nil, err + } + + return json.Marshal(struct { + platform.ViewContents + Links viewLinks `json:"links"` + Properties json.RawMessage `json:"properties"` + }{ + ViewContents: r.ViewContents, + Links: r.Links, + Properties: props, + }) +} + +func newDashboardCellViewResponse(dashID, cellID platform.ID, v *platform.View) dashboardCellViewResponse { + return dashboardCellViewResponse{ Links: viewLinks{ Self: fmt.Sprintf("/api/v2/dashboards/%s/cells/%s", dashID, cellID), }, @@ -1322,7 +1347,7 @@ func (s *DashboardService) GetDashboardCellView(ctx context.Context, dashboardID return nil, err } - res := viewResponse{} + res := dashboardCellViewResponse{} if err := json.NewDecoder(resp.Body).Decode(&res); err != nil { return nil, err } @@ -1362,7 +1387,7 @@ func (s *DashboardService) UpdateDashboardCellView(ctx context.Context, dashboar return nil, err } - res := viewResponse{} 
+ res := dashboardCellViewResponse{} if err := json.NewDecoder(resp.Body).Decode(&res); err != nil { return nil, err } diff --git a/http/dashboard_test.go b/http/dashboard_test.go index 80a0f1017e..d73f7a8d96 100644 --- a/http/dashboard_test.go +++ b/http/dashboard_test.go @@ -13,11 +13,14 @@ import ( "go.uber.org/zap" + "github.com/google/go-cmp/cmp" platform "github.com/influxdata/influxdb" "github.com/influxdata/influxdb/inmem" "github.com/influxdata/influxdb/mock" platformtesting "github.com/influxdata/influxdb/testing" "github.com/julienschmidt/httprouter" + "github.com/yudai/gojsondiff" + "github.com/yudai/gojsondiff/formatter" ) // NewMockDashboardBackend returns a DashboardBackend with mock services. @@ -1265,11 +1268,6 @@ func initDashboardService(f platformtesting.DashboardFields, t *testing.T) (plat t.Fatalf("failed to populate dashboard") } } - for _, b := range f.Views { - if err := svc.PutView(ctx, b); err != nil { - t.Fatalf("failed to populate views") - } - } dashboardBackend := NewMockDashboardBackend() dashboardBackend.DashboardService = svc @@ -1386,3 +1384,39 @@ func TestService_handlePostDashboardLabel(t *testing.T) { }) } } + +func jsonEqual(s1, s2 string) (eq bool, diff string, err error) { + var o1, o2 interface{} + if s1 == s2 { + return true, "", nil + } + + if s1 == "" { + return false, s2, fmt.Errorf("s1 is empty") + } + + if s2 == "" { + return false, s1, fmt.Errorf("s2 is empty") + } + + if err = json.Unmarshal([]byte(s1), &o1); err != nil { + return + } + + if err = json.Unmarshal([]byte(s2), &o2); err != nil { + return + } + + differ := gojsondiff.New() + d, err := differ.Compare([]byte(s1), []byte(s2)) + if err != nil { + return + } + + config := formatter.AsciiFormatterConfig{} + + formatter := formatter.NewAsciiFormatter(o1, config) + diff, err = formatter.Format(d) + + return cmp.Equal(o1, o2), diff, err +} diff --git a/http/label_service.go b/http/label_service.go index b8ca6de56b..d087db564c 100644 --- a/http/label_service.go +++ b/http/label_service.go @@ -432,30 +432,6 @@ func decodePostLabelMappingRequest(ctx context.Context, r *http.Request, rt plat return req, nil } -// newPatchLabelHandler returns a handler func for a PATCH to /labels endpoints -func newPatchLabelHandler(b *LabelBackend) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - req, err := decodePatchLabelRequest(ctx, r) - if err != nil { - EncodeError(ctx, err, w) - return - } - - label, err := b.LabelService.UpdateLabel(ctx, req.LabelID, req.Update) - if err != nil { - EncodeError(ctx, err, w) - return - } - - if err := encodeResponse(ctx, w, http.StatusOK, newLabelResponse(label)); err != nil { - logEncodingError(b.Logger, r, err) - return - } - } -} - // newDeleteLabelHandler returns a handler func for a DELETE to /labels endpoints func newDeleteLabelHandler(b *LabelBackend) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { diff --git a/http/org_service.go b/http/org_service.go index 46455f46e2..9615590715 100644 --- a/http/org_service.go +++ b/http/org_service.go @@ -127,7 +127,6 @@ func NewOrgHandler(b *OrgBackend) *OrgHandler { h.HandlerFunc("GET", organizationsIDLabelsPath, newGetLabelsHandler(labelBackend)) h.HandlerFunc("POST", organizationsIDLabelsPath, newPostLabelHandler(labelBackend)) h.HandlerFunc("DELETE", organizationsIDLabelsIDPath, newDeleteLabelHandler(labelBackend)) - h.HandlerFunc("PATCH", organizationsIDLabelsIDPath, newPatchLabelHandler(labelBackend)) return h } diff --git a/http/swagger.yml 
b/http/swagger.yml index cd4779251b..755867e4de 100644 --- a/http/swagger.yml +++ b/http/swagger.yml @@ -1262,6 +1262,98 @@ paths: application/json: schema: $ref: "#/components/schemas/Error" + '/variables/{variableID}/labels': + get: + tags: + - Variables + summary: list all labels for a variable + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + schema: + type: string + required: true + description: ID of the variable + responses: + '200': + description: a list of all labels for a variable + content: + application/json: + schema: + $ref: "#/components/schemas/LabelsResponse" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + post: + tags: + - Variables + summary: add a label to a variable + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + schema: + type: string + required: true + description: ID of the variable + requestBody: + description: label to add + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/LabelMapping" + responses: + '200': + description: a list of all labels for a variable + content: + application/json: + schema: + $ref: "#/components/schemas/LabelsResponse" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + '/variables/{variableID}/labels/{labelID}': + delete: + tags: + - Variables + summary: delete a label from a variable + parameters: + - $ref: '#/components/parameters/TraceSpan' + - in: path + name: variableID + schema: + type: string + required: true + description: ID of the variable + - in: path + name: labelID + schema: + type: string + required: true + description: the label id to delete + responses: + '204': + description: delete has been accepted + '404': + description: variable not found + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" /write: post: tags: @@ -2700,7 +2792,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/Authorization" + $ref: "#/components/schemas/AuthorizationUpdateRequest" parameters: - $ref: '#/components/parameters/TraceSpan' - in: path @@ -4677,12 +4769,8 @@ paths: schema: $ref: "#/components/schemas/PasswordResetBody" responses: - '200': - description: user details - content: - application/json: - schema: - $ref: "#/components/schemas/User" + '204': + description: password successfully updated default: description: unsuccessful authentication content: @@ -5346,12 +5434,8 @@ components: type: string nullable: true description: optional name of the organization of the organization with orgID. - Authorization: - required: [orgID, permissions] + AuthorizationUpdateRequest: properties: - orgID: - type: string - description: ID of org that authorization is scoped to. status: description: if inactive the token is inactive and requests using the token will be rejected. default: active @@ -5362,46 +5446,55 @@ components: description: type: string description: A description of the token. - permissions: - type: array - minLength: 1 - description: List of permissions for an auth. An auth must have at least one Permission. 
- items: - $ref: "#/components/schemas/Permission" - id: - readOnly: true - type: string - token: - readOnly: true - type: string - description: Passed via the Authorization Header and Token Authentication type. - userID: - readOnly: true - type: string - description: ID of user that created and owns the token. - user: - readOnly: true - type: string - description: Name of user that created and owns the token. - org: - readOnly: true - type: string - description: Name of the org token is scoped to. - links: - type: object - readOnly: true - example: - self: "/api/v2/authorizations/1" - user: "/api/v2/users/12" + Authorization: + required: [orgID, permissions] + allOf: + - $ref: "#/components/schemas/AuthorizationUpdateRequest" + - type: object properties: - self: + orgID: + type: string + description: ID of org that authorization is scoped to. + permissions: + type: array + minLength: 1 + description: List of permissions for an auth. An auth must have at least one Permission. + items: + $ref: "#/components/schemas/Permission" + id: readOnly: true type: string - format: uri + token: + readOnly: true + type: string + description: Passed via the Authorization Header and Token Authentication type. + userID: + readOnly: true + type: string + description: ID of user that created and owns the token. user: readOnly: true type: string - format: uri + description: Name of user that created and owns the token. + org: + readOnly: true + type: string + description: Name of the org token is scoped to. + links: + type: object + readOnly: true + example: + self: "/api/v2/authorizations/1" + user: "/api/v2/users/12" + properties: + self: + readOnly: true + type: string + format: uri + user: + readOnly: true + type: string + format: uri Authorizations: type: object properties: @@ -6435,6 +6528,8 @@ components: enum: ["map"] values: type: object + additionalProperties: + type: string QueryVariableProperties: properties: type: @@ -6461,6 +6556,12 @@ components: self: type: string format: uri + org: + type: string + format: uri + labels: + type: string + format: uri id: readOnly: true type: string @@ -6472,6 +6573,8 @@ components: type: array items: type: string + labels: + $ref: "#/components/schemas/Labels" arguments: type: object oneOf: @@ -6724,10 +6827,10 @@ components: properties: createdAt: type: string - format: date + format: date-time updatedAt: type: string - format: date + format: date-time cells: $ref: "#/components/schemas/Cells" labels: @@ -6892,6 +6995,15 @@ components: type: object labels: $ref: "#/components/schemas/Labels" + links: + type: object + readOnly: true + example: + self: "/api/v2/documents/templates/1" + properties: + self: + description: URL for this document + $ref: "#/components/schemas/Link" required: - id - meta @@ -6934,6 +7046,15 @@ components: $ref: "#/components/schemas/DocumentMeta" labels: $ref: "#/components/schemas/Labels" + links: + type: object + readOnly: true + example: + self: "/api/v2/documents/templates/1" + properties: + self: + description: URL for this document + $ref: "#/components/schemas/Link" required: - id - meta diff --git a/http/task_service.go b/http/task_service.go index 2fd17ee14a..cedb13fa9f 100644 --- a/http/task_service.go +++ b/http/task_service.go @@ -145,7 +145,6 @@ func NewTaskHandler(b *TaskBackend) *TaskHandler { h.HandlerFunc("GET", tasksIDLabelsPath, newGetLabelsHandler(labelBackend)) h.HandlerFunc("POST", tasksIDLabelsPath, newPostLabelHandler(labelBackend)) h.HandlerFunc("DELETE", tasksIDLabelsIDPath, 
newDeleteLabelHandler(labelBackend)) - h.HandlerFunc("PATCH", tasksIDLabelsIDPath, newPatchLabelHandler(labelBackend)) return h } diff --git a/http/task_test.go b/http/task_test.go index 25c3af7a16..292b1f9920 100644 --- a/http/task_test.go +++ b/http/task_test.go @@ -71,7 +71,7 @@ func httpTaskServiceFactory(t *testing.T) (*servicetest.System, context.CancelFu } return &servicetest.System{ - TaskControlService: servicetest.TaskControlAdaptor(store, rrw, rrw), + TaskControlService: backend.TaskControlAdaptor(store, rrw, rrw), TaskService: taskService, Ctx: ctx, I: i, diff --git a/http/telegraf.go b/http/telegraf.go index fc5dafcc48..461152db7c 100644 --- a/http/telegraf.go +++ b/http/telegraf.go @@ -111,7 +111,6 @@ func NewTelegrafHandler(b *TelegrafBackend) *TelegrafHandler { h.HandlerFunc("GET", telegrafsIDLabelsPath, newGetLabelsHandler(labelBackend)) h.HandlerFunc("POST", telegrafsIDLabelsPath, newPostLabelHandler(labelBackend)) h.HandlerFunc("DELETE", telegrafsIDLabelsIDPath, newDeleteLabelHandler(labelBackend)) - h.HandlerFunc("PATCH", telegrafsIDLabelsIDPath, newPatchLabelHandler(labelBackend)) return h } diff --git a/http/variable_service.go b/http/variable_service.go index 81bb25e025..22a6458fe5 100644 --- a/http/variable_service.go +++ b/http/variable_service.go @@ -22,12 +22,15 @@ const ( type VariableBackend struct { Logger *zap.Logger VariableService platform.VariableService + LabelService platform.LabelService } +// NewVariableBackend creates a backend used by the variable handler. func NewVariableBackend(b *APIBackend) *VariableBackend { return &VariableBackend{ Logger: b.Logger.With(zap.String("handler", "variable")), VariableService: b.VariableService, + LabelService: b.LabelService, } } @@ -38,6 +41,7 @@ type VariableHandler struct { Logger *zap.Logger VariableService platform.VariableService + LabelService platform.LabelService } // NewVariableHandler creates a new VariableHandler @@ -47,9 +51,12 @@ func NewVariableHandler(b *VariableBackend) *VariableHandler { Logger: b.Logger, VariableService: b.VariableService, + LabelService: b.LabelService, } entityPath := fmt.Sprintf("%s/:id", variablePath) + entityLabelsPath := fmt.Sprintf("%s/labels", entityPath) + entityLabelsIDPath := fmt.Sprintf("%s/:lid", entityLabelsPath) h.HandlerFunc("GET", variablePath, h.handleGetVariables) h.HandlerFunc("POST", variablePath, h.handlePostVariable) @@ -58,6 +65,15 @@ func NewVariableHandler(b *VariableBackend) *VariableHandler { h.HandlerFunc("PUT", entityPath, h.handlePutVariable) h.HandlerFunc("DELETE", entityPath, h.handleDeleteVariable) + labelBackend := &LabelBackend{ + Logger: b.Logger.With(zap.String("handler", "label")), + LabelService: b.LabelService, + ResourceType: platform.DashboardsResourceType, + } + h.HandlerFunc("GET", entityLabelsPath, newGetLabelsHandler(labelBackend)) + h.HandlerFunc("POST", entityLabelsPath, newPostLabelHandler(labelBackend)) + h.HandlerFunc("DELETE", entityLabelsIDPath, newDeleteLabelHandler(labelBackend)) + return h } @@ -74,7 +90,7 @@ func (r getVariablesResponse) ToPlatform() []*platform.Variable { return variables } -func newGetVariablesResponse(variables []*platform.Variable, f platform.VariableFilter, opts platform.FindOptions) getVariablesResponse { +func newGetVariablesResponse(ctx context.Context, variables []*platform.Variable, f platform.VariableFilter, opts platform.FindOptions, labelService platform.LabelService) getVariablesResponse { num := len(variables) resp := getVariablesResponse{ Variables: make([]variableResponse, 0, num), @@ 
-82,7 +98,8 @@ func newGetVariablesResponse(variables []*platform.Variable, f platform.Variable } for _, variable := range variables { - resp.Variables = append(resp.Variables, newVariableResponse(variable)) + labels, _ := labelService.FindResourceLabels(ctx, platform.LabelMappingFilter{ResourceID: variable.ID}) + resp.Variables = append(resp.Variables, newVariableResponse(variable, labels)) } return resp @@ -138,7 +155,7 @@ func (h *VariableHandler) handleGetVariables(w http.ResponseWriter, r *http.Requ return } - err = encodeResponse(ctx, w, http.StatusOK, newGetVariablesResponse(variables, req.filter, req.opts)) + err = encodeResponse(ctx, w, http.StatusOK, newGetVariablesResponse(ctx, variables, req.filter, req.opts, h.LabelService)) if err != nil { logEncodingError(h.Logger, r, err) return @@ -181,7 +198,13 @@ func (h *VariableHandler) handleGetVariable(w http.ResponseWriter, r *http.Reque return } - err = encodeResponse(ctx, w, http.StatusOK, newVariableResponse(variable)) + labels, err := h.LabelService.FindResourceLabels(ctx, platform.LabelMappingFilter{ResourceID: variable.ID}) + if err != nil { + EncodeError(ctx, err, w) + return + } + + err = encodeResponse(ctx, w, http.StatusOK, newVariableResponse(variable, labels)) if err != nil { logEncodingError(h.Logger, r, err) return @@ -189,23 +212,33 @@ func (h *VariableHandler) handleGetVariable(w http.ResponseWriter, r *http.Reque } type variableLinks struct { - Self string `json:"self"` - Org string `json:"org"` + Self string `json:"self"` + Labels string `json:"labels"` + Org string `json:"org"` } type variableResponse struct { *platform.Variable - Links variableLinks `json:"links"` + Labels []platform.Label `json:"labels"` + Links variableLinks `json:"links"` } -func newVariableResponse(m *platform.Variable) variableResponse { - return variableResponse{ +func newVariableResponse(m *platform.Variable, labels []*platform.Label) variableResponse { + res := variableResponse{ Variable: m, + Labels: []platform.Label{}, Links: variableLinks{ - Self: fmt.Sprintf("/api/v2/variables/%s", m.ID), - Org: fmt.Sprintf("/api/v2/orgs/%s", m.OrganizationID), + Self: fmt.Sprintf("/api/v2/variables/%s", m.ID), + Labels: fmt.Sprintf("/api/v2/variables/%s/labels", m.ID), + Org: fmt.Sprintf("/api/v2/orgs/%s", m.OrganizationID), }, } + + for _, l := range labels { + res.Labels = append(res.Labels, *l) + } + + return res } func (h *VariableHandler) handlePostVariable(w http.ResponseWriter, r *http.Request) { @@ -223,8 +256,7 @@ func (h *VariableHandler) handlePostVariable(w http.ResponseWriter, r *http.Requ return } - err = encodeResponse(ctx, w, http.StatusCreated, newVariableResponse(req.variable)) - if err != nil { + if err := encodeResponse(ctx, w, http.StatusCreated, newVariableResponse(req.variable, []*platform.Label{})); err != nil { logEncodingError(h.Logger, r, err) return } @@ -278,7 +310,13 @@ func (h *VariableHandler) handlePatchVariable(w http.ResponseWriter, r *http.Req return } - err = encodeResponse(ctx, w, http.StatusOK, newVariableResponse(variable)) + labels, err := h.LabelService.FindResourceLabels(ctx, platform.LabelMappingFilter{ResourceID: variable.ID}) + if err != nil { + EncodeError(ctx, err, w) + return + } + + err = encodeResponse(ctx, w, http.StatusOK, newVariableResponse(variable, labels)) if err != nil { logEncodingError(h.Logger, r, err) return @@ -340,7 +378,13 @@ func (h *VariableHandler) handlePutVariable(w http.ResponseWriter, r *http.Reque return } - err = encodeResponse(ctx, w, http.StatusOK, 
newVariableResponse(req.variable)) + labels, err := h.LabelService.FindResourceLabels(ctx, platform.LabelMappingFilter{ResourceID: req.variable.ID}) + if err != nil { + EncodeError(ctx, err, w) + return + } + + err = encodeResponse(ctx, w, http.StatusOK, newVariableResponse(req.variable, labels)) if err != nil { logEncodingError(h.Logger, r, err) return diff --git a/http/variable_test.go b/http/variable_test.go index f3de3223e1..a86ee32527 100644 --- a/http/variable_test.go +++ b/http/variable_test.go @@ -3,6 +3,7 @@ package http import ( "bytes" "context" + "encoding/json" "fmt" "io/ioutil" "net/http" @@ -23,12 +24,14 @@ func NewMockVariableBackend() *VariableBackend { return &VariableBackend{ Logger: zap.NewNop().With(zap.String("handler", "variable")), VariableService: mock.NewVariableService(), + LabelService: mock.NewLabelService(), } } func TestVariableService_handleGetVariables(t *testing.T) { type fields struct { VariableService platform.VariableService + LabelService platform.LabelService } type args struct { queryParams map[string][]string @@ -74,11 +77,25 @@ func TestVariableService_handleGetVariables(t *testing.T) { }, nil }, }, + &mock.LabelService{ + FindResourceLabelsFn: func(ctx context.Context, f platform.LabelMappingFilter) ([]*platform.Label, error) { + labels := []*platform.Label{ + { + ID: platformtesting.MustIDBase16("fc3dc670a4be9b9a"), + Name: "label", + Properties: map[string]string{ + "color": "fff000", + }, + }, + } + return labels, nil + }, + }, }, wants: wants{ statusCode: http.StatusOK, contentType: "application/json; charset=utf-8", - body: `{"variables":[{"id":"6162207574726f71","orgID":"0000000000000001","name":"variable-a","selected":["b"],"arguments":{"type":"constant","values":["a","b"]},"links":{"self":"/api/v2/variables/6162207574726f71","org": "/api/v2/orgs/0000000000000001"}},{"id":"61726920617a696f","orgID":"0000000000000001","name":"variable-b","selected":["c"],"arguments":{"type":"map","values":{"a":"b","c":"d"}},"links":{"self":"/api/v2/variables/61726920617a696f","org": "/api/v2/orgs/0000000000000001"}}],"links":{"self":"/api/v2/variables?descending=false&limit=20&offset=0"}}`, + body: `{"variables":[{"id":"6162207574726f71","orgID":"0000000000000001","name":"variable-a","selected":["b"],"arguments":{"type":"constant","values":["a","b"]},"labels":[{"id":"fc3dc670a4be9b9a","name":"label","properties":{"color":"fff000"}}],"links":{"self":"/api/v2/variables/6162207574726f71","labels":"/api/v2/variables/6162207574726f71/labels","org":"/api/v2/orgs/0000000000000001"}},{"id":"61726920617a696f","orgID":"0000000000000001","name":"variable-b","selected":["c"],"arguments":{"type":"map","values":{"a":"b","c":"d"}},"labels":[{"id":"fc3dc670a4be9b9a","name":"label","properties":{"color":"fff000"}}],"links":{"self":"/api/v2/variables/61726920617a696f","labels":"/api/v2/variables/61726920617a696f/labels","org": "/api/v2/orgs/0000000000000001"}}],"links":{"self":"/api/v2/variables?descending=false&limit=20&offset=0"}}`, }, }, { @@ -89,6 +106,11 @@ func TestVariableService_handleGetVariables(t *testing.T) { return []*platform.Variable{}, nil }, }, + &mock.LabelService{ + FindResourceLabelsFn: func(ctx context.Context, f platform.LabelMappingFilter) ([]*platform.Label, error) { + return []*platform.Label{}, nil + }, + }, }, args: args{ map[string][]string{ @@ -120,6 +142,20 @@ func TestVariableService_handleGetVariables(t *testing.T) { }, nil }, }, + &mock.LabelService{ + FindResourceLabelsFn: func(ctx context.Context, f platform.LabelMappingFilter) 
([]*platform.Label, error) { + labels := []*platform.Label{ + { + ID: platformtesting.MustIDBase16("fc3dc670a4be9b9a"), + Name: "label", + Properties: map[string]string{ + "color": "fff000", + }, + }, + } + return labels, nil + }, + }, }, args: args{ map[string][]string{ @@ -129,7 +165,7 @@ func TestVariableService_handleGetVariables(t *testing.T) { wants: wants{ statusCode: http.StatusOK, contentType: "application/json; charset=utf-8", - body: `{"variables":[{"id":"6162207574726f71","orgID":"0000000000000001","name":"variable-a","selected":["b"],"arguments":{"type":"constant","values":["a","b"]},"links":{"self":"/api/v2/variables/6162207574726f71","org":"/api/v2/orgs/0000000000000001"}}],"links":{"self":"/api/v2/variables?descending=false&limit=20&offset=0&orgID=0000000000000001"}}`, + body: `{"variables":[{"id":"6162207574726f71","orgID":"0000000000000001","name":"variable-a","selected":["b"],"arguments":{"type":"constant","values":["a","b"]},"labels":[{"id":"fc3dc670a4be9b9a","name":"label","properties":{"color": "fff000"}}],"links":{"self":"/api/v2/variables/6162207574726f71","org":"/api/v2/orgs/0000000000000001","labels":"/api/v2/variables/6162207574726f71/labels"}}],"links":{"self":"/api/v2/variables?descending=false&limit=20&offset=0&orgID=0000000000000001"}}`, }, }, } @@ -137,10 +173,12 @@ func TestVariableService_handleGetVariables(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { variableBackend := NewMockVariableBackend() + variableBackend.LabelService = tt.fields.LabelService variableBackend.VariableService = tt.fields.VariableService h := NewVariableHandler(variableBackend) r := httptest.NewRequest("GET", "http://howdy.tld", nil) + qp := r.URL.Query() for k, vs := range tt.args.queryParams { for _, v := range vs { @@ -213,7 +251,7 @@ func TestVariableService_handleGetVariable(t *testing.T) { wants: wants{ statusCode: 200, contentType: "application/json; charset=utf-8", - body: `{"id":"75650d0a636f6d70","orgID":"0000000000000001","name":"variable-a","selected":["b"],"arguments":{"type":"constant","values":["a","b"]},"links":{"self":"/api/v2/variables/75650d0a636f6d70","org":"/api/v2/orgs/0000000000000001"}} + body: `{"id":"75650d0a636f6d70","orgID":"0000000000000001","name":"variable-a","selected":["b"],"arguments":{"type":"constant","values":["a","b"]},"labels":[],"links":{"self":"/api/v2/variables/75650d0a636f6d70","labels":"/api/v2/variables/75650d0a636f6d70/labels","org":"/api/v2/orgs/0000000000000001"}} `, }, }, @@ -291,7 +329,6 @@ func TestVariableService_handleGetVariable(t *testing.T) { if body != tt.wants.body { t.Errorf("got = %v, want %v", body, tt.wants.body) } - }) } } @@ -347,7 +384,7 @@ func TestVariableService_handlePostVariable(t *testing.T) { wants: wants{ statusCode: 201, contentType: "application/json; charset=utf-8", - body: `{"id":"75650d0a636f6d70","orgID":"0000000000000001","name":"my-great-variable","selected":["'foo'"],"arguments":{"type":"constant","values":["bar","foo"]},"links":{"self":"/api/v2/variables/75650d0a636f6d70","org":"/api/v2/orgs/0000000000000001"}} + body: `{"id":"75650d0a636f6d70","orgID":"0000000000000001","name":"my-great-variable","selected":["'foo'"],"arguments":{"type":"constant","values":["bar","foo"]},"labels":[],"links":{"self":"/api/v2/variables/75650d0a636f6d70","labels":"/api/v2/variables/75650d0a636f6d70/labels","org":"/api/v2/orgs/0000000000000001"}} `, }, }, @@ -464,7 +501,7 @@ func TestVariableService_handlePatchVariable(t *testing.T) { wants: wants{ statusCode: 200, contentType: 
"application/json; charset=utf-8", - body: `{"id":"75650d0a636f6d70","orgID":"0000000000000002","name":"new-name","selected":[],"arguments":{"type":"constant","values":[]},"links":{"self":"/api/v2/variables/75650d0a636f6d70","org":"/api/v2/orgs/0000000000000002"}} + body: `{"id":"75650d0a636f6d70","orgID":"0000000000000002","name":"new-name","selected":[],"arguments":{"type":"constant","values":[]},"labels":[],"links":{"self":"/api/v2/variables/75650d0a636f6d70","labels":"/api/v2/variables/75650d0a636f6d70/labels","org":"/api/v2/orgs/0000000000000002"}} `, }, }, @@ -604,6 +641,104 @@ func TestVariableService_handleDeleteVariable(t *testing.T) { } } +func TestService_handlePostVariableLabel(t *testing.T) { + type fields struct { + LabelService platform.LabelService + } + type args struct { + labelMapping *platform.LabelMapping + variableID platform.ID + } + type wants struct { + statusCode int + contentType string + body string + } + + tests := []struct { + name string + fields fields + args args + wants wants + }{ + { + name: "add label to variable", + fields: fields{ + LabelService: &mock.LabelService{ + FindLabelByIDFn: func(ctx context.Context, id platform.ID) (*platform.Label, error) { + return &platform.Label{ + ID: 1, + Name: "label", + Properties: map[string]string{ + "color": "fff000", + }, + }, nil + }, + CreateLabelMappingFn: func(ctx context.Context, m *platform.LabelMapping) error { return nil }, + }, + }, + args: args{ + labelMapping: &platform.LabelMapping{ + ResourceID: 100, + LabelID: 1, + }, + variableID: 100, + }, + wants: wants{ + statusCode: http.StatusCreated, + contentType: "application/json; charset=utf-8", + body: ` +{ + "label": { + "id": "0000000000000001", + "name": "label", + "properties": { + "color": "fff000" + } + }, + "links": { + "self": "/api/v2/labels/0000000000000001" + } +} +`, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + variableBackend := NewMockVariableBackend() + variableBackend.LabelService = tt.fields.LabelService + h := NewVariableHandler(variableBackend) + + b, err := json.Marshal(tt.args.labelMapping) + if err != nil { + t.Fatalf("failed to unmarshal label mapping: %v", err) + } + + url := fmt.Sprintf("http://localhost:9999/api/v2/variables/%s/labels", tt.args.variableID) + r := httptest.NewRequest("POST", url, bytes.NewReader(b)) + w := httptest.NewRecorder() + + h.ServeHTTP(w, r) + + res := w.Result() + content := res.Header.Get("Content-Type") + body, _ := ioutil.ReadAll(res.Body) + + if res.StatusCode != tt.wants.statusCode { + t.Errorf("got %v, want %v", res.StatusCode, tt.wants.statusCode) + } + if tt.wants.contentType != "" && content != tt.wants.contentType { + t.Errorf("got %v, want %v", content, tt.wants.contentType) + } + if eq, diff, _ := jsonEqual(string(body), tt.wants.body); tt.wants.body != "" && !eq { + t.Errorf("Diff\n%s", diff) + } + }) + } +} + func initVariableService(f platformtesting.VariableFields, t *testing.T) (platform.VariableService, string, func()) { t.Helper() svc := inmem.NewService() diff --git a/http/view_service.go b/http/view_service.go deleted file mode 100644 index 1f02884368..0000000000 --- a/http/view_service.go +++ /dev/null @@ -1,407 +0,0 @@ -// NOTE: This service has been deprecated and should not be used. -// Views are now resources that belong to dashboards. The reason for -// this is due to how we authorize operations against views. 
-package http - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - - "github.com/influxdata/influxdb" - "github.com/julienschmidt/httprouter" - "go.uber.org/zap" -) - -// ViewBackend is all services and associated parameters required to construct -// the ScraperHandler. -type ViewBackend struct { - Logger *zap.Logger - - ViewService influxdb.ViewService - UserService influxdb.UserService - UserResourceMappingService influxdb.UserResourceMappingService - LabelService influxdb.LabelService -} - -// NewViewBackend returns a new instance of ViewBackend. -func NewViewBackend(b *APIBackend) *ViewBackend { - return &ViewBackend{ - Logger: b.Logger.With(zap.String("handler", "scraper")), - - ViewService: b.ViewService, - UserService: b.UserService, - LabelService: b.LabelService, - } -} - -// ViewHandler is the handler for the view service -type ViewHandler struct { - *httprouter.Router - - Logger *zap.Logger - - ViewService influxdb.ViewService - UserService influxdb.UserService - UserResourceMappingService influxdb.UserResourceMappingService - LabelService influxdb.LabelService -} - -const ( - viewsPath = "/api/v2/views" - viewsIDPath = "/api/v2/views/:id" - viewsIDMembersPath = "/api/v2/views/:id/members" - viewsIDMembersIDPath = "/api/v2/views/:id/members/:userID" - viewsIDOwnersPath = "/api/v2/views/:id/owners" - viewsIDOwnersIDPath = "/api/v2/views/:id/owners/:userID" - viewsIDLabelsPath = "/api/v2/views/:id/labels" - viewsIDLabelsIDPath = "/api/v2/views/:id/labels/:lid" -) - -// NewViewHandler returns a new instance of ViewHandler. -func NewViewHandler(b *ViewBackend) *ViewHandler { - h := &ViewHandler{ - Router: NewRouter(), - Logger: b.Logger, - - ViewService: b.ViewService, - UserResourceMappingService: b.UserResourceMappingService, - LabelService: b.LabelService, - UserService: b.UserService, - } - - h.HandlerFunc("POST", viewsPath, h.handlePostViews) - h.HandlerFunc("GET", viewsPath, h.handleGetViews) - - h.HandlerFunc("GET", viewsIDPath, h.handleGetView) - h.HandlerFunc("DELETE", viewsIDPath, h.handleDeleteView) - h.HandlerFunc("PATCH", viewsIDPath, h.handlePatchView) - - memberBackend := MemberBackend{ - Logger: b.Logger.With(zap.String("handler", "member")), - ResourceType: influxdb.ViewsResourceType, - UserType: influxdb.Member, - UserResourceMappingService: b.UserResourceMappingService, - UserService: b.UserService, - } - h.HandlerFunc("POST", viewsIDMembersPath, newPostMemberHandler(memberBackend)) - h.HandlerFunc("GET", viewsIDMembersPath, newGetMembersHandler(memberBackend)) - h.HandlerFunc("DELETE", viewsIDMembersIDPath, newDeleteMemberHandler(memberBackend)) - - ownerBackend := MemberBackend{ - Logger: b.Logger.With(zap.String("handler", "member")), - ResourceType: influxdb.ViewsResourceType, - UserType: influxdb.Owner, - UserResourceMappingService: b.UserResourceMappingService, - UserService: b.UserService, - } - h.HandlerFunc("POST", viewsIDOwnersPath, newPostMemberHandler(ownerBackend)) - h.HandlerFunc("GET", viewsIDOwnersPath, newGetMembersHandler(ownerBackend)) - h.HandlerFunc("DELETE", viewsIDOwnersIDPath, newDeleteMemberHandler(ownerBackend)) - - labelBackend := &LabelBackend{ - Logger: b.Logger.With(zap.String("handler", "label")), - LabelService: b.LabelService, - } - h.HandlerFunc("GET", viewsIDLabelsPath, newGetLabelsHandler(labelBackend)) - h.HandlerFunc("POST", viewsIDLabelsPath, newPostLabelHandler(labelBackend)) - h.HandlerFunc("DELETE", viewsIDLabelsIDPath, newDeleteLabelHandler(labelBackend)) - - return h -} - -type viewLinks struct { - Self 
string `json:"self"` - Labels string `json:"labels"` -} - -type viewResponse struct { - influxdb.View - Links viewLinks `json:"links"` -} - -func (r viewResponse) MarshalJSON() ([]byte, error) { - props, err := influxdb.MarshalViewPropertiesJSON(r.Properties) - if err != nil { - return nil, err - } - - return json.Marshal(struct { - influxdb.ViewContents - Links viewLinks `json:"links"` - Properties json.RawMessage `json:"properties"` - }{ - ViewContents: r.ViewContents, - Links: r.Links, - Properties: props, - }) -} - -func newViewResponse(c *influxdb.View) viewResponse { - return viewResponse{ - Links: viewLinks{ - Self: fmt.Sprintf("/api/v2/views/%s", c.ID), - Labels: fmt.Sprintf("/api/v2/views/%s/labels", c.ID), - }, - View: *c, - } -} - -// handleGetViews returns all views within the store. -func (h *ViewHandler) handleGetViews(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - req := decodeGetViewsRequest(ctx, r) - - views, _, err := h.ViewService.FindViews(ctx, req.filter) - if err != nil { - EncodeError(ctx, err, w) - return - } - - if err := encodeResponse(ctx, w, http.StatusOK, newGetViewsResponse(views)); err != nil { - logEncodingError(h.Logger, r, err) - return - } -} - -type getViewsRequest struct { - filter influxdb.ViewFilter -} - -func decodeGetViewsRequest(ctx context.Context, r *http.Request) *getViewsRequest { - qp := r.URL.Query() - - return &getViewsRequest{ - filter: influxdb.ViewFilter{ - Types: qp["type"], - }, - } -} - -type getViewsLinks struct { - Self string `json:"self"` -} - -type getViewsResponse struct { - Links getViewsLinks `json:"links"` - Views []viewResponse `json:"views"` -} - -func newGetViewsResponse(views []*influxdb.View) getViewsResponse { - res := getViewsResponse{ - Links: getViewsLinks{ - Self: "/api/v2/views", - }, - Views: make([]viewResponse, 0, len(views)), - } - - for _, view := range views { - res.Views = append(res.Views, newViewResponse(view)) - } - - return res -} - -// handlePostViews creates a new view. -func (h *ViewHandler) handlePostViews(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - req, err := decodePostViewRequest(ctx, r) - if err != nil { - EncodeError(ctx, err, w) - return - } - if err := h.ViewService.CreateView(ctx, req.View); err != nil { - EncodeError(ctx, err, w) - return - } - - if err := encodeResponse(ctx, w, http.StatusCreated, newViewResponse(req.View)); err != nil { - logEncodingError(h.Logger, r, err) - return - } -} - -type postViewRequest struct { - View *influxdb.View -} - -func decodePostViewRequest(ctx context.Context, r *http.Request) (*postViewRequest, error) { - c := &influxdb.View{} - if err := json.NewDecoder(r.Body).Decode(c); err != nil { - return nil, &influxdb.Error{ - Code: influxdb.EInvalid, - Msg: err.Error(), - } - } - return &postViewRequest{ - View: c, - }, nil -} - -// hanldeGetView retrieves a view by ID. 
-func (h *ViewHandler) handleGetView(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - req, err := decodeGetViewRequest(ctx, r) - if err != nil { - EncodeError(ctx, err, w) - return - } - - view, err := h.ViewService.FindViewByID(ctx, req.ViewID) - if err != nil { - EncodeError(ctx, err, w) - return - } - - if err := encodeResponse(ctx, w, http.StatusOK, newViewResponse(view)); err != nil { - logEncodingError(h.Logger, r, err) - return - } -} - -type getViewRequest struct { - ViewID influxdb.ID -} - -func decodeGetViewRequest(ctx context.Context, r *http.Request) (*getViewRequest, error) { - params := httprouter.ParamsFromContext(ctx) - id := params.ByName("id") - if id == "" { - return nil, &influxdb.Error{ - Code: influxdb.EInvalid, - Msg: "url missing id", - } - } - - var i influxdb.ID - if err := i.DecodeFromString(id); err != nil { - return nil, err - } - - return &getViewRequest{ - ViewID: i, - }, nil -} - -// handleDeleteView removes a view by ID. -func (h *ViewHandler) handleDeleteView(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - req, err := decodeDeleteViewRequest(ctx, r) - if err != nil { - EncodeError(ctx, err, w) - return - } - - if err := h.ViewService.DeleteView(ctx, req.ViewID); err != nil { - EncodeError(ctx, err, w) - return - } - - w.WriteHeader(http.StatusNoContent) -} - -type deleteViewRequest struct { - ViewID influxdb.ID -} - -func decodeDeleteViewRequest(ctx context.Context, r *http.Request) (*deleteViewRequest, error) { - params := httprouter.ParamsFromContext(ctx) - id := params.ByName("id") - if id == "" { - return nil, &influxdb.Error{ - Code: influxdb.EInvalid, - Msg: "url missing id", - } - } - - var i influxdb.ID - if err := i.DecodeFromString(id); err != nil { - return nil, err - } - - return &deleteViewRequest{ - ViewID: i, - }, nil -} - -// handlePatchView updates a view. -func (h *ViewHandler) handlePatchView(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - req, pe := decodePatchViewRequest(ctx, r) - if pe != nil { - EncodeError(ctx, pe, w) - return - } - view, err := h.ViewService.UpdateView(ctx, req.ViewID, req.Upd) - if err != nil { - EncodeError(ctx, err, w) - return - } - - if err := encodeResponse(ctx, w, http.StatusOK, newViewResponse(view)); err != nil { - logEncodingError(h.Logger, r, err) - return - } -} - -type patchViewRequest struct { - ViewID influxdb.ID - Upd influxdb.ViewUpdate -} - -func decodePatchViewRequest(ctx context.Context, r *http.Request) (*patchViewRequest, *influxdb.Error) { - req := &patchViewRequest{} - upd := influxdb.ViewUpdate{} - if err := json.NewDecoder(r.Body).Decode(&upd); err != nil { - return nil, &influxdb.Error{ - Code: influxdb.EInvalid, - Msg: err.Error(), - } - } - - req.Upd = upd - - params := httprouter.ParamsFromContext(ctx) - id := params.ByName("id") - if id == "" { - return nil, &influxdb.Error{ - Code: influxdb.EInvalid, - Msg: "url missing id", - } - } - var i influxdb.ID - if err := i.DecodeFromString(id); err != nil { - return nil, &influxdb.Error{ - Code: influxdb.EInvalid, - Err: err, - } - } - - req.ViewID = i - - if err := req.Valid(); err != nil { - return nil, &influxdb.Error{ - Err: err, - } - } - - return req, nil -} - -// Valid validates that the view ID is non zero valued and update has expected values set. 
-func (r *patchViewRequest) Valid() *influxdb.Error { - if !r.ViewID.Valid() { - return &influxdb.Error{ - Code: influxdb.EInvalid, - Msg: "missing view ID", - } - } - - return r.Upd.Valid() -} diff --git a/http/view_test.go b/http/view_test.go deleted file mode 100644 index b6e390b00a..0000000000 --- a/http/view_test.go +++ /dev/null @@ -1,736 +0,0 @@ -package http - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io/ioutil" - "net/http" - "net/http/httptest" - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/mock" - influxdbtesting "github.com/influxdata/influxdb/testing" - "github.com/julienschmidt/httprouter" - "github.com/yudai/gojsondiff" - "github.com/yudai/gojsondiff/formatter" - "go.uber.org/zap" -) - -// NewMockViewBackend returns a ViewBackend with mock services. -func NewMockViewBackend() *ViewBackend { - return &ViewBackend{ - Logger: zap.NewNop().With(zap.String("handler", "view")), - - ViewService: &mock.ViewService{}, - UserService: mock.NewUserService(), - UserResourceMappingService: &mock.UserResourceMappingService{}, - LabelService: mock.NewLabelService(), - } -} - -func TestService_handleGetViews(t *testing.T) { - type fields struct { - ViewService influxdb.ViewService - } - type args struct { - queryParams map[string][]string - } - type wants struct { - statusCode int - contentType string - body string - } - - tests := []struct { - name string - fields fields - args args - wants wants - }{ - { - name: "get all views", - fields: fields{ - &mock.ViewService{ - FindViewsF: func(ctx context.Context, filter influxdb.ViewFilter) ([]*influxdb.View, int, error) { - return []*influxdb.View{ - { - ViewContents: influxdb.ViewContents{ - ID: influxdbtesting.MustIDBase16("7365637465747572"), - Name: "hello", - }, - Properties: influxdb.XYViewProperties{ - Type: "xy", - }, - }, - { - ViewContents: influxdb.ViewContents{ - ID: influxdbtesting.MustIDBase16("6167697474697320"), - Name: "example", - }, - }, - }, 2, nil - }, - }, - }, - args: args{}, - wants: wants{ - statusCode: http.StatusOK, - contentType: "application/json; charset=utf-8", - body: ` -{ - "links": { - "self": "/api/v2/views" - }, - "views": [ - { - "id": "7365637465747572", - "name": "hello", - "links": { - "labels": "/api/v2/views/7365637465747572/labels", - "self": "/api/v2/views/7365637465747572" - }, - "properties": { - "shape": "chronograf-v2", - "queries": null, - "axes": null, - "type": "xy", - "colors": null, - "legend": {}, - "geom": "", - "note": "", - "showNoteWhenEmpty": false - } - }, - { - "id": "6167697474697320", - "name": "example", - "links": { - "labels": "/api/v2/views/6167697474697320/labels", - "self": "/api/v2/views/6167697474697320" - }, - "properties": { - "shape": "empty" - } - } - ] -}`, - }, - }, - { - name: "get all views when there are none", - fields: fields{ - &mock.ViewService{ - FindViewsF: func(ctx context.Context, filter influxdb.ViewFilter) ([]*influxdb.View, int, error) { - return []*influxdb.View{}, 0, nil - }, - }, - }, - args: args{}, - wants: wants{ - statusCode: http.StatusOK, - contentType: "application/json; charset=utf-8", - body: ` -{ - "links": { - "self": "/api/v2/views" - }, - "views": [] -}`, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - viewBackend := NewMockViewBackend() - viewBackend.ViewService = tt.fields.ViewService - h := NewViewHandler(viewBackend) - - r := httptest.NewRequest("GET", "http://any.url", nil) - - qp := r.URL.Query() - for k, vs := 
range tt.args.queryParams { - for _, v := range vs { - qp.Add(k, v) - } - } - r.URL.RawQuery = qp.Encode() - - w := httptest.NewRecorder() - - h.handleGetViews(w, r) - - res := w.Result() - content := res.Header.Get("Content-Type") - body, _ := ioutil.ReadAll(res.Body) - - if res.StatusCode != tt.wants.statusCode { - t.Errorf("%q. handleGetViews() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode) - } - if tt.wants.contentType != "" && content != tt.wants.contentType { - t.Errorf("%q. handleGetViews() = %v, want %v", tt.name, content, tt.wants.contentType) - } - if eq, diff, _ := jsonEqual(string(body), tt.wants.body); tt.wants.body != "" && !eq { - t.Errorf("%q. handleGetViews() = ***%s***", tt.name, diff) - } - - }) - } -} - -func TestService_handleGetView(t *testing.T) { - type fields struct { - ViewService influxdb.ViewService - } - type args struct { - id string - } - type wants struct { - statusCode int - contentType string - body string - } - - tests := []struct { - name string - fields fields - args args - wants wants - }{ - { - name: "get a view by id", - fields: fields{ - &mock.ViewService{ - FindViewByIDF: func(ctx context.Context, id influxdb.ID) (*influxdb.View, error) { - return &influxdb.View{ - ViewContents: influxdb.ViewContents{ - ID: influxdbtesting.MustIDBase16("020f755c3c082000"), - Name: "example", - }, - }, nil - }, - }, - }, - args: args{ - id: "020f755c3c082000", - }, - wants: wants{ - statusCode: http.StatusOK, - contentType: "application/json; charset=utf-8", - body: ` -{ - "id": "020f755c3c082000", - "name": "example", - "links": { - "labels": "/api/v2/views/020f755c3c082000/labels", - "self": "/api/v2/views/020f755c3c082000" - }, - "properties": { - "shape": "empty" - } -} -`, - }, - }, - { - name: "not found", - fields: fields{ - &mock.ViewService{ - FindViewByIDF: func(ctx context.Context, id influxdb.ID) (*influxdb.View, error) { - return nil, &influxdb.Error{ - Code: influxdb.ENotFound, - Msg: influxdb.ErrViewNotFound, - } - }, - }, - }, - args: args{ - id: "020f755c3c082000", - }, - wants: wants{ - statusCode: http.StatusNotFound, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - viewBackend := NewMockViewBackend() - viewBackend.ViewService = tt.fields.ViewService - h := NewViewHandler(viewBackend) - - r := httptest.NewRequest("GET", "http://any.url", nil) - - r = r.WithContext(context.WithValue( - context.TODO(), - httprouter.ParamsKey, - httprouter.Params{ - { - Key: "id", - Value: tt.args.id, - }, - })) - - w := httptest.NewRecorder() - - h.handleGetView(w, r) - - res := w.Result() - content := res.Header.Get("Content-Type") - body, _ := ioutil.ReadAll(res.Body) - - if res.StatusCode != tt.wants.statusCode { - t.Errorf("%q. handleGetView() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode) - } - if tt.wants.contentType != "" && content != tt.wants.contentType { - t.Errorf("%q. handleGetView() = %v, want %v", tt.name, content, tt.wants.contentType) - } - if eq, diff, _ := jsonEqual(string(body), tt.wants.body); tt.wants.body != "" && !eq { - t.Errorf("%q. 
handleGetView() = ***%s***", tt.name, diff) - } - }) - } -} - -func TestService_handlePostViews(t *testing.T) { - type fields struct { - ViewService influxdb.ViewService - } - type args struct { - view *influxdb.View - } - type wants struct { - statusCode int - contentType string - body string - } - - tests := []struct { - name string - fields fields - args args - wants wants - }{ - { - name: "create a new view", - fields: fields{ - &mock.ViewService{ - CreateViewF: func(ctx context.Context, c *influxdb.View) error { - c.ID = influxdbtesting.MustIDBase16("020f755c3c082000") - return nil - }, - }, - }, - args: args{ - view: &influxdb.View{ - ViewContents: influxdb.ViewContents{ - ID: influxdbtesting.MustIDBase16("020f755c3c082000"), - Name: "hello", - }, - Properties: influxdb.XYViewProperties{ - Type: "xy", - }, - }, - }, - wants: wants{ - statusCode: http.StatusCreated, - contentType: "application/json; charset=utf-8", - body: ` -{ - "id": "020f755c3c082000", - "name": "hello", - "links": { - "labels": "/api/v2/views/020f755c3c082000/labels", - "self": "/api/v2/views/020f755c3c082000" - }, - "properties": { - "shape": "chronograf-v2", - "queries": null, - "axes": null, - "type": "xy", - "colors": null, - "legend": {}, - "geom": "", - "note": "", - "showNoteWhenEmpty": false - } -} -`, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - viewBackend := NewMockViewBackend() - viewBackend.ViewService = tt.fields.ViewService - h := NewViewHandler(viewBackend) - - b, err := json.Marshal(tt.args.view) - if err != nil { - t.Fatalf("failed to unmarshal view: %v", err) - } - - r := httptest.NewRequest("GET", "http://any.url", bytes.NewReader(b)) - w := httptest.NewRecorder() - - h.handlePostViews(w, r) - - res := w.Result() - content := res.Header.Get("Content-Type") - body, _ := ioutil.ReadAll(res.Body) - - if res.StatusCode != tt.wants.statusCode { - t.Errorf("%q. handlePostViews() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode) - } - if tt.wants.contentType != "" && content != tt.wants.contentType { - t.Errorf("%q. handlePostViews() = %v, want %v", tt.name, content, tt.wants.contentType) - } - if eq, diff, _ := jsonEqual(string(body), tt.wants.body); tt.wants.body != "" && !eq { - t.Errorf("%q. 
handlePostViews() = ***%s***", tt.name, diff) - } - }) - } -} - -func TestService_handleDeleteView(t *testing.T) { - type fields struct { - ViewService influxdb.ViewService - } - type args struct { - id string - } - type wants struct { - statusCode int - contentType string - body string - } - - tests := []struct { - name string - fields fields - args args - wants wants - }{ - { - name: "remove a view by id", - fields: fields{ - &mock.ViewService{ - DeleteViewF: func(ctx context.Context, id influxdb.ID) error { - if id == influxdbtesting.MustIDBase16("020f755c3c082000") { - return nil - } - - return fmt.Errorf("wrong id") - }, - }, - }, - args: args{ - id: "020f755c3c082000", - }, - wants: wants{ - statusCode: http.StatusNoContent, - }, - }, - { - name: "view not found", - fields: fields{ - &mock.ViewService{ - DeleteViewF: func(ctx context.Context, id influxdb.ID) error { - return &influxdb.Error{ - Code: influxdb.ENotFound, - Msg: influxdb.ErrViewNotFound, - } - }, - }, - }, - args: args{ - id: "020f755c3c082000", - }, - wants: wants{ - statusCode: http.StatusNotFound, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - viewBackend := NewMockViewBackend() - viewBackend.ViewService = tt.fields.ViewService - h := NewViewHandler(viewBackend) - - r := httptest.NewRequest("GET", "http://any.url", nil) - - r = r.WithContext(context.WithValue( - context.TODO(), - httprouter.ParamsKey, - httprouter.Params{ - { - Key: "id", - Value: tt.args.id, - }, - })) - - w := httptest.NewRecorder() - - h.handleDeleteView(w, r) - - res := w.Result() - content := res.Header.Get("Content-Type") - body, _ := ioutil.ReadAll(res.Body) - - if res.StatusCode != tt.wants.statusCode { - t.Errorf("%q. handleDeleteView() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode) - } - if tt.wants.contentType != "" && content != tt.wants.contentType { - t.Errorf("%q. handleDeleteView() = %v, want %v", tt.name, content, tt.wants.contentType) - } - if eq, diff, _ := jsonEqual(string(body), tt.wants.body); tt.wants.body != "" && !eq { - t.Errorf("%q. 
handleDeleteView() = ***%s***", tt.name, diff) - } - }) - } -} - -func TestService_handlePatchView(t *testing.T) { - type fields struct { - ViewService influxdb.ViewService - } - type args struct { - id string - name string - properties influxdb.ViewProperties - } - type wants struct { - statusCode int - contentType string - body string - } - - tests := []struct { - name string - fields fields - args args - wants wants - }{ - { - name: "update a view", - fields: fields{ - &mock.ViewService{ - UpdateViewF: func(ctx context.Context, id influxdb.ID, upd influxdb.ViewUpdate) (*influxdb.View, error) { - if id == influxdbtesting.MustIDBase16("020f755c3c082000") { - return &influxdb.View{ - ViewContents: influxdb.ViewContents{ - ID: influxdbtesting.MustIDBase16("020f755c3c082000"), - Name: "example", - }, - Properties: influxdb.XYViewProperties{ - Type: "xy", - }, - }, nil - } - - return nil, fmt.Errorf("not found") - }, - }, - }, - args: args{ - id: "020f755c3c082000", - name: "example", - }, - wants: wants{ - statusCode: http.StatusOK, - contentType: "application/json; charset=utf-8", - body: ` -{ - "id": "020f755c3c082000", - "name": "example", - "links": { - "labels": "/api/v2/views/020f755c3c082000/labels", - "self": "/api/v2/views/020f755c3c082000" - }, - "properties": { - "shape": "chronograf-v2", - "queries": null, - "axes": null, - "type": "xy", - "colors": null, - "legend": {}, - "geom": "", - "note": "", - "showNoteWhenEmpty": false - } -} -`, - }, - }, - { - name: "update a view with empty request body", - fields: fields{ - &mock.ViewService{ - UpdateViewF: func(ctx context.Context, id influxdb.ID, upd influxdb.ViewUpdate) (*influxdb.View, error) { - if id == influxdbtesting.MustIDBase16("020f755c3c082000") { - return &influxdb.View{ - ViewContents: influxdb.ViewContents{ - ID: influxdbtesting.MustIDBase16("020f755c3c082000"), - Name: "example", - }, - Properties: influxdb.XYViewProperties{ - Type: "xy", - }, - }, nil - } - - return nil, fmt.Errorf("not found") - }, - }, - }, - args: args{ - id: "020f755c3c082000", - }, - wants: wants{ - statusCode: http.StatusBadRequest, - }, - }, - { - name: "view not found", - fields: fields{ - &mock.ViewService{ - UpdateViewF: func(ctx context.Context, id influxdb.ID, upd influxdb.ViewUpdate) (*influxdb.View, error) { - return nil, &influxdb.Error{ - Code: influxdb.ENotFound, - Msg: influxdb.ErrViewNotFound, - } - }, - }, - }, - args: args{ - id: "020f755c3c082000", - name: "hello", - }, - wants: wants{ - statusCode: http.StatusNotFound, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - viewBackend := NewMockViewBackend() - viewBackend.ViewService = tt.fields.ViewService - h := NewViewHandler(viewBackend) - - upd := influxdb.ViewUpdate{} - if tt.args.name != "" { - upd.Name = &tt.args.name - } - if tt.args.properties != nil { - upd.Properties = tt.args.properties - } - - b, err := json.Marshal(upd) - if err != nil { - t.Fatalf("failed to unmarshal view update: %v", err) - } - - r := httptest.NewRequest("GET", "http://any.url", bytes.NewReader(b)) - - r = r.WithContext(context.WithValue( - context.TODO(), - httprouter.ParamsKey, - httprouter.Params{ - { - Key: "id", - Value: tt.args.id, - }, - })) - - w := httptest.NewRecorder() - - h.handlePatchView(w, r) - - res := w.Result() - content := res.Header.Get("Content-Type") - body, _ := ioutil.ReadAll(res.Body) - - if res.StatusCode != tt.wants.statusCode { - t.Errorf("%q. 
handlePatchView() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode) - } - if tt.wants.contentType != "" && content != tt.wants.contentType { - t.Errorf("%q. handlePatchView() = %v, want %v", tt.name, content, tt.wants.contentType) - } - if eq, diff, _ := jsonEqual(string(body), tt.wants.body); tt.wants.body != "" && !eq { - t.Errorf("%q. handlePatchView() = ***%s***", tt.name, diff) - } - }) - } -} - -func jsonEqual(s1, s2 string) (eq bool, diff string, err error) { - var o1, o2 interface{} - if s1 == s2 { - return true, "", nil - } - - if s1 == "" { - return false, s2, fmt.Errorf("s1 is empty") - } - - if s2 == "" { - return false, s1, fmt.Errorf("s2 is empty") - } - - if err = json.Unmarshal([]byte(s1), &o1); err != nil { - return - } - - if err = json.Unmarshal([]byte(s2), &o2); err != nil { - return - } - - differ := gojsondiff.New() - d, err := differ.Compare([]byte(s1), []byte(s2)) - if err != nil { - return - } - - config := formatter.AsciiFormatterConfig{} - - formatter := formatter.NewAsciiFormatter(o1, config) - diff, err = formatter.Format(d) - - return cmp.Equal(o1, o2), diff, err -} - -/* todo -func initViewService(f influxdbtesting.ViewFields, t *testing.T) (influxdb.ViewService, func()) { - t.Helper() - svc := inmem.NewService() - svc.IDGenerator = f.IDGenerator - - ctx := context.Background() - for _, b := range f.Views { - if err := s.PutView(ctx, b); err != nil { - t.Fatalf("failed to populate Views") - } - } - - handler := NewViewHandler() - handler.ViewService = svc - server := httptest.NewServer(handler) - client := ViewService{ - Addr: server.URL, - } - done := server.Close - - return &client, done -} - -func TestViewService(t *testing.T) { - influxdbtesting.ViewService(initViewService, t) -} -*/ diff --git a/inmem/auth_service.go b/inmem/auth_service.go index 61b140b7a0..381a513319 100644 --- a/inmem/auth_service.go +++ b/inmem/auth_service.go @@ -192,9 +192,9 @@ func (s *Service) DeleteAuthorization(ctx context.Context, id platform.ID) error return nil } -// SetAuthorizationStatus updates the status of an authorization associated with id. -func (s *Service) SetAuthorizationStatus(ctx context.Context, id platform.ID, status platform.Status) error { - op := OpPrefix + platform.OpSetAuthorizationStatus +// UpdateAuthorization updates the status and description if available. +func (s *Service) UpdateAuthorization(ctx context.Context, id platform.ID, upd *platform.AuthorizationUpdate) error { + op := OpPrefix + platform.OpUpdateAuthorization a, err := s.FindAuthorizationByID(ctx, id) if err != nil { return &platform.Error{ @@ -203,20 +203,23 @@ func (s *Service) SetAuthorizationStatus(ctx context.Context, id platform.ID, st } } - switch status { - case platform.Active, platform.Inactive: - default: - return &platform.Error{ - Code: platform.EInvalid, - Msg: "unknown authorization status", - Op: op, + if upd.Status != nil { + status := *upd.Status + switch status { + case platform.Active, platform.Inactive: + default: + return &platform.Error{ + Code: platform.EInvalid, + Msg: "unknown authorization status", + Op: op, + } } + a.Status = status } - if a.Status == status { - return nil + if upd.Description != nil { + a.Description = *upd.Description } - a.Status = status return s.PutAuthorization(ctx, a) } diff --git a/inmem/dashboard.go b/inmem/dashboard.go index 6dea3884d6..8ec93e8458 100644 --- a/inmem/dashboard.go +++ b/inmem/dashboard.go @@ -303,7 +303,10 @@ func (s *Service) RemoveDashboardCell(ctx context.Context, dashboardID platform. 
} if err := s.DeleteView(ctx, d.Cells[idx].ID); err != nil { - return err + return &platform.Error{ + Err: err, + Op: op, + } } d.Cells = append(d.Cells[:idx], d.Cells[idx+1:]...) @@ -442,3 +445,50 @@ func (s *Service) UpdateDashboardCellView(ctx context.Context, dashboardID, cell return v, nil } + +func (s *Service) loadView(ctx context.Context, id platform.ID) (*platform.View, *platform.Error) { + i, ok := s.viewKV.Load(id.String()) + if !ok { + return nil, &platform.Error{ + Code: platform.ENotFound, + Msg: "view not found", + } + } + + d, ok := i.(*platform.View) + if !ok { + return nil, &platform.Error{ + Code: platform.EInvalid, + Msg: fmt.Sprintf("type %T is not a view", i), + } + } + return d, nil +} + +// FindViewByID returns a single view by ID. +func (s *Service) FindViewByID(ctx context.Context, id platform.ID) (*platform.View, error) { + v, pe := s.loadView(ctx, id) + if pe != nil { + return nil, pe + } + return v, nil +} + +// PutView sets view with the current ID. +func (s *Service) PutView(ctx context.Context, c *platform.View) error { + if c.Properties == nil { + c.Properties = platform.EmptyViewProperties{} + } + s.viewKV.Store(c.ID.String(), c) + return nil +} + +// DeleteView removes a view by ID. +func (s *Service) DeleteView(ctx context.Context, id platform.ID) error { + if _, err := s.FindViewByID(ctx, id); err != nil { + return err + } + + s.viewKV.Delete(id.String()) + return nil +} diff --git a/inmem/dashboard_test.go b/inmem/dashboard_test.go index ffb3d99915..a00c210796 100644 --- a/inmem/dashboard_test.go +++ b/inmem/dashboard_test.go @@ -18,11 +18,6 @@ func initDashboardService(f platformtesting.DashboardFields, t *testing.T) (plat t.Fatalf("failed to populate Dashboards") } } - for _, b := range f.Views { - if err := s.PutView(ctx, b); err != nil { - t.Fatalf("failed to populate views") - } - } return s, OpPrefix, func() {} } diff --git a/inmem/source.go b/inmem/source.go index bbf03e5f9f..e2dde0b364 100644 --- a/inmem/source.go +++ b/inmem/source.go @@ -125,7 +125,7 @@ func (s *Service) UpdateSource(ctx context.Context, id platform.ID, upd platform if err != nil { return nil, &platform.Error{ Err: err, - Op: OpPrefix + platform.OpUpdateView, + Op: OpPrefix + platform.OpUpdateSource, } } @@ -139,7 +139,7 @@ func (s *Service) DeleteSource(ctx context.Context, id platform.ID) error { if _, err := s.FindSourceByID(ctx, id); err != nil { return &platform.Error{ Err: err, - Op: OpPrefix + platform.OpDeleteView, + Op: OpPrefix + platform.OpDeleteSource, } } s.sourceKV.Delete(id.String()) diff --git a/inmem/view.go b/inmem/view.go deleted file mode 100644 index cb15c72ff5..0000000000 --- a/inmem/view.go +++ /dev/null @@ -1,139 +0,0 @@ -package inmem - -import ( - "context" - "fmt" - - platform "github.com/influxdata/influxdb" -) - -func (s *Service) loadView(ctx context.Context, id platform.ID) (*platform.View, *platform.Error) { - i, ok := s.viewKV.Load(id.String()) - if !ok { - return nil, &platform.Error{ - Code: platform.ENotFound, - Msg: "view not found", - } - } - - d, ok := i.(*platform.View) - if !ok { - return nil, &platform.Error{ - Code: platform.EInvalid, - Msg: fmt.Sprintf("type %T is not a view", i), - } - } - return d, nil -} - -// FindViewByID returns a single view by ID. 
-func (s *Service) FindViewByID(ctx context.Context, id platform.ID) (*platform.View, error) { - v, pe := s.loadView(ctx, id) - if pe != nil { - return nil, &platform.Error{ - Err: pe, - Op: OpPrefix + platform.OpFindViewByID, - } - } - return v, nil -} - -func filterViewFn(filter platform.ViewFilter) func(d *platform.View) bool { - if filter.ID != nil { - return func(d *platform.View) bool { - return d.ID == *filter.ID - } - } - - return func(d *platform.View) bool { return true } -} - -// FindViews implements platform.ViewService interface. -func (s *Service) FindViews(ctx context.Context, filter platform.ViewFilter) ([]*platform.View, int, error) { - var ds []*platform.View - if filter.ID != nil { - d, err := s.FindViewByID(ctx, *filter.ID) - if err != nil && platform.ErrorCode(err) != platform.ENotFound { - return nil, 0, &platform.Error{ - Err: err, - Op: OpPrefix + platform.OpFindViews, - } - } - if d != nil { - ds = append(ds, d) - } - - return ds, len(ds), nil - } - - var err error - filterF := filterViewFn(filter) - s.viewKV.Range(func(k, v interface{}) bool { - d, ok := v.(*platform.View) - if !ok { - return false - } - - if filterF(d) { - ds = append(ds, d) - } - return true - }) - return ds, len(ds), err -} - -// CreateView implements platform.ViewService interface. -func (s *Service) CreateView(ctx context.Context, c *platform.View) error { - c.ID = s.IDGenerator.ID() - if err := s.PutView(ctx, c); err != nil { - return &platform.Error{ - Err: err, - Op: OpPrefix + platform.OpCreateView, - } - } - return nil -} - -// PutView implements platform.ViewService interface. -func (s *Service) PutView(ctx context.Context, c *platform.View) error { - if c.Properties == nil { - c.Properties = platform.EmptyViewProperties{} - } - s.viewKV.Store(c.ID.String(), c) - return nil -} - -// UpdateView implements platform.ViewService interface. -func (s *Service) UpdateView(ctx context.Context, id platform.ID, upd platform.ViewUpdate) (*platform.View, error) { - c, err := s.FindViewByID(ctx, id) - if err != nil { - return nil, &platform.Error{ - Err: err, - Op: OpPrefix + platform.OpUpdateView, - } - } - - if upd.Name != nil { - c.Name = *upd.Name - } - - if upd.Properties != nil { - c.Properties = upd.Properties - } - - s.viewKV.Store(c.ID.String(), c) - - return c, nil -} - -// DeleteView implements platform.ViewService interface. 
-func (s *Service) DeleteView(ctx context.Context, id platform.ID) error { - if _, err := s.FindViewByID(ctx, id); err != nil { - return &platform.Error{ - Err: err, - Op: OpPrefix + platform.OpDeleteView, - } - } - s.viewKV.Delete(id.String()) - return nil -} diff --git a/inmem/view_test.go b/inmem/view_test.go deleted file mode 100644 index bffa030c30..0000000000 --- a/inmem/view_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package inmem - -import ( - "context" - "testing" - - platform "github.com/influxdata/influxdb" - platformtesting "github.com/influxdata/influxdb/testing" -) - -func initViewService(f platformtesting.ViewFields, t *testing.T) (platform.ViewService, string, func()) { - s := NewService() - s.IDGenerator = f.IDGenerator - ctx := context.TODO() - for _, b := range f.Views { - if err := s.PutView(ctx, b); err != nil { - t.Fatalf("failed to populate Views") - } - } - return s, OpPrefix, func() {} -} - -func TestViewService_CreateView(t *testing.T) { - platformtesting.CreateView(initViewService, t) -} - -func TestViewService_FindViewByID(t *testing.T) { - platformtesting.FindViewByID(initViewService, t) -} -func TestViewService_FindViews(t *testing.T) { - platformtesting.FindViews(initViewService, t) -} - -func TestViewService_DeleteView(t *testing.T) { - platformtesting.DeleteView(initViewService, t) -} - -func TestViewService_UpdateView(t *testing.T) { - platformtesting.UpdateView(initViewService, t) -} diff --git a/kv/auth.go b/kv/auth.go index ce04843b4d..24ed313601 100644 --- a/kv/auth.go +++ b/kv/auth.go @@ -416,17 +416,32 @@ func (s *Service) deleteAuthorization(ctx context.Context, tx Tx, id influxdb.ID // for setting an authorization to inactive or active. func (s *Service) SetAuthorizationStatus(ctx context.Context, id influxdb.ID, status influxdb.Status) error { return s.kv.Update(ctx, func(tx Tx) error { - return s.updateAuthorization(ctx, tx, id, status) + return s.updateAuthorization(ctx, tx, id, &influxdb.AuthorizationUpdate{ + Status: &status, + }) }) } -func (s *Service) updateAuthorization(ctx context.Context, tx Tx, id influxdb.ID, status influxdb.Status) error { +// UpdateAuthorization updates the status and description if available. 
+func (s *Service) UpdateAuthorization(ctx context.Context, id influxdb.ID, upd *influxdb.AuthorizationUpdate) error { + return s.kv.Update(ctx, func(tx Tx) error { + return s.updateAuthorization(ctx, tx, id, upd) + }) +} + +func (s *Service) updateAuthorization(ctx context.Context, tx Tx, id influxdb.ID, upd *influxdb.AuthorizationUpdate) error { a, err := s.findAuthorizationByID(ctx, tx, id) if err != nil { return err } - a.Status = status + if upd.Status != nil { + a.Status = *upd.Status + } + if upd.Description != nil { + a.Description = *upd.Description + } + v, err := encodeAuthorization(a) if err != nil { return &influxdb.Error{ diff --git a/kv/document.go b/kv/document.go index 5fa6f8163e..66b7d54b95 100644 --- a/kv/document.go +++ b/kv/document.go @@ -168,7 +168,13 @@ func (i *DocumentIndex) FindLabelByName(name string) (influxdb.ID, error) { if err != nil { return influxdb.InvalidID(), err } - if len(ls) != 1 { + if len(ls) == 0 { + return influxdb.InvalidID(), &influxdb.Error{ + Code: influxdb.ENotFound, + Msg: "label not found", + } + } + if len(ls) > 1 { return influxdb.InvalidID(), &influxdb.Error{ Code: influxdb.EInternal, Msg: "found multiple labels matching the name provided", diff --git a/mock/auth_service.go b/mock/auth_service.go index 53da865e5d..d0b4562078 100644 --- a/mock/auth_service.go +++ b/mock/auth_service.go @@ -21,7 +21,7 @@ type AuthorizationService struct { FindAuthorizationsFn func(context.Context, platform.AuthorizationFilter, ...platform.FindOptions) ([]*platform.Authorization, int, error) CreateAuthorizationFn func(context.Context, *platform.Authorization) error DeleteAuthorizationFn func(context.Context, platform.ID) error - SetAuthorizationStatusFn func(context.Context, platform.ID, platform.Status) error + UpdateAuthorizationFn func(context.Context, platform.ID, *platform.AuthorizationUpdate) error } // NewAuthorizationService returns a mock AuthorizationService where its methods will return @@ -33,9 +33,9 @@ func NewAuthorizationService() *AuthorizationService { FindAuthorizationsFn: func(context.Context, platform.AuthorizationFilter, ...platform.FindOptions) ([]*platform.Authorization, int, error) { return nil, 0, nil }, - CreateAuthorizationFn: func(context.Context, *platform.Authorization) error { return nil }, - DeleteAuthorizationFn: func(context.Context, platform.ID) error { return nil }, - SetAuthorizationStatusFn: func(context.Context, platform.ID, platform.Status) error { return nil }, + CreateAuthorizationFn: func(context.Context, *platform.Authorization) error { return nil }, + DeleteAuthorizationFn: func(context.Context, platform.ID) error { return nil }, + UpdateAuthorizationFn: func(context.Context, platform.ID, *platform.AuthorizationUpdate) error { return nil }, } } @@ -63,6 +63,7 @@ func (s *AuthorizationService) DeleteAuthorization(ctx context.Context, id platf return s.DeleteAuthorizationFn(ctx, id) } -func (s *AuthorizationService) SetAuthorizationStatus(ctx context.Context, id platform.ID, status platform.Status) error { - return s.SetAuthorizationStatusFn(ctx, id, status) +// UpdateAuthorization updates the status and description if available. 
+func (s *AuthorizationService) UpdateAuthorization(ctx context.Context, id platform.ID, upd *platform.AuthorizationUpdate) error { + return s.UpdateAuthorizationFn(ctx, id, upd) } diff --git a/mock/view_service.go b/mock/view_service.go deleted file mode 100644 index 37b5989104..0000000000 --- a/mock/view_service.go +++ /dev/null @@ -1,37 +0,0 @@ -package mock - -import ( - "context" - - platform "github.com/influxdata/influxdb" -) - -var _ platform.ViewService = &ViewService{} - -type ViewService struct { - CreateViewF func(context.Context, *platform.View) error - FindViewByIDF func(context.Context, platform.ID) (*platform.View, error) - FindViewsF func(context.Context, platform.ViewFilter) ([]*platform.View, int, error) - UpdateViewF func(context.Context, platform.ID, platform.ViewUpdate) (*platform.View, error) - DeleteViewF func(context.Context, platform.ID) error -} - -func (s *ViewService) FindViewByID(ctx context.Context, id platform.ID) (*platform.View, error) { - return s.FindViewByIDF(ctx, id) -} - -func (s *ViewService) FindViews(ctx context.Context, filter platform.ViewFilter) ([]*platform.View, int, error) { - return s.FindViewsF(ctx, filter) -} - -func (s *ViewService) CreateView(ctx context.Context, b *platform.View) error { - return s.CreateViewF(ctx, b) -} - -func (s *ViewService) UpdateView(ctx context.Context, id platform.ID, upd platform.ViewUpdate) (*platform.View, error) { - return s.UpdateViewF(ctx, id, upd) -} - -func (s *ViewService) DeleteView(ctx context.Context, id platform.ID) error { - return s.DeleteViewF(ctx, id) -} diff --git a/pkg/pointer/pointer.go b/pkg/pointer/pointer.go index ec5c37a46b..51a6147b8b 100644 --- a/pkg/pointer/pointer.go +++ b/pkg/pointer/pointer.go @@ -2,7 +2,9 @@ // Feel free to add more pointerification functions for more types as you need them. package pointer -import "time" +import ( + "time" +) // Duration returns a pointer to its argument. func Duration(d time.Duration) *time.Duration { diff --git a/prometheus/auth_service.go b/prometheus/auth_service.go index 2050797025..c45ae92529 100644 --- a/prometheus/auth_service.go +++ b/prometheus/auth_service.go @@ -109,9 +109,8 @@ func (s *AuthorizationService) DeleteAuthorization(ctx context.Context, id platf return s.AuthorizationService.DeleteAuthorization(ctx, id) } -// SetAuthorizationStatus updates the status of the authorization. Useful -// for setting an authorization to inactive or active. -func (s *AuthorizationService) SetAuthorizationStatus(ctx context.Context, id platform.ID, status platform.Status) (err error) { +// UpdateAuthorization updates the status and description. +func (s *AuthorizationService) UpdateAuthorization(ctx context.Context, id platform.ID, upd *platform.AuthorizationUpdate) (err error) { defer func(start time.Time) { labels := prometheus.Labels{ "method": "setAuthorizationStatus", @@ -121,7 +120,7 @@ func (s *AuthorizationService) SetAuthorizationStatus(ctx context.Context, id pl s.requestDuration.With(labels).Observe(time.Since(start).Seconds()) }(time.Now()) - return s.AuthorizationService.SetAuthorizationStatus(ctx, id, status) + return s.AuthorizationService.UpdateAuthorization(ctx, id, upd) } // PrometheusCollectors returns all authorization service prometheus collectors. 
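For readers following the SetAuthorizationStatus to UpdateAuthorization rename across the in-memory, kv, mock, and prometheus services above, a minimal caller-side sketch of the new call shape looks roughly like the following. Names such as example, deactivate, svc, and authID are illustrative only and are not part of this patch.

// Hypothetical caller-side sketch of the new UpdateAuthorization API.
package example

import (
	"context"

	platform "github.com/influxdata/influxdb"
)

// deactivate marks an authorization inactive and records why. Only non-nil
// fields of AuthorizationUpdate are applied by the service implementations.
func deactivate(ctx context.Context, svc platform.AuthorizationService, authID platform.ID) error {
	desc := "temporarily disabled"
	return svc.UpdateAuthorization(ctx, authID, &platform.AuthorizationUpdate{
		Status:      platform.Inactive.Ptr(), // Status.Ptr() is introduced in status.go below
		Description: &desc,
	})
}

Because both fields are pointers, an AuthorizationUpdate with only Status set leaves the existing description untouched, which matches the nil checks in the inmem and kv implementations above.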
diff --git a/prometheus/auth_service_test.go b/prometheus/auth_service_test.go index 93a6b618af..59daa4198a 100644 --- a/prometheus/auth_service_test.go +++ b/prometheus/auth_service_test.go @@ -38,7 +38,7 @@ func (a *authzSvc) DeleteAuthorization(context.Context, platform.ID) error { return a.Err } -func (a *authzSvc) SetAuthorizationStatus(context.Context, platform.ID, platform.Status) error { +func (a *authzSvc) UpdateAuthorization(context.Context, platform.ID, *platform.AuthorizationUpdate) error { return a.Err } diff --git a/query/stdlib/testing/end_to_end_test.go b/query/stdlib/testing/end_to_end_test.go index 33367f5f33..f4abd34c1f 100644 --- a/query/stdlib/testing/end_to_end_test.go +++ b/query/stdlib/testing/end_to_end_test.go @@ -4,11 +4,6 @@ import ( "bufio" "bytes" "context" - "io" - "io/ioutil" - nethttp "net/http" - "os" - "path/filepath" "strings" "testing" @@ -20,9 +15,7 @@ import ( "github.com/influxdata/flux/stdlib" platform "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/bolt" "github.com/influxdata/influxdb/cmd/influxd/launcher" - "github.com/influxdata/influxdb/http" "github.com/influxdata/influxdb/query" _ "github.com/influxdata/flux/stdlib" // Import the built-in functions @@ -147,7 +140,7 @@ func BenchmarkFluxEndToEnd(b *testing.B) { } func runEndToEnd(t *testing.T, pkgs []*ast.Package) { - l := RunMainOrFail(t, ctx) + l := launcher.RunTestLauncherOrFail(t, ctx) l.SetupOrFail(t) defer l.ShutdownOrFail(t, ctx) for _, pkg := range pkgs { @@ -163,7 +156,7 @@ func runEndToEnd(t *testing.T, pkgs []*ast.Package) { } func benchEndToEnd(b *testing.B, pkgs []*ast.Package) { - l := RunMainOrFail(b, ctx) + l := launcher.RunTestLauncherOrFail(b, ctx) l.SetupOrFail(b) defer l.ShutdownOrFail(b, ctx) for _, pkg := range pkgs { @@ -203,7 +196,7 @@ func init() { optionsAST = pkg.Files[0] } -func testFlux(t testing.TB, l *Launcher, pkg *ast.Package) { +func testFlux(t testing.TB, l *launcher.TestLauncher, pkg *ast.Package) { // Query server to ensure write persists. @@ -245,7 +238,7 @@ func testFlux(t testing.TB, l *Launcher, pkg *ast.Package) { OrganizationID: l.Org.ID, Compiler: lang.ASTCompiler{AST: pkg}, } - if r, err := l.FluxService().Query(ctx, req); err != nil { + if r, err := l.FluxQueryService().Query(ctx, req); err != nil { t.Fatal(err) } else { for r.More() { @@ -264,7 +257,7 @@ func testFlux(t testing.TB, l *Launcher, pkg *ast.Package) { // this time we use a call to `run` so that the assertion error is triggered runCalls := stdlib.TestingRunCalls(pkg) pkg.Files[len(pkg.Files)-1] = runCalls - r, err := l.FluxService().Query(ctx, req) + r, err := l.FluxQueryService().Query(ctx, req) if err != nil { t.Fatal(err) } @@ -281,7 +274,7 @@ func testFlux(t testing.TB, l *Launcher, pkg *ast.Package) { t.Error(err) // Replace the testing.run calls with testing.inspect calls. pkg.Files[len(pkg.Files)-1] = inspectCalls - r, err := l.FluxService().Query(ctx, req) + r, err := l.FluxQueryService().Query(ctx, req) if err != nil { t.Fatal(err) } @@ -306,118 +299,3 @@ func testFlux(t testing.TB, l *Launcher, pkg *ast.Package) { } } } - -// Launcher is a test wrapper for main.Launcher. -type Launcher struct { - *launcher.Launcher - - // Root temporary directory for all data. - Path string - - // Initialized after calling the Setup() helper. - User *platform.User - Org *platform.Organization - Bucket *platform.Bucket - Auth *platform.Authorization - - // Standard in/out/err buffers. 
- Stdin bytes.Buffer - Stdout bytes.Buffer - Stderr bytes.Buffer -} - -// NewLauncher returns a new instance of Launcher. -func NewLauncher() *Launcher { - l := &Launcher{Launcher: launcher.NewLauncher()} - l.Launcher.Stdin = &l.Stdin - l.Launcher.Stdout = &l.Stdout - l.Launcher.Stderr = &l.Stderr - if testing.Verbose() { - l.Launcher.Stdout = io.MultiWriter(l.Launcher.Stdout, os.Stdout) - l.Launcher.Stderr = io.MultiWriter(l.Launcher.Stderr, os.Stderr) - } - - path, err := ioutil.TempDir("", "") - if err != nil { - panic(err) - } - l.Path = path - return l -} - -// RunMainOrFail initializes and starts the server. -func RunMainOrFail(tb testing.TB, ctx context.Context, args ...string) *Launcher { - tb.Helper() - l := NewLauncher() - if err := l.Run(ctx, args...); err != nil { - tb.Fatal(err) - } - return l -} - -// Run executes the program with additional arguments to set paths and ports. -func (l *Launcher) Run(ctx context.Context, args ...string) error { - args = append(args, "--bolt-path", filepath.Join(l.Path, "influxd.bolt")) - args = append(args, "--protos-path", filepath.Join(l.Path, "protos")) - args = append(args, "--engine-path", filepath.Join(l.Path, "engine")) - args = append(args, "--http-bind-address", "127.0.0.1:0") - args = append(args, "--log-level", "debug") - return l.Launcher.Run(ctx, args...) -} - -// Shutdown stops the program and cleans up temporary paths. -func (l *Launcher) Shutdown(ctx context.Context) error { - l.Cancel() - l.Launcher.Shutdown(ctx) - return os.RemoveAll(l.Path) -} - -// ShutdownOrFail stops the program and cleans up temporary paths. Fail on error. -func (l *Launcher) ShutdownOrFail(tb testing.TB, ctx context.Context) { - tb.Helper() - if err := l.Shutdown(ctx); err != nil { - tb.Fatal(err) - } -} - -// SetupOrFail creates a new user, bucket, org, and auth token. Fail on error. -func (l *Launcher) SetupOrFail(tb testing.TB) { - svc := &http.SetupService{Addr: l.URL()} - results, err := svc.Generate(ctx, &platform.OnboardingRequest{ - User: "USER", - Password: "PASSWORD", - Org: "ORG", - Bucket: "BUCKET", - }) - if err != nil { - tb.Fatal(err) - } - - l.User = results.User - l.Org = results.Org - l.Bucket = results.Bucket - l.Auth = results.Auth -} - -func (l *Launcher) FluxService() *http.FluxQueryService { - return &http.FluxQueryService{Addr: l.URL(), Token: l.Auth.Token} -} - -func (l *Launcher) BucketService() *http.BucketService { - return &http.BucketService{ - Addr: l.URL(), - Token: l.Auth.Token, - OpPrefix: bolt.OpPrefix, - } -} - -// MustNewHTTPRequest returns a new nethttp.Request with base URL and auth attached. Fail on error. -func (l *Launcher) MustNewHTTPRequest(method, rawurl, body string) *nethttp.Request { - req, err := nethttp.NewRequest(method, l.URL()+rawurl, strings.NewReader(body)) - if err != nil { - panic(err) - } - - req.Header.Set("Authorization", "Token "+l.Auth.Token) - return req -} diff --git a/status.go b/status.go index e6be4d3d73..45446a8679 100644 --- a/status.go +++ b/status.go @@ -24,3 +24,8 @@ func (s Status) Valid() error { } } } + +// Ptr returns the pointer of that status. 
+func (s Status) Ptr() *Status { + return &s +} diff --git a/task.go b/task.go index 106a25bf2b..eaaa520b68 100644 --- a/task.go +++ b/task.go @@ -6,9 +6,7 @@ import ( "errors" "fmt" "strconv" - "time" - "github.com/influxdata/flux" "github.com/influxdata/flux/ast" "github.com/influxdata/flux/ast/edit" "github.com/influxdata/flux/parser" @@ -40,6 +38,21 @@ type Task struct { UpdatedAt string `json:"updatedAt,omitempty"` } +// EffectiveCron returns the effective cron string of the options. +// If the cron option was specified, it is returned. +// If the every option was specified, it is converted into a cron string using "@every". +// Otherwise, the empty string is returned. +// The value of the offset option is not considered. +func (t *Task) EffectiveCron() string { + if t.Cron != "" { + return t.Cron + } + if t.Every != "" { + return "@every " + t.Every + } + return "" +} + // Run is a record created when a run of a task is scheduled. type Run struct { ID ID `json:"id,omitempty"` @@ -145,11 +158,11 @@ func (t *TaskUpdate) UnmarshalJSON(data []byte) error { // Every represents a fixed period to repeat execution. // It gets marshalled from a string duration, i.e.: "10s" is 10 seconds - Every flux.Duration `json:"every,omitempty"` + Every options.Duration `json:"every,omitempty"` // Offset represents a delay before execution. // It gets marshalled from a string duration, i.e.: "10s" is 10 seconds - Offset *flux.Duration `json:"offset,omitempty"` + Offset *options.Duration `json:"offset,omitempty"` Concurrency *int64 `json:"concurrency,omitempty"` @@ -163,9 +176,9 @@ func (t *TaskUpdate) UnmarshalJSON(data []byte) error { } t.Options.Name = jo.Name t.Options.Cron = jo.Cron - t.Options.Every = time.Duration(jo.Every) + t.Options.Every = jo.Every if jo.Offset != nil { - offset := time.Duration(*jo.Offset) + offset := *jo.Offset t.Options.Offset = &offset } t.Options.Concurrency = jo.Concurrency @@ -187,10 +200,10 @@ func (t TaskUpdate) MarshalJSON() ([]byte, error) { Cron string `json:"cron,omitempty"` // Every represents a fixed period to repeat execution. - Every flux.Duration `json:"every,omitempty"` + Every options.Duration `json:"every,omitempty"` // Offset represents a delay before execution. 
- Offset *flux.Duration `json:"offset,omitempty"` + Offset *options.Duration `json:"offset,omitempty"` Concurrency *int64 `json:"concurrency,omitempty"` @@ -200,9 +213,9 @@ func (t TaskUpdate) MarshalJSON() ([]byte, error) { }{} jo.Name = t.Options.Name jo.Cron = t.Options.Cron - jo.Every = flux.Duration(t.Options.Every) + jo.Every = t.Options.Every if t.Options.Offset != nil { - offset := flux.Duration(*t.Options.Offset) + offset := *t.Options.Offset jo.Offset = &offset } jo.Concurrency = t.Options.Concurrency @@ -215,7 +228,7 @@ func (t TaskUpdate) MarshalJSON() ([]byte, error) { func (t TaskUpdate) Validate() error { switch { - case t.Options.Every != 0 && t.Options.Cron != "": + case !t.Options.Every.IsZero() && t.Options.Cron != "": return errors.New("cannot specify both every and cron") case t.Flux == nil && t.Status == nil && t.Options.IsZero() && t.Token == "": return errors.New("cannot update task without content") @@ -237,25 +250,23 @@ func (t *TaskUpdate) UpdateFlux(oldFlux string) error { return ast.GetError(parsedPKG) } parsed := parsedPKG.Files[0] - if t.Options.Every != 0 && t.Options.Cron != "" { - return errors.New("cannot specify both every and cron") + if !t.Options.Every.IsZero() && t.Options.Cron != "" { + return errors.New("cannot specify both cron and every") } op := make(map[string]ast.Expression, 4) if t.Options.Name != "" { op["name"] = &ast.StringLiteral{Value: t.Options.Name} } - if t.Options.Every != 0 { - d := ast.Duration{Magnitude: int64(t.Options.Every), Unit: "ns"} - op["every"] = &ast.DurationLiteral{Values: []ast.Duration{d}} + if !t.Options.Every.IsZero() { + op["every"] = &t.Options.Every.Node } if t.Options.Cron != "" { op["cron"] = &ast.StringLiteral{Value: t.Options.Cron} } if t.Options.Offset != nil { - if *t.Options.Offset != 0 { - d := ast.Duration{Magnitude: int64(*t.Options.Offset), Unit: "ns"} - op["offset"] = &ast.DurationLiteral{Values: []ast.Duration{d}} + if !t.Options.Offset.IsZero() { + op["offset"] = &t.Options.Offset.Node } else { toDelete["offset"] = struct{}{} } @@ -285,12 +296,12 @@ func (t *TaskUpdate) UpdateFlux(oldFlux string) error { case "offset": if offset, ok := op["offset"]; ok && t.Options.Offset != nil { delete(op, "offset") - p.Value = offset + p.Value = offset.Copy().(*ast.DurationLiteral) } case "every": - if every, ok := op["every"]; ok && t.Options.Every != 0 { + if every, ok := op["every"]; ok && !t.Options.Every.IsZero() { + p.Value = every.Copy().(*ast.DurationLiteral) delete(op, "every") - p.Value = every } else if cron, ok := op["cron"]; ok && t.Options.Cron != "" { delete(op, "cron") p.Value = cron @@ -300,10 +311,10 @@ func (t *TaskUpdate) UpdateFlux(oldFlux string) error { if cron, ok := op["cron"]; ok && t.Options.Cron != "" { delete(op, "cron") p.Value = cron - } else if every, ok := op["every"]; ok && t.Options.Every != 0 { + } else if every, ok := op["every"]; ok && !t.Options.Every.IsZero() { delete(op, "every") p.Key = &ast.Identifier{Name: "every"} - p.Value = every + p.Value = every.Copy().(*ast.DurationLiteral) } } } diff --git a/task/backend/coordinator/coordinator.go b/task/backend/coordinator/coordinator.go index 835ec09dbb..0d263bb4fb 100644 --- a/task/backend/coordinator/coordinator.go +++ b/task/backend/coordinator/coordinator.go @@ -58,8 +58,13 @@ func (c *Coordinator) claimExistingTasks() { continue } - t := task // Copy to avoid mistaken closure around task value. 
- if err := c.sch.ClaimTask(&t.Task, &t.Meta); err != nil { + t, err := backend.ToInfluxTask(&task.Task, &task.Meta) + if err != nil { + continue + } + + // I may need a context with an auth here + if err := c.sch.ClaimTask(context.Background(), t); err != nil { c.logger.Error("failed claim task", zap.Error(err)) continue } @@ -84,8 +89,11 @@ func (c *Coordinator) CreateTask(ctx context.Context, req backend.CreateTaskRequ if err != nil { return id, err } - - if err := c.sch.ClaimTask(task, meta); err != nil { + t, err := backend.ToInfluxTask(task, meta) + if err != nil { + return id, err + } + if err := c.sch.ClaimTask(ctx, t); err != nil { _, delErr := c.Store.DeleteTask(ctx, id) if delErr != nil { return id, fmt.Errorf("schedule task failed: %s\n\tcleanup also failed: %s", err, delErr) @@ -114,13 +122,18 @@ func (c *Coordinator) UpdateTask(ctx context.Context, req backend.UpdateTaskRequ } } - if err := c.sch.UpdateTask(task, meta); err != nil && err != backend.ErrTaskNotClaimed { + t, err := backend.ToInfluxTask(task, meta) + if err != nil { + return res, err + } + + if err := c.sch.UpdateTask(ctx, t); err != nil && err != backend.ErrTaskNotClaimed { return res, err } // If enabling the task, claim it after modifying the script. if req.Status == backend.TaskActive { - if err := c.sch.ClaimTask(task, meta); err != nil && err != backend.ErrTaskAlreadyClaimed { + if err := c.sch.ClaimTask(ctx, t); err != nil && err != backend.ErrTaskAlreadyClaimed { return res, err } } @@ -162,9 +175,15 @@ func (c *Coordinator) ManuallyRunTimeRange(ctx context.Context, taskID platform. if err != nil { return r, err } - t, m, err := c.Store.FindTaskByIDWithMeta(ctx, taskID) + task, meta, err := c.Store.FindTaskByIDWithMeta(ctx, taskID) if err != nil { return nil, err } - return r, c.sch.UpdateTask(t, m) + + t, err := backend.ToInfluxTask(task, meta) + if err != nil { + return nil, err + } + + return r, c.sch.UpdateTask(ctx, t) } diff --git a/task/backend/coordinator/coordinator_test.go b/task/backend/coordinator/coordinator_test.go index 39e156ab42..0775f50273 100644 --- a/task/backend/coordinator/coordinator_test.go +++ b/task/backend/coordinator/coordinator_test.go @@ -15,11 +15,11 @@ import ( "go.uber.org/zap/zaptest" ) -func timeoutSelector(ch <-chan *mock.Task) (*mock.Task, error) { +func timeoutSelector(ch <-chan *platform.Task) (*platform.Task, error) { select { case task := <-ch: return task, nil - case <-time.After(time.Second): + case <-time.After(10 * time.Second): return nil, errors.New("timeout on select") } } @@ -47,7 +47,7 @@ func TestCoordinator(t *testing.T) { t.Fatal(err) } - if task.Script != script { + if task.Flux != script { t.Fatal("task sent to scheduler doesnt match task created") } @@ -65,7 +65,7 @@ func TestCoordinator(t *testing.T) { t.Fatal(err) } - if task.Script != script { + if task.Flux != script { t.Fatal("task sent to scheduler doesnt match task created") } @@ -102,7 +102,7 @@ func TestCoordinator(t *testing.T) { t.Fatal(err) } - if task.Script != script { + if task.Flux != script { t.Fatal("task sent to scheduler doesnt match task created") } @@ -115,7 +115,7 @@ func TestCoordinator(t *testing.T) { t.Fatal(err) } - if task.Script != script { + if task.Flux != script { t.Fatal("task sent to scheduler doesnt match task created") } @@ -129,7 +129,7 @@ func TestCoordinator(t *testing.T) { t.Fatal(err) } - if task.Script != newScript { + if task.Flux != newScript { t.Fatal("task sent to scheduler doesnt match task created") } } diff --git a/task/backend/meta.go 
b/task/backend/meta.go index 9cca1b7172..7a6a31739b 100644 --- a/task/backend/meta.go +++ b/task/backend/meta.go @@ -14,6 +14,7 @@ import ( // This file contains helper methods for the StoreTaskMeta type defined in protobuf. // NewStoreTaskMeta returns a new StoreTaskMeta based on the given request and parsed options. +// Do not call this without validating the request and options first. func NewStoreTaskMeta(req CreateTaskRequest, o options.Options) StoreTaskMeta { stm := StoreTaskMeta{ Status: string(req.Status), @@ -26,7 +27,8 @@ func NewStoreTaskMeta(req CreateTaskRequest, o options.Options) StoreTaskMeta { stm.MaxConcurrency = int32(*o.Concurrency) } if o.Offset != nil { - stm.Offset = int32(*o.Offset / time.Second) + offset, _ := o.Offset.DurationFrom(time.Unix(req.ScheduleAfter, 0)) // we can do this because it is validated already. + stm.Offset = offset.String() } if stm.Status == "" { @@ -43,20 +45,29 @@ func (stm *StoreTaskMeta) AlignLatestCompleted() { if strings.HasPrefix(stm.EffectiveCron, "@every ") { everyString := strings.TrimPrefix(stm.EffectiveCron, "@every ") - every, err := time.ParseDuration(everyString) + every := options.Duration{} + err := every.Parse(everyString) if err != nil { // We cannot align a invalid time return } - - t := time.Unix(stm.LatestCompleted, 0).Truncate(every).Unix() - if t == stm.LatestCompleted { + t := time.Unix(stm.LatestCompleted, 0) + everyDur, err := every.DurationFrom(t) + if err != nil { + return + } + t = t.Truncate(everyDur) + if t.Unix() == stm.LatestCompleted { // For example, every 1m truncates to exactly on the minute. // But the input request is schedule after, not "on or after". // Add one interval. - t += int64(every / time.Second) + tafter, err := every.Add(t) + if err != nil { + return + } + t = tafter } - stm.LatestCompleted = t + stm.LatestCompleted = t.Truncate(time.Second).Unix() } } @@ -123,15 +134,23 @@ func (stm *StoreTaskMeta) CreateNextRun(now int64, makeID func() (platform.ID, e latest = cr.Now } } - + nowTime := time.Unix(now, 0) nextScheduled := sch.Next(time.Unix(latest, 0)) nextScheduledUnix := nextScheduled.Unix() - if dueAt := nextScheduledUnix + int64(stm.Offset); dueAt > now { + offset := &options.Duration{} + if err := offset.Parse(stm.Offset); err != nil { + return RunCreation{}, err + } + dueAt, err := offset.Add(nextScheduled) + if err != nil { + return RunCreation{}, err + } + if dueAt.After(nowTime) { // Can't schedule yet. 
if len(stm.ManualRuns) > 0 { - return stm.createNextRunFromQueue(now, dueAt, sch, makeID) + return stm.createNextRunFromQueue(now, dueAt.Unix(), sch, makeID) } - return RunCreation{}, RunNotYetDueError{DueAt: dueAt} + return RunCreation{}, RunNotYetDueError{DueAt: dueAt.Unix()} } id, err := makeID() @@ -145,12 +164,16 @@ func (stm *StoreTaskMeta) CreateNextRun(now int64, makeID func() (platform.ID, e RunID: uint64(id), }) + nextDue, err := offset.Add(sch.Next(nextScheduled)) + if err != nil { + return RunCreation{}, err + } return RunCreation{ Created: QueuedRun{ RunID: id, Now: nextScheduledUnix, }, - NextDue: sch.Next(nextScheduled).Unix() + int64(stm.Offset), + NextDue: nextDue.Unix(), HasQueue: len(stm.ManualRuns) > 0, }, nil } @@ -229,8 +252,15 @@ func (stm *StoreTaskMeta) NextDueRun() (int64, error) { latest = cr.Now } } - - return sch.Next(time.Unix(latest, 0)).Unix() + int64(stm.Offset), nil + offset := &options.Duration{} + if err := offset.Parse(stm.Offset); err != nil { + return 0, err + } + nextDue, err := offset.Add(sch.Next(time.Unix(latest, 0))) + if err != nil { + return 0, err + } + return nextDue.Unix(), nil } // ManuallyRunTimeRange requests a manual run covering the approximate range specified by the Unix timestamps start and end. diff --git a/task/backend/meta.pb.go b/task/backend/meta.pb.go index d18c156673..8f1cb8b68d 100644 --- a/task/backend/meta.pb.go +++ b/task/backend/meta.pb.go @@ -35,9 +35,9 @@ type StoreTaskMeta struct { // effective_cron is the effective cron string as reported by the task's options. EffectiveCron string `protobuf:"bytes,5,opt,name=effective_cron,json=effectiveCron,proto3" json:"effective_cron,omitempty"` // Task's configured delay, in seconds. - Offset int32 `protobuf:"varint,6,opt,name=offset,proto3" json:"offset,omitempty"` - CreatedAt int64 `protobuf:"varint,7,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` - UpdatedAt int64 `protobuf:"varint,8,opt,name=updated_at,json=updatedAt,proto3" json:"updated_at,omitempty"` + Offset string `protobuf:"bytes,6,opt,name=offset,proto3" json:"offset,omitempty"` + CreatedAt int64 `protobuf:"varint,7,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + UpdatedAt int64 `protobuf:"varint,8,opt,name=updated_at,json=updatedAt,proto3" json:"updated_at,omitempty"` // The Authorization ID associated with the task. 
AuthorizationID uint64 `protobuf:"varint,9,opt,name=authorization_id,json=authorizationId,proto3" json:"authorization_id,omitempty"` ManualRuns []*StoreTaskMetaManualRun `protobuf:"bytes,16,rep,name=manual_runs,json=manualRuns,proto3" json:"manual_runs,omitempty"` @@ -47,7 +47,7 @@ func (m *StoreTaskMeta) Reset() { *m = StoreTaskMeta{} } func (m *StoreTaskMeta) String() string { return proto.CompactTextString(m) } func (*StoreTaskMeta) ProtoMessage() {} func (*StoreTaskMeta) Descriptor() ([]byte, []int) { - return fileDescriptor_meta_841ef32afee093f0, []int{0} + return fileDescriptor_meta_b8385560be3db2c8, []int{0} } func (m *StoreTaskMeta) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -111,11 +111,11 @@ func (m *StoreTaskMeta) GetEffectiveCron() string { return "" } -func (m *StoreTaskMeta) GetOffset() int32 { +func (m *StoreTaskMeta) GetOffset() string { if m != nil { return m.Offset } - return 0 + return "" } func (m *StoreTaskMeta) GetCreatedAt() int64 { @@ -164,7 +164,7 @@ func (m *StoreTaskMetaRun) Reset() { *m = StoreTaskMetaRun{} } func (m *StoreTaskMetaRun) String() string { return proto.CompactTextString(m) } func (*StoreTaskMetaRun) ProtoMessage() {} func (*StoreTaskMetaRun) Descriptor() ([]byte, []int) { - return fileDescriptor_meta_841ef32afee093f0, []int{1} + return fileDescriptor_meta_b8385560be3db2c8, []int{1} } func (m *StoreTaskMetaRun) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -254,7 +254,7 @@ func (m *StoreTaskMetaManualRun) Reset() { *m = StoreTaskMetaManualRun{} func (m *StoreTaskMetaManualRun) String() string { return proto.CompactTextString(m) } func (*StoreTaskMetaManualRun) ProtoMessage() {} func (*StoreTaskMetaManualRun) Descriptor() ([]byte, []int) { - return fileDescriptor_meta_841ef32afee093f0, []int{2} + return fileDescriptor_meta_b8385560be3db2c8, []int{2} } func (m *StoreTaskMetaManualRun) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -372,10 +372,11 @@ func (m *StoreTaskMeta) MarshalTo(dAtA []byte) (int, error) { i = encodeVarintMeta(dAtA, i, uint64(len(m.EffectiveCron))) i += copy(dAtA[i:], m.EffectiveCron) } - if m.Offset != 0 { - dAtA[i] = 0x30 + if len(m.Offset) > 0 { + dAtA[i] = 0x32 i++ - i = encodeVarintMeta(dAtA, i, uint64(m.Offset)) + i = encodeVarintMeta(dAtA, i, uint64(len(m.Offset))) + i += copy(dAtA[i:], m.Offset) } if m.CreatedAt != 0 { dAtA[i] = 0x38 @@ -535,8 +536,9 @@ func (m *StoreTaskMeta) Size() (n int) { if l > 0 { n += 1 + l + sovMeta(uint64(l)) } - if m.Offset != 0 { - n += 1 + sovMeta(uint64(m.Offset)) + l = len(m.Offset) + if l > 0 { + n += 1 + l + sovMeta(uint64(l)) } if m.CreatedAt != 0 { n += 1 + sovMeta(uint64(m.CreatedAt)) @@ -777,10 +779,10 @@ func (m *StoreTaskMeta) Unmarshal(dAtA []byte) error { m.EffectiveCron = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 6: - if wireType != 0 { + if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Offset", wireType) } - m.Offset = 0 + var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowMeta @@ -790,11 +792,21 @@ func (m *StoreTaskMeta) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - m.Offset |= (int32(b) & 0x7F) << shift + stringLen |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthMeta + } + postIndex := iNdEx + intStringLen + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Offset = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex case 7: if wireType != 0 { return 
fmt.Errorf("proto: wrong wireType = %d for field CreatedAt", wireType) @@ -1318,42 +1330,42 @@ var ( ErrIntOverflowMeta = fmt.Errorf("proto: integer overflow") ) -func init() { proto.RegisterFile("meta.proto", fileDescriptor_meta_841ef32afee093f0) } +func init() { proto.RegisterFile("meta.proto", fileDescriptor_meta_b8385560be3db2c8) } -var fileDescriptor_meta_841ef32afee093f0 = []byte{ - // 543 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x93, 0x41, 0x6f, 0xd3, 0x30, - 0x14, 0xc7, 0x1b, 0xd2, 0x74, 0xab, 0x4b, 0xd7, 0x60, 0xa6, 0x29, 0x02, 0x91, 0x66, 0x15, 0x88, - 0x72, 0x09, 0x12, 0x48, 0x9c, 0x10, 0x52, 0x57, 0x38, 0xec, 0xb0, 0x8b, 0xc7, 0x09, 0x09, 0x45, - 0x5e, 0xe2, 0x94, 0xa8, 0x89, 0x5d, 0x9c, 0x67, 0x68, 0xf9, 0x14, 0x7c, 0x14, 0xae, 0x7c, 0x03, - 0x8e, 0x3b, 0x72, 0x9a, 0x50, 0xfb, 0x35, 0x38, 0x20, 0x3b, 0x69, 0xd9, 0x46, 0x0f, 0x68, 0xb7, - 0xe7, 0xdf, 0x8b, 0x9f, 0xdf, 0xff, 0xff, 0x5e, 0x10, 0x2a, 0x18, 0xd0, 0x70, 0x26, 0x05, 0x08, - 0xfc, 0x30, 0x16, 0x45, 0x98, 0xf1, 0x34, 0x57, 0xf3, 0x84, 0x6a, 0x9a, 0x53, 0x48, 0x85, 0x2c, - 0x42, 0xa0, 0xe5, 0x34, 0x3c, 0xa3, 0xf1, 0x94, 0xf1, 0xe4, 0xde, 0xfe, 0x44, 0x4c, 0x84, 0xb9, - 0xf0, 0x54, 0x47, 0xd5, 0xdd, 0xc1, 0x6f, 0x1b, 0x75, 0x4f, 0x41, 0x48, 0xf6, 0x96, 0x96, 0xd3, - 0x13, 0x06, 0x14, 0x3f, 0x46, 0xbd, 0x82, 0xce, 0xa3, 0x58, 0xf0, 0x58, 0x49, 0xc9, 0x78, 0xbc, - 0xf0, 0xac, 0xc0, 0x1a, 0x3a, 0x64, 0xaf, 0xa0, 0xf3, 0xf1, 0x5f, 0x8a, 0x9f, 0x20, 0x37, 0xa7, - 0xc0, 0x4a, 0x88, 0x62, 0x51, 0xcc, 0x72, 0x06, 0x2c, 0xf1, 0x6e, 0x05, 0xd6, 0xd0, 0x26, 0xbd, - 0x8a, 0x8f, 0xd7, 0x18, 0x1f, 0xa0, 0x56, 0x09, 0x14, 0x54, 0xe9, 0xd9, 0x81, 0x35, 0x6c, 0x93, - 0xfa, 0x84, 0x63, 0x74, 0xa7, 0x2a, 0x07, 0xf9, 0x22, 0x92, 0x8a, 0xf3, 0x8c, 0x4f, 0xbc, 0x66, - 0x60, 0x0f, 0x3b, 0xcf, 0x5e, 0x84, 0xff, 0xa3, 0x2a, 0xbc, 0xd2, 0x3b, 0x51, 0x9c, 0xb8, 0x9b, - 0x82, 0xa4, 0xaa, 0x87, 0x1f, 0xa1, 0x3d, 0x96, 0xa6, 0x2c, 0x86, 0xec, 0x13, 0x8b, 0x62, 0x29, - 0xb8, 0xe7, 0x98, 0x26, 0xba, 0x1b, 0x3a, 0x96, 0x82, 0xeb, 0x1e, 0x45, 0x9a, 0x96, 0x0c, 0xbc, - 0x96, 0x91, 0x5b, 0x9f, 0xf0, 0x03, 0x84, 0x62, 0xc9, 0x28, 0xb0, 0x24, 0xa2, 0xe0, 0xed, 0x18, - 0x81, 0xed, 0x9a, 0x8c, 0x4c, 0x5a, 0xcd, 0x92, 0x75, 0x7a, 0xb7, 0x4a, 0xd7, 0x64, 0x04, 0xf8, - 0x15, 0x72, 0xa9, 0x82, 0x0f, 0x42, 0x66, 0x5f, 0x28, 0x64, 0x82, 0x47, 0x59, 0xe2, 0xb5, 0x03, - 0x6b, 0xd8, 0x3c, 0xba, 0xbb, 0xbc, 0xe8, 0xf7, 0x46, 0x97, 0x73, 0xc7, 0xaf, 0x49, 0xef, 0xca, - 0xc7, 0xc7, 0x09, 0x7e, 0x8f, 0x3a, 0x05, 0xe5, 0x8a, 0xe6, 0xda, 0x9e, 0xd2, 0x73, 0x8d, 0x37, - 0x2f, 0x6f, 0xe0, 0xcd, 0x89, 0xa9, 0xa2, 0x1d, 0x42, 0xc5, 0x3a, 0x2c, 0x07, 0xdf, 0x2d, 0xe4, - 0x5e, 0xb7, 0x10, 0xbb, 0xc8, 0xe6, 0xe2, 0xb3, 0x99, 0xba, 0x4d, 0x74, 0xa8, 0x09, 0xc8, 0x85, - 0x99, 0x6e, 0x97, 0xe8, 0x10, 0x07, 0xa8, 0x25, 0x95, 0x51, 0x63, 0x1b, 0x35, 0xed, 0xe5, 0x45, - 0xdf, 0x21, 0x4a, 0x6b, 0x70, 0xa4, 0xd2, 0x9d, 0xf7, 0x51, 0x47, 0x52, 0x3e, 0x61, 0x51, 0x09, - 0x54, 0x82, 0xd7, 0x34, 0xd5, 0x90, 0x41, 0xa7, 0x9a, 0xe0, 0xfb, 0xa8, 0x5d, 0x7d, 0xc0, 0x78, - 0x62, 0x46, 0x62, 0x93, 0x5d, 0x03, 0xde, 0xf0, 0x04, 0x1f, 0xa2, 0xdb, 0x92, 0x7d, 0x54, 0xac, - 0xac, 0x8d, 0x6d, 0x99, 0x7c, 0x67, 0xc3, 0x46, 0x30, 0xf8, 0x66, 0xa1, 0x83, 0xed, 0x12, 0xf1, - 0x3e, 0x72, 0xaa, 0x57, 0x2b, 0x0d, 0xd5, 0x41, 0xab, 0xd0, 0x4f, 0x55, 0x3b, 0xaa, 0xc3, 0xad, - 0x2b, 0x6c, 0x6f, 0x5f, 0xe1, 0xeb, 0x0d, 0x35, 0xff, 0x69, 0xe8, 0x92, 0x27, 0xce, 0x76, 0x4f, - 0x8e, 0x0e, 0x7f, 0x2c, 0x7d, 0xeb, 0x7c, 0xe9, 0x5b, 0xbf, 0x96, 0xbe, 
0xf5, 0x75, 0xe5, 0x37, - 0xce, 0x57, 0x7e, 0xe3, 0xe7, 0xca, 0x6f, 0xbc, 0xdb, 0xa9, 0x87, 0x76, 0xd6, 0x32, 0xff, 0xe5, - 0xf3, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x74, 0xb0, 0xab, 0x79, 0xe1, 0x03, 0x00, 0x00, +var fileDescriptor_meta_b8385560be3db2c8 = []byte{ + // 544 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x53, 0xc1, 0x6e, 0xd3, 0x4c, + 0x10, 0xae, 0x7f, 0xc7, 0x69, 0xb3, 0xf9, 0xd3, 0x98, 0xa5, 0xaa, 0x2c, 0x10, 0x8e, 0x1b, 0x81, + 0x08, 0x17, 0x23, 0x81, 0xc4, 0x09, 0x21, 0xa5, 0x81, 0x43, 0x0f, 0xbd, 0x6c, 0x39, 0x21, 0x21, + 0x6b, 0x6b, 0xaf, 0x83, 0x15, 0x7b, 0x37, 0xac, 0x67, 0x21, 0xe1, 0x29, 0x78, 0x14, 0xae, 0xbc, + 0x01, 0xc7, 0x1e, 0x39, 0x55, 0x28, 0x79, 0x0d, 0x0e, 0x68, 0x77, 0x93, 0xd0, 0x96, 0x1c, 0x10, + 0xb7, 0x99, 0x6f, 0x76, 0x67, 0xbf, 0xef, 0x9b, 0x59, 0x84, 0x2a, 0x06, 0x34, 0x9e, 0x4a, 0x01, + 0x02, 0xdf, 0x4f, 0x45, 0x15, 0x17, 0x3c, 0x2f, 0xd5, 0x2c, 0xa3, 0x1a, 0x2d, 0x29, 0xe4, 0x42, + 0x56, 0x31, 0xd0, 0x7a, 0x12, 0x9f, 0xd3, 0x74, 0xc2, 0x78, 0x76, 0xe7, 0x60, 0x2c, 0xc6, 0xc2, + 0x5c, 0x78, 0xac, 0x23, 0x7b, 0xb7, 0xff, 0xd3, 0x45, 0x9d, 0x33, 0x10, 0x92, 0xbd, 0xa6, 0xf5, + 0xe4, 0x94, 0x01, 0xc5, 0x0f, 0x51, 0xb7, 0xa2, 0xb3, 0x24, 0x15, 0x3c, 0x55, 0x52, 0x32, 0x9e, + 0xce, 0x03, 0x27, 0x72, 0x06, 0x1e, 0xd9, 0xaf, 0xe8, 0x6c, 0xf4, 0x1b, 0xc5, 0x8f, 0x90, 0x5f, + 0x52, 0x60, 0x35, 0x24, 0xa9, 0xa8, 0xa6, 0x25, 0x03, 0x96, 0x05, 0xff, 0x45, 0xce, 0xc0, 0x25, + 0x5d, 0x8b, 0x8f, 0xd6, 0x30, 0x3e, 0x44, 0xcd, 0x1a, 0x28, 0xa8, 0x3a, 0x70, 0x23, 0x67, 0xd0, + 0x22, 0xab, 0x0c, 0xa7, 0xe8, 0x96, 0x6d, 0x07, 0xe5, 0x3c, 0x91, 0x8a, 0xf3, 0x82, 0x8f, 0x83, + 0x46, 0xe4, 0x0e, 0xda, 0x4f, 0x9e, 0xc5, 0x7f, 0xa3, 0x2a, 0xbe, 0xc6, 0x9d, 0x28, 0x4e, 0xfc, + 0x4d, 0x43, 0x62, 0xfb, 0xe1, 0x07, 0x68, 0x9f, 0xe5, 0x39, 0x4b, 0xa1, 0xf8, 0xc0, 0x92, 0x54, + 0x0a, 0x1e, 0x78, 0x86, 0x44, 0x67, 0x83, 0x8e, 0xa4, 0xe0, 0x9a, 0xa3, 0xc8, 0xf3, 0x9a, 0x41, + 0xd0, 0xb4, 0x1c, 0x6d, 0x86, 0xef, 0x21, 0x94, 0x4a, 0x46, 0x81, 0x65, 0x09, 0x85, 0x60, 0xd7, + 0x08, 0x6c, 0xad, 0x90, 0xa1, 0x29, 0xab, 0x69, 0xb6, 0x2e, 0xef, 0xd9, 0xf2, 0x0a, 0x19, 0x02, + 0x7e, 0x81, 0x7c, 0xaa, 0xe0, 0x9d, 0x90, 0xc5, 0x27, 0x0a, 0x85, 0xe0, 0x49, 0x91, 0x05, 0xad, + 0xc8, 0x19, 0x34, 0x8e, 0x6f, 0x2f, 0x2e, 0x7b, 0xdd, 0xe1, 0xd5, 0xda, 0xc9, 0x4b, 0xd2, 0xbd, + 0x76, 0xf8, 0x24, 0xc3, 0x6f, 0x51, 0xbb, 0xa2, 0x5c, 0xd1, 0x52, 0xdb, 0x53, 0x07, 0xbe, 0xf1, + 0xe6, 0xf9, 0x3f, 0x78, 0x73, 0x6a, 0xba, 0x68, 0x87, 0x50, 0xb5, 0x0e, 0xeb, 0xfe, 0x57, 0x07, + 0xf9, 0x37, 0x2d, 0xc4, 0x3e, 0x72, 0xb9, 0xf8, 0x68, 0xa6, 0xee, 0x12, 0x1d, 0x6a, 0x04, 0xe4, + 0xdc, 0x4c, 0xb7, 0x43, 0x74, 0x88, 0x23, 0xd4, 0x94, 0xca, 0xa8, 0x71, 0x8d, 0x9a, 0xd6, 0xe2, + 0xb2, 0xe7, 0x11, 0xa5, 0x35, 0x78, 0x52, 0x69, 0xe6, 0x3d, 0xd4, 0x96, 0x94, 0x8f, 0x59, 0x52, + 0x03, 0x95, 0x10, 0x34, 0x4c, 0x37, 0x64, 0xa0, 0x33, 0x8d, 0xe0, 0xbb, 0xa8, 0x65, 0x0f, 0x30, + 0x9e, 0x99, 0x91, 0xb8, 0x64, 0xcf, 0x00, 0xaf, 0x78, 0x86, 0x8f, 0xd0, 0xff, 0x92, 0xbd, 0x57, + 0xac, 0x5e, 0x19, 0xdb, 0x34, 0xf5, 0xf6, 0x06, 0x1b, 0x42, 0xff, 0x8b, 0x83, 0x0e, 0xb7, 0x4b, + 0xc4, 0x07, 0xc8, 0xb3, 0xaf, 0x5a, 0x0d, 0x36, 0xd1, 0x2a, 0xf4, 0x53, 0x76, 0x47, 0x75, 0xb8, + 0x75, 0x85, 0xdd, 0xed, 0x2b, 0x7c, 0x93, 0x50, 0xe3, 0x0f, 0x42, 0x57, 0x3c, 0xf1, 0xb6, 0x7b, + 0x72, 0x7c, 0xf4, 0x6d, 0x11, 0x3a, 0x17, 0x8b, 0xd0, 0xf9, 0xb1, 0x08, 0x9d, 0xcf, 0xcb, 0x70, + 0xe7, 0x62, 0x19, 0xee, 0x7c, 0x5f, 0x86, 0x3b, 0x6f, 0x76, 0x57, 0x43, 0x3b, 0x6f, 0x9a, 0x7f, + 
0xf9, 0xf4, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, 0xca, 0x64, 0xf6, 0x93, 0xe1, 0x03, 0x00, 0x00, } diff --git a/task/backend/meta.proto b/task/backend/meta.proto index 0c12fe52b6..f82aefc5fb 100644 --- a/task/backend/meta.proto +++ b/task/backend/meta.proto @@ -25,7 +25,7 @@ message StoreTaskMeta { string effective_cron = 5; // Task's configured delay, in seconds. - int32 offset = 6; + string offset = 6; int64 created_at = 7; int64 updated_at = 8; diff --git a/task/backend/meta_test.go b/task/backend/meta_test.go index 8bcd43cd64..8dfa2b9a81 100644 --- a/task/backend/meta_test.go +++ b/task/backend/meta_test.go @@ -190,7 +190,7 @@ func TestMeta_CreateNextRun_Delay(t *testing.T) { MaxConcurrency: 2, Status: "enabled", EffectiveCron: "* * * * *", // Every minute. - Offset: 5, + Offset: "5s", LatestCompleted: 30, // Arbitrary non-overlap starting point. } @@ -219,7 +219,7 @@ func TestMeta_ManuallyRunTimeRange(t *testing.T) { MaxConcurrency: 2, Status: "enabled", EffectiveCron: "* * * * *", // Every minute. - Offset: 5, + Offset: "5s", LatestCompleted: 30, // Arbitrary non-overlap starting point. } diff --git a/task/backend/scheduler.go b/task/backend/scheduler.go index 36742b28d7..7030cb04e3 100644 --- a/task/backend/scheduler.go +++ b/task/backend/scheduler.go @@ -11,12 +11,12 @@ import ( "time" "github.com/influxdata/flux" + platform "github.com/influxdata/influxdb" + "github.com/influxdata/influxdb/kit/tracing" + "github.com/influxdata/influxdb/task/options" opentracing "github.com/opentracing/opentracing-go" "github.com/prometheus/client_golang/prometheus" "go.uber.org/zap" - - platform "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/kit/tracing" ) var ( @@ -30,21 +30,6 @@ var ( ErrTaskAlreadyClaimed = errors.New("task already claimed") ) -// DesiredState persists the desired state of a run. -type DesiredState interface { - // CreateNextRun requests the next run from the desired state, delegating to (*StoreTaskMeta).CreateNextRun. - // This allows the scheduler to be "dumb" and just tell DesiredState what time the scheduler thinks it is, - // and the DesiredState will create the appropriate run according to the task's cron schedule, - // and according to what's in progress and what's been finished. - // - // If a Run is requested and the cron schedule says the schedule isn't ready, a RunNotYetDueError is returned. - CreateNextRun(ctx context.Context, taskID platform.ID, now int64) (RunCreation, error) - - // FinishRun indicates that the given run is no longer intended to be executed. - // This may be called after a successful or failed execution, or upon cancellation. - FinishRun(ctx context.Context, taskID, runID platform.ID) error -} - // Executor handles execution of a run. type Executor interface { // Execute attempts to begin execution of a run. @@ -114,10 +99,10 @@ type Scheduler interface { Stop() // ClaimTask begins control of task execution in this scheduler. - ClaimTask(task *StoreTask, meta *StoreTaskMeta) error + ClaimTask(authCtx context.Context, task *platform.Task) error // UpdateTask will update the concurrency and the runners for a task - UpdateTask(task *StoreTask, meta *StoreTaskMeta) error + UpdateTask(authCtx context.Context, task *platform.Task) error // ReleaseTask immediately cancels any in-progress runs for the given task ID, // and releases any resources related to management of that task. 
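The Scheduler interface hunk above replaces the StoreTask/StoreTaskMeta pair with an authorized context and a full *platform.Task. A minimal sketch of the new claim path follows, built only from the mock TaskControlService and Executor exercised in the tests later in this patch; the function name, IDs, and timestamps are illustrative assumptions, not part of the change.

package example

import (
	"context"

	platform "github.com/influxdata/influxdb"
	"github.com/influxdata/influxdb/task/backend"
	"github.com/influxdata/influxdb/task/mock"
)

// claimOne wires a TickScheduler against a TaskControlService and claims a task.
func claimOne(authCtx context.Context) error {
	tcs := mock.NewTaskControlService()
	e := mock.NewExecutor()

	// NewScheduler no longer takes a DesiredState or LogWriter; the
	// TaskControlService covers run creation, state updates, and run logs.
	s := backend.NewScheduler(tcs, e, 5)
	s.Start(context.Background())
	defer s.Stop()

	task := &platform.Task{
		ID:              platform.ID(1),
		Every:           "1s",
		LatestCompleted: "1970-01-01T00:00:05Z",
		Flux:            `option task = {name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`,
	}
	tcs.SetTask(task)

	// authCtx carries the task's authorization so the scheduler can call
	// back into the TaskControlService on the task's behalf.
	return s.ClaimTask(authCtx, task)
}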
@@ -166,16 +151,15 @@ func WithLogger(logger *zap.Logger) TickSchedulerOption { } // NewScheduler returns a new scheduler with the given desired state and the given now UTC timestamp. -func NewScheduler(desiredState DesiredState, executor Executor, lw LogWriter, now int64, opts ...TickSchedulerOption) *TickScheduler { +func NewScheduler(taskControlService TaskControlService, executor Executor, now int64, opts ...TickSchedulerOption) *TickScheduler { o := &TickScheduler{ - desiredState: desiredState, - executor: executor, - logWriter: lw, - now: now, - taskSchedulers: make(map[platform.ID]*taskScheduler), - logger: zap.NewNop(), - wg: &sync.WaitGroup{}, - metrics: newSchedulerMetrics(), + taskControlService: taskControlService, + executor: executor, + now: now, + taskSchedulers: make(map[platform.ID]*taskScheduler), + logger: zap.NewNop(), + wg: &sync.WaitGroup{}, + metrics: newSchedulerMetrics(), } for _, opt := range opts { @@ -186,9 +170,8 @@ func NewScheduler(desiredState DesiredState, executor Executor, lw LogWriter, no } type TickScheduler struct { - desiredState DesiredState - executor Executor - logWriter LogWriter + taskControlService TaskControlService + executor Executor now int64 logger *zap.Logger @@ -286,7 +269,7 @@ func (s *TickScheduler) Stop() { s.executor.Wait() } -func (s *TickScheduler) ClaimTask(task *StoreTask, meta *StoreTaskMeta) (err error) { +func (s *TickScheduler) ClaimTask(authCtx context.Context, task *platform.Task) (err error) { s.schedulerMu.Lock() defer s.schedulerMu.Unlock() if s.ctx == nil { @@ -302,7 +285,7 @@ func (s *TickScheduler) ClaimTask(task *StoreTask, meta *StoreTaskMeta) (err err defer s.metrics.ClaimTask(err == nil) - ts, err := newTaskScheduler(s.ctx, s.wg, s, task, meta, s.metrics) + ts, err := newTaskScheduler(s.ctx, authCtx, s.wg, s, task, s.metrics) if err != nil { return err } @@ -314,8 +297,13 @@ func (s *TickScheduler) ClaimTask(task *StoreTask, meta *StoreTaskMeta) (err err s.taskSchedulers[task.ID] = ts - if len(meta.CurrentlyRunning) > 0 { - if err := ts.WorkCurrentlyRunning(meta); err != nil { + // pickup any runs that are still "running from a previous failure" + runs, err := s.taskControlService.CurrentlyRunning(authCtx, task.ID) + if err != nil { + return err + } + if len(runs) > 0 { + if err := ts.WorkCurrentlyRunning(runs); err != nil { return err } } @@ -327,7 +315,12 @@ func (s *TickScheduler) ClaimTask(task *StoreTask, meta *StoreTaskMeta) (err err return nil } -func (s *TickScheduler) UpdateTask(task *StoreTask, meta *StoreTaskMeta) error { +func (s *TickScheduler) UpdateTask(authCtx context.Context, task *platform.Task) error { + opt, err := options.FromScript(task.Flux) + if err != nil { + return err + } + s.schedulerMu.Lock() defer s.schedulerMu.Unlock() @@ -337,20 +330,29 @@ func (s *TickScheduler) UpdateTask(task *StoreTask, meta *StoreTaskMeta) error { } ts.task = task - next, err := meta.NextDueRun() + next, err := s.taskControlService.NextDueRun(authCtx, task.ID) if err != nil { return err } - hasQueue := len(meta.ManualRuns) > 0 + + runs, err := s.taskControlService.ManualRuns(authCtx, task.ID) + if err != nil { + return err + } + + hasQueue := len(runs) > 0 // update the queued information ts.nextDueMu.Lock() ts.hasQueue = hasQueue ts.nextDue = next + ts.authCtx = authCtx ts.nextDueMu.Unlock() - // check the concurrency // todo(lh): In the near future we may not be using the scheduler to manage concurrency. 
- maxC := int(meta.MaxConcurrency) + maxC := len(ts.runners) + if opt.Concurrency != nil { + maxC = int(*opt.Concurrency) + } if maxC != len(ts.runners) { ts.runningMu.Lock() if maxC < len(ts.runners) { @@ -360,7 +362,7 @@ func (s *TickScheduler) UpdateTask(task *StoreTask, meta *StoreTaskMeta) error { if maxC > len(ts.runners) { delta := maxC - len(ts.runners) for i := 0; i < delta; i++ { - ts.runners = append(ts.runners, newRunner(s.ctx, ts.wg, s.logger, task, s.desiredState, s.executor, s.logWriter, ts)) + ts.runners = append(ts.runners, newRunner(s.ctx, ts.wg, s.logger, task, s.taskControlService, s.executor, ts)) } } ts.runningMu.Unlock() @@ -404,7 +406,10 @@ type taskScheduler struct { now *int64 // Task we are scheduling for. - task *StoreTask + task *platform.Task + + // Authorization context for using the TaskControlService + authCtx context.Context // CancelFunc for context passed to runners, to enable Cancel method. cancel context.CancelFunc @@ -427,13 +432,26 @@ type taskScheduler struct { func newTaskScheduler( ctx context.Context, + authCtx context.Context, wg *sync.WaitGroup, s *TickScheduler, - task *StoreTask, - meta *StoreTaskMeta, + task *platform.Task, metrics *schedulerMetrics, ) (*taskScheduler, error) { - firstDue, err := meta.NextDueRun() + firstDue, err := s.taskControlService.NextDueRun(authCtx, task.ID) + if err != nil { + return nil, err + } + opt, err := options.FromScript(task.Flux) + if err != nil { + return nil, err + } + maxC := 1 + if opt.Concurrency != nil { + maxC = int(*opt.Concurrency) + } + + runs, err := s.taskControlService.ManualRuns(authCtx, task.ID) if err != nil { return nil, err } @@ -442,20 +460,21 @@ func newTaskScheduler( ts := &taskScheduler{ now: &s.now, task: task, + authCtx: authCtx, cancel: cancel, wg: wg, - runners: make([]*runner, meta.MaxConcurrency), - running: make(map[platform.ID]runCtx, meta.MaxConcurrency), + runners: make([]*runner, maxC), + running: make(map[platform.ID]runCtx, maxC), logger: s.logger.With(zap.String("task_id", task.ID.String())), metrics: s.metrics, nextDue: firstDue, nextDueSource: math.MinInt64, - hasQueue: len(meta.ManualRuns) > 0, + hasQueue: len(runs) > 0, } for i := range ts.runners { logger := ts.logger.With(zap.Int("run_slot", i)) - ts.runners[i] = newRunner(ctx, wg, logger, task, s.desiredState, s.executor, s.logWriter, ts) + ts.runners[i] = newRunner(ctx, wg, logger, task, s.taskControlService, s.executor, ts) } return ts, nil @@ -473,11 +492,15 @@ func (ts *taskScheduler) Work() { } } -func (ts *taskScheduler) WorkCurrentlyRunning(meta *StoreTaskMeta) error { - for _, cr := range meta.CurrentlyRunning { +func (ts *taskScheduler) WorkCurrentlyRunning(runs []*platform.Run) error { + for _, cr := range runs { foundWorker := false for _, r := range ts.runners { - qr := QueuedRun{TaskID: ts.task.ID, RunID: platform.ID(cr.RunID), Now: cr.Now} + time, err := time.Parse(time.RFC3339, cr.ScheduledFor) + if err != nil { + return err + } + qr := QueuedRun{TaskID: ts.task.ID, RunID: platform.ID(cr.ID), Now: time.Unix()} if r.RestartRun(qr) { foundWorker = true break @@ -523,11 +546,10 @@ type runner struct { ctx context.Context wg *sync.WaitGroup - task *StoreTask + task *platform.Task - desiredState DesiredState - executor Executor - logWriter LogWriter + taskControlService TaskControlService + executor Executor // Parent taskScheduler. 
ts *taskScheduler @@ -539,22 +561,20 @@ func newRunner( ctx context.Context, wg *sync.WaitGroup, logger *zap.Logger, - task *StoreTask, - desiredState DesiredState, + task *platform.Task, + taskControlService TaskControlService, executor Executor, - logWriter LogWriter, ts *taskScheduler, ) *runner { return &runner{ - ctx: ctx, - wg: wg, - state: new(uint32), - task: task, - desiredState: desiredState, - executor: executor, - logWriter: logWriter, - ts: ts, - logger: logger, + ctx: ctx, + wg: wg, + state: new(uint32), + task: task, + taskControlService: taskControlService, + executor: executor, + ts: ts, + logger: logger, } } @@ -624,7 +644,7 @@ func (r *runner) startFromWorking(now int64) { defer span.Finish() ctx, cancel := context.WithCancel(ctx) - rc, err := r.desiredState.CreateNextRun(ctx, r.task.ID, now) + rc, err := r.taskControlService.CreateNextRun(ctx, r.task.ID, now) if err != nil { r.logger.Info("Failed to create run", zap.Error(err)) atomic.StoreUint32(r.state, runnerIdle) @@ -658,13 +678,7 @@ func (r *runner) clearRunning(id platform.ID) { // fail sets r's state to failed, and marks this runner as idle. func (r *runner) fail(qr QueuedRun, runLogger *zap.Logger, stage string, reason error) { - rlb := RunLogBase{ - Task: r.task, - RunID: qr.RunID, - RunScheduledFor: qr.Now, - RequestedAt: qr.RequestedAt, - } - if err := r.logWriter.AddRunLog(r.ctx, rlb, time.Now(), stage+": "+reason.Error()); err != nil { + if err := r.taskControlService.AddRunLog(r.ts.authCtx, r.task.ID, qr.RunID, time.Now(), stage+": "+reason.Error()); err != nil { runLogger.Info("Failed to update run log", zap.Error(err)) } @@ -674,6 +688,16 @@ func (r *runner) fail(qr QueuedRun, runLogger *zap.Logger, stage string, reason func (r *runner) executeAndWait(ctx context.Context, qr QueuedRun, runLogger *zap.Logger) { defer r.wg.Done() + errMsg := "Failed to finish run" + defer func() { + if _, err := r.taskControlService.FinishRun(r.ctx, qr.TaskID, qr.RunID); err != nil { + // TODO(mr): Need to figure out how to reconcile this error, on the next run, if it happens. + + runLogger.Error(errMsg, zap.Error(err)) + + atomic.StoreUint32(r.state, runnerIdle) + } + }() sp, spCtx := tracing.StartSpanFromContext(ctx) defer sp.Finish() @@ -681,11 +705,7 @@ func (r *runner) executeAndWait(ctx context.Context, qr QueuedRun, runLogger *za rp, err := r.executor.Execute(spCtx, qr) if err != nil { runLogger.Info("Failed to begin run execution", zap.Error(err)) - if err := r.desiredState.FinishRun(r.ctx, qr.TaskID, qr.RunID); err != nil { - // TODO(mr): Need to figure out how to reconcile this error, on the next run, if it happens. - runLogger.Error("Beginning run execution failed, and desired state update failed", zap.Error(err)) - } - + errMsg = "Beginning run execution failed, " + errMsg // TODO(mr): retry? r.fail(qr, runLogger, "Run failed to begin execution", err) return @@ -713,19 +733,14 @@ func (r *runner) executeAndWait(ctx context.Context, qr QueuedRun, runLogger *za close(ready) if err != nil { if err == ErrRunCanceled { - _ = r.desiredState.FinishRun(r.ctx, qr.TaskID, qr.RunID) r.updateRunState(qr, RunCanceled, runLogger) - + errMsg = "Waiting for execution result failed, " + errMsg // Move on to the next execution, for a canceled run. 
r.startFromWorking(atomic.LoadInt64(r.ts.now)) return } runLogger.Info("Failed to wait for execution result", zap.Error(err)) - if err := r.desiredState.FinishRun(r.ctx, qr.TaskID, qr.RunID); err != nil { - // TODO(mr): Need to figure out how to reconcile this error, on the next run, if it happens. - runLogger.Error("Waiting for execution result failed, and desired state update failed", zap.Error(err)) - } // TODO(mr): retry? r.fail(qr, runLogger, "Waiting for execution result", err) @@ -733,34 +748,22 @@ func (r *runner) executeAndWait(ctx context.Context, qr QueuedRun, runLogger *za } if err := rr.Err(); err != nil { runLogger.Info("Run failed to execute", zap.Error(err)) - if err := r.desiredState.FinishRun(r.ctx, qr.TaskID, qr.RunID); err != nil { - // TODO(mr): Need to figure out how to reconcile this error, on the next run, if it happens. - runLogger.Error("Run failed to execute, and desired state update failed", zap.Error(err)) - } + errMsg = "Run failed to execute, " + errMsg + // TODO(mr): retry? r.fail(qr, runLogger, "Run failed to execute", err) return } - if err := r.desiredState.FinishRun(r.ctx, qr.TaskID, qr.RunID); err != nil { - runLogger.Info("Failed to finish run", zap.Error(err)) - // TODO(mr): retry? - // Need to think about what it means if there was an error finishing a run. - atomic.StoreUint32(r.state, runnerIdle) - r.updateRunState(qr, RunFail, runLogger) - return - } - rlb := RunLogBase{ - Task: r.task, - RunID: qr.RunID, - RunScheduledFor: qr.Now, - RequestedAt: qr.RequestedAt, - } stats := rr.Statistics() b, err := json.Marshal(stats) if err == nil { - r.logWriter.AddRunLog(r.ctx, rlb, time.Now(), string(b)) + // authctx can be updated mid process + r.ts.nextDueMu.RLock() + authCtx := r.ts.authCtx + r.ts.nextDueMu.RUnlock() + r.taskControlService.AddRunLog(authCtx, r.task.ID, qr.RunID, time.Now(), string(b)) } r.updateRunState(qr, RunSuccess, runLogger) runLogger.Info("Execution succeeded") @@ -770,26 +773,19 @@ func (r *runner) executeAndWait(ctx context.Context, qr QueuedRun, runLogger *za } func (r *runner) updateRunState(qr QueuedRun, s RunStatus, runLogger *zap.Logger) { - rlb := RunLogBase{ - Task: r.task, - RunID: qr.RunID, - RunScheduledFor: qr.Now, - RequestedAt: qr.RequestedAt, - } - switch s { case RunStarted: r.ts.metrics.StartRun(r.task.ID.String()) - r.logWriter.AddRunLog(r.ctx, rlb, time.Now(), fmt.Sprintf("Started task from script: %q", r.task.Script)) + r.taskControlService.AddRunLog(r.ts.authCtx, r.task.ID, qr.RunID, time.Now(), fmt.Sprintf("Started task from script: %q", r.task.Flux)) case RunSuccess: r.ts.metrics.FinishRun(r.task.ID.String(), true) - r.logWriter.AddRunLog(r.ctx, rlb, time.Now(), "Completed successfully") + r.taskControlService.AddRunLog(r.ts.authCtx, r.task.ID, qr.RunID, time.Now(), "Completed successfully") case RunFail: r.ts.metrics.FinishRun(r.task.ID.String(), false) - r.logWriter.AddRunLog(r.ctx, rlb, time.Now(), "Failed") + r.taskControlService.AddRunLog(r.ts.authCtx, r.task.ID, qr.RunID, time.Now(), "Failed") case RunCanceled: r.ts.metrics.FinishRun(r.task.ID.String(), false) - r.logWriter.AddRunLog(r.ctx, rlb, time.Now(), "Canceled") + r.taskControlService.AddRunLog(r.ts.authCtx, r.task.ID, qr.RunID, time.Now(), "Canceled") default: // We are deliberately not handling RunQueued yet. // There is not really a notion of being queued in this runner architecture. 
runLogger.Warn("Unhandled run state", zap.Stringer("state", s)) @@ -799,7 +795,7 @@ func (r *runner) updateRunState(qr QueuedRun, s RunStatus, runLogger *zap.Logger // If we start seeing errors from this, we know the time limit is too short or the system is overloaded. ctx, cancel := context.WithTimeout(r.ctx, 10*time.Millisecond) defer cancel() - if err := r.logWriter.UpdateRunState(ctx, rlb, time.Now(), s); err != nil { + if err := r.taskControlService.UpdateRunState(ctx, r.task.ID, qr.RunID, time.Now(), s); err != nil { runLogger.Info("Error updating run state", zap.Stringer("state", s), zap.Error(err)) } } diff --git a/task/backend/scheduler_test.go b/task/backend/scheduler_test.go index 6ef5b8f17f..75545acd28 100644 --- a/task/backend/scheduler_test.go +++ b/task/backend/scheduler_test.go @@ -7,6 +7,7 @@ import ( "fmt" "reflect" "strings" + "sync" "testing" "time" @@ -23,55 +24,46 @@ import ( func TestScheduler_Cancelation(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() e.WithHanging(100 * time.Millisecond) - rl := backend.NewInMemRunReaderWriter() - o := backend.NewScheduler(d, e, rl, 5, backend.WithLogger(zaptest.NewLogger(t))) + o := backend.NewScheduler(tcs, e, 5, backend.WithLogger(zaptest.NewLogger(t))) o.Start(context.Background()) defer o.Stop() const orgID = 2 - task := &backend.StoreTask{ - ID: platform.ID(1), - Org: orgID, + task := &platform.Task{ + ID: platform.ID(1), + OrganizationID: orgID, + Every: "1s", + LatestCompleted: "1970-01-01T00:00:04Z", + Flux: `option task = {name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 1, - EffectiveCron: "@every 1s", - LatestCompleted: 4, - } - d.SetTaskMeta(task.ID, *meta) - if err := o.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + if err := o.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } - runs, err := rl.ListRuns(context.Background(), orgID, platform.RunFilter{Task: task.ID}) + runs, err := tcs.CurrentlyRunning(context.Background(), task.ID) if err != nil { t.Fatal(err) } - if err = o.CancelRun(context.Background(), task.ID, runs[0].ID); err != nil { + run := runs[0] + if err = o.CancelRun(context.Background(), task.ID, run.ID); err != nil { t.Fatal(err) } - time.Sleep(10 * time.Millisecond) // we have to do this because the storage system we are using for the logs is eventually consistent. - runs, err = rl.ListRuns(context.Background(), orgID, platform.RunFilter{Task: task.ID}) + time.Sleep(20 * time.Millisecond) // we have to do this because the storage system we are using for the logs is eventually consistent. 
+ runs, err = tcs.CurrentlyRunning(context.Background(), task.ID) if err != nil { t.Fatal(err) } - if runs[0].Status != "canceled" { - t.Fatalf("Run not logged as canceled, but is %s", runs[0].Status) - } - // check to make sure it is really canceling, and that the status doesn't get changed to something else after it would have finished - time.Sleep(500 * time.Millisecond) - runs, err = rl.ListRuns(context.Background(), orgID, platform.RunFilter{Task: task.ID}) - if err != nil { - t.Fatal(err) - } - if runs[0].Status != "canceled" { - t.Fatalf("Run not actually canceled, but is %s", runs[0].Status) + if len(runs) != 0 { + t.Fatal("canceled run still running") } + // check for when we cancel something already canceled - if err = o.CancelRun(context.Background(), task.ID, runs[0].ID); err != backend.ErrRunNotFound { + time.Sleep(500 * time.Millisecond) + if err = o.CancelRun(context.Background(), task.ID, run.ID); err != backend.ErrRunNotFound { t.Fatalf("expected ErrRunNotFound but got %s", err) } } @@ -79,55 +71,47 @@ func TestScheduler_Cancelation(t *testing.T) { func TestScheduler_StartScriptOnClaim(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - o := backend.NewScheduler(d, e, backend.NopLogWriter{}, 5, backend.WithLogger(zaptest.NewLogger(t))) + o := backend.NewScheduler(tcs, e, 5, backend.WithLogger(zaptest.NewLogger(t))) o.Start(context.Background()) defer o.Stop() - task := &backend.StoreTask{ - ID: platform.ID(1), + task := &platform.Task{ + ID: platform.ID(1), + Cron: "* * * * *", + LatestCompleted: "1970-01-01T00:00:03Z", + Flux: `option task = {name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 1, - EffectiveCron: "* * * * *", - LatestCompleted: 3, - } - d.SetTaskMeta(task.ID, *meta) - if err := o.ClaimTask(task, meta); err != nil { + + tcs.SetTask(task) + if err := o.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } // No valid timestamps between 3 and 5 for every minute. - if n := len(d.CreatedFor(task.ID)); n > 0 { + if n := len(tcs.CreatedFor(task.ID)); n > 0 { t.Fatalf("expected no runs queued, but got %d", n) } // For every second, can queue for timestamps 4 and 5. 
- task = &backend.StoreTask{ - ID: platform.ID(2), + task = &platform.Task{ + ID: platform.ID(2), + Every: "1s", + LatestCompleted: "1970-01-01T00:00:03Z", + Flux: `option task = {concurrency: 99, name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - meta = &backend.StoreTaskMeta{ - MaxConcurrency: 99, - EffectiveCron: "@every 1s", - LatestCompleted: 3, - CurrentlyRunning: []*backend.StoreTaskMetaRun{ - &backend.StoreTaskMetaRun{ - Now: 4, - RunID: uint64(10), - }, - }, - } - d.SetTaskMeta(task.ID, *meta) - if err := o.ClaimTask(task, meta); err != nil { + + tcs.SetTask(task) + if err := o.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } - if n := len(d.CreatedFor(task.ID)); n != 1 { + if n := len(tcs.CreatedFor(task.ID)); n != 2 { t.Fatalf("expected 2 runs queued for 'every 1s' script, but got %d", n) } - if x, err := d.PollForNumberCreated(task.ID, 1); err != nil { + if x, err := tcs.PollForNumberCreated(task.ID, 2); err != nil { t.Fatalf("expected 1 runs queued, but got %d", len(x)) } @@ -143,7 +127,7 @@ func TestScheduler_StartScriptOnClaim(t *testing.T) { rp.Finish(mock.NewRunResult(nil, false), nil) } - if x, err := d.PollForNumberCreated(task.ID, 0); err != nil { + if x, err := tcs.PollForNumberCreated(task.ID, 0); err != nil { t.Fatalf("expected 1 runs queued, but got %d", len(x)) } @@ -155,32 +139,30 @@ func TestScheduler_StartScriptOnClaim(t *testing.T) { func TestScheduler_CreateNextRunOnTick(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - o := backend.NewScheduler(d, e, backend.NopLogWriter{}, 5) + o := backend.NewScheduler(tcs, e, 5) o.Start(context.Background()) defer o.Stop() - task := &backend.StoreTask{ - ID: platform.ID(1), - } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 2, - EffectiveCron: "@every 1s", - LatestCompleted: 5, + task := &platform.Task{ + ID: platform.ID(1), + Every: "1s", + LatestCompleted: "1970-01-01T00:00:05Z", + Flux: `option task = {concurrency: 2, name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - d.SetTaskMeta(task.ID, *meta) - if err := o.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + if err := o.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } - if x, err := d.PollForNumberCreated(task.ID, 0); err != nil { + if x, err := tcs.PollForNumberCreated(task.ID, 0); err != nil { t.Fatalf("expected no runs queued, but got %d", len(x)) } o.Tick(6) - if x, err := d.PollForNumberCreated(task.ID, 1); err != nil { + if x, err := tcs.PollForNumberCreated(task.ID, 1); err != nil { t.Fatalf("expected 1 run queued, but got %d", len(x)) } running, err := e.PollForNumberRunning(task.ID, 1) @@ -193,7 +175,7 @@ func TestScheduler_CreateNextRunOnTick(t *testing.T) { } o.Tick(7) - if x, err := d.PollForNumberCreated(task.ID, 2); err != nil { + if x, err := tcs.PollForNumberCreated(task.ID, 2); err != nil { t.Fatalf("expected 2 runs queued, but got %d", len(x)) } running, err = e.PollForNumberRunning(task.ID, 2) @@ -212,7 +194,7 @@ func TestScheduler_CreateNextRunOnTick(t *testing.T) { } o.Tick(8) // Can't exceed concurrency of 2. - if x, err := d.PollForNumberCreated(task.ID, 2); err != nil { + if x, err := tcs.PollForNumberCreated(task.ID, 2); err != nil { t.Fatalf("expected 2 runs queued, but got %d", len(x)) } run6.Cancel() // 7 and 8 should be running. 
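In the test hunks above, the concurrency cap now lives in the task's Flux source (option task = {concurrency: 2, ...}) rather than in StoreTaskMeta.MaxConcurrency, and the scheduler recovers it with options.FromScript, as UpdateTask and newTaskScheduler do earlier in this patch. A small sketch of that extraction under the same assumptions; the helper name and printed label are illustrative only.

package main

import (
	"fmt"

	"github.com/influxdata/influxdb/task/options"
)

// concurrencyFor mirrors how newTaskScheduler sizes its runner pool from the
// task's Flux options: one runner by default, or the concurrency option if set.
func concurrencyFor(flux string) (int, error) {
	opt, err := options.FromScript(flux)
	if err != nil {
		return 0, err
	}
	maxC := 1
	if opt.Concurrency != nil {
		maxC = int(*opt.Concurrency)
	}
	return maxC, nil
}

func main() {
	script := `option task = {concurrency: 2, name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`
	n, err := concurrencyFor(script)
	if err != nil {
		panic(err)
	}
	fmt.Println("runner slots:", n) // prints 2 for this script
}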
@@ -227,28 +209,26 @@ func TestScheduler_CreateNextRunOnTick(t *testing.T) { func TestScheduler_LogStatisticsOnSuccess(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - rl := backend.NewInMemRunReaderWriter() - o := backend.NewScheduler(d, e, rl, 5, backend.WithLogger(zaptest.NewLogger(t))) + o := backend.NewScheduler(tcs, e, 5, backend.WithLogger(zaptest.NewLogger(t))) o.Start(context.Background()) defer o.Stop() const taskID = 0x12345 const orgID = 0x54321 - task := &backend.StoreTask{ - ID: taskID, - Org: orgID, - } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 1, - EffectiveCron: "@every 1s", - LatestCompleted: 5, + + task := &platform.Task{ + ID: taskID, + OrganizationID: orgID, + Every: "1s", + LatestCompleted: "1970-01-01T00:00:05Z", + Flux: `option task = {name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - d.SetTaskMeta(taskID, *meta) - if err := o.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + if err := o.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } @@ -269,14 +249,11 @@ func TestScheduler_LogStatisticsOnSuccess(t *testing.T) { t.Fatal(err) } - logs, err := rl.ListLogs(context.Background(), orgID, platform.LogFilter{Task: taskID, Run: &runID}) - if err != nil { - t.Fatal(err) - } + run := tcs.FinishedRun(runID) // For now, assume the stats line is the only line beginning with "{". var statJSON string - for _, log := range logs { + for _, log := range run.Log { if len(log.Message) > 0 && log.Message[0] == '{' { statJSON = log.Message break @@ -299,29 +276,27 @@ func TestScheduler_LogStatisticsOnSuccess(t *testing.T) { func TestScheduler_Release(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - o := backend.NewScheduler(d, e, backend.NopLogWriter{}, 5) + o := backend.NewScheduler(tcs, e, 5) o.Start(context.Background()) defer o.Stop() - task := &backend.StoreTask{ - ID: platform.ID(1), - } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 99, - EffectiveCron: "@every 1s", - LatestCompleted: 5, + task := &platform.Task{ + ID: platform.ID(1), + Every: "1s", + LatestCompleted: "1970-01-01T00:00:05Z", + Flux: `option task = {concurrency: 99, name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - d.SetTaskMeta(task.ID, *meta) - if err := o.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + if err := o.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } o.Tick(6) o.Tick(7) - if n := len(d.CreatedFor(task.ID)); n != 2 { + if n := len(tcs.CreatedFor(task.ID)); n != 2 { t.Fatalf("expected 2 runs queued, but got %d", n) } @@ -329,7 +304,7 @@ func TestScheduler_Release(t *testing.T) { t.Fatal(err) } - if _, err := d.PollForNumberCreated(task.ID, 0); err != nil { + if _, err := tcs.PollForNumberCreated(task.ID, 0); err != nil { t.Fatal(err) } } @@ -337,23 +312,21 @@ func TestScheduler_Release(t *testing.T) { func TestScheduler_UpdateTask(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - s := backend.NewScheduler(d, e, backend.NopLogWriter{}, 3059, backend.WithLogger(zaptest.NewLogger(t))) + s := backend.NewScheduler(tcs, e, 3059, backend.WithLogger(zaptest.NewLogger(t))) s.Start(context.Background()) defer s.Stop() - task := &backend.StoreTask{ - ID: platform.ID(1), - } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 1, - EffectiveCron: "* * * * *", // Every minute. 
- LatestCompleted: 3000, + task := &platform.Task{ + ID: platform.ID(1), + Cron: "* * * * *", + LatestCompleted: "1970-01-01T00:50:00Z", + Flux: `option task = {name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - d.SetTaskMeta(task.ID, *meta) - if err := s.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + if err := s.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } @@ -365,11 +338,11 @@ func TestScheduler_UpdateTask(t *testing.T) { p[0].Finish(mock.NewRunResult(nil, false), nil) - meta.EffectiveCron = "0 * * * *" - meta.MaxConcurrency = 30 - d.SetTaskMeta(task.ID, *meta) + task.Cron = "0 * * * *" + task.Flux = `option task = {concurrency: 50, name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")` + tcs.SetTask(task) - if err := s.UpdateTask(task, meta); err != nil { + if err := s.UpdateTask(context.Background(), task); err != nil { t.Fatal(err) } @@ -390,30 +363,41 @@ func TestScheduler_UpdateTask(t *testing.T) { func TestScheduler_Queue(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - o := backend.NewScheduler(d, e, backend.NopLogWriter{}, 3059, backend.WithLogger(zaptest.NewLogger(t))) + o := backend.NewScheduler(tcs, e, 3059, backend.WithLogger(zaptest.NewLogger(t))) o.Start(context.Background()) defer o.Stop() - task := &backend.StoreTask{ - ID: platform.ID(1), - } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 1, - EffectiveCron: "* * * * *", // Every minute. - LatestCompleted: 3000, - ManualRuns: []*backend.StoreTaskMetaManualRun{ - {Start: 120, End: 240, LatestCompleted: 119, RequestedAt: 3001}, - }, + task := &platform.Task{ + ID: platform.ID(1), + Cron: "* * * * *", + LatestCompleted: "1970-01-01T00:50:00Z", + Flux: `option task = {name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - d.SetTaskMeta(task.ID, *meta) - if err := o.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + tcs.SetManualRuns([]*platform.Run{ + &platform.Run{ + ID: platform.ID(10), + TaskID: task.ID, + ScheduledFor: "1970-01-01T00:02:00Z", + }, + &platform.Run{ + ID: platform.ID(11), + TaskID: task.ID, + ScheduledFor: "1970-01-01T00:03:00Z", + }, &platform.Run{ + ID: platform.ID(12), + TaskID: task.ID, + ScheduledFor: "1970-01-01T00:04:00Z", + }, + }) + if err := o.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } - cs, err := d.PollForNumberCreated(task.ID, 1) + cs, err := tcs.PollForNumberCreated(task.ID, 1) if err != nil { t.Fatal(err) } @@ -456,40 +440,64 @@ func TestScheduler_Queue(t *testing.T) { // Tick the scheduler so the next natural run will happen once 180 finishes. o.Tick(3062) - // Cancel 180. Next run should be 3060, the next natural schedule. - e.RunningFor(task.ID)[0].Cancel() - pollForRun(3060) - - // Cancel the 3060 run; 240 should pick up. + // Cancel 180. Next run should be 240, manual runs get priority. e.RunningFor(task.ID)[0].Cancel() pollForRun(240) - // Cancel 240; jobs should be idle. + // Cancel the 240 run; 3060 should pick up. + e.RunningFor(task.ID)[0].Cancel() + pollForRun(3060) + + // Cancel 3060; jobs should be idle. 
e.RunningFor(task.ID)[0].Cancel() if _, err := e.PollForNumberRunning(task.ID, 0); err != nil { t.Fatal(err) } } -func pollForRunLog(t *testing.T, r backend.LogReader, taskID, runID, orgID platform.ID, exp string) { +// LogListener allows us to act as a middleware and see if specific logs have been written +type logListener struct { + mu sync.Mutex + + backend.TaskControlService + + logs map[string][]string +} + +func newLogListener(tcs backend.TaskControlService) *logListener { + return &logListener{ + TaskControlService: tcs, + logs: make(map[string][]string), + } +} + +func (l *logListener) AddRunLog(ctx context.Context, taskID, runID platform.ID, when time.Time, log string) error { + l.mu.Lock() + defer l.mu.Unlock() + + logs := l.logs[taskID.String()+runID.String()] + logs = append(logs, log) + l.logs[taskID.String()+runID.String()] = logs + + return l.TaskControlService.AddRunLog(ctx, taskID, runID, when, log) +} + +func pollForRunLog(t *testing.T, ll *logListener, taskID, runID platform.ID, exp string) { t.Helper() - var logs []platform.Log - var err error + var logs []string const maxAttempts = 50 for i := 0; i < maxAttempts; i++ { if i != 0 { time.Sleep(10 * time.Millisecond) } - - logs, err = r.ListLogs(context.Background(), orgID, platform.LogFilter{Task: taskID, Run: &runID}) - if err != nil { - t.Fatal(err) - } + ll.mu.Lock() + logs = ll.logs[taskID.String()+runID.String()] + ll.mu.Unlock() for _, log := range logs { - if log.Message == exp { + if log == exp { return } } @@ -497,33 +505,75 @@ func pollForRunLog(t *testing.T, r backend.LogReader, taskID, runID, orgID platf t.Logf("Didn't find message %q in logs:", exp) for _, log := range logs { - t.Logf("\t%s", log.Message) + t.Logf("\t%s", log) } t.FailNow() } +// runListener allows us to act as a middleware and see if specific states are updated +type runListener struct { + mu sync.Mutex + + backend.TaskControlService + + rs map[platform.ID][]*platform.Run +} + +func newRunListener(tcs backend.TaskControlService) *runListener { + return &runListener{ + TaskControlService: tcs, + rs: make(map[platform.ID][]*platform.Run), + } +} + +func (l *runListener) UpdateRunState(ctx context.Context, taskID, runID platform.ID, when time.Time, state backend.RunStatus) error { + l.mu.Lock() + defer l.mu.Unlock() + + runs, ok := l.rs[taskID] + if !ok { + runs = []*platform.Run{} + } + found := false + for _, run := range runs { + if run.ID == runID { + found = true + run.Status = state.String() + } + } + if !found { + runs = append(runs, &platform.Run{ID: runID, Status: state.String()}) + } + + l.rs[taskID] = runs + + return l.TaskControlService.UpdateRunState(ctx, taskID, runID, when, state) +} + // pollForRunStatus tries a few times to find runs matching supplied conditions, before failing. 
-func pollForRunStatus(t *testing.T, r backend.LogReader, taskID, orgID platform.ID, expCount, expIndex int, expStatus string) { +func pollForRunStatus(t *testing.T, r *runListener, taskID platform.ID, expCount, expIndex int, expStatus string) { t.Helper() var runs []*platform.Run - var err error - const maxAttempts = 50 for i := 0; i < maxAttempts; i++ { if i != 0 { time.Sleep(10 * time.Millisecond) } - runs, err = r.ListRuns(context.Background(), orgID, platform.RunFilter{Task: taskID}) - if err != nil { - t.Fatal(err) - } + r.mu.Lock() + runs = r.rs[taskID] + r.mu.Unlock() if len(runs) != expCount { continue } + // make sure we dont panic + if len(runs) < expIndex { + continue + } + if runs[expIndex].Status != expStatus { continue } @@ -542,26 +592,24 @@ func pollForRunStatus(t *testing.T, r backend.LogReader, taskID, orgID platform. func TestScheduler_RunStatus(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - rl := backend.NewInMemRunReaderWriter() - s := backend.NewScheduler(d, e, rl, 5, backend.WithLogger(zaptest.NewLogger(t))) + rl := newRunListener(tcs) + s := backend.NewScheduler(rl, e, 5, backend.WithLogger(zaptest.NewLogger(t))) s.Start(context.Background()) defer s.Stop() // Claim a task that starts later. - task := &backend.StoreTask{ - ID: platform.ID(1), - Org: 2, - } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 99, - EffectiveCron: "@every 1s", - LatestCompleted: 5, + task := &platform.Task{ + ID: platform.ID(1), + OrganizationID: platform.ID(2), + Every: "1s", + LatestCompleted: "1970-01-01T00:00:05Z", + Flux: `option task = {concurrency: 99, name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - d.SetTaskMeta(task.ID, *meta) - if err := s.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + if err := s.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } @@ -571,7 +619,7 @@ func TestScheduler_RunStatus(t *testing.T) { t.Fatal(err) } - runs, err := rl.ListRuns(context.Background(), task.Org, platform.RunFilter{Task: task.ID}) + runs, err := tcs.CurrentlyRunning(context.Background(), task.ID) if err != nil { t.Fatal(err) } @@ -589,7 +637,7 @@ func TestScheduler_RunStatus(t *testing.T) { t.Fatal(err) } - pollForRunStatus(t, rl, task.ID, task.Org, 1, 0, backend.RunSuccess.String()) + pollForRunStatus(t, rl, task.ID, 1, 0, backend.RunSuccess.String()) // Create a new run, but fail this time. s.Tick(7) @@ -598,7 +646,7 @@ func TestScheduler_RunStatus(t *testing.T) { t.Fatal(err) } - pollForRunStatus(t, rl, task.ID, task.Org, 2, 1, backend.RunStarted.String()) + pollForRunStatus(t, rl, task.ID, 2, 1, backend.RunStarted.String()) // Finish with failure to create the run. promises[0].Finish(nil, errors.New("forced failure")) @@ -606,7 +654,7 @@ func TestScheduler_RunStatus(t *testing.T) { t.Fatal(err) } - pollForRunStatus(t, rl, task.ID, task.Org, 2, 1, backend.RunFail.String()) + pollForRunStatus(t, rl, task.ID, 2, 1, backend.RunFail.String()) // Create a new run that starts but fails. 
s.Tick(8) @@ -615,12 +663,12 @@ func TestScheduler_RunStatus(t *testing.T) { t.Fatal(err) } - pollForRunStatus(t, rl, task.ID, task.Org, 3, 2, backend.RunStarted.String()) + pollForRunStatus(t, rl, task.ID, 3, 2, backend.RunStarted.String()) promises[0].Finish(mock.NewRunResult(errors.New("started but failed to finish properly"), false), nil) if _, err := e.PollForNumberRunning(task.ID, 0); err != nil { t.Fatal(err) } - pollForRunStatus(t, rl, task.ID, task.Org, 3, 2, backend.RunFail.String()) + pollForRunStatus(t, rl, task.ID, 3, 2, backend.RunFail.String()) // One more run, but cancel this time. s.Tick(9) @@ -629,7 +677,7 @@ func TestScheduler_RunStatus(t *testing.T) { t.Fatal(err) } - pollForRunStatus(t, rl, task.ID, task.Org, 4, 3, backend.RunStarted.String()) + pollForRunStatus(t, rl, task.ID, 4, 3, backend.RunStarted.String()) // Finish with failure. promises[0].Cancel() @@ -637,31 +685,29 @@ func TestScheduler_RunStatus(t *testing.T) { t.Fatal(err) } - pollForRunStatus(t, rl, task.ID, task.Org, 4, 3, backend.RunCanceled.String()) + pollForRunStatus(t, rl, task.ID, 4, 3, backend.RunCanceled.String()) } func TestScheduler_RunFailureCleanup(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - rl := backend.NewInMemRunReaderWriter() - s := backend.NewScheduler(d, e, rl, 5, backend.WithLogger(zaptest.NewLogger(t))) + ll := newLogListener(tcs) + s := backend.NewScheduler(ll, e, 5, backend.WithLogger(zaptest.NewLogger(t))) s.Start(context.Background()) defer s.Stop() // Task with concurrency 1 should continue after one run fails. - task := &backend.StoreTask{ - ID: platform.ID(1), - } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 1, - EffectiveCron: "@every 1s", - LatestCompleted: 5, + task := &platform.Task{ + ID: platform.ID(1), + Every: "1s", + LatestCompleted: "1970-01-01T00:00:05Z", + Flux: `option task = {name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - d.SetTaskMeta(task.ID, *meta) - if err := s.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + if err := s.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } @@ -676,7 +722,7 @@ func TestScheduler_RunFailureCleanup(t *testing.T) { if _, err := e.PollForNumberRunning(task.ID, 0); err != nil { t.Fatal(err) } - pollForRunLog(t, rl, task.ID, promises[0].Run().RunID, task.Org, "Waiting for execution result: forced failure") + pollForRunLog(t, ll, task.ID, promises[0].Run().RunID, "Waiting for execution result: forced failure") // Should continue even if max concurrency == 1. // This run will start and then fail. @@ -690,10 +736,10 @@ func TestScheduler_RunFailureCleanup(t *testing.T) { if _, err := e.PollForNumberRunning(task.ID, 0); err != nil { t.Fatal(err) } - pollForRunLog(t, rl, task.ID, promises[0].Run().RunID, task.Org, "Run failed to execute: started but failed to finish properly") + pollForRunLog(t, ll, task.ID, promises[0].Run().RunID, "Run failed to execute: started but failed to finish properly") // Fail to execute next run. 
- if n := d.TotalRunsCreatedForTask(task.ID); n != 2 { + if n := tcs.TotalRunsCreatedForTask(task.ID); n != 2 { t.Fatalf("should have created 2 runs so far, got %d", n) } e.FailNextCallToExecute(errors.New("forced failure on Execute")) @@ -702,7 +748,7 @@ func TestScheduler_RunFailureCleanup(t *testing.T) { const attempts = 50 for i := 0; i < attempts; i++ { time.Sleep(2 * time.Millisecond) - n := d.TotalRunsCreatedForTask(task.ID) + n := tcs.TotalRunsCreatedForTask(task.ID) if n == 3 { break } @@ -712,11 +758,11 @@ func TestScheduler_RunFailureCleanup(t *testing.T) { } } // We don't have a good hook to get the run ID right now, so list the runs and assume the final one is ours. - runs, err := rl.ListRuns(context.Background(), task.Org, platform.RunFilter{Task: task.ID}) + runs := tcs.FinishedRuns() if err != nil { t.Fatal(err) } - pollForRunLog(t, rl, task.ID, runs[len(runs)-1].ID, task.Org, "Run failed to begin execution: forced failure on Execute") + pollForRunLog(t, ll, task.ID, runs[len(runs)-1].ID, "Run failed to begin execution: forced failure on Execute") // One more tick just to ensure that we can keep going after this type of failure too. s.Tick(9) @@ -729,9 +775,9 @@ func TestScheduler_RunFailureCleanup(t *testing.T) { func TestScheduler_Metrics(t *testing.T) { t.Parallel() - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - s := backend.NewScheduler(d, e, backend.NopLogWriter{}, 5) + s := backend.NewScheduler(tcs, e, 5) s.Start(context.Background()) defer s.Stop() @@ -741,17 +787,15 @@ func TestScheduler_Metrics(t *testing.T) { reg.MustRegister(s.PrometheusCollectors()...) // Claim a task that starts later. - task := &backend.StoreTask{ - ID: platform.ID(1), - } - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 99, - EffectiveCron: "@every 1s", - LatestCompleted: 5, + task := &platform.Task{ + ID: platform.ID(1), + Every: "1s", + LatestCompleted: "1970-01-01T00:00:05Z", + Flux: `option task = {concurrency: 99, name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: "o")`, } - d.SetTaskMeta(task.ID, *meta) - if err := s.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + if err := s.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } @@ -871,7 +915,7 @@ func TestScheduler_Stop(t *testing.T) { t.Parallel() e := &fakeWaitExecutor{wait: make(chan struct{})} - o := backend.NewScheduler(mock.NewDesiredState(), e, backend.NopLogWriter{}, 4, backend.WithLogger(zaptest.NewLogger(t))) + o := backend.NewScheduler(mock.NewTaskControlService(), e, 4, backend.WithLogger(zaptest.NewLogger(t))) o.Start(context.Background()) stopped := make(chan struct{}) @@ -904,33 +948,30 @@ func TestScheduler_WithTicker(t *testing.T) { defer cancel() tickFreq := 100 * time.Millisecond - d := mock.NewDesiredState() + tcs := mock.NewTaskControlService() e := mock.NewExecutor() - o := backend.NewScheduler(d, e, backend.NopLogWriter{}, 5, backend.WithLogger(zaptest.NewLogger(t)), backend.WithTicker(ctx, tickFreq)) + o := backend.NewScheduler(tcs, e, 5, backend.WithLogger(zaptest.NewLogger(t)), backend.WithTicker(ctx, tickFreq)) o.Start(ctx) defer o.Stop() - - task := &backend.StoreTask{ - ID: platform.ID(1), - } - createdAt := time.Now().Unix() - meta := &backend.StoreTaskMeta{ - MaxConcurrency: 5, - EffectiveCron: "@every 1s", - LatestCompleted: createdAt, + createdAt := time.Now() + task := &platform.Task{ + ID: platform.ID(1), + Every: "1s", + Flux: `option task = {concurrency: 5, name:"x", every:1m} from(bucket:"a") |> to(bucket:"b", org: 
"o")`, + LatestCompleted: createdAt.Format(time.RFC3339Nano), } - d.SetTaskMeta(task.ID, *meta) - if err := o.ClaimTask(task, meta); err != nil { + tcs.SetTask(task) + if err := o.ClaimTask(context.Background(), task); err != nil { t.Fatal(err) } - for time.Now().Unix() == createdAt { + for time.Now().Unix() == createdAt.Unix() { time.Sleep(tickFreq + 10*time.Millisecond) } - if x, err := d.PollForNumberCreated(task.ID, 1); err != nil { + if x, err := tcs.PollForNumberCreated(task.ID, 1); err != nil { t.Fatalf("expected 1 run queued, but got %d", len(x)) } } diff --git a/task/backend/storetest/storetest.go b/task/backend/storetest/storetest.go index 810fc6ee58..35dc587b01 100644 --- a/task/backend/storetest/storetest.go +++ b/task/backend/storetest/storetest.go @@ -12,6 +12,7 @@ import ( platform "github.com/influxdata/influxdb" "github.com/influxdata/influxdb/snowflake" "github.com/influxdata/influxdb/task/backend" + "github.com/influxdata/influxdb/task/options" ) var idGen = snowflake.NewIDGenerator() @@ -522,8 +523,15 @@ from(bucket:"test") |> range(start:-1h)` if meta.EffectiveCron != "* * * * *" { t.Fatalf("unexpected cron stored in meta: %q", meta.EffectiveCron) } - - if time.Duration(meta.Offset)*time.Second != 5*time.Second { + duration := options.Duration{} + if err := duration.Parse(meta.Offset); err != nil { + t.Fatal(err) + } + dur, err := duration.DurationFrom(time.Now()) // is time.Now() the best option here + if err != nil { + t.Fatal(err) + } + if dur != 5*time.Second { t.Fatalf("unexpected delay stored in meta: %v", meta.Offset) } @@ -683,7 +691,12 @@ from(bucket:"test") |> range(start:-1h)` t.Fatalf("unexpected cron stored in meta: %q", meta.EffectiveCron) } - if time.Duration(meta.Offset)*time.Second != 5*time.Second { + duration := options.Duration{} + + if err := duration.Parse(meta.Offset); err != nil { + t.Fatal(err) + } + if duration.String() != "5s" { t.Fatalf("unexpected delay stored in meta: %v", meta.Offset) } diff --git a/task/backend/task.go b/task/backend/task.go index 77aac6215c..09f9f613c7 100644 --- a/task/backend/task.go +++ b/task/backend/task.go @@ -5,6 +5,7 @@ import ( "time" "github.com/influxdata/influxdb" + "github.com/influxdata/influxdb/task/options" ) // TaskControlService is a low-level controller interface, intended to be passed to @@ -16,6 +17,9 @@ type TaskControlService interface { // If the run's ScheduledFor would be later than the passed-in now, CreateNextRun returns a RunNotYetDueError. CreateNextRun(ctx context.Context, taskID influxdb.ID, now int64) (RunCreation, error) + CurrentlyRunning(ctx context.Context, taskID influxdb.ID) ([]*influxdb.Run, error) + ManualRuns(ctx context.Context, taskID influxdb.ID) ([]*influxdb.Run, error) + // FinishRun removes runID from the list of running tasks and if its `ScheduledFor` is later then last completed update it. FinishRun(ctx context.Context, taskID, runID influxdb.ID) (*influxdb.Run, error) @@ -29,3 +33,213 @@ type TaskControlService interface { // AddRunLog adds a log line to the run. AddRunLog(ctx context.Context, taskID, runID influxdb.ID, when time.Time, log string) error } + +// TaskControlAdaptor creates a TaskControlService for the older TaskStore system. +// TODO(lh): remove task control adaptor when we transition away from Store. +func TaskControlAdaptor(s Store, lw LogWriter, lr LogReader) TaskControlService { + return &taskControlAdaptor{s, lw, lr} +} + +// taskControlAdaptor adapts a Store and log readers and writers to implement the task control service. 
+type taskControlAdaptor struct { + s Store + lw LogWriter + lr LogReader +} + +func (tcs *taskControlAdaptor) CreateNextRun(ctx context.Context, taskID influxdb.ID, now int64) (RunCreation, error) { + return tcs.s.CreateNextRun(ctx, taskID, now) +} + +func (tcs *taskControlAdaptor) FinishRun(ctx context.Context, taskID, runID influxdb.ID) (*influxdb.Run, error) { + // Once we completely switch over to the new system we can look at the returned run in the tests. + task, err := tcs.s.FindTaskByID(ctx, taskID) + if err != nil { + return nil, err + } + tcs.lr.FindRunByID(ctx, task.Org, runID) + + return nil, tcs.s.FinishRun(ctx, taskID, runID) +} + +func (tcs *taskControlAdaptor) CurrentlyRunning(ctx context.Context, taskID influxdb.ID) ([]*influxdb.Run, error) { + t, m, err := tcs.s.FindTaskByIDWithMeta(ctx, taskID) + if err != nil { + return nil, err + } + + var rtn = make([]*influxdb.Run, len(m.CurrentlyRunning)) + for i, cr := range m.CurrentlyRunning { + rtn[i] = &influxdb.Run{ + ID: influxdb.ID(cr.RunID), + TaskID: t.ID, + ScheduledFor: time.Unix(cr.Now, 0).UTC().Format(time.RFC3339), + } + if cr.RequestedAt != 0 { + rtn[i].RequestedAt = time.Unix(cr.RequestedAt, 0).UTC().Format(time.RFC3339) + } + } + return rtn, nil +} + +func (tcs *taskControlAdaptor) ManualRuns(ctx context.Context, taskID influxdb.ID) ([]*influxdb.Run, error) { + t, m, err := tcs.s.FindTaskByIDWithMeta(ctx, taskID) + if err != nil { + return nil, err + } + + var rtn = make([]*influxdb.Run, len(m.ManualRuns)) + for i, cr := range m.ManualRuns { + rtn[i] = &influxdb.Run{ + ID: influxdb.ID(cr.RunID), + TaskID: t.ID, + ScheduledFor: time.Unix(cr.Start, 0).UTC().Format(time.RFC3339), + } + if cr.RequestedAt != 0 { + rtn[i].RequestedAt = time.Unix(cr.RequestedAt, 0).Format(time.RFC3339) + } + } + return rtn, nil +} + +func (tcs *taskControlAdaptor) NextDueRun(ctx context.Context, taskID influxdb.ID) (int64, error) { + m, err := tcs.s.FindTaskMetaByID(ctx, taskID) + if err != nil { + return 0, err + } + return m.NextDueRun() +} + +func (tcs *taskControlAdaptor) UpdateRunState(ctx context.Context, taskID, runID influxdb.ID, when time.Time, state RunStatus) error { + st, m, err := tcs.s.FindTaskByIDWithMeta(ctx, taskID) + if err != nil { + return err + } + var ( + schedFor, reqAt time.Time + ) + // check the log store + r, err := tcs.lr.FindRunByID(ctx, st.Org, runID) + if err == nil && r != nil { + schedFor, err = time.Parse(time.RFC3339, r.ScheduledFor) + if err != nil { + return err + } + if r.RequestedAt != "" { + reqAt, err = time.Parse(time.RFC3339, r.RequestedAt) + if err != nil { + return err + } + } + } + + // in the old system the log store may not have the run until after the first + // state update, so we will need to pull the currently running. 
+ if schedFor.IsZero() { + for _, cr := range m.CurrentlyRunning { + if influxdb.ID(cr.RunID) == runID { + schedFor = time.Unix(cr.Now, 0) + if cr.RequestedAt != 0 { + reqAt = time.Unix(cr.RequestedAt, 0) + } + } + } + } + + rlb := RunLogBase{ + Task: st, + RunID: runID, + RunScheduledFor: schedFor.Unix(), + } + if !reqAt.IsZero() { + rlb.RequestedAt = reqAt.Unix() + } + if err := tcs.lw.UpdateRunState(ctx, rlb, when, state); err != nil { + return err + } + return nil +} + +func (tcs *taskControlAdaptor) AddRunLog(ctx context.Context, taskID, runID influxdb.ID, when time.Time, log string) error { + st, m, err := tcs.s.FindTaskByIDWithMeta(ctx, taskID) + if err != nil { + return err + } + + var ( + schedFor, reqAt time.Time + ) + + r, err := tcs.lr.FindRunByID(ctx, st.Org, runID) + if err == nil && r != nil { + schedFor, err = time.Parse(time.RFC3339, r.ScheduledFor) + if err != nil { + return err + } + if r.RequestedAt != "" { + reqAt, err = time.Parse(time.RFC3339, r.RequestedAt) + if err != nil { + return err + } + } + } + + // in the old system the log store may not have the run until after the first + // state update, so we will need to pull the currently running. + if schedFor.IsZero() { + for _, cr := range m.CurrentlyRunning { + if influxdb.ID(cr.RunID) == runID { + schedFor = time.Unix(cr.Now, 0) + if cr.RequestedAt != 0 { + reqAt = time.Unix(cr.RequestedAt, 0) + } + } + } + } + + rlb := RunLogBase{ + Task: st, + RunID: runID, + RunScheduledFor: schedFor.Unix(), + } + if !reqAt.IsZero() { + rlb.RequestedAt = reqAt.Unix() + } + return tcs.lw.AddRunLog(ctx, rlb, when, log) +} + +// ToInfluxTask converts a backend tas and meta to a influxdb.Task +// TODO(lh): remove this when we no longer need the backend store. +func ToInfluxTask(t *StoreTask, m *StoreTaskMeta) (*influxdb.Task, error) { + opts, err := options.FromScript(t.Script) + if err != nil { + return nil, err + } + + pt := &influxdb.Task{ + ID: t.ID, + OrganizationID: t.Org, + Name: t.Name, + Flux: t.Script, + Cron: opts.Cron, + AuthorizationID: influxdb.ID(m.AuthorizationID), + } + if !opts.Every.IsZero() { + pt.Every = opts.Every.String() + } + if opts.Offset != nil && !opts.Offset.IsZero() { + pt.Offset = opts.Offset.String() + } + if m != nil { + pt.Status = string(m.Status) + pt.LatestCompleted = time.Unix(m.LatestCompleted, 0).UTC().Format(time.RFC3339) + if m.CreatedAt != 0 { + pt.CreatedAt = time.Unix(m.CreatedAt, 0).UTC().Format(time.RFC3339) + } + if m.UpdatedAt != 0 { + pt.UpdatedAt = time.Unix(m.UpdatedAt, 0).UTC().Format(time.RFC3339) + } + pt.AuthorizationID = influxdb.ID(m.AuthorizationID) + } + return pt, nil +} diff --git a/task/mock/scheduler.go b/task/mock/scheduler.go index de36f3f104..eb9856f76e 100644 --- a/task/mock/scheduler.go +++ b/task/mock/scheduler.go @@ -3,16 +3,13 @@ package mock import ( "context" - "errors" "fmt" - "strings" "sync" "time" "github.com/influxdata/flux" platform "github.com/influxdata/influxdb" "github.com/influxdata/influxdb/task/backend" - scheduler "github.com/influxdata/influxdb/task/backend" "go.uber.org/zap" ) @@ -22,28 +19,19 @@ type Scheduler struct { lastTick int64 - claims map[string]*Task - meta map[string]backend.StoreTaskMeta + claims map[platform.ID]*platform.Task - createChan chan *Task - releaseChan chan *Task - updateChan chan *Task + createChan chan *platform.Task + releaseChan chan *platform.Task + updateChan chan *platform.Task claimError error releaseError error } -// Task is a mock implementation of a task. 
-type Task struct { - Script string - StartExecution int64 - ConcurrencyLimit uint8 -} - func NewScheduler() *Scheduler { return &Scheduler{ - claims: map[string]*Task{}, - meta: map[string]backend.StoreTaskMeta{}, + claims: map[platform.ID]*platform.Task{}, } } @@ -60,7 +48,7 @@ func (s *Scheduler) Start(context.Context) {} func (s *Scheduler) Stop() {} -func (s *Scheduler) ClaimTask(task *backend.StoreTask, meta *backend.StoreTaskMeta) error { +func (s *Scheduler) ClaimTask(_ context.Context, task *platform.Task) error { if s.claimError != nil { return s.claimError } @@ -68,40 +56,33 @@ func (s *Scheduler) ClaimTask(task *backend.StoreTask, meta *backend.StoreTaskMe s.Lock() defer s.Unlock() - _, ok := s.claims[task.ID.String()] + _, ok := s.claims[task.ID] if ok { return backend.ErrTaskAlreadyClaimed } - s.meta[task.ID.String()] = *meta - t := &Task{Script: task.Script, StartExecution: meta.LatestCompleted, ConcurrencyLimit: uint8(meta.MaxConcurrency)} - - s.claims[task.ID.String()] = t + s.claims[task.ID] = task if s.createChan != nil { - s.createChan <- t + s.createChan <- task } return nil } -func (s *Scheduler) UpdateTask(task *backend.StoreTask, meta *backend.StoreTaskMeta) error { +func (s *Scheduler) UpdateTask(_ context.Context, task *platform.Task) error { s.Lock() defer s.Unlock() - _, ok := s.claims[task.ID.String()] + _, ok := s.claims[task.ID] if !ok { return backend.ErrTaskNotClaimed } - s.meta[task.ID.String()] = *meta - - t := &Task{Script: task.Script, StartExecution: meta.LatestCompleted, ConcurrencyLimit: uint8(meta.MaxConcurrency)} - - s.claims[task.ID.String()] = t + s.claims[task.ID] = task if s.updateChan != nil { - s.updateChan <- t + s.updateChan <- task } return nil @@ -115,7 +96,7 @@ func (s *Scheduler) ReleaseTask(taskID platform.ID) error { s.Lock() defer s.Unlock() - t, ok := s.claims[taskID.String()] + t, ok := s.claims[taskID] if !ok { return backend.ErrTaskNotClaimed } @@ -123,28 +104,27 @@ func (s *Scheduler) ReleaseTask(taskID platform.ID) error { s.releaseChan <- t } - delete(s.claims, taskID.String()) - delete(s.meta, taskID.String()) + delete(s.claims, taskID) return nil } -func (s *Scheduler) TaskFor(id platform.ID) *Task { +func (s *Scheduler) TaskFor(id platform.ID) *platform.Task { s.Lock() defer s.Unlock() - return s.claims[id.String()] + return s.claims[id] } -func (s *Scheduler) TaskCreateChan() <-chan *Task { - s.createChan = make(chan *Task, 10) +func (s *Scheduler) TaskCreateChan() <-chan *platform.Task { + s.createChan = make(chan *platform.Task, 10) return s.createChan } -func (s *Scheduler) TaskReleaseChan() <-chan *Task { - s.releaseChan = make(chan *Task, 10) +func (s *Scheduler) TaskReleaseChan() <-chan *platform.Task { + s.releaseChan = make(chan *platform.Task, 10) return s.releaseChan } -func (s *Scheduler) TaskUpdateChan() <-chan *Task { - s.updateChan = make(chan *Task, 10) +func (s *Scheduler) TaskUpdateChan() <-chan *platform.Task { + s.updateChan = make(chan *platform.Task, 10) return s.updateChan } @@ -162,134 +142,6 @@ func (s *Scheduler) CancelRun(_ context.Context, taskID, runID platform.ID) erro return nil } -// DesiredState is a mock implementation of DesiredState (used by NewScheduler). -type DesiredState struct { - mu sync.Mutex - // Map of stringified task ID to last ID used for run. - runIDs map[string]uint64 - - // Map of stringified, concatenated task and platform ID, to runs that have been created. - created map[string]backend.QueuedRun - - // Map of stringified task ID to task meta. 
- meta map[string]backend.StoreTaskMeta - - // Map of task ID to total number of runs created for that task. - totalRunsCreated map[platform.ID]int -} - -var _ backend.DesiredState = (*DesiredState)(nil) - -func NewDesiredState() *DesiredState { - return &DesiredState{ - runIDs: make(map[string]uint64), - created: make(map[string]backend.QueuedRun), - meta: make(map[string]backend.StoreTaskMeta), - totalRunsCreated: make(map[platform.ID]int), - } -} - -// SetTaskMeta sets the task meta for the given task ID. -// SetTaskMeta must be called before CreateNextRun, for a given task ID. -func (d *DesiredState) SetTaskMeta(taskID platform.ID, meta backend.StoreTaskMeta) { - d.mu.Lock() - defer d.mu.Unlock() - - d.meta[taskID.String()] = meta -} - -// CreateNextRun creates the next run for the given task. -// Refer to the documentation for SetTaskPeriod to understand how the times are determined. -func (d *DesiredState) CreateNextRun(_ context.Context, taskID platform.ID, now int64) (backend.RunCreation, error) { - d.mu.Lock() - defer d.mu.Unlock() - if !taskID.Valid() { - return backend.RunCreation{}, errors.New("invalid task id") - } - tid := taskID.String() - - meta, ok := d.meta[tid] - if !ok { - panic(fmt.Sprintf("meta not set for task with ID %s", tid)) - } - - makeID := func() (platform.ID, error) { - d.runIDs[tid]++ - runID := platform.ID(d.runIDs[tid]) - return runID, nil - } - - rc, err := meta.CreateNextRun(now, makeID) - if err != nil { - return backend.RunCreation{}, err - } - d.meta[tid] = meta - rc.Created.TaskID = taskID - d.created[tid+rc.Created.RunID.String()] = rc.Created - d.totalRunsCreated[taskID]++ - return rc, nil -} - -func (d *DesiredState) FinishRun(_ context.Context, taskID, runID platform.ID) error { - d.mu.Lock() - defer d.mu.Unlock() - - tid := taskID.String() - rid := runID.String() - m := d.meta[tid] - if !m.FinishRun(runID) { - var knownIDs []string - for _, r := range m.CurrentlyRunning { - knownIDs = append(knownIDs, platform.ID(r.RunID).String()) - } - return fmt.Errorf("unknown run ID %s; known run IDs: %s", rid, strings.Join(knownIDs, ", ")) - } - d.meta[tid] = m - delete(d.created, tid+rid) - return nil -} - -func (d *DesiredState) CreatedFor(taskID platform.ID) []backend.QueuedRun { - d.mu.Lock() - defer d.mu.Unlock() - - var qrs []backend.QueuedRun - for _, qr := range d.created { - if qr.TaskID == taskID { - qrs = append(qrs, qr) - } - } - - return qrs -} - -// TotalRunsCreatedForTask returns the number of runs created for taskID. -func (d *DesiredState) TotalRunsCreatedForTask(taskID platform.ID) int { - d.mu.Lock() - defer d.mu.Unlock() - - return d.totalRunsCreated[taskID] -} - -// PollForNumberCreated blocks for a small amount of time waiting for exactly the given count of created and unfinished runs for the given task ID. -// If the expected number isn't found in time, it returns an error. -// -// Because the scheduler and executor do a lot of state changes asynchronously, this is useful in test. -func (d *DesiredState) PollForNumberCreated(taskID platform.ID, count int) ([]scheduler.QueuedRun, error) { - const numAttempts = 50 - actualCount := 0 - var created []scheduler.QueuedRun - for i := 0; i < numAttempts; i++ { - time.Sleep(2 * time.Millisecond) // we sleep even on first so it becomes more likely that we catch when too many are produced. 
- created = d.CreatedFor(taskID) - actualCount = len(created) - if actualCount == count { - return created, nil - } - } - return created, fmt.Errorf("did not see count of %d created run(s) for task with ID %s in time, instead saw %d", count, taskID.String(), actualCount) // we return created anyways, to make it easier to debug -} - type Executor struct { mu sync.Mutex hangingFor time.Duration @@ -385,7 +237,7 @@ func (e *Executor) PollForNumberRunning(taskID platform.ID, count int) ([]*RunPr return running, nil } } - return nil, fmt.Errorf("did not see count of %d running task(s) for ID %s in time; last count was %d", count, taskID.String(), len(running)) + return nil, fmt.Errorf("did not see count of %d running task(s) for ID %s in time; last count was %d", count, taskID, len(running)) } // RunPromise is a mock RunPromise. diff --git a/task/mock/task_control_service.go b/task/mock/task_control_service.go new file mode 100644 index 0000000000..496f3c39d4 --- /dev/null +++ b/task/mock/task_control_service.go @@ -0,0 +1,346 @@ +package mock + +import ( + "context" + "errors" + "fmt" + "sort" + "sync" + "time" + + "github.com/influxdata/influxdb" + "github.com/influxdata/influxdb/snowflake" + "github.com/influxdata/influxdb/task/backend" + cron "gopkg.in/robfig/cron.v2" +) + +var idgen = snowflake.NewDefaultIDGenerator() + +// TaskControlService is a mock implementation of TaskControlService (used by NewScheduler). +type TaskControlService struct { + mu sync.Mutex + // Map of stringified task ID to last ID used for run. + runs map[influxdb.ID]map[influxdb.ID]*influxdb.Run + + // Map of stringified, concatenated task and platform ID, to runs that have been created. + created map[string]backend.QueuedRun + + // Map of stringified task ID to task meta. + tasks map[influxdb.ID]*influxdb.Task + manualRuns []*influxdb.Run + // Map of task ID to total number of runs created for that task. + totalRunsCreated map[influxdb.ID]int + finishedRuns map[influxdb.ID]*influxdb.Run +} + +var _ backend.TaskControlService = (*TaskControlService)(nil) + +func NewTaskControlService() *TaskControlService { + return &TaskControlService{ + runs: make(map[influxdb.ID]map[influxdb.ID]*influxdb.Run), + finishedRuns: make(map[influxdb.ID]*influxdb.Run), + tasks: make(map[influxdb.ID]*influxdb.Task), + created: make(map[string]backend.QueuedRun), + totalRunsCreated: make(map[influxdb.ID]int), + } +} + +// SetTask sets the task. +// SetTask must be called before CreateNextRun, for a given task ID. +func (d *TaskControlService) SetTask(task *influxdb.Task) { + d.mu.Lock() + defer d.mu.Unlock() + + d.tasks[task.ID] = task +} + +func (d *TaskControlService) SetManualRuns(runs []*influxdb.Run) { + d.manualRuns = runs +} + +// CreateNextRun creates the next run for the given task. +// Refer to the documentation for SetTaskPeriod to understand how the times are determined. 
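As a brief orienting sketch (illustrative only, not part of this change set): a test registers a task and then drives runs through the mock roughly as below. The task values, ctx, and t are assumptions for the sake of the example.

	tcs := mock.NewTaskControlService()
	tcs.SetTask(&influxdb.Task{ID: 1, Every: "1m"}) // SetTask must precede CreateNextRun for this task ID
	rc, err := tcs.CreateNextRun(ctx, 1, time.Now().Unix())
	if err != nil {
		t.Fatal(err)
	}
	// Finishing the run advances the task's LatestCompleted inside the mock.
	if _, err := tcs.FinishRun(ctx, 1, rc.Created.RunID); err != nil {
		t.Fatal(err)
	}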
+func (d *TaskControlService) CreateNextRun(ctx context.Context, taskID influxdb.ID, now int64) (backend.RunCreation, error) { + d.mu.Lock() + defer d.mu.Unlock() + if !taskID.Valid() { + return backend.RunCreation{}, errors.New("invalid task id") + } + tid := taskID + + task, ok := d.tasks[tid] + if !ok { + panic(fmt.Sprintf("meta not set for task with ID %s", tid)) + } + + if len(d.manualRuns) != 0 { + run := d.manualRuns[0] + d.manualRuns = d.manualRuns[1:] + runs, ok := d.runs[tid] + if !ok { + runs = make(map[influxdb.ID]*influxdb.Run) + } + runs[run.ID] = run + d.runs[task.ID] = runs + now, err := time.Parse(time.RFC3339, run.ScheduledFor) + next, _ := d.nextDueRun(ctx, taskID) + if err == nil { + rc := backend.RunCreation{ + Created: backend.QueuedRun{ + TaskID: task.ID, + RunID: run.ID, + Now: now.Unix(), + }, + NextDue: next, + HasQueue: len(d.manualRuns) != 0, + } + d.created[tid.String()+rc.Created.RunID.String()] = rc.Created + d.totalRunsCreated[taskID]++ + return rc, nil + } + } + + rc, err := d.createNextRun(task, now) + if err != nil { + return backend.RunCreation{}, err + } + rc.Created.TaskID = taskID + d.created[tid.String()+rc.Created.RunID.String()] = rc.Created + d.totalRunsCreated[taskID]++ + return rc, nil +} + +func (t *TaskControlService) createNextRun(task *influxdb.Task, now int64) (backend.RunCreation, error) { + sch, err := cron.Parse(task.EffectiveCron()) + if err != nil { + return backend.RunCreation{}, err + } + latest := int64(0) + lt, err := time.Parse(time.RFC3339, task.LatestCompleted) + if err == nil { + latest = lt.Unix() + } + for _, r := range t.runs[task.ID] { + rt, err := time.Parse(time.RFC3339, r.ScheduledFor) + if err == nil { + if rt.Unix() > latest { + latest = rt.Unix() + } + } + } + + nextScheduled := sch.Next(time.Unix(latest, 0)) + nextScheduledUnix := nextScheduled.Unix() + offset := int64(0) + if task.Offset != "" { + toff, err := time.ParseDuration(task.Offset) + if err == nil { + offset = toff.Nanoseconds() + } + } + if dueAt := nextScheduledUnix + int64(offset); dueAt > now { + return backend.RunCreation{}, backend.RunNotYetDueError{DueAt: dueAt} + } + + runID := idgen.ID() + runs, ok := t.runs[task.ID] + if !ok { + runs = make(map[influxdb.ID]*influxdb.Run) + } + runs[runID] = &influxdb.Run{ + ID: runID, + ScheduledFor: nextScheduled.Format(time.RFC3339), + } + t.runs[task.ID] = runs + + return backend.RunCreation{ + Created: backend.QueuedRun{ + RunID: runID, + Now: nextScheduledUnix, + }, + NextDue: sch.Next(nextScheduled).Unix() + offset, + HasQueue: false, + }, nil +} + +func (d *TaskControlService) FinishRun(_ context.Context, taskID, runID influxdb.ID) (*influxdb.Run, error) { + d.mu.Lock() + defer d.mu.Unlock() + + tid := taskID + rid := runID + r := d.runs[tid][rid] + delete(d.runs[tid], rid) + t := d.tasks[tid] + schedFor, err := time.Parse(time.RFC3339, r.ScheduledFor) + if err != nil { + return nil, err + } + var latest time.Time + if t.LatestCompleted != "" { + latest, err = time.Parse(time.RFC3339, t.LatestCompleted) + if err != nil { + return nil, err + } + } + if schedFor.After(latest) { + t.LatestCompleted = r.ScheduledFor + } + d.finishedRuns[rid] = r + delete(d.created, tid.String()+rid.String()) + return r, nil +} + +func (t *TaskControlService) CurrentlyRunning(ctx context.Context, taskID influxdb.ID) ([]*influxdb.Run, error) { + t.mu.Lock() + defer t.mu.Unlock() + rtn := []*influxdb.Run{} + for _, run := range t.runs[taskID] { + rtn = append(rtn, run) + } + return rtn, nil +} + +func (t *TaskControlService) 
ManualRuns(ctx context.Context, taskID influxdb.ID) ([]*influxdb.Run, error) { + t.mu.Lock() + defer t.mu.Unlock() + + if t.manualRuns != nil { + return t.manualRuns, nil + } + return []*influxdb.Run{}, nil +} + +// NextDueRun returns the Unix timestamp of when the next call to CreateNextRun will be ready. +// The returned timestamp reflects the task's offset, so it does not necessarily exactly match the schedule time. +func (d *TaskControlService) NextDueRun(ctx context.Context, taskID influxdb.ID) (int64, error) { + d.mu.Lock() + defer d.mu.Unlock() + + return d.nextDueRun(ctx, taskID) +} + +func (d *TaskControlService) nextDueRun(ctx context.Context, taskID influxdb.ID) (int64, error) { + task := d.tasks[taskID] + sch, err := cron.Parse(task.EffectiveCron()) + if err != nil { + return 0, err + } + latest := int64(0) + lt, err := time.Parse(time.RFC3339, task.LatestCompleted) + if err == nil { + latest = lt.Unix() + } + + for _, r := range d.runs[task.ID] { + rt, err := time.Parse(time.RFC3339, r.ScheduledFor) + if err == nil { + if rt.Unix() > latest { + latest = rt.Unix() + } + } + } + + nextScheduled := sch.Next(time.Unix(latest, 0)) + nextScheduledUnix := nextScheduled.Unix() + offset := int64(0) + if task.Offset != "" { + toff, err := time.ParseDuration(task.Offset) + if err == nil { + offset = toff.Nanoseconds() + } + } + + return nextScheduledUnix + int64(offset), nil +} + +// UpdateRunState sets the run state at the respective time. +func (d *TaskControlService) UpdateRunState(ctx context.Context, taskID, runID influxdb.ID, when time.Time, state backend.RunStatus) error { + d.mu.Lock() + defer d.mu.Unlock() + + run := d.runs[taskID][runID] + switch state { + case backend.RunStarted: + run.StartedAt = when.Format(time.RFC3339Nano) + case backend.RunSuccess, backend.RunFail, backend.RunCanceled: + run.FinishedAt = when.Format(time.RFC3339Nano) + case backend.RunScheduled: + // nothing + default: + panic("invalid status") + } + run.Status = state.String() + return nil +} + +// AddRunLog adds a log line to the run. +func (d *TaskControlService) AddRunLog(ctx context.Context, taskID, runID influxdb.ID, when time.Time, log string) error { + d.mu.Lock() + defer d.mu.Unlock() + + run := d.runs[taskID][runID] + if run == nil { + panic("cannot add a log to a non existant run") + } + run.Log = append(run.Log, influxdb.Log{Time: when.Format(time.RFC3339Nano), Message: log}) + return nil +} + +func (d *TaskControlService) CreatedFor(taskID influxdb.ID) []backend.QueuedRun { + d.mu.Lock() + defer d.mu.Unlock() + + var qrs []backend.QueuedRun + for _, qr := range d.created { + if qr.TaskID == taskID { + qrs = append(qrs, qr) + } + } + + return qrs +} + +// TotalRunsCreatedForTask returns the number of runs created for taskID. +func (d *TaskControlService) TotalRunsCreatedForTask(taskID influxdb.ID) int { + d.mu.Lock() + defer d.mu.Unlock() + + return d.totalRunsCreated[taskID] +} + +// PollForNumberCreated blocks for a small amount of time waiting for exactly the given count of created and unfinished runs for the given task ID. +// If the expected number isn't found in time, it returns an error. +// +// Because the scheduler and executor do a lot of state changes asynchronously, this is useful in test. 
+func (d *TaskControlService) PollForNumberCreated(taskID influxdb.ID, count int) ([]backend.QueuedRun, error) {
+	const numAttempts = 50
+	actualCount := 0
+	var created []backend.QueuedRun
+	for i := 0; i < numAttempts; i++ {
+		time.Sleep(2 * time.Millisecond) // we sleep even on first so it becomes more likely that we catch when too many are produced.
+		created = d.CreatedFor(taskID)
+		actualCount = len(created)
+		if actualCount == count {
+			return created, nil
+		}
+	}
+	return created, fmt.Errorf("did not see count of %d created run(s) for task with ID %s in time, instead saw %d", count, taskID, actualCount) // we return created anyways, to make it easier to debug
+}
+
+func (d *TaskControlService) FinishedRun(runID influxdb.ID) *influxdb.Run {
+	d.mu.Lock()
+	defer d.mu.Unlock()
+
+	return d.finishedRuns[runID]
+}
+
+func (d *TaskControlService) FinishedRuns() []*influxdb.Run {
+	rtn := []*influxdb.Run{}
+	for _, run := range d.finishedRuns {
+		rtn = append(rtn, run)
+	}
+
+	sort.Slice(rtn, func(i, j int) bool { return rtn[i].ScheduledFor < rtn[j].ScheduledFor })
+	return rtn
+}
diff --git a/task/options/options.go b/task/options/options.go
index 8ca6dba96c..b6e0e1e592 100644
--- a/task/options/options.go
+++ b/task/options/options.go
@@ -8,6 +8,10 @@ import (
 	"sync"
 	"time"
 
+	"github.com/influxdata/flux/parser"
+
+	"github.com/influxdata/flux/ast"
+
 	"github.com/influxdata/flux"
 	"github.com/influxdata/flux/semantic"
 	"github.com/influxdata/flux/values"
@@ -40,31 +44,111 @@ type Options struct {
 
 	// Every represents a fixed period to repeat execution.
 	// this can be unmarshaled from json as a string i.e.: "1d" will unmarshal as 1 day
-	Every time.Duration `json:"every,omitempty"`
+	Every Duration `json:"every,omitempty"`
 
 	// Offset represents a delay before execution.
 	// this can be unmarshaled from json as a string i.e.: "1d" will unmarshal as 1 day
-	Offset *time.Duration `json:"offset,omitempty"`
+	Offset *Duration `json:"offset,omitempty"`
 
 	Concurrency *int64 `json:"concurrency,omitempty"`
 
 	Retry *int64 `json:"retry,omitempty"`
 }
 
+// Duration is a time span that supports the same units as the flux parser's time duration, as well as negative length time spans.
+type Duration struct {
+	Node ast.DurationLiteral
+}
+
+func (a Duration) String() string {
+	return ast.Format(&a.Node)
+}
+
+// Parse parses a string into a Duration.
+func (a *Duration) Parse(s string) error {
+	q, err := parseSignedDuration(s)
+	if err != nil {
+		return err
+	}
+	a.Node = *q
+	return nil
+}
+
+// MustParseDuration parses a string and returns a duration. It panics if there is an error.
+func MustParseDuration(s string) (dur *Duration) {
+	dur = &Duration{}
+	if err := dur.Parse(s); err != nil {
+		panic(err)
+	}
+	return dur
+}
+
+// parseSignedDuration is a helper wrapper around parser.ParseSignedDuration.
+// We use it because we need to clear the basenode, but flux does not.
+func parseSignedDuration(text string) (*ast.DurationLiteral, error) {
+	q, err := parser.ParseSignedDuration(text)
+	if err != nil {
+		return nil, err
+	}
+	q.BaseNode = ast.BaseNode{}
+	return q, err
+}
+
+// UnmarshalText unmarshals text into a Duration.
+func (a *Duration) UnmarshalText(text []byte) error {
+	q, err := parseSignedDuration(string(text))
+	if err != nil {
+		return err
+	}
+	a.Node = *q
+	return nil
+}
+
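For reviewers, a small usage sketch of the new Duration type; the values are illustrative and it assumes the options, fmt, and time packages are imported. Negative spans such as "-1m" parse the same way.

	d := options.MustParseDuration("1h10m3s")
	text, _ := d.MarshalText()            // "1h10m3s"
	span, _ := d.DurationFrom(time.Now()) // 1h10m3s as a time.Duration, relative to now
	fmt.Println(string(text), span)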
+// MarshalText marshals the Duration into its text form.
+func (a Duration) MarshalText() ([]byte, error) {
+	return []byte(a.String()), nil
+}
+
+// IsZero checks if each segment of the duration is zero; it doesn't check if the Duration sums to zero, just if each internal duration is zero.
+func (a *Duration) IsZero() bool {
+	for i := range a.Node.Values {
+		if a.Node.Values[i].Magnitude != 0 {
+			return false
+		}
+	}
+	return true
+}
+
+// DurationFrom gives us a time.Duration from a time.
+// Currently because of how flux works, this is just an approximation for any time unit larger than hours.
+func (a *Duration) DurationFrom(t time.Time) (time.Duration, error) {
+	return ast.DurationFrom(&a.Node, t)
+}
+
+// Add adds the duration to a time.
+func (a *Duration) Add(t time.Time) (time.Time, error) {
+	d, err := ast.DurationFrom(&a.Node, t)
+	if err != nil {
+		return time.Time{}, err
+	}
+	return t.Add(d), nil
+}
+
 // Clear clears out all options in the options struct, it us useful if you wish to reuse it.
 func (o *Options) Clear() {
 	o.Name = ""
 	o.Cron = ""
-	o.Every = 0
+	o.Every = Duration{}
 	o.Offset = nil
 	o.Concurrency = nil
 	o.Retry = nil
 }
 
+// IsZero tells us if the options have been zeroed out.
 func (o *Options) IsZero() bool {
 	return o.Name == "" &&
 		o.Cron == "" &&
-		o.Every == 0 &&
+		o.Every.IsZero() &&
 		o.Offset == nil &&
 		o.Concurrency == nil &&
 		o.Retry == nil
@@ -80,6 +164,50 @@ const (
 	optRetry = "retry"
 )
 
+// contains is a helper function to see if an array of strings contains a string
+func contains(s []string, e string) bool {
+	for i := range s {
+		if s[i] == e {
+			return true
+		}
+	}
+	return false
+}
+
+func grabTaskOptionAST(p *ast.Package, keys ...string) map[string]ast.Expression {
+	res := make(map[string]ast.Expression, 2) // we preallocate two keys for the map, as that is how many we will use at maximum (offset and every)
+	for i := range p.Files {
+		for j := range p.Files[i].Body {
+			if p.Files[i].Body[j].Type() != "OptionStatement" {
+				continue
+			}
+			opt := (p.Files[i].Body[j]).(*ast.OptionStatement)
+			if opt.Assignment.Type() != "VariableAssignment" {
+				continue
+			}
+			asmt, ok := opt.Assignment.(*ast.VariableAssignment)
+			if !ok {
+				continue
+			}
+			if asmt.ID.Key() != "task" {
+				continue
+			}
+			ae, ok := asmt.Init.(*ast.ObjectExpression)
+			if !ok {
+				continue
+			}
+			for k := range ae.Properties {
+				prop := ae.Properties[k]
+				if key := prop.Key.Key(); prop != nil && contains(keys, key) {
+					res[key] = prop.Value
+				}
+			}
+			return res
+		}
+	}
+	return res
+}
+
 // FromScript extracts Options from a Flux script.
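Similarly, a short sketch of FromScript yielding the Duration-typed Every and Offset; the script and values below are illustrative assumptions shaped like the scripts used in options_test.go.

	script := `option task = {name: "example", every: 1h30m, offset: 10m}
	from(bucket: "b") |> range(start: -1h)`

	opts, err := options.FromScript(script)
	if err != nil {
		// handle the parse error
	}
	fmt.Println(opts.Every.String(), opts.Offset.String()) // "1h30m" "10m"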
func FromScript(script string) (Options, error) { if optionCache != nil { @@ -93,7 +221,12 @@ func FromScript(script string) (Options, error) { } opt := Options{Retry: pointer.Int64(1), Concurrency: pointer.Int64(1)} - _, scope, err := flux.Eval(script) + fluxAST, err := flux.Parse(script) + if err != nil { + return opt, err + } + durTypes := grabTaskOptionAST(fluxAST, optEvery, optOffset) + _, scope, err := flux.EvalAST(fluxAST) if err != nil { return opt, err } @@ -103,6 +236,10 @@ func FromScript(script string) (Options, error) { if !ok { return opt, errors.New("missing required option: 'task'") } + // check to make sure task is an object + if err := checkNature(task.PolyType().Nature(), semantic.Object); err != nil { + return opt, err + } optObject := task.Object() if err := validateOptionNames(optObject); err != nil { return opt, err @@ -138,14 +275,39 @@ func FromScript(script string) (Options, error) { if err := checkNature(everyVal.PolyType().Nature(), semantic.Duration); err != nil { return opt, err } - opt.Every = everyVal.Duration().Duration() + dur, ok := durTypes["every"] + if !ok || dur == nil { + return opt, errors.New("failed to parse `every` in task") + } + durNode, err := parseSignedDuration(dur.Location().Source) + if err != nil { + return opt, err + } + if !ok || durNode == nil { + return opt, errors.New("failed to parse `every` in task") + } + durNode.BaseNode = ast.BaseNode{} + opt.Every.Node = *durNode } if offsetVal, ok := optObject.Get(optOffset); ok { if err := checkNature(offsetVal.PolyType().Nature(), semantic.Duration); err != nil { return opt, err } - opt.Offset = pointer.Duration(offsetVal.Duration().Duration()) + dur, ok := durTypes["offset"] + if !ok || dur == nil { + return opt, errors.New("failed to parse `offset` in task") + } + durNode, err := parseSignedDuration(dur.Location().Source) + if err != nil { + return opt, err + } + if !ok || durNode == nil { + return opt, errors.New("failed to parse `offset` in task") + } + durNode.BaseNode = ast.BaseNode{} + opt.Offset = &Duration{} + opt.Offset.Node = *durNode } if concurrencyVal, ok := optObject.Get(optConcurrency); ok { @@ -177,13 +339,14 @@ func FromScript(script string) (Options, error) { // Validate returns an error if the options aren't valid. func (o *Options) Validate() error { + now := time.Now() var errs []string if o.Name == "" { errs = append(errs, "name required") } cronPresent := o.Cron != "" - everyPresent := o.Every != 0 + everyPresent := !o.Every.IsZero() if cronPresent == everyPresent { // They're both present or both missing. errs = append(errs, "must specify exactly one of either cron or every") @@ -193,16 +356,25 @@ func (o *Options) Validate() error { errs = append(errs, "cron invalid: "+err.Error()) } } else if everyPresent { - if o.Every < time.Second { + every, err := o.Every.DurationFrom(now) + if err != nil { + return err + } + if every < time.Second { errs = append(errs, "every option must be at least 1 second") - } else if o.Every.Truncate(time.Second) != o.Every { + } else if every.Truncate(time.Second) != every { errs = append(errs, "every option must be expressible as whole seconds") } } - - if o.Offset != nil && o.Offset.Truncate(time.Second) != *o.Offset { - // For now, allowing negative offset delays. Maybe they're useful for forecasting? 
- errs = append(errs, "offset option must be expressible as whole seconds") + if o.Offset != nil { + offset, err := o.Offset.DurationFrom(now) + if err != nil { + return err + } + if offset.Truncate(time.Second) != offset { + // For now, allowing negative offset delays. Maybe they're useful for forecasting? + errs = append(errs, "offset option must be expressible as whole seconds") + } } if o.Concurrency != nil { if *o.Concurrency < 1 { @@ -231,11 +403,15 @@ func (o *Options) Validate() error { // If the every option was specified, it is converted into a cron string using "@every". // Otherwise, the empty string is returned. // The value of the offset option is not considered. +// TODO(docmerlin): create an EffectiveCronStringFrom(t time.Time) string, +// that works from a unit of time. +// Do not use this if you haven't checked for validity already. func (o *Options) EffectiveCronString() string { if o.Cron != "" { return o.Cron } - if o.Every > 0 { + every, _ := o.Every.DurationFrom(time.Now()) // we can ignore errors here because we have alreach checked for validity. + if every > 0 { return "@every " + o.Every.String() } return "" diff --git a/task/options/options_test.go b/task/options/options_test.go index 5f77f51658..e2a96b675f 100644 --- a/task/options/options_test.go +++ b/task/options/options_test.go @@ -21,10 +21,10 @@ func scriptGenerator(opt options.Options, body string) string { if opt.Cron != "" { taskData = fmt.Sprintf("%s cron: %q,\n", taskData, opt.Cron) } - if opt.Every != 0 { + if !opt.Every.IsZero() { taskData = fmt.Sprintf("%s every: %s,\n", taskData, opt.Every.String()) } - if opt.Offset != nil && *opt.Offset != 0 { + if opt.Offset != nil && !(*opt.Offset).IsZero() { taskData = fmt.Sprintf("%s offset: %s,\n", taskData, opt.Offset.String()) } if opt.Concurrency != nil && *opt.Concurrency != 0 { @@ -45,20 +45,36 @@ func scriptGenerator(opt options.Options, body string) string { %s`, taskData, body) } +func TestNegDurations(t *testing.T) { + dur := options.MustParseDuration("-1m") + d, err := dur.DurationFrom(time.Now()) + if err != nil { + t.Fatal(err) + } + if d != -time.Minute { + t.Fatalf("expected duration to be -1m but was %s", d) + } +} + func TestFromScript(t *testing.T) { for _, c := range []struct { script string exp options.Options shouldErr bool }{ - {script: scriptGenerator(options.Options{Name: "name0", Cron: "* * * * *", Concurrency: pointer.Int64(2), Retry: pointer.Int64(3), Offset: pointer.Duration(-time.Minute)}, ""), exp: options.Options{Name: "name0", Cron: "* * * * *", Concurrency: pointer.Int64(2), Retry: pointer.Int64(3), Offset: pointer.Duration(-time.Minute)}}, - {script: scriptGenerator(options.Options{Name: "name1", Every: 5 * time.Second}, ""), exp: options.Options{Name: "name1", Every: 5 * time.Second, Concurrency: pointer.Int64(1), Retry: pointer.Int64(1)}}, + {script: scriptGenerator(options.Options{Name: "name0", Cron: "* * * * *", Concurrency: pointer.Int64(2), Retry: pointer.Int64(3), Offset: options.MustParseDuration("-1m")}, ""), + exp: options.Options{Name: "name0", + Cron: "* * * * *", + Concurrency: pointer.Int64(2), + Retry: pointer.Int64(3), + Offset: options.MustParseDuration("-1m")}}, + {script: scriptGenerator(options.Options{Name: "name1", Every: *(options.MustParseDuration("5s"))}, ""), exp: options.Options{Name: "name1", Every: *(options.MustParseDuration("5s")), Concurrency: pointer.Int64(1), Retry: pointer.Int64(1)}}, {script: scriptGenerator(options.Options{Name: "name2", Cron: "* * * * *"}, ""), exp: 
options.Options{Name: "name2", Cron: "* * * * *", Concurrency: pointer.Int64(1), Retry: pointer.Int64(1)}}, - {script: scriptGenerator(options.Options{Name: "name3", Every: time.Hour, Cron: "* * * * *"}, ""), shouldErr: true}, - {script: scriptGenerator(options.Options{Name: "name4", Concurrency: pointer.Int64(1000), Every: time.Hour}, ""), shouldErr: true}, + {script: scriptGenerator(options.Options{Name: "name3", Every: *(options.MustParseDuration("1h")), Cron: "* * * * *"}, ""), shouldErr: true}, + {script: scriptGenerator(options.Options{Name: "name4", Concurrency: pointer.Int64(1000), Every: *(options.MustParseDuration("1h"))}, ""), shouldErr: true}, {script: "option task = {\n name: \"name5\",\n concurrency: 0,\n every: 1m0s,\n\n}\n\nfrom(bucket: \"test\")\n |> range(start:-1h)", shouldErr: true}, {script: "option task = {\n name: \"name6\",\n concurrency: 1,\n every: 1,\n\n}\n\nfrom(bucket: \"test\")\n |> range(start:-1h)", shouldErr: true}, - {script: scriptGenerator(options.Options{Name: "name7", Retry: pointer.Int64(20), Every: time.Hour}, ""), shouldErr: true}, + {script: scriptGenerator(options.Options{Name: "name7", Retry: pointer.Int64(20), Every: *(options.MustParseDuration("1h"))}, ""), shouldErr: true}, {script: "option task = {\n name: \"name8\",\n retry: 0,\n every: 1m0s,\n\n}\n\nfrom(bucket: \"test\")\n |> range(start:-1h)", shouldErr: true}, {script: scriptGenerator(options.Options{Name: "name9"}, ""), shouldErr: true}, {script: scriptGenerator(options.Options{}, ""), shouldErr: true}, @@ -125,7 +141,7 @@ func TestValidate(t *testing.T) { } *bad = good - bad.Every = time.Minute + bad.Every = *options.MustParseDuration("1m") if err := bad.Validate(); err == nil { t.Error("expected error for options with both cron and every") } @@ -138,13 +154,13 @@ func TestValidate(t *testing.T) { *bad = good bad.Cron = "" - bad.Every = -1 * time.Minute + bad.Every = *options.MustParseDuration("-1m") if err := bad.Validate(); err == nil { t.Error("expected error for negative every") } *bad = good - bad.Offset = pointer.Duration(1500 * time.Millisecond) + bad.Offset = options.MustParseDuration("1500ms") if err := bad.Validate(); err == nil { t.Error("expected error for sub-second delay resolution") } @@ -177,11 +193,11 @@ func TestValidate(t *testing.T) { func TestEffectiveCronString(t *testing.T) { for _, c := range []struct { c string - e time.Duration + e options.Duration exp string }{ {c: "10 * * * *", exp: "10 * * * *"}, - {e: 10 * time.Second, exp: "@every 10s"}, + {e: *(options.MustParseDuration("10s")), exp: "@every 10s"}, {exp: ""}, } { o := options.Options{Cron: c.c, Every: c.e} @@ -191,3 +207,68 @@ func TestEffectiveCronString(t *testing.T) { } } } + +func TestDurationMarshaling(t *testing.T) { + t.Run("unmarshaling", func(t *testing.T) { + now := time.Now() + dur1 := options.Duration{} + if err := dur1.UnmarshalText([]byte("1h10m3s")); err != nil { + t.Fatal(err) + } + d1, err1 := dur1.DurationFrom(now) + if err1 != nil { + t.Fatal(err1) + } + + dur2 := options.Duration{} + if err := dur2.Parse("1h10m3s"); err != nil { + t.Fatal(err) + } + d2, err2 := dur2.DurationFrom(now) + if err2 != nil { + t.Fatal(err2) + } + + if d1 != d2 || d1 != time.Hour+10*time.Minute+3*time.Second { + t.Fatal("Parse and Marshaling do not give us the same result") + } + }) + + t.Run("marshaling", func(t *testing.T) { + dur := options.Duration{} + if err := dur.UnmarshalText([]byte("1h10m3s")); err != nil { + t.Fatal(err) + } + if dur.String() != "1h10m3s" { + t.Fatalf("duration string should be 
\"1h10m3s\" but was %s", dur.String()) + } + text, err := dur.MarshalText() + if err != nil { + t.Fatal(err) + } + if string(text) != "1h10m3s" { + t.Fatalf("duration text should be \"1h10m3s\" but was %s", text) + } + }) + + t.Run("parse zero", func(t *testing.T) { + dur := options.Duration{} + if err := dur.UnmarshalText([]byte("0h0s")); err != nil { + t.Fatal(err) + } + if !dur.IsZero() { + t.Fatalf("expected duration \"0s\" to be zero but was %s", dur.String()) + } + }) +} + +func TestDurationMath(t *testing.T) { + dur := options.MustParseDuration("10s") + d, err := dur.DurationFrom(time.Now()) + if err != nil { + t.Fatal(err) + } + if d != 10*time.Second { + t.Fatalf("expected duration to be 10s but it was %s", d) + } +} diff --git a/task/platform_adapter.go b/task/platform_adapter.go index 8740434bba..f9ff265d49 100644 --- a/task/platform_adapter.go +++ b/task/platform_adapter.go @@ -176,10 +176,10 @@ func (p pAdapter) CreateTask(ctx context.Context, t platform.TaskCreate) (*platf AuthorizationID: req.AuthorizationID, } - if opts.Every != 0 { + if !opts.Every.IsZero() { task.Every = opts.Every.String() } - if opts.Offset != nil && *opts.Offset != 0 { + if opts.Offset != nil && !(*opts.Offset).IsZero() { task.Offset = opts.Offset.String() } @@ -429,10 +429,10 @@ func (p *pAdapter) toPlatformTask(ctx context.Context, t backend.StoreTask, m *b Flux: t.Script, Cron: opts.Cron, } - if opts.Every != 0 { + if !opts.Every.IsZero() { pt.Every = opts.Every.String() } - if opts.Offset != nil && *opts.Offset != 0 { + if opts.Offset != nil && !(*opts.Offset).IsZero() { pt.Offset = opts.Offset.String() } if m != nil { diff --git a/task/service_test.go b/task/service_test.go index 2a8301e8a7..fd786a8658 100644 --- a/task/service_test.go +++ b/task/service_test.go @@ -27,7 +27,7 @@ func inMemFactory(t *testing.T) (*servicetest.System, context.CancelFunc) { i := inmem.NewService() return &servicetest.System{ - TaskControlService: servicetest.TaskControlAdaptor(st, lrw, lrw), + TaskControlService: backend.TaskControlAdaptor(st, lrw, lrw), Ctx: ctx, I: i, TaskService: servicetest.UsePlatformAdaptor(st, lrw, mock.NewScheduler(), i), @@ -63,7 +63,7 @@ func boltFactory(t *testing.T) (*servicetest.System, context.CancelFunc) { i := inmem.NewService() return &servicetest.System{ - TaskControlService: servicetest.TaskControlAdaptor(st, lrw, lrw), + TaskControlService: backend.TaskControlAdaptor(st, lrw, lrw), TaskService: servicetest.UsePlatformAdaptor(st, lrw, mock.NewScheduler(), i), Ctx: ctx, I: i, diff --git a/task/servicetest/servicetest.go b/task/servicetest/servicetest.go index 69b9caa282..21bb238358 100644 --- a/task/servicetest/servicetest.go +++ b/task/servicetest/servicetest.go @@ -18,7 +18,6 @@ import ( "github.com/influxdata/influxdb" icontext "github.com/influxdata/influxdb/context" "github.com/influxdata/influxdb/inmem" - "github.com/influxdata/influxdb/pkg/pointer" "github.com/influxdata/influxdb/task" "github.com/influxdata/influxdb/task/backend" "github.com/influxdata/influxdb/task/options" @@ -42,99 +41,6 @@ func UsePlatformAdaptor(s backend.Store, lr backend.LogReader, rc task.RunContro return task.PlatformAdapter(s, lr, rc, i, i, i) } -// TaskControlAdaptor creates a TaskControlService for the older TaskStore system. -func TaskControlAdaptor(s backend.Store, lw backend.LogWriter, lr backend.LogReader) backend.TaskControlService { - return &taskControlAdaptor{s, lw, lr} -} - -// taskControlAdaptor adapts a backend.Store and log readers and writers to implement the task control service. 
-type taskControlAdaptor struct { - s backend.Store - lw backend.LogWriter - lr backend.LogReader -} - -func (tcs *taskControlAdaptor) CreateNextRun(ctx context.Context, taskID influxdb.ID, now int64) (backend.RunCreation, error) { - return tcs.s.CreateNextRun(ctx, taskID, now) -} - -func (tcs *taskControlAdaptor) FinishRun(ctx context.Context, taskID, runID influxdb.ID) (*influxdb.Run, error) { - // the tests aren't looking for a returned Run because the old system didn't return one - // Once we completely switch over to the new system we can look at the returned run in the tests. - return nil, tcs.s.FinishRun(ctx, taskID, runID) -} - -func (tcs *taskControlAdaptor) NextDueRun(ctx context.Context, taskID influxdb.ID) (int64, error) { - _, m, err := tcs.s.FindTaskByIDWithMeta(ctx, taskID) - if err != nil { - return 0, err - } - return m.NextDueRun() -} - -func (tcs *taskControlAdaptor) UpdateRunState(ctx context.Context, taskID, runID influxdb.ID, when time.Time, state backend.RunStatus) error { - st, m, err := tcs.s.FindTaskByIDWithMeta(ctx, taskID) - if err != nil { - return err - } - var ( - schedFor, reqAt time.Time - ) - // check the log store - r, err := tcs.lr.FindRunByID(ctx, st.Org, runID) - if err == nil { - schedFor, _ = time.Parse(time.RFC3339, r.ScheduledFor) - reqAt, _ = time.Parse(time.RFC3339, r.RequestedAt) - } - - // in the old system the log store may not have the run until after the first - // state update, so we will need to pull the currently running. - if schedFor.IsZero() { - for _, cr := range m.CurrentlyRunning { - if influxdb.ID(cr.RunID) == runID { - schedFor = time.Unix(cr.Now, 0) - reqAt = time.Unix(cr.RequestedAt, 0) - } - } - - } - - rlb := backend.RunLogBase{ - Task: st, - RunID: runID, - RunScheduledFor: schedFor.Unix(), - RequestedAt: reqAt.Unix(), - } - - if err := tcs.lw.UpdateRunState(ctx, rlb, when, state); err != nil { - return err - } - return nil -} - -func (tcs *taskControlAdaptor) AddRunLog(ctx context.Context, taskID, runID influxdb.ID, when time.Time, log string) error { - st, err := tcs.s.FindTaskByID(ctx, taskID) - if err != nil { - return err - } - - r, err := tcs.lr.FindRunByID(ctx, st.Org, runID) - if err != nil { - return err - } - schFor, _ := time.Parse(time.RFC3339, r.ScheduledFor) - reqAt, _ := time.Parse(time.RFC3339, r.RequestedAt) - - rlb := backend.RunLogBase{ - Task: st, - RunID: runID, - RunScheduledFor: schFor.Unix(), - RequestedAt: reqAt.Unix(), - } - - return tcs.lw.AddRunLog(ctx, rlb, when, log) -} - // TestTaskService should be called by consumers of the servicetest package. // This will call fn once to create a single influxdb.TaskService // used across all subtests in TestTaskService. @@ -176,6 +82,10 @@ func TestTaskService(t *testing.T, fn BackendComponentFactory) { t.Parallel() testMetaUpdate(t, sys) }) + t.Run("Task Manual Run", func(t *testing.T) { + t.Parallel() + testManualRun(t, sys) + }) }) } @@ -365,8 +275,8 @@ func testTaskCRUD(t *testing.T, sys *System) { // Update task: switch to every. 
newStatus = string(backend.TaskActive) - newFlux = "import \"http\"\n\noption task = {\n\tname: \"task-changed #98\",\n\tevery: 30000000000ns,\n\toffset: 5s,\n\tconcurrency: 100,\n}\n\nfrom(bucket: \"b\")\n\t|> http.to(url: \"http://example.com\")" - f, err = sys.TaskService.UpdateTask(authorizedCtx, origID, influxdb.TaskUpdate{Options: options.Options{Every: 30 * time.Second}}) + newFlux = "import \"http\"\n\noption task = {\n\tname: \"task-changed #98\",\n\tevery: 30s,\n\toffset: 5s,\n\tconcurrency: 100,\n}\n\nfrom(bucket: \"b\")\n\t|> http.to(url: \"http://example.com\")" + f, err = sys.TaskService.UpdateTask(authorizedCtx, origID, influxdb.TaskUpdate{Options: options.Options{Every: *(options.MustParseDuration("30s"))}}) if err != nil { t.Fatal(err) } @@ -450,7 +360,7 @@ from(bucket: "b") expectedFlux := `import "http" -option task = {name: "task-Options-Update", every: 10000000000ns, concurrency: 100} +option task = {name: "task-Options-Update", every: 10s, concurrency: 100} from(bucket: "b") |> http.to(url: "http://example.com")` @@ -467,7 +377,7 @@ from(bucket: "b") if err != nil { t.Fatal(err) } - f, err := sys.TaskService.UpdateTask(authorizedCtx, task.ID, influxdb.TaskUpdate{Options: options.Options{Offset: pointer.Duration(0), Every: 10 * time.Second}}) + f, err := sys.TaskService.UpdateTask(authorizedCtx, task.ID, influxdb.TaskUpdate{Options: options.Options{Offset: &options.Duration{}, Every: *(options.MustParseDuration("10s"))}}) if err != nil { t.Fatal(err) } @@ -1026,7 +936,7 @@ func testTaskConcurrency(t *testing.T, sys *System) { // Create a run for the last task we found. // The script should run every minute, so use max now. tid := tasks[len(tasks)-1].ID - if _, err := sys.TaskControlService.CreateNextRun(sys.Ctx, tid, math.MaxInt64); err != nil { + if _, err := sys.TaskControlService.CreateNextRun(sys.Ctx, tid, math.MaxInt64>>6); err != nil { // we use the >>6 here because math.MaxInt64 is too large which causes problems when converting back and forth from time // This may have errored due to the task being deleted. Check if the task still exists. if _, err2 := sys.TaskService.FindTaskByID(sys.Ctx, tid); err2 == backend.ErrTaskNotFound { @@ -1059,6 +969,49 @@ func testTaskConcurrency(t *testing.T, sys *System) { extraWg.Wait() } +func testManualRun(t *testing.T, s *System) { + cr := creds(t, s) + + // Create a task. 
+ tc := influxdb.TaskCreate{ + OrganizationID: cr.OrgID, + Flux: fmt.Sprintf(scriptFmt, 0), + Token: cr.Token, + } + + authorizedCtx := icontext.SetAuthorizer(s.Ctx, cr.Authorizer()) + + tsk, err := s.TaskService.CreateTask(authorizedCtx, tc) + if err != nil { + t.Fatal(err) + } + if !tsk.ID.Valid() { + t.Fatal("no task ID set") + } + scheduledFor := time.Now().UTC() + + run, err := s.TaskService.ForceRun(authorizedCtx, tsk.ID, scheduledFor.Unix()) + if err != nil { + t.Fatal(err) + } + + if run.ScheduledFor != scheduledFor.Format(time.RFC3339) { + t.Fatalf("force run returned a different scheduled for time expected: %s, got %s", scheduledFor.Format(time.RFC3339), run.ScheduledFor) + } + + runs, err := s.TaskControlService.ManualRuns(authorizedCtx, tsk.ID) + if err != nil { + t.Fatal(err) + } + if len(runs) != 1 { + t.Fatalf("expected 1 manual run: got %d", len(runs)) + } + if runs[0].ID != run.ID { + diff := cmp.Diff(runs[0], run) + t.Fatalf("manual run missmatch: %s", diff) + } +} + func creds(t *testing.T, s *System) TestCreds { t.Helper() diff --git a/task_test.go b/task_test.go index d898536217..1723e9f7dc 100644 --- a/task_test.go +++ b/task_test.go @@ -3,11 +3,9 @@ package influxdb_test import ( "encoding/json" "testing" - "time" "github.com/google/go-cmp/cmp" platform "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/pkg/pointer" _ "github.com/influxdata/influxdb/query/builtin" "github.com/influxdata/influxdb/task/options" ) @@ -18,10 +16,10 @@ func TestOptionsMarshal(t *testing.T) { if err := json.Unmarshal([]byte(`{"every":"10s", "offset":"1h"}`), tu); err != nil { t.Fatal(err) } - if tu.Options.Every != 10*time.Second { + if tu.Options.Every.String() != "10s" { t.Fatalf("option.every not properly unmarshaled, expected 10s got %s", tu.Options.Every) } - if *tu.Options.Offset != time.Hour { + if tu.Options.Offset.String() != "1h" { t.Fatalf("option.every not properly unmarshaled, expected 1h got %s", tu.Options.Offset) } @@ -38,22 +36,22 @@ func TestOptionsMarshal(t *testing.T) { func TestOptionsEdit(t *testing.T) { tu := &platform.TaskUpdate{} - tu.Options.Every = 10 * time.Second + tu.Options.Every = *(options.MustParseDuration("10s")) if err := tu.UpdateFlux(`option task = {every: 20s, name: "foo"} from(bucket:"x") |> range(start:-1h)`); err != nil { t.Fatal(err) } t.Run("zeroing", func(t *testing.T) { - if tu.Options.Every != 0 { - t.Errorf("expected Every to be zeroed but it wasn't") + if !tu.Options.Every.IsZero() { + t.Errorf("expected Every to be zeroed but it was not") } }) t.Run("fmt string", func(t *testing.T) { - t.Skip("This won't work until the flux formatter formats durations in a nicer way") expected := `option task = {every: 10s, name: "foo"} - from(bucket:"x") - |> range(start:-1h)` + +from(bucket: "x") + |> range(start: -1h)` if *tu.Flux != expected { - t.Errorf("got the wrong task back, expected %s,\n got %s\n", expected, *tu.Flux) + t.Errorf("got the wrong task back, expected %s,\n got %s\n diff: %s", expected, *tu.Flux, cmp.Diff(expected, *tu.Flux)) } }) t.Run("replacement", func(t *testing.T) { @@ -61,15 +59,14 @@ func TestOptionsEdit(t *testing.T) { if err != nil { t.Error(err) } - if op.Every != 10*time.Second { + if op.Every.String() != "10s" { t.Logf("expected every to be 10s but was %s", op.Every) t.Fail() } }) t.Run("add new option", func(t *testing.T) { tu := &platform.TaskUpdate{} - ofst := 30 * time.Second - tu.Options.Offset = &ofst + tu.Options.Offset = options.MustParseDuration("30s") if err := tu.UpdateFlux(`option task = 
{every: 20s, name: "foo"} from(bucket:"x") |> range(start:-1h)`); err != nil { t.Fatal(err) } @@ -77,7 +74,7 @@ func TestOptionsEdit(t *testing.T) { if err != nil { t.Error(err) } - if op.Offset == nil || *op.Offset != 30*time.Second { + if op.Offset == nil || op.Offset.String() != "30s" { t.Fatalf("expected every to be 30s but was %s", op.Every) } }) @@ -91,7 +88,7 @@ func TestOptionsEdit(t *testing.T) { if err != nil { t.Error(err) } - if op.Every != 0 { + if !op.Every.IsZero() { t.Fatalf("expected every to be 0 but was %s", op.Every) } if op.Cron != "* * * * *" { @@ -100,7 +97,7 @@ func TestOptionsEdit(t *testing.T) { }) t.Run("switching from cron to every", func(t *testing.T) { tu := &platform.TaskUpdate{} - tu.Options.Every = 10 * time.Second + tu.Options.Every = *(options.MustParseDuration("10s")) if err := tu.UpdateFlux(`option task = {cron: "* * * * *", name: "foo"} from(bucket:"x") |> range(start:-1h)`); err != nil { t.Fatal(err) } @@ -108,7 +105,7 @@ func TestOptionsEdit(t *testing.T) { if err != nil { t.Error(err) } - if op.Every != 10*time.Second { + if op.Every.String() != "10s" { t.Fatalf("expected every to be 10s but was %s", op.Every) } if op.Cron != "" { @@ -117,7 +114,7 @@ func TestOptionsEdit(t *testing.T) { }) t.Run("delete deletable option", func(t *testing.T) { tu := &platform.TaskUpdate{} - tu.Options.Offset = pointer.Duration(0) + tu.Options.Offset = &options.Duration{} expscript := `option task = {cron: "* * * * *", name: "foo"} from(bucket: "x") @@ -129,7 +126,7 @@ from(bucket: "x") if err != nil { t.Error(err) } - if op.Every != 0 { + if !op.Every.IsZero() { t.Fatalf("expected every to be 0s but was %s", op.Every) } if op.Cron != "* * * * *" { diff --git a/testing/auth.go b/testing/auth.go index e169ce85e1..42275839e9 100644 --- a/testing/auth.go +++ b/testing/auth.go @@ -62,8 +62,8 @@ func AuthorizationService( fn: FindAuthorizationByToken, }, { - name: "UpdateAuthorizationStatus", - fn: UpdateAuthorizationStatus, + name: "UpdateAuthorization", + fn: UpdateAuthorization, }, { name: "FindAuthorizations", @@ -447,14 +447,18 @@ func FindAuthorizationByID( } } -// UpdateAuthorizationStatus testing -func UpdateAuthorizationStatus( +func stringPtr(s string) *string { + return &s +} + +// UpdateAuthorization testing +func UpdateAuthorization( init func(AuthorizationFields, *testing.T) (platform.AuthorizationService, string, func()), t *testing.T, ) { type args struct { - id platform.ID - status platform.Status + id platform.ID + upd *platform.AuthorizationUpdate } type wants struct { err error @@ -522,8 +526,11 @@ func UpdateAuthorizationStatus( }, }, args: args{ - id: MustIDBase16(authTwoID), - status: platform.Inactive, + id: MustIDBase16(authTwoID), + upd: &platform.AuthorizationUpdate{ + Status: platform.Inactive.Ptr(), + Description: stringPtr("desc1"), + }, }, wants: wants{ authorization: &platform.Authorization{ @@ -533,6 +540,7 @@ func UpdateAuthorizationStatus( Token: "rand2", Permissions: createUsersPermission(MustIDBase16(orgOneID)), Status: platform.Inactive, + Description: "desc1", }, }, }, @@ -585,13 +593,15 @@ func UpdateAuthorizationStatus( }, }, args: args{ - id: MustIDBase16(authThreeID), - status: platform.Inactive, + id: MustIDBase16(authThreeID), + upd: &platform.AuthorizationUpdate{ + Status: platform.Inactive.Ptr(), + }, }, wants: wants{ err: &platform.Error{ Code: platform.ENotFound, - Op: platform.OpSetAuthorizationStatus, + Op: platform.OpUpdateAuthorization, Msg: "authorization not found", }, }, @@ -652,13 +662,15 @@ func 
UpdateAuthorizationStatus( }, }, args: args{ - id: MustIDBase16(authTwoID), - status: platform.Status("unknown"), + id: MustIDBase16(authTwoID), + upd: &platform.AuthorizationUpdate{ + Status: platform.Status("unknown").Ptr(), + }, }, wants: wants{ err: &platform.Error{ Code: platform.EInvalid, - Op: platform.OpSetAuthorizationStatus, + Op: platform.OpUpdateAuthorization, Msg: "unknown authorization status", }, }, @@ -670,7 +682,7 @@ func UpdateAuthorizationStatus( defer done() ctx := context.Background() - err := s.SetAuthorizationStatus(ctx, tt.args.id, tt.args.status) + err := s.UpdateAuthorization(ctx, tt.args.id, tt.args.upd) diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t) if tt.wants.err == nil { diff --git a/testing/cells.go b/testing/cells.go deleted file mode 100644 index 0fb0e92402..0000000000 --- a/testing/cells.go +++ /dev/null @@ -1,694 +0,0 @@ -package testing - -import ( - "bytes" - "context" - "sort" - "testing" - - "github.com/google/go-cmp/cmp" - platform "github.com/influxdata/influxdb" - "github.com/influxdata/influxdb/mock" -) - -const ( - viewOneID = "020f755c3c082000" - viewTwoID = "020f755c3c082001" - viewThreeID = "020f755c3c082002" -) - -var viewCmpOptions = cmp.Options{ - cmp.Comparer(func(x, y []byte) bool { - return bytes.Equal(x, y) - }), - cmp.Transformer("Sort", func(in []*platform.View) []*platform.View { - out := append([]*platform.View(nil), in...) // Copy input to avoid mutating it - sort.Slice(out, func(i, j int) bool { - return out[i].ID.String() > out[j].ID.String() - }) - return out - }), -} - -// ViewFields will include the IDGenerator, and views -type ViewFields struct { - IDGenerator platform.IDGenerator - Views []*platform.View -} - -// CreateView testing -func CreateView( - init func(ViewFields, *testing.T) (platform.ViewService, string, func()), - t *testing.T, -) { - type args struct { - view *platform.View - } - type wants struct { - err error - views []*platform.View - } - - tests := []struct { - name string - fields ViewFields - args args - wants wants - }{ - { - name: "basic create view", - fields: ViewFields{ - IDGenerator: &mock.IDGenerator{ - IDFn: func() platform.ID { - return MustIDBase16(viewTwoID) - }, - }, - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - }, - }, - }, - args: args{ - view: &platform.View{ - ViewContents: platform.ViewContents{ - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - wants: wants{ - views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - s, opPrefix, done := init(tt.fields, t) - defer done() - ctx := context.Background() - err := s.CreateView(ctx, tt.args.view) - diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t) - defer s.DeleteView(ctx, tt.args.view.ID) - - views, _, err := s.FindViews(ctx, platform.ViewFilter{}) - if err != nil { - t.Fatalf("failed to retrieve views: %v", err) - } - if diff := cmp.Diff(views, tt.wants.views, viewCmpOptions...); diff != "" { - t.Errorf("views are different -got/+want\ndiff %s", diff) - } - }) - } -} - -// 
FindViewByID testing -func FindViewByID( - init func(ViewFields, *testing.T) (platform.ViewService, string, func()), - t *testing.T, -) { - type args struct { - id platform.ID - } - type wants struct { - err error - view *platform.View - } - - tests := []struct { - name string - fields ViewFields - args args - wants wants - }{ - { - name: "basic find view by id", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{ - id: MustIDBase16(viewTwoID), - }, - wants: wants{ - view: &platform.View{ - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - { - name: "find view by id not found", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{ - id: MustIDBase16(threeID), - }, - wants: wants{ - err: &platform.Error{ - Code: platform.ENotFound, - Op: platform.OpFindViewByID, - Msg: "view not found", - }, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - s, opPrefix, done := init(tt.fields, t) - defer done() - ctx := context.Background() - - view, err := s.FindViewByID(ctx, tt.args.id) - diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t) - - if diff := cmp.Diff(view, tt.wants.view, viewCmpOptions...); diff != "" { - t.Errorf("view is different -got/+want\ndiff %s", diff) - } - }) - } -} - -// FindViews testing -func FindViews( - init func(ViewFields, *testing.T) (platform.ViewService, string, func()), - t *testing.T, -) { - type args struct { - ID platform.ID - } - - type wants struct { - views []*platform.View - err error - } - tests := []struct { - name string - fields ViewFields - args args - wants wants - }{ - { - name: "find all views", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{}, - wants: wants{ - views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - }, - { - name: "find view by id", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: 
MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{ - ID: MustIDBase16(viewTwoID), - }, - wants: wants{ - views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - }, - { - name: "find view by id not found", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{ - ID: MustIDBase16(threeID), - }, - wants: wants{ - views: []*platform.View{}, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - s, opPrefix, done := init(tt.fields, t) - defer done() - ctx := context.Background() - - filter := platform.ViewFilter{} - if tt.args.ID.Valid() { - filter.ID = &tt.args.ID - } - - views, _, err := s.FindViews(ctx, filter) - diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t) - - if diff := cmp.Diff(views, tt.wants.views, viewCmpOptions...); diff != "" { - t.Errorf("views are different -got/+want\ndiff %s", diff) - } - }) - } -} - -// DeleteView testing -func DeleteView( - init func(ViewFields, *testing.T) (platform.ViewService, string, func()), - t *testing.T, -) { - type args struct { - ID platform.ID - } - type wants struct { - err error - views []*platform.View - } - - tests := []struct { - name string - fields ViewFields - args args - wants wants - }{ - { - name: "delete views using exist id", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{ - ID: MustIDBase16(viewOneID), - }, - wants: wants{ - views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - }, - { - name: "delete views using id that does not exist", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{ - ID: MustIDBase16(viewThreeID), - }, - wants: wants{ - err: &platform.Error{ - Code: platform.ENotFound, - Op: platform.OpDeleteView, - Msg: "view not found", - }, - views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - 
TimeFormat: "rfc3339", - }, - }, - }, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - s, opPrefix, done := init(tt.fields, t) - defer done() - ctx := context.Background() - err := s.DeleteView(ctx, tt.args.ID) - diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t) - - filter := platform.ViewFilter{} - views, _, err := s.FindViews(ctx, filter) - if err != nil { - t.Fatalf("failed to retrieve views: %v", err) - } - if diff := cmp.Diff(views, tt.wants.views, viewCmpOptions...); diff != "" { - t.Errorf("views are different -got/+want\ndiff %s", diff) - } - }) - } -} - -// UpdateView testing -func UpdateView( - init func(ViewFields, *testing.T) (platform.ViewService, string, func()), - t *testing.T, -) { - type args struct { - name string - properties platform.ViewProperties - id platform.ID - } - type wants struct { - err error - view *platform.View - } - - tests := []struct { - name string - fields ViewFields - args args - wants wants - }{ - { - name: "update name", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{ - id: MustIDBase16(viewOneID), - name: "changed", - }, - wants: wants{ - view: &platform.View{ - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "changed", - }, - Properties: platform.EmptyViewProperties{}, - }, - }, - }, - { - name: "update properties", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{ - id: MustIDBase16(viewOneID), - properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - wants: wants{ - view: &platform.View{ - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - { - name: "update id not exists", - fields: ViewFields{ - Views: []*platform.View{ - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewOneID), - Name: "view1", - }, - Properties: platform.EmptyViewProperties{}, - }, - { - ViewContents: platform.ViewContents{ - ID: MustIDBase16(viewTwoID), - Name: "view2", - }, - Properties: platform.TableViewProperties{ - Type: "table", - TimeFormat: "rfc3339", - }, - }, - }, - }, - args: args{ - id: MustIDBase16(threeID), - name: "changed", - }, - wants: wants{ - err: &platform.Error{ - Code: platform.ENotFound, - Op: platform.OpUpdateView, - Msg: "view not found", - }, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - s, opPrefix, done := init(tt.fields, t) - defer done() - ctx := context.Background() - - upd := platform.ViewUpdate{} - if tt.args.name != "" { - upd.Name = &tt.args.name - } - if tt.args.properties != nil { - upd.Properties = tt.args.properties - } - - view, err := s.UpdateView(ctx, tt.args.id, upd) - diffPlatformErrors(tt.name, err, 
tt.wants.err, opPrefix, t)
-
-			if diff := cmp.Diff(view, tt.wants.view, viewCmpOptions...); diff != "" {
-				t.Errorf("view is different -got/+want\ndiff %s", diff)
-			}
-		})
-	}
-}
diff --git a/testing/document.go b/testing/document.go
index e3ad57e82e..8b488d74f5 100644
--- a/testing/document.go
+++ b/testing/document.go
@@ -34,6 +34,7 @@ func NewDocumentIntegrationTest(store kv.Store) func(t *testing.T) {
 		l1 := &influxdb.Label{Name: "l1"}
 		l2 := &influxdb.Label{Name: "l2"}
 		mustCreateLabels(ctx, svc, l1, l2)
+		lBad := &influxdb.Label{Name: "bad"}
 
 		o1 := &influxdb.Organization{Name: "foo"}
 		o2 := &influxdb.Organization{Name: "bar"}
@@ -109,6 +110,22 @@ func NewDocumentIntegrationTest(store kv.Store) func(t *testing.T) {
 			}
 		})
 
+		t.Run("can't create document with nonexistent label", func(t *testing.T) {
+			d4 := &influxdb.Document{
+				Meta: influxdb.DocumentMeta{
+					Name: "i4",
+				},
+				Content: map[string]interface{}{
+					"k4": "v4",
+				},
+			}
+			err = s.CreateDocument(ctx, d4, influxdb.WithLabel(lBad.Name))
+			ErrorsEqual(t, err, &influxdb.Error{
+				Code: influxdb.ENotFound,
+				Msg:  "label not found",
+			})
+		})
+
 		dl1 := new(influxdb.Document)
 		*dl1 = *d1
 		dl1.Labels = append([]*influxdb.Label{}, l1)
diff --git a/tsdb/tsm1/engine.go b/tsdb/tsm1/engine.go
index 4f7aee8ae8..acfaa691c2 100644
--- a/tsdb/tsm1/engine.go
+++ b/tsdb/tsm1/engine.go
@@ -937,6 +937,17 @@ func (e *Engine) ShouldCompactCache(t time.Time) CacheStatus {
 	return CacheStatusOkay
 }
 
+func (e *Engine) lastModified() time.Time {
+	fsTime := e.FileStore.LastModified()
+	cacheTime := e.Cache.LastWriteTime()
+
+	if cacheTime.After(fsTime) {
+		return cacheTime
+	}
+
+	return fsTime
+}
+
 func (e *Engine) compact(wg *sync.WaitGroup) {
 	t := time.NewTicker(time.Second)
 	defer t.Stop()
@@ -956,7 +967,7 @@ func (e *Engine) compact(wg *sync.WaitGroup) {
 		level1Groups := e.CompactionPlan.PlanLevel(1)
 		level2Groups := e.CompactionPlan.PlanLevel(2)
 		level3Groups := e.CompactionPlan.PlanLevel(3)
-		level4Groups := e.CompactionPlan.Plan(e.FileStore.LastModified())
+		level4Groups := e.CompactionPlan.Plan(e.lastModified())
 		e.compactionTracker.SetOptimiseQueue(uint64(len(level4Groups)))
 
 		// If no full compactions are need, see if an optimize is needed
diff --git a/ui/cypress/e2e/tasks.test.ts b/ui/cypress/e2e/tasks.test.ts
index 73dea882c6..947c25bbc5 100644
--- a/ui/cypress/e2e/tasks.test.ts
+++ b/ui/cypress/e2e/tasks.test.ts
@@ -21,7 +21,7 @@ describe('Tasks', () => {
     cy.getByTestID('dropdown--item New Task').click()
 
     cy.getByInputName('name').type(taskName)
-    cy.getByInputName('interval').type('1d')
+    cy.getByInputName('interval').type('24h')
     cy.getByInputName('offset').type('20m')
 
     cy.get('@bucket').then(({name}) => {
@@ -103,7 +103,7 @@ describe('Tasks', () => {
     cy.getByTestID('dropdown--item New Task').click()
 
     cy.getByInputName('name').type('🦄ask')
-    cy.getByInputName('interval').type('1d')
+    cy.getByInputName('interval').type('24h')
     cy.getByInputName('offset').type('20m')
 
     cy.getByTestID('flux-editor').within(() => {
diff --git a/ui/cypress/support/commands.ts b/ui/cypress/support/commands.ts
index 789f711b35..e446af18cb 100644
--- a/ui/cypress/support/commands.ts
+++ b/ui/cypress/support/commands.ts
@@ -61,7 +61,7 @@ export const createTask = (
 ): Cypress.Chainable => {
   const flux = `option task = {
     name: "${name}",
-    every: 1d,
+    every: 24h,
     offset: 20m
   }
 from(bucket: "defbuck")
diff --git a/ui/package-lock.json b/ui/package-lock.json
index 238ac497bb..d882978c95 100644
--- a/ui/package-lock.json
+++ b/ui/package-lock.json
@@ -985,9 +985,9 @@
       }
     },
"@influxdata/influx": { - "version": "0.2.52", - "resolved": "https://registry.npmjs.org/@influxdata/influx/-/influx-0.2.52.tgz", - "integrity": "sha512-EK1JR2c7pHqJVmWF8KcBVqdoM9MUn/tK+GeRUC2WLuI+HK7dAuc8oVvnDb1dXh01VQq3oQQxwSNO40tm9Opgrw==", + "version": "0.2.54", + "resolved": "https://registry.npmjs.org/@influxdata/influx/-/influx-0.2.54.tgz", + "integrity": "sha512-KDVe8ZYeNA/Ii9P9USWW28n4fES9ydQ2rrrQXzOqfqUQu2wkXDNluIKtMdjqJ0zwhqlc0v3hSSYGxPCHOIATEA==", "requires": { "axios": "^0.18.0" } @@ -1002,6 +1002,55 @@ "raf": "^3.1.0" } }, + "@influxdata/vis": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@influxdata/vis/-/vis-0.1.1.tgz", + "integrity": "sha512-qicZWRqujBbZo9NRh0kWxifXamL5fvBBW6ilhrrZHxN4O9spkwrMJB1mhxlR7+UxUsWt+YBQffCIlxKeOrxJBQ==", + "requires": { + "chroma-js": "^2.0.2", + "d3-array": "^2.0.3", + "d3-scale": "^2.2.2", + "immer": "^2.0.0", + "react-virtualized-auto-sizer": "^1.0.2" + }, + "dependencies": { + "chroma-js": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/chroma-js/-/chroma-js-2.0.3.tgz", + "integrity": "sha512-2kTvZZOFSV1O81/rm99t9vmkh9jQxsHqsRRoZevDVz/VCC3yKMyPuMK8M5yHG+UMg2tV6cRoqtZtgcD92udcBw==" + }, + "d3-array": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.0.3.tgz", + "integrity": "sha512-C7g4aCOoJa+/K5hPVqZLG8wjYHsTUROTk7Z1Ep9F4P5l+WVrvV0+6nAZ1wKTRLMhFWpGbozxUpyjIPZYAaLi+g==" + }, + "d3-scale": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-2.2.2.tgz", + "integrity": "sha512-LbeEvGgIb8UMcAa0EATLNX0lelKWGYDQiPdHj+gLblGVhGLyNbaCn3EvrJf0A3Y/uOOU5aD6MTh5ZFCdEwGiCw==", + "requires": { + "d3-array": "^1.2.0", + "d3-collection": "1", + "d3-format": "1", + "d3-interpolate": "1", + "d3-time": "1", + "d3-time-format": "2" + }, + "dependencies": { + "d3-array": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-1.2.4.tgz", + "integrity": "sha512-KHW6M86R+FUPYGb3R5XiYjXPq7VzwxZ22buHhAEVG5ztoEcZZMLov530mmccaqA1GghZArjQV46fuc8kUqhhHw==" + } + } + }, + "immer": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/immer/-/immer-2.1.4.tgz", + "integrity": "sha512-6UPbG/DIXFSWp10oJJaCPl5/lp5GhGEscDH0QGYKc5EMT5PLZ9+L8hhyc44zRHksI7CQXJp8r6nlDR3n09X6SA==" + } + } + }, "@mrmlnc/readdir-enhanced": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz", @@ -1127,7 +1176,7 @@ }, "@types/codemirror": { "version": "0.0.56", - "resolved": "http://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.56.tgz", + "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.56.tgz", "integrity": "sha512-OMtPqg2wFOEcNeVga+m+UXpYJw8ugISPCQOtShdFUho/k91Ms1oWOozoDT1I87Phv6IdwLfMLtIOahh1tO1cJQ==", "dev": true }, @@ -1306,7 +1355,7 @@ }, "@types/react-dnd-html5-backend": { "version": "2.1.9", - "resolved": "http://registry.npmjs.org/@types/react-dnd-html5-backend/-/react-dnd-html5-backend-2.1.9.tgz", + "resolved": "https://registry.npmjs.org/@types/react-dnd-html5-backend/-/react-dnd-html5-backend-2.1.9.tgz", "integrity": "sha512-o42zIpcgXXj04xYDT9o9kXoldqDa81ie5XAKKCo7/fOWEhOCRt9UYu+LzOXC308eKKf8v0HzbJaDTr93j3JrTw==", "dev": true, "requires": { @@ -2046,7 +2095,7 @@ }, "array-equal": { "version": "1.0.0", - "resolved": "http://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz", "integrity": "sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM=", "dev": true }, @@ -2131,7 +2180,7 @@ }, 
"util": { "version": "0.10.3", - "resolved": "http://registry.npmjs.org/util/-/util-0.10.3.tgz", + "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", "integrity": "sha1-evsa/lCAUkZInj23/g7TeTNqwPk=", "dev": true, "requires": { @@ -2299,7 +2348,7 @@ }, "axios": { "version": "0.18.0", - "resolved": "http://registry.npmjs.org/axios/-/axios-0.18.0.tgz", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.0.tgz", "integrity": "sha1-MtU+SFHv3AoRmTts0AB4nXDAUQI=", "requires": { "follow-redirects": "^1.3.0", @@ -2683,7 +2732,7 @@ }, "browserify-aes": { "version": "1.2.0", - "resolved": "http://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", "dev": true, "requires": { @@ -2720,7 +2769,7 @@ }, "browserify-rsa": { "version": "4.0.1", - "resolved": "http://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=", "dev": true, "requires": { @@ -2799,7 +2848,7 @@ }, "buffer": { "version": "4.9.1", - "resolved": "http://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", "dev": true, "requires": { @@ -3339,6 +3388,11 @@ "integrity": "sha512-FXDYw4TjR8wgPZYui2LeTqWh1BLpfQ8lB6upMtlpDF6WlOOxghmTTxWyngdKTgozqBgKnHbTVwTE+hOHqAykuQ==", "dev": true }, + "cnbuilder": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/cnbuilder/-/cnbuilder-1.0.8.tgz", + "integrity": "sha512-05l9Bhs0FhEFGJ6vFkqL9O9USCKT3zBfOoTAYXGDKA4nFBX1Qc780bvppG9av2U1sKpa27JT8brJtM6VQquRcQ==" + }, "co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -3664,7 +3718,7 @@ }, "create-hash": { "version": "1.2.0", - "resolved": "http://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", "dev": true, "requires": { @@ -3677,7 +3731,7 @@ }, "create-hmac": { "version": "1.1.7", - "resolved": "http://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", "dev": true, "requires": { @@ -4494,7 +4548,7 @@ }, "diffie-hellman": { "version": "5.0.3", - "resolved": "http://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", + "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", "dev": true, "requires": { @@ -4516,7 +4570,7 @@ }, "dnd-core": { "version": "2.6.0", - "resolved": "http://registry.npmjs.org/dnd-core/-/dnd-core-2.6.0.tgz", + "resolved": "https://registry.npmjs.org/dnd-core/-/dnd-core-2.6.0.tgz", "integrity": "sha1-ErrWbVh0LG5ffPKUP7aFlED4CcQ=", "requires": { "asap": "^2.0.6", @@ -5766,7 +5820,7 @@ "dependencies": { "core-js": { "version": "1.2.7", - "resolved": "http://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", + "resolved": 
"https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", "integrity": "sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY=" } } @@ -5835,7 +5889,7 @@ }, "finalhandler": { "version": "1.1.1", - "resolved": "http://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", "integrity": "sha512-Y1GUDo39ez4aHAw7MysnUD5JzYX+WaIj8I57kO3aEPT1fFRL4sr7mjei97FgnwhAyyzRYmQZaTHb2+9uZ1dPtg==", "dev": true, "requires": { @@ -6097,21 +6151,20 @@ "dependencies": { "abbrev": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "resolved": false, "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", "dev": true, "optional": true }, "ansi-regex": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "resolved": false, "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true, - "optional": true + "dev": true }, "aproba": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "resolved": false, "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", "dev": true, "optional": true @@ -6129,17 +6182,15 @@ }, "balanced-match": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "resolved": false, "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true, - "optional": true + "dev": true }, "brace-expansion": { "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "resolved": false, "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, - "optional": true, "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -6154,35 +6205,32 @@ }, "code-point-at": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "resolved": false, "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "dev": true, - "optional": true + "dev": true }, "concat-map": { "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": false, "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true, - "optional": true + "dev": true }, "console-control-strings": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "resolved": false, "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", - "dev": true, - "optional": true + "dev": true }, "core-util-is": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": false, "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", "dev": true, "optional": true }, "debug": { "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "resolved": false, "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dev": true, "optional": true, @@ -6199,21 +6247,21 @@ }, "delegates": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "resolved": false, "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=", "dev": true, "optional": true }, "detect-libc": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + 
"resolved": false, "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=", "dev": true, "optional": true }, "fs-minipass": { "version": "1.2.5", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.5.tgz", + "resolved": false, "integrity": "sha512-JhBl0skXjUPCFH7x6x61gQxrKyXsxB5gcgePLZCwfyCGGsTISMoIeObbrvVeP6Xmyaudw4TT43qV2Gz+iyd2oQ==", "dev": true, "optional": true, @@ -6223,14 +6271,14 @@ }, "fs.realpath": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "resolved": false, "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", "dev": true, "optional": true }, "gauge": { "version": "2.7.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "resolved": false, "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", "dev": true, "optional": true, @@ -6262,7 +6310,7 @@ }, "has-unicode": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "resolved": false, "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", "dev": true, "optional": true @@ -6279,7 +6327,7 @@ }, "ignore-walk": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.1.tgz", + "resolved": false, "integrity": "sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ==", "dev": true, "optional": true, @@ -6289,7 +6337,7 @@ }, "inflight": { "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "resolved": false, "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", "dev": true, "optional": true, @@ -6300,58 +6348,53 @@ }, "inherits": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": false, "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", - "dev": true, - "optional": true + "dev": true }, "ini": { "version": "1.3.5", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", + "resolved": false, "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", "dev": true, "optional": true }, "is-fullwidth-code-point": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "resolved": false, "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", "dev": true, - "optional": true, "requires": { "number-is-nan": "^1.0.0" } }, "isarray": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "resolved": false, "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", "dev": true, "optional": true }, "minimatch": { "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "resolved": false, "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "dev": true, - "optional": true, "requires": { "brace-expansion": "^1.1.7" } }, "minimist": { "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "resolved": false, "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", - "dev": true, - "optional": true + "dev": true }, "minipass": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.2.4.tgz", "integrity": "sha512-hzXIWWet/BzWhYs2b+u7dRHlruXhwdgvlTMDKC6Cb1U7ps6Ac6yQlR39xsbjWJE377YTCtKwIXIpJ5oP+j5y8g==", "dev": true, - "optional": true, "requires": { "safe-buffer": "^5.1.1", "yallist": "^3.0.0" @@ -6369,17 +6412,16 @@ }, "mkdirp": { 
"version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "resolved": false, "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "dev": true, - "optional": true, "requires": { "minimist": "0.0.8" } }, "ms": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "resolved": false, "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true, "optional": true @@ -6417,7 +6459,7 @@ }, "nopt": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.1.tgz", + "resolved": false, "integrity": "sha1-0NRoWv1UFRk8jHUFYC0NF81kR00=", "dev": true, "optional": true, @@ -6446,7 +6488,7 @@ }, "npmlog": { "version": "4.1.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "resolved": false, "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", "dev": true, "optional": true, @@ -6459,45 +6501,43 @@ }, "number-is-nan": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "resolved": false, "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true, - "optional": true + "dev": true }, "object-assign": { "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "resolved": false, "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", "dev": true, "optional": true }, "once": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "resolved": false, "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "dev": true, - "optional": true, "requires": { "wrappy": "1" } }, "os-homedir": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "resolved": false, "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", "dev": true, "optional": true }, "os-tmpdir": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "resolved": false, "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", "dev": true, "optional": true }, "osenv": { "version": "0.1.5", - "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "resolved": false, "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", "dev": true, "optional": true, @@ -6508,14 +6548,14 @@ }, "path-is-absolute": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": false, "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", "dev": true, "optional": true }, "process-nextick-args": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", + "resolved": false, "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==", "dev": true, "optional": true @@ -6535,7 +6575,7 @@ "dependencies": { "minimist": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "resolved": false, "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", "dev": true, "optional": true @@ -6544,7 +6584,7 @@ }, "readable-stream": { "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "resolved": false, "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", "dev": true, "optional": true, @@ -6572,19 +6612,18 @@ "version": "5.1.1", 
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==", - "dev": true, - "optional": true + "dev": true }, "safer-buffer": { "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "resolved": false, "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "dev": true, "optional": true }, "sax": { "version": "1.2.4", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "resolved": false, "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", "dev": true, "optional": true @@ -6598,24 +6637,23 @@ }, "set-blocking": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "resolved": false, "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", "dev": true, "optional": true }, "signal-exit": { "version": "3.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "resolved": false, "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", "dev": true, "optional": true }, "string-width": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "resolved": false, "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", "dev": true, - "optional": true, "requires": { "code-point-at": "^1.0.0", "is-fullwidth-code-point": "^1.0.0", @@ -6624,7 +6662,7 @@ }, "string_decoder": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "resolved": false, "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "dev": true, "optional": true, @@ -6634,17 +6672,16 @@ }, "strip-ansi": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": false, "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "dev": true, - "optional": true, "requires": { "ansi-regex": "^2.0.0" } }, "strip-json-comments": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "resolved": false, "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", "dev": true, "optional": true @@ -6667,7 +6704,7 @@ }, "util-deprecate": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "resolved": false, "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", "dev": true, "optional": true @@ -6684,17 +6721,15 @@ }, "wrappy": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": false, "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true, - "optional": true + "dev": true }, "yallist": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.2.tgz", "integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=", - "dev": true, - "optional": true + "dev": true } } }, @@ -6741,7 +6776,7 @@ }, "get-stream": { "version": "3.0.0", - "resolved": "http://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=", "dev": true }, @@ -7583,7 +7618,7 @@ }, "http-errors": { "version": "1.6.3", - "resolved": "http://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "resolved": 
"https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", "dev": true, "requires": { @@ -7606,7 +7641,7 @@ }, "http-proxy-middleware": { "version": "0.18.0", - "resolved": "http://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.18.0.tgz", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.18.0.tgz", "integrity": "sha512-Fs25KVMPAIIcgjMZkVHJoKg9VcXcC1C8yb9JUgeDvVXY0S/zgVIhMb+qVswDIgtJe2DfckMSY2d6TuTEutlk6Q==", "dev": true, "requires": { @@ -9967,7 +10002,7 @@ }, "lodash.isempty": { "version": "4.4.0", - "resolved": "http://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz", "integrity": "sha1-b4bL7di+TsmHvpqvM8loTbGzHn4=" }, "lodash.isequal": { @@ -10212,7 +10247,7 @@ }, "media-typer": { "version": "0.3.0", - "resolved": "http://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=", "dev": true }, @@ -10408,7 +10443,7 @@ }, "minimist": { "version": "1.2.0", - "resolved": "http://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", "dev": true }, @@ -10453,7 +10488,7 @@ }, "mkdirp": { "version": "0.5.1", - "resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "dev": true, "requires": { @@ -10462,7 +10497,7 @@ "dependencies": { "minimist": { "version": "0.0.8", - "resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", "dev": true } @@ -11098,7 +11133,7 @@ }, "os-homedir": { "version": "1.0.2", - "resolved": "http://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", "dev": true }, @@ -11115,7 +11150,7 @@ }, "os-tmpdir": { "version": "1.0.2", - "resolved": "http://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", "dev": true }, @@ -11616,7 +11651,7 @@ }, "path-is-absolute": { "version": "1.0.1", - "resolved": "http://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", "dev": true }, @@ -12967,7 +13002,7 @@ }, "react-dnd": { "version": "2.6.0", - "resolved": "http://registry.npmjs.org/react-dnd/-/react-dnd-2.6.0.tgz", + "resolved": "https://registry.npmjs.org/react-dnd/-/react-dnd-2.6.0.tgz", "integrity": "sha1-f6JWds+CfViokSk+PBq1naACVFo=", "requires": { "disposables": "^1.0.1", @@ -12980,7 +13015,7 @@ }, "react-dnd-html5-backend": { "version": "2.6.0", - "resolved": "http://registry.npmjs.org/react-dnd-html5-backend/-/react-dnd-html5-backend-2.6.0.tgz", + "resolved": "https://registry.npmjs.org/react-dnd-html5-backend/-/react-dnd-html5-backend-2.6.0.tgz", "integrity": "sha1-WQzRzKeEQbsnTt1XH+9MCxbdz44=", "requires": { "lodash": "^4.2.0" @@ -13095,7 +13130,7 @@ 
}, "react-resize-detector": { "version": "2.3.0", - "resolved": "http://registry.npmjs.org/react-resize-detector/-/react-resize-detector-2.3.0.tgz", + "resolved": "https://registry.npmjs.org/react-resize-detector/-/react-resize-detector-2.3.0.tgz", "integrity": "sha512-oCAddEWWeFWYH5FAcHdBYcZjAw9fMzRUK9sWSx6WvSSOPVRxcHd5zTIGy/mOus+AhN/u6T4TMiWxvq79PywnJQ==", "requires": { "lodash.debounce": "^4.0.8", @@ -13106,7 +13141,7 @@ }, "react-router": { "version": "3.2.1", - "resolved": "http://registry.npmjs.org/react-router/-/react-router-3.2.1.tgz", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-3.2.1.tgz", "integrity": "sha512-SXkhC0nr3G0ltzVU07IN8jYl0bB6FsrDIqlLC9dK3SITXqyTJyM7yhXlUqs89w3Nqi5OkXsfRUeHX+P874HQrg==", "requires": { "create-react-class": "^15.5.1", @@ -13123,6 +13158,26 @@ "resolved": "https://registry.npmjs.org/react-router-redux/-/react-router-redux-4.0.8.tgz", "integrity": "sha1-InQDWWtRUeGCN32rg1tdRfD4BU4=" }, + "react-scrollbars-custom": { + "version": "4.0.0-alpha.8", + "resolved": "https://registry.npmjs.org/react-scrollbars-custom/-/react-scrollbars-custom-4.0.0-alpha.8.tgz", + "integrity": "sha512-sj56pEY/0VV551B61yUzYy7YcR/h5ge51lqUGD9CbBmcJnyVDvZ90zaIIeERPsLxSR6Ddb1ueBAexKIe14zpNw==", + "requires": { + "cnbuilder": "^1.0.8", + "react-draggable": "^3.2.1" + }, + "dependencies": { + "react-draggable": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/react-draggable/-/react-draggable-3.2.1.tgz", + "integrity": "sha512-r+3Bs9InID2lyIEbR8UIRVtpn4jgu1ArFEZgIy8vibJjijLSdNLX7rH9U68BBVD4RD9v44RXbaK4EHLyKXzNQw==", + "requires": { + "classnames": "^2.2.5", + "prop-types": "^15.6.0" + } + } + } + }, "react-test-renderer": { "version": "16.5.2", "resolved": "https://registry.npmjs.org/react-test-renderer/-/react-test-renderer-16.5.2.tgz", @@ -13167,6 +13222,11 @@ "react-lifecycles-compat": "^3.0.4" } }, + "react-virtualized-auto-sizer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.2.tgz", + "integrity": "sha512-MYXhTY1BZpdJFjUovvYHVBmkq79szK/k7V3MO+36gJkWGkrXKtyr4vCPtpphaTLRAdDNoYEYFZWE8LjN+PIHNg==" + }, "read-pkg": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", @@ -13235,7 +13295,7 @@ }, "readable-stream": { "version": "2.3.6", - "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", "requires": { "core-util-is": "~1.0.0", @@ -13506,7 +13566,7 @@ "dependencies": { "hoist-non-react-statics": { "version": "1.2.0", - "resolved": "http://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-1.2.0.tgz", "integrity": "sha1-qkSM8JhtVcxAdzsXF0t90GbLfPs=" }, "prop-types": { @@ -13521,7 +13581,7 @@ }, "redux-thunk": { "version": "1.0.3", - "resolved": "http://registry.npmjs.org/redux-thunk/-/redux-thunk-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/redux-thunk/-/redux-thunk-1.0.3.tgz", "integrity": "sha1-d4qgCZ7qBZUDGrazkWX2Zw2NJr0=" }, "regenerate": { @@ -13684,7 +13744,7 @@ "dependencies": { "jsesc": { "version": "0.5.0", - "resolved": "http://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", "integrity": 
"sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=", "dev": true } @@ -13980,7 +14040,7 @@ }, "safe-regex": { "version": "1.1.0", - "resolved": "http://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", "dev": true, "requires": { @@ -14220,7 +14280,7 @@ }, "sha.js": { "version": "2.4.11", - "resolved": "http://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", "dev": true, "requires": { @@ -14699,7 +14759,7 @@ }, "string_decoder": { "version": "1.1.1", - "resolved": "http://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" @@ -14730,7 +14790,7 @@ }, "strip-eof": { "version": "1.0.0", - "resolved": "http://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", "dev": true }, @@ -16168,7 +16228,7 @@ }, "wrap-ansi": { "version": "2.1.0", - "resolved": "http://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", "dev": true, "requires": { diff --git a/ui/package.json b/ui/package.json index 5e0dc41d9d..241cbe80ec 100644 --- a/ui/package.json +++ b/ui/package.json @@ -136,9 +136,10 @@ "webpack": "^4.29.0" }, "dependencies": { - "@influxdata/influx": "0.2.52", "@influxdata/clockface": "0.0.8", + "@influxdata/influx": "0.2.54", "@influxdata/react-custom-scrollbars": "4.3.8", + "@influxdata/vis": "^0.1.1", "axios": "^0.18.0", "babel-polyfill": "^6.26.0", "bignumber.js": "^4.0.2", @@ -177,6 +178,7 @@ "react-resize-detector": "^2.3.0", "react-router": "^3.0.2", "react-router-redux": "^4.0.8", + "react-scrollbars-custom": "^4.0.0-alpha.8", "react-tooltip": "^3.2.1", "react-virtualized": "^9.18.5", "redux": "^4.0.0", diff --git a/ui/src/Signin.tsx b/ui/src/Signin.tsx index 352f441ad2..f8f435de27 100644 --- a/ui/src/Signin.tsx +++ b/ui/src/Signin.tsx @@ -94,7 +94,7 @@ export class Signin extends PureComponent { this.props.notify(sessionTimedOut()) } - this.props.router.push(`/signin${returnTo}`) + this.props.router.replace(`/signin${returnTo}`) } } } diff --git a/ui/src/clockface/components/card_select/CardSelectCard.tsx b/ui/src/clockface/components/card_select/CardSelectCard.tsx index bfe4d23721..bed0590cb8 100644 --- a/ui/src/clockface/components/card_select/CardSelectCard.tsx +++ b/ui/src/clockface/components/card_select/CardSelectCard.tsx @@ -6,17 +6,17 @@ import ProtoboardIcon from 'src/clockface/components/card_select/ProtoboardIcon' interface Props { id: string - name?: string label: string - image?: StatelessComponent - checked?: boolean - disabled?: boolean onClick: () => void + name?: string + image?: StatelessComponent + checked: boolean + disabled: boolean } @ErrorHandling class CardSelectCard extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { checked: false, disabled: false, } diff --git a/ui/src/clockface/components/card_select/ProtoboardIcon.tsx 
b/ui/src/clockface/components/card_select/ProtoboardIcon.tsx index 552fa6112e..338a88a283 100644 --- a/ui/src/clockface/components/card_select/ProtoboardIcon.tsx +++ b/ui/src/clockface/components/card_select/ProtoboardIcon.tsx @@ -2,11 +2,11 @@ import React, {PureComponent} from 'react' interface Props { - displayText?: string + displayText: string } class ProtoboardIcon extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { displayText: '', } diff --git a/ui/src/clockface/components/color_picker/ColorPicker.tsx b/ui/src/clockface/components/color_picker/ColorPicker.tsx index 65005ac947..8f42017480 100644 --- a/ui/src/clockface/components/color_picker/ColorPicker.tsx +++ b/ui/src/clockface/components/color_picker/ColorPicker.tsx @@ -22,24 +22,19 @@ import {validateHexCode} from 'src/configuration/utils/labels' // Styles import 'src/clockface/components/color_picker/ColorPicker.scss' -interface PassedProps { +interface Props { color: string onChange: (color: string, status?: ComponentStatus) => void + testID: string + maintainInputFocus: boolean } -interface DefaultProps { - maintainInputFocus?: boolean - testID?: string -} - -type Props = PassedProps & DefaultProps - interface State { errorMessage: string } export default class ColorPicker extends Component { - public static defaultProps: DefaultProps = { + public static defaultProps = { maintainInputFocus: false, testID: 'color-picker', } diff --git a/ui/src/clockface/components/confirmation_button/ConfirmationButton.tsx b/ui/src/clockface/components/confirmation_button/ConfirmationButton.tsx index 6ac992549c..b7a44973e2 100644 --- a/ui/src/clockface/components/confirmation_button/ConfirmationButton.tsx +++ b/ui/src/clockface/components/confirmation_button/ConfirmationButton.tsx @@ -24,16 +24,16 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { confirmText: string onConfirm: (returnValue?: any) => void + size: ComponentSize + shape: ButtonShape + testID: string + status: ComponentStatus returnValue?: any text?: string - size?: ComponentSize - shape?: ButtonShape icon?: IconFont - status?: ComponentStatus titleText?: string tabIndex?: number className?: string - testID?: string } interface State { @@ -42,7 +42,7 @@ interface State { @ErrorHandling class ConfirmationButton extends Component { - public static defaultProps: Partial = { + public static defaultProps = { size: ComponentSize.Small, shape: ButtonShape.Default, status: ComponentStatus.Default, diff --git a/ui/src/clockface/components/context_menu/Context.tsx b/ui/src/clockface/components/context_menu/Context.tsx index f4e28a0b59..4b6f7ec0e6 100644 --- a/ui/src/clockface/components/context_menu/Context.tsx +++ b/ui/src/clockface/components/context_menu/Context.tsx @@ -16,7 +16,7 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children: JSX.Element | JSX.Element[] - align?: Alignment + align: Alignment className?: string } @@ -26,7 +26,7 @@ interface State { @ErrorHandling class Context extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { align: Alignment.Right, } diff --git a/ui/src/clockface/components/context_menu/ContextMenu.tsx b/ui/src/clockface/components/context_menu/ContextMenu.tsx index 20bda35936..3df5d08316 100644 --- a/ui/src/clockface/components/context_menu/ContextMenu.tsx +++ b/ui/src/clockface/components/context_menu/ContextMenu.tsx @@ -17,28 +17,23 @@ import { import {ErrorHandling} from 
'src/shared/decorators/errors' -interface PassedProps { +interface Props { children: JSX.Element | JSX.Element[] icon: IconFont onBoostZIndex?: (boostZIndex: boolean) => void + text: string + color: ComponentColor + shape: ButtonShape + testID: string } -interface DefaultProps { - text?: string - color?: ComponentColor - shape?: ButtonShape - testID?: string -} - -type Props = PassedProps & DefaultProps - interface State { isExpanded: boolean } @ErrorHandling class ContextMenu extends Component { - public static defaultProps: DefaultProps = { + public static defaultProps = { color: ComponentColor.Primary, shape: ButtonShape.Square, text: '', diff --git a/ui/src/clockface/components/context_menu/ContextMenuItem.tsx b/ui/src/clockface/components/context_menu/ContextMenuItem.tsx index 02e8073662..fcf4d078e2 100644 --- a/ui/src/clockface/components/context_menu/ContextMenuItem.tsx +++ b/ui/src/clockface/components/context_menu/ContextMenuItem.tsx @@ -2,24 +2,19 @@ import React, {Component} from 'react' import classnames from 'classnames' -interface PassedProps { +interface Props { label: string action: (value?: any) => void + description: string + testID: string value?: any - onCollapseMenu?: () => void disabled?: boolean + onCollapseMenu?: () => void } -interface DefaultProps { - description?: string - testID?: string -} - -type Props = PassedProps & DefaultProps - class ContextMenuItem extends Component { - public static defaultProps: DefaultProps = { - description: null, + public static defaultProps = { + description: '', testID: 'context-menu-item', } diff --git a/ui/src/clockface/components/draggable_resizer/DraggableResizerPanel.tsx b/ui/src/clockface/components/draggable_resizer/DraggableResizerPanel.tsx index 11106187ea..ce9789a26b 100644 --- a/ui/src/clockface/components/draggable_resizer/DraggableResizerPanel.tsx +++ b/ui/src/clockface/components/draggable_resizer/DraggableResizerPanel.tsx @@ -6,13 +6,13 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children: JSX.Element - minSizePixels?: number + minSizePixels: number sizePercent?: number } @ErrorHandling class DraggableResizerPanel extends Component { - public static defaultProps: Partial = { + public static defaultProps = { minSizePixels: 0, } diff --git a/ui/src/clockface/components/dropdowns/Dropdown.scss b/ui/src/clockface/components/dropdowns/Dropdown.scss index 1d963fbf98..7f4c872a9e 100644 --- a/ui/src/clockface/components/dropdowns/Dropdown.scss +++ b/ui/src/clockface/components/dropdowns/Dropdown.scss @@ -46,7 +46,6 @@ line-height: 12px; font-weight: 600; color: fade-out($g20-white, 0.18); - white-space: nowrap; position: relative; text-align: left; @@ -64,13 +63,21 @@ cursor: pointer; } + .dropdown--action & { + padding-left: 11px; + } +} + +.dropdown-item--children { .dropdown-wrap & { word-break: break-all; white-space: pre-wrap; } - .dropdown--action & { - padding-left: 11px; + .dropdown-truncate & { + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; } } @@ -150,8 +157,7 @@ $dividerA, $dividerB, $dividerText, - $scrollA, - $scrollB + $checkbox ) { @include gradient-h($backgroundA, $backgroundB); @@ -167,13 +173,7 @@ background-color: $dividerA; } .dropdown-item--checkbox:after { - background-color: $scrollA; - } - .fancy-scroll--thumb-h { - @include gradient-h($scrollA, $scrollB); - } - .fancy-scroll--thumb-v { - @include gradient-v($scrollA, $scrollB); + background-color: $checkbox; } } @@ -186,8 +186,7 @@ $c-amethyst, $c-ocean, $c-potassium, - $c-neutrino, - 
$c-hydrogen + $c-neutrino ); } @@ -200,8 +199,7 @@ $c-sapphire, $c-ocean, $c-laser, - $c-neutrino, - $c-hydrogen + $c-neutrino ); } @@ -214,8 +212,7 @@ $c-ocean, $c-viridian, $c-krypton, - $c-neutrino, - $c-krypton + $c-neutrino ); } @@ -228,12 +225,6 @@ $g0-obsidian, $g2-kevlar, $g11-sidewalk, - $c-pool, - $c-comet + $c-pool ); } - -/* TODO: Make fancyscroll more customizable */ -.dropdown--menu-container .fancy-scroll--track-h { - display: none; -} diff --git a/ui/src/clockface/components/dropdowns/Dropdown.tsx b/ui/src/clockface/components/dropdowns/Dropdown.tsx index 7f445e5faf..c79dbbaa05 100644 --- a/ui/src/clockface/components/dropdowns/Dropdown.tsx +++ b/ui/src/clockface/components/dropdowns/Dropdown.tsx @@ -1,5 +1,5 @@ // Libraries -import React, {Component, CSSProperties, MouseEvent} from 'react' +import React, {Component, MouseEvent} from 'react' import classnames from 'classnames' // Components @@ -7,7 +7,7 @@ import {ClickOutside} from 'src/shared/components/ClickOutside' import DropdownDivider from 'src/clockface/components/dropdowns/DropdownDivider' import DropdownItem from 'src/clockface/components/dropdowns/DropdownItem' import DropdownButton from 'src/clockface/components/dropdowns/DropdownButton' -import FancyScrollbar from 'src/shared/components/fancy_scrollbar/FancyScrollbar' +import DapperScrollbars from 'src/shared/components/dapperScrollbars/DapperScrollbars' import WaitingText from 'src/shared/components/WaitingText' // Types @@ -26,41 +26,54 @@ export enum DropdownMode { Radio = 'radio', } -export interface Props { +interface ThumbColors { + start: string + stop: string +} + +interface PassedProps { children: JSX.Element[] onChange: (value: any) => void selectedID?: string + widthPixels?: number + menuWidthPixels?: number + menuHeader?: JSX.Element + icon?: IconFont + customClass?: string +} + +export interface DefaultProps { buttonColor?: ComponentColor buttonSize?: ComponentSize - menuColor?: DropdownMenuColors status?: ComponentStatus - widthPixels?: number - icon?: IconFont - wrapText?: boolean - customClass?: string maxMenuHeight?: number + menuColor?: DropdownMenuColors mode?: DropdownMode titleText?: string - menuHeader?: JSX.Element - testID: string - buttonTestID: string + wrapMenuText?: boolean + testID?: string + buttonTestID?: string } +export type Props = PassedProps & DefaultProps + interface State { expanded: boolean } @ErrorHandling class Dropdown extends Component { - public static defaultProps: Partial = { + public static defaultProps: DefaultProps = { buttonColor: ComponentColor.Default, buttonSize: ComponentSize.Small, status: ComponentStatus.Default, - wrapText: false, maxMenuHeight: 250, menuColor: DropdownMenuColors.Sapphire, mode: DropdownMode.Radio, titleText: '', + wrapMenuText: false, + testID: 'dropdown', + buttonTestID: 'dropdown-button', } public static Button = DropdownButton @@ -103,16 +116,17 @@ class Dropdown extends Component { buttonColor, buttonSize, status, - wrapText, customClass, mode, + wrapMenuText, } = this.props return classnames( `dropdown dropdown-${buttonSize} dropdown-${buttonColor}`, { disabled: status === ComponentStatus.Disabled, - 'dropdown-wrap': wrapText, + 'dropdown-wrap': wrapMenuText, + 'dropdown-truncate': !wrapMenuText, [customClass]: customClass, [`dropdown--${mode}`]: mode, } @@ -171,6 +185,8 @@ class Dropdown extends Component { const { selectedID, maxMenuHeight, + widthPixels, + menuWidthPixels, menuHeader, menuColor, children, @@ -183,15 +199,32 @@ class Dropdown extends Component { return null } + 
let width = '100%' + + if (widthPixels) { + width = `${widthPixels}px` + } + + if (menuWidthPixels) { + width = `${menuWidthPixels}px` + } + + const {start, stop} = this.thumbColorsFromTheme + return (
-
{ } })}
-
+
) } - private get menuStyle(): CSSProperties { - const {wrapText, widthPixels} = this.props + private get thumbColorsFromTheme(): ThumbColors { + const {menuColor} = this.props - let containerWidth = '100%' - - if (widthPixels) { - containerWidth = `${widthPixels}px` - } - - if (wrapText && widthPixels) { - return { - width: containerWidth, - } - } - - return { - minWidth: containerWidth, + switch (menuColor) { + case DropdownMenuColors.Amethyst: + case DropdownMenuColors.Sapphire: + return { + start: '#BEF0FF', + stop: '#6BDFFF', + } + case DropdownMenuColors.Malachite: + return { + start: '#BEF0FF', + stop: '#A5F3B4', + } + default: + case DropdownMenuColors.Onyx: + return { + start: '#22ADF6', + stop: '#9394FF', + } } } diff --git a/ui/src/clockface/components/dropdowns/DropdownButton.tsx b/ui/src/clockface/components/dropdowns/DropdownButton.tsx index bc5afb64c5..d3165ba793 100644 --- a/ui/src/clockface/components/dropdowns/DropdownButton.tsx +++ b/ui/src/clockface/components/dropdowns/DropdownButton.tsx @@ -17,18 +17,18 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children: DropdownChild onClick: (e: MouseEvent) => void - status?: ComponentStatus - active?: boolean - color?: ComponentColor - size?: ComponentSize + status: ComponentStatus + color: ComponentColor + size: ComponentSize + active: boolean icon?: IconFont title?: string - testID: string + testID?: string } @ErrorHandling class DropdownButton extends Component { - public static defaultProps: Partial = { + public static defaultProps = { color: ComponentColor.Default, size: ComponentSize.Small, status: ComponentStatus.Default, diff --git a/ui/src/clockface/components/dropdowns/DropdownDivider.tsx b/ui/src/clockface/components/dropdowns/DropdownDivider.tsx index 7c1af86110..1fd90115b6 100644 --- a/ui/src/clockface/components/dropdowns/DropdownDivider.tsx +++ b/ui/src/clockface/components/dropdowns/DropdownDivider.tsx @@ -8,14 +8,14 @@ import {DropdownChild} from 'src/clockface/types' import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { - children?: DropdownChild id: string - text?: string + text: string + children?: DropdownChild } @ErrorHandling class DropdownDivider extends Component { - public static defaultProps: Partial = { + public static defaultProps = { text: '', } diff --git a/ui/src/clockface/components/dropdowns/DropdownItem.tsx b/ui/src/clockface/components/dropdowns/DropdownItem.tsx index dc423d3d90..94a6fd2db7 100644 --- a/ui/src/clockface/components/dropdowns/DropdownItem.tsx +++ b/ui/src/clockface/components/dropdowns/DropdownItem.tsx @@ -11,15 +11,15 @@ interface Props { id: string children: DropdownChild value: any - selected?: boolean - checkbox?: boolean + selected: boolean + checkbox: boolean onClick?: (value: any) => void testID?: string } @ErrorHandling class DropdownItem extends Component { - public static defaultProps: Partial = { + public static defaultProps = { checkbox: false, selected: false, } diff --git a/ui/src/clockface/components/dropdowns/MultiSelectDropdown.tsx b/ui/src/clockface/components/dropdowns/MultiSelectDropdown.tsx index 203836b6a5..5357b050b8 100644 --- a/ui/src/clockface/components/dropdowns/MultiSelectDropdown.tsx +++ b/ui/src/clockface/components/dropdowns/MultiSelectDropdown.tsx @@ -24,19 +24,19 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children: JSX.Element[] onChange: (selectedIDs: string[], value: any) => void - onCollapse?: () => void selectedIDs: string[] - buttonColor?: 
ComponentColor - buttonSize?: ComponentSize - menuColor?: DropdownMenuColors + buttonColor: ComponentColor + buttonSize: ComponentSize + menuColor: DropdownMenuColors + wrapText: boolean + maxMenuHeight: number + emptyText: string + separatorText: string + customClass?: string + onCollapse?: () => void status?: ComponentStatus widthPixels?: number icon?: IconFont - wrapText?: boolean - customClass?: string - maxMenuHeight?: number - emptyText?: string - separatorText?: string } interface State { @@ -45,7 +45,7 @@ interface State { @ErrorHandling class MultiSelectDropdown extends Component { - public static defaultProps: Partial = { + public static defaultProps = { buttonColor: ComponentColor.Default, buttonSize: ComponentSize.Small, status: ComponentStatus.Default, diff --git a/ui/src/clockface/components/dropdowns/test/__snapshots__/MultiSelectDropdown.test.tsx.snap b/ui/src/clockface/components/dropdowns/test/__snapshots__/MultiSelectDropdown.test.tsx.snap index 7f62d707d2..7b8081ab21 100644 --- a/ui/src/clockface/components/dropdowns/test/__snapshots__/MultiSelectDropdown.test.tsx.snap +++ b/ui/src/clockface/components/dropdowns/test/__snapshots__/MultiSelectDropdown.test.tsx.snap @@ -57,6 +57,7 @@ exports[`MultiSelectDropdown with menu expanded matches snapshot 1`] = ` { - public static defaultProps: DefaultProps = { + public static defaultProps = { size: ComponentSize.Small, testID: 'empty-state', } diff --git a/ui/src/clockface/components/form_layout/Form.tsx b/ui/src/clockface/components/form_layout/Form.tsx index 0a3a227325..d3990b22b4 100644 --- a/ui/src/clockface/components/form_layout/Form.tsx +++ b/ui/src/clockface/components/form_layout/Form.tsx @@ -11,17 +11,13 @@ import FormFooter from 'src/clockface/components/form_layout/FormFooter' import {ErrorHandling} from 'src/shared/decorators/errors' -interface PassedProps { +interface Props { children: JSX.Element[] | JSX.Element style?: React.CSSProperties className?: string onSubmit?: (e: React.FormEvent) => void + testID: string } -interface DefaultProps { - testID?: string -} - -type Props = PassedProps & DefaultProps interface BoxProps { children: JSX.Element | JSX.Element[] @@ -36,7 +32,7 @@ class Form extends Component { public static Divider = FormDivider public static Footer = FormFooter - public static defaultProps: DefaultProps = { + public static defaultProps = { testID: 'form-container', } diff --git a/ui/src/clockface/components/form_layout/FormFooter.tsx b/ui/src/clockface/components/form_layout/FormFooter.tsx index 4d63340a93..43a2c47209 100644 --- a/ui/src/clockface/components/form_layout/FormFooter.tsx +++ b/ui/src/clockface/components/form_layout/FormFooter.tsx @@ -9,7 +9,7 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children: JSX.Element | JSX.Element[] - colsXS?: Columns + colsXS: Columns colsSM?: Columns colsMD?: Columns colsLG?: Columns @@ -21,7 +21,7 @@ interface Props { @ErrorHandling class FormFooter extends Component { - public static defaultProps: Partial = { + public static defaultProps = { colsXS: Columns.Twelve, } diff --git a/ui/src/clockface/components/grid_layout/GridColumn.tsx b/ui/src/clockface/components/grid_layout/GridColumn.tsx index aa3289fe12..186fede750 100644 --- a/ui/src/clockface/components/grid_layout/GridColumn.tsx +++ b/ui/src/clockface/components/grid_layout/GridColumn.tsx @@ -7,7 +7,7 @@ import {Columns} from 'src/clockface/types' interface Props { children: JSX.Element[] | JSX.Element - widthXS?: Columns + widthXS: Columns widthSM?: Columns 
widthMD?: Columns widthLG?: Columns @@ -18,7 +18,7 @@ interface Props { } class GridColumn extends Component { - public static defaultProps: Partial = { + public static defaultProps = { widthXS: Columns.Twelve, } diff --git a/ui/src/clockface/components/grid_sizer/GridSizer.tsx b/ui/src/clockface/components/grid_sizer/GridSizer.tsx index 2fac0abfdf..e07b12339d 100644 --- a/ui/src/clockface/components/grid_sizer/GridSizer.tsx +++ b/ui/src/clockface/components/grid_sizer/GridSizer.tsx @@ -7,10 +7,10 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children?: JSX.Element[] - cellWidth?: number - recalculateFlag?: string - width?: number - wait?: number + cellWidth: number + recalculateFlag: string + width: number + wait: number } interface State { @@ -22,7 +22,7 @@ interface State { } @ErrorHandling class GridSizer extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { cellWidth: 150, recalculateFlag: '', width: null, diff --git a/ui/src/clockface/components/grid_sizer/ResponsiveGridSizer.tsx b/ui/src/clockface/components/grid_sizer/ResponsiveGridSizer.tsx index 254238a690..52d5c38fc5 100644 --- a/ui/src/clockface/components/grid_sizer/ResponsiveGridSizer.tsx +++ b/ui/src/clockface/components/grid_sizer/ResponsiveGridSizer.tsx @@ -8,20 +8,15 @@ import {ErrorHandling} from 'src/shared/decorators/errors' // Styles import 'src/clockface/components/grid_sizer/ResponsiveGridSizer.scss' -interface PassedProps { +interface Props { children: JSX.Element[] columns: number + gutter: number } -interface DefaultProps { - gutter?: number -} - -type Props = PassedProps & DefaultProps - @ErrorHandling class ResponsiveGridSizer extends PureComponent { - public static defaultProps: DefaultProps = { + public static defaultProps = { gutter: 4, } diff --git a/ui/src/clockface/components/index_views/IndexListRow.tsx b/ui/src/clockface/components/index_views/IndexListRow.tsx index f9e1436b24..f211dee8e0 100644 --- a/ui/src/clockface/components/index_views/IndexListRow.tsx +++ b/ui/src/clockface/components/index_views/IndexListRow.tsx @@ -6,7 +6,7 @@ import classnames from 'classnames' import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { - disabled?: boolean + disabled: boolean children: JSX.Element[] | JSX.Element customClass?: string testID: string @@ -14,7 +14,7 @@ interface Props { @ErrorHandling class IndexListRow extends Component { - public static defaultProps: Partial = { + public static defaultProps = { disabled: false, testID: 'table-row', } diff --git a/ui/src/clockface/components/index_views/IndexListRowCell.tsx b/ui/src/clockface/components/index_views/IndexListRowCell.tsx index 3968e3b25b..c406597425 100644 --- a/ui/src/clockface/components/index_views/IndexListRowCell.tsx +++ b/ui/src/clockface/components/index_views/IndexListRowCell.tsx @@ -10,14 +10,14 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children: any - alignment?: Alignment - revealOnHover?: boolean - testID?: string + alignment: Alignment + revealOnHover: boolean + testID: string } @ErrorHandling class IndexListRowCell extends Component { - public static defaultProps: Partial = { + public static defaultProps = { alignment: Alignment.Left, revealOnHover: false, testID: 'table-cell', diff --git a/ui/src/clockface/components/inputs/TextArea.tsx b/ui/src/clockface/components/inputs/TextArea.tsx index 5770bcadbf..b640ca0144 100644 --- a/ui/src/clockface/components/inputs/TextArea.tsx +++ 
b/ui/src/clockface/components/inputs/TextArea.tsx @@ -33,24 +33,25 @@ export enum Wrap { } interface Props { - autocapitalize?: AutoCapitalize - autocomplete?: AutoComplete - autofocus?: boolean - cols?: number - disabled?: boolean - form?: string - maxlength?: number - minlength?: number - name?: string - placeholder?: string - readOnly?: boolean - required?: boolean - rows?: number - spellCheck?: boolean - wrap?: Wrap.Off + autocapitalize: AutoCapitalize + autocomplete: AutoComplete + autofocus: boolean + cols: number + disabled: boolean + form: string + maxlength: number + minlength: number + name: string + placeholder: string + readOnly: boolean + required: boolean + rows: number + spellCheck: boolean + wrap: Wrap.Off widthPixels?: number size?: ComponentSize status?: ComponentStatus + value: string customClass?: string onChange?: (s: string) => void onBlur?: (e?: ChangeEvent) => void @@ -58,11 +59,10 @@ interface Props { onKeyPress?: (e: KeyboardEvent) => void onKeyUp?: (e: KeyboardEvent) => void onKeyDown?: (e: KeyboardEvent) => void - value?: string } class TextArea extends Component { - public static defaultProps: Partial = { + public static defaultProps = { autocapitalize: AutoCapitalize.Off, autocomplete: AutoComplete.Off, autofocus: false, diff --git a/ui/src/clockface/components/label/Label.tsx b/ui/src/clockface/components/label/Label.tsx index 28a9d56ad3..c4b6ae017b 100644 --- a/ui/src/clockface/components/label/Label.tsx +++ b/ui/src/clockface/components/label/Label.tsx @@ -11,29 +11,24 @@ import './Label.scss' import {ErrorHandling} from 'src/shared/decorators/errors' -interface PassedProps { +interface Props { id: string name: string description: string colorHex: string onClick?: (id: string) => void onDelete?: (id: string) => void -} - -interface DefaultProps { - size?: ComponentSize - testID?: string + size: ComponentSize + testID: string } interface State { isMouseOver: boolean } -type Props = PassedProps & DefaultProps - @ErrorHandling class Label extends Component { - public static defaultProps: DefaultProps = { + public static defaultProps = { size: ComponentSize.ExtraSmall, testID: 'label--pill', } diff --git a/ui/src/clockface/components/overlays/Overlay.tsx b/ui/src/clockface/components/overlays/Overlay.tsx index 32196055af..170dcb504e 100644 --- a/ui/src/clockface/components/overlays/Overlay.tsx +++ b/ui/src/clockface/components/overlays/Overlay.tsx @@ -7,7 +7,7 @@ import OverlayContainer from 'src/clockface/components/overlays/OverlayContainer import OverlayHeading from 'src/clockface/components/overlays/OverlayHeading' import OverlayBody from 'src/clockface/components/overlays/OverlayBody' import OverlayFooter from 'src/clockface/components/overlays/OverlayFooter' -import FancyScrollbar from 'src/shared/components/fancy_scrollbar/FancyScrollbar' +import DapperScrollbars from 'src/shared/components/dapperScrollbars/DapperScrollbars' // Styles import 'src/clockface/components/overlays/Overlay.scss' @@ -59,15 +59,16 @@ class Overlay extends Component { public render() { return ( - {this.childContainer}
- + ) } diff --git a/ui/src/clockface/components/overlays/OverlayContainer.tsx b/ui/src/clockface/components/overlays/OverlayContainer.tsx index fef0e63a34..42ba9262b1 100644 --- a/ui/src/clockface/components/overlays/OverlayContainer.tsx +++ b/ui/src/clockface/components/overlays/OverlayContainer.tsx @@ -3,12 +3,12 @@ import classnames from 'classnames' interface Props { children: ReactNode - maxWidth?: number + maxWidth: number customClass?: string } class OverlayContainer extends Component { - public static defaultProps: Partial = { + public static defaultProps = { maxWidth: 800, } diff --git a/ui/src/clockface/components/radio_buttons/RadioButton.tsx b/ui/src/clockface/components/radio_buttons/RadioButton.tsx index 1ba057158b..3e97267baa 100644 --- a/ui/src/clockface/components/radio_buttons/RadioButton.tsx +++ b/ui/src/clockface/components/radio_buttons/RadioButton.tsx @@ -11,17 +11,18 @@ interface Props { value: any children: JSX.Element | string | number onClick: (value: any) => void - disabled?: boolean + disabled: boolean titleText: string - disabledTitleText?: string - testID?: string + disabledTitleText: string + testID: string } @ErrorHandling class RadioButton extends Component { - public static defaultProps: Partial = { + public static defaultProps = { disabled: false, disabledTitleText: 'This option is disabled', + titleText: '', testID: 'radio-button', } diff --git a/ui/src/clockface/components/radio_buttons/RadioButtons.tsx b/ui/src/clockface/components/radio_buttons/RadioButtons.tsx index 74c57fffbf..0d7215a7c4 100644 --- a/ui/src/clockface/components/radio_buttons/RadioButtons.tsx +++ b/ui/src/clockface/components/radio_buttons/RadioButtons.tsx @@ -17,14 +17,14 @@ import './RadioButtons.scss' interface Props { children: JSX.Element[] customClass?: string - color?: ComponentColor - size?: ComponentSize - shape?: ButtonShape + color: ComponentColor + size: ComponentSize + shape: ButtonShape } @ErrorHandling class Radio extends Component { - public static defaultProps: Partial = { + public static defaultProps = { color: ComponentColor.Primary, size: ComponentSize.Small, shape: ButtonShape.Default, diff --git a/ui/src/clockface/components/resource_list/ResourceCard.tsx b/ui/src/clockface/components/resource_list/ResourceCard.tsx index a265783951..79bff5b883 100644 --- a/ui/src/clockface/components/resource_list/ResourceCard.tsx +++ b/ui/src/clockface/components/resource_list/ResourceCard.tsx @@ -7,27 +7,22 @@ import classnames from 'classnames' // Constants import {UPDATED_AT_TIME_FORMAT} from 'src/dashboards/constants' -interface PassedProps { +interface Props { name: () => JSX.Element updatedAt?: string owner?: {id: string; name: string} children?: JSX.Element[] | JSX.Element disabled?: boolean + testID: string + description: () => JSX.Element + labels: () => JSX.Element + metaData: () => JSX.Element[] + contextMenu: () => JSX.Element + toggle: () => JSX.Element } -interface DefaultProps { - testID?: string - description?: () => JSX.Element - labels?: () => JSX.Element - metaData?: () => JSX.Element[] - contextMenu?: () => JSX.Element - toggle?: () => JSX.Element -} - -type Props = PassedProps & DefaultProps - export default class ResourceListCard extends PureComponent { - public static defaultProps: DefaultProps = { + public static defaultProps = { testID: 'resource-card', description: () => null, labels: () => null, diff --git a/ui/src/clockface/components/resource_list/ResourceName.tsx b/ui/src/clockface/components/resource_list/ResourceName.tsx index 
1fc343e59b..56498a4c96 100644 --- a/ui/src/clockface/components/resource_list/ResourceName.tsx +++ b/ui/src/clockface/components/resource_list/ResourceName.tsx @@ -16,23 +16,18 @@ import {ErrorHandling} from 'src/shared/decorators/errors' // Styles import 'src/clockface/components/resource_list/ResourceName.scss' -interface PassedProps { +interface Props { onUpdate: (name: string) => void name: string onClick?: (e: MouseEvent) => void placeholder?: string noNameString: string + parentTestID: string + buttonTestID: string + inputTestID: string + hrefValue: string } -interface DefaultProps { - parentTestID?: string - buttonTestID?: string - inputTestID?: string - hrefValue?: string -} - -type Props = PassedProps & DefaultProps - interface State { isEditing: boolean workingName: string @@ -41,7 +36,7 @@ interface State { @ErrorHandling class ResourceName extends Component { - public static defaultProps: DefaultProps = { + public static defaultProps = { parentTestID: 'resource-name', buttonTestID: 'resource-name--button', inputTestID: 'resource-name--input', diff --git a/ui/src/clockface/components/wizard/WizardOverlay.tsx b/ui/src/clockface/components/wizard/WizardOverlay.tsx index 412ce896c7..0b63f4b169 100644 --- a/ui/src/clockface/components/wizard/WizardOverlay.tsx +++ b/ui/src/clockface/components/wizard/WizardOverlay.tsx @@ -10,13 +10,13 @@ interface Props { children: any visible: boolean title: string - maxWidth?: number + maxWidth: number onDismiss: () => void } @ErrorHandling class WizardOverlay extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { maxWidth: 1200, } diff --git a/ui/src/configuration/components/ConfigurationPage.tsx b/ui/src/configuration/components/ConfigurationPage.tsx index 21fb1bbe17..84295f74fb 100644 --- a/ui/src/configuration/components/ConfigurationPage.tsx +++ b/ui/src/configuration/components/ConfigurationPage.tsx @@ -18,6 +18,8 @@ import Scrapers from 'src/configuration/components/Scrapers' // Decorators import {ErrorHandling} from 'src/shared/decorators/errors' +import CloudExclude from 'src/shared/components/cloud/CloudExclude' +import CloudOnly from 'src/shared/components/cloud/CloudOnly' interface OwnProps { activeTabUrl: string @@ -43,66 +45,90 @@ class ConfigurationPage extends Component {
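The ConfigurationPage render() hunk did not survive above; only the new CloudExclude and CloudOnly imports are visible. A rough sketch of how those wrappers are typically applied to a settings tab follows; the component and class names are placeholders, and only the import paths come from the diff.

    // Hypothetical sketch: the real tab markup was lost, so the elements below are stand-ins.
    import React, {SFC} from 'react'
    import CloudExclude from 'src/shared/components/cloud/CloudExclude'
    import CloudOnly from 'src/shared/components/cloud/CloudOnly'

    const TokensTabSketch: SFC = () => (
      <>
        <CloudExclude>
          {/* rendered only outside the cloud build */}
          <div className="tokens-tab" />
        </CloudExclude>
        <CloudOnly>
          {/* rendered only in the cloud build */}
          <div className="tokens-tab--cloud" />
        </CloudOnly>
      </>
    )

    export default TokensTabSketch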
diff --git a/ui/src/configuration/components/GetResources.tsx b/ui/src/configuration/components/GetResources.tsx index 9ce33e964d..3b775ae53c 100644 --- a/ui/src/configuration/components/GetResources.tsx +++ b/ui/src/configuration/components/GetResources.tsx @@ -9,6 +9,7 @@ import {getBuckets} from 'src/buckets/actions' import {getTelegrafs} from 'src/telegrafs/actions' import {getVariables} from 'src/variables/actions' import {getScrapers} from 'src/scrapers/actions' +import {getDashboardsAsync} from 'src/dashboards/actions' // Types import {AppState} from 'src/types' @@ -24,6 +25,7 @@ import {TechnoSpinner, SpinnerContainer} from '@influxdata/clockface' import {getAuthorizations} from 'src/authorizations/actions' import {AuthorizationsState} from 'src/authorizations/reducers' import {VariablesState} from 'src/variables/reducers' +import {DashboardsState} from 'src/dashboards/reducers/dashboards' interface StateProps { org: Organization @@ -33,6 +35,7 @@ interface StateProps { variables: VariablesState scrapers: ScrapersState tokens: AuthorizationsState + dashboards: DashboardsState } interface DispatchProps { @@ -42,6 +45,7 @@ interface DispatchProps { getVariables: typeof getVariables getScrapers: typeof getScrapers getAuthorizations: typeof getAuthorizations + getDashboards: typeof getDashboardsAsync } interface PassedProps { @@ -57,12 +61,17 @@ export enum ResourceTypes { Variables = 'variables', Authorizations = 'tokens', Scrapers = 'scrapers', + Dashboards = 'dashboards', } @ErrorHandling class GetResources extends PureComponent { public async componentDidMount() { switch (this.props.resource) { + case ResourceTypes.Dashboards: { + return await this.props.getDashboards() + } + case ResourceTypes.Labels: { return await this.props.getLabels() } @@ -115,6 +124,7 @@ const mstp = ({ variables, scrapers, tokens, + dashboards, }: AppState): StateProps => { const org = orgs[0] @@ -122,6 +132,7 @@ const mstp = ({ labels, buckets, telegrafs, + dashboards, variables, scrapers, tokens, @@ -136,6 +147,7 @@ const mdtp = { getVariables: getVariables, getScrapers: getScrapers, getAuthorizations: getAuthorizations, + getDashboards: getDashboardsAsync, } export default connect( diff --git a/ui/src/dashboards/actions/index.ts b/ui/src/dashboards/actions/index.ts index 91428c0b83..d628d9b3f8 100644 --- a/ui/src/dashboards/actions/index.ts +++ b/ui/src/dashboards/actions/index.ts @@ -48,6 +48,7 @@ import { } from 'src/dashboards/utils/cellGetters' import {dashboardToTemplate} from 'src/shared/utils/resourceToTemplate' import {client} from 'src/utils/api' +import {exportVariables} from 'src/variables/utils/hydrateVars' // Constants import * as copy from 'src/shared/copy/notifications' @@ -66,22 +67,22 @@ import { } from 'src/types' export enum ActionTypes { - LoadDashboards = 'LOAD_DASHBOARDS', - LoadDashboard = 'LOAD_DASHBOARD', - DeleteDashboard = 'DELETE_DASHBOARD', + SetDashboards = 'SET_DASHBOARDS', + SetDashboard = 'SET_DASHBOARD', + RemoveDashboard = 'REMOVE_DASHBOARD', DeleteDashboardFailed = 'DELETE_DASHBOARD_FAILED', - UpdateDashboard = 'UPDATE_DASHBOARD', - DeleteCell = 'DELETE_CELL', + EditDashboard = 'EDIT_DASHBOARD', + RemoveCell = 'REMOVE_CELL', AddDashboardLabels = 'ADD_DASHBOARD_LABELS', RemoveDashboardLabels = 'REMOVE_DASHBOARD_LABELS', } export type Action = - | LoadDashboardsAction - | DeleteDashboardAction - | LoadDashboardAction - | UpdateDashboardAction - | DeleteCellAction + | SetDashboardsAction + | RemoveDashboardAction + | SetDashboardAction + | EditDashboardAction + | 
RemoveCellAction | PublishNotificationAction | SetViewAction | DeleteTimeRangeAction @@ -89,32 +90,33 @@ export type Action = | AddDashboardLabelsAction | RemoveDashboardLabelsAction -interface DeleteCellAction { - type: ActionTypes.DeleteCell +interface RemoveCellAction { + type: ActionTypes.RemoveCell payload: { dashboard: Dashboard cell: Cell } } -interface UpdateDashboardAction { - type: ActionTypes.UpdateDashboard +interface EditDashboardAction { + type: ActionTypes.EditDashboard payload: { dashboard: Dashboard } } -interface LoadDashboardsAction { - type: ActionTypes.LoadDashboards +interface SetDashboardsAction { + type: ActionTypes.SetDashboards payload: { - dashboards: Dashboard[] + status: RemoteDataState + list: Dashboard[] } } -interface DeleteDashboardAction { - type: ActionTypes.DeleteDashboard +interface RemoveDashboardAction { + type: ActionTypes.RemoveDashboard payload: { - dashboardID: string + id: string } } @@ -125,8 +127,8 @@ interface DeleteDashboardFailedAction { } } -interface LoadDashboardAction { - type: ActionTypes.LoadDashboard +interface SetDashboardAction { + type: ActionTypes.SetDashboard payload: { dashboard: Dashboard } @@ -150,32 +152,30 @@ interface RemoveDashboardLabelsAction { // Action Creators -export const updateDashboard = ( - dashboard: Dashboard -): UpdateDashboardAction => ({ - type: ActionTypes.UpdateDashboard, +export const editDashboard = (dashboard: Dashboard): EditDashboardAction => ({ + type: ActionTypes.EditDashboard, payload: {dashboard}, }) -export const loadDashboards = ( - dashboards: Dashboard[] -): LoadDashboardsAction => ({ - type: ActionTypes.LoadDashboards, +export const setDashboards = ( + status: RemoteDataState, + list?: Dashboard[] +): SetDashboardsAction => ({ + type: ActionTypes.SetDashboards, payload: { - dashboards, + status, + list, }, }) -export const loadDashboard = (dashboard: Dashboard): LoadDashboardAction => ({ - type: ActionTypes.LoadDashboard, +export const setDashboard = (dashboard: Dashboard): SetDashboardAction => ({ + type: ActionTypes.SetDashboard, payload: {dashboard}, }) -export const deleteDashboard = ( - dashboardID: string -): DeleteDashboardAction => ({ - type: ActionTypes.DeleteDashboard, - payload: {dashboardID}, +export const removeDashboard = (id: string): RemoveDashboardAction => ({ + type: ActionTypes.RemoveDashboard, + payload: {id}, }) export const deleteDashboardFailed = ( @@ -185,11 +185,11 @@ export const deleteDashboardFailed = ( payload: {dashboard}, }) -export const deleteCell = ( +export const removeCell = ( dashboard: Dashboard, cell: Cell -): DeleteCellAction => ({ - type: ActionTypes.DeleteCell, +): RemoveCellAction => ({ + type: ActionTypes.RemoveCell, payload: {dashboard, cell}, }) @@ -215,10 +215,12 @@ export const getDashboardsAsync = () => async ( dispatch: Dispatch ): Promise => { try { + dispatch(setDashboards(RemoteDataState.Loading)) const dashboards = await getDashboardsAJAX() - dispatch(loadDashboards(dashboards)) + dispatch(setDashboards(RemoteDataState.Done, dashboards)) return dashboards } catch (error) { + dispatch(setDashboards(RemoteDataState.Error)) console.error(error) throw error } @@ -244,9 +246,10 @@ export const importDashboardAsync = (dashboard: Dashboard) => async ( await createDashboardAJAX(dashboard) const dashboards = await getDashboardsAJAX() - dispatch(loadDashboards(dashboards)) + dispatch(setDashboards(RemoteDataState.Done, dashboards)) dispatch(notify(copy.dashboardImported())) } catch (error) { + dispatch(setDashboards(RemoteDataState.Error)) 
dispatch(notify(copy.dashboardImportFailed('Could not upload dashboard'))) console.error(error) } @@ -255,7 +258,7 @@ export const importDashboardAsync = (dashboard: Dashboard) => async ( export const deleteDashboardAsync = (dashboard: Dashboard) => async ( dispatch: Dispatch ): Promise => { - dispatch(deleteDashboard(dashboard.id)) + dispatch(removeDashboard(dashboard.id)) dispatch(deleteTimeRange(dashboard.id)) try { @@ -303,7 +306,7 @@ export const getDashboardAsync = (dashboardID: string) => async ( await dispatch(refreshDashboardVariableValues(dashboard, views)) // Now that all the necessary state has been loaded, set the dashboard - dispatch(loadDashboard(dashboard)) + dispatch(setDashboard(dashboard)) } catch { dispatch(replace(`/dashboards`)) dispatch(notify(copy.dashboardGetFailed(dashboardID))) @@ -319,7 +322,7 @@ export const updateDashboardAsync = (dashboard: Dashboard) => async ( ): Promise => { try { const updatedDashboard = await updateDashboardAJAX(dashboard) - dispatch(updateDashboard(updatedDashboard)) + dispatch(editDashboard(updatedDashboard)) } catch (error) { console.error(error) dispatch(notify(copy.dashboardUpdateFailed())) @@ -354,7 +357,7 @@ export const createCellWithView = ( await dispatch(refreshDashboardVariableValues(dashboard, views)) dispatch(setView(createdCell.id, newView, RemoteDataState.Done)) - dispatch(updateDashboard(updatedDashboard)) + dispatch(editDashboard(updatedDashboard)) } catch { notify(copy.cellAddFailed()) } @@ -393,7 +396,7 @@ export const updateCellsAsync = (dashboard: Dashboard, cells: Cell[]) => async ( cells: updatedCells, } - dispatch(loadDashboard(updatedDashboard)) + dispatch(setDashboard(updatedDashboard)) } catch (error) { console.error(error) } @@ -413,7 +416,7 @@ export const deleteCellAsync = (dashboard: Dashboard, cell: Cell) => async ( dispatch(refreshDashboardVariableValues(dashboard, views)), ]) - dispatch(deleteCell(dashboard, cell)) + dispatch(removeCell(dashboard, cell)) dispatch(notify(copy.cellDeleted())) } catch (error) { console.error(error) @@ -431,7 +434,7 @@ export const copyDashboardCellAsync = ( cells: [...dashboard.cells, clonedCell], } - dispatch(loadDashboard(updatedDashboard)) + dispatch(setDashboard(updatedDashboard)) dispatch(notify(copy.cellAdded())) } catch (error) { console.error(error) @@ -471,7 +474,7 @@ export const selectVariableValue = ( value: string ) => async (dispatch, getState: GetState): Promise => { const variables = getHydratedVariables(getState(), dashboardID) - const dashboard = getState().dashboards.find(d => d.id === dashboardID) + const dashboard = getState().dashboards.list.find(d => d.id === dashboardID) dispatch(selectValue(dashboardID, variableID, value)) @@ -493,7 +496,12 @@ export const convertToTemplate = (dashboardID: string) => async ( const views = await Promise.all(pendingViews) const allVariables = await client.variables.getAll() const variables = filterUnusedVars(allVariables, views) - const dashboardTemplate = dashboardToTemplate(dashboard, views, variables) + const exportedVariables = exportVariables(variables, allVariables) + const dashboardTemplate = dashboardToTemplate( + dashboard, + views, + exportedVariables + ) const orgID = dashboard.orgID // TODO remove when org is implicit app state diff --git a/ui/src/dashboards/actions/notes.ts b/ui/src/dashboards/actions/notes.ts index 3a8d4cb0f1..ff20f04dfc 100644 --- a/ui/src/dashboards/actions/notes.ts +++ b/ui/src/dashboards/actions/notes.ts @@ -65,7 +65,7 @@ export const createNoteCell = (dashboardID: string) => async 
( dispatch: Dispatch, getState: GetState ) => { - const dashboard = getState().dashboards.find(d => d.id === dashboardID) + const dashboard = getState().dashboards.list.find(d => d.id === dashboardID) if (!dashboard) { throw new Error(`could not find dashboard with id "${dashboardID}"`) diff --git a/ui/src/dashboards/components/DashboardHeader.tsx b/ui/src/dashboards/components/DashboardHeader.tsx index 822eeb7e19..6c6ee298cf 100644 --- a/ui/src/dashboards/components/DashboardHeader.tsx +++ b/ui/src/dashboards/components/DashboardHeader.tsx @@ -25,11 +25,7 @@ import * as AppActions from 'src/types/actions/app' import * as QueriesModels from 'src/types/queries' import {Dashboard} from '@influxdata/influx' -interface DefaultProps { - zoomedTimeRange: QueriesModels.TimeRange -} - -interface Props extends DefaultProps { +interface Props { activeDashboard: string dashboard: Dashboard timeRange: QueriesModels.TimeRange @@ -44,10 +40,11 @@ interface Props extends DefaultProps { isShowingVariablesControlBar: boolean isHidden: boolean onAddNote: () => void + zoomedTimeRange: QueriesModels.TimeRange } export default class DashboardHeader extends Component { - public static defaultProps: DefaultProps = { + public static defaultProps = { zoomedTimeRange: { upper: null, lower: null, diff --git a/ui/src/dashboards/components/DashboardPage.tsx b/ui/src/dashboards/components/DashboardPage.tsx index 060d9044c8..f50f79cde3 100644 --- a/ui/src/dashboards/components/DashboardPage.tsx +++ b/ui/src/dashboards/components/DashboardPage.tsx @@ -312,7 +312,7 @@ const mstp = (state: AppState, {params: {dashboardID}}): StateProps => { const timeRange = ranges.find(r => r.dashboardID === dashboardID) || DEFAULT_TIME_RANGE - const dashboard = dashboards.find(d => d.id === dashboardID) + const dashboard = dashboards.list.find(d => d.id === dashboardID) return { links, diff --git a/ui/src/dashboards/components/VEO.tsx b/ui/src/dashboards/components/VEO.tsx index 5218667b46..5fef374f31 100644 --- a/ui/src/dashboards/components/VEO.tsx +++ b/ui/src/dashboards/components/VEO.tsx @@ -48,13 +48,14 @@ class VEO extends PureComponent { } public componentDidUpdate() { - if ( - !this.state.hasActivatedTimeMachine && - this.props.viewsStatus === RemoteDataState.Done - ) { - this.props.onSetActiveTimeMachine(VEO_TIME_MACHINE_ID, { - view: this.props.view, - }) + const {view, onSetActiveTimeMachine} = this.props + const {hasActivatedTimeMachine} = this.state + + const timeMachineShouldActivate = + !hasActivatedTimeMachine && this.loading === RemoteDataState.Done + + if (timeMachineShouldActivate) { + onSetActiveTimeMachine(VEO_TIME_MACHINE_ID, {view}) this.setState({hasActivatedTimeMachine: true}) } } diff --git a/ui/src/dashboards/components/VEOContents.tsx b/ui/src/dashboards/components/VEOContents.tsx index c605fbedfe..2af566470a 100644 --- a/ui/src/dashboards/components/VEOContents.tsx +++ b/ui/src/dashboards/components/VEOContents.tsx @@ -105,7 +105,7 @@ class VEOContents extends PureComponent { const mstp = (state: AppState, {dashboardID}): StateProps => { const {dashboards} = state - const dashboard = dashboards.find(d => d.id === dashboardID) + const dashboard = dashboards.list.find(d => d.id === dashboardID) const {view, draftQueries} = getActiveTimeMachine(state) diff --git a/ui/src/dashboards/components/dashboard_index/DashboardCards.tsx b/ui/src/dashboards/components/dashboard_index/DashboardCards.tsx index 33378ac99d..38378aed0d 100644 --- a/ui/src/dashboards/components/dashboard_index/DashboardCards.tsx +++ 
b/ui/src/dashboards/components/dashboard_index/DashboardCards.tsx @@ -84,7 +84,7 @@ class DashboardCards extends PureComponent { const mstp = (state: AppState, props: OwnProps) => { return { - sortedIDs: getSortedResource(state.dashboards, props), + sortedIDs: getSortedResource(state.dashboards.list, props), } } diff --git a/ui/src/dashboards/components/dashboard_index/DashboardsIndex.tsx b/ui/src/dashboards/components/dashboard_index/DashboardsIndex.tsx index ff624f4859..97cd1681ab 100644 --- a/ui/src/dashboards/components/dashboard_index/DashboardsIndex.tsx +++ b/ui/src/dashboards/components/dashboard_index/DashboardsIndex.tsx @@ -22,6 +22,9 @@ import { } from 'src/dashboards/actions' import {retainRangesDashTimeV1 as retainRangesDashTimeV1Action} from 'src/dashboards/actions/ranges' import {notify as notifyAction} from 'src/shared/actions/notifications' +import GetResources, { + ResourceTypes, +} from 'src/configuration/components/GetResources' // Constants import {DEFAULT_DASHBOARD_NAME} from 'src/dashboards/constants/index' @@ -71,8 +74,8 @@ class DashboardIndex extends PureComponent { } public async componentDidMount() { - const {handleGetDashboards, dashboards} = this.props - await handleGetDashboards() + const {dashboards} = this.props + const dashboardIDs = dashboards.map(d => d.id) this.props.retainRangesDashTimeV1(dashboardIDs) } @@ -98,25 +101,27 @@ class DashboardIndex extends PureComponent {
[DashboardsIndex render hunk, markup stripped: the contents element keeps its props (dashboards, onDeleteDashboard, onCreateDashboard, onCloneDashboard, onUpdateDashboard, notify, searchTerm, showOwnerColumn, onFilterChange, onImportDashboard), removed at the old indentation and re-added one level deeper inside a new wrapper.]
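The markup for the hunk above was stripped, but the surviving prop list plus the GetResources and ResourceTypes imports added to this file suggest the shape below. This is a sketch of the new render fragment, not the literal diff; DashboardsContents is an assumed name for whichever element actually carries these props.

    // Assumed reconstruction: the contents element is re-indented inside a new
    // <GetResources> wrapper, which now loads dashboards in place of the
    // handleGetDashboards() call removed from componentDidMount above.
    <GetResources resource={ResourceTypes.Dashboards}>
      <DashboardsContents
        dashboards={dashboards}
        onDeleteDashboard={this.handleDeleteDashboard}
        onCreateDashboard={this.handleCreateDashboard}
        onCloneDashboard={this.handleCloneDashboard}
        onUpdateDashboard={handleUpdateDashboard}
        notify={notify}
        searchTerm={searchTerm}
        showOwnerColumn={true}
        onFilterChange={this.handleFilterDashboards}
        onImportDashboard={this.summonImportOverlay}
      />
    </GetResources>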
@@ -174,7 +179,11 @@ class DashboardIndex extends PureComponent { } const mstp = (state: AppState): StateProps => { - const {dashboards, links, orgs} = state + const { + dashboards: {list: dashboards}, + links, + orgs, + } = state return { orgs, diff --git a/ui/src/dashboards/components/variablesControlBar/DraggableDropdown.tsx b/ui/src/dashboards/components/variablesControlBar/DraggableDropdown.tsx index c06e9f99a6..33233e4c50 100644 --- a/ui/src/dashboards/components/variablesControlBar/DraggableDropdown.tsx +++ b/ui/src/dashboards/components/variablesControlBar/DraggableDropdown.tsx @@ -82,7 +82,7 @@ class Dropdown extends React.Component< return connectDragSource( connectDropTarget( -
{/* TODO: Add variable description to title attribute when it is ready */}
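The element wrapping this comment was stripped from the DraggableDropdown hunk. Given the .variable-dropdown--container rule added in the stylesheet diff below, one plausible shape of the changed return value is sketched here; the exact element and class placement are assumptions.

    // Plausible sketch only: the real markup change is not recoverable from this hunk.
    return connectDragSource(
      connectDropTarget(
        <div className="variable-dropdown--container">
          {/* TODO: Add variable description to title attribute when it is ready */}
          <div className="variable-dropdown">{/* dropdown contents */}</div>
        </div>
      )
    )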
diff --git a/ui/src/dashboards/components/variablesControlBar/VariablesControlBar.scss b/ui/src/dashboards/components/variablesControlBar/VariablesControlBar.scss index 4ba2749597..69ca0ad954 100644 --- a/ui/src/dashboards/components/variablesControlBar/VariablesControlBar.scss +++ b/ui/src/dashboards/components/variablesControlBar/VariablesControlBar.scss @@ -21,7 +21,11 @@ $variables-control-bar--gutter: $ix-marg-a; } .variable-dropdown { - margin: 0 $variables-control-bar--gutter / 2; + margin-right: $variables-control-bar--gutter / 2; + } + + .variable-dropdown--container { + padding-bottom: 5px; } .variables-spinner-container { @@ -29,7 +33,6 @@ $variables-control-bar--gutter: $ix-marg-a; } } - .variables-control-bar--empty { background-color: $g3-castle; border-radius: $radius; @@ -40,3 +43,8 @@ $variables-control-bar--gutter: $ix-marg-a; .variables-control-bar > .techno-spinner { margin-left: 10px; } + +.variables-control-bar.presentation-mode { + padding: 8px 0px 0px 8px; + min-height: 0px; +} diff --git a/ui/src/dashboards/components/variablesControlBar/VariablesControlBar.tsx b/ui/src/dashboards/components/variablesControlBar/VariablesControlBar.tsx index 70342d9d80..07ec48c7af 100644 --- a/ui/src/dashboards/components/variablesControlBar/VariablesControlBar.tsx +++ b/ui/src/dashboards/components/variablesControlBar/VariablesControlBar.tsx @@ -4,6 +4,7 @@ import {connect} from 'react-redux' import {isEmpty} from 'lodash' import {DragDropContext} from 'react-dnd' import HTML5Backend from 'react-dnd-html5-backend' +import classnames from 'classnames' // Components import { @@ -43,6 +44,7 @@ interface StateProps { variables: Variable[] valuesStatus: RemoteDataState variablesStatus: RemoteDataState + inPresentationMode: boolean } interface DispatchProps { @@ -67,11 +69,17 @@ class VariablesControlBar extends PureComponent { ) { return {initialLoading: RemoteDataState.Done} } + + return {} } render() { return ( -
} @@ -143,7 +151,13 @@ const mstp = (state: AppState, props: OwnProps): StateProps => { const valuesStatus = getDashboardValuesStatus(state, props.dashboardID) const variablesStatus = getDashboardVariablesStatus(state) - return {variables, valuesStatus, variablesStatus} + const { + app: { + ephemeral: {inPresentationMode}, + }, + } = state + + return {variables, valuesStatus, variablesStatus, inPresentationMode} } export default DragDropContext(HTML5Backend)( diff --git a/ui/src/dashboards/constants/cellEditor.ts b/ui/src/dashboards/constants/cellEditor.ts index 7f987b0a59..1c5abcd2a3 100644 --- a/ui/src/dashboards/constants/cellEditor.ts +++ b/ui/src/dashboards/constants/cellEditor.ts @@ -1,7 +1,6 @@ import {DEFAULT_TABLE_OPTIONS} from 'src/dashboards/constants' import {stringifyColorValues} from 'src/shared/constants/colorOperations' -import {ViewType, Axis, Axes} from 'src/types/dashboards' -import {Color} from 'src/types/colors' +import {ViewType, Axis, Axes, Color, Base, Scale} from 'src/types' export const initializeOptions = (type: ViewType) => { switch (type) { @@ -13,10 +12,10 @@ export const initializeOptions = (type: ViewType) => { } export const AXES_SCALE_OPTIONS = { - LINEAR: 'linear', - LOG: 'log', - BASE_2: '2', - BASE_10: '10', + LINEAR: Scale.Linear, + LOG: Scale.Log, + BASE_2: Base.Two, + BASE_10: Base.Ten, } type DefaultAxis = Pick> @@ -31,7 +30,7 @@ export const DEFAULT_AXIS: DefaultAxis = { export const FULL_DEFAULT_AXIS: Axis = { ...DEFAULT_AXIS, - bounds: ['', ''], + bounds: ['', ''] as [string, string], } export const DEFAULT_AXES: Axes = { diff --git a/ui/src/dashboards/reducers/dashboards.test.ts b/ui/src/dashboards/reducers/dashboards.test.ts index 77e7604f0e..375a73fa9e 100644 --- a/ui/src/dashboards/reducers/dashboards.test.ts +++ b/ui/src/dashboards/reducers/dashboards.test.ts @@ -1,13 +1,13 @@ // Reducer -import reducer from 'src/dashboards/reducers/dashboards' +import {dashboardsReducer as reducer} from 'src/dashboards/reducers/dashboards' // Actions import { - loadDashboard, - loadDashboards, - deleteDashboard, - updateDashboard, - deleteCell, + setDashboard, + setDashboards, + removeDashboard, + editDashboard, + removeCell, addDashboardLabels, removeDashboardLabels, } from 'src/dashboards/actions/' @@ -15,48 +15,53 @@ import { // Resources import {dashboard} from 'src/dashboards/resources' import {labels} from 'mocks/dummyData' +import {RemoteDataState} from '@influxdata/clockface' + +const status = RemoteDataState.Done describe('dashboards reducer', () => { - it('can load the dashboards', () => { - const expected = [dashboard] - const actual = reducer([], loadDashboards(expected)) + it('can set the dashboards', () => { + const list = [dashboard] + + const expected = {status, list} + const actual = reducer(undefined, setDashboards(status, list)) expect(actual).toEqual(expected) }) - it('can delete a dashboard', () => { + it('can remove a dashboard', () => { const d2 = {...dashboard, id: '2'} - const state = [dashboard, d2] - const expected = [dashboard] - const actual = reducer(state, deleteDashboard(d2.id)) + const list = [dashboard, d2] + const expected = {list: [dashboard], status} + const actual = reducer({list, status}, removeDashboard(d2.id)) expect(actual).toEqual(expected) }) - it('can load a dashboard', () => { + it('can set a dashboard', () => { const loadedDashboard = {...dashboard, name: 'updated'} const d2 = {...dashboard, id: '2'} - const state = [dashboard, d2] + const state = {status, list: [dashboard, d2]} - const expected = 
[loadedDashboard, d2] - const actual = reducer(state, loadDashboard(loadedDashboard)) + const expected = {status, list: [loadedDashboard, d2]} + const actual = reducer(state, setDashboard(loadedDashboard)) expect(actual).toEqual(expected) }) - it('can update a dashboard', () => { + it('can edit a dashboard', () => { const updates = {...dashboard, name: 'updated dash'} - const expected = [updates] - const actual = reducer([dashboard], updateDashboard(updates)) + const expected = {status, list: [updates]} + const actual = reducer({status, list: [dashboard]}, editDashboard(updates)) expect(actual).toEqual(expected) }) - it('can delete a cell from a dashboard', () => { - const expected = [{...dashboard, cells: []}] + it('can remove a cell from a dashboard', () => { + const expected = {status, list: [{...dashboard, cells: []}]} const actual = reducer( - [dashboard], - deleteCell(dashboard, dashboard.cells[0]) + {status, list: [dashboard]}, + removeCell(dashboard, dashboard.cells[0]) ) expect(actual).toEqual(expected) @@ -64,20 +69,24 @@ describe('dashboards reducer', () => { it('can add labels to a dashboard', () => { const dashboardWithoutLabels = {...dashboard, labels: []} - const expected = [{...dashboard, labels}] + const expected = {status, list: [{...dashboard, labels}]} const actual = reducer( - [dashboardWithoutLabels], + {status, list: [dashboardWithoutLabels]}, addDashboardLabels(dashboardWithoutLabels.id, labels) ) expect(actual).toEqual(expected) }) - it('can delete labels from a dashboard', () => { - const dashboardWithLabels = {...dashboard, labels} - const expected = [{...dashboard, labels: []}] + it('can remove labels from a dashboard', () => { + const leftOverLabel = {...labels[0], name: 'wowowowo', id: '3'} + const dashboardWithLabels = { + ...dashboard, + labels: [...labels, leftOverLabel], + } + const expected = {status, list: [{...dashboard, labels: [leftOverLabel]}]} const actual = reducer( - [dashboardWithLabels], + {status, list: [dashboardWithLabels]}, removeDashboardLabels(dashboardWithLabels.id, labels) ) diff --git a/ui/src/dashboards/reducers/dashboards.ts b/ui/src/dashboards/reducers/dashboards.ts index 46a276b940..432684456c 100644 --- a/ui/src/dashboards/reducers/dashboards.ts +++ b/ui/src/dashboards/reducers/dashboards.ts @@ -1,83 +1,111 @@ -import {Action, ActionTypes} from 'src/dashboards/actions' -import {Dashboard} from 'src/types' +// Libraries +import {produce} from 'immer' import _ from 'lodash' -type State = Dashboard[] +// Types +import {Action, ActionTypes} from 'src/dashboards/actions' +import {Dashboard, RemoteDataState} from 'src/types' -export default (state: State = [], action: Action): State => { - switch (action.type) { - case ActionTypes.LoadDashboards: { - const {dashboards} = action.payload - - return [...dashboards] - } - - case ActionTypes.DeleteDashboard: { - const {dashboardID} = action.payload - - return [...state.filter(d => d.id !== dashboardID)] - } - - case ActionTypes.LoadDashboard: { - const {dashboard} = action.payload - - const newDashboards = _.unionBy([dashboard], state, 'id') - - return newDashboards - } - - case ActionTypes.UpdateDashboard: { - const {dashboard} = action.payload - const newState = state.map(d => - d.id === dashboard.id ? 
{...dashboard} : d - ) - - return [...newState] - } - - case ActionTypes.DeleteCell: { - const {dashboard, cell} = action.payload - const newState = state.map(d => { - if (d.id !== dashboard.id) { - return {...d} - } - - const cells = d.cells.filter(c => c.id !== cell.id) - return {...d, cells} - }) - - return [...newState] - } - - case ActionTypes.AddDashboardLabels: { - const {dashboardID, labels} = action.payload - - const newState = state.map(d => { - if (d.id === dashboardID) { - return {...d, labels: [...d.labels, ...labels]} - } - return d - }) - - return [...newState] - } - - case ActionTypes.RemoveDashboardLabels: { - const {dashboardID, labels} = action.payload - - const newState = state.map(d => { - if (d.id === dashboardID) { - const updatedLabels = d.labels.filter(l => { - return !labels.includes(l) - }) - return {...d, labels: updatedLabels} - } - return d - }) - - return [...newState] - } - } - - return state +export interface DashboardsState { + list: Dashboard[] + status: RemoteDataState +} + +const initialState = () => ({ + list: [], + status: RemoteDataState.NotStarted, +}) + +export const dashboardsReducer = ( + state: DashboardsState = initialState(), + action: Action +): DashboardsState => { + return produce(state, draftState => { + switch (action.type) { + case ActionTypes.SetDashboards: { + const {list, status} = action.payload + + draftState.status = status + if (list) { + draftState.list = list + } + + return + } + + case ActionTypes.RemoveDashboard: { + const {id} = action.payload + draftState.list = draftState.list.filter(l => l.id !== id) + + return + } + + case ActionTypes.SetDashboard: { + const {dashboard} = action.payload + draftState.list = _.unionBy([dashboard], state.list, 'id') + + return + } + + case ActionTypes.EditDashboard: { + const {dashboard} = action.payload + + draftState.list = draftState.list.map(d => { + if (d.id === dashboard.id) { + return dashboard + } + return d + }) + + return + } + + case ActionTypes.RemoveCell: { + const {dashboard, cell} = action.payload + draftState.list = draftState.list.map(d => { + if (d.id === dashboard.id) { + const cells = d.cells.filter(c => c.id !== cell.id) + d.cells = cells + } + + return d + }) + + return + } + + case ActionTypes.AddDashboardLabels: { + const {dashboardID, labels} = action.payload + + draftState.list = draftState.list.map(d => { + if (d.id === dashboardID) { + d.labels = [...d.labels, ...labels] + } + + return d + }) + + return + } + + case ActionTypes.RemoveDashboardLabels: { + const {dashboardID, labels} = action.payload + draftState.list = draftState.list.map(d => { + if (d.id === dashboardID) { + const updatedLabels = d.labels.filter(el => { + const labelToRemove = labels.find(l => l.id === el.id) + + return !labelToRemove + }) + + d.labels = updatedLabels + } + + return d + }) + + return + } + } + }) } diff --git a/ui/src/dashboards/resources.ts b/ui/src/dashboards/resources.ts index 33e1567f8c..65ec52e53a 100644 --- a/ui/src/dashboards/resources.ts +++ b/ui/src/dashboards/resources.ts @@ -12,6 +12,8 @@ import { SourceLinks, TimeRange, QueryConfig, + Scale, + Base, } from 'src/types' export const dashboard: Dashboard = { @@ -131,16 +133,16 @@ export const axes: Axes = { label: '', prefix: '', suffix: '', - base: '10', - scale: 'linear', + base: Base.Ten, + scale: Scale.Linear, }, y: { bounds: ['', ''], label: '', prefix: '', suffix: '', - base: '10', - scale: 'linear', + base: Base.Ten, + scale: Scale.Linear, }, } diff --git a/ui/src/dashboards/selectors/index.ts 
b/ui/src/dashboards/selectors/index.ts index 8f61dec910..7f21cd0ea2 100644 --- a/ui/src/dashboards/selectors/index.ts +++ b/ui/src/dashboards/selectors/index.ts @@ -10,7 +10,7 @@ export const getViewsForDashboard = ( state: AppState, dashboardID: string ): View[] => { - const dashboard = state.dashboards.find( + const dashboard = state.dashboards.list.find( dashboard => dashboard.id === dashboardID ) diff --git a/ui/src/dataExplorer/components/SaveAsCellForm.tsx b/ui/src/dataExplorer/components/SaveAsCellForm.tsx index d0bc9a49e4..0a30b55ccf 100644 --- a/ui/src/dataExplorer/components/SaveAsCellForm.tsx +++ b/ui/src/dataExplorer/components/SaveAsCellForm.tsx @@ -234,7 +234,7 @@ class SaveAsCellForm extends PureComponent { const mstp = (state: AppState): StateProps => { const { orgs, - dashboards, + dashboards: {list: dashboards}, timeMachines: { timeMachines: {de}, }, diff --git a/ui/src/dataExplorer/components/SaveAsOverlay.tsx b/ui/src/dataExplorer/components/SaveAsOverlay.tsx index ca58201a4e..13f12d9798 100644 --- a/ui/src/dataExplorer/components/SaveAsOverlay.tsx +++ b/ui/src/dataExplorer/components/SaveAsOverlay.tsx @@ -33,6 +33,7 @@ class SaveAsOverlay extends PureComponent {
[Stripped JSX hunks: the SaveAsOverlay save-as tabs keep their "Dashboard Cell" and "Task" labels with additional lines added after them, and a separate hunk mentions the "Profile" and "Tokens" items; the markup itself is not recoverable.]
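Several connected components in this diff (SaveAsCellForm, SaveAsOverlay, DashboardPage, DashboardsIndex, the dashboards selectors) now read from the reshaped dashboards state introduced by the reducer change above. A minimal sketch of the new access pattern follows; the selector names are illustrative, and only the types and state shape come from the diff.

    // Consuming the new {status, list} dashboards state.
    import {AppState, Dashboard, RemoteDataState} from 'src/types'

    const selectDashboards = (state: AppState): Dashboard[] =>
      state.dashboards.list

    const dashboardsAreLoading = (state: AppState): boolean =>
      state.dashboards.status === RemoteDataState.Loading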
  • diff --git a/ui/src/minard/components/Axes.tsx b/ui/src/minard/components/Axes.tsx deleted file mode 100644 index 84a3edb514..0000000000 --- a/ui/src/minard/components/Axes.tsx +++ /dev/null @@ -1,131 +0,0 @@ -import React, {useRef, useLayoutEffect, SFC} from 'react' - -import { - PlotEnv, - TICK_PADDING_RIGHT, - TICK_PADDING_TOP, - PLOT_PADDING, -} from 'src/minard' -import {clearCanvas} from 'src/minard/utils/clearCanvas' - -interface Props { - env: PlotEnv - axesStroke?: string - tickFont?: string - tickFill?: string -} - -export const drawAxes = ( - canvas: HTMLCanvasElement, - env: PlotEnv, - axesStroke: string, - tickFont: string, - tickFill: string -) => { - const { - width, - height, - innerWidth, - innerHeight, - margins, - xTicks, - yTicks, - xAxisLabel, - yAxisLabel, - baseLayer: { - scales: {x: xScale, y: yScale}, - }, - } = env - - clearCanvas(canvas, width, height) - - const context = canvas.getContext('2d') - const xAxisY = height - margins.bottom - - // Draw x axis line - context.strokeStyle = axesStroke - context.beginPath() - context.moveTo(margins.left, xAxisY) - context.lineTo(width - margins.right, xAxisY) - context.stroke() - - // Draw y axis line - context.beginPath() - context.moveTo(margins.left, xAxisY) - context.lineTo(margins.left, margins.top) - context.stroke() - - context.font = tickFont - context.fillStyle = tickFill - context.textAlign = 'center' - context.textBaseline = 'top' - - // Draw and label each tick on the x axis - for (const xTick of xTicks) { - const x = xScale(xTick) + margins.left - - context.beginPath() - context.moveTo(x, xAxisY) - context.lineTo(x, margins.top) - context.stroke() - - context.fillText(String(xTick), x, xAxisY + TICK_PADDING_TOP) - } - - context.textAlign = 'end' - context.textBaseline = 'middle' - - // Draw and label each tick on the y axis - for (const yTick of yTicks) { - const y = yScale(yTick) + margins.top - - context.beginPath() - context.moveTo(margins.left, y) - context.lineTo(width - margins.right, y) - context.stroke() - - context.fillText(String(yTick), margins.left - TICK_PADDING_RIGHT, y) - } - - // Draw the x axis label - if (xAxisLabel) { - context.textAlign = 'center' - context.textBaseline = 'bottom' - context.fillText( - xAxisLabel, - margins.left + innerWidth / 2, - height - PLOT_PADDING - ) - } - - // Draw the y axis label - if (yAxisLabel) { - const x = PLOT_PADDING - const y = margins.top + innerHeight / 2 - - context.save() - context.translate(x, y) - context.rotate(-Math.PI / 2) - context.textAlign = 'center' - context.textBaseline = 'top' - context.fillText(yAxisLabel, 0, 0) - context.restore() - } -} - -export const Axes: SFC = props => { - const {children, env, tickFill, tickFont, axesStroke} = props - const canvas = useRef(null) - - useLayoutEffect( - () => drawAxes(canvas.current, env, axesStroke, tickFont, tickFill), - [canvas.current, env, axesStroke, tickFont, tickFill] - ) - - return ( - <> - {children} - - - ) -} diff --git a/ui/src/minard/components/Histogram.tsx b/ui/src/minard/components/Histogram.tsx deleted file mode 100644 index b47ff694bd..0000000000 --- a/ui/src/minard/components/Histogram.tsx +++ /dev/null @@ -1,108 +0,0 @@ -import React, {SFC} from 'react' - -import {PlotEnv, HistogramLayer} from 'src/minard' -import {bin} from 'src/minard/utils/bin' -import HistogramBars from 'src/minard/components/HistogramBars' -import HistogramTooltip from 'src/minard/components/HistogramTooltip' -import {findHoveredRowIndices} from 'src/minard/utils/findHoveredRowIndices' -import {useLayer} 
from 'src/minard/utils/useLayer' - -export enum Position { - Stacked = 'stacked', - Overlaid = 'overlaid', -} - -export interface Props { - env: PlotEnv - x: string - fill: string[] - colors: string[] - position?: Position - binCount?: number - tooltip?: (props: TooltipProps) => JSX.Element -} - -export interface TooltipProps { - xMin: number - xMax: number - counts: Array<{ - grouping: {[colName: string]: any} - count: number - color: string - }> -} - -export const Histogram: SFC = ({ - env, - x, - fill, - colors, - tooltip = null, - binCount = null, - position = Position.Stacked, -}: Props) => { - const baseTable = env.baseLayer.table - const xDomain = env.xDomain - - const layer = useLayer( - env, - () => { - const [table, mappings] = bin( - baseTable, - x, - xDomain, - fill, - binCount, - position - ) - - return {type: 'histogram', table, mappings, colors} - }, - [baseTable, xDomain, x, fill, position, binCount, colors] - ) as HistogramLayer - - if (!layer) { - return null - } - - const { - innerWidth, - innerHeight, - hoverX, - hoverY, - baseLayer: { - scales: {x: xScale, y: yScale}, - }, - } = env - - const {table} = layer - - const hoveredRowIndices = findHoveredRowIndices( - table, - hoverX, - hoverY, - xScale, - yScale - ) - - return ( - <> - - {hoveredRowIndices && ( - - )} - - ) -} diff --git a/ui/src/minard/components/HistogramBars.tsx b/ui/src/minard/components/HistogramBars.tsx deleted file mode 100644 index 4e285ebaed..0000000000 --- a/ui/src/minard/components/HistogramBars.tsx +++ /dev/null @@ -1,75 +0,0 @@ -import React, {useRef, useLayoutEffect, SFC} from 'react' - -import {Scale, HistogramPosition, HistogramLayer} from 'src/minard' -import {clearCanvas} from 'src/minard/utils/clearCanvas' -import {getBarFill} from 'src/minard/utils/getBarFill' - -const BAR_TRANSPARENCY = 0.5 -const BAR_TRANSPARENCY_HOVER = 0.7 -const BAR_PADDING = 1.5 - -interface Props { - width: number - height: number - layer: HistogramLayer - xScale: Scale - yScale: Scale - position: HistogramPosition - hoveredRowIndices: number[] | null -} - -const drawBars = ( - canvas: HTMLCanvasElement, - {width, height, layer, xScale, yScale, hoveredRowIndices}: Props -): void => { - clearCanvas(canvas, width, height) - - const {table} = layer - const xMinCol = table.columns.xMin.data - const xMaxCol = table.columns.xMax.data - const yMinCol = table.columns.yMin.data - const yMaxCol = table.columns.yMax.data - - const context = canvas.getContext('2d') - - for (let i = 0; i < yMaxCol.length; i++) { - if (yMinCol[i] === yMaxCol[i]) { - // Skip 0-height bars - continue - } - - const x = xScale(xMinCol[i]) - const y = yScale(yMaxCol[i]) - const width = xScale(xMaxCol[i]) - x - BAR_PADDING - const height = yScale(yMinCol[i]) - y - BAR_PADDING - const fill = getBarFill(layer, i) - const alpha = - hoveredRowIndices && hoveredRowIndices.includes(i) - ? 
BAR_TRANSPARENCY_HOVER - : BAR_TRANSPARENCY - - // See https://stackoverflow.com/a/45125187 - context.beginPath() - context.rect(x, y, width, height) - context.save() - context.clip() - context.lineWidth = 2 - context.globalAlpha = alpha - context.fillStyle = fill - context.fill() - context.globalAlpha = 1 - context.strokeStyle = fill - context.stroke() - context.restore() - } -} - -const HistogramBars: SFC = props => { - const canvas = useRef(null) - - useLayoutEffect(() => drawBars(canvas.current, props)) - - return -} - -export default React.memo(HistogramBars) diff --git a/ui/src/minard/components/HistogramTooltip.tsx b/ui/src/minard/components/HistogramTooltip.tsx deleted file mode 100644 index b09e6f527c..0000000000 --- a/ui/src/minard/components/HistogramTooltip.tsx +++ /dev/null @@ -1,24 +0,0 @@ -import {SFC} from 'react' - -import {HistogramTooltipProps, HistogramLayer} from 'src/minard' -import {getHistogramTooltipProps} from 'src/minard/utils/getHistogramTooltipProps' - -interface Props { - tooltip?: (props: HistogramTooltipProps) => JSX.Element - layer: HistogramLayer - hoveredRowIndices: number[] | null -} - -const HistogramTooltip: SFC = ({ - tooltip, - layer, - hoveredRowIndices, -}: Props) => { - if (!hoveredRowIndices || !tooltip) { - return null - } - - return tooltip(getHistogramTooltipProps(layer, hoveredRowIndices)) -} - -export default HistogramTooltip diff --git a/ui/src/minard/components/Plot.tsx b/ui/src/minard/components/Plot.tsx deleted file mode 100644 index e677c2a65c..0000000000 --- a/ui/src/minard/components/Plot.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import React, {SFC} from 'react' -import {AutoSizer} from 'react-virtualized' - -import { - SizedPlot, - Props as SizedPlotProps, -} from 'src/minard/components/SizedPlot' - -type Props = Pick< - SizedPlotProps, - Exclude -> & {width?: number; height?: number} - -/* - Works just like a `SizedPlot`, except it will measure the width and height of - the containing element if no `width` and `height` props are passed. -*/ -export const Plot: SFC = props => { - if (props.width && props.height) { - return - } - - return ( - - {({width, height}) => { - if (width === 0 || height === 0) { - return null - } - - return - }} - - ) -} diff --git a/ui/src/minard/components/SizedPlot.tsx b/ui/src/minard/components/SizedPlot.tsx deleted file mode 100644 index fa4fbe2de8..0000000000 --- a/ui/src/minard/components/SizedPlot.tsx +++ /dev/null @@ -1,132 +0,0 @@ -import React, {useReducer, useRef, useMemo, SFC, CSSProperties} from 'react' - -import {Table, PlotEnv} from 'src/minard' -import {Axes} from 'src/minard/components/Axes' -import {useMousePos} from 'src/minard/utils/useMousePos' -import {useMountedEffect} from 'src/minard/utils/useMountedEffect' -import { - setDimensions, - setTable, - setControlledXDomain, - setControlledYDomain, - setXAxisLabel, - setYAxisLabel, -} from 'src/minard/utils/plotEnvActions' -import {plotEnvReducer, INITIAL_PLOT_ENV} from 'src/minard/utils/plotEnvReducer' - -export interface Props { - // - // Required props - // ============== - // - table: Table - width: number - height: number - children: (env: PlotEnv) => JSX.Element - - // - // Miscellaneous options - // ===================== - // - axesStroke?: string - tickFont?: string - tickFill?: string - xAxisLabel?: string - yAxisLabel?: string - - // The x domain of the plot can be explicitly set. If this prop is passed, - // then the component is operating in a "controlled" mode, where it always - // uses the passed x domain. 
Any interaction with the plot that should change - // the x domain (clicking, brushing, etc.) will call the `onSetXDomain` prop - // when the component is in controlled mode. If the `xDomain` prop is not - // passed, then the component is "uncontrolled". It will compute and set the - // `xDomain` automatically. - xDomain?: [number, number] - onSetXDomain?: (xDomain: [number, number]) => void - - // See the `xDomain` and `onSetXDomain` props - yDomain?: [number, number] - onSetYDomain?: (yDomain: [number, number]) => void -} - -export const SizedPlot: SFC = ({ - width, - height, - table, - children, - axesStroke = '#31313d', - tickFont = 'bold 10px Roboto', - tickFill = '#8e91a1', - xAxisLabel = '', - yAxisLabel = '', - xDomain = null, - yDomain = null, -}) => { - const [env, dispatch] = useReducer(plotEnvReducer, { - ...INITIAL_PLOT_ENV, - width, - height, - xDomain, - yDomain, - xAxisLabel, - yAxisLabel, - baseLayer: {...INITIAL_PLOT_ENV.baseLayer, table}, - }) - - useMountedEffect(() => dispatch(setTable(table)), [table]) - useMountedEffect(() => dispatch(setControlledXDomain(xDomain)), [xDomain]) - useMountedEffect(() => dispatch(setControlledYDomain(yDomain)), [yDomain]) - useMountedEffect(() => dispatch(setXAxisLabel(xAxisLabel)), [xAxisLabel]) - useMountedEffect(() => dispatch(setYAxisLabel(yAxisLabel)), [yAxisLabel]) - useMountedEffect(() => dispatch(setDimensions(width, height)), [ - width, - height, - ]) - - const mouseRegion = useRef(null) - const {x: hoverX, y: hoverY} = useMousePos(mouseRegion.current) - - const childProps = useMemo( - () => ({ - ...env, - hoverX, - hoverY, - dispatch, - }), - [env, hoverX, hoverY, dispatch] - ) - - const plotStyle: CSSProperties = { - position: 'relative', - width: `${width}px`, - height: `${height}px`, - } - - const layersStyle: CSSProperties = { - position: 'absolute', - top: `${env.margins.top}px`, - right: `${env.margins.right}px`, - bottom: `${env.margins.bottom}px`, - left: `${env.margins.left}px`, - } - - return ( -
[Deleted render() markup stripped; only the line "{children(childProps)}" is recoverable from this hunk.]
    - ) -} diff --git a/ui/src/minard/index.ts b/ui/src/minard/index.ts deleted file mode 100644 index b3e1d9a38f..0000000000 --- a/ui/src/minard/index.ts +++ /dev/null @@ -1,168 +0,0 @@ -import {PlotAction} from 'src/minard/utils/plotEnvActions' - -export const PLOT_PADDING = 20 - -export const TICK_PADDING_RIGHT = 8 -export const TICK_PADDING_TOP = 5 - -// TODO: Measure text metrics instead -export const TICK_CHAR_WIDTH = 7 -export const TICK_CHAR_HEIGHT = 10 - -export const AXIS_LABEL_PADDING_BOTTOM = 15 - -export {Plot} from 'src/minard/components/Plot' - -export { - Histogram, - Position as HistogramPosition, - TooltipProps as HistogramTooltipProps, -} from 'src/minard/components/Histogram' - -export {useTooltipStyle} from 'src/minard/utils/useTooltipStyle' - -export {isNumeric} from 'src/minard/utils/isNumeric' - -export type ColumnType = 'int' | 'uint' | 'float' | 'string' | 'time' | 'bool' - -export type NumericColumnType = 'int' | 'uint' | 'float' | 'time' - -export interface FloatColumn { - data: number[] - type: 'float' -} - -export interface IntColumn { - data: number[] - type: 'int' -} - -export interface UIntColumn { - data: number[] - type: 'uint' -} - -export interface TimeColumn { - data: number[] - type: 'time' -} - -export interface StringColumn { - data: string[] - type: 'string' -} - -export interface BoolColumn { - data: boolean[] - type: 'bool' -} - -export type NumericTableColumn = - | FloatColumn - | IntColumn - | UIntColumn - | TimeColumn - -export type TableColumn = - | FloatColumn - | IntColumn - | UIntColumn - | TimeColumn - | StringColumn - | BoolColumn - -export interface Table { - length: number - columns: { - [columnName: string]: TableColumn - } -} - -export type LayerType = 'base' | 'histogram' - -export interface Scale { - (x: D): R - invert?: (y: R) => D -} - -export interface BaseLayerMappings {} - -export interface BaseLayerScales { - x: Scale - y: Scale -} - -export interface BaseLayer { - type: 'base' - table: Table - scales: BaseLayerScales - mappings: {} - xDomain: [number, number] - yDomain: [number, number] -} - -export interface HistogramTable extends Table { - columns: { - xMin: NumericTableColumn - xMax: NumericTableColumn - yMin: IntColumn - yMax: IntColumn - [fillColumn: string]: TableColumn - } - length: number -} - -export interface HistogramMappings { - xMin: 'xMin' - xMax: 'xMax' - yMin: 'yMin' - yMax: 'yMax' - fill: string[] -} - -export interface HistogramScales { - // x and y scale are from the `BaseLayer` - fill: Scale -} - -export interface HistogramLayer { - type: 'histogram' - table: HistogramTable - mappings: HistogramMappings - scales: HistogramScales - colors: string[] -} - -export type Layer = BaseLayer | HistogramLayer - -export interface Margins { - top: number - right: number - bottom: number - left: number -} - -export interface PlotEnv { - width: number - height: number - innerWidth: number - innerHeight: number - margins: Margins - xTicks: number[] - yTicks: number[] - xAxisLabel: string - yAxisLabel: string - - // If the domains have been explicitly passed in to the `Plot` component, - // they will be stored here. 
Scales and child layers use the `xDomain` and - // `yDomain` in the `baseLayer`, which are set from these domains if they - // exist or computed from the extent of data otherwise - xDomain: [number, number] - yDomain: [number, number] - - baseLayer: BaseLayer - layers: {[layerKey: string]: Layer} - hoverX: number - hoverY: number - dispatch: (action: PlotAction) => void -} diff --git a/ui/src/minard/utils/assert.ts b/ui/src/minard/utils/assert.ts deleted file mode 100644 index 9eceafe9c4..0000000000 --- a/ui/src/minard/utils/assert.ts +++ /dev/null @@ -1,5 +0,0 @@ -export const assert = (message: string, condition: boolean) => { - if (!condition) { - throw new Error(message) - } -} diff --git a/ui/src/minard/utils/bin.test.ts b/ui/src/minard/utils/bin.test.ts deleted file mode 100644 index 27665fc63f..0000000000 --- a/ui/src/minard/utils/bin.test.ts +++ /dev/null @@ -1,178 +0,0 @@ -import {HistogramPosition, Table} from 'src/minard' -import {bin} from 'src/minard/utils/bin' - -const TABLE: Table = { - columns: { - _value: { - data: [70, 56, 60, 100, 76, 0, 63, 48, 79, 67], - type: 'int', - }, - _field: { - data: [ - 'usage_guest', - 'usage_guest', - 'usage_guest', - 'usage_guest', - 'usage_guest', - 'usage_idle', - 'usage_idle', - 'usage_idle', - 'usage_idle', - 'usage_idle', - ], - type: 'string', - }, - cpu: { - data: [ - 'cpu0', - 'cpu0', - 'cpu0', - 'cpu1', - 'cpu1', - 'cpu0', - 'cpu0', - 'cpu0', - 'cpu1', - 'cpu1', - ], - type: 'string', - }, - }, - length: 10, -} - -describe('bin', () => { - test('without grouping', () => { - const actual = bin(TABLE, '_value', null, [], 5, HistogramPosition.Stacked) - const expected = [ - { - columns: { - xMin: {data: [0, 20, 40, 60, 80], type: 'int'}, - xMax: {data: [20, 40, 60, 80, 100], type: 'int'}, - yMin: {data: [0, 0, 0, 0, 0], type: 'int'}, - yMax: {data: [1, 0, 2, 6, 1], type: 'int'}, - }, - length: 5, - }, - {xMin: 'xMin', xMax: 'xMax', yMin: 'yMin', yMax: 'yMax', fill: []}, - ] - - expect(actual).toEqual(expected) - }) - - test('with grouping by _field and cpu', () => { - const actual = bin( - TABLE, - '_value', - null, - ['_field'], - 5, - HistogramPosition.Stacked - )[0].columns - - const expected = { - xMin: {data: [0, 20, 40, 60, 80, 0, 20, 40, 60, 80], type: 'int'}, - xMax: {data: [20, 40, 60, 80, 100, 20, 40, 60, 80, 100], type: 'int'}, - yMin: {data: [0, 0, 0, 0, 0, 0, 0, 1, 3, 1], type: 'int'}, - yMax: {data: [0, 0, 1, 3, 1, 1, 0, 2, 6, 1], type: 'int'}, - _field: { - data: [ - 'usage_guest', - 'usage_guest', - 'usage_guest', - 'usage_guest', - 'usage_guest', - 'usage_idle', - 'usage_idle', - 'usage_idle', - 'usage_idle', - 'usage_idle', - ], - type: 'string', - }, - } - - expect(actual).toEqual(expected) - }) - - test('with grouping and overlaid positioning', () => { - const actual = bin( - TABLE, - '_value', - null, - ['_field'], - 5, - HistogramPosition.Overlaid - )[0].columns - - const expected = { - xMin: {data: [0, 20, 40, 60, 80, 0, 20, 40, 60, 80], type: 'int'}, - xMax: {data: [20, 40, 60, 80, 100, 20, 40, 60, 80, 100], type: 'int'}, - yMin: {data: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], type: 'int'}, - yMax: {data: [0, 0, 1, 3, 1, 1, 0, 1, 3, 0], type: 'int'}, - _field: { - data: [ - 'usage_guest', - 'usage_guest', - 'usage_guest', - 'usage_guest', - 'usage_guest', - 'usage_idle', - 'usage_idle', - 'usage_idle', - 'usage_idle', - 'usage_idle', - ], - type: 'string', - }, - } - - expect(actual).toEqual(expected) - }) - - test('with an explicitly set xDomain', () => { - const actual = bin( - TABLE, - '_value', - [-200, 200], - [], 
- 10, - HistogramPosition.Stacked - )[0].columns - - const expected = { - xMin: { - data: [-200, -160, -120, -80, -40, 0, 40, 80, 120, 160], - type: 'int', - }, - xMax: { - data: [-160, -120, -80, -40, 0, 40, 80, 120, 160, 200], - type: 'int', - }, - yMin: {data: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], type: 'int'}, - yMax: {data: [0, 0, 0, 0, 0, 1, 8, 1, 0, 0], type: 'int'}, - } - - expect(actual).toEqual(expected) - }) - - test('ignores values outside of xDomain', () => { - const actual = bin( - TABLE, - '_value', - [50, 80], - [], - 3, - HistogramPosition.Stacked - )[0].columns - - const expected = { - xMin: {data: [50, 60, 70], type: 'int'}, - xMax: {data: [60, 70, 80], type: 'int'}, - yMin: {data: [0, 0, 0], type: 'int'}, - yMax: {data: [1, 3, 3], type: 'int'}, - } - - expect(actual).toEqual(expected) - }) -}) diff --git a/ui/src/minard/utils/bin.ts b/ui/src/minard/utils/bin.ts deleted file mode 100644 index b37f162ee9..0000000000 --- a/ui/src/minard/utils/bin.ts +++ /dev/null @@ -1,219 +0,0 @@ -import {extent, range, thresholdSturges} from 'd3-array' - -import { - Table, - HistogramTable, - HistogramMappings, - HistogramPosition, - NumericColumnType, - isNumeric, -} from 'src/minard' -import {assert} from 'src/minard/utils/assert' -import {getGroupKey} from 'src/minard/utils/getGroupKey' - -/* - Compute the data of a histogram visualization. - - The column specified by the `xColName` will be divided into `binCount` evenly - spaced bins, and the number of rows in each bin will be counted. - - If the `groupKeyCols` option is passed, rows in each bin are further grouped - by the set of values for the `groupKeyCols` for the row. - - The returned result is a table where each row represents a bar in a - (potentially stacked) histogram. For example, a histogram with two bins and - two groups in each bin (four bars total) might have a table that looks like - this: - - xMin | xMax | yMin | yMax | host | cpu - -------------------------------------- - 0 | 10 | 0 | 21 | "a" | 1 - 0 | 10 | 21 | 30 | "b" | 1 - 10 | 20 | 0 | 4 | "a" | 1 - 10 | 20 | 4 | 6 | "b" | 1 - - If `binCount` is not provided, a default value will be provided using - [Sturges' formula][0]. - - [0]: https://en.wikipedia.org/wiki/Histogram#Sturges'_formula -*/ -export const bin = ( - table: Table, - xColName: string, - xDomain: [number, number], - groupColNames: string[] = [], - binCount: number, - position: HistogramPosition -): [HistogramTable, HistogramMappings] => { - const col = table.columns[xColName] - - assert(`could not find column "${xColName}"`, !!col) - assert(`unsupported value column type "${col.type}"`, isNumeric(col.type)) - - const xCol = col.data as number[] - const xColType = col.type as NumericColumnType - - if (!binCount) { - binCount = thresholdSturges(xCol) - } - - xDomain = resolveXDomain(xCol, xDomain) - - const bins = createBins(xDomain, binCount) - - // A group is the set of key-value pairs that a row takes on for the column - // names specified in `groupColNames`. The group key is a hashable - // representation of the values of these pairs. 
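// Illustrative sketch (not part of the deleted file): how a row's "group key"
// is formed, and what the Sturges default for `binCount` mentioned in the doc
// comment above works out to. `sketchGroupKey` mirrors the deleted getGroupKey
// helper further below, which simply sorts the values and joins them with a
// space.
const sketchGroupKey = (values: any[]): string => [...values].sort().join(' ')
const sketchKey = sketchGroupKey(['usage_idle', 'cpu0']) // 'cpu0 usage_idle'

// Sturges' formula gives ceil(log2(n)) + 1 bins; for the 10-row TABLE used in
// the tests above that works out to 5 bins.
const sketchSturgesBinCount = (n: number): number =>
  Math.ceil(Math.log2(n)) + 1
const sketchBinCount = sketchSturgesBinCount(10) // 5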
- const groupsByGroupKey = {} - - // Count x values by bin and group - for (let i = 0; i < xCol.length; i++) { - const x = xCol[i] - - const shouldSkipPoint = - x === undefined || - x === null || - isNaN(x) || - x < xDomain[0] || - x > xDomain[1] - - if (shouldSkipPoint) { - continue - } - - const group = getGroup(table, groupColNames, i) - const groupKey = getGroupKey(Object.values(group)) - const xPercentage = (x - xDomain[0]) / (xDomain[1] - xDomain[0]) - - let binIndex = Math.floor(xPercentage * binCount) - - if (binIndex === bins.length) { - // Special case: the maximum value should be clamped to the last bin - binIndex = bins.length - 1 - } - - const bin = bins[binIndex] - - groupsByGroupKey[groupKey] = group - - if (!bin.values[groupKey]) { - bin.values[groupKey] = 1 - } else { - bin.values[groupKey] += 1 - } - } - - // Next, build up a tabular representation of each of these bins by group - const groupKeys = Object.keys(groupsByGroupKey) - const statTable = { - columns: { - xMin: { - data: [], - type: xColType, - }, - xMax: { - data: [], - type: xColType, - }, - yMin: { - data: [], - type: 'int', - }, - yMax: { - data: [], - type: 'int', - }, - }, - length: binCount * groupKeys.length, - } - - // Include original columns used to group data in the resulting table - for (const name of groupColNames) { - statTable.columns[name] = { - data: [], - type: table.columns[name].type, - } - } - - for (let i = 0; i < groupKeys.length; i++) { - const groupKey = groupKeys[i] - - for (const bin of bins) { - let yMin = 0 - - if (position === HistogramPosition.Stacked) { - yMin = groupKeys - .slice(0, i) - .reduce((sum, k) => sum + (bin.values[k] || 0), 0) - } - - statTable.columns.xMin.data.push(bin.min) - statTable.columns.xMax.data.push(bin.max) - statTable.columns.yMin.data.push(yMin) - statTable.columns.yMax.data.push(yMin + (bin.values[groupKey] || 0)) - - for (const [k, v] of Object.entries(groupsByGroupKey[groupKey])) { - statTable.columns[k].data.push(v) - } - } - } - - const mappings: HistogramMappings = { - xMin: 'xMin', - xMax: 'xMax', - yMin: 'yMin', - yMax: 'yMax', - fill: groupColNames, - } - - return [statTable as HistogramTable, mappings] -} - -const createBins = ( - xDomain: number[], - binCount: number -): Array<{max: number; min: number; values: {}}> => { - const domainWidth = xDomain[1] - xDomain[0] - const binWidth = domainWidth / binCount - const binMinimums = range(xDomain[0], xDomain[1], binWidth) - - const bins = binMinimums.map((min, i) => { - const isLastBin = i === binMinimums.length - 1 - const max = isLastBin ? 
xDomain[1] : binMinimums[i + 1] - - return {min, max, values: {}} - }) - - return bins -} - -const resolveXDomain = ( - xCol: number[], - preferredXDomain?: [number, number] -): [number, number] => { - let domain: [number, number] - - if (preferredXDomain) { - domain = [preferredXDomain[0], preferredXDomain[1]] - } else { - domain = extent(xCol) - } - - if (domain[0] === domain[1]) { - // Widen domains of zero width by an arbitrary amount so that they can be - // divided into bins - domain[1] = domain[0] + 1 - } - - return domain -} - -const getGroup = (table: Table, groupColNames: string[], i: number) => { - const result = {} - - for (const key of groupColNames) { - result[key] = table.columns[key].data[i] - } - - return result -} diff --git a/ui/src/minard/utils/clearCanvas.ts b/ui/src/minard/utils/clearCanvas.ts deleted file mode 100644 index 64d793ebf6..0000000000 --- a/ui/src/minard/utils/clearCanvas.ts +++ /dev/null @@ -1,17 +0,0 @@ -export const clearCanvas = ( - canvas: HTMLCanvasElement, - width: number, - height: number -) => { - const context = canvas.getContext('2d') - const dpRatio = window.devicePixelRatio || 1 - - // Configure canvas to draw on retina displays correctly - canvas.width = width * dpRatio - canvas.height = height * dpRatio - canvas.style.width = `${width}px` - canvas.style.height = `${height}px` - context.scale(dpRatio, dpRatio) - - context.clearRect(0, 0, width, height) -} diff --git a/ui/src/minard/utils/findHoveredRowIndices.tsx b/ui/src/minard/utils/findHoveredRowIndices.tsx deleted file mode 100644 index 8187ebca0a..0000000000 --- a/ui/src/minard/utils/findHoveredRowIndices.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import {Scale} from 'src/minard' -import {range} from 'd3-array' - -import {HistogramTable} from 'src/minard' - -export const findHoveredRowIndices = ( - table: HistogramTable, - hoverX: number, - hoverY: number, - xScale: Scale, - yScale: Scale -) => { - if (!hoverX || !hoverY) { - return null - } - - const xMinCol = table.columns.xMin.data - const xMaxCol = table.columns.xMax.data - const yMaxCol = table.columns.yMax.data - const dataX = xScale.invert(hoverX) - const dataY = yScale.invert(hoverY) - - // Find all bins whose x extent contain the mouse x position - const hoveredRowIndices = range(0, xMinCol.length).filter( - i => xMinCol[i] <= dataX && xMaxCol[i] > dataX - ) - - // If the mouse y position is above every one of those bars, then the mouse - // isn't hovering over them - if (!hoveredRowIndices.some(i => yMaxCol[i] >= dataY)) { - return null - } - - return hoveredRowIndices -} diff --git a/ui/src/minard/utils/getBarFill.ts b/ui/src/minard/utils/getBarFill.ts deleted file mode 100644 index 7abaffa95c..0000000000 --- a/ui/src/minard/utils/getBarFill.ts +++ /dev/null @@ -1,25 +0,0 @@ -import {HistogramLayer} from 'src/minard' -import {getGroupKey} from 'src/minard/utils/getGroupKey' - -// Given a histogram `Layer` and the index of row in its table, this function -// will get the color for the bar that corresponds to that row. -// -// Since the color of a bar depends on the values in multiple columns of that -// bar's row, the process is to: -// -// 1. Get the value of each necessary column (the columns are specified by the -// `fill` data-to-aesthetic mapping) -// 2. Turn that set of values into the hashable representation (the “group -// key”) that the scale uses as a domain -// 3. 
Lookup the scale and get the color via this representation -export const getBarFill = ( - {scales, mappings, table}: HistogramLayer, - i: number -): string => { - const fillScale = scales.fill - const values = mappings.fill.map(colKey => table.columns[colKey].data[i]) - const groupKey = getGroupKey(values) - const fill = fillScale(groupKey) - - return fill -} diff --git a/ui/src/minard/utils/getGroupKey.ts b/ui/src/minard/utils/getGroupKey.ts deleted file mode 100644 index 5b0e8a51fb..0000000000 --- a/ui/src/minard/utils/getGroupKey.ts +++ /dev/null @@ -1 +0,0 @@ -export const getGroupKey = (data: any[]): string => [...data].sort().join(' ') diff --git a/ui/src/minard/utils/getHistogramTooltipProps.ts b/ui/src/minard/utils/getHistogramTooltipProps.ts deleted file mode 100644 index a125faad46..0000000000 --- a/ui/src/minard/utils/getHistogramTooltipProps.ts +++ /dev/null @@ -1,38 +0,0 @@ -import {HistogramTooltipProps, HistogramLayer} from 'src/minard' -import {getBarFill} from 'src/minard/utils/getBarFill' - -export const getHistogramTooltipProps = ( - layer: HistogramLayer, - rowIndices: number[] -): HistogramTooltipProps => { - const {table, mappings} = layer - - const xMinCol = table.columns.xMin.data - const xMaxCol = table.columns.xMax.data - const yMinCol = table.columns.yMin.data - const yMaxCol = table.columns.yMax.data - - const counts = rowIndices.map(i => { - const grouping = mappings.fill.reduce( - (acc, colName) => ({ - ...acc, - [colName]: table.columns[colName].data[i], - }), - {} - ) - - return { - count: yMaxCol[i] - yMinCol[i], - color: getBarFill(layer, i), - grouping, - } - }) - - const tooltipProps: HistogramTooltipProps = { - xMin: xMinCol[rowIndices[0]], - xMax: xMaxCol[rowIndices[0]], - counts, - } - - return tooltipProps -} diff --git a/ui/src/minard/utils/isNumeric.ts b/ui/src/minard/utils/isNumeric.ts deleted file mode 100644 index 1fc5683c30..0000000000 --- a/ui/src/minard/utils/isNumeric.ts +++ /dev/null @@ -1,6 +0,0 @@ -import {ColumnType} from 'src/minard' - -const NUMERIC_TYPES = new Set(['uint', 'int', 'float', 'time']) - -export const isNumeric = (columnType: ColumnType): boolean => - NUMERIC_TYPES.has(columnType) diff --git a/ui/src/minard/utils/plotEnvActions.ts b/ui/src/minard/utils/plotEnvActions.ts deleted file mode 100644 index 4561de3279..0000000000 --- a/ui/src/minard/utils/plotEnvActions.ts +++ /dev/null @@ -1,115 +0,0 @@ -import {Table, PlotEnv, Layer} from 'src/minard' - -export type PlotAction = - | RegisterLayerAction - | UnregisterLayerAction - | SetDimensionsAction - | SetTableAction - | ResetAction - | SetControlledXDomainAction - | SetControlledYDomainAction - | SetXAxisLabelAction - | SetYAxisLabelAction - -interface RegisterLayerAction { - type: 'REGISTER_LAYER' - payload: { - layerKey: string - layer: Partial - } -} - -export const registerLayer = ( - layerKey: string, - layer: Partial -): RegisterLayerAction => ({ - type: 'REGISTER_LAYER', - payload: {layerKey, layer}, -}) - -interface UnregisterLayerAction { - type: 'UNREGISTER_LAYER' - payload: {layerKey: string} -} - -export const unregisterLayer = (layerKey: string): UnregisterLayerAction => ({ - type: 'UNREGISTER_LAYER', - payload: {layerKey}, -}) - -interface SetDimensionsAction { - type: 'SET_DIMENSIONS' - payload: {width: number; height: number} -} - -export const setDimensions = ( - width: number, - height: number -): SetDimensionsAction => ({ - type: 'SET_DIMENSIONS', - payload: {width, height}, -}) - -interface SetTableAction { - type: 'SET_TABLE' - payload: {table: Table} 
-} - -export const setTable = (table: Table): SetTableAction => ({ - type: 'SET_TABLE', - payload: {table}, -}) - -interface ResetAction { - type: 'RESET' - payload: Partial -} - -export const reset = (initialState: Partial): ResetAction => ({ - type: 'RESET', - payload: initialState, -}) - -interface SetControlledXDomainAction { - type: 'SET_CONTROLLED_X_DOMAIN' - payload: {xDomain: [number, number]} -} - -export const setControlledXDomain = ( - xDomain: [number, number] -): SetControlledXDomainAction => ({ - type: 'SET_CONTROLLED_X_DOMAIN', - payload: {xDomain}, -}) - -interface SetControlledYDomainAction { - type: 'SET_CONTROLLED_Y_DOMAIN' - payload: {yDomain: [number, number]} -} - -export const setControlledYDomain = ( - yDomain: [number, number] -): SetControlledYDomainAction => ({ - type: 'SET_CONTROLLED_Y_DOMAIN', - payload: {yDomain}, -}) - -interface SetXAxisLabelAction { - type: 'SET_X_AXIS_LABEL' - payload: {xAxisLabel: string} -} - -export const setXAxisLabel = (xAxisLabel: string): SetXAxisLabelAction => ({ - type: 'SET_X_AXIS_LABEL', - payload: {xAxisLabel}, -}) - -interface SetYAxisLabelAction { - type: 'SET_Y_AXIS_LABEL' - payload: {yAxisLabel: string} -} - -export const setYAxisLabel = (yAxisLabel: string): SetYAxisLabelAction => ({ - type: 'SET_Y_AXIS_LABEL', - payload: {yAxisLabel}, -}) diff --git a/ui/src/minard/utils/plotEnvReducer.ts b/ui/src/minard/utils/plotEnvReducer.ts deleted file mode 100644 index 1759347afc..0000000000 --- a/ui/src/minard/utils/plotEnvReducer.ts +++ /dev/null @@ -1,343 +0,0 @@ -import {extent, ticks} from 'd3-array' -import {scaleLinear, scaleOrdinal} from 'd3-scale' -import {produce} from 'immer' -import chroma from 'chroma-js' - -import { - PlotEnv, - Layer, - HistogramLayer, - Scale, - PLOT_PADDING, - TICK_CHAR_WIDTH, - TICK_CHAR_HEIGHT, - TICK_PADDING_RIGHT, - TICK_PADDING_TOP, - AXIS_LABEL_PADDING_BOTTOM, -} from 'src/minard' -import {PlotAction} from 'src/minard/utils/plotEnvActions' -import {getGroupKey} from 'src/minard/utils/getGroupKey' - -const DEFAULT_X_DOMAIN: [number, number] = [0, 1] -const DEFAULT_Y_DOMAIN: [number, number] = [0, 1] - -export const INITIAL_PLOT_ENV: PlotEnv = { - width: 0, - height: 0, - innerWidth: 0, - innerHeight: 0, - margins: { - top: PLOT_PADDING, - right: PLOT_PADDING, - bottom: PLOT_PADDING, - left: PLOT_PADDING, - }, - xTicks: [], - yTicks: [], - xAxisLabel: '', - yAxisLabel: '', - xDomain: null, - yDomain: null, - baseLayer: { - type: 'base', - table: {columns: {}, length: 0}, - xDomain: DEFAULT_X_DOMAIN, - yDomain: DEFAULT_Y_DOMAIN, - mappings: {}, - scales: { - x: null, - y: null, - }, - }, - layers: {}, - hoverX: null, - hoverY: null, - dispatch: () => {}, -} - -export const plotEnvReducer = (state: PlotEnv, action: PlotAction): PlotEnv => - produce(state, draftState => { - switch (action.type) { - case 'REGISTER_LAYER': { - const {layerKey, layer} = action.payload - - draftState.layers[layerKey] = {...layer, scales: {}} as Layer - - setXDomain(draftState) - setYDomain(draftState) - setLayout(draftState) - setFillScales(draftState) - - return - } - - case 'UNREGISTER_LAYER': { - const {layerKey} = action.payload - - delete draftState.layers[layerKey] - - setXDomain(draftState) - setYDomain(draftState) - setLayout(draftState) - setFillScales(draftState) - - return - } - - case 'SET_DIMENSIONS': { - const {width, height} = action.payload - - draftState.width = width - draftState.height = height - - setLayout(draftState) - - return - } - - case 'SET_TABLE': { - draftState.baseLayer.table = 
action.payload.table - - return - } - - case 'SET_CONTROLLED_X_DOMAIN': { - const {xDomain} = action.payload - - draftState.xDomain = xDomain - - setXDomain(draftState) - setLayout(draftState) - - return - } - - case 'SET_CONTROLLED_Y_DOMAIN': { - const {yDomain} = action.payload - - draftState.yDomain = yDomain - - setYDomain(draftState) - setLayout(draftState) - - return - } - - case 'SET_X_AXIS_LABEL': { - const {xAxisLabel} = action.payload - - draftState.xAxisLabel = xAxisLabel - - setLayout(draftState) - - return - } - - case 'SET_Y_AXIS_LABEL': { - const {yAxisLabel} = action.payload - - draftState.yAxisLabel = yAxisLabel - - setLayout(draftState) - - return - } - } - }) - -/* - Find all columns in the current in all layers that are mapped to the supplied - aesthetic mappings -*/ -const getColumnsForAesthetics = ( - state: PlotEnv, - mappings: string[] -): any[][] => { - const {baseLayer, layers} = state - - const cols = [] - - for (const layer of Object.values(layers)) { - for (const mapping of mappings) { - const colName = layer.mappings[mapping] - - if (colName) { - const col = layer.table - ? layer.table.columns[colName].data - : baseLayer.table.columns[colName].data - - cols.push(col) - } - } - } - - return cols -} - -/* - Flatten an array of arrays by one level -*/ -const flatten = (arrays: any[][]): any[] => [].concat(...arrays) - -/* - Given a list of aesthetics, find the domain across all columns in all layers - that are mapped to that aesthetic -*/ -const getDomainForAesthetics = ( - state: PlotEnv, - aesthetics: string[] -): [number, number] => { - const domains = getColumnsForAesthetics(state, aesthetics).map(col => - extent(col) - ) - - const domainOfDomains = extent(flatten(domains)) - - if (domainOfDomains.some(x => x === undefined)) { - return null - } - - return domainOfDomains -} - -/* - If the x domain is in "controlled" mode, set it according to the passed - `xDomain` prop. Otherwise compute and set the domain based on the extent of - relevant data in each layer. -*/ -const setXDomain = (draftState: PlotEnv): void => { - if (draftState.xDomain) { - draftState.baseLayer.xDomain = draftState.xDomain - } else { - draftState.baseLayer.xDomain = - getDomainForAesthetics(draftState, ['x', 'xMin', 'xMax']) || - DEFAULT_X_DOMAIN - } -} - -/* - See `setXDomain`. -*/ -const setYDomain = (draftState: PlotEnv): void => { - if (draftState.yDomain) { - draftState.baseLayer.yDomain = draftState.yDomain - } else { - draftState.baseLayer.yDomain = - getDomainForAesthetics(draftState, ['y', 'yMin', 'yMax']) || - DEFAULT_Y_DOMAIN - } -} - -const getTicks = ([d0, d1]: number[], length: number): number[] => { - const approxTickWidth = - Math.max(String(d0).length, String(d1).length) * TICK_CHAR_WIDTH - - const TICK_DENSITY = 0.3 - const numTicks = Math.round((length / approxTickWidth) * TICK_DENSITY) - const result = ticks(d0, d1, numTicks) - - return result -} - -/* - Compute and set the ticks, margins, x/y scales, and dimensions for the plot. -*/ -const setLayout = (draftState: PlotEnv): void => { - const {width, height} = draftState - const xDomain = draftState.xDomain || draftState.baseLayer.xDomain - const yDomain = draftState.yDomain || draftState.baseLayer.yDomain - - draftState.xTicks = getTicks(xDomain, width) - draftState.yTicks = getTicks(yDomain, height) - - const yTickWidth = - Math.max(...draftState.yTicks.map(t => String(t).length)) * TICK_CHAR_WIDTH - - const xAxisLabelHeight = draftState.xAxisLabel - ? 
TICK_CHAR_HEIGHT + AXIS_LABEL_PADDING_BOTTOM - : 0 - - const yAxisLabelHeight = draftState.yAxisLabel - ? TICK_CHAR_HEIGHT + AXIS_LABEL_PADDING_BOTTOM - : 0 - - const margins = { - top: PLOT_PADDING, - right: PLOT_PADDING, - bottom: - TICK_CHAR_HEIGHT + TICK_PADDING_TOP + PLOT_PADDING + xAxisLabelHeight, - left: yTickWidth + TICK_PADDING_RIGHT + PLOT_PADDING + yAxisLabelHeight, - } - - const innerWidth = width - margins.left - margins.right - const innerHeight = height - margins.top - margins.bottom - - draftState.margins = margins - draftState.innerWidth = innerWidth - draftState.innerHeight = innerHeight - - draftState.baseLayer.scales.x = scaleLinear() - .domain(xDomain) - .range([0, innerWidth]) - - draftState.baseLayer.scales.y = scaleLinear() - .domain(yDomain) - .range([innerHeight, 0]) -} - -/* - Get a scale that maps elements of the domain to a color according to the - color scheme passed as `colors`. -*/ -const getColorScale = ( - domain: string[], - colors: string[] -): Scale => { - const range = chroma - .scale(colors) - .mode('lch') - .colors(domain.length) - - const scale = scaleOrdinal() - .domain(domain) - .range(range) - - return scale -} - -/* - Get the domain for the scale used for the data-to-fill aesthetic mapping. - - The fill aesthetic is always used to visually distinguish different groupings - of data (for now). So the domain of the scale is a set of "group keys" which - represent all possible groupings of data in the layer. -*/ -const getFillDomain = ({table, mappings}: HistogramLayer): string[] => { - const fillColKeys = mappings.fill - - if (!fillColKeys.length) { - return [] - } - - const fillDomain = new Set() - - for (let i = 0; i < table.length; i++) { - fillDomain.add(getGroupKey(fillColKeys.map(k => table.columns[k].data[i]))) - } - - return [...fillDomain].sort() -} - -/* - For each layer, compute and set a fill scale according to the layer's - data-to-fill mapping. -*/ -const setFillScales = (draftState: PlotEnv) => { - const layers = Object.values(draftState.layers) - - layers - .filter(layer => layer.type === 'histogram') - .forEach((layer: HistogramLayer) => { - layer.scales.fill = getColorScale(getFillDomain(layer), layer.colors) - }) -} diff --git a/ui/src/minard/utils/useLayer.ts b/ui/src/minard/utils/useLayer.ts deleted file mode 100644 index 87b5afb6a4..0000000000 --- a/ui/src/minard/utils/useLayer.ts +++ /dev/null @@ -1,25 +0,0 @@ -import uuid from 'uuid' -import {useEffect, useRef, DependencyList} from 'react' -import {PlotEnv, Layer} from 'src/minard' - -import {registerLayer, unregisterLayer} from 'src/minard/utils/plotEnvActions' - -/* - Register a layer in the plot environment. A layer can optionally specify its - own data, color scheme, and data-to-aesthetic mappings. 
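// Hypothetical usage sketch (the wrapper name and its inputs are assumptions,
// not code from the repo): a histogram child of the plot would register its
// layer through the useLayer hook being removed here. The hook generates a
// stable layer key, dispatches registerLayer on mount and whenever the listed
// inputs change, and unregisters the layer on unmount.
import {PlotEnv, HistogramTable, HistogramMappings} from 'src/minard'
import {useLayer} from 'src/minard/utils/useLayer'

const useHistogramLayerSketch = (
  env: PlotEnv,
  table: HistogramTable,
  mappings: HistogramMappings,
  colors: string[]
) =>
  useLayer(
    env,
    () => ({type: 'histogram', table, mappings, colors}),
    [table, mappings, colors]
  )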
-*/ -export const useLayer = ( - env: PlotEnv, - layerFactory: () => Partial, - inputs?: DependencyList -) => { - const {current: layerKey} = useRef(uuid.v4()) - - useEffect(() => { - env.dispatch(registerLayer(layerKey, layerFactory())) - - return () => env.dispatch(unregisterLayer(layerKey)) - }, inputs) - - return env.layers[layerKey] -} diff --git a/ui/src/minard/utils/useLayoutStyle.ts b/ui/src/minard/utils/useLayoutStyle.ts deleted file mode 100644 index 07896e30c1..0000000000 --- a/ui/src/minard/utils/useLayoutStyle.ts +++ /dev/null @@ -1,18 +0,0 @@ -import {useLayoutEffect, CSSProperties} from 'react' - -export const useLayoutStyle = ( - el: HTMLElement, - f: (el: HTMLElement) => CSSProperties -) => { - useLayoutEffect(() => { - if (!el) { - return - } - - const style = f(el) - - for (const [k, v] of Object.entries(style)) { - el.style[k] = v - } - }) -} diff --git a/ui/src/minard/utils/useMountedEffect.ts b/ui/src/minard/utils/useMountedEffect.ts deleted file mode 100644 index 9c6f6b5538..0000000000 --- a/ui/src/minard/utils/useMountedEffect.ts +++ /dev/null @@ -1,22 +0,0 @@ -import {useEffect, useRef, EffectCallback, DependencyList} from 'react' - -/* - Behaves like `useEffect`, but won't fire after the initial render of a - component. -*/ -export const useMountedEffect = ( - effect: EffectCallback, - inputs?: DependencyList -) => { - const isFirstRender = useRef(true) - - useEffect(() => { - if (isFirstRender.current) { - isFirstRender.current = false - - return - } - - return effect() - }, inputs) -} diff --git a/ui/src/minard/utils/useMousePos.ts b/ui/src/minard/utils/useMousePos.ts deleted file mode 100644 index 658cb85d0a..0000000000 --- a/ui/src/minard/utils/useMousePos.ts +++ /dev/null @@ -1,35 +0,0 @@ -import {useState, useEffect} from 'react' - -export const useMousePos = (el: Element): {x: number; y: number} => { - const [state, setState] = useState({x: null, y: null}) - - useEffect(() => { - if (!el) { - return - } - - const onMouseEnter = e => { - const {left, top} = el.getBoundingClientRect() - - setState({x: e.pageX - left, y: e.pageY - top}) - } - - const onMouseMove = onMouseEnter - - const onMouseLeave = () => { - setState({x: null, y: null}) - } - - el.addEventListener('mouseenter', onMouseEnter) - el.addEventListener('mousemove', onMouseMove) - el.addEventListener('mouseleave', onMouseLeave) - - return () => { - el.removeEventListener('mouseenter', onMouseEnter) - el.removeEventListener('mousemove', onMouseMove) - el.removeEventListener('mouseleave', onMouseLeave) - } - }, [el]) - - return state -} diff --git a/ui/src/minard/utils/useTooltipStyle.ts b/ui/src/minard/utils/useTooltipStyle.ts deleted file mode 100644 index b0f4ad91a7..0000000000 --- a/ui/src/minard/utils/useTooltipStyle.ts +++ /dev/null @@ -1,48 +0,0 @@ -import {useLayoutStyle} from 'src/minard/utils/useLayoutStyle' -import {useMousePos} from 'src/minard/utils/useMousePos' - -const MARGIN_X = 15 - -export const useTooltipStyle = (el: HTMLDivElement) => { - const {x, y} = useMousePos(document.body) - - // Position the tooltip next to the mouse cursor, like this: - // - // ┌─────────────┐ - // │ │ - // (mouse) │ tooltip │ - // │ │ - // └─────────────┘ - // - // The positioning is subject to the following restrictions: - // - // - If the tooltip overflows the right side of the screen, position it on - // the left side of the cursor instead - // - // - If the tooltip overflows the top or bottom of the screen (with a bit of - // margin), shift it just enough so that it is fully back inside the screen - 
// - useLayoutStyle(el, ({offsetWidth, offsetHeight}) => { - let dx = MARGIN_X - let dy = 0 - offsetHeight / 2 - - if (x + dx + offsetWidth > window.innerWidth) { - dx = 0 - MARGIN_X - offsetWidth - } - - if (y + dy + offsetHeight > window.innerHeight) { - dy -= y + dy + offsetHeight - window.innerHeight - } - - if (y + dy < 0) { - dy += 0 - (y + dy) - } - - return { - display: 'inline', - position: 'fixed', - left: `${x + dx}px`, - top: `${y + dy}px`, - } - }) -} diff --git a/ui/src/onboarding/actions/index.ts b/ui/src/onboarding/actions/index.ts index 880ddf7511..18bf20f43f 100644 --- a/ui/src/onboarding/actions/index.ts +++ b/ui/src/onboarding/actions/index.ts @@ -4,7 +4,7 @@ import _ from 'lodash' // Constants import {StepStatus} from 'src/clockface/constants/wizard' import {SetupSuccess, SetupError} from 'src/shared/copy/notifications' -import {systemTemplate} from 'src/organizations/constants/index' +import {defaultTemplates} from 'src/templates/constants/' // Actions import {notify} from 'src/shared/actions/notifications' @@ -84,7 +84,7 @@ export const setupAdmin = (params: ISetupParams) => async ( await client.auth.signin(username, password) - await client.templates.create({...systemTemplate(params.bucket), orgID}) + await client.templates.create({...defaultTemplates.systemTemplate(), orgID}) dispatch(notify(SetupSuccess)) dispatch(setStepStatus(1, StepStatus.Complete)) diff --git a/ui/src/onboarding/components/SigninForm.tsx b/ui/src/onboarding/components/SigninForm.tsx index 14c3846831..3266f2c8de 100644 --- a/ui/src/onboarding/components/SigninForm.tsx +++ b/ui/src/onboarding/components/SigninForm.tsx @@ -134,7 +134,7 @@ class SigninForm extends PureComponent { const {query} = this.props.location if (query && query.returnTo) { - router.push(query.returnTo) + router.replace(query.returnTo) } else { router.push('/') } diff --git a/ui/src/onboarding/components/__snapshots__/AdminStep.test.tsx.snap b/ui/src/onboarding/components/__snapshots__/AdminStep.test.tsx.snap index 5b4dfeafed..b6e9c3353e 100644 --- a/ui/src/onboarding/components/__snapshots__/AdminStep.test.tsx.snap +++ b/ui/src/onboarding/components/__snapshots__/AdminStep.test.tsx.snap @@ -13,6 +13,7 @@ exports[`Onboarding.Components.AdminStep renders 1`] = ` async dispatch => { dispatch(populateTasks(tasks)) } -export interface PopulateDashboards { - type: ActionTypes.PopulateDashboards - payload: {dashboards: Dashboard[]} -} - -export const populateDashboards = ( - dashboards: Dashboard[] -): PopulateDashboards => ({ - type: ActionTypes.PopulateDashboards, - payload: {dashboards}, -}) - -export const getDashboards = (orgID: string) => async dispatch => { - const dashboards = await getDashboardsByOrgID(orgID) - dispatch(populateDashboards(dashboards)) -} - export const createScraper = (scraper: ScraperTargetRequest) => async () => { await client.scrapers.create(scraper) } diff --git a/ui/src/organizations/actions/orgs.ts b/ui/src/organizations/actions/orgs.ts index f4adf3a4ee..653d854a7c 100644 --- a/ui/src/organizations/actions/orgs.ts +++ b/ui/src/organizations/actions/orgs.ts @@ -7,6 +7,8 @@ import {client} from 'src/utils/api' // Types import {Organization} from 'src/types' +import {defaultTemplates} from 'src/templates/constants/' + export enum ActionTypes { SetOrgs = 'SET_ORGS', AddOrg = 'ADD_ORG', @@ -84,6 +86,10 @@ export const createOrg = (org: Organization) => async ( ): Promise => { try { const createdOrg = await client.organizations.create(org) + await client.templates.create({ + 
...defaultTemplates.systemTemplate(), + orgID: createdOrg.id, + }) dispatch(addOrg(createdOrg)) } catch (e) { console.error(e) diff --git a/ui/src/organizations/components/Dashboards.tsx b/ui/src/organizations/components/Dashboards.tsx index 79aeb51fc0..f81d9f6fb6 100644 --- a/ui/src/organizations/components/Dashboards.tsx +++ b/ui/src/organizations/components/Dashboards.tsx @@ -26,8 +26,7 @@ import {DEFAULT_DASHBOARD_NAME} from 'src/dashboards/constants/index' // Types import {IconFont} from '@influxdata/clockface' -import {Notification} from 'src/types/notifications' -import {Dashboard} from 'src/types' +import {Dashboard, AppState, Notification} from 'src/types' // Decorators import {ErrorHandling} from 'src/shared/decorators/errors' @@ -39,12 +38,14 @@ interface DispatchProps { } interface OwnProps { - dashboards: Dashboard[] - onChange: () => void orgID: string } -type Props = DispatchProps & OwnProps & WithRouterProps +interface StateProps { + dashboards: Dashboard[] +} + +type Props = DispatchProps & StateProps & OwnProps & WithRouterProps interface State { searchTerm: string @@ -149,13 +150,19 @@ class Dashboards extends PureComponent { } } +const mstp = (state: AppState, props: OwnProps): StateProps => { + const dashboards = state.dashboards.list.filter(d => d.orgID === props.orgID) + + return {dashboards} +} + const mdtp: DispatchProps = { notify: notifyAction, handleDeleteDashboard: deleteDashboardAsync, handleUpdateDashboard: updateDashboardAsync, } -export default connect<{}, DispatchProps, OwnProps>( - null, +export default connect( + mstp, mdtp )(withRouter(Dashboards)) diff --git a/ui/src/organizations/components/Members.tsx b/ui/src/organizations/components/Members.tsx index f484f3d596..40a56ec529 100644 --- a/ui/src/organizations/components/Members.tsx +++ b/ui/src/organizations/components/Members.tsx @@ -117,22 +117,11 @@ export default class Members extends PureComponent { private async getUsers() { const {members} = this.props + const apiUsers = await client.users.getAll() + const allUsers = apiUsers.reduce((acc, u) => _.set(acc, u.id, u), {}) + const users = _.omit(allUsers, members.map(m => m.id)) - const data = await client.users.getAllUsers() - - const users = {} - - data.users.forEach(key => { - users[key.id] = key - }) - - members.forEach(m => { - if (users[m.id]) { - delete users[m.id] - } - }) - - this.setState({users: users}) + this.setState({users}) } private addMember = async (user: AddResourceMemberRequestBody) => { diff --git a/ui/src/organizations/components/OrgTemplatesList.tsx b/ui/src/organizations/components/OrgTemplatesList.tsx index 164f9c5b44..2ab6c72f95 100644 --- a/ui/src/organizations/components/OrgTemplatesList.tsx +++ b/ui/src/organizations/components/OrgTemplatesList.tsx @@ -54,11 +54,7 @@ export default class OrgTemplatesList extends PureComponent { + } > {this.sortedCards} diff --git a/ui/src/organizations/components/OrgVariableExportOverlay.tsx b/ui/src/organizations/components/OrgVariableExportOverlay.tsx new file mode 100644 index 0000000000..1bc4b54501 --- /dev/null +++ b/ui/src/organizations/components/OrgVariableExportOverlay.tsx @@ -0,0 +1,86 @@ +import React, {PureComponent} from 'react' +import {withRouter, WithRouterProps} from 'react-router' +import {connect} from 'react-redux' + +// Components +import ExportOverlay from 'src/shared/components/ExportOverlay' + +// Actions +import {convertToTemplate as convertToTemplateAction} from 'src/variables/actions' +import {clearExportTemplate as clearExportTemplateAction} from 
'src/templates/actions' + +// Types +import {AppState} from 'src/types' +import {DocumentCreate} from '@influxdata/influx' +import {RemoteDataState} from 'src/types' + +interface OwnProps { + params: {id: string; orgID: string} +} + +interface DispatchProps { + convertToTemplate: typeof convertToTemplateAction + clearExportTemplate: typeof clearExportTemplateAction +} + +interface StateProps { + variableTemplate: DocumentCreate + status: RemoteDataState + orgID: string +} + +type Props = OwnProps & StateProps & DispatchProps & WithRouterProps + +class OrgVariableExportOverlay extends PureComponent { + public async componentDidMount() { + const { + params: {id}, + convertToTemplate, + } = this.props + + convertToTemplate(id) + } + + public render() { + const {variableTemplate, status} = this.props + + return ( + + ) + } + + private get orgID() { + const orgFromExistingResource = this.props.orgID + const orgInRoutes = this.props.params.orgID + return orgFromExistingResource || orgInRoutes + } + + private onDismiss = () => { + const {router, clearExportTemplate} = this.props + + router.goBack() + clearExportTemplate() + } +} + +const mstp = (state: AppState): StateProps => ({ + variableTemplate: state.templates.exportTemplate.item, + status: state.templates.exportTemplate.status, + orgID: state.templates.exportTemplate.orgID, +}) + +const mdtp: DispatchProps = { + convertToTemplate: convertToTemplateAction, + clearExportTemplate: clearExportTemplateAction, +} + +export default connect( + mstp, + mdtp +)(withRouter(OrgVariableExportOverlay)) diff --git a/ui/src/organizations/components/OrganizationsIndexContents.tsx b/ui/src/organizations/components/OrganizationsIndexContents.tsx index d8dd26afe9..b2360a4667 100644 --- a/ui/src/organizations/components/OrganizationsIndexContents.tsx +++ b/ui/src/organizations/components/OrganizationsIndexContents.tsx @@ -14,6 +14,7 @@ import {ErrorHandling} from 'src/shared/decorators/errors' import {Organization} from 'src/types' import {Alignment, ComponentSize} from '@influxdata/clockface' import {deleteOrg} from 'src/organizations/actions/orgs' +import CloudExclude from 'src/shared/components/cloud/CloudExclude' interface Props { orgs: Organization[] @@ -47,13 +48,15 @@ class OrganizationsPageContents extends Component { {o.name} - + + + )) diff --git a/ui/src/organizations/components/Retention.tsx b/ui/src/organizations/components/Retention.tsx index db973d192f..2af108e45b 100644 --- a/ui/src/organizations/components/Retention.tsx +++ b/ui/src/organizations/components/Retention.tsx @@ -31,6 +31,7 @@ export default class Retention extends PureComponent { <> { Never void onUpdateVariableName: (variable: Partial) => void onEditVariable: (variable: Variable) => void } -export default class VariableRow extends PureComponent { +type Props = OwnProps & WithRouterProps + +class VariableRow extends PureComponent { public render() { const {variable, onDeleteVariable} = this.props @@ -38,18 +37,38 @@ export default class VariableRow extends PureComponent { Query - + + + + + + + + ) } + private handleExport = () => { + const { + router, + variable, + location: {pathname}, + } = this.props + + router.push(`${pathname}/${variable.id}/export`) + } + private handleUpdateVariableName = async (name: string) => { const {onUpdateVariableName, variable} = this.props @@ -60,3 +79,5 @@ export default class VariableRow extends PureComponent { this.props.onEditVariable(this.props.variable) } } + +export default withRouter(VariableRow) diff --git 
a/ui/src/organizations/components/Variables.tsx b/ui/src/organizations/components/Variables.tsx index 6ad78e3a89..cda1947c35 100644 --- a/ui/src/organizations/components/Variables.tsx +++ b/ui/src/organizations/components/Variables.tsx @@ -2,6 +2,7 @@ import React, {PureComponent, ChangeEvent} from 'react' import _ from 'lodash' import {connect} from 'react-redux' +import {withRouter, WithRouterProps} from 'react-router' // Utils import {getVariablesForOrg} from 'src/variables/selectors' @@ -43,7 +44,7 @@ interface OwnProps { org: Organization } -type Props = StateProps & DispatchProps & OwnProps +type Props = StateProps & DispatchProps & OwnProps & WithRouterProps interface State { searchTerm: string @@ -153,7 +154,11 @@ class Variables extends PureComponent { private handleFilterBlur() {} - private handleOpenImportOverlay = (): void => {} + private handleOpenImportOverlay = (): void => { + const {router, org} = this.props + + router.push(`/organizations/${org.id}/variables/import`) + } private handleOpenCreateOverlay = (): void => { this.setState({createOverlayState: OverlayState.Open}) @@ -202,4 +207,4 @@ const mdtp = { export default connect( mstp, mdtp -)(Variables) +)(withRouter(Variables)) diff --git a/ui/src/organizations/components/__snapshots__/Variables.test.tsx.snap b/ui/src/organizations/components/__snapshots__/Variables.test.tsx.snap index 0a89d05e68..6136c58180 100644 --- a/ui/src/organizations/components/__snapshots__/Variables.test.tsx.snap +++ b/ui/src/organizations/components/__snapshots__/Variables.test.tsx.snap @@ -19,7 +19,7 @@ exports[`VariableList rendering renders 1`] = ` columnCount={3} emptyState={} > - ({ - meta: { - version: '1', - name: 'System-Template', - description: 'dashboard template for the system telegraf plugin', - }, - content: { - data: { - type: 'dashboard', - attributes: { - name: 'System', - description: - 'A collection of useful visualizations for monitoring your system stats', - }, - relationships: { - label: { - data: [], - }, - cell: { - data: [ - { - type: 'cell', - id: '0387c87bb5b3e001', - }, - { - type: 'cell', - id: '0387c87bb5f3e000', - }, - { - type: 'cell', - id: '0387c87bb5f3e001', - }, - { - type: 'cell', - id: '0387c87bb633e000', - }, - { - type: 'cell', - id: '0387c87bb633e001', - }, - { - type: 'cell', - id: '0387c87bb633e002', - }, - { - type: 'cell', - id: '0387c87bb673e000', - }, - { - type: 'cell', - id: '0387c87bb673e001', - }, - { - type: 'cell', - id: '0387c87bb6b3e000', - }, - { - type: 'cell', - id: '0387c87bb6b3e001', - }, - { - type: 'cell', - id: '0387c87bb6f3e000', - }, - { - type: 'cell', - id: '0387c87bb733e000', - }, - { - type: 'cell', - id: '0387c87bb733e001', - }, - ], - }, - }, - }, - included: [ - { - id: '0387c87bb5b3e001', - type: 'cell', - attributes: { - x: 0, - y: 0, - w: 12, - h: 1, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb5b3e001', - }, - }, - }, - }, - { - id: '0387c87bb5f3e000', - type: 'cell', - attributes: { - x: 0, - y: 1, - w: 3, - h: 1, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb5f3e000', - }, - }, - }, - }, - { - id: '0387c87bb5f3e001', - type: 'cell', - attributes: { - x: 3, - y: 1, - w: 2, - h: 1, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb5f3e001', - }, - }, - }, - }, - { - id: '0387c87bb633e000', - type: 'cell', - attributes: { - x: 5, - y: 1, - w: 2, - h: 1, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb633e000', - }, - }, - }, - }, - { - id: 
'0387c87bb633e001', - type: 'cell', - attributes: { - x: 7, - y: 1, - w: 2, - h: 1, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb633e001', - }, - }, - }, - }, - { - id: '0387c87bb633e002', - type: 'cell', - attributes: { - x: 0, - y: 2, - w: 3, - h: 3, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb633e002', - }, - }, - }, - }, - { - id: '0387c87bb673e000', - type: 'cell', - attributes: { - x: 3, - y: 2, - w: 3, - h: 3, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb673e000', - }, - }, - }, - }, - { - id: '0387c87bb673e001', - type: 'cell', - attributes: { - x: 6, - y: 2, - w: 3, - h: 3, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb673e001', - }, - }, - }, - }, - { - id: '0387c87bb6b3e000', - type: 'cell', - attributes: { - x: 9, - y: 1, - w: 3, - h: 4, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb6b3e000', - }, - }, - }, - }, - { - id: '0387c87bb6b3e001', - type: 'cell', - attributes: { - x: 0, - y: 5, - w: 3, - h: 3, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb6b3e001', - }, - }, - }, - }, - { - id: '0387c87bb6f3e000', - type: 'cell', - attributes: { - x: 3, - y: 5, - w: 3, - h: 3, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb6f3e000', - }, - }, - }, - }, - { - id: '0387c87bb733e000', - type: 'cell', - attributes: { - x: 6, - y: 5, - w: 3, - h: 3, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb733e000', - }, - }, - }, - }, - { - id: '0387c87bb733e001', - type: 'cell', - attributes: { - x: 9, - y: 5, - w: 3, - h: 3, - }, - relationships: { - view: { - data: { - type: 'view', - id: '0387c87bb733e001', - }, - }, - }, - }, - { - type: 'view', - id: '0387c87bb5b3e001', - attributes: { - properties: { - shape: 'chronograf-v2', - type: 'markdown', - note: - 'This dashboard gives you an overview of System metrics with metrics from `system`, `mem`, `diskio`, `swap` and `net` measurements. 
See the [Telegraf Documentation](https://github.com/influxdata/telegraf/tree/master/plugins/inputs/system) for help configuring these plugins.', - }, - name: 'Name this Cell', - }, - }, - { - type: 'view', - id: '0387c87bb5f3e000', - attributes: { - properties: { - shape: 'chronograf-v2', - type: 'single-stat', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "system")\n |> filter(fn: (r) => r._field == "uptime")\n |> window(period: 1h)\n |> last()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> map(fn: (r) => r._value / 86400, mergeKey: true)\n |> yield(name: "last")\n \n \n `, - editMode: 'advanced', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['system'], - }, - { - key: '_field', - values: ['uptime'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'last', - }, - ], - }, - }, - ], - prefix: '', - suffix: ' days', - colors: [ - { - id: 'base', - type: 'text', - hex: '#00C9FF', - name: 'laser', - value: 0, - }, - ], - decimalPlaces: { - isEnforced: false, - digits: 2, - }, - note: '', - showNoteWhenEmpty: false, - }, - name: 'System Uptime', - }, - }, - { - type: 'view', - id: '0387c87bb5f3e001', - attributes: { - properties: { - shape: 'chronograf-v2', - type: 'single-stat', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "system")\n |> filter(fn: (r) => r._field == "n_cpus")\n |> window(period: v.windowPeriod)\n |> last()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "last")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['system'], - }, - { - key: '_field', - values: ['n_cpus'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'last', - }, - ], - }, - }, - ], - prefix: '', - suffix: ' cpus', - colors: [ - { - id: 'base', - type: 'text', - hex: '#00C9FF', - name: 'laser', - value: 0, - }, - ], - decimalPlaces: { - isEnforced: true, - digits: 2, - }, - note: '', - showNoteWhenEmpty: false, - }, - name: 'nCPUs', - }, - }, - { - type: 'view', - id: '0387c87bb633e000', - attributes: { - properties: { - shape: 'chronograf-v2', - type: 'single-stat', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "system")\n |> filter(fn: (r) => r._field == "load1")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['system'], - }, - { - key: '_field', - values: ['load1'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'mean', - }, - ], - }, - }, - ], - prefix: '', - suffix: '', - colors: [ - { - id: 'base', - type: 'text', - hex: '#00C9FF', - name: 'laser', - value: 0, - }, - ], - decimalPlaces: { - isEnforced: true, - digits: 2, - }, - note: '', - showNoteWhenEmpty: false, - }, - name: 'System Load', - }, - }, - { - type: 'view', - id: '0387c87bb633e001', - attributes: { - properties: { - shape: 'chronograf-v2', - type: 'single-stat', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> 
filter(fn: (r) => r._measurement == "mem")\n |> filter(fn: (r) => r._field == "total")\n |> window(period: v.windowPeriod)\n |> last()\n |> map(fn: (r) => r._value / 1024 / 1024 / 1024, mergeKey: true)\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "last")\n `, - editMode: 'advanced', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['mem'], - }, - { - key: '_field', - values: ['total'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'last', - }, - ], - }, - }, - ], - prefix: '', - suffix: ' GB', - colors: [ - { - id: 'base', - type: 'text', - hex: '#00C9FF', - name: 'laser', - value: 0, - }, - ], - decimalPlaces: { - isEnforced: true, - digits: 2, - }, - note: '', - showNoteWhenEmpty: false, - }, - name: 'Total Memory', - }, - }, - { - type: 'view', - id: '0387c87bb633e002', - attributes: { - properties: { - shape: 'chronograf-v2', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "disk")\n |> filter(fn: (r) => r._field == "used_percent")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['disk'], - }, - { - key: '_field', - values: ['used_percent'], - }, - { - key: 'fstype', - values: [], - }, - ], - functions: [ - { - name: 'mean', - }, - ], - }, - }, - ], - axes: { - x: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '%', - base: '10', - scale: 'linear', - }, - y2: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - }, - type: 'xy', - legend: {}, - geom: 'line', - colors: [], - note: '', - showNoteWhenEmpty: false, - }, - name: 'Disk Usage', - }, - }, - { - type: 'view', - id: '0387c87bb673e000', - attributes: { - properties: { - shape: 'chronograf-v2', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "cpu")\n |> filter(fn: (r) => r._field == "usage_user" or r._field == "usage_system" or r._field == "usage_idle")\n |> filter(fn: (r) => r.cpu == "cpu-total")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['cpu'], - }, - { - key: '_field', - values: ['usage_user', 'usage_system', 'usage_idle'], - }, - { - key: 'cpu', - values: ['cpu-total'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'mean', - }, - ], - }, - }, - ], - axes: { - x: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '%', - base: '10', - scale: 'linear', - }, - y2: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - }, - type: 'xy', - legend: {}, - geom: 'line', - colors: [], - note: '', - showNoteWhenEmpty: false, - }, - name: 'CPU Usage', - }, - }, - { - type: 'view', - id: '0387c87bb673e001', - attributes: { - properties: { - 
shape: 'chronograf-v2', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "system")\n |> filter(fn: (r) => r._field == "load1" or r._field == "load5" or r._field == "load15")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['system'], - }, - { - key: '_field', - values: ['load1', 'load5', 'load15'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'mean', - }, - ], - }, - }, - ], - axes: { - x: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y: { - bounds: ['', ''], - label: 'Load', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y2: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - }, - type: 'xy', - legend: {}, - geom: 'line', - colors: [], - note: '', - showNoteWhenEmpty: false, - }, - name: 'System Load', - }, - }, - { - type: 'view', - id: '0387c87bb6b3e000', - attributes: { - properties: { - shape: 'chronograf-v2', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "mem")\n |> filter(fn: (r) => r._field == "used_percent")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['mem'], - }, - { - key: '_field', - values: ['used_percent'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'mean', - }, - ], - }, - }, - ], - axes: { - x: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '%', - base: '10', - scale: 'linear', - }, - y2: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - }, - type: 'line-plus-single-stat', - legend: {}, - colors: [ - { - id: 'base', - type: 'text', - hex: '#00C9FF', - name: 'laser', - value: 0, - }, - { - id: 'c2f922df-60a1-4471-91fc-c16427e7fcfb', - type: 'scale', - hex: '#8F8AF4', - name: 'Do Androids Dream of Electric Sheep?', - value: 0, - }, - { - id: '330f7fee-d44e-4a15-b2d6-2330178ec203', - type: 'scale', - hex: '#A51414', - name: 'Do Androids Dream of Electric Sheep?', - value: 0, - }, - { - id: 'e3c73eb3-665a-414b-afdd-1686c9b962d9', - type: 'scale', - hex: '#F4CF31', - name: 'Do Androids Dream of Electric Sheep?', - value: 0, - }, - ], - prefix: '', - suffix: '%', - decimalPlaces: { - isEnforced: true, - digits: 1, - }, - note: '', - showNoteWhenEmpty: false, - }, - name: 'Memory Usage', - }, - }, - { - type: 'view', - id: '0387c87bb6b3e001', - attributes: { - properties: { - shape: 'chronograf-v2', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "diskio")\n |> filter(fn: (r) => r._field == "read_bytes" or r._field == "write_bytes")\n |> derivative(unit: v.windowPeriod, nonNegative: false)\n |> yield(name: "derivative")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: 
'_measurement', - values: ['diskio'], - }, - { - key: '_field', - values: ['read_bytes', 'write_bytes'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'derivative', - }, - ], - }, - }, - ], - axes: { - x: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y: { - bounds: ['', ''], - label: 'Bytes', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y2: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - }, - type: 'xy', - legend: {}, - geom: 'line', - colors: [], - note: '', - showNoteWhenEmpty: false, - }, - name: 'Disk IO', - }, - }, - { - type: 'view', - id: '0387c87bb6f3e000', - attributes: { - properties: { - shape: 'chronograf-v2', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "net")\n |> filter(fn: (r) => r._field == "bytes_recv" or r._field == "bytes_sent")\n |> derivative(unit: v.windowPeriod, nonNegative: false)\n |> yield(name: "derivative")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['net'], - }, - { - key: '_field', - values: ['bytes_recv', 'bytes_sent'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'derivative', - }, - ], - }, - }, - ], - axes: { - x: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y: { - bounds: ['', ''], - label: 'Bytes', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y2: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - }, - type: 'xy', - legend: {}, - geom: 'line', - colors: [], - note: '', - showNoteWhenEmpty: false, - }, - name: 'Network', - }, - }, - { - type: 'view', - id: '0387c87bb733e000', - attributes: { - properties: { - shape: 'chronograf-v2', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "processes")\n |> filter(fn: (r) => r._field == "running" or r._field == "blocked" or r._field == "idle" or r._field == "unknown")\n |> window(period: v.windowPeriod)\n |> max()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "max")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['processes'], - }, - { - key: '_field', - values: ['running', 'blocked', 'idle', 'unknown'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'max', - }, - ], - }, - }, - ], - axes: { - x: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y2: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - }, - type: 'xy', - legend: {}, - geom: 'line', - colors: [], - note: '', - showNoteWhenEmpty: false, - }, - name: 'Processes', - }, - }, - { - type: 'view', - id: '0387c87bb733e001', - attributes: { - properties: { - shape: 'chronograf-v2', - queries: [ - { - text: `from(bucket: "${bucketName}")\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "swap")\n |> filter(fn: (r) => r._field == "total" or r._field == "used")\n |> window(period: v.windowPeriod)\n |> 
mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")`, - editMode: 'builder', - name: '', - builderConfig: { - buckets: [bucketName], - tags: [ - { - key: '_measurement', - values: ['swap'], - }, - { - key: '_field', - values: ['total', 'used'], - }, - { - key: 'host', - values: [], - }, - ], - functions: [ - { - name: 'mean', - }, - ], - }, - }, - ], - axes: { - x: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - y2: { - bounds: ['', ''], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', - }, - }, - type: 'xy', - legend: {}, - geom: 'line', - colors: [], - note: '', - showNoteWhenEmpty: false, - }, - name: 'Swap', - }, - }, - ], - }, - labels: [], -}) diff --git a/ui/src/organizations/containers/OrgDashboardsIndex.tsx b/ui/src/organizations/containers/OrgDashboardsIndex.tsx index 135e353b26..f226190236 100644 --- a/ui/src/organizations/containers/OrgDashboardsIndex.tsx +++ b/ui/src/organizations/containers/OrgDashboardsIndex.tsx @@ -9,21 +9,19 @@ import OrganizationNavigation from 'src/organizations/components/OrganizationNav import OrgHeader from 'src/organizations/containers/OrgHeader' import {Tabs} from 'src/clockface' import {Page} from 'src/pageLayout' -import {SpinnerContainer, TechnoSpinner} from '@influxdata/clockface' import TabbedPageSection from 'src/shared/components/tabbed_page/TabbedPageSection' import Dashboards from 'src/organizations/components/Dashboards' +import GetResources, { + ResourceTypes, +} from 'src/configuration/components/GetResources' //Actions import * as NotificationsActions from 'src/types/actions/notifications' import * as notifyActions from 'src/shared/actions/notifications' -import { - getDashboards as getDashboardsAction, - populateDashboards as populateDashboardsAction, -} from 'src/organizations/actions/orgView' // Types import {Organization} from '@influxdata/influx' -import {AppState, Dashboard} from 'src/types' +import {AppState} from 'src/types' import {RemoteDataState} from 'src/types' interface RouterProps { @@ -34,13 +32,10 @@ interface RouterProps { interface DispatchProps { notify: NotificationsActions.PublishNotificationActionCreator - getDashboards: typeof getDashboardsAction - populateDashboards: typeof populateDashboardsAction } interface StateProps { org: Organization - dashboards: Dashboard[] } type Props = WithRouterProps & RouterProps & DispatchProps & StateProps @@ -51,25 +46,6 @@ interface State { @ErrorHandling class OrgDashboardsIndex extends Component { - public state = { - loadingState: RemoteDataState.NotStarted, - } - - public componentDidMount = async () => { - this.setState({loadingState: RemoteDataState.Loading}) - - const {getDashboards, org} = this.props - - await getDashboards(org.id) - - this.setState({loadingState: RemoteDataState.Done}) - } - - public componentWillUnmount = async () => { - const {populateDashboards} = this.props - populateDashboards([]) - } - public render() { const {org} = this.props @@ -87,7 +63,9 @@ class OrgDashboardsIndex extends Component { url="dashboards" title="Dashboards" > - {this.orgsDashboardsPage} + + + @@ -98,49 +76,18 @@ class OrgDashboardsIndex extends Component { ) } - - private get orgsDashboardsPage() { - const {org, dashboards} = this.props - const {loadingState} = this.state - return ( - } - > - - - ) - } - - private getDashboards = async () => 
{ - const {getDashboards, org} = this.props - - await getDashboards(org.id) - } } const mstp = (state: AppState, props: Props): StateProps => { - const { - orgs, - orgView: {dashboards}, - } = state - - const org = orgs.find(o => o.id === props.params.orgID) + const org = state.orgs.find(o => o.id === props.params.orgID) return { org, - dashboards, } } const mdtp: DispatchProps = { notify: notifyActions.notify, - getDashboards: getDashboardsAction, - populateDashboards: populateDashboardsAction, } export default connect( diff --git a/ui/src/organizations/containers/OrgTemplatesIndex.tsx b/ui/src/organizations/containers/OrgTemplatesIndex.tsx index 767c8671bb..2498e16e8e 100644 --- a/ui/src/organizations/containers/OrgTemplatesIndex.tsx +++ b/ui/src/organizations/containers/OrgTemplatesIndex.tsx @@ -81,7 +81,7 @@ class OrgTemplatesIndex extends Component { private handleImport = () => { const {router, org} = this.props - router.push(`organizations/${org.id}/templates/import`) + router.push(`/organizations/${org.id}/templates/import`) } } diff --git a/ui/src/organizations/containers/OrgVariablesIndex.tsx b/ui/src/organizations/containers/OrgVariablesIndex.tsx index 862bb5e70c..af9cf291d7 100644 --- a/ui/src/organizations/containers/OrgVariablesIndex.tsx +++ b/ui/src/organizations/containers/OrgVariablesIndex.tsx @@ -39,28 +39,31 @@ type Props = WithRouterProps & RouterProps & DispatchProps & StateProps @ErrorHandling class OrgVariablesIndex extends Component { public render() { - const {org} = this.props + const {org, children} = this.props return ( - - - -
-        {/* original page markup removed here (JSX tags lost in extraction) */}
+        <>
+          {/* same page markup, now wrapped in a fragment (JSX tags lost in extraction) */}
    + {children} + ) } } diff --git a/ui/src/organizations/reducers/orgView.ts b/ui/src/organizations/reducers/orgView.ts index 2bd65c0ea2..25426085b9 100644 --- a/ui/src/organizations/reducers/orgView.ts +++ b/ui/src/organizations/reducers/orgView.ts @@ -1,16 +1,13 @@ import {ITask as Task, Telegraf} from '@influxdata/influx' -import {Dashboard} from 'src/types' import {Actions, ActionTypes} from 'src/organizations/actions/orgView' export interface OrgViewState { tasks: Task[] - dashboards: Dashboard[] telegrafs: Telegraf[] } const defaultState: OrgViewState = { tasks: [], - dashboards: [], telegrafs: [], } @@ -18,8 +15,6 @@ export default (state = defaultState, action: Actions): OrgViewState => { switch (action.type) { case ActionTypes.PopulateTasks: return {...state, tasks: action.payload.tasks} - case ActionTypes.PopulateDashboards: - return {...state, dashboards: action.payload.dashboards} default: return state } diff --git a/ui/src/pageLayout/components/NavMenuItem.tsx b/ui/src/pageLayout/components/NavMenuItem.tsx index e4fb76851c..0d4b25d32d 100644 --- a/ui/src/pageLayout/components/NavMenuItem.tsx +++ b/ui/src/pageLayout/components/NavMenuItem.tsx @@ -4,10 +4,6 @@ import {Link} from 'react-router' import classnames from 'classnames' import {get} from 'lodash' -// Components -import NavMenuSubItem from 'src/pageLayout/components/NavMenuSubItem' -import {Select} from 'src/clockface' - // Types import {IconFont} from 'src/clockface' @@ -43,7 +39,7 @@ const NavMenuItem: SFC = ({ {title} - + {children}
    ) diff --git a/ui/src/pageLayout/components/Page.scss b/ui/src/pageLayout/components/Page.scss index ff05ca6e6d..be1c49bbb8 100644 --- a/ui/src/pageLayout/components/Page.scss +++ b/ui/src/pageLayout/components/Page.scss @@ -75,11 +75,21 @@ } .page-contents { + width: 100%; + position: relative; height: calc(100% - #{$page-header-size}) !important; &.full-width { padding: 0 $page-gutter; } + + & > .dapper-scrollbars--track-y { + background: linear-gradient( + to bottom, + $g6-smoke 0%, + $g3-castle 100% + ) !important; + } } .container-fluid { @@ -97,5 +107,7 @@ .page-contents.presentation-mode { height: 100% !important; - .container-fluid {padding: 8px} -} \ No newline at end of file + .container-fluid { + padding: 8px; + } +} diff --git a/ui/src/pageLayout/components/PageContents.tsx b/ui/src/pageLayout/components/PageContents.tsx index e156000f77..9c7b8c5383 100644 --- a/ui/src/pageLayout/components/PageContents.tsx +++ b/ui/src/pageLayout/components/PageContents.tsx @@ -3,7 +3,7 @@ import React, {Component, ReactNode} from 'react' import classnames from 'classnames' // Components -import FancyScrollbar from 'src/shared/components/fancy_scrollbar/FancyScrollbar' +import DapperScrollbars from 'src/shared/components/dapperScrollbars/DapperScrollbars' // Decorators import {ErrorHandling} from 'src/shared/decorators/errors' @@ -21,9 +21,13 @@ class PageContents extends Component { if (scrollable) { return ( - + {this.children} - + ) } diff --git a/ui/src/pageLayout/components/PageHeader.tsx b/ui/src/pageLayout/components/PageHeader.tsx index 91e4973aee..d1228b98f7 100644 --- a/ui/src/pageLayout/components/PageHeader.tsx +++ b/ui/src/pageLayout/components/PageHeader.tsx @@ -13,12 +13,12 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children: JSX.Element[] fullWidth: boolean - inPresentationMode?: boolean + inPresentationMode: boolean } @ErrorHandling class PageHeader extends Component { - public static defaultProps: Partial = { + public static defaultProps = { inPresentationMode: false, } diff --git a/ui/src/pageLayout/components/PageHeaderCenter.tsx b/ui/src/pageLayout/components/PageHeaderCenter.tsx index e2fe915339..3e76cc544e 100644 --- a/ui/src/pageLayout/components/PageHeaderCenter.tsx +++ b/ui/src/pageLayout/components/PageHeaderCenter.tsx @@ -12,12 +12,12 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children: JSX.Element[] | JSX.Element | string | number - widthPixels?: number + widthPixels: number } @ErrorHandling class PageHeaderCenter extends Component { - public static defaultProps: Partial = { + public static defaultProps = { widthPixels: DEFAULT_PAGE_HEADER_CENTER_WIDTH, } diff --git a/ui/src/pageLayout/components/PageHeaderLeft.tsx b/ui/src/pageLayout/components/PageHeaderLeft.tsx index 43215f41d1..671ef186af 100644 --- a/ui/src/pageLayout/components/PageHeaderLeft.tsx +++ b/ui/src/pageLayout/components/PageHeaderLeft.tsx @@ -9,12 +9,12 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children: JSX.Element[] | JSX.Element | string | number - offsetPixels?: number + offsetPixels: number } @ErrorHandling class PageHeaderLeft extends Component { - public static defaultProps: Partial = { + public static defaultProps = { offsetPixels: DEFAULT_OFFSET, } diff --git a/ui/src/pageLayout/components/PageHeaderRight.tsx b/ui/src/pageLayout/components/PageHeaderRight.tsx index aa5a64a2ab..c2bf72ff78 100644 --- a/ui/src/pageLayout/components/PageHeaderRight.tsx +++ 
b/ui/src/pageLayout/components/PageHeaderRight.tsx @@ -9,12 +9,12 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { children?: JSX.Element[] | JSX.Element | string | number - offsetPixels?: number + offsetPixels: number } @ErrorHandling class PageHeaderRight extends Component { - public static defaultProps: Partial = { + public static defaultProps = { offsetPixels: DEFAULT_OFFSET, } diff --git a/ui/src/pageLayout/containers/Nav.tsx b/ui/src/pageLayout/containers/Nav.tsx index 409ad84ac8..b5895f26c6 100644 --- a/ui/src/pageLayout/containers/Nav.tsx +++ b/ui/src/pageLayout/containers/Nav.tsx @@ -12,6 +12,7 @@ import {AppState} from 'src/types' import {IconFont} from 'src/clockface' import {ErrorHandling} from 'src/shared/decorators/errors' +import CloudExclude from 'src/shared/components/cloud/CloudExclude' interface OwnProps { isHidden: boolean @@ -80,30 +81,32 @@ class SideNav extends PureComponent { location={location.pathname} highlightPaths={['configuration']} > - - - - + + + + + + ({ - type: ActionTypes.LoadProto, - payload: {proto}, -}) - -export const getProtos = () => async (dispatch: Dispatch) => { - try { - const protos = await client.protos.getAll() - - protos.forEach(p => { - dispatch(loadProto(p)) - }) - } catch (error) { - console.error(error) - } -} - -export const createDashFromProto = ( - protoID: string, - orgID: string -) => async dispatch => { - try { - const dashboards = await client.dashboards.createFromProto(protoID, orgID) - - dashboards.forEach((d: Dashboard) => { - const updatedDashboard = { - ...d, - cells: addDashboardIDToCells(d.cells, d.id), - } - dispatch(loadDashboard(updatedDashboard)) - }) - } catch (error) { - console.error(error) - } -} - -export const createDashboardsForPlugins = () => async ( - dispatch, - getState: GetState -) => { - await dispatch(getProtos()) - const { - dataLoading: { - dataLoaders: {telegrafPlugins}, - steps: {orgID}, - }, - protos, - } = getState() - - const plugins = [] - - try { - telegrafPlugins.forEach(tp => { - if (tp.configured === ConfigurationState.Configured) { - if (protos[tp.name]) { - dispatch(createDashFromProto(protos[tp.name].id, orgID)) - plugins.push(tp.name) - } - } - }) - - if (plugins.length) { - dispatch(notify(TelegrafDashboardCreated(plugins))) - } - } catch (err) { - console.error(err) - dispatch(notify(TelegrafDashboardFailed())) - } -} diff --git a/ui/src/protos/reducers/index.ts b/ui/src/protos/reducers/index.ts deleted file mode 100644 index a9dc0b316d..0000000000 --- a/ui/src/protos/reducers/index.ts +++ /dev/null @@ -1,27 +0,0 @@ -// Types -import {Action} from 'src/protos/actions/' -import {Proto} from '@influxdata/influx' - -export interface ProtosState { - [protoName: string]: Proto -} - -const protosReducer = (state: ProtosState = {}, action: Action) => { - switch (action.type) { - case 'LOAD_PROTO': { - const { - proto, - proto: {name}, - } = action.payload - - return { - ...state, - [name]: {...proto}, - } - } - } - - return state -} - -export default protosReducer diff --git a/ui/src/shared/components/CodeSnippet.tsx b/ui/src/shared/components/CodeSnippet.tsx index 8c9710562e..beb8d8e675 100644 --- a/ui/src/shared/components/CodeSnippet.tsx +++ b/ui/src/shared/components/CodeSnippet.tsx @@ -12,19 +12,14 @@ import CopyButton from 'src/shared/components/CopyButton' // Styles import 'src/shared/components/CodeSnippet.scss' -export interface PassedProps { +export interface Props { copyText: string + label: string } -interface DefaultProps { - label?: string -} - -type 
Props = PassedProps & DefaultProps - @ErrorHandling class CodeSnippet extends PureComponent { - public static defaultProps: DefaultProps = { + public static defaultProps = { label: 'Code Snippet', } diff --git a/ui/src/shared/components/ConfirmButton.tsx b/ui/src/shared/components/ConfirmButton.tsx index 4674697be2..9dde09b13f 100644 --- a/ui/src/shared/components/ConfirmButton.tsx +++ b/ui/src/shared/components/ConfirmButton.tsx @@ -6,12 +6,12 @@ import {ErrorHandling} from 'src/shared/decorators/errors' type Position = 'top' | 'bottom' | 'left' | 'right' interface Props { - text?: string - confirmText?: string + confirmText: string confirmAction: () => void - type?: string - size?: string - square?: boolean + type: string + size: string + square: boolean + text?: string icon?: string disabled?: boolean customClass?: string @@ -24,7 +24,7 @@ interface State { @ErrorHandling class ConfirmButton extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { confirmText: 'Confirm', type: 'btn-default', size: 'btn-sm', diff --git a/ui/src/shared/components/CopyButton.tsx b/ui/src/shared/components/CopyButton.tsx index 6b52f5e5de..d74822ec43 100644 --- a/ui/src/shared/components/CopyButton.tsx +++ b/ui/src/shared/components/CopyButton.tsx @@ -18,11 +18,6 @@ import {notify as notifyAction} from 'src/shared/actions/notifications' interface OwnProps { textToCopy: string contentName: string // if copying a script, its "script" - size?: ComponentSize - color?: ComponentColor -} - -interface DefaultProps { size: ComponentSize color: ComponentColor } @@ -34,10 +29,11 @@ interface DispatchProps { type Props = OwnProps & DispatchProps class CopyButton extends PureComponent { - public static defaultProps: DefaultProps = { + public static defaultProps = { size: ComponentSize.ExtraSmall, color: ComponentColor.Secondary, } + public render() { const {textToCopy, color, size} = this.props diff --git a/ui/src/shared/components/DragAndDrop.tsx b/ui/src/shared/components/DragAndDrop.tsx index a0cfa8c4dd..44490cf2eb 100644 --- a/ui/src/shared/components/DragAndDrop.tsx +++ b/ui/src/shared/components/DragAndDrop.tsx @@ -11,10 +11,10 @@ interface Props { fileTypesToAccept?: string containerClass?: string handleSubmit: (uploadContent: string | ArrayBuffer, fileName: string) => void - submitText?: string - submitOnDrop?: boolean - submitOnUpload?: boolean - compact?: boolean + submitText: string + submitOnDrop: boolean + submitOnUpload: boolean + compact: boolean onCancel?: () => void } @@ -27,7 +27,7 @@ interface State { let dragCounter = 0 class DragAndDrop extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { submitText: 'Write this File', submitOnDrop: false, submitOnUpload: false, diff --git a/ui/src/shared/components/EditableName.tsx b/ui/src/shared/components/EditableName.tsx index 0ac50b136f..beb0733be8 100644 --- a/ui/src/shared/components/EditableName.tsx +++ b/ui/src/shared/components/EditableName.tsx @@ -14,21 +14,16 @@ import {RemoteDataState} from 'src/types' // Decorators import {ErrorHandling} from 'src/shared/decorators/errors' -interface PassedProps { +interface Props { onUpdate: (name: string) => void name: string + noNameString: string + hrefValue: string + testID: string onEditName?: (e?: MouseEvent) => void placeholder?: string - noNameString: string } -interface DefaultProps { - hrefValue?: string - testID?: string -} - -type Props = PassedProps & DefaultProps - interface State { isEditing: boolean workingName: 
string @@ -37,7 +32,7 @@ interface State { @ErrorHandling class EditableName extends Component { - public static defaultProps: DefaultProps = { + public static defaultProps = { hrefValue: '#', testID: 'editable-name', } diff --git a/ui/src/shared/components/ExportOverlay.tsx b/ui/src/shared/components/ExportOverlay.tsx index 2d80243ad2..bda82a5cc6 100644 --- a/ui/src/shared/components/ExportOverlay.tsx +++ b/ui/src/shared/components/ExportOverlay.tsx @@ -32,16 +32,13 @@ import {DocumentCreate} from '@influxdata/influx' import {ComponentColor, ComponentSize} from '@influxdata/clockface' import {RemoteDataState} from 'src/types' -interface OwnProps extends DefaultProps { +interface OwnProps { onDismissOverlay: () => void resource: DocumentCreate resourceName: string orgID: string status: RemoteDataState -} - -interface DefaultProps { - isVisible?: boolean + isVisible: boolean } interface DispatchProps { @@ -51,7 +48,7 @@ interface DispatchProps { type Props = OwnProps & DispatchProps class ExportOverlay extends PureComponent { - public static defaultProps: DefaultProps = { + public static defaultProps = { isVisible: true, } diff --git a/ui/src/shared/components/Histogram.tsx b/ui/src/shared/components/Histogram.tsx index c92edf8ac0..4b35a95491 100644 --- a/ui/src/shared/components/Histogram.tsx +++ b/ui/src/shared/components/Histogram.tsx @@ -6,7 +6,7 @@ import { Histogram as MinardHistogram, Table, isNumeric, -} from 'src/minard' +} from '@influxdata/vis' // Components import HistogramTooltip from 'src/shared/components/HistogramTooltip' diff --git a/ui/src/shared/components/HistogramTooltip.tsx b/ui/src/shared/components/HistogramTooltip.tsx index 24a0db0e40..cd9a28c966 100644 --- a/ui/src/shared/components/HistogramTooltip.tsx +++ b/ui/src/shared/components/HistogramTooltip.tsx @@ -1,7 +1,7 @@ import React, {useRef, SFC} from 'react' import {createPortal} from 'react-dom' import {uniq, flatten} from 'lodash' -import {HistogramTooltipProps, useTooltipStyle} from 'src/minard' +import {HistogramTooltipProps, useTooltipStyle} from '@influxdata/vis' import {format} from 'd3-format' import {TOOLTIP_PORTAL_ID} from 'src/shared/components/TooltipPortal' diff --git a/ui/src/shared/components/ImportOverlay.tsx b/ui/src/shared/components/ImportOverlay.tsx index d7ef6a80c9..e9243ffbe1 100644 --- a/ui/src/shared/components/ImportOverlay.tsx +++ b/ui/src/shared/components/ImportOverlay.tsx @@ -24,7 +24,7 @@ enum ImportOption { Paste = 'paste', } -interface OwnProps extends WithRouterProps { +interface OwnProps { onDismissOverlay: () => void resourceName: string onSubmit: (importString: string, orgID: string) => void @@ -41,7 +41,7 @@ interface State { orgID: string } -type Props = StateProps & OwnProps +type Props = StateProps & OwnProps & WithRouterProps class ImportOverlay extends PureComponent { public static defaultProps: {isVisible: boolean} = { @@ -195,4 +195,4 @@ const mstp = ({orgs}: AppState): StateProps => ({orgs}) export default connect( mstp, null -)(withRouter(ImportOverlay)) +)(withRouter(ImportOverlay)) diff --git a/ui/src/shared/components/InputClickToEdit.tsx b/ui/src/shared/components/InputClickToEdit.tsx index a2a1323aae..7afc8833e8 100644 --- a/ui/src/shared/components/InputClickToEdit.tsx +++ b/ui/src/shared/components/InputClickToEdit.tsx @@ -3,12 +3,12 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { wrapperClass: string + tabIndex: number value?: string onChange?: (value: string) => void onKeyDown?: (value: string) => void onBlur: (value: string) 
=> void disabled?: boolean - tabIndex?: number placeholder?: string appearAsNormalInput?: boolean } @@ -20,7 +20,7 @@ interface State { @ErrorHandling class InputClickToEdit extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { tabIndex: 0, } diff --git a/ui/src/shared/components/Legend.tsx b/ui/src/shared/components/Legend.tsx index 3421b918de..9b837cfd14 100644 --- a/ui/src/shared/components/Legend.tsx +++ b/ui/src/shared/components/Legend.tsx @@ -5,7 +5,7 @@ import moment from 'moment' import {uniq, flatten, isNumber} from 'lodash' // Components -import FancyScrollbar from 'src/shared/components/fancy_scrollbar/FancyScrollbar' +import DapperScrollbars from 'src/shared/components/dapperScrollbars/DapperScrollbars' // Constants import {LEGEND_PORTAL_ID} from 'src/shared/components/LegendPortal' @@ -42,7 +42,10 @@ class Legend extends PureComponent { return createPortal(
        {this.time}
-        <FancyScrollbar>   {/* original props lost in extraction */}
+        <DapperScrollbars> {/* original props lost in extraction */}
          {this.columns.map(({name, isNumeric, rows}, i) => (
            {/* per-column markup (JSX tags lost in extraction) */}
          ))}
-        </FancyScrollbar>
+        </DapperScrollbars>
    , document.querySelector(`#${LEGEND_PORTAL_ID}`) ) diff --git a/ui/src/shared/components/LoadingDots.tsx b/ui/src/shared/components/LoadingDots.tsx deleted file mode 100644 index 9ec4c07ddd..0000000000 --- a/ui/src/shared/components/LoadingDots.tsx +++ /dev/null @@ -1,24 +0,0 @@ -import React, {Component} from 'react' - -interface Props { - className?: string -} -class LoadingDots extends Component { - public static defaultProps: Partial = { - className: '', - } - - public render() { - const {className} = this.props - - return ( -
-        {/* loading-dots markup (JSX tags lost in extraction) */}
    - ) - } -} - -export default LoadingDots diff --git a/ui/src/shared/components/MenuTooltipButton.tsx b/ui/src/shared/components/MenuTooltipButton.tsx index 01b8eec22d..8e9c588e10 100644 --- a/ui/src/shared/components/MenuTooltipButton.tsx +++ b/ui/src/shared/components/MenuTooltipButton.tsx @@ -12,7 +12,7 @@ export interface MenuItem { } interface Props { - theme?: string + theme: string icon: string informParent: () => void menuItems: MenuItem[] @@ -24,7 +24,7 @@ interface State { @ErrorHandling export default class MenuTooltipButton extends Component { - public static defaultProps: Partial = { + public static defaultProps = { theme: 'default', } diff --git a/ui/src/shared/components/RefreshingView.tsx b/ui/src/shared/components/RefreshingView.tsx index dcbb6d9afe..2ec0e0c4bd 100644 --- a/ui/src/shared/components/RefreshingView.tsx +++ b/ui/src/shared/components/RefreshingView.tsx @@ -44,7 +44,7 @@ interface State { type Props = OwnProps & StateProps class RefreshingView extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { inView: true, manualRefresh: 0, } diff --git a/ui/src/shared/components/SearchableDropdown.tsx b/ui/src/shared/components/SearchableDropdown.tsx index 831ea09013..15d38240e9 100644 --- a/ui/src/shared/components/SearchableDropdown.tsx +++ b/ui/src/shared/components/SearchableDropdown.tsx @@ -18,11 +18,12 @@ interface Props extends DropdownProps { searchTerm?: string searchPlaceholder?: string onChangeSearchTerm?: (value: string) => void + buttonSize: ComponentSize } @ErrorHandling export default class SearchableDropdown extends Component { - public static defaultProps: Partial = { + public static defaultProps = { buttonSize: ComponentSize.Small, } diff --git a/ui/src/shared/components/TimeRangeDropdown.tsx b/ui/src/shared/components/TimeRangeDropdown.tsx index 18c7fde692..ed04828685 100644 --- a/ui/src/shared/components/TimeRangeDropdown.tsx +++ b/ui/src/shared/components/TimeRangeDropdown.tsx @@ -4,7 +4,7 @@ import {get} from 'lodash' import moment from 'moment' // Components -import {Dropdown} from 'src/clockface' +import {Dropdown, DropdownMode} from 'src/clockface' import DateRangePicker from 'src/shared/components/dateRangePicker/DateRangePicker' // Constants @@ -55,7 +55,9 @@ class TimeRangeDropdown extends PureComponent { selectedID={timeRange.label} onChange={this.handleChange} widthPixels={this.dropdownWidth} + menuWidthPixels={this.dropdownWidth + 50} titleText={this.formattedCustomTimeRange} + mode={DropdownMode.ActionList} > {TIME_RANGES.map(({label}) => { if (label === TIME_RANGE_LABEL) { @@ -143,7 +145,7 @@ class TimeRangeDropdown extends PureComponent { } private handleHideDatePicker = () => { - this.setState({isDatePickerOpen: false, dropdownPosition: null}) + this.setState({isDatePickerOpen: false, dropdownPosition: undefined}) } private handleChange = (label: string): void => { diff --git a/ui/src/shared/components/cloud/CloudExclude.tsx b/ui/src/shared/components/cloud/CloudExclude.tsx new file mode 100644 index 0000000000..81b3b96bf6 --- /dev/null +++ b/ui/src/shared/components/cloud/CloudExclude.tsx @@ -0,0 +1,13 @@ +import {PureComponent} from 'react' + +export default class extends PureComponent { + render() { + const {children} = this.props + + if (process.env.CLOUD !== 'true') { + return children + } + + return null + } +} diff --git a/ui/src/shared/components/cloud/CloudOnly.tsx b/ui/src/shared/components/cloud/CloudOnly.tsx new file mode 100644 index 0000000000..aa45bb4d10 --- /dev/null +++ 
b/ui/src/shared/components/cloud/CloudOnly.tsx @@ -0,0 +1,13 @@ +import {PureComponent} from 'react' + +export default class extends PureComponent { + render() { + const {children} = this.props + + if (process.env.CLOUD === 'true') { + return children + } + + return null + } +} diff --git a/ui/src/shared/components/dapperScrollbars/DapperScrollbars.scss b/ui/src/shared/components/dapperScrollbars/DapperScrollbars.scss new file mode 100644 index 0000000000..ffbd7bf5f0 --- /dev/null +++ b/ui/src/shared/components/dapperScrollbars/DapperScrollbars.scss @@ -0,0 +1,59 @@ +$dapper-scrollbars--size: 6px; + +.dapper-scrollbars--wrapper { + margin-right: 0 !important; + margin-bottom: 0 !important; + right: 0 !important; + bottom: 0 !important; +} + +.dapper-scrollbars--track-x, +.dapper-scrollbars--track-y { + border-radius: $dapper-scrollbars--size !important; + position: absolute !important; + background-color: rgba($g0-obsidian, 0.4) !important; + user-select: none !important; + overflow: hidden !important; + transition: opacity 0.25s ease !important; +} + +.dapper-scrollbars--track-x { + height: $dapper-scrollbars--size !important; + width: calc(100% - #{$dapper-scrollbars--size}) !important; + bottom: $dapper-scrollbars--size / 2 !important; + left: $dapper-scrollbars--size / 2 !important; +} + +.dapper-scrollbars--track-y { + width: $dapper-scrollbars--size !important; + height: calc(100% - #{$dapper-scrollbars--size}) !important; + right: $dapper-scrollbars--size / 2 !important; + top: $dapper-scrollbars--size / 2 !important; +} + +.dapper-scrollbars--thumb-x, +.dapper-scrollbars--thumb-y { + border-radius: $dapper-scrollbars--size / 2 !important; +} + +.dapper-scrollbars--thumb-x { + height: $dapper-scrollbars--size !important; +} + +.dapper-scrollbars--thumb-y { + width: $dapper-scrollbars--size !important; +} + +.dapper-scrollbars--autohide { + .dapper-scrollbars--track-x, + .dapper-scrollbars--track-y { + opacity: 0; + } + + &:hover { + .dapper-scrollbars--track-x, + .dapper-scrollbars--track-y { + opacity: 1; + } + } +} diff --git a/ui/src/shared/components/dapperScrollbars/DapperScrollbars.tsx b/ui/src/shared/components/dapperScrollbars/DapperScrollbars.tsx new file mode 100644 index 0000000000..1cb262bade --- /dev/null +++ b/ui/src/shared/components/dapperScrollbars/DapperScrollbars.tsx @@ -0,0 +1,112 @@ +// Libraries +import React, {Component, CSSProperties, ReactNode} from 'react' +import _ from 'lodash' +import classnames from 'classnames' +import Scrollbar from 'react-scrollbars-custom' + +// Decorators +import {ErrorHandling} from 'src/shared/decorators/errors' + +interface PassedProps { + children: JSX.Element | JSX.Element[] | ReactNode +} + +interface DefaultProps { + className?: string + removeTracksWhenNotUsed?: boolean + removeTrackYWhenNotUsed?: boolean + removeTrackXWhenNotUsed?: boolean + noScrollX?: boolean + noScrollY?: boolean + noScroll?: boolean + thumbStartColor?: string + thumbStopColor?: string + style?: CSSProperties + autoHide?: boolean + autoSize?: boolean +} + +type Props = PassedProps & DefaultProps + +@ErrorHandling +class DapperScrollbars extends Component { + public static defaultProps: DefaultProps = { + removeTracksWhenNotUsed: true, + removeTrackYWhenNotUsed: true, + removeTrackXWhenNotUsed: true, + noScrollX: false, + noScrollY: false, + noScroll: false, + thumbStartColor: '#00C9FF', + thumbStopColor: '#9394FF', + autoHide: false, + autoSize: true, + } + + public render() { + const { + removeTracksWhenNotUsed, + removeTrackYWhenNotUsed, + 
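
Editor's note: the new CloudOnly and CloudExclude gate components above render their children only when the build-time `process.env.CLOUD` flag matches ('true' for cloud builds, anything else for the open-source build); the Nav.tsx hunk earlier in this diff wraps nav items in `<CloudExclude>`. A minimal usage sketch follows; `SupportLinks`, `FeedbackLink`, and the URLs are illustrative only and not part of this changeset.

```tsx
// Sketch only: demonstrates the CloudOnly / CloudExclude gates added in this diff.
import React, {SFC} from 'react'

import CloudOnly from 'src/shared/components/cloud/CloudOnly'
import CloudExclude from 'src/shared/components/cloud/CloudExclude'

// Hypothetical link component, used purely for illustration.
const FeedbackLink: SFC<{href: string; label: string}> = ({href, label}) => (
  <a href={href}>{label}</a>
)

const SupportLinks: SFC = () => (
  <>
    {/* Rendered only when the UI is built with CLOUD=true */}
    <CloudOnly>
      <FeedbackLink href="https://example.com/cloud-feedback" label="Feedback" />
    </CloudOnly>

    {/* Rendered only in the open-source (non-cloud) build */}
    <CloudExclude>
      <FeedbackLink href="https://example.com/community" label="Community" />
    </CloudExclude>
  </>
)

export default SupportLinks
```
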
removeTrackXWhenNotUsed, + noScrollX, + noScrollY, + className, + autoHide, + autoSize, + noScroll, + children, + style, + } = this.props + + const classname = classnames('dapper-scrollbars', { + 'dapper-scrollbars--autohide': autoHide, + [`${className}`]: className, + }) + + return ( + + {children} + + ) + } + + private get thumbXStyle(): CSSProperties { + const {thumbStartColor, thumbStopColor} = this.props + + return { + background: `linear-gradient(to right, ${thumbStartColor} 0%,${thumbStopColor} 100%)`, + } + } + + private get thumbYStyle(): CSSProperties { + const {thumbStartColor, thumbStopColor} = this.props + + return { + background: `linear-gradient(to bottom, ${thumbStartColor} 0%,${thumbStopColor} 100%)`, + } + } +} + +export default DapperScrollbars diff --git a/ui/src/shared/components/dropdown_auto_refresh/AutoRefreshDropdown.tsx b/ui/src/shared/components/dropdown_auto_refresh/AutoRefreshDropdown.tsx index b830853c81..0b283e888f 100644 --- a/ui/src/shared/components/dropdown_auto_refresh/AutoRefreshDropdown.tsx +++ b/ui/src/shared/components/dropdown_auto_refresh/AutoRefreshDropdown.tsx @@ -11,19 +11,21 @@ import autoRefreshOptions, { AutoRefreshOption, AutoRefreshOptionType, } from 'src/shared/data/autoRefreshes' +const DROPDOWN_WIDTH_COLLAPSED = 50 +const DROPDOWN_WIDTH_FULL = 84 import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { selected: number onChoose: (milliseconds: number) => void - showManualRefresh?: boolean + showManualRefresh: boolean onManualRefresh?: () => void } @ErrorHandling class AutoRefreshDropdown extends Component { - public static defaultProps: Partial = { + public static defaultProps = { showManualRefresh: true, } @@ -41,6 +43,7 @@ class AutoRefreshDropdown extends Component { @@ -96,10 +99,10 @@ class AutoRefreshDropdown extends Component { private get dropdownWidthPixels(): number { if (this.isPaused) { - return 50 + return DROPDOWN_WIDTH_COLLAPSED } - return 84 + return DROPDOWN_WIDTH_FULL } private get selectedID(): string { diff --git a/ui/src/shared/components/dygraph/Dygraph.tsx b/ui/src/shared/components/dygraph/Dygraph.tsx index d1d1829760..d11f782b63 100644 --- a/ui/src/shared/components/dygraph/Dygraph.tsx +++ b/ui/src/shared/components/dygraph/Dygraph.tsx @@ -19,22 +19,15 @@ import {withHoverTime, InjectedHoverProps} from 'src/dashboards/utils/hoverTime' // Constants import {LINE_COLORS, LABEL_WIDTH, CHAR_PIXELS} from 'src/shared/graphs/helpers' import {getLineColorsHexes} from 'src/shared/constants/graphColorPalettes' -import { - AXES_SCALE_OPTIONS, - DEFAULT_AXIS, -} from 'src/dashboards/constants/cellEditor' +import {DEFAULT_AXIS} from 'src/dashboards/constants/cellEditor' // Types -import {Axes, TimeRange} from 'src/types' +import {Axes, TimeRange, Base, Scale, DashboardQuery, Color} from 'src/types' import {DygraphData, Options, SeriesLegendData} from 'src/external/dygraph' -import {Color} from 'src/types/colors' -import {DashboardQuery} from 'src/types/dashboards' import {SeriesDescription} from 'src/shared/parsing/flux/spreadTables' const getRangeMemoizedY = memoizeOne(getRange) -const {LOG, BASE_10, BASE_2} = AXES_SCALE_OPTIONS - const DEFAULT_DYGRAPH_OPTIONS = { yRangePad: 10, labelsKMB: true, @@ -55,20 +48,18 @@ interface LegendData { } interface OwnProps { + axes: Axes viewID: string - queries?: DashboardQuery[] - timeSeries: DygraphData - labels: string[] - seriesDescriptions: SeriesDescription[] - options?: Partial colors: Color[] - timeRange?: TimeRange - axes?: Axes - isGraphFilled?: boolean - 
onZoom?: (timeRange: TimeRange) => void + labels: string[] + timeSeries: DygraphData + options: Partial + seriesDescriptions: SeriesDescription[] + onZoom: (timeRange: TimeRange) => void mode?: string - underlayCallback?: () => void + timeRange?: TimeRange children?: JSX.Element + queries?: DashboardQuery[] } type Props = OwnProps & InjectedHoverProps @@ -85,24 +76,23 @@ interface State { @ErrorHandling class Dygraph extends Component { - public static defaultProps: Partial = { + public static defaultProps = { + onZoom: () => {}, axes: { x: { - bounds: [null, null], + prefix: '', + suffix: '', + base: Base.Ten, + scale: Scale.Linear, + label: '', ...DEFAULT_AXIS, + bounds: [null, null] as [null, null], }, y: { - bounds: [null, null], - ...DEFAULT_AXIS, - }, - y2: { - bounds: undefined, ...DEFAULT_AXIS, + bounds: [null, null] as [null, null], }, }, - isGraphFilled: true, - onZoom: () => {}, - underlayCallback: () => {}, options: {}, } @@ -244,7 +234,7 @@ class Dygraph extends Component { const [min, max] = range // Bug in Dygraph calculates a negative range for logscale when min range is 0 - if (y.scale === LOG && min <= 0) { + if (y.scale === Scale.Log && min <= 0) { range = [0.01, max] } @@ -282,8 +272,6 @@ class Dygraph extends Component { const { labels, axes: {y}, - underlayCallback, - isGraphFilled, options: passedOptions, } = this.props @@ -298,18 +286,17 @@ class Dygraph extends Component { const options = { labels, - underlayCallback, colors, file: timeSeries as any, zoomCallback: handleZoom, - fillGraph: isGraphFilled, - logscale: y.scale === LOG, + fillGraph: true, + logscale: y.scale === Scale.Log, ylabel: yLabel, axes: { y: { axisLabelWidth: labelWidth, - labelsKMB: y.base === BASE_10, - labelsKMG2: y.base === BASE_2, + labelsKMB: y.base === Base.Ten, + labelsKMG2: y.base === Base.Two, axisLabelFormatter: formatYVal, valueRange: this.getYRange(timeSeries), }, @@ -400,4 +387,4 @@ class Dygraph extends Component { } } -export default withHoverTime(Dygraph) +export default withHoverTime(Dygraph) diff --git a/ui/src/shared/components/fancy_scrollbar/FancyScrollbar.tsx b/ui/src/shared/components/fancy_scrollbar/FancyScrollbar.tsx index 092e2930b2..c290d521b9 100644 --- a/ui/src/shared/components/fancy_scrollbar/FancyScrollbar.tsx +++ b/ui/src/shared/components/fancy_scrollbar/FancyScrollbar.tsx @@ -7,17 +7,14 @@ import {Scrollbars} from '@influxdata/react-custom-scrollbars' // Decorators import {ErrorHandling} from 'src/shared/decorators/errors' -interface DefaultProps { +interface Props { + className: string + maxHeight: number autoHide: boolean autoHeight: boolean - maxHeight: number - setScrollTop: (value: React.MouseEvent) => void style: React.CSSProperties - hideTracksWhenNotNeeded?: boolean -} - -interface Props { - className?: string + hideTracksWhenNotNeeded: boolean + setScrollTop: (value: React.MouseEvent) => void scrollTop?: number scrollLeft?: number thumbStartColor?: string @@ -25,11 +22,12 @@ interface Props { } @ErrorHandling -class FancyScrollbar extends Component> { - public static defaultProps: DefaultProps = { +class FancyScrollbar extends Component { + public static defaultProps = { + className: '', autoHide: true, - hideTracksWhenNotNeeded: true, autoHeight: false, + hideTracksWhenNotNeeded: true, maxHeight: null, style: {}, setScrollTop: () => {}, diff --git a/ui/src/shared/components/notifications/Notifications.tsx b/ui/src/shared/components/notifications/Notifications.tsx index 0d9bd9ecce..3b6f95c48f 100644 --- 
a/ui/src/shared/components/notifications/Notifications.tsx +++ b/ui/src/shared/components/notifications/Notifications.tsx @@ -4,13 +4,14 @@ import {Notification as NotificationType} from 'src/types/notifications' import Notification from 'src/shared/components/notifications/Notification' interface Props { - inPresentationMode?: boolean notifications: NotificationType[] + inPresentationMode: boolean } class Notifications extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { inPresentationMode: false, + notifications: [], } public render() { diff --git a/ui/src/shared/components/permissionsWidget/PermissionsWidget.tsx b/ui/src/shared/components/permissionsWidget/PermissionsWidget.tsx index f4ffbfc479..aa865dfb8e 100644 --- a/ui/src/shared/components/permissionsWidget/PermissionsWidget.tsx +++ b/ui/src/shared/components/permissionsWidget/PermissionsWidget.tsx @@ -19,12 +19,12 @@ export enum PermissionsWidgetSelection { interface Props { children: JSX.Element[] | JSX.Element mode: PermissionsWidgetMode - heightPixels?: number + heightPixels: number className?: string } class PermissionsWidget extends Component { - public static defaultProps: Partial = { + public static defaultProps = { heightPixels: 500, } diff --git a/ui/src/shared/components/search_widget/SearchWidget.tsx b/ui/src/shared/components/search_widget/SearchWidget.tsx index 1d0584e965..3a9aeb71b2 100644 --- a/ui/src/shared/components/search_widget/SearchWidget.tsx +++ b/ui/src/shared/components/search_widget/SearchWidget.tsx @@ -13,8 +13,8 @@ import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { onSearch: (searchTerm: string) => void - widthPixels?: number - placeholderText?: string + widthPixels: number + placeholderText: string searchTerm: string } @@ -24,7 +24,7 @@ interface State { @ErrorHandling class SearchWidget extends Component { - public static defaultProps: Partial = { + public static defaultProps = { widthPixels: 440, placeholderText: 'Search...', searchTerm: '', diff --git a/ui/src/shared/components/threesizer/Division.tsx b/ui/src/shared/components/threesizer/Division.tsx index 0719e15233..bcce5cec5a 100644 --- a/ui/src/shared/components/threesizer/Division.tsx +++ b/ui/src/shared/components/threesizer/Division.tsx @@ -17,15 +17,15 @@ import { const NOOP = () => {} interface Props { - name?: string - handleDisplay?: string - style?: CSSProperties handlePixels: number id: string size: number + name: string offset: number draggable: boolean orientation: string + handleDisplay: string + style: CSSProperties activeHandleID: string headerOrientation: string render: (visibility: string, pixels: number) => ReactElement @@ -37,7 +37,7 @@ interface Props { } class Division extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { name: '', handleDisplay: 'visible', style: {}, diff --git a/ui/src/shared/components/threesizer/Threesizer.tsx b/ui/src/shared/components/threesizer/Threesizer.tsx index d7a50578f1..bdd836bee0 100644 --- a/ui/src/shared/components/threesizer/Threesizer.tsx +++ b/ui/src/shared/components/threesizer/Threesizer.tsx @@ -48,12 +48,12 @@ interface DivisionState extends DivisionProps { interface Props { divisions: DivisionProps[] orientation: string - containerClass?: string + containerClass: string } @ErrorHandling class Threesizer extends Component { - public static defaultProps: Partial = { + public static defaultProps = { orientation: HANDLE_HORIZONTAL, containerClass: '', } diff --git 
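
Editor's note: a pattern repeated throughout the component hunks above (PageHeader, ConfirmButton, DragAndDrop, EditableName, Notifications, SearchWidget, Threesizer, and others): props that have a default are now declared as required on the props interface, and `defaultProps` is a plain object literal rather than a `Partial<Props>`. With TypeScript 3.0+ JSX checking, callers may still omit those props. Below is a minimal sketch of the pattern; `LabeledValue` is a made-up component used only to illustrate it.

```tsx
import React, {PureComponent} from 'react'

interface Props {
  value: number
  // Required on the interface, but callers may omit it because a default exists below.
  label: string
}

class LabeledValue extends PureComponent<Props> {
  // No Partial<Props> annotation: the object literal itself supplies the default.
  public static defaultProps = {
    label: 'Value',
  }

  public render() {
    const {label, value} = this.props

    return (
      <div>
        {label}: {value}
      </div>
    )
  }
}

// Usage: <LabeledValue value={42} /> type-checks even though `label` is not passed.
export default LabeledValue
```
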
a/ui/src/shared/copy/notifications.ts b/ui/src/shared/copy/notifications.ts index 4373628a16..79e92b7dbc 100644 --- a/ui/src/shared/copy/notifications.ts +++ b/ui/src/shared/copy/notifications.ts @@ -845,6 +845,16 @@ export const createTemplateFailed = (error: string): Notification => ({ message: `Failed to export resource as template: ${error}`, }) +export const updateTemplateSucceeded = (): Notification => ({ + ...defaultSuccessNotification, + message: `Successfully updated template.`, +}) + +export const updateTemplateFailed = (error: string): Notification => ({ + ...defaultErrorNotification, + message: `Failed to update template: ${error}`, +}) + export const deleteTemplateFailed = (error: string): Notification => ({ ...defaultErrorNotification, message: `Failed to delete template: ${error}`, diff --git a/ui/src/shared/utils/resourceToTemplate.test.ts b/ui/src/shared/utils/resourceToTemplate.test.ts index 3845cf203d..b9925fb061 100644 --- a/ui/src/shared/utils/resourceToTemplate.test.ts +++ b/ui/src/shared/utils/resourceToTemplate.test.ts @@ -2,8 +2,9 @@ import { labelToRelationship, labelToIncluded, taskToTemplate, + variableToTemplate, } from 'src/shared/utils/resourceToTemplate' -import {TemplateType} from '@influxdata/influx' +import {TemplateType, Variable} from '@influxdata/influx' import {Label, Task, TaskStatus} from 'src/types' const myfavelabel: Label = { @@ -16,7 +17,7 @@ const myfavetask: Task = { authorizationID: '037b084ed9abc000', every: '24h0m0s', flux: - 'option task = {name: "lala", every: 86400000000000ns, offset: 60000000000ns}\n\nfrom(bucket: "defnuck")\n\t|> range(start: -task.every)', + 'option task = {name: "lala", every: 24h0m0s, offset: 1m0s}\n\nfrom(bucket: "defnuck")\n\t|> range(start: -task.every)', id: '037b0877b359a000', labels: [ { @@ -32,6 +33,20 @@ const myfavetask: Task = { status: TaskStatus.Active, } +const myVariable: Variable = { + id: '039ae3b3b74b0000', + orgID: '039aa15b38cb0000', + name: 'beep', + selected: null, + arguments: { + type: 'query', + values: { + query: 'test!', + language: 'flux', + }, + }, +} + describe('resourceToTemplate', () => { describe('labelToRelationship', () => { it('converts a label to a relationship struct', () => { @@ -59,6 +74,116 @@ describe('resourceToTemplate', () => { expect(actual).toEqual(expected) }) }) + + describe('variableToTemplate', () => { + it('converts a variable to a template', () => { + const actual = variableToTemplate(myVariable, []) + const expected = { + meta: { + version: '1', + name: 'beep-Template', + description: 'template created from variable: beep', + }, + content: { + data: { + type: 'variable', + id: '039ae3b3b74b0000', + attributes: { + name: 'beep', + arguments: { + type: 'query', + values: { + query: 'test!', + language: 'flux', + }, + }, + selected: null, + }, + relationships: { + variable: { + data: [], + }, + }, + }, + included: [], + }, + labels: [], + } + + expect(actual).toEqual(expected) + }) + + it('converts a variable with dependencies to a template', () => { + const parentArgs = { + values: {...myVariable.arguments.values, query: `v.${myVariable.name}`}, + } + const parentVar = { + ...myVariable, + id: '123Parent', + name: 'Parent Var', + arguments: { + ...myVariable.arguments, + ...parentArgs, + }, + } + const actual = variableToTemplate(parentVar, [myVariable]) + const expected = { + meta: { + version: '1', + name: 'Parent Var-Template', + description: 'template created from variable: Parent Var', + }, + content: { + data: { + type: 'variable', + id: '123Parent', + 
attributes: { + name: 'Parent Var', + arguments: { + type: 'query', + values: { + query: 'v.beep', + language: 'flux', + }, + }, + selected: null, + }, + relationships: { + variable: { + data: [ + { + type: 'variable', + id: '039ae3b3b74b0000', + }, + ], + }, + }, + }, + included: [ + { + type: 'variable', + id: '039ae3b3b74b0000', + attributes: { + name: 'beep', + arguments: { + type: 'query', + values: { + query: 'test!', + language: 'flux', + }, + }, + selected: null, + }, + }, + ], + }, + labels: [], + } + + expect(actual).toEqual(expected) + }) + }) + describe('taskToTemplate', () => { it('converts a task to a template', () => { const actual = taskToTemplate(myfavetask) @@ -69,7 +194,7 @@ describe('resourceToTemplate', () => { attributes: { every: '24h0m0s', flux: - 'option task = {name: "lala", every: 86400000000000ns, offset: 60000000000ns}\n\nfrom(bucket: "defnuck")\n\t|> range(start: -task.every)', + 'option task = {name: "lala", every: 24h0m0s, offset: 1m0s}\n\nfrom(bucket: "defnuck")\n\t|> range(start: -task.every)', name: 'lala', offset: '1m0s', status: 'active', diff --git a/ui/src/shared/utils/resourceToTemplate.ts b/ui/src/shared/utils/resourceToTemplate.ts index 84245c49ca..b2ecac72b5 100644 --- a/ui/src/shared/utils/resourceToTemplate.ts +++ b/ui/src/shared/utils/resourceToTemplate.ts @@ -29,6 +29,18 @@ const blankTaskTemplate = () => { } } +const blankVariableTemplate = () => { + const baseTemplate = blankTemplate() + return { + ...baseTemplate, + content: { + ...baseTemplate.content, + data: {...baseTemplate.content.data, type: TemplateType.Variable}, + }, + labels: [], + } +} + const blankDashboardTemplate = () => { const baseTemplate = blankTemplate() return { @@ -138,6 +150,41 @@ const cellToRelationship = (cell: Cell) => ({ id: cell.id, }) +export const variableToTemplate = ( + v: Variable, + dependencies: Variable[], + baseTemplate = blankVariableTemplate() +) => { + const variableName = _.get(v, 'name', '') + const templateName = `${variableName}-Template` + const variableData = variableToIncluded(v) + const dependencyRelationships = dependencies.map(d => + variableToRelationship(d) + ) + const includedDependencies = dependencies.map(d => variableToIncluded(d)) + return { + ...baseTemplate, + meta: { + ...baseTemplate.meta, + name: templateName, + description: `template created from variable: ${variableName}`, + }, + content: { + ...baseTemplate.content, + data: { + ...baseTemplate.content.data, + ...variableData, + relationships: { + [TemplateType.Variable]: { + data: [...dependencyRelationships], + }, + }, + }, + included: [...includedDependencies], + }, + } +} + const variableToIncluded = (v: Variable) => { const variableAttributes = _.pick(v, ['name', 'arguments', 'selected']) return { diff --git a/ui/src/shared/utils/toMinardTable.ts b/ui/src/shared/utils/toMinardTable.ts index 71dbde91e2..d1b34eb7f9 100644 --- a/ui/src/shared/utils/toMinardTable.ts +++ b/ui/src/shared/utils/toMinardTable.ts @@ -1,5 +1,5 @@ import {FluxTable} from 'src/types' -import {Table, ColumnType, isNumeric} from 'src/minard' +import {Table, ColumnType, isNumeric} from '@influxdata/vis' export const GROUP_KEY_COL_NAME = 'group_key' diff --git a/ui/src/shared/utils/view.ts b/ui/src/shared/utils/view.ts index 4fee26823d..1541db036f 100644 --- a/ui/src/shared/utils/view.ts +++ b/ui/src/shared/utils/view.ts @@ -7,8 +7,8 @@ import { } from 'src/shared/constants/thresholds' // Types -import {ViewType, ViewShape} from 'src/types' -import {HistogramPosition} from 'src/minard' +import {ViewType, 
ViewShape, Base, Scale} from 'src/types' +import {HistogramPosition} from '@influxdata/vis' import { XYView, XYViewGeom, @@ -22,6 +22,7 @@ import { ViewProperties, DashboardQuery, QueryEditMode, + BuilderConfig, } from 'src/types/dashboards' function defaultView() { @@ -35,14 +36,14 @@ export function defaultViewQuery(): DashboardQuery { name: '', text: '', editMode: QueryEditMode.Builder, - builderConfig: { - buckets: [], - tags: [{key: '_measurement', values: []}], - functions: [], - }, + builderConfig: defaultBuilderConfig(), } } +export function defaultBuilderConfig(): BuilderConfig { + return {buckets: [], tags: [{key: '_measurement', values: []}], functions: []} +} + function defaultLineViewProperties() { return { queries: [defaultViewQuery()], @@ -56,24 +57,16 @@ function defaultLineViewProperties() { label: '', prefix: '', suffix: '', - base: '10', - scale: 'linear', + base: Base.Ten, + scale: Scale.Linear, }, y: { bounds: ['', ''] as [string, string], label: '', prefix: '', suffix: '', - base: '10', - scale: 'linear', - }, - y2: { - bounds: ['', ''] as [string, string], - label: '', - prefix: '', - suffix: '', - base: '10', - scale: 'linear', + base: Base.Ten, + scale: Scale.Linear, }, }, } diff --git a/ui/src/store/configureStore.ts b/ui/src/store/configureStore.ts index a3592d754f..8954d74d21 100644 --- a/ui/src/store/configureStore.ts +++ b/ui/src/store/configureStore.ts @@ -13,7 +13,7 @@ import persistStateEnhancer from './persistStateEnhancer' import meReducer from 'src/shared/reducers/v2/me' import tasksReducer from 'src/tasks/reducers' import rangesReducer from 'src/dashboards/reducers/ranges' -import dashboardsReducer from 'src/dashboards/reducers/dashboards' +import {dashboardsReducer} from 'src/dashboards/reducers/dashboards' import viewsReducer from 'src/dashboards/reducers/views' import {timeMachinesReducer} from 'src/timeMachine/reducers' import orgsReducer from 'src/organizations/reducers/orgs' @@ -21,7 +21,6 @@ import orgViewReducer from 'src/organizations/reducers/orgView' import onboardingReducer from 'src/onboarding/reducers' import noteEditorReducer from 'src/dashboards/reducers/notes' import dataLoadingReducer from 'src/dataLoaders/reducers' -import protosReducer from 'src/protos/reducers' import {variablesReducer} from 'src/variables/reducers' import {labelsReducer} from 'src/labels/reducers' import {bucketsReducer} from 'src/buckets/reducers' @@ -51,7 +50,6 @@ export const rootReducer = combineReducers({ onboarding: onboardingReducer, noteEditor: noteEditorReducer, dataLoading: dataLoadingReducer, - protos: protosReducer, variables: variablesReducer, labels: labelsReducer, buckets: bucketsReducer, diff --git a/ui/src/style/chronograf.scss b/ui/src/style/chronograf.scss index c38ea801bf..7fb605adaf 100644 --- a/ui/src/style/chronograf.scss +++ b/ui/src/style/chronograf.scss @@ -106,6 +106,7 @@ @import 'src/dataLoaders/components/side_bar/SideBar.scss'; @import 'src/dataLoaders/components/DataLoadersOverlay.scss'; @import 'src/shared/components/EmptyGraphError.scss'; +@import 'src/shared/components/dapperScrollbars/DapperScrollbars.scss'; // External @import '../../node_modules/@influxdata/react-custom-scrollbars/dist/styles.css'; diff --git a/ui/src/tasks/components/TaskForm.tsx b/ui/src/tasks/components/TaskForm.tsx index 44d14ef03e..4dd5e265bd 100644 --- a/ui/src/tasks/components/TaskForm.tsx +++ b/ui/src/tasks/components/TaskForm.tsx @@ -36,15 +36,15 @@ import {TaskOptions, TaskSchedule} from 'src/utils/taskOptionsToFluxScript' interface Props { orgs: 
Organization[] taskOptions: TaskOptions + isInOverlay: boolean + canSubmit: boolean + onSubmit: () => void + dismiss: () => void onChangeScheduleType: (schedule: TaskSchedule) => void onChangeInput: (e: ChangeEvent) => void onChangeTaskOrgID: (orgID: string) => void onChangeToOrgName: (orgName: string) => void onChangeToBucketName: (bucketName: string) => void - isInOverlay?: boolean - onSubmit?: () => void - canSubmit?: boolean - dismiss?: () => void } interface State { @@ -54,12 +54,15 @@ interface State { const getBuckets = (org: Organization) => client.buckets.getAllByOrg(org.name) export default class TaskForm extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { isInOverlay: false, - onSubmit: () => {}, canSubmit: true, + onSubmit: () => {}, dismiss: () => {}, + onChangeToBucketName: () => {}, + onChangeToOrgName: () => {}, } + constructor(props: Props) { super(props) diff --git a/ui/src/tasks/components/TaskRunsList.tsx b/ui/src/tasks/components/TaskRunsList.tsx index 41fbb24ec5..610d1d57e5 100644 --- a/ui/src/tasks/components/TaskRunsList.tsx +++ b/ui/src/tasks/components/TaskRunsList.tsx @@ -27,7 +27,7 @@ export default class TaskRunsList extends PureComponent { constructor(props) { super(props) this.state = { - sortKey: null, + sortKey: 'scheduledFor', sortDirection: Sort.Descending, } } @@ -104,9 +104,12 @@ export default class TaskRunsList extends PureComponent { public listRuns = (runs: Run[]): JSX.Element => { const {taskID} = this.props + + let recentRuns = runs.slice(0, 20) + const runsRow = ( <> - {runs.map(r => ( + {recentRuns.map(r => ( ))} diff --git a/ui/src/tasks/containers/TaskPage.tsx b/ui/src/tasks/containers/TaskPage.tsx index 0074ea8eff..08e45887ad 100644 --- a/ui/src/tasks/containers/TaskPage.tsx +++ b/ui/src/tasks/containers/TaskPage.tsx @@ -88,8 +88,8 @@ class TaskPage extends PureComponent<
    { it('clears the interval property from the task options when cron is selected', () => { const initialState = defaultState - initialState.taskOptions = {...defaultTaskOptions, interval: '1d'} + initialState.taskOptions = {...defaultTaskOptions, interval: '24h'} // todo(docmerlin): allow for time units larger than 1d, right now h is the longest unit our s const actual = tasksReducer( initialState, diff --git a/ui/src/templates/actions/index.ts b/ui/src/templates/actions/index.ts index c283ae5f1d..c20bb293e9 100644 --- a/ui/src/templates/actions/index.ts +++ b/ui/src/templates/actions/index.ts @@ -23,6 +23,7 @@ export enum ActionTypes { SetExportTemplate = 'SET_EXPORT_TEMPLATE', RemoveTemplateSummary = 'REMOVE_TEMPLATE_SUMMARY', AddTemplateSummary = 'ADD_TEMPLATE_SUMMARY', + SetTemplateSummary = 'SET_TEMPLATE_SUMMARY', } export type Actions = @@ -31,6 +32,7 @@ export type Actions = | SetExportTemplate | RemoveTemplateSummary | AddTemplateSummary + | SetTemplateSummary export interface AddTemplateSummary { type: ActionTypes.AddTemplateSummary @@ -108,6 +110,33 @@ export const createTemplate = (template: DocumentCreate) => async dispatch => { } } +interface SetTemplateSummary { + type: ActionTypes.SetTemplateSummary + payload: {id: string; templateSummary: TemplateSummary} +} + +export const setTemplateSummary = ( + id: string, + templateSummary: TemplateSummary +): SetTemplateSummary => ({ + type: ActionTypes.SetTemplateSummary, + payload: {id, templateSummary}, +}) + +export const updateTemplate = (id: string, props: TemplateSummary) => async ( + dispatch +): Promise => { + try { + const {meta} = await client.templates.update(id, props) + + dispatch(setTemplateSummary(id, {...props, meta})) + dispatch(notify(copy.updateTemplateSucceeded())) + } catch (e) { + console.error(e) + dispatch(notify(copy.updateTemplateFailed(e))) + } +} + export const convertToTemplate = (id: string) => async ( dispatch ): Promise => { diff --git a/ui/src/templates/api/index.ts b/ui/src/templates/api/index.ts index f0c7fdc095..31c27d307f 100644 --- a/ui/src/templates/api/index.ts +++ b/ui/src/templates/api/index.ts @@ -8,6 +8,7 @@ import { TaskTemplate, TemplateBase, Task, + VariableTemplate, } from 'src/types' import {IDashboard, Cell} from '@influxdata/influx' import {client} from 'src/utils/api' @@ -173,7 +174,7 @@ const createViewsFromTemplate = async ( } const createVariablesFromTemplate = async ( - template: DashboardTemplate, + template: DashboardTemplate | VariableTemplate, orgID: string ) => { const { @@ -229,3 +230,32 @@ const createTaskLabelsFromTemplate = async ( const templateLabels = await createLabelsFromTemplate(template, task.orgID) await client.tasks.addLabels(task.id, templateLabels) } + +export const createVariableFromTemplate = async ( + template: VariableTemplate, + orgID: string +) => { + const {content} = template + + if ( + content.data.type !== TemplateType.Variable || + template.meta.version !== '1' + ) { + throw new Error('Can not create variable from this template') + } + + const createdVariable = await client.variables.create({ + ...content.data.attributes, + orgID, + }) + + if (!createdVariable || !createdVariable.id) { + throw new Error('Failed to create variable from template') + } + + await createVariablesFromTemplate(template, orgID) + + const variable = await client.variables.get(createdVariable.id) + + return variable +} diff --git a/ui/src/templates/components/EmptyTemplatesList.tsx b/ui/src/templates/components/EmptyTemplatesList.tsx index ecec8853e7..7b31bec799 100644 --- 
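
Editor's note: the new `createVariableFromTemplate` helper above validates the template's type and meta version, creates the variable plus any dependent variables from the template's included resources, then re-fetches and returns the created variable. A sketch of how an import handler might call it follows; `importVariable`, `importString`, and `orgID` are illustrative names, not identifiers from this changeset.

```tsx
// Sketch only: wraps the createVariableFromTemplate helper added in this diff.
import {createVariableFromTemplate} from 'src/templates/api'
import {VariableTemplate} from 'src/types'

export const importVariable = async (importString: string, orgID: string) => {
  // The import overlay hands over raw JSON; parse it into the template shape first.
  const template: VariableTemplate = JSON.parse(importString)

  // Throws if the document is not a v1 variable template or if creation fails.
  return await createVariableFromTemplate(template, orgID)
}
```
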
a/ui/src/templates/components/EmptyTemplatesList.tsx +++ b/ui/src/templates/components/EmptyTemplatesList.tsx @@ -2,21 +2,23 @@ import React, {FunctionComponent} from 'react' // Components -import {EmptyState} from '@influxdata/clockface' -import AddResourceDropdown from 'src/shared/components/AddResourceDropdown' +import { + EmptyState, + IconFont, + ComponentColor, + Button, +} from '@influxdata/clockface' // Types import {ComponentSize} from '@influxdata/clockface' interface Props { searchTerm: string - onCreate: () => void onImport: () => void } const EmptyTemplatesList: FunctionComponent = ({ searchTerm, - onCreate, onImport, }) => { if (searchTerm === '') { @@ -26,10 +28,11 @@ const EmptyTemplatesList: FunctionComponent = ({ text={"Looks like you don't have any Templates, why not create one?"} highlightWords={['Templates']} /> - ) diff --git a/ui/src/templates/components/TemplateCard.tsx b/ui/src/templates/components/TemplateCard.tsx index 57bd52dd5a..cda49e4216 100644 --- a/ui/src/templates/components/TemplateCard.tsx +++ b/ui/src/templates/components/TemplateCard.tsx @@ -7,7 +7,11 @@ import {withRouter, WithRouterProps} from 'react-router' import {ResourceList, Context, IconFont} from 'src/clockface' // Actions -import {deleteTemplate, cloneTemplate} from 'src/templates/actions' +import { + deleteTemplate, + cloneTemplate, + updateTemplate, +} from 'src/templates/actions' // Types import {TemplateSummary} from '@influxdata/influx' @@ -24,6 +28,7 @@ interface OwnProps { interface DispatchProps { onDelete: typeof deleteTemplate onClone: typeof cloneTemplate + onUpdate: typeof updateTemplate } type Props = DispatchProps & OwnProps @@ -39,7 +44,7 @@ export class TemplateCard extends PureComponent { name={() => ( { ) } - //TODO handle rename template - private doNothing = () => {} + private handleUpdateTemplate = (name: string) => { + const {template} = this.props + + this.props.onUpdate(template.id, { + ...template, + meta: {...template.meta, name}, + }) + } private get contextMenu(): JSX.Element { const { @@ -110,6 +121,7 @@ export class TemplateCard extends PureComponent { const mdtp: DispatchProps = { onDelete: deleteTemplate, onClone: cloneTemplate, + onUpdate: updateTemplate, } export default connect<{}, DispatchProps, OwnProps>( diff --git a/ui/src/templates/constants/defaultTemplates.ts b/ui/src/templates/constants/defaultTemplates.ts new file mode 100644 index 0000000000..4e5ea46687 --- /dev/null +++ b/ui/src/templates/constants/defaultTemplates.ts @@ -0,0 +1,989 @@ +export const systemTemplate = () => ({ + meta: { + version: '1', + name: 'System-Template', + description: 'Dashboard template for the system telegraf plugin', + }, + content: { + data: { + type: 'dashboard', + attributes: { + name: 'System', + description: + 'A collection of useful visualizations for monitoring your system stats', + }, + relationships: { + label: { + data: [], + }, + cell: { + data: [ + { + type: 'cell', + id: '039d8c0b62c34000', + }, + { + type: 'cell', + id: '039d8c0b63434000', + }, + { + type: 'cell', + id: '039d8c0b63c34000', + }, + { + type: 'cell', + id: '039d8c0b64034000', + }, + { + type: 'cell', + id: '039d8c0b64c34000', + }, + { + type: 'cell', + id: '039d8c0b65034000', + }, + { + type: 'cell', + id: '039d8c0b65834000', + }, + { + type: 'cell', + id: '039d8c0b66034000', + }, + { + type: 'cell', + id: '039d8c0b66834000', + }, + { + type: 'cell', + id: '039d8c0b67034000', + }, + { + type: 'cell', + id: '039d8c0b67434000', + }, + { + type: 'cell', + id: '039d8c0b67c34000', + }, + { + type: 
'cell', + id: '039d8c0b68434000', + }, + ], + }, + variable: { + data: [ + { + type: 'variable', + id: '0399e8fd61294000', + }, + ], + }, + }, + }, + included: [ + { + id: '039d8c0b62c34000', + type: 'cell', + attributes: { + x: 0, + y: 0, + w: 12, + h: 1, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b62c34000', + }, + }, + }, + }, + { + id: '039d8c0b63434000', + type: 'cell', + attributes: { + x: 0, + y: 1, + w: 3, + h: 1, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b63434000', + }, + }, + }, + }, + { + id: '039d8c0b63c34000', + type: 'cell', + attributes: { + x: 3, + y: 1, + w: 2, + h: 1, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b63c34000', + }, + }, + }, + }, + { + id: '039d8c0b64034000', + type: 'cell', + attributes: { + x: 5, + y: 1, + w: 2, + h: 1, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b64034000', + }, + }, + }, + }, + { + id: '039d8c0b64c34000', + type: 'cell', + attributes: { + x: 7, + y: 1, + w: 2, + h: 1, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b64c34000', + }, + }, + }, + }, + { + id: '039d8c0b65034000', + type: 'cell', + attributes: { + x: 0, + y: 2, + w: 3, + h: 3, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b65034000', + }, + }, + }, + }, + { + id: '039d8c0b65834000', + type: 'cell', + attributes: { + x: 3, + y: 2, + w: 3, + h: 3, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b65834000', + }, + }, + }, + }, + { + id: '039d8c0b66034000', + type: 'cell', + attributes: { + x: 6, + y: 2, + w: 3, + h: 3, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b66034000', + }, + }, + }, + }, + { + id: '039d8c0b66834000', + type: 'cell', + attributes: { + x: 9, + y: 1, + w: 3, + h: 4, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b66834000', + }, + }, + }, + }, + { + id: '039d8c0b67034000', + type: 'cell', + attributes: { + x: 0, + y: 5, + w: 3, + h: 3, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b67034000', + }, + }, + }, + }, + { + id: '039d8c0b67434000', + type: 'cell', + attributes: { + x: 3, + y: 5, + w: 3, + h: 3, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b67434000', + }, + }, + }, + }, + { + id: '039d8c0b67c34000', + type: 'cell', + attributes: { + x: 6, + y: 5, + w: 3, + h: 3, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b67c34000', + }, + }, + }, + }, + { + id: '039d8c0b68434000', + type: 'cell', + attributes: { + x: 9, + y: 5, + w: 3, + h: 3, + }, + relationships: { + view: { + data: { + type: 'view', + id: '039d8c0b68434000', + }, + }, + }, + }, + { + type: 'view', + id: '039d8c0b62c34000', + attributes: { + properties: { + shape: 'chronograf-v2', + type: 'markdown', + note: + 'This dashboard gives you an overview of System metrics with metrics from `system`, `mem`, `diskio`, `swap` and `net` measurements. 
See the [Telegraf Documentation](https://github.com/influxdata/telegraf/tree/master/plugins/inputs/system) for help configuring these plugins.', + }, + name: 'Name this Cell', + }, + }, + { + type: 'view', + id: '039d8c0b63434000', + attributes: { + properties: { + shape: 'chronograf-v2', + type: 'single-stat', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "system")\n |> filter(fn: (r) => r._field == "uptime")\n |> window(period: 1h)\n |> last()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> map(fn: (r) => r._value / 86400, mergeKey: true)\n |> yield(name: "last")\n \n \n ', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + prefix: '', + suffix: ' days', + colors: [ + { + id: 'base', + type: 'text', + hex: '#00C9FF', + name: 'laser', + value: 0, + }, + ], + decimalPlaces: { + isEnforced: false, + digits: 2, + }, + note: '', + showNoteWhenEmpty: false, + }, + name: 'System Uptime', + }, + }, + { + type: 'view', + id: '039d8c0b63c34000', + attributes: { + properties: { + shape: 'chronograf-v2', + type: 'single-stat', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "system")\n |> filter(fn: (r) => r._field == "n_cpus")\n |> window(period: v.windowPeriod)\n |> last()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "last")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + prefix: '', + suffix: ' cpus', + colors: [ + { + id: 'base', + type: 'text', + hex: '#00C9FF', + name: 'laser', + value: 0, + }, + ], + decimalPlaces: { + isEnforced: true, + digits: 2, + }, + note: '', + showNoteWhenEmpty: false, + }, + name: 'nCPUs', + }, + }, + { + type: 'view', + id: '039d8c0b64034000', + attributes: { + properties: { + shape: 'chronograf-v2', + type: 'single-stat', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "system")\n |> filter(fn: (r) => r._field == "load1")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + prefix: '', + suffix: '', + colors: [ + { + id: 'base', + type: 'text', + hex: '#00C9FF', + name: 'laser', + value: 0, + }, + ], + decimalPlaces: { + isEnforced: true, + digits: 2, + }, + note: '', + showNoteWhenEmpty: false, + }, + name: 'System Load', + }, + }, + { + type: 'view', + id: '039d8c0b64c34000', + attributes: { + properties: { + shape: 'chronograf-v2', + type: 'single-stat', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "mem")\n |> filter(fn: (r) => r._field == "total")\n |> window(period: v.windowPeriod)\n |> last()\n |> map(fn: (r) => r._value / 1024 / 1024 / 1024, mergeKey: true)\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "last")\n ', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + prefix: '', + 
suffix: ' GB', + colors: [ + { + id: 'base', + type: 'text', + hex: '#00C9FF', + name: 'laser', + value: 0, + }, + ], + decimalPlaces: { + isEnforced: true, + digits: 2, + }, + note: '', + showNoteWhenEmpty: false, + }, + name: 'Total Memory', + }, + }, + { + type: 'view', + id: '039d8c0b65034000', + attributes: { + properties: { + shape: 'chronograf-v2', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "disk")\n |> filter(fn: (r) => r._field == "used_percent")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + axes: { + x: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '%', + base: '10', + scale: 'linear', + }, + y2: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + }, + type: 'xy', + legend: {}, + geom: 'line', + colors: [], + note: '', + showNoteWhenEmpty: false, + }, + name: 'Disk Usage', + }, + }, + { + type: 'view', + id: '039d8c0b65834000', + attributes: { + properties: { + shape: 'chronograf-v2', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "cpu")\n |> filter(fn: (r) => r._field == "usage_user" or r._field == "usage_system" or r._field == "usage_idle")\n |> filter(fn: (r) => r.cpu == "cpu-total")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + axes: { + x: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '%', + base: '10', + scale: 'linear', + }, + y2: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + }, + type: 'xy', + legend: {}, + geom: 'line', + colors: [], + note: '', + showNoteWhenEmpty: false, + }, + name: 'CPU Usage', + }, + }, + { + type: 'view', + id: '039d8c0b66034000', + attributes: { + properties: { + shape: 'chronograf-v2', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "system")\n |> filter(fn: (r) => r._field == "load1" or r._field == "load5" or r._field == "load15")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + axes: { + x: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y: { + bounds: ['', ''], + label: 'Load', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y2: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + }, + type: 'xy', + legend: {}, + geom: 'line', + colors: [], + note: '', + showNoteWhenEmpty: false, + }, + name: 
'System Load', + }, + }, + { + type: 'view', + id: '039d8c0b66834000', + attributes: { + properties: { + shape: 'chronograf-v2', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "mem")\n |> filter(fn: (r) => r._field == "used_percent")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + axes: { + x: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '%', + base: '10', + scale: 'linear', + }, + y2: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + }, + type: 'line-plus-single-stat', + legend: {}, + colors: [ + { + id: 'base', + type: 'text', + hex: '#00C9FF', + name: 'laser', + value: 0, + }, + { + id: 'c2f922df-60a1-4471-91fc-c16427e7fcfb', + type: 'scale', + hex: '#8F8AF4', + name: 'Do Androids Dream of Electric Sheep?', + value: 0, + }, + { + id: '330f7fee-d44e-4a15-b2d6-2330178ec203', + type: 'scale', + hex: '#A51414', + name: 'Do Androids Dream of Electric Sheep?', + value: 0, + }, + { + id: 'e3c73eb3-665a-414b-afdd-1686c9b962d9', + type: 'scale', + hex: '#F4CF31', + name: 'Do Androids Dream of Electric Sheep?', + value: 0, + }, + ], + prefix: '', + suffix: '%', + decimalPlaces: { + isEnforced: true, + digits: 1, + }, + note: '', + showNoteWhenEmpty: false, + }, + name: 'Memory Usage', + }, + }, + { + type: 'view', + id: '039d8c0b67034000', + attributes: { + properties: { + shape: 'chronograf-v2', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "diskio")\n |> filter(fn: (r) => r._field == "read_bytes" or r._field == "write_bytes")\n |> derivative(unit: v.windowPeriod, nonNegative: false)\n |> yield(name: "derivative")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + axes: { + x: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y: { + bounds: ['', ''], + label: 'Bytes', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y2: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + }, + type: 'xy', + legend: {}, + geom: 'line', + colors: [], + note: '', + showNoteWhenEmpty: false, + }, + name: 'Disk IO', + }, + }, + { + type: 'view', + id: '039d8c0b67434000', + attributes: { + properties: { + shape: 'chronograf-v2', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "net")\n |> filter(fn: (r) => r._field == "bytes_recv" or r._field == "bytes_sent")\n |> derivative(unit: v.windowPeriod, nonNegative: false)\n |> yield(name: "derivative")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + axes: { + x: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y: { + bounds: ['', ''], + label: 'Bytes', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y2: { + bounds: ['', ''], + 
label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + }, + type: 'xy', + legend: {}, + geom: 'line', + colors: [], + note: '', + showNoteWhenEmpty: false, + }, + name: 'Network', + }, + }, + { + type: 'view', + id: '039d8c0b67c34000', + attributes: { + properties: { + shape: 'chronograf-v2', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "processes")\n |> filter(fn: (r) => r._field == "running" or r._field == "blocked" or r._field == "idle" or r._field == "unknown")\n |> window(period: v.windowPeriod)\n |> max()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "max")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + axes: { + x: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y2: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + }, + type: 'xy', + legend: {}, + geom: 'line', + colors: [], + note: '', + showNoteWhenEmpty: false, + }, + name: 'Processes', + }, + }, + { + type: 'view', + id: '039d8c0b68434000', + attributes: { + properties: { + shape: 'chronograf-v2', + queries: [ + { + text: + 'from(bucket: v.bucket)\n |> range(start: v.timeRangeStart)\n |> filter(fn: (r) => r._measurement == "swap")\n |> filter(fn: (r) => r._field == "total" or r._field == "used")\n |> window(period: v.windowPeriod)\n |> mean()\n |> group(columns: ["_value", "_time", "_start", "_stop"], mode: "except")\n |> yield(name: "mean")', + editMode: 'advanced', + name: '', + builderConfig: { + buckets: [], + tags: [{key: '_measurement', values: []}], + functions: [], + }, + }, + ], + axes: { + x: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + y2: { + bounds: ['', ''], + label: '', + prefix: '', + suffix: '', + base: '10', + scale: 'linear', + }, + }, + type: 'xy', + legend: {}, + geom: 'line', + colors: [], + note: '', + showNoteWhenEmpty: false, + }, + name: 'Swap', + }, + }, + { + id: '0399e8fd61294000', + type: 'variable', + attributes: { + name: 'bucket', + arguments: { + type: 'query', + values: { + query: 'buckets()\n |> map(fn: (r) => r.name)\n', + language: 'flux', + }, + }, + selected: null, + }, + }, + ], + }, + labels: [], +}) diff --git a/ui/src/templates/constants/index.ts b/ui/src/templates/constants/index.ts index 171eef9f5c..7a2ed1aafd 100644 --- a/ui/src/templates/constants/index.ts +++ b/ui/src/templates/constants/index.ts @@ -1 +1,5 @@ -export const DEFAULT_TEMPLATE_NAME = 'Untitled Template' +import * as defaultTemplates from 'src/templates/constants/defaultTemplates' + +const DEFAULT_TEMPLATE_NAME = 'Untitled Template' + +export {defaultTemplates, DEFAULT_TEMPLATE_NAME} diff --git a/ui/src/templates/reducers/index.test.ts b/ui/src/templates/reducers/index.test.ts new file mode 100644 index 0000000000..b9f1012de8 --- /dev/null +++ b/ui/src/templates/reducers/index.test.ts @@ -0,0 +1,36 @@ +import templatesReducer, {defaultState} from 'src/templates/reducers' +import {setTemplateSummary} from 'src/templates/actions' + +describe('templatesReducer', () => { + 
describe('setTemplateSummary', () => { + it('can update the name of a template', () => { + const initialState = defaultState() + const initialTemplate = { + id: 'abc', + labels: [], + meta: {name: 'Belcalis', version: '1'}, + } + initialState.items.push(initialTemplate) + + const actual = templatesReducer( + initialState, + setTemplateSummary(initialTemplate.id, { + ...initialTemplate, + meta: {...initialTemplate.meta, name: 'Cardi B'}, + }) + ) + + const expected = { + ...defaultState(), + items: [ + { + ...initialTemplate, + meta: {...initialTemplate.meta, name: 'Cardi B'}, + }, + ], + } + + expect(actual).toEqual(expected) + }) + }) +}) diff --git a/ui/src/templates/reducers/index.ts b/ui/src/templates/reducers/index.ts index d55e690213..8e46d87e64 100644 --- a/ui/src/templates/reducers/index.ts +++ b/ui/src/templates/reducers/index.ts @@ -9,7 +9,7 @@ export interface TemplatesState { exportTemplate: {status: RemoteDataState; item: DocumentCreate; orgID: string} } -const defaultState = (): TemplatesState => ({ +export const defaultState = (): TemplatesState => ({ status: RemoteDataState.NotStarted, items: [], exportTemplate: { @@ -19,7 +19,7 @@ const defaultState = (): TemplatesState => ({ }, }) -const templatesReducer = ( +export const templatesReducer = ( state: TemplatesState = defaultState(), action: Actions ): TemplatesState => @@ -42,6 +42,16 @@ const templatesReducer = ( return } + case ActionTypes.SetTemplateSummary: { + const filtered = draftState.items.filter(t => { + return t.id !== action.payload.id + }) + + draftState.items = [...filtered, action.payload.templateSummary] + + return + } + case ActionTypes.SetExportTemplate: { const {status, item, orgID} = action.payload draftState.exportTemplate.status = status diff --git a/ui/src/timeMachine/actions/index.ts b/ui/src/timeMachine/actions/index.ts index ac5907d52e..26edfee602 100644 --- a/ui/src/timeMachine/actions/index.ts +++ b/ui/src/timeMachine/actions/index.ts @@ -21,7 +21,7 @@ import { TimeMachineTab, } from 'src/types' import {Color} from 'src/types/colors' -import {Table, HistogramPosition, isNumeric} from 'src/minard' +import {Table, HistogramPosition, isNumeric} from '@influxdata/vis' export type Action = | QueryBuilderAction diff --git a/ui/src/timeMachine/components/TagSelector.tsx b/ui/src/timeMachine/components/TagSelector.tsx index 210ae4c001..df7aa49bef 100644 --- a/ui/src/timeMachine/components/TagSelector.tsx +++ b/ui/src/timeMachine/components/TagSelector.tsx @@ -113,12 +113,12 @@ class TagSelector extends PureComponent { > diff --git a/ui/src/timeMachine/components/ToolbarTab.tsx b/ui/src/timeMachine/components/ToolbarTab.tsx index fa700438ea..70f4f4f9a2 100644 --- a/ui/src/timeMachine/components/ToolbarTab.tsx +++ b/ui/src/timeMachine/components/ToolbarTab.tsx @@ -1,20 +1,15 @@ // Libraries import React, {PureComponent} from 'react' -interface PassedProps { +interface Props { onSetActive: () => void name: string active: boolean + testID: string } -interface DefaultProps { - testID?: string -} - -type Props = PassedProps & DefaultProps - export default class ToolbarTab extends PureComponent { - public static defaultProps: DefaultProps = { + public static defaultProps = { testID: 'toolbar-tab', } diff --git a/ui/src/timeMachine/components/fluxFunctionsToolbar/ToolbarFunction.tsx b/ui/src/timeMachine/components/fluxFunctionsToolbar/ToolbarFunction.tsx index 3ca98fc272..a111262161 100644 --- a/ui/src/timeMachine/components/fluxFunctionsToolbar/ToolbarFunction.tsx +++ 
b/ui/src/timeMachine/components/fluxFunctionsToolbar/ToolbarFunction.tsx @@ -11,7 +11,7 @@ import {FluxToolbarFunction} from 'src/types/shared' interface Props { func: FluxToolbarFunction onClickFunction: (name: string, example: string) => void - testID?: string + testID: string } interface State { @@ -19,7 +19,7 @@ interface State { } class ToolbarFunction extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { testID: 'toolbar-function', } diff --git a/ui/src/timeMachine/components/view_options/HistogramOptions.tsx b/ui/src/timeMachine/components/view_options/HistogramOptions.tsx index 655227a5de..bfb8192f7f 100644 --- a/ui/src/timeMachine/components/view_options/HistogramOptions.tsx +++ b/ui/src/timeMachine/components/view_options/HistogramOptions.tsx @@ -24,7 +24,7 @@ import {getActiveTimeMachine} from 'src/timeMachine/selectors' // Types import {ComponentStatus} from '@influxdata/clockface' -import {HistogramPosition} from 'src/minard' +import {HistogramPosition} from '@influxdata/vis' import {Color} from 'src/types/colors' import {AppState} from 'src/types' diff --git a/ui/src/timeMachine/components/view_options/ThresholdItem.tsx b/ui/src/timeMachine/components/view_options/ThresholdItem.tsx index ccbb40bb59..6c4dddb1bb 100644 --- a/ui/src/timeMachine/components/view_options/ThresholdItem.tsx +++ b/ui/src/timeMachine/components/view_options/ThresholdItem.tsx @@ -22,11 +22,11 @@ import {SeverityColor, SeverityColorOptions} from 'src/types/logs' import {ErrorHandling} from 'src/shared/decorators/errors' interface Props { - label?: string threshold: Color - isBase?: boolean - isDeletable?: boolean - disableColor?: boolean + label: string + isBase: boolean + isDeletable: boolean + disableColor: boolean onChooseColor: (threshold: Color) => void onValidateColorValue: (threshold: Color, targetValue: number) => boolean onUpdateColorValue: (threshold: Color, targetValue: number) => void @@ -40,7 +40,7 @@ interface State { @ErrorHandling class Threshold extends PureComponent { - public static defaultProps: Partial = { + public static defaultProps = { label: 'Value is <=', disableColor: false, isDeletable: true, diff --git a/ui/src/timeMachine/reducers/index.ts b/ui/src/timeMachine/reducers/index.ts index c22e089d27..57cb75f480 100644 --- a/ui/src/timeMachine/reducers/index.ts +++ b/ui/src/timeMachine/reducers/index.ts @@ -79,13 +79,7 @@ export const initialStateHelper = (): TimeMachineState => ({ activeQueryIndex: 0, availableXColumns: [], availableGroupColumns: [], - queryResults: { - files: null, - status: RemoteDataState.NotStarted, - isInitialFetch: true, - fetchDuration: null, - errorMessage: null, - }, + queryResults: initialQueryResultsState(), queryBuilder: { buckets: [], bucketsStatus: RemoteDataState.NotStarted, @@ -123,7 +117,7 @@ export const timeMachinesReducer = ( hidden: false, })) const queryBuilder = initialQueryBuilderState(draftQueries[0].builderConfig) - const activeQueryIndex = 0 + const queryResults = initialQueryResultsState() return { ...state, @@ -134,9 +128,13 @@ export const timeMachinesReducer = ( ...activeTimeMachine, ...initialState, activeTab: TimeMachineTab.Queries, - activeQueryIndex, + isViewingRawData: false, + availableXColumns: [], + availableGroupColumns: [], + activeQueryIndex: 0, draftQueries, queryBuilder, + queryResults, }, }, } @@ -839,6 +837,14 @@ const initialQueryBuilderState = ( } } +const initialQueryResultsState = (): QueryResultsState => ({ + files: null, + status: RemoteDataState.NotStarted, + 
isInitialFetch: true, + fetchDuration: null, + errorMessage: null, +}) + const buildActiveQuery = (draftState: TimeMachineState) => { const draftQuery = draftState.draftQueries[draftState.activeQueryIndex] diff --git a/ui/src/types/dashboards.ts b/ui/src/types/dashboards.ts index c86e32cfeb..d4cef22ac9 100644 --- a/ui/src/types/dashboards.ts +++ b/ui/src/types/dashboards.ts @@ -1,4 +1,4 @@ -import {HistogramPosition} from 'src/minard' +import {HistogramPosition} from '@influxdata/vis' import {Color} from 'src/types/colors' import { @@ -7,13 +7,23 @@ import { Cell as CellAPI, } from '@influxdata/influx' +export enum Scale { + Linear = 'linear', + Log = 'log', +} + +export enum Base { + Two = '2', + Ten = '10', +} + export interface Axis { label: string prefix: string suffix: string - base: string - scale: string - bounds: [string, string] + base: Base + scale: Scale + bounds: [string, string] | [null, null] } export type TimeSeriesValue = string | number | null | undefined diff --git a/ui/src/types/stores.ts b/ui/src/types/stores.ts index 4ace82dec7..4d5cf97bcd 100644 --- a/ui/src/types/stores.ts +++ b/ui/src/types/stores.ts @@ -1,4 +1,3 @@ -import {Dashboard} from 'src/types/dashboards' import {Organization} from 'src/types/orgs' import {Links} from 'src/types/links' import {Notification} from 'src/types' @@ -11,7 +10,6 @@ import {MeState} from 'src/shared/reducers/v2/me' import {NoteEditorState} from 'src/dashboards/reducers/notes' import {DataLoadingState} from 'src/dataLoaders/reducers' import {OnboardingState} from 'src/onboarding/reducers' -import {ProtosState} from 'src/protos/reducers' import {VariablesState} from 'src/variables/reducers' import {OrgViewState} from 'src/organizations/reducers/orgView' import {LabelsState} from 'src/labels/reducers' @@ -23,6 +21,7 @@ import {RangeState} from 'src/dashboards/reducers/ranges' import {ViewsState} from 'src/dashboards/reducers/views' import {ScrapersState} from 'src/scrapers/reducers' import {UserSettingsState} from 'src/userSettings/reducers' +import {DashboardsState} from 'src/dashboards/reducers/dashboards' export interface AppState { VERSION: string @@ -33,7 +32,7 @@ export interface AppState { app: AppPresentationState ranges: RangeState views: ViewsState - dashboards: Dashboard[] + dashboards: DashboardsState notifications: Notification[] timeMachines: TimeMachinesState routing: RouterState @@ -45,7 +44,6 @@ export interface AppState { onboarding: OnboardingState noteEditor: NoteEditorState dataLoading: DataLoadingState - protos: ProtosState variables: VariablesState tokens: AuthorizationsState templates: TemplatesState diff --git a/ui/src/types/templates.ts b/ui/src/types/templates.ts index 82903d5a78..688ad3fedc 100644 --- a/ui/src/types/templates.ts +++ b/ui/src/types/templates.ts @@ -98,6 +98,8 @@ export type DashboardTemplateIncluded = | LabelIncluded | VariableIncluded +export type VariableTemplateIncluded = LabelIncluded | VariableIncluded + // Template Datas interface TaskTemplateData extends TemplateData { type: TemplateType.Task @@ -117,6 +119,15 @@ interface DashboardTemplateData extends TemplateData { } } +interface VariableTemplateData extends TemplateData { + type: TemplateType.Variable + attributes: Variable + relationships: { + [TemplateType.Label]: {data: LabelRelationship[]} + [TemplateType.Variable]: {data: VariableRelationship[]} + } +} + // Templates export interface TaskTemplate extends TemplateBase { content: { @@ -132,6 +143,13 @@ export interface DashboardTemplate extends TemplateBase { } } +export 
interface VariableTemplate extends TemplateBase { + content: { + data: VariableTemplateData + included: VariableTemplateIncluded[] + } +} + export type Template = TaskTemplate | DashboardTemplate export interface TemplateSummary extends DocumentListEntry { diff --git a/ui/src/variables/actions/index.ts b/ui/src/variables/actions/index.ts index bc220a16b1..6362a47fa1 100644 --- a/ui/src/variables/actions/index.ts +++ b/ui/src/variables/actions/index.ts @@ -14,14 +14,23 @@ import { createVariableSuccess, updateVariableSuccess, } from 'src/shared/copy/notifications' +import {setExportTemplate} from 'src/templates/actions' + +// APIs +import {createVariableFromTemplate as createVariableFromTemplateAJAX} from 'src/templates/api' // Utils import {getValueSelections, getVariablesForOrg} from 'src/variables/selectors' import {WrappedCancelablePromise, CancellationError} from 'src/types/promises' +import {variableToTemplate} from 'src/shared/utils/resourceToTemplate' +import {findDependentVariables} from 'src/variables/utils/hydrateVars' + +// Constants +import * as copy from 'src/shared/copy/notifications' // Types import {Dispatch} from 'redux-thunk' -import {RemoteDataState} from 'src/types' +import {RemoteDataState, VariableTemplate} from 'src/types' import {GetState} from 'src/types' import {Variable} from '@influxdata/influx' import {VariableValuesByID} from 'src/variables/types' @@ -174,6 +183,26 @@ export const createVariable = (variable: Variable) => async ( } } +export const createVariableFromTemplate = ( + template: VariableTemplate, + orgID: string +) => async (dispatch: Dispatch) => { + try { + const createdVariable = await createVariableFromTemplateAJAX( + template, + orgID + ) + + dispatch( + setVariable(createdVariable.id, RemoteDataState.Done, createdVariable) + ) + dispatch(notify(createVariableSuccess(createdVariable.name))) + } catch (e) { + console.error(e) + dispatch(notify(createVariableFailed(e.response.data.message))) + } +} + export const updateVariable = (id: string, props: Partial) => async ( dispatch: Dispatch ) => { @@ -246,3 +275,23 @@ export const refreshVariableValues = ( dispatch(setValues(contextID, RemoteDataState.Error)) } } + +export const convertToTemplate = (variableID: string) => async ( + dispatch +): Promise => { + try { + dispatch(setExportTemplate(RemoteDataState.Loading)) + + const variable = await client.variables.get(variableID) + const allVariables = await client.variables.getAll() + + const dependencies = findDependentVariables(variable, allVariables) + const variableTemplate = variableToTemplate(variable, dependencies) + const orgID = variable.orgID // TODO remove when org is implicit app state + + dispatch(setExportTemplate(RemoteDataState.Done, variableTemplate, orgID)) + } catch (error) { + dispatch(setExportTemplate(RemoteDataState.Error)) + dispatch(notify(copy.createTemplateFailed(error))) + } +} diff --git a/ui/src/variables/components/VariableImportOverlay.tsx b/ui/src/variables/components/VariableImportOverlay.tsx new file mode 100644 index 0000000000..55e61777ab --- /dev/null +++ b/ui/src/variables/components/VariableImportOverlay.tsx @@ -0,0 +1,83 @@ +import React, {PureComponent} from 'react' +import {withRouter, WithRouterProps} from 'react-router' +import {connect} from 'react-redux' + +// Components +import ImportOverlay from 'src/shared/components/ImportOverlay' + +// Actions +import { + createVariableFromTemplate as createVariableFromTemplateAction, + getVariables as getVariablesAction, +} from 'src/variables/actions' +import 
{notify as notifyAction} from 'src/shared/actions/notifications' + +// Types +import {AppState, Organization} from 'src/types' + +interface DispatchProps { + getVariables: typeof getVariablesAction + createVariableFromTemplate: typeof createVariableFromTemplateAction + notify: typeof notifyAction +} + +interface StateProps { + org: Organization +} + +interface OwnProps extends WithRouterProps { + params: {orgID: string} +} + +type Props = DispatchProps & OwnProps & StateProps + +class VariableImportOverlay extends PureComponent { + public render() { + return ( + + ) + } + + private onDismiss = () => { + const {router} = this.props + + router.goBack() + } + + private handleImportVariable = async ( + uploadContent: string, + orgID: string + ): Promise => { + const {createVariableFromTemplate, getVariables} = this.props + + const template = JSON.parse(uploadContent) + await createVariableFromTemplate(template, orgID) + + await getVariables() + + this.onDismiss() + } +} + +const mstp = (state: AppState, props: Props): StateProps => { + const {orgs} = state + + const org = orgs.find(o => o.id === props.params.orgID) + + return {org} +} + +const mdtp: DispatchProps = { + notify: notifyAction, + createVariableFromTemplate: createVariableFromTemplateAction, + getVariables: getVariablesAction, +} + +export default connect( + mstp, + mdtp +)(withRouter(VariableImportOverlay)) diff --git a/ui/src/variables/utils/hydrateVars.test.ts b/ui/src/variables/utils/hydrateVars.test.ts index 1486aad56d..83e0bae72a 100644 --- a/ui/src/variables/utils/hydrateVars.test.ts +++ b/ui/src/variables/utils/hydrateVars.test.ts @@ -1,6 +1,6 @@ // Utils import {ValueFetcher} from 'src/variables/utils/ValueFetcher' -import {hydrateVars} from 'src/variables/utils/hydrateVars' +import {hydrateVars, exportVariables} from 'src/variables/utils/hydrateVars' // Types import {Variable} from '@influxdata/influx' @@ -40,6 +40,20 @@ class FakeFetcher implements ValueFetcher { } describe('hydrate vars', () => { + describe('exportVariables', () => { + test('should find variable exports', () => { + const a = createVariable('a', 'f(x: v.b, v.c)') + const b = createVariable('b', 'beep') + const c = createVariable('c', 'robit') + const d = createVariable('d', 'nooooo!') + const vars = [a, b, c, d] + + const actual = exportVariables([a], vars) + + expect(actual).toEqual([a, b, c]) + }) + }) + test('should invalidate cyclic subgraphs', async () => { // Construct the following graph: // diff --git a/ui/src/variables/utils/hydrateVars.ts b/ui/src/variables/utils/hydrateVars.ts index a32dee1765..ff6ac910ca 100644 --- a/ui/src/variables/utils/hydrateVars.ts +++ b/ui/src/variables/utils/hydrateVars.ts @@ -33,6 +33,34 @@ interface HydrateVarsOptions { fetcher?: ValueFetcher } +export const findDependentVariables = ( + variable: Variable, + allVariables: Variable[] +): Variable[] => { + const query: string = variable.arguments.values.query + const dependencies = allVariables.filter(maybeChild => + query.includes(`${OPTION_NAME}.${maybeChild.name}`) + ) + + return dependencies +} + +export const exportVariables = ( + variables: Variable[], + allVariables: Variable[] +): Variable[] => { + const varSet = new Set() + + for (const v of variables) { + varSet.add(v) + for (const d of findDependentVariables(v, allVariables)) { + varSet.add(d) + } + } + + return [...varSet] +} + const createVariableGraph = (allVariables: Variable[]): VariableNode[] => { const nodesByID: {[variableID: string]: VariableNode} = {} diff --git a/view.go b/view.go deleted file 
mode 100644 index 33ce80f04d..0000000000 --- a/view.go +++ /dev/null @@ -1,542 +0,0 @@ -package influxdb - -import ( - "context" - "encoding/json" - "fmt" -) - -// ErrViewNotFound is the error msg for a missing View. -const ErrViewNotFound = "view not found" - -// ops for view. -const ( - OpFindViewByID = "FindViewByID" - OpFindViews = "FindViews" - OpCreateView = "CreateView" - OpUpdateView = "UpdateView" - OpDeleteView = "DeleteView" -) - -// NOTE: This service has been DEPRECATED and should be removed. Views are now -// resources that are nested beneath dashboards. -// -// ViewService represents a service for managing View data. -type ViewService interface { - // FindViewByID returns a single View by ID. - FindViewByID(ctx context.Context, id ID) (*View, error) - - // FindViews returns a list of Views that match filter and the total count of matching Views. - // Additional options provide pagination & sorting. - FindViews(ctx context.Context, filter ViewFilter) ([]*View, int, error) - - // CreateView creates a new View and sets b.ID with the new identifier. - CreateView(ctx context.Context, b *View) error - - // UpdateView updates a single View with changeset. - // Returns the new View state after update. - UpdateView(ctx context.Context, id ID, upd ViewUpdate) (*View, error) - - // DeleteView removes a View by ID. - DeleteView(ctx context.Context, id ID) error -} - -// ViewUpdate is a struct for updating Views. -type ViewUpdate struct { - ViewContentsUpdate - Properties ViewProperties -} - -// Valid validates the update struct. It expects minimal values to be set. -func (u ViewUpdate) Valid() *Error { - _, ok := u.Properties.(EmptyViewProperties) - if u.Name == nil && ok { - return &Error{ - Code: EInvalid, - Msg: "expected at least one attribute to be updated", - } - } - - return nil -} - -// Apply updates a view with the view updates properties. -func (u ViewUpdate) Apply(v *View) error { - if err := u.Valid(); err != nil { - return err - } - - if u.Name != nil { - v.Name = *u.Name - } - - if u.Properties != nil { - v.Properties = u.Properties - } - - return nil -} - -// ViewContentsUpdate is a struct for updating the non properties content of a View. -type ViewContentsUpdate struct { - Name *string `json:"name"` -} - -// ViewFilter represents a set of filter that restrict the returned results. -type ViewFilter struct { - ID *ID - Types []string -} - -// View holds positional and visual information for a View. -type View struct { - ViewContents - Properties ViewProperties -} - -// ViewContents is the id and name of a specific view. -type ViewContents struct { - ID ID `json:"id,omitempty"` - Name string `json:"name"` -} - -// ViewProperties is used to mark other structures as conforming to a View. -type ViewProperties interface { - viewProperties() - GetType() string -} - -// EmptyViewProperties is visualization that has no values -type EmptyViewProperties struct{} - -func (v EmptyViewProperties) viewProperties() {} - -func (v EmptyViewProperties) GetType() string { return "" } - -// UnmarshalViewPropertiesJSON unmarshals JSON bytes into a ViewProperties. 
-func UnmarshalViewPropertiesJSON(b []byte) (ViewProperties, error) { - var v struct { - B json.RawMessage `json:"properties"` - } - - if err := json.Unmarshal(b, &v); err != nil { - return nil, err - } - - if len(v.B) == 0 { - // Then there wasn't any visualization field, so there's no need unmarshal it - return EmptyViewProperties{}, nil - } - - var t struct { - Shape string `json:"shape"` - Type string `json:"type"` - } - - if err := json.Unmarshal(v.B, &t); err != nil { - return nil, err - } - - var vis ViewProperties - switch t.Shape { - case "chronograf-v2": - switch t.Type { - case "xy": - var xyv XYViewProperties - if err := json.Unmarshal(v.B, &xyv); err != nil { - return nil, err - } - vis = xyv - case "single-stat": - var ssv SingleStatViewProperties - if err := json.Unmarshal(v.B, &ssv); err != nil { - return nil, err - } - vis = ssv - case "gauge": - var gv GaugeViewProperties - if err := json.Unmarshal(v.B, &gv); err != nil { - return nil, err - } - vis = gv - case "table": - var tv TableViewProperties - if err := json.Unmarshal(v.B, &tv); err != nil { - return nil, err - } - vis = tv - case "markdown": - var mv MarkdownViewProperties - if err := json.Unmarshal(v.B, &mv); err != nil { - return nil, err - } - vis = mv - case "log-viewer": // happens in log viewer stays in log viewer. - var lv LogViewProperties - if err := json.Unmarshal(v.B, &lv); err != nil { - return nil, err - } - vis = lv - case "line-plus-single-stat": - var lv LinePlusSingleStatProperties - if err := json.Unmarshal(v.B, &lv); err != nil { - return nil, err - } - vis = lv - case "histogram": - var hv HistogramViewProperties - if err := json.Unmarshal(v.B, &hv); err != nil { - return nil, err - } - vis = hv - } - case "empty": - var ev EmptyViewProperties - if err := json.Unmarshal(v.B, &ev); err != nil { - return nil, err - } - vis = ev - default: - return nil, fmt.Errorf("unknown type %v", t.Shape) - } - - return vis, nil -} - -// MarshalViewPropertiesJSON encodes a view into JSON bytes. 
-func MarshalViewPropertiesJSON(v ViewProperties) ([]byte, error) { - var s interface{} - switch vis := v.(type) { - case SingleStatViewProperties: - s = struct { - Shape string `json:"shape"` - SingleStatViewProperties - }{ - Shape: "chronograf-v2", - - SingleStatViewProperties: vis, - } - case TableViewProperties: - s = struct { - Shape string `json:"shape"` - TableViewProperties - }{ - Shape: "chronograf-v2", - - TableViewProperties: vis, - } - case GaugeViewProperties: - s = struct { - Shape string `json:"shape"` - GaugeViewProperties - }{ - Shape: "chronograf-v2", - - GaugeViewProperties: vis, - } - case XYViewProperties: - s = struct { - Shape string `json:"shape"` - XYViewProperties - }{ - Shape: "chronograf-v2", - - XYViewProperties: vis, - } - case LinePlusSingleStatProperties: - s = struct { - Shape string `json:"shape"` - LinePlusSingleStatProperties - }{ - Shape: "chronograf-v2", - - LinePlusSingleStatProperties: vis, - } - case HistogramViewProperties: - s = struct { - Shape string `json:"shape"` - HistogramViewProperties - }{ - Shape: "chronograf-v2", - - HistogramViewProperties: vis, - } - case MarkdownViewProperties: - s = struct { - Shape string `json:"shape"` - MarkdownViewProperties - }{ - Shape: "chronograf-v2", - - MarkdownViewProperties: vis, - } - case LogViewProperties: - s = struct { - Shape string `json:"shape"` - LogViewProperties - }{ - Shape: "chronograf-v2", - LogViewProperties: vis, - } - default: - s = struct { - Shape string `json:"shape"` - EmptyViewProperties - }{ - Shape: "empty", - EmptyViewProperties: EmptyViewProperties{}, - } - } - return json.Marshal(s) -} - -// MarshalJSON encodes a view to JSON bytes. -func (c View) MarshalJSON() ([]byte, error) { - vis, err := MarshalViewPropertiesJSON(c.Properties) - if err != nil { - return nil, err - } - - return json.Marshal(struct { - ViewContents - ViewProperties json.RawMessage `json:"properties"` - }{ - ViewContents: c.ViewContents, - ViewProperties: vis, - }) -} - -// UnmarshalJSON decodes JSON bytes into the corresponding view type (those that implement ViewProperties). -func (c *View) UnmarshalJSON(b []byte) error { - if err := json.Unmarshal(b, &c.ViewContents); err != nil { - return err - } - - v, err := UnmarshalViewPropertiesJSON(b) - if err != nil { - return err - } - c.Properties = v - return nil -} - -// UnmarshalJSON decodes JSON bytes into the corresponding view update type (those that implement ViewProperties). -func (u *ViewUpdate) UnmarshalJSON(b []byte) error { - if err := json.Unmarshal(b, &u.ViewContentsUpdate); err != nil { - return err - } - - v, err := UnmarshalViewPropertiesJSON(b) - if err != nil { - return err - } - u.Properties = v - return nil -} - -// MarshalJSON encodes a view to JSON bytes. 
-func (u ViewUpdate) MarshalJSON() ([]byte, error) { - vis, err := MarshalViewPropertiesJSON(u.Properties) - if err != nil { - return nil, err - } - - return json.Marshal(struct { - ViewContentsUpdate - ViewProperties json.RawMessage `json:"properties,omitempty"` - }{ - ViewContentsUpdate: u.ViewContentsUpdate, - ViewProperties: vis, - }) -} - -// LinePlusSingleStatProperties represents options for line plus single stat view in Chronograf -type LinePlusSingleStatProperties struct { - Queries []DashboardQuery `json:"queries"` - Axes map[string]Axis `json:"axes"` - Type string `json:"type"` - Legend Legend `json:"legend"` - ViewColors []ViewColor `json:"colors"` - Prefix string `json:"prefix"` - Suffix string `json:"suffix"` - DecimalPlaces DecimalPlaces `json:"decimalPlaces"` - Note string `json:"note"` - ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` -} - -// XYViewProperties represents options for line, bar, step, or stacked view in Chronograf -type XYViewProperties struct { - Queries []DashboardQuery `json:"queries"` - Axes map[string]Axis `json:"axes"` - Type string `json:"type"` - Legend Legend `json:"legend"` - Geom string `json:"geom"` // Either "line", "step", "stacked", or "bar" - ViewColors []ViewColor `json:"colors"` - Note string `json:"note"` - ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` -} - -// SingleStatViewProperties represents options for single stat view in Chronograf -type SingleStatViewProperties struct { - Type string `json:"type"` - Queries []DashboardQuery `json:"queries"` - Prefix string `json:"prefix"` - Suffix string `json:"suffix"` - ViewColors []ViewColor `json:"colors"` - DecimalPlaces DecimalPlaces `json:"decimalPlaces"` - Note string `json:"note"` - ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` -} - -// HistogramViewProperties represents options for histogram view in Chronograf -type HistogramViewProperties struct { - Type string `json:"type"` - Queries []DashboardQuery `json:"queries"` - ViewColors []ViewColor `json:"colors"` - XColumn string `json:"xColumn"` - FillColumns []string `json:"fillColumns"` - XDomain []float64 `json:"xDomain,omitEmpty"` - XAxisLabel string `json:"xAxisLabel"` - Position string `json:"position"` - BinCount int `json:"binCount"` - Note string `json:"note"` - ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` -} - -// GaugeViewProperties represents options for gauge view in Chronograf -type GaugeViewProperties struct { - Type string `json:"type"` - Queries []DashboardQuery `json:"queries"` - Prefix string `json:"prefix"` - Suffix string `json:"suffix"` - ViewColors []ViewColor `json:"colors"` - DecimalPlaces DecimalPlaces `json:"decimalPlaces"` - Note string `json:"note"` - ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` -} - -// TableViewProperties represents options for table view in Chronograf -type TableViewProperties struct { - Type string `json:"type"` - Queries []DashboardQuery `json:"queries"` - ViewColors []ViewColor `json:"colors"` - TableOptions TableOptions `json:"tableOptions"` - FieldOptions []RenamableField `json:"fieldOptions"` - TimeFormat string `json:"timeFormat"` - DecimalPlaces DecimalPlaces `json:"decimalPlaces"` - Note string `json:"note"` - ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` -} - -type MarkdownViewProperties struct { - Type string `json:"type"` - Note string `json:"note"` -} - -// LogViewProperties represents options for log viewer in Chronograf. 
-type LogViewProperties struct { - Type string `json:"type"` - Columns []LogViewerColumn `json:"columns"` -} - -// LogViewerColumn represents a specific column in a Log Viewer. -type LogViewerColumn struct { - Name string `json:"name"` - Position int32 `json:"position"` - Settings []LogColumnSetting `json:"settings"` -} - -// LogColumnSetting represent the settings for a specific column of a Log Viewer. -type LogColumnSetting struct { - Type string `json:"type"` - Value string `json:"value"` - Name string `json:"name,omitempty"` -} - -func (XYViewProperties) viewProperties() {} -func (LinePlusSingleStatProperties) viewProperties() {} -func (SingleStatViewProperties) viewProperties() {} -func (HistogramViewProperties) viewProperties() {} -func (GaugeViewProperties) viewProperties() {} -func (TableViewProperties) viewProperties() {} -func (MarkdownViewProperties) viewProperties() {} -func (LogViewProperties) viewProperties() {} - -func (v XYViewProperties) GetType() string { return v.Type } -func (v LinePlusSingleStatProperties) GetType() string { return v.Type } -func (v SingleStatViewProperties) GetType() string { return v.Type } -func (v HistogramViewProperties) GetType() string { return v.Type } -func (v GaugeViewProperties) GetType() string { return v.Type } -func (v TableViewProperties) GetType() string { return v.Type } -func (v MarkdownViewProperties) GetType() string { return v.Type } -func (v LogViewProperties) GetType() string { return v.Type } - -///////////////////////////// -// Old Chronograf Types -///////////////////////////// - -// DashboardQuery represents a query used in a dashboard cell -type DashboardQuery struct { - Text string `json:"text"` - EditMode string `json:"editMode"` // Either "builder" or "advanced" - Name string `json:"name"` // Term or phrase that refers to the query - BuilderConfig BuilderConfig `json:"builderConfig"` -} - -type BuilderConfig struct { - Buckets []string `json:"buckets"` - Tags []struct { - Key string `json:"key"` - Values []string `json:"values"` - } `json:"tags"` - Functions []struct { - Name string `json:"name"` - } `json:"functions"` -} - -// Axis represents the visible extents of a visualization -type Axis struct { - Bounds []string `json:"bounds"` // bounds are an arbitrary list of client-defined strings that specify the viewport for a View - LegacyBounds [2]int64 `json:"-"` // legacy bounds are for testing a migration from an earlier version of axis - Label string `json:"label"` // label is a description of this Axis - Prefix string `json:"prefix"` // Prefix represents a label prefix for formatting axis values - Suffix string `json:"suffix"` // Suffix represents a label suffix for formatting axis values - Base string `json:"base"` // Base represents the radix for formatting axis values - Scale string `json:"scale"` // Scale is the axis formatting scale. Supported: "log", "linear" -} - -// ViewColor represents the encoding of data into visualizations -type ViewColor struct { - ID string `json:"id"` // ID is the unique id of the View color - Type string `json:"type"` // Type is how the color is used. 
Accepted (min,max,threshold) - Hex string `json:"hex"` // Hex is the hex number of the color - Name string `json:"name"` // Name is the user-facing name of the hex color - Value float64 `json:"value"` // Value is the data value mapped to this color -} - -// Legend represents the encoding of data into a legend -type Legend struct { - Type string `json:"type,omitempty"` - Orientation string `json:"orientation,omitempty"` -} - -// TableOptions is a type of options for a DashboardView with type Table -type TableOptions struct { - VerticalTimeAxis bool `json:"verticalTimeAxis"` - SortBy RenamableField `json:"sortBy"` - Wrapping string `json:"wrapping"` - FixFirstColumn bool `json:"fixFirstColumn"` -} - -// RenamableField is a column/row field in a DashboardView of type Table -type RenamableField struct { - InternalName string `json:"internalName"` - DisplayName string `json:"displayName"` - Visible bool `json:"visible"` -} - -// DecimalPlaces indicates whether decimal places should be enforced, and how many digits it should show. -type DecimalPlaces struct { - IsEnforced bool `json:"isEnforced"` - Digits int32 `json:"digits"` -} diff --git a/zap/auth_service.go b/zap/auth_service.go index a4307c59f5..e651d5e7d2 100644 --- a/zap/auth_service.go +++ b/zap/auth_service.go @@ -70,13 +70,13 @@ func (s *AuthorizationService) DeleteAuthorization(ctx context.Context, id platf return s.AuthorizationService.DeleteAuthorization(ctx, id) } -// SetAuthorizationStatus updates an authorization's status and logs any errors. -func (s *AuthorizationService) SetAuthorizationStatus(ctx context.Context, id platform.ID, status platform.Status) (err error) { +// UpdateAuthorization updates an authorization's status and description, and logs any errors. +func (s *AuthorizationService) UpdateAuthorization(ctx context.Context, id platform.ID, upd *platform.AuthorizationUpdate) (err error) { defer func() { if err != nil { s.Logger.Info("error updating authorization", zap.Error(err)) } }() - return s.AuthorizationService.SetAuthorizationStatus(ctx, id, status) + return s.AuthorizationService.UpdateAuthorization(ctx, id, upd) }
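For reference, the zap/auth_service.go change above follows a simple decorator pattern: the wrapper holds the underlying service plus a logger, delegates the call, and uses a deferred closure over a named error return to log any failure before passing it back to the caller. Below is a minimal, self-contained sketch of that pattern, not code from this patch; AuthUpdater, LoggingAuthUpdater, and failingUpdater are hypothetical names introduced only for illustration, with go.uber.org/zap as the only real dependency.

package main

import (
	"context"
	"errors"

	"go.uber.org/zap"
)

// AuthUpdater is a hypothetical, trimmed-down stand-in for the update
// portion of an authorization service interface.
type AuthUpdater interface {
	UpdateAuthorization(ctx context.Context, id string, status, description string) error
}

// LoggingAuthUpdater mirrors the wrapper shown in the diff: it delegates to an
// inner service and logs any error on the way out.
type LoggingAuthUpdater struct {
	inner  AuthUpdater
	logger *zap.Logger
}

func (s *LoggingAuthUpdater) UpdateAuthorization(ctx context.Context, id string, status, description string) (err error) {
	// The named return value is what lets the deferred closure observe the
	// error produced by the delegated call.
	defer func() {
		if err != nil {
			s.logger.Info("error updating authorization", zap.Error(err))
		}
	}()
	return s.inner.UpdateAuthorization(ctx, id, status, description)
}

// failingUpdater always returns an error so the logging path is exercised.
type failingUpdater struct{}

func (failingUpdater) UpdateAuthorization(context.Context, string, string, string) error {
	return errors.New("authorization not found")
}

func main() {
	logger, _ := zap.NewDevelopment()
	defer logger.Sync()

	svc := &LoggingAuthUpdater{inner: failingUpdater{}, logger: logger}
	_ = svc.UpdateAuthorization(context.Background(), "0000000000000001", "inactive", "rotated token")
}

Keeping the logging in a wrapper rather than in each storage implementation leaves the inner service free of logging concerns; the DeleteAuthorization wrapper in the same hunk delegates in the same way.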