diff --git a/CHANGELOG.md b/CHANGELOG.md
index dd05f637e..d5283a000 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,12 +2,22 @@
 ### Bug Fixes
 1. [#1104](https://github.com/influxdata/chronograf/pull/1104): Fix windows hosts on host list
+1. [#1125](https://github.com/influxdata/chronograf/pull/1125): Fix visualizations not showing graph name
+1. [#1133](https://github.com/influxdata/chronograf/issues/1133): Fix Enterprise Kapacitor authentication
+1. [#1142](https://github.com/influxdata/chronograf/issues/1142): Fix Kapacitor Telegram config to display correct disableNotification setting
 
 ### Features
 1. [#1112](https://github.com/influxdata/chronograf/pull/1112): Add ability to delete a dashboard
+1. [#1120](https://github.com/influxdata/chronograf/pull/1120): Allow users to update user passwords
+1. [#1129](https://github.com/influxdata/chronograf/pull/1129): Allow InfluxDB and Kapacitor configuration via ENV vars or CLI options
+1. [#1130](https://github.com/influxdata/chronograf/pull/1130): Add loading spinner to Alert History page
 
 ### UI Improvements
 1. [#1101](https://github.com/influxdata/chronograf/pull/1101): Compress InfluxQL responses with gzip
+1. [#1132](https://github.com/influxdata/chronograf/pull/1132): All sidebar items show activity with a blue strip
+1. [#1135](https://github.com/influxdata/chronograf/pull/1135): Clarify Kapacitor Alert configuration for Telegram
+1. [#1137](https://github.com/influxdata/chronograf/pull/1137): Clarify Kapacitor Alert configuration for HipChat
+1. [#1079](https://github.com/influxdata/chronograf/issues/1079): Remove series highlighting in line graphs
 
 ## v1.2.0-beta7 [2017-03-28]
 
 ### Bug Fixes
diff --git a/LICENSE_OF_DEPENDENCIES.md b/LICENSE_OF_DEPENDENCIES.md
index b9ed04af0..03e0778bd 100644
--- a/LICENSE_OF_DEPENDENCIES.md
+++ b/LICENSE_OF_DEPENDENCIES.md
@@ -891,6 +891,7 @@
 * rimraf 2.5.3 [ISC](http://github.com/isaacs/rimraf)
 * rimraf 2.5.4 [ISC](http://github.com/isaacs/rimraf)
 * ripemd160 0.2.0 [Unknown](https://github.com/cryptocoinjs/ripemd160)
+* rome 2.1.22 [MIT](https://github.com/bevacqua/rome)
 * run-async 0.1.0 [MIT](http://github.com/SBoudrias/run-async)
 * rx-lite 3.1.2 [Apache License](https://github.com/Reactive-Extensions/RxJS)
 * samsam 1.1.2 [BSD](https://github.com/busterjs/samsam)
diff --git a/chronograf.go b/chronograf.go
index 021877f1d..83a246703 100644
--- a/chronograf.go
+++ b/chronograf.go
@@ -137,7 +137,7 @@ type Response interface {
 
 // Source is connection information to a time-series data store.
 type Source struct {
-	ID       int    `json:"id,omitempty,string"` // ID is the unique ID of the source
+	ID       int    `json:"id,string"`           // ID is the unique ID of the source
 	Name     string `json:"name"`                // Name is the user-defined name for the source
 	Type     string `json:"type,omitempty"`      // Type specifies which kinds of source (enterprise vs oss)
 	Username string `json:"username,omitempty"`  // Username is the username to connect to the source
diff --git a/memdb/kapacitors.go b/memdb/kapacitors.go
new file mode 100644
index 000000000..83f1fed32
--- /dev/null
+++ b/memdb/kapacitors.go
@@ -0,0 +1,144 @@
+package memdb
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/influxdata/chronograf"
+)
+
+// Ensure KapacitorStore and MultiKapacitorStore implement chronograf.ServersStore.
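+// These are compile-time assertions: the build fails if either type stops
+// satisfying the chronograf.ServersStore interface.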
+var _ chronograf.ServersStore = &KapacitorStore{}
+var _ chronograf.ServersStore = &MultiKapacitorStore{}
+
+// KapacitorStore implements the chronograf.ServersStore interface, and keeps
+// the in-memory Kapacitor provided by startup configuration
+type KapacitorStore struct {
+	Kapacitor *chronograf.Server
+}
+
+// All will return a slice containing the configured Kapacitor
+func (store *KapacitorStore) All(ctx context.Context) ([]chronograf.Server, error) {
+	if store.Kapacitor != nil {
+		return []chronograf.Server{*store.Kapacitor}, nil
+	}
+	return nil, nil
+}
+
+// Add is not supported and always returns an error
+func (store *KapacitorStore) Add(ctx context.Context, kap chronograf.Server) (chronograf.Server, error) {
+	return chronograf.Server{}, fmt.Errorf("In-memory KapacitorStore does not support adding a Kapacitor")
+}
+
+// Delete removes the in-memory configured Kapacitor if its ID matches what's provided
+func (store *KapacitorStore) Delete(ctx context.Context, kap chronograf.Server) error {
+	if store.Kapacitor == nil || store.Kapacitor.ID != kap.ID {
+		return fmt.Errorf("Unable to find Kapacitor with id %d", kap.ID)
+	}
+	store.Kapacitor = nil
+	return nil
+}
+
+// Get returns the in-memory Kapacitor if its ID matches what's provided
+func (store *KapacitorStore) Get(ctx context.Context, id int) (chronograf.Server, error) {
+	if store.Kapacitor == nil || store.Kapacitor.ID != id {
+		return chronograf.Server{}, fmt.Errorf("Unable to find Kapacitor with id %d", id)
+	}
+	return *store.Kapacitor, nil
+}
+
+// Update overwrites the in-memory configured Kapacitor if its ID matches what's provided
+func (store *KapacitorStore) Update(ctx context.Context, kap chronograf.Server) error {
+	if store.Kapacitor == nil || store.Kapacitor.ID != kap.ID {
+		return fmt.Errorf("Unable to find Kapacitor with id %d", kap.ID)
+	}
+	store.Kapacitor = &kap
+	return nil
+}
+
+// MultiKapacitorStore implements the chronograf.ServersStore interface, and
+// delegates to all contained KapacitorStores
+type MultiKapacitorStore struct {
+	Stores []chronograf.ServersStore
+}
+
+// All concatenates the Kapacitors of all contained Stores
+func (multi *MultiKapacitorStore) All(ctx context.Context) ([]chronograf.Server, error) {
+	all := []chronograf.Server{}
+	kapSet := map[int]struct{}{}
+
+	ok := false
+	var err error
+	for _, store := range multi.Stores {
+		var kaps []chronograf.Server
+		kaps, err = store.All(ctx)
+		if err != nil {
+			// If this Store is unable to return an array of kapacitors, skip to the
+			// next Store.
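+			// The error is kept so that, if every Store fails, the most
+			// recent one can be returned to the caller below.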
+			continue
+		}
+		ok = true // We've received a response from at least one Store
+		for _, kap := range kaps {
+			// Enforce that the kapacitor has a unique ID
+			// If the ID has been seen before, ignore the kapacitor
+			if _, okay := kapSet[kap.ID]; !okay { // We have a new kapacitor
+				kapSet[kap.ID] = struct{}{} // We just care that the ID is unique
+				all = append(all, kap)
+			}
+		}
+	}
+	if !ok {
+		return nil, err
+	}
+	return all, nil
+}
+
+// Add the kap to the first responsive Store
+func (multi *MultiKapacitorStore) Add(ctx context.Context, kap chronograf.Server) (chronograf.Server, error) {
+	var err error
+	for _, store := range multi.Stores {
+		var k chronograf.Server
+		k, err = store.Add(ctx, kap)
+		if err == nil {
+			return k, nil
+		}
+	}
+	return chronograf.Server{}, err
+}
+
+// Delete delegates to all Stores, returns success if one Store is successful
+func (multi *MultiKapacitorStore) Delete(ctx context.Context, kap chronograf.Server) error {
+	var err error
+	for _, store := range multi.Stores {
+		err = store.Delete(ctx, kap)
+		if err == nil {
+			return nil
+		}
+	}
+	return err
+}
+
+// Get finds the Server by id among all contained Stores
+func (multi *MultiKapacitorStore) Get(ctx context.Context, id int) (chronograf.Server, error) {
+	var err error
+	for _, store := range multi.Stores {
+		var k chronograf.Server
+		k, err = store.Get(ctx, id)
+		if err == nil {
+			return k, nil
+		}
+	}
+	return chronograf.Server{}, err
+}
+
+// Update the first responsive Store
+func (multi *MultiKapacitorStore) Update(ctx context.Context, kap chronograf.Server) error {
+	var err error
+	for _, store := range multi.Stores {
+		err = store.Update(ctx, kap)
+		if err == nil {
+			return nil
+		}
+	}
+	return err
+}
diff --git a/memdb/kapacitors_test.go b/memdb/kapacitors_test.go
new file mode 100644
index 000000000..393900d35
--- /dev/null
+++ b/memdb/kapacitors_test.go
@@ -0,0 +1,129 @@
+package memdb
+
+import (
+	"context"
+	"testing"
+
+	"github.com/influxdata/chronograf"
+)
+
+func TestInterfaceImplementation(t *testing.T) {
+	var _ chronograf.ServersStore = &KapacitorStore{}
+	var _ chronograf.ServersStore = &MultiKapacitorStore{}
+}
+
+func TestKapacitorStoreAll(t *testing.T) {
+	ctx := context.Background()
+
+	store := KapacitorStore{}
+	kaps, err := store.All(ctx)
+	if err != nil {
+		t.Fatal("All should not throw an error with an empty Store")
+	}
+	if len(kaps) != 0 {
+		t.Fatal("Store should be empty")
+	}
+
+	store.Kapacitor = &chronograf.Server{}
+	kaps, err = store.All(ctx)
+	if err != nil {
+		t.Fatal("All should not throw an error with a non-empty Store")
+	}
+	if len(kaps) != 1 {
+		t.Fatal("Store should have 1 element")
+	}
+}
+
+func TestKapacitorStoreAdd(t *testing.T) {
+	ctx := context.Background()
+
+	store := KapacitorStore{}
+	_, err := store.Add(ctx, chronograf.Server{})
+	if err == nil {
+		t.Fatal("Store should not support adding another Kapacitor")
+	}
+}
+
+func TestKapacitorStoreDelete(t *testing.T) {
+	ctx := context.Background()
+
+	store := KapacitorStore{}
+	err := store.Delete(ctx, chronograf.Server{})
+	if err == nil {
+		t.Fatal("Delete should not operate on an empty Store")
+	}
+
+	store.Kapacitor = &chronograf.Server{
+		ID: 9,
+	}
+	err = store.Delete(ctx, chronograf.Server{
+		ID: 8,
+	})
+	if err == nil {
+		t.Fatal("Delete should not remove elements with the wrong ID")
+	}
+
+	err = store.Delete(ctx, chronograf.Server{
+		ID: 9,
+	})
+	if err != nil {
+		t.Fatal("Delete should remove an element with a matching ID")
+	}
+}
+
+func TestKapacitorStoreGet(t *testing.T) {
+	ctx := context.Background()
+
+	store := KapacitorStore{}
+	_, err := store.Get(ctx, 9)
+	if err == nil {
+		t.Fatal("Get should return an error for an empty Store")
+	}
+
+	store.Kapacitor = &chronograf.Server{
+		ID: 9,
+	}
+	_, err = store.Get(ctx, 8)
+	if err == nil {
+		t.Fatal("Get should return an error if it finds no matches")
+	}
+
+	store.Kapacitor = &chronograf.Server{
+		ID: 9,
+	}
+	kap, err := store.Get(ctx, 9)
+	if err != nil || kap.ID != 9 {
+		t.Fatal("Get should find the element with a matching ID")
+	}
+}
+
+func TestKapacitorStoreUpdate(t *testing.T) {
+	ctx := context.Background()
+
+	store := KapacitorStore{}
+	err := store.Update(ctx, chronograf.Server{})
+	if err == nil {
+		t.Fatal("Update should return an error for an empty Store")
+	}
+
+	store.Kapacitor = &chronograf.Server{
+		ID: 9,
+	}
+	err = store.Update(ctx, chronograf.Server{
+		ID: 8,
+	})
+	if err == nil {
+		t.Fatal("Update should return an error if it finds no matches")
+	}
+
+	store.Kapacitor = &chronograf.Server{
+		ID: 9,
+	}
+	err = store.Update(ctx, chronograf.Server{
+		ID:  9,
+		URL: "http://crystal.pepsi.com",
+	})
+	if err != nil || store.Kapacitor.URL != "http://crystal.pepsi.com" {
+		t.Fatal("Update should overwrite elements with matching IDs")
+	}
+}
diff --git a/memdb/sources.go b/memdb/sources.go
new file mode 100644
index 000000000..4f1036335
--- /dev/null
+++ b/memdb/sources.go
@@ -0,0 +1,142 @@
+package memdb
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/influxdata/chronograf"
+)
+
+// Ensure MultiSourcesStore and SourcesStore implement chronograf.SourcesStore.
+var _ chronograf.SourcesStore = &SourcesStore{}
+var _ chronograf.SourcesStore = &MultiSourcesStore{}
+
+// MultiSourcesStore delegates to the SourcesStores that compose it
+type MultiSourcesStore struct {
+	Stores []chronograf.SourcesStore
+}
+
+// All concatenates the Sources of all contained Stores
+func (multi *MultiSourcesStore) All(ctx context.Context) ([]chronograf.Source, error) {
+	all := []chronograf.Source{}
+	sourceSet := map[int]struct{}{}
+
+	ok := false
+	var err error
+	for _, store := range multi.Stores {
+		var sources []chronograf.Source
+		sources, err = store.All(ctx)
+		if err != nil {
+			// If this Store is unable to return an array of sources, skip to the
+			// next Store.
+			continue
+		}
+		ok = true // We've received a response from at least one Store
+		for _, s := range sources {
+			// Enforce that the source has a unique ID
+			// If the source has been seen before, don't override what we already have
+			if _, okay := sourceSet[s.ID]; !okay { // We have a new Source!
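+				// Stores earlier in multi.Stores take precedence: their IDs
+				// are recorded first, so duplicates from later Stores are dropped.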
+				sourceSet[s.ID] = struct{}{} // We just care that the ID is unique
+				all = append(all, s)
+			}
+		}
+	}
+	if !ok {
+		return nil, err
+	}
+	return all, nil
+}
+
+// Add the src to the first Store to respond successfully
+func (multi *MultiSourcesStore) Add(ctx context.Context, src chronograf.Source) (chronograf.Source, error) {
+	var err error
+	for _, store := range multi.Stores {
+		var s chronograf.Source
+		s, err = store.Add(ctx, src)
+		if err == nil {
+			return s, nil
+		}
+	}
+	return chronograf.Source{}, err
+}
+
+// Delete delegates to all stores, returns success if one Store is successful
+func (multi *MultiSourcesStore) Delete(ctx context.Context, src chronograf.Source) error {
+	var err error
+	for _, store := range multi.Stores {
+		err = store.Delete(ctx, src)
+		if err == nil {
+			return nil
+		}
+	}
+	return err
+}
+
+// Get finds the Source by id among all contained Stores
+func (multi *MultiSourcesStore) Get(ctx context.Context, id int) (chronograf.Source, error) {
+	var err error
+	for _, store := range multi.Stores {
+		var s chronograf.Source
+		s, err = store.Get(ctx, id)
+		if err == nil {
+			return s, nil
+		}
+	}
+	return chronograf.Source{}, err
+}
+
+// Update the first store to return a successful response
+func (multi *MultiSourcesStore) Update(ctx context.Context, src chronograf.Source) error {
+	var err error
+	for _, store := range multi.Stores {
+		err = store.Update(ctx, src)
+		if err == nil {
+			return nil
+		}
+	}
+	return err
+}
+
+// SourcesStore implements the chronograf.SourcesStore interface
+type SourcesStore struct {
+	Source *chronograf.Source
+}
+
+// Add is not supported and always returns an error
+func (store *SourcesStore) Add(ctx context.Context, src chronograf.Source) (chronograf.Source, error) {
+	return chronograf.Source{}, fmt.Errorf("In-memory SourcesStore does not support adding a Source")
+}
+
+// All will return a slice containing the configured source
+func (store *SourcesStore) All(ctx context.Context) ([]chronograf.Source, error) {
+	if store.Source != nil {
+		return []chronograf.Source{*store.Source}, nil
+	}
+	return nil, nil
+}
+
+// Delete removes the SourcesStore.Source if it matches the provided Source
+func (store *SourcesStore) Delete(ctx context.Context, src chronograf.Source) error {
+	if store.Source == nil || store.Source.ID != src.ID {
+		return fmt.Errorf("Unable to find Source with id %d", src.ID)
+	}
+	store.Source = nil
+	return nil
+}
+
+// Get returns the configured source if the id matches
+func (store *SourcesStore) Get(ctx context.Context, id int) (chronograf.Source, error) {
+	if store.Source == nil || store.Source.ID != id {
+		return chronograf.Source{}, fmt.Errorf("Unable to find Source with id %d", id)
+	}
+	return *store.Source, nil
+}
+
+// Update overwrites the configured source if its ID matches what's provided
+func (store *SourcesStore) Update(ctx context.Context, src chronograf.Source) error {
+	if store.Source == nil || store.Source.ID != src.ID {
+		return fmt.Errorf("Unable to find Source with id %d", src.ID)
+	}
+	store.Source = &src
+	return nil
+}
diff --git a/memdb/sources_test.go b/memdb/sources_test.go
new file mode 100644
index 000000000..c4fd861e0
--- /dev/null
+++ b/memdb/sources_test.go
@@ -0,0 +1,128 @@
+package memdb
+
+import (
+	"context"
+	"testing"
+
+	"github.com/influxdata/chronograf"
+)
+
+func TestSourcesStore(t *testing.T) {
+	var _ chronograf.SourcesStore = &SourcesStore{}
+}
+
+func TestSourcesStoreAdd(t *testing.T) {
+	ctx := context.Background()
+
+	store := SourcesStore{}
+	_, err := store.Add(ctx, chronograf.Source{})
+	if err == nil {
+		t.Fatal("Store should not support adding another source")
+	}
+}
+
+func TestSourcesStoreAll(t *testing.T) {
+	ctx := context.Background()
+
+	store := SourcesStore{}
+	srcs, err := store.All(ctx)
+	if err != nil {
+		t.Fatal("All should not throw an error with an empty Store")
+	}
+	if len(srcs) != 0 {
+		t.Fatal("Store should be empty")
+	}
+
+	store.Source = &chronograf.Source{}
+	srcs, err = store.All(ctx)
+	if err != nil {
+		t.Fatal("All should not throw an error with a non-empty Store")
+	}
+	if len(srcs) != 1 {
+		t.Fatal("Store should have 1 element")
+	}
+}
+
+func TestSourcesStoreDelete(t *testing.T) {
+	ctx := context.Background()
+
+	store := SourcesStore{}
+	err := store.Delete(ctx, chronograf.Source{})
+	if err == nil {
+		t.Fatal("Delete should not operate on an empty Store")
+	}
+
+	store.Source = &chronograf.Source{
+		ID: 9,
+	}
+	err = store.Delete(ctx, chronograf.Source{
+		ID: 8,
+	})
+	if err == nil {
+		t.Fatal("Delete should not remove elements with the wrong ID")
+	}
+
+	err = store.Delete(ctx, chronograf.Source{
+		ID: 9,
+	})
+	if err != nil {
+		t.Fatal("Delete should remove an element with a matching ID")
+	}
+}
+
+func TestSourcesStoreGet(t *testing.T) {
+	ctx := context.Background()
+
+	store := SourcesStore{}
+	_, err := store.Get(ctx, 9)
+	if err == nil {
+		t.Fatal("Get should return an error for an empty Store")
+	}
+
+	store.Source = &chronograf.Source{
+		ID: 9,
+	}
+	_, err = store.Get(ctx, 8)
+	if err == nil {
+		t.Fatal("Get should return an error if it finds no matches")
+	}
+
+	store.Source = &chronograf.Source{
+		ID: 9,
+	}
+	src, err := store.Get(ctx, 9)
+	if err != nil || src.ID != 9 {
+		t.Fatal("Get should find the element with a matching ID")
+	}
+}
+
+func TestSourcesStoreUpdate(t *testing.T) {
+	ctx := context.Background()
+
+	store := SourcesStore{}
+	err := store.Update(ctx, chronograf.Source{})
+	if err == nil {
+		t.Fatal("Update should return an error for an empty Store")
+	}
+
+	store.Source = &chronograf.Source{
+		ID: 9,
+	}
+	err = store.Update(ctx, chronograf.Source{
+		ID: 8,
+	})
+	if err == nil {
+		t.Fatal("Update should return an error if it finds no matches")
+	}
+
+	store.Source = &chronograf.Source{
+		ID: 9,
+	}
+	err = store.Update(ctx, chronograf.Source{
+		ID:  9,
+		URL: "http://crystal.pepsi.com",
+	})
+	if err != nil || store.Source.URL != "http://crystal.pepsi.com" {
+		t.Fatal("Update should overwrite elements with matching IDs")
+	}
+}
diff --git a/server/builders.go b/server/builders.go
new file mode 100644
index 000000000..57a43eb84
--- /dev/null
+++ b/server/builders.go
@@ -0,0 +1,113 @@
+package server
+
+import (
+	"github.com/influxdata/chronograf"
+	"github.com/influxdata/chronograf/canned"
+	"github.com/influxdata/chronograf/layouts"
+	"github.com/influxdata/chronograf/memdb"
+)
+
+// LayoutBuilder is responsible for building Layouts
+type LayoutBuilder interface {
+	Build(chronograf.LayoutStore) (*layouts.MultiLayoutStore, error)
+}
+
+// MultiLayoutBuilder implements LayoutBuilder and will return a MultiLayoutStore
+type MultiLayoutBuilder struct {
+	Logger     chronograf.Logger
+	UUID       chronograf.ID
+	CannedPath string
+}
+
+// Build will construct a MultiLayoutStore of canned and db-backed personalized
+// layouts
+func (builder *MultiLayoutBuilder) Build(db chronograf.LayoutStore) (*layouts.MultiLayoutStore, error) {
+	// These apps are those handled from a directory
+	apps := canned.NewApps(builder.CannedPath, builder.UUID, builder.Logger)
+	// These apps are statically compiled into chronograf
+	binApps := &canned.BinLayoutStore{
+		Logger: builder.Logger,
+	}
+	// Acts as a front-end to the bolt layouts, filesystem layouts, and
+	// statically compiled binary layouts. The idea here is that these stores
+	// form a hierarchy in which each is tried sequentially until an operation
+	// succeeds. So, the database is preferred over the filesystem, which is
+	// preferred over the binary data.
+	layouts := &layouts.MultiLayoutStore{
+		Stores: []chronograf.LayoutStore{
+			db,
+			apps,
+			binApps,
+		},
+	}
+
+	return layouts, nil
+}
+
+// SourcesBuilder builds a MultiSourcesStore
+type SourcesBuilder interface {
+	Build(chronograf.SourcesStore) (*memdb.MultiSourcesStore, error)
+}
+
+// MultiSourceBuilder implements SourcesBuilder
+type MultiSourceBuilder struct {
+	InfluxDBURL      string
+	InfluxDBUsername string
+	InfluxDBPassword string
+}
+
+// Build will return a MultiSourcesStore
+func (fs *MultiSourceBuilder) Build(db chronograf.SourcesStore) (*memdb.MultiSourcesStore, error) {
+	stores := []chronograf.SourcesStore{db}
+
+	if fs.InfluxDBURL != "" {
+		influxStore := &memdb.SourcesStore{
+			Source: &chronograf.Source{
+				ID:       0,
+				Name:     fs.InfluxDBURL,
+				Type:     chronograf.InfluxDB,
+				Username: fs.InfluxDBUsername,
+				Password: fs.InfluxDBPassword,
+				URL:      fs.InfluxDBURL,
+				Default:  true,
+			}}
+		stores = append([]chronograf.SourcesStore{influxStore}, stores...)
+	}
+	sources := &memdb.MultiSourcesStore{
+		Stores: stores,
+	}
+
+	return sources, nil
+}
+
+// KapacitorBuilder builds a MultiKapacitorStore
+type KapacitorBuilder interface {
+	Build(chronograf.ServersStore) (*memdb.MultiKapacitorStore, error)
+}
+
+// MultiKapacitorBuilder implements KapacitorBuilder
+type MultiKapacitorBuilder struct {
+	KapacitorURL      string
+	KapacitorUsername string
+	KapacitorPassword string
+}
+
+// Build will return a MultiKapacitorStore
+func (builder *MultiKapacitorBuilder) Build(db chronograf.ServersStore) (*memdb.MultiKapacitorStore, error) {
+	stores := []chronograf.ServersStore{db}
+	if builder.KapacitorURL != "" {
+		memStore := &memdb.KapacitorStore{
+			Kapacitor: &chronograf.Server{
+				ID:       0,
+				SrcID:    0,
+				Name:     builder.KapacitorURL,
+				URL:      builder.KapacitorURL,
+				Username: builder.KapacitorUsername,
+				Password: builder.KapacitorPassword,
+			},
+		}
+		stores = append([]chronograf.ServersStore{memStore}, stores...)
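+		// Prepend the in-memory store so the CLI/ENV-configured Kapacitor is
+		// consulted before the bolt-backed store.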
+ } + kapacitors := &memdb.MultiKapacitorStore{ + Stores: stores, + } + return kapacitors, nil +} diff --git a/server/proxy.go b/server/proxy.go index cf1cd155d..5e1a0fa85 100644 --- a/server/proxy.go +++ b/server/proxy.go @@ -1,7 +1,6 @@ package server import ( - "encoding/base64" "fmt" "net/http" "net/http/httputil" @@ -49,9 +48,7 @@ func (h *Service) KapacitorProxy(w http.ResponseWriter, r *http.Request) { // Because we are acting as a proxy, kapacitor needs to have the basic auth information set as // a header directly if srv.Username != "" && srv.Password != "" { - auth := "Basic " + srv.Username + ":" + srv.Password - header := base64.StdEncoding.EncodeToString([]byte(auth)) - req.Header.Set("Authorization", header) + req.SetBasicAuth(srv.Username, srv.Password) } } proxy := &httputil.ReverseProxy{ diff --git a/server/server.go b/server/server.go index a19a0c077..4efb88123 100644 --- a/server/server.go +++ b/server/server.go @@ -14,15 +14,13 @@ import ( "github.com/influxdata/chronograf" "github.com/influxdata/chronograf/bolt" - "github.com/influxdata/chronograf/canned" - "github.com/influxdata/chronograf/layouts" + "github.com/influxdata/chronograf/influx" clog "github.com/influxdata/chronograf/log" "github.com/influxdata/chronograf/oauth2" "github.com/influxdata/chronograf/uuid" client "github.com/influxdata/usage-client/v1" flags "github.com/jessevdk/go-flags" "github.com/tylerb/graceful" - "github.com/influxdata/chronograf/influx" ) var ( @@ -42,6 +40,14 @@ type Server struct { Cert flags.Filename `long:"cert" description:"Path to PEM encoded public key certificate. " env:"TLS_CERTIFICATE"` Key flags.Filename `long:"key" description:"Path to private key associated with given certificate. " env:"TLS_PRIVATE_KEY"` + InfluxDBURL string `long:"influxdb-url" description:"Location of your InfluxDB instance" env:"INFLUXDB_URL"` + InfluxDBUsername string `long:"influxdb-username" description:"Username for your InfluxDB instance" env:"INFLUXDB_USERNAME"` + InfluxDBPassword string `long:"influxdb-password" description:"Password for your InfluxDB instance" env:"INFLUXDB_PASSWORD"` + + KapacitorURL string `long:"kapacitor-url" description:"Location of your Kapacitor instance" env:"KAPACITOR_URL"` + KapacitorUsername string `long:"kapacitor-username" description:"Username of your Kapacitor instance" env:"KAPACITOR_USERNAME"` + KapacitorPassword string `long:"kapacitor-password" description:"Password of your Kapacitor instance" env:"KAPACITOR_PASSWORD"` + Develop bool `short:"d" long:"develop" description:"Run server in develop mode."` BoltPath string `short:"b" long:"bolt-path" description:"Full path to boltDB file (/var/lib/chronograf/chronograf-v1.db)" env:"BOLT_PATH" default:"chronograf-v1.db"` CannedPath string `short:"c" long:"canned-path" description:"Path to directory of pre-canned application layouts (/usr/share/chronograf/canned)" env:"CANNED_PATH" default:"canned"` @@ -180,7 +186,22 @@ func (s *Server) NewListener() (net.Listener, error) { // Serve starts and runs the chronograf server func (s *Server) Serve(ctx context.Context) error { logger := clog.New(clog.ParseLevel(s.LogLevel)) - service := openService(ctx, s.BoltPath, s.CannedPath, logger, s.useAuth()) + layoutBuilder := &MultiLayoutBuilder{ + Logger: logger, + UUID: &uuid.V4{}, + CannedPath: s.CannedPath, + } + sourcesBuilder := &MultiSourceBuilder{ + InfluxDBURL: s.InfluxDBURL, + InfluxDBUsername: s.InfluxDBUsername, + InfluxDBPassword: s.InfluxDBPassword, + } + kapacitorBuilder := &MultiKapacitorBuilder{ + KapacitorURL: 
s.KapacitorURL, + KapacitorUsername: s.KapacitorUsername, + KapacitorPassword: s.KapacitorPassword, + } + service := openService(ctx, s.BoltPath, layoutBuilder, sourcesBuilder, kapacitorBuilder, logger, s.useAuth()) basepath = s.Basepath providerFuncs := []func(func(oauth2.Provider, oauth2.Mux)){} @@ -256,7 +277,7 @@ func (s *Server) Serve(ctx context.Context) error { return nil } -func openService(ctx context.Context, boltPath, cannedPath string, logger chronograf.Logger, useAuth bool) Service { +func openService(ctx context.Context, boltPath string, lBuilder LayoutBuilder, sBuilder SourcesBuilder, kapBuilder KapacitorBuilder, logger chronograf.Logger, useAuth bool) Service { db := bolt.NewClient() db.Path = boltPath if err := db.Open(ctx); err != nil { @@ -266,28 +287,34 @@ func openService(ctx context.Context, boltPath, cannedPath string, logger chrono os.Exit(1) } - // These apps are those handled from a directory - apps := canned.NewApps(cannedPath, &uuid.V4{}, logger) - // These apps are statically compiled into chronograf - binApps := &canned.BinLayoutStore{ - Logger: logger, + layouts, err := lBuilder.Build(db.LayoutStore) + if err != nil { + logger. + WithField("component", "LayoutStore"). + Error("Unable to construct a MultiLayoutStore", err) + os.Exit(1) } - // Acts as a front-end to both the bolt layouts, filesystem layouts and binary statically compiled layouts. - // The idea here is that these stores form a hierarchy in which each is tried sequentially until - // the operation has success. So, the database is preferred over filesystem over binary data. - layouts := &layouts.MultiLayoutStore{ - Stores: []chronograf.LayoutStore{ - db.LayoutStore, - apps, - binApps, - }, + sources, err := sBuilder.Build(db.SourcesStore) + if err != nil { + logger. + WithField("component", "SourcesStore"). + Error("Unable to construct a MultiSourcesStore", err) + os.Exit(1) + } + + kapacitors, err := kapBuilder.Build(db.ServersStore) + if err != nil { + logger. + WithField("component", "KapacitorStore"). 
+ Error("Unable to construct a MultiKapacitorStore", err) + os.Exit(1) } return Service{ TimeSeriesClient: &InfluxClient{}, - SourcesStore: db.SourcesStore, - ServersStore: db.ServersStore, + SourcesStore: sources, + ServersStore: kapacitors, UsersStore: db.UsersStore, LayoutStore: layouts, DashboardsStore: db.DashboardsStore, diff --git a/server/server_test.go b/server/server_test.go new file mode 100644 index 000000000..829f53faf --- /dev/null +++ b/server/server_test.go @@ -0,0 +1,26 @@ +package server + +import "testing" + +func TestLayoutBuilder(t *testing.T) { + var l LayoutBuilder = &MultiLayoutBuilder{} + layout, err := l.Build(nil) + if err != nil { + t.Fatalf("MultiLayoutBuilder can't build a MultiLayoutStore: %v", err) + } + + if layout == nil { + t.Fatal("LayoutBuilder should have built a layout") + } +} + +func TestSourcesStoresBuilder(t *testing.T) { + var b SourcesBuilder = &MultiSourceBuilder{} + sources, err := b.Build(nil) + if err != nil { + t.Fatalf("MultiSourceBuilder can't build a MultiSourcesStore: %v", err) + } + if sources == nil { + t.Fatal("SourcesBuilder should have built a MultiSourceStore") + } +} diff --git a/server/swagger.json b/server/swagger.json index 97965471b..0fec463ee 100644 --- a/server/swagger.json +++ b/server/swagger.json @@ -3010,14 +3010,9 @@ } }, "Roles": { - "type": "object", - "properties": { - "roles": { - "type": "array", - "items": { - "$ref": "#/definitions/Role" - } - } + "type": "array", + "items": { + "$ref": "#/definitions/Role" }, "example": { "roles": [ @@ -3178,6 +3173,9 @@ "permissions": { "$ref": "#/definitions/Permissions" }, + "roles": { + "$ref": "#/definitions/Roles" + }, "links": { "type": "object", "description": "URL relations of this user", diff --git a/ui/package.json b/ui/package.json index a969d8278..ddd544aff 100644 --- a/ui/package.json +++ b/ui/package.json @@ -111,6 +111,7 @@ "react-tooltip": "^3.2.1", "redux": "^3.3.1", "redux-thunk": "^1.0.3", + "rome": "^2.1.22", "updeep": "^0.13.0" } } diff --git a/ui/spec/admin/reducers/adminSpec.js b/ui/spec/admin/reducers/adminSpec.js index 5d8dec75e..ac234be1e 100644 --- a/ui/spec/admin/reducers/adminSpec.js +++ b/ui/spec/admin/reducers/adminSpec.js @@ -30,7 +30,7 @@ import { NEW_EMPTY_RP, } from 'src/admin/constants' -let state = undefined +let state // Users const u1 = { @@ -57,11 +57,11 @@ const u1 = { 'Monitor', 'CopyShard', 'KapacitorAPI', - 'KapacitorConfigAPI' + 'KapacitorConfigAPI', ], scope: 'all', }, - } + }, ], permissions: [], links: {self: '/chronograf/v1/sources/1/users/acidburn'}, @@ -98,16 +98,16 @@ const r1 = { 'Monitor', 'CopyShard', 'KapacitorAPI', - 'KapacitorConfigAPI' + 'KapacitorConfigAPI', ], scope: 'all', }, ], - links: {self: '/chronograf/v1/sources/1/roles/hax0r'} + links: {self: '/chronograf/v1/sources/1/roles/hax0r'}, } const r2 = { name: 'l33tus3r', - links: {self: '/chronograf/v1/sources/1/roles/l33tus3r'} + links: {self: '/chronograf/v1/sources/1/roles/l33tus3r'}, } const roles = [r1, r2] @@ -226,7 +226,7 @@ describe('Admin.Reducers', () => { state = { users: [ u1, - ] + ], } const actual = reducer(state, addUser()) @@ -260,7 +260,7 @@ describe('Admin.Reducers', () => { const actual = reducer(state, editUser(u2, updates)) const expected = { - users: [{...u2, ...updates}, u1] + users: [{...u2, ...updates}, u1], } expect(actual.users).to.deep.equal(expected.users) @@ -270,7 +270,7 @@ describe('Admin.Reducers', () => { state = { roles: [ r1, - ] + ], } const actual = reducer(state, addRole()) @@ -304,7 +304,7 @@ describe('Admin.Reducers', () => 
{ const actual = reducer(state, editRole(r2, updates)) const expected = { - roles: [{...r2, ...updates}, r1] + roles: [{...r2, ...updates}, r1], } expect(actual.roles).to.deep.equal(expected.roles) @@ -323,7 +323,7 @@ describe('Admin.Reducers', () => { state = { roles: [ r1, - ] + ], } const actual = reducer(state, deleteRole(r1)) @@ -338,7 +338,7 @@ describe('Admin.Reducers', () => { state = { users: [ u1, - ] + ], } const actual = reducer(state, deleteUser(u1)) diff --git a/ui/spec/dashboards/reducers/uiSpec.js b/ui/spec/dashboards/reducers/uiSpec.js index dc8897f95..2387a60ac 100644 --- a/ui/spec/dashboards/reducers/uiSpec.js +++ b/ui/spec/dashboards/reducers/uiSpec.js @@ -1,7 +1,7 @@ import _ from 'lodash' import reducer from 'src/dashboards/reducers/ui' -import timeRanges from 'hson!src/shared/data/timeRanges.hson'; +import timeRanges from 'hson!src/shared/data/timeRanges.hson' import { loadDashboards, @@ -19,8 +19,8 @@ const noopAction = () => { return {type: 'NOOP'} } -let state = undefined -const timeRange = timeRanges[1]; +let state +const timeRange = timeRanges[1] const d1 = {id: 1, cells: [], name: "d1"} const d2 = {id: 2, cells: [], name: "d2"} const dashboards = [d1, d2] @@ -117,7 +117,7 @@ describe('DataExplorer.Reducers.UI', () => { const newCell = { x: c1.x, y: c1.y, - name: newCellName + name: newCellName, } const dash = {...d1, cells: [c1]} state = { diff --git a/ui/spec/data_explorer/reducers/queryConfigSpec.js b/ui/spec/data_explorer/reducers/queryConfigSpec.js index 1c35e9193..f23524e45 100644 --- a/ui/spec/data_explorer/reducers/queryConfigSpec.js +++ b/ui/spec/data_explorer/reducers/queryConfigSpec.js @@ -1,5 +1,5 @@ -import reducer from 'src/data_explorer/reducers/queryConfigs'; -import defaultQueryConfig from 'src/utils/defaultQueryConfig'; +import reducer from 'src/data_explorer/reducers/queryConfigs' +import defaultQueryConfig from 'src/utils/defaultQueryConfig' import { chooseNamespace, chooseMeasurement, @@ -10,108 +10,108 @@ import { groupByTime, toggleTagAcceptance, updateRawQuery, -} from 'src/data_explorer/actions/view'; +} from 'src/data_explorer/actions/view' const fakeAddQueryAction = (panelID, queryID) => { return { type: 'ADD_QUERY', payload: {panelID, queryID}, - }; -}; + } +} function buildInitialState(queryId, params) { - return Object.assign({}, defaultQueryConfig(queryId), params); + return Object.assign({}, defaultQueryConfig(queryId), params) } describe('Chronograf.Reducers.queryConfig', () => { - const queryId = 123; + const queryId = 123 it('can add a query', () => { - const state = reducer({}, fakeAddQueryAction('blah', queryId)); + const state = reducer({}, fakeAddQueryAction('blah', queryId)) - const actual = state[queryId]; - const expected = defaultQueryConfig(queryId); - expect(actual).to.deep.equal(expected); - }); + const actual = state[queryId] + const expected = defaultQueryConfig(queryId) + expect(actual).to.deep.equal(expected) + }) describe('choosing db, rp, and measurement', () => { - let state; + let state beforeEach(() => { - state = reducer({}, fakeAddQueryAction('any', queryId)); - }); + state = reducer({}, fakeAddQueryAction('any', queryId)) + }) it('sets the db and rp', () => { const newState = reducer(state, chooseNamespace(queryId, { database: 'telegraf', retentionPolicy: 'monitor', - })); + })) - expect(newState[queryId].database).to.equal('telegraf'); - expect(newState[queryId].retentionPolicy).to.equal('monitor'); - }); + expect(newState[queryId].database).to.equal('telegraf') + 
expect(newState[queryId].retentionPolicy).to.equal('monitor') + }) it('sets the measurement', () => { - const newState = reducer(state, chooseMeasurement(queryId, 'mem')); + const newState = reducer(state, chooseMeasurement(queryId, 'mem')) - expect(newState[queryId].measurement).to.equal('mem'); - }); - }); + expect(newState[queryId].measurement).to.equal('mem') + }) + }) describe('a query has measurements and fields', () => { - let state; + let state beforeEach(() => { - const one = reducer({}, fakeAddQueryAction('any', queryId)); + const one = reducer({}, fakeAddQueryAction('any', queryId)) const two = reducer(one, chooseNamespace(queryId, { database: '_internal', retentionPolicy: 'daily', - })); - const three = reducer(two, chooseMeasurement(queryId, 'disk')); - state = reducer(three, toggleField(queryId, {field: 'a great field', funcs: []})); - }); + })) + const three = reducer(two, chooseMeasurement(queryId, 'disk')) + state = reducer(three, toggleField(queryId, {field: 'a great field', funcs: []})) + }) describe('choosing a new namespace', () => { it('clears out the old measurement and fields', () => { // what about tags? - expect(state[queryId].measurement).to.exist; - expect(state[queryId].fields.length).to.equal(1); + expect(state[queryId].measurement).to.exist + expect(state[queryId].fields.length).to.equal(1) const newState = reducer(state, chooseNamespace(queryId, { database: 'newdb', retentionPolicy: 'newrp', - })); + })) - expect(newState[queryId].measurement).not.to.exist; - expect(newState[queryId].fields.length).to.equal(0); - }); - }); + expect(newState[queryId].measurement).not.to.exist + expect(newState[queryId].fields.length).to.equal(0) + }) + }) describe('choosing a new measurement', () => { it('leaves the namespace and clears out the old fields', () => { // what about tags? 
- expect(state[queryId].fields.length).to.equal(1); + expect(state[queryId].fields.length).to.equal(1) - const newState = reducer(state, chooseMeasurement(queryId, 'newmeasurement')); + const newState = reducer(state, chooseMeasurement(queryId, 'newmeasurement')) - expect(state[queryId].database).to.equal(newState[queryId].database); - expect(state[queryId].retentionPolicy).to.equal(newState[queryId].retentionPolicy); - expect(newState[queryId].fields.length).to.equal(0); - }); - }); + expect(state[queryId].database).to.equal(newState[queryId].database) + expect(state[queryId].retentionPolicy).to.equal(newState[queryId].retentionPolicy) + expect(newState[queryId].fields.length).to.equal(0) + }) + }) describe('when the query is part of a kapacitor rule', () => { it('only allows one field', () => { - expect(state[queryId].fields.length).to.equal(1); + expect(state[queryId].fields.length).to.equal(1) - const isKapacitorRule = true; - const newState = reducer(state, toggleField(queryId, {field: 'a different field', funcs: []}, isKapacitorRule)); + const isKapacitorRule = true + const newState = reducer(state, toggleField(queryId, {field: 'a different field', funcs: []}, isKapacitorRule)) - expect(newState[queryId].fields.length).to.equal(1); - expect(newState[queryId].fields[0].field).to.equal('a different field'); - }); - }); - }); + expect(newState[queryId].fields.length).to.equal(1) + expect(newState[queryId].fields[0].field).to.equal('a different field') + }) + }) + }) describe('APPLY_FUNCS_TO_FIELD', () => { it('applies functions to a field without any existing functions', () => { - const queryId = 123; + const queryId = 123 const initialState = { [queryId]: { id: 123, @@ -121,23 +121,23 @@ describe('Chronograf.Reducers.queryConfig', () => { {field: 'f1', funcs: ['fn1', 'fn2']}, {field: 'f2', funcs: ['fn1']}, ], - } - }; + }, + } const action = applyFuncsToField(queryId, { field: 'f1', funcs: ['fn3', 'fn4'], - }); + }) - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) expect(nextState[queryId].fields).to.eql([ {field: 'f1', funcs: ['fn3', 'fn4']}, {field: 'f2', funcs: ['fn1']}, - ]); - }); + ]) + }) it('removes all functions and group by time when one field has no funcs applied', () => { - const queryId = 123; + const queryId = 123 const initialState = { [queryId]: { id: 123, @@ -151,27 +151,27 @@ describe('Chronograf.Reducers.queryConfig', () => { time: '1m', tags: [], }, - } - }; + }, + } const action = applyFuncsToField(queryId, { field: 'f1', funcs: [], - }); + }) - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) expect(nextState[queryId].fields).to.eql([ {field: 'f1', funcs: []}, {field: 'f2', funcs: []}, - ]); - expect(nextState[queryId].groupBy.time).to.equal(null); - }); - }); + ]) + expect(nextState[queryId].groupBy.time).to.equal(null) + }) + }) describe('CHOOSE_TAG', () => { it('adds a tag key/value to the query', () => { - const queryId = 123; + const queryId = 123 const initialState = { [queryId]: buildInitialState(queryId, { tags: { @@ -179,63 +179,63 @@ describe('Chronograf.Reducers.queryConfig', () => { k2: ['foo'], }, }), - }; + } const action = chooseTag(queryId, { key: 'k1', value: 'v1', - }); + }) - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) expect(nextState[queryId].tags).to.eql({ k1: ['v0', 'v1'], k2: ['foo'], - }); - }); + }) + }) it('creates a new entry if it\'s the first key', () => { - const queryId = 123; + 
const queryId = 123 const initialState = { [queryId]: buildInitialState(queryId, { tags: {}, }), - }; + } const action = chooseTag(queryId, { key: 'k1', value: 'v1', - }); + }) - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) expect(nextState[queryId].tags).to.eql({ k1: ['v1'], - }); - }); + }) + }) it('removes a value that is already in the list', () => { - const queryId = 123; + const queryId = 123 const initialState = { [queryId]: buildInitialState(queryId, { tags: { k1: ['v1'], }, }), - }; + } const action = chooseTag(queryId, { key: 'k1', value: 'v1', - }); + }) - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) // TODO: this should probably remove the `k1` property entirely from the tags object - expect(nextState[queryId].tags).to.eql({}); - }); - }); + expect(nextState[queryId].tags).to.eql({}) + }) + }) describe('GROUP_BY_TAG', () => { it('adds a tag key/value to the query', () => { - const queryId = 123; + const queryId = 123 const initialState = { [queryId]: { id: 123, @@ -244,20 +244,20 @@ describe('Chronograf.Reducers.queryConfig', () => { fields: [], tags: {}, groupBy: {tags: [], time: null}, - } - }; - const action = groupByTag(queryId, 'k1'); + }, + } + const action = groupByTag(queryId, 'k1') - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) expect(nextState[queryId].groupBy).to.eql({ time: null, tags: ['k1'], - }); - }); + }) + }) it('removes a tag if the given tag key is already in the GROUP BY list', () => { - const queryId = 123; + const queryId = 123 const initialState = { [queryId]: { id: 123, @@ -266,59 +266,59 @@ describe('Chronograf.Reducers.queryConfig', () => { fields: [], tags: {}, groupBy: {tags: ['k1'], time: null}, - } - }; - const action = groupByTag(queryId, 'k1'); + }, + } + const action = groupByTag(queryId, 'k1') - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) expect(nextState[queryId].groupBy).to.eql({ time: null, tags: [], - }); - }); - }); + }) + }) + }) describe('TOGGLE_TAG_ACCEPTANCE', () => { it('it toggles areTagsAccepted', () => { - const queryId = 123; + const queryId = 123 const initialState = { [queryId]: buildInitialState(queryId), - }; - const action = toggleTagAcceptance(queryId); + } + const action = toggleTagAcceptance(queryId) - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) - expect(nextState[queryId].areTagsAccepted).to.equal(!initialState[queryId].areTagsAccepted); - }); - }); + expect(nextState[queryId].areTagsAccepted).to.equal(!initialState[queryId].areTagsAccepted) + }) + }) describe('GROUP_BY_TIME', () => { it('applys the appropriate group by time', () => { - const queryId = 123; - const time = '100y'; + const queryId = 123 + const time = '100y' const initialState = { [queryId]: buildInitialState(queryId), - }; + } - const action = groupByTime(queryId, time); + const action = groupByTime(queryId, time) - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) - expect(nextState[queryId].groupBy.time).to.equal(time); - }); - }); + expect(nextState[queryId].groupBy.time).to.equal(time) + }) + }) it('updates a query\'s raw text', () => { - const queryId = 123; + const queryId = 123 const initialState = { [queryId]: buildInitialState(queryId), - }; - const text = 'foo'; - const action = updateRawQuery(queryId, text); + } + const text = 
'foo' + const action = updateRawQuery(queryId, text) - const nextState = reducer(initialState, action); + const nextState = reducer(initialState, action) - expect(nextState[queryId].rawText).to.equal('foo'); - }); -}); + expect(nextState[queryId].rawText).to.equal('foo') + }) +}) diff --git a/ui/spec/data_explorer/reducers/timeRangeSpec.js b/ui/spec/data_explorer/reducers/timeRangeSpec.js index 67a870c49..bb767289d 100644 --- a/ui/spec/data_explorer/reducers/timeRangeSpec.js +++ b/ui/spec/data_explorer/reducers/timeRangeSpec.js @@ -1,31 +1,31 @@ -import reducer from 'src/data_explorer/reducers/timeRange'; +import reducer from 'src/data_explorer/reducers/timeRange' import { setTimeRange, -} from 'src/data_explorer/actions/view'; +} from 'src/data_explorer/actions/view' const noopAction = () => { - return {type: 'NOOP'}; + return {type: 'NOOP'} } describe('DataExplorer.Reducers.TimeRange', () => { it('it sets the default timeRange', () => { - const state = reducer(undefined, noopAction()); + const state = reducer(undefined, noopAction()) const expected = { lower: 'now() - 15m', upper: null, - }; + } - expect(state).to.deep.equal(expected); - }); + expect(state).to.deep.equal(expected) + }) it('it can set the time range', () => { const timeRange = { lower: 'now() - 5m', upper: null, - }; - const expected = reducer(undefined, setTimeRange(timeRange)); + } + const expected = reducer(undefined, setTimeRange(timeRange)) - expect(timeRange).to.deep.equal(expected); - }); -}); + expect(timeRange).to.deep.equal(expected) + }) +}) diff --git a/ui/spec/data_explorer/reducers/uiSpec.js b/ui/spec/data_explorer/reducers/uiSpec.js index 735ccc820..c82135a76 100644 --- a/ui/spec/data_explorer/reducers/uiSpec.js +++ b/ui/spec/data_explorer/reducers/uiSpec.js @@ -1,40 +1,40 @@ -import reducer from 'src/data_explorer/reducers/ui'; +import reducer from 'src/data_explorer/reducers/ui' import { addQuery, deleteQuery, -} from 'src/data_explorer/actions/view'; +} from 'src/data_explorer/actions/view' const noopAction = () => { - return {type: 'NOOP'}; + return {type: 'NOOP'} } -let state = undefined; +let state describe('DataExplorer.Reducers.UI', () => { it('it sets the default state for UI', () => { - const actual = reducer(state, noopAction()); + const actual = reducer(state, noopAction()) const expected = { queryIDs: [], - }; + } - expect(actual).to.deep.equal(expected); - }); + expect(actual).to.deep.equal(expected) + }) it('it can add a query', () => { - const actual = reducer(state, addQuery()); - expect(actual.queryIDs.length).to.equal(1); - }); + const actual = reducer(state, addQuery()) + expect(actual.queryIDs.length).to.equal(1) + }) it('it can delete a query', () => { - const queryID = '123'; - state = {queryIDs: ['456', queryID]}; + const queryID = '123' + state = {queryIDs: ['456', queryID]} - const actual = reducer(state, deleteQuery(queryID)); + const actual = reducer(state, deleteQuery(queryID)) const expected = { queryIDs: ['456'], - }; + } - expect(actual).to.deep.equal(expected); - }); -}); + expect(actual).to.deep.equal(expected) + }) +}) diff --git a/ui/spec/data_explorer/utils/influxql/selectSpec.js b/ui/spec/data_explorer/utils/influxql/selectSpec.js index e44b0401c..67e96bd47 100644 --- a/ui/spec/data_explorer/utils/influxql/selectSpec.js +++ b/ui/spec/data_explorer/utils/influxql/selectSpec.js @@ -1,117 +1,117 @@ -import buildInfluxQLQuery from 'utils/influxql'; -import defaultQueryConfig from 'src/utils/defaultQueryConfig'; +import buildInfluxQLQuery from 'utils/influxql' +import 
defaultQueryConfig from 'src/utils/defaultQueryConfig' function mergeConfig(options) { - return Object.assign({}, defaultQueryConfig(123), options); + return Object.assign({}, defaultQueryConfig(123), options) } describe('buildInfluxQLQuery', () => { - let config, timeBounds; + let config, timeBounds describe('when information is missing', () => { it('returns a null select statement', () => { - expect(buildInfluxQLQuery({}, mergeConfig())).to.equal(null); - expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1'}))).to.equal(null); // no measurement - expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1', measurement: 'm1'}))).to.equal(null); // no fields - }); - }); + expect(buildInfluxQLQuery({}, mergeConfig())).to.equal(null) + expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1'}))).to.equal(null) // no measurement + expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1', measurement: 'm1'}))).to.equal(null) // no fields + }) + }) describe('with a database, measurement, field, and NO retention policy', () => { beforeEach(() => { - config = mergeConfig({database: 'db1', measurement: 'm1', fields: [{field: 'f1', func: null}]}); - }); + config = mergeConfig({database: 'db1', measurement: 'm1', fields: [{field: 'f1', func: null}]}) + }) it('builds the right query', () => { - expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1".."m1"'); - }); - }); + expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1".."m1"') + }) + }) describe('with a database, measurement, retention policy, and field', () => { beforeEach(() => { - config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: 'f1', func: null}]}); - timeBounds = {lower: 'now() - 1hr'}; - }); + config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: 'f1', func: null}]}) + timeBounds = {lower: 'now() - 1hr'} + }) it('builds the right query', () => { - expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1"'); - }); + expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1"') + }) it('builds the right query with a time range', () => { - expect(buildInfluxQLQuery(timeBounds, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1" WHERE time > now() - 1hr'); - }); - }); + expect(buildInfluxQLQuery(timeBounds, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1" WHERE time > now() - 1hr') + }) + }) describe('when the field is *', () => { beforeEach(() => { - config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: '*', func: null}]}); - }); + config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: '*', func: null}]}) + }) it('does not quote the star', () => { - expect(buildInfluxQLQuery({}, config)).to.equal('SELECT * FROM "db1"."rp1"."m1"'); - }); - }); + expect(buildInfluxQLQuery({}, config)).to.equal('SELECT * FROM "db1"."rp1"."m1"') + }) + }) describe('with a measurement and one field, an aggregate, and a GROUP BY time()', () => { beforeEach(() => { - config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: []}}); - timeBounds = {lower: 'now() - 12h'}; - }); + config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: []}}) + timeBounds = {lower: 'now() - 12h'} + }) it('builds the 
right query', () => { - const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m)'; - expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected); - }); - }); + const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m)' + expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected) + }) + }) describe('with a measurement and one field, an aggregate, and a GROUP BY tags', () => { beforeEach(() => { - config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: null, tags: ['t1', 't2']}}); - timeBounds = {lower: 'now() - 12h'}; - }); + config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: null, tags: ['t1', 't2']}}) + timeBounds = {lower: 'now() - 12h'} + }) it('builds the right query', () => { - const expected = `SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY "t1", "t2"`; - expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected); - }); - }); + const expected = `SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY "t1", "t2"` + expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected) + }) + }) describe('with a measurement, one field, and an upper / lower absolute time range', () => { beforeEach(() => { - config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: []}]}); - timeBounds = {lower: "'2015-07-23T15:52:24.447Z'", upper: "'2015-07-24T15:52:24.447Z'"}; - }); + config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: []}]}) + timeBounds = {lower: "'2015-07-23T15:52:24.447Z'", upper: "'2015-07-24T15:52:24.447Z'"} + }) it('builds the right query', () => { - const expected = 'SELECT "value" FROM "db1"."rp1"."m0" WHERE time > \'2015-07-23T15:52:24.447Z\' AND time < \'2015-07-24T15:52:24.447Z\''; - expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected); - }); - }); + const expected = 'SELECT "value" FROM "db1"."rp1"."m0" WHERE time > \'2015-07-23T15:52:24.447Z\' AND time < \'2015-07-24T15:52:24.447Z\'' + expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected) + }) + }) describe('with a measurement and one field, an aggregate, and a GROUP BY time(), and tags', () => { beforeEach(() => { - config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: ['t1', 't2']}}); - timeBounds = {lower: 'now() - 12h'}; - }); + config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: ['t1', 't2']}}) + timeBounds = {lower: 'now() - 12h'} + }) it('builds the right query', () => { - const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m), "t1", "t2"'; - expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected); - }); - }); + const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m), "t1", "t2"' + expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected) + }) + }) describe('with a measurement and two fields', () => { beforeEach(() => { - config = 
mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'f0', funcs: []}, {field: 'f1', funcs: []}]}); - timeBounds = {upper: "'2015-02-24T00:00:00Z'"}; - }); + config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'f0', funcs: []}, {field: 'f1', funcs: []}]}) + timeBounds = {upper: "'2015-02-24T00:00:00Z'"} + }) it('builds the right query', () => { - expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f0", "f1" FROM "db1"."rp1"."m0"'); - }); + expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f0", "f1" FROM "db1"."rp1"."m0"') + }) it('builds the right query with a time range', () => { - const expected = `SELECT "f0", "f1" FROM "db1"."rp1"."m0" WHERE time < '2015-02-24T00:00:00Z'`; - expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected); - }); + const expected = `SELECT "f0", "f1" FROM "db1"."rp1"."m0" WHERE time < '2015-02-24T00:00:00Z'` + expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected) + }) describe('with multiple tag pairs', () => { beforeEach(() => { @@ -128,16 +128,16 @@ describe('buildInfluxQLQuery', () => { ], k2: [ 'v2', - ] + ], }, - }); - timeBounds = {lower: 'now() - 6h'}; - }); + }) + timeBounds = {lower: 'now() - 6h'} + }) it('correctly uses AND/OR to combine pairs', () => { - const expected = `SELECT "f0" FROM "db1"."rp1"."m0" WHERE time > now() - 6h AND ("k1"='v1' OR "k1"='v3' OR "k1"='v4') AND "k2"='v2'`; - expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected); - }); - }); - }); -}); + const expected = `SELECT "f0" FROM "db1"."rp1"."m0" WHERE time > now() - 6h AND ("k1"='v1' OR "k1"='v3' OR "k1"='v4') AND "k2"='v2'` + expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected) + }) + }) + }) +}) diff --git a/ui/spec/index.js b/ui/spec/index.js index 0c6432928..0ee9eb8fd 100644 --- a/ui/spec/index.js +++ b/ui/spec/index.js @@ -1,3 +1,3 @@ -var context = require.context('./', true, /Spec\.js$/); -context.keys().forEach(context); -module.exports = context; +const context = require.context('./', true, /Spec\.js$/) +context.keys().forEach(context) +module.exports = context diff --git a/ui/spec/kapacitor/reducers/rulesSpec.js b/ui/spec/kapacitor/reducers/rulesSpec.js index 60b4c8399..5016fd5b0 100644 --- a/ui/spec/kapacitor/reducers/rulesSpec.js +++ b/ui/spec/kapacitor/reducers/rulesSpec.js @@ -1,6 +1,6 @@ -import reducer from 'src/kapacitor/reducers/rules'; -import {defaultRuleConfigs} from 'src/kapacitor/constants'; -import {ALERT_NODES_ACCESSORS} from 'src/kapacitor/constants'; +import reducer from 'src/kapacitor/reducers/rules' +import {defaultRuleConfigs} from 'src/kapacitor/constants' +import {ALERT_NODES_ACCESSORS} from 'src/kapacitor/constants' import { chooseTrigger, @@ -12,93 +12,93 @@ import { updateRuleName, deleteRuleSuccess, updateRuleStatusSuccess, -} from 'src/kapacitor/actions/view'; +} from 'src/kapacitor/actions/view' describe('Kapacitor.Reducers.rules', () => { it('can choose a trigger', () => { - const ruleID = 1; - const initialState = { - [ruleID]: { - id: ruleID, - queryID: 988, - trigger: '', - } - }; + const ruleID = 1 + const initialState = { + [ruleID]: { + id: ruleID, + queryID: 988, + trigger: '', + }, + } - let newState = reducer(initialState, chooseTrigger(ruleID, 'deadman')); - expect(newState[ruleID].trigger).to.equal('deadman'); - expect(newState[ruleID].values).to.equal(defaultRuleConfigs.deadman); + let newState = reducer(initialState, chooseTrigger(ruleID, 'deadman')) + 
expect(newState[ruleID].trigger).to.equal('deadman') + expect(newState[ruleID].values).to.equal(defaultRuleConfigs.deadman) - newState = reducer(initialState, chooseTrigger(ruleID, 'relative')); - expect(newState[ruleID].trigger).to.equal('relative'); - expect(newState[ruleID].values).to.equal(defaultRuleConfigs.relative); + newState = reducer(initialState, chooseTrigger(ruleID, 'relative')) + expect(newState[ruleID].trigger).to.equal('relative') + expect(newState[ruleID].values).to.equal(defaultRuleConfigs.relative) - newState = reducer(initialState, chooseTrigger(ruleID, 'threshold')); - expect(newState[ruleID].trigger).to.equal('threshold'); - expect(newState[ruleID].values).to.equal(defaultRuleConfigs.threshold); - }); + newState = reducer(initialState, chooseTrigger(ruleID, 'threshold')) + expect(newState[ruleID].trigger).to.equal('threshold') + expect(newState[ruleID].values).to.equal(defaultRuleConfigs.threshold) + }) it('can update the values', () => { - const ruleID = 1; + const ruleID = 1 const initialState = { [ruleID]: { id: ruleID, queryID: 988, trigger: 'deadman', - values: defaultRuleConfigs.deadman - } - }; + values: defaultRuleConfigs.deadman, + }, + } - const newDeadmanValues = {duration: '5m'}; - const newState = reducer(initialState, updateRuleValues(ruleID, 'deadman', newDeadmanValues)); - expect(newState[ruleID].values).to.equal(newDeadmanValues); + const newDeadmanValues = {duration: '5m'} + const newState = reducer(initialState, updateRuleValues(ruleID, 'deadman', newDeadmanValues)) + expect(newState[ruleID].values).to.equal(newDeadmanValues) - const newRelativeValues = {func: 'max', change: 'change'}; - const finalState = reducer(newState, updateRuleValues(ruleID, 'relative', newRelativeValues)); - expect(finalState[ruleID].trigger).to.equal('relative'); - expect(finalState[ruleID].values).to.equal(newRelativeValues); - }); + const newRelativeValues = {func: 'max', change: 'change'} + const finalState = reducer(newState, updateRuleValues(ruleID, 'relative', newRelativeValues)) + expect(finalState[ruleID].trigger).to.equal('relative') + expect(finalState[ruleID].values).to.equal(newRelativeValues) + }) it('can update the message', () => { - const ruleID = 1; + const ruleID = 1 const initialState = { [ruleID]: { id: ruleID, queryID: 988, message: '', - } - }; + }, + } - const message = 'im a kapacitor rule message'; - const newState = reducer(initialState, updateMessage(ruleID, message)); - expect(newState[ruleID].message).to.equal(message); - }); + const message = 'im a kapacitor rule message' + const newState = reducer(initialState, updateMessage(ruleID, message)) + expect(newState[ruleID].message).to.equal(message) + }) it('can update the alerts', () => { - const ruleID = 1; + const ruleID = 1 const initialState = { [ruleID]: { id: ruleID, queryID: 988, alerts: [], - } - }; + }, + } - const alerts = ['slack']; - const newState = reducer(initialState, updateAlerts(ruleID, alerts)); - expect(newState[ruleID].alerts).to.equal(alerts); - }); + const alerts = ['slack'] + const newState = reducer(initialState, updateAlerts(ruleID, alerts)) + expect(newState[ruleID].alerts).to.equal(alerts) + }) it('can update an alerta alert', () => { - const ruleID = 1; + const ruleID = 1 const initialState = { [ruleID]: { id: ruleID, queryID: 988, alerts: [], alertNodes: [], - } - }; + }, + } const tickScript = `stream |alert() @@ -108,39 +108,39 @@ describe('Kapacitor.Reducers.rules', () => { .environment('Development') .group('Dev. 
Servers')
        .services('a b c')
-    `;
+    `
 
-    let newState = reducer(initialState, updateAlertNodes(ruleID, 'alerta', tickScript));
-    const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`;
-    let actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID]);
+    let newState = reducer(initialState, updateAlertNodes(ruleID, 'alerta', tickScript))
+    const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`
+    let actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID])
 
     // Test both data structure and accessor string
-    expect(actualStr).to.equal(expectedStr);
+    expect(actualStr).to.equal(expectedStr)
 
     // Test that accessor string is the same if fed back in
-    newState = reducer(newState, updateAlertNodes(ruleID, 'alerta', actualStr));
-    actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID]);
-    expect(actualStr).to.equal(expectedStr);
-  });
+    newState = reducer(newState, updateAlertNodes(ruleID, 'alerta', actualStr))
+    actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID])
+    expect(actualStr).to.equal(expectedStr)
+  })
 
   it('can update the name', () => {
-    const ruleID = 1;
+    const ruleID = 1
     const name = 'New name'
     const initialState = {
       [ruleID]: {
         id: ruleID,
         queryID: 988,
         name: 'Random album title',
-      }
-    };
+      },
+    }
 
-    const newState = reducer(initialState, updateRuleName(ruleID, name));
-    expect(newState[ruleID].name).to.equal(name);
-  });
+    const newState = reducer(initialState, updateRuleName(ruleID, name))
+    expect(newState[ruleID].name).to.equal(name)
+  })
 
   it('can delete a rule', () => {
-    const rule1 = 1;
-    const rule2 = 2;
+    const rule1 = 1
+    const rule2 = 2
     const initialState = {
       [rule1]: {
         id: rule1,
@@ -148,43 +148,43 @@ describe('Kapacitor.Reducers.rules', () => {
       [rule2]: {
         id: rule2,
       },
-    };
+    }
 
-    expect(Object.keys(initialState).length).to.equal(2);
-    const newState = reducer(initialState, deleteRuleSuccess(rule2));
-    expect(Object.keys(newState).length).to.equal(1);
-    expect(newState[rule1]).to.equal(initialState[rule1]);
-  });
+    expect(Object.keys(initialState).length).to.equal(2)
+    const newState = reducer(initialState, deleteRuleSuccess(rule2))
+    expect(Object.keys(newState).length).to.equal(1)
+    expect(newState[rule1]).to.equal(initialState[rule1])
+  })
 
   it('can update details', () => {
-    const ruleID = 1;
-    const details = 'im some rule details';
+    const ruleID = 1
+    const details = 'im some rule details'
     const initialState = {
       [ruleID]: {
         id: ruleID,
         queryID: 988,
         details: '',
-      }
-    };
+      },
+    }
 
-    const newState = reducer(initialState, updateDetails(ruleID, details));
-    expect(newState[ruleID].details).to.equal(details);
-  });
+    const newState = reducer(initialState, updateDetails(ruleID, details))
+    expect(newState[ruleID].details).to.equal(details)
+  })
 
   it('can update status', () => {
-    const ruleID = 1;
-    const status = 'enabled';
+    const ruleID = 1
+    const status = 'enabled'
     const initialState = {
       [ruleID]: {
         id: ruleID,
         queryID: 988,
         status: 'disabled',
-      }
-    };
+      },
+    }
 
-    const newState = reducer(initialState, updateRuleStatusSuccess(ruleID, status));
-    expect(newState[ruleID].status).to.equal(status);
-  });
-});
+    const newState = reducer(initialState, updateRuleStatusSuccess(ruleID, status))
+    expect(newState[ruleID].status).to.equal(status)
+  })
+})
diff --git a/ui/spec/shared/parsing/diskBytesSpec.js b/ui/spec/shared/parsing/diskBytesSpec.js
index 9b14dd0b8..39f3f5daa 100644
--- a/ui/spec/shared/parsing/diskBytesSpec.js
+++ b/ui/spec/shared/parsing/diskBytesSpec.js
@@ -1,71 +1,71 @@
-import {diskBytesFromShard, diskBytesFromShardForDatabase} from 'shared/parsing/diskBytes';
+import {diskBytesFromShard, diskBytesFromShardForDatabase} from 'shared/parsing/diskBytes'
 
 describe('diskBytesFromShard', () => {
   it('sums all the disk bytes in multiple series', () => {
-    const response = {"results":[
-      {"series":[{"name":"shard","tags":{"clusterID":"6272208615254493595","database":"_internal","engine":"tsm1","hostname":"WattsInfluxDB","id":"1","nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/_internal/monitor/1","retentionPolicy":"monitor"},"columns":["time","last"],"values":[[1464811503000000000,100]]}]},
-      {"series":[{"name":"shard","tags":{"clusterID":"6272208615254493595","database":"telegraf","engine":"tsm1","hostname":"WattsInfluxDB","id":"2","nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/telegraf/default/2","retentionPolicy":"default"},"columns":["time","last"],"values":[[1464811503000000000,200]]}]},
-    ]};
+    const response = {results: [
+      {series: [{name: "shard", tags: {clusterID: "6272208615254493595", database: "_internal", engine: "tsm1", hostname: "WattsInfluxDB", id: "1", nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [[1464811503000000000, 100]]}]},
+      {series: [{name: "shard", tags: {clusterID: "6272208615254493595", database: "telegraf", engine: "tsm1", hostname: "WattsInfluxDB", id: "2", nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/telegraf/default/2", retentionPolicy: "default"}, columns: ["time", "last"], values: [[1464811503000000000, 200]]}]},
+    ]}
 
-    const result = diskBytesFromShard(response);
-    const expectedTotal = 300;
+    const result = diskBytesFromShard(response)
+    const expectedTotal = 300
 
-    expect(result.errors).to.deep.equal([]);
-    expect(result.bytes).to.equal(expectedTotal);
-  });
+    expect(result.errors).to.deep.equal([])
+    expect(result.bytes).to.equal(expectedTotal)
+  })
 
   it('returns empty with empty response', () => {
-    const response = {"results":[{}]};
+    const response = {results: [{}]}
 
-    const result = diskBytesFromShard(response);
+    const result = diskBytesFromShard(response)
 
-    expect(result.errors).to.deep.equal([]);
-    expect(result.bytes).to.equal(0);
-  });
+    expect(result.errors).to.deep.equal([])
+    expect(result.bytes).to.equal(0)
+  })
 
   it('exposes the server error', () => {
-    const response = {"results":[{"error":"internal server error?"}]};
+    const response = {results: [{error: "internal server error?"}]}
 
-    const result = diskBytesFromShard(response);
+    const result = diskBytesFromShard(response)
 
-    expect(result.errors).to.deep.equal(['internal server error?']);
-    expect(result.bytes).to.equal(0);
-  });
-});
+    expect(result.errors).to.deep.equal(['internal server error?'])
+    expect(result.bytes).to.equal(0)
+  })
+})
 
 describe('diskBytesFromShardForDatabase', () => {
   it('parses data as expected', () => {
-    const response = {"results":[{"series":[
-      {"name":"shard","tags":{"nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/_internal/monitor/1","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",100]]},
-      {"name":"shard","tags":{"nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/_internal/monitor/3","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",200]]},
-      {"name":"shard","tags":{"nodeID":"localhost:8188","path":"/Users/watts/.influxdb/data/_internal/monitor/1","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",100]]},
-      {"name":"shard","tags":{"nodeID":"localhost:8188","path":"/Users/watts/.influxdb/data/_internal/monitor/3","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",200]]},
-    ]}]};
+    const response = {results: [{series: [
+      {name: "shard", tags: {nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 100]]},
+      {name: "shard", tags: {nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/3", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 200]]},
+      {name: "shard", tags: {nodeID: "localhost:8188", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 100]]},
+      {name: "shard", tags: {nodeID: "localhost:8188", path: "/Users/watts/.influxdb/data/_internal/monitor/3", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 200]]},
+    ]}]}
 
-    const result = diskBytesFromShardForDatabase(response);
+    const result = diskBytesFromShardForDatabase(response)
     const expected = {
-      1: [{nodeID: 'localhost:8088', diskUsage: 100},{nodeID: 'localhost:8188', diskUsage: 100}],
-      3: [{nodeID: 'localhost:8088', diskUsage: 200},{nodeID: 'localhost:8188', diskUsage: 200}],
-    };
+      1: [{nodeID: 'localhost:8088', diskUsage: 100}, {nodeID: 'localhost:8188', diskUsage: 100}],
+      3: [{nodeID: 'localhost:8088', diskUsage: 200}, {nodeID: 'localhost:8188', diskUsage: 200}],
+    }
 
-    expect(result.shardData).to.deep.equal(expected);
-  });
+    expect(result.shardData).to.deep.equal(expected)
+  })
 
   it('returns empty with empty response', () => {
-    const response = {"results":[{}]};
+    const response = {results: [{}]}
 
-    const result = diskBytesFromShardForDatabase(response);
+    const result = diskBytesFromShardForDatabase(response)
 
-    expect(result.errors).to.deep.equal([]);
-    expect(result.shardData).to.deep.equal({});
-  });
+    expect(result.errors).to.deep.equal([])
+    expect(result.shardData).to.deep.equal({})
+  })
 
   it('exposes the server error', () => {
-    const response = {"results":[{"error":"internal server error?"}]};
+    const response = {results: [{error: "internal server error?"}]}
 
-    const result = diskBytesFromShardForDatabase(response);
+    const result = diskBytesFromShardForDatabase(response)
 
-    expect(result.errors).to.deep.equal(['internal server error?']);
-    expect(result.shardData).to.deep.equal({});
-  });
-});
+    expect(result.errors).to.deep.equal(['internal server error?'])
+    expect(result.shardData).to.deep.equal({})
+  })
+})
diff --git a/ui/spec/shared/parsing/getRangeForDygraphSpec.js b/ui/spec/shared/parsing/getRangeForDygraphSpec.js
index 8c9ebe0a7..e4bcf0cb0 100644
--- a/ui/spec/shared/parsing/getRangeForDygraphSpec.js
+++ b/ui/spec/shared/parsing/getRangeForDygraphSpec.js
@@ -1,23 +1,23 @@
-import getRange from 'shared/parsing/getRangeForDygraph';
+import getRange from 'shared/parsing/getRangeForDygraph'
 
 describe('getRangeForDygraphSpec', () => {
   it('gets the range for one timeSeries', () => {
-    const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]];
+    const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]]
 
-    const actual = 
getRange(timeSeries); - const expected = [1, 3]; + const actual = getRange(timeSeries) + const expected = [1, 3] - expect(actual).to.deep.equal(expected); - }); + expect(actual).to.deep.equal(expected) + }) it('does not get range when a range is provided', () => { - const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]]; + const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]] - const providedRange = [0, 4]; - const actual = getRange(timeSeries, providedRange); + const providedRange = [0, 4] + const actual = getRange(timeSeries, providedRange) - expect(actual).to.deep.equal(providedRange); - }); + expect(actual).to.deep.equal(providedRange) + }) it('gets the range for multiple timeSeries', () => { const timeSeries = [ @@ -25,63 +25,63 @@ describe('getRangeForDygraphSpec', () => { [new Date(1000), 100, 1], [new Date(2000), null, 2], [new Date(3000), 200, 3], - ]; + ] - const actual = getRange(timeSeries); - const expected = [1, 200]; + const actual = getRange(timeSeries) + const expected = [1, 200] - expect(actual).to.deep.equal(expected); - }); + expect(actual).to.deep.equal(expected) + }) it('returns a null array of two elements when min and max are equal', () => { - const timeSeries = [[new Date(1000), 1], [new Date(2000), 1], [new Date(3000), 1]]; - const actual = getRange(timeSeries); - const expected = [null, null]; + const timeSeries = [[new Date(1000), 1], [new Date(2000), 1], [new Date(3000), 1]] + const actual = getRange(timeSeries) + const expected = [null, null] - expect(actual).to.deep.equal(expected); - }); + expect(actual).to.deep.equal(expected) + }) describe('when user provides a rule value', () => { - const defaultMax = 20; - const defaultMin = -10; - const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]]; + const defaultMax = 20 + const defaultMin = -10 + const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]] it('can pad positive values', () => { - const value = 20; - const [min, max] = getRange(timeSeries, undefined, value); + const value = 20 + const [min, max] = getRange(timeSeries, undefined, value) - expect(min).to.equal(defaultMin); - expect(max).to.be.above(defaultMax); - }); + expect(min).to.equal(defaultMin) + expect(max).to.be.above(defaultMax) + }) it('can pad negative values', () => { - const value = -10; - const [min, max] = getRange(timeSeries, undefined, value); + const value = -10 + const [min, max] = getRange(timeSeries, undefined, value) - expect(min).to.be.below(defaultMin); - expect(max).to.equal(defaultMax); - }); - }); + expect(min).to.be.below(defaultMin) + expect(max).to.equal(defaultMax) + }) + }) describe('when user provides a rule range value', () => { - const defaultMax = 20; - const defaultMin = -10; - const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]]; + const defaultMax = 20 + const defaultMin = -10 + const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]] it('can pad positive values', () => { - const rangeValue = 20; - const [min, max] = getRange(timeSeries, undefined, 0, rangeValue); + const rangeValue = 20 + const [min, max] = getRange(timeSeries, undefined, 0, rangeValue) - expect(min).to.equal(defaultMin); - expect(max).to.be.above(defaultMax); - }); + expect(min).to.equal(defaultMin) + expect(max).to.be.above(defaultMax) + }) it('can pad negative values', () => { - const rangeValue = -10; - const 
[min, max] = getRange(timeSeries, undefined, 0, rangeValue); + const rangeValue = -10 + const [min, max] = getRange(timeSeries, undefined, 0, rangeValue) - expect(min).to.be.below(defaultMin); - expect(max).to.equal(defaultMax); - }); - }); -}); + expect(min).to.be.below(defaultMin) + expect(max).to.equal(defaultMax) + }) + }) +}) diff --git a/ui/spec/shared/parsing/parseAlertaSpec.js b/ui/spec/shared/parsing/parseAlertaSpec.js index 447a5622f..b04043f31 100644 --- a/ui/spec/shared/parsing/parseAlertaSpec.js +++ b/ui/spec/shared/parsing/parseAlertaSpec.js @@ -1,4 +1,4 @@ -import {parseAlerta} from 'src/shared/parsing/parseAlerta'; +import {parseAlerta} from 'src/shared/parsing/parseAlerta' it('can parse an alerta tick script', () => { const tickScript = `stream @@ -9,50 +9,50 @@ it('can parse an alerta tick script', () => { .environment('Development') .group('Dev. Servers') .services('a b c') - `; + ` - let actualObj = parseAlerta(tickScript); + let actualObj = parseAlerta(tickScript) const expectedObj = [ { - "name": "resource", - "args": [ - "Hostname or service" - ] + name: "resource", + args: [ + "Hostname or service", + ], }, { - "name": "event", - "args": [ - "Something went wrong" - ] + name: "event", + args: [ + "Something went wrong", + ], }, { - "name": "environment", - "args": [ - "Development" - ] + name: "environment", + args: [ + "Development", + ], }, { - "name": "group", - "args": [ - "Dev. Servers" - ] + name: "group", + args: [ + "Dev. Servers", + ], }, { - "name": "services", - "args": [ + name: "services", + args: [ "a", "b", - "c" - ] - } - ]; + "c", + ], + }, + ] // Test data structure - expect(actualObj).to.deep.equal(expectedObj); + expect(actualObj).to.deep.equal(expectedObj) // Test that data structure is the same if fed back in - const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`; - actualObj = parseAlerta(expectedStr); - expect(actualObj).to.deep.equal(expectedObj); -}); + const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. 
Servers').services('a b c')` + actualObj = parseAlerta(expectedStr) + expect(actualObj).to.deep.equal(expectedObj) +}) diff --git a/ui/spec/shared/parsing/showDatabasesSpec.js b/ui/spec/shared/parsing/showDatabasesSpec.js index fb9021253..f2eaf8078 100644 --- a/ui/spec/shared/parsing/showDatabasesSpec.js +++ b/ui/spec/shared/parsing/showDatabasesSpec.js @@ -1,32 +1,32 @@ -import showDatabases from 'shared/parsing/showDatabases'; +import showDatabases from 'shared/parsing/showDatabases' describe('showDatabases', () => { it('exposes all the database properties', () => { - const response = {"results":[{"series":[{"columns":["name"],"values":[["mydb1"], ["mydb2"]]}]}]}; + const response = {results: [{series: [{columns: ["name"], values: [["mydb1"], ["mydb2"]]}]}]} - const result = showDatabases(response); + const result = showDatabases(response) - expect(result.errors).to.deep.equal([]); - expect(result.databases.length).to.equal(2); - expect(result.databases[0]).to.equal('mydb1'); - expect(result.databases[1]).to.equal('mydb2'); - }); + expect(result.errors).to.deep.equal([]) + expect(result.databases.length).to.equal(2) + expect(result.databases[0]).to.equal('mydb1') + expect(result.databases[1]).to.equal('mydb2') + }) it('returns an empty array when there are no databases', () => { - const response = {"results":[{"series":[{"columns":["name"]}]}]}; + const response = {results: [{series: [{columns: ["name"]}]}]} - const result = showDatabases(response); + const result = showDatabases(response) - expect(result.errors).to.deep.equal([]); - expect(result.databases).to.deep.equal([]); - }); + expect(result.errors).to.deep.equal([]) + expect(result.databases).to.deep.equal([]) + }) it('exposes the server error', () => { - const response = {"results":[{"error":"internal server error?"}]}; + const response = {results: [{error: "internal server error?"}]} - const result = showDatabases(response); + const result = showDatabases(response) - expect(result.errors).to.deep.equal(['internal server error?']); - expect(result.databases).to.deep.equal([]); - }); -}); + expect(result.errors).to.deep.equal(['internal server error?']) + expect(result.databases).to.deep.equal([]) + }) +}) diff --git a/ui/spec/shared/parsing/showFieldKeysSpec.js b/ui/spec/shared/parsing/showFieldKeysSpec.js index 167ecb7cd..581ab9dd2 100644 --- a/ui/spec/shared/parsing/showFieldKeysSpec.js +++ b/ui/spec/shared/parsing/showFieldKeysSpec.js @@ -1,39 +1,39 @@ -import parseShowFieldKeys from 'shared/parsing/showFieldKeys'; +import parseShowFieldKeys from 'shared/parsing/showFieldKeys' describe('parseShowFieldKeys', () => { it('parses a single result', () => { - const response = {"results":[{"series":[{"name":"m1","columns":["fieldKey"],"values":[["f1"],["f2"]]}]}]}; + const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}]} - const result = parseShowFieldKeys(response); - expect(result.errors).to.eql([]); + const result = parseShowFieldKeys(response) + expect(result.errors).to.eql([]) expect(result.fieldSets).to.eql({ m1: ['f1', 'f2'], - }); - }); + }) + }) it('parses multiple results', () => { - const response = {"results":[{"series":[{"name":"m1","columns":["fieldKey"],"values":[["f1"],["f2"]]}]},{"series":[{"name":"m2","columns":["fieldKey"],"values":[["f3"],["f4"]]}]}]}; - const result = parseShowFieldKeys(response); - expect(result.errors).to.eql([]); + const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}, {series: [{name: "m2", 
columns: ["fieldKey"], values: [["f3"], ["f4"]]}]}]} + const result = parseShowFieldKeys(response) + expect(result.errors).to.eql([]) expect(result.fieldSets).to.eql({ m1: ['f1', 'f2'], m2: ['f3', 'f4'], - }); - }); + }) + }) it('parses multiple errors', () => { - const response = {"results":[{"error": "measurement not found: m1"}, {"error": "measurement not found: m2"}]}; - const result = parseShowFieldKeys(response); - expect(result.errors).to.eql(['measurement not found: m1', 'measurement not found: m2']); - expect(result.fieldSets).to.eql({}); - }); + const response = {results: [{error: "measurement not found: m1"}, {error: "measurement not found: m2"}]} + const result = parseShowFieldKeys(response) + expect(result.errors).to.eql(['measurement not found: m1', 'measurement not found: m2']) + expect(result.fieldSets).to.eql({}) + }) it('parses a mix of results and errors', () => { - const response = {"results":[{"series":[{"name":"m1","columns":["fieldKey"],"values":[["f1"],["f2"]]}]},{"error": "measurement not found: m2"}]}; - const result = parseShowFieldKeys(response); - expect(result.errors).to.eql(['measurement not found: m2']); + const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}, {error: "measurement not found: m2"}]} + const result = parseShowFieldKeys(response) + expect(result.errors).to.eql(['measurement not found: m2']) expect(result.fieldSets).to.eql({ m1: ['f1', 'f2'], - }); - }); -}); + }) + }) +}) diff --git a/ui/spec/shared/parsing/showQueriesSpec.js b/ui/spec/shared/parsing/showQueriesSpec.js index 04452959b..44522df8e 100644 --- a/ui/spec/shared/parsing/showQueriesSpec.js +++ b/ui/spec/shared/parsing/showQueriesSpec.js @@ -1,34 +1,34 @@ -import showQueriesParser from 'shared/parsing/showQueries'; +import showQueriesParser from 'shared/parsing/showQueries' describe('showQueriesParser', () => { it('exposes all currently running queries', () => { - const response = {"results":[{"series":[{"columns":["qid","query","database","duration"],"values":[[1,"SHOW QUERIES","db1","1s"], [2,"SELECT foo FROM bar","db1","2s"]]}]}]}; + const response = {results: [{series: [{columns: ["qid", "query", "database", "duration"], values: [[1, "SHOW QUERIES", "db1", "1s"], [2, "SELECT foo FROM bar", "db1", "2s"]]}]}]} - const result = showQueriesParser(response); + const result = showQueriesParser(response) - expect(result.errors).to.eql([]); - expect(result.queries.length).to.equal(2); + expect(result.errors).to.eql([]) + expect(result.queries.length).to.equal(2) expect(result.queries[0]).to.eql({ id: 1, database: 'db1', query: 'SHOW QUERIES', duration: '1s', - }); + }) expect(result.queries[1]).to.eql({ id: 2, database: 'db1', query: 'SELECT foo FROM bar', duration: '2s', - }); + }) expect({foo: 'bar'}).to.eql({foo: 'bar'}) - }); + }) it('exposes the server error', () => { - const response = {"results":[{"error":"internal server error?"}]}; + const response = {results: [{error: "internal server error?"}]} - const result = showQueriesParser(response); + const result = showQueriesParser(response) - expect(result.errors).to.eql(['internal server error?']); - expect(result.queries).to.eql([]); - }); -}); + expect(result.errors).to.eql(['internal server error?']) + expect(result.queries).to.eql([]) + }) +}) diff --git a/ui/spec/shared/parsing/showTagKeysSpec.js b/ui/spec/shared/parsing/showTagKeysSpec.js index ad8127e21..07dcf8806 100644 --- a/ui/spec/shared/parsing/showTagKeysSpec.js +++ b/ui/spec/shared/parsing/showTagKeysSpec.js @@ -1,27 +1,27 
@@
-import parseShowTagKeys from 'shared/parsing/showTagKeys';
+import parseShowTagKeys from 'shared/parsing/showTagKeys'
 
 describe('parseShowTagKeys', () => {
   it('parses the tag keys', () => {
-    const response = {"results":[{"series":[{"name":"cpu","columns":["tagKey"],"values":[["cpu"],["host"]]}]}]};
+    const response = {results: [{series: [{name: "cpu", columns: ["tagKey"], values: [["cpu"], ["host"]]}]}]}
 
-    const result = parseShowTagKeys(response);
-    expect(result.errors).to.eql([]);
-    expect(result.tagKeys).to.eql(['cpu', 'host']);
-  });
+    const result = parseShowTagKeys(response)
+    expect(result.errors).to.eql([])
+    expect(result.tagKeys).to.eql(['cpu', 'host'])
+  })
 
   it('handles empty results', () => {
-    const response = {"results":[{}]};
+    const response = {results: [{}]}
 
-    const result = parseShowTagKeys(response);
-    expect(result.errors).to.eql([]);
-    expect(result.tagKeys).to.eql([]);
-  });
+    const result = parseShowTagKeys(response)
+    expect(result.errors).to.eql([])
+    expect(result.tagKeys).to.eql([])
+  })
 
   it('handles errors', () => {
-    const response = {"results":[{"error": "influxdb error"}]};
+    const response = {results: [{error: "influxdb error"}]}
 
-    const result = parseShowTagKeys(response);
-    expect(result.errors).to.eql([response.results[0].error]);
-    expect(result.tagKeys).to.eql([]);
-  });
-});
+    const result = parseShowTagKeys(response)
+    expect(result.errors).to.eql([response.results[0].error])
+    expect(result.tagKeys).to.eql([])
+  })
+})
diff --git a/ui/spec/shared/parsing/showTagValuesSpec.js b/ui/spec/shared/parsing/showTagValuesSpec.js
index 33b2d02a1..bf79868eb 100644
--- a/ui/spec/shared/parsing/showTagValuesSpec.js
+++ b/ui/spec/shared/parsing/showTagValuesSpec.js
@@ -1,38 +1,38 @@
-import showTagValuesParser from 'shared/parsing/showTagValues';
+import showTagValuesParser from 'shared/parsing/showTagValues'
 
 describe('showTagValuesParser', () => {
   it('handles an empty result set', () => {
-    const response = {"results":[{}]};
+    const response = {results: [{}]}
 
-    const result = showTagValuesParser(response);
+    const result = showTagValuesParser(response)
 
-    expect(result.errors).to.eql([]);
-    expect(result.tags).to.eql({});
-  });
+    expect(result.errors).to.eql([])
+    expect(result.tags).to.eql({})
+  })
 
   it('returns an object of tag keys mapped to their values', () => {
     const response = {
-      "results": [
+      results: [
        {
-          "series": [
+          series: [
            {
-              "name": "measurementA",
-              "columns": ["key","value"],
-              "values": [
+              name: "measurementA",
+              columns: ["key", "value"],
+              values: [
               ["host", "hostA"],
               ["host", "hostB"],
               ["cpu", "cpu0"],
               ["cpu", "cpu1"],
-            ]
-          }
-        ]
-      }
-    ]
-    };
+            ],
+          },
+        ],
+      },
+    ],
+    }
 
-    const result = showTagValuesParser(response);
+    const result = showTagValuesParser(response)
 
-    expect(result.errors).to.eql([]);
+    expect(result.errors).to.eql([])
     expect(result.tags).to.eql({
       host: [
         'hostA',
@@ -42,6 +42,6 @@ describe('showTagValuesParser', () => {
         'cpu0',
         'cpu1',
       ],
-    });
-  });
-});
+    })
+  })
+})
diff --git a/ui/spec/shared/presenters/presentersSpec.js b/ui/spec/shared/presenters/presentersSpec.js
index a754a81d7..3326e9e1b 100644
--- a/ui/spec/shared/presenters/presentersSpec.js
+++ b/ui/spec/shared/presenters/presentersSpec.js
@@ -1,7 +1,7 @@
 import {
   buildRoles,
   buildClusterAccounts,
-} from 'src/shared/presenters';
+} from 'src/shared/presenters'
 
 describe('Presenters', function() {
   describe('roles utils', function() {
@@ -17,13 +17,13 @@ describe('Presenters', function() {
           ],
         },
       },
-    ];
+    ]
 
-    const actual = buildRoles(roles);
+    const actual = buildRoles(roles)
 
-    expect(actual[0].users).to.eql([]);
-    });
-  });
+    expect(actual[0].users).to.eql([])
+    })
+  })
 
   describe('when a role has no permissions', function() {
     it('sets a role\'s permissions as an empty array', function() {
@@ -35,47 +35,47 @@ describe('Presenters', function() {
           "will@influxdb.com",
         ],
       },
-    ];
+    ]
 
-    const actual = buildRoles(roles);
+    const actual = buildRoles(roles)
 
-    expect(actual[0].permissions).to.eql([]);
-    });
-  });
+    expect(actual[0].permissions).to.eql([])
+    })
+  })
 
   describe('when a role has users and permissions', function() {
     beforeEach(function() {
       const roles = [
        {
-          "name": "Marketing",
-          "permissions": {
+          name: "Marketing",
+          permissions: {
            "": [
              "ViewAdmin",
            ],
-            "db1": [
-              "ReadData"
+            db1: [
+              "ReadData",
            ],
-            "db2": [
+            db2: [
              "ReadData",
              "AddRemoveNode",
            ],
          },
-          "users": [
+          users: [
            "roley@influxdb.com",
-            "will@influxdb.com"
-          ]
+            "will@influxdb.com",
+          ],
        },
-      ];
+      ]
 
-      this.roles = buildRoles(roles);
-    });
+      this.roles = buildRoles(roles)
+    })
 
     it('each role has a name and a list of users (if they exist)', function() {
-      const role = this.roles[0];
-      expect(role.name).to.equal('Marketing');
-      expect(role.users).to.contain("roley@influxdb.com");
-      expect(role.users).to.contain("will@influxdb.com");
-    });
+      const role = this.roles[0]
+      expect(role.name).to.equal('Marketing')
+      expect(role.users).to.contain("roley@influxdb.com")
+      expect(role.users).to.contain("will@influxdb.com")
+    })
 
     it('transforms permissions into a list of objects and each permission has a list of resources', function() {
       expect(this.roles[0].permissions).to.eql([
@@ -97,11 +97,11 @@ describe('Presenters', function() {
          description: 'Can add/remove nodes from a cluster',
          resources: ['db2'],
        },
-      ]);
-    });
-  });
-  });
-  });
+      ])
+    })
+  })
+  })
+  })
 
   describe('cluster utils', function() {
     describe('buildClusterAccounts', function() {
@@ -109,50 +109,50 @@
       it('adds role information to each cluster account and parses permissions', function() {
        const users = [
          {
-            "name":"jon@example.com",
-            "hash":"xxxxx",
-            "permissions": {
+            name: "jon@example.com",
+            hash: "xxxxx",
+            permissions: {
              "": [
                "ViewAdmin",
              ],
-              "db1": [
+              db1: [
                "ReadData",
              ],
-            }
+            },
          },
          {
-            "name":"ned@example.com",
-            "hash":"xxxxx"
-          }
-        ];
+            name: "ned@example.com",
+            hash: "xxxxx",
+          },
+        ]
 
        const roles = [
          {
-            "name":"Admin",
-            "permissions":{
-              "db2": [
+            name: "Admin",
+            permissions: {
+              db2: [
                "ViewAdmin",
-              ]
+              ],
            },
-            "users":[
+            users: [
              "jon@example.com",
              "ned@example.com",
-            ]
+            ],
          },
          {
-            "name":"Marketing",
-            "permissions": {
-              "db3": [
+            name: "Marketing",
+            permissions: {
+              db3: [
                "ReadData",
              ],
            },
-            "users": [
+            users: [
              "jon@example.com",
-            ]
-          }
+            ],
+          },
        ]
 
-        const actual = buildClusterAccounts(users, roles);
+        const actual = buildClusterAccounts(users, roles)
 
        const expected = [
          {
@@ -183,7 +183,7 @@ describe('Presenters', function() {
              resources: ['db2'],
            },
          ],
-          users:[
+          users: [
            "jon@example.com",
            "ned@example.com",
          ],
@@ -198,10 +198,10 @@
              resources: ['db3'],
            },
          ],
-          users:[
+          users: [
            "jon@example.com",
-          ]
-        }
+          ],
+        },
        ],
      },
      {
@@ -219,38 +219,38 @@
              resources: ['db2'],
            },
          ],
-          users:[
+          users: [
            "jon@example.com",
            "ned@example.com",
          ],
        },
      ],
-    }
-  ];
+    },
+  ]
 
-  expect(actual).to.eql(expected);
-  });
+  expect(actual).to.eql(expected)
+  })
 
   it('can handle empty results for users and roles', function() {
-    const users = undefined;
-    const roles = undefined;
+    const users = undefined
+    
const roles = undefined - const actual = buildClusterAccounts(users, roles); + const actual = buildClusterAccounts(users, roles) - expect(actual).to.eql([]); - }); + expect(actual).to.eql([]) + }) it('sets roles to an empty array if a user has no roles', function() { const users = [{ name: "ned@example.com", hash: "xxxxx", - }]; - const roles = []; + }] + const roles = [] - const actual = buildClusterAccounts(users, roles); + const actual = buildClusterAccounts(users, roles) - expect(actual[0].roles).to.eql([]); - }); - }); - }); -}); + expect(actual[0].roles).to.eql([]) + }) + }) + }) +}) diff --git a/ui/spec/shared/reducers/appSpec.js b/ui/spec/shared/reducers/appSpec.js index e43ee650a..5d1190c77 100644 --- a/ui/spec/shared/reducers/appSpec.js +++ b/ui/spec/shared/reducers/appSpec.js @@ -12,29 +12,29 @@ describe('Shared.Reducers.appReducer', () => { inPresentationMode: false, }, persisted: { - autoRefresh: 0 + autoRefresh: 0, }, } it('should handle ENABLE_PRESENTATION_MODE', () => { - const reducedState = appReducer(initialState, enablePresentationMode()); + const reducedState = appReducer(initialState, enablePresentationMode()) - expect(reducedState.ephemeral.inPresentationMode).to.equal(true); + expect(reducedState.ephemeral.inPresentationMode).to.equal(true) }) it('should handle DISABLE_PRESENTATION_MODE', () => { Object.assign(initialState, {ephemeral: {inPresentationMode: true}}) - const reducedState = appReducer(initialState, disablePresentationMode()); + const reducedState = appReducer(initialState, disablePresentationMode()) - expect(reducedState.ephemeral.inPresentationMode).to.equal(false); + expect(reducedState.ephemeral.inPresentationMode).to.equal(false) }) it('should handle SET_AUTOREFRESH', () => { const expectedMs = 15000 - const reducedState = appReducer(initialState, setAutoRefresh(expectedMs)); - - expect(reducedState.persisted.autoRefresh).to.equal(expectedMs); + const reducedState = appReducer(initialState, setAutoRefresh(expectedMs)) + + expect(reducedState.persisted.autoRefresh).to.equal(expectedMs) }) }) diff --git a/ui/spec/shared/reducers/sourcesSpec.js b/ui/spec/shared/reducers/sourcesSpec.js index 6228b8907..59f95849d 100644 --- a/ui/spec/shared/reducers/sourcesSpec.js +++ b/ui/spec/shared/reducers/sourcesSpec.js @@ -1,47 +1,47 @@ -import reducer from 'src/shared/reducers/sources'; +import reducer from 'src/shared/reducers/sources' import { loadSources, updateSource, addSource, -} from 'src/shared/actions/sources'; +} from 'src/shared/actions/sources' describe('Shared.Reducers.sources', () => { it('can correctly show default sources when adding a source', () => { - let state = []; + let state = [] state = reducer(state, addSource({ id: '1', - default: true, - })); + "default": true, + })) state = reducer(state, addSource({ id: '2', - default: true, - })); + "default": true, + })) - expect(state.filter((s) => s.default).length).to.equal(1); - }); + expect(state.filter((s) => s.default).length).to.equal(1) + }) it('can correctly show default sources when updating a source', () => { - let state = []; + let state = [] state = reducer(state, addSource({ id: '1', - default: true, - })); + "default": true, + })) state = reducer(state, addSource({ id: '2', - default: true, - })); + "default": true, + })) state = reducer(state, updateSource({ id: '1', - default: true, - })); + "default": true, + })) - expect(state.find(({id}) => id === '1').default).to.equal(true); - expect(state.find(({id}) => id === '2').default).to.equal(false); - }); -}); + 
expect(state.find(({id}) => id === '1').default).to.equal(true)
+    expect(state.find(({id}) => id === '2').default).to.equal(false)
+  })
+})
diff --git a/ui/spec/spec-helper.js b/ui/spec/spec-helper.js
index 494ee1494..6a3638a0f 100644
--- a/ui/spec/spec-helper.js
+++ b/ui/spec/spec-helper.js
@@ -1,13 +1,13 @@
 window.then = function(cb, done) {
   window.setTimeout(function() {
-    cb();
+    cb()
     if (typeof done === 'function') {
-      done();
+      done()
     }
-  }, 0);
-};
+  }, 0)
+}
 
-var chai = require('chai');
-chai.use(require('sinon-chai'));
+const chai = require('chai')
+chai.use(require('sinon-chai'))
 
-global.expect = chai.expect;
+global.expect = chai.expect
diff --git a/ui/spec/utils/formattingSpec.js b/ui/spec/utils/formattingSpec.js
index 97f8d0ba5..d3af80d1d 100644
--- a/ui/spec/utils/formattingSpec.js
+++ b/ui/spec/utils/formattingSpec.js
@@ -1,39 +1,39 @@
-import {formatBytes, formatRPDuration} from 'utils/formatting';
+import {formatBytes, formatRPDuration} from 'utils/formatting'
 
 describe('Formatting helpers', () => {
   describe('formatBytes', () => {
     it('returns null when passed a falsey value', () => {
-      const actual = formatBytes(null);
+      const actual = formatBytes(null)
 
-      expect(actual).to.equal(null);
-    });
+      expect(actual).to.equal(null)
+    })
 
     it('returns the correct value when passed 0', () => {
-      const actual = formatBytes(0);
+      const actual = formatBytes(0)
 
-      expect(actual).to.equal('0 Bytes');
-    });
+      expect(actual).to.equal('0 Bytes')
+    })
 
     it('converts a raw byte value into its most appropriate unit', () => {
-      expect(formatBytes(1000)).to.equal('1 KB');
-      expect(formatBytes(1000000)).to.equal('1 MB');
-      expect(formatBytes(1000000000)).to.equal('1 GB');
-    });
-  });
+      expect(formatBytes(1000)).to.equal('1 KB')
+      expect(formatBytes(1000000)).to.equal('1 MB')
+      expect(formatBytes(1000000000)).to.equal('1 GB')
+    })
+  })
 
   describe('formatRPDuration', () => {
     it("returns 'infinite' for a retention policy with a value of '0'", () => {
       const actual = formatRPDuration('0')
 
-      expect(actual).to.equal('∞');
-    });
+      expect(actual).to.equal('∞')
+    })
 
     it('correctly formats retention policy durations', () => {
-      expect(formatRPDuration('24h0m0s')).to.equal('24h');
+      expect(formatRPDuration('24h0m0s')).to.equal('24h')
 
-      expect(formatRPDuration('168h0m0s')).to.equal('7d');
+      expect(formatRPDuration('168h0m0s')).to.equal('7d')
 
-      expect(formatRPDuration('200h32m3s')).to.equal('8d8h32m3s');
-    });
-  });
-});
+      expect(formatRPDuration('200h32m3s')).to.equal('8d8h32m3s')
+    })
+  })
+})
diff --git a/ui/spec/utils/timeSeriesToDygraphSpec.js b/ui/spec/utils/timeSeriesToDygraphSpec.js
index c80cf24bf..5dfb04e53 100644
--- a/ui/spec/utils/timeSeriesToDygraphSpec.js
+++ b/ui/spec/utils/timeSeriesToDygraphSpec.js
@@ -1,39 +1,36 @@
-import timeSeriesToDygraph from 'src/utils/timeSeriesToDygraph';
-import {STROKE_WIDTH} from 'src/shared/constants';
-
-const {light: strokeWidth} = STROKE_WIDTH;
+import timeSeriesToDygraph from 'src/utils/timeSeriesToDygraph'
 
 describe('timeSeriesToDygraph', () => {
   it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
     const influxResponse = [
       {
-        "response":
+        response:
          {
-            "results": [
+            results: [
              {
-                "series": [
+                series: [
                  {
-                    "name":"m1",
-                    "columns": ["time","f1"],
-                    "values": [[1000, 1],[2000, 2]],
+                    name: "m1",
+                    columns: ["time", "f1"],
+                    values: [[1000, 1], [2000, 2]],
                  },
-                ]
+                ],
              },
              {
-                "series": [
+                series: [
                  {
-                    "name":"m1",
-                    "columns": ["time","f2"],
-                    "values": [[2000, 3],[4000, 4]],
+                    name: "m1",
+                    columns: ["time", "f2"],
+                    values: [[2000, 
3], [4000, 4]], }, - ] + ], }, ], }, - } - ]; + }, + ] - const actual = timeSeriesToDygraph(influxResponse); + const actual = timeSeriesToDygraph(influxResponse) const expected = { labels: [ @@ -49,40 +46,38 @@ describe('timeSeriesToDygraph', () => { dygraphSeries: { 'm1.f1': { axis: 'y', - strokeWidth, }, 'm1.f2': { axis: 'y', - strokeWidth, }, }, - }; + } - expect(actual).to.deep.equal(expected); - }); + expect(actual).to.deep.equal(expected) + }) it('can sort numerical timestamps correctly', () => { const influxResponse = [ { - "response": + response: { - "results": [ + results: [ { - "series": [ + series: [ { - "name":"m1", - "columns": ["time","f1"], - "values": [[100, 1],[3000, 3],[200, 2]], + name: "m1", + columns: ["time", "f1"], + values: [[100, 1], [3000, 3], [200, 2]], }, - ] + ], }, ], }, - } - ]; + }, + ] - const actual = timeSeriesToDygraph(influxResponse); + const actual = timeSeriesToDygraph(influxResponse) const expected = { labels: [ @@ -94,113 +89,110 @@ describe('timeSeriesToDygraph', () => { [new Date(200), 2], [new Date(3000), 3], ], - }; + } - expect(actual.timeSeries).to.deep.equal(expected.timeSeries); - }); + expect(actual.timeSeries).to.deep.equal(expected.timeSeries) + }) it('can parse multiple responses into two axes', () => { const influxResponse = [ { - "response": + response: { - "results": [ + results: [ { - "series": [ + series: [ { - "name":"m1", - "columns": ["time","f1"], - "values": [[1000, 1],[2000, 2]], + name: "m1", + columns: ["time", "f1"], + values: [[1000, 1], [2000, 2]], }, - ] + ], }, { - "series": [ + series: [ { - "name":"m1", - "columns": ["time","f2"], - "values": [[2000, 3],[4000, 4]], + name: "m1", + columns: ["time", "f2"], + values: [[2000, 3], [4000, 4]], }, - ] + ], }, ], }, }, { - "response": + response: { - "results": [ + results: [ { - "series": [ + series: [ { - "name":"m3", - "columns": ["time","f3"], - "values": [[1000, 1],[2000, 2]], + name: "m3", + columns: ["time", "f3"], + values: [[1000, 1], [2000, 2]], }, - ] + ], }, ], }, }, - ]; + ] - const actual = timeSeriesToDygraph(influxResponse); + const actual = timeSeriesToDygraph(influxResponse) const expected = { - 'm1.f1': { - axis: 'y', - strokeWidth, - }, - 'm1.f2': { - axis: 'y', - strokeWidth, - }, - 'm3.f3': { - axis: 'y2', - strokeWidth, - }, - }; + 'm1.f1': { + axis: 'y', + }, + 'm1.f2': { + axis: 'y', + }, + 'm3.f3': { + axis: 'y2', + }, + } - expect(actual.dygraphSeries).to.deep.equal(expected); - }); + expect(actual.dygraphSeries).to.deep.equal(expected) + }) it('can parse multiple responses with the same field and measurement', () => { const influxResponse = [ { - "response": + response: { - "results": [ + results: [ { - "series": [ + series: [ { - "name":"m1", - "columns": ["time","f1"], - "values": [[1000, 1],[2000, 2]], + name: "m1", + columns: ["time", "f1"], + values: [[1000, 1], [2000, 2]], }, - ] + ], }, ], }, }, { - "response": + response: { - "results": [ + results: [ { - "series": [ + series: [ { - "name":"m1", - "columns": ["time","f1"], - "values": [[2000, 3],[4000, 4]], + name: "m1", + columns: ["time", "f1"], + values: [[2000, 3], [4000, 4]], }, - ] + ], }, ], }, }, - ]; + ] - const actual = timeSeriesToDygraph(influxResponse); + const actual = timeSeriesToDygraph(influxResponse) const expected = { labels: [ @@ -214,162 +206,107 @@ describe('timeSeriesToDygraph', () => { [new Date(4000), null, 4], ], dygraphSeries: { - // 'm1.f1': { - // axis: 'y', - // strokeWidth, - // }, 'm1.f1': { axis: 'y2', - strokeWidth, }, }, - }; + } - 
expect(actual).to.deep.equal(expected);
-  });
+    expect(actual).to.deep.equal(expected)
+  })
 
   it('does not use multiple axes if being used for the DataExplorer', () => {
    const influxResponse = [
      {
-        "response":
+        response:
          {
-            "results": [
+            results: [
              {
-                "series": [
+                series: [
                  {
-                    "name":"m1",
-                    "columns": ["time","f1"],
-                    "values": [[1000, 1],[2000, 2]],
+                    name: "m1",
+                    columns: ["time", "f1"],
+                    values: [[1000, 1], [2000, 2]],
                  },
-                ]
+                ],
              },
            ],
          },
      },
      {
-        "response":
+        response:
          {
-            "results": [
+            results: [
              {
-                "series": [
+                series: [
                  {
-                    "name":"m1",
-                    "columns": ["time","f2"],
-                    "values": [[2000, 3],[4000, 4]],
+                    name: "m1",
+                    columns: ["time", "f2"],
+                    values: [[2000, 3], [4000, 4]],
                  },
-                ]
+                ],
              },
            ],
          },
      },
-    ];
+    ]
 
-    const isInDataExplorer = true;
-    const actual = timeSeriesToDygraph(influxResponse, undefined, isInDataExplorer);
+    const isInDataExplorer = true
+    const actual = timeSeriesToDygraph(influxResponse, undefined, isInDataExplorer)
 
-    const expected = {
-      'm1.f1': {
-        strokeWidth,
-      },
-      'm1.f2': {
-        strokeWidth,
-      },
-    };
+    const expected = {}
 
-    expect(actual.dygraphSeries).to.deep.equal(expected);
-  });
-
-  it('it highlights the appropriate response', () => {
-    const influxResponse = [
-      {
-        "response":
-          {
-            "results": [
-              {
-                "series": [
-                  {
-                    "name":"m1",
-                    "columns": ["time","f1"],
-                    "values": [[1000, 1],[2000, 2]],
-                  },
-                ]
-              },
-            ],
-          },
-      },
-      {
-        "response":
-          {
-            "results": [
-              {
-                "series": [
-                  {
-                    "name":"m2",
-                    "columns": ["time","f2"],
-                    "values": [[2000, 3],[4000, 4]],
-                  },
-                ]
-              },
-            ],
-          },
-      },
-    ];
-
-    const highlightIndex = 1;
-    const actual = timeSeriesToDygraph(influxResponse, highlightIndex);
-    const {dygraphSeries} = actual;
-
-    expect(dygraphSeries["m2.f2"].strokeWidth).to.be.above(dygraphSeries["m1.f1"].strokeWidth);
-  });
+    expect(actual.dygraphSeries).to.deep.equal(expected)
+  })
 
   it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
    const influxResponse = [
      {
-        "response":
+        response:
          {
-            "results": [
+            results: [
              {
-                "series": [
+                series: [
                  {
-                    "name":"mb",
-                    "columns": ["time","f1"],
-                    "values": [[1000, 1],[2000, 2]],
+                    name: "mb",
+                    columns: ["time", "f1"],
+                    values: [[1000, 1], [2000, 2]],
                  },
-                ]
+                ],
              },
              {
-                "series": [
+                series: [
                  {
-                    "name":"ma",
-                    "columns": ["time","f1"],
-                    "values": [[1000, 1],[2000, 2]],
+                    name: "ma",
+                    columns: ["time", "f1"],
+                    values: [[1000, 1], [2000, 2]],
                  },
-                ]
+                ],
              },
              {
-                "series": [
+                series: [
                  {
-                    "name":"mc",
-                    "columns": ["time","f2"],
-                    "values": [[2000, 3],[4000, 4]],
+                    name: "mc",
+                    columns: ["time", "f2"],
+                    values: [[2000, 3], [4000, 4]],
                  },
-                ]
+                ],
              },
              {
-                "series": [
+                series: [
                  {
-                    "name":"mc",
-                    "columns": ["time","f1"],
-                    "values": [[2000, 3],[4000, 4]],
+                    name: "mc",
+                    columns: ["time", "f1"],
+                    values: [[2000, 3], [4000, 4]],
                  },
-                ]
+                ],
              },
            ],
          },
-      }
-    ];
+      },
+    ]
 
-    const actual = timeSeriesToDygraph(influxResponse);
+    const actual = timeSeriesToDygraph(influxResponse)
 
    const expected = [
      'time',
@@ -377,8 +314,8 @@
      `mb.f1`,
      `mc.f1`,
      `mc.f2`,
-    ];
+    ]
 
-    expect(actual.labels).to.deep.equal(expected);
-  });
-});
+    expect(actual.labels).to.deep.equal(expected)
+  })
+})
diff --git a/ui/src/admin/actions/index.js b/ui/src/admin/actions/index.js
index f69705d4b..f9191afb3 100644
--- a/ui/src/admin/actions/index.js
+++ b/ui/src/admin/actions/index.js
@@ -364,26 +364,36 @@ export const updateRolePermissionsAsync = (role, permissions) => async (dispatch
     dispatch(publishAutoDismissingNotification('success', 'Role permissions updated'))
dispatch(syncRole(role, data)) } catch (error) { - dispatch(publishNotification('error', `Failed to updated role: ${error.data.message}`)) + dispatch(publishNotification('error', `Failed to update role: ${error.data.message}`)) } } export const updateUserPermissionsAsync = (user, permissions) => async (dispatch) => { try { - const {data} = await updateUserAJAX(user.links.self, user.roles, permissions) + const {data} = await updateUserAJAX(user.links.self, {permissions}) dispatch(publishAutoDismissingNotification('success', 'User permissions updated')) dispatch(syncUser(user, data)) } catch (error) { - dispatch(publishNotification('error', `Failed to updated user: ${error.data.message}`)) + dispatch(publishNotification('error', `Failed to update user: ${error.data.message}`)) } } export const updateUserRolesAsync = (user, roles) => async (dispatch) => { try { - const {data} = await updateUserAJAX(user.links.self, roles, user.permissions) + const {data} = await updateUserAJAX(user.links.self, {roles}) dispatch(publishAutoDismissingNotification('success', 'User roles updated')) dispatch(syncUser(user, data)) } catch (error) { - dispatch(publishNotification('error', `Failed to updated user: ${error.data.message}`)) + dispatch(publishNotification('error', `Failed to update user: ${error.data.message}`)) + } +} + +export const updateUserPasswordAsync = (user, password) => async (dispatch) => { + try { + const {data} = await updateUserAJAX(user.links.self, {password}) + dispatch(publishAutoDismissingNotification('success', 'User password updated')) + dispatch(syncUser(user, data)) + } catch (error) { + dispatch(publishNotification('error', `Failed to update user: ${error.data.message}`)) } } diff --git a/ui/src/admin/apis/index.js b/ui/src/admin/apis/index.js index 7ef4ab11e..4e450fe4a 100644 --- a/ui/src/admin/apis/index.js +++ b/ui/src/admin/apis/index.js @@ -159,15 +159,12 @@ export const updateRole = async (url, users, permissions) => { } } -export const updateUser = async (url, roles, permissions) => { +export const updateUser = async (url, updates) => { try { return await AJAX({ method: 'PATCH', url, - data: { - roles, - permissions, - }, + data: updates, }) } catch (error) { console.error(error) diff --git a/ui/src/admin/components/AdminTabs.js b/ui/src/admin/components/AdminTabs.js index 7c7e898dd..bb6c82860 100644 --- a/ui/src/admin/components/AdminTabs.js +++ b/ui/src/admin/components/AdminTabs.js @@ -28,6 +28,7 @@ const AdminTabs = ({ onUpdateRolePermissions, onUpdateUserRoles, onUpdateUserPermissions, + onUpdateUserPassword, }) => { let tabs = [ { @@ -51,6 +52,7 @@ const AdminTabs = ({ onFilter={onFilterUsers} onUpdatePermissions={onUpdateUserPermissions} onUpdateRoles={onUpdateUserRoles} + onUpdatePassword={onUpdateUserPassword} /> ), }, @@ -135,6 +137,7 @@ AdminTabs.propTypes = { hasRoles: bool.isRequired, onUpdateUserPermissions: func, onUpdateUserRoles: func, + onUpdateUserPassword: func, } export default AdminTabs diff --git a/ui/src/admin/components/ChangePassRow.js b/ui/src/admin/components/ChangePassRow.js new file mode 100644 index 000000000..c5f177b0d --- /dev/null +++ b/ui/src/admin/components/ChangePassRow.js @@ -0,0 +1,94 @@ +import React, {Component, PropTypes} from 'react' + +import OnClickOutside from 'shared/components/OnClickOutside' +import ConfirmButtons from 'src/shared/components/ConfirmButtons' + +class ChangePassRow extends Component { + constructor(props) { + super(props) + this.state = { + showForm: false, + } + this.showForm = ::this.showForm + 
this.handleCancel = ::this.handleCancel + this.handleKeyPress = ::this.handleKeyPress + this.handleEdit = ::this.handleEdit + this.handleSubmit = ::this.handleSubmit + } + + showForm() { + this.setState({showForm: true}) + } + + handleCancel() { + this.setState({showForm: false}) + } + + handleClickOutside() { + this.setState({showForm: false}) + } + + handleSubmit(user) { + this.props.onApply(user) + this.setState({showForm: false}) + } + + handleKeyPress(user) { + return (e) => { + if (e.key === 'Enter') { + this.handleSubmit(user) + } + } + } + + handleEdit(user) { + return (e) => { + this.props.onEdit(user, {[e.target.name]: e.target.value}) + } + } + + render() { + const {user} = this.props + + if (this.state.showForm) { + return ( +
-Have alerts sent to HipChat.
+Send alert messages to HipChat.
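
Note on the `updateUser` change in ui/src/admin/apis/index.js above: the handler now PATCHes whatever partial-update object it is given, instead of always sending positional `roles` and `permissions` arguments, which is what lets the new `updateUserPasswordAsync` thunk reuse the same endpoint with a one-field body. A minimal usage sketch, assuming a configured redux store with thunk middleware; the `store` and `user` fixtures here are hypothetical, and only the three thunks and their PATCH bodies come from the diff:

    // Sketch only: shows the one-field PATCH bodies produced by the reworked
    // admin thunks. `store` and `user` are assumed fixtures, not part of the diff.
    import {
      updateUserRolesAsync,
      updateUserPermissionsAsync,
      updateUserPasswordAsync,
    } from 'src/admin/actions'

    // Hypothetical user shape; links.self is the URL each thunk PATCHes
    const user = {
      name: 'jon@example.com',
      links: {self: '/chronograf/v1/sources/1/users/jon'},
    }

    store.dispatch(updateUserRolesAsync(user, [{name: 'Admin'}])) // sends {roles: [...]}
    store.dispatch(updateUserPermissionsAsync(user, []))          // sends {permissions: []}
    store.dispatch(updateUserPasswordAsync(user, 's3cret'))       // sends {password: 's3cret'}

Sending only the field that changed keeps each update kind independent, so the server can validate roles, permissions, and passwords separately while the client reuses a single AJAX helper.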