Merge branch 'master' into some-amount-of-polish

pull/1124/head
Alex P 2017-03-31 09:10:36 -07:00
commit a4d3056402
64 changed files with 2431 additions and 1027 deletions

View File

@ -2,12 +2,22 @@
### Bug Fixes
1. [#1104](https://github.com/influxdata/chronograf/pull/1104): Fix windows hosts on host list
1. [#1125](https://github.com/influxdata/chronograf/pull/1125): Fix visualizations not showing graph name
1. [#1133](https://github.com/influxdata/chronograf/issues/1133): Fix Enterprise Kapacitor authentication.
1. [#1142](https://github.com/influxdata/chronograf/issues/1142): Fix Kapacitor Telegram config to display correct disableNotification setting
### Features
1. [#1112](https://github.com/influxdata/chronograf/pull/1112): Add ability to delete a dashboard
1. [#1120](https://github.com/influxdata/chronograf/pull/1120): Allow users to update user passwords.
1. [#1129](https://github.com/influxdata/chronograf/pull/1129): Allow InfluxDB and Kapacitor configuration via ENV vars or CLI options
1. [#1130](https://github.com/influxdata/chronograf/pull/1130): Add loading spinner to Alert History page.
### UI Improvements
1. [#1101](https://github.com/influxdata/chronograf/pull/1101): Compress InfluxQL responses with gzip
1. [#1132](https://github.com/influxdata/chronograf/pull/1132): All sidebar items show activity with a blue strip
1. [#1135](https://github.com/influxdata/chronograf/pull/1135): Clarify Kapacitor Alert configuration for Telegram
1. [#1137](https://github.com/influxdata/chronograf/pull/1137): Clarify Kapacitor Alert configuration for HipChat
1. [#1079](https://github.com/influxdata/chronograf/issues/1079): Remove series highlighting in line graphs
## v1.2.0-beta7 [2017-03-28]
### Bug Fixes

View File

@ -891,6 +891,7 @@
* rimraf 2.5.3 [ISC](http://github.com/isaacs/rimraf)
* rimraf 2.5.4 [ISC](http://github.com/isaacs/rimraf)
* ripemd160 0.2.0 [Unknown](https://github.com/cryptocoinjs/ripemd160)
* rome 2.1.22 [MIT](https://github.com/bevacqua/rome)
* run-async 0.1.0 [MIT](http://github.com/SBoudrias/run-async)
* rx-lite 3.1.2 [Apache License](https://github.com/Reactive-Extensions/RxJS)
* samsam 1.1.2 [BSD](https://github.com/busterjs/samsam)

View File

@ -137,7 +137,7 @@ type Response interface {
// Source is connection information to a time-series data store.
type Source struct {
ID int `json:"id,omitempty,string"` // ID is the unique ID of the source
ID int `json:"id,string"` // ID is the unique ID of the source
Name string `json:"name"` // Name is the user-defined name for the source
Type string `json:"type,omitempty"` // Type specifies which kinds of source (enterprise vs oss)
Username string `json:"username,omitempty"` // Username is the username to connect to the source

144
memdb/kapacitors.go Normal file
View File

@ -0,0 +1,144 @@
package memdb
import (
"context"
"fmt"
"github.com/influxdata/chronograf"
)
// Ensure KapacitorStore and MultiKapacitorStore implements chronograf.ServersStore.
var _ chronograf.ServersStore = &KapacitorStore{}
var _ chronograf.ServersStore = &MultiKapacitorStore{}
// KapacitorStore implements the chronograf.ServersStore interface, and keeps
// an in-memory Kapacitor according to startup configuration
type KapacitorStore struct {
// Kapacitor is the single configured server; nil means the store is empty.
Kapacitor *chronograf.Server
}
// All will return a slice containing a configured source
func (store *KapacitorStore) All(ctx context.Context) ([]chronograf.Server, error) {
	if store.Kapacitor == nil {
		// Empty store: no servers, no error.
		return nil, nil
	}
	return []chronograf.Server{*store.Kapacitor}, nil
}
// Add does not have any effect
func (store *KapacitorStore) Add(ctx context.Context, kap chronograf.Server) (chronograf.Server, error) {
	// The in-memory store is fixed at startup; adding is unsupported.
	err := fmt.Errorf("In-memory KapacitorStore does not support adding a Kapacitor")
	return chronograf.Server{}, err
}
// Delete removes the in-memory configured Kapacitor if its ID matches what's provided
func (store *KapacitorStore) Delete(ctx context.Context, kap chronograf.Server) error {
	if store.Kapacitor != nil && store.Kapacitor.ID == kap.ID {
		store.Kapacitor = nil
		return nil
	}
	return fmt.Errorf("Unable to find Kapacitor with id %d", kap.ID)
}
// Get returns the in-memory Kapacitor if its ID matches what's provided
func (store *KapacitorStore) Get(ctx context.Context, id int) (chronograf.Server, error) {
	if store.Kapacitor != nil && store.Kapacitor.ID == id {
		return *store.Kapacitor, nil
	}
	return chronograf.Server{}, fmt.Errorf("Unable to find Kapacitor with id %d", id)
}
// Update overwrites the in-memory configured Kapacitor if its ID matches what's provided
func (store *KapacitorStore) Update(ctx context.Context, kap chronograf.Server) error {
	if store.Kapacitor != nil && store.Kapacitor.ID == kap.ID {
		store.Kapacitor = &kap
		return nil
	}
	return fmt.Errorf("Unable to find Kapacitor with id %d", kap.ID)
}
// MultiKapacitorStore implements the chronograf.ServersStore interface, and
// delegates to all contained KapacitorStores
type MultiKapacitorStore struct {
// Stores are tried in order; earlier stores take precedence.
Stores []chronograf.ServersStore
}
// All concatenates the Kapacitors of all contained Stores
func (multi *MultiKapacitorStore) All(ctx context.Context) ([]chronograf.Server, error) {
	merged := []chronograf.Server{}
	seen := map[int]struct{}{}
	responded := false
	var lastErr error
	for _, s := range multi.Stores {
		kaps, err := s.All(ctx)
		if err != nil {
			// This store could not produce servers; remember the error and
			// try the next one.
			lastErr = err
			continue
		}
		responded = true // at least one store answered successfully
		for _, kap := range kaps {
			// The first store to report an ID wins; later duplicates are dropped.
			if _, dup := seen[kap.ID]; dup {
				continue
			}
			seen[kap.ID] = struct{}{}
			merged = append(merged, kap)
		}
	}
	if !responded {
		// No store answered at all; surface the last failure.
		return nil, lastErr
	}
	return merged, nil
}
// Add the kap to the first responsive Store, returning the last error when
// every contained Store fails.
func (multi *MultiKapacitorStore) Add(ctx context.Context, kap chronograf.Server) (chronograf.Server, error) {
	var err error
	for _, store := range multi.Stores {
		var k chronograf.Server
		k, err = store.Add(ctx, kap)
		if err == nil {
			return k, nil
		}
	}
	// Bug fix: previously a nil error was returned even when every Store
	// failed, making a total failure look like a successful add of a
	// zero-value Server. Surface the last error instead.
	return chronograf.Server{}, err
}
// Delete delegates to all Stores, returns success if one Store is successful
func (multi *MultiKapacitorStore) Delete(ctx context.Context, kap chronograf.Server) error {
	var lastErr error
	for _, s := range multi.Stores {
		if lastErr = s.Delete(ctx, kap); lastErr == nil {
			return nil
		}
	}
	// Every store failed; report the last error (nil when there are no stores).
	return lastErr
}
// Get finds the Server by id among all contained Stores, returning the last
// error encountered when no Store has a match.
func (multi *MultiKapacitorStore) Get(ctx context.Context, id int) (chronograf.Server, error) {
	var err error
	for _, store := range multi.Stores {
		var k chronograf.Server
		k, err = store.Get(ctx, id)
		if err == nil {
			return k, nil
		}
	}
	// Bug fix: previously a nil error was returned on failure, so callers
	// could not distinguish "not found" from a real zero-value Server. This
	// also makes the method consistent with MultiSourcesStore.Get.
	return chronograf.Server{}, err
}
// Update the first responsive Store
func (multi *MultiKapacitorStore) Update(ctx context.Context, kap chronograf.Server) error {
	var lastErr error
	for _, s := range multi.Stores {
		if lastErr = s.Update(ctx, kap); lastErr == nil {
			return nil
		}
	}
	// Every store failed; report the last error (nil when there are no stores).
	return lastErr
}

129
memdb/kapacitors_test.go Normal file
View File

@ -0,0 +1,129 @@
package memdb
import (
"context"
"testing"
"github.com/influxdata/chronograf"
)
// TestInterfaceImplementation is a compile-time check that both store types
// satisfy chronograf.ServersStore.
func TestInterfaceImplementation(t *testing.T) {
var _ chronograf.ServersStore = &KapacitorStore{}
var _ chronograf.ServersStore = &MultiKapacitorStore{}
}
// TestKapacitorStoreAll verifies All on both an empty and a configured store.
func TestKapacitorStoreAll(t *testing.T) {
	ctx := context.Background()
	store := KapacitorStore{}
	kaps, err := store.All(ctx)
	if err != nil {
		t.Fatal("All should not throw an error with an empty Store")
	}
	if len(kaps) != 0 {
		t.Fatal("Store should be empty")
	}
	store.Kapacitor = &chronograf.Server{}
	kaps, err = store.All(ctx)
	if err != nil {
		// Fixed message: at this point the store is configured, not empty.
		t.Fatal("All should not throw an error with a configured Store")
	}
	if len(kaps) != 1 {
		t.Fatal("Store should have 1 element")
	}
}
// TestKapacitorStoreAdd verifies that the in-memory store rejects Add.
func TestKapacitorStoreAdd(t *testing.T) {
	store := KapacitorStore{}
	if _, err := store.Add(context.Background(), chronograf.Server{}); err == nil {
		t.Fatal("Store should not support adding another source")
	}
}
// TestKapacitorStoreDelete verifies Delete on empty, mismatched, and matching IDs.
func TestKapacitorStoreDelete(t *testing.T) {
	ctx := context.Background()
	store := KapacitorStore{}
	if err := store.Delete(ctx, chronograf.Server{}); err == nil {
		t.Fatal("Delete should not operate on an empty Store")
	}
	store.Kapacitor = &chronograf.Server{ID: 9}
	if err := store.Delete(ctx, chronograf.Server{ID: 8}); err == nil {
		t.Fatal("Delete should not remove elements with the wrong ID")
	}
	if err := store.Delete(ctx, chronograf.Server{ID: 9}); err != nil {
		t.Fatal("Delete should remove an element with a matching ID")
	}
}
// TestKapacitorStoreGet verifies Get on empty, mismatched, and matching IDs.
func TestKapacitorStoreGet(t *testing.T) {
	ctx := context.Background()
	store := KapacitorStore{}
	_, err := store.Get(ctx, 9)
	if err == nil {
		t.Fatal("Get should return an error for an empty Store")
	}
	// A single assignment covers both the mismatch and the match cases;
	// the original redundantly re-assigned the same value between them.
	store.Kapacitor = &chronograf.Server{ID: 9}
	_, err = store.Get(ctx, 8)
	if err == nil {
		t.Fatal("Get should return an error if it finds no matches")
	}
	kap, err := store.Get(ctx, 9)
	if err != nil || kap.ID != 9 {
		t.Fatal("Get should find the element with a matching ID")
	}
}
// TestKapacitorStoreUpdate verifies Update on empty, mismatched, and matching IDs.
func TestKapacitorStoreUpdate(t *testing.T) {
	ctx := context.Background()
	store := KapacitorStore{}
	err := store.Update(ctx, chronograf.Server{})
	if err == nil {
		// Fixed typo in the original message: "fhouls" -> "should".
		t.Fatal("Update should return an error for an empty Store")
	}
	// A single assignment covers both the mismatch and the match cases;
	// the original redundantly re-assigned the same value between them.
	store.Kapacitor = &chronograf.Server{ID: 9}
	err = store.Update(ctx, chronograf.Server{ID: 8})
	if err == nil {
		t.Fatal("Update should return an error if it finds no matches")
	}
	err = store.Update(ctx, chronograf.Server{
		ID:  9,
		URL: "http://crystal.pepsi.com",
	})
	if err != nil || store.Kapacitor.URL != "http://crystal.pepsi.com" {
		t.Fatal("Update should overwrite elements with matching IDs")
	}
}

142
memdb/sources.go Normal file
View File

@ -0,0 +1,142 @@
package memdb
import (
"context"
"fmt"
"github.com/influxdata/chronograf"
)
// Ensure MultiSourcesStore and SourcesStore implements chronograf.SourcesStore.
var _ chronograf.SourcesStore = &SourcesStore{}
var _ chronograf.SourcesStore = &MultiSourcesStore{}
// MultiSourcesStore delegates to the SourcesStores that compose it
type MultiSourcesStore struct {
// Stores are tried in order; earlier stores take precedence.
Stores []chronograf.SourcesStore
}
// All concatenates the Sources of all contained Stores
func (multi *MultiSourcesStore) All(ctx context.Context) ([]chronograf.Source, error) {
	merged := []chronograf.Source{}
	seen := map[int]struct{}{}
	responded := false
	var lastErr error
	for _, s := range multi.Stores {
		srcs, err := s.All(ctx)
		if err != nil {
			// This store could not produce sources; remember the error and
			// try the next one.
			lastErr = err
			continue
		}
		responded = true // at least one store answered successfully
		for _, src := range srcs {
			// The first store to report an ID wins; later duplicates are dropped.
			if _, dup := seen[src.ID]; dup {
				continue
			}
			seen[src.ID] = struct{}{}
			merged = append(merged, src)
		}
	}
	if !responded {
		// No store answered at all; surface the last failure.
		return nil, lastErr
	}
	return merged, nil
}
// Add the src to the first Store to respond successfully, returning the last
// error when every contained Store fails.
func (multi *MultiSourcesStore) Add(ctx context.Context, src chronograf.Source) (chronograf.Source, error) {
	var err error
	for _, store := range multi.Stores {
		var s chronograf.Source
		s, err = store.Add(ctx, src)
		if err == nil {
			return s, nil
		}
	}
	// Bug fix: previously a nil error was returned even when every Store
	// failed, making a total failure look like a successful add of a
	// zero-value Source. Surface the last error instead.
	return chronograf.Source{}, err
}
// Delete delegates to all stores, returns success if one Store is successful
func (multi *MultiSourcesStore) Delete(ctx context.Context, src chronograf.Source) error {
	var lastErr error
	for _, s := range multi.Stores {
		if lastErr = s.Delete(ctx, src); lastErr == nil {
			return nil
		}
	}
	// Every store failed; report the last error (nil when there are no stores).
	return lastErr
}
// Get finds the Source by id among all contained Stores
func (multi *MultiSourcesStore) Get(ctx context.Context, id int) (chronograf.Source, error) {
	var lastErr error
	for _, s := range multi.Stores {
		src, err := s.Get(ctx, id)
		if err == nil {
			return src, nil
		}
		lastErr = err
	}
	// No store had a match; report the last error.
	return chronograf.Source{}, lastErr
}
// Update the first store to return a successful response
func (multi *MultiSourcesStore) Update(ctx context.Context, src chronograf.Source) error {
	var lastErr error
	for _, s := range multi.Stores {
		if lastErr = s.Update(ctx, src); lastErr == nil {
			return nil
		}
	}
	// Every store failed; report the last error (nil when there are no stores).
	return lastErr
}
// SourcesStore implements the chronograf.SourcesStore interface
type SourcesStore struct {
// Source is the single configured source; nil means the store is empty.
Source *chronograf.Source
}
// Add does not have any effect
func (store *SourcesStore) Add(ctx context.Context, src chronograf.Source) (chronograf.Source, error) {
	// The in-memory store is fixed at startup; adding is unsupported.
	err := fmt.Errorf("In-memory SourcesStore does not support adding a Source")
	return chronograf.Source{}, err
}
// All will return a slice containing a configured source
func (store *SourcesStore) All(ctx context.Context) ([]chronograf.Source, error) {
	if store.Source == nil {
		// Empty store: no sources, no error.
		return nil, nil
	}
	return []chronograf.Source{*store.Source}, nil
}
// Delete removes the SourcesStore.Source if it matches the provided Source
func (store *SourcesStore) Delete(ctx context.Context, src chronograf.Source) error {
if store.Source == nil || store.Source.ID != src.ID {
return fmt.Errorf("Unable to find Source with id %d", src.ID)
}
store.Source = nil
return nil
}
// Get returns the configured source if the id matches
func (store *SourcesStore) Get(ctx context.Context, id int) (chronograf.Source, error) {
	if store.Source != nil && store.Source.ID == id {
		return *store.Source, nil
	}
	return chronograf.Source{}, fmt.Errorf("Unable to find Source with id %d", id)
}
// Update overwrites the in-memory configured Source if its ID matches what's provided
func (store *SourcesStore) Update(ctx context.Context, src chronograf.Source) error {
	if store.Source != nil && store.Source.ID == src.ID {
		store.Source = &src
		return nil
	}
	return fmt.Errorf("Unable to find Source with id %d", src.ID)
}

128
memdb/sources_test.go Normal file
View File

@ -0,0 +1,128 @@
package memdb
import (
"context"
"testing"
"github.com/influxdata/chronograf"
)
// TestSourcesStore is a compile-time check that SourcesStore satisfies
// chronograf.SourcesStore.
func TestSourcesStore(t *testing.T) {
var _ chronograf.SourcesStore = &SourcesStore{}
}
// TestSourcesStoreAdd verifies that the in-memory store rejects Add.
func TestSourcesStoreAdd(t *testing.T) {
	store := SourcesStore{}
	if _, err := store.Add(context.Background(), chronograf.Source{}); err == nil {
		t.Fatal("Store should not support adding another source")
	}
}
// TestSourcesStoreAll verifies All on both an empty and a configured store.
func TestSourcesStoreAll(t *testing.T) {
	ctx := context.Background()
	store := SourcesStore{}
	srcs, err := store.All(ctx)
	if err != nil {
		t.Fatal("All should not throw an error with an empty Store")
	}
	if len(srcs) != 0 {
		t.Fatal("Store should be empty")
	}
	store.Source = &chronograf.Source{}
	srcs, err = store.All(ctx)
	if err != nil {
		// Fixed message: at this point the store is configured, not empty.
		t.Fatal("All should not throw an error with a configured Store")
	}
	if len(srcs) != 1 {
		t.Fatal("Store should have 1 element")
	}
}
// TestSourcesStoreDelete verifies Delete on empty, mismatched, and matching IDs.
func TestSourcesStoreDelete(t *testing.T) {
	ctx := context.Background()
	store := SourcesStore{}
	if err := store.Delete(ctx, chronograf.Source{}); err == nil {
		t.Fatal("Delete should not operate on an empty Store")
	}
	store.Source = &chronograf.Source{ID: 9}
	if err := store.Delete(ctx, chronograf.Source{ID: 8}); err == nil {
		t.Fatal("Delete should not remove elements with the wrong ID")
	}
	if err := store.Delete(ctx, chronograf.Source{ID: 9}); err != nil {
		t.Fatal("Delete should remove an element with a matching ID")
	}
}
// TestSourcesStoreGet verifies Get on empty, mismatched, and matching IDs.
func TestSourcesStoreGet(t *testing.T) {
	ctx := context.Background()
	store := SourcesStore{}
	_, err := store.Get(ctx, 9)
	if err == nil {
		t.Fatal("Get should return an error for an empty Store")
	}
	// A single assignment covers both the mismatch and the match cases;
	// the original redundantly re-assigned the same value between them.
	store.Source = &chronograf.Source{ID: 9}
	_, err = store.Get(ctx, 8)
	if err == nil {
		t.Fatal("Get should return an error if it finds no matches")
	}
	src, err := store.Get(ctx, 9)
	if err != nil || src.ID != 9 {
		t.Fatal("Get should find the element with a matching ID")
	}
}
// TestSourcesStoreUpdate verifies Update on empty, mismatched, and matching IDs.
func TestSourcesStoreUpdate(t *testing.T) {
	ctx := context.Background()
	store := SourcesStore{}
	err := store.Update(ctx, chronograf.Source{})
	if err == nil {
		t.Fatal("Update should return an error for an empty Store")
	}
	// A single assignment covers both the mismatch and the match cases;
	// the original redundantly re-assigned the same value between them.
	store.Source = &chronograf.Source{ID: 9}
	err = store.Update(ctx, chronograf.Source{ID: 8})
	if err == nil {
		t.Fatal("Update should return an error if it finds no matches")
	}
	err = store.Update(ctx, chronograf.Source{
		ID:  9,
		URL: "http://crystal.pepsi.com",
	})
	if err != nil || store.Source.URL != "http://crystal.pepsi.com" {
		t.Fatal("Update should overwrite elements with matching IDs")
	}
}

113
server/builders.go Normal file
View File

@ -0,0 +1,113 @@
package server
import (
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/canned"
"github.com/influxdata/chronograf/layouts"
"github.com/influxdata/chronograf/memdb"
)
// LayoutBuilder is responsible for building Layouts
type LayoutBuilder interface {
Build(chronograf.LayoutStore) (*layouts.MultiLayoutStore, error)
}
// MultiLayoutBuilder implements LayoutBuilder and will return a MultiLayoutStore
type MultiLayoutBuilder struct {
// Logger receives diagnostics from the constructed stores.
Logger chronograf.Logger
// UUID generates IDs for layouts loaded from CannedPath.
UUID chronograf.ID
// CannedPath is the directory scanned for canned layout files.
CannedPath string
}
// Build will construct a MultiLayoutStore of canned and db-backed personalized
// layouts
func (builder *MultiLayoutBuilder) Build(db chronograf.LayoutStore) (*layouts.MultiLayoutStore, error) {
	// Layouts read from a directory on disk.
	apps := canned.NewApps(builder.CannedPath, builder.UUID, builder.Logger)
	// Layouts statically compiled into the chronograf binary.
	binApps := &canned.BinLayoutStore{Logger: builder.Logger}
	// The stores form a hierarchy tried in order until an operation
	// succeeds: database first, then filesystem, then compiled-in data.
	return &layouts.MultiLayoutStore{
		Stores: []chronograf.LayoutStore{db, apps, binApps},
	}, nil
}
// SourcesBuilder builds a MultiSourceStore
type SourcesBuilder interface {
Build(chronograf.SourcesStore) (*memdb.MultiSourcesStore, error)
}
// MultiSourceBuilder implements SourcesBuilder
type MultiSourceBuilder struct {
// InfluxDBURL, when non-empty, configures an in-memory source that takes
// precedence over db-backed sources.
InfluxDBURL string
InfluxDBUsername string
InfluxDBPassword string
}
// Build will return a MultiSourceStore
func (fs *MultiSourceBuilder) Build(db chronograf.SourcesStore) (*memdb.MultiSourcesStore, error) {
	var stores []chronograf.SourcesStore
	// A source configured via CLI/ENV is placed first so it takes
	// precedence over db-backed sources.
	if fs.InfluxDBURL != "" {
		stores = append(stores, &memdb.SourcesStore{
			Source: &chronograf.Source{
				ID:       0,
				Name:     fs.InfluxDBURL,
				Type:     chronograf.InfluxDB,
				Username: fs.InfluxDBUsername,
				Password: fs.InfluxDBPassword,
				URL:      fs.InfluxDBURL,
				Default:  true,
			},
		})
	}
	stores = append(stores, db)
	return &memdb.MultiSourcesStore{Stores: stores}, nil
}
// KapacitorBuilder builds a KapacitorStore
type KapacitorBuilder interface {
Build(chronograf.ServersStore) (*memdb.MultiKapacitorStore, error)
}
// MultiKapacitorBuilder implements KapacitorBuilder
type MultiKapacitorBuilder struct {
// KapacitorURL, when non-empty, configures an in-memory Kapacitor that
// takes precedence over db-backed servers.
KapacitorURL string
KapacitorUsername string
KapacitorPassword string
}
// Build will return a MultiKapacitorStore
func (builder *MultiKapacitorBuilder) Build(db chronograf.ServersStore) (*memdb.MultiKapacitorStore, error) {
	var stores []chronograf.ServersStore
	// A Kapacitor configured via CLI/ENV is placed first so it takes
	// precedence over db-backed servers.
	if builder.KapacitorURL != "" {
		stores = append(stores, &memdb.KapacitorStore{
			Kapacitor: &chronograf.Server{
				ID:       0,
				SrcID:    0,
				Name:     builder.KapacitorURL,
				URL:      builder.KapacitorURL,
				Username: builder.KapacitorUsername,
				Password: builder.KapacitorPassword,
			},
		})
	}
	stores = append(stores, db)
	return &memdb.MultiKapacitorStore{Stores: stores}, nil
}

View File

@ -1,7 +1,6 @@
package server
import (
"encoding/base64"
"fmt"
"net/http"
"net/http/httputil"
@ -49,9 +48,7 @@ func (h *Service) KapacitorProxy(w http.ResponseWriter, r *http.Request) {
// Because we are acting as a proxy, kapacitor needs to have the basic auth information set as
// a header directly
if srv.Username != "" && srv.Password != "" {
auth := "Basic " + srv.Username + ":" + srv.Password
header := base64.StdEncoding.EncodeToString([]byte(auth))
req.Header.Set("Authorization", header)
req.SetBasicAuth(srv.Username, srv.Password)
}
}
proxy := &httputil.ReverseProxy{

View File

@ -14,15 +14,13 @@ import (
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/bolt"
"github.com/influxdata/chronograf/canned"
"github.com/influxdata/chronograf/layouts"
"github.com/influxdata/chronograf/influx"
clog "github.com/influxdata/chronograf/log"
"github.com/influxdata/chronograf/oauth2"
"github.com/influxdata/chronograf/uuid"
client "github.com/influxdata/usage-client/v1"
flags "github.com/jessevdk/go-flags"
"github.com/tylerb/graceful"
"github.com/influxdata/chronograf/influx"
)
var (
@ -42,6 +40,14 @@ type Server struct {
Cert flags.Filename `long:"cert" description:"Path to PEM encoded public key certificate. " env:"TLS_CERTIFICATE"`
Key flags.Filename `long:"key" description:"Path to private key associated with given certificate. " env:"TLS_PRIVATE_KEY"`
InfluxDBURL string `long:"influxdb-url" description:"Location of your InfluxDB instance" env:"INFLUXDB_URL"`
InfluxDBUsername string `long:"influxdb-username" description:"Username for your InfluxDB instance" env:"INFLUXDB_USERNAME"`
InfluxDBPassword string `long:"influxdb-password" description:"Password for your InfluxDB instance" env:"INFLUXDB_PASSWORD"`
KapacitorURL string `long:"kapacitor-url" description:"Location of your Kapacitor instance" env:"KAPACITOR_URL"`
KapacitorUsername string `long:"kapacitor-username" description:"Username of your Kapacitor instance" env:"KAPACITOR_USERNAME"`
KapacitorPassword string `long:"kapacitor-password" description:"Password of your Kapacitor instance" env:"KAPACITOR_PASSWORD"`
Develop bool `short:"d" long:"develop" description:"Run server in develop mode."`
BoltPath string `short:"b" long:"bolt-path" description:"Full path to boltDB file (/var/lib/chronograf/chronograf-v1.db)" env:"BOLT_PATH" default:"chronograf-v1.db"`
CannedPath string `short:"c" long:"canned-path" description:"Path to directory of pre-canned application layouts (/usr/share/chronograf/canned)" env:"CANNED_PATH" default:"canned"`
@ -180,7 +186,22 @@ func (s *Server) NewListener() (net.Listener, error) {
// Serve starts and runs the chronograf server
func (s *Server) Serve(ctx context.Context) error {
logger := clog.New(clog.ParseLevel(s.LogLevel))
service := openService(ctx, s.BoltPath, s.CannedPath, logger, s.useAuth())
layoutBuilder := &MultiLayoutBuilder{
Logger: logger,
UUID: &uuid.V4{},
CannedPath: s.CannedPath,
}
sourcesBuilder := &MultiSourceBuilder{
InfluxDBURL: s.InfluxDBURL,
InfluxDBUsername: s.InfluxDBUsername,
InfluxDBPassword: s.InfluxDBPassword,
}
kapacitorBuilder := &MultiKapacitorBuilder{
KapacitorURL: s.KapacitorURL,
KapacitorUsername: s.KapacitorUsername,
KapacitorPassword: s.KapacitorPassword,
}
service := openService(ctx, s.BoltPath, layoutBuilder, sourcesBuilder, kapacitorBuilder, logger, s.useAuth())
basepath = s.Basepath
providerFuncs := []func(func(oauth2.Provider, oauth2.Mux)){}
@ -256,7 +277,7 @@ func (s *Server) Serve(ctx context.Context) error {
return nil
}
func openService(ctx context.Context, boltPath, cannedPath string, logger chronograf.Logger, useAuth bool) Service {
func openService(ctx context.Context, boltPath string, lBuilder LayoutBuilder, sBuilder SourcesBuilder, kapBuilder KapacitorBuilder, logger chronograf.Logger, useAuth bool) Service {
db := bolt.NewClient()
db.Path = boltPath
if err := db.Open(ctx); err != nil {
@ -266,28 +287,34 @@ func openService(ctx context.Context, boltPath, cannedPath string, logger chrono
os.Exit(1)
}
// These apps are those handled from a directory
apps := canned.NewApps(cannedPath, &uuid.V4{}, logger)
// These apps are statically compiled into chronograf
binApps := &canned.BinLayoutStore{
Logger: logger,
layouts, err := lBuilder.Build(db.LayoutStore)
if err != nil {
logger.
WithField("component", "LayoutStore").
Error("Unable to construct a MultiLayoutStore", err)
os.Exit(1)
}
// Acts as a front-end to both the bolt layouts, filesystem layouts and binary statically compiled layouts.
// The idea here is that these stores form a hierarchy in which each is tried sequentially until
// the operation has success. So, the database is preferred over filesystem over binary data.
layouts := &layouts.MultiLayoutStore{
Stores: []chronograf.LayoutStore{
db.LayoutStore,
apps,
binApps,
},
sources, err := sBuilder.Build(db.SourcesStore)
if err != nil {
logger.
WithField("component", "SourcesStore").
Error("Unable to construct a MultiSourcesStore", err)
os.Exit(1)
}
kapacitors, err := kapBuilder.Build(db.ServersStore)
if err != nil {
logger.
WithField("component", "KapacitorStore").
Error("Unable to construct a MultiKapacitorStore", err)
os.Exit(1)
}
return Service{
TimeSeriesClient: &InfluxClient{},
SourcesStore: db.SourcesStore,
ServersStore: db.ServersStore,
SourcesStore: sources,
ServersStore: kapacitors,
UsersStore: db.UsersStore,
LayoutStore: layouts,
DashboardsStore: db.DashboardsStore,

26
server/server_test.go Normal file
View File

@ -0,0 +1,26 @@
package server
import "testing"
// TestLayoutBuilder verifies that MultiLayoutBuilder builds a non-nil store.
func TestLayoutBuilder(t *testing.T) {
	var builder LayoutBuilder = &MultiLayoutBuilder{}
	layout, err := builder.Build(nil)
	if err != nil {
		t.Fatalf("MultiLayoutBuilder can't build a MultiLayoutStore: %v", err)
	}
	if layout == nil {
		t.Fatal("LayoutBuilder should have built a layout")
	}
}
// TestSourcesStoresBuilder verifies that MultiSourceBuilder builds a non-nil store.
func TestSourcesStoresBuilder(t *testing.T) {
	var builder SourcesBuilder = &MultiSourceBuilder{}
	sources, err := builder.Build(nil)
	if err != nil {
		t.Fatalf("MultiSourceBuilder can't build a MultiSourcesStore: %v", err)
	}
	if sources == nil {
		t.Fatal("SourcesBuilder should have built a MultiSourceStore")
	}
}

View File

@ -3010,14 +3010,9 @@
}
},
"Roles": {
"type": "object",
"properties": {
"roles": {
"type": "array",
"items": {
"$ref": "#/definitions/Role"
}
}
"type": "array",
"items": {
"$ref": "#/definitions/Role"
},
"example": {
"roles": [
@ -3178,6 +3173,9 @@
"permissions": {
"$ref": "#/definitions/Permissions"
},
"roles": {
"$ref": "#/definitions/Roles"
},
"links": {
"type": "object",
"description": "URL relations of this user",

View File

@ -111,6 +111,7 @@
"react-tooltip": "^3.2.1",
"redux": "^3.3.1",
"redux-thunk": "^1.0.3",
"rome": "^2.1.22",
"updeep": "^0.13.0"
}
}

View File

@ -30,7 +30,7 @@ import {
NEW_EMPTY_RP,
} from 'src/admin/constants'
let state = undefined
let state
// Users
const u1 = {
@ -57,11 +57,11 @@ const u1 = {
'Monitor',
'CopyShard',
'KapacitorAPI',
'KapacitorConfigAPI'
'KapacitorConfigAPI',
],
scope: 'all',
},
}
},
],
permissions: [],
links: {self: '/chronograf/v1/sources/1/users/acidburn'},
@ -98,16 +98,16 @@ const r1 = {
'Monitor',
'CopyShard',
'KapacitorAPI',
'KapacitorConfigAPI'
'KapacitorConfigAPI',
],
scope: 'all',
},
],
links: {self: '/chronograf/v1/sources/1/roles/hax0r'}
links: {self: '/chronograf/v1/sources/1/roles/hax0r'},
}
const r2 = {
name: 'l33tus3r',
links: {self: '/chronograf/v1/sources/1/roles/l33tus3r'}
links: {self: '/chronograf/v1/sources/1/roles/l33tus3r'},
}
const roles = [r1, r2]
@ -226,7 +226,7 @@ describe('Admin.Reducers', () => {
state = {
users: [
u1,
]
],
}
const actual = reducer(state, addUser())
@ -260,7 +260,7 @@ describe('Admin.Reducers', () => {
const actual = reducer(state, editUser(u2, updates))
const expected = {
users: [{...u2, ...updates}, u1]
users: [{...u2, ...updates}, u1],
}
expect(actual.users).to.deep.equal(expected.users)
@ -270,7 +270,7 @@ describe('Admin.Reducers', () => {
state = {
roles: [
r1,
]
],
}
const actual = reducer(state, addRole())
@ -304,7 +304,7 @@ describe('Admin.Reducers', () => {
const actual = reducer(state, editRole(r2, updates))
const expected = {
roles: [{...r2, ...updates}, r1]
roles: [{...r2, ...updates}, r1],
}
expect(actual.roles).to.deep.equal(expected.roles)
@ -323,7 +323,7 @@ describe('Admin.Reducers', () => {
state = {
roles: [
r1,
]
],
}
const actual = reducer(state, deleteRole(r1))
@ -338,7 +338,7 @@ describe('Admin.Reducers', () => {
state = {
users: [
u1,
]
],
}
const actual = reducer(state, deleteUser(u1))

View File

@ -1,7 +1,7 @@
import _ from 'lodash'
import reducer from 'src/dashboards/reducers/ui'
import timeRanges from 'hson!src/shared/data/timeRanges.hson';
import timeRanges from 'hson!src/shared/data/timeRanges.hson'
import {
loadDashboards,
@ -19,8 +19,8 @@ const noopAction = () => {
return {type: 'NOOP'}
}
let state = undefined
const timeRange = timeRanges[1];
let state
const timeRange = timeRanges[1]
const d1 = {id: 1, cells: [], name: "d1"}
const d2 = {id: 2, cells: [], name: "d2"}
const dashboards = [d1, d2]
@ -117,7 +117,7 @@ describe('DataExplorer.Reducers.UI', () => {
const newCell = {
x: c1.x,
y: c1.y,
name: newCellName
name: newCellName,
}
const dash = {...d1, cells: [c1]}
state = {

View File

@ -1,5 +1,5 @@
import reducer from 'src/data_explorer/reducers/queryConfigs';
import defaultQueryConfig from 'src/utils/defaultQueryConfig';
import reducer from 'src/data_explorer/reducers/queryConfigs'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
chooseNamespace,
chooseMeasurement,
@ -10,108 +10,108 @@ import {
groupByTime,
toggleTagAcceptance,
updateRawQuery,
} from 'src/data_explorer/actions/view';
} from 'src/data_explorer/actions/view'
const fakeAddQueryAction = (panelID, queryID) => {
return {
type: 'ADD_QUERY',
payload: {panelID, queryID},
};
};
}
}
function buildInitialState(queryId, params) {
return Object.assign({}, defaultQueryConfig(queryId), params);
return Object.assign({}, defaultQueryConfig(queryId), params)
}
describe('Chronograf.Reducers.queryConfig', () => {
const queryId = 123;
const queryId = 123
it('can add a query', () => {
const state = reducer({}, fakeAddQueryAction('blah', queryId));
const state = reducer({}, fakeAddQueryAction('blah', queryId))
const actual = state[queryId];
const expected = defaultQueryConfig(queryId);
expect(actual).to.deep.equal(expected);
});
const actual = state[queryId]
const expected = defaultQueryConfig(queryId)
expect(actual).to.deep.equal(expected)
})
describe('choosing db, rp, and measurement', () => {
let state;
let state
beforeEach(() => {
state = reducer({}, fakeAddQueryAction('any', queryId));
});
state = reducer({}, fakeAddQueryAction('any', queryId))
})
it('sets the db and rp', () => {
const newState = reducer(state, chooseNamespace(queryId, {
database: 'telegraf',
retentionPolicy: 'monitor',
}));
}))
expect(newState[queryId].database).to.equal('telegraf');
expect(newState[queryId].retentionPolicy).to.equal('monitor');
});
expect(newState[queryId].database).to.equal('telegraf')
expect(newState[queryId].retentionPolicy).to.equal('monitor')
})
it('sets the measurement', () => {
const newState = reducer(state, chooseMeasurement(queryId, 'mem'));
const newState = reducer(state, chooseMeasurement(queryId, 'mem'))
expect(newState[queryId].measurement).to.equal('mem');
});
});
expect(newState[queryId].measurement).to.equal('mem')
})
})
describe('a query has measurements and fields', () => {
let state;
let state
beforeEach(() => {
const one = reducer({}, fakeAddQueryAction('any', queryId));
const one = reducer({}, fakeAddQueryAction('any', queryId))
const two = reducer(one, chooseNamespace(queryId, {
database: '_internal',
retentionPolicy: 'daily',
}));
const three = reducer(two, chooseMeasurement(queryId, 'disk'));
state = reducer(three, toggleField(queryId, {field: 'a great field', funcs: []}));
});
}))
const three = reducer(two, chooseMeasurement(queryId, 'disk'))
state = reducer(three, toggleField(queryId, {field: 'a great field', funcs: []}))
})
describe('choosing a new namespace', () => {
it('clears out the old measurement and fields', () => { // what about tags?
expect(state[queryId].measurement).to.exist;
expect(state[queryId].fields.length).to.equal(1);
expect(state[queryId].measurement).to.exist
expect(state[queryId].fields.length).to.equal(1)
const newState = reducer(state, chooseNamespace(queryId, {
database: 'newdb',
retentionPolicy: 'newrp',
}));
}))
expect(newState[queryId].measurement).not.to.exist;
expect(newState[queryId].fields.length).to.equal(0);
});
});
expect(newState[queryId].measurement).not.to.exist
expect(newState[queryId].fields.length).to.equal(0)
})
})
describe('choosing a new measurement', () => {
it('leaves the namespace and clears out the old fields', () => { // what about tags?
expect(state[queryId].fields.length).to.equal(1);
expect(state[queryId].fields.length).to.equal(1)
const newState = reducer(state, chooseMeasurement(queryId, 'newmeasurement'));
const newState = reducer(state, chooseMeasurement(queryId, 'newmeasurement'))
expect(state[queryId].database).to.equal(newState[queryId].database);
expect(state[queryId].retentionPolicy).to.equal(newState[queryId].retentionPolicy);
expect(newState[queryId].fields.length).to.equal(0);
});
});
expect(state[queryId].database).to.equal(newState[queryId].database)
expect(state[queryId].retentionPolicy).to.equal(newState[queryId].retentionPolicy)
expect(newState[queryId].fields.length).to.equal(0)
})
})
describe('when the query is part of a kapacitor rule', () => {
it('only allows one field', () => {
expect(state[queryId].fields.length).to.equal(1);
expect(state[queryId].fields.length).to.equal(1)
const isKapacitorRule = true;
const newState = reducer(state, toggleField(queryId, {field: 'a different field', funcs: []}, isKapacitorRule));
const isKapacitorRule = true
const newState = reducer(state, toggleField(queryId, {field: 'a different field', funcs: []}, isKapacitorRule))
expect(newState[queryId].fields.length).to.equal(1);
expect(newState[queryId].fields[0].field).to.equal('a different field');
});
});
});
expect(newState[queryId].fields.length).to.equal(1)
expect(newState[queryId].fields[0].field).to.equal('a different field')
})
})
})
describe('APPLY_FUNCS_TO_FIELD', () => {
it('applies functions to a field without any existing functions', () => {
const queryId = 123;
const queryId = 123
const initialState = {
[queryId]: {
id: 123,
@ -121,23 +121,23 @@ describe('Chronograf.Reducers.queryConfig', () => {
{field: 'f1', funcs: ['fn1', 'fn2']},
{field: 'f2', funcs: ['fn1']},
],
}
};
},
}
const action = applyFuncsToField(queryId, {
field: 'f1',
funcs: ['fn3', 'fn4'],
});
})
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
expect(nextState[queryId].fields).to.eql([
{field: 'f1', funcs: ['fn3', 'fn4']},
{field: 'f2', funcs: ['fn1']},
]);
});
])
})
it('removes all functions and group by time when one field has no funcs applied', () => {
const queryId = 123;
const queryId = 123
const initialState = {
[queryId]: {
id: 123,
@ -151,27 +151,27 @@ describe('Chronograf.Reducers.queryConfig', () => {
time: '1m',
tags: [],
},
}
};
},
}
const action = applyFuncsToField(queryId, {
field: 'f1',
funcs: [],
});
})
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
expect(nextState[queryId].fields).to.eql([
{field: 'f1', funcs: []},
{field: 'f2', funcs: []},
]);
expect(nextState[queryId].groupBy.time).to.equal(null);
});
});
])
expect(nextState[queryId].groupBy.time).to.equal(null)
})
})
describe('CHOOSE_TAG', () => {
it('adds a tag key/value to the query', () => {
const queryId = 123;
const queryId = 123
const initialState = {
[queryId]: buildInitialState(queryId, {
tags: {
@ -179,63 +179,63 @@ describe('Chronograf.Reducers.queryConfig', () => {
k2: ['foo'],
},
}),
};
}
const action = chooseTag(queryId, {
key: 'k1',
value: 'v1',
});
})
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
k1: ['v0', 'v1'],
k2: ['foo'],
});
});
})
})
it('creates a new entry if it\'s the first key', () => {
const queryId = 123;
const queryId = 123
const initialState = {
[queryId]: buildInitialState(queryId, {
tags: {},
}),
};
}
const action = chooseTag(queryId, {
key: 'k1',
value: 'v1',
});
})
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
k1: ['v1'],
});
});
})
})
it('removes a value that is already in the list', () => {
const queryId = 123;
const queryId = 123
const initialState = {
[queryId]: buildInitialState(queryId, {
tags: {
k1: ['v1'],
},
}),
};
}
const action = chooseTag(queryId, {
key: 'k1',
value: 'v1',
});
})
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
// TODO: this should probably remove the `k1` property entirely from the tags object
expect(nextState[queryId].tags).to.eql({});
});
});
expect(nextState[queryId].tags).to.eql({})
})
})
describe('GROUP_BY_TAG', () => {
it('adds a tag key/value to the query', () => {
const queryId = 123;
const queryId = 123
const initialState = {
[queryId]: {
id: 123,
@ -244,20 +244,20 @@ describe('Chronograf.Reducers.queryConfig', () => {
fields: [],
tags: {},
groupBy: {tags: [], time: null},
}
};
const action = groupByTag(queryId, 'k1');
},
}
const action = groupByTag(queryId, 'k1')
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
time: null,
tags: ['k1'],
});
});
})
})
it('removes a tag if the given tag key is already in the GROUP BY list', () => {
const queryId = 123;
const queryId = 123
const initialState = {
[queryId]: {
id: 123,
@ -266,59 +266,59 @@ describe('Chronograf.Reducers.queryConfig', () => {
fields: [],
tags: {},
groupBy: {tags: ['k1'], time: null},
}
};
const action = groupByTag(queryId, 'k1');
},
}
const action = groupByTag(queryId, 'k1')
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
time: null,
tags: [],
});
});
});
})
})
})
describe('TOGGLE_TAG_ACCEPTANCE', () => {
it('it toggles areTagsAccepted', () => {
const queryId = 123;
const queryId = 123
const initialState = {
[queryId]: buildInitialState(queryId),
};
const action = toggleTagAcceptance(queryId);
}
const action = toggleTagAcceptance(queryId)
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
expect(nextState[queryId].areTagsAccepted).to.equal(!initialState[queryId].areTagsAccepted);
});
});
expect(nextState[queryId].areTagsAccepted).to.equal(!initialState[queryId].areTagsAccepted)
})
})
describe('GROUP_BY_TIME', () => {
it('applys the appropriate group by time', () => {
const queryId = 123;
const time = '100y';
const queryId = 123
const time = '100y'
const initialState = {
[queryId]: buildInitialState(queryId),
};
}
const action = groupByTime(queryId, time);
const action = groupByTime(queryId, time)
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy.time).to.equal(time);
});
});
expect(nextState[queryId].groupBy.time).to.equal(time)
})
})
it('updates a query\'s raw text', () => {
const queryId = 123;
const queryId = 123
const initialState = {
[queryId]: buildInitialState(queryId),
};
const text = 'foo';
const action = updateRawQuery(queryId, text);
}
const text = 'foo'
const action = updateRawQuery(queryId, text)
const nextState = reducer(initialState, action);
const nextState = reducer(initialState, action)
expect(nextState[queryId].rawText).to.equal('foo');
});
});
expect(nextState[queryId].rawText).to.equal('foo')
})
})

View File

@ -1,31 +1,31 @@
import reducer from 'src/data_explorer/reducers/timeRange';
import reducer from 'src/data_explorer/reducers/timeRange'
import {
setTimeRange,
} from 'src/data_explorer/actions/view';
} from 'src/data_explorer/actions/view'
const noopAction = () => {
return {type: 'NOOP'};
return {type: 'NOOP'}
}
describe('DataExplorer.Reducers.TimeRange', () => {
it('it sets the default timeRange', () => {
const state = reducer(undefined, noopAction());
const state = reducer(undefined, noopAction())
const expected = {
lower: 'now() - 15m',
upper: null,
};
}
expect(state).to.deep.equal(expected);
});
expect(state).to.deep.equal(expected)
})
it('it can set the time range', () => {
const timeRange = {
lower: 'now() - 5m',
upper: null,
};
const expected = reducer(undefined, setTimeRange(timeRange));
}
const expected = reducer(undefined, setTimeRange(timeRange))
expect(timeRange).to.deep.equal(expected);
});
});
expect(timeRange).to.deep.equal(expected)
})
})

View File

@ -1,40 +1,40 @@
import reducer from 'src/data_explorer/reducers/ui';
import reducer from 'src/data_explorer/reducers/ui'
import {
addQuery,
deleteQuery,
} from 'src/data_explorer/actions/view';
} from 'src/data_explorer/actions/view'
const noopAction = () => {
return {type: 'NOOP'};
return {type: 'NOOP'}
}
let state = undefined;
let state
describe('DataExplorer.Reducers.UI', () => {
it('it sets the default state for UI', () => {
const actual = reducer(state, noopAction());
const actual = reducer(state, noopAction())
const expected = {
queryIDs: [],
};
}
expect(actual).to.deep.equal(expected);
});
expect(actual).to.deep.equal(expected)
})
it('it can add a query', () => {
const actual = reducer(state, addQuery());
expect(actual.queryIDs.length).to.equal(1);
});
const actual = reducer(state, addQuery())
expect(actual.queryIDs.length).to.equal(1)
})
it('it can delete a query', () => {
const queryID = '123';
state = {queryIDs: ['456', queryID]};
const queryID = '123'
state = {queryIDs: ['456', queryID]}
const actual = reducer(state, deleteQuery(queryID));
const actual = reducer(state, deleteQuery(queryID))
const expected = {
queryIDs: ['456'],
};
}
expect(actual).to.deep.equal(expected);
});
});
expect(actual).to.deep.equal(expected)
})
})

View File

@ -1,117 +1,117 @@
import buildInfluxQLQuery from 'utils/influxql';
import defaultQueryConfig from 'src/utils/defaultQueryConfig';
import buildInfluxQLQuery from 'utils/influxql'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
function mergeConfig(options) {
return Object.assign({}, defaultQueryConfig(123), options);
return Object.assign({}, defaultQueryConfig(123), options)
}
describe('buildInfluxQLQuery', () => {
let config, timeBounds;
let config, timeBounds
describe('when information is missing', () => {
it('returns a null select statement', () => {
expect(buildInfluxQLQuery({}, mergeConfig())).to.equal(null);
expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1'}))).to.equal(null); // no measurement
expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1', measurement: 'm1'}))).to.equal(null); // no fields
});
});
expect(buildInfluxQLQuery({}, mergeConfig())).to.equal(null)
expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1'}))).to.equal(null) // no measurement
expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1', measurement: 'm1'}))).to.equal(null) // no fields
})
})
describe('with a database, measurement, field, and NO retention policy', () => {
beforeEach(() => {
config = mergeConfig({database: 'db1', measurement: 'm1', fields: [{field: 'f1', func: null}]});
});
config = mergeConfig({database: 'db1', measurement: 'm1', fields: [{field: 'f1', func: null}]})
})
it('builds the right query', () => {
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1".."m1"');
});
});
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1".."m1"')
})
})
describe('with a database, measurement, retention policy, and field', () => {
beforeEach(() => {
config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: 'f1', func: null}]});
timeBounds = {lower: 'now() - 1hr'};
});
config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: 'f1', func: null}]})
timeBounds = {lower: 'now() - 1hr'}
})
it('builds the right query', () => {
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1"');
});
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1"')
})
it('builds the right query with a time range', () => {
expect(buildInfluxQLQuery(timeBounds, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1" WHERE time > now() - 1hr');
});
});
expect(buildInfluxQLQuery(timeBounds, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1" WHERE time > now() - 1hr')
})
})
describe('when the field is *', () => {
beforeEach(() => {
config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: '*', func: null}]});
});
config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: '*', func: null}]})
})
it('does not quote the star', () => {
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT * FROM "db1"."rp1"."m1"');
});
});
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT * FROM "db1"."rp1"."m1"')
})
})
describe('with a measurement and one field, an aggregate, and a GROUP BY time()', () => {
beforeEach(() => {
config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: []}});
timeBounds = {lower: 'now() - 12h'};
});
config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: []}})
timeBounds = {lower: 'now() - 12h'}
})
it('builds the right query', () => {
const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m)';
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
});
});
const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m)'
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
})
})
describe('with a measurement and one field, an aggregate, and a GROUP BY tags', () => {
beforeEach(() => {
config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: null, tags: ['t1', 't2']}});
timeBounds = {lower: 'now() - 12h'};
});
config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: null, tags: ['t1', 't2']}})
timeBounds = {lower: 'now() - 12h'}
})
it('builds the right query', () => {
const expected = `SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY "t1", "t2"`;
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
});
});
const expected = `SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY "t1", "t2"`
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
})
})
describe('with a measurement, one field, and an upper / lower absolute time range', () => {
beforeEach(() => {
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: []}]});
timeBounds = {lower: "'2015-07-23T15:52:24.447Z'", upper: "'2015-07-24T15:52:24.447Z'"};
});
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: []}]})
timeBounds = {lower: "'2015-07-23T15:52:24.447Z'", upper: "'2015-07-24T15:52:24.447Z'"}
})
it('builds the right query', () => {
const expected = 'SELECT "value" FROM "db1"."rp1"."m0" WHERE time > \'2015-07-23T15:52:24.447Z\' AND time < \'2015-07-24T15:52:24.447Z\'';
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
});
});
const expected = 'SELECT "value" FROM "db1"."rp1"."m0" WHERE time > \'2015-07-23T15:52:24.447Z\' AND time < \'2015-07-24T15:52:24.447Z\''
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
})
})
describe('with a measurement and one field, an aggregate, and a GROUP BY time(), and tags', () => {
beforeEach(() => {
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: ['t1', 't2']}});
timeBounds = {lower: 'now() - 12h'};
});
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: ['t1', 't2']}})
timeBounds = {lower: 'now() - 12h'}
})
it('builds the right query', () => {
const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m), "t1", "t2"';
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
});
});
const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m), "t1", "t2"'
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
})
})
describe('with a measurement and two fields', () => {
beforeEach(() => {
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'f0', funcs: []}, {field: 'f1', funcs: []}]});
timeBounds = {upper: "'2015-02-24T00:00:00Z'"};
});
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'f0', funcs: []}, {field: 'f1', funcs: []}]})
timeBounds = {upper: "'2015-02-24T00:00:00Z'"}
})
it('builds the right query', () => {
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f0", "f1" FROM "db1"."rp1"."m0"');
});
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f0", "f1" FROM "db1"."rp1"."m0"')
})
it('builds the right query with a time range', () => {
const expected = `SELECT "f0", "f1" FROM "db1"."rp1"."m0" WHERE time < '2015-02-24T00:00:00Z'`;
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
});
const expected = `SELECT "f0", "f1" FROM "db1"."rp1"."m0" WHERE time < '2015-02-24T00:00:00Z'`
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
})
describe('with multiple tag pairs', () => {
beforeEach(() => {
@ -128,16 +128,16 @@ describe('buildInfluxQLQuery', () => {
],
k2: [
'v2',
]
],
},
});
timeBounds = {lower: 'now() - 6h'};
});
})
timeBounds = {lower: 'now() - 6h'}
})
it('correctly uses AND/OR to combine pairs', () => {
const expected = `SELECT "f0" FROM "db1"."rp1"."m0" WHERE time > now() - 6h AND ("k1"='v1' OR "k1"='v3' OR "k1"='v4') AND "k2"='v2'`;
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
});
});
});
});
const expected = `SELECT "f0" FROM "db1"."rp1"."m0" WHERE time > now() - 6h AND ("k1"='v1' OR "k1"='v3' OR "k1"='v4') AND "k2"='v2'`
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
})
})
})
})

View File

@ -1,3 +1,3 @@
var context = require.context('./', true, /Spec\.js$/);
context.keys().forEach(context);
module.exports = context;
const context = require.context('./', true, /Spec\.js$/)
context.keys().forEach(context)
module.exports = context

View File

@ -1,6 +1,6 @@
import reducer from 'src/kapacitor/reducers/rules';
import {defaultRuleConfigs} from 'src/kapacitor/constants';
import {ALERT_NODES_ACCESSORS} from 'src/kapacitor/constants';
import reducer from 'src/kapacitor/reducers/rules'
import {defaultRuleConfigs} from 'src/kapacitor/constants'
import {ALERT_NODES_ACCESSORS} from 'src/kapacitor/constants'
import {
chooseTrigger,
@ -12,93 +12,93 @@ import {
updateRuleName,
deleteRuleSuccess,
updateRuleStatusSuccess,
} from 'src/kapacitor/actions/view';
} from 'src/kapacitor/actions/view'
describe('Kapacitor.Reducers.rules', () => {
it('can choose a trigger', () => {
const ruleID = 1;
const initialState = {
[ruleID]: {
id: ruleID,
queryID: 988,
trigger: '',
}
};
const ruleID = 1
const initialState = {
[ruleID]: {
id: ruleID,
queryID: 988,
trigger: '',
},
}
let newState = reducer(initialState, chooseTrigger(ruleID, 'deadman'));
expect(newState[ruleID].trigger).to.equal('deadman');
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.deadman);
let newState = reducer(initialState, chooseTrigger(ruleID, 'deadman'))
expect(newState[ruleID].trigger).to.equal('deadman')
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.deadman)
newState = reducer(initialState, chooseTrigger(ruleID, 'relative'));
expect(newState[ruleID].trigger).to.equal('relative');
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.relative);
newState = reducer(initialState, chooseTrigger(ruleID, 'relative'))
expect(newState[ruleID].trigger).to.equal('relative')
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.relative)
newState = reducer(initialState, chooseTrigger(ruleID, 'threshold'));
expect(newState[ruleID].trigger).to.equal('threshold');
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.threshold);
});
newState = reducer(initialState, chooseTrigger(ruleID, 'threshold'))
expect(newState[ruleID].trigger).to.equal('threshold')
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.threshold)
})
it('can update the values', () => {
const ruleID = 1;
const ruleID = 1
const initialState = {
[ruleID]: {
id: ruleID,
queryID: 988,
trigger: 'deadman',
values: defaultRuleConfigs.deadman
}
};
values: defaultRuleConfigs.deadman,
},
}
const newDeadmanValues = {duration: '5m'};
const newState = reducer(initialState, updateRuleValues(ruleID, 'deadman', newDeadmanValues));
expect(newState[ruleID].values).to.equal(newDeadmanValues);
const newDeadmanValues = {duration: '5m'}
const newState = reducer(initialState, updateRuleValues(ruleID, 'deadman', newDeadmanValues))
expect(newState[ruleID].values).to.equal(newDeadmanValues)
const newRelativeValues = {func: 'max', change: 'change'};
const finalState = reducer(newState, updateRuleValues(ruleID, 'relative', newRelativeValues));
expect(finalState[ruleID].trigger).to.equal('relative');
expect(finalState[ruleID].values).to.equal(newRelativeValues);
});
const newRelativeValues = {func: 'max', change: 'change'}
const finalState = reducer(newState, updateRuleValues(ruleID, 'relative', newRelativeValues))
expect(finalState[ruleID].trigger).to.equal('relative')
expect(finalState[ruleID].values).to.equal(newRelativeValues)
})
it('can update the message', () => {
const ruleID = 1;
const ruleID = 1
const initialState = {
[ruleID]: {
id: ruleID,
queryID: 988,
message: '',
}
};
},
}
const message = 'im a kapacitor rule message';
const newState = reducer(initialState, updateMessage(ruleID, message));
expect(newState[ruleID].message).to.equal(message);
});
const message = 'im a kapacitor rule message'
const newState = reducer(initialState, updateMessage(ruleID, message))
expect(newState[ruleID].message).to.equal(message)
})
it('can update the alerts', () => {
const ruleID = 1;
const ruleID = 1
const initialState = {
[ruleID]: {
id: ruleID,
queryID: 988,
alerts: [],
}
};
},
}
const alerts = ['slack'];
const newState = reducer(initialState, updateAlerts(ruleID, alerts));
expect(newState[ruleID].alerts).to.equal(alerts);
});
const alerts = ['slack']
const newState = reducer(initialState, updateAlerts(ruleID, alerts))
expect(newState[ruleID].alerts).to.equal(alerts)
})
it('can update an alerta alert', () => {
const ruleID = 1;
const ruleID = 1
const initialState = {
[ruleID]: {
id: ruleID,
queryID: 988,
alerts: [],
alertNodes: [],
}
};
},
}
const tickScript = `stream
|alert()
@ -108,39 +108,39 @@ describe('Kapacitor.Reducers.rules', () => {
.environment('Development')
.group('Dev. Servers')
.services('a b c')
`;
`
let newState = reducer(initialState, updateAlertNodes(ruleID, 'alerta', tickScript));
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`;
let actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID]);
let newState = reducer(initialState, updateAlertNodes(ruleID, 'alerta', tickScript))
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`
let actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID])
// Test both data structure and accessor string
expect(actualStr).to.equal(expectedStr);
expect(actualStr).to.equal(expectedStr)
// Test that accessor string is the same if fed back in
newState = reducer(newState, updateAlertNodes(ruleID, 'alerta', actualStr));
actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID]);
expect(actualStr).to.equal(expectedStr);
});
newState = reducer(newState, updateAlertNodes(ruleID, 'alerta', actualStr))
actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID])
expect(actualStr).to.equal(expectedStr)
})
it('can update the name', () => {
const ruleID = 1;
const ruleID = 1
const name = 'New name'
const initialState = {
[ruleID]: {
id: ruleID,
queryID: 988,
name: 'Random album title',
}
};
},
}
const newState = reducer(initialState, updateRuleName(ruleID, name));
expect(newState[ruleID].name).to.equal(name);
});
const newState = reducer(initialState, updateRuleName(ruleID, name))
expect(newState[ruleID].name).to.equal(name)
})
it('it can delete a rule', () => {
const rule1 = 1;
const rule2 = 2;
const rule1 = 1
const rule2 = 2
const initialState = {
[rule1]: {
id: rule1,
@ -148,43 +148,43 @@ describe('Kapacitor.Reducers.rules', () => {
[rule2]: {
id: rule2,
},
};
}
expect(Object.keys(initialState).length).to.equal(2);
const newState = reducer(initialState, deleteRuleSuccess(rule2));
expect(Object.keys(newState).length).to.equal(1);
expect(newState[rule1]).to.equal(initialState[rule1]);
});
expect(Object.keys(initialState).length).to.equal(2)
const newState = reducer(initialState, deleteRuleSuccess(rule2))
expect(Object.keys(newState).length).to.equal(1)
expect(newState[rule1]).to.equal(initialState[rule1])
})
it('can update details', () => {
const ruleID = 1;
const details = 'im some rule details';
const ruleID = 1
const details = 'im some rule details'
const initialState = {
[ruleID]: {
id: ruleID,
queryID: 988,
details: '',
}
};
},
}
const newState = reducer(initialState, updateDetails(ruleID, details));
expect(newState[ruleID].details).to.equal(details);
});
const newState = reducer(initialState, updateDetails(ruleID, details))
expect(newState[ruleID].details).to.equal(details)
})
it('can update status', () => {
const ruleID = 1;
const status = 'enabled';
const ruleID = 1
const status = 'enabled'
const initialState = {
[ruleID]: {
id: ruleID,
queryID: 988,
status: 'disabled',
}
};
},
}
const newState = reducer(initialState, updateRuleStatusSuccess(ruleID, status));
expect(newState[ruleID].status).to.equal(status);
});
});
const newState = reducer(initialState, updateRuleStatusSuccess(ruleID, status))
expect(newState[ruleID].status).to.equal(status)
})
})

View File

@ -1,71 +1,71 @@
import {diskBytesFromShard, diskBytesFromShardForDatabase} from 'shared/parsing/diskBytes';
import {diskBytesFromShard, diskBytesFromShardForDatabase} from 'shared/parsing/diskBytes'
describe('diskBytesFromShard', () => {
it('sums all the disk bytes in multiple series', () => {
const response = {"results":[
{"series":[{"name":"shard","tags":{"clusterID":"6272208615254493595","database":"_internal","engine":"tsm1","hostname":"WattsInfluxDB","id":"1","nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/_internal/monitor/1","retentionPolicy":"monitor"},"columns":["time","last"],"values":[[1464811503000000000,100]]}]},
{"series":[{"name":"shard","tags":{"clusterID":"6272208615254493595","database":"telegraf","engine":"tsm1","hostname":"WattsInfluxDB","id":"2","nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/telegraf/default/2","retentionPolicy":"default"},"columns":["time","last"],"values":[[1464811503000000000,200]]}]},
]};
const response = {results: [
{series: [{name: "shard", tags: {clusterID: "6272208615254493595", database: "_internal", engine: "tsm1", hostname: "WattsInfluxDB", id: "1", nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [[1464811503000000000, 100]]}]},
{series: [{name: "shard", tags: {clusterID: "6272208615254493595", database: "telegraf", engine: "tsm1", hostname: "WattsInfluxDB", id: "2", nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/telegraf/default/2", retentionPolicy: "default"}, columns: ["time", "last"], values: [[1464811503000000000, 200]]}]},
]}
const result = diskBytesFromShard(response);
const expectedTotal = 300;
const result = diskBytesFromShard(response)
const expectedTotal = 300
expect(result.errors).to.deep.equal([]);
expect(result.bytes).to.equal(expectedTotal);
});
expect(result.errors).to.deep.equal([])
expect(result.bytes).to.equal(expectedTotal)
})
it('returns emtpy with empty response', () => {
const response = {"results":[{}]};
const response = {results: [{}]}
const result = diskBytesFromShard(response);
const result = diskBytesFromShard(response)
expect(result.errors).to.deep.equal([]);
expect(result.bytes).to.equal(0);
});
expect(result.errors).to.deep.equal([])
expect(result.bytes).to.equal(0)
})
it('exposes the server error', () => {
const response = {"results":[{"error":"internal server error?"}]};
const response = {results: [{error: "internal server error?"}]}
const result = diskBytesFromShard(response);
const result = diskBytesFromShard(response)
expect(result.errors).to.deep.equal(['internal server error?']);
expect(result.bytes).to.equal(0);
});
});
expect(result.errors).to.deep.equal(['internal server error?'])
expect(result.bytes).to.equal(0)
})
})
describe('diskBytesFromShardForDatabase', () => {
it('return parses data as expected', () => {
const response = {"results":[{"series":[
{"name":"shard","tags":{"nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/_internal/monitor/1","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",100]]},
{"name":"shard","tags":{"nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/_internal/monitor/3","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",200]]},
{"name":"shard","tags":{"nodeID":"localhost:8188","path":"/Users/watts/.influxdb/data/_internal/monitor/1","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",100]]},
{"name":"shard","tags":{"nodeID":"localhost:8188","path":"/Users/watts/.influxdb/data/_internal/monitor/3","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",200]]},
]}]};
const response = {results: [{series: [
{name: "shard", tags: {nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 100]]},
{name: "shard", tags: {nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/3", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 200]]},
{name: "shard", tags: {nodeID: "localhost:8188", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 100]]},
{name: "shard", tags: {nodeID: "localhost:8188", path: "/Users/watts/.influxdb/data/_internal/monitor/3", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 200]]},
]}]}
const result = diskBytesFromShardForDatabase(response);
const result = diskBytesFromShardForDatabase(response)
const expected = {
1: [{nodeID: 'localhost:8088', diskUsage: 100},{nodeID: 'localhost:8188', diskUsage: 100}],
3: [{nodeID: 'localhost:8088', diskUsage: 200},{nodeID: 'localhost:8188', diskUsage: 200}],
};
1: [{nodeID: 'localhost:8088', diskUsage: 100}, {nodeID: 'localhost:8188', diskUsage: 100}],
3: [{nodeID: 'localhost:8088', diskUsage: 200}, {nodeID: 'localhost:8188', diskUsage: 200}],
}
expect(result.shardData).to.deep.equal(expected);
});
expect(result.shardData).to.deep.equal(expected)
})
it('returns emtpy with empty response', () => {
const response = {"results":[{}]};
const response = {results: [{}]}
const result = diskBytesFromShardForDatabase(response);
const result = diskBytesFromShardForDatabase(response)
expect(result.errors).to.deep.equal([]);
expect(result.shardData).to.deep.equal({});
});
expect(result.errors).to.deep.equal([])
expect(result.shardData).to.deep.equal({})
})
it('exposes the server error', () => {
const response = {"results":[{"error":"internal server error?"}]};
const response = {results: [{error: "internal server error?"}]}
const result = diskBytesFromShardForDatabase(response);
const result = diskBytesFromShardForDatabase(response)
expect(result.errors).to.deep.equal(['internal server error?']);
expect(result.shardData).to.deep.equal({});
});
});
expect(result.errors).to.deep.equal(['internal server error?'])
expect(result.shardData).to.deep.equal({})
})
})

View File

@ -1,23 +1,23 @@
import getRange from 'shared/parsing/getRangeForDygraph';
import getRange from 'shared/parsing/getRangeForDygraph'
describe('getRangeForDygraphSpec', () => {
it('gets the range for one timeSeries', () => {
const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]];
const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]]
const actual = getRange(timeSeries);
const expected = [1, 3];
const actual = getRange(timeSeries)
const expected = [1, 3]
expect(actual).to.deep.equal(expected);
});
expect(actual).to.deep.equal(expected)
})
it('does not get range when a range is provided', () => {
const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]];
const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]]
const providedRange = [0, 4];
const actual = getRange(timeSeries, providedRange);
const providedRange = [0, 4]
const actual = getRange(timeSeries, providedRange)
expect(actual).to.deep.equal(providedRange);
});
expect(actual).to.deep.equal(providedRange)
})
it('gets the range for multiple timeSeries', () => {
const timeSeries = [
@ -25,63 +25,63 @@ describe('getRangeForDygraphSpec', () => {
[new Date(1000), 100, 1],
[new Date(2000), null, 2],
[new Date(3000), 200, 3],
];
]
const actual = getRange(timeSeries);
const expected = [1, 200];
const actual = getRange(timeSeries)
const expected = [1, 200]
expect(actual).to.deep.equal(expected);
});
expect(actual).to.deep.equal(expected)
})
it('returns a null array of two elements when min and max are equal', () => {
const timeSeries = [[new Date(1000), 1], [new Date(2000), 1], [new Date(3000), 1]];
const actual = getRange(timeSeries);
const expected = [null, null];
const timeSeries = [[new Date(1000), 1], [new Date(2000), 1], [new Date(3000), 1]]
const actual = getRange(timeSeries)
const expected = [null, null]
expect(actual).to.deep.equal(expected);
});
expect(actual).to.deep.equal(expected)
})
describe('when user provides a rule value', () => {
const defaultMax = 20;
const defaultMin = -10;
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]];
const defaultMax = 20
const defaultMin = -10
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]]
it('can pad positive values', () => {
const value = 20;
const [min, max] = getRange(timeSeries, undefined, value);
const value = 20
const [min, max] = getRange(timeSeries, undefined, value)
expect(min).to.equal(defaultMin);
expect(max).to.be.above(defaultMax);
});
expect(min).to.equal(defaultMin)
expect(max).to.be.above(defaultMax)
})
it('can pad negative values', () => {
const value = -10;
const [min, max] = getRange(timeSeries, undefined, value);
const value = -10
const [min, max] = getRange(timeSeries, undefined, value)
expect(min).to.be.below(defaultMin);
expect(max).to.equal(defaultMax);
});
});
expect(min).to.be.below(defaultMin)
expect(max).to.equal(defaultMax)
})
})
describe('when user provides a rule range value', () => {
const defaultMax = 20;
const defaultMin = -10;
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]];
const defaultMax = 20
const defaultMin = -10
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]]
it('can pad positive values', () => {
const rangeValue = 20;
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue);
const rangeValue = 20
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue)
expect(min).to.equal(defaultMin);
expect(max).to.be.above(defaultMax);
});
expect(min).to.equal(defaultMin)
expect(max).to.be.above(defaultMax)
})
it('can pad negative values', () => {
const rangeValue = -10;
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue);
const rangeValue = -10
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue)
expect(min).to.be.below(defaultMin);
expect(max).to.equal(defaultMax);
});
});
});
expect(min).to.be.below(defaultMin)
expect(max).to.equal(defaultMax)
})
})
})

View File

@ -1,4 +1,4 @@
import {parseAlerta} from 'src/shared/parsing/parseAlerta';
import {parseAlerta} from 'src/shared/parsing/parseAlerta'
it('can parse an alerta tick script', () => {
const tickScript = `stream
@ -9,50 +9,50 @@ it('can parse an alerta tick script', () => {
.environment('Development')
.group('Dev. Servers')
.services('a b c')
`;
`
let actualObj = parseAlerta(tickScript);
let actualObj = parseAlerta(tickScript)
const expectedObj = [
{
"name": "resource",
"args": [
"Hostname or service"
]
name: "resource",
args: [
"Hostname or service",
],
},
{
"name": "event",
"args": [
"Something went wrong"
]
name: "event",
args: [
"Something went wrong",
],
},
{
"name": "environment",
"args": [
"Development"
]
name: "environment",
args: [
"Development",
],
},
{
"name": "group",
"args": [
"Dev. Servers"
]
name: "group",
args: [
"Dev. Servers",
],
},
{
"name": "services",
"args": [
name: "services",
args: [
"a",
"b",
"c"
]
}
];
"c",
],
},
]
// Test data structure
expect(actualObj).to.deep.equal(expectedObj);
expect(actualObj).to.deep.equal(expectedObj)
// Test that data structure is the same if fed back in
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`;
actualObj = parseAlerta(expectedStr);
expect(actualObj).to.deep.equal(expectedObj);
});
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`
actualObj = parseAlerta(expectedStr)
expect(actualObj).to.deep.equal(expectedObj)
})

View File

@ -1,32 +1,32 @@
import showDatabases from 'shared/parsing/showDatabases';
import showDatabases from 'shared/parsing/showDatabases'
describe('showDatabases', () => {
it('exposes all the database properties', () => {
const response = {"results":[{"series":[{"columns":["name"],"values":[["mydb1"], ["mydb2"]]}]}]};
const response = {results: [{series: [{columns: ["name"], values: [["mydb1"], ["mydb2"]]}]}]}
const result = showDatabases(response);
const result = showDatabases(response)
expect(result.errors).to.deep.equal([]);
expect(result.databases.length).to.equal(2);
expect(result.databases[0]).to.equal('mydb1');
expect(result.databases[1]).to.equal('mydb2');
});
expect(result.errors).to.deep.equal([])
expect(result.databases.length).to.equal(2)
expect(result.databases[0]).to.equal('mydb1')
expect(result.databases[1]).to.equal('mydb2')
})
it('returns an empty array when there are no databases', () => {
const response = {"results":[{"series":[{"columns":["name"]}]}]};
const response = {results: [{series: [{columns: ["name"]}]}]}
const result = showDatabases(response);
const result = showDatabases(response)
expect(result.errors).to.deep.equal([]);
expect(result.databases).to.deep.equal([]);
});
expect(result.errors).to.deep.equal([])
expect(result.databases).to.deep.equal([])
})
it('exposes the server error', () => {
const response = {"results":[{"error":"internal server error?"}]};
const response = {results: [{error: "internal server error?"}]}
const result = showDatabases(response);
const result = showDatabases(response)
expect(result.errors).to.deep.equal(['internal server error?']);
expect(result.databases).to.deep.equal([]);
});
});
expect(result.errors).to.deep.equal(['internal server error?'])
expect(result.databases).to.deep.equal([])
})
})

View File

@ -1,39 +1,39 @@
import parseShowFieldKeys from 'shared/parsing/showFieldKeys';
import parseShowFieldKeys from 'shared/parsing/showFieldKeys'
describe('parseShowFieldKeys', () => {
it('parses a single result', () => {
const response = {"results":[{"series":[{"name":"m1","columns":["fieldKey"],"values":[["f1"],["f2"]]}]}]};
const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}]}
const result = parseShowFieldKeys(response);
expect(result.errors).to.eql([]);
const result = parseShowFieldKeys(response)
expect(result.errors).to.eql([])
expect(result.fieldSets).to.eql({
m1: ['f1', 'f2'],
});
});
})
})
it('parses multiple results', () => {
const response = {"results":[{"series":[{"name":"m1","columns":["fieldKey"],"values":[["f1"],["f2"]]}]},{"series":[{"name":"m2","columns":["fieldKey"],"values":[["f3"],["f4"]]}]}]};
const result = parseShowFieldKeys(response);
expect(result.errors).to.eql([]);
const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}, {series: [{name: "m2", columns: ["fieldKey"], values: [["f3"], ["f4"]]}]}]}
const result = parseShowFieldKeys(response)
expect(result.errors).to.eql([])
expect(result.fieldSets).to.eql({
m1: ['f1', 'f2'],
m2: ['f3', 'f4'],
});
});
})
})
it('parses multiple errors', () => {
const response = {"results":[{"error": "measurement not found: m1"}, {"error": "measurement not found: m2"}]};
const result = parseShowFieldKeys(response);
expect(result.errors).to.eql(['measurement not found: m1', 'measurement not found: m2']);
expect(result.fieldSets).to.eql({});
});
const response = {results: [{error: "measurement not found: m1"}, {error: "measurement not found: m2"}]}
const result = parseShowFieldKeys(response)
expect(result.errors).to.eql(['measurement not found: m1', 'measurement not found: m2'])
expect(result.fieldSets).to.eql({})
})
it('parses a mix of results and errors', () => {
const response = {"results":[{"series":[{"name":"m1","columns":["fieldKey"],"values":[["f1"],["f2"]]}]},{"error": "measurement not found: m2"}]};
const result = parseShowFieldKeys(response);
expect(result.errors).to.eql(['measurement not found: m2']);
const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}, {error: "measurement not found: m2"}]}
const result = parseShowFieldKeys(response)
expect(result.errors).to.eql(['measurement not found: m2'])
expect(result.fieldSets).to.eql({
m1: ['f1', 'f2'],
});
});
});
})
})
})

View File

@ -1,34 +1,34 @@
import showQueriesParser from 'shared/parsing/showQueries';
import showQueriesParser from 'shared/parsing/showQueries'
describe('showQueriesParser', () => {
it('exposes all currently running queries', () => {
const response = {"results":[{"series":[{"columns":["qid","query","database","duration"],"values":[[1,"SHOW QUERIES","db1","1s"], [2,"SELECT foo FROM bar","db1","2s"]]}]}]};
const response = {results: [{series: [{columns: ["qid", "query", "database", "duration"], values: [[1, "SHOW QUERIES", "db1", "1s"], [2, "SELECT foo FROM bar", "db1", "2s"]]}]}]}
const result = showQueriesParser(response);
const result = showQueriesParser(response)
expect(result.errors).to.eql([]);
expect(result.queries.length).to.equal(2);
expect(result.errors).to.eql([])
expect(result.queries.length).to.equal(2)
expect(result.queries[0]).to.eql({
id: 1,
database: 'db1',
query: 'SHOW QUERIES',
duration: '1s',
});
})
expect(result.queries[1]).to.eql({
id: 2,
database: 'db1',
query: 'SELECT foo FROM bar',
duration: '2s',
});
})
expect({foo: 'bar'}).to.eql({foo: 'bar'})
});
})
it('exposes the server error', () => {
const response = {"results":[{"error":"internal server error?"}]};
const response = {results: [{error: "internal server error?"}]}
const result = showQueriesParser(response);
const result = showQueriesParser(response)
expect(result.errors).to.eql(['internal server error?']);
expect(result.queries).to.eql([]);
});
});
expect(result.errors).to.eql(['internal server error?'])
expect(result.queries).to.eql([])
})
})

View File

@ -1,27 +1,27 @@
import parseShowTagKeys from 'shared/parsing/showTagKeys';
import parseShowTagKeys from 'shared/parsing/showTagKeys'
describe('parseShowTagKeys', () => {
it('parses the tag keys', () => {
const response = {"results":[{"series":[{"name":"cpu","columns":["tagKey"],"values":[["cpu"],["host"]]}]}]};
const response = {results: [{series: [{name: "cpu", columns: ["tagKey"], values: [["cpu"], ["host"]]}]}]}
const result = parseShowTagKeys(response);
expect(result.errors).to.eql([]);
expect(result.tagKeys).to.eql(['cpu', 'host']);
});
const result = parseShowTagKeys(response)
expect(result.errors).to.eql([])
expect(result.tagKeys).to.eql(['cpu', 'host'])
})
it('handles empty results', () => {
const response = {"results":[{}]};
const response = {results: [{}]}
const result = parseShowTagKeys(response);
expect(result.errors).to.eql([]);
expect(result.tagKeys).to.eql([]);
});
const result = parseShowTagKeys(response)
expect(result.errors).to.eql([])
expect(result.tagKeys).to.eql([])
})
it('handles errors', () => {
const response = {"results":[{"error": "influxdb error"}]};
const response = {results: [{error: "influxdb error"}]}
const result = parseShowTagKeys(response);
expect(result.errors).to.eql([response.results[0].error]);
expect(result.tagKeys).to.eql([]);
});
});
const result = parseShowTagKeys(response)
expect(result.errors).to.eql([response.results[0].error])
expect(result.tagKeys).to.eql([])
})
})

View File

@ -1,38 +1,38 @@
import showTagValuesParser from 'shared/parsing/showTagValues';
import showTagValuesParser from 'shared/parsing/showTagValues'
describe('showTagValuesParser', () => {
it('handles an empty result set', () => {
const response = {"results":[{}]};
const response = {results: [{}]}
const result = showTagValuesParser(response);
const result = showTagValuesParser(response)
expect(result.errors).to.eql([]);
expect(result.tags).to.eql({});
});
expect(result.errors).to.eql([])
expect(result.tags).to.eql({})
})
it('returns an object of tag keys mapped to their values', () => {
const response = {
"results": [
results: [
{
"series": [
series: [
{
"name": "measurementA",
"columns": ["key","value"],
"values": [
name: "measurementA",
columns: ["key", "value"],
values: [
["host", "hostA"],
["host", "hostB"],
["cpu", "cpu0"],
["cpu", "cpu1"],
]
}
]
}
]
};
],
},
],
},
],
}
const result = showTagValuesParser(response);
const result = showTagValuesParser(response)
expect(result.errors).to.eql([]);
expect(result.errors).to.eql([])
expect(result.tags).to.eql({
host: [
'hostA',
@ -42,6 +42,6 @@ describe('showTagValuesParser', () => {
'cpu0',
'cpu1',
],
});
});
});
})
})
})

View File

@ -1,7 +1,7 @@
import {
buildRoles,
buildClusterAccounts,
} from 'src/shared/presenters';
} from 'src/shared/presenters'
describe('Presenters', function() {
describe('roles utils', function() {
@ -17,13 +17,13 @@ describe('Presenters', function() {
],
},
},
];
]
const actual = buildRoles(roles);
const actual = buildRoles(roles)
expect(actual[0].users).to.eql([]);
});
});
expect(actual[0].users).to.eql([])
})
})
describe('when a role has no permissions', function() {
it('sets a role\'s permissions as an empty array', function() {
@ -35,47 +35,47 @@ describe('Presenters', function() {
"will@influxdb.com",
],
},
];
]
const actual = buildRoles(roles);
const actual = buildRoles(roles)
expect(actual[0].permissions).to.eql([]);
});
});
expect(actual[0].permissions).to.eql([])
})
})
describe('when a role has users and permissions', function() {
beforeEach(function() {
const roles = [
{
"name": "Marketing",
"permissions": {
name: "Marketing",
permissions: {
"": [
"ViewAdmin",
],
"db1": [
"ReadData"
db1: [
"ReadData",
],
"db2": [
db2: [
"ReadData",
"AddRemoveNode",
],
},
"users": [
users: [
"roley@influxdb.com",
"will@influxdb.com"
]
"will@influxdb.com",
],
},
];
]
this.roles = buildRoles(roles);
});
this.roles = buildRoles(roles)
})
it('each role has a name and a list of users (if they exist)', function() {
const role = this.roles[0];
expect(role.name).to.equal('Marketing');
expect(role.users).to.contain("roley@influxdb.com");
expect(role.users).to.contain("will@influxdb.com");
});
const role = this.roles[0]
expect(role.name).to.equal('Marketing')
expect(role.users).to.contain("roley@influxdb.com")
expect(role.users).to.contain("will@influxdb.com")
})
it('transforms permissions into a list of objects and each permission has a list of resources', function() {
expect(this.roles[0].permissions).to.eql([
@ -97,11 +97,11 @@ describe('Presenters', function() {
description: 'Can add/remove nodes from a cluster',
resources: ['db2'],
},
]);
});
});
});
});
])
})
})
})
})
describe('cluster utils', function() {
describe('buildClusterAccounts', function() {
@ -109,50 +109,50 @@ describe('Presenters', function() {
it('adds role information to each cluster account and parses permissions', function() {
const users = [
{
"name":"jon@example.com",
"hash":"xxxxx",
"permissions": {
name: "jon@example.com",
hash: "xxxxx",
permissions: {
"": [
"ViewAdmin",
],
"db1": [
db1: [
"ReadData",
],
}
},
},
{
"name":"ned@example.com",
"hash":"xxxxx"
}
];
name: "ned@example.com",
hash: "xxxxx",
},
]
const roles = [
{
"name":"Admin",
"permissions":{
"db2": [
name: "Admin",
permissions: {
db2: [
"ViewAdmin",
]
],
},
"users":[
users: [
"jon@example.com",
"ned@example.com",
]
],
},
{
"name":"Marketing",
"permissions": {
"db3": [
name: "Marketing",
permissions: {
db3: [
"ReadData",
],
},
"users": [
users: [
"jon@example.com",
]
}
],
},
]
const actual = buildClusterAccounts(users, roles);
const actual = buildClusterAccounts(users, roles)
const expected = [
{
@ -183,7 +183,7 @@ describe('Presenters', function() {
resources: ['db2'],
},
],
users:[
users: [
"jon@example.com",
"ned@example.com",
],
@ -198,10 +198,10 @@ describe('Presenters', function() {
resources: ['db3'],
},
],
users:[
users: [
"jon@example.com",
]
}
],
},
],
},
{
@ -219,38 +219,38 @@ describe('Presenters', function() {
resources: ['db2'],
},
],
users:[
users: [
"jon@example.com",
"ned@example.com",
],
},
],
}
];
},
]
expect(actual).to.eql(expected);
});
expect(actual).to.eql(expected)
})
it('can handle empty results for users and roles', function() {
const users = undefined;
const roles = undefined;
const users = undefined
const roles = undefined
const actual = buildClusterAccounts(users, roles);
const actual = buildClusterAccounts(users, roles)
expect(actual).to.eql([]);
});
expect(actual).to.eql([])
})
it('sets roles to an empty array if a user has no roles', function() {
const users = [{
name: "ned@example.com",
hash: "xxxxx",
}];
const roles = [];
}]
const roles = []
const actual = buildClusterAccounts(users, roles);
const actual = buildClusterAccounts(users, roles)
expect(actual[0].roles).to.eql([]);
});
});
});
});
expect(actual[0].roles).to.eql([])
})
})
})
})

View File

@ -12,29 +12,29 @@ describe('Shared.Reducers.appReducer', () => {
inPresentationMode: false,
},
persisted: {
autoRefresh: 0
autoRefresh: 0,
},
}
it('should handle ENABLE_PRESENTATION_MODE', () => {
const reducedState = appReducer(initialState, enablePresentationMode());
const reducedState = appReducer(initialState, enablePresentationMode())
expect(reducedState.ephemeral.inPresentationMode).to.equal(true);
expect(reducedState.ephemeral.inPresentationMode).to.equal(true)
})
it('should handle DISABLE_PRESENTATION_MODE', () => {
Object.assign(initialState, {ephemeral: {inPresentationMode: true}})
const reducedState = appReducer(initialState, disablePresentationMode());
const reducedState = appReducer(initialState, disablePresentationMode())
expect(reducedState.ephemeral.inPresentationMode).to.equal(false);
expect(reducedState.ephemeral.inPresentationMode).to.equal(false)
})
it('should handle SET_AUTOREFRESH', () => {
const expectedMs = 15000
const reducedState = appReducer(initialState, setAutoRefresh(expectedMs));
const reducedState = appReducer(initialState, setAutoRefresh(expectedMs))
expect(reducedState.persisted.autoRefresh).to.equal(expectedMs);
expect(reducedState.persisted.autoRefresh).to.equal(expectedMs)
})
})

View File

@ -1,47 +1,47 @@
import reducer from 'src/shared/reducers/sources';
import reducer from 'src/shared/reducers/sources'
import {
loadSources,
updateSource,
addSource,
} from 'src/shared/actions/sources';
} from 'src/shared/actions/sources'
describe('Shared.Reducers.sources', () => {
it('can correctly show default sources when adding a source', () => {
let state = [];
let state = []
state = reducer(state, addSource({
id: '1',
default: true,
}));
"default": true,
}))
state = reducer(state, addSource({
id: '2',
default: true,
}));
"default": true,
}))
expect(state.filter((s) => s.default).length).to.equal(1);
});
expect(state.filter((s) => s.default).length).to.equal(1)
})
it('can correctly show default sources when updating a source', () => {
let state = [];
let state = []
state = reducer(state, addSource({
id: '1',
default: true,
}));
"default": true,
}))
state = reducer(state, addSource({
id: '2',
default: true,
}));
"default": true,
}))
state = reducer(state, updateSource({
id: '1',
default: true,
}));
"default": true,
}))
expect(state.find(({id}) => id === '1').default).to.equal(true);
expect(state.find(({id}) => id === '2').default).to.equal(false);
});
});
expect(state.find(({id}) => id === '1').default).to.equal(true)
expect(state.find(({id}) => id === '2').default).to.equal(false)
})
})

View File

@ -1,13 +1,13 @@
window.then = function(cb, done) {
window.setTimeout(function() {
cb();
cb()
if (typeof done === 'function') {
done();
done()
}
}, 0);
};
}, 0)
}
var chai = require('chai');
chai.use(require('sinon-chai'));
const chai = require('chai')
chai.use(require('sinon-chai'))
global.expect = chai.expect;
global.expect = chai.expect

View File

@ -1,39 +1,39 @@
import {formatBytes, formatRPDuration} from 'utils/formatting';
import {formatBytes, formatRPDuration} from 'utils/formatting'
describe('Formatting helpers', () => {
describe('formatBytes', () => {
it('returns null when passed a falsey value', () => {
const actual = formatBytes(null);
const actual = formatBytes(null)
expect(actual).to.equal(null);
});
expect(actual).to.equal(null)
})
it('returns the correct value when passed 0', () => {
const actual = formatBytes(0);
const actual = formatBytes(0)
expect(actual).to.equal('0 Bytes');
});
expect(actual).to.equal('0 Bytes')
})
it('converts a raw byte value into it\'s most appropriate unit', () => {
expect(formatBytes(1000)).to.equal('1 KB');
expect(formatBytes(1000000)).to.equal('1 MB');
expect(formatBytes(1000000000)).to.equal('1 GB');
});
});
expect(formatBytes(1000)).to.equal('1 KB')
expect(formatBytes(1000000)).to.equal('1 MB')
expect(formatBytes(1000000000)).to.equal('1 GB')
})
})
describe('formatRPDuration', () => {
it("returns 'infinite' for a retention policy with a value of '0'", () => {
const actual = formatRPDuration('0')
expect(actual).to.equal('∞');
});
expect(actual).to.equal('∞')
})
it('correctly formats retention policy durations', () => {
expect(formatRPDuration('24h0m0s')).to.equal('24h');
expect(formatRPDuration('24h0m0s')).to.equal('24h')
expect(formatRPDuration('168h0m0s')).to.equal('7d');
expect(formatRPDuration('168h0m0s')).to.equal('7d')
expect(formatRPDuration('200h32m3s')).to.equal('8d8h32m3s');
});
});
});
expect(formatRPDuration('200h32m3s')).to.equal('8d8h32m3s')
})
})
})

View File

@ -1,39 +1,36 @@
import timeSeriesToDygraph from 'src/utils/timeSeriesToDygraph';
import {STROKE_WIDTH} from 'src/shared/constants';
const {light: strokeWidth} = STROKE_WIDTH;
import timeSeriesToDygraph from 'src/utils/timeSeriesToDygraph'
describe('timeSeriesToDygraph', () => {
it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
const influxResponse = [
{
"response":
response:
{
"results": [
results: [
{
"series": [
series: [
{
"name":"m1",
"columns": ["time","f1"],
"values": [[1000, 1],[2000, 2]],
name: "m1",
columns: ["time", "f1"],
values: [[1000, 1], [2000, 2]],
},
]
],
},
{
"series": [
series: [
{
"name":"m1",
"columns": ["time","f2"],
"values": [[2000, 3],[4000, 4]],
name: "m1",
columns: ["time", "f2"],
values: [[2000, 3], [4000, 4]],
},
]
],
},
],
},
}
];
},
]
const actual = timeSeriesToDygraph(influxResponse);
const actual = timeSeriesToDygraph(influxResponse)
const expected = {
labels: [
@ -49,40 +46,38 @@ describe('timeSeriesToDygraph', () => {
dygraphSeries: {
'm1.f1': {
axis: 'y',
strokeWidth,
},
'm1.f2': {
axis: 'y',
strokeWidth,
},
},
};
}
expect(actual).to.deep.equal(expected);
});
expect(actual).to.deep.equal(expected)
})
it('can sort numerical timestamps correctly', () => {
const influxResponse = [
{
"response":
response:
{
"results": [
results: [
{
"series": [
series: [
{
"name":"m1",
"columns": ["time","f1"],
"values": [[100, 1],[3000, 3],[200, 2]],
name: "m1",
columns: ["time", "f1"],
values: [[100, 1], [3000, 3], [200, 2]],
},
]
],
},
],
},
}
];
},
]
const actual = timeSeriesToDygraph(influxResponse);
const actual = timeSeriesToDygraph(influxResponse)
const expected = {
labels: [
@ -94,113 +89,110 @@ describe('timeSeriesToDygraph', () => {
[new Date(200), 2],
[new Date(3000), 3],
],
};
}
expect(actual.timeSeries).to.deep.equal(expected.timeSeries);
});
expect(actual.timeSeries).to.deep.equal(expected.timeSeries)
})
it('can parse multiple responses into two axes', () => {
const influxResponse = [
{
"response":
response:
{
"results": [
results: [
{
"series": [
series: [
{
"name":"m1",
"columns": ["time","f1"],
"values": [[1000, 1],[2000, 2]],
name: "m1",
columns: ["time", "f1"],
values: [[1000, 1], [2000, 2]],
},
]
],
},
{
"series": [
series: [
{
"name":"m1",
"columns": ["time","f2"],
"values": [[2000, 3],[4000, 4]],
name: "m1",
columns: ["time", "f2"],
values: [[2000, 3], [4000, 4]],
},
]
],
},
],
},
},
{
"response":
response:
{
"results": [
results: [
{
"series": [
series: [
{
"name":"m3",
"columns": ["time","f3"],
"values": [[1000, 1],[2000, 2]],
name: "m3",
columns: ["time", "f3"],
values: [[1000, 1], [2000, 2]],
},
]
],
},
],
},
},
];
]
const actual = timeSeriesToDygraph(influxResponse);
const actual = timeSeriesToDygraph(influxResponse)
const expected = {
'm1.f1': {
axis: 'y',
strokeWidth,
},
'm1.f2': {
axis: 'y',
strokeWidth,
},
'm3.f3': {
axis: 'y2',
strokeWidth,
},
};
'm1.f1': {
axis: 'y',
},
'm1.f2': {
axis: 'y',
},
'm3.f3': {
axis: 'y2',
},
}
expect(actual.dygraphSeries).to.deep.equal(expected);
});
expect(actual.dygraphSeries).to.deep.equal(expected)
})
it('can parse multiple responses with the same field and measurement', () => {
const influxResponse = [
{
"response":
response:
{
"results": [
results: [
{
"series": [
series: [
{
"name":"m1",
"columns": ["time","f1"],
"values": [[1000, 1],[2000, 2]],
name: "m1",
columns: ["time", "f1"],
values: [[1000, 1], [2000, 2]],
},
]
],
},
],
},
},
{
"response":
response:
{
"results": [
results: [
{
"series": [
series: [
{
"name":"m1",
"columns": ["time","f1"],
"values": [[2000, 3],[4000, 4]],
name: "m1",
columns: ["time", "f1"],
values: [[2000, 3], [4000, 4]],
},
]
],
},
],
},
},
];
]
const actual = timeSeriesToDygraph(influxResponse);
const actual = timeSeriesToDygraph(influxResponse)
const expected = {
labels: [
@ -214,162 +206,107 @@ describe('timeSeriesToDygraph', () => {
[new Date(4000), null, 4],
],
dygraphSeries: {
// 'm1.f1': {
// axis: 'y',
// strokeWidth,
// },
'm1.f1': {
axis: 'y2',
strokeWidth,
},
},
};
}
expect(actual).to.deep.equal(expected);
});
expect(actual).to.deep.equal(expected)
})
it('does not use multiple axes if being used for the DataExplorer', () => {
const influxResponse = [
{
"response":
response:
{
"results": [
results: [
{
"series": [
series: [
{
"name":"m1",
"columns": ["time","f1"],
"values": [[1000, 1],[2000, 2]],
name: "m1",
columns: ["time", "f1"],
values: [[1000, 1], [2000, 2]],
},
]
],
},
],
},
},
{
"response":
response:
{
"results": [
results: [
{
"series": [
series: [
{
"name":"m1",
"columns": ["time","f2"],
"values": [[2000, 3],[4000, 4]],
name: "m1",
columns: ["time", "f2"],
values: [[2000, 3], [4000, 4]],
},
]
],
},
],
},
},
];
]
const isInDataExplorer = true;
const actual = timeSeriesToDygraph(influxResponse, undefined, isInDataExplorer);
const isInDataExplorer = true
const actual = timeSeriesToDygraph(influxResponse, undefined, isInDataExplorer)
const expected = {
'm1.f1': {
strokeWidth,
},
'm1.f2': {
strokeWidth,
},
};
const expected = {}
expect(actual.dygraphSeries).to.deep.equal(expected);
});
it('it highlights the appropriate response', () => {
const influxResponse = [
{
"response":
{
"results": [
{
"series": [
{
"name":"m1",
"columns": ["time","f1"],
"values": [[1000, 1],[2000, 2]],
},
]
},
],
},
},
{
"response":
{
"results": [
{
"series": [
{
"name":"m2",
"columns": ["time","f2"],
"values": [[2000, 3],[4000, 4]],
},
]
},
],
},
},
];
const highlightIndex = 1;
const actual = timeSeriesToDygraph(influxResponse, highlightIndex);
const {dygraphSeries} = actual;
expect(dygraphSeries["m2.f2"].strokeWidth).to.be.above(dygraphSeries["m1.f1"].strokeWidth);
});
expect(actual.dygraphSeries).to.deep.equal(expected)
})
it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
const influxResponse = [
{
"response":
response:
{
"results": [
results: [
{
"series": [
series: [
{
"name":"mb",
"columns": ["time","f1"],
"values": [[1000, 1],[2000, 2]],
name: "mb",
columns: ["time", "f1"],
values: [[1000, 1], [2000, 2]],
},
]
],
},
{
"series": [
series: [
{
"name":"ma",
"columns": ["time","f1"],
"values": [[1000, 1],[2000, 2]],
name: "ma",
columns: ["time", "f1"],
values: [[1000, 1], [2000, 2]],
},
]
],
},
{
"series": [
series: [
{
"name":"mc",
"columns": ["time","f2"],
"values": [[2000, 3],[4000, 4]],
name: "mc",
columns: ["time", "f2"],
values: [[2000, 3], [4000, 4]],
},
]
],
},
{
"series": [
series: [
{
"name":"mc",
"columns": ["time","f1"],
"values": [[2000, 3],[4000, 4]],
name: "mc",
columns: ["time", "f1"],
values: [[2000, 3], [4000, 4]],
},
]
],
},
],
},
}
];
},
]
const actual = timeSeriesToDygraph(influxResponse);
const actual = timeSeriesToDygraph(influxResponse)
const expected = [
'time',
@ -377,8 +314,8 @@ describe('timeSeriesToDygraph', () => {
`mb.f1`,
`mc.f1`,
`mc.f2`,
];
]
expect(actual.labels).to.deep.equal(expected);
});
});
expect(actual.labels).to.deep.equal(expected)
})
})

View File

@ -364,26 +364,36 @@ export const updateRolePermissionsAsync = (role, permissions) => async (dispatch
dispatch(publishAutoDismissingNotification('success', 'Role permissions updated'))
dispatch(syncRole(role, data))
} catch (error) {
dispatch(publishNotification('error', `Failed to updated role: ${error.data.message}`))
dispatch(publishNotification('error', `Failed to update role: ${error.data.message}`))
}
}
export const updateUserPermissionsAsync = (user, permissions) => async (dispatch) => {
try {
const {data} = await updateUserAJAX(user.links.self, user.roles, permissions)
const {data} = await updateUserAJAX(user.links.self, {permissions})
dispatch(publishAutoDismissingNotification('success', 'User permissions updated'))
dispatch(syncUser(user, data))
} catch (error) {
dispatch(publishNotification('error', `Failed to updated user: ${error.data.message}`))
dispatch(publishNotification('error', `Failed to update user: ${error.data.message}`))
}
}
export const updateUserRolesAsync = (user, roles) => async (dispatch) => {
try {
const {data} = await updateUserAJAX(user.links.self, roles, user.permissions)
const {data} = await updateUserAJAX(user.links.self, {roles})
dispatch(publishAutoDismissingNotification('success', 'User roles updated'))
dispatch(syncUser(user, data))
} catch (error) {
dispatch(publishNotification('error', `Failed to updated user: ${error.data.message}`))
dispatch(publishNotification('error', `Failed to update user: ${error.data.message}`))
}
}
export const updateUserPasswordAsync = (user, password) => async (dispatch) => {
try {
const {data} = await updateUserAJAX(user.links.self, {password})
dispatch(publishAutoDismissingNotification('success', 'User password updated'))
dispatch(syncUser(user, data))
} catch (error) {
dispatch(publishNotification('error', `Failed to update user: ${error.data.message}`))
}
}

View File

@ -159,15 +159,12 @@ export const updateRole = async (url, users, permissions) => {
}
}
export const updateUser = async (url, roles, permissions) => {
export const updateUser = async (url, updates) => {
try {
return await AJAX({
method: 'PATCH',
url,
data: {
roles,
permissions,
},
data: updates,
})
} catch (error) {
console.error(error)

View File

@ -28,6 +28,7 @@ const AdminTabs = ({
onUpdateRolePermissions,
onUpdateUserRoles,
onUpdateUserPermissions,
onUpdateUserPassword,
}) => {
let tabs = [
{
@ -51,6 +52,7 @@ const AdminTabs = ({
onFilter={onFilterUsers}
onUpdatePermissions={onUpdateUserPermissions}
onUpdateRoles={onUpdateUserRoles}
onUpdatePassword={onUpdateUserPassword}
/>
),
},
@ -135,6 +137,7 @@ AdminTabs.propTypes = {
hasRoles: bool.isRequired,
onUpdateUserPermissions: func,
onUpdateUserRoles: func,
onUpdateUserPassword: func,
}
export default AdminTabs

View File

@ -0,0 +1,94 @@
import React, {Component, PropTypes} from 'react'
import OnClickOutside from 'shared/components/OnClickOutside'
import ConfirmButtons from 'src/shared/components/ConfirmButtons'

// Inline "Change Password" control for a row in the admin Users table.
// Renders as a small button; clicking it swaps in a password input with
// confirm/cancel controls. Clicking anywhere outside the component (via
// the OnClickOutside HOC) dismisses the form without applying.
class ChangePassRow extends Component {
  constructor(props) {
    super(props)
    this.state = {
      // Whether the password form (rather than the trigger button) is shown.
      showForm: false,
    }
    // `::` is the ES function-bind operator: this.x = this.x.bind(this).
    // NOTE: handleClickOutside is intentionally NOT bound here — the
    // OnClickOutside HOC invokes it on the instance directly.
    this.showForm = ::this.showForm
    this.handleCancel = ::this.handleCancel
    this.handleKeyPress = ::this.handleKeyPress
    this.handleEdit = ::this.handleEdit
    this.handleSubmit = ::this.handleSubmit
  }

  // Open the password entry form.
  showForm() {
    this.setState({showForm: true})
  }

  // Close the form without applying the password change.
  handleCancel() {
    this.setState({showForm: false})
  }

  // Called by the OnClickOutside HOC; dismiss the form on outside clicks
  // without applying.
  handleClickOutside() {
    this.setState({showForm: false})
  }

  // Apply the pending password for `user` (delegated to props.onApply),
  // then close the form.
  handleSubmit(user) {
    this.props.onApply(user)
    this.setState({showForm: false})
  }

  // Returns a keypress handler closed over `user`; submits on Enter.
  handleKeyPress(user) {
    return (e) => {
      if (e.key === 'Enter') {
        this.handleSubmit(user)
      }
    }
  }

  // Returns a change handler closed over `user`; forwards the edited field
  // as {[input name]: value} — here {password: <value>} — to props.onEdit.
  handleEdit(user) {
    return (e) => {
      this.props.onEdit(user, {[e.target.name]: e.target.value})
    }
  }

  render() {
    const {user} = this.props

    if (this.state.showForm) {
      return (
        <div>
          <input
            className="form-control"
            name="password"
            type="password"
            value={user.password || ''}
            placeholder="Password"
            onChange={this.handleEdit(user)}
            onKeyPress={this.handleKeyPress(user)}
            autoFocus={true}
          />
          <ConfirmButtons
            onConfirm={this.handleSubmit}
            item={user}
            onCancel={this.handleCancel}
          />
        </div>
      )
    }

    // Collapsed state: button is revealed on row hover via the
    // .admin-table--hidden class.
    return (
      <button
        className="btn btn-xs btn-info admin-table--hidden"
        onClick={this.showForm}
      >
        Change Password
      </button>
    )
  }
}

const {shape, func} = PropTypes

ChangePassRow.propTypes = {
  user: shape().isRequired, // row user; pending password is read from user.password
  onApply: func.isRequired, // called with the user to persist the new password
  onEdit: func.isRequired, // called with (user, {password}) on each keystroke
}

export default OnClickOutside(ChangePassRow)

View File

@ -112,7 +112,7 @@ class DatabaseRow extends Component {
onConfirm={() => onDelete(database, retentionPolicy)}
onCancel={this.handleEndDelete} /> :
<button
className="btn btn-xs btn-danger admin-table--delete"
className="btn btn-xs btn-danger admin-table--hidden"
style={isDeletable ? {} : {visibility: 'hidden'}}
onClick={this.handleStartDelete}>{`Delete ${name}`}
</button>

View File

@ -6,9 +6,10 @@ import UserEditingRow from 'src/admin/components/UserEditingRow'
import MultiSelectDropdown from 'shared/components/MultiSelectDropdown'
import ConfirmButtons from 'shared/components/ConfirmButtons'
import DeleteConfirmTableCell from 'shared/components/DeleteConfirmTableCell'
import ChangePassRow from 'src/admin/components/ChangePassRow'
const UserRow = ({
user: {name, roles, permissions},
user: {name, roles, permissions, password},
user,
allRoles,
allPermissions,
@ -21,6 +22,7 @@ const UserRow = ({
onDelete,
onUpdatePermissions,
onUpdateRoles,
onUpdatePassword,
}) => {
const handleUpdatePermissions = (allowed) => {
onUpdatePermissions(user, [{scope: 'all', allowed}])
@ -30,6 +32,10 @@ const UserRow = ({
onUpdateRoles(user, allRoles.filter(r => roleNames.find(rn => rn === r.name)))
}
const handleUpdatePassword = () => {
onUpdatePassword(user, password)
}
if (isEditing) {
return (
<tr className="admin-table--edit-row">
@ -69,6 +75,9 @@ const UserRow = ({
/> : null
}
</td>
<td className="text-right" style={{width: "300px"}}>
<ChangePassRow onEdit={onEdit} onApply={handleUpdatePassword} user={user} />
</td>
<DeleteConfirmTableCell onDelete={onDelete} item={user} />
</tr>
)
@ -91,6 +100,7 @@ UserRow.propTypes = {
permissions: arrayOf(shape({
name: string,
})),
password: string,
}).isRequired,
allRoles: arrayOf(shape()),
allPermissions: arrayOf(string),
@ -103,6 +113,7 @@ UserRow.propTypes = {
onDelete: func.isRequired,
onUpdatePermissions: func,
onUpdateRoles: func,
onUpdatePassword: func,
}
export default UserRow

View File

@ -18,6 +18,7 @@ const UsersTable = ({
onFilter,
onUpdatePermissions,
onUpdateRoles,
onUpdatePassword,
}) => (
<div className="panel panel-info">
<FilterBar type="users" onFilter={onFilter} isEditing={isEditing} onClickCreate={onClickCreate} />
@ -49,6 +50,7 @@ const UsersTable = ({
allPermissions={permissions}
onUpdatePermissions={onUpdatePermissions}
onUpdateRoles={onUpdateRoles}
onUpdatePassword={onUpdatePassword}
/>) :
<EmptyRow tableName={'Users'} />
}
@ -89,6 +91,7 @@ UsersTable.propTypes = {
hasRoles: bool.isRequired,
onUpdatePermissions: func,
onUpdateRoles: func,
onUpdatePassword: func,
}
export default UsersTable

View File

@ -19,6 +19,7 @@ import {
updateRolePermissionsAsync,
updateUserPermissionsAsync,
updateUserRolesAsync,
updateUserPasswordAsync,
filterUsers as filterUsersAction,
filterRoles as filterRolesAction,
} from 'src/admin/actions'
@ -54,6 +55,7 @@ class AdminPage extends Component {
this.handleUpdateRolePermissions = ::this.handleUpdateRolePermissions
this.handleUpdateUserPermissions = ::this.handleUpdateUserPermissions
this.handleUpdateUserRoles = ::this.handleUpdateUserRoles
this.handleUpdateUserPassword = ::this.handleUpdateUserPassword
}
componentDidMount() {
@ -105,7 +107,6 @@ class AdminPage extends Component {
this.props.createRole(this.props.source.links.roles, role)
} else {
// TODO update role
// console.log('update')
}
}
@ -141,6 +142,10 @@ class AdminPage extends Component {
this.props.updateUserRoles(user, roles)
}
handleUpdateUserPassword(user, password) {
this.props.updateUserPassword(user, password)
}
render() {
const {users, roles, source, permissions, filterUsers, filterRoles} = this.props
const hasRoles = !!source.links.roles
@ -186,6 +191,7 @@ class AdminPage extends Component {
onUpdateRolePermissions={this.handleUpdateRolePermissions}
onUpdateUserPermissions={this.handleUpdateUserPermissions}
onUpdateUserRoles={this.handleUpdateUserRoles}
onUpdateUserPassword={this.handleUpdateUserPassword}
/> :
<span>Loading...</span>
}
@ -233,6 +239,7 @@ AdminPage.propTypes = {
updateRolePermissions: func,
updateUserPermissions: func,
updateUserRoles: func,
updateUserPassword: func,
notify: func,
}
@ -262,6 +269,7 @@ const mapDispatchToProps = (dispatch) => ({
updateRolePermissions: bindActionCreators(updateRolePermissionsAsync, dispatch),
updateUserPermissions: bindActionCreators(updateUserPermissionsAsync, dispatch),
updateUserRoles: bindActionCreators(updateUserRolesAsync, dispatch),
updateUserPassword: bindActionCreators(updateUserPasswordAsync, dispatch),
notify: bindActionCreators(publishAutoDismissingNotification, dispatch),
})

View File

@ -1,9 +1,9 @@
import {proxy} from 'utils/queryUrlGenerator'
export function getAlerts(proxyLink) {
export function getAlerts(source, timeRange) {
return proxy({
source: proxyLink,
query: "select host, value, level, alertName from alerts order by time desc",
source,
query: `SELECT host, value, level, alertName FROM alerts WHERE time >= '${timeRange.lower}' AND time <= '${timeRange.upper}' ORDER BY time desc`,
db: "chronograf",
})
}

View File

@ -27,18 +27,19 @@ const AlertsTable = React.createClass({
},
componentWillReceiveProps(newProps) {
this.filterAlerts(newProps.alerts, this.state.searchTerm)
this.filterAlerts(this.state.searchTerm, newProps.alerts)
},
filterAlerts(searchTerm) {
const filteredAlerts = this.props.alerts.filter((h) => {
filterAlerts(searchTerm, newAlerts) {
const alerts = newAlerts || this.props.alerts
const filteredAlerts = alerts.filter((h) => {
if (h.host === null || h.name === null || h.level === null) {
return false
}
return h.name.toLowerCase().search((searchTerm).toLowerCase()) !== -1 ||
h.host.toLowerCase().search((searchTerm).toLowerCase()) !== -1 ||
h.level.toLowerCase().search((searchTerm).toLowerCase()) !== -1
h.host.toLowerCase().search((searchTerm).toLowerCase()) !== -1 ||
h.level.toLowerCase().search((searchTerm).toLowerCase()) !== -1
})
this.setState({searchTerm, filteredAlerts})
},

View File

@ -1,31 +1,36 @@
import React, {PropTypes} from 'react'
import AlertsTable from '../components/AlertsTable'
import React, {PropTypes, Component} from 'react'
import SourceIndicator from '../../shared/components/SourceIndicator'
import AlertsTable from '../components/AlertsTable'
import NoKapacitorError from '../../shared/components/NoKapacitorError'
import CustomTimeRange from '../../shared/components/CustomTimeRange'
import {getAlerts} from '../apis'
import AJAX from 'utils/ajax'
import _ from 'lodash'
import NoKapacitorError from '../../shared/components/NoKapacitorError'
import moment from 'moment'
const AlertsApp = React.createClass({
propTypes: {
source: PropTypes.shape({
id: PropTypes.string.isRequired,
name: PropTypes.string.isRequired,
type: PropTypes.string, // 'influx-enterprise'
links: PropTypes.shape({
proxy: PropTypes.string.isRequired,
}).isRequired,
}), // .isRequired,
addFlashMessage: PropTypes.func, // .isRequired,
},
getInitialState() {
return {
class AlertsApp extends Component {
constructor(props) {
super(props)
this.state = {
loading: true,
hasKapacitor: false,
alerts: [],
isTimeOpen: false,
timeRange: {
upper: moment().format(),
lower: moment().subtract(1, 'd').format(),
},
}
},
this.fetchAlerts = ::this.fetchAlerts
this.renderSubComponents = ::this.renderSubComponents
this.handleToggleTime = ::this.handleToggleTime
this.handleCloseTime = ::this.handleCloseTime
this.handleApplyTime = ::this.handleApplyTime
}
// TODO: show a loading screen until we figure out if there is a kapacitor and fetch the alerts
componentDidMount() {
const {source} = this.props
@ -41,10 +46,16 @@ const AlertsApp = React.createClass({
this.setState({loading: false})
}
})
},
}
componentDidUpdate(prevProps, prevState) {
if (!_.isEqual(prevState.timeRange, this.state.timeRange)) {
this.fetchAlerts()
}
}
fetchAlerts() {
getAlerts(this.props.source.links.proxy).then((resp) => {
getAlerts(this.props.source.links.proxy, this.state.timeRange).then((resp) => {
const results = []
const alertSeries = _.get(resp, ['data', 'results', '0', 'series'], [])
@ -70,7 +81,7 @@ const AlertsApp = React.createClass({
})
this.setState({loading: false, alerts: results})
})
},
}
renderSubComponents() {
let component
@ -87,13 +98,29 @@ const AlertsApp = React.createClass({
}
}
return component
},
}
handleToggleTime() {
this.setState({isTimeOpen: !this.state.isTimeOpen})
}
handleCloseTime() {
this.setState({isTimeOpen: false})
}
handleApplyTime(timeRange) {
this.setState({timeRange})
}
render() {
const {source} = this.props
const {loading, timeRange} = this.state
if (loading || !source) {
return <div className="page-spinner" />
}
return (
// I stole this from the Hosts page.
// Perhaps we should create an abstraction?
<div className="page">
<div className="page-header">
<div className="page-header__container">
@ -104,6 +131,13 @@ const AlertsApp = React.createClass({
</div>
<div className="page-header__right">
<SourceIndicator sourceName={source.name} />
<CustomTimeRange
isVisible={this.state.isTimeOpen}
onToggle={this.handleToggleTime}
onClose={this.handleCloseTime}
onApplyTimeRange={this.handleApplyTime}
timeRange={timeRange}
/>
</div>
</div>
</div>
@ -111,15 +145,32 @@ const AlertsApp = React.createClass({
<div className="container-fluid">
<div className="row">
<div className="col-md-12">
{ this.renderSubComponents() }
{this.renderSubComponents()}
</div>
</div>
</div>
</div>
</div>
)
},
}
}
})
const {
func,
shape,
string,
} = PropTypes
AlertsApp.propTypes = {
source: shape({
id: string.isRequired,
name: string.isRequired,
type: string, // 'influx-enterprise'
links: shape({
proxy: string.isRequired,
}).isRequired,
}),
addFlashMessage: func,
}
export default AlertsApp

View File

@ -91,7 +91,13 @@ class CellEditorOverlay extends Component {
render() {
const {onCancel, autoRefresh, timeRange} = this.props
const {activeQueryIndex, cellWorkingType, queriesWorkingDraft} = this.state
const {
activeQueryIndex,
cellWorkingName,
cellWorkingType,
queriesWorkingDraft,
} = this.state
const queryActions = {
addQuery: this.handleAddQuery,
..._.mapValues(queryModifiers, (qm) => this.queryStateReducer(qm)),
@ -106,6 +112,7 @@ class CellEditorOverlay extends Component {
queryConfigs={queriesWorkingDraft}
activeQueryIndex={0}
cellType={cellWorkingType}
cellName={cellWorkingName}
/>
<ResizeBottom>
<OverlayControls

View File

@ -4,8 +4,8 @@ import {connect} from 'react-redux'
import {bindActionCreators} from 'redux'
import CellEditorOverlay from 'src/dashboards/components/CellEditorOverlay'
import Header from 'src/dashboards/components/DashboardHeader'
import EditHeader from 'src/dashboards/components/DashboardHeaderEdit'
import DashboardHeader from 'src/dashboards/components/DashboardHeader'
import DashboardHeaderEdit from 'src/dashboards/components/DashboardHeaderEdit'
import Dashboard from 'src/dashboards/components/Dashboard'
import * as dashboardActionCreators from 'src/dashboards/actions'
@ -205,12 +205,12 @@ const DashboardPage = React.createClass({
}
{
isEditMode ?
<EditHeader
<DashboardHeaderEdit
dashboard={dashboard}
onCancel={this.handleCancelEditDashboard}
onSave={this.handleRenameDashboard}
/> :
<Header
<DashboardHeader
buttonText={dashboard ? dashboard.name : ''}
handleChooseAutoRefresh={handleChooseAutoRefresh}
autoRefresh={autoRefresh}
@ -237,7 +237,7 @@ const DashboardPage = React.createClass({
}) :
null
}
</Header>
</DashboardHeader>
}
{
dashboard ?

View File

@ -18,6 +18,7 @@ const {
const Visualization = React.createClass({
propTypes: {
cellName: string,
cellType: string,
autoRefresh: number.isRequired,
timeRange: shape({
@ -25,7 +26,6 @@ const Visualization = React.createClass({
lower: string,
}).isRequired,
queryConfigs: arrayOf(shape({})).isRequired,
name: string,
activeQueryIndex: number,
height: string,
heightPixels: number,
@ -74,7 +74,14 @@ const Visualization = React.createClass({
},
render() {
const {queryConfigs, timeRange, height, heightPixels} = this.props
const {
queryConfigs,
timeRange,
height,
heightPixels,
cellName,
} = this.props
const {source} = this.context
const proxyLink = source.links.proxy
@ -91,7 +98,7 @@ const Visualization = React.createClass({
<div className={classNames("graph", {active: true})} style={{height}}>
<div className="graph-heading">
<div className="graph-title">
{name || "Graph"}
{cellName || "Graph"}
</div>
<div className="graph-actions">
<ul className="toggle toggle-sm">

View File

@ -1,15 +1,24 @@
import React, {PropTypes} from 'react'
import QuestionMarkTooltip from 'src/shared/components/QuestionMarkTooltip'
import {HIPCHAT_TOKEN_TIP} from 'src/kapacitor/copy'
const {
bool,
func,
shape,
string,
} = PropTypes
const HipchatConfig = React.createClass({
propTypes: {
config: PropTypes.shape({
options: PropTypes.shape({
room: PropTypes.string.isRequired,
token: PropTypes.bool.isRequired,
url: PropTypes.string.isRequired,
config: shape({
options: shape({
room: string.isRequired,
token: bool.isRequired,
url: string.isRequired,
}).isRequired,
}).isRequired,
onSave: PropTypes.func.isRequired,
onSave: func.isRequired,
},
handleSaveAlert(e) {
@ -32,21 +41,48 @@ const HipchatConfig = React.createClass({
<div>
<h4 className="text-center no-user-select">HipChat Alert</h4>
<br/>
<p className="no-user-select">Have alerts sent to HipChat.</p>
<p className="no-user-select">Send alert messages to HipChat.</p>
<form onSubmit={this.handleSaveAlert}>
<div className="form-group col-xs-12">
<label htmlFor="url">HipChat URL</label>
<input className="form-control" id="url" type="text" ref={(r) => this.url = r} defaultValue={url || ''}></input>
<input
className="form-control"
id="url"
type="text"
placeholder="https://your-subdomain.hipchat.com/v2/room"
ref={(r) => this.url = r}
defaultValue={url || ''}
/>
</div>
<div className="form-group col-xs-12">
<label htmlFor="room">Room</label>
<input className="form-control" id="room" type="text" ref={(r) => this.room = r} defaultValue={room || ''}></input>
<input
className="form-control"
id="room"
type="text"
placeholder="your-hipchat-room"
ref={(r) => this.room = r}
defaultValue={room || ''}
/>
</div>
<div className="form-group col-xs-12">
<label htmlFor="token">Token</label>
<input className="form-control" id="token" type="text" ref={(r) => this.token = r} defaultValue={token || ''}></input>
<label htmlFor="token">
Token
<QuestionMarkTooltip
tipID="token"
tipContent={HIPCHAT_TOKEN_TIP}
/>
</label>
<input
className="form-control"
id="token"
type="text"
placeholder="your-hipchat-token"
ref={(r) => this.token = r}
defaultValue={token || ''}
/>
<label className="form-helper">Note: a value of <code>true</code> indicates the HipChat token has been set</label>
</div>

View File

@ -1,18 +1,26 @@
import React, {PropTypes} from 'react'
import QuestionMarkTooltip from 'src/shared/components/QuestionMarkTooltip'
import {TELEGRAM_CHAT_ID_TIP, TELEGRAM_TOKEN_TIP} from 'src/kapacitor/copy'
const {
bool,
func,
shape,
string,
} = PropTypes
const TelegramConfig = React.createClass({
propTypes: {
config: PropTypes.shape({
options: PropTypes.shape({
'chat-id': PropTypes.string.isRequired,
'disable-notification': PropTypes.bool.isRequired,
'disable-web-page-preview': PropTypes.bool.isRequired,
'parse-mode': PropTypes.string.isRequired,
token: PropTypes.bool.isRequired,
url: PropTypes.string.isRequired,
config: shape({
options: shape({
'chat-id': string.isRequired,
'disable-notification': bool.isRequired,
'disable-web-page-preview': bool.isRequired,
'parse-mode': string.isRequired,
token: bool.isRequired,
}).isRequired,
}).isRequired,
onSave: PropTypes.func.isRequired,
onSave: func.isRequired,
},
handleSaveAlert(e) {
@ -32,7 +40,6 @@ const TelegramConfig = React.createClass({
'disable-web-page-preview': this.disableWebPagePreview.checked,
'parse-mode': parseMode,
token: this.token.value,
url: this.url.value,
}
this.props.onSave(properties)
@ -40,9 +47,9 @@ const TelegramConfig = React.createClass({
render() {
const {options} = this.props.config
const {url, token} = options
const {token} = options
const chatID = options['chat-id']
const disableNotification = options['chat-id']
const disableNotification = options['disable-notification']
const disableWebPagePreview = options['disable-web-page-preview']
const parseMode = options['parse-mode']
@ -50,49 +57,76 @@ const TelegramConfig = React.createClass({
<div>
<h4 className="text-center no-user-select">Telegram Alert</h4>
<br/>
<p className="no-user-select">You can have alerts sent to Telegram by entering info below.</p>
<p className="no-user-select">
Send alert messages to a <a href="https://docs.influxdata.com/kapacitor/v1.2/guides/event-handler-setup/#telegram-bot" target="_blank">Telegram bot</a>.
</p>
<form onSubmit={this.handleSaveAlert}>
<div className="form-group col-xs-12">
<label htmlFor="url">Telegram URL</label>
<input className="form-control" id="url" type="text" ref={(r) => this.url = r} defaultValue={url || ''}></input>
</div>
<div className="form-group col-xs-12">
<label htmlFor="token">Token</label>
<input className="form-control" id="token" type="text" ref={(r) => this.token = r} defaultValue={token || ''}></input>
<label htmlFor="token">
Token
<QuestionMarkTooltip
tipID="token"
tipContent={TELEGRAM_TOKEN_TIP}
/>
</label>
<input
className="form-control"
id="token"
type="text"
placeholder="your-telegram-token"
ref={(r) => this.token = r}
defaultValue={token || ''}>
</input>
<label className="form-helper">Note: a value of <code>true</code> indicates the Telegram token has been set</label>
</div>
<div className="form-group col-xs-12">
<label htmlFor="chat-id">Chat ID</label>
<input className="form-control" id="chat-id" type="text" ref={(r) => this.chatID = r} defaultValue={chatID || ''}></input>
<label htmlFor="chat-id">
Chat ID
<QuestionMarkTooltip
tipID="chat-id"
tipContent={TELEGRAM_CHAT_ID_TIP}
/>
</label>
<input
className="form-control"
id="chat-id"
type="text"
placeholder="your-telegram-chat-id"
ref={(r) => this.chatID = r}
defaultValue={chatID || ''}>
</input>
</div>
<div className="form-group col-xs-12">
<label htmlFor="parseMode">Parse Mode</label>
<label htmlFor="parseMode">Select the alert message format</label>
<div className="form-control-static">
<div className="radio">
<input id="parseModeMarkdown" type="radio" name="parseMode" value="markdown" defaultChecked={parseMode !== 'HTML'} ref={(r) => this.parseModeMarkdown = r} />
<label htmlFor="parseModeMarkdown">Markdown</label>
</div>
<div className="radio">
<input id="parseModeHTML" type="radio" name="parseMode" value="html" defaultChecked={parseMode === 'HTML'} ref={(r) => this.parseModeHTML = r} />
<label htmlFor="parseModeHTML">HTML</label>
</div>
<div className="radio">
<input id="parseModeMarkdown" type="radio" name="parseMode" value="markdown" defaultChecked={parseMode === 'Markdown'} ref={(r) => this.parseModeMarkdown = r} />
<label htmlFor="parseModeMarkdown">Markdown</label>
</div>
</div>
</div>
<div className="form-group col-xs-12">
<div className="form-control-static">
<input id="disableWebPagePreview" type="checkbox" defaultChecked={disableWebPagePreview} ref={(r) => this.disableWebPagePreview = r} />
<label htmlFor="disableWebPagePreview">Disable Web Page Preview</label>
<label htmlFor="disableWebPagePreview">
Disable <a href="https://telegram.org/blog/link-preview" target="_blank">link previews</a> in alert messages.
</label>
</div>
</div>
<div className="form-group col-xs-12">
<div className="form-control-static">
<input id="disableNotification" type="checkbox" defaultChecked={disableNotification} ref={(r) => this.disableNotification = r} />
<label htmlFor="disableNotification">Disable Notification</label>
<label htmlFor="disableNotification">
Disable notifications on iOS devices and disable sounds on Android devices. Android users continue to receive notifications.
</label>
</div>
</div>

8
ui/src/kapacitor/copy.js Normal file
View File

@ -0,0 +1,8 @@
// User-facing tooltip copy for the Kapacitor alert-endpoint config forms.
// Each tip is an HTML fragment rendered via QuestionMarkTooltip.

// All three tips anchor into the same Kapacitor event-handler setup guide.
const eventHandlerGuide = 'https://docs.influxdata.com/kapacitor/latest/guides/event-handler-setup'

export const TELEGRAM_CHAT_ID_TIP = `<p>Need help finding your chat id? Check out <a target='_blank' href='${eventHandlerGuide}/#telegram-chat-id'>these steps</a>.</p>`

export const TELEGRAM_TOKEN_TIP = `<p>Need help finding your token? Check out <a target='_blank' href='${eventHandlerGuide}/#telegram-api-access-token'>these steps</a>.</p>`

export const HIPCHAT_TOKEN_TIP = `<p>Need help creating a token? Check out <a href='${eventHandlerGuide}/#hipchat-api-access-token' target='_blank'>these steps</a>.</p>`

View File

@ -0,0 +1,112 @@
import React, {PropTypes, Component} from 'react'
import rome from 'rome'
import moment from 'moment'
import classNames from 'classnames'
import OnClickOutside from 'react-onclickoutside'

// Dropdown for picking an absolute {lower, upper} time range using two
// `rome` calendar widgets. The chosen range is reported upward through
// props.onApplyTimeRange as ISO-8601 strings.
class CustomTimeRange extends Component {
  constructor(props) {
    super(props)
    // `::` is the ES function-bind operator: this.handleClick.bind(this).
    // handleClickOutside is NOT bound — the OnClickOutside HOC calls it
    // on the instance directly.
    this.handleClick = ::this.handleClick
  }

  // Invoked by the OnClickOutside HOC; close the dropdown on outside clicks.
  handleClickOutside() {
    this.props.onClose()
  }

  componentDidMount() {
    const {timeRange} = this.props

    // Attach rome calendars to the two ref'd divs (this.lower/this.upper
    // are DOM nodes captured in render), seeded with the current range.
    const lower = rome(this.lower, {
      initialValue: this._formatTimeRange(timeRange.lower),
    })
    const upper = rome(this.upper, {
      initialValue: this._formatTimeRange(timeRange.upper),
    })

    // Keep the calendar instances for later reads (handleClick) and
    // writes (componentWillReceiveProps).
    this.lowerCal = lower
    this.upperCal = upper
  }

  // If there is an upper or lower time range set, set the corresponding calendar's value.
  componentWillReceiveProps(nextProps) {
    const {lower, upper} = nextProps.timeRange
    if (lower) {
      this.lowerCal.setValue(this._formatTimeRange(lower))
    }
    if (upper) {
      this.upperCal.setValue(this._formatTimeRange(upper))
    }
  }

  render() {
    const {isVisible, onToggle, timeRange: {upper, lower}} = this.props

    return (
      // `show` toggles visibility of .custom-time--container via CSS
      // (see custom-time-range.scss).
      <div className={classNames("custom-time-range", {show: isVisible})} style={{display: 'flex'}}>
        <button className="btn btn-sm btn-info custom-time-range--btn" onClick={onToggle}>
          <span className="icon clock"></span>
          {`${moment(lower).format('MMM Do HH:mm')} — ${moment(upper).format('MMM Do HH:mm')}`}
          <span className="caret"></span>
        </button>
        <div className="custom-time--container">
          <div className="custom-time--dates">
            <div className="custom-time--lower" ref={(r) => this.lower = r} />
            <div className="custom-time--upper" ref={(r) => this.upper = r} />
          </div>
          <div className="custom-time--apply btn btn-sm btn-primary" onClick={this.handleClick}>Apply</div>
        </div>
      </div>
    )
  }

  // "Apply": read both calendars, report the chosen range upward as
  // ISO-8601 strings, then close the dropdown.
  handleClick() {
    const lower = this.lowerCal.getDate().toISOString()
    const upper = this.upperCal.getDate().toISOString()

    this.props.onApplyTimeRange({lower, upper})
    this.props.onClose()
  }

  /*
   * Upper and lower time ranges are passed in with single quotes as part of
   * the string literal, i.e. "'2015-09-23T18:00:00.000Z'". Remove them
   * before passing the string to be parsed.
   */
  _formatTimeRange(timeRange) {
    if (!timeRange) {
      return ''
    }

    // If the given time range is relative, create a fixed timestamp based on its value
    if (timeRange.match(/^now/)) {
      // e.g. "now() - 1d" -> match "1d" -> duration "1", unit "d"
      const match = timeRange.match(/\d+\w/)[0]
      const duration = match.slice(0, match.length - 1)
      const unitOfTime = match[match.length - 1]
      // NOTE(review): this branch returns a moment object while the branch
      // below returns a formatted string — rome appears to accept both as
      // initialValue/setValue input, but confirm the intended contract.
      return moment().subtract(duration, unitOfTime)
    }

    return moment(timeRange.replace(/\'/g, '')).format('YYYY-MM-DD HH:mm')
  }
}

const {
  bool,
  func,
  shape,
  string,
} = PropTypes

CustomTimeRange.propTypes = {
  onApplyTimeRange: func.isRequired, // receives {lower, upper} as ISO strings
  timeRange: shape({
    lower: string.isRequired, // absolute ISO timestamp or relative "now() - ..." form
    upper: string.isRequired,
  }).isRequired,
  isVisible: bool.isRequired, // controls the .show class on the dropdown
  onToggle: func.isRequired, // toggles visibility (button click)
  onClose: func.isRequired, // closes the dropdown (apply / outside click)
}

export default OnClickOutside(CustomTimeRange)

View File

@ -4,7 +4,7 @@ import OnClickOutside from 'shared/components/OnClickOutside'
import ConfirmButtons from 'shared/components/ConfirmButtons'
const DeleteButton = ({onClickDelete}) => (
<button className="btn btn-xs btn-danger admin-table--delete" onClick={onClickDelete}>
<button className="btn btn-xs btn-danger admin-table--hidden" onClick={onClickDelete}>
Delete
</button>
)

View File

@ -0,0 +1,30 @@
import React, {PropTypes} from 'react'
import ReactTooltip from 'react-tooltip'
const QuestionMarkTooltip = ({
tipID,
tipContent,
}) => (
<div style={{display: "inline-block"}}>
<div data-for={`${tipID}-tooltip`} data-tip={tipContent} style={{margin: "0 5px"}}>?</div>
<ReactTooltip
id={`${tipID}-tooltip`}
effect="solid"
html={true}
offset={{top: 2}}
place="bottom"
class="influx-tooltip__hover place-bottom"
/>
</div>
)
const {
string,
} = PropTypes
QuestionMarkTooltip.propTypes = {
tipID: string.isRequired,
tipContent: string.isRequired,
}
export default QuestionMarkTooltip

View File

@ -47,7 +47,7 @@ const NavBlock = React.createClass({
const {location, className, wrapperClassName} = this.props
const isActive = React.Children.toArray(this.props.children).find((child) => {
return child.type === NavListItem && location.startsWith(child.props.link)
return location.startsWith(child.props.link)
})
const children = React.Children.map((this.props.children), (child) => {

View File

@ -40,6 +40,7 @@
@import 'components/resizer';
@import 'components/source-indicator';
@import 'components/confirm-buttons';
@import 'components/custom-time-range';
// Pages
@import 'pages/alerts';

View File

@ -0,0 +1,254 @@
/*
Custom Time Range Dropdown
------------------------------------------------------
*/
.custom-time-range {
position: relative;
}
.btn.btn-sm.btn-info.custom-time-range--btn {
padding: 0 30px 0 9px !important;
.caret {
position: absolute;
right: 9px;
top: calc(50% + 1px);
transform: translateY(-50%);
}
}
.custom-time--container {
display: none;
position: absolute;
flex-direction: column;
align-items: center;
top: 35px;
right: 0;
background: $g5-pepper;
border-radius: $radius;
padding: 8px;
z-index: 1000;
box-shadow: 0 2px 5px 0.6px rgba(15, 14, 21, 0.2);
}
.custom-time--dates {
display: flex;
align-items: flex-start;
justify-content: space-between;
}
.custom-time--lower {
margin-right: 4px;
}
.custom-time--upper {
margin-left: 4px;
}
$custom-time-arrow: 28px;
$rd-cell-size: 30px;
.rd-container {
display: flex !important;
flex-direction: column;
align-items: center;
}
.rd-date {
position: relative;
}
.rd-back,
.rd-next,
.rd-month-label {
position: absolute;
top: 0;
height: $custom-time-arrow;
line-height: $custom-time-arrow;
}
.rd-back,
.rd-next {
outline: none;
width: $custom-time-arrow;
border: 0;
background-color: transparent;
border-radius: 50%;
color: $g15-platinum;
transition:
background-color 0.25s ease,
color 0.25s ease;
&:after {
font-family: 'icomoon' !important;
font-style: normal;
font-weight: normal;
font-variant: normal;
color: inherit;
position: absolute;
top: 50%;
transform: translate(-50%,-50%);
font-size: 16px;
}
&:hover {
background-color: $g6-smoke;
color: $g20-white;
}
}
.rd-back {
left: 0;
&:after {
left: calc(50% - 1px);
content: "\e90c";
}
}
.rd-next {
left: calc(100% - #{$custom-time-arrow});
&:after {
left: calc(50% + 1px);
content: "\e911";
}
}
.rd-month-label {
font-weight: 600;
color: $g15-platinum;
left: $custom-time-arrow;
text-align: center;
@include no-user-select();
width: calc(100% - #{($custom-time-arrow * 2)});
}
.rd-days {
margin-top: ($custom-time-arrow + 8px);
background-color: transparent;
border-radius: $radius-small;
/* Cancel out default table styles */
tr:hover {
background-color: transparent !important;
color: inherit !important;
}
thead.rd-days-head th.rd-day-head,
tbody.rd-days-body td.rd-day-body {
padding: 0 !important;
min-height: $rd-cell-size !important;
height: $rd-cell-size !important;
max-height: $rd-cell-size !important;
min-width: $rd-cell-size !important;
width: $rd-cell-size !important;
max-width: $rd-cell-size !important;
vertical-align: middle;
text-align: center;
border: 2px solid $g5-pepper !important;
}
thead.rd-days-head th.rd-day-head {
color: $g15-platinum !important;
background-color: $g5-pepper !important;
}
tbody.rd-days-body td.rd-day-body {
@include no-user-select();
letter-spacing: -1px;
font-family: $code-font;
transition:
background-color 0.25s ease,
color 0.25s ease;
color: $g13-mist !important;
background-color: $g3-castle;
border-radius: 5px;
&:hover {
cursor: $cc-pointer;
color: $g20-white !important;
background-color: $g6-smoke;
}
&.rd-day-next-month,
&.rd-day-prev-month {
cursor: $cc-default;
color: $g8-storm !important;
background-color: $g5-pepper !important;
}
&.rd-day-selected {
background-color: $c-pool !important;
color: $g20-white !important;
}
}
}
.rd-time {
margin: 0 2px;
width: calc(100% - 4px);
height: 30px;
display: flex;
align-items: center;
justify-content: center;
position: relative;
}
.rd-time-selected {
@include no-user-select();
height: 28px;
line-height: 28px;
background-color: $g3-castle;
border-radius: $radius-small;
width: 100%;
letter-spacing: -1px;
font-family: $code-font;
color: $g13-mist;
display: inline-block;
transition:
color 0.25s ease,
background-color 0.25s ease;
text-align: center;
&:hover {
color: $g20-white;
background-color: $g6-smoke;
cursor: $cc-pointer;
}
}
/* Scrollable dropdown of time options; absolutely centered over .rd-time
   (its nearest positioned ancestor) via the 50%/50% + translate trick. */
.rd-time-list {
position: absolute;
top: 50%;
left: 50%;
width: 120px;
height: 200px;
transform: translate(-50%,-50%);
/* Later overflow-x/overflow-y declarations refine the initial shorthand:
   vertical scrollbar always shown, horizontal never */
overflow: auto;
overflow-x: hidden;
overflow-y: scroll;
@include custom-scrollbar-round($c-pool, $c-laser);
@include gradient-h($c-ocean, $c-pool);
border-radius: $radius;
box-shadow: 0 2px 5px 0.6px rgba(15, 14, 21, 0.2);
}
/* Single entry in the time dropdown list. */
.rd-time-option {
width: 100%;
/* line-height equals height to vertically center the label */
height: 24px;
line-height: 24px;
/* Offset the label by the scrollbar width so centered text does not
   appear shifted left next to the always-visible scrollbar */
padding-left: $scrollbar-width;
text-align: center;
@include no-user-select();
font-family: $code-font;
color: $c-yeti;
letter-spacing: -1px;
&:hover,
&:active,
&:focus {
color: $g20-white;
cursor: $cc-pointer;
/* outline: none — focus state is conveyed by the gradient instead */
outline: none;
@include gradient-h($c-laser, $c-pool);
}
}
/* "Apply" button beneath the custom time range picker;
   fixed width matches the .rd-time-list dropdown above it. */
.custom-time--apply {
margin-top: 8px;
width: 120px;
}
/* Show State — applied while the custom time range dropdown is open:
   reveals the picker container and keeps the toggle button highlighted. */
.custom-time-range.show {
.custom-time--container {
display: flex;
}
/* !important wins over the button's default/hover colors while open */
.custom-time-range--btn {
color: $g20-white !important;
background-color: $g6-smoke;
}
}

View File

@ -86,3 +86,12 @@ $tooltip-code-color: $c-potassium;
}
}
}
/*
 * Variant of .influx-tooltip that remains visible while the cursor is over
 * the tooltip itself (pointer-events re-enabled so it can receive :hover),
 * allowing users to interact with the tooltip contents.
 */
.influx-tooltip__hover {
  @extend .influx-tooltip;
  /* Space before !important added for consistency with the rest of the file
     (e.g. "background-color: transparent !important") */
  pointer-events: auto !important;
  &:hover {
    visibility: visible !important;
    opacity: 1 !important;
  }
}

View File

@ -41,6 +41,35 @@ $scrollbar-offset: 3px;
/* Custom WebKit scrollbar skin: flat track in $trackColor with a rounded
   $handleColor thumb inset by $scrollbar-offset. Nested &-xxx selectors
   compile to ::-webkit-scrollbar-button, -track, -track-piece, -thumb,
   -corner. WebKit/Blink-only; other engines fall back to native scrollbars. */
@mixin custom-scrollbar($trackColor, $handleColor) {
&::-webkit-scrollbar {
width: $scrollbar-width;
&-button {
background-color: $trackColor;
}
&-track {
background-color: $trackColor;
}
&-track-piece {
background-color: $trackColor;
/* Track-colored border shrinks the visible piece, creating an inset look */
border: $scrollbar-offset solid $trackColor;
border-radius: ($scrollbar-width / 2);
}
&-thumb {
background-color: $handleColor;
border: $scrollbar-offset solid $trackColor;
/* Half the bar width yields fully rounded (pill-shaped) ends */
border-radius: ($scrollbar-width / 2);
}
&-corner {
background-color: $trackColor;
}
}
/* Match the resize grip (shown on resizable elements) to the track */
&::-webkit-resizer {
background-color: $trackColor;
}
}
@mixin custom-scrollbar-round($trackColor, $handleColor) {
&::-webkit-scrollbar {
width: $scrollbar-width;
border-top-right-radius: $radius;
border-bottom-right-radius: $radius;
&-button {
@ -48,6 +77,7 @@ $scrollbar-offset: 3px;
}
&-track {
background-color: $trackColor;
border-top-right-radius: $radius;
border-bottom-right-radius: $radius;
}
&-track-piece {

View File

@ -67,7 +67,7 @@
width: 100%;
min-width: 150px;
}
.admin-table--delete {
.admin-table--hidden {
visibility: hidden;
}
.dropdown-toggle {
@ -83,7 +83,7 @@
}
.open .dropdown-toggle .multi-select-dropdown__label {left: 9px;}
tbody tr:hover {
.admin-table--delete {
.admin-table--hidden {
visibility: visible;
}
.dropdown-toggle {

View File

@ -267,7 +267,7 @@ input {
padding: 0 !important;
max-height: 290px;
overflow: auto;
@include custom-scrollbar($c-pool, $c-laser);
@include custom-scrollbar-round($c-pool, $c-laser);
@include gradient-h($c-ocean, $c-pool);
box-shadow: 0 2px 5px 0.6px fade-out($g0-obsidian, 0.8);

View File

@ -1,5 +1,4 @@
import _ from 'lodash'
import {STROKE_WIDTH} from 'src/shared/constants'
import {map, reduce, forEach, concat, clone} from 'fast.js'
/**
@ -102,15 +101,11 @@ export default function timeSeriesToDygraph(raw = [], activeQueryIndex, isInData
}
const sortedTimeSeries = _.sortBy(timeSeries, 'time')
const {light, heavy} = STROKE_WIDTH
const dygraphSeries = reduce(sortedLabels, (acc, {label, responseIndex}) => {
acc[label] = {
strokeWidth: responseIndex === activeQueryIndex ? heavy : light,
}
if (!isInDataExplorer) {
acc[label].axis = responseIndex === 0 ? 'y' : 'y2'
acc[label] = {
axis: responseIndex === 0 ? 'y' : 'y2',
}
}
return acc

View File

@ -352,6 +352,10 @@ asynckit@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
atoa@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/atoa/-/atoa-1.0.0.tgz#0cc0e91a480e738f923ebc103676471779b34a49"
atob@~1.1.0:
version "1.1.3"
resolved "https://registry.yarnpkg.com/atob/-/atob-1.1.3.tgz#95f13629b12c3a51a5d215abdce2aa9f32f80773"
@ -1540,6 +1544,14 @@ builtin-status-codes@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-2.0.0.tgz#6f22003baacf003ccd287afe6872151fddc58579"
bullseye@1.4.6:
version "1.4.6"
resolved "https://registry.yarnpkg.com/bullseye/-/bullseye-1.4.6.tgz#b73f606f7b4273be80ac65acd75295d62606fe24"
dependencies:
crossvent "^1.3.1"
seleccion "2.0.0"
sell "^1.0.0"
bytes@2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.3.0.tgz#d5b680a165b6201739acb611542aabc2d8ceb070"
@ -1964,6 +1976,13 @@ content-type@~1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.2.tgz#b7d113aee7a8dd27bd21133c4dc2529df1721eed"
contra@1.9.1:
version "1.9.1"
resolved "https://registry.yarnpkg.com/contra/-/contra-1.9.1.tgz#60e498274b3d2d332896d60f82900aefa2ecac8c"
dependencies:
atoa "1.0.0"
ticky "1.0.0"
convert-source-map@^0.3.3:
version "0.3.5"
resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-0.3.5.tgz#f1d802950af7dd2631a1febe0596550c86ab3190"
@ -2018,6 +2037,12 @@ cross-spawn@^5.0.0:
shebang-command "^1.2.0"
which "^1.2.9"
crossvent@1.5.0, crossvent@^1.3.1:
version "1.5.0"
resolved "https://registry.yarnpkg.com/crossvent/-/crossvent-1.5.0.tgz#3779c1242699e19417f0414e61b144753a52fd6d"
dependencies:
custom-event "1.0.0"
cryptiles@2.x.x:
version "2.0.5"
resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
@ -2200,6 +2225,10 @@ currently-unhandled@^0.4.1:
dependencies:
array-find-index "^1.0.1"
custom-event@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.0.tgz#2e4628be19dc4b214b5c02630c5971e811618062"
custom-event@~1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425"
@ -4596,7 +4625,7 @@ mocha@^2.4.5:
supports-color "1.2.0"
to-iso-string "0.0.2"
moment@^2.13.0:
moment@^2.13.0, moment@^2.8.2:
version "2.17.1"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.17.1.tgz#fed9506063f36b10f066c8b59a144d7faebe1d82"
@ -6203,6 +6232,15 @@ ripemd160@0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-0.2.0.tgz#2bf198bde167cacfa51c0a928e84b68bbe171fce"
rome@^2.1.22:
version "2.1.22"
resolved "https://registry.yarnpkg.com/rome/-/rome-2.1.22.tgz#4bf25318cc0522ae92dd090472ce7a6e0b1f5e02"
dependencies:
bullseye "1.4.6"
contra "1.9.1"
crossvent "1.5.0"
moment "^2.8.2"
run-async@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/run-async/-/run-async-0.1.0.tgz#c8ad4a5e110661e402a7d21b530e009f25f8e389"
@ -6243,6 +6281,14 @@ script-loader@~0.6.0:
dependencies:
raw-loader "~0.5.1"
seleccion@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/seleccion/-/seleccion-2.0.0.tgz#0984ac1e8df513e38b41a608e65042e8381e0a73"
sell@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/sell/-/sell-1.0.0.tgz#3baca7e51f78ddee9e22eea1ac747a6368bd1630"
"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@~5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f"
@ -6842,6 +6888,10 @@ through@^2.3.6, through@~2.3.4:
version "2.3.8"
resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
ticky@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/ticky/-/ticky-1.0.0.tgz#e87f38ee0491ea32f62e8f0567ba9638b29f049c"
timers-browserify@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.2.tgz#ab4883cf597dcd50af211349a00fbca56ac86b86"