Merge branch 'master' into some-amount-of-polish
commit
a4d3056402
10
CHANGELOG.md
10
CHANGELOG.md
|
@ -2,12 +2,22 @@
|
||||||
|
|
||||||
### Bug Fixes
|
### Bug Fixes
|
||||||
1. [#1104](https://github.com/influxdata/chronograf/pull/1104): Fix windows hosts on host list
|
1. [#1104](https://github.com/influxdata/chronograf/pull/1104): Fix windows hosts on host list
|
||||||
|
1. [#1125](https://github.com/influxdata/chronograf/pull/1125): Fix visualizations not showing graph name
|
||||||
|
1. [#1133](https://github.com/influxdata/chronograf/issues/1133): Fix Enterprise Kapacitor authentication.
|
||||||
|
1. [#1142](https://github.com/influxdata/chronograf/issues/1142): Fix Kapacitor Telegram config to display correct disableNotification setting
|
||||||
|
|
||||||
### Features
|
### Features
|
||||||
1. [#1112](https://github.com/influxdata/chronograf/pull/1112): Add ability to delete a dashboard
|
1. [#1112](https://github.com/influxdata/chronograf/pull/1112): Add ability to delete a dashboard
|
||||||
|
1. [#1120](https://github.com/influxdata/chronograf/pull/1120): Allow users to update user passwords.
|
||||||
|
1. [#1129](https://github.com/influxdata/chronograf/pull/1129): Allow InfluxDB and Kapacitor configuration via ENV vars or CLI options
|
||||||
|
1. [#1130](https://github.com/influxdata/chronograf/pull/1130): Add loading spinner to Alert History page.
|
||||||
|
|
||||||
### UI Improvements
|
### UI Improvements
|
||||||
1. [#1101](https://github.com/influxdata/chronograf/pull/1101): Compress InfluxQL responses with gzip
|
1. [#1101](https://github.com/influxdata/chronograf/pull/1101): Compress InfluxQL responses with gzip
|
||||||
|
1. [#1132](https://github.com/influxdata/chronograf/pull/1132): All sidebar items show activity with a blue strip
|
||||||
|
1. [#1135](https://github.com/influxdata/chronograf/pull/1135): Clarify Kapacitor Alert configuration for Telegram
|
||||||
|
1. [#1137](https://github.com/influxdata/chronograf/pull/1137): Clarify Kapacitor Alert configuration for HipChat
|
||||||
|
1. [#1079](https://github.com/influxdata/chronograf/issues/1079): Remove series highlighting in line graphs
|
||||||
|
|
||||||
## v1.2.0-beta7 [2017-03-28]
|
## v1.2.0-beta7 [2017-03-28]
|
||||||
### Bug Fixes
|
### Bug Fixes
|
||||||
|
|
|
@ -891,6 +891,7 @@
|
||||||
* rimraf 2.5.3 [ISC](http://github.com/isaacs/rimraf)
|
* rimraf 2.5.3 [ISC](http://github.com/isaacs/rimraf)
|
||||||
* rimraf 2.5.4 [ISC](http://github.com/isaacs/rimraf)
|
* rimraf 2.5.4 [ISC](http://github.com/isaacs/rimraf)
|
||||||
* ripemd160 0.2.0 [Unknown](https://github.com/cryptocoinjs/ripemd160)
|
* ripemd160 0.2.0 [Unknown](https://github.com/cryptocoinjs/ripemd160)
|
||||||
|
* rome 2.1.22 [MIT](https://github.com/bevacqua/rome)
|
||||||
* run-async 0.1.0 [MIT](http://github.com/SBoudrias/run-async)
|
* run-async 0.1.0 [MIT](http://github.com/SBoudrias/run-async)
|
||||||
* rx-lite 3.1.2 [Apache License](https://github.com/Reactive-Extensions/RxJS)
|
* rx-lite 3.1.2 [Apache License](https://github.com/Reactive-Extensions/RxJS)
|
||||||
* samsam 1.1.2 [BSD](https://github.com/busterjs/samsam)
|
* samsam 1.1.2 [BSD](https://github.com/busterjs/samsam)
|
||||||
|
|
|
@ -137,7 +137,7 @@ type Response interface {
|
||||||
|
|
||||||
// Source is connection information to a time-series data store.
|
// Source is connection information to a time-series data store.
|
||||||
type Source struct {
|
type Source struct {
|
||||||
ID int `json:"id,omitempty,string"` // ID is the unique ID of the source
|
ID int `json:"id,string"` // ID is the unique ID of the source
|
||||||
Name string `json:"name"` // Name is the user-defined name for the source
|
Name string `json:"name"` // Name is the user-defined name for the source
|
||||||
Type string `json:"type,omitempty"` // Type specifies which kinds of source (enterprise vs oss)
|
Type string `json:"type,omitempty"` // Type specifies which kinds of source (enterprise vs oss)
|
||||||
Username string `json:"username,omitempty"` // Username is the username to connect to the source
|
Username string `json:"username,omitempty"` // Username is the username to connect to the source
|
||||||
|
|
|
@ -0,0 +1,144 @@
|
||||||
|
package memdb
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/influxdata/chronograf"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Ensure KapacitorStore and MultiKapacitorStore implements chronograf.ServersStore.
|
||||||
|
var _ chronograf.ServersStore = &KapacitorStore{}
|
||||||
|
var _ chronograf.ServersStore = &MultiKapacitorStore{}
|
||||||
|
|
||||||
|
// KapacitorStore implements the chronograf.ServersStore interface, and keeps
|
||||||
|
// an in-memory Kapacitor according to startup configuration
|
||||||
|
type KapacitorStore struct {
|
||||||
|
Kapacitor *chronograf.Server
|
||||||
|
}
|
||||||
|
|
||||||
|
// All will return a slice containing a configured source
|
||||||
|
func (store *KapacitorStore) All(ctx context.Context) ([]chronograf.Server, error) {
|
||||||
|
if store.Kapacitor != nil {
|
||||||
|
return []chronograf.Server{*store.Kapacitor}, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add does not have any effect
|
||||||
|
func (store *KapacitorStore) Add(ctx context.Context, kap chronograf.Server) (chronograf.Server, error) {
|
||||||
|
return chronograf.Server{}, fmt.Errorf("In-memory KapacitorStore does not support adding a Kapacitor")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete removes the in-memory configured Kapacitor if its ID matches what's provided
|
||||||
|
func (store *KapacitorStore) Delete(ctx context.Context, kap chronograf.Server) error {
|
||||||
|
if store.Kapacitor == nil || store.Kapacitor.ID != kap.ID {
|
||||||
|
return fmt.Errorf("Unable to find Kapacitor with id %d", kap.ID)
|
||||||
|
}
|
||||||
|
store.Kapacitor = nil
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get returns the in-memory Kapacitor if its ID matches what's provided
|
||||||
|
func (store *KapacitorStore) Get(ctx context.Context, id int) (chronograf.Server, error) {
|
||||||
|
if store.Kapacitor == nil || store.Kapacitor.ID != id {
|
||||||
|
return chronograf.Server{}, fmt.Errorf("Unable to find Kapacitor with id %d", id)
|
||||||
|
}
|
||||||
|
return *store.Kapacitor, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update overwrites the in-memory configured Kapacitor if its ID matches what's provided
|
||||||
|
func (store *KapacitorStore) Update(ctx context.Context, kap chronograf.Server) error {
|
||||||
|
if store.Kapacitor == nil || store.Kapacitor.ID != kap.ID {
|
||||||
|
return fmt.Errorf("Unable to find Kapacitor with id %d", kap.ID)
|
||||||
|
}
|
||||||
|
store.Kapacitor = &kap
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MultiKapacitorStore implements the chronograf.ServersStore interface, and
|
||||||
|
// delegates to all contained KapacitorStores
|
||||||
|
type MultiKapacitorStore struct {
|
||||||
|
Stores []chronograf.ServersStore
|
||||||
|
}
|
||||||
|
|
||||||
|
// All concatenates the Kapacitors of all contained Stores
|
||||||
|
func (multi *MultiKapacitorStore) All(ctx context.Context) ([]chronograf.Server, error) {
|
||||||
|
all := []chronograf.Server{}
|
||||||
|
kapSet := map[int]struct{}{}
|
||||||
|
|
||||||
|
ok := false
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
var kaps []chronograf.Server
|
||||||
|
kaps, err = store.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
// If this Store is unable to return an array of kapacitors, skip to the
|
||||||
|
// next Store.
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
ok = true // We've received a response from at least one Store
|
||||||
|
for _, kap := range kaps {
|
||||||
|
// Enforce that the kapacitor has a unique ID
|
||||||
|
// If the ID has been seen before, ignore the kapacitor
|
||||||
|
if _, okay := kapSet[kap.ID]; !okay { // We have a new kapacitor
|
||||||
|
kapSet[kap.ID] = struct{}{} // We just care that the ID is unique
|
||||||
|
all = append(all, kap)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !ok {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return all, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the kap to the first responsive Store
|
||||||
|
func (multi *MultiKapacitorStore) Add(ctx context.Context, kap chronograf.Server) (chronograf.Server, error) {
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
var k chronograf.Server
|
||||||
|
k, err = store.Add(ctx, kap)
|
||||||
|
if err == nil {
|
||||||
|
return k, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return chronograf.Server{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete delegates to all Stores, returns success if one Store is successful
|
||||||
|
func (multi *MultiKapacitorStore) Delete(ctx context.Context, kap chronograf.Server) error {
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
err = store.Delete(ctx, kap)
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get finds the Source by id among all contained Stores
|
||||||
|
func (multi *MultiKapacitorStore) Get(ctx context.Context, id int) (chronograf.Server, error) {
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
var k chronograf.Server
|
||||||
|
k, err = store.Get(ctx, id)
|
||||||
|
if err == nil {
|
||||||
|
return k, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return chronograf.Server{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update the first responsive Store
|
||||||
|
func (multi *MultiKapacitorStore) Update(ctx context.Context, kap chronograf.Server) error {
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
err = store.Update(ctx, kap)
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
|
@ -0,0 +1,129 @@
|
||||||
|
package memdb
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/influxdata/chronograf"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestInterfaceImplementation(t *testing.T) {
|
||||||
|
var _ chronograf.ServersStore = &KapacitorStore{}
|
||||||
|
var _ chronograf.ServersStore = &MultiKapacitorStore{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestKapacitorStoreAll(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := KapacitorStore{}
|
||||||
|
kaps, err := store.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("All should not throw an error with an empty Store")
|
||||||
|
}
|
||||||
|
if len(kaps) != 0 {
|
||||||
|
t.Fatal("Store should be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Kapacitor = &chronograf.Server{}
|
||||||
|
kaps, err = store.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("All should not throw an error with an empty Store")
|
||||||
|
}
|
||||||
|
if len(kaps) != 1 {
|
||||||
|
t.Fatal("Store should have 1 element")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestKapacitorStoreAdd(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := KapacitorStore{}
|
||||||
|
_, err := store.Add(ctx, chronograf.Server{})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Store should not support adding another source")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestKapacitorStoreDelete(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := KapacitorStore{}
|
||||||
|
err := store.Delete(ctx, chronograf.Server{})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Delete should not operate on an empty Store")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Kapacitor = &chronograf.Server{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
err = store.Delete(ctx, chronograf.Server{
|
||||||
|
ID: 8,
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Delete should not remove elements with the wrong ID")
|
||||||
|
}
|
||||||
|
|
||||||
|
err = store.Delete(ctx, chronograf.Server{
|
||||||
|
ID: 9,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Delete should remove an element with a matching ID")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestKapacitorStoreGet(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := KapacitorStore{}
|
||||||
|
_, err := store.Get(ctx, 9)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Get should return an error for an empty Store")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Kapacitor = &chronograf.Server{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
_, err = store.Get(ctx, 8)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Get should return an error if it finds no matches")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Kapacitor = &chronograf.Server{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
kap, err := store.Get(ctx, 9)
|
||||||
|
if err != nil || kap.ID != 9 {
|
||||||
|
t.Fatal("Get should find the element with a matching ID")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestKapacitorStoreUpdate(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := KapacitorStore{}
|
||||||
|
err := store.Update(ctx, chronograf.Server{})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Update fhouls return an error for an empty Store")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Kapacitor = &chronograf.Server{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
err = store.Update(ctx, chronograf.Server{
|
||||||
|
ID: 8,
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Update should return an error if it finds no matches")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Kapacitor = &chronograf.Server{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
err = store.Update(ctx, chronograf.Server{
|
||||||
|
ID: 9,
|
||||||
|
URL: "http://crystal.pepsi.com",
|
||||||
|
})
|
||||||
|
if err != nil || store.Kapacitor.URL != "http://crystal.pepsi.com" {
|
||||||
|
t.Fatal("Update should overwrite elements with matching IDs")
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,142 @@
|
||||||
|
package memdb
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/influxdata/chronograf"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Ensure MultiSourcesStore and SourcesStore implements chronograf.SourcesStore.
|
||||||
|
var _ chronograf.SourcesStore = &SourcesStore{}
|
||||||
|
var _ chronograf.SourcesStore = &MultiSourcesStore{}
|
||||||
|
|
||||||
|
// MultiSourcesStore delegates to the SourcesStores that compose it
|
||||||
|
type MultiSourcesStore struct {
|
||||||
|
Stores []chronograf.SourcesStore
|
||||||
|
}
|
||||||
|
|
||||||
|
// All concatenates the Sources of all contained Stores
|
||||||
|
func (multi *MultiSourcesStore) All(ctx context.Context) ([]chronograf.Source, error) {
|
||||||
|
all := []chronograf.Source{}
|
||||||
|
sourceSet := map[int]struct{}{}
|
||||||
|
|
||||||
|
ok := false
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
var sources []chronograf.Source
|
||||||
|
sources, err = store.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
// If this Store is unable to return an array of sources, skip to the
|
||||||
|
// next Store.
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
ok = true // We've received a response from at least one Store
|
||||||
|
for _, s := range sources {
|
||||||
|
// Enforce that the source has a unique ID
|
||||||
|
// If the source has been seen before, don't override what we already have
|
||||||
|
if _, okay := sourceSet[s.ID]; !okay { // We have a new Source!
|
||||||
|
sourceSet[s.ID] = struct{}{} // We just care that the ID is unique
|
||||||
|
all = append(all, s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !ok {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return all, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the src to the first Store to respond successfully
|
||||||
|
func (multi *MultiSourcesStore) Add(ctx context.Context, src chronograf.Source) (chronograf.Source, error) {
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
var s chronograf.Source
|
||||||
|
s, err = store.Add(ctx, src)
|
||||||
|
if err == nil {
|
||||||
|
return s, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return chronograf.Source{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete delegates to all stores, returns success if one Store is successful
|
||||||
|
func (multi *MultiSourcesStore) Delete(ctx context.Context, src chronograf.Source) error {
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
err = store.Delete(ctx, src)
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get finds the Source by id among all contained Stores
|
||||||
|
func (multi *MultiSourcesStore) Get(ctx context.Context, id int) (chronograf.Source, error) {
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
var s chronograf.Source
|
||||||
|
s, err = store.Get(ctx, id)
|
||||||
|
if err == nil {
|
||||||
|
return s, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return chronograf.Source{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update the first store to return a successful response
|
||||||
|
func (multi *MultiSourcesStore) Update(ctx context.Context, src chronograf.Source) error {
|
||||||
|
var err error
|
||||||
|
for _, store := range multi.Stores {
|
||||||
|
err = store.Update(ctx, src)
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// SourcesStore implements the chronograf.SourcesStore interface
|
||||||
|
type SourcesStore struct {
|
||||||
|
Source *chronograf.Source
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add does not have any effect
|
||||||
|
func (store *SourcesStore) Add(ctx context.Context, src chronograf.Source) (chronograf.Source, error) {
|
||||||
|
return chronograf.Source{}, fmt.Errorf("In-memory SourcesStore does not support adding a Source")
|
||||||
|
}
|
||||||
|
|
||||||
|
// All will return a slice containing a configured source
|
||||||
|
func (store *SourcesStore) All(ctx context.Context) ([]chronograf.Source, error) {
|
||||||
|
if store.Source != nil {
|
||||||
|
return []chronograf.Source{*store.Source}, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete removes the SourcesStore.Soruce if it matches the provided Source
|
||||||
|
func (store *SourcesStore) Delete(ctx context.Context, src chronograf.Source) error {
|
||||||
|
if store.Source == nil || store.Source.ID != src.ID {
|
||||||
|
return fmt.Errorf("Unable to find Source with id %d", src.ID)
|
||||||
|
}
|
||||||
|
store.Source = nil
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get returns the configured source if the id matches
|
||||||
|
func (store *SourcesStore) Get(ctx context.Context, id int) (chronograf.Source, error) {
|
||||||
|
if store.Source == nil || store.Source.ID != id {
|
||||||
|
return chronograf.Source{}, fmt.Errorf("Unable to find Source with id %d", id)
|
||||||
|
}
|
||||||
|
return *store.Source, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update does nothing
|
||||||
|
func (store *SourcesStore) Update(ctx context.Context, src chronograf.Source) error {
|
||||||
|
if store.Source == nil || store.Source.ID != src.ID {
|
||||||
|
return fmt.Errorf("Unable to find Source with id %d", src.ID)
|
||||||
|
}
|
||||||
|
store.Source = &src
|
||||||
|
return nil
|
||||||
|
}
|
|
@ -0,0 +1,128 @@
|
||||||
|
package memdb
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/influxdata/chronograf"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestSourcesStore(t *testing.T) {
|
||||||
|
var _ chronograf.SourcesStore = &SourcesStore{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSourcesStoreAdd(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := SourcesStore{}
|
||||||
|
_, err := store.Add(ctx, chronograf.Source{})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Store should not support adding another source")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSourcesStoreAll(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := SourcesStore{}
|
||||||
|
srcs, err := store.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("All should not throw an error with an empty Store")
|
||||||
|
}
|
||||||
|
if len(srcs) != 0 {
|
||||||
|
t.Fatal("Store should be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Source = &chronograf.Source{}
|
||||||
|
srcs, err = store.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("All should not throw an error with an empty Store")
|
||||||
|
}
|
||||||
|
if len(srcs) != 1 {
|
||||||
|
t.Fatal("Store should have 1 element")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSourcesStoreDelete(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := SourcesStore{}
|
||||||
|
err := store.Delete(ctx, chronograf.Source{})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Delete should not operate on an empty Store")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Source = &chronograf.Source{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
err = store.Delete(ctx, chronograf.Source{
|
||||||
|
ID: 8,
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Delete should not remove elements with the wrong ID")
|
||||||
|
}
|
||||||
|
|
||||||
|
err = store.Delete(ctx, chronograf.Source{
|
||||||
|
ID: 9,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("Delete should remove an element with a matching ID")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSourcesStoreGet(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := SourcesStore{}
|
||||||
|
_, err := store.Get(ctx, 9)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Get should return an error for an empty Store")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Source = &chronograf.Source{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
_, err = store.Get(ctx, 8)
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Get should return an error if it finds no matches")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Source = &chronograf.Source{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
src, err := store.Get(ctx, 9)
|
||||||
|
if err != nil || src.ID != 9 {
|
||||||
|
t.Fatal("Get should find the element with a matching ID")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSourcesStoreUpdate(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
store := SourcesStore{}
|
||||||
|
err := store.Update(ctx, chronograf.Source{})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Update should return an error for an empty Store")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Source = &chronograf.Source{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
err = store.Update(ctx, chronograf.Source{
|
||||||
|
ID: 8,
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatal("Update should return an error if it finds no matches")
|
||||||
|
}
|
||||||
|
|
||||||
|
store.Source = &chronograf.Source{
|
||||||
|
ID: 9,
|
||||||
|
}
|
||||||
|
err = store.Update(ctx, chronograf.Source{
|
||||||
|
ID: 9,
|
||||||
|
URL: "http://crystal.pepsi.com",
|
||||||
|
})
|
||||||
|
if err != nil || store.Source.URL != "http://crystal.pepsi.com" {
|
||||||
|
t.Fatal("Update should overwrite elements with matching IDs")
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,113 @@
|
||||||
|
package server
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/influxdata/chronograf"
|
||||||
|
"github.com/influxdata/chronograf/canned"
|
||||||
|
"github.com/influxdata/chronograf/layouts"
|
||||||
|
"github.com/influxdata/chronograf/memdb"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LayoutBuilder is responsible for building Layouts
|
||||||
|
type LayoutBuilder interface {
|
||||||
|
Build(chronograf.LayoutStore) (*layouts.MultiLayoutStore, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MultiLayoutBuilder implements LayoutBuilder and will return a MultiLayoutStore
|
||||||
|
type MultiLayoutBuilder struct {
|
||||||
|
Logger chronograf.Logger
|
||||||
|
UUID chronograf.ID
|
||||||
|
CannedPath string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build will construct a MultiLayoutStore of canned and db-backed personalized
|
||||||
|
// layouts
|
||||||
|
func (builder *MultiLayoutBuilder) Build(db chronograf.LayoutStore) (*layouts.MultiLayoutStore, error) {
|
||||||
|
// These apps are those handled from a directory
|
||||||
|
apps := canned.NewApps(builder.CannedPath, builder.UUID, builder.Logger)
|
||||||
|
// These apps are statically compiled into chronograf
|
||||||
|
binApps := &canned.BinLayoutStore{
|
||||||
|
Logger: builder.Logger,
|
||||||
|
}
|
||||||
|
// Acts as a front-end to both the bolt layouts, filesystem layouts and binary statically compiled layouts.
|
||||||
|
// The idea here is that these stores form a hierarchy in which each is tried sequentially until
|
||||||
|
// the operation has success. So, the database is preferred over filesystem over binary data.
|
||||||
|
layouts := &layouts.MultiLayoutStore{
|
||||||
|
Stores: []chronograf.LayoutStore{
|
||||||
|
db,
|
||||||
|
apps,
|
||||||
|
binApps,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return layouts, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SourcesBuilder builds a MultiSourceStore
|
||||||
|
type SourcesBuilder interface {
|
||||||
|
Build(chronograf.SourcesStore) (*memdb.MultiSourcesStore, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MultiSourceBuilder implements SourcesBuilder
|
||||||
|
type MultiSourceBuilder struct {
|
||||||
|
InfluxDBURL string
|
||||||
|
InfluxDBUsername string
|
||||||
|
InfluxDBPassword string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build will return a MultiSourceStore
|
||||||
|
func (fs *MultiSourceBuilder) Build(db chronograf.SourcesStore) (*memdb.MultiSourcesStore, error) {
|
||||||
|
stores := []chronograf.SourcesStore{db}
|
||||||
|
|
||||||
|
if fs.InfluxDBURL != "" {
|
||||||
|
influxStore := &memdb.SourcesStore{
|
||||||
|
Source: &chronograf.Source{
|
||||||
|
ID: 0,
|
||||||
|
Name: fs.InfluxDBURL,
|
||||||
|
Type: chronograf.InfluxDB,
|
||||||
|
Username: fs.InfluxDBUsername,
|
||||||
|
Password: fs.InfluxDBPassword,
|
||||||
|
URL: fs.InfluxDBURL,
|
||||||
|
Default: true,
|
||||||
|
}}
|
||||||
|
stores = append([]chronograf.SourcesStore{influxStore}, stores...)
|
||||||
|
}
|
||||||
|
sources := &memdb.MultiSourcesStore{
|
||||||
|
Stores: stores,
|
||||||
|
}
|
||||||
|
|
||||||
|
return sources, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// KapacitorBuilder builds a KapacitorStore
|
||||||
|
type KapacitorBuilder interface {
|
||||||
|
Build(chronograf.ServersStore) (*memdb.MultiKapacitorStore, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MultiKapacitorBuilder implements KapacitorBuilder
|
||||||
|
type MultiKapacitorBuilder struct {
|
||||||
|
KapacitorURL string
|
||||||
|
KapacitorUsername string
|
||||||
|
KapacitorPassword string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build will return a MultiKapacitorStore
|
||||||
|
func (builder *MultiKapacitorBuilder) Build(db chronograf.ServersStore) (*memdb.MultiKapacitorStore, error) {
|
||||||
|
stores := []chronograf.ServersStore{db}
|
||||||
|
if builder.KapacitorURL != "" {
|
||||||
|
memStore := &memdb.KapacitorStore{
|
||||||
|
Kapacitor: &chronograf.Server{
|
||||||
|
ID: 0,
|
||||||
|
SrcID: 0,
|
||||||
|
Name: builder.KapacitorURL,
|
||||||
|
URL: builder.KapacitorURL,
|
||||||
|
Username: builder.KapacitorUsername,
|
||||||
|
Password: builder.KapacitorPassword,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
stores = append([]chronograf.ServersStore{memStore}, stores...)
|
||||||
|
}
|
||||||
|
kapacitors := &memdb.MultiKapacitorStore{
|
||||||
|
Stores: stores,
|
||||||
|
}
|
||||||
|
return kapacitors, nil
|
||||||
|
}
|
|
@ -1,7 +1,6 @@
|
||||||
package server
|
package server
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/base64"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/http/httputil"
|
"net/http/httputil"
|
||||||
|
@ -49,9 +48,7 @@ func (h *Service) KapacitorProxy(w http.ResponseWriter, r *http.Request) {
|
||||||
// Because we are acting as a proxy, kapacitor needs to have the basic auth information set as
|
// Because we are acting as a proxy, kapacitor needs to have the basic auth information set as
|
||||||
// a header directly
|
// a header directly
|
||||||
if srv.Username != "" && srv.Password != "" {
|
if srv.Username != "" && srv.Password != "" {
|
||||||
auth := "Basic " + srv.Username + ":" + srv.Password
|
req.SetBasicAuth(srv.Username, srv.Password)
|
||||||
header := base64.StdEncoding.EncodeToString([]byte(auth))
|
|
||||||
req.Header.Set("Authorization", header)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
proxy := &httputil.ReverseProxy{
|
proxy := &httputil.ReverseProxy{
|
||||||
|
|
|
@ -14,15 +14,13 @@ import (
|
||||||
|
|
||||||
"github.com/influxdata/chronograf"
|
"github.com/influxdata/chronograf"
|
||||||
"github.com/influxdata/chronograf/bolt"
|
"github.com/influxdata/chronograf/bolt"
|
||||||
"github.com/influxdata/chronograf/canned"
|
"github.com/influxdata/chronograf/influx"
|
||||||
"github.com/influxdata/chronograf/layouts"
|
|
||||||
clog "github.com/influxdata/chronograf/log"
|
clog "github.com/influxdata/chronograf/log"
|
||||||
"github.com/influxdata/chronograf/oauth2"
|
"github.com/influxdata/chronograf/oauth2"
|
||||||
"github.com/influxdata/chronograf/uuid"
|
"github.com/influxdata/chronograf/uuid"
|
||||||
client "github.com/influxdata/usage-client/v1"
|
client "github.com/influxdata/usage-client/v1"
|
||||||
flags "github.com/jessevdk/go-flags"
|
flags "github.com/jessevdk/go-flags"
|
||||||
"github.com/tylerb/graceful"
|
"github.com/tylerb/graceful"
|
||||||
"github.com/influxdata/chronograf/influx"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
@ -42,6 +40,14 @@ type Server struct {
|
||||||
Cert flags.Filename `long:"cert" description:"Path to PEM encoded public key certificate. " env:"TLS_CERTIFICATE"`
|
Cert flags.Filename `long:"cert" description:"Path to PEM encoded public key certificate. " env:"TLS_CERTIFICATE"`
|
||||||
Key flags.Filename `long:"key" description:"Path to private key associated with given certificate. " env:"TLS_PRIVATE_KEY"`
|
Key flags.Filename `long:"key" description:"Path to private key associated with given certificate. " env:"TLS_PRIVATE_KEY"`
|
||||||
|
|
||||||
|
InfluxDBURL string `long:"influxdb-url" description:"Location of your InfluxDB instance" env:"INFLUXDB_URL"`
|
||||||
|
InfluxDBUsername string `long:"influxdb-username" description:"Username for your InfluxDB instance" env:"INFLUXDB_USERNAME"`
|
||||||
|
InfluxDBPassword string `long:"influxdb-password" description:"Password for your InfluxDB instance" env:"INFLUXDB_PASSWORD"`
|
||||||
|
|
||||||
|
KapacitorURL string `long:"kapacitor-url" description:"Location of your Kapacitor instance" env:"KAPACITOR_URL"`
|
||||||
|
KapacitorUsername string `long:"kapacitor-username" description:"Username of your Kapacitor instance" env:"KAPACITOR_USERNAME"`
|
||||||
|
KapacitorPassword string `long:"kapacitor-password" description:"Password of your Kapacitor instance" env:"KAPACITOR_PASSWORD"`
|
||||||
|
|
||||||
Develop bool `short:"d" long:"develop" description:"Run server in develop mode."`
|
Develop bool `short:"d" long:"develop" description:"Run server in develop mode."`
|
||||||
BoltPath string `short:"b" long:"bolt-path" description:"Full path to boltDB file (/var/lib/chronograf/chronograf-v1.db)" env:"BOLT_PATH" default:"chronograf-v1.db"`
|
BoltPath string `short:"b" long:"bolt-path" description:"Full path to boltDB file (/var/lib/chronograf/chronograf-v1.db)" env:"BOLT_PATH" default:"chronograf-v1.db"`
|
||||||
CannedPath string `short:"c" long:"canned-path" description:"Path to directory of pre-canned application layouts (/usr/share/chronograf/canned)" env:"CANNED_PATH" default:"canned"`
|
CannedPath string `short:"c" long:"canned-path" description:"Path to directory of pre-canned application layouts (/usr/share/chronograf/canned)" env:"CANNED_PATH" default:"canned"`
|
||||||
|
@ -180,7 +186,22 @@ func (s *Server) NewListener() (net.Listener, error) {
|
||||||
// Serve starts and runs the chronograf server
|
// Serve starts and runs the chronograf server
|
||||||
func (s *Server) Serve(ctx context.Context) error {
|
func (s *Server) Serve(ctx context.Context) error {
|
||||||
logger := clog.New(clog.ParseLevel(s.LogLevel))
|
logger := clog.New(clog.ParseLevel(s.LogLevel))
|
||||||
service := openService(ctx, s.BoltPath, s.CannedPath, logger, s.useAuth())
|
layoutBuilder := &MultiLayoutBuilder{
|
||||||
|
Logger: logger,
|
||||||
|
UUID: &uuid.V4{},
|
||||||
|
CannedPath: s.CannedPath,
|
||||||
|
}
|
||||||
|
sourcesBuilder := &MultiSourceBuilder{
|
||||||
|
InfluxDBURL: s.InfluxDBURL,
|
||||||
|
InfluxDBUsername: s.InfluxDBUsername,
|
||||||
|
InfluxDBPassword: s.InfluxDBPassword,
|
||||||
|
}
|
||||||
|
kapacitorBuilder := &MultiKapacitorBuilder{
|
||||||
|
KapacitorURL: s.KapacitorURL,
|
||||||
|
KapacitorUsername: s.KapacitorUsername,
|
||||||
|
KapacitorPassword: s.KapacitorPassword,
|
||||||
|
}
|
||||||
|
service := openService(ctx, s.BoltPath, layoutBuilder, sourcesBuilder, kapacitorBuilder, logger, s.useAuth())
|
||||||
basepath = s.Basepath
|
basepath = s.Basepath
|
||||||
|
|
||||||
providerFuncs := []func(func(oauth2.Provider, oauth2.Mux)){}
|
providerFuncs := []func(func(oauth2.Provider, oauth2.Mux)){}
|
||||||
|
@ -256,7 +277,7 @@ func (s *Server) Serve(ctx context.Context) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func openService(ctx context.Context, boltPath, cannedPath string, logger chronograf.Logger, useAuth bool) Service {
|
func openService(ctx context.Context, boltPath string, lBuilder LayoutBuilder, sBuilder SourcesBuilder, kapBuilder KapacitorBuilder, logger chronograf.Logger, useAuth bool) Service {
|
||||||
db := bolt.NewClient()
|
db := bolt.NewClient()
|
||||||
db.Path = boltPath
|
db.Path = boltPath
|
||||||
if err := db.Open(ctx); err != nil {
|
if err := db.Open(ctx); err != nil {
|
||||||
|
@ -266,28 +287,34 @@ func openService(ctx context.Context, boltPath, cannedPath string, logger chrono
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
// These apps are those handled from a directory
|
layouts, err := lBuilder.Build(db.LayoutStore)
|
||||||
apps := canned.NewApps(cannedPath, &uuid.V4{}, logger)
|
if err != nil {
|
||||||
// These apps are statically compiled into chronograf
|
logger.
|
||||||
binApps := &canned.BinLayoutStore{
|
WithField("component", "LayoutStore").
|
||||||
Logger: logger,
|
Error("Unable to construct a MultiLayoutStore", err)
|
||||||
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Acts as a front-end to both the bolt layouts, filesystem layouts and binary statically compiled layouts.
|
sources, err := sBuilder.Build(db.SourcesStore)
|
||||||
// The idea here is that these stores form a hierarchy in which each is tried sequentially until
|
if err != nil {
|
||||||
// the operation has success. So, the database is preferred over filesystem over binary data.
|
logger.
|
||||||
layouts := &layouts.MultiLayoutStore{
|
WithField("component", "SourcesStore").
|
||||||
Stores: []chronograf.LayoutStore{
|
Error("Unable to construct a MultiSourcesStore", err)
|
||||||
db.LayoutStore,
|
os.Exit(1)
|
||||||
apps,
|
}
|
||||||
binApps,
|
|
||||||
},
|
kapacitors, err := kapBuilder.Build(db.ServersStore)
|
||||||
|
if err != nil {
|
||||||
|
logger.
|
||||||
|
WithField("component", "KapacitorStore").
|
||||||
|
Error("Unable to construct a MultiKapacitorStore", err)
|
||||||
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
return Service{
|
return Service{
|
||||||
TimeSeriesClient: &InfluxClient{},
|
TimeSeriesClient: &InfluxClient{},
|
||||||
SourcesStore: db.SourcesStore,
|
SourcesStore: sources,
|
||||||
ServersStore: db.ServersStore,
|
ServersStore: kapacitors,
|
||||||
UsersStore: db.UsersStore,
|
UsersStore: db.UsersStore,
|
||||||
LayoutStore: layouts,
|
LayoutStore: layouts,
|
||||||
DashboardsStore: db.DashboardsStore,
|
DashboardsStore: db.DashboardsStore,
|
||||||
|
|
|
@ -0,0 +1,26 @@
|
||||||
|
package server
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func TestLayoutBuilder(t *testing.T) {
|
||||||
|
var l LayoutBuilder = &MultiLayoutBuilder{}
|
||||||
|
layout, err := l.Build(nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("MultiLayoutBuilder can't build a MultiLayoutStore: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if layout == nil {
|
||||||
|
t.Fatal("LayoutBuilder should have built a layout")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSourcesStoresBuilder(t *testing.T) {
|
||||||
|
var b SourcesBuilder = &MultiSourceBuilder{}
|
||||||
|
sources, err := b.Build(nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("MultiSourceBuilder can't build a MultiSourcesStore: %v", err)
|
||||||
|
}
|
||||||
|
if sources == nil {
|
||||||
|
t.Fatal("SourcesBuilder should have built a MultiSourceStore")
|
||||||
|
}
|
||||||
|
}
|
|
@ -3010,14 +3010,9 @@
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"Roles": {
|
"Roles": {
|
||||||
"type": "object",
|
"type": "array",
|
||||||
"properties": {
|
"items": {
|
||||||
"roles": {
|
"$ref": "#/definitions/Role"
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"$ref": "#/definitions/Role"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"example": {
|
"example": {
|
||||||
"roles": [
|
"roles": [
|
||||||
|
@ -3178,6 +3173,9 @@
|
||||||
"permissions": {
|
"permissions": {
|
||||||
"$ref": "#/definitions/Permissions"
|
"$ref": "#/definitions/Permissions"
|
||||||
},
|
},
|
||||||
|
"roles": {
|
||||||
|
"$ref": "#/definitions/Roles"
|
||||||
|
},
|
||||||
"links": {
|
"links": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"description": "URL relations of this user",
|
"description": "URL relations of this user",
|
||||||
|
|
|
@ -111,6 +111,7 @@
|
||||||
"react-tooltip": "^3.2.1",
|
"react-tooltip": "^3.2.1",
|
||||||
"redux": "^3.3.1",
|
"redux": "^3.3.1",
|
||||||
"redux-thunk": "^1.0.3",
|
"redux-thunk": "^1.0.3",
|
||||||
|
"rome": "^2.1.22",
|
||||||
"updeep": "^0.13.0"
|
"updeep": "^0.13.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -30,7 +30,7 @@ import {
|
||||||
NEW_EMPTY_RP,
|
NEW_EMPTY_RP,
|
||||||
} from 'src/admin/constants'
|
} from 'src/admin/constants'
|
||||||
|
|
||||||
let state = undefined
|
let state
|
||||||
|
|
||||||
// Users
|
// Users
|
||||||
const u1 = {
|
const u1 = {
|
||||||
|
@ -57,11 +57,11 @@ const u1 = {
|
||||||
'Monitor',
|
'Monitor',
|
||||||
'CopyShard',
|
'CopyShard',
|
||||||
'KapacitorAPI',
|
'KapacitorAPI',
|
||||||
'KapacitorConfigAPI'
|
'KapacitorConfigAPI',
|
||||||
],
|
],
|
||||||
scope: 'all',
|
scope: 'all',
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
],
|
],
|
||||||
permissions: [],
|
permissions: [],
|
||||||
links: {self: '/chronograf/v1/sources/1/users/acidburn'},
|
links: {self: '/chronograf/v1/sources/1/users/acidburn'},
|
||||||
|
@ -98,16 +98,16 @@ const r1 = {
|
||||||
'Monitor',
|
'Monitor',
|
||||||
'CopyShard',
|
'CopyShard',
|
||||||
'KapacitorAPI',
|
'KapacitorAPI',
|
||||||
'KapacitorConfigAPI'
|
'KapacitorConfigAPI',
|
||||||
],
|
],
|
||||||
scope: 'all',
|
scope: 'all',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
links: {self: '/chronograf/v1/sources/1/roles/hax0r'}
|
links: {self: '/chronograf/v1/sources/1/roles/hax0r'},
|
||||||
}
|
}
|
||||||
const r2 = {
|
const r2 = {
|
||||||
name: 'l33tus3r',
|
name: 'l33tus3r',
|
||||||
links: {self: '/chronograf/v1/sources/1/roles/l33tus3r'}
|
links: {self: '/chronograf/v1/sources/1/roles/l33tus3r'},
|
||||||
}
|
}
|
||||||
const roles = [r1, r2]
|
const roles = [r1, r2]
|
||||||
|
|
||||||
|
@ -226,7 +226,7 @@ describe('Admin.Reducers', () => {
|
||||||
state = {
|
state = {
|
||||||
users: [
|
users: [
|
||||||
u1,
|
u1,
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
const actual = reducer(state, addUser())
|
const actual = reducer(state, addUser())
|
||||||
|
@ -260,7 +260,7 @@ describe('Admin.Reducers', () => {
|
||||||
|
|
||||||
const actual = reducer(state, editUser(u2, updates))
|
const actual = reducer(state, editUser(u2, updates))
|
||||||
const expected = {
|
const expected = {
|
||||||
users: [{...u2, ...updates}, u1]
|
users: [{...u2, ...updates}, u1],
|
||||||
}
|
}
|
||||||
|
|
||||||
expect(actual.users).to.deep.equal(expected.users)
|
expect(actual.users).to.deep.equal(expected.users)
|
||||||
|
@ -270,7 +270,7 @@ describe('Admin.Reducers', () => {
|
||||||
state = {
|
state = {
|
||||||
roles: [
|
roles: [
|
||||||
r1,
|
r1,
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
const actual = reducer(state, addRole())
|
const actual = reducer(state, addRole())
|
||||||
|
@ -304,7 +304,7 @@ describe('Admin.Reducers', () => {
|
||||||
|
|
||||||
const actual = reducer(state, editRole(r2, updates))
|
const actual = reducer(state, editRole(r2, updates))
|
||||||
const expected = {
|
const expected = {
|
||||||
roles: [{...r2, ...updates}, r1]
|
roles: [{...r2, ...updates}, r1],
|
||||||
}
|
}
|
||||||
|
|
||||||
expect(actual.roles).to.deep.equal(expected.roles)
|
expect(actual.roles).to.deep.equal(expected.roles)
|
||||||
|
@ -323,7 +323,7 @@ describe('Admin.Reducers', () => {
|
||||||
state = {
|
state = {
|
||||||
roles: [
|
roles: [
|
||||||
r1,
|
r1,
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
const actual = reducer(state, deleteRole(r1))
|
const actual = reducer(state, deleteRole(r1))
|
||||||
|
@ -338,7 +338,7 @@ describe('Admin.Reducers', () => {
|
||||||
state = {
|
state = {
|
||||||
users: [
|
users: [
|
||||||
u1,
|
u1,
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
const actual = reducer(state, deleteUser(u1))
|
const actual = reducer(state, deleteUser(u1))
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
import _ from 'lodash'
|
import _ from 'lodash'
|
||||||
|
|
||||||
import reducer from 'src/dashboards/reducers/ui'
|
import reducer from 'src/dashboards/reducers/ui'
|
||||||
import timeRanges from 'hson!src/shared/data/timeRanges.hson';
|
import timeRanges from 'hson!src/shared/data/timeRanges.hson'
|
||||||
|
|
||||||
import {
|
import {
|
||||||
loadDashboards,
|
loadDashboards,
|
||||||
|
@ -19,8 +19,8 @@ const noopAction = () => {
|
||||||
return {type: 'NOOP'}
|
return {type: 'NOOP'}
|
||||||
}
|
}
|
||||||
|
|
||||||
let state = undefined
|
let state
|
||||||
const timeRange = timeRanges[1];
|
const timeRange = timeRanges[1]
|
||||||
const d1 = {id: 1, cells: [], name: "d1"}
|
const d1 = {id: 1, cells: [], name: "d1"}
|
||||||
const d2 = {id: 2, cells: [], name: "d2"}
|
const d2 = {id: 2, cells: [], name: "d2"}
|
||||||
const dashboards = [d1, d2]
|
const dashboards = [d1, d2]
|
||||||
|
@ -117,7 +117,7 @@ describe('DataExplorer.Reducers.UI', () => {
|
||||||
const newCell = {
|
const newCell = {
|
||||||
x: c1.x,
|
x: c1.x,
|
||||||
y: c1.y,
|
y: c1.y,
|
||||||
name: newCellName
|
name: newCellName,
|
||||||
}
|
}
|
||||||
const dash = {...d1, cells: [c1]}
|
const dash = {...d1, cells: [c1]}
|
||||||
state = {
|
state = {
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import reducer from 'src/data_explorer/reducers/queryConfigs';
|
import reducer from 'src/data_explorer/reducers/queryConfigs'
|
||||||
import defaultQueryConfig from 'src/utils/defaultQueryConfig';
|
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
|
||||||
import {
|
import {
|
||||||
chooseNamespace,
|
chooseNamespace,
|
||||||
chooseMeasurement,
|
chooseMeasurement,
|
||||||
|
@ -10,108 +10,108 @@ import {
|
||||||
groupByTime,
|
groupByTime,
|
||||||
toggleTagAcceptance,
|
toggleTagAcceptance,
|
||||||
updateRawQuery,
|
updateRawQuery,
|
||||||
} from 'src/data_explorer/actions/view';
|
} from 'src/data_explorer/actions/view'
|
||||||
|
|
||||||
const fakeAddQueryAction = (panelID, queryID) => {
|
const fakeAddQueryAction = (panelID, queryID) => {
|
||||||
return {
|
return {
|
||||||
type: 'ADD_QUERY',
|
type: 'ADD_QUERY',
|
||||||
payload: {panelID, queryID},
|
payload: {panelID, queryID},
|
||||||
};
|
}
|
||||||
};
|
}
|
||||||
|
|
||||||
function buildInitialState(queryId, params) {
|
function buildInitialState(queryId, params) {
|
||||||
return Object.assign({}, defaultQueryConfig(queryId), params);
|
return Object.assign({}, defaultQueryConfig(queryId), params)
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('Chronograf.Reducers.queryConfig', () => {
|
describe('Chronograf.Reducers.queryConfig', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
|
|
||||||
it('can add a query', () => {
|
it('can add a query', () => {
|
||||||
const state = reducer({}, fakeAddQueryAction('blah', queryId));
|
const state = reducer({}, fakeAddQueryAction('blah', queryId))
|
||||||
|
|
||||||
const actual = state[queryId];
|
const actual = state[queryId]
|
||||||
const expected = defaultQueryConfig(queryId);
|
const expected = defaultQueryConfig(queryId)
|
||||||
expect(actual).to.deep.equal(expected);
|
expect(actual).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('choosing db, rp, and measurement', () => {
|
describe('choosing db, rp, and measurement', () => {
|
||||||
let state;
|
let state
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
state = reducer({}, fakeAddQueryAction('any', queryId));
|
state = reducer({}, fakeAddQueryAction('any', queryId))
|
||||||
});
|
})
|
||||||
|
|
||||||
it('sets the db and rp', () => {
|
it('sets the db and rp', () => {
|
||||||
const newState = reducer(state, chooseNamespace(queryId, {
|
const newState = reducer(state, chooseNamespace(queryId, {
|
||||||
database: 'telegraf',
|
database: 'telegraf',
|
||||||
retentionPolicy: 'monitor',
|
retentionPolicy: 'monitor',
|
||||||
}));
|
}))
|
||||||
|
|
||||||
expect(newState[queryId].database).to.equal('telegraf');
|
expect(newState[queryId].database).to.equal('telegraf')
|
||||||
expect(newState[queryId].retentionPolicy).to.equal('monitor');
|
expect(newState[queryId].retentionPolicy).to.equal('monitor')
|
||||||
});
|
})
|
||||||
|
|
||||||
it('sets the measurement', () => {
|
it('sets the measurement', () => {
|
||||||
const newState = reducer(state, chooseMeasurement(queryId, 'mem'));
|
const newState = reducer(state, chooseMeasurement(queryId, 'mem'))
|
||||||
|
|
||||||
expect(newState[queryId].measurement).to.equal('mem');
|
expect(newState[queryId].measurement).to.equal('mem')
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('a query has measurements and fields', () => {
|
describe('a query has measurements and fields', () => {
|
||||||
let state;
|
let state
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
const one = reducer({}, fakeAddQueryAction('any', queryId));
|
const one = reducer({}, fakeAddQueryAction('any', queryId))
|
||||||
const two = reducer(one, chooseNamespace(queryId, {
|
const two = reducer(one, chooseNamespace(queryId, {
|
||||||
database: '_internal',
|
database: '_internal',
|
||||||
retentionPolicy: 'daily',
|
retentionPolicy: 'daily',
|
||||||
}));
|
}))
|
||||||
const three = reducer(two, chooseMeasurement(queryId, 'disk'));
|
const three = reducer(two, chooseMeasurement(queryId, 'disk'))
|
||||||
state = reducer(three, toggleField(queryId, {field: 'a great field', funcs: []}));
|
state = reducer(three, toggleField(queryId, {field: 'a great field', funcs: []}))
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('choosing a new namespace', () => {
|
describe('choosing a new namespace', () => {
|
||||||
it('clears out the old measurement and fields', () => { // what about tags?
|
it('clears out the old measurement and fields', () => { // what about tags?
|
||||||
expect(state[queryId].measurement).to.exist;
|
expect(state[queryId].measurement).to.exist
|
||||||
expect(state[queryId].fields.length).to.equal(1);
|
expect(state[queryId].fields.length).to.equal(1)
|
||||||
|
|
||||||
const newState = reducer(state, chooseNamespace(queryId, {
|
const newState = reducer(state, chooseNamespace(queryId, {
|
||||||
database: 'newdb',
|
database: 'newdb',
|
||||||
retentionPolicy: 'newrp',
|
retentionPolicy: 'newrp',
|
||||||
}));
|
}))
|
||||||
|
|
||||||
expect(newState[queryId].measurement).not.to.exist;
|
expect(newState[queryId].measurement).not.to.exist
|
||||||
expect(newState[queryId].fields.length).to.equal(0);
|
expect(newState[queryId].fields.length).to.equal(0)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('choosing a new measurement', () => {
|
describe('choosing a new measurement', () => {
|
||||||
it('leaves the namespace and clears out the old fields', () => { // what about tags?
|
it('leaves the namespace and clears out the old fields', () => { // what about tags?
|
||||||
expect(state[queryId].fields.length).to.equal(1);
|
expect(state[queryId].fields.length).to.equal(1)
|
||||||
|
|
||||||
const newState = reducer(state, chooseMeasurement(queryId, 'newmeasurement'));
|
const newState = reducer(state, chooseMeasurement(queryId, 'newmeasurement'))
|
||||||
|
|
||||||
expect(state[queryId].database).to.equal(newState[queryId].database);
|
expect(state[queryId].database).to.equal(newState[queryId].database)
|
||||||
expect(state[queryId].retentionPolicy).to.equal(newState[queryId].retentionPolicy);
|
expect(state[queryId].retentionPolicy).to.equal(newState[queryId].retentionPolicy)
|
||||||
expect(newState[queryId].fields.length).to.equal(0);
|
expect(newState[queryId].fields.length).to.equal(0)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('when the query is part of a kapacitor rule', () => {
|
describe('when the query is part of a kapacitor rule', () => {
|
||||||
it('only allows one field', () => {
|
it('only allows one field', () => {
|
||||||
expect(state[queryId].fields.length).to.equal(1);
|
expect(state[queryId].fields.length).to.equal(1)
|
||||||
|
|
||||||
const isKapacitorRule = true;
|
const isKapacitorRule = true
|
||||||
const newState = reducer(state, toggleField(queryId, {field: 'a different field', funcs: []}, isKapacitorRule));
|
const newState = reducer(state, toggleField(queryId, {field: 'a different field', funcs: []}, isKapacitorRule))
|
||||||
|
|
||||||
expect(newState[queryId].fields.length).to.equal(1);
|
expect(newState[queryId].fields.length).to.equal(1)
|
||||||
expect(newState[queryId].fields[0].field).to.equal('a different field');
|
expect(newState[queryId].fields[0].field).to.equal('a different field')
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('APPLY_FUNCS_TO_FIELD', () => {
|
describe('APPLY_FUNCS_TO_FIELD', () => {
|
||||||
it('applies functions to a field without any existing functions', () => {
|
it('applies functions to a field without any existing functions', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: {
|
[queryId]: {
|
||||||
id: 123,
|
id: 123,
|
||||||
|
@ -121,23 +121,23 @@ describe('Chronograf.Reducers.queryConfig', () => {
|
||||||
{field: 'f1', funcs: ['fn1', 'fn2']},
|
{field: 'f1', funcs: ['fn1', 'fn2']},
|
||||||
{field: 'f2', funcs: ['fn1']},
|
{field: 'f2', funcs: ['fn1']},
|
||||||
],
|
],
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
const action = applyFuncsToField(queryId, {
|
const action = applyFuncsToField(queryId, {
|
||||||
field: 'f1',
|
field: 'f1',
|
||||||
funcs: ['fn3', 'fn4'],
|
funcs: ['fn3', 'fn4'],
|
||||||
});
|
})
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
expect(nextState[queryId].fields).to.eql([
|
expect(nextState[queryId].fields).to.eql([
|
||||||
{field: 'f1', funcs: ['fn3', 'fn4']},
|
{field: 'f1', funcs: ['fn3', 'fn4']},
|
||||||
{field: 'f2', funcs: ['fn1']},
|
{field: 'f2', funcs: ['fn1']},
|
||||||
]);
|
])
|
||||||
});
|
})
|
||||||
|
|
||||||
it('removes all functions and group by time when one field has no funcs applied', () => {
|
it('removes all functions and group by time when one field has no funcs applied', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: {
|
[queryId]: {
|
||||||
id: 123,
|
id: 123,
|
||||||
|
@ -151,27 +151,27 @@ describe('Chronograf.Reducers.queryConfig', () => {
|
||||||
time: '1m',
|
time: '1m',
|
||||||
tags: [],
|
tags: [],
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
const action = applyFuncsToField(queryId, {
|
const action = applyFuncsToField(queryId, {
|
||||||
field: 'f1',
|
field: 'f1',
|
||||||
funcs: [],
|
funcs: [],
|
||||||
});
|
})
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
expect(nextState[queryId].fields).to.eql([
|
expect(nextState[queryId].fields).to.eql([
|
||||||
{field: 'f1', funcs: []},
|
{field: 'f1', funcs: []},
|
||||||
{field: 'f2', funcs: []},
|
{field: 'f2', funcs: []},
|
||||||
]);
|
])
|
||||||
expect(nextState[queryId].groupBy.time).to.equal(null);
|
expect(nextState[queryId].groupBy.time).to.equal(null)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('CHOOSE_TAG', () => {
|
describe('CHOOSE_TAG', () => {
|
||||||
it('adds a tag key/value to the query', () => {
|
it('adds a tag key/value to the query', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: buildInitialState(queryId, {
|
[queryId]: buildInitialState(queryId, {
|
||||||
tags: {
|
tags: {
|
||||||
|
@ -179,63 +179,63 @@ describe('Chronograf.Reducers.queryConfig', () => {
|
||||||
k2: ['foo'],
|
k2: ['foo'],
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
};
|
}
|
||||||
const action = chooseTag(queryId, {
|
const action = chooseTag(queryId, {
|
||||||
key: 'k1',
|
key: 'k1',
|
||||||
value: 'v1',
|
value: 'v1',
|
||||||
});
|
})
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
expect(nextState[queryId].tags).to.eql({
|
expect(nextState[queryId].tags).to.eql({
|
||||||
k1: ['v0', 'v1'],
|
k1: ['v0', 'v1'],
|
||||||
k2: ['foo'],
|
k2: ['foo'],
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('creates a new entry if it\'s the first key', () => {
|
it('creates a new entry if it\'s the first key', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: buildInitialState(queryId, {
|
[queryId]: buildInitialState(queryId, {
|
||||||
tags: {},
|
tags: {},
|
||||||
}),
|
}),
|
||||||
};
|
}
|
||||||
const action = chooseTag(queryId, {
|
const action = chooseTag(queryId, {
|
||||||
key: 'k1',
|
key: 'k1',
|
||||||
value: 'v1',
|
value: 'v1',
|
||||||
});
|
})
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
expect(nextState[queryId].tags).to.eql({
|
expect(nextState[queryId].tags).to.eql({
|
||||||
k1: ['v1'],
|
k1: ['v1'],
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('removes a value that is already in the list', () => {
|
it('removes a value that is already in the list', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: buildInitialState(queryId, {
|
[queryId]: buildInitialState(queryId, {
|
||||||
tags: {
|
tags: {
|
||||||
k1: ['v1'],
|
k1: ['v1'],
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
};
|
}
|
||||||
const action = chooseTag(queryId, {
|
const action = chooseTag(queryId, {
|
||||||
key: 'k1',
|
key: 'k1',
|
||||||
value: 'v1',
|
value: 'v1',
|
||||||
});
|
})
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
// TODO: this should probably remove the `k1` property entirely from the tags object
|
// TODO: this should probably remove the `k1` property entirely from the tags object
|
||||||
expect(nextState[queryId].tags).to.eql({});
|
expect(nextState[queryId].tags).to.eql({})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('GROUP_BY_TAG', () => {
|
describe('GROUP_BY_TAG', () => {
|
||||||
it('adds a tag key/value to the query', () => {
|
it('adds a tag key/value to the query', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: {
|
[queryId]: {
|
||||||
id: 123,
|
id: 123,
|
||||||
|
@ -244,20 +244,20 @@ describe('Chronograf.Reducers.queryConfig', () => {
|
||||||
fields: [],
|
fields: [],
|
||||||
tags: {},
|
tags: {},
|
||||||
groupBy: {tags: [], time: null},
|
groupBy: {tags: [], time: null},
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
const action = groupByTag(queryId, 'k1');
|
const action = groupByTag(queryId, 'k1')
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
expect(nextState[queryId].groupBy).to.eql({
|
expect(nextState[queryId].groupBy).to.eql({
|
||||||
time: null,
|
time: null,
|
||||||
tags: ['k1'],
|
tags: ['k1'],
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('removes a tag if the given tag key is already in the GROUP BY list', () => {
|
it('removes a tag if the given tag key is already in the GROUP BY list', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: {
|
[queryId]: {
|
||||||
id: 123,
|
id: 123,
|
||||||
|
@ -266,59 +266,59 @@ describe('Chronograf.Reducers.queryConfig', () => {
|
||||||
fields: [],
|
fields: [],
|
||||||
tags: {},
|
tags: {},
|
||||||
groupBy: {tags: ['k1'], time: null},
|
groupBy: {tags: ['k1'], time: null},
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
const action = groupByTag(queryId, 'k1');
|
const action = groupByTag(queryId, 'k1')
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
expect(nextState[queryId].groupBy).to.eql({
|
expect(nextState[queryId].groupBy).to.eql({
|
||||||
time: null,
|
time: null,
|
||||||
tags: [],
|
tags: [],
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('TOGGLE_TAG_ACCEPTANCE', () => {
|
describe('TOGGLE_TAG_ACCEPTANCE', () => {
|
||||||
it('it toggles areTagsAccepted', () => {
|
it('it toggles areTagsAccepted', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: buildInitialState(queryId),
|
[queryId]: buildInitialState(queryId),
|
||||||
};
|
}
|
||||||
const action = toggleTagAcceptance(queryId);
|
const action = toggleTagAcceptance(queryId)
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
expect(nextState[queryId].areTagsAccepted).to.equal(!initialState[queryId].areTagsAccepted);
|
expect(nextState[queryId].areTagsAccepted).to.equal(!initialState[queryId].areTagsAccepted)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('GROUP_BY_TIME', () => {
|
describe('GROUP_BY_TIME', () => {
|
||||||
it('applys the appropriate group by time', () => {
|
it('applys the appropriate group by time', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const time = '100y';
|
const time = '100y'
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: buildInitialState(queryId),
|
[queryId]: buildInitialState(queryId),
|
||||||
};
|
}
|
||||||
|
|
||||||
const action = groupByTime(queryId, time);
|
const action = groupByTime(queryId, time)
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
expect(nextState[queryId].groupBy.time).to.equal(time);
|
expect(nextState[queryId].groupBy.time).to.equal(time)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('updates a query\'s raw text', () => {
|
it('updates a query\'s raw text', () => {
|
||||||
const queryId = 123;
|
const queryId = 123
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[queryId]: buildInitialState(queryId),
|
[queryId]: buildInitialState(queryId),
|
||||||
};
|
}
|
||||||
const text = 'foo';
|
const text = 'foo'
|
||||||
const action = updateRawQuery(queryId, text);
|
const action = updateRawQuery(queryId, text)
|
||||||
|
|
||||||
const nextState = reducer(initialState, action);
|
const nextState = reducer(initialState, action)
|
||||||
|
|
||||||
expect(nextState[queryId].rawText).to.equal('foo');
|
expect(nextState[queryId].rawText).to.equal('foo')
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,31 +1,31 @@
|
||||||
import reducer from 'src/data_explorer/reducers/timeRange';
|
import reducer from 'src/data_explorer/reducers/timeRange'
|
||||||
|
|
||||||
import {
|
import {
|
||||||
setTimeRange,
|
setTimeRange,
|
||||||
} from 'src/data_explorer/actions/view';
|
} from 'src/data_explorer/actions/view'
|
||||||
|
|
||||||
const noopAction = () => {
|
const noopAction = () => {
|
||||||
return {type: 'NOOP'};
|
return {type: 'NOOP'}
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('DataExplorer.Reducers.TimeRange', () => {
|
describe('DataExplorer.Reducers.TimeRange', () => {
|
||||||
it('it sets the default timeRange', () => {
|
it('it sets the default timeRange', () => {
|
||||||
const state = reducer(undefined, noopAction());
|
const state = reducer(undefined, noopAction())
|
||||||
const expected = {
|
const expected = {
|
||||||
lower: 'now() - 15m',
|
lower: 'now() - 15m',
|
||||||
upper: null,
|
upper: null,
|
||||||
};
|
}
|
||||||
|
|
||||||
expect(state).to.deep.equal(expected);
|
expect(state).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('it can set the time range', () => {
|
it('it can set the time range', () => {
|
||||||
const timeRange = {
|
const timeRange = {
|
||||||
lower: 'now() - 5m',
|
lower: 'now() - 5m',
|
||||||
upper: null,
|
upper: null,
|
||||||
};
|
}
|
||||||
const expected = reducer(undefined, setTimeRange(timeRange));
|
const expected = reducer(undefined, setTimeRange(timeRange))
|
||||||
|
|
||||||
expect(timeRange).to.deep.equal(expected);
|
expect(timeRange).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,40 +1,40 @@
|
||||||
import reducer from 'src/data_explorer/reducers/ui';
|
import reducer from 'src/data_explorer/reducers/ui'
|
||||||
|
|
||||||
import {
|
import {
|
||||||
addQuery,
|
addQuery,
|
||||||
deleteQuery,
|
deleteQuery,
|
||||||
} from 'src/data_explorer/actions/view';
|
} from 'src/data_explorer/actions/view'
|
||||||
|
|
||||||
const noopAction = () => {
|
const noopAction = () => {
|
||||||
return {type: 'NOOP'};
|
return {type: 'NOOP'}
|
||||||
}
|
}
|
||||||
|
|
||||||
let state = undefined;
|
let state
|
||||||
|
|
||||||
describe('DataExplorer.Reducers.UI', () => {
|
describe('DataExplorer.Reducers.UI', () => {
|
||||||
it('it sets the default state for UI', () => {
|
it('it sets the default state for UI', () => {
|
||||||
const actual = reducer(state, noopAction());
|
const actual = reducer(state, noopAction())
|
||||||
const expected = {
|
const expected = {
|
||||||
queryIDs: [],
|
queryIDs: [],
|
||||||
};
|
}
|
||||||
|
|
||||||
expect(actual).to.deep.equal(expected);
|
expect(actual).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('it can add a query', () => {
|
it('it can add a query', () => {
|
||||||
const actual = reducer(state, addQuery());
|
const actual = reducer(state, addQuery())
|
||||||
expect(actual.queryIDs.length).to.equal(1);
|
expect(actual.queryIDs.length).to.equal(1)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('it can delete a query', () => {
|
it('it can delete a query', () => {
|
||||||
const queryID = '123';
|
const queryID = '123'
|
||||||
state = {queryIDs: ['456', queryID]};
|
state = {queryIDs: ['456', queryID]}
|
||||||
|
|
||||||
const actual = reducer(state, deleteQuery(queryID));
|
const actual = reducer(state, deleteQuery(queryID))
|
||||||
const expected = {
|
const expected = {
|
||||||
queryIDs: ['456'],
|
queryIDs: ['456'],
|
||||||
};
|
}
|
||||||
|
|
||||||
expect(actual).to.deep.equal(expected);
|
expect(actual).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,117 +1,117 @@
|
||||||
import buildInfluxQLQuery from 'utils/influxql';
|
import buildInfluxQLQuery from 'utils/influxql'
|
||||||
import defaultQueryConfig from 'src/utils/defaultQueryConfig';
|
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
|
||||||
|
|
||||||
function mergeConfig(options) {
|
function mergeConfig(options) {
|
||||||
return Object.assign({}, defaultQueryConfig(123), options);
|
return Object.assign({}, defaultQueryConfig(123), options)
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('buildInfluxQLQuery', () => {
|
describe('buildInfluxQLQuery', () => {
|
||||||
let config, timeBounds;
|
let config, timeBounds
|
||||||
describe('when information is missing', () => {
|
describe('when information is missing', () => {
|
||||||
it('returns a null select statement', () => {
|
it('returns a null select statement', () => {
|
||||||
expect(buildInfluxQLQuery({}, mergeConfig())).to.equal(null);
|
expect(buildInfluxQLQuery({}, mergeConfig())).to.equal(null)
|
||||||
expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1'}))).to.equal(null); // no measurement
|
expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1'}))).to.equal(null) // no measurement
|
||||||
expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1', measurement: 'm1'}))).to.equal(null); // no fields
|
expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1', measurement: 'm1'}))).to.equal(null) // no fields
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('with a database, measurement, field, and NO retention policy', () => {
|
describe('with a database, measurement, field, and NO retention policy', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
config = mergeConfig({database: 'db1', measurement: 'm1', fields: [{field: 'f1', func: null}]});
|
config = mergeConfig({database: 'db1', measurement: 'm1', fields: [{field: 'f1', func: null}]})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('builds the right query', () => {
|
it('builds the right query', () => {
|
||||||
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1".."m1"');
|
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1".."m1"')
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('with a database, measurement, retention policy, and field', () => {
|
describe('with a database, measurement, retention policy, and field', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: 'f1', func: null}]});
|
config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: 'f1', func: null}]})
|
||||||
timeBounds = {lower: 'now() - 1hr'};
|
timeBounds = {lower: 'now() - 1hr'}
|
||||||
});
|
})
|
||||||
|
|
||||||
it('builds the right query', () => {
|
it('builds the right query', () => {
|
||||||
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1"');
|
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1"')
|
||||||
});
|
})
|
||||||
|
|
||||||
it('builds the right query with a time range', () => {
|
it('builds the right query with a time range', () => {
|
||||||
expect(buildInfluxQLQuery(timeBounds, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1" WHERE time > now() - 1hr');
|
expect(buildInfluxQLQuery(timeBounds, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1" WHERE time > now() - 1hr')
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('when the field is *', () => {
|
describe('when the field is *', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: '*', func: null}]});
|
config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: '*', func: null}]})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('does not quote the star', () => {
|
it('does not quote the star', () => {
|
||||||
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT * FROM "db1"."rp1"."m1"');
|
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT * FROM "db1"."rp1"."m1"')
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('with a measurement and one field, an aggregate, and a GROUP BY time()', () => {
|
describe('with a measurement and one field, an aggregate, and a GROUP BY time()', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: []}});
|
config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: []}})
|
||||||
timeBounds = {lower: 'now() - 12h'};
|
timeBounds = {lower: 'now() - 12h'}
|
||||||
});
|
})
|
||||||
|
|
||||||
it('builds the right query', () => {
|
it('builds the right query', () => {
|
||||||
const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m)';
|
const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m)'
|
||||||
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
|
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('with a measurement and one field, an aggregate, and a GROUP BY tags', () => {
|
describe('with a measurement and one field, an aggregate, and a GROUP BY tags', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: null, tags: ['t1', 't2']}});
|
config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: null, tags: ['t1', 't2']}})
|
||||||
timeBounds = {lower: 'now() - 12h'};
|
timeBounds = {lower: 'now() - 12h'}
|
||||||
});
|
})
|
||||||
|
|
||||||
it('builds the right query', () => {
|
it('builds the right query', () => {
|
||||||
const expected = `SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY "t1", "t2"`;
|
const expected = `SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY "t1", "t2"`
|
||||||
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
|
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('with a measurement, one field, and an upper / lower absolute time range', () => {
|
describe('with a measurement, one field, and an upper / lower absolute time range', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: []}]});
|
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: []}]})
|
||||||
timeBounds = {lower: "'2015-07-23T15:52:24.447Z'", upper: "'2015-07-24T15:52:24.447Z'"};
|
timeBounds = {lower: "'2015-07-23T15:52:24.447Z'", upper: "'2015-07-24T15:52:24.447Z'"}
|
||||||
});
|
})
|
||||||
|
|
||||||
it('builds the right query', () => {
|
it('builds the right query', () => {
|
||||||
const expected = 'SELECT "value" FROM "db1"."rp1"."m0" WHERE time > \'2015-07-23T15:52:24.447Z\' AND time < \'2015-07-24T15:52:24.447Z\'';
|
const expected = 'SELECT "value" FROM "db1"."rp1"."m0" WHERE time > \'2015-07-23T15:52:24.447Z\' AND time < \'2015-07-24T15:52:24.447Z\''
|
||||||
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
|
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('with a measurement and one field, an aggregate, and a GROUP BY time(), and tags', () => {
|
describe('with a measurement and one field, an aggregate, and a GROUP BY time(), and tags', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: ['t1', 't2']}});
|
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: ['t1', 't2']}})
|
||||||
timeBounds = {lower: 'now() - 12h'};
|
timeBounds = {lower: 'now() - 12h'}
|
||||||
});
|
})
|
||||||
|
|
||||||
it('builds the right query', () => {
|
it('builds the right query', () => {
|
||||||
const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m), "t1", "t2"';
|
const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m), "t1", "t2"'
|
||||||
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
|
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('with a measurement and two fields', () => {
|
describe('with a measurement and two fields', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'f0', funcs: []}, {field: 'f1', funcs: []}]});
|
config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'f0', funcs: []}, {field: 'f1', funcs: []}]})
|
||||||
timeBounds = {upper: "'2015-02-24T00:00:00Z'"};
|
timeBounds = {upper: "'2015-02-24T00:00:00Z'"}
|
||||||
});
|
})
|
||||||
|
|
||||||
it('builds the right query', () => {
|
it('builds the right query', () => {
|
||||||
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f0", "f1" FROM "db1"."rp1"."m0"');
|
expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f0", "f1" FROM "db1"."rp1"."m0"')
|
||||||
});
|
})
|
||||||
|
|
||||||
it('builds the right query with a time range', () => {
|
it('builds the right query with a time range', () => {
|
||||||
const expected = `SELECT "f0", "f1" FROM "db1"."rp1"."m0" WHERE time < '2015-02-24T00:00:00Z'`;
|
const expected = `SELECT "f0", "f1" FROM "db1"."rp1"."m0" WHERE time < '2015-02-24T00:00:00Z'`
|
||||||
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
|
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('with multiple tag pairs', () => {
|
describe('with multiple tag pairs', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
|
@ -128,16 +128,16 @@ describe('buildInfluxQLQuery', () => {
|
||||||
],
|
],
|
||||||
k2: [
|
k2: [
|
||||||
'v2',
|
'v2',
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
});
|
})
|
||||||
timeBounds = {lower: 'now() - 6h'};
|
timeBounds = {lower: 'now() - 6h'}
|
||||||
});
|
})
|
||||||
|
|
||||||
it('correctly uses AND/OR to combine pairs', () => {
|
it('correctly uses AND/OR to combine pairs', () => {
|
||||||
const expected = `SELECT "f0" FROM "db1"."rp1"."m0" WHERE time > now() - 6h AND ("k1"='v1' OR "k1"='v3' OR "k1"='v4') AND "k2"='v2'`;
|
const expected = `SELECT "f0" FROM "db1"."rp1"."m0" WHERE time > now() - 6h AND ("k1"='v1' OR "k1"='v3' OR "k1"='v4') AND "k2"='v2'`
|
||||||
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected);
|
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
var context = require.context('./', true, /Spec\.js$/);
|
const context = require.context('./', true, /Spec\.js$/)
|
||||||
context.keys().forEach(context);
|
context.keys().forEach(context)
|
||||||
module.exports = context;
|
module.exports = context
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
import reducer from 'src/kapacitor/reducers/rules';
|
import reducer from 'src/kapacitor/reducers/rules'
|
||||||
import {defaultRuleConfigs} from 'src/kapacitor/constants';
|
import {defaultRuleConfigs} from 'src/kapacitor/constants'
|
||||||
import {ALERT_NODES_ACCESSORS} from 'src/kapacitor/constants';
|
import {ALERT_NODES_ACCESSORS} from 'src/kapacitor/constants'
|
||||||
|
|
||||||
import {
|
import {
|
||||||
chooseTrigger,
|
chooseTrigger,
|
||||||
|
@ -12,93 +12,93 @@ import {
|
||||||
updateRuleName,
|
updateRuleName,
|
||||||
deleteRuleSuccess,
|
deleteRuleSuccess,
|
||||||
updateRuleStatusSuccess,
|
updateRuleStatusSuccess,
|
||||||
} from 'src/kapacitor/actions/view';
|
} from 'src/kapacitor/actions/view'
|
||||||
|
|
||||||
describe('Kapacitor.Reducers.rules', () => {
|
describe('Kapacitor.Reducers.rules', () => {
|
||||||
it('can choose a trigger', () => {
|
it('can choose a trigger', () => {
|
||||||
const ruleID = 1;
|
const ruleID = 1
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[ruleID]: {
|
[ruleID]: {
|
||||||
id: ruleID,
|
id: ruleID,
|
||||||
queryID: 988,
|
queryID: 988,
|
||||||
trigger: '',
|
trigger: '',
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
let newState = reducer(initialState, chooseTrigger(ruleID, 'deadman'));
|
let newState = reducer(initialState, chooseTrigger(ruleID, 'deadman'))
|
||||||
expect(newState[ruleID].trigger).to.equal('deadman');
|
expect(newState[ruleID].trigger).to.equal('deadman')
|
||||||
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.deadman);
|
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.deadman)
|
||||||
|
|
||||||
newState = reducer(initialState, chooseTrigger(ruleID, 'relative'));
|
newState = reducer(initialState, chooseTrigger(ruleID, 'relative'))
|
||||||
expect(newState[ruleID].trigger).to.equal('relative');
|
expect(newState[ruleID].trigger).to.equal('relative')
|
||||||
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.relative);
|
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.relative)
|
||||||
|
|
||||||
newState = reducer(initialState, chooseTrigger(ruleID, 'threshold'));
|
newState = reducer(initialState, chooseTrigger(ruleID, 'threshold'))
|
||||||
expect(newState[ruleID].trigger).to.equal('threshold');
|
expect(newState[ruleID].trigger).to.equal('threshold')
|
||||||
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.threshold);
|
expect(newState[ruleID].values).to.equal(defaultRuleConfigs.threshold)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can update the values', () => {
|
it('can update the values', () => {
|
||||||
const ruleID = 1;
|
const ruleID = 1
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[ruleID]: {
|
[ruleID]: {
|
||||||
id: ruleID,
|
id: ruleID,
|
||||||
queryID: 988,
|
queryID: 988,
|
||||||
trigger: 'deadman',
|
trigger: 'deadman',
|
||||||
values: defaultRuleConfigs.deadman
|
values: defaultRuleConfigs.deadman,
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
const newDeadmanValues = {duration: '5m'};
|
const newDeadmanValues = {duration: '5m'}
|
||||||
const newState = reducer(initialState, updateRuleValues(ruleID, 'deadman', newDeadmanValues));
|
const newState = reducer(initialState, updateRuleValues(ruleID, 'deadman', newDeadmanValues))
|
||||||
expect(newState[ruleID].values).to.equal(newDeadmanValues);
|
expect(newState[ruleID].values).to.equal(newDeadmanValues)
|
||||||
|
|
||||||
const newRelativeValues = {func: 'max', change: 'change'};
|
const newRelativeValues = {func: 'max', change: 'change'}
|
||||||
const finalState = reducer(newState, updateRuleValues(ruleID, 'relative', newRelativeValues));
|
const finalState = reducer(newState, updateRuleValues(ruleID, 'relative', newRelativeValues))
|
||||||
expect(finalState[ruleID].trigger).to.equal('relative');
|
expect(finalState[ruleID].trigger).to.equal('relative')
|
||||||
expect(finalState[ruleID].values).to.equal(newRelativeValues);
|
expect(finalState[ruleID].values).to.equal(newRelativeValues)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can update the message', () => {
|
it('can update the message', () => {
|
||||||
const ruleID = 1;
|
const ruleID = 1
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[ruleID]: {
|
[ruleID]: {
|
||||||
id: ruleID,
|
id: ruleID,
|
||||||
queryID: 988,
|
queryID: 988,
|
||||||
message: '',
|
message: '',
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
const message = 'im a kapacitor rule message';
|
const message = 'im a kapacitor rule message'
|
||||||
const newState = reducer(initialState, updateMessage(ruleID, message));
|
const newState = reducer(initialState, updateMessage(ruleID, message))
|
||||||
expect(newState[ruleID].message).to.equal(message);
|
expect(newState[ruleID].message).to.equal(message)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can update the alerts', () => {
|
it('can update the alerts', () => {
|
||||||
const ruleID = 1;
|
const ruleID = 1
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[ruleID]: {
|
[ruleID]: {
|
||||||
id: ruleID,
|
id: ruleID,
|
||||||
queryID: 988,
|
queryID: 988,
|
||||||
alerts: [],
|
alerts: [],
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
const alerts = ['slack'];
|
const alerts = ['slack']
|
||||||
const newState = reducer(initialState, updateAlerts(ruleID, alerts));
|
const newState = reducer(initialState, updateAlerts(ruleID, alerts))
|
||||||
expect(newState[ruleID].alerts).to.equal(alerts);
|
expect(newState[ruleID].alerts).to.equal(alerts)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can update an alerta alert', () => {
|
it('can update an alerta alert', () => {
|
||||||
const ruleID = 1;
|
const ruleID = 1
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[ruleID]: {
|
[ruleID]: {
|
||||||
id: ruleID,
|
id: ruleID,
|
||||||
queryID: 988,
|
queryID: 988,
|
||||||
alerts: [],
|
alerts: [],
|
||||||
alertNodes: [],
|
alertNodes: [],
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
const tickScript = `stream
|
const tickScript = `stream
|
||||||
|alert()
|
|alert()
|
||||||
|
@ -108,39 +108,39 @@ describe('Kapacitor.Reducers.rules', () => {
|
||||||
.environment('Development')
|
.environment('Development')
|
||||||
.group('Dev. Servers')
|
.group('Dev. Servers')
|
||||||
.services('a b c')
|
.services('a b c')
|
||||||
`;
|
`
|
||||||
|
|
||||||
let newState = reducer(initialState, updateAlertNodes(ruleID, 'alerta', tickScript));
|
let newState = reducer(initialState, updateAlertNodes(ruleID, 'alerta', tickScript))
|
||||||
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`;
|
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`
|
||||||
let actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID]);
|
let actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID])
|
||||||
|
|
||||||
// Test both data structure and accessor string
|
// Test both data structure and accessor string
|
||||||
expect(actualStr).to.equal(expectedStr);
|
expect(actualStr).to.equal(expectedStr)
|
||||||
|
|
||||||
// Test that accessor string is the same if fed back in
|
// Test that accessor string is the same if fed back in
|
||||||
newState = reducer(newState, updateAlertNodes(ruleID, 'alerta', actualStr));
|
newState = reducer(newState, updateAlertNodes(ruleID, 'alerta', actualStr))
|
||||||
actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID]);
|
actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID])
|
||||||
expect(actualStr).to.equal(expectedStr);
|
expect(actualStr).to.equal(expectedStr)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can update the name', () => {
|
it('can update the name', () => {
|
||||||
const ruleID = 1;
|
const ruleID = 1
|
||||||
const name = 'New name'
|
const name = 'New name'
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[ruleID]: {
|
[ruleID]: {
|
||||||
id: ruleID,
|
id: ruleID,
|
||||||
queryID: 988,
|
queryID: 988,
|
||||||
name: 'Random album title',
|
name: 'Random album title',
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
const newState = reducer(initialState, updateRuleName(ruleID, name));
|
const newState = reducer(initialState, updateRuleName(ruleID, name))
|
||||||
expect(newState[ruleID].name).to.equal(name);
|
expect(newState[ruleID].name).to.equal(name)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('it can delete a rule', () => {
|
it('it can delete a rule', () => {
|
||||||
const rule1 = 1;
|
const rule1 = 1
|
||||||
const rule2 = 2;
|
const rule2 = 2
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[rule1]: {
|
[rule1]: {
|
||||||
id: rule1,
|
id: rule1,
|
||||||
|
@ -148,43 +148,43 @@ describe('Kapacitor.Reducers.rules', () => {
|
||||||
[rule2]: {
|
[rule2]: {
|
||||||
id: rule2,
|
id: rule2,
|
||||||
},
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
expect(Object.keys(initialState).length).to.equal(2);
|
expect(Object.keys(initialState).length).to.equal(2)
|
||||||
const newState = reducer(initialState, deleteRuleSuccess(rule2));
|
const newState = reducer(initialState, deleteRuleSuccess(rule2))
|
||||||
expect(Object.keys(newState).length).to.equal(1);
|
expect(Object.keys(newState).length).to.equal(1)
|
||||||
expect(newState[rule1]).to.equal(initialState[rule1]);
|
expect(newState[rule1]).to.equal(initialState[rule1])
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can update details', () => {
|
it('can update details', () => {
|
||||||
const ruleID = 1;
|
const ruleID = 1
|
||||||
const details = 'im some rule details';
|
const details = 'im some rule details'
|
||||||
|
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[ruleID]: {
|
[ruleID]: {
|
||||||
id: ruleID,
|
id: ruleID,
|
||||||
queryID: 988,
|
queryID: 988,
|
||||||
details: '',
|
details: '',
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
const newState = reducer(initialState, updateDetails(ruleID, details));
|
const newState = reducer(initialState, updateDetails(ruleID, details))
|
||||||
expect(newState[ruleID].details).to.equal(details);
|
expect(newState[ruleID].details).to.equal(details)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can update status', () => {
|
it('can update status', () => {
|
||||||
const ruleID = 1;
|
const ruleID = 1
|
||||||
const status = 'enabled';
|
const status = 'enabled'
|
||||||
|
|
||||||
const initialState = {
|
const initialState = {
|
||||||
[ruleID]: {
|
[ruleID]: {
|
||||||
id: ruleID,
|
id: ruleID,
|
||||||
queryID: 988,
|
queryID: 988,
|
||||||
status: 'disabled',
|
status: 'disabled',
|
||||||
}
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
const newState = reducer(initialState, updateRuleStatusSuccess(ruleID, status));
|
const newState = reducer(initialState, updateRuleStatusSuccess(ruleID, status))
|
||||||
expect(newState[ruleID].status).to.equal(status);
|
expect(newState[ruleID].status).to.equal(status)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,71 +1,71 @@
|
||||||
import {diskBytesFromShard, diskBytesFromShardForDatabase} from 'shared/parsing/diskBytes';
|
import {diskBytesFromShard, diskBytesFromShardForDatabase} from 'shared/parsing/diskBytes'
|
||||||
|
|
||||||
describe('diskBytesFromShard', () => {
|
describe('diskBytesFromShard', () => {
|
||||||
it('sums all the disk bytes in multiple series', () => {
|
it('sums all the disk bytes in multiple series', () => {
|
||||||
const response = {"results":[
|
const response = {results: [
|
||||||
{"series":[{"name":"shard","tags":{"clusterID":"6272208615254493595","database":"_internal","engine":"tsm1","hostname":"WattsInfluxDB","id":"1","nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/_internal/monitor/1","retentionPolicy":"monitor"},"columns":["time","last"],"values":[[1464811503000000000,100]]}]},
|
{series: [{name: "shard", tags: {clusterID: "6272208615254493595", database: "_internal", engine: "tsm1", hostname: "WattsInfluxDB", id: "1", nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [[1464811503000000000, 100]]}]},
|
||||||
{"series":[{"name":"shard","tags":{"clusterID":"6272208615254493595","database":"telegraf","engine":"tsm1","hostname":"WattsInfluxDB","id":"2","nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/telegraf/default/2","retentionPolicy":"default"},"columns":["time","last"],"values":[[1464811503000000000,200]]}]},
|
{series: [{name: "shard", tags: {clusterID: "6272208615254493595", database: "telegraf", engine: "tsm1", hostname: "WattsInfluxDB", id: "2", nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/telegraf/default/2", retentionPolicy: "default"}, columns: ["time", "last"], values: [[1464811503000000000, 200]]}]},
|
||||||
]};
|
]}
|
||||||
|
|
||||||
const result = diskBytesFromShard(response);
|
const result = diskBytesFromShard(response)
|
||||||
const expectedTotal = 300;
|
const expectedTotal = 300
|
||||||
|
|
||||||
expect(result.errors).to.deep.equal([]);
|
expect(result.errors).to.deep.equal([])
|
||||||
expect(result.bytes).to.equal(expectedTotal);
|
expect(result.bytes).to.equal(expectedTotal)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('returns emtpy with empty response', () => {
|
it('returns emtpy with empty response', () => {
|
||||||
const response = {"results":[{}]};
|
const response = {results: [{}]}
|
||||||
|
|
||||||
const result = diskBytesFromShard(response);
|
const result = diskBytesFromShard(response)
|
||||||
|
|
||||||
expect(result.errors).to.deep.equal([]);
|
expect(result.errors).to.deep.equal([])
|
||||||
expect(result.bytes).to.equal(0);
|
expect(result.bytes).to.equal(0)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('exposes the server error', () => {
|
it('exposes the server error', () => {
|
||||||
const response = {"results":[{"error":"internal server error?"}]};
|
const response = {results: [{error: "internal server error?"}]}
|
||||||
|
|
||||||
const result = diskBytesFromShard(response);
|
const result = diskBytesFromShard(response)
|
||||||
|
|
||||||
expect(result.errors).to.deep.equal(['internal server error?']);
|
expect(result.errors).to.deep.equal(['internal server error?'])
|
||||||
expect(result.bytes).to.equal(0);
|
expect(result.bytes).to.equal(0)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('diskBytesFromShardForDatabase', () => {
|
describe('diskBytesFromShardForDatabase', () => {
|
||||||
it('return parses data as expected', () => {
|
it('return parses data as expected', () => {
|
||||||
const response = {"results":[{"series":[
|
const response = {results: [{series: [
|
||||||
{"name":"shard","tags":{"nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/_internal/monitor/1","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",100]]},
|
{name: "shard", tags: {nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 100]]},
|
||||||
{"name":"shard","tags":{"nodeID":"localhost:8088","path":"/Users/watts/.influxdb/data/_internal/monitor/3","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",200]]},
|
{name: "shard", tags: {nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/3", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 200]]},
|
||||||
{"name":"shard","tags":{"nodeID":"localhost:8188","path":"/Users/watts/.influxdb/data/_internal/monitor/1","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",100]]},
|
{name: "shard", tags: {nodeID: "localhost:8188", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 100]]},
|
||||||
{"name":"shard","tags":{"nodeID":"localhost:8188","path":"/Users/watts/.influxdb/data/_internal/monitor/3","retentionPolicy":"monitor"},"columns":["time","last"],"values":[["2016-06-02T01:06:13Z",200]]},
|
{name: "shard", tags: {nodeID: "localhost:8188", path: "/Users/watts/.influxdb/data/_internal/monitor/3", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 200]]},
|
||||||
]}]};
|
]}]}
|
||||||
|
|
||||||
const result = diskBytesFromShardForDatabase(response);
|
const result = diskBytesFromShardForDatabase(response)
|
||||||
const expected = {
|
const expected = {
|
||||||
1: [{nodeID: 'localhost:8088', diskUsage: 100},{nodeID: 'localhost:8188', diskUsage: 100}],
|
1: [{nodeID: 'localhost:8088', diskUsage: 100}, {nodeID: 'localhost:8188', diskUsage: 100}],
|
||||||
3: [{nodeID: 'localhost:8088', diskUsage: 200},{nodeID: 'localhost:8188', diskUsage: 200}],
|
3: [{nodeID: 'localhost:8088', diskUsage: 200}, {nodeID: 'localhost:8188', diskUsage: 200}],
|
||||||
};
|
}
|
||||||
|
|
||||||
expect(result.shardData).to.deep.equal(expected);
|
expect(result.shardData).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('returns emtpy with empty response', () => {
|
it('returns emtpy with empty response', () => {
|
||||||
const response = {"results":[{}]};
|
const response = {results: [{}]}
|
||||||
|
|
||||||
const result = diskBytesFromShardForDatabase(response);
|
const result = diskBytesFromShardForDatabase(response)
|
||||||
|
|
||||||
expect(result.errors).to.deep.equal([]);
|
expect(result.errors).to.deep.equal([])
|
||||||
expect(result.shardData).to.deep.equal({});
|
expect(result.shardData).to.deep.equal({})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('exposes the server error', () => {
|
it('exposes the server error', () => {
|
||||||
const response = {"results":[{"error":"internal server error?"}]};
|
const response = {results: [{error: "internal server error?"}]}
|
||||||
|
|
||||||
const result = diskBytesFromShardForDatabase(response);
|
const result = diskBytesFromShardForDatabase(response)
|
||||||
|
|
||||||
expect(result.errors).to.deep.equal(['internal server error?']);
|
expect(result.errors).to.deep.equal(['internal server error?'])
|
||||||
expect(result.shardData).to.deep.equal({});
|
expect(result.shardData).to.deep.equal({})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,23 +1,23 @@
|
||||||
import getRange from 'shared/parsing/getRangeForDygraph';
|
import getRange from 'shared/parsing/getRangeForDygraph'
|
||||||
|
|
||||||
describe('getRangeForDygraphSpec', () => {
|
describe('getRangeForDygraphSpec', () => {
|
||||||
it('gets the range for one timeSeries', () => {
|
it('gets the range for one timeSeries', () => {
|
||||||
const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]];
|
const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]]
|
||||||
|
|
||||||
const actual = getRange(timeSeries);
|
const actual = getRange(timeSeries)
|
||||||
const expected = [1, 3];
|
const expected = [1, 3]
|
||||||
|
|
||||||
expect(actual).to.deep.equal(expected);
|
expect(actual).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('does not get range when a range is provided', () => {
|
it('does not get range when a range is provided', () => {
|
||||||
const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]];
|
const timeSeries = [[new Date(1000), 1], [new Date(2000), 2], [new Date(3000), 3]]
|
||||||
|
|
||||||
const providedRange = [0, 4];
|
const providedRange = [0, 4]
|
||||||
const actual = getRange(timeSeries, providedRange);
|
const actual = getRange(timeSeries, providedRange)
|
||||||
|
|
||||||
expect(actual).to.deep.equal(providedRange);
|
expect(actual).to.deep.equal(providedRange)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('gets the range for multiple timeSeries', () => {
|
it('gets the range for multiple timeSeries', () => {
|
||||||
const timeSeries = [
|
const timeSeries = [
|
||||||
|
@ -25,63 +25,63 @@ describe('getRangeForDygraphSpec', () => {
|
||||||
[new Date(1000), 100, 1],
|
[new Date(1000), 100, 1],
|
||||||
[new Date(2000), null, 2],
|
[new Date(2000), null, 2],
|
||||||
[new Date(3000), 200, 3],
|
[new Date(3000), 200, 3],
|
||||||
];
|
]
|
||||||
|
|
||||||
const actual = getRange(timeSeries);
|
const actual = getRange(timeSeries)
|
||||||
const expected = [1, 200];
|
const expected = [1, 200]
|
||||||
|
|
||||||
expect(actual).to.deep.equal(expected);
|
expect(actual).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('returns a null array of two elements when min and max are equal', () => {
|
it('returns a null array of two elements when min and max are equal', () => {
|
||||||
const timeSeries = [[new Date(1000), 1], [new Date(2000), 1], [new Date(3000), 1]];
|
const timeSeries = [[new Date(1000), 1], [new Date(2000), 1], [new Date(3000), 1]]
|
||||||
const actual = getRange(timeSeries);
|
const actual = getRange(timeSeries)
|
||||||
const expected = [null, null];
|
const expected = [null, null]
|
||||||
|
|
||||||
expect(actual).to.deep.equal(expected);
|
expect(actual).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('when user provides a rule value', () => {
|
describe('when user provides a rule value', () => {
|
||||||
const defaultMax = 20;
|
const defaultMax = 20
|
||||||
const defaultMin = -10;
|
const defaultMin = -10
|
||||||
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]];
|
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]]
|
||||||
|
|
||||||
it('can pad positive values', () => {
|
it('can pad positive values', () => {
|
||||||
const value = 20;
|
const value = 20
|
||||||
const [min, max] = getRange(timeSeries, undefined, value);
|
const [min, max] = getRange(timeSeries, undefined, value)
|
||||||
|
|
||||||
expect(min).to.equal(defaultMin);
|
expect(min).to.equal(defaultMin)
|
||||||
expect(max).to.be.above(defaultMax);
|
expect(max).to.be.above(defaultMax)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can pad negative values', () => {
|
it('can pad negative values', () => {
|
||||||
const value = -10;
|
const value = -10
|
||||||
const [min, max] = getRange(timeSeries, undefined, value);
|
const [min, max] = getRange(timeSeries, undefined, value)
|
||||||
|
|
||||||
expect(min).to.be.below(defaultMin);
|
expect(min).to.be.below(defaultMin)
|
||||||
expect(max).to.equal(defaultMax);
|
expect(max).to.equal(defaultMax)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('when user provides a rule range value', () => {
|
describe('when user provides a rule range value', () => {
|
||||||
const defaultMax = 20;
|
const defaultMax = 20
|
||||||
const defaultMin = -10;
|
const defaultMin = -10
|
||||||
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]];
|
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]]
|
||||||
|
|
||||||
it('can pad positive values', () => {
|
it('can pad positive values', () => {
|
||||||
const rangeValue = 20;
|
const rangeValue = 20
|
||||||
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue);
|
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue)
|
||||||
|
|
||||||
expect(min).to.equal(defaultMin);
|
expect(min).to.equal(defaultMin)
|
||||||
expect(max).to.be.above(defaultMax);
|
expect(max).to.be.above(defaultMax)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can pad negative values', () => {
|
it('can pad negative values', () => {
|
||||||
const rangeValue = -10;
|
const rangeValue = -10
|
||||||
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue);
|
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue)
|
||||||
|
|
||||||
expect(min).to.be.below(defaultMin);
|
expect(min).to.be.below(defaultMin)
|
||||||
expect(max).to.equal(defaultMax);
|
expect(max).to.equal(defaultMax)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
import {parseAlerta} from 'src/shared/parsing/parseAlerta';
|
import {parseAlerta} from 'src/shared/parsing/parseAlerta'
|
||||||
|
|
||||||
it('can parse an alerta tick script', () => {
|
it('can parse an alerta tick script', () => {
|
||||||
const tickScript = `stream
|
const tickScript = `stream
|
||||||
|
@ -9,50 +9,50 @@ it('can parse an alerta tick script', () => {
|
||||||
.environment('Development')
|
.environment('Development')
|
||||||
.group('Dev. Servers')
|
.group('Dev. Servers')
|
||||||
.services('a b c')
|
.services('a b c')
|
||||||
`;
|
`
|
||||||
|
|
||||||
let actualObj = parseAlerta(tickScript);
|
let actualObj = parseAlerta(tickScript)
|
||||||
|
|
||||||
const expectedObj = [
|
const expectedObj = [
|
||||||
{
|
{
|
||||||
"name": "resource",
|
name: "resource",
|
||||||
"args": [
|
args: [
|
||||||
"Hostname or service"
|
"Hostname or service",
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "event",
|
name: "event",
|
||||||
"args": [
|
args: [
|
||||||
"Something went wrong"
|
"Something went wrong",
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "environment",
|
name: "environment",
|
||||||
"args": [
|
args: [
|
||||||
"Development"
|
"Development",
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "group",
|
name: "group",
|
||||||
"args": [
|
args: [
|
||||||
"Dev. Servers"
|
"Dev. Servers",
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "services",
|
name: "services",
|
||||||
"args": [
|
args: [
|
||||||
"a",
|
"a",
|
||||||
"b",
|
"b",
|
||||||
"c"
|
"c",
|
||||||
]
|
],
|
||||||
}
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
// Test data structure
|
// Test data structure
|
||||||
expect(actualObj).to.deep.equal(expectedObj);
|
expect(actualObj).to.deep.equal(expectedObj)
|
||||||
|
|
||||||
// Test that data structure is the same if fed back in
|
// Test that data structure is the same if fed back in
|
||||||
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`;
|
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`
|
||||||
actualObj = parseAlerta(expectedStr);
|
actualObj = parseAlerta(expectedStr)
|
||||||
expect(actualObj).to.deep.equal(expectedObj);
|
expect(actualObj).to.deep.equal(expectedObj)
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,32 +1,32 @@
|
||||||
import showDatabases from 'shared/parsing/showDatabases';
|
import showDatabases from 'shared/parsing/showDatabases'
|
||||||
|
|
||||||
describe('showDatabases', () => {
|
describe('showDatabases', () => {
|
||||||
it('exposes all the database properties', () => {
|
it('exposes all the database properties', () => {
|
||||||
const response = {"results":[{"series":[{"columns":["name"],"values":[["mydb1"], ["mydb2"]]}]}]};
|
const response = {results: [{series: [{columns: ["name"], values: [["mydb1"], ["mydb2"]]}]}]}
|
||||||
|
|
||||||
const result = showDatabases(response);
|
const result = showDatabases(response)
|
||||||
|
|
||||||
expect(result.errors).to.deep.equal([]);
|
expect(result.errors).to.deep.equal([])
|
||||||
expect(result.databases.length).to.equal(2);
|
expect(result.databases.length).to.equal(2)
|
||||||
expect(result.databases[0]).to.equal('mydb1');
|
expect(result.databases[0]).to.equal('mydb1')
|
||||||
expect(result.databases[1]).to.equal('mydb2');
|
expect(result.databases[1]).to.equal('mydb2')
|
||||||
});
|
})
|
||||||
|
|
||||||
it('returns an empty array when there are no databases', () => {
|
it('returns an empty array when there are no databases', () => {
|
||||||
const response = {"results":[{"series":[{"columns":["name"]}]}]};
|
const response = {results: [{series: [{columns: ["name"]}]}]}
|
||||||
|
|
||||||
const result = showDatabases(response);
|
const result = showDatabases(response)
|
||||||
|
|
||||||
expect(result.errors).to.deep.equal([]);
|
expect(result.errors).to.deep.equal([])
|
||||||
expect(result.databases).to.deep.equal([]);
|
expect(result.databases).to.deep.equal([])
|
||||||
});
|
})
|
||||||
|
|
||||||
it('exposes the server error', () => {
|
it('exposes the server error', () => {
|
||||||
const response = {"results":[{"error":"internal server error?"}]};
|
const response = {results: [{error: "internal server error?"}]}
|
||||||
|
|
||||||
const result = showDatabases(response);
|
const result = showDatabases(response)
|
||||||
|
|
||||||
expect(result.errors).to.deep.equal(['internal server error?']);
|
expect(result.errors).to.deep.equal(['internal server error?'])
|
||||||
expect(result.databases).to.deep.equal([]);
|
expect(result.databases).to.deep.equal([])
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,39 +1,39 @@
|
||||||
import parseShowFieldKeys from 'shared/parsing/showFieldKeys';
|
import parseShowFieldKeys from 'shared/parsing/showFieldKeys'
|
||||||
|
|
||||||
describe('parseShowFieldKeys', () => {
|
describe('parseShowFieldKeys', () => {
|
||||||
it('parses a single result', () => {
|
it('parses a single result', () => {
|
||||||
const response = {"results":[{"series":[{"name":"m1","columns":["fieldKey"],"values":[["f1"],["f2"]]}]}]};
|
const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}]}
|
||||||
|
|
||||||
const result = parseShowFieldKeys(response);
|
const result = parseShowFieldKeys(response)
|
||||||
expect(result.errors).to.eql([]);
|
expect(result.errors).to.eql([])
|
||||||
expect(result.fieldSets).to.eql({
|
expect(result.fieldSets).to.eql({
|
||||||
m1: ['f1', 'f2'],
|
m1: ['f1', 'f2'],
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('parses multiple results', () => {
|
it('parses multiple results', () => {
|
||||||
const response = {"results":[{"series":[{"name":"m1","columns":["fieldKey"],"values":[["f1"],["f2"]]}]},{"series":[{"name":"m2","columns":["fieldKey"],"values":[["f3"],["f4"]]}]}]};
|
const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}, {series: [{name: "m2", columns: ["fieldKey"], values: [["f3"], ["f4"]]}]}]}
|
||||||
const result = parseShowFieldKeys(response);
|
const result = parseShowFieldKeys(response)
|
||||||
expect(result.errors).to.eql([]);
|
expect(result.errors).to.eql([])
|
||||||
expect(result.fieldSets).to.eql({
|
expect(result.fieldSets).to.eql({
|
||||||
m1: ['f1', 'f2'],
|
m1: ['f1', 'f2'],
|
||||||
m2: ['f3', 'f4'],
|
m2: ['f3', 'f4'],
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('parses multiple errors', () => {
|
it('parses multiple errors', () => {
|
||||||
const response = {"results":[{"error": "measurement not found: m1"}, {"error": "measurement not found: m2"}]};
|
const response = {results: [{error: "measurement not found: m1"}, {error: "measurement not found: m2"}]}
|
||||||
const result = parseShowFieldKeys(response);
|
const result = parseShowFieldKeys(response)
|
||||||
expect(result.errors).to.eql(['measurement not found: m1', 'measurement not found: m2']);
|
expect(result.errors).to.eql(['measurement not found: m1', 'measurement not found: m2'])
|
||||||
expect(result.fieldSets).to.eql({});
|
expect(result.fieldSets).to.eql({})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('parses a mix of results and errors', () => {
|
it('parses a mix of results and errors', () => {
|
||||||
const response = {"results":[{"series":[{"name":"m1","columns":["fieldKey"],"values":[["f1"],["f2"]]}]},{"error": "measurement not found: m2"}]};
|
const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}, {error: "measurement not found: m2"}]}
|
||||||
const result = parseShowFieldKeys(response);
|
const result = parseShowFieldKeys(response)
|
||||||
expect(result.errors).to.eql(['measurement not found: m2']);
|
expect(result.errors).to.eql(['measurement not found: m2'])
|
||||||
expect(result.fieldSets).to.eql({
|
expect(result.fieldSets).to.eql({
|
||||||
m1: ['f1', 'f2'],
|
m1: ['f1', 'f2'],
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,34 +1,34 @@
|
||||||
import showQueriesParser from 'shared/parsing/showQueries';
|
import showQueriesParser from 'shared/parsing/showQueries'
|
||||||
|
|
||||||
describe('showQueriesParser', () => {
|
describe('showQueriesParser', () => {
|
||||||
it('exposes all currently running queries', () => {
|
it('exposes all currently running queries', () => {
|
||||||
const response = {"results":[{"series":[{"columns":["qid","query","database","duration"],"values":[[1,"SHOW QUERIES","db1","1s"], [2,"SELECT foo FROM bar","db1","2s"]]}]}]};
|
const response = {results: [{series: [{columns: ["qid", "query", "database", "duration"], values: [[1, "SHOW QUERIES", "db1", "1s"], [2, "SELECT foo FROM bar", "db1", "2s"]]}]}]}
|
||||||
|
|
||||||
const result = showQueriesParser(response);
|
const result = showQueriesParser(response)
|
||||||
|
|
||||||
expect(result.errors).to.eql([]);
|
expect(result.errors).to.eql([])
|
||||||
expect(result.queries.length).to.equal(2);
|
expect(result.queries.length).to.equal(2)
|
||||||
expect(result.queries[0]).to.eql({
|
expect(result.queries[0]).to.eql({
|
||||||
id: 1,
|
id: 1,
|
||||||
database: 'db1',
|
database: 'db1',
|
||||||
query: 'SHOW QUERIES',
|
query: 'SHOW QUERIES',
|
||||||
duration: '1s',
|
duration: '1s',
|
||||||
});
|
})
|
||||||
expect(result.queries[1]).to.eql({
|
expect(result.queries[1]).to.eql({
|
||||||
id: 2,
|
id: 2,
|
||||||
database: 'db1',
|
database: 'db1',
|
||||||
query: 'SELECT foo FROM bar',
|
query: 'SELECT foo FROM bar',
|
||||||
duration: '2s',
|
duration: '2s',
|
||||||
});
|
})
|
||||||
expect({foo: 'bar'}).to.eql({foo: 'bar'})
|
expect({foo: 'bar'}).to.eql({foo: 'bar'})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('exposes the server error', () => {
|
it('exposes the server error', () => {
|
||||||
const response = {"results":[{"error":"internal server error?"}]};
|
const response = {results: [{error: "internal server error?"}]}
|
||||||
|
|
||||||
const result = showQueriesParser(response);
|
const result = showQueriesParser(response)
|
||||||
|
|
||||||
expect(result.errors).to.eql(['internal server error?']);
|
expect(result.errors).to.eql(['internal server error?'])
|
||||||
expect(result.queries).to.eql([]);
|
expect(result.queries).to.eql([])
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,27 +1,27 @@
|
||||||
import parseShowTagKeys from 'shared/parsing/showTagKeys';
|
import parseShowTagKeys from 'shared/parsing/showTagKeys'
|
||||||
|
|
||||||
describe('parseShowTagKeys', () => {
|
describe('parseShowTagKeys', () => {
|
||||||
it('parses the tag keys', () => {
|
it('parses the tag keys', () => {
|
||||||
const response = {"results":[{"series":[{"name":"cpu","columns":["tagKey"],"values":[["cpu"],["host"]]}]}]};
|
const response = {results: [{series: [{name: "cpu", columns: ["tagKey"], values: [["cpu"], ["host"]]}]}]}
|
||||||
|
|
||||||
const result = parseShowTagKeys(response);
|
const result = parseShowTagKeys(response)
|
||||||
expect(result.errors).to.eql([]);
|
expect(result.errors).to.eql([])
|
||||||
expect(result.tagKeys).to.eql(['cpu', 'host']);
|
expect(result.tagKeys).to.eql(['cpu', 'host'])
|
||||||
});
|
})
|
||||||
|
|
||||||
it('handles empty results', () => {
|
it('handles empty results', () => {
|
||||||
const response = {"results":[{}]};
|
const response = {results: [{}]}
|
||||||
|
|
||||||
const result = parseShowTagKeys(response);
|
const result = parseShowTagKeys(response)
|
||||||
expect(result.errors).to.eql([]);
|
expect(result.errors).to.eql([])
|
||||||
expect(result.tagKeys).to.eql([]);
|
expect(result.tagKeys).to.eql([])
|
||||||
});
|
})
|
||||||
|
|
||||||
it('handles errors', () => {
|
it('handles errors', () => {
|
||||||
const response = {"results":[{"error": "influxdb error"}]};
|
const response = {results: [{error: "influxdb error"}]}
|
||||||
|
|
||||||
const result = parseShowTagKeys(response);
|
const result = parseShowTagKeys(response)
|
||||||
expect(result.errors).to.eql([response.results[0].error]);
|
expect(result.errors).to.eql([response.results[0].error])
|
||||||
expect(result.tagKeys).to.eql([]);
|
expect(result.tagKeys).to.eql([])
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,38 +1,38 @@
|
||||||
import showTagValuesParser from 'shared/parsing/showTagValues';
|
import showTagValuesParser from 'shared/parsing/showTagValues'
|
||||||
|
|
||||||
describe('showTagValuesParser', () => {
|
describe('showTagValuesParser', () => {
|
||||||
it('handles an empty result set', () => {
|
it('handles an empty result set', () => {
|
||||||
const response = {"results":[{}]};
|
const response = {results: [{}]}
|
||||||
|
|
||||||
const result = showTagValuesParser(response);
|
const result = showTagValuesParser(response)
|
||||||
|
|
||||||
expect(result.errors).to.eql([]);
|
expect(result.errors).to.eql([])
|
||||||
expect(result.tags).to.eql({});
|
expect(result.tags).to.eql({})
|
||||||
});
|
})
|
||||||
|
|
||||||
it('returns a an object of tag keys mapped to their values', () => {
|
it('returns a an object of tag keys mapped to their values', () => {
|
||||||
const response = {
|
const response = {
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name": "measurementA",
|
name: "measurementA",
|
||||||
"columns": ["key","value"],
|
columns: ["key", "value"],
|
||||||
"values": [
|
values: [
|
||||||
["host", "hostA"],
|
["host", "hostA"],
|
||||||
["host", "hostB"],
|
["host", "hostB"],
|
||||||
["cpu", "cpu0"],
|
["cpu", "cpu0"],
|
||||||
["cpu", "cpu1"],
|
["cpu", "cpu1"],
|
||||||
]
|
],
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
}
|
},
|
||||||
]
|
],
|
||||||
};
|
}
|
||||||
|
|
||||||
const result = showTagValuesParser(response);
|
const result = showTagValuesParser(response)
|
||||||
|
|
||||||
expect(result.errors).to.eql([]);
|
expect(result.errors).to.eql([])
|
||||||
expect(result.tags).to.eql({
|
expect(result.tags).to.eql({
|
||||||
host: [
|
host: [
|
||||||
'hostA',
|
'hostA',
|
||||||
|
@ -42,6 +42,6 @@ describe('showTagValuesParser', () => {
|
||||||
'cpu0',
|
'cpu0',
|
||||||
'cpu1',
|
'cpu1',
|
||||||
],
|
],
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
import {
|
import {
|
||||||
buildRoles,
|
buildRoles,
|
||||||
buildClusterAccounts,
|
buildClusterAccounts,
|
||||||
} from 'src/shared/presenters';
|
} from 'src/shared/presenters'
|
||||||
|
|
||||||
describe('Presenters', function() {
|
describe('Presenters', function() {
|
||||||
describe('roles utils', function() {
|
describe('roles utils', function() {
|
||||||
|
@ -17,13 +17,13 @@ describe('Presenters', function() {
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
const actual = buildRoles(roles);
|
const actual = buildRoles(roles)
|
||||||
|
|
||||||
expect(actual[0].users).to.eql([]);
|
expect(actual[0].users).to.eql([])
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('when a role has no permissions', function() {
|
describe('when a role has no permissions', function() {
|
||||||
it('set\'s a roles permission as an empty array', function() {
|
it('set\'s a roles permission as an empty array', function() {
|
||||||
|
@ -35,47 +35,47 @@ describe('Presenters', function() {
|
||||||
"will@influxdb.com",
|
"will@influxdb.com",
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
const actual = buildRoles(roles);
|
const actual = buildRoles(roles)
|
||||||
|
|
||||||
expect(actual[0].permissions).to.eql([]);
|
expect(actual[0].permissions).to.eql([])
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('when a role has users and permissions', function() {
|
describe('when a role has users and permissions', function() {
|
||||||
beforeEach(function() {
|
beforeEach(function() {
|
||||||
const roles = [
|
const roles = [
|
||||||
{
|
{
|
||||||
"name": "Marketing",
|
name: "Marketing",
|
||||||
"permissions": {
|
permissions: {
|
||||||
"": [
|
"": [
|
||||||
"ViewAdmin",
|
"ViewAdmin",
|
||||||
],
|
],
|
||||||
"db1": [
|
db1: [
|
||||||
"ReadData"
|
"ReadData",
|
||||||
],
|
],
|
||||||
"db2": [
|
db2: [
|
||||||
"ReadData",
|
"ReadData",
|
||||||
"AddRemoveNode",
|
"AddRemoveNode",
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
"users": [
|
users: [
|
||||||
"roley@influxdb.com",
|
"roley@influxdb.com",
|
||||||
"will@influxdb.com"
|
"will@influxdb.com",
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
this.roles = buildRoles(roles);
|
this.roles = buildRoles(roles)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('each role has a name and a list of users (if they exist)', function() {
|
it('each role has a name and a list of users (if they exist)', function() {
|
||||||
const role = this.roles[0];
|
const role = this.roles[0]
|
||||||
expect(role.name).to.equal('Marketing');
|
expect(role.name).to.equal('Marketing')
|
||||||
expect(role.users).to.contain("roley@influxdb.com");
|
expect(role.users).to.contain("roley@influxdb.com")
|
||||||
expect(role.users).to.contain("will@influxdb.com");
|
expect(role.users).to.contain("will@influxdb.com")
|
||||||
});
|
})
|
||||||
|
|
||||||
it('transforms permissions into a list of objects and each permission has a list of resources', function() {
|
it('transforms permissions into a list of objects and each permission has a list of resources', function() {
|
||||||
expect(this.roles[0].permissions).to.eql([
|
expect(this.roles[0].permissions).to.eql([
|
||||||
|
@ -97,11 +97,11 @@ describe('Presenters', function() {
|
||||||
description: 'Can add/remove nodes from a cluster',
|
description: 'Can add/remove nodes from a cluster',
|
||||||
resources: ['db2'],
|
resources: ['db2'],
|
||||||
},
|
},
|
||||||
]);
|
])
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('cluster utils', function() {
|
describe('cluster utils', function() {
|
||||||
describe('buildClusterAccounts', function() {
|
describe('buildClusterAccounts', function() {
|
||||||
|
@ -109,50 +109,50 @@ describe('Presenters', function() {
|
||||||
it('adds role information to each cluster account and parses permissions', function() {
|
it('adds role information to each cluster account and parses permissions', function() {
|
||||||
const users = [
|
const users = [
|
||||||
{
|
{
|
||||||
"name":"jon@example.com",
|
name: "jon@example.com",
|
||||||
"hash":"xxxxx",
|
hash: "xxxxx",
|
||||||
"permissions": {
|
permissions: {
|
||||||
"": [
|
"": [
|
||||||
"ViewAdmin",
|
"ViewAdmin",
|
||||||
],
|
],
|
||||||
"db1": [
|
db1: [
|
||||||
"ReadData",
|
"ReadData",
|
||||||
],
|
],
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name":"ned@example.com",
|
name: "ned@example.com",
|
||||||
"hash":"xxxxx"
|
hash: "xxxxx",
|
||||||
}
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
const roles = [
|
const roles = [
|
||||||
{
|
{
|
||||||
"name":"Admin",
|
name: "Admin",
|
||||||
"permissions":{
|
permissions: {
|
||||||
"db2": [
|
db2: [
|
||||||
"ViewAdmin",
|
"ViewAdmin",
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
"users":[
|
users: [
|
||||||
"jon@example.com",
|
"jon@example.com",
|
||||||
"ned@example.com",
|
"ned@example.com",
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name":"Marketing",
|
name: "Marketing",
|
||||||
"permissions": {
|
permissions: {
|
||||||
"db3": [
|
db3: [
|
||||||
"ReadData",
|
"ReadData",
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
"users": [
|
users: [
|
||||||
"jon@example.com",
|
"jon@example.com",
|
||||||
]
|
],
|
||||||
}
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
const actual = buildClusterAccounts(users, roles);
|
const actual = buildClusterAccounts(users, roles)
|
||||||
|
|
||||||
const expected = [
|
const expected = [
|
||||||
{
|
{
|
||||||
|
@ -183,7 +183,7 @@ describe('Presenters', function() {
|
||||||
resources: ['db2'],
|
resources: ['db2'],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
users:[
|
users: [
|
||||||
"jon@example.com",
|
"jon@example.com",
|
||||||
"ned@example.com",
|
"ned@example.com",
|
||||||
],
|
],
|
||||||
|
@ -198,10 +198,10 @@ describe('Presenters', function() {
|
||||||
resources: ['db3'],
|
resources: ['db3'],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
users:[
|
users: [
|
||||||
"jon@example.com",
|
"jon@example.com",
|
||||||
]
|
],
|
||||||
}
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -219,38 +219,38 @@ describe('Presenters', function() {
|
||||||
resources: ['db2'],
|
resources: ['db2'],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
users:[
|
users: [
|
||||||
"jon@example.com",
|
"jon@example.com",
|
||||||
"ned@example.com",
|
"ned@example.com",
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
}
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
expect(actual).to.eql(expected);
|
expect(actual).to.eql(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can handle empty results for users and roles', function() {
|
it('can handle empty results for users and roles', function() {
|
||||||
const users = undefined;
|
const users = undefined
|
||||||
const roles = undefined;
|
const roles = undefined
|
||||||
|
|
||||||
const actual = buildClusterAccounts(users, roles);
|
const actual = buildClusterAccounts(users, roles)
|
||||||
|
|
||||||
expect(actual).to.eql([]);
|
expect(actual).to.eql([])
|
||||||
});
|
})
|
||||||
|
|
||||||
it('sets roles to an empty array if a user has no roles', function() {
|
it('sets roles to an empty array if a user has no roles', function() {
|
||||||
const users = [{
|
const users = [{
|
||||||
name: "ned@example.com",
|
name: "ned@example.com",
|
||||||
hash: "xxxxx",
|
hash: "xxxxx",
|
||||||
}];
|
}]
|
||||||
const roles = [];
|
const roles = []
|
||||||
|
|
||||||
const actual = buildClusterAccounts(users, roles);
|
const actual = buildClusterAccounts(users, roles)
|
||||||
|
|
||||||
expect(actual[0].roles).to.eql([]);
|
expect(actual[0].roles).to.eql([])
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -12,29 +12,29 @@ describe('Shared.Reducers.appReducer', () => {
|
||||||
inPresentationMode: false,
|
inPresentationMode: false,
|
||||||
},
|
},
|
||||||
persisted: {
|
persisted: {
|
||||||
autoRefresh: 0
|
autoRefresh: 0,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
it('should handle ENABLE_PRESENTATION_MODE', () => {
|
it('should handle ENABLE_PRESENTATION_MODE', () => {
|
||||||
const reducedState = appReducer(initialState, enablePresentationMode());
|
const reducedState = appReducer(initialState, enablePresentationMode())
|
||||||
|
|
||||||
expect(reducedState.ephemeral.inPresentationMode).to.equal(true);
|
expect(reducedState.ephemeral.inPresentationMode).to.equal(true)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should handle DISABLE_PRESENTATION_MODE', () => {
|
it('should handle DISABLE_PRESENTATION_MODE', () => {
|
||||||
Object.assign(initialState, {ephemeral: {inPresentationMode: true}})
|
Object.assign(initialState, {ephemeral: {inPresentationMode: true}})
|
||||||
|
|
||||||
const reducedState = appReducer(initialState, disablePresentationMode());
|
const reducedState = appReducer(initialState, disablePresentationMode())
|
||||||
|
|
||||||
expect(reducedState.ephemeral.inPresentationMode).to.equal(false);
|
expect(reducedState.ephemeral.inPresentationMode).to.equal(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should handle SET_AUTOREFRESH', () => {
|
it('should handle SET_AUTOREFRESH', () => {
|
||||||
const expectedMs = 15000
|
const expectedMs = 15000
|
||||||
|
|
||||||
const reducedState = appReducer(initialState, setAutoRefresh(expectedMs));
|
const reducedState = appReducer(initialState, setAutoRefresh(expectedMs))
|
||||||
|
|
||||||
expect(reducedState.persisted.autoRefresh).to.equal(expectedMs);
|
expect(reducedState.persisted.autoRefresh).to.equal(expectedMs)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,47 +1,47 @@
|
||||||
import reducer from 'src/shared/reducers/sources';
|
import reducer from 'src/shared/reducers/sources'
|
||||||
|
|
||||||
import {
|
import {
|
||||||
loadSources,
|
loadSources,
|
||||||
updateSource,
|
updateSource,
|
||||||
addSource,
|
addSource,
|
||||||
} from 'src/shared/actions/sources';
|
} from 'src/shared/actions/sources'
|
||||||
|
|
||||||
describe('Shared.Reducers.sources', () => {
|
describe('Shared.Reducers.sources', () => {
|
||||||
it('can correctly show default sources when adding a source', () => {
|
it('can correctly show default sources when adding a source', () => {
|
||||||
let state = [];
|
let state = []
|
||||||
|
|
||||||
state = reducer(state, addSource({
|
state = reducer(state, addSource({
|
||||||
id: '1',
|
id: '1',
|
||||||
default: true,
|
"default": true,
|
||||||
}));
|
}))
|
||||||
|
|
||||||
state = reducer(state, addSource({
|
state = reducer(state, addSource({
|
||||||
id: '2',
|
id: '2',
|
||||||
default: true,
|
"default": true,
|
||||||
}));
|
}))
|
||||||
|
|
||||||
expect(state.filter((s) => s.default).length).to.equal(1);
|
expect(state.filter((s) => s.default).length).to.equal(1)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can correctly show default sources when updating a source', () => {
|
it('can correctly show default sources when updating a source', () => {
|
||||||
let state = [];
|
let state = []
|
||||||
|
|
||||||
state = reducer(state, addSource({
|
state = reducer(state, addSource({
|
||||||
id: '1',
|
id: '1',
|
||||||
default: true,
|
"default": true,
|
||||||
}));
|
}))
|
||||||
|
|
||||||
state = reducer(state, addSource({
|
state = reducer(state, addSource({
|
||||||
id: '2',
|
id: '2',
|
||||||
default: true,
|
"default": true,
|
||||||
}));
|
}))
|
||||||
|
|
||||||
state = reducer(state, updateSource({
|
state = reducer(state, updateSource({
|
||||||
id: '1',
|
id: '1',
|
||||||
default: true,
|
"default": true,
|
||||||
}));
|
}))
|
||||||
|
|
||||||
expect(state.find(({id}) => id === '1').default).to.equal(true);
|
expect(state.find(({id}) => id === '1').default).to.equal(true)
|
||||||
expect(state.find(({id}) => id === '2').default).to.equal(false);
|
expect(state.find(({id}) => id === '2').default).to.equal(false)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,13 +1,13 @@
|
||||||
window.then = function(cb, done) {
|
window.then = function(cb, done) {
|
||||||
window.setTimeout(function() {
|
window.setTimeout(function() {
|
||||||
cb();
|
cb()
|
||||||
if (typeof done === 'function') {
|
if (typeof done === 'function') {
|
||||||
done();
|
done()
|
||||||
}
|
}
|
||||||
}, 0);
|
}, 0)
|
||||||
};
|
}
|
||||||
|
|
||||||
var chai = require('chai');
|
const chai = require('chai')
|
||||||
chai.use(require('sinon-chai'));
|
chai.use(require('sinon-chai'))
|
||||||
|
|
||||||
global.expect = chai.expect;
|
global.expect = chai.expect
|
||||||
|
|
|
@ -1,39 +1,39 @@
|
||||||
import {formatBytes, formatRPDuration} from 'utils/formatting';
|
import {formatBytes, formatRPDuration} from 'utils/formatting'
|
||||||
|
|
||||||
describe('Formatting helpers', () => {
|
describe('Formatting helpers', () => {
|
||||||
describe('formatBytes', () => {
|
describe('formatBytes', () => {
|
||||||
it('returns null when passed a falsey value', () => {
|
it('returns null when passed a falsey value', () => {
|
||||||
const actual = formatBytes(null);
|
const actual = formatBytes(null)
|
||||||
|
|
||||||
expect(actual).to.equal(null);
|
expect(actual).to.equal(null)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('returns the correct value when passed 0', () => {
|
it('returns the correct value when passed 0', () => {
|
||||||
const actual = formatBytes(0);
|
const actual = formatBytes(0)
|
||||||
|
|
||||||
expect(actual).to.equal('0 Bytes');
|
expect(actual).to.equal('0 Bytes')
|
||||||
});
|
})
|
||||||
|
|
||||||
it('converts a raw byte value into it\'s most appropriate unit', () => {
|
it('converts a raw byte value into it\'s most appropriate unit', () => {
|
||||||
expect(formatBytes(1000)).to.equal('1 KB');
|
expect(formatBytes(1000)).to.equal('1 KB')
|
||||||
expect(formatBytes(1000000)).to.equal('1 MB');
|
expect(formatBytes(1000000)).to.equal('1 MB')
|
||||||
expect(formatBytes(1000000000)).to.equal('1 GB');
|
expect(formatBytes(1000000000)).to.equal('1 GB')
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
||||||
describe('formatRPDuration', () => {
|
describe('formatRPDuration', () => {
|
||||||
it("returns 'infinite' for a retention policy with a value of '0'", () => {
|
it("returns 'infinite' for a retention policy with a value of '0'", () => {
|
||||||
const actual = formatRPDuration('0')
|
const actual = formatRPDuration('0')
|
||||||
|
|
||||||
expect(actual).to.equal('∞');
|
expect(actual).to.equal('∞')
|
||||||
});
|
})
|
||||||
|
|
||||||
it('correctly formats retention policy durations', () => {
|
it('correctly formats retention policy durations', () => {
|
||||||
expect(formatRPDuration('24h0m0s')).to.equal('24h');
|
expect(formatRPDuration('24h0m0s')).to.equal('24h')
|
||||||
|
|
||||||
expect(formatRPDuration('168h0m0s')).to.equal('7d');
|
expect(formatRPDuration('168h0m0s')).to.equal('7d')
|
||||||
|
|
||||||
expect(formatRPDuration('200h32m3s')).to.equal('8d8h32m3s');
|
expect(formatRPDuration('200h32m3s')).to.equal('8d8h32m3s')
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -1,39 +1,36 @@
|
||||||
import timeSeriesToDygraph from 'src/utils/timeSeriesToDygraph';
|
import timeSeriesToDygraph from 'src/utils/timeSeriesToDygraph'
|
||||||
import {STROKE_WIDTH} from 'src/shared/constants';
|
|
||||||
|
|
||||||
const {light: strokeWidth} = STROKE_WIDTH;
|
|
||||||
|
|
||||||
describe('timeSeriesToDygraph', () => {
|
describe('timeSeriesToDygraph', () => {
|
||||||
it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
|
it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
|
||||||
const influxResponse = [
|
const influxResponse = [
|
||||||
{
|
{
|
||||||
"response":
|
response:
|
||||||
{
|
{
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m1",
|
name: "m1",
|
||||||
"columns": ["time","f1"],
|
columns: ["time", "f1"],
|
||||||
"values": [[1000, 1],[2000, 2]],
|
values: [[1000, 1], [2000, 2]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m1",
|
name: "m1",
|
||||||
"columns": ["time","f2"],
|
columns: ["time", "f2"],
|
||||||
"values": [[2000, 3],[4000, 4]],
|
values: [[2000, 3], [4000, 4]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
const actual = timeSeriesToDygraph(influxResponse);
|
const actual = timeSeriesToDygraph(influxResponse)
|
||||||
|
|
||||||
const expected = {
|
const expected = {
|
||||||
labels: [
|
labels: [
|
||||||
|
@ -49,40 +46,38 @@ describe('timeSeriesToDygraph', () => {
|
||||||
dygraphSeries: {
|
dygraphSeries: {
|
||||||
'm1.f1': {
|
'm1.f1': {
|
||||||
axis: 'y',
|
axis: 'y',
|
||||||
strokeWidth,
|
|
||||||
},
|
},
|
||||||
'm1.f2': {
|
'm1.f2': {
|
||||||
axis: 'y',
|
axis: 'y',
|
||||||
strokeWidth,
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
expect(actual).to.deep.equal(expected);
|
expect(actual).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can sort numerical timestamps correctly', () => {
|
it('can sort numerical timestamps correctly', () => {
|
||||||
const influxResponse = [
|
const influxResponse = [
|
||||||
{
|
{
|
||||||
"response":
|
response:
|
||||||
{
|
{
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m1",
|
name: "m1",
|
||||||
"columns": ["time","f1"],
|
columns: ["time", "f1"],
|
||||||
"values": [[100, 1],[3000, 3],[200, 2]],
|
values: [[100, 1], [3000, 3], [200, 2]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
|
|
||||||
const actual = timeSeriesToDygraph(influxResponse);
|
const actual = timeSeriesToDygraph(influxResponse)
|
||||||
|
|
||||||
const expected = {
|
const expected = {
|
||||||
labels: [
|
labels: [
|
||||||
|
@ -94,113 +89,110 @@ describe('timeSeriesToDygraph', () => {
|
||||||
[new Date(200), 2],
|
[new Date(200), 2],
|
||||||
[new Date(3000), 3],
|
[new Date(3000), 3],
|
||||||
],
|
],
|
||||||
};
|
}
|
||||||
|
|
||||||
expect(actual.timeSeries).to.deep.equal(expected.timeSeries);
|
expect(actual.timeSeries).to.deep.equal(expected.timeSeries)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can parse multiple responses into two axes', () => {
|
it('can parse multiple responses into two axes', () => {
|
||||||
const influxResponse = [
|
const influxResponse = [
|
||||||
{
|
{
|
||||||
"response":
|
response:
|
||||||
{
|
{
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m1",
|
name: "m1",
|
||||||
"columns": ["time","f1"],
|
columns: ["time", "f1"],
|
||||||
"values": [[1000, 1],[2000, 2]],
|
values: [[1000, 1], [2000, 2]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m1",
|
name: "m1",
|
||||||
"columns": ["time","f2"],
|
columns: ["time", "f2"],
|
||||||
"values": [[2000, 3],[4000, 4]],
|
values: [[2000, 3], [4000, 4]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"response":
|
response:
|
||||||
{
|
{
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m3",
|
name: "m3",
|
||||||
"columns": ["time","f3"],
|
columns: ["time", "f3"],
|
||||||
"values": [[1000, 1],[2000, 2]],
|
values: [[1000, 1], [2000, 2]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
const actual = timeSeriesToDygraph(influxResponse);
|
const actual = timeSeriesToDygraph(influxResponse)
|
||||||
|
|
||||||
const expected = {
|
const expected = {
|
||||||
'm1.f1': {
|
'm1.f1': {
|
||||||
axis: 'y',
|
axis: 'y',
|
||||||
strokeWidth,
|
},
|
||||||
},
|
'm1.f2': {
|
||||||
'm1.f2': {
|
axis: 'y',
|
||||||
axis: 'y',
|
},
|
||||||
strokeWidth,
|
'm3.f3': {
|
||||||
},
|
axis: 'y2',
|
||||||
'm3.f3': {
|
},
|
||||||
axis: 'y2',
|
}
|
||||||
strokeWidth,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
expect(actual.dygraphSeries).to.deep.equal(expected);
|
expect(actual.dygraphSeries).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('can parse multiple responses with the same field and measurement', () => {
|
it('can parse multiple responses with the same field and measurement', () => {
|
||||||
const influxResponse = [
|
const influxResponse = [
|
||||||
{
|
{
|
||||||
"response":
|
response:
|
||||||
{
|
{
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m1",
|
name: "m1",
|
||||||
"columns": ["time","f1"],
|
columns: ["time", "f1"],
|
||||||
"values": [[1000, 1],[2000, 2]],
|
values: [[1000, 1], [2000, 2]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"response":
|
response:
|
||||||
{
|
{
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m1",
|
name: "m1",
|
||||||
"columns": ["time","f1"],
|
columns: ["time", "f1"],
|
||||||
"values": [[2000, 3],[4000, 4]],
|
values: [[2000, 3], [4000, 4]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
const actual = timeSeriesToDygraph(influxResponse);
|
const actual = timeSeriesToDygraph(influxResponse)
|
||||||
|
|
||||||
const expected = {
|
const expected = {
|
||||||
labels: [
|
labels: [
|
||||||
|
@ -214,162 +206,107 @@ describe('timeSeriesToDygraph', () => {
|
||||||
[new Date(4000), null, 4],
|
[new Date(4000), null, 4],
|
||||||
],
|
],
|
||||||
dygraphSeries: {
|
dygraphSeries: {
|
||||||
// 'm1.f1': {
|
|
||||||
// axis: 'y',
|
|
||||||
// strokeWidth,
|
|
||||||
// },
|
|
||||||
'm1.f1': {
|
'm1.f1': {
|
||||||
axis: 'y2',
|
axis: 'y2',
|
||||||
strokeWidth,
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
expect(actual).to.deep.equal(expected);
|
expect(actual).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('it does not use multiple axes if being used for the DataExplorer', () => {
|
it('it does not use multiple axes if being used for the DataExplorer', () => {
|
||||||
const influxResponse = [
|
const influxResponse = [
|
||||||
{
|
{
|
||||||
"response":
|
response:
|
||||||
{
|
{
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m1",
|
name: "m1",
|
||||||
"columns": ["time","f1"],
|
columns: ["time", "f1"],
|
||||||
"values": [[1000, 1],[2000, 2]],
|
values: [[1000, 1], [2000, 2]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"response":
|
response:
|
||||||
{
|
{
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"m1",
|
name: "m1",
|
||||||
"columns": ["time","f2"],
|
columns: ["time", "f2"],
|
||||||
"values": [[2000, 3],[4000, 4]],
|
values: [[2000, 3], [4000, 4]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
const isInDataExplorer = true;
|
const isInDataExplorer = true
|
||||||
const actual = timeSeriesToDygraph(influxResponse, undefined, isInDataExplorer);
|
const actual = timeSeriesToDygraph(influxResponse, undefined, isInDataExplorer)
|
||||||
|
|
||||||
const expected = {
|
const expected = {}
|
||||||
'm1.f1': {
|
|
||||||
strokeWidth,
|
|
||||||
},
|
|
||||||
'm1.f2': {
|
|
||||||
strokeWidth,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
expect(actual.dygraphSeries).to.deep.equal(expected);
|
expect(actual.dygraphSeries).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
|
|
||||||
it('it highlights the appropriate response', () => {
|
|
||||||
const influxResponse = [
|
|
||||||
{
|
|
||||||
"response":
|
|
||||||
{
|
|
||||||
"results": [
|
|
||||||
{
|
|
||||||
"series": [
|
|
||||||
{
|
|
||||||
"name":"m1",
|
|
||||||
"columns": ["time","f1"],
|
|
||||||
"values": [[1000, 1],[2000, 2]],
|
|
||||||
},
|
|
||||||
]
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"response":
|
|
||||||
{
|
|
||||||
"results": [
|
|
||||||
{
|
|
||||||
"series": [
|
|
||||||
{
|
|
||||||
"name":"m2",
|
|
||||||
"columns": ["time","f2"],
|
|
||||||
"values": [[2000, 3],[4000, 4]],
|
|
||||||
},
|
|
||||||
]
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const highlightIndex = 1;
|
|
||||||
const actual = timeSeriesToDygraph(influxResponse, highlightIndex);
|
|
||||||
const {dygraphSeries} = actual;
|
|
||||||
|
|
||||||
expect(dygraphSeries["m2.f2"].strokeWidth).to.be.above(dygraphSeries["m1.f1"].strokeWidth);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
|
it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
|
||||||
const influxResponse = [
|
const influxResponse = [
|
||||||
{
|
{
|
||||||
"response":
|
response:
|
||||||
{
|
{
|
||||||
"results": [
|
results: [
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"mb",
|
name: "mb",
|
||||||
"columns": ["time","f1"],
|
columns: ["time", "f1"],
|
||||||
"values": [[1000, 1],[2000, 2]],
|
values: [[1000, 1], [2000, 2]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"ma",
|
name: "ma",
|
||||||
"columns": ["time","f1"],
|
columns: ["time", "f1"],
|
||||||
"values": [[1000, 1],[2000, 2]],
|
values: [[1000, 1], [2000, 2]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"mc",
|
name: "mc",
|
||||||
"columns": ["time","f2"],
|
columns: ["time", "f2"],
|
||||||
"values": [[2000, 3],[4000, 4]],
|
values: [[2000, 3], [4000, 4]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"series": [
|
series: [
|
||||||
{
|
{
|
||||||
"name":"mc",
|
name: "mc",
|
||||||
"columns": ["time","f1"],
|
columns: ["time", "f1"],
|
||||||
"values": [[2000, 3],[4000, 4]],
|
values: [[2000, 3], [4000, 4]],
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
];
|
]
|
||||||
|
|
||||||
const actual = timeSeriesToDygraph(influxResponse);
|
const actual = timeSeriesToDygraph(influxResponse)
|
||||||
|
|
||||||
const expected = [
|
const expected = [
|
||||||
'time',
|
'time',
|
||||||
|
@ -377,8 +314,8 @@ describe('timeSeriesToDygraph', () => {
|
||||||
`mb.f1`,
|
`mb.f1`,
|
||||||
`mc.f1`,
|
`mc.f1`,
|
||||||
`mc.f2`,
|
`mc.f2`,
|
||||||
];
|
]
|
||||||
|
|
||||||
expect(actual.labels).to.deep.equal(expected);
|
expect(actual.labels).to.deep.equal(expected)
|
||||||
});
|
})
|
||||||
});
|
})
|
||||||
|
|
|
@ -364,26 +364,36 @@ export const updateRolePermissionsAsync = (role, permissions) => async (dispatch
|
||||||
dispatch(publishAutoDismissingNotification('success', 'Role permissions updated'))
|
dispatch(publishAutoDismissingNotification('success', 'Role permissions updated'))
|
||||||
dispatch(syncRole(role, data))
|
dispatch(syncRole(role, data))
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
dispatch(publishNotification('error', `Failed to updated role: ${error.data.message}`))
|
dispatch(publishNotification('error', `Failed to update role: ${error.data.message}`))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const updateUserPermissionsAsync = (user, permissions) => async (dispatch) => {
|
export const updateUserPermissionsAsync = (user, permissions) => async (dispatch) => {
|
||||||
try {
|
try {
|
||||||
const {data} = await updateUserAJAX(user.links.self, user.roles, permissions)
|
const {data} = await updateUserAJAX(user.links.self, {permissions})
|
||||||
dispatch(publishAutoDismissingNotification('success', 'User permissions updated'))
|
dispatch(publishAutoDismissingNotification('success', 'User permissions updated'))
|
||||||
dispatch(syncUser(user, data))
|
dispatch(syncUser(user, data))
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
dispatch(publishNotification('error', `Failed to updated user: ${error.data.message}`))
|
dispatch(publishNotification('error', `Failed to update user: ${error.data.message}`))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const updateUserRolesAsync = (user, roles) => async (dispatch) => {
|
export const updateUserRolesAsync = (user, roles) => async (dispatch) => {
|
||||||
try {
|
try {
|
||||||
const {data} = await updateUserAJAX(user.links.self, roles, user.permissions)
|
const {data} = await updateUserAJAX(user.links.self, {roles})
|
||||||
dispatch(publishAutoDismissingNotification('success', 'User roles updated'))
|
dispatch(publishAutoDismissingNotification('success', 'User roles updated'))
|
||||||
dispatch(syncUser(user, data))
|
dispatch(syncUser(user, data))
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
dispatch(publishNotification('error', `Failed to updated user: ${error.data.message}`))
|
dispatch(publishNotification('error', `Failed to update user: ${error.data.message}`))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const updateUserPasswordAsync = (user, password) => async (dispatch) => {
|
||||||
|
try {
|
||||||
|
const {data} = await updateUserAJAX(user.links.self, {password})
|
||||||
|
dispatch(publishAutoDismissingNotification('success', 'User password updated'))
|
||||||
|
dispatch(syncUser(user, data))
|
||||||
|
} catch (error) {
|
||||||
|
dispatch(publishNotification('error', `Failed to update user: ${error.data.message}`))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -159,15 +159,12 @@ export const updateRole = async (url, users, permissions) => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const updateUser = async (url, roles, permissions) => {
|
export const updateUser = async (url, updates) => {
|
||||||
try {
|
try {
|
||||||
return await AJAX({
|
return await AJAX({
|
||||||
method: 'PATCH',
|
method: 'PATCH',
|
||||||
url,
|
url,
|
||||||
data: {
|
data: updates,
|
||||||
roles,
|
|
||||||
permissions,
|
|
||||||
},
|
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(error)
|
console.error(error)
|
||||||
|
|
|
@ -28,6 +28,7 @@ const AdminTabs = ({
|
||||||
onUpdateRolePermissions,
|
onUpdateRolePermissions,
|
||||||
onUpdateUserRoles,
|
onUpdateUserRoles,
|
||||||
onUpdateUserPermissions,
|
onUpdateUserPermissions,
|
||||||
|
onUpdateUserPassword,
|
||||||
}) => {
|
}) => {
|
||||||
let tabs = [
|
let tabs = [
|
||||||
{
|
{
|
||||||
|
@ -51,6 +52,7 @@ const AdminTabs = ({
|
||||||
onFilter={onFilterUsers}
|
onFilter={onFilterUsers}
|
||||||
onUpdatePermissions={onUpdateUserPermissions}
|
onUpdatePermissions={onUpdateUserPermissions}
|
||||||
onUpdateRoles={onUpdateUserRoles}
|
onUpdateRoles={onUpdateUserRoles}
|
||||||
|
onUpdatePassword={onUpdateUserPassword}
|
||||||
/>
|
/>
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
|
@ -135,6 +137,7 @@ AdminTabs.propTypes = {
|
||||||
hasRoles: bool.isRequired,
|
hasRoles: bool.isRequired,
|
||||||
onUpdateUserPermissions: func,
|
onUpdateUserPermissions: func,
|
||||||
onUpdateUserRoles: func,
|
onUpdateUserRoles: func,
|
||||||
|
onUpdateUserPassword: func,
|
||||||
}
|
}
|
||||||
|
|
||||||
export default AdminTabs
|
export default AdminTabs
|
||||||
|
|
|
@ -0,0 +1,94 @@
|
||||||
|
import React, {Component, PropTypes} from 'react'
|
||||||
|
|
||||||
|
import OnClickOutside from 'shared/components/OnClickOutside'
|
||||||
|
import ConfirmButtons from 'src/shared/components/ConfirmButtons'
|
||||||
|
|
||||||
|
class ChangePassRow extends Component {
|
||||||
|
constructor(props) {
|
||||||
|
super(props)
|
||||||
|
this.state = {
|
||||||
|
showForm: false,
|
||||||
|
}
|
||||||
|
this.showForm = ::this.showForm
|
||||||
|
this.handleCancel = ::this.handleCancel
|
||||||
|
this.handleKeyPress = ::this.handleKeyPress
|
||||||
|
this.handleEdit = ::this.handleEdit
|
||||||
|
this.handleSubmit = ::this.handleSubmit
|
||||||
|
}
|
||||||
|
|
||||||
|
showForm() {
|
||||||
|
this.setState({showForm: true})
|
||||||
|
}
|
||||||
|
|
||||||
|
handleCancel() {
|
||||||
|
this.setState({showForm: false})
|
||||||
|
}
|
||||||
|
|
||||||
|
handleClickOutside() {
|
||||||
|
this.setState({showForm: false})
|
||||||
|
}
|
||||||
|
|
||||||
|
handleSubmit(user) {
|
||||||
|
this.props.onApply(user)
|
||||||
|
this.setState({showForm: false})
|
||||||
|
}
|
||||||
|
|
||||||
|
handleKeyPress(user) {
|
||||||
|
return (e) => {
|
||||||
|
if (e.key === 'Enter') {
|
||||||
|
this.handleSubmit(user)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
handleEdit(user) {
|
||||||
|
return (e) => {
|
||||||
|
this.props.onEdit(user, {[e.target.name]: e.target.value})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
const {user} = this.props
|
||||||
|
|
||||||
|
if (this.state.showForm) {
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<input
|
||||||
|
className="form-control"
|
||||||
|
name="password"
|
||||||
|
type="password"
|
||||||
|
value={user.password || ''}
|
||||||
|
placeholder="Password"
|
||||||
|
onChange={this.handleEdit(user)}
|
||||||
|
onKeyPress={this.handleKeyPress(user)}
|
||||||
|
autoFocus={true}
|
||||||
|
/>
|
||||||
|
<ConfirmButtons
|
||||||
|
onConfirm={this.handleSubmit}
|
||||||
|
item={user}
|
||||||
|
onCancel={this.handleCancel}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<button
|
||||||
|
className="btn btn-xs btn-info admin-table--hidden"
|
||||||
|
onClick={this.showForm}
|
||||||
|
>
|
||||||
|
Change Password
|
||||||
|
</button>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const {shape, func} = PropTypes
|
||||||
|
|
||||||
|
ChangePassRow.propTypes = {
|
||||||
|
user: shape().isRequired,
|
||||||
|
onApply: func.isRequired,
|
||||||
|
onEdit: func.isRequired,
|
||||||
|
}
|
||||||
|
|
||||||
|
export default OnClickOutside(ChangePassRow)
|
|
@ -112,7 +112,7 @@ class DatabaseRow extends Component {
|
||||||
onConfirm={() => onDelete(database, retentionPolicy)}
|
onConfirm={() => onDelete(database, retentionPolicy)}
|
||||||
onCancel={this.handleEndDelete} /> :
|
onCancel={this.handleEndDelete} /> :
|
||||||
<button
|
<button
|
||||||
className="btn btn-xs btn-danger admin-table--delete"
|
className="btn btn-xs btn-danger admin-table--hidden"
|
||||||
style={isDeletable ? {} : {visibility: 'hidden'}}
|
style={isDeletable ? {} : {visibility: 'hidden'}}
|
||||||
onClick={this.handleStartDelete}>{`Delete ${name}`}
|
onClick={this.handleStartDelete}>{`Delete ${name}`}
|
||||||
</button>
|
</button>
|
||||||
|
|
|
@ -6,9 +6,10 @@ import UserEditingRow from 'src/admin/components/UserEditingRow'
|
||||||
import MultiSelectDropdown from 'shared/components/MultiSelectDropdown'
|
import MultiSelectDropdown from 'shared/components/MultiSelectDropdown'
|
||||||
import ConfirmButtons from 'shared/components/ConfirmButtons'
|
import ConfirmButtons from 'shared/components/ConfirmButtons'
|
||||||
import DeleteConfirmTableCell from 'shared/components/DeleteConfirmTableCell'
|
import DeleteConfirmTableCell from 'shared/components/DeleteConfirmTableCell'
|
||||||
|
import ChangePassRow from 'src/admin/components/ChangePassRow'
|
||||||
|
|
||||||
const UserRow = ({
|
const UserRow = ({
|
||||||
user: {name, roles, permissions},
|
user: {name, roles, permissions, password},
|
||||||
user,
|
user,
|
||||||
allRoles,
|
allRoles,
|
||||||
allPermissions,
|
allPermissions,
|
||||||
|
@ -21,6 +22,7 @@ const UserRow = ({
|
||||||
onDelete,
|
onDelete,
|
||||||
onUpdatePermissions,
|
onUpdatePermissions,
|
||||||
onUpdateRoles,
|
onUpdateRoles,
|
||||||
|
onUpdatePassword,
|
||||||
}) => {
|
}) => {
|
||||||
const handleUpdatePermissions = (allowed) => {
|
const handleUpdatePermissions = (allowed) => {
|
||||||
onUpdatePermissions(user, [{scope: 'all', allowed}])
|
onUpdatePermissions(user, [{scope: 'all', allowed}])
|
||||||
|
@ -30,6 +32,10 @@ const UserRow = ({
|
||||||
onUpdateRoles(user, allRoles.filter(r => roleNames.find(rn => rn === r.name)))
|
onUpdateRoles(user, allRoles.filter(r => roleNames.find(rn => rn === r.name)))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const handleUpdatePassword = () => {
|
||||||
|
onUpdatePassword(user, password)
|
||||||
|
}
|
||||||
|
|
||||||
if (isEditing) {
|
if (isEditing) {
|
||||||
return (
|
return (
|
||||||
<tr className="admin-table--edit-row">
|
<tr className="admin-table--edit-row">
|
||||||
|
@ -69,6 +75,9 @@ const UserRow = ({
|
||||||
/> : null
|
/> : null
|
||||||
}
|
}
|
||||||
</td>
|
</td>
|
||||||
|
<td className="text-right" style={{width: "300px"}}>
|
||||||
|
<ChangePassRow onEdit={onEdit} onApply={handleUpdatePassword} user={user} />
|
||||||
|
</td>
|
||||||
<DeleteConfirmTableCell onDelete={onDelete} item={user} />
|
<DeleteConfirmTableCell onDelete={onDelete} item={user} />
|
||||||
</tr>
|
</tr>
|
||||||
)
|
)
|
||||||
|
@ -91,6 +100,7 @@ UserRow.propTypes = {
|
||||||
permissions: arrayOf(shape({
|
permissions: arrayOf(shape({
|
||||||
name: string,
|
name: string,
|
||||||
})),
|
})),
|
||||||
|
password: string,
|
||||||
}).isRequired,
|
}).isRequired,
|
||||||
allRoles: arrayOf(shape()),
|
allRoles: arrayOf(shape()),
|
||||||
allPermissions: arrayOf(string),
|
allPermissions: arrayOf(string),
|
||||||
|
@ -103,6 +113,7 @@ UserRow.propTypes = {
|
||||||
onDelete: func.isRequired,
|
onDelete: func.isRequired,
|
||||||
onUpdatePermissions: func,
|
onUpdatePermissions: func,
|
||||||
onUpdateRoles: func,
|
onUpdateRoles: func,
|
||||||
|
onUpdatePassword: func,
|
||||||
}
|
}
|
||||||
|
|
||||||
export default UserRow
|
export default UserRow
|
||||||
|
|
|
@ -18,6 +18,7 @@ const UsersTable = ({
|
||||||
onFilter,
|
onFilter,
|
||||||
onUpdatePermissions,
|
onUpdatePermissions,
|
||||||
onUpdateRoles,
|
onUpdateRoles,
|
||||||
|
onUpdatePassword,
|
||||||
}) => (
|
}) => (
|
||||||
<div className="panel panel-info">
|
<div className="panel panel-info">
|
||||||
<FilterBar type="users" onFilter={onFilter} isEditing={isEditing} onClickCreate={onClickCreate} />
|
<FilterBar type="users" onFilter={onFilter} isEditing={isEditing} onClickCreate={onClickCreate} />
|
||||||
|
@ -49,6 +50,7 @@ const UsersTable = ({
|
||||||
allPermissions={permissions}
|
allPermissions={permissions}
|
||||||
onUpdatePermissions={onUpdatePermissions}
|
onUpdatePermissions={onUpdatePermissions}
|
||||||
onUpdateRoles={onUpdateRoles}
|
onUpdateRoles={onUpdateRoles}
|
||||||
|
onUpdatePassword={onUpdatePassword}
|
||||||
/>) :
|
/>) :
|
||||||
<EmptyRow tableName={'Users'} />
|
<EmptyRow tableName={'Users'} />
|
||||||
}
|
}
|
||||||
|
@ -89,6 +91,7 @@ UsersTable.propTypes = {
|
||||||
hasRoles: bool.isRequired,
|
hasRoles: bool.isRequired,
|
||||||
onUpdatePermissions: func,
|
onUpdatePermissions: func,
|
||||||
onUpdateRoles: func,
|
onUpdateRoles: func,
|
||||||
|
onUpdatePassword: func,
|
||||||
}
|
}
|
||||||
|
|
||||||
export default UsersTable
|
export default UsersTable
|
||||||
|
|
|
@ -19,6 +19,7 @@ import {
|
||||||
updateRolePermissionsAsync,
|
updateRolePermissionsAsync,
|
||||||
updateUserPermissionsAsync,
|
updateUserPermissionsAsync,
|
||||||
updateUserRolesAsync,
|
updateUserRolesAsync,
|
||||||
|
updateUserPasswordAsync,
|
||||||
filterUsers as filterUsersAction,
|
filterUsers as filterUsersAction,
|
||||||
filterRoles as filterRolesAction,
|
filterRoles as filterRolesAction,
|
||||||
} from 'src/admin/actions'
|
} from 'src/admin/actions'
|
||||||
|
@ -54,6 +55,7 @@ class AdminPage extends Component {
|
||||||
this.handleUpdateRolePermissions = ::this.handleUpdateRolePermissions
|
this.handleUpdateRolePermissions = ::this.handleUpdateRolePermissions
|
||||||
this.handleUpdateUserPermissions = ::this.handleUpdateUserPermissions
|
this.handleUpdateUserPermissions = ::this.handleUpdateUserPermissions
|
||||||
this.handleUpdateUserRoles = ::this.handleUpdateUserRoles
|
this.handleUpdateUserRoles = ::this.handleUpdateUserRoles
|
||||||
|
this.handleUpdateUserPassword = ::this.handleUpdateUserPassword
|
||||||
}
|
}
|
||||||
|
|
||||||
componentDidMount() {
|
componentDidMount() {
|
||||||
|
@ -105,7 +107,6 @@ class AdminPage extends Component {
|
||||||
this.props.createRole(this.props.source.links.roles, role)
|
this.props.createRole(this.props.source.links.roles, role)
|
||||||
} else {
|
} else {
|
||||||
// TODO update role
|
// TODO update role
|
||||||
// console.log('update')
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -141,6 +142,10 @@ class AdminPage extends Component {
|
||||||
this.props.updateUserRoles(user, roles)
|
this.props.updateUserRoles(user, roles)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
handleUpdateUserPassword(user, password) {
|
||||||
|
this.props.updateUserPassword(user, password)
|
||||||
|
}
|
||||||
|
|
||||||
render() {
|
render() {
|
||||||
const {users, roles, source, permissions, filterUsers, filterRoles} = this.props
|
const {users, roles, source, permissions, filterUsers, filterRoles} = this.props
|
||||||
const hasRoles = !!source.links.roles
|
const hasRoles = !!source.links.roles
|
||||||
|
@ -186,6 +191,7 @@ class AdminPage extends Component {
|
||||||
onUpdateRolePermissions={this.handleUpdateRolePermissions}
|
onUpdateRolePermissions={this.handleUpdateRolePermissions}
|
||||||
onUpdateUserPermissions={this.handleUpdateUserPermissions}
|
onUpdateUserPermissions={this.handleUpdateUserPermissions}
|
||||||
onUpdateUserRoles={this.handleUpdateUserRoles}
|
onUpdateUserRoles={this.handleUpdateUserRoles}
|
||||||
|
onUpdateUserPassword={this.handleUpdateUserPassword}
|
||||||
/> :
|
/> :
|
||||||
<span>Loading...</span>
|
<span>Loading...</span>
|
||||||
}
|
}
|
||||||
|
@ -233,6 +239,7 @@ AdminPage.propTypes = {
|
||||||
updateRolePermissions: func,
|
updateRolePermissions: func,
|
||||||
updateUserPermissions: func,
|
updateUserPermissions: func,
|
||||||
updateUserRoles: func,
|
updateUserRoles: func,
|
||||||
|
updateUserPassword: func,
|
||||||
notify: func,
|
notify: func,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -262,6 +269,7 @@ const mapDispatchToProps = (dispatch) => ({
|
||||||
updateRolePermissions: bindActionCreators(updateRolePermissionsAsync, dispatch),
|
updateRolePermissions: bindActionCreators(updateRolePermissionsAsync, dispatch),
|
||||||
updateUserPermissions: bindActionCreators(updateUserPermissionsAsync, dispatch),
|
updateUserPermissions: bindActionCreators(updateUserPermissionsAsync, dispatch),
|
||||||
updateUserRoles: bindActionCreators(updateUserRolesAsync, dispatch),
|
updateUserRoles: bindActionCreators(updateUserRolesAsync, dispatch),
|
||||||
|
updateUserPassword: bindActionCreators(updateUserPasswordAsync, dispatch),
|
||||||
notify: bindActionCreators(publishAutoDismissingNotification, dispatch),
|
notify: bindActionCreators(publishAutoDismissingNotification, dispatch),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,9 @@
|
||||||
import {proxy} from 'utils/queryUrlGenerator'
|
import {proxy} from 'utils/queryUrlGenerator'
|
||||||
|
|
||||||
export function getAlerts(proxyLink) {
|
export function getAlerts(source, timeRange) {
|
||||||
return proxy({
|
return proxy({
|
||||||
source: proxyLink,
|
source,
|
||||||
query: "select host, value, level, alertName from alerts order by time desc",
|
query: `SELECT host, value, level, alertName FROM alerts WHERE time >= '${timeRange.lower}' AND time <= '${timeRange.upper}' ORDER BY time desc`,
|
||||||
db: "chronograf",
|
db: "chronograf",
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -27,18 +27,19 @@ const AlertsTable = React.createClass({
|
||||||
},
|
},
|
||||||
|
|
||||||
componentWillReceiveProps(newProps) {
|
componentWillReceiveProps(newProps) {
|
||||||
this.filterAlerts(newProps.alerts, this.state.searchTerm)
|
this.filterAlerts(this.state.searchTerm, newProps.alerts)
|
||||||
},
|
},
|
||||||
|
|
||||||
filterAlerts(searchTerm) {
|
filterAlerts(searchTerm, newAlerts) {
|
||||||
const filteredAlerts = this.props.alerts.filter((h) => {
|
const alerts = newAlerts || this.props.alerts
|
||||||
|
const filteredAlerts = alerts.filter((h) => {
|
||||||
if (h.host === null || h.name === null || h.level === null) {
|
if (h.host === null || h.name === null || h.level === null) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
return h.name.toLowerCase().search((searchTerm).toLowerCase()) !== -1 ||
|
return h.name.toLowerCase().search((searchTerm).toLowerCase()) !== -1 ||
|
||||||
h.host.toLowerCase().search((searchTerm).toLowerCase()) !== -1 ||
|
h.host.toLowerCase().search((searchTerm).toLowerCase()) !== -1 ||
|
||||||
h.level.toLowerCase().search((searchTerm).toLowerCase()) !== -1
|
h.level.toLowerCase().search((searchTerm).toLowerCase()) !== -1
|
||||||
})
|
})
|
||||||
this.setState({searchTerm, filteredAlerts})
|
this.setState({searchTerm, filteredAlerts})
|
||||||
},
|
},
|
||||||
|
|
|
@ -1,31 +1,36 @@
|
||||||
import React, {PropTypes} from 'react'
|
import React, {PropTypes, Component} from 'react'
|
||||||
import AlertsTable from '../components/AlertsTable'
|
|
||||||
import SourceIndicator from '../../shared/components/SourceIndicator'
|
import SourceIndicator from '../../shared/components/SourceIndicator'
|
||||||
|
import AlertsTable from '../components/AlertsTable'
|
||||||
|
import NoKapacitorError from '../../shared/components/NoKapacitorError'
|
||||||
|
import CustomTimeRange from '../../shared/components/CustomTimeRange'
|
||||||
|
|
||||||
import {getAlerts} from '../apis'
|
import {getAlerts} from '../apis'
|
||||||
import AJAX from 'utils/ajax'
|
import AJAX from 'utils/ajax'
|
||||||
|
|
||||||
import _ from 'lodash'
|
import _ from 'lodash'
|
||||||
import NoKapacitorError from '../../shared/components/NoKapacitorError'
|
import moment from 'moment'
|
||||||
|
|
||||||
const AlertsApp = React.createClass({
|
class AlertsApp extends Component {
|
||||||
propTypes: {
|
constructor(props) {
|
||||||
source: PropTypes.shape({
|
super(props)
|
||||||
id: PropTypes.string.isRequired,
|
this.state = {
|
||||||
name: PropTypes.string.isRequired,
|
|
||||||
type: PropTypes.string, // 'influx-enterprise'
|
|
||||||
links: PropTypes.shape({
|
|
||||||
proxy: PropTypes.string.isRequired,
|
|
||||||
}).isRequired,
|
|
||||||
}), // .isRequired,
|
|
||||||
addFlashMessage: PropTypes.func, // .isRequired,
|
|
||||||
},
|
|
||||||
|
|
||||||
getInitialState() {
|
|
||||||
return {
|
|
||||||
loading: true,
|
loading: true,
|
||||||
hasKapacitor: false,
|
hasKapacitor: false,
|
||||||
alerts: [],
|
alerts: [],
|
||||||
|
isTimeOpen: false,
|
||||||
|
timeRange: {
|
||||||
|
upper: moment().format(),
|
||||||
|
lower: moment().subtract(1, 'd').format(),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
},
|
|
||||||
|
this.fetchAlerts = ::this.fetchAlerts
|
||||||
|
this.renderSubComponents = ::this.renderSubComponents
|
||||||
|
this.handleToggleTime = ::this.handleToggleTime
|
||||||
|
this.handleCloseTime = ::this.handleCloseTime
|
||||||
|
this.handleApplyTime = ::this.handleApplyTime
|
||||||
|
}
|
||||||
|
|
||||||
// TODO: show a loading screen until we figure out if there is a kapacitor and fetch the alerts
|
// TODO: show a loading screen until we figure out if there is a kapacitor and fetch the alerts
|
||||||
componentDidMount() {
|
componentDidMount() {
|
||||||
const {source} = this.props
|
const {source} = this.props
|
||||||
|
@ -41,10 +46,16 @@ const AlertsApp = React.createClass({
|
||||||
this.setState({loading: false})
|
this.setState({loading: false})
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
},
|
}
|
||||||
|
|
||||||
|
componentDidUpdate(prevProps, prevState) {
|
||||||
|
if (!_.isEqual(prevState.timeRange, this.state.timeRange)) {
|
||||||
|
this.fetchAlerts()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fetchAlerts() {
|
fetchAlerts() {
|
||||||
getAlerts(this.props.source.links.proxy).then((resp) => {
|
getAlerts(this.props.source.links.proxy, this.state.timeRange).then((resp) => {
|
||||||
const results = []
|
const results = []
|
||||||
|
|
||||||
const alertSeries = _.get(resp, ['data', 'results', '0', 'series'], [])
|
const alertSeries = _.get(resp, ['data', 'results', '0', 'series'], [])
|
||||||
|
@ -70,7 +81,7 @@ const AlertsApp = React.createClass({
|
||||||
})
|
})
|
||||||
this.setState({loading: false, alerts: results})
|
this.setState({loading: false, alerts: results})
|
||||||
})
|
})
|
||||||
},
|
}
|
||||||
|
|
||||||
renderSubComponents() {
|
renderSubComponents() {
|
||||||
let component
|
let component
|
||||||
|
@ -87,13 +98,29 @@ const AlertsApp = React.createClass({
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return component
|
return component
|
||||||
},
|
}
|
||||||
|
|
||||||
|
handleToggleTime() {
|
||||||
|
this.setState({isTimeOpen: !this.state.isTimeOpen})
|
||||||
|
}
|
||||||
|
|
||||||
|
handleCloseTime() {
|
||||||
|
this.setState({isTimeOpen: false})
|
||||||
|
}
|
||||||
|
|
||||||
|
handleApplyTime(timeRange) {
|
||||||
|
this.setState({timeRange})
|
||||||
|
}
|
||||||
|
|
||||||
render() {
|
render() {
|
||||||
const {source} = this.props
|
const {source} = this.props
|
||||||
|
const {loading, timeRange} = this.state
|
||||||
|
|
||||||
|
if (loading || !source) {
|
||||||
|
return <div className="page-spinner" />
|
||||||
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
// I stole this from the Hosts page.
|
|
||||||
// Perhaps we should create an abstraction?
|
|
||||||
<div className="page">
|
<div className="page">
|
||||||
<div className="page-header">
|
<div className="page-header">
|
||||||
<div className="page-header__container">
|
<div className="page-header__container">
|
||||||
|
@ -104,6 +131,13 @@ const AlertsApp = React.createClass({
|
||||||
</div>
|
</div>
|
||||||
<div className="page-header__right">
|
<div className="page-header__right">
|
||||||
<SourceIndicator sourceName={source.name} />
|
<SourceIndicator sourceName={source.name} />
|
||||||
|
<CustomTimeRange
|
||||||
|
isVisible={this.state.isTimeOpen}
|
||||||
|
onToggle={this.handleToggleTime}
|
||||||
|
onClose={this.handleCloseTime}
|
||||||
|
onApplyTimeRange={this.handleApplyTime}
|
||||||
|
timeRange={timeRange}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
@ -111,15 +145,32 @@ const AlertsApp = React.createClass({
|
||||||
<div className="container-fluid">
|
<div className="container-fluid">
|
||||||
<div className="row">
|
<div className="row">
|
||||||
<div className="col-md-12">
|
<div className="col-md-12">
|
||||||
{ this.renderSubComponents() }
|
{this.renderSubComponents()}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
},
|
}
|
||||||
|
}
|
||||||
|
|
||||||
})
|
const {
|
||||||
|
func,
|
||||||
|
shape,
|
||||||
|
string,
|
||||||
|
} = PropTypes
|
||||||
|
|
||||||
|
AlertsApp.propTypes = {
|
||||||
|
source: shape({
|
||||||
|
id: string.isRequired,
|
||||||
|
name: string.isRequired,
|
||||||
|
type: string, // 'influx-enterprise'
|
||||||
|
links: shape({
|
||||||
|
proxy: string.isRequired,
|
||||||
|
}).isRequired,
|
||||||
|
}),
|
||||||
|
addFlashMessage: func,
|
||||||
|
}
|
||||||
|
|
||||||
export default AlertsApp
|
export default AlertsApp
|
||||||
|
|
|
@ -91,7 +91,13 @@ class CellEditorOverlay extends Component {
|
||||||
|
|
||||||
render() {
|
render() {
|
||||||
const {onCancel, autoRefresh, timeRange} = this.props
|
const {onCancel, autoRefresh, timeRange} = this.props
|
||||||
const {activeQueryIndex, cellWorkingType, queriesWorkingDraft} = this.state
|
const {
|
||||||
|
activeQueryIndex,
|
||||||
|
cellWorkingName,
|
||||||
|
cellWorkingType,
|
||||||
|
queriesWorkingDraft,
|
||||||
|
} = this.state
|
||||||
|
|
||||||
const queryActions = {
|
const queryActions = {
|
||||||
addQuery: this.handleAddQuery,
|
addQuery: this.handleAddQuery,
|
||||||
..._.mapValues(queryModifiers, (qm) => this.queryStateReducer(qm)),
|
..._.mapValues(queryModifiers, (qm) => this.queryStateReducer(qm)),
|
||||||
|
@ -106,6 +112,7 @@ class CellEditorOverlay extends Component {
|
||||||
queryConfigs={queriesWorkingDraft}
|
queryConfigs={queriesWorkingDraft}
|
||||||
activeQueryIndex={0}
|
activeQueryIndex={0}
|
||||||
cellType={cellWorkingType}
|
cellType={cellWorkingType}
|
||||||
|
cellName={cellWorkingName}
|
||||||
/>
|
/>
|
||||||
<ResizeBottom>
|
<ResizeBottom>
|
||||||
<OverlayControls
|
<OverlayControls
|
||||||
|
|
|
@ -4,8 +4,8 @@ import {connect} from 'react-redux'
|
||||||
import {bindActionCreators} from 'redux'
|
import {bindActionCreators} from 'redux'
|
||||||
|
|
||||||
import CellEditorOverlay from 'src/dashboards/components/CellEditorOverlay'
|
import CellEditorOverlay from 'src/dashboards/components/CellEditorOverlay'
|
||||||
import Header from 'src/dashboards/components/DashboardHeader'
|
import DashboardHeader from 'src/dashboards/components/DashboardHeader'
|
||||||
import EditHeader from 'src/dashboards/components/DashboardHeaderEdit'
|
import DashboardHeaderEdit from 'src/dashboards/components/DashboardHeaderEdit'
|
||||||
import Dashboard from 'src/dashboards/components/Dashboard'
|
import Dashboard from 'src/dashboards/components/Dashboard'
|
||||||
|
|
||||||
import * as dashboardActionCreators from 'src/dashboards/actions'
|
import * as dashboardActionCreators from 'src/dashboards/actions'
|
||||||
|
@ -205,12 +205,12 @@ const DashboardPage = React.createClass({
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
isEditMode ?
|
isEditMode ?
|
||||||
<EditHeader
|
<DashboardHeaderEdit
|
||||||
dashboard={dashboard}
|
dashboard={dashboard}
|
||||||
onCancel={this.handleCancelEditDashboard}
|
onCancel={this.handleCancelEditDashboard}
|
||||||
onSave={this.handleRenameDashboard}
|
onSave={this.handleRenameDashboard}
|
||||||
/> :
|
/> :
|
||||||
<Header
|
<DashboardHeader
|
||||||
buttonText={dashboard ? dashboard.name : ''}
|
buttonText={dashboard ? dashboard.name : ''}
|
||||||
handleChooseAutoRefresh={handleChooseAutoRefresh}
|
handleChooseAutoRefresh={handleChooseAutoRefresh}
|
||||||
autoRefresh={autoRefresh}
|
autoRefresh={autoRefresh}
|
||||||
|
@ -237,7 +237,7 @@ const DashboardPage = React.createClass({
|
||||||
}) :
|
}) :
|
||||||
null
|
null
|
||||||
}
|
}
|
||||||
</Header>
|
</DashboardHeader>
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
dashboard ?
|
dashboard ?
|
||||||
|
|
|
@ -18,6 +18,7 @@ const {
|
||||||
|
|
||||||
const Visualization = React.createClass({
|
const Visualization = React.createClass({
|
||||||
propTypes: {
|
propTypes: {
|
||||||
|
cellName: string,
|
||||||
cellType: string,
|
cellType: string,
|
||||||
autoRefresh: number.isRequired,
|
autoRefresh: number.isRequired,
|
||||||
timeRange: shape({
|
timeRange: shape({
|
||||||
|
@ -25,7 +26,6 @@ const Visualization = React.createClass({
|
||||||
lower: string,
|
lower: string,
|
||||||
}).isRequired,
|
}).isRequired,
|
||||||
queryConfigs: arrayOf(shape({})).isRequired,
|
queryConfigs: arrayOf(shape({})).isRequired,
|
||||||
name: string,
|
|
||||||
activeQueryIndex: number,
|
activeQueryIndex: number,
|
||||||
height: string,
|
height: string,
|
||||||
heightPixels: number,
|
heightPixels: number,
|
||||||
|
@ -74,7 +74,14 @@ const Visualization = React.createClass({
|
||||||
},
|
},
|
||||||
|
|
||||||
render() {
|
render() {
|
||||||
const {queryConfigs, timeRange, height, heightPixels} = this.props
|
const {
|
||||||
|
queryConfigs,
|
||||||
|
timeRange,
|
||||||
|
height,
|
||||||
|
heightPixels,
|
||||||
|
cellName,
|
||||||
|
} = this.props
|
||||||
|
|
||||||
const {source} = this.context
|
const {source} = this.context
|
||||||
const proxyLink = source.links.proxy
|
const proxyLink = source.links.proxy
|
||||||
|
|
||||||
|
@ -91,7 +98,7 @@ const Visualization = React.createClass({
|
||||||
<div className={classNames("graph", {active: true})} style={{height}}>
|
<div className={classNames("graph", {active: true})} style={{height}}>
|
||||||
<div className="graph-heading">
|
<div className="graph-heading">
|
||||||
<div className="graph-title">
|
<div className="graph-title">
|
||||||
{name || "Graph"}
|
{cellName || "Graph"}
|
||||||
</div>
|
</div>
|
||||||
<div className="graph-actions">
|
<div className="graph-actions">
|
||||||
<ul className="toggle toggle-sm">
|
<ul className="toggle toggle-sm">
|
||||||
|
|
|
@ -1,15 +1,24 @@
|
||||||
import React, {PropTypes} from 'react'
|
import React, {PropTypes} from 'react'
|
||||||
|
import QuestionMarkTooltip from 'src/shared/components/QuestionMarkTooltip'
|
||||||
|
import {HIPCHAT_TOKEN_TIP} from 'src/kapacitor/copy'
|
||||||
|
|
||||||
|
const {
|
||||||
|
bool,
|
||||||
|
func,
|
||||||
|
shape,
|
||||||
|
string,
|
||||||
|
} = PropTypes
|
||||||
|
|
||||||
const HipchatConfig = React.createClass({
|
const HipchatConfig = React.createClass({
|
||||||
propTypes: {
|
propTypes: {
|
||||||
config: PropTypes.shape({
|
config: shape({
|
||||||
options: PropTypes.shape({
|
options: shape({
|
||||||
room: PropTypes.string.isRequired,
|
room: string.isRequired,
|
||||||
token: PropTypes.bool.isRequired,
|
token: bool.isRequired,
|
||||||
url: PropTypes.string.isRequired,
|
url: string.isRequired,
|
||||||
}).isRequired,
|
}).isRequired,
|
||||||
}).isRequired,
|
}).isRequired,
|
||||||
onSave: PropTypes.func.isRequired,
|
onSave: func.isRequired,
|
||||||
},
|
},
|
||||||
|
|
||||||
handleSaveAlert(e) {
|
handleSaveAlert(e) {
|
||||||
|
@ -32,21 +41,48 @@ const HipchatConfig = React.createClass({
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-center no-user-select">HipChat Alert</h4>
|
<h4 className="text-center no-user-select">HipChat Alert</h4>
|
||||||
<br/>
|
<br/>
|
||||||
<p className="no-user-select">Have alerts sent to HipChat.</p>
|
<p className="no-user-select">Send alert messages to HipChat.</p>
|
||||||
<form onSubmit={this.handleSaveAlert}>
|
<form onSubmit={this.handleSaveAlert}>
|
||||||
<div className="form-group col-xs-12">
|
<div className="form-group col-xs-12">
|
||||||
<label htmlFor="url">HipChat URL</label>
|
<label htmlFor="url">HipChat URL</label>
|
||||||
<input className="form-control" id="url" type="text" ref={(r) => this.url = r} defaultValue={url || ''}></input>
|
<input
|
||||||
|
className="form-control"
|
||||||
|
id="url"
|
||||||
|
type="text"
|
||||||
|
placeholder="https://your-subdomain.hipchat.com/v2/room"
|
||||||
|
ref={(r) => this.url = r}
|
||||||
|
defaultValue={url || ''}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="form-group col-xs-12">
|
<div className="form-group col-xs-12">
|
||||||
<label htmlFor="room">Room</label>
|
<label htmlFor="room">Room</label>
|
||||||
<input className="form-control" id="room" type="text" ref={(r) => this.room = r} defaultValue={room || ''}></input>
|
<input
|
||||||
|
className="form-control"
|
||||||
|
id="room"
|
||||||
|
type="text"
|
||||||
|
placeholder="your-hipchat-token"
|
||||||
|
ref={(r) => this.room = r}
|
||||||
|
defaultValue={room || ''}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="form-group col-xs-12">
|
<div className="form-group col-xs-12">
|
||||||
<label htmlFor="token">Token</label>
|
<label htmlFor="token">
|
||||||
<input className="form-control" id="token" type="text" ref={(r) => this.token = r} defaultValue={token || ''}></input>
|
Token
|
||||||
|
<QuestionMarkTooltip
|
||||||
|
tipID="token"
|
||||||
|
tipContent={HIPCHAT_TOKEN_TIP}
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
className="form-control"
|
||||||
|
id="token"
|
||||||
|
type="text"
|
||||||
|
placeholder="your-hipchat-token"
|
||||||
|
ref={(r) => this.token = r}
|
||||||
|
defaultValue={token || ''}
|
||||||
|
/>
|
||||||
<label className="form-helper">Note: a value of <code>true</code> indicates the HipChat token has been set</label>
|
<label className="form-helper">Note: a value of <code>true</code> indicates the HipChat token has been set</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
|
@ -1,18 +1,26 @@
|
||||||
import React, {PropTypes} from 'react'
|
import React, {PropTypes} from 'react'
|
||||||
|
import QuestionMarkTooltip from 'src/shared/components/QuestionMarkTooltip'
|
||||||
|
import {TELEGRAM_CHAT_ID_TIP, TELEGRAM_TOKEN_TIP} from 'src/kapacitor/copy'
|
||||||
|
|
||||||
|
const {
|
||||||
|
bool,
|
||||||
|
func,
|
||||||
|
shape,
|
||||||
|
string,
|
||||||
|
} = PropTypes
|
||||||
|
|
||||||
const TelegramConfig = React.createClass({
|
const TelegramConfig = React.createClass({
|
||||||
propTypes: {
|
propTypes: {
|
||||||
config: PropTypes.shape({
|
config: shape({
|
||||||
options: PropTypes.shape({
|
options: shape({
|
||||||
'chat-id': PropTypes.string.isRequired,
|
'chat-id': string.isRequired,
|
||||||
'disable-notification': PropTypes.bool.isRequired,
|
'disable-notification': bool.isRequired,
|
||||||
'disable-web-page-preview': PropTypes.bool.isRequired,
|
'disable-web-page-preview': bool.isRequired,
|
||||||
'parse-mode': PropTypes.string.isRequired,
|
'parse-mode': string.isRequired,
|
||||||
token: PropTypes.bool.isRequired,
|
token: bool.isRequired,
|
||||||
url: PropTypes.string.isRequired,
|
|
||||||
}).isRequired,
|
}).isRequired,
|
||||||
}).isRequired,
|
}).isRequired,
|
||||||
onSave: PropTypes.func.isRequired,
|
onSave: func.isRequired,
|
||||||
},
|
},
|
||||||
|
|
||||||
handleSaveAlert(e) {
|
handleSaveAlert(e) {
|
||||||
|
@ -32,7 +40,6 @@ const TelegramConfig = React.createClass({
|
||||||
'disable-web-page-preview': this.disableWebPagePreview.checked,
|
'disable-web-page-preview': this.disableWebPagePreview.checked,
|
||||||
'parse-mode': parseMode,
|
'parse-mode': parseMode,
|
||||||
token: this.token.value,
|
token: this.token.value,
|
||||||
url: this.url.value,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
this.props.onSave(properties)
|
this.props.onSave(properties)
|
||||||
|
@ -40,9 +47,9 @@ const TelegramConfig = React.createClass({
|
||||||
|
|
||||||
render() {
|
render() {
|
||||||
const {options} = this.props.config
|
const {options} = this.props.config
|
||||||
const {url, token} = options
|
const {token} = options
|
||||||
const chatID = options['chat-id']
|
const chatID = options['chat-id']
|
||||||
const disableNotification = options['chat-id']
|
const disableNotification = options['disable-notification']
|
||||||
const disableWebPagePreview = options['disable-web-page-preview']
|
const disableWebPagePreview = options['disable-web-page-preview']
|
||||||
const parseMode = options['parse-mode']
|
const parseMode = options['parse-mode']
|
||||||
|
|
||||||
|
@ -50,49 +57,76 @@ const TelegramConfig = React.createClass({
|
||||||
<div>
|
<div>
|
||||||
<h4 className="text-center no-user-select">Telegram Alert</h4>
|
<h4 className="text-center no-user-select">Telegram Alert</h4>
|
||||||
<br/>
|
<br/>
|
||||||
<p className="no-user-select">You can have alerts sent to Telegram by entering info below.</p>
|
<p className="no-user-select">
|
||||||
|
Send alert messages to a <a href="https://docs.influxdata.com/kapacitor/v1.2/guides/event-handler-setup/#telegram-bot" target="_blank">Telegram bot</a>.
|
||||||
|
</p>
|
||||||
<form onSubmit={this.handleSaveAlert}>
|
<form onSubmit={this.handleSaveAlert}>
|
||||||
<div className="form-group col-xs-12">
|
<div className="form-group col-xs-12">
|
||||||
<label htmlFor="url">Telegram URL</label>
|
<label htmlFor="token">
|
||||||
<input className="form-control" id="url" type="text" ref={(r) => this.url = r} defaultValue={url || ''}></input>
|
Token
|
||||||
</div>
|
<QuestionMarkTooltip
|
||||||
|
tipID="token"
|
||||||
<div className="form-group col-xs-12">
|
tipContent={TELEGRAM_TOKEN_TIP}
|
||||||
<label htmlFor="token">Token</label>
|
/>
|
||||||
<input className="form-control" id="token" type="text" ref={(r) => this.token = r} defaultValue={token || ''}></input>
|
</label>
|
||||||
|
<input
|
||||||
|
className="form-control"
|
||||||
|
id="token"
|
||||||
|
type="text"
|
||||||
|
placeholder="your-telegram-token"
|
||||||
|
ref={(r) => this.token = r}
|
||||||
|
defaultValue={token || ''}>
|
||||||
|
</input>
|
||||||
<label className="form-helper">Note: a value of <code>true</code> indicates the Telegram token has been set</label>
|
<label className="form-helper">Note: a value of <code>true</code> indicates the Telegram token has been set</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="form-group col-xs-12">
|
<div className="form-group col-xs-12">
|
||||||
<label htmlFor="chat-id">Chat ID</label>
|
<label htmlFor="chat-id">
|
||||||
<input className="form-control" id="chat-id" type="text" ref={(r) => this.chatID = r} defaultValue={chatID || ''}></input>
|
Chat ID
|
||||||
|
<QuestionMarkTooltip
|
||||||
|
tipID="chat-id"
|
||||||
|
tipContent={TELEGRAM_CHAT_ID_TIP}
|
||||||
|
/>
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
className="form-control"
|
||||||
|
id="chat-id"
|
||||||
|
type="text"
|
||||||
|
placeholder="your-telegram-chat-id"
|
||||||
|
ref={(r) => this.chatID = r}
|
||||||
|
defaultValue={chatID || ''}>
|
||||||
|
</input>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="form-group col-xs-12">
|
<div className="form-group col-xs-12">
|
||||||
<label htmlFor="parseMode">Parse Mode</label>
|
<label htmlFor="parseMode">Select the alert message format</label>
|
||||||
<div className="form-control-static">
|
<div className="form-control-static">
|
||||||
|
<div className="radio">
|
||||||
|
<input id="parseModeMarkdown" type="radio" name="parseMode" value="markdown" defaultChecked={parseMode !== 'HTML'} ref={(r) => this.parseModeMarkdown = r} />
|
||||||
|
<label htmlFor="parseModeMarkdown">Markdown</label>
|
||||||
|
</div>
|
||||||
<div className="radio">
|
<div className="radio">
|
||||||
<input id="parseModeHTML" type="radio" name="parseMode" value="html" defaultChecked={parseMode === 'HTML'} ref={(r) => this.parseModeHTML = r} />
|
<input id="parseModeHTML" type="radio" name="parseMode" value="html" defaultChecked={parseMode === 'HTML'} ref={(r) => this.parseModeHTML = r} />
|
||||||
<label htmlFor="parseModeHTML">HTML</label>
|
<label htmlFor="parseModeHTML">HTML</label>
|
||||||
</div>
|
</div>
|
||||||
<div className="radio">
|
|
||||||
<input id="parseModeMarkdown" type="radio" name="parseMode" value="markdown" defaultChecked={parseMode === 'Markdown'} ref={(r) => this.parseModeMarkdown = r} />
|
|
||||||
<label htmlFor="parseModeMarkdown">Markdown</label>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="form-group col-xs-12">
|
<div className="form-group col-xs-12">
|
||||||
<div className="form-control-static">
|
<div className="form-control-static">
|
||||||
<input id="disableWebPagePreview" type="checkbox" defaultChecked={disableWebPagePreview} ref={(r) => this.disableWebPagePreview = r} />
|
<input id="disableWebPagePreview" type="checkbox" defaultChecked={disableWebPagePreview} ref={(r) => this.disableWebPagePreview = r} />
|
||||||
<label htmlFor="disableWebPagePreview">Disable Web Page Preview</label>
|
<label htmlFor="disableWebPagePreview">
|
||||||
|
Disable <a href="https://telegram.org/blog/link-preview" target="_blank">link previews</a> in alert messages.
|
||||||
|
</label>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="form-group col-xs-12">
|
<div className="form-group col-xs-12">
|
||||||
<div className="form-control-static">
|
<div className="form-control-static">
|
||||||
<input id="disableNotification" type="checkbox" defaultChecked={disableNotification} ref={(r) => this.disableNotification = r} />
|
<input id="disableNotification" type="checkbox" defaultChecked={disableNotification} ref={(r) => this.disableNotification = r} />
|
||||||
<label htmlFor="disableNotification">Disable Notification</label>
|
<label htmlFor="disableNotification">
|
||||||
|
Disable notifications on iOS devices and disable sounds on Android devices. Android users continue to receive notifications.
|
||||||
|
</label>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,8 @@
|
||||||
|
const telegramChatIDLink = 'https://docs.influxdata.com/kapacitor/latest/guides/event-handler-setup/#telegram-chat-id'
|
||||||
|
export const TELEGRAM_CHAT_ID_TIP = `<p>Need help finding your chat id? Check out <a target='_blank' href='${telegramChatIDLink}'>these steps</a>.</p>`
|
||||||
|
|
||||||
|
const telegramTokenLink = 'https://docs.influxdata.com/kapacitor/latest/guides/event-handler-setup/#telegram-api-access-token'
|
||||||
|
export const TELEGRAM_TOKEN_TIP = `<p>Need help finding your token? Check out <a target='_blank' href='${telegramTokenLink}'>these steps</a>.</p>`
|
||||||
|
|
||||||
|
const hipchatTokenLink = 'https://docs.influxdata.com/kapacitor/latest/guides/event-handler-setup/#hipchat-api-access-token'
|
||||||
|
export const HIPCHAT_TOKEN_TIP = `<p>Need help creating a token? Check out <a href='${hipchatTokenLink}' target='_blank'>these steps</a>.</p>`
|
|
@ -0,0 +1,112 @@
|
||||||
|
import React, {PropTypes, Component} from 'react'
|
||||||
|
import rome from 'rome'
|
||||||
|
import moment from 'moment'
|
||||||
|
import classNames from 'classnames'
|
||||||
|
import OnClickOutside from 'react-onclickoutside'
|
||||||
|
|
||||||
|
class CustomTimeRange extends Component {
|
||||||
|
constructor(props) {
|
||||||
|
super(props)
|
||||||
|
|
||||||
|
this.handleClick = ::this.handleClick
|
||||||
|
}
|
||||||
|
|
||||||
|
handleClickOutside() {
|
||||||
|
this.props.onClose()
|
||||||
|
}
|
||||||
|
|
||||||
|
componentDidMount() {
|
||||||
|
const {timeRange} = this.props
|
||||||
|
|
||||||
|
const lower = rome(this.lower, {
|
||||||
|
initialValue: this._formatTimeRange(timeRange.lower),
|
||||||
|
})
|
||||||
|
const upper = rome(this.upper, {
|
||||||
|
initialValue: this._formatTimeRange(timeRange.upper),
|
||||||
|
})
|
||||||
|
|
||||||
|
this.lowerCal = lower
|
||||||
|
this.upperCal = upper
|
||||||
|
}
|
||||||
|
|
||||||
|
// If there is an upper or lower time range set, set the corresponding calendar's value.
|
||||||
|
componentWillReceiveProps(nextProps) {
|
||||||
|
const {lower, upper} = nextProps.timeRange
|
||||||
|
if (lower) {
|
||||||
|
this.lowerCal.setValue(this._formatTimeRange(lower))
|
||||||
|
}
|
||||||
|
|
||||||
|
if (upper) {
|
||||||
|
this.upperCal.setValue(this._formatTimeRange(upper))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
const {isVisible, onToggle, timeRange: {upper, lower}} = this.props
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={classNames("custom-time-range", {show: isVisible})} style={{display: 'flex'}}>
|
||||||
|
<button className="btn btn-sm btn-info custom-time-range--btn" onClick={onToggle}>
|
||||||
|
<span className="icon clock"></span>
|
||||||
|
{`${moment(lower).format('MMM Do HH:mm')} — ${moment(upper).format('MMM Do HH:mm')}`}
|
||||||
|
<span className="caret"></span>
|
||||||
|
</button>
|
||||||
|
<div className="custom-time--container">
|
||||||
|
<div className="custom-time--dates">
|
||||||
|
<div className="custom-time--lower" ref={(r) => this.lower = r} />
|
||||||
|
<div className="custom-time--upper" ref={(r) => this.upper = r} />
|
||||||
|
</div>
|
||||||
|
<div className="custom-time--apply btn btn-sm btn-primary" onClick={this.handleClick}>Apply</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
handleClick() {
|
||||||
|
const lower = this.lowerCal.getDate().toISOString()
|
||||||
|
const upper = this.upperCal.getDate().toISOString()
|
||||||
|
|
||||||
|
this.props.onApplyTimeRange({lower, upper})
|
||||||
|
this.props.onClose()
|
||||||
|
}
|
||||||
|
/*
|
||||||
|
* Upper and lower time ranges are passed in with single quotes as part of
|
||||||
|
* the string literal, i.e. "'2015-09-23T18:00:00.000Z'". Remove them
|
||||||
|
* before passing the string to be parsed.
|
||||||
|
*/
|
||||||
|
_formatTimeRange(timeRange) {
|
||||||
|
if (!timeRange) {
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the given time range is relative, create a fixed timestamp based on its value
|
||||||
|
if (timeRange.match(/^now/)) {
|
||||||
|
const match = timeRange.match(/\d+\w/)[0]
|
||||||
|
const duration = match.slice(0, match.length - 1)
|
||||||
|
const unitOfTime = match[match.length - 1]
|
||||||
|
return moment().subtract(duration, unitOfTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
return moment(timeRange.replace(/\'/g, '')).format('YYYY-MM-DD HH:mm')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const {
|
||||||
|
bool,
|
||||||
|
func,
|
||||||
|
shape,
|
||||||
|
string,
|
||||||
|
} = PropTypes
|
||||||
|
|
||||||
|
CustomTimeRange.propTypes = {
|
||||||
|
onApplyTimeRange: func.isRequired,
|
||||||
|
timeRange: shape({
|
||||||
|
lower: string.isRequired,
|
||||||
|
upper: string.isRequired,
|
||||||
|
}).isRequired,
|
||||||
|
isVisible: bool.isRequired,
|
||||||
|
onToggle: func.isRequired,
|
||||||
|
onClose: func.isRequired,
|
||||||
|
}
|
||||||
|
|
||||||
|
export default OnClickOutside(CustomTimeRange)
|
|
@ -4,7 +4,7 @@ import OnClickOutside from 'shared/components/OnClickOutside'
|
||||||
import ConfirmButtons from 'shared/components/ConfirmButtons'
|
import ConfirmButtons from 'shared/components/ConfirmButtons'
|
||||||
|
|
||||||
const DeleteButton = ({onClickDelete}) => (
|
const DeleteButton = ({onClickDelete}) => (
|
||||||
<button className="btn btn-xs btn-danger admin-table--delete" onClick={onClickDelete}>
|
<button className="btn btn-xs btn-danger admin-table--hidden" onClick={onClickDelete}>
|
||||||
Delete
|
Delete
|
||||||
</button>
|
</button>
|
||||||
)
|
)
|
||||||
|
|
|
@ -0,0 +1,30 @@
|
||||||
|
import React, {PropTypes} from 'react'
|
||||||
|
import ReactTooltip from 'react-tooltip'
|
||||||
|
|
||||||
|
const QuestionMarkTooltip = ({
|
||||||
|
tipID,
|
||||||
|
tipContent,
|
||||||
|
}) => (
|
||||||
|
<div style={{display: "inline-block"}}>
|
||||||
|
<div data-for={`${tipID}-tooltip`} data-tip={tipContent} style={{margin: "0 5px"}}>?</div>
|
||||||
|
<ReactTooltip
|
||||||
|
id={`${tipID}-tooltip`}
|
||||||
|
effect="solid"
|
||||||
|
html={true}
|
||||||
|
offset={{top: 2}}
|
||||||
|
place="bottom"
|
||||||
|
class="influx-tooltip__hover place-bottom"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
|
||||||
|
const {
|
||||||
|
string,
|
||||||
|
} = PropTypes
|
||||||
|
|
||||||
|
QuestionMarkTooltip.propTypes = {
|
||||||
|
tipID: string.isRequired,
|
||||||
|
tipContent: string.isRequired,
|
||||||
|
}
|
||||||
|
|
||||||
|
export default QuestionMarkTooltip
|
|
@ -47,7 +47,7 @@ const NavBlock = React.createClass({
|
||||||
const {location, className, wrapperClassName} = this.props
|
const {location, className, wrapperClassName} = this.props
|
||||||
|
|
||||||
const isActive = React.Children.toArray(this.props.children).find((child) => {
|
const isActive = React.Children.toArray(this.props.children).find((child) => {
|
||||||
return child.type === NavListItem && location.startsWith(child.props.link)
|
return location.startsWith(child.props.link)
|
||||||
})
|
})
|
||||||
|
|
||||||
const children = React.Children.map((this.props.children), (child) => {
|
const children = React.Children.map((this.props.children), (child) => {
|
||||||
|
|
|
@ -40,6 +40,7 @@
|
||||||
@import 'components/resizer';
|
@import 'components/resizer';
|
||||||
@import 'components/source-indicator';
|
@import 'components/source-indicator';
|
||||||
@import 'components/confirm-buttons';
|
@import 'components/confirm-buttons';
|
||||||
|
@import 'components/custom-time-range';
|
||||||
|
|
||||||
// Pages
|
// Pages
|
||||||
@import 'pages/alerts';
|
@import 'pages/alerts';
|
||||||
|
|
|
@ -0,0 +1,254 @@
|
||||||
|
/*
|
||||||
|
Custom Time Range Dropdown
|
||||||
|
------------------------------------------------------
|
||||||
|
*/
|
||||||
|
|
||||||
|
.custom-time-range {
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
.btn.btn-sm.btn-info.custom-time-range--btn {
|
||||||
|
padding: 0 30px 0 9px !important;
|
||||||
|
|
||||||
|
.caret {
|
||||||
|
position: absolute;
|
||||||
|
right: 9px;
|
||||||
|
top: calc(50% + 1px);
|
||||||
|
transform: translateY(-50%);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.custom-time--container {
|
||||||
|
display: none;
|
||||||
|
position: absolute;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
top: 35px;
|
||||||
|
right: 0;
|
||||||
|
background: $g5-pepper;
|
||||||
|
border-radius: $radius;
|
||||||
|
padding: 8px;
|
||||||
|
z-index: 1000;
|
||||||
|
box-shadow: 0 2px 5px 0.6px rgba(15, 14, 21, 0.2);
|
||||||
|
}
|
||||||
|
.custom-time--dates {
|
||||||
|
display: flex;
|
||||||
|
align-items: flex-start;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
.custom-time--lower {
|
||||||
|
margin-right: 4px;
|
||||||
|
}
|
||||||
|
.custom-time--upper {
|
||||||
|
margin-left: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
$custom-time-arrow: 28px;
|
||||||
|
$rd-cell-size: 30px;
|
||||||
|
|
||||||
|
.rd-container {
|
||||||
|
display: flex !important;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
.rd-date {
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
.rd-back,
|
||||||
|
.rd-next,
|
||||||
|
.rd-month-label {
|
||||||
|
position: absolute;
|
||||||
|
top: 0;
|
||||||
|
height: $custom-time-arrow;
|
||||||
|
line-height: $custom-time-arrow;
|
||||||
|
}
|
||||||
|
.rd-back,
|
||||||
|
.rd-next {
|
||||||
|
outline: none;
|
||||||
|
width: $custom-time-arrow;
|
||||||
|
border: 0;
|
||||||
|
background-color: transparent;
|
||||||
|
border-radius: 50%;
|
||||||
|
color: $g15-platinum;
|
||||||
|
transition:
|
||||||
|
background-color 0.25s ease,
|
||||||
|
color 0.25s ease;
|
||||||
|
|
||||||
|
&:after {
|
||||||
|
font-family: 'icomoon' !important;
|
||||||
|
font-style: normal;
|
||||||
|
font-weight: normal;
|
||||||
|
font-variant: normal;
|
||||||
|
color: inherit;
|
||||||
|
position: absolute;
|
||||||
|
top: 50%;
|
||||||
|
transform: translate(-50%,-50%);
|
||||||
|
font-size: 16px;
|
||||||
|
}
|
||||||
|
&:hover {
|
||||||
|
background-color: $g6-smoke;
|
||||||
|
color: $g20-white;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.rd-back {
|
||||||
|
left: 0;
|
||||||
|
&:after {
|
||||||
|
left: calc(50% - 1px);
|
||||||
|
content: "\e90c";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.rd-next {
|
||||||
|
left: calc(100% - #{$custom-time-arrow});
|
||||||
|
&:after {
|
||||||
|
left: calc(50% + 1px);
|
||||||
|
content: "\e911";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.rd-month-label {
|
||||||
|
font-weight: 600;
|
||||||
|
color: $g15-platinum;
|
||||||
|
left: $custom-time-arrow;
|
||||||
|
text-align: center;
|
||||||
|
@include no-user-select();
|
||||||
|
width: calc(100% - #{($custom-time-arrow * 2)});
|
||||||
|
}
|
||||||
|
.rd-days {
|
||||||
|
margin-top: ($custom-time-arrow + 8px);
|
||||||
|
background-color: transparent;
|
||||||
|
border-radius: $radius-small;
|
||||||
|
|
||||||
|
/* Cancel out default table styles */
|
||||||
|
tr:hover {
|
||||||
|
background-color: transparent !important;
|
||||||
|
color: inherit !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
thead.rd-days-head th.rd-day-head,
|
||||||
|
tbody.rd-days-body td.rd-day-body {
|
||||||
|
padding: 0 !important;
|
||||||
|
min-height: $rd-cell-size !important;
|
||||||
|
height: $rd-cell-size !important;
|
||||||
|
max-height: $rd-cell-size !important;
|
||||||
|
min-width: $rd-cell-size !important;
|
||||||
|
width: $rd-cell-size !important;
|
||||||
|
max-width: $rd-cell-size !important;
|
||||||
|
vertical-align: middle;
|
||||||
|
text-align: center;
|
||||||
|
border: 2px solid $g5-pepper !important;
|
||||||
|
}
|
||||||
|
thead.rd-days-head th.rd-day-head {
|
||||||
|
color: $g15-platinum !important;
|
||||||
|
background-color: $g5-pepper !important;
|
||||||
|
}
|
||||||
|
tbody.rd-days-body td.rd-day-body {
|
||||||
|
@include no-user-select();
|
||||||
|
letter-spacing: -1px;
|
||||||
|
font-family: $code-font;
|
||||||
|
transition:
|
||||||
|
background-color 0.25s ease,
|
||||||
|
color 0.25s ease;
|
||||||
|
color: $g13-mist !important;
|
||||||
|
background-color: $g3-castle;
|
||||||
|
border-radius: 5px;
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
cursor: $cc-pointer;
|
||||||
|
color: $g20-white !important;
|
||||||
|
background-color: $g6-smoke;
|
||||||
|
}
|
||||||
|
&.rd-day-next-month,
|
||||||
|
&.rd-day-prev-month {
|
||||||
|
cursor: $cc-default;
|
||||||
|
color: $g8-storm !important;
|
||||||
|
background-color: $g5-pepper !important;
|
||||||
|
}
|
||||||
|
&.rd-day-selected {
|
||||||
|
background-color: $c-pool !important;
|
||||||
|
color: $g20-white !important;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.rd-time {
|
||||||
|
margin: 0 2px;
|
||||||
|
width: calc(100% - 4px);
|
||||||
|
height: 30px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
.rd-time-selected {
|
||||||
|
@include no-user-select();
|
||||||
|
height: 28px;
|
||||||
|
line-height: 28px;
|
||||||
|
background-color: $g3-castle;
|
||||||
|
border-radius: $radius-small;
|
||||||
|
width: 100%;
|
||||||
|
letter-spacing: -1px;
|
||||||
|
font-family: $code-font;
|
||||||
|
color: $g13-mist;
|
||||||
|
display: inline-block;
|
||||||
|
transition:
|
||||||
|
color 0.25s ease,
|
||||||
|
background-color 0.25s ease;
|
||||||
|
text-align: center;
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
color: $g20-white;
|
||||||
|
background-color: $g6-smoke;
|
||||||
|
cursor: $cc-pointer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.rd-time-list {
|
||||||
|
position: absolute;
|
||||||
|
top: 50%;
|
||||||
|
left: 50%;
|
||||||
|
width: 120px;
|
||||||
|
height: 200px;
|
||||||
|
transform: translate(-50%,-50%);
|
||||||
|
overflow: auto;
|
||||||
|
overflow-x: hidden;
|
||||||
|
overflow-y: scroll;
|
||||||
|
@include custom-scrollbar-round($c-pool, $c-laser);
|
||||||
|
@include gradient-h($c-ocean, $c-pool);
|
||||||
|
border-radius: $radius;
|
||||||
|
box-shadow: 0 2px 5px 0.6px rgba(15, 14, 21, 0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rd-time-option {
|
||||||
|
width: 100%;
|
||||||
|
height: 24px;
|
||||||
|
line-height: 24px;
|
||||||
|
padding-left: $scrollbar-width;
|
||||||
|
text-align: center;
|
||||||
|
@include no-user-select();
|
||||||
|
font-family: $code-font;
|
||||||
|
color: $c-yeti;
|
||||||
|
letter-spacing: -1px;
|
||||||
|
|
||||||
|
&:hover,
|
||||||
|
&:active,
|
||||||
|
&:focus {
|
||||||
|
color: $g20-white;
|
||||||
|
cursor: $cc-pointer;
|
||||||
|
outline: none;
|
||||||
|
@include gradient-h($c-laser, $c-pool);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.custom-time--apply {
|
||||||
|
margin-top: 8px;
|
||||||
|
width: 120px;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Show State */
|
||||||
|
.custom-time-range.show {
|
||||||
|
.custom-time--container {
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
.custom-time-range--btn {
|
||||||
|
color: $g20-white !important;
|
||||||
|
background-color: $g6-smoke;
|
||||||
|
}
|
||||||
|
}
|
|
@ -86,3 +86,12 @@ $tooltip-code-color: $c-potassium;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.influx-tooltip__hover {
|
||||||
|
@extend .influx-tooltip;
|
||||||
|
pointer-events: auto!important;
|
||||||
|
&:hover {
|
||||||
|
visibility: visible!important;
|
||||||
|
opacity: 1!important;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -41,6 +41,35 @@ $scrollbar-offset: 3px;
|
||||||
@mixin custom-scrollbar($trackColor, $handleColor) {
|
@mixin custom-scrollbar($trackColor, $handleColor) {
|
||||||
&::-webkit-scrollbar {
|
&::-webkit-scrollbar {
|
||||||
width: $scrollbar-width;
|
width: $scrollbar-width;
|
||||||
|
|
||||||
|
&-button {
|
||||||
|
background-color: $trackColor;
|
||||||
|
}
|
||||||
|
&-track {
|
||||||
|
background-color: $trackColor;
|
||||||
|
}
|
||||||
|
&-track-piece {
|
||||||
|
background-color: $trackColor;
|
||||||
|
border: $scrollbar-offset solid $trackColor;
|
||||||
|
border-radius: ($scrollbar-width / 2);
|
||||||
|
}
|
||||||
|
&-thumb {
|
||||||
|
background-color: $handleColor;
|
||||||
|
border: $scrollbar-offset solid $trackColor;
|
||||||
|
border-radius: ($scrollbar-width / 2);
|
||||||
|
}
|
||||||
|
&-corner {
|
||||||
|
background-color: $trackColor;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
&::-webkit-resizer {
|
||||||
|
background-color: $trackColor;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@mixin custom-scrollbar-round($trackColor, $handleColor) {
|
||||||
|
&::-webkit-scrollbar {
|
||||||
|
width: $scrollbar-width;
|
||||||
|
border-top-right-radius: $radius;
|
||||||
border-bottom-right-radius: $radius;
|
border-bottom-right-radius: $radius;
|
||||||
|
|
||||||
&-button {
|
&-button {
|
||||||
|
@ -48,6 +77,7 @@ $scrollbar-offset: 3px;
|
||||||
}
|
}
|
||||||
&-track {
|
&-track {
|
||||||
background-color: $trackColor;
|
background-color: $trackColor;
|
||||||
|
border-top-right-radius: $radius;
|
||||||
border-bottom-right-radius: $radius;
|
border-bottom-right-radius: $radius;
|
||||||
}
|
}
|
||||||
&-track-piece {
|
&-track-piece {
|
||||||
|
|
|
@ -67,7 +67,7 @@
|
||||||
width: 100%;
|
width: 100%;
|
||||||
min-width: 150px;
|
min-width: 150px;
|
||||||
}
|
}
|
||||||
.admin-table--delete {
|
.admin-table--hidden {
|
||||||
visibility: hidden;
|
visibility: hidden;
|
||||||
}
|
}
|
||||||
.dropdown-toggle {
|
.dropdown-toggle {
|
||||||
|
@ -83,7 +83,7 @@
|
||||||
}
|
}
|
||||||
.open .dropdown-toggle .multi-select-dropdown__label {left: 9px;}
|
.open .dropdown-toggle .multi-select-dropdown__label {left: 9px;}
|
||||||
tbody tr:hover {
|
tbody tr:hover {
|
||||||
.admin-table--delete {
|
.admin-table--hidden {
|
||||||
visibility: visible;
|
visibility: visible;
|
||||||
}
|
}
|
||||||
.dropdown-toggle {
|
.dropdown-toggle {
|
||||||
|
|
|
@ -267,7 +267,7 @@ input {
|
||||||
padding: 0 !important;
|
padding: 0 !important;
|
||||||
max-height: 290px;
|
max-height: 290px;
|
||||||
overflow: auto;
|
overflow: auto;
|
||||||
@include custom-scrollbar($c-pool, $c-laser);
|
@include custom-scrollbar-round($c-pool, $c-laser);
|
||||||
@include gradient-h($c-ocean, $c-pool);
|
@include gradient-h($c-ocean, $c-pool);
|
||||||
box-shadow: 0 2px 5px 0.6px fade-out($g0-obsidian, 0.8);
|
box-shadow: 0 2px 5px 0.6px fade-out($g0-obsidian, 0.8);
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
import _ from 'lodash'
|
import _ from 'lodash'
|
||||||
import {STROKE_WIDTH} from 'src/shared/constants'
|
|
||||||
import {map, reduce, forEach, concat, clone} from 'fast.js'
|
import {map, reduce, forEach, concat, clone} from 'fast.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -102,15 +101,11 @@ export default function timeSeriesToDygraph(raw = [], activeQueryIndex, isInData
|
||||||
}
|
}
|
||||||
const sortedTimeSeries = _.sortBy(timeSeries, 'time')
|
const sortedTimeSeries = _.sortBy(timeSeries, 'time')
|
||||||
|
|
||||||
const {light, heavy} = STROKE_WIDTH
|
|
||||||
|
|
||||||
const dygraphSeries = reduce(sortedLabels, (acc, {label, responseIndex}) => {
|
const dygraphSeries = reduce(sortedLabels, (acc, {label, responseIndex}) => {
|
||||||
acc[label] = {
|
|
||||||
strokeWidth: responseIndex === activeQueryIndex ? heavy : light,
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!isInDataExplorer) {
|
if (!isInDataExplorer) {
|
||||||
acc[label].axis = responseIndex === 0 ? 'y' : 'y2'
|
acc[label] = {
|
||||||
|
axis: responseIndex === 0 ? 'y' : 'y2',
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return acc
|
return acc
|
||||||
|
|
52
ui/yarn.lock
52
ui/yarn.lock
|
@ -352,6 +352,10 @@ asynckit@^0.4.0:
|
||||||
version "0.4.0"
|
version "0.4.0"
|
||||||
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
|
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
|
||||||
|
|
||||||
|
atoa@1.0.0:
|
||||||
|
version "1.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/atoa/-/atoa-1.0.0.tgz#0cc0e91a480e738f923ebc103676471779b34a49"
|
||||||
|
|
||||||
atob@~1.1.0:
|
atob@~1.1.0:
|
||||||
version "1.1.3"
|
version "1.1.3"
|
||||||
resolved "https://registry.yarnpkg.com/atob/-/atob-1.1.3.tgz#95f13629b12c3a51a5d215abdce2aa9f32f80773"
|
resolved "https://registry.yarnpkg.com/atob/-/atob-1.1.3.tgz#95f13629b12c3a51a5d215abdce2aa9f32f80773"
|
||||||
|
@ -1540,6 +1544,14 @@ builtin-status-codes@^2.0.0:
|
||||||
version "2.0.0"
|
version "2.0.0"
|
||||||
resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-2.0.0.tgz#6f22003baacf003ccd287afe6872151fddc58579"
|
resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-2.0.0.tgz#6f22003baacf003ccd287afe6872151fddc58579"
|
||||||
|
|
||||||
|
bullseye@1.4.6:
|
||||||
|
version "1.4.6"
|
||||||
|
resolved "https://registry.yarnpkg.com/bullseye/-/bullseye-1.4.6.tgz#b73f606f7b4273be80ac65acd75295d62606fe24"
|
||||||
|
dependencies:
|
||||||
|
crossvent "^1.3.1"
|
||||||
|
seleccion "2.0.0"
|
||||||
|
sell "^1.0.0"
|
||||||
|
|
||||||
bytes@2.3.0:
|
bytes@2.3.0:
|
||||||
version "2.3.0"
|
version "2.3.0"
|
||||||
resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.3.0.tgz#d5b680a165b6201739acb611542aabc2d8ceb070"
|
resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.3.0.tgz#d5b680a165b6201739acb611542aabc2d8ceb070"
|
||||||
|
@ -1964,6 +1976,13 @@ content-type@~1.0.2:
|
||||||
version "1.0.2"
|
version "1.0.2"
|
||||||
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.2.tgz#b7d113aee7a8dd27bd21133c4dc2529df1721eed"
|
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.2.tgz#b7d113aee7a8dd27bd21133c4dc2529df1721eed"
|
||||||
|
|
||||||
|
contra@1.9.1:
|
||||||
|
version "1.9.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/contra/-/contra-1.9.1.tgz#60e498274b3d2d332896d60f82900aefa2ecac8c"
|
||||||
|
dependencies:
|
||||||
|
atoa "1.0.0"
|
||||||
|
ticky "1.0.0"
|
||||||
|
|
||||||
convert-source-map@^0.3.3:
|
convert-source-map@^0.3.3:
|
||||||
version "0.3.5"
|
version "0.3.5"
|
||||||
resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-0.3.5.tgz#f1d802950af7dd2631a1febe0596550c86ab3190"
|
resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-0.3.5.tgz#f1d802950af7dd2631a1febe0596550c86ab3190"
|
||||||
|
@ -2018,6 +2037,12 @@ cross-spawn@^5.0.0:
|
||||||
shebang-command "^1.2.0"
|
shebang-command "^1.2.0"
|
||||||
which "^1.2.9"
|
which "^1.2.9"
|
||||||
|
|
||||||
|
crossvent@1.5.0, crossvent@^1.3.1:
|
||||||
|
version "1.5.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/crossvent/-/crossvent-1.5.0.tgz#3779c1242699e19417f0414e61b144753a52fd6d"
|
||||||
|
dependencies:
|
||||||
|
custom-event "1.0.0"
|
||||||
|
|
||||||
cryptiles@2.x.x:
|
cryptiles@2.x.x:
|
||||||
version "2.0.5"
|
version "2.0.5"
|
||||||
resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
|
resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8"
|
||||||
|
@ -2200,6 +2225,10 @@ currently-unhandled@^0.4.1:
|
||||||
dependencies:
|
dependencies:
|
||||||
array-find-index "^1.0.1"
|
array-find-index "^1.0.1"
|
||||||
|
|
||||||
|
custom-event@1.0.0:
|
||||||
|
version "1.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.0.tgz#2e4628be19dc4b214b5c02630c5971e811618062"
|
||||||
|
|
||||||
custom-event@~1.0.0:
|
custom-event@~1.0.0:
|
||||||
version "1.0.1"
|
version "1.0.1"
|
||||||
resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425"
|
resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425"
|
||||||
|
@ -4596,7 +4625,7 @@ mocha@^2.4.5:
|
||||||
supports-color "1.2.0"
|
supports-color "1.2.0"
|
||||||
to-iso-string "0.0.2"
|
to-iso-string "0.0.2"
|
||||||
|
|
||||||
moment@^2.13.0:
|
moment@^2.13.0, moment@^2.8.2:
|
||||||
version "2.17.1"
|
version "2.17.1"
|
||||||
resolved "https://registry.yarnpkg.com/moment/-/moment-2.17.1.tgz#fed9506063f36b10f066c8b59a144d7faebe1d82"
|
resolved "https://registry.yarnpkg.com/moment/-/moment-2.17.1.tgz#fed9506063f36b10f066c8b59a144d7faebe1d82"
|
||||||
|
|
||||||
|
@ -6203,6 +6232,15 @@ ripemd160@0.2.0:
|
||||||
version "0.2.0"
|
version "0.2.0"
|
||||||
resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-0.2.0.tgz#2bf198bde167cacfa51c0a928e84b68bbe171fce"
|
resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-0.2.0.tgz#2bf198bde167cacfa51c0a928e84b68bbe171fce"
|
||||||
|
|
||||||
|
rome@^2.1.22:
|
||||||
|
version "2.1.22"
|
||||||
|
resolved "https://registry.yarnpkg.com/rome/-/rome-2.1.22.tgz#4bf25318cc0522ae92dd090472ce7a6e0b1f5e02"
|
||||||
|
dependencies:
|
||||||
|
bullseye "1.4.6"
|
||||||
|
contra "1.9.1"
|
||||||
|
crossvent "1.5.0"
|
||||||
|
moment "^2.8.2"
|
||||||
|
|
||||||
run-async@^0.1.0:
|
run-async@^0.1.0:
|
||||||
version "0.1.0"
|
version "0.1.0"
|
||||||
resolved "https://registry.yarnpkg.com/run-async/-/run-async-0.1.0.tgz#c8ad4a5e110661e402a7d21b530e009f25f8e389"
|
resolved "https://registry.yarnpkg.com/run-async/-/run-async-0.1.0.tgz#c8ad4a5e110661e402a7d21b530e009f25f8e389"
|
||||||
|
@ -6243,6 +6281,14 @@ script-loader@~0.6.0:
|
||||||
dependencies:
|
dependencies:
|
||||||
raw-loader "~0.5.1"
|
raw-loader "~0.5.1"
|
||||||
|
|
||||||
|
seleccion@2.0.0:
|
||||||
|
version "2.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/seleccion/-/seleccion-2.0.0.tgz#0984ac1e8df513e38b41a608e65042e8381e0a73"
|
||||||
|
|
||||||
|
sell@^1.0.0:
|
||||||
|
version "1.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/sell/-/sell-1.0.0.tgz#3baca7e51f78ddee9e22eea1ac747a6368bd1630"
|
||||||
|
|
||||||
"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@~5.3.0:
|
"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@~5.3.0:
|
||||||
version "5.3.0"
|
version "5.3.0"
|
||||||
resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f"
|
resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f"
|
||||||
|
@ -6842,6 +6888,10 @@ through@^2.3.6, through@~2.3.4:
|
||||||
version "2.3.8"
|
version "2.3.8"
|
||||||
resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
|
resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
|
||||||
|
|
||||||
|
ticky@1.0.0:
|
||||||
|
version "1.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/ticky/-/ticky-1.0.0.tgz#e87f38ee0491ea32f62e8f0567ba9638b29f049c"
|
||||||
|
|
||||||
timers-browserify@^2.0.2:
|
timers-browserify@^2.0.2:
|
||||||
version "2.0.2"
|
version "2.0.2"
|
||||||
resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.2.tgz#ab4883cf597dcd50af211349a00fbca56ac86b86"
|
resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.2.tgz#ab4883cf597dcd50af211349a00fbca56ac86b86"
|
||||||
|
|
Loading…
Reference in New Issue