Merge branch 'master' into multitenancy

commit 59fb016a4e

CHANGELOG.md (10 changed lines)
@@ -1,5 +1,15 @@
 ## v1.3.11.0 [unreleased]

 ### Bug Fixes
+1. [#2157](https://github.com/influxdata/chronograf/pull/2157): Fix logscale producing console errors when only one point in graph
+1. [#2158](https://github.com/influxdata/chronograf/pull/2158): Fix 'Cannot connect to source' false error flag on Dashboard page
+1. [#2167](https://github.com/influxdata/chronograf/pull/2167): Add fractions of seconds to time field in csv export
+1. [#2087](https://github.com/influxdata/chronograf/pull/2087): Fix Chronograf requiring Telegraf's CPU and system plugins to ensure that all Apps appear on the HOST LIST page.
+1. [#2222](https://github.com/influxdata/chronograf/pull/2222): Fix template variables in dashboard query building.
+1. [#2291](https://github.com/influxdata/chronograf/pull/2291): Fix several kapacitor alert creation panics.
+1. [#2303](https://github.com/influxdata/chronograf/pull/2303): Add shadow-utils to RPM release packages
+1. [#2292](https://github.com/influxdata/chronograf/pull/2292): Source extra command line options from defaults file
+1. [#2329](https://github.com/influxdata/chronograf/pull/2329): Include tag values alongside measurement name in Data Explorer result tabs

 ### Features

 ### UI Improvements
Makefile (49 changed lines)

@@ -1,4 +1,4 @@
-.PHONY: assets dep clean test gotest gotestrace jstest run run-dev ctags continuous
+.PHONY: assets dep clean test gotest gotestrace jstest run run-dev run-hmr ctags

 VERSION ?= $(shell git describe --always --tags)
 COMMIT ?= $(shell git rev-parse --short=8 HEAD)

@@ -23,14 +23,42 @@ ${BINARY}: $(SOURCES) .bindata .jsdep .godep
 	go build -o ${BINARY} ${LDFLAGS} ./cmd/chronograf/main.go

 define CHRONOGIRAFFE
-[previous ASCII-art giraffe, including the greeting: "HAI, I'm Chronogiraffe. Let's be friends!"]
+[larger redrawn ASCII-art giraffe; the drawing's column alignment is not preserved in this view]
 endef
 export CHRONOGIRAFFE
 chronogiraffe: ${BINARY}

@@ -106,6 +134,9 @@ run: ${BINARY}
 run-dev: chronogiraffe
 	./chronograf -d --log-level=debug

+run-hmr:
+	cd ui && npm run start:hmr
+
 clean:
 	if [ -f ${BINARY} ] ; then rm ${BINARY} ; fi
 	cd ui && yarn run clean
@@ -566,14 +566,15 @@ type KapacitorProperty struct {

 // Server represents a proxy connection to an HTTP server
 type Server struct {
 	ID                 int    // ID is the unique ID of the server
 	SrcID              int    // SrcID of the data source
 	Name               string // Name is the user-defined name for the server
 	Username           string // Username is the username to connect to the server
 	Password           string // Password is in CLEARTEXT
 	URL                string // URL is the connection to the server
+	InsecureSkipVerify bool   // InsecureSkipVerify as true means any certificate presented by the server is accepted.
 	Active             bool   // Is this the active server for the source?
 	Organization       string // Organization is the organization ID that resource belongs to
 }

 // ServersStore stores connection information for a `Server`
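The new Server.InsecureSkipVerify field ultimately has to reach a TLS config. That wiring happens in the kapacitor client below; as a minimal sketch of what consuming the flag looks like (the helper name here is hypothetical, not part of this diff):

package example

import (
	"crypto/tls"
	"net/http"
)

// newHTTPClient shows how a caller might honor Server.InsecureSkipVerify
// when dialing the server: the flag maps directly onto the standard
// library's tls.Config field of the same name.
func newHTTPClient(insecureSkipVerify bool) *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			// Accept any certificate when the user has opted in.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: insecureSkipVerify},
		},
	}
}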
@@ -3,7 +3,7 @@ machine:
   services:
     - docker
   environment:
-    DOCKER_TAG: chronograf-20170516
+    DOCKER_TAG: chronograf-20171027

 dependencies:
   override:
@@ -18,7 +18,7 @@ RUN pip install boto requests python-jose --upgrade
 RUN gem install fpm

 # Install node
-ENV NODE_VERSION v6.10.3
+ENV NODE_VERSION v6.11.5
 RUN wget -q https://nodejs.org/dist/latest-v6.x/node-${NODE_VERSION}-linux-x64.tar.gz; \
 	tar -xvf node-${NODE_VERSION}-linux-x64.tar.gz -C / --strip-components=1; \
 	rm -f node-${NODE_VERSION}-linux-x64.tar.gz

@@ -35,7 +35,7 @@ RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \

 # Install go
 ENV GOPATH /root/go
-ENV GO_VERSION 1.8.1
+ENV GO_VERSION 1.9.2
 ENV GO_ARCH amd64
 RUN wget https://storage.googleapis.com/golang/go${GO_VERSION}.linux-${GO_ARCH}.tar.gz; \
 	tar -C /usr/local/ -xf /go${GO_VERSION}.linux-${GO_ARCH}.tar.gz ; \
@@ -674,7 +674,7 @@ def package(build_output, pkg_name, version, nightly=False, iteration=1, static=
                 package_build_root,
                 current_location)
             if package_type == "rpm":
-                fpm_command += "--depends coreutils"
+                fpm_command += "--depends coreutils --depends shadow-utils"
             # TODO: Check for changelog
             # elif package_type == "deb":
             #     fpm_command += "--deb-changelog {} ".format(os.path.join(os.getcwd(), "CHANGELOG.md"))
@@ -9,7 +9,7 @@ After=network-online.target
 User=chronograf
 Group=chronograf
 EnvironmentFile=-/etc/default/chronograf
-ExecStart=/usr/bin/chronograf --host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned
+ExecStart=/usr/bin/chronograf --host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned $CHRONOGRAF_OPTS
 KillMode=control-group
 Restart=on-failure
@@ -13,7 +13,8 @@
 # Script to execute when starting
 SCRIPT="/usr/bin/chronograf"
 # Options to pass to the script on startup
-SCRIPT_OPTS="--host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned"
+. /etc/default/chronograf
+SCRIPT_OPTS="--host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned ${CHRONOGRAF_OPTS}"

 # User to run the process under
 RUNAS=chronograf
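Together with the systemd unit change above, this lets operators append flags without editing packaged files: a setting such as CHRONOGRAF_OPTS="--basepath /chronograf" in /etc/default/chronograf is now picked up by both the systemd and SysV init start paths (the --basepath flag here is only an illustration, not part of this diff).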
@@ -5,4 +5,5 @@
     dateext
     copytruncate
     compress
+    notifempty
 }
@@ -19,12 +19,13 @@ const (

 // Client communicates to kapacitor
 type Client struct {
 	URL                string
 	Username           string
 	Password           string
+	InsecureSkipVerify bool
 	ID                 chronograf.ID
 	Ticker             chronograf.Ticker
-	kapaClient func(url, username, password string) (KapaClient, error)
+	kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
 }

 // KapaClient represents a connection to a kapacitor instance

@@ -37,14 +38,15 @@ type KapaClient interface {
 }

 // NewClient creates a client that interfaces with Kapacitor tasks
-func NewClient(url, username, password string) *Client {
+func NewClient(url, username, password string, insecureSkipVerify bool) *Client {
 	return &Client{
 		URL:                url,
 		Username:           username,
 		Password:           password,
+		InsecureSkipVerify: insecureSkipVerify,
 		ID:                 &uuid.V4{},
 		Ticker:             &Alert{},
 		kapaClient:         NewKapaClient,
 	}
 }

@@ -121,7 +123,7 @@ func (c *Client) Create(ctx context.Context, rule chronograf.AlertRule) (*Task,
 		return nil, err
 	}

-	kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
+	kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
 	if err != nil {
 		return nil, err
 	}

@@ -189,7 +191,7 @@ func (c *Client) createFromQueryConfig(rule chronograf.AlertRule) (*client.Creat

 // Delete removes tickscript task from kapacitor
 func (c *Client) Delete(ctx context.Context, href string) error {
-	kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
+	kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
 	if err != nil {
 		return err
 	}

@@ -197,7 +199,7 @@ func (c *Client) Delete(ctx context.Context, href string) error {
 }

 func (c *Client) updateStatus(ctx context.Context, href string, status client.TaskStatus) (*Task, error) {
-	kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
+	kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
 	if err != nil {
 		return nil, err
 	}

@@ -235,7 +237,7 @@ func (c *Client) Status(ctx context.Context, href string) (string, error) {
 }

 func (c *Client) status(ctx context.Context, href string) (client.TaskStatus, error) {
-	kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
+	kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
 	if err != nil {
 		return 0, err
 	}

@@ -249,7 +251,7 @@ func (c *Client) status(ctx context.Context, href string) (client.TaskStatus, er

 // All returns all tasks in kapacitor
 func (c *Client) All(ctx context.Context) (map[string]*Task, error) {
-	kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
+	kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
 	if err != nil {
 		return nil, err
 	}

@@ -286,7 +288,7 @@ func (c *Client) Reverse(id string, script chronograf.TICKScript) chronograf.Ale

 // Get returns a single alert in kapacitor
 func (c *Client) Get(ctx context.Context, id string) (*Task, error) {
-	kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
+	kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
 	if err != nil {
 		return nil, err
 	}

@@ -301,7 +303,7 @@ func (c *Client) Get(ctx context.Context, id string) (*Task, error) {

 // Update changes the tickscript of a given id.
 func (c *Client) Update(ctx context.Context, href string, rule chronograf.AlertRule) (*Task, error) {
-	kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
+	kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
 	if err != nil {
 		return nil, err
 	}

@@ -317,6 +319,9 @@ func (c *Client) Update(ctx context.Context, href string, rule chronograf.AlertR
 	} else {
 		opt, err = c.updateFromTick(rule)
 	}
+	if err != nil {
+		return nil, err
+	}

 	task, err := kapa.UpdateTask(client.Link{Href: href}, *opt)
 	if err != nil {

@@ -386,7 +391,7 @@ func toTask(q *chronograf.QueryConfig) client.TaskType {
 }

 // NewKapaClient creates a Kapacitor client connection
-func NewKapaClient(url, username, password string) (KapaClient, error) {
+func NewKapaClient(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 	var creds *client.Credentials
 	if username != "" {
 		creds = &client.Credentials{

@@ -397,8 +402,9 @@ func NewKapaClient(url, username, password string) (KapaClient, error) {
 	}

 	clnt, err := client.New(client.Config{
 		URL:                url,
 		Credentials:        creds,
+		InsecureSkipVerify: insecureSkipVerify,
 	})

 	if err != nil {
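Every helper now resolves its connection through c.kapaClient with the flag, so a kapacitor behind a self-signed certificate is reachable end to end. A minimal usage sketch against the new signature (the URL and credentials are placeholders):

package example

import (
	"context"

	kapa "github.com/influxdata/chronograf/kapacitor"
)

// listTasks dials kapacitor with certificate verification disabled,
// matching the new trailing insecureSkipVerify argument.
func listTasks(ctx context.Context) error {
	c := kapa.NewClient("https://kapacitor.local:9092", "admin", "secret", true)
	_, err := c.All(ctx)
	return err
}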
@@ -7,6 +7,7 @@ import (
 	"testing"

 	"github.com/google/go-cmp/cmp"
+	"github.com/google/go-cmp/cmp/cmpopts"
 	"github.com/influxdata/chronograf"
 	client "github.com/influxdata/kapacitor/client/v1"
 )
@@ -75,7 +76,7 @@ func TestClient_All(t *testing.T) {
 		Password   string
 		ID         chronograf.ID
 		Ticker     chronograf.Ticker
-		kapaClient func(url, username, password string) (KapaClient, error)
+		kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
 	}
 	type args struct {
 		ctx context.Context

@@ -100,7 +101,7 @@ func TestClient_All(t *testing.T) {
 		{
 			name: "return no tasks",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 			},

@@ -110,7 +111,7 @@ func TestClient_All(t *testing.T) {
 		{
 			name: "return a non-reversible task",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 			},

@@ -141,7 +142,7 @@ func TestClient_All(t *testing.T) {
 		{
 			name: "return a reversible task",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 			},

@@ -380,7 +381,7 @@ func TestClient_Get(t *testing.T) {
 		Password   string
 		ID         chronograf.ID
 		Ticker     chronograf.Ticker
-		kapaClient func(url, username, password string) (KapaClient, error)
+		kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
 	}
 	type args struct {
 		ctx context.Context

@@ -406,7 +407,7 @@ func TestClient_Get(t *testing.T) {
 		{
 			name: "return no task",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 			},

@@ -423,7 +424,7 @@ func TestClient_Get(t *testing.T) {
 		{
 			name: "return non-reversible task",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 			},

@@ -465,7 +466,7 @@ func TestClient_Get(t *testing.T) {
 		{
 			name: "return reversible task",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 			},
@@ -706,7 +707,7 @@ func TestClient_updateStatus(t *testing.T) {
 		Password   string
 		ID         chronograf.ID
 		Ticker     chronograf.Ticker
-		kapaClient func(url, username, password string) (KapaClient, error)
+		kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
 	}
 	type args struct {
 		ctx context.Context

@@ -727,7 +728,7 @@ func TestClient_updateStatus(t *testing.T) {
 		{
 			name: "disable alert rule",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 				Ticker: &Alert{},

@@ -777,7 +778,7 @@ func TestClient_updateStatus(t *testing.T) {
 		{
 			name: "fail to enable alert rule",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 				Ticker: &Alert{},

@@ -797,7 +798,7 @@ func TestClient_updateStatus(t *testing.T) {
 		{
 			name: "enable alert rule",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 				Ticker: &Alert{},
@@ -880,7 +881,7 @@ func TestClient_Update(t *testing.T) {
 		Password   string
 		ID         chronograf.ID
 		Ticker     chronograf.Ticker
-		kapaClient func(url, username, password string) (KapaClient, error)
+		kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
 	}
 	type args struct {
 		ctx context.Context

@@ -902,7 +903,7 @@ func TestClient_Update(t *testing.T) {
 		{
 			name: "update alert rule error",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 				Ticker: &Alert{},

@@ -936,7 +937,7 @@ func TestClient_Update(t *testing.T) {
 		{
 			name: "update alert rule",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 				Ticker: &Alert{},

@@ -945,10 +946,22 @@ func TestClient_Update(t *testing.T) {
 				ctx:  context.Background(),
 				href: "/kapacitor/v1/tasks/howdy",
 				rule: chronograf.AlertRule{
 					ID:   "howdy",
+					Name: "myname",
 					Query: &chronograf.QueryConfig{
 						Database:        "db",
 						RetentionPolicy: "rp",
+						Measurement:     "meas",
+						Fields: []chronograf.Field{
+							{
+								Type:  "field",
+								Value: "usage_user",
+							},
+						},
+					},
+					Trigger: "threshold",
+					TriggerValues: chronograf.TriggerValues{
+						Operator: greaterThan,
 					},
 				},
 			},

@@ -1000,7 +1013,7 @@ func TestClient_Update(t *testing.T) {
 		{
 			name: "stays disabled when already disabled",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 				Ticker: &Alert{},

@@ -1009,10 +1022,22 @@ func TestClient_Update(t *testing.T) {
 				ctx:  context.Background(),
 				href: "/kapacitor/v1/tasks/howdy",
 				rule: chronograf.AlertRule{
 					ID:   "howdy",
+					Name: "myname",
 					Query: &chronograf.QueryConfig{
 						Database:        "db",
 						RetentionPolicy: "rp",
+						Measurement:     "meas",
+						Fields: []chronograf.Field{
+							{
+								Type:  "field",
+								Value: "usage_user",
+							},
+						},
+					},
+					Trigger: "threshold",
+					TriggerValues: chronograf.TriggerValues{
+						Operator: greaterThan,
 					},
 				},
 			},
@@ -1061,6 +1086,135 @@ func TestClient_Update(t *testing.T) {
 			},
 			wantStatus: client.Disabled,
 		},
+		{
+			name:    "error because relative cannot have inside range",
+			wantErr: true,
+			fields: fields{
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
+					return kapa, nil
+				},
+				Ticker: &Alert{},
+			},
+			args: args{
+				ctx:  context.Background(),
+				href: "/kapacitor/v1/tasks/error",
+				rule: chronograf.AlertRule{
+					ID: "error",
+					Query: &chronograf.QueryConfig{
+						Database:        "db",
+						RetentionPolicy: "rp",
+						Fields: []chronograf.Field{
+							{
+								Value: "usage_user",
+								Type:  "field",
+							},
+						},
+					},
+					Trigger: Relative,
+					TriggerValues: chronograf.TriggerValues{
+						Operator: InsideRange,
+					},
+				},
+			},
+		},
+		{
+			name:    "error because rule has an unknown trigger mechanism",
+			wantErr: true,
+			fields: fields{
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
+					return kapa, nil
+				},
+				Ticker: &Alert{},
+			},
+			args: args{
+				ctx:  context.Background(),
+				href: "/kapacitor/v1/tasks/error",
+				rule: chronograf.AlertRule{
+					ID: "error",
+					Query: &chronograf.QueryConfig{
+						Database:        "db",
+						RetentionPolicy: "rp",
+					},
+				},
+			},
+		},
+		{
+			name:    "error because query has no fields",
+			wantErr: true,
+			fields: fields{
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
+					return kapa, nil
+				},
+				Ticker: &Alert{},
+			},
+			args: args{
+				ctx:  context.Background(),
+				href: "/kapacitor/v1/tasks/error",
+				rule: chronograf.AlertRule{
+					ID:      "error",
+					Trigger: Threshold,
+					TriggerValues: chronograf.TriggerValues{
+						Period: "1d",
+					},
+					Name: "myname",
+					Query: &chronograf.QueryConfig{
+						Database:        "db",
+						RetentionPolicy: "rp",
+						Measurement:     "meas",
+					},
+				},
+			},
+		},
+		{
+			name:    "error because alert has no name",
+			wantErr: true,
+			fields: fields{
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
+					return kapa, nil
+				},
+				Ticker: &Alert{},
+			},
+			args: args{
+				ctx:  context.Background(),
+				href: "/kapacitor/v1/tasks/error",
+				rule: chronograf.AlertRule{
+					ID:      "error",
+					Trigger: Deadman,
+					TriggerValues: chronograf.TriggerValues{
+						Period: "1d",
+					},
+					Query: &chronograf.QueryConfig{
+						Database:        "db",
+						RetentionPolicy: "rp",
+						Measurement:     "meas",
+					},
+				},
+			},
+		},
+		{
+			name:    "error because alert period cannot be an empty string in deadman alert",
+			wantErr: true,
+			fields: fields{
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
+					return kapa, nil
+				},
+				Ticker: &Alert{},
+			},
+			args: args{
+				ctx:  context.Background(),
+				href: "/kapacitor/v1/tasks/error",
+				rule: chronograf.AlertRule{
+					ID:      "error",
+					Name:    "myname",
+					Trigger: Deadman,
+					Query: &chronograf.QueryConfig{
+						Database:        "db",
+						RetentionPolicy: "rp",
+						Measurement:     "meas",
+					},
+				},
+			},
+		},
 	}
 	for _, tt := range tests {
 		kapa.ResTask = tt.resTask

@@ -1079,11 +1233,17 @@ func TestClient_Update(t *testing.T) {
 			t.Errorf("Client.Update() error = %v, wantErr %v", err, tt.wantErr)
 			return
 		}
+		if tt.wantErr {
+			return
+		}
 		if !cmp.Equal(got, tt.want) {
 			t.Errorf("%q. Client.Update() = -got/+want %s", tt.name, cmp.Diff(got, tt.want))
 		}
-		if !reflect.DeepEqual(kapa.UpdateTaskOptions, tt.updateTaskOptions) {
-			t.Errorf("Client.Update() = %v, want %v", kapa.UpdateTaskOptions, tt.updateTaskOptions)
+		var cmpOptions = cmp.Options{
+			cmpopts.IgnoreFields(client.UpdateTaskOptions{}, "TICKscript"),
+		}
+		if !cmp.Equal(kapa.UpdateTaskOptions, tt.updateTaskOptions, cmpOptions...) {
+			t.Errorf("Client.Update() = %s", cmp.Diff(kapa.UpdateTaskOptions, tt.updateTaskOptions, cmpOptions...))
 		}
 		if tt.wantStatus != kapa.LastStatus {
 			t.Errorf("Client.Update() = %v, want %v", kapa.LastStatus, tt.wantStatus)
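The switch from reflect.DeepEqual to go-cmp with an ignore option keeps the assertion focused on the fields the test actually controls. The same pattern in isolation (the struct and field names here are illustrative, not from the repo):

package example

import (
	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

type task struct {
	ID     string
	Script string
}

// equalIgnoringScript compares tasks while ignoring the generated Script
// field, mirroring how the test above ignores TICKscript.
func equalIgnoringScript(a, b task) bool {
	return cmp.Equal(a, b, cmpopts.IgnoreFields(task{}, "Script"))
}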
@@ -1099,7 +1259,7 @@ func TestClient_Create(t *testing.T) {
 		Password   string
 		ID         chronograf.ID
 		Ticker     chronograf.Ticker
-		kapaClient func(url, username, password string) (KapaClient, error)
+		kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
 	}
 	type args struct {
 		ctx context.Context

@@ -1119,7 +1279,7 @@ func TestClient_Create(t *testing.T) {
 		{
 			name: "create alert rule",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 				Ticker: &Alert{},

@@ -1130,10 +1290,16 @@ func TestClient_Create(t *testing.T) {
 			args: args{
 				ctx: context.Background(),
 				rule: chronograf.AlertRule{
 					ID:   "howdy",
+					Name: "myname's",
 					Query: &chronograf.QueryConfig{
 						Database:        "db",
 						RetentionPolicy: "rp",
+						Measurement:     "meas",
+					},
+					Trigger: Deadman,
+					TriggerValues: chronograf.TriggerValues{
+						Period: "1d",
 					},
 				},
 			},

@@ -1152,10 +1318,79 @@ func TestClient_Create(t *testing.T) {
 				},
 			},
 			createTaskOptions: &client.CreateTaskOptions{
-				TICKscript: "",
+				TICKscript: `var db = 'db'
+
+var rp = 'rp'
+
+var measurement = 'meas'
+
+var groupBy = []
+
+var whereFilter = lambda: TRUE
+
+var period = 1d
+
+var name = 'myname\'s'
+
+var idVar = name + ':{{.Group}}'
+
+var message = ''
+
+var idTag = 'alertID'
+
+var levelTag = 'level'
+
+var messageField = 'message'
+
+var durationField = 'duration'
+
+var outputDB = 'chronograf'
+
+var outputRP = 'autogen'
+
+var outputMeasurement = 'alerts'
+
+var triggerType = 'deadman'
+
+var threshold = 0.0
+
+var data = stream
+    |from()
+        .database(db)
+        .retentionPolicy(rp)
+        .measurement(measurement)
+        .groupBy(groupBy)
+        .where(whereFilter)
+
+var trigger = data
+    |deadman(threshold, period)
+        .stateChangesOnly()
+        .message(message)
+        .id(idVar)
+        .idTag(idTag)
+        .levelTag(levelTag)
+        .messageField(messageField)
+        .durationField(durationField)
+
+trigger
+    |eval(lambda: "emitted")
+        .as('value')
+        .keep('value', messageField, durationField)
+    |influxDBOut()
+        .create()
+        .database(outputDB)
+        .retentionPolicy(outputRP)
+        .measurement(outputMeasurement)
+        .tag('alertName', name)
+        .tag('triggerType', triggerType)
+
+trigger
+    |httpOut('output')
+`,
 				ID:     "chronograf-v1-howdy",
 				Type:   client.StreamTask,
 				Status: client.Enabled,
 				DBRPs: []client.DBRP{
 					{
 						Database: "db",

@@ -1185,7 +1420,7 @@ func TestClient_Create(t *testing.T) {
 		{
 			name: "create alert rule error",
 			fields: fields{
-				kapaClient: func(url, username, password string) (KapaClient, error) {
+				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
 					return kapa, nil
 				},
 				Ticker: &Alert{},

@@ -1205,10 +1440,9 @@ func TestClient_Create(t *testing.T) {
 			},
 			resError: fmt.Errorf("error"),
 			createTaskOptions: &client.CreateTaskOptions{
-				TICKscript: "",
 				ID:     "chronograf-v1-howdy",
 				Type:   client.StreamTask,
 				Status: client.Enabled,
 				DBRPs: []client.DBRP{
 					{
 						Database: "db",

@@ -1236,6 +1470,9 @@ func TestClient_Create(t *testing.T) {
 			t.Errorf("Client.Create() error = %v, wantErr %v", err, tt.wantErr)
 			return
 		}
+		if tt.wantErr {
+			return
+		}
 		if !cmp.Equal(got, tt.want) {
 			t.Errorf("%q. Client.Create() = -got/+want %s", tt.name, cmp.Diff(got, tt.want))
 		}
@@ -1,6 +1,8 @@
 package kapacitor

-import "fmt"
+import (
+	"fmt"
+)

 const (
 	greaterThan = "greater than"
@@ -15,11 +15,11 @@ type Alert struct{}
 func (a *Alert) Generate(rule chronograf.AlertRule) (chronograf.TICKScript, error) {
 	vars, err := Vars(rule)
 	if err != nil {
-		return "", nil
+		return "", err
 	}
 	data, err := Data(rule)
 	if err != nil {
-		return "", nil
+		return "", err
 	}
 	trigger, err := Trigger(rule)
 	if err != nil {
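The old `return "", nil` dropped the error, so a failed Vars or Data call reported success with an empty TICKscript that kapacitor only rejected later. A sketch of the bug class (hypothetical names):

package example

// generate shows the fixed error path: the pre-fix code returned
// `"", nil` here, which made the caller treat an empty script as valid.
func generate(build func() (string, error)) (string, error) {
	s, err := build()
	if err != nil {
		return "", err
	}
	return s, nil
}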
@@ -1,7 +1,10 @@
 package kapacitor

-import "github.com/influxdata/chronograf"
-import "fmt"
+import (
+	"fmt"
+	"strings"
+
+	"github.com/influxdata/chronograf"
+)

 const (
 	// Deadman triggers when data is missing for a period of time
@@ -76,7 +76,41 @@ func Vars(rule chronograf.AlertRule) (string, error) {
 	}
 }

+// NotEmpty records the first validation failure across a chain of checks.
+type NotEmpty struct {
+	Err error
+}
+
+func (n *NotEmpty) Valid(name, s string) error {
+	if n.Err != nil {
+		return n.Err
+	}
+	if s == "" {
+		n.Err = fmt.Errorf("%s cannot be an empty string", name)
+	}
+	return n.Err
+}
+
+// Escape sanitizes a raw string for insertion into a single-quoted
+// TICKscript string literal.
+func Escape(str string) string {
+	return strings.Replace(str, "'", `\'`, -1)
+}
+
 func commonVars(rule chronograf.AlertRule) (string, error) {
+	n := new(NotEmpty)
+	n.Valid("database", rule.Query.Database)
+	n.Valid("retention policy", rule.Query.RetentionPolicy)
+	n.Valid("measurement", rule.Query.Measurement)
+	n.Valid("alert name", rule.Name)
+	n.Valid("trigger type", rule.Trigger)
+	if n.Err != nil {
+		return "", n.Err
+	}
+
+	wind, err := window(rule)
+	if err != nil {
+		return "", err
+	}
+
 	common := `
 var db = '%s'
 var rp = '%s'

@@ -99,14 +133,14 @@ func commonVars(rule chronograf.AlertRule) (string, error) {
 var triggerType = '%s'
 `
 	res := fmt.Sprintf(common,
-		rule.Query.Database,
-		rule.Query.RetentionPolicy,
-		rule.Query.Measurement,
+		Escape(rule.Query.Database),
+		Escape(rule.Query.RetentionPolicy),
+		Escape(rule.Query.Measurement),
 		groupBy(rule.Query),
 		whereFilter(rule.Query),
-		window(rule),
-		rule.Name,
-		rule.Message,
+		wind,
+		Escape(rule.Name),
+		Escape(rule.Message),
 		IDTag,
 		LevelTag,
 		MessageField,

@@ -127,17 +161,27 @@ func commonVars(rule chronograf.AlertRule) (string, error) {

 // window is only used if deadman or threshold/relative with aggregate. Will return empty
 // if no period.
-func window(rule chronograf.AlertRule) string {
+func window(rule chronograf.AlertRule) (string, error) {
 	if rule.Trigger == Deadman {
-		return fmt.Sprintf("var period = %s", rule.TriggerValues.Period)
+		if rule.TriggerValues.Period == "" {
+			return "", fmt.Errorf("period cannot be an empty string in deadman alert")
+		}
+		return fmt.Sprintf("var period = %s", rule.TriggerValues.Period), nil
 	}
 	// Period only makes sense if the field has been grouped via a time duration.
 	for _, field := range rule.Query.Fields {
 		if field.Type == "func" {
-			return fmt.Sprintf("var period = %s\nvar every = %s", rule.Query.GroupBy.Time, rule.Every)
+			n := new(NotEmpty)
+			n.Valid("group by time", rule.Query.GroupBy.Time)
+			n.Valid("every", rule.Every)
+			if n.Err != nil {
+				return "", n.Err
+			}
+			return fmt.Sprintf("var period = %s\nvar every = %s", rule.Query.GroupBy.Time, rule.Every), nil
 		}
 	}
-	return ""
+	return "", nil
 }

 func groupBy(q *chronograf.QueryConfig) string {
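Escape exists because user-supplied values are interpolated into single-quoted TICKscript string literals: an unescaped quote, as in the test fixture name "myname's", would otherwise terminate the literal early. A quick standalone demonstration:

package main

import (
	"fmt"
	"strings"
)

// escape mirrors kapacitor.Escape from the diff above.
func escape(s string) string {
	return strings.Replace(s, "'", `\'`, -1)
}

func main() {
	name := "myname's"
	fmt.Printf("var name = '%s'\n", escape(name))
	// Output: var name = 'myname\'s'
}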
@@ -12,12 +12,13 @@ import (
 )

 type postKapacitorRequest struct {
 	Name               *string `json:"name"`               // User facing name of kapacitor instance; Required: true
 	URL                *string `json:"url"`                // URL for the kapacitor backend (e.g. http://localhost:9092); Required: true
 	Username           string  `json:"username,omitempty"` // Username for authentication to kapacitor
 	Password           string  `json:"password,omitempty"`
+	InsecureSkipVerify bool    `json:"insecureSkipVerify,omitempty"` // InsecureSkipVerify as true means any certificate presented by the kapacitor is accepted.
 	Active             bool    `json:"active"`
 	Organization       string  `json:"organization"` // Organization is the organization ID that resource belongs to
 }

 func (p *postKapacitorRequest) Valid(defaultOrgID string) error {

@@ -49,13 +50,14 @@ type kapaLinks struct {
 }

 type kapacitor struct {
 	ID                 int       `json:"id,string"`          // Unique identifier representing a kapacitor instance.
 	Name               string    `json:"name"`               // User facing name of kapacitor instance.
 	URL                string    `json:"url"`                // URL for the kapacitor backend (e.g. http://localhost:9092)
 	Username           string    `json:"username,omitempty"` // Username for authentication to kapacitor
 	Password           string    `json:"password,omitempty"`
+	InsecureSkipVerify bool      `json:"insecureSkipVerify,omitempty"` // InsecureSkipVerify as true means any certificate presented by the kapacitor is accepted.
 	Active             bool      `json:"active"`
 	Links              kapaLinks `json:"links"` // Links are URI locations related to kapacitor
 }

 // NewKapacitor adds a valid kapacitor to the store.

@@ -91,13 +93,14 @@ func (s *Service) NewKapacitor(w http.ResponseWriter, r *http.Request) {
 	}

 	srv := chronograf.Server{
 		SrcID:              srcID,
 		Name:               *req.Name,
 		Username:           req.Username,
 		Password:           req.Password,
+		InsecureSkipVerify: req.InsecureSkipVerify,
 		URL:                *req.URL,
 		Active:             req.Active,
 		Organization:       req.Organization,
 	}

 	if srv, err = s.Store.Servers(ctx).Add(ctx, srv); err != nil {
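Because the new field is tagged omitempty, existing clients are unaffected; a request body that opts in to a self-signed kapacitor would look like {"name": "kapa", "url": "https://kapacitor.local:9092", "insecureSkipVerify": true} (the URL here is a placeholder).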
@@ -114,11 +117,12 @@ func (s *Service) NewKapacitor(w http.ResponseWriter, r *http.Request) {
 func newKapacitor(srv chronograf.Server) kapacitor {
 	httpAPISrcs := "/chronograf/v1/sources"
 	return kapacitor{
 		ID:                 srv.ID,
 		Name:               srv.Name,
 		Username:           srv.Username,
 		URL:                srv.URL,
 		Active:             srv.Active,
+		InsecureSkipVerify: srv.InsecureSkipVerify,
 		Links: kapaLinks{
 			Self:  fmt.Sprintf("%s/%d/kapacitors/%d", httpAPISrcs, srv.SrcID, srv.ID),
 			Proxy: fmt.Sprintf("%s/%d/kapacitors/%d/proxy", httpAPISrcs, srv.SrcID, srv.ID),

@@ -217,11 +221,12 @@ func (s *Service) RemoveKapacitor(w http.ResponseWriter, r *http.Request) {
 }

 type patchKapacitorRequest struct {
 	Name               *string `json:"name,omitempty"`     // User facing name of kapacitor instance.
 	URL                *string `json:"url,omitempty"`      // URL for the kapacitor
 	Username           *string `json:"username,omitempty"` // Username for kapacitor auth
 	Password           *string `json:"password,omitempty"`
+	InsecureSkipVerify *bool   `json:"insecureSkipVerify,omitempty"` // InsecureSkipVerify as true means any certificate presented by the kapacitor is accepted.
 	Active             *bool   `json:"active"`
 }

 func (p *patchKapacitorRequest) Valid() error {

@@ -281,6 +286,9 @@ func (s *Service) UpdateKapacitor(w http.ResponseWriter, r *http.Request) {
 	if req.Username != nil {
 		srv.Username = *req.Username
 	}
+	if req.InsecureSkipVerify != nil {
+		srv.InsecureSkipVerify = *req.InsecureSkipVerify
+	}
 	if req.Active != nil {
 		srv.Active = *req.Active
 	}

@@ -316,7 +324,7 @@ func (s *Service) KapacitorRulesPost(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
+	c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)

 	var req chronograf.AlertRule
 	if err = json.NewDecoder(r.Body).Decode(&req); err != nil {

@@ -446,7 +454,7 @@ func (s *Service) KapacitorRulesPut(w http.ResponseWriter, r *http.Request) {
 	}

 	tid := httprouter.GetParamFromContext(ctx, "tid")
-	c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
+	c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)
 	var req chronograf.AlertRule
 	if err = json.NewDecoder(r.Body).Decode(&req); err != nil {
 		invalidJSON(w, s.Logger)

@@ -516,7 +524,7 @@ func (s *Service) KapacitorRulesStatus(w http.ResponseWriter, r *http.Request) {
 	}

 	tid := httprouter.GetParamFromContext(ctx, "tid")
-	c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
+	c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)

 	var req KapacitorStatus
 	if err = json.NewDecoder(r.Body).Decode(&req); err != nil {

@@ -576,7 +584,7 @@ func (s *Service) KapacitorRulesGet(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
+	c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)
 	tasks, err := c.All(ctx)
 	if err != nil {
 		Error(w, http.StatusInternalServerError, err.Error(), s.Logger)

@@ -619,7 +627,7 @@ func (s *Service) KapacitorRulesID(w http.ResponseWriter, r *http.Request) {
 	}
 	tid := httprouter.GetParamFromContext(ctx, "tid")

-	c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
+	c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)

 	// Check if the rule exists within scope
 	task, err := c.Get(ctx, tid)

@@ -657,7 +665,7 @@ func (s *Service) KapacitorRulesDelete(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
+	c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)

 	tid := httprouter.GetParamFromContext(ctx, "tid")
 	// Check if the rule is linked to this server and kapacitor
@@ -187,6 +187,14 @@ func Test_KapacitorRulesGet(t *testing.T) {
 	testLogger := mocks.TestLogger{}
 	svc := &server.Service{
 		Store: &mocks.Store{
+			SourcesStore: &mocks.SourcesStore{
+				GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
+					return chronograf.Source{
+						ID:                 ID,
+						InsecureSkipVerify: true,
+					}, nil
+				},
+			},
 			ServersStore: &mocks.ServersStore{
 				GetF: func(ctx context.Context, ID int) (chronograf.Server, error) {
 					return chronograf.Server{
@@ -7,39 +7,57 @@ import (
 	"github.com/influxdata/chronograf"
 )

-type logResponseWriter struct {
+// statusWriter captures the status header of an http.ResponseWriter
+// and is an http.Flusher
+type statusWriter struct {
 	http.ResponseWriter
-	responseCode int
+	Flusher http.Flusher
+	status  int
 }

-func (l *logResponseWriter) WriteHeader(status int) {
-	l.responseCode = status
-	l.ResponseWriter.WriteHeader(status)
+func (w *statusWriter) WriteHeader(status int) {
+	w.status = status
+	w.ResponseWriter.WriteHeader(status)
+}
+
+func (w *statusWriter) Status() int { return w.status }
+
+// Flush is here because the underlying HTTP chunked transfer response writer
+// needs to implement http.Flusher. Without it data is silently buffered. This
+// was discovered when proxying kapacitor chunked logs.
+func (w *statusWriter) Flush() {
+	if w.Flusher != nil {
+		w.Flusher.Flush()
+	}
 }

 // Logger is middleware that logs the request
 func Logger(logger chronograf.Logger, next http.Handler) http.Handler {
 	fn := func(w http.ResponseWriter, r *http.Request) {
 		now := time.Now()
-		logger.
-			WithField("component", "server").
+		logger.WithField("component", "server").
 			WithField("remote_addr", r.RemoteAddr).
 			WithField("method", r.Method).
 			WithField("url", r.URL).
-			Info("Request")
+			Debug("Request")

-		lrr := &logResponseWriter{w, 0}
-		next.ServeHTTP(lrr, r)
+		sw := &statusWriter{
+			ResponseWriter: w,
+		}
+		if f, ok := w.(http.Flusher); ok {
+			sw.Flusher = f
+		}
+		next.ServeHTTP(sw, r)
 		later := time.Now()
 		elapsed := later.Sub(now)

 		logger.
 			WithField("component", "server").
 			WithField("remote_addr", r.RemoteAddr).
+			WithField("method", r.Method).
 			WithField("response_time", elapsed.String()).
-			WithField("code", lrr.responseCode).
-			Info("Response: ", http.StatusText(lrr.responseCode))
+			WithField("status", sw.Status()).
+			Info("Response: ", http.StatusText(sw.Status()))
 	}
 	return http.HandlerFunc(fn)
 }
@@ -9,7 +9,8 @@ import (
 
 type interceptingResponseWriter struct {
 	http.ResponseWriter
-	Prefix string
+	Flusher http.Flusher
+	Prefix  string
 }
 
 func (i *interceptingResponseWriter) WriteHeader(status int) {
@@ -25,11 +26,26 @@ func (i *interceptingResponseWriter) WriteHeader(status int) {
 	i.ResponseWriter.WriteHeader(status)
 }
 
-// PrefixingRedirector alters the Location header of downstream http.Handlers
+// Flush is here because the underlying HTTP chunked transfer response writer
+// needs to implement http.Flusher. Without it data is silently buffered. This
+// was discovered when proxying kapacitor chunked logs.
+func (i *interceptingResponseWriter) Flush() {
+	if i.Flusher != nil {
+		i.Flusher.Flush()
+	}
+}
+
+// PrefixedRedirect alters the Location header of downstream http.Handlers
 // to include a specified prefix
 func PrefixedRedirect(prefix string, next http.Handler) http.Handler {
 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-		iw := &interceptingResponseWriter{w, prefix}
+		iw := &interceptingResponseWriter{
+			ResponseWriter: w,
+			Prefix:         prefix,
+		}
+		if flusher, ok := w.(http.Flusher); ok {
+			iw.Flusher = flusher
+		}
 		next.ServeHTTP(iw, r)
 	})
 }
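
Both wrappers above guard against the same trap: embedding an http.ResponseWriter hides whether the concrete writer also implements http.Flusher, so a wrapper without an explicit Flush method silently buffers chunked responses. A minimal sketch of the pattern, assuming only the standard library:

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// wrapper embeds a ResponseWriter and forwards Flush when the underlying
// writer supports it, mirroring statusWriter/interceptingResponseWriter.
type wrapper struct {
	http.ResponseWriter
	flusher http.Flusher
}

func (w *wrapper) Flush() {
	if w.flusher != nil {
		w.flusher.Flush()
	}
}

func main() {
	var rw http.ResponseWriter = httptest.NewRecorder()

	w := &wrapper{ResponseWriter: rw}
	// The assertion must happen on the original writer; the embedded
	// field alone does not tell us whether Flush is available.
	if f, ok := rw.(http.Flusher); ok {
		w.flusher = f
	}

	w.Flush() // safe either way: a nil flusher is simply skipped
	fmt.Println(w.flusher != nil)
}
```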
@@ -5,6 +5,8 @@ import (
 	"net/http"
 	"net/http/httputil"
 	"net/url"
+	"strings"
+	"time"
 )
 
 // KapacitorProxy proxies requests to kapacitor using the path query parameter.
@@ -34,28 +36,33 @@ func (s *Service) KapacitorProxy(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	u, err := url.Parse(srv.URL)
+	// To preserve any HTTP query arguments to the kapacitor path,
+	// we concat and parse them into u.
+	uri := singleJoiningSlash(srv.URL, path)
+	u, err := url.Parse(uri)
 	if err != nil {
 		msg := fmt.Sprintf("Error parsing kapacitor url: %v", err)
 		Error(w, http.StatusUnprocessableEntity, msg, s.Logger)
 		return
 	}
 
-	u.Path = path
-
 	director := func(req *http.Request) {
 		// Set the Host header of the original Kapacitor URL
 		req.Host = u.Host
 
 		req.URL = u
 
 		// Because we are acting as a proxy, kapacitor needs to have the basic auth information set as
 		// a header directly
 		if srv.Username != "" && srv.Password != "" {
 			req.SetBasicAuth(srv.Username, srv.Password)
 		}
 	}
 
+	// Without a FlushInterval the HTTP Chunked response for kapacitor logs is
+	// buffered and flushed every 30 seconds.
 	proxy := &httputil.ReverseProxy{
 		Director:      director,
+		FlushInterval: time.Second,
 	}
 	proxy.ServeHTTP(w, r)
 }
@@ -79,3 +86,15 @@ func (s *Service) KapacitorProxyGet(w http.ResponseWriter, r *http.Request) {
 func (s *Service) KapacitorProxyDelete(w http.ResponseWriter, r *http.Request) {
 	s.KapacitorProxy(w, r)
 }
+
+func singleJoiningSlash(a, b string) string {
+	aslash := strings.HasSuffix(a, "/")
+	bslash := strings.HasPrefix(b, "/")
+	if aslash && bslash {
+		return a + b[1:]
+	}
+	if !aslash && !bslash {
+		return a + "/" + b
+	}
+	return a + b
+}
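
singleJoiningSlash guarantees exactly one slash between the kapacitor base URL and the proxied path, whichever side supplies it. A quick sketch of its three cases (the helper body is copied from the hunk above; the main harness and the example host are illustrative):

```go
package main

import (
	"fmt"
	"strings"
)

func singleJoiningSlash(a, b string) string {
	aslash := strings.HasSuffix(a, "/")
	bslash := strings.HasPrefix(b, "/")
	if aslash && bslash {
		return a + b[1:] // both sides have a slash: drop one
	}
	if !aslash && !bslash {
		return a + "/" + b // neither side has one: add it
	}
	return a + b // exactly one side has it: join as-is
}

func main() {
	fmt.Println(singleJoiningSlash("http://kapa:9092/", "/kapacitor/v1/logs"))
	fmt.Println(singleJoiningSlash("http://kapa:9092", "kapacitor/v1/logs"))
	fmt.Println(singleJoiningSlash("http://kapa:9092", "/kapacitor/v1/logs"))
	// All three print: http://kapa:9092/kapacitor/v1/logs
}
```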
@@ -12,16 +12,21 @@ import (
 	"github.com/influxdata/chronograf/influx/queries"
 )
 
+// QueryRequest is a query that will be converted to a queryConfig
 type QueryRequest struct {
 	ID    string `json:"id"`
 	Query string `json:"query"`
+}
+
+// QueriesRequest converts all queries to queryConfigs with the help
+// of the template variables
+type QueriesRequest struct {
+	Queries      []QueryRequest          `json:"queries"`
 	TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
 }
 
-type QueriesRequest struct {
-	Queries []QueryRequest `json:"queries"`
-}
-
+// QueryResponse is the return result of a QueryRequest including
+// the raw query, the templated query, the queryConfig and the queryAST
 type QueryResponse struct {
 	ID    string `json:"id"`
 	Query string `json:"query"`
@@ -31,11 +36,12 @@ type QueryResponse struct {
 	TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
 }
 
+// QueriesResponse is the response for a QueriesRequest
 type QueriesResponse struct {
 	Queries []QueryResponse `json:"queries"`
 }
 
-// Queries parses InfluxQL and returns the JSON
+// Queries analyzes InfluxQL to produce front-end friendly QueryConfig
 func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
 	srcID, err := paramID("id", r)
 	if err != nil {
@@ -66,12 +72,7 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
 			Query: q.Query,
 		}
 
-		query := q.Query
-		if len(q.TemplateVars) > 0 {
-			query = influx.TemplateReplace(query, q.TemplateVars)
-			qr.QueryTemplated = &query
-		}
-
+		query := influx.TemplateReplace(q.Query, req.TemplateVars)
 		qc := ToQueryConfig(query)
 		if err := s.DefaultRP(ctx, &qc, &src); err != nil {
 			Error(w, http.StatusBadRequest, err.Error(), s.Logger)
@@ -83,9 +84,10 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
 			qr.QueryAST = stmt
 		}
 
-		if len(q.TemplateVars) > 0 {
-			qr.TemplateVars = q.TemplateVars
+		if len(req.TemplateVars) > 0 {
+			qr.TemplateVars = req.TemplateVars
 			qr.QueryConfig.RawText = &qr.Query
+			qr.QueryTemplated = &query
 		}
 
 		qr.QueryConfig.ID = q.ID
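
The net effect of routing everything through influx.TemplateReplace is that template variables are substituted once, from the request-level tempVars, before the query is parsed. Judging from the expected output in the test file in the next hunk, a selected :dbs: value of _internal turns FROM :dbs:."monitor"."httpd" into FROM "_internal"."monitor"."httpd". A toy substitution with the same shape (the replace helper below is an illustrative stand-in, not chronograf's implementation):

```go
package main

import (
	"fmt"
	"strings"
)

// tempVar mimics the selected value of a chronograf template variable.
type tempVar struct {
	Var   string // e.g. ":dbs:"
	Value string // e.g. "_internal"
}

// replace is an illustrative stand-in for influx.TemplateReplace: each
// :var: occurrence is swapped for its selected, quoted value.
func replace(query string, vars []tempVar) string {
	for _, v := range vars {
		query = strings.ReplaceAll(query, v.Var, `"`+v.Value+`"`)
	}
	return query
}

func main() {
	q := `SELECT "pingReq" FROM :dbs:."monitor"."httpd" WHERE time > now() - 1m`
	out := replace(q, []tempVar{{Var: ":dbs:", Value: "_internal"}})
	fmt.Println(out)
	// SELECT "pingReq" FROM "_internal"."monitor"."httpd" WHERE time > now() - 1m
}
```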
@@ -0,0 +1,187 @@
+package server
+
+import (
+	"bytes"
+	"context"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+
+	"github.com/bouk/httprouter"
+	"github.com/influxdata/chronograf"
+	"github.com/influxdata/chronograf/mocks"
+)
+
+func TestService_Queries(t *testing.T) {
+	tests := []struct {
+		name         string
+		SourcesStore chronograf.SourcesStore
+		ID           string
+		w            *httptest.ResponseRecorder
+		r            *http.Request
+		want         string
+	}{
+		{
+			name: "bad json",
+			SourcesStore: &mocks.SourcesStore{
+				GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
+					return chronograf.Source{
+						ID: ID,
+					}, nil
+				},
+			},
+			ID:   "1",
+			w:    httptest.NewRecorder(),
+			r:    httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`howdy`))),
+			want: `{"code":400,"message":"Unparsable JSON"}`,
+		},
+		{
+			name: "bad id",
+			ID:   "howdy",
+			w:    httptest.NewRecorder(),
+			r:    httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte{})),
+			want: `{"code":422,"message":"Error converting ID howdy"}`,
+		},
+		{
+			name: "query with no template vars",
+			SourcesStore: &mocks.SourcesStore{
+				GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
+					return chronograf.Source{
+						ID: ID,
+					}, nil
+				},
+			},
+			ID: "1",
+			w:  httptest.NewRecorder(),
+			r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
+				"queries": [
+					{
+						"query": "SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time > now() - 1m",
+						"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
+					}
+				]}`))),
+			want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"db","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":null,"range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"db","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]}}]}
+`,
+		},
+		{
+			name: "query with unparsable query",
+			SourcesStore: &mocks.SourcesStore{
+				GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
+					return chronograf.Source{
+						ID: ID,
+					}, nil
+				},
+			},
+			ID: "1",
+			w:  httptest.NewRecorder(),
+			r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
+				"queries": [
+					{
+						"query": "SHOW DATABASES",
+						"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
+					}
+				]}`))),
+			want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SHOW DATABASES","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SHOW DATABASES","range":null}}]}
+`,
+		},
+		{
+			name: "query with template vars",
+			SourcesStore: &mocks.SourcesStore{
+				GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
+					return chronograf.Source{
+						ID: ID,
+					}, nil
+				},
+			},
+			ID: "1",
+			w:  httptest.NewRecorder(),
+			r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
+				"queries": [
+					{
+						"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > now() - 1m",
+						"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
+					}
+				],
+				"tempVars": [
+					{
+						"tempVar": ":dbs:",
+						"values": [
+							{
+								"value": "_internal",
+								"type": "database",
+								"selected": true
+							}
+						],
+						"id": "792eda0d-2bb2-4de6-a86f-1f652889b044",
+						"type": "databases",
+						"label": "",
+						"query": {
+							"influxql": "SHOW DATABASES",
+							"measurement": "",
+							"tagKey": "",
+							"fieldKey": ""
+						},
+						"links": {
+							"self": "/chronograf/v1/dashboards/1/templates/792eda0d-2bb2-4de6-a86f-1f652889b044"
+						}
+					},
+					{
+						"id": "dashtime",
+						"tempVar": ":dashboardTime:",
+						"type": "constant",
+						"values": [
+							{
+								"value": "now() - 15m",
+								"type": "constant",
+								"selected": true
+							}
+						]
+					},
+					{
+						"id": "upperdashtime",
+						"tempVar": ":upperDashboardTime:",
+						"type": "constant",
+						"values": [
+							{
+								"value": "now()",
+								"type": "constant",
+								"selected": true
+							}
+						]
+					},
+					{
+						"id": "interval",
+						"type": "constant",
+						"tempVar": ":interval:",
+						"resolution": 1000,
+						"reportingInterval": 10000000000,
+						"values": []
+					}
+				]
+			}`))),
+			want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"_internal","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"_internal","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","tempVars":[{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":interval:","duration":60000000000,"resolution":1000,"reportingInterval":10000000000}]}]}
+`,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			tt.r = tt.r.WithContext(httprouter.WithParams(
+				context.Background(),
+				httprouter.Params{
+					{
+						Key:   "id",
+						Value: tt.ID,
+					},
+				}))
+			s := &Service{
+				SourcesStore: tt.SourcesStore,
+				Logger:       &mocks.TestLogger{},
+			}
+			s.Queries(tt.w, tt.r)
+			got := tt.w.Body.String()
+			if got != tt.want {
+				t.Errorf("got:\n%s\nwant:\n%s\n", got, tt.want)
+			}
+		})
+	}
+}
@@ -2357,6 +2357,7 @@
           "name": "kapa",
           "url": "http://localhost:9092",
           "active": false,
+          "insecureSkipVerify": false,
           "links": {
             "proxy": "/chronograf/v1/sources/4/kapacitors/4/proxy",
             "self": "/chronograf/v1/sources/4/kapacitors/4",
@@ -2387,6 +2388,11 @@
         "description":
           "URL for the kapacitor backend (e.g. http://localhost:9092)"
       },
+      "insecureSkipVerify": {
+        "type": "boolean",
+        "description":
+          "True means any certificate presented by the kapacitor is accepted. Typically used for self-signed certs. Probably should only be used for testing."
+      },
       "active": {
         "type": "boolean",
         "description":
@@ -12,6 +12,7 @@
     "build": "yarn run clean && env NODE_ENV=production webpack --optimize-minimize --config ./webpack/prodConfig.js",
     "build:dev": "webpack --config ./webpack/devConfig.js",
     "start": "webpack --watch --config ./webpack/devConfig.js",
+    "start:hmr": "webpack-dev-server --open --config ./webpack/devConfig.js",
     "lint": "esw src/",
     "test": "karma start",
     "test:integration": "nightwatch tests --skip",
@@ -77,6 +78,7 @@
     "mocha-loader": "^0.7.1",
     "mustache": "^2.2.1",
     "node-sass": "^4.5.3",
+    "on-build-webpack": "^0.1.0",
     "postcss-browser-reporter": "^0.4.0",
     "postcss-calc": "^5.2.0",
     "postcss-loader": "^0.8.0",
@@ -3,13 +3,16 @@ import {
   formatDate,
   dashboardtoCSV,
 } from 'shared/parsing/resultsToCSV'
+import moment from 'moment'
 
 describe('formatDate', () => {
   it('converts timestamp to an excel compatible date string', () => {
     const timestamp = 1000000000000
     const result = formatDate(timestamp)
     expect(result).to.be.a('string')
-    expect(+new Date(result)).to.equal(timestamp)
+    expect(moment(result, 'M/D/YYYY h:mm:ss.SSSSSSSSS A').valueOf()).to.equal(
+      timestamp
+    )
   })
 })
@@ -281,7 +281,8 @@ export const updateTempVarValues = (source, dashboard) => async dispatch => {
 
     results.forEach(({data}, i) => {
       const {type, query, id} = tempsWithQueries[i]
-      const vals = parsers[type](data, query.tagKey || query.measurement)[type]
+      const parsed = parsers[type](data, query.tagKey || query.measurement)
+      const vals = parsed[type]
       dispatch(editTemplateVariableValues(dashboard.id, id, vals))
     })
   } catch (error) {
@@ -6,6 +6,7 @@ import {Tabber, Tab} from 'src/dashboards/components/Tabber'
 import {DISPLAY_OPTIONS, TOOLTIP_CONTENT} from 'src/dashboards/constants'
 
 const {LINEAR, LOG, BASE_2, BASE_10} = DISPLAY_OPTIONS
+const getInputMin = scale => (scale === LOG ? '0' : null)
 
 const AxesOptions = ({
   axes: {y: {bounds, label, prefix, suffix, base, scale, defaultYLabel}},
@@ -38,6 +39,7 @@ const AxesOptions = ({
             customValue={min}
             onSetValue={onSetYAxisBoundMin}
             type="number"
+            min={getInputMin(scale)}
           />
         </div>
         <div className="form-group col-sm-6">
@@ -47,6 +49,7 @@ const AxesOptions = ({
             customValue={max}
             onSetValue={onSetYAxisBoundMax}
             type="number"
+            min={getInputMin(scale)}
           />
         </div>
         <Input
@@ -88,7 +88,14 @@ class ChronoTable extends Component {
     )
   }
 
-  makeTabName = ({name, tags}) => (tags ? `${name}.${tags[name]}` : name)
+  makeTabName = ({name, tags}) => {
+    if (!tags) {
+      return name
+    }
+    const tagKeys = Object.keys(tags).sort()
+    const tagValues = tagKeys.map(key => tags[key]).join('.')
+    return `${name}.${tagValues}`
+  }
 
   render() {
     const {containerWidth, height, query} = this.props
@@ -135,9 +142,13 @@ class ChronoTable extends Component {
           </div>
         : <Dropdown
             className="dropdown-160 table--tabs-dropdown"
-            items={series.map((s, index) => ({...s, text: s.name, index}))}
+            items={series.map((s, index) => ({
+              ...s,
+              text: this.makeTabName(s),
+              index,
+            }))}
             onChoose={this.handleClickDropdown}
-            selected={series[activeSeriesIndex].name}
+            selected={this.makeTabName(series[activeSeriesIndex])}
             buttonSize="btn-xs"
           />}
       <div className="table--tabs-content">
@@ -283,11 +283,13 @@ Dygraph.prototype.findClosestPoint = function(domX, domY) {
         minYDist = ydist
         closestRow = point.idx
         closestSeries = setIdx
+        closestPoint = point
       } else if (xdist === minXDist && ydist < minYDist) {
         minXDist = xdist
         minYDist = ydist
         closestRow = point.idx
         closestSeries = setIdx
+        closestPoint = point
       }
     }
   }
@@ -11,7 +11,7 @@ export function getCpuAndLoadForHosts(proxyLink, telegrafDB) {
       SELECT mean("Percent_Processor_Time") FROM win_cpu WHERE time > now() - 10m GROUP BY host;
       SELECT mean("Processor_Queue_Length") FROM win_system WHERE time > now() - 10s GROUP BY host;
       SELECT non_negative_derivative(mean("System_Up_Time")) AS winDeltaUptime FROM win_system WHERE time > now() - 10m GROUP BY host, time(1m) fill(0);
-      SHOW TAG VALUES FROM /win_system|system/ WITH KEY = "host"`,
+      SHOW TAG VALUES WITH KEY = "host";`,
     db: telegrafDB,
   }).then(resp => {
     const hosts = {}
@@ -87,7 +87,7 @@ export async function getAllHosts(proxyLink, telegrafDB) {
   try {
     const resp = await proxy({
       source: proxyLink,
-      query: 'show tag values from /win_system|system/ with key = "host"',
+      query: 'show tag values with key = "host"',
       db: telegrafDB,
     })
    const hosts = {}
@@ -1,11 +1,11 @@
 import React, {PropTypes} from 'react'
-import {CHANGES, OPERATORS, SHIFTS} from 'src/kapacitor/constants'
+import {CHANGES, RELATIVE_OPERATORS, SHIFTS} from 'src/kapacitor/constants'
 import Dropdown from 'shared/components/Dropdown'
 
 const mapToItems = (arr, type) => arr.map(text => ({text, type}))
 const changes = mapToItems(CHANGES, 'change')
 const shifts = mapToItems(SHIFTS, 'shift')
-const operators = mapToItems(OPERATORS, 'operator')
+const operators = mapToItems(RELATIVE_OPERATORS, 'operator')
 
 const Relative = ({
   onRuleTypeInputChange,
@@ -1,10 +1,10 @@
 import React, {PropTypes} from 'react'
-import {OPERATORS} from 'src/kapacitor/constants'
+import {THRESHOLD_OPERATORS} from 'src/kapacitor/constants'
 import Dropdown from 'shared/components/Dropdown'
 import _ from 'lodash'
 
 const mapToItems = (arr, type) => arr.map(text => ({text, type}))
-const operators = mapToItems(OPERATORS, 'operator')
+const operators = mapToItems(THRESHOLD_OPERATORS, 'operator')
 const noopSubmit = e => e.preventDefault()
 const getField = ({fields}) => {
   const alias = _.get(fields, ['0', 'alias'], false)
@@ -31,7 +31,7 @@ export const OUTSIDE_RANGE = 'outside range'
 export const EQUAL_TO_OR_GREATER_THAN = 'equal to or greater'
 export const EQUAL_TO_OR_LESS_THAN = 'equal to or less than'
 
-export const OPERATORS = [
+export const THRESHOLD_OPERATORS = [
   GREATER_THAN,
   EQUAL_TO_OR_GREATER_THAN,
   EQUAL_TO_OR_LESS_THAN,
@@ -42,6 +42,15 @@ export const OPERATORS = [
   OUTSIDE_RANGE,
 ]
 
+export const RELATIVE_OPERATORS = [
+  GREATER_THAN,
+  EQUAL_TO_OR_GREATER_THAN,
+  EQUAL_TO_OR_LESS_THAN,
+  LESS_THAN,
+  EQUAL_TO,
+  NOT_EQUAL_TO,
+]
+
 // export const RELATIONS = ['once', 'more than ', 'less than'];
 export const PERIODS = ['1m', '5m', '10m', '30m', '1h', '2h', '24h']
 export const CHANGES = ['change', '% change']
@@ -136,13 +136,6 @@ const AutoRefresh = ComposedComponent => {
         return this.renderFetching(timeSeries)
       }
 
-      if (
-        !this._resultsForQuery(timeSeries) ||
-        !this.state.lastQuerySuccessful
-      ) {
-        return this.renderNoResults()
-      }
-
       return (
         <ComposedComponent
           {...this.props}
@@ -169,14 +162,6 @@ const AutoRefresh = ComposedComponent => {
       )
     }
 
-    renderNoResults = () => {
-      return (
-        <div className="graph-empty">
-          <p data-test="data-explorer-no-results">No Results</p>
-        </div>
-      )
-    }
-
     _resultsForQuery = data =>
       data.length
         ? data.every(({response}) =>
@@ -14,6 +14,7 @@ class ClickOutsideInput extends Component {
   render() {
     const {
       id,
+      min,
      type,
       onFocus,
       onChange,
@@ -27,6 +28,7 @@ class ClickOutsideInput extends Component {
       <input
         className="form-control input-sm"
         id={id}
+        min={min}
         type={type}
         name={customPlaceholder}
         ref={onGetRef}
@@ -43,6 +45,7 @@ class ClickOutsideInput extends Component {
 const {func, string} = PropTypes
 
 ClickOutsideInput.propTypes = {
+  min: string,
   id: string.isRequired,
   type: string.isRequired,
   customPlaceholder: string.isRequired,
@@ -97,7 +97,7 @@ const DatabaseList = React.createClass({
 
     return (
       <div className="query-builder--column query-builder--column-db">
-        <div className="query-builder--heading">Databases</div>
+        <div className="query-builder--heading">DB.RetentionPolicy</div>
        <div className="query-builder--list">
           <FancyScrollbar>
             {sortedNamespaces.map(namespace => {
@@ -29,6 +29,7 @@ export default class Dygraph extends Component {
         x: null,
         series: [],
       },
+      pageX: null,
       sortType: '',
       filterText: '',
       isSynced: false,
@@ -36,14 +37,12 @@ export default class Dygraph extends Component {
       isAscending: true,
       isSnipped: false,
       isFilterVisible: false,
-      legendArrowPosition: 'top',
     }
   }
 
   componentDidMount() {
     const {
       axes: {y, y2},
-      ruleValues,
       isGraphFilled: fillGraph,
       isBarGraph,
       options,
@@ -63,9 +62,7 @@ export default class Dygraph extends Component {
       plugins: [new Dygraphs.Plugins.Crosshair({direction: 'vertical'})],
       axes: {
         y: {
-          valueRange: options.stackedGraph
-            ? getStackedRange(y.bounds)
-            : getRange(timeSeries, y.bounds, ruleValues),
+          valueRange: this.getYRange(timeSeries),
           axisLabelFormatter: (yval, __, opts) =>
             numberValueFormatter(yval, opts, y.prefix, y.suffix),
           axisLabelWidth: this.getLabelWidth(),
@@ -130,7 +127,7 @@ export default class Dygraph extends Component {
   }
 
   componentDidUpdate() {
-    const {labels, axes: {y, y2}, options, ruleValues, isBarGraph} = this.props
+    const {labels, axes: {y, y2}, options, isBarGraph} = this.props
 
     const dygraph = this.dygraph
     if (!dygraph) {
@@ -149,9 +146,7 @@ export default class Dygraph extends Component {
       ylabel: this.getLabel('y'),
       axes: {
         y: {
-          valueRange: options.stackedGraph
-            ? getStackedRange(y.bounds)
-            : getRange(timeSeries, y.bounds, ruleValues),
+          valueRange: this.getYRange(timeSeries),
           axisLabelFormatter: (yval, __, opts) =>
             numberValueFormatter(yval, opts, y.prefix, y.suffix),
           axisLabelWidth: this.getLabelWidth(),
@@ -171,9 +166,26 @@ export default class Dygraph extends Component {
     dygraph.updateOptions(updateOptions)
 
     const {w} = this.dygraph.getArea()
-    this.resize()
-    this.dygraph.resize()
     this.props.setResolution(w)
+    this.resize()
+  }
+
+  getYRange = timeSeries => {
+    const {options, axes: {y}, ruleValues} = this.props
+
+    if (options.stackedGraph) {
+      return getStackedRange(y.bounds)
+    }
+
+    const range = getRange(timeSeries, y.bounds, ruleValues)
+    const [min, max] = range
+
+    // Bug in Dygraph calculates a negative range for logscale when min range is 0
+    if (y.scale === LOG && timeSeries.length === 1 && min <= 0) {
+      return [0.1, max]
+    }
+
+    return range
   }
 
   handleZoom = (lower, upper) => {
@@ -298,6 +310,7 @@ export default class Dygraph extends Component {
   resize = () => {
     this.dygraph.resizeElements_()
     this.dygraph.predraw_()
+    this.dygraph.resize()
   }
 
   formatTimeRange = timeRange => {
@@ -341,64 +354,8 @@ export default class Dygraph extends Component {
     }
   }
 
-  highlightCallback = e => {
-    const chronografChromeSize = 60 // Width & Height of navigation page elements
-
-    // Move the Legend on hover
-    const graphRect = this.graphRef.getBoundingClientRect()
-    const legendRect = this.legendRef.getBoundingClientRect()
-
-    const graphWidth = graphRect.width + 32 // Factoring in padding from parent
-    const graphHeight = graphRect.height
-    const graphBottom = graphRect.bottom
-    const legendWidth = legendRect.width
-    const legendHeight = legendRect.height
-    const screenHeight = window.innerHeight
-    const legendMaxLeft = graphWidth - legendWidth / 2
-    const trueGraphX = e.pageX - graphRect.left
-
-    let legendLeft = trueGraphX
-
-    // Enforcing max & min legend offsets
-    if (trueGraphX < legendWidth / 2) {
-      legendLeft = legendWidth / 2
-    } else if (trueGraphX > legendMaxLeft) {
-      legendLeft = legendMaxLeft
-    }
-
-    // Disallow screen overflow of legend
-    const isLegendBottomClipped = graphBottom + legendHeight > screenHeight
-    const isLegendTopClipped =
-      legendHeight > graphRect.top - chronografChromeSize
-    const willLegendFitLeft = e.pageX - chronografChromeSize > legendWidth
-
-    let legendTop = graphHeight + 8
-    this.setState({legendArrowPosition: 'top'})
-
-    // If legend is only clipped on the bottom, position above graph
-    if (isLegendBottomClipped && !isLegendTopClipped) {
-      this.setState({legendArrowPosition: 'bottom'})
-      legendTop = -legendHeight
-    }
-    // If legend is clipped on top and bottom, posiition on either side of crosshair
-    if (isLegendBottomClipped && isLegendTopClipped) {
-      legendTop = 0
-
-      if (willLegendFitLeft) {
-        this.setState({legendArrowPosition: 'right'})
-        legendLeft = trueGraphX - legendWidth / 2
-        legendLeft -= 8
-      } else {
-        this.setState({legendArrowPosition: 'left'})
-        legendLeft = trueGraphX + legendWidth / 2
-        legendLeft += 32
-      }
-    }
-
-    this.legendRef.style.left = `${legendLeft}px`
-    this.legendRef.style.top = `${legendTop}px`
-
-    this.setState({isHidden: false})
+  highlightCallback = ({pageX}) => {
+    this.setState({isHidden: false, pageX})
   }
 
   legendFormatter = legend => {
@@ -424,12 +381,12 @@ export default class Dygraph extends Component {
   render() {
     const {
       legend,
+      pageX,
       sortType,
       isHidden,
       isSnipped,
       filterText,
       isAscending,
-      legendArrowPosition,
       isFilterVisible,
     } = this.state
 
@@ -437,6 +394,9 @@ export default class Dygraph extends Component {
       <div className="dygraph-child" onMouseLeave={this.deselectCrosshair}>
         <DygraphLegend
           {...legend}
+          graph={this.graphRef}
+          legend={this.legendRef}
+          pageX={pageX}
           sortType={sortType}
           onHide={this.handleHideLegend}
           isHidden={isHidden}
@@ -449,7 +409,6 @@ export default class Dygraph extends Component {
           legendRef={this.handleLegendRef}
           onToggleFilter={this.handleToggleFilter}
           onInputChange={this.handleLegendInputChange}
-          arrowPosition={legendArrowPosition}
         />
         <div
           ref={r => {
@@ -1,6 +1,9 @@
 import React, {PropTypes} from 'react'
 import _ from 'lodash'
 import classnames from 'classnames'
+import uuid from 'node-uuid'
+
+import {makeLegendStyles} from 'shared/graphs/helpers'
 
 const removeMeasurement = (label = '') => {
   const [measurement] = label.match(/^(.*)[.]/g) || ['']
@@ -9,6 +12,9 @@ const removeMeasurement = (label = '') => {
 
 const DygraphLegend = ({
   xHTML,
+  pageX,
+  graph,
+  legend,
   series,
   onSort,
   onSnip,
@@ -20,7 +26,6 @@ const DygraphLegend = ({
   filterText,
   isAscending,
   onInputChange,
-  arrowPosition,
   isFilterVisible,
   onToggleFilter,
 }) => {
@@ -28,9 +33,11 @@ const DygraphLegend = ({
     series,
     ({y, label}) => (sortType === 'numeric' ? y : label)
   )
 
   const ordered = isAscending ? sorted : sorted.reverse()
   const filtered = ordered.filter(s => s.label.match(filterText))
   const hidden = isHidden ? 'hidden' : ''
+  const style = makeLegendStyles(graph, legend, pageX)
 
   const renderSortAlpha = (
     <div
@@ -62,12 +69,12 @@ const DygraphLegend = ({
       <div className="sort-btn--bottom">9</div>
     </button>
   )
 
   return (
     <div
-      className={`dygraph-legend dygraph-legend--${arrowPosition} ${hidden}`}
+      className={`dygraph-legend ${hidden}`}
       ref={legendRef}
       onMouseLeave={onHide}
+      style={style}
     >
       <div className="dygraph-legend--header">
         <div className="dygraph-legend--timestamp">
@@ -111,7 +118,7 @@ const DygraphLegend = ({
               ? 'dygraph-legend--row highlight'
               : 'dygraph-legend--row'
             return (
-              <div key={label + color} className={seriesClass}>
+              <div key={uuid.v4()} className={seriesClass}>
                <span style={{color}}>
                   {isSnipped ? removeMeasurement(label) : label}
                 </span>
@@ -141,7 +148,9 @@ DygraphLegend.propTypes = {
       yHTML: string,
     })
   ),
-  dygraph: shape(),
+  pageX: number,
+  legend: shape({}),
+  graph: shape({}),
   onSnip: func.isRequired,
   onHide: func.isRequired,
   onSort: func.isRequired,
@@ -154,7 +163,6 @@ DygraphLegend.propTypes = {
   legendRef: func.isRequired,
   isSnipped: bool.isRequired,
   isFilterVisible: bool.isRequired,
-  arrowPosition: string.isRequired,
 }
 
 export default DygraphLegend
@@ -99,7 +99,7 @@ class OptIn extends Component {
   handleInputRef = el => (this.customValueInput = el)
 
   render() {
-    const {fixedPlaceholder, customPlaceholder, type} = this.props
+    const {fixedPlaceholder, customPlaceholder, type, min} = this.props
     const {useCustomValue, customValue} = this.state
 
     return (
@@ -110,6 +110,7 @@ class OptIn extends Component {
       >
         <ClickOutsideInput
           id={this.id}
+          min={min}
          type={type}
           customValue={customValue}
           onGetRef={this.handleInputRef}
@@ -119,7 +120,6 @@ class OptIn extends Component {
           onKeyDown={this.handleKeyDownCustomValueInput}
           handleClickOutsideInput={this.handleClickOutsideInput}
         />
-
         <div
           className="opt-in--groove-knob-container"
           id={this.id}
@@ -141,15 +141,16 @@ class OptIn extends Component {
 }
 
 OptIn.defaultProps = {
-  fixedPlaceholder: 'auto',
   fixedValue: '',
   customPlaceholder: 'Custom Value',
+  fixedPlaceholder: 'auto',
   customValue: '',
 }
 
 const {func, oneOf, string} = PropTypes
 
 OptIn.propTypes = {
+  min: string,
   fixedPlaceholder: string,
   fixedValue: string,
   customPlaceholder: string,
@@ -114,6 +114,65 @@ export const barPlotter = e => {
   }
 }
 
+export const makeLegendStyles = (graph, legend, pageX) => {
+  if (!graph || !legend || pageX === null) {
+    return {}
+  }
+
+  // Move the Legend on hover
+  const chronografChromeSize = 60 // Width & Height of navigation page elements
+  const graphRect = graph.getBoundingClientRect()
+  const legendRect = legend.getBoundingClientRect()
+
+  const graphWidth = graphRect.width + 32 // Factoring in padding from parent
+  const graphHeight = graphRect.height
+  const graphBottom = graphRect.bottom
+  const legendWidth = legendRect.width
+  const legendHeight = legendRect.height
+  const screenHeight = window.innerHeight
+  const legendMaxLeft = graphWidth - legendWidth / 2
+  const trueGraphX = pageX - graphRect.left
+
+  let legendLeft = trueGraphX
+
+  // Enforcing max & min legend offsets
+  if (trueGraphX < legendWidth / 2) {
+    legendLeft = legendWidth / 2
+  } else if (trueGraphX > legendMaxLeft) {
+    legendLeft = legendMaxLeft
+  }
+
+  // Disallow screen overflow of legend
+  const isLegendBottomClipped = graphBottom + legendHeight > screenHeight
+  const isLegendTopClipped = legendHeight > graphRect.top - chronografChromeSize
+  const willLegendFitLeft = pageX - chronografChromeSize > legendWidth
+
+  let legendTop = graphHeight + 8
+
+  // If legend is only clipped on the bottom, position above graph
+  if (isLegendBottomClipped && !isLegendTopClipped) {
+    legendTop = -legendHeight
+  }
+
+  // If legend is clipped on top and bottom, position on either side of crosshair
+  if (isLegendBottomClipped && isLegendTopClipped) {
+    legendTop = 0
+
+    if (willLegendFitLeft) {
+      legendLeft = trueGraphX - legendWidth / 2
+      legendLeft -= 8
+    } else {
+      legendLeft = trueGraphX + legendWidth / 2
+      legendLeft += 32
+    }
+  }
+
+  return {
+    left: `${legendLeft}px`,
+    top: `${legendTop}px`,
+  }
+}
+
 export const OPTIONS = {
   rightGap: 0,
   axisLineWidth: 2,
@@ -1,3 +1,4 @@
+import _ from 'lodash'
 import databases from 'shared/parsing/showDatabases'
 import measurements from 'shared/parsing/showMeasurements'
 import fieldKeys from 'shared/parsing/showFieldKeys'
@@ -8,16 +9,19 @@ const parsers = {
   databases,
   measurements: data => {
     const {errors, measurementSets} = measurements(data)
-    return {errors, measurements: measurementSets[0].measurements}
+    return {
+      errors,
+      measurements: _.get(measurementSets, ['0', 'measurements'], []),
+    }
   },
   fieldKeys: (data, key) => {
     const {errors, fieldSets} = fieldKeys(data)
-    return {errors, fieldKeys: fieldSets[key]}
+    return {errors, fieldKeys: _.get(fieldSets, key, [])}
   },
   tagKeys,
   tagValues: (data, key) => {
     const {errors, tags} = tagValues(data)
-    return {errors, tagValues: tags[key]}
+    return {errors, tagValues: _.get(tags, key, [])}
   },
 }
@@ -2,7 +2,7 @@ import _ from 'lodash'
 import moment from 'moment'
 
 export const formatDate = timestamp =>
-  moment(timestamp).format('M/D/YYYY h:mm:ss A')
+  moment(timestamp).format('M/D/YYYY h:mm:ss.SSSSSSSSS A')
 
 export const resultsToCSV = results => {
   if (!_.get(results, ['0', 'series', '0'])) {
@@ -1,11 +1,16 @@
-var webpack = require('webpack');
-var path = require('path');
-var ExtractTextPlugin = require("extract-text-webpack-plugin");
-var HtmlWebpackPlugin = require("html-webpack-plugin");
-var package = require('../package.json');
-var dependencies = package.dependencies;
+var webpack = require('webpack')
+var path = require('path')
+var ExtractTextPlugin = require('extract-text-webpack-plugin')
+var HtmlWebpackPlugin = require('html-webpack-plugin')
+var package = require('../package.json')
+const WebpackOnBuildPlugin = require('on-build-webpack')
+const fs = require('fs')
+var dependencies = package.dependencies
+
+const buildDir = path.resolve(__dirname, '../build')
 
 module.exports = {
+  watch: true,
   devtool: 'source-map',
   entry: {
     app: path.resolve(__dirname, '..', 'src', 'index.js'),
@@ -14,7 +19,7 @@ module.exports = {
   output: {
     publicPath: '/',
     path: path.resolve(__dirname, '../build'),
-    filename: '[name].[chunkhash].dev.js',
+    filename: '[name].[hash].dev.js',
   },
   resolve: {
     alias: {
@@ -48,15 +53,21 @@ module.exports = {
      },
       {
         test: /\.scss$/,
-        loader: ExtractTextPlugin.extract('style-loader', 'css-loader!sass-loader!resolve-url!sass?sourceMap'),
+        loader: ExtractTextPlugin.extract(
+          'style-loader',
+          'css-loader!sass-loader!resolve-url!sass?sourceMap'
+        ),
       },
       {
         test: /\.css$/,
-        loader: ExtractTextPlugin.extract('style-loader', 'css-loader!postcss-loader'),
+        loader: ExtractTextPlugin.extract(
+          'style-loader',
+          'css-loader!postcss-loader'
+        ),
       },
       {
-        test : /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
-        loader : 'file',
+        test: /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
+        loader: 'file',
       },
       {
         test: /\.js$/,
@@ -70,18 +81,19 @@ module.exports = {
     ],
   },
   sassLoader: {
-    includePaths: [path.resolve(__dirname, "node_modules")],
+    includePaths: [path.resolve(__dirname, 'node_modules')],
   },
   eslint: {
     failOnWarning: false,
     failOnError: false,
   },
   plugins: [
+    new webpack.HotModuleReplacementPlugin(),
     new webpack.ProvidePlugin({
-      $: "jquery",
-      jQuery: "jquery",
+      $: 'jquery',
+      jQuery: 'jquery',
     }),
-    new ExtractTextPlugin("chronograf.css"),
+    new ExtractTextPlugin('chronograf.css'),
     new HtmlWebpackPlugin({
       template: path.resolve(__dirname, '..', 'src', 'index.template.html'),
       inject: 'body',
@@ -93,7 +105,45 @@ module.exports = {
     new webpack.DefinePlugin({
       VERSION: JSON.stringify(require('../package.json').version),
     }),
+    new WebpackOnBuildPlugin(function(stats) {
+      const newlyCreatedAssets = stats.compilation.assets
+
+      const unlinked = []
+      fs.readdir(path.resolve(buildDir), (err, files) => {
+        files.forEach(file => {
+          if (!newlyCreatedAssets[file]) {
+            const del = path.resolve(buildDir + file)
+            fs.stat(del, function(err, stat) {
+              if (err == null) {
+                try {
+                  fs.unlink(path.resolve(buildDir + file))
+                  unlinked.push(file)
+                } catch (e) {}
+              }
+            })
+          }
+        })
+      })
+    }),
   ],
   postcss: require('./postcss'),
   target: 'web',
-};
+  devServer: {
+    hot: true,
+    historyApiFallback: true,
+    clientLogLevel: 'info',
+    stats: {colors: true},
+    contentBase: 'build',
+    quiet: false,
+    watchOptions: {
+      aggregateTimeout: 300,
+      poll: 1000,
+    },
+    proxy: {
+      '/chronograf/v1': {
+        target: 'http://localhost:8888',
+        secure: false,
+      },
+    },
+  },
+}
@@ -1,14 +1,14 @@
 /* eslint-disable no-var */
-var webpack = require('webpack');
-var path = require('path');
-var ExtractTextPlugin = require("extract-text-webpack-plugin");
-var HtmlWebpackPlugin = require("html-webpack-plugin");
-var package = require('../package.json');
-var dependencies = package.dependencies;
+var webpack = require('webpack')
+var path = require('path')
+var ExtractTextPlugin = require('extract-text-webpack-plugin')
+var HtmlWebpackPlugin = require('html-webpack-plugin')
+var package = require('../package.json')
+var dependencies = package.dependencies

 var config = {
   bail: true,
   devtool: 'eval',
   entry: {
     app: path.resolve(__dirname, '..', 'src', 'index.js'),
     vendor: Object.keys(dependencies),
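This second file appears to be the production webpack config; the hunk itself only drops semicolons and normalizes quotes. For context on the unchanged lines: vendor: Object.keys(dependencies) turns every runtime dependency from package.json into a single vendor entry, which the CommonsChunkPlugin in a later hunk splits out. A minimal sketch of that pairing (webpack 1 API; paths illustrative):

    var webpack = require('webpack')

    module.exports = {
      entry: {
        app: './src/index.js',
        // All third-party code goes into one chunk...
        vendor: Object.keys(require('./package.json').dependencies),
      },
      plugins: [
        // ...which only changes when dependencies change, so browsers can
        // keep caching it across application releases; 'manifest' holds
        // the tiny runtime so the vendor hash stays stable too.
        new webpack.optimize.CommonsChunkPlugin({names: ['vendor', 'manifest']}),
      ],
    }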
@@ -28,6 +28,15 @@ var config = {
     },
   },
   module: {
+    noParse: [
+      path.resolve(
+        __dirname,
+        '..',
+        'node_modules',
+        'memoizerific',
+        'memoizerific.js'
+      ),
+    ],
     preLoaders: [
       {
         test: /\.js$/,
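module.noParse tells webpack to emit the listed file verbatim instead of scanning it for require/import calls; that is only safe for self-contained, pre-bundled files, and memoizerific ships exactly such a bundle, so skipping the parse avoids wasted work on a file with no dependencies left to resolve. For reference, webpack 1 also accepts a RegExp form of the same setting; a sketch of the equivalent intent:

    module: {
      // Same effect as the absolute-path entry above, expressed as a RegExp:
      noParse: /node_modules[\/\\]memoizerific[\/\\]memoizerific\.js$/,
    },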
@@ -42,15 +51,21 @@ var config = {
       },
       {
         test: /\.scss$/,
-        loader: ExtractTextPlugin.extract('style-loader', 'css-loader!sass-loader!resolve-url!sass?sourceMap'),
+        loader: ExtractTextPlugin.extract(
+          'style-loader',
+          'css-loader!sass-loader!resolve-url!sass?sourceMap'
+        ),
       },
       {
         test: /\.css$/,
-        loader: ExtractTextPlugin.extract('style-loader', 'css-loader!postcss-loader'),
+        loader: ExtractTextPlugin.extract(
+          'style-loader',
+          'css-loader!postcss-loader'
+        ),
       },
       {
-        test : /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
-        loader : 'file',
+        test: /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
+        loader: 'file',
       },
       {
         test: /\.js$/,
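The multi-line ExtractTextPlugin.extract(fallback, loaders) calls are behavior-neutral reformatting. The first argument is the loader used when extraction is disabled (style-loader injects <style> tags at runtime); the second is a !-separated chain, applied right to left, whose CSS output the new ExtractTextPlugin('chronograf.css') instance below collects into one stylesheet. Roughly, for the .css rule:

    {
      test: /\.css$/,
      loader: ExtractTextPlugin.extract(
        'style-loader', // fallback when extraction is off
        // right to left: postcss-loader transforms the CSS first,
        // then css-loader resolves url()/@import into module requests
        'css-loader!postcss-loader'
      ),
    },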
@@ -74,10 +89,10 @@ var config = {
       },
     }),
     new webpack.ProvidePlugin({
-      $: "jquery",
-      jQuery: "jquery",
+      $: 'jquery',
+      jQuery: 'jquery',
     }),
-    new ExtractTextPlugin("chronograf.css"),
+    new ExtractTextPlugin('chronograf.css'),
     new HtmlWebpackPlugin({
       template: path.resolve(__dirname, '..', 'src', 'index.template.html'),
       inject: 'body',
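Beyond the quote style, nothing changes here, but ProvidePlugin deserves a note: it rewrites free references to $ and jQuery into an automatic require('jquery'), so legacy-style code compiles without an explicit import. A hypothetical module relying on it:

    // No jquery import needed: ProvidePlugin injects require('jquery')
    // for the free `$` at compile time.
    export function highlight(selector) {
      $(selector).addClass('highlight')
    }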
@@ -86,21 +101,28 @@ var config = {
     }),
     new webpack.optimize.UglifyJsPlugin({
       compress: {
-        warnings: false
-      }
+        warnings: false,
+      },
     }),
     new webpack.optimize.CommonsChunkPlugin({
       names: ['vendor', 'manifest'],
     }),
-    function() { /* Webpack does not exit with non-zero status if error. */
-      this.plugin("done", function(stats) {
-        if (stats.compilation.errors && stats.compilation.errors.length && process.argv.indexOf("--watch") == -1) {
-          console.log(stats.compilation.errors.toString({
-            colors: true
-          }));
-          process.exit(1);
+    function() {
+      /* Webpack does not exit with non-zero status if error. */
+      this.plugin('done', function(stats) {
+        if (
+          stats.compilation.errors &&
+          stats.compilation.errors.length &&
+          process.argv.indexOf('--watch') == -1
+        ) {
+          console.log(
+            stats.compilation.errors.toString({
+              colors: true,
+            })
+          )
+          process.exit(1)
         }
-      });
+      })
     },
     new webpack.DefinePlugin({
       VERSION: JSON.stringify(require('../package.json').version),
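The reformatted inline plugin addresses a real webpack 1 gap: a failed compile still exits with status 0, so CI would report success. Any function placed in plugins is invoked with the compiler as this, letting it register a 'done' hook and force a non-zero exit outside --watch mode. One subtlety: {colors: true} is passed to Array.prototype.toString here, where it is ignored; stats.toString is where that option actually applies. The same pattern as a named plugin, a minimal sketch:

    // Exit non-zero on compile errors, except in --watch mode where the
    // process should stay alive across rebuilds.
    function ExitOnBuildErrorPlugin() {}

    ExitOnBuildErrorPlugin.prototype.apply = function(compiler) {
      compiler.plugin('done', function(stats) {
        var errors = stats.compilation.errors
        if (errors && errors.length && process.argv.indexOf('--watch') === -1) {
          console.log(stats.toString({colors: true})) // colors applied here
          process.exit(1)
        }
      })
    }

    module.exports = ExitOnBuildErrorPlugin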
@@ -108,6 +130,6 @@ var config = {
   ],
   postcss: require('./postcss'),
   target: 'web',
-};
+}

-module.exports = config;
+module.exports = config
@@ -5134,6 +5134,10 @@ object.values@^1.0.3:
     function-bind "^1.1.0"
     has "^1.0.1"

+on-build-webpack@^0.1.0:
+  version "0.1.0"
+  resolved "https://registry.yarnpkg.com/on-build-webpack/-/on-build-webpack-0.1.0.tgz#a287c0e17766e6141926e5f2cbb0d8bb53b76814"
+
 on-finished@~2.3.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947"
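The yarn.lock hunk pins on-build-webpack 0.1.0, the package behind the WebpackOnBuildPlugin used in the dev config above. An entry like this is normally generated by running yarn add --dev on-build-webpack, together with a matching package.json change that is not part of this extract.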