Merge branch 'master' into multitenancy

pull/10616/head
Jared Scheib 2017-11-14 00:05:18 -08:00
commit 59fb016a4e
45 changed files with 1045 additions and 319 deletions

View File

@ -1,5 +1,15 @@
## v1.3.11.0 [unreleased]
### Bug Fixes
1. [#2157](https://github.com/influxdata/chronograf/pull/2157): Fix logscale producing console errors when only one point in graph
1. [#2158](https://github.com/influxdata/chronograf/pull/2158): Fix 'Cannot connect to source' false error flag on Dashboard page
1. [#2167](https://github.com/influxdata/chronograf/pull/2167): Add fractions of seconds to time field in csv export
1. [#2087](https://github.com/influxdata/chronograf/pull/2087): Fix Chronograf requiring Telegraf's CPU and system plugins to ensure that all Apps appear on the HOST LIST page.
1. [#2222](https://github.com/influxdata/chronograf/pull/2222): Fix template variables in dashboard query building.
1. [#2291](https://github.com/influxdata/chronograf/pull/2291): Fix several kapacitor alert creation panics.
1. [#2303](https://github.com/influxdata/chronograf/pull/2303): Add shadow-utils to RPM release packages
1. [#2292](https://github.com/influxdata/chronograf/pull/2292): Source extra command line options from defaults file
1. [#2329](https://github.com/influxdata/chronograf/pull/2329): Include tag values alongside measurement name in Data Explorer result tabs
### Features
### UI Improvements

View File

@ -1,4 +1,4 @@
.PHONY: assets dep clean test gotest gotestrace jstest run run-dev ctags continuous
.PHONY: assets dep clean test gotest gotestrace jstest run run-dev run-hmr ctags
VERSION ?= $(shell git describe --always --tags)
COMMIT ?= $(shell git rev-parse --short=8 HEAD)
@ -23,14 +23,42 @@ ${BINARY}: $(SOURCES) .bindata .jsdep .godep
go build -o ${BINARY} ${LDFLAGS} ./cmd/chronograf/main.go
define CHRONOGIRAFFE
._ o o
\_`-)|_
,"" _\_
," ## | 0 0.
," ## ,-\__ `.
," / `--._;) - "HAI, I'm Chronogiraffe. Let's be friends!"
," ## /
," ## /
tLf iCf.
.CCC. tCC:
CGG; CGG:
tG0Gt: GGGGGGGGGGGGGGGG1 .,:,
LG1,,:1CC: .GGL;iLC1iii1LCi;GG1 .1GCL1iGG1
LG1:::;i1CGGt;;;;;;L0t;;;;;;GGGC1;;::,iGC
,ii:. 1GG1iiii;;tfiC;;;;;;;GGCfCGCGGC,
fGCiiiiGi1Lt;;iCLL,i;;;CGt
fGG11iiii1C1iiiiiGt1;;;;;CGf
.GGLLL1i1CitfiiL1iCi;;iLCGGt
.CGL11LGCCCCCCCLLCGG1;1GG;
CGL1tf1111iiiiiiL1ifGG,
LGCff1fCt1tCfiiCiCGC
LGGf111111111iCGGt
fGGGGGGGGGGGGGGi
ifii111111itL
;f1i11111iitf
;f1iiiiiii1tf
:fi111iii11tf
:fi111ii1i1tf
:f111111ii1tt
,L111111ii1tt
.Li1111i1111CCCCCCCCCCCCCCLt;
L111ii11111ittttt1tttttittti1fC;
f1111ii111i1ttttt1;iii1ittt1ttttCt.
tt11ii111tti1ttt1tt1;11;;;;iitttifCCCL,
11i1i11ttttti;1t1;;;ttt1;;ii;itti;L,;CCL
;f;;;;1tttti;;ttti;;;;;;;;;;;1tt1ifi .CCi
,L;itti;;;it;;;;;tt1;;;t1;;;;;;ii;t; :CC,
L;;;;iti;;;;;;;;;;;;;;;;;;;;;;;i;L, ;CC.
ti;;;iLLfffi;;;;;ittt11i;;;;;;;;;L tCCfff;
it;;;;;;L,ti;;;;;1Ltttft1t;;;;;;1t ;CCCL;
:f;;;;;;L.ti;;;;;tftttf1,f;;;;;;f: ;CC1:
.L;;;;;;L.t1;;;;;tt111fi,f;;;;;;L.
1Li;;iL1 :Ci;;;tL1i1fC, Lt;;;;Li
.;tt; ifLt:;fLf; ;LCCt,
endef
export CHRONOGIRAFFE
chronogiraffe: ${BINARY}
@ -106,6 +134,9 @@ run: ${BINARY}
run-dev: chronogiraffe
./chronograf -d --log-level=debug
run-hmr:
cd ui && npm run start:hmr
clean:
if [ -f ${BINARY} ] ; then rm ${BINARY} ; fi
cd ui && yarn run clean

View File

@ -566,14 +566,15 @@ type KapacitorProperty struct {
// Server represents a proxy connection to an HTTP server
type Server struct {
ID int // ID is the unique ID of the server
SrcID int // SrcID of the data source
Name string // Name is the user-defined name for the server
Username string // Username is the username to connect to the server
Password string // Password is in CLEARTEXT
URL string // URL are the connections to the server
Active bool // Is this the active server for the source?
Organization string // Organization is the organization ID that resource belongs to
ID int // ID is the unique ID of the server
SrcID int // SrcID of the data source
Name string // Name is the user-defined name for the server
Username string // Username is the username to connect to the server
Password string // Password is in CLEARTEXT
URL string // URL are the connections to the server
InsecureSkipVerify bool // InsecureSkipVerify as true means any certificate presented by the server is accepted.
Active bool // Is this the active server for the source?
Organization string // Organization is the organization ID that resource belongs to
}
// ServersStore stores connection information for a `Server`

View File

@ -3,7 +3,7 @@ machine:
services:
- docker
environment:
DOCKER_TAG: chronograf-20170516
DOCKER_TAG: chronograf-20171027
dependencies:
override:

View File

@ -18,7 +18,7 @@ RUN pip install boto requests python-jose --upgrade
RUN gem install fpm
# Install node
ENV NODE_VERSION v6.10.3
ENV NODE_VERSION v6.11.5
RUN wget -q https://nodejs.org/dist/latest-v6.x/node-${NODE_VERSION}-linux-x64.tar.gz; \
tar -xvf node-${NODE_VERSION}-linux-x64.tar.gz -C / --strip-components=1; \
rm -f node-${NODE_VERSION}-linux-x64.tar.gz
@ -35,7 +35,7 @@ RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \
# Install go
ENV GOPATH /root/go
ENV GO_VERSION 1.8.1
ENV GO_VERSION 1.9.2
ENV GO_ARCH amd64
RUN wget https://storage.googleapis.com/golang/go${GO_VERSION}.linux-${GO_ARCH}.tar.gz; \
tar -C /usr/local/ -xf /go${GO_VERSION}.linux-${GO_ARCH}.tar.gz ; \

View File

@ -674,7 +674,7 @@ def package(build_output, pkg_name, version, nightly=False, iteration=1, static=
package_build_root,
current_location)
if package_type == "rpm":
fpm_command += "--depends coreutils"
fpm_command += "--depends coreutils --depends shadow-utils"
# TODO: Check for changelog
# elif package_type == "deb":
# fpm_command += "--deb-changelog {} ".format(os.path.join(os.getcwd(), "CHANGELOG.md"))

View File

@ -9,7 +9,7 @@ After=network-online.target
User=chronograf
Group=chronograf
EnvironmentFile=-/etc/default/chronograf
ExecStart=/usr/bin/chronograf --host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned
ExecStart=/usr/bin/chronograf --host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned $CHRONOGRAF_OPTS
KillMode=control-group
Restart=on-failure

View File

@ -13,7 +13,8 @@
# Script to execute when starting
SCRIPT="/usr/bin/chronograf"
# Options to pass to the script on startup
SCRIPT_OPTS="--host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned"
. /etc/default/chronograf
SCRIPT_OPTS="--host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned ${CHRONOGRAF_OPTS}"
# User to run the process under
RUNAS=chronograf

View File

@ -5,4 +5,5 @@
dateext
copytruncate
compress
notifempty
}

View File

@ -19,12 +19,13 @@ const (
// Client communicates to kapacitor
type Client struct {
URL string
Username string
Password string
ID chronograf.ID
Ticker chronograf.Ticker
kapaClient func(url, username, password string) (KapaClient, error)
URL string
Username string
Password string
InsecureSkipVerify bool
ID chronograf.ID
Ticker chronograf.Ticker
kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
}
// KapaClient represents a connection to a kapacitor instance
@ -37,14 +38,15 @@ type KapaClient interface {
}
// NewClient creates a client that interfaces with Kapacitor tasks
func NewClient(url, username, password string) *Client {
func NewClient(url, username, password string, insecureSkipVerify bool) *Client {
return &Client{
URL: url,
Username: username,
Password: password,
ID: &uuid.V4{},
Ticker: &Alert{},
kapaClient: NewKapaClient,
URL: url,
Username: username,
Password: password,
InsecureSkipVerify: insecureSkipVerify,
ID: &uuid.V4{},
Ticker: &Alert{},
kapaClient: NewKapaClient,
}
}
@ -121,7 +123,7 @@ func (c *Client) Create(ctx context.Context, rule chronograf.AlertRule) (*Task,
return nil, err
}
kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
if err != nil {
return nil, err
}
@ -189,7 +191,7 @@ func (c *Client) createFromQueryConfig(rule chronograf.AlertRule) (*client.Creat
// Delete removes tickscript task from kapacitor
func (c *Client) Delete(ctx context.Context, href string) error {
kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
if err != nil {
return err
}
@ -197,7 +199,7 @@ func (c *Client) Delete(ctx context.Context, href string) error {
}
func (c *Client) updateStatus(ctx context.Context, href string, status client.TaskStatus) (*Task, error) {
kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
if err != nil {
return nil, err
}
@ -235,7 +237,7 @@ func (c *Client) Status(ctx context.Context, href string) (string, error) {
}
func (c *Client) status(ctx context.Context, href string) (client.TaskStatus, error) {
kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
if err != nil {
return 0, err
}
@ -249,7 +251,7 @@ func (c *Client) status(ctx context.Context, href string) (client.TaskStatus, er
// All returns all tasks in kapacitor
func (c *Client) All(ctx context.Context) (map[string]*Task, error) {
kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
if err != nil {
return nil, err
}
@ -286,7 +288,7 @@ func (c *Client) Reverse(id string, script chronograf.TICKScript) chronograf.Ale
// Get returns a single alert in kapacitor
func (c *Client) Get(ctx context.Context, id string) (*Task, error) {
kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
if err != nil {
return nil, err
}
@ -301,7 +303,7 @@ func (c *Client) Get(ctx context.Context, id string) (*Task, error) {
// Update changes the tickscript of a given id.
func (c *Client) Update(ctx context.Context, href string, rule chronograf.AlertRule) (*Task, error) {
kapa, err := c.kapaClient(c.URL, c.Username, c.Password)
kapa, err := c.kapaClient(c.URL, c.Username, c.Password, c.InsecureSkipVerify)
if err != nil {
return nil, err
}
@ -317,6 +319,9 @@ func (c *Client) Update(ctx context.Context, href string, rule chronograf.AlertR
} else {
opt, err = c.updateFromTick(rule)
}
if err != nil {
return nil, err
}
task, err := kapa.UpdateTask(client.Link{Href: href}, *opt)
if err != nil {
@ -386,7 +391,7 @@ func toTask(q *chronograf.QueryConfig) client.TaskType {
}
// NewKapaClient creates a Kapacitor client connection
func NewKapaClient(url, username, password string) (KapaClient, error) {
func NewKapaClient(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
var creds *client.Credentials
if username != "" {
creds = &client.Credentials{
@ -397,8 +402,9 @@ func NewKapaClient(url, username, password string) (KapaClient, error) {
}
clnt, err := client.New(client.Config{
URL: url,
Credentials: creds,
URL: url,
Credentials: creds,
InsecureSkipVerify: insecureSkipVerify,
})
if err != nil {

View File

@ -7,6 +7,7 @@ import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/influxdata/chronograf"
client "github.com/influxdata/kapacitor/client/v1"
)
@ -75,7 +76,7 @@ func TestClient_All(t *testing.T) {
Password string
ID chronograf.ID
Ticker chronograf.Ticker
kapaClient func(url, username, password string) (KapaClient, error)
kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
}
type args struct {
ctx context.Context
@ -100,7 +101,7 @@ func TestClient_All(t *testing.T) {
{
name: "return no tasks",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
},
@ -110,7 +111,7 @@ func TestClient_All(t *testing.T) {
{
name: "return a non-reversible task",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
},
@ -141,7 +142,7 @@ func TestClient_All(t *testing.T) {
{
name: "return a reversible task",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
},
@ -380,7 +381,7 @@ func TestClient_Get(t *testing.T) {
Password string
ID chronograf.ID
Ticker chronograf.Ticker
kapaClient func(url, username, password string) (KapaClient, error)
kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
}
type args struct {
ctx context.Context
@ -406,7 +407,7 @@ func TestClient_Get(t *testing.T) {
{
name: "return no task",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
},
@ -423,7 +424,7 @@ func TestClient_Get(t *testing.T) {
{
name: "return non-reversible task",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
},
@ -465,7 +466,7 @@ func TestClient_Get(t *testing.T) {
{
name: "return reversible task",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
},
@ -706,7 +707,7 @@ func TestClient_updateStatus(t *testing.T) {
Password string
ID chronograf.ID
Ticker chronograf.Ticker
kapaClient func(url, username, password string) (KapaClient, error)
kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
}
type args struct {
ctx context.Context
@ -727,7 +728,7 @@ func TestClient_updateStatus(t *testing.T) {
{
name: "disable alert rule",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
@ -777,7 +778,7 @@ func TestClient_updateStatus(t *testing.T) {
{
name: "fail to enable alert rule",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
@ -797,7 +798,7 @@ func TestClient_updateStatus(t *testing.T) {
{
name: "enable alert rule",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
@ -880,7 +881,7 @@ func TestClient_Update(t *testing.T) {
Password string
ID chronograf.ID
Ticker chronograf.Ticker
kapaClient func(url, username, password string) (KapaClient, error)
kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
}
type args struct {
ctx context.Context
@ -902,7 +903,7 @@ func TestClient_Update(t *testing.T) {
{
name: "update alert rule error",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
@ -936,7 +937,7 @@ func TestClient_Update(t *testing.T) {
{
name: "update alert rule",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
@ -945,10 +946,22 @@ func TestClient_Update(t *testing.T) {
ctx: context.Background(),
href: "/kapacitor/v1/tasks/howdy",
rule: chronograf.AlertRule{
ID: "howdy",
ID: "howdy",
Name: "myname",
Query: &chronograf.QueryConfig{
Database: "db",
RetentionPolicy: "rp",
Measurement: "meas",
Fields: []chronograf.Field{
{
Type: "field",
Value: "usage_user",
},
},
},
Trigger: "threshold",
TriggerValues: chronograf.TriggerValues{
Operator: greaterThan,
},
},
},
@ -1000,7 +1013,7 @@ func TestClient_Update(t *testing.T) {
{
name: "stays disabled when already disabled",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
@ -1009,10 +1022,22 @@ func TestClient_Update(t *testing.T) {
ctx: context.Background(),
href: "/kapacitor/v1/tasks/howdy",
rule: chronograf.AlertRule{
ID: "howdy",
ID: "howdy",
Name: "myname",
Query: &chronograf.QueryConfig{
Database: "db",
RetentionPolicy: "rp",
Measurement: "meas",
Fields: []chronograf.Field{
{
Type: "field",
Value: "usage_user",
},
},
},
Trigger: "threshold",
TriggerValues: chronograf.TriggerValues{
Operator: greaterThan,
},
},
},
@ -1061,6 +1086,135 @@ func TestClient_Update(t *testing.T) {
},
wantStatus: client.Disabled,
},
{
name: "error because relative cannot have inside range",
wantErr: true,
fields: fields{
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
},
args: args{
ctx: context.Background(),
href: "/kapacitor/v1/tasks/error",
rule: chronograf.AlertRule{
ID: "error",
Query: &chronograf.QueryConfig{
Database: "db",
RetentionPolicy: "rp",
Fields: []chronograf.Field{
{
Value: "usage_user",
Type: "field",
},
},
},
Trigger: Relative,
TriggerValues: chronograf.TriggerValues{
Operator: InsideRange,
},
},
},
},
{
name: "error because rule has an unknown trigger mechanism",
wantErr: true,
fields: fields{
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
},
args: args{
ctx: context.Background(),
href: "/kapacitor/v1/tasks/error",
rule: chronograf.AlertRule{
ID: "error",
Query: &chronograf.QueryConfig{
Database: "db",
RetentionPolicy: "rp",
},
},
},
},
{
name: "error because query has no fields",
wantErr: true,
fields: fields{
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
},
args: args{
ctx: context.Background(),
href: "/kapacitor/v1/tasks/error",
rule: chronograf.AlertRule{
ID: "error",
Trigger: Threshold,
TriggerValues: chronograf.TriggerValues{
Period: "1d",
},
Name: "myname",
Query: &chronograf.QueryConfig{
Database: "db",
RetentionPolicy: "rp",
Measurement: "meas",
},
},
},
},
{
name: "error because alert has no name",
wantErr: true,
fields: fields{
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
},
args: args{
ctx: context.Background(),
href: "/kapacitor/v1/tasks/error",
rule: chronograf.AlertRule{
ID: "error",
Trigger: Deadman,
TriggerValues: chronograf.TriggerValues{
Period: "1d",
},
Query: &chronograf.QueryConfig{
Database: "db",
RetentionPolicy: "rp",
Measurement: "meas",
},
},
},
},
{
name: "error because alert period cannot be an empty string in deadman alert",
wantErr: true,
fields: fields{
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
},
args: args{
ctx: context.Background(),
href: "/kapacitor/v1/tasks/error",
rule: chronograf.AlertRule{
ID: "error",
Name: "myname",
Trigger: Deadman,
Query: &chronograf.QueryConfig{
Database: "db",
RetentionPolicy: "rp",
Measurement: "meas",
},
},
},
},
}
for _, tt := range tests {
kapa.ResTask = tt.resTask
@ -1079,11 +1233,17 @@ func TestClient_Update(t *testing.T) {
t.Errorf("Client.Update() error = %v, wantErr %v", err, tt.wantErr)
return
}
if tt.wantErr {
return
}
if !cmp.Equal(got, tt.want) {
t.Errorf("%q. Client.Update() = -got/+want %s", tt.name, cmp.Diff(got, tt.want))
}
if !reflect.DeepEqual(kapa.UpdateTaskOptions, tt.updateTaskOptions) {
t.Errorf("Client.Update() = %v, want %v", kapa.UpdateTaskOptions, tt.updateTaskOptions)
var cmpOptions = cmp.Options{
cmpopts.IgnoreFields(client.UpdateTaskOptions{}, "TICKscript"),
}
if !cmp.Equal(kapa.UpdateTaskOptions, tt.updateTaskOptions, cmpOptions...) {
t.Errorf("Client.Update() = %s", cmp.Diff(got, tt.updateTaskOptions, cmpOptions...))
}
if tt.wantStatus != kapa.LastStatus {
t.Errorf("Client.Update() = %v, want %v", kapa.LastStatus, tt.wantStatus)
@ -1099,7 +1259,7 @@ func TestClient_Create(t *testing.T) {
Password string
ID chronograf.ID
Ticker chronograf.Ticker
kapaClient func(url, username, password string) (KapaClient, error)
kapaClient func(url, username, password string, insecureSkipVerify bool) (KapaClient, error)
}
type args struct {
ctx context.Context
@ -1119,7 +1279,7 @@ func TestClient_Create(t *testing.T) {
{
name: "create alert rule",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
@ -1130,10 +1290,16 @@ func TestClient_Create(t *testing.T) {
args: args{
ctx: context.Background(),
rule: chronograf.AlertRule{
ID: "howdy",
ID: "howdy",
Name: "myname's",
Query: &chronograf.QueryConfig{
Database: "db",
RetentionPolicy: "rp",
Measurement: "meas",
},
Trigger: Deadman,
TriggerValues: chronograf.TriggerValues{
Period: "1d",
},
},
},
@ -1152,10 +1318,79 @@ func TestClient_Create(t *testing.T) {
},
},
createTaskOptions: &client.CreateTaskOptions{
TICKscript: "",
ID: "chronograf-v1-howdy",
Type: client.StreamTask,
Status: client.Enabled,
TICKscript: `var db = 'db'
var rp = 'rp'
var measurement = 'meas'
var groupBy = []
var whereFilter = lambda: TRUE
var period = 1d
var name = 'myname\'s'
var idVar = name + ':{{.Group}}'
var message = ''
var idTag = 'alertID'
var levelTag = 'level'
var messageField = 'message'
var durationField = 'duration'
var outputDB = 'chronograf'
var outputRP = 'autogen'
var outputMeasurement = 'alerts'
var triggerType = 'deadman'
var threshold = 0.0
var data = stream
|from()
.database(db)
.retentionPolicy(rp)
.measurement(measurement)
.groupBy(groupBy)
.where(whereFilter)
var trigger = data
|deadman(threshold, period)
.stateChangesOnly()
.message(message)
.id(idVar)
.idTag(idTag)
.levelTag(levelTag)
.messageField(messageField)
.durationField(durationField)
trigger
|eval(lambda: "emitted")
.as('value')
.keep('value', messageField, durationField)
|influxDBOut()
.create()
.database(outputDB)
.retentionPolicy(outputRP)
.measurement(outputMeasurement)
.tag('alertName', name)
.tag('triggerType', triggerType)
trigger
|httpOut('output')
`,
ID: "chronograf-v1-howdy",
Type: client.StreamTask,
Status: client.Enabled,
DBRPs: []client.DBRP{
{
Database: "db",
@ -1185,7 +1420,7 @@ func TestClient_Create(t *testing.T) {
{
name: "create alert rule error",
fields: fields{
kapaClient: func(url, username, password string) (KapaClient, error) {
kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
return kapa, nil
},
Ticker: &Alert{},
@ -1205,10 +1440,9 @@ func TestClient_Create(t *testing.T) {
},
resError: fmt.Errorf("error"),
createTaskOptions: &client.CreateTaskOptions{
TICKscript: "",
ID: "chronograf-v1-howdy",
Type: client.StreamTask,
Status: client.Enabled,
ID: "chronograf-v1-howdy",
Type: client.StreamTask,
Status: client.Enabled,
DBRPs: []client.DBRP{
{
Database: "db",
@ -1236,6 +1470,9 @@ func TestClient_Create(t *testing.T) {
t.Errorf("Client.Create() error = %v, wantErr %v", err, tt.wantErr)
return
}
if tt.wantErr {
return
}
if !cmp.Equal(got, tt.want) {
t.Errorf("%q. Client.Create() = -got/+want %s", tt.name, cmp.Diff(got, tt.want))
}

View File

@ -1,6 +1,8 @@
package kapacitor
import "fmt"
import (
"fmt"
)
const (
greaterThan = "greater than"

View File

@ -15,11 +15,11 @@ type Alert struct{}
func (a *Alert) Generate(rule chronograf.AlertRule) (chronograf.TICKScript, error) {
vars, err := Vars(rule)
if err != nil {
return "", nil
return "", err
}
data, err := Data(rule)
if err != nil {
return "", nil
return "", err
}
trigger, err := Trigger(rule)
if err != nil {

View File

@ -1,7 +1,10 @@
package kapacitor
import "github.com/influxdata/chronograf"
import "fmt"
import (
"fmt"
"github.com/influxdata/chronograf"
)
const (
// Deadman triggers when data is missing for a period of time

View File

@ -76,7 +76,41 @@ func Vars(rule chronograf.AlertRule) (string, error) {
}
}
type NotEmpty struct {
Err error
}
func (n *NotEmpty) Valid(name, s string) error {
if n.Err != nil {
return n.Err
}
if s == "" {
n.Err = fmt.Errorf("%s cannot be an empty string", name)
}
return n.Err
}
func Escape(str string) string {
return strings.Replace(str, "'", `\'`, -1)
}
func commonVars(rule chronograf.AlertRule) (string, error) {
n := new(NotEmpty)
n.Valid("database", rule.Query.Database)
n.Valid("retention policy", rule.Query.RetentionPolicy)
n.Valid("measurement", rule.Query.Measurement)
n.Valid("alert name", rule.Name)
n.Valid("trigger type", rule.Trigger)
if n.Err != nil {
return "", n.Err
}
wind, err := window(rule)
if err != nil {
return "", err
}
common := `
var db = '%s'
var rp = '%s'
@ -99,14 +133,14 @@ func commonVars(rule chronograf.AlertRule) (string, error) {
var triggerType = '%s'
`
res := fmt.Sprintf(common,
rule.Query.Database,
rule.Query.RetentionPolicy,
rule.Query.Measurement,
Escape(rule.Query.Database),
Escape(rule.Query.RetentionPolicy),
Escape(rule.Query.Measurement),
groupBy(rule.Query),
whereFilter(rule.Query),
window(rule),
rule.Name,
rule.Message,
wind,
Escape(rule.Name),
Escape(rule.Message),
IDTag,
LevelTag,
MessageField,
@ -127,17 +161,27 @@ func commonVars(rule chronograf.AlertRule) (string, error) {
// window is only used if deadman or threshold/relative with aggregate. Will return empty
// if no period.
func window(rule chronograf.AlertRule) string {
func window(rule chronograf.AlertRule) (string, error) {
if rule.Trigger == Deadman {
return fmt.Sprintf("var period = %s", rule.TriggerValues.Period)
if rule.TriggerValues.Period == "" {
return "", fmt.Errorf("period cannot be an empty string in deadman alert")
}
return fmt.Sprintf("var period = %s", rule.TriggerValues.Period), nil
}
// Period only makes sense if the field has a been grouped via a time duration.
for _, field := range rule.Query.Fields {
if field.Type == "func" {
return fmt.Sprintf("var period = %s\nvar every = %s", rule.Query.GroupBy.Time, rule.Every)
n := new(NotEmpty)
n.Valid("group by time", rule.Query.GroupBy.Time)
n.Valid("every", rule.Every)
if n.Err != nil {
return "", n.Err
}
return fmt.Sprintf("var period = %s\nvar every = %s", rule.Query.GroupBy.Time, rule.Every), nil
}
}
return ""
return "", nil
}
func groupBy(q *chronograf.QueryConfig) string {

View File

@ -12,12 +12,13 @@ import (
)
type postKapacitorRequest struct {
Name *string `json:"name"` // User facing name of kapacitor instance.; Required: true
URL *string `json:"url"` // URL for the kapacitor backend (e.g. http://localhost:9092);/ Required: true
Username string `json:"username,omitempty"` // Username for authentication to kapacitor
Password string `json:"password,omitempty"`
Active bool `json:"active"`
Organization string `json:"organization"` // Organization is the organization ID that resource belongs to
Name *string `json:"name"` // User facing name of kapacitor instance.; Required: true
URL *string `json:"url"` // URL for the kapacitor backend (e.g. http://localhost:9092);/ Required: true
Username string `json:"username,omitempty"` // Username for authentication to kapacitor
Password string `json:"password,omitempty"`
InsecureSkipVerify bool `json:"insecureSkipVerify,omitempty"` // InsecureSkipVerify as true means any certificate presented by the kapacitor is accepted.
Active bool `json:"active"`
Organization string `json:"organization"` // Organization is the organization ID that resource belongs to
}
func (p *postKapacitorRequest) Valid(defaultOrgID string) error {
@ -49,13 +50,14 @@ type kapaLinks struct {
}
type kapacitor struct {
ID int `json:"id,string"` // Unique identifier representing a kapacitor instance.
Name string `json:"name"` // User facing name of kapacitor instance.
URL string `json:"url"` // URL for the kapacitor backend (e.g. http://localhost:9092)
Username string `json:"username,omitempty"` // Username for authentication to kapacitor
Password string `json:"password,omitempty"`
Active bool `json:"active"`
Links kapaLinks `json:"links"` // Links are URI locations related to kapacitor
ID int `json:"id,string"` // Unique identifier representing a kapacitor instance.
Name string `json:"name"` // User facing name of kapacitor instance.
URL string `json:"url"` // URL for the kapacitor backend (e.g. http://localhost:9092)
Username string `json:"username,omitempty"` // Username for authentication to kapacitor
Password string `json:"password,omitempty"`
InsecureSkipVerify bool `json:"insecureSkipVerify,omitempty"` // InsecureSkipVerify as true means any certificate presented by the kapacitor is accepted.
Active bool `json:"active"`
Links kapaLinks `json:"links"` // Links are URI locations related to kapacitor
}
// NewKapacitor adds valid kapacitor store store.
@ -91,13 +93,14 @@ func (s *Service) NewKapacitor(w http.ResponseWriter, r *http.Request) {
}
srv := chronograf.Server{
SrcID: srcID,
Name: *req.Name,
Username: req.Username,
Password: req.Password,
URL: *req.URL,
Active: req.Active,
Organization: req.Organization,
SrcID: srcID,
Name: *req.Name,
Username: req.Username,
Password: req.Password,
InsecureSkipVerify: req.InsecureSkipVerify,
URL: *req.URL,
Active: req.Active,
Organization: req.Organization,
}
if srv, err = s.Store.Servers(ctx).Add(ctx, srv); err != nil {
@ -114,11 +117,12 @@ func (s *Service) NewKapacitor(w http.ResponseWriter, r *http.Request) {
func newKapacitor(srv chronograf.Server) kapacitor {
httpAPISrcs := "/chronograf/v1/sources"
return kapacitor{
ID: srv.ID,
Name: srv.Name,
Username: srv.Username,
URL: srv.URL,
Active: srv.Active,
ID: srv.ID,
Name: srv.Name,
Username: srv.Username,
URL: srv.URL,
Active: srv.Active,
InsecureSkipVerify: srv.InsecureSkipVerify,
Links: kapaLinks{
Self: fmt.Sprintf("%s/%d/kapacitors/%d", httpAPISrcs, srv.SrcID, srv.ID),
Proxy: fmt.Sprintf("%s/%d/kapacitors/%d/proxy", httpAPISrcs, srv.SrcID, srv.ID),
@ -217,11 +221,12 @@ func (s *Service) RemoveKapacitor(w http.ResponseWriter, r *http.Request) {
}
type patchKapacitorRequest struct {
Name *string `json:"name,omitempty"` // User facing name of kapacitor instance.
URL *string `json:"url,omitempty"` // URL for the kapacitor
Username *string `json:"username,omitempty"` // Username for kapacitor auth
Password *string `json:"password,omitempty"`
Active *bool `json:"active"`
Name *string `json:"name,omitempty"` // User facing name of kapacitor instance.
URL *string `json:"url,omitempty"` // URL for the kapacitor
Username *string `json:"username,omitempty"` // Username for kapacitor auth
Password *string `json:"password,omitempty"`
InsecureSkipVerify *bool `json:"insecureSkipVerify,omitempty"` // InsecureSkipVerify as true means any certificate presented by the kapacitor is accepted.
Active *bool `json:"active"`
}
func (p *patchKapacitorRequest) Valid() error {
@ -281,6 +286,9 @@ func (s *Service) UpdateKapacitor(w http.ResponseWriter, r *http.Request) {
if req.Username != nil {
srv.Username = *req.Username
}
if req.InsecureSkipVerify != nil {
srv.InsecureSkipVerify = *req.InsecureSkipVerify
}
if req.Active != nil {
srv.Active = *req.Active
}
@ -316,7 +324,7 @@ func (s *Service) KapacitorRulesPost(w http.ResponseWriter, r *http.Request) {
return
}
c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)
var req chronograf.AlertRule
if err = json.NewDecoder(r.Body).Decode(&req); err != nil {
@ -446,7 +454,7 @@ func (s *Service) KapacitorRulesPut(w http.ResponseWriter, r *http.Request) {
}
tid := httprouter.GetParamFromContext(ctx, "tid")
c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)
var req chronograf.AlertRule
if err = json.NewDecoder(r.Body).Decode(&req); err != nil {
invalidJSON(w, s.Logger)
@ -516,7 +524,7 @@ func (s *Service) KapacitorRulesStatus(w http.ResponseWriter, r *http.Request) {
}
tid := httprouter.GetParamFromContext(ctx, "tid")
c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)
var req KapacitorStatus
if err = json.NewDecoder(r.Body).Decode(&req); err != nil {
@ -576,7 +584,7 @@ func (s *Service) KapacitorRulesGet(w http.ResponseWriter, r *http.Request) {
return
}
c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)
tasks, err := c.All(ctx)
if err != nil {
Error(w, http.StatusInternalServerError, err.Error(), s.Logger)
@ -619,7 +627,7 @@ func (s *Service) KapacitorRulesID(w http.ResponseWriter, r *http.Request) {
}
tid := httprouter.GetParamFromContext(ctx, "tid")
c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)
// Check if the rule exists within scope
task, err := c.Get(ctx, tid)
@ -657,7 +665,7 @@ func (s *Service) KapacitorRulesDelete(w http.ResponseWriter, r *http.Request) {
return
}
c := kapa.NewClient(srv.URL, srv.Username, srv.Password)
c := kapa.NewClient(srv.URL, srv.Username, srv.Password, srv.InsecureSkipVerify)
tid := httprouter.GetParamFromContext(ctx, "tid")
// Check if the rule is linked to this server and kapacitor

View File

@ -187,6 +187,14 @@ func Test_KapacitorRulesGet(t *testing.T) {
testLogger := mocks.TestLogger{}
svc := &server.Service{
Store: &mocks.Store{
SourcesStore: &mocks.SourcesStore{
GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
return chronograf.Source{
ID: ID,
InsecureSkipVerify: true,
}, nil
},
},
ServersStore: &mocks.ServersStore{
GetF: func(ctx context.Context, ID int) (chronograf.Server, error) {
return chronograf.Server{

View File

@ -7,39 +7,57 @@ import (
"github.com/influxdata/chronograf"
)
type logResponseWriter struct {
// statusWriterFlusher captures the status header of an http.ResponseWriter
// and is a flusher
type statusWriter struct {
http.ResponseWriter
responseCode int
Flusher http.Flusher
status int
}
func (l *logResponseWriter) WriteHeader(status int) {
l.responseCode = status
l.ResponseWriter.WriteHeader(status)
func (w *statusWriter) WriteHeader(status int) {
w.status = status
w.ResponseWriter.WriteHeader(status)
}
// Status returns the HTTP status code recorded by WriteHeader,
// or 0 if WriteHeader has not been called.
func (w *statusWriter) Status() int { return w.status }
// Flush implements http.Flusher by forwarding to the wrapped writer's
// Flusher, so HTTP chunked transfer responses are sent immediately instead
// of being silently buffered. This was discovered when proxying kapacitor
// chunked logs. Flush is a no-op when the underlying http.ResponseWriter
// does not support flushing (Flusher is nil).
func (w *statusWriter) Flush() {
	if w.Flusher != nil {
		w.Flusher.Flush()
	}
}
// Logger is middleware that logs the request
func Logger(logger chronograf.Logger, next http.Handler) http.Handler {
fn := func(w http.ResponseWriter, r *http.Request) {
now := time.Now()
logger.
WithField("component", "server").
logger.WithField("component", "server").
WithField("remote_addr", r.RemoteAddr).
WithField("method", r.Method).
WithField("url", r.URL).
Info("Request")
Debug("Request")
lrr := &logResponseWriter{w, 0}
next.ServeHTTP(lrr, r)
sw := &statusWriter{
ResponseWriter: w,
}
if f, ok := w.(http.Flusher); ok {
sw.Flusher = f
}
next.ServeHTTP(sw, r)
later := time.Now()
elapsed := later.Sub(now)
logger.
WithField("component", "server").
WithField("remote_addr", r.RemoteAddr).
WithField("method", r.Method).
WithField("response_time", elapsed.String()).
WithField("code", lrr.responseCode).
Info("Response: ", http.StatusText(lrr.responseCode))
WithField("status", sw.Status()).
Info("Response: ", http.StatusText(sw.Status()))
}
return http.HandlerFunc(fn)
}

View File

@ -9,7 +9,8 @@ import (
type interceptingResponseWriter struct {
http.ResponseWriter
Prefix string
Flusher http.Flusher
Prefix string
}
func (i *interceptingResponseWriter) WriteHeader(status int) {
@ -25,11 +26,26 @@ func (i *interceptingResponseWriter) WriteHeader(status int) {
i.ResponseWriter.WriteHeader(status)
}
// PrefixingRedirector alters the Location header of downstream http.Handlers
// Flush implements http.Flusher by forwarding to the wrapped writer's
// Flusher, so HTTP chunked transfer responses are sent immediately instead
// of being silently buffered. This was discovered when proxying kapacitor
// chunked logs. Flush is a no-op when the underlying http.ResponseWriter
// does not support flushing (Flusher is nil).
func (i *interceptingResponseWriter) Flush() {
	if i.Flusher != nil {
		i.Flusher.Flush()
	}
}
// PrefixedRedirect alters the Location header of downstream http.Handlers
// to include a specified prefix
func PrefixedRedirect(prefix string, next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
iw := &interceptingResponseWriter{w, prefix}
iw := &interceptingResponseWriter{
ResponseWriter: w,
Prefix: prefix,
}
if flusher, ok := w.(http.Flusher); ok {
iw.Flusher = flusher
}
next.ServeHTTP(iw, r)
})
}

View File

@ -5,6 +5,8 @@ import (
"net/http"
"net/http/httputil"
"net/url"
"strings"
"time"
)
// KapacitorProxy proxies requests to kapacitor using the path query parameter.
@ -34,28 +36,33 @@ func (s *Service) KapacitorProxy(w http.ResponseWriter, r *http.Request) {
return
}
u, err := url.Parse(srv.URL)
// To preserve any HTTP query arguments to the kapacitor path,
// we concat and parse them into u.
uri := singleJoiningSlash(srv.URL, path)
u, err := url.Parse(uri)
if err != nil {
msg := fmt.Sprintf("Error parsing kapacitor url: %v", err)
Error(w, http.StatusUnprocessableEntity, msg, s.Logger)
return
}
u.Path = path
director := func(req *http.Request) {
// Set the Host header of the original Kapacitor URL
req.Host = u.Host
req.URL = u
// Because we are acting as a proxy, kapacitor needs to have the basic auth information set as
// a header directly
if srv.Username != "" && srv.Password != "" {
req.SetBasicAuth(srv.Username, srv.Password)
}
}
// Without a FlushInterval the HTTP Chunked response for kapacitor logs is
// buffered and flushed every 30 seconds.
proxy := &httputil.ReverseProxy{
Director: director,
Director: director,
FlushInterval: time.Second,
}
proxy.ServeHTTP(w, r)
}
@ -79,3 +86,15 @@ func (s *Service) KapacitorProxyGet(w http.ResponseWriter, r *http.Request) {
func (s *Service) KapacitorProxyDelete(w http.ResponseWriter, r *http.Request) {
s.KapacitorProxy(w, r)
}
// singleJoiningSlash concatenates a and b so that exactly one "/" separates
// them, regardless of whether a ends with, or b begins with, a slash.
func singleJoiningSlash(a, b string) string {
	switch {
	case strings.HasSuffix(a, "/") && strings.HasPrefix(b, "/"):
		// Both sides contribute a slash: drop the leading one from b.
		return a + strings.TrimPrefix(b, "/")
	case !strings.HasSuffix(a, "/") && !strings.HasPrefix(b, "/"):
		// Neither side has a slash: insert one.
		return a + "/" + b
	default:
		// Exactly one side has a slash already.
		return a + b
	}
}

View File

@ -12,16 +12,21 @@ import (
"github.com/influxdata/chronograf/influx/queries"
)
// QueryRequest is query that will be converted to a queryConfig
type QueryRequest struct {
ID string `json:"id"`
Query string `json:"query"`
ID string `json:"id"`
Query string `json:"query"`
}
// QueriesRequest converts all queries to queryConfigs with the help
// of the template variables
type QueriesRequest struct {
Queries []QueryRequest `json:"queries"`
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
}
type QueriesRequest struct {
Queries []QueryRequest `json:"queries"`
}
// QueryResponse is the return result of a QueryRequest including
// the raw query, the templated query, the queryConfig and the queryAST
type QueryResponse struct {
ID string `json:"id"`
Query string `json:"query"`
@ -31,11 +36,12 @@ type QueryResponse struct {
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
}
// QueriesResponse is the response for a QueriesRequest
type QueriesResponse struct {
Queries []QueryResponse `json:"queries"`
}
// Queries parses InfluxQL and returns the JSON
// Queries analyzes InfluxQL to produce front-end friendly QueryConfig
func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
srcID, err := paramID("id", r)
if err != nil {
@ -66,12 +72,7 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
Query: q.Query,
}
query := q.Query
if len(q.TemplateVars) > 0 {
query = influx.TemplateReplace(query, q.TemplateVars)
qr.QueryTemplated = &query
}
query := influx.TemplateReplace(q.Query, req.TemplateVars)
qc := ToQueryConfig(query)
if err := s.DefaultRP(ctx, &qc, &src); err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
@ -83,9 +84,10 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
qr.QueryAST = stmt
}
if len(q.TemplateVars) > 0 {
qr.TemplateVars = q.TemplateVars
if len(req.TemplateVars) > 0 {
qr.TemplateVars = req.TemplateVars
qr.QueryConfig.RawText = &qr.Query
qr.QueryTemplated = &query
}
qr.QueryConfig.ID = q.ID

187
server/queries_test.go Normal file
View File

@ -0,0 +1,187 @@
package server
import (
"bytes"
"context"
"net/http"
"net/http/httptest"
"testing"
"github.com/bouk/httprouter"
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/mocks"
)
func TestService_Queries(t *testing.T) {
tests := []struct {
name string
SourcesStore chronograf.SourcesStore
ID string
w *httptest.ResponseRecorder
r *http.Request
want string
}{
{
name: "bad json",
SourcesStore: &mocks.SourcesStore{
GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
return chronograf.Source{
ID: ID,
}, nil
},
},
ID: "1",
w: httptest.NewRecorder(),
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`howdy`))),
want: `{"code":400,"message":"Unparsable JSON"}`,
},
{
name: "bad id",
ID: "howdy",
w: httptest.NewRecorder(),
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte{})),
want: `{"code":422,"message":"Error converting ID howdy"}`,
},
{
name: "query with no template vars",
SourcesStore: &mocks.SourcesStore{
GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
return chronograf.Source{
ID: ID,
}, nil
},
},
ID: "1",
w: httptest.NewRecorder(),
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
"queries": [
{
"query": "SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time > now() - 1m",
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
]}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"db","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":null,"range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"db","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]}}]}
`,
},
{
name: "query with unparsable query",
SourcesStore: &mocks.SourcesStore{
GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
return chronograf.Source{
ID: ID,
}, nil
},
},
ID: "1",
w: httptest.NewRecorder(),
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
"queries": [
{
"query": "SHOW DATABASES",
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
]}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SHOW DATABASES","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SHOW DATABASES","range":null}}]}
`,
},
{
name: "query with template vars",
SourcesStore: &mocks.SourcesStore{
GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
return chronograf.Source{
ID: ID,
}, nil
},
},
ID: "1",
w: httptest.NewRecorder(),
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
"queries": [
{
"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > now() - 1m",
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
],
"tempVars": [
{
"tempVar": ":dbs:",
"values": [
{
"value": "_internal",
"type": "database",
"selected": true
}
],
"id": "792eda0d-2bb2-4de6-a86f-1f652889b044",
"type": "databases",
"label": "",
"query": {
"influxql": "SHOW DATABASES",
"measurement": "",
"tagKey": "",
"fieldKey": ""
},
"links": {
"self": "/chronograf/v1/dashboards/1/templates/792eda0d-2bb2-4de6-a86f-1f652889b044"
}
},
{
"id": "dashtime",
"tempVar": ":dashboardTime:",
"type": "constant",
"values": [
{
"value": "now() - 15m",
"type": "constant",
"selected": true
}
]
},
{
"id": "upperdashtime",
"tempVar": ":upperDashboardTime:",
"type": "constant",
"values": [
{
"value": "now()",
"type": "constant",
"selected": true
}
]
},
{
"id": "interval",
"type": "constant",
"tempVar": ":interval:",
"resolution": 1000,
"reportingInterval": 10000000000,
"values": []
}
]
}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"_internal","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"_internal","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","tempVars":[{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":interval:","duration":60000000000,"resolution":1000,"reportingInterval":10000000000}]}]}
`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tt.r = tt.r.WithContext(httprouter.WithParams(
context.Background(),
httprouter.Params{
{
Key: "id",
Value: tt.ID,
},
}))
s := &Service{
SourcesStore: tt.SourcesStore,
Logger: &mocks.TestLogger{},
}
s.Queries(tt.w, tt.r)
got := tt.w.Body.String()
if got != tt.want {
t.Errorf("got:\n%s\nwant:\n%s\n", got, tt.want)
}
})
}
}

View File

@ -2357,6 +2357,7 @@
"name": "kapa",
"url": "http://localhost:9092",
"active": false,
"insecureSkipVerify": false,
"links": {
"proxy": "/chronograf/v1/sources/4/kapacitors/4/proxy",
"self": "/chronograf/v1/sources/4/kapacitors/4",
@ -2387,6 +2388,11 @@
"description":
"URL for the kapacitor backend (e.g. http://localhost:9092)"
},
"insecureSkipVerify": {
"type": "boolean",
"description":
"True means any certificate presented by the kapacitor is accepted. Typically used for self-signed certs. Probably should only be used for testing."
},
"active": {
"type": "boolean",
"description":

View File

@ -12,6 +12,7 @@
"build": "yarn run clean && env NODE_ENV=production webpack --optimize-minimize --config ./webpack/prodConfig.js",
"build:dev": "webpack --config ./webpack/devConfig.js",
"start": "webpack --watch --config ./webpack/devConfig.js",
"start:hmr": "webpack-dev-server --open --config ./webpack/devConfig.js",
"lint": "esw src/",
"test": "karma start",
"test:integration": "nightwatch tests --skip",
@ -77,6 +78,7 @@
"mocha-loader": "^0.7.1",
"mustache": "^2.2.1",
"node-sass": "^4.5.3",
"on-build-webpack": "^0.1.0",
"postcss-browser-reporter": "^0.4.0",
"postcss-calc": "^5.2.0",
"postcss-loader": "^0.8.0",

View File

@ -3,13 +3,16 @@ import {
formatDate,
dashboardtoCSV,
} from 'shared/parsing/resultsToCSV'
import moment from 'moment'
describe('formatDate', () => {
it('converts timestamp to an excel compatible date string', () => {
const timestamp = 1000000000000
const result = formatDate(timestamp)
expect(result).to.be.a('string')
expect(+new Date(result)).to.equal(timestamp)
expect(moment(result, 'M/D/YYYY h:mm:ss.SSSSSSSSS A').valueOf()).to.equal(
timestamp
)
})
})

View File

@ -281,7 +281,8 @@ export const updateTempVarValues = (source, dashboard) => async dispatch => {
results.forEach(({data}, i) => {
const {type, query, id} = tempsWithQueries[i]
const vals = parsers[type](data, query.tagKey || query.measurement)[type]
const parsed = parsers[type](data, query.tagKey || query.measurement)
const vals = parsed[type]
dispatch(editTemplateVariableValues(dashboard.id, id, vals))
})
} catch (error) {

View File

@ -6,6 +6,7 @@ import {Tabber, Tab} from 'src/dashboards/components/Tabber'
import {DISPLAY_OPTIONS, TOOLTIP_CONTENT} from 'src/dashboards/constants'
const {LINEAR, LOG, BASE_2, BASE_10} = DISPLAY_OPTIONS
const getInputMin = scale => (scale === LOG ? '0' : null)
const AxesOptions = ({
axes: {y: {bounds, label, prefix, suffix, base, scale, defaultYLabel}},
@ -38,6 +39,7 @@ const AxesOptions = ({
customValue={min}
onSetValue={onSetYAxisBoundMin}
type="number"
min={getInputMin(scale)}
/>
</div>
<div className="form-group col-sm-6">
@ -47,6 +49,7 @@ const AxesOptions = ({
customValue={max}
onSetValue={onSetYAxisBoundMax}
type="number"
min={getInputMin(scale)}
/>
</div>
<Input

View File

@ -88,7 +88,14 @@ class ChronoTable extends Component {
)
}
makeTabName = ({name, tags}) => (tags ? `${name}.${tags[name]}` : name)
// Build the tab label for a series: the measurement name alone when there
// are no tags, otherwise the name followed by the tag values in key-sorted
// order, all joined with dots (e.g. "cpu.cpu0.host1").
makeTabName = ({name, tags}) => {
  if (!tags) {
    return name
  }

  const sortedTagValues = Object.keys(tags)
    .sort()
    .map(tagKey => tags[tagKey])

  return [name, sortedTagValues.join('.')].join('.')
}
render() {
const {containerWidth, height, query} = this.props
@ -135,9 +142,13 @@ class ChronoTable extends Component {
</div>
: <Dropdown
className="dropdown-160 table--tabs-dropdown"
items={series.map((s, index) => ({...s, text: s.name, index}))}
items={series.map((s, index) => ({
...s,
text: this.makeTabName(s),
index,
}))}
onChoose={this.handleClickDropdown}
selected={series[activeSeriesIndex].name}
selected={this.makeTabName(series[activeSeriesIndex])}
buttonSize="btn-xs"
/>}
<div className="table--tabs-content">

View File

@ -283,11 +283,13 @@ Dygraph.prototype.findClosestPoint = function(domX, domY) {
minYDist = ydist
closestRow = point.idx
closestSeries = setIdx
closestPoint = point
} else if (xdist === minXDist && ydist < minYDist) {
minXDist = xdist
minYDist = ydist
closestRow = point.idx
closestSeries = setIdx
closestPoint = point
}
}
}

View File

@ -11,7 +11,7 @@ export function getCpuAndLoadForHosts(proxyLink, telegrafDB) {
SELECT mean("Percent_Processor_Time") FROM win_cpu WHERE time > now() - 10m GROUP BY host;
SELECT mean("Processor_Queue_Length") FROM win_system WHERE time > now() - 10s GROUP BY host;
SELECT non_negative_derivative(mean("System_Up_Time")) AS winDeltaUptime FROM win_system WHERE time > now() - 10m GROUP BY host, time(1m) fill(0);
SHOW TAG VALUES FROM /win_system|system/ WITH KEY = "host"`,
SHOW TAG VALUES WITH KEY = "host";`,
db: telegrafDB,
}).then(resp => {
const hosts = {}
@ -87,7 +87,7 @@ export async function getAllHosts(proxyLink, telegrafDB) {
try {
const resp = await proxy({
source: proxyLink,
query: 'show tag values from /win_system|system/ with key = "host"',
query: 'show tag values with key = "host"',
db: telegrafDB,
})
const hosts = {}

View File

@ -1,11 +1,11 @@
import React, {PropTypes} from 'react'
import {CHANGES, OPERATORS, SHIFTS} from 'src/kapacitor/constants'
import {CHANGES, RELATIVE_OPERATORS, SHIFTS} from 'src/kapacitor/constants'
import Dropdown from 'shared/components/Dropdown'
const mapToItems = (arr, type) => arr.map(text => ({text, type}))
const changes = mapToItems(CHANGES, 'change')
const shifts = mapToItems(SHIFTS, 'shift')
const operators = mapToItems(OPERATORS, 'operator')
const operators = mapToItems(RELATIVE_OPERATORS, 'operator')
const Relative = ({
onRuleTypeInputChange,

View File

@ -1,10 +1,10 @@
import React, {PropTypes} from 'react'
import {OPERATORS} from 'src/kapacitor/constants'
import {THRESHOLD_OPERATORS} from 'src/kapacitor/constants'
import Dropdown from 'shared/components/Dropdown'
import _ from 'lodash'
const mapToItems = (arr, type) => arr.map(text => ({text, type}))
const operators = mapToItems(OPERATORS, 'operator')
const operators = mapToItems(THRESHOLD_OPERATORS, 'operator')
const noopSubmit = e => e.preventDefault()
const getField = ({fields}) => {
const alias = _.get(fields, ['0', 'alias'], false)

View File

@ -31,7 +31,7 @@ export const OUTSIDE_RANGE = 'outside range'
export const EQUAL_TO_OR_GREATER_THAN = 'equal to or greater'
export const EQUAL_TO_OR_LESS_THAN = 'equal to or less than'
export const OPERATORS = [
export const THRESHOLD_OPERATORS = [
GREATER_THAN,
EQUAL_TO_OR_GREATER_THAN,
EQUAL_TO_OR_LESS_THAN,
@ -42,6 +42,15 @@ export const OPERATORS = [
OUTSIDE_RANGE,
]
export const RELATIVE_OPERATORS = [
GREATER_THAN,
EQUAL_TO_OR_GREATER_THAN,
EQUAL_TO_OR_LESS_THAN,
LESS_THAN,
EQUAL_TO,
NOT_EQUAL_TO,
]
// export const RELATIONS = ['once', 'more than ', 'less than'];
export const PERIODS = ['1m', '5m', '10m', '30m', '1h', '2h', '24h']
export const CHANGES = ['change', '% change']

View File

@ -136,13 +136,6 @@ const AutoRefresh = ComposedComponent => {
return this.renderFetching(timeSeries)
}
if (
!this._resultsForQuery(timeSeries) ||
!this.state.lastQuerySuccessful
) {
return this.renderNoResults()
}
return (
<ComposedComponent
{...this.props}
@ -169,14 +162,6 @@ const AutoRefresh = ComposedComponent => {
)
}
renderNoResults = () => {
return (
<div className="graph-empty">
<p data-test="data-explorer-no-results">No Results</p>
</div>
)
}
_resultsForQuery = data =>
data.length
? data.every(({response}) =>

View File

@ -14,6 +14,7 @@ class ClickOutsideInput extends Component {
render() {
const {
id,
min,
type,
onFocus,
onChange,
@ -27,6 +28,7 @@ class ClickOutsideInput extends Component {
<input
className="form-control input-sm"
id={id}
min={min}
type={type}
name={customPlaceholder}
ref={onGetRef}
@ -43,6 +45,7 @@ class ClickOutsideInput extends Component {
const {func, string} = PropTypes
ClickOutsideInput.propTypes = {
min: string,
id: string.isRequired,
type: string.isRequired,
customPlaceholder: string.isRequired,

View File

@ -97,7 +97,7 @@ const DatabaseList = React.createClass({
return (
<div className="query-builder--column query-builder--column-db">
<div className="query-builder--heading">Databases</div>
<div className="query-builder--heading">DB.RetentionPolicy</div>
<div className="query-builder--list">
<FancyScrollbar>
{sortedNamespaces.map(namespace => {

View File

@ -29,6 +29,7 @@ export default class Dygraph extends Component {
x: null,
series: [],
},
pageX: null,
sortType: '',
filterText: '',
isSynced: false,
@ -36,14 +37,12 @@ export default class Dygraph extends Component {
isAscending: true,
isSnipped: false,
isFilterVisible: false,
legendArrowPosition: 'top',
}
}
componentDidMount() {
const {
axes: {y, y2},
ruleValues,
isGraphFilled: fillGraph,
isBarGraph,
options,
@ -63,9 +62,7 @@ export default class Dygraph extends Component {
plugins: [new Dygraphs.Plugins.Crosshair({direction: 'vertical'})],
axes: {
y: {
valueRange: options.stackedGraph
? getStackedRange(y.bounds)
: getRange(timeSeries, y.bounds, ruleValues),
valueRange: this.getYRange(timeSeries),
axisLabelFormatter: (yval, __, opts) =>
numberValueFormatter(yval, opts, y.prefix, y.suffix),
axisLabelWidth: this.getLabelWidth(),
@ -130,7 +127,7 @@ export default class Dygraph extends Component {
}
componentDidUpdate() {
const {labels, axes: {y, y2}, options, ruleValues, isBarGraph} = this.props
const {labels, axes: {y, y2}, options, isBarGraph} = this.props
const dygraph = this.dygraph
if (!dygraph) {
@ -149,9 +146,7 @@ export default class Dygraph extends Component {
ylabel: this.getLabel('y'),
axes: {
y: {
valueRange: options.stackedGraph
? getStackedRange(y.bounds)
: getRange(timeSeries, y.bounds, ruleValues),
valueRange: this.getYRange(timeSeries),
axisLabelFormatter: (yval, __, opts) =>
numberValueFormatter(yval, opts, y.prefix, y.suffix),
axisLabelWidth: this.getLabelWidth(),
@ -171,9 +166,26 @@ export default class Dygraph extends Component {
dygraph.updateOptions(updateOptions)
const {w} = this.dygraph.getArea()
this.resize()
this.dygraph.resize()
this.props.setResolution(w)
this.resize()
}
getYRange = timeSeries => {
const {options, axes: {y}, ruleValues} = this.props
if (options.stackedGraph) {
return getStackedRange(y.bounds)
}
const range = getRange(timeSeries, y.bounds, ruleValues)
const [min, max] = range
// Bug in Dygraph calculates a negative range for logscale when min range is 0
if (y.scale === LOG && timeSeries.length === 1 && min <= 0) {
return [0.1, max]
}
return range
}
handleZoom = (lower, upper) => {
@ -298,6 +310,7 @@ export default class Dygraph extends Component {
resize = () => {
this.dygraph.resizeElements_()
this.dygraph.predraw_()
this.dygraph.resize()
}
formatTimeRange = timeRange => {
@ -341,64 +354,8 @@ export default class Dygraph extends Component {
}
}
highlightCallback = e => {
const chronografChromeSize = 60 // Width & Height of navigation page elements
// Move the Legend on hover
const graphRect = this.graphRef.getBoundingClientRect()
const legendRect = this.legendRef.getBoundingClientRect()
const graphWidth = graphRect.width + 32 // Factoring in padding from parent
const graphHeight = graphRect.height
const graphBottom = graphRect.bottom
const legendWidth = legendRect.width
const legendHeight = legendRect.height
const screenHeight = window.innerHeight
const legendMaxLeft = graphWidth - legendWidth / 2
const trueGraphX = e.pageX - graphRect.left
let legendLeft = trueGraphX
// Enforcing max & min legend offsets
if (trueGraphX < legendWidth / 2) {
legendLeft = legendWidth / 2
} else if (trueGraphX > legendMaxLeft) {
legendLeft = legendMaxLeft
}
// Disallow screen overflow of legend
const isLegendBottomClipped = graphBottom + legendHeight > screenHeight
const isLegendTopClipped =
legendHeight > graphRect.top - chronografChromeSize
const willLegendFitLeft = e.pageX - chronografChromeSize > legendWidth
let legendTop = graphHeight + 8
this.setState({legendArrowPosition: 'top'})
// If legend is only clipped on the bottom, position above graph
if (isLegendBottomClipped && !isLegendTopClipped) {
this.setState({legendArrowPosition: 'bottom'})
legendTop = -legendHeight
}
// If legend is clipped on top and bottom, posiition on either side of crosshair
if (isLegendBottomClipped && isLegendTopClipped) {
legendTop = 0
if (willLegendFitLeft) {
this.setState({legendArrowPosition: 'right'})
legendLeft = trueGraphX - legendWidth / 2
legendLeft -= 8
} else {
this.setState({legendArrowPosition: 'left'})
legendLeft = trueGraphX + legendWidth / 2
legendLeft += 32
}
}
this.legendRef.style.left = `${legendLeft}px`
this.legendRef.style.top = `${legendTop}px`
this.setState({isHidden: false})
highlightCallback = ({pageX}) => {
this.setState({isHidden: false, pageX})
}
legendFormatter = legend => {
@ -424,12 +381,12 @@ export default class Dygraph extends Component {
render() {
const {
legend,
pageX,
sortType,
isHidden,
isSnipped,
filterText,
isAscending,
legendArrowPosition,
isFilterVisible,
} = this.state
@ -437,6 +394,9 @@ export default class Dygraph extends Component {
<div className="dygraph-child" onMouseLeave={this.deselectCrosshair}>
<DygraphLegend
{...legend}
graph={this.graphRef}
legend={this.legendRef}
pageX={pageX}
sortType={sortType}
onHide={this.handleHideLegend}
isHidden={isHidden}
@ -449,7 +409,6 @@ export default class Dygraph extends Component {
legendRef={this.handleLegendRef}
onToggleFilter={this.handleToggleFilter}
onInputChange={this.handleLegendInputChange}
arrowPosition={legendArrowPosition}
/>
<div
ref={r => {

View File

@ -1,6 +1,9 @@
import React, {PropTypes} from 'react'
import _ from 'lodash'
import classnames from 'classnames'
import uuid from 'node-uuid'
import {makeLegendStyles} from 'shared/graphs/helpers'
const removeMeasurement = (label = '') => {
const [measurement] = label.match(/^(.*)[.]/g) || ['']
@ -9,6 +12,9 @@ const removeMeasurement = (label = '') => {
const DygraphLegend = ({
xHTML,
pageX,
graph,
legend,
series,
onSort,
onSnip,
@ -20,7 +26,6 @@ const DygraphLegend = ({
filterText,
isAscending,
onInputChange,
arrowPosition,
isFilterVisible,
onToggleFilter,
}) => {
@ -28,9 +33,11 @@ const DygraphLegend = ({
series,
({y, label}) => (sortType === 'numeric' ? y : label)
)
const ordered = isAscending ? sorted : sorted.reverse()
const filtered = ordered.filter(s => s.label.match(filterText))
const hidden = isHidden ? 'hidden' : ''
const style = makeLegendStyles(graph, legend, pageX)
const renderSortAlpha = (
<div
@ -62,12 +69,12 @@ const DygraphLegend = ({
<div className="sort-btn--bottom">9</div>
</button>
)
return (
<div
className={`dygraph-legend dygraph-legend--${arrowPosition} ${hidden}`}
className={`dygraph-legend ${hidden}`}
ref={legendRef}
onMouseLeave={onHide}
style={style}
>
<div className="dygraph-legend--header">
<div className="dygraph-legend--timestamp">
@ -111,7 +118,7 @@ const DygraphLegend = ({
? 'dygraph-legend--row highlight'
: 'dygraph-legend--row'
return (
<div key={label + color} className={seriesClass}>
<div key={uuid.v4()} className={seriesClass}>
<span style={{color}}>
{isSnipped ? removeMeasurement(label) : label}
</span>
@ -141,7 +148,9 @@ DygraphLegend.propTypes = {
yHTML: string,
})
),
dygraph: shape(),
pageX: number,
legend: shape({}),
graph: shape({}),
onSnip: func.isRequired,
onHide: func.isRequired,
onSort: func.isRequired,
@ -154,7 +163,6 @@ DygraphLegend.propTypes = {
legendRef: func.isRequired,
isSnipped: bool.isRequired,
isFilterVisible: bool.isRequired,
arrowPosition: string.isRequired,
}
export default DygraphLegend

View File

@ -99,7 +99,7 @@ class OptIn extends Component {
handleInputRef = el => (this.customValueInput = el)
render() {
const {fixedPlaceholder, customPlaceholder, type} = this.props
const {fixedPlaceholder, customPlaceholder, type, min} = this.props
const {useCustomValue, customValue} = this.state
return (
@ -110,6 +110,7 @@ class OptIn extends Component {
>
<ClickOutsideInput
id={this.id}
min={min}
type={type}
customValue={customValue}
onGetRef={this.handleInputRef}
@ -119,7 +120,6 @@ class OptIn extends Component {
onKeyDown={this.handleKeyDownCustomValueInput}
handleClickOutsideInput={this.handleClickOutsideInput}
/>
<div
className="opt-in--groove-knob-container"
id={this.id}
@ -141,15 +141,16 @@ class OptIn extends Component {
}
OptIn.defaultProps = {
fixedPlaceholder: 'auto',
fixedValue: '',
customPlaceholder: 'Custom Value',
fixedPlaceholder: 'auto',
customValue: '',
}
const {func, oneOf, string} = PropTypes
OptIn.propTypes = {
min: string,
fixedPlaceholder: string,
fixedValue: string,
customPlaceholder: string,

View File

@ -114,6 +114,65 @@ export const barPlotter = e => {
}
}
// Compute absolute CSS positioning (left/top, in px) for the hover legend so
// it follows the cursor's x position while staying within the graph bounds
// and the visible viewport. Returns an empty style object until both DOM
// nodes and a cursor position (pageX) are available.
export const makeLegendStyles = (graph, legend, pageX) => {
  if (!graph || !legend || pageX === null) {
    return {}
  }

  const chronografChromeSize = 60 // Width & Height of navigation page elements

  const graphRect = graph.getBoundingClientRect()
  const legendRect = legend.getBoundingClientRect()

  const graphWidth = graphRect.width + 32 // Factoring in padding from parent
  const graphHeight = graphRect.height
  const graphBottom = graphRect.bottom
  const legendWidth = legendRect.width
  const legendHeight = legendRect.height
  const screenHeight = window.innerHeight

  const halfLegend = legendWidth / 2
  const legendMaxLeft = graphWidth - halfLegend
  const trueGraphX = pageX - graphRect.left

  // Track the cursor, but keep the legend's center inside the graph
  let legendLeft = trueGraphX
  if (trueGraphX < halfLegend) {
    legendLeft = halfLegend
  } else if (trueGraphX > legendMaxLeft) {
    legendLeft = legendMaxLeft
  }

  // Would the legend run off the screen below, or above the page chrome?
  const isLegendBottomClipped = graphBottom + legendHeight > screenHeight
  const isLegendTopClipped = legendHeight > graphRect.top - chronografChromeSize
  const willLegendFitLeft = pageX - chronografChromeSize > legendWidth

  // Default: render just below the graph
  let legendTop = graphHeight + 8

  if (isLegendBottomClipped && !isLegendTopClipped) {
    // Clipped only on the bottom: flip the legend above the graph
    legendTop = -legendHeight
  } else if (isLegendBottomClipped && isLegendTopClipped) {
    // Clipped top and bottom: position on either side of the crosshair
    legendTop = 0
    legendLeft = willLegendFitLeft
      ? trueGraphX - halfLegend - 8
      : trueGraphX + halfLegend + 32
  }

  return {
    left: `${legendLeft}px`,
    top: `${legendTop}px`,
  }
}
export const OPTIONS = {
rightGap: 0,
axisLineWidth: 2,

View File

@ -1,3 +1,4 @@
import _ from 'lodash'
import databases from 'shared/parsing/showDatabases'
import measurements from 'shared/parsing/showMeasurements'
import fieldKeys from 'shared/parsing/showFieldKeys'
@ -8,16 +9,19 @@ const parsers = {
databases,
measurements: data => {
const {errors, measurementSets} = measurements(data)
return {errors, measurements: measurementSets[0].measurements}
return {
errors,
measurements: _.get(measurementSets, ['0', 'measurements'], []),
}
},
fieldKeys: (data, key) => {
const {errors, fieldSets} = fieldKeys(data)
return {errors, fieldKeys: fieldSets[key]}
return {errors, fieldKeys: _.get(fieldSets, key, [])}
},
tagKeys,
tagValues: (data, key) => {
const {errors, tags} = tagValues(data)
return {errors, tagValues: tags[key]}
return {errors, tagValues: _.get(tags, key, [])}
},
}

View File

@ -2,7 +2,7 @@ import _ from 'lodash'
import moment from 'moment'
export const formatDate = timestamp =>
moment(timestamp).format('M/D/YYYY h:mm:ss A')
moment(timestamp).format('M/D/YYYY h:mm:ss.SSSSSSSSS A')
export const resultsToCSV = results => {
if (!_.get(results, ['0', 'series', '0'])) {

View File

@ -1,11 +1,16 @@
var webpack = require('webpack');
var path = require('path');
var ExtractTextPlugin = require("extract-text-webpack-plugin");
var HtmlWebpackPlugin = require("html-webpack-plugin");
var package = require('../package.json');
var dependencies = package.dependencies;
var webpack = require('webpack')
var path = require('path')
var ExtractTextPlugin = require('extract-text-webpack-plugin')
var HtmlWebpackPlugin = require('html-webpack-plugin')
var package = require('../package.json')
const WebpackOnBuildPlugin = require('on-build-webpack')
const fs = require('fs')
var dependencies = package.dependencies
const buildDir = path.resolve(__dirname, '../build')
module.exports = {
watch: true,
devtool: 'source-map',
entry: {
app: path.resolve(__dirname, '..', 'src', 'index.js'),
@ -14,7 +19,7 @@ module.exports = {
output: {
publicPath: '/',
path: path.resolve(__dirname, '../build'),
filename: '[name].[chunkhash].dev.js',
filename: '[name].[hash].dev.js',
},
resolve: {
alias: {
@ -48,15 +53,21 @@ module.exports = {
},
{
test: /\.scss$/,
loader: ExtractTextPlugin.extract('style-loader', 'css-loader!sass-loader!resolve-url!sass?sourceMap'),
loader: ExtractTextPlugin.extract(
'style-loader',
'css-loader!sass-loader!resolve-url!sass?sourceMap'
),
},
{
test: /\.css$/,
loader: ExtractTextPlugin.extract('style-loader', 'css-loader!postcss-loader'),
loader: ExtractTextPlugin.extract(
'style-loader',
'css-loader!postcss-loader'
),
},
{
test : /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
loader : 'file',
test: /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
loader: 'file',
},
{
test: /\.js$/,
@ -70,18 +81,19 @@ module.exports = {
],
},
sassLoader: {
includePaths: [path.resolve(__dirname, "node_modules")],
includePaths: [path.resolve(__dirname, 'node_modules')],
},
eslint: {
failOnWarning: false,
failOnError: false,
},
plugins: [
new webpack.HotModuleReplacementPlugin(),
new webpack.ProvidePlugin({
$: "jquery",
jQuery: "jquery",
$: 'jquery',
jQuery: 'jquery',
}),
new ExtractTextPlugin("chronograf.css"),
new ExtractTextPlugin('chronograf.css'),
new HtmlWebpackPlugin({
template: path.resolve(__dirname, '..', 'src', 'index.template.html'),
inject: 'body',
@ -93,7 +105,45 @@ module.exports = {
new webpack.DefinePlugin({
VERSION: JSON.stringify(require('../package.json').version),
}),
new WebpackOnBuildPlugin(function(stats) {
const newlyCreatedAssets = stats.compilation.assets
const unlinked = []
fs.readdir(path.resolve(buildDir), (err, files) => {
files.forEach(file => {
if (!newlyCreatedAssets[file]) {
const del = path.resolve(buildDir + file)
fs.stat(del, function(err, stat) {
if (err == null) {
try {
fs.unlink(path.resolve(buildDir + file))
unlinked.push(file)
} catch (e) {}
}
})
}
})
})
}),
],
postcss: require('./postcss'),
target: 'web',
};
devServer: {
hot: true,
historyApiFallback: true,
clientLogLevel: 'info',
stats: {colors: true},
contentBase: 'build',
quiet: false,
watchOptions: {
aggregateTimeout: 300,
poll: 1000,
},
proxy: {
'/chronograf/v1': {
target: 'http://localhost:8888',
secure: false,
},
},
},
}

View File

@ -1,14 +1,14 @@
/* eslint-disable no-var */
var webpack = require('webpack');
var path = require('path');
var ExtractTextPlugin = require("extract-text-webpack-plugin");
var HtmlWebpackPlugin = require("html-webpack-plugin");
var package = require('../package.json');
var dependencies = package.dependencies;
var webpack = require('webpack')
var path = require('path')
var ExtractTextPlugin = require('extract-text-webpack-plugin')
var HtmlWebpackPlugin = require('html-webpack-plugin')
var package = require('../package.json')
var dependencies = package.dependencies
var config = {
bail: true,
devtool: 'eval',
devtool: 'eval',
entry: {
app: path.resolve(__dirname, '..', 'src', 'index.js'),
vendor: Object.keys(dependencies),
@ -28,6 +28,15 @@ var config = {
},
},
module: {
noParse: [
path.resolve(
__dirname,
'..',
'node_modules',
'memoizerific',
'memoizerific.js'
),
],
preLoaders: [
{
test: /\.js$/,
@ -42,15 +51,21 @@ var config = {
},
{
test: /\.scss$/,
loader: ExtractTextPlugin.extract('style-loader', 'css-loader!sass-loader!resolve-url!sass?sourceMap'),
loader: ExtractTextPlugin.extract(
'style-loader',
'css-loader!sass-loader!resolve-url!sass?sourceMap'
),
},
{
test: /\.css$/,
loader: ExtractTextPlugin.extract('style-loader', 'css-loader!postcss-loader'),
loader: ExtractTextPlugin.extract(
'style-loader',
'css-loader!postcss-loader'
),
},
{
test : /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
loader : 'file',
test: /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
loader: 'file',
},
{
test: /\.js$/,
@ -74,10 +89,10 @@ var config = {
},
}),
new webpack.ProvidePlugin({
$: "jquery",
jQuery: "jquery",
$: 'jquery',
jQuery: 'jquery',
}),
new ExtractTextPlugin("chronograf.css"),
new ExtractTextPlugin('chronograf.css'),
new HtmlWebpackPlugin({
template: path.resolve(__dirname, '..', 'src', 'index.template.html'),
inject: 'body',
@ -86,21 +101,28 @@ var config = {
}),
new webpack.optimize.UglifyJsPlugin({
compress: {
warnings: false
}
warnings: false,
},
}),
new webpack.optimize.CommonsChunkPlugin({
names: ['vendor', 'manifest'],
}),
function() { /* Webpack does not exit with non-zero status if error. */
this.plugin("done", function(stats) {
if (stats.compilation.errors && stats.compilation.errors.length && process.argv.indexOf("--watch") == -1) {
console.log(stats.compilation.errors.toString({
colors: true
}));
process.exit(1);
function() {
/* Webpack does not exit with non-zero status if error. */
this.plugin('done', function(stats) {
if (
stats.compilation.errors &&
stats.compilation.errors.length &&
process.argv.indexOf('--watch') == -1
) {
console.log(
stats.compilation.errors.toString({
colors: true,
})
)
process.exit(1)
}
});
})
},
new webpack.DefinePlugin({
VERSION: JSON.stringify(require('../package.json').version),
@ -108,6 +130,6 @@ var config = {
],
postcss: require('./postcss'),
target: 'web',
};
}
module.exports = config;
module.exports = config

View File

@ -5134,6 +5134,10 @@ object.values@^1.0.3:
function-bind "^1.1.0"
has "^1.0.1"
on-build-webpack@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/on-build-webpack/-/on-build-webpack-0.1.0.tgz#a287c0e17766e6141926e5f2cbb0d8bb53b76814"
on-finished@~2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947"