Merge branch 'multitenancy' into multitenancy_fix_layouts

pull/2484/head
Jared Scheib 2017-12-05 16:03:05 -08:00
commit 808163fe69
125 changed files with 3594 additions and 2357 deletions

View File

@ -1,5 +1,7 @@
## v1.3.11.0 [unreleased]
### Bug Fixes
1. [#2449](https://github.com/influxdata/chronograf/pull/2449): Fix .jsdep step fails when LDFLAGS is exported
1. [#2157](https://github.com/influxdata/chronograf/pull/2157): Fix logscale producing console errors when only one point in graph
1. [#2157](https://github.com/influxdata/chronograf/pull/2157): Fix logscale producing console errors when only one point in graph
1. [#2158](https://github.com/influxdata/chronograf/pull/2158): Fix 'Cannot connect to source' false error flag on Dashboard page
1. [#2167](https://github.com/influxdata/chronograf/pull/2167): Add fractions of seconds to time field in csv export
@ -8,9 +10,32 @@
1. [#2291](https://github.com/influxdata/chronograf/pull/2291): Fix several kapacitor alert creation panics.
1. [#2303](https://github.com/influxdata/chronograf/pull/2303): Add shadow-utils to RPM release packages
1. [#2292](https://github.com/influxdata/chronograf/pull/2292): Source extra command line options from defaults file
1. [#2327](https://github.com/influxdata/chronograf/pull/2327): After CREATE/DELETE queries, refresh list of databases in Data Explorer
1. [#2327](https://github.com/influxdata/chronograf/pull/2327): Visualize CREATE/DELETE queries with Table view in Data Explorer
1. [#2329](https://github.com/influxdata/chronograf/pull/2329): Include tag values alongside measurement name in Data Explorer result tabs
1. [#2386](https://github.com/influxdata/chronograf/pull/2386): Fix queries that include regex, numbers and wildcard
1. [#2398](https://github.com/influxdata/chronograf/pull/2398): Fix apps on hosts page from parsing tags with null values
1. [#2408](https://github.com/influxdata/chronograf/pull/2408): Fix updated Dashboard names not updating dashboard list
1. [#2444](https://github.com/influxdata/chronograf/pull/2444): Fix create dashboard button
1. [#2416](https://github.com/influxdata/chronograf/pull/2416): Fix default y-axis labels not displaying properly
1. [#2423](https://github.com/influxdata/chronograf/pull/2423): Gracefully scale Template Variables Manager overlay on smaller displays
1. [#2426](https://github.com/influxdata/chronograf/pull/2426): Fix Influx Enterprise users from deletion in race condition
1. [#2467](https://github.com/influxdata/chronograf/pull/2467): Fix oauth2 logout link not having basepath
1. [#2466](https://github.com/influxdata/chronograf/pull/2466): Fix supplying a role link to sources that do not have a metaURL
1. [#2477](https://github.com/influxdata/chronograf/pull/2477): Fix hoverline intermittently not rendering
1. [#2483](https://github.com/influxdata/chronograf/pull/2483): Update MySQL pre-canned dashboard to have query derivative correctly
### Features
1. [#2188](https://github.com/influxdata/chronograf/pull/2188): Add Kapacitor logs to the TICKscript editor
1. [#2384](https://github.com/influxdata/chronograf/pull/2384): Add filtering by name to Dashboard index page
1. [#2385](https://github.com/influxdata/chronograf/pull/2385): Add time shift feature to DataExplorer and Dashboards
1. [#2400](https://github.com/influxdata/chronograf/pull/2400): Allow override of generic oauth2 keys for email
1. [#2426](https://github.com/influxdata/chronograf/pull/2426): Add auto group by time to Data Explorer
1. [#2456](https://github.com/influxdata/chronograf/pull/2456): Add boolean thresholds for kapacitor threshold alerts
1. [#2460](https://github.com/influxdata/chronograf/pull/2460): Update kapacitor alerts to cast to float before sending to influx
1. [#2479](https://github.com/influxdata/chronograf/pull/2479): Support authentication for Enterprise Meta Nodes
1. [#2477](https://github.com/influxdata/chronograf/pull/2477): Improve performance of hoverline rendering
### UI Improvements
## v1.3.10.0 [2017-10-24]
@ -33,7 +58,7 @@
### UI Improvements
1. [#2111](https://github.com/influxdata/chronograf/pull/2111): Increase size of Cell Editor query tabs to reveal more of their query strings
1. [#2120](https://github.com/influxdata/chronograf/pull/2120): Improve appearance of Admin Page tabs on smaller screens
1. [#2119](https://github.com/influxdata/chronograf/pull/2119): Add cancel button to Tickscript editor
1. [#2119](https://github.com/influxdata/chronograf/pull/2119): Add cancel button to TICKscript editor
1. [#2104](https://github.com/influxdata/chronograf/pull/2104): Redesign dashboard naming & renaming interaction
1. [#2104](https://github.com/influxdata/chronograf/pull/2104): Redesign dashboard switching dropdown
@ -53,7 +78,7 @@
### Features
1. [#1885](https://github.com/influxdata/chronograf/pull/1885): Add `fill` options to data explorer and dashboard queries
1. [#1978](https://github.com/influxdata/chronograf/pull/1978): Support editing kapacitor TICKScript
1. [#1978](https://github.com/influxdata/chronograf/pull/1978): Support editing kapacitor TICKscript
1. [#1721](https://github.com/influxdata/chronograf/pull/1721): Introduce the TICKscript editor UI
1. [#1992](https://github.com/influxdata/chronograf/pull/1992): Add .csv download button to data explorer
1. [#2082](https://github.com/influxdata/chronograf/pull/2082): Add Data Explorer InfluxQL query and location query synchronization, so queries can be shared via a URL

View File

@ -8,6 +8,7 @@ YARN := $(shell command -v yarn 2> /dev/null)
SOURCES := $(shell find . -name '*.go' ! -name '*_gen.go' -not -path "./vendor/*" )
UISOURCES := $(shell find ui -type f -not \( -path ui/build/\* -o -path ui/node_modules/\* -prune \) )
unexport LDFLAGS
LDFLAGS=-ldflags "-s -X main.version=${VERSION} -X main.commit=${COMMIT}"
BINARY=chronograf
@ -23,42 +24,14 @@ ${BINARY}: $(SOURCES) .bindata .jsdep .godep
go build -o ${BINARY} ${LDFLAGS} ./cmd/chronograf/main.go
define CHRONOGIRAFFE
tLf iCf.
.CCC. tCC:
CGG; CGG:
tG0Gt: GGGGGGGGGGGGGGGG1 .,:,
LG1,,:1CC: .GGL;iLC1iii1LCi;GG1 .1GCL1iGG1
LG1:::;i1CGGt;;;;;;L0t;;;;;;GGGC1;;::,iGC
,ii:. 1GG1iiii;;tfiC;;;;;;;GGCfCGCGGC,
fGCiiiiGi1Lt;;iCLL,i;;;CGt
fGG11iiii1C1iiiiiGt1;;;;;CGf
.GGLLL1i1CitfiiL1iCi;;iLCGGt
.CGL11LGCCCCCCCLLCGG1;1GG;
CGL1tf1111iiiiiiL1ifGG,
LGCff1fCt1tCfiiCiCGC
LGGf111111111iCGGt
fGGGGGGGGGGGGGGi
ifii111111itL
;f1i11111iitf
;f1iiiiiii1tf
:fi111iii11tf
:fi111ii1i1tf
:f111111ii1tt
,L111111ii1tt
.Li1111i1111CCCCCCCCCCCCCCLt;
L111ii11111ittttt1tttttittti1fC;
f1111ii111i1ttttt1;iii1ittt1ttttCt.
tt11ii111tti1ttt1tt1;11;;;;iitttifCCCL,
11i1i11ttttti;1t1;;;ttt1;;ii;itti;L,;CCL
;f;;;;1tttti;;ttti;;;;;;;;;;;1tt1ifi .CCi
,L;itti;;;it;;;;;tt1;;;t1;;;;;;ii;t; :CC,
L;;;;iti;;;;;;;;;;;;;;;;;;;;;;;i;L, ;CC.
ti;;;iLLfffi;;;;;ittt11i;;;;;;;;;L tCCfff;
it;;;;;;L,ti;;;;;1Ltttft1t;;;;;;1t ;CCCL;
:f;;;;;;L.ti;;;;;tftttf1,f;;;;;;f: ;CC1:
.L;;;;;;L.t1;;;;;tt111fi,f;;;;;;L.
1Li;;iL1 :Ci;;;tL1i1fC, Lt;;;;Li
.;tt; ifLt:;fLf; ;LCCt,
._ o o
\_`-)|_
,"" _\_
," ## | 0 0.
," ## ,-\__ `.
," / `--._;) - "HAI, I'm Chronogiraffe. Let's be friends!"
," ## /
," ## /
endef
export CHRONOGIRAFFE
chronogiraffe: ${BINARY}

127
README.md
View File

@ -1,6 +1,8 @@
# Chronograf
Chronograf is an open-source web application written in Go and React.js that provides the tools to visualize your monitoring data and easily create alerting and automation rules.
Chronograf is an open-source web application written in Go and React.js that
provides the tools to visualize your monitoring data and easily create alerting
and automation rules.
<p align="left">
<img src="https://github.com/influxdata/chronograf/blob/master/docs/images/overview-readme.png"/>
@ -16,8 +18,11 @@ Chronograf is an open-source web application written in Go and React.js that pro
### Dashboard Templates
Chronograf's [pre-canned dashboards](https://github.com/influxdata/chronograf/tree/master/canned) for the supported [Telegraf](https://github.com/influxdata/telegraf) input plugins.
Currently, Chronograf offers dashboard templates for the following Telegraf input plugins:
Chronograf's
[pre-canned dashboards](https://github.com/influxdata/chronograf/tree/master/canned)
for the supported [Telegraf](https://github.com/influxdata/telegraf) input
plugins. Currently, Chronograf offers dashboard templates for the following
Telegraf input plugins:
* [Apache](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/apache)
* [Consul](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/consul)
@ -43,40 +48,49 @@ Currently, Chronograf offers dashboard templates for the following Telegraf inpu
* [Redis](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/redis)
* [Riak](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/riak)
* [System](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/SYSTEM_README.md)
* [CPU](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/CPU_README.md)
* [Disk](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/DISK_README.md)
* [DiskIO](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/disk.go#L136)
* [Memory](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/MEM_README.md)
* [Net](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/net.go)
* [Netstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/NETSTAT_README.md)
* [Processes](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/PROCESSES_README.md)
* [Procstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/procstat/README.md)
* [CPU](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/CPU_README.md)
* [Disk](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/DISK_README.md)
* [DiskIO](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/disk.go#L136)
* [Memory](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/MEM_README.md)
* [Net](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/net.go)
* [Netstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/NETSTAT_README.md)
* [Processes](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/PROCESSES_README.md)
* [Procstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/procstat/README.md)
* [Varnish](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/varnish)
* [Windows Performance Counters](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/win_perf_counters)
> Note: If a `telegraf` instance isn't running the `system` and `cpu` plugins the canned dashboards from that instance won't be generated.
> Note: If a `telegraf` instance isn't running the `system` and `cpu` plugins
> the canned dashboards from that instance won't be generated.
### Data Explorer
Chronograf's graphing tool that allows you to dig in and create personalized visualizations of your data.
Chronograf's graphing tool that allows you to dig in and create personalized
visualizations of your data.
* Generate and edit [InfluxQL](https://docs.influxdata.com/influxdb/latest/query_language/) statements with the query editor
* Generate and edit
[InfluxQL](https://docs.influxdata.com/influxdb/latest/query_language/)
statements with the query editor
* Use Chronograf's query templates to easily explore your data
* Create visualizations and view query results in tabular format
### Dashboards
Create and edit customized dashboards. The dashboards support several visualization types including line graphs, stacked graphs, step plots, single statistic graphs, and line-single-statistic graphs.
Create and edit customized dashboards. The dashboards support several
visualization types including line graphs, stacked graphs, step plots, single
statistic graphs, and line-single-statistic graphs.
Use Chronograf's template variables to easily adjust the data that appear in your graphs and gain deeper insight into your data.
Use Chronograf's template variables to easily adjust the data that appear in
your graphs and gain deeper insight into your data.
### Kapacitor UI
A UI for [Kapacitor](https://github.com/influxdata/kapacitor) alert creation and alert tracking.
A UI for [Kapacitor](https://github.com/influxdata/kapacitor) alert creation and
alert tracking.
* Simply generate threshold, relative, and deadman alerts
* Preview data and alert boundaries while creating an alert
* Configure alert destinations - Currently, Chronograf supports sending alerts to:
* Configure alert destinations - Currently, Chronograf supports sending alerts
to:
* [Alerta](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#alerta)
* [Exec](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#exec)
* [HipChat](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#hipchat)
@ -96,45 +110,71 @@ A UI for [Kapacitor](https://github.com/influxdata/kapacitor) alert creation and
### User and Query Management
Manage users, roles, permissions for [OSS InfluxDB](https://github.com/influxdata/influxdb) and InfluxData's [Enterprise](https://docs.influxdata.com/enterprise/v1.2/) product.
View actively running queries and stop expensive queries on the Query Management page.
Manage users, roles, permissions for
[OSS InfluxDB](https://github.com/influxdata/influxdb) and InfluxData's
[Enterprise](https://docs.influxdata.com/enterprise/v1.2/) product. View
actively running queries and stop expensive queries on the Query Management
page.
### TLS/HTTPS Support
See [Chronograf with TLS](https://github.com/influxdata/chronograf/blob/master/docs/tls.md) for more information.
See
[Chronograf with TLS](https://github.com/influxdata/chronograf/blob/master/docs/tls.md)
for more information.
### OAuth Login
See [Chronograf with OAuth 2.0](https://github.com/influxdata/chronograf/blob/master/docs/auth.md) for more information.
See
[Chronograf with OAuth 2.0](https://github.com/influxdata/chronograf/blob/master/docs/auth.md)
for more information.
### Advanced Routing
Change the default root path of the Chronograf server with the `--basepath` option.
Change the default root path of the Chronograf server with the `--basepath`
option.
## Versions
The most recent version of Chronograf is [v1.3.10.0](https://www.influxdata.com/downloads/).
The most recent version of Chronograf is
[v1.3.10.0](https://www.influxdata.com/downloads/).
Spotted a bug or have a feature request?
Please open [an issue](https://github.com/influxdata/chronograf/issues/new)!
Spotted a bug or have a feature request? Please open
[an issue](https://github.com/influxdata/chronograf/issues/new)!
### Known Issues
The Chronograf team has identified and is working on the following issues:
* Chronograf requires users to run Telegraf's [CPU](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/CPU_README.md) and [system](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/SYSTEM_README.md) plugins to ensure that all Apps appear on the [HOST LIST](https://github.com/influxdata/chronograf/blob/master/docs/GETTING_STARTED.md#host-list) page.
* Chronograf requires users to run Telegraf's
[CPU](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/CPU_README.md)
and
[system](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/SYSTEM_README.md)
plugins to ensure that all Apps appear on the
[HOST LIST](https://github.com/influxdata/chronograf/blob/master/docs/GETTING_STARTED.md#host-list)
page.
## Installation
Check out the [INSTALLATION](https://docs.influxdata.com/chronograf/v1.3/introduction/installation/) guide to get up and running with Chronograf with as little configuration and code as possible.
Check out the
[INSTALLATION](https://docs.influxdata.com/chronograf/v1.3/introduction/installation/)
guide to get up and running with Chronograf with as little configuration and
code as possible.
We recommend installing Chronograf using one of the [pre-built packages](https://influxdata.com/downloads/#chronograf). Then start Chronograf using:
We recommend installing Chronograf using one of the
[pre-built packages](https://influxdata.com/downloads/#chronograf). Then start
Chronograf using:
* `service chronograf start` if you have installed Chronograf using an official Debian or RPM package.
* `systemctl start chronograf` if you have installed Chronograf using an official Debian or RPM package, and are running a distro with `systemd`. For example, Ubuntu 15 or later.
* `service chronograf start` if you have installed Chronograf using an official
Debian or RPM package.
* `systemctl start chronograf` if you have installed Chronograf using an
official Debian or RPM package, and are running a distro with `systemd`. For
example, Ubuntu 15 or later.
* `$GOPATH/bin/chronograf` if you have built Chronograf from source.
By default, chronograf runs on port `8888`.
### With Docker
To get started right away with Docker, you can pull down our latest release:
```sh
@ -144,7 +184,8 @@ docker pull chronograf:1.3.10.0
### From Source
* Chronograf works with go 1.8.x, node 6.x/7.x, and yarn 0.18+.
* Chronograf requires [Kapacitor](https://github.com/influxdata/kapacitor) 1.2.x+ to create and store alerts.
* Chronograf requires [Kapacitor](https://github.com/influxdata/kapacitor)
1.2.x+ to create and store alerts.
1. [Install Go](https://golang.org/doc/install)
1. [Install Node and NPM](https://nodejs.org/en/download/)
@ -157,11 +198,23 @@ docker pull chronograf:1.3.10.0
## Documentation
[Getting Started](https://docs.influxdata.com/chronograf/v1.3/introduction/getting-started/) will get you up and running with Chronograf with as little configuration and code as possible.
See our [guides](https://docs.influxdata.com/chronograf/v1.3/guides/) to get familiar with Chronograf's main features.
[Getting Started](https://docs.influxdata.com/chronograf/v1.3/introduction/getting-started/)
will get you up and running with Chronograf with as little configuration and
code as possible. See our
[guides](https://docs.influxdata.com/chronograf/v1.3/guides/) to get familiar
with Chronograf's main features.
Documentation for Telegraf, InfluxDB, and Kapacitor are available at https://docs.influxdata.com/.
Documentation for Telegraf, InfluxDB, and Kapacitor is available at
https://docs.influxdata.com/.
Chronograf uses
[swagger](https://swagger.io/specification/) to
document its REST interfaces. To reach the documentation, run the server and go
to `/docs`, for example at http://localhost:8888/docs.
The swagger JSON document is in `server/swagger.json`
## Contributing
Please see the [contributing guide](CONTRIBUTING.md) for details on contributing to Chronograf.
Please see the [contributing guide](CONTRIBUTING.md) for details on contributing
to Chronograf.

View File

@ -107,6 +107,7 @@ func (d *DashboardsStore) Add(ctx context.Context, src chronograf.Dashboard) (ch
id, _ := b.NextSequence()
src.ID = chronograf.DashboardID(id)
// TODO: use FormatInt
strID := strconv.Itoa(int(id))
for i, cell := range src.Cells {
cid, err := d.IDs.Generate()
@ -116,12 +117,11 @@ func (d *DashboardsStore) Add(ctx context.Context, src chronograf.Dashboard) (ch
cell.ID = cid
src.Cells[i] = cell
}
if v, err := internal.MarshalDashboard(src); err != nil {
return err
} else if err := b.Put([]byte(strID), v); err != nil {
v, err := internal.MarshalDashboard(src)
if err != nil {
return err
}
return nil
return b.Put([]byte(strID), v)
}); err != nil {
return chronograf.Dashboard{}, err
}

View File

@ -197,12 +197,26 @@ func MarshalDashboard(d chronograf.Dashboard) ([]byte, error) {
if q.Range != nil {
r.Upper, r.Lower = q.Range.Upper, q.Range.Lower
}
q.Shifts = q.QueryConfig.Shifts
queries[j] = &Query{
Command: q.Command,
Label: q.Label,
Range: r,
Source: q.Source,
}
shifts := make([]*TimeShift, len(q.Shifts))
for k := range q.Shifts {
shift := &TimeShift{
Label: q.Shifts[k].Label,
Unit: q.Shifts[k].Unit,
Quantity: q.Shifts[k].Quantity,
}
shifts[k] = shift
}
queries[j].Shifts = shifts
}
axes := make(map[string]*Axis, len(c.Axes))
@ -284,12 +298,26 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
Label: q.Label,
Source: q.Source,
}
if q.Range.Upper != q.Range.Lower {
queries[j].Range = &chronograf.Range{
Upper: q.Range.Upper,
Lower: q.Range.Lower,
}
}
shifts := make([]chronograf.TimeShift, len(q.Shifts))
for k := range q.Shifts {
shift := chronograf.TimeShift{
Label: q.Shifts[k].Label,
Unit: q.Shifts[k].Unit,
Quantity: q.Shifts[k].Quantity,
}
shifts[k] = shift
}
queries[j].Shifts = shifts
}
axes := make(map[string]chronograf.Axis, len(c.Axes))
@ -337,9 +365,9 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
templates := make([]chronograf.Template, len(pb.Templates))
for i, t := range pb.Templates {
vals := make([]chronograf.BasicTemplateValue, len(t.Values))
vals := make([]chronograf.TemplateValue, len(t.Values))
for j, v := range t.Values {
vals[j] = chronograf.BasicTemplateValue{
vals[j] = chronograf.TemplateValue{
Selected: v.Selected,
Type: v.Type,
Value: v.Value,
@ -348,7 +376,7 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
template := chronograf.Template{
ID: chronograf.TemplateID(t.ID),
BasicTemplateVar: chronograf.BasicTemplateVar{
TemplateVar: chronograf.TemplateVar{
Var: t.TempVar,
Values: vals,
},
@ -467,10 +495,7 @@ func UnmarshalUser(data []byte, u *chronograf.User) error {
// UnmarshalUserPB decodes a user from binary protobuf data.
// We are ignoring the password for now.
func UnmarshalUserPB(data []byte, u *User) error {
if err := proto.Unmarshal(data, u); err != nil {
return err
}
return nil
return proto.Unmarshal(data, u)
}
// MarshalRole encodes a role to binary protobuf format.

View File

@ -20,6 +20,7 @@ It has these top-level messages:
Layout
Cell
Query
TimeShift
Range
AlertRule
User
@ -64,97 +65,6 @@ func (m *Source) String() string { return proto.CompactTextString(m)
func (*Source) ProtoMessage() {}
func (*Source) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{0} }
func (m *Source) GetID() int64 {
if m != nil {
return m.ID
}
return 0
}
func (m *Source) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Source) GetType() string {
if m != nil {
return m.Type
}
return ""
}
func (m *Source) GetUsername() string {
if m != nil {
return m.Username
}
return ""
}
func (m *Source) GetPassword() string {
if m != nil {
return m.Password
}
return ""
}
func (m *Source) GetURL() string {
if m != nil {
return m.URL
}
return ""
}
func (m *Source) GetDefault() bool {
if m != nil {
return m.Default
}
return false
}
func (m *Source) GetTelegraf() string {
if m != nil {
return m.Telegraf
}
return ""
}
func (m *Source) GetInsecureSkipVerify() bool {
if m != nil {
return m.InsecureSkipVerify
}
return false
}
func (m *Source) GetMetaURL() string {
if m != nil {
return m.MetaURL
}
return ""
}
func (m *Source) GetSharedSecret() string {
if m != nil {
return m.SharedSecret
}
return ""
}
func (m *Source) GetOrganization() string {
if m != nil {
return m.Organization
}
return ""
}
func (m *Source) GetRole() string {
if m != nil {
return m.Role
}
return ""
}
type Dashboard struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@ -168,20 +78,6 @@ func (m *Dashboard) String() string { return proto.CompactTextString(
func (*Dashboard) ProtoMessage() {}
func (*Dashboard) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{1} }
func (m *Dashboard) GetID() int64 {
if m != nil {
return m.ID
}
return 0
}
func (m *Dashboard) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Dashboard) GetCells() []*DashboardCell {
if m != nil {
return m.Cells
@ -196,13 +92,6 @@ func (m *Dashboard) GetTemplates() []*Template {
return nil
}
func (m *Dashboard) GetOrganization() string {
if m != nil {
return m.Organization
}
return ""
}
type DashboardCell struct {
X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"`
Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"`
@ -220,34 +109,6 @@ func (m *DashboardCell) String() string { return proto.CompactTextStr
func (*DashboardCell) ProtoMessage() {}
func (*DashboardCell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{2} }
func (m *DashboardCell) GetX() int32 {
if m != nil {
return m.X
}
return 0
}
func (m *DashboardCell) GetY() int32 {
if m != nil {
return m.Y
}
return 0
}
func (m *DashboardCell) GetW() int32 {
if m != nil {
return m.W
}
return 0
}
func (m *DashboardCell) GetH() int32 {
if m != nil {
return m.H
}
return 0
}
func (m *DashboardCell) GetQueries() []*Query {
if m != nil {
return m.Queries
@ -255,27 +116,6 @@ func (m *DashboardCell) GetQueries() []*Query {
return nil
}
func (m *DashboardCell) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *DashboardCell) GetType() string {
if m != nil {
return m.Type
}
return ""
}
func (m *DashboardCell) GetID() string {
if m != nil {
return m.ID
}
return ""
}
func (m *DashboardCell) GetAxes() map[string]*Axis {
if m != nil {
return m.Axes
@ -284,7 +124,7 @@ func (m *DashboardCell) GetAxes() map[string]*Axis {
}
type Axis struct {
LegacyBounds []int64 `protobuf:"varint,1,rep,packed,name=legacyBounds" json:"legacyBounds,omitempty"`
LegacyBounds []int64 `protobuf:"varint,1,rep,name=legacyBounds" json:"legacyBounds,omitempty"`
Bounds []string `protobuf:"bytes,2,rep,name=bounds" json:"bounds,omitempty"`
Label string `protobuf:"bytes,3,opt,name=label,proto3" json:"label,omitempty"`
Prefix string `protobuf:"bytes,4,opt,name=prefix,proto3" json:"prefix,omitempty"`
@ -298,55 +138,6 @@ func (m *Axis) String() string { return proto.CompactTextString(m) }
func (*Axis) ProtoMessage() {}
func (*Axis) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{3} }
func (m *Axis) GetLegacyBounds() []int64 {
if m != nil {
return m.LegacyBounds
}
return nil
}
func (m *Axis) GetBounds() []string {
if m != nil {
return m.Bounds
}
return nil
}
func (m *Axis) GetLabel() string {
if m != nil {
return m.Label
}
return ""
}
func (m *Axis) GetPrefix() string {
if m != nil {
return m.Prefix
}
return ""
}
func (m *Axis) GetSuffix() string {
if m != nil {
return m.Suffix
}
return ""
}
func (m *Axis) GetBase() string {
if m != nil {
return m.Base
}
return ""
}
func (m *Axis) GetScale() string {
if m != nil {
return m.Scale
}
return ""
}
type Template struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
TempVar string `protobuf:"bytes,2,opt,name=temp_var,json=tempVar,proto3" json:"temp_var,omitempty"`
@ -361,20 +152,6 @@ func (m *Template) String() string { return proto.CompactTextString(m
func (*Template) ProtoMessage() {}
func (*Template) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{4} }
func (m *Template) GetID() string {
if m != nil {
return m.ID
}
return ""
}
func (m *Template) GetTempVar() string {
if m != nil {
return m.TempVar
}
return ""
}
func (m *Template) GetValues() []*TemplateValue {
if m != nil {
return m.Values
@ -382,20 +159,6 @@ func (m *Template) GetValues() []*TemplateValue {
return nil
}
func (m *Template) GetType() string {
if m != nil {
return m.Type
}
return ""
}
func (m *Template) GetLabel() string {
if m != nil {
return m.Label
}
return ""
}
func (m *Template) GetQuery() *TemplateQuery {
if m != nil {
return m.Query
@ -414,27 +177,6 @@ func (m *TemplateValue) String() string { return proto.CompactTextStr
func (*TemplateValue) ProtoMessage() {}
func (*TemplateValue) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{5} }
func (m *TemplateValue) GetType() string {
if m != nil {
return m.Type
}
return ""
}
func (m *TemplateValue) GetValue() string {
if m != nil {
return m.Value
}
return ""
}
func (m *TemplateValue) GetSelected() bool {
if m != nil {
return m.Selected
}
return false
}
type TemplateQuery struct {
Command string `protobuf:"bytes,1,opt,name=command,proto3" json:"command,omitempty"`
Db string `protobuf:"bytes,2,opt,name=db,proto3" json:"db,omitempty"`
@ -449,48 +191,6 @@ func (m *TemplateQuery) String() string { return proto.CompactTextStr
func (*TemplateQuery) ProtoMessage() {}
func (*TemplateQuery) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{6} }
func (m *TemplateQuery) GetCommand() string {
if m != nil {
return m.Command
}
return ""
}
func (m *TemplateQuery) GetDb() string {
if m != nil {
return m.Db
}
return ""
}
func (m *TemplateQuery) GetRp() string {
if m != nil {
return m.Rp
}
return ""
}
func (m *TemplateQuery) GetMeasurement() string {
if m != nil {
return m.Measurement
}
return ""
}
func (m *TemplateQuery) GetTagKey() string {
if m != nil {
return m.TagKey
}
return ""
}
func (m *TemplateQuery) GetFieldKey() string {
if m != nil {
return m.FieldKey
}
return ""
}
type Server struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@ -507,62 +207,6 @@ func (m *Server) String() string { return proto.CompactTextString(m)
func (*Server) ProtoMessage() {}
func (*Server) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{7} }
func (m *Server) GetID() int64 {
if m != nil {
return m.ID
}
return 0
}
func (m *Server) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Server) GetUsername() string {
if m != nil {
return m.Username
}
return ""
}
func (m *Server) GetPassword() string {
if m != nil {
return m.Password
}
return ""
}
func (m *Server) GetURL() string {
if m != nil {
return m.URL
}
return ""
}
func (m *Server) GetSrcID() int64 {
if m != nil {
return m.SrcID
}
return 0
}
func (m *Server) GetActive() bool {
if m != nil {
return m.Active
}
return false
}
func (m *Server) GetOrganization() string {
if m != nil {
return m.Organization
}
return ""
}
type Layout struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
Application string `protobuf:"bytes,2,opt,name=Application,proto3" json:"Application,omitempty"`
@ -576,27 +220,6 @@ func (m *Layout) String() string { return proto.CompactTextString(m)
func (*Layout) ProtoMessage() {}
func (*Layout) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{8} }
func (m *Layout) GetID() string {
if m != nil {
return m.ID
}
return ""
}
func (m *Layout) GetApplication() string {
if m != nil {
return m.Application
}
return ""
}
func (m *Layout) GetMeasurement() string {
if m != nil {
return m.Measurement
}
return ""
}
func (m *Layout) GetCells() []*Cell {
if m != nil {
return m.Cells
@ -604,13 +227,6 @@ func (m *Layout) GetCells() []*Cell {
return nil
}
func (m *Layout) GetAutoflow() bool {
if m != nil {
return m.Autoflow
}
return false
}
type Cell struct {
X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"`
Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"`
@ -619,7 +235,7 @@ type Cell struct {
Queries []*Query `protobuf:"bytes,5,rep,name=queries" json:"queries,omitempty"`
I string `protobuf:"bytes,6,opt,name=i,proto3" json:"i,omitempty"`
Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"`
Yranges []int64 `protobuf:"varint,8,rep,packed,name=yranges" json:"yranges,omitempty"`
Yranges []int64 `protobuf:"varint,8,rep,name=yranges" json:"yranges,omitempty"`
Ylabels []string `protobuf:"bytes,9,rep,name=ylabels" json:"ylabels,omitempty"`
Type string `protobuf:"bytes,10,opt,name=type,proto3" json:"type,omitempty"`
Axes map[string]*Axis `protobuf:"bytes,11,rep,name=axes" json:"axes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
@ -630,34 +246,6 @@ func (m *Cell) String() string { return proto.CompactTextString(m) }
func (*Cell) ProtoMessage() {}
func (*Cell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{9} }
// GetX returns the X coordinate, or 0 when the receiver is nil.
func (m *Cell) GetX() int32 {
	if m == nil {
		return 0
	}
	return m.X
}
// GetY returns the Y coordinate, or 0 when the receiver is nil.
func (m *Cell) GetY() int32 {
	if m == nil {
		return 0
	}
	return m.Y
}
// GetW returns the width, or 0 when the receiver is nil.
func (m *Cell) GetW() int32 {
	if m == nil {
		return 0
	}
	return m.W
}
// GetH returns the height, or 0 when the receiver is nil.
func (m *Cell) GetH() int32 {
	if m == nil {
		return 0
	}
	return m.H
}
func (m *Cell) GetQueries() []*Query {
if m != nil {
return m.Queries
@ -665,41 +253,6 @@ func (m *Cell) GetQueries() []*Query {
return nil
}
// GetI returns the I field, or "" when the receiver is nil.
func (m *Cell) GetI() string {
	if m == nil {
		return ""
	}
	return m.I
}
// GetName returns the Name field, or "" when the receiver is nil.
func (m *Cell) GetName() string {
	if m == nil {
		return ""
	}
	return m.Name
}
// GetYranges returns the Yranges slice, or nil when the receiver is nil.
func (m *Cell) GetYranges() []int64 {
	if m == nil {
		return nil
	}
	return m.Yranges
}
// GetYlabels returns the Ylabels slice, or nil when the receiver is nil.
func (m *Cell) GetYlabels() []string {
	if m == nil {
		return nil
	}
	return m.Ylabels
}
// GetType returns the Type field, or "" when the receiver is nil.
func (m *Cell) GetType() string {
	if m == nil {
		return ""
	}
	return m.Type
}
func (m *Cell) GetAxes() map[string]*Axis {
if m != nil {
return m.Axes
@ -708,14 +261,15 @@ func (m *Cell) GetAxes() map[string]*Axis {
}
type Query struct {
Command string `protobuf:"bytes,1,opt,name=Command,proto3" json:"Command,omitempty"`
DB string `protobuf:"bytes,2,opt,name=DB,proto3" json:"DB,omitempty"`
RP string `protobuf:"bytes,3,opt,name=RP,proto3" json:"RP,omitempty"`
GroupBys []string `protobuf:"bytes,4,rep,name=GroupBys" json:"GroupBys,omitempty"`
Wheres []string `protobuf:"bytes,5,rep,name=Wheres" json:"Wheres,omitempty"`
Label string `protobuf:"bytes,6,opt,name=Label,proto3" json:"Label,omitempty"`
Range *Range `protobuf:"bytes,7,opt,name=Range" json:"Range,omitempty"`
Source string `protobuf:"bytes,8,opt,name=Source,proto3" json:"Source,omitempty"`
Command string `protobuf:"bytes,1,opt,name=Command,proto3" json:"Command,omitempty"`
DB string `protobuf:"bytes,2,opt,name=DB,proto3" json:"DB,omitempty"`
RP string `protobuf:"bytes,3,opt,name=RP,proto3" json:"RP,omitempty"`
GroupBys []string `protobuf:"bytes,4,rep,name=GroupBys" json:"GroupBys,omitempty"`
Wheres []string `protobuf:"bytes,5,rep,name=Wheres" json:"Wheres,omitempty"`
Label string `protobuf:"bytes,6,opt,name=Label,proto3" json:"Label,omitempty"`
Range *Range `protobuf:"bytes,7,opt,name=Range" json:"Range,omitempty"`
Source string `protobuf:"bytes,8,opt,name=Source,proto3" json:"Source,omitempty"`
Shifts []*TimeShift `protobuf:"bytes,9,rep,name=Shifts" json:"Shifts,omitempty"`
}
func (m *Query) Reset() { *m = Query{} }
@ -723,48 +277,6 @@ func (m *Query) String() string { return proto.CompactTextString(m) }
// ProtoMessage marks Query as a protobuf message type (proto.Message interface).
func (*Query) ProtoMessage() {}

// Descriptor returns the gzipped file descriptor and Query's index path within it.
func (*Query) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{10} }
// GetCommand returns the Command field, or "" when the receiver is nil.
func (m *Query) GetCommand() string {
	if m == nil {
		return ""
	}
	return m.Command
}
// GetDB returns the DB field, or "" when the receiver is nil.
func (m *Query) GetDB() string {
	if m == nil {
		return ""
	}
	return m.DB
}
// GetRP returns the RP field, or "" when the receiver is nil.
func (m *Query) GetRP() string {
	if m == nil {
		return ""
	}
	return m.RP
}
// GetGroupBys returns the GroupBys slice, or nil when the receiver is nil.
func (m *Query) GetGroupBys() []string {
	if m == nil {
		return nil
	}
	return m.GroupBys
}
// GetWheres returns the Wheres slice, or nil when the receiver is nil.
func (m *Query) GetWheres() []string {
	if m == nil {
		return nil
	}
	return m.Wheres
}
// GetLabel returns the Label field, or "" when the receiver is nil.
func (m *Query) GetLabel() string {
	if m == nil {
		return ""
	}
	return m.Label
}
func (m *Query) GetRange() *Range {
if m != nil {
return m.Range
@ -772,13 +284,24 @@ func (m *Query) GetRange() *Range {
return nil
}
func (m *Query) GetSource() string {
func (m *Query) GetShifts() []*TimeShift {
if m != nil {
return m.Source
return m.Shifts
}
return ""
return nil
}
// TimeShift describes a relative offset applied to a query's time range.
type TimeShift struct {
	Label    string `protobuf:"bytes,1,opt,name=Label,proto3" json:"Label,omitempty"`       // user-facing description of the shift
	Unit     string `protobuf:"bytes,2,opt,name=Unit,proto3" json:"Unit,omitempty"`         // InfluxQL time unit, e.g. ms, s, m, h, d
	Quantity string `protobuf:"bytes,3,opt,name=Quantity,proto3" json:"Quantity,omitempty"` // number of units to shift by
}

// Reset clears the message to its zero value (proto.Message interface).
func (m *TimeShift) Reset() { *m = TimeShift{} }

// String renders the message in compact protobuf text form (proto.Message interface).
func (m *TimeShift) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks TimeShift as a protobuf message type.
func (*TimeShift) ProtoMessage() {}

// Descriptor returns the gzipped file descriptor and TimeShift's index path within it.
func (*TimeShift) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{11} }
type Range struct {
Upper int64 `protobuf:"varint,1,opt,name=Upper,proto3" json:"Upper,omitempty"`
Lower int64 `protobuf:"varint,2,opt,name=Lower,proto3" json:"Lower,omitempty"`
@ -787,21 +310,7 @@ type Range struct {
// Reset clears the message to its zero value (proto.Message interface).
func (m *Range) Reset() { *m = Range{} }

// String renders the message in compact protobuf text form (proto.Message interface).
func (m *Range) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks Range as a protobuf message type.
func (*Range) ProtoMessage() {}
func (*Range) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{11} }
// GetUpper returns the Upper bound, or 0 when the receiver is nil.
func (m *Range) GetUpper() int64 {
	if m == nil {
		return 0
	}
	return m.Upper
}
// GetLower returns the Lower bound, or 0 when the receiver is nil.
func (m *Range) GetLower() int64 {
	if m == nil {
		return 0
	}
	return m.Lower
}
func (*Range) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{12} }
type AlertRule struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
@ -813,35 +322,7 @@ type AlertRule struct {
// Reset clears the message to its zero value (proto.Message interface).
func (m *AlertRule) Reset() { *m = AlertRule{} }

// String renders the message in compact protobuf text form (proto.Message interface).
func (m *AlertRule) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks AlertRule as a protobuf message type.
func (*AlertRule) ProtoMessage() {}
func (*AlertRule) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{12} }
// GetID returns the ID field, or "" when the receiver is nil.
func (m *AlertRule) GetID() string {
	if m == nil {
		return ""
	}
	return m.ID
}
// GetJSON returns the JSON field, or "" when the receiver is nil.
func (m *AlertRule) GetJSON() string {
	if m == nil {
		return ""
	}
	return m.JSON
}
// GetSrcID returns the SrcID field, or 0 when the receiver is nil.
func (m *AlertRule) GetSrcID() int64 {
	if m == nil {
		return 0
	}
	return m.SrcID
}
// GetKapaID returns the KapaID field, or 0 when the receiver is nil.
func (m *AlertRule) GetKapaID() int64 {
	if m == nil {
		return 0
	}
	return m.KapaID
}
func (*AlertRule) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{13} }
type User struct {
ID uint64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
@ -855,35 +336,7 @@ type User struct {
// Reset clears the message to its zero value (proto.Message interface).
func (m *User) Reset() { *m = User{} }

// String renders the message in compact protobuf text form (proto.Message interface).
func (m *User) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks User as a protobuf message type.
func (*User) ProtoMessage() {}
func (*User) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{13} }
// GetID returns the ID field, or 0 when the receiver is nil.
func (m *User) GetID() uint64 {
	if m == nil {
		return 0
	}
	return m.ID
}
// GetName returns the Name field, or "" when the receiver is nil.
func (m *User) GetName() string {
	if m == nil {
		return ""
	}
	return m.Name
}
// GetProvider returns the Provider field, or "" when the receiver is nil.
func (m *User) GetProvider() string {
	if m == nil {
		return ""
	}
	return m.Provider
}
// GetScheme returns the Scheme field, or "" when the receiver is nil.
func (m *User) GetScheme() string {
	if m == nil {
		return ""
	}
	return m.Scheme
}
func (*User) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{14} }
func (m *User) GetRoles() []*Role {
if m != nil {
@ -892,13 +345,6 @@ func (m *User) GetRoles() []*Role {
return nil
}
// GetSuperAdmin returns the SuperAdmin field, or false when the receiver is nil.
func (m *User) GetSuperAdmin() bool {
	if m == nil {
		return false
	}
	return m.SuperAdmin
}
type Role struct {
Organization string `protobuf:"bytes,1,opt,name=Organization,proto3" json:"Organization,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@ -907,21 +353,7 @@ type Role struct {
// Reset clears the message to its zero value (proto.Message interface).
func (m *Role) Reset() { *m = Role{} }

// String renders the message in compact protobuf text form (proto.Message interface).
func (m *Role) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks Role as a protobuf message type.
func (*Role) ProtoMessage() {}
func (*Role) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{14} }
// GetOrganization returns the Organization field, or "" when the receiver is nil.
func (m *Role) GetOrganization() string {
	if m == nil {
		return ""
	}
	return m.Organization
}
// GetName returns the Name field, or "" when the receiver is nil.
func (m *Role) GetName() string {
	if m == nil {
		return ""
	}
	return m.Name
}
func (*Role) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{15} }
type Organization struct {
ID uint64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
@ -933,35 +365,7 @@ type Organization struct {
// Reset clears the message to its zero value (proto.Message interface).
func (m *Organization) Reset() { *m = Organization{} }

// String renders the message in compact protobuf text form (proto.Message interface).
func (m *Organization) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks Organization as a protobuf message type.
func (*Organization) ProtoMessage() {}
func (*Organization) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{15} }
// GetID returns the ID field, or 0 when the receiver is nil.
func (m *Organization) GetID() uint64 {
	if m == nil {
		return 0
	}
	return m.ID
}
// GetName returns the Name field, or "" when the receiver is nil.
func (m *Organization) GetName() string {
	if m == nil {
		return ""
	}
	return m.Name
}
// GetDefaultRole returns the DefaultRole field, or "" when the receiver is nil.
func (m *Organization) GetDefaultRole() string {
	if m == nil {
		return ""
	}
	return m.DefaultRole
}
// GetPublic returns the Public field, or false when the receiver is nil.
func (m *Organization) GetPublic() bool {
	if m == nil {
		return false
	}
	return m.Public
}
func (*Organization) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{16} }
func init() {
proto.RegisterType((*Source)(nil), "internal.Source")
@ -975,6 +379,7 @@ func init() {
proto.RegisterType((*Layout)(nil), "internal.Layout")
proto.RegisterType((*Cell)(nil), "internal.Cell")
proto.RegisterType((*Query)(nil), "internal.Query")
proto.RegisterType((*TimeShift)(nil), "internal.TimeShift")
proto.RegisterType((*Range)(nil), "internal.Range")
proto.RegisterType((*AlertRule)(nil), "internal.AlertRule")
proto.RegisterType((*User)(nil), "internal.User")
@ -985,78 +390,81 @@ func init() {
func init() { proto.RegisterFile("internal.proto", fileDescriptorInternal) }
var fileDescriptorInternal = []byte{
// 1155 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x51, 0x8f, 0xdb, 0xc4,
0x13, 0x97, 0xe3, 0x38, 0xb1, 0x27, 0xd7, 0xfb, 0xff, 0xb5, 0xaa, 0xa8, 0x29, 0x12, 0x0a, 0x16,
0x48, 0x41, 0xa2, 0x07, 0x6a, 0x85, 0x84, 0x78, 0x40, 0xca, 0x5d, 0x50, 0x75, 0xdc, 0xb5, 0x3d,
0x36, 0x77, 0xc7, 0x13, 0xaa, 0x36, 0xce, 0x24, 0xb1, 0xea, 0xc4, 0x66, 0x6d, 0xdf, 0xc5, 0x7c,
0x18, 0x24, 0x24, 0x9e, 0x78, 0x44, 0xbc, 0xf3, 0x8a, 0xfa, 0x41, 0xf8, 0x1c, 0x68, 0x76, 0xd7,
0x8e, 0xd3, 0x84, 0xea, 0x5e, 0xe0, 0x6d, 0x7f, 0x33, 0xbb, 0xb3, 0x3b, 0xbf, 0x99, 0xf9, 0x69,
0xe1, 0x30, 0x5a, 0xe5, 0x28, 0x57, 0x22, 0x3e, 0x4a, 0x65, 0x92, 0x27, 0xcc, 0xad, 0x70, 0xf0,
0x57, 0x0b, 0x3a, 0xe3, 0xa4, 0x90, 0x21, 0xb2, 0x43, 0x68, 0x9d, 0x8e, 0x7c, 0xab, 0x6f, 0x0d,
0x6c, 0xde, 0x3a, 0x1d, 0x31, 0x06, 0xed, 0xe7, 0x62, 0x89, 0x7e, 0xab, 0x6f, 0x0d, 0x3c, 0xae,
0xd6, 0x64, 0xbb, 0x2c, 0x53, 0xf4, 0x6d, 0x6d, 0xa3, 0x35, 0x7b, 0x08, 0xee, 0x55, 0x46, 0xd1,
0x96, 0xe8, 0xb7, 0x95, 0xbd, 0xc6, 0xe4, 0xbb, 0x10, 0x59, 0x76, 0x9b, 0xc8, 0xa9, 0xef, 0x68,
0x5f, 0x85, 0xd9, 0xff, 0xc1, 0xbe, 0xe2, 0xe7, 0x7e, 0x47, 0x99, 0x69, 0xc9, 0x7c, 0xe8, 0x8e,
0x70, 0x26, 0x8a, 0x38, 0xf7, 0xbb, 0x7d, 0x6b, 0xe0, 0xf2, 0x0a, 0x52, 0x9c, 0x4b, 0x8c, 0x71,
0x2e, 0xc5, 0xcc, 0x77, 0x75, 0x9c, 0x0a, 0xb3, 0x23, 0x60, 0xa7, 0xab, 0x0c, 0xc3, 0x42, 0xe2,
0xf8, 0x55, 0x94, 0x5e, 0xa3, 0x8c, 0x66, 0xa5, 0xef, 0xa9, 0x00, 0x7b, 0x3c, 0x74, 0xcb, 0x33,
0xcc, 0x05, 0xdd, 0x0d, 0x2a, 0x54, 0x05, 0x59, 0x00, 0x07, 0xe3, 0x85, 0x90, 0x38, 0x1d, 0x63,
0x28, 0x31, 0xf7, 0x7b, 0xca, 0xbd, 0x65, 0xa3, 0x3d, 0x2f, 0xe4, 0x5c, 0xac, 0xa2, 0x1f, 0x45,
0x1e, 0x25, 0x2b, 0xff, 0x40, 0xef, 0x69, 0xda, 0x88, 0x25, 0x9e, 0xc4, 0xe8, 0xdf, 0xd3, 0x2c,
0xd1, 0x3a, 0xf8, 0xdd, 0x02, 0x6f, 0x24, 0xb2, 0xc5, 0x24, 0x11, 0x72, 0x7a, 0x27, 0xae, 0x1f,
0x81, 0x13, 0x62, 0x1c, 0x67, 0xbe, 0xdd, 0xb7, 0x07, 0xbd, 0xc7, 0x0f, 0x8e, 0xea, 0x22, 0xd6,
0x71, 0x4e, 0x30, 0x8e, 0xb9, 0xde, 0xc5, 0x3e, 0x03, 0x2f, 0xc7, 0x65, 0x1a, 0x8b, 0x1c, 0x33,
0xbf, 0xad, 0x8e, 0xb0, 0xcd, 0x91, 0x4b, 0xe3, 0xe2, 0x9b, 0x4d, 0x3b, 0xa9, 0x38, 0xbb, 0xa9,
0x04, 0xbf, 0xb6, 0xe0, 0xde, 0xd6, 0x75, 0xec, 0x00, 0xac, 0xb5, 0x7a, 0xb9, 0xc3, 0xad, 0x35,
0xa1, 0x52, 0xbd, 0xda, 0xe1, 0x56, 0x49, 0xe8, 0x56, 0xf5, 0x86, 0xc3, 0xad, 0x5b, 0x42, 0x0b,
0xd5, 0x11, 0x0e, 0xb7, 0x16, 0xec, 0x63, 0xe8, 0xfe, 0x50, 0xa0, 0x8c, 0x30, 0xf3, 0x1d, 0xf5,
0xba, 0xff, 0x6d, 0x5e, 0xf7, 0x6d, 0x81, 0xb2, 0xe4, 0x95, 0x9f, 0xd8, 0x50, 0xdd, 0xa4, 0x5b,
0x43, 0xad, 0xc9, 0x96, 0x53, 0xe7, 0x75, 0xb5, 0x8d, 0xd6, 0x86, 0x45, 0xdd, 0x0f, 0xc4, 0xe2,
0xe7, 0xd0, 0x16, 0x6b, 0xcc, 0x7c, 0x4f, 0xc5, 0xff, 0xe0, 0x1f, 0x08, 0x3b, 0x1a, 0xae, 0x31,
0xfb, 0x7a, 0x95, 0xcb, 0x92, 0xab, 0xed, 0x0f, 0x9f, 0x82, 0x57, 0x9b, 0xa8, 0x2b, 0x5f, 0x61,
0xa9, 0x12, 0xf4, 0x38, 0x2d, 0xd9, 0x87, 0xe0, 0xdc, 0x88, 0xb8, 0xd0, 0xc5, 0xe9, 0x3d, 0x3e,
0xdc, 0x84, 0x1d, 0xae, 0xa3, 0x8c, 0x6b, 0xe7, 0x97, 0xad, 0x2f, 0xac, 0xe0, 0x37, 0x0b, 0xda,
0x64, 0x23, 0x66, 0x63, 0x9c, 0x8b, 0xb0, 0x3c, 0x4e, 0x8a, 0xd5, 0x34, 0xf3, 0xad, 0xbe, 0x3d,
0xb0, 0xf9, 0x96, 0x8d, 0xbd, 0x03, 0x9d, 0x89, 0xf6, 0xb6, 0xfa, 0xf6, 0xc0, 0xe3, 0x06, 0xb1,
0xfb, 0xe0, 0xc4, 0x62, 0x82, 0xb1, 0x99, 0x31, 0x0d, 0x68, 0x77, 0x2a, 0x71, 0x16, 0xad, 0xcd,
0x88, 0x19, 0x44, 0xf6, 0xac, 0x98, 0x91, 0x5d, 0x57, 0xcf, 0x20, 0xa2, 0x6b, 0x22, 0xb2, 0x9a,
0x42, 0x5a, 0x53, 0xe4, 0x2c, 0x14, 0x71, 0xc5, 0xa1, 0x06, 0xc1, 0x1f, 0x16, 0xcd, 0x96, 0xee,
0x89, 0x46, 0x5f, 0x6a, 0x46, 0xdf, 0x05, 0x97, 0xfa, 0xe5, 0xe5, 0x8d, 0x90, 0xa6, 0x37, 0xbb,
0x84, 0xaf, 0x85, 0x64, 0x9f, 0x42, 0x47, 0x65, 0xbe, 0xa7, 0x3f, 0xab, 0x70, 0xd7, 0xe4, 0xe7,
0x66, 0x5b, 0x5d, 0xc1, 0x76, 0xa3, 0x82, 0x75, 0xb2, 0x4e, 0x33, 0xd9, 0x47, 0xe0, 0x50, 0x2b,
0x94, 0xea, 0xf5, 0x7b, 0x23, 0xeb, 0x86, 0xd1, 0xbb, 0x82, 0x2b, 0xb8, 0xb7, 0x75, 0x63, 0x7d,
0x93, 0xb5, 0x7d, 0xd3, 0xa6, 0x8a, 0x9e, 0xa9, 0x1a, 0xe9, 0x4a, 0x86, 0x31, 0x86, 0x39, 0x4e,
0x15, 0xdf, 0x2e, 0xaf, 0x71, 0xf0, 0xb3, 0xb5, 0x89, 0xab, 0xee, 0x23, 0xe5, 0x08, 0x93, 0xe5,
0x52, 0xac, 0xa6, 0x26, 0x74, 0x05, 0x89, 0xb7, 0xe9, 0xc4, 0x84, 0x6e, 0x4d, 0x27, 0x84, 0x65,
0x6a, 0x2a, 0xd8, 0x92, 0x29, 0xeb, 0x43, 0x6f, 0x89, 0x22, 0x2b, 0x24, 0x2e, 0x71, 0x95, 0x1b,
0x0a, 0x9a, 0x26, 0xf6, 0x00, 0xba, 0xb9, 0x98, 0xbf, 0xa4, 0xde, 0x33, 0x95, 0xcc, 0xc5, 0xfc,
0x0c, 0x4b, 0xf6, 0x1e, 0x78, 0xb3, 0x08, 0xe3, 0xa9, 0x72, 0xe9, 0x72, 0xba, 0xca, 0x70, 0x86,
0x65, 0xf0, 0xa7, 0x05, 0x9d, 0x31, 0xca, 0x1b, 0x94, 0x77, 0x92, 0x94, 0xa6, 0x54, 0xdb, 0x6f,
0x91, 0xea, 0xf6, 0x7e, 0xa9, 0x76, 0x36, 0x52, 0x7d, 0x1f, 0x9c, 0xb1, 0x0c, 0x4f, 0x47, 0xea,
0x45, 0x36, 0xd7, 0x80, 0xba, 0x71, 0x18, 0xe6, 0xd1, 0x0d, 0x1a, 0xfd, 0x36, 0x68, 0x47, 0x69,
0xdc, 0x3d, 0x4a, 0xf3, 0x93, 0x05, 0x9d, 0x73, 0x51, 0x26, 0x45, 0xbe, 0xd3, 0x85, 0x7d, 0xe8,
0x0d, 0xd3, 0x34, 0x8e, 0x42, 0x7d, 0x5a, 0x67, 0xd4, 0x34, 0xd1, 0x8e, 0x67, 0x0d, 0x7e, 0x75,
0x6e, 0x4d, 0x13, 0x4d, 0xf1, 0x89, 0x52, 0x53, 0x2d, 0x8d, 0x8d, 0x29, 0xd6, 0x22, 0xaa, 0x9c,
0x44, 0xc2, 0xb0, 0xc8, 0x93, 0x59, 0x9c, 0xdc, 0xaa, 0x6c, 0x5d, 0x5e, 0xe3, 0xe0, 0x75, 0x0b,
0xda, 0xff, 0x95, 0x02, 0x1e, 0x80, 0x15, 0x99, 0x62, 0x5b, 0x51, 0xad, 0x87, 0xdd, 0x86, 0x1e,
0xfa, 0xd0, 0x2d, 0xa5, 0x58, 0xcd, 0x31, 0xf3, 0x5d, 0xa5, 0x2e, 0x15, 0x54, 0x1e, 0x35, 0x47,
0x5a, 0x08, 0x3d, 0x5e, 0xc1, 0x7a, 0x2e, 0xa0, 0x31, 0x17, 0x9f, 0x18, 0xcd, 0xec, 0xa9, 0x17,
0xf9, 0xdb, 0xb4, 0xfc, 0x7b, 0x52, 0xf9, 0xda, 0x02, 0xa7, 0x1e, 0xaa, 0x93, 0xed, 0xa1, 0x3a,
0xd9, 0x0c, 0xd5, 0xe8, 0xb8, 0x1a, 0xaa, 0xd1, 0x31, 0x61, 0x7e, 0x51, 0x0d, 0x15, 0xbf, 0xa0,
0x62, 0x3d, 0x95, 0x49, 0x91, 0x1e, 0x97, 0xba, 0xaa, 0x1e, 0xaf, 0x31, 0x75, 0xe2, 0x77, 0x0b,
0x94, 0x86, 0x6a, 0x8f, 0x1b, 0x44, 0x7d, 0x7b, 0xae, 0x04, 0x47, 0x93, 0xab, 0x01, 0xfb, 0x08,
0x1c, 0x4e, 0xe4, 0x29, 0x86, 0xb7, 0xea, 0xa2, 0xcc, 0x5c, 0x7b, 0x29, 0xa8, 0xfe, 0x2b, 0x99,
0x06, 0x36, 0x28, 0x78, 0x62, 0x8e, 0x53, 0xf4, 0xab, 0x34, 0x45, 0x69, 0xc6, 0x50, 0x03, 0x75,
0x67, 0x72, 0x8b, 0x5a, 0x41, 0x6d, 0xae, 0x41, 0xf0, 0x3d, 0x78, 0xc3, 0x18, 0x65, 0xce, 0x8b,
0x78, 0x57, 0x77, 0x19, 0xb4, 0xbf, 0x19, 0xbf, 0x78, 0x5e, 0x0d, 0x2f, 0xad, 0x37, 0x23, 0x67,
0xbf, 0x31, 0x72, 0x67, 0x22, 0x15, 0xa7, 0x23, 0xd5, 0x67, 0x36, 0x37, 0x28, 0xf8, 0xc5, 0x82,
0x36, 0xcd, 0x76, 0x23, 0x74, 0xfb, 0x6d, 0xba, 0x70, 0x21, 0x93, 0x9b, 0x68, 0x8a, 0xb2, 0xd2,
0x85, 0x0a, 0xab, 0xa4, 0xc3, 0x05, 0xd6, 0x9f, 0x3b, 0x83, 0xa8, 0xd6, 0xf4, 0xb1, 0xa9, 0x7a,
0xb9, 0x51, 0x6b, 0x32, 0x73, 0xed, 0x64, 0xef, 0x03, 0x8c, 0x8b, 0x14, 0xe5, 0x70, 0xba, 0x8c,
0x56, 0x8a, 0x74, 0x97, 0x37, 0x2c, 0xc1, 0x57, 0xfa, 0xab, 0xb4, 0xa3, 0x10, 0xd6, 0xfe, 0x6f,
0xd5, 0x9b, 0x2f, 0x0f, 0xe2, 0xed, 0x73, 0x77, 0xca, 0xb6, 0x0f, 0x3d, 0xf3, 0xaf, 0x54, 0xbf,
0x34, 0x23, 0x16, 0x0d, 0x13, 0xe5, 0x7c, 0x51, 0x4c, 0xe2, 0x28, 0x54, 0x39, 0xbb, 0xdc, 0xa0,
0x49, 0x47, 0x7d, 0x9f, 0x9f, 0xfc, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xa6, 0xde, 0x3f, 0x80, 0x50,
0x0b, 0x00, 0x00,
// 1207 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xbc, 0x56, 0x5f, 0x8f, 0xdb, 0x44,
0x10, 0xd7, 0xc6, 0x71, 0x62, 0x4f, 0xae, 0x05, 0x2d, 0x15, 0x35, 0x45, 0x42, 0xc1, 0x02, 0xe9,
0x10, 0xf4, 0x40, 0xad, 0x90, 0x10, 0x0f, 0x48, 0xb9, 0x0b, 0xaa, 0x8e, 0xfe, 0xbb, 0x6e, 0x7a,
0xe5, 0x09, 0x55, 0x1b, 0x67, 0x72, 0xb1, 0xea, 0xd8, 0x66, 0x6d, 0xdf, 0x9d, 0xf9, 0x30, 0x48,
0x48, 0x3c, 0xf1, 0x88, 0x78, 0xe7, 0x15, 0xf1, 0x41, 0xf8, 0x0a, 0xbc, 0xa2, 0xd9, 0x5d, 0x3b,
0x4e, 0x2f, 0x54, 0x7d, 0x81, 0xb7, 0xfd, 0xcd, 0xac, 0x67, 0x77, 0x66, 0x7e, 0xf3, 0xf3, 0xc2,
0xf5, 0x38, 0x2d, 0x51, 0xa5, 0x32, 0x39, 0xc8, 0x55, 0x56, 0x66, 0xdc, 0x6b, 0x70, 0xf8, 0x57,
0x0f, 0x06, 0xb3, 0xac, 0x52, 0x11, 0xf2, 0xeb, 0xd0, 0x3b, 0x9e, 0x06, 0x6c, 0xcc, 0xf6, 0x1d,
0xd1, 0x3b, 0x9e, 0x72, 0x0e, 0xfd, 0x47, 0x72, 0x8d, 0x41, 0x6f, 0xcc, 0xf6, 0x7d, 0xa1, 0xd7,
0x64, 0x7b, 0x5a, 0xe7, 0x18, 0x38, 0xc6, 0x46, 0x6b, 0x7e, 0x0b, 0xbc, 0xd3, 0x82, 0xa2, 0xad,
0x31, 0xe8, 0x6b, 0x7b, 0x8b, 0xc9, 0x77, 0x22, 0x8b, 0xe2, 0x22, 0x53, 0x8b, 0xc0, 0x35, 0xbe,
0x06, 0xf3, 0x37, 0xc1, 0x39, 0x15, 0x0f, 0x82, 0x81, 0x36, 0xd3, 0x92, 0x07, 0x30, 0x9c, 0xe2,
0x52, 0x56, 0x49, 0x19, 0x0c, 0xc7, 0x6c, 0xdf, 0x13, 0x0d, 0xa4, 0x38, 0x4f, 0x31, 0xc1, 0x33,
0x25, 0x97, 0x81, 0x67, 0xe2, 0x34, 0x98, 0x1f, 0x00, 0x3f, 0x4e, 0x0b, 0x8c, 0x2a, 0x85, 0xb3,
0x17, 0x71, 0xfe, 0x0c, 0x55, 0xbc, 0xac, 0x03, 0x5f, 0x07, 0xd8, 0xe1, 0xa1, 0x53, 0x1e, 0x62,
0x29, 0xe9, 0x6c, 0xd0, 0xa1, 0x1a, 0xc8, 0x43, 0xd8, 0x9b, 0xad, 0xa4, 0xc2, 0xc5, 0x0c, 0x23,
0x85, 0x65, 0x30, 0xd2, 0xee, 0x2d, 0x1b, 0xed, 0x79, 0xac, 0xce, 0x64, 0x1a, 0xff, 0x20, 0xcb,
0x38, 0x4b, 0x83, 0x3d, 0xb3, 0xa7, 0x6b, 0xa3, 0x2a, 0x89, 0x2c, 0xc1, 0xe0, 0x9a, 0xa9, 0x12,
0xad, 0xc3, 0xdf, 0x18, 0xf8, 0x53, 0x59, 0xac, 0xe6, 0x99, 0x54, 0x8b, 0xd7, 0xaa, 0xf5, 0x6d,
0x70, 0x23, 0x4c, 0x92, 0x22, 0x70, 0xc6, 0xce, 0xfe, 0xe8, 0xce, 0xcd, 0x83, 0xb6, 0x89, 0x6d,
0x9c, 0x23, 0x4c, 0x12, 0x61, 0x76, 0xf1, 0xcf, 0xc0, 0x2f, 0x71, 0x9d, 0x27, 0xb2, 0xc4, 0x22,
0xe8, 0xeb, 0x4f, 0xf8, 0xe6, 0x93, 0xa7, 0xd6, 0x25, 0x36, 0x9b, 0xae, 0xa4, 0xe2, 0x5e, 0x4d,
0x25, 0xfc, 0xa5, 0x07, 0xd7, 0xb6, 0x8e, 0xe3, 0x7b, 0xc0, 0x2e, 0xf5, 0xcd, 0x5d, 0xc1, 0x2e,
0x09, 0xd5, 0xfa, 0xd6, 0xae, 0x60, 0x35, 0xa1, 0x0b, 0xcd, 0x0d, 0x57, 0xb0, 0x0b, 0x42, 0x2b,
0xcd, 0x08, 0x57, 0xb0, 0x15, 0xff, 0x08, 0x86, 0xdf, 0x57, 0xa8, 0x62, 0x2c, 0x02, 0x57, 0xdf,
0xee, 0x8d, 0xcd, 0xed, 0x9e, 0x54, 0xa8, 0x6a, 0xd1, 0xf8, 0xa9, 0x1a, 0x9a, 0x4d, 0x86, 0x1a,
0x7a, 0x4d, 0xb6, 0x92, 0x98, 0x37, 0x34, 0x36, 0x5a, 0xdb, 0x2a, 0x1a, 0x3e, 0x50, 0x15, 0x3f,
0x87, 0xbe, 0xbc, 0xc4, 0x22, 0xf0, 0x75, 0xfc, 0xf7, 0xff, 0xa5, 0x60, 0x07, 0x93, 0x4b, 0x2c,
0xbe, 0x4e, 0x4b, 0x55, 0x0b, 0xbd, 0xfd, 0xd6, 0x3d, 0xf0, 0x5b, 0x13, 0xb1, 0xf2, 0x05, 0xd6,
0x3a, 0x41, 0x5f, 0xd0, 0x92, 0x7f, 0x00, 0xee, 0xb9, 0x4c, 0x2a, 0xd3, 0x9c, 0xd1, 0x9d, 0xeb,
0x9b, 0xb0, 0x93, 0xcb, 0xb8, 0x10, 0xc6, 0xf9, 0x65, 0xef, 0x0b, 0x16, 0xfe, 0xca, 0xa0, 0x4f,
0x36, 0xaa, 0x6c, 0x82, 0x67, 0x32, 0xaa, 0x0f, 0xb3, 0x2a, 0x5d, 0x14, 0x01, 0x1b, 0x3b, 0xfb,
0x8e, 0xd8, 0xb2, 0xf1, 0xb7, 0x61, 0x30, 0x37, 0xde, 0xde, 0xd8, 0xd9, 0xf7, 0x85, 0x45, 0xfc,
0x06, 0xb8, 0x89, 0x9c, 0x63, 0x62, 0x67, 0xcc, 0x00, 0xda, 0x9d, 0x2b, 0x5c, 0xc6, 0x97, 0x76,
0xc4, 0x2c, 0x22, 0x7b, 0x51, 0x2d, 0xc9, 0x6e, 0xba, 0x67, 0x11, 0x95, 0x6b, 0x2e, 0x8b, 0xb6,
0x84, 0xb4, 0xa6, 0xc8, 0x45, 0x24, 0x93, 0xa6, 0x86, 0x06, 0x84, 0xbf, 0x33, 0x9a, 0x2d, 0xc3,
0x89, 0x0e, 0x2f, 0x4d, 0x45, 0xdf, 0x01, 0x8f, 0xf8, 0xf2, 0xfc, 0x5c, 0x2a, 0xcb, 0xcd, 0x21,
0xe1, 0x67, 0x52, 0xf1, 0x4f, 0x61, 0xa0, 0x33, 0xdf, 0xc1, 0xcf, 0x26, 0xdc, 0x33, 0xf2, 0x0b,
0xbb, 0xad, 0xed, 0x60, 0xbf, 0xd3, 0xc1, 0x36, 0x59, 0xb7, 0x9b, 0xec, 0x6d, 0x70, 0x89, 0x0a,
0xb5, 0xbe, 0xfd, 0xce, 0xc8, 0x86, 0x30, 0x66, 0x57, 0x78, 0x0a, 0xd7, 0xb6, 0x4e, 0x6c, 0x4f,
0x62, 0xdb, 0x27, 0x6d, 0xba, 0xe8, 0xdb, 0xae, 0x91, 0xae, 0x14, 0x98, 0x60, 0x54, 0xe2, 0x42,
0xd7, 0xdb, 0x13, 0x2d, 0x0e, 0x7f, 0x62, 0x9b, 0xb8, 0xfa, 0x3c, 0x52, 0x8e, 0x28, 0x5b, 0xaf,
0x65, 0xba, 0xb0, 0xa1, 0x1b, 0x48, 0x75, 0x5b, 0xcc, 0x6d, 0xe8, 0xde, 0x62, 0x4e, 0x58, 0xe5,
0xb6, 0x83, 0x3d, 0x95, 0xf3, 0x31, 0x8c, 0xd6, 0x28, 0x8b, 0x4a, 0xe1, 0x1a, 0xd3, 0xd2, 0x96,
0xa0, 0x6b, 0xe2, 0x37, 0x61, 0x58, 0xca, 0xb3, 0xe7, 0xc4, 0x3d, 0xdb, 0xc9, 0x52, 0x9e, 0xdd,
0xc7, 0x9a, 0xbf, 0x0b, 0xfe, 0x32, 0xc6, 0x64, 0xa1, 0x5d, 0xa6, 0x9d, 0x9e, 0x36, 0xdc, 0xc7,
0x3a, 0xfc, 0x83, 0xc1, 0x60, 0x86, 0xea, 0x1c, 0xd5, 0x6b, 0x49, 0x4a, 0x57, 0xaa, 0x9d, 0x57,
0x48, 0x75, 0x7f, 0xb7, 0x54, 0xbb, 0x1b, 0xa9, 0xbe, 0x01, 0xee, 0x4c, 0x45, 0xc7, 0x53, 0x7d,
0x23, 0x47, 0x18, 0x40, 0x6c, 0x9c, 0x44, 0x65, 0x7c, 0x8e, 0x56, 0xbf, 0x2d, 0xba, 0xa2, 0x34,
0xde, 0x0e, 0xa5, 0xf9, 0x91, 0xc1, 0xe0, 0x81, 0xac, 0xb3, 0xaa, 0xbc, 0xc2, 0xc2, 0x31, 0x8c,
0x26, 0x79, 0x9e, 0xc4, 0x91, 0xf9, 0xda, 0x64, 0xd4, 0x35, 0xd1, 0x8e, 0x87, 0x9d, 0xfa, 0x9a,
0xdc, 0xba, 0x26, 0x9a, 0xe2, 0x23, 0xad, 0xa6, 0x46, 0x1a, 0x3b, 0x53, 0x6c, 0x44, 0x54, 0x3b,
0xa9, 0x08, 0x93, 0xaa, 0xcc, 0x96, 0x49, 0x76, 0xa1, 0xb3, 0xf5, 0x44, 0x8b, 0xc3, 0x3f, 0x7b,
0xd0, 0xff, 0xbf, 0x14, 0x70, 0x0f, 0x58, 0x6c, 0x9b, 0xcd, 0xe2, 0x56, 0x0f, 0x87, 0x1d, 0x3d,
0x0c, 0x60, 0x58, 0x2b, 0x99, 0x9e, 0x61, 0x11, 0x78, 0x5a, 0x5d, 0x1a, 0xa8, 0x3d, 0x7a, 0x8e,
0x8c, 0x10, 0xfa, 0xa2, 0x81, 0xed, 0x5c, 0x40, 0x67, 0x2e, 0x3e, 0xb1, 0x9a, 0x39, 0xd2, 0x37,
0x0a, 0xb6, 0xcb, 0xf2, 0xdf, 0x49, 0xe5, 0xdf, 0x0c, 0xdc, 0x76, 0xa8, 0x8e, 0xb6, 0x87, 0xea,
0x68, 0x33, 0x54, 0xd3, 0xc3, 0x66, 0xa8, 0xa6, 0x87, 0x84, 0xc5, 0x49, 0x33, 0x54, 0xe2, 0x84,
0x9a, 0x75, 0x4f, 0x65, 0x55, 0x7e, 0x58, 0x9b, 0xae, 0xfa, 0xa2, 0xc5, 0xc4, 0xc4, 0x6f, 0x57,
0xa8, 0x6c, 0xa9, 0x7d, 0x61, 0x11, 0xf1, 0xf6, 0x81, 0x16, 0x1c, 0x53, 0x5c, 0x03, 0xf8, 0x87,
0xe0, 0x0a, 0x2a, 0x9e, 0xae, 0xf0, 0x56, 0x5f, 0xb4, 0x59, 0x18, 0x2f, 0x05, 0x35, 0x6f, 0x25,
0x4b, 0xe0, 0xe6, 0xe5, 0xf4, 0x31, 0x0c, 0x66, 0xab, 0x78, 0x59, 0x36, 0x7f, 0x9e, 0xb7, 0x3a,
0x82, 0x15, 0xaf, 0x51, 0xfb, 0x84, 0xdd, 0x12, 0x3e, 0x01, 0xbf, 0x35, 0x6e, 0xae, 0xc3, 0xba,
0xd7, 0xe1, 0xd0, 0x3f, 0x4d, 0xe3, 0xb2, 0x19, 0x5d, 0x5a, 0x53, 0xb2, 0x4f, 0x2a, 0x99, 0x96,
0x71, 0x59, 0x37, 0xa3, 0xdb, 0xe0, 0xf0, 0xae, 0xbd, 0x3e, 0x85, 0x3b, 0xcd, 0x73, 0x54, 0x56,
0x06, 0x0c, 0xd0, 0x87, 0x64, 0x17, 0x68, 0x14, 0xdc, 0x11, 0x06, 0x84, 0xdf, 0x81, 0x3f, 0x49,
0x50, 0x95, 0xa2, 0x4a, 0xae, 0xea, 0x3e, 0x87, 0xfe, 0x37, 0xb3, 0xc7, 0x8f, 0x9a, 0x1b, 0xd0,
0x7a, 0x33, 0xf2, 0xce, 0x4b, 0x23, 0x7f, 0x5f, 0xe6, 0xf2, 0x78, 0xaa, 0x79, 0xee, 0x08, 0x8b,
0xc2, 0x9f, 0x19, 0xf4, 0x49, 0x5b, 0x3a, 0xa1, 0xfb, 0xaf, 0xd2, 0xa5, 0x13, 0x95, 0x9d, 0xc7,
0x0b, 0x54, 0x4d, 0x72, 0x0d, 0xd6, 0x45, 0x8f, 0x56, 0xd8, 0x3e, 0x2e, 0x2d, 0x22, 0xae, 0xd1,
0xc3, 0xaa, 0x99, 0xa5, 0x0e, 0xd7, 0xc8, 0x2c, 0x8c, 0x93, 0xbf, 0x07, 0x30, 0xab, 0x72, 0x54,
0x93, 0xc5, 0x3a, 0x4e, 0x75, 0xd3, 0x3d, 0xd1, 0xb1, 0x84, 0x5f, 0x99, 0xa7, 0xda, 0x15, 0x85,
0x62, 0xbb, 0x9f, 0x75, 0x2f, 0xdf, 0x3c, 0x4c, 0xb6, 0xbf, 0x7b, 0xad, 0x6c, 0xc7, 0x30, 0xb2,
0xef, 0x5a, 0xfd, 0x4a, 0xb4, 0x62, 0xd5, 0x31, 0x51, 0xce, 0x27, 0xd5, 0x3c, 0x89, 0x23, 0x9d,
0xb3, 0x27, 0x2c, 0x9a, 0x0f, 0xf4, 0xf3, 0xfd, 0xee, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xaa,
0x43, 0x90, 0xf1, 0xd0, 0x0b, 0x00, 0x00,
}

View File

@ -26,15 +26,15 @@ message Dashboard {
}
message DashboardCell {
int32 x = 1; // X-coordinate of Cell in the Dashboard
int32 y = 2; // Y-coordinate of Cell in the Dashboard
int32 w = 3; // Width of Cell in the Dashboard
int32 h = 4; // Height of Cell in the Dashboard
repeated Query queries = 5; // Time-series data queries for Dashboard
string name = 6; // User-facing name for this Dashboard
string type = 7; // Dashboard visualization type
string ID = 8; // id is the unique id of the dashboard. MIGRATED FIELD added in 1.2.0-beta6
map<string, Axis> axes = 9; // Axes represent the graphical viewport for a cell's visualizations
int32 x = 1; // X-coordinate of Cell in the Dashboard
int32 y = 2; // Y-coordinate of Cell in the Dashboard
int32 w = 3; // Width of Cell in the Dashboard
int32 h = 4; // Height of Cell in the Dashboard
repeated Query queries = 5; // Time-series data queries for Dashboard
string name = 6; // User-facing name for this Dashboard
string type = 7; // Dashboard visualization type
string ID = 8; // id is the unique id of the dashboard. MIGRATED FIELD added in 1.2.0-beta6
map<string, Axis> axes = 9; // Axes represent the graphical viewport for a cell's visualizations
}
message Axis {
@ -57,18 +57,18 @@ message Template {
}
message TemplateValue {
string type = 1; // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
string value = 2; // Value is the specific value used to replace a template in an InfluxQL query
bool selected = 3; // Selected states that this variable has been picked to use for replacement
string type = 1; // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
string value = 2; // Value is the specific value used to replace a template in an InfluxQL query
bool selected = 3; // Selected states that this variable has been picked to use for replacement
}
message TemplateQuery {
string command = 1; // Command is the query itself
string db = 2; // DB the database for the query (optional)
string rp = 3; // RP is a retention policy and optional;
string measurement = 4; // Measurement is the optinally selected measurement for the query
string tag_key = 5; // TagKey is the optionally selected tag key for the query
string field_key = 6; // FieldKey is the optionally selected field key for the query
string command = 1; // Command is the query itself
string db = 2; // DB the database for the query (optional)
string rp = 3; // RP is the retention policy for the query (optional)
string measurement = 4; // Measurement is the optionally selected measurement for the query
string tag_key = 5; // TagKey is the optionally selected tag key for the query
string field_key = 6; // FieldKey is the optionally selected field key for the query
}
message Server {
@ -105,26 +105,33 @@ message Cell {
}
message Query {
string Command = 1; // Command is the query itself
string DB = 2; // DB the database for the query (optional)
string RP = 3; // RP is a retention policy and optional;
repeated string GroupBys= 4; // GroupBys define the groups to combine in the query
repeated string Wheres = 5; // Wheres define the restrictions on the query
string Label = 6; // Label is the name of the Y-Axis
Range Range = 7; // Range is the upper and lower bound of the Y-Axis
string Source = 8; // Source is the optional URI to the data source
string Command = 1; // Command is the query itself
string DB = 2; // DB the database for the query (optional)
string RP = 3; // RP is a retention policy and optional;
repeated string GroupBys = 4; // GroupBys define the groups to combine in the query
repeated string Wheres = 5; // Wheres define the restrictions on the query
string Label = 6; // Label is the name of the Y-Axis
Range Range = 7; // Range is the upper and lower bound of the Y-Axis
string Source = 8; // Source is the optional URI to the data source
repeated TimeShift Shifts = 9; // TimeShift represents a shift to apply to an influxql query's time range
}
message TimeShift {
string Label = 1; // Label user facing description
string Unit = 2; // Unit influxql time unit representation i.e. ms, s, m, h, d
string Quantity = 3; // Quantity number of units
}
message Range {
int64 Upper = 1; // Upper is the upper-bound of the range
int64 Lower = 2; // Lower is the lower-bound of the range
int64 Upper = 1; // Upper is the upper-bound of the range
int64 Lower = 2; // Lower is the lower-bound of the range
}
message AlertRule {
string ID = 1; // ID is the unique ID of this alert rule
string JSON = 2; // JSON byte representation of the alert
int64 SrcID = 3; // SrcID is the id of the source this alert is associated with
int64 KapaID = 4; // KapaID is the id of the kapacitor this alert is associated with
string ID = 1; // ID is the unique ID of this alert rule
string JSON = 2; // JSON byte representation of the alert
int64 SrcID = 3; // SrcID is the id of the source this alert is associated with
int64 KapaID = 4; // KapaID is the id of the kapacitor this alert is associated with
}
message User {

View File

@ -163,6 +163,7 @@ func Test_MarshalDashboard(t *testing.T) {
Upper: int64(100),
},
Source: "/chronograf/v1/sources/1",
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@ -210,6 +211,7 @@ func Test_MarshalDashboard_WithLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@ -241,6 +243,7 @@ func Test_MarshalDashboard_WithLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@ -285,6 +288,7 @@ func Test_MarshalDashboard_WithEmptyLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@ -316,6 +320,7 @@ func Test_MarshalDashboard_WithEmptyLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{

View File

@ -13,7 +13,7 @@
"name": "MySQL Reads/Second",
"queries": [
{
"query": "SELECT non_negative_derivative(max(\"commands_select\")) AS selects_per_second FROM mysql",
"query": "SELECT non_negative_derivative(last(\"commands_select\"), 1s) AS selects_per_second FROM mysql",
"groupbys": [
"\"server\""
],
@ -30,7 +30,7 @@
"name": "MySQL Writes/Second",
"queries": [
{
"query": "SELECT non_negative_derivative(max(\"commands_insert\")) AS inserts_per_second, non_negative_derivative(max(\"commands_update\")) AS updates_per_second, non_negative_derivative(max(\"commands_delete\")) AS deletes_per_second FROM mysql",
"query": "SELECT non_negative_derivative(last(\"commands_insert\"), 1s) AS inserts_per_second, non_negative_derivative(last(\"commands_update\"), 1s) AS updates_per_second, non_negative_derivative(last(\"commands_delete\"), 1s) AS deletes_per_second FROM mysql",
"groupbys": [
"\"server\""
],
@ -47,7 +47,7 @@
"name": "MySQL Connections/Second",
"queries": [
{
"query": "SELECT non_negative_derivative(max(\"threads_connected\")) AS cxn_per_second, non_negative_derivative(max(\"threads_running\")) AS threads_running_per_second FROM mysql",
"query": "SELECT non_negative_derivative(last(\"threads_connected\"), 1s) AS cxn_per_second, non_negative_derivative(last(\"threads_running\"), 1s) AS threads_running_per_second FROM mysql",
"groupbys": [
"\"server\""
],
@ -64,7 +64,7 @@
"name": "MySQL Connections Errors/Second",
"queries": [
{
"query": "SELECT non_negative_derivative(max(\"connection_errors_max_connections\")) AS cxn_errors_per_second, non_negative_derivative(max(\"connection_errors_internal\")) AS internal_cxn_errors_per_second, non_negative_derivative(max(\"aborted_connects\")) AS cxn_aborted_per_second FROM mysql",
"query": "SELECT non_negative_derivative(last(\"connection_errors_max_connections\"), 1s) AS cxn_errors_per_second, non_negative_derivative(last(\"connection_errors_internal\"), 1s) AS internal_cxn_errors_per_second, non_negative_derivative(last(\"aborted_connects\"), 1s) AS cxn_aborted_per_second FROM mysql",
"groupbys": [
"\"server\""
],

View File

@ -1,21 +1,10 @@
package chronograf
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"regexp"
"strconv"
"strings"
"time"
"unicode"
"unicode/utf8"
"github.com/influxdata/influxdb/influxql"
)
// General errors.
@ -141,196 +130,17 @@ type Range struct {
Lower int64 `json:"lower"` // Lower is the lower bound
}
// TemplateVariable is a named query placeholder that can render itself into
// InfluxQL (via fmt.Stringer) and reports its replacement precedence.
type TemplateVariable interface {
	fmt.Stringer
	Name() string     // returns the variable name
	Precedence() uint // ordinal indicating precedence level for replacement
}
// ExecutableVar is a template variable that derives its state from the raw
// query string handed to Exec.
type ExecutableVar interface {
	Exec(string)
}
// TemplateValue is a value use to replace a template in an InfluxQL query
type BasicTemplateValue struct {
type TemplateValue struct {
Value string `json:"value"` // Value is the specific value used to replace a template in an InfluxQL query
Type string `json:"type"` // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
Selected bool `json:"selected"` // Selected states that this variable has been picked to use for replacement
}
// BasicTemplateVar is a named variable within an InfluxQL query to be
// replaced with one of its Values.
type BasicTemplateVar struct {
	Var    string               `json:"tempVar"` // Var is the string to replace within InfluxQL
	Values []BasicTemplateValue `json:"values"`  // Values are the replacement values within InfluxQL
}
// Name returns the variable string that is replaced within InfluxQL.
func (t BasicTemplateVar) Name() string {
	return t.Var
}
// String converts the template variable into a correct InfluxQL string based
// on its type: identifier types are double-quoted, value types are
// single-quoted, and csv/constant values pass through verbatim. Unknown
// types and empty value lists render as "".
func (t BasicTemplateVar) String() string {
	if len(t.Values) == 0 {
		return ""
	}
	head := t.Values[0]
	switch head.Type {
	case "tagKey", "fieldKey", "measurement", "database":
		return `"` + head.Value + `"`
	case "tagValue", "timeStamp":
		return `'` + head.Value + `'`
	case "csv", "constant":
		return head.Value
	}
	return ""
}
// Precedence returns 0, the lowest replacement-ordering ordinal for basic
// template variables (GroupByVar reports 1 by comparison).
func (t BasicTemplateVar) Precedence() uint {
	return 0
}
// GroupByVar represents an interval template variable (e.g. ":interval:")
// whose replacement is a GROUP BY time(...) bucket derived from the query's
// time range and the available screen resolution.
type GroupByVar struct {
	Var               string        `json:"tempVar"`                     // the name of the variable as present in the query
	Duration          time.Duration `json:"duration,omitempty"`          // the Duration supplied by the query
	Resolution        uint          `json:"resolution"`                  // the available screen resolution to render the results of this query
	ReportingInterval time.Duration `json:"reportingInterval,omitempty"` // the interval at which data is reported to this series
}
// Exec is responsible for extracting the Duration from the query. It first
// attempts to read a relative range ("time > now() - ..."), then falls back
// to a pair of absolute timestamps; an absolute range, when present,
// overrides the relative one.
func (g *GroupByVar) Exec(query string) {
	const whereKeyword = "WHERE"
	idx := strings.Index(query, whereKeyword)
	if idx == -1 {
		// without a WHERE clause there is no time range to extract
		return
	}
	// everything after the WHERE keyword may contain the time range
	fragment := query[idx+len(whereKeyword):]

	// attempt to parse out a relative time range first
	if lower, err := g.parseRelative(fragment, "time > now() - "); err == nil {
		upper, err := g.parseRelative(fragment, "time < now() - ")
		if err != nil {
			// only a lower bound: the range runs up to now()
			g.Duration = lower
			return
		}
		span := lower - upper
		if span < 0 {
			span = -span
		}
		g.Duration = span
	}

	// fall back to an absolute range between two timestamps
	if dur, err := g.parseAbsolute(fragment); err == nil {
		g.Duration = dur
	}
}
// parseRelative locates and extracts a duration value from a fragment of an
// InfluxQL query following the "where" keyword. For example, in the fragment
// "time > now() - 180d GROUP BY :interval:", parseRelative would return a
// duration equal to 180d
func (g *GroupByVar) parseRelative(fragment string, prefix string) (time.Duration, error) {
	idx := strings.Index(fragment, prefix)
	if idx == -1 {
		return 0, errors.New("not a relative duration")
	}

	// skip past the prefix to the start of the duration literal
	rest := fragment[idx+len(prefix):]

	// the literal ends at the first whitespace rune (or the end of the string)
	end := 0
	for end < len(rest) {
		r, _ := utf8.DecodeRuneInString(rest[end:])
		if unicode.IsSpace(r) {
			break
		}
		end++
	}

	// let influxql validate and convert what we suspect is a duration literal
	return influxql.ParseDuration(rest[:end])
}
// parseAbsolute will determine the duration between two absolute timestamps
// found within an InfluxQL fragment following the "where" keyword. For
// example, the fragment "time > '1985-10-25T00:01:21-0800' and time <
// '1985-10-25T00:01:22-0800'" would yield a duration of 1s. When only one
// timestamp is present, the range runs from it up to time.Now().
func (g *GroupByVar) parseAbsolute(fragment string) (time.Duration, error) {
	// Matches clauses such as: time > '2017-07-24T15:33:42.994Z'
	// Note: the character class is [><]; the previous [>|<] also matched a
	// literal '|', which was never a valid comparison operator.
	// MustCompile on a constant pattern replaces the old Compile+panic dance.
	re := regexp.MustCompile(`time\s[><]\s'([0-9\-T\:\.Z]+)'`)

	// extract at most two timestamps (submatch 1 is the quoted time string);
	// a single scan replaces the old separate Match + FindAll passes
	matches := re.FindAllStringSubmatch(fragment, 2)
	if len(matches) == 0 {
		return 0, errors.New("absolute duration not found")
	}

	// parse out absolute times, silently skipping unparseable ones
	times := make([]time.Time, 0, 2)
	for _, match := range matches {
		if tm, err := time.Parse(time.RFC3339Nano, match[1]); err == nil {
			times = append(times, tm)
		}
	}

	// a single bound means the range extends to the present
	if len(times) == 1 {
		times = append(times, time.Now())
	}

	// reject anything other than exactly two parseable times
	if len(times) != 2 {
		return 0, errors.New("must provide exactly two absolute times")
	}

	return times[1].Sub(times[0]), nil
}
// String renders the group-by bucket as InfluxQL, e.g. "time(259s)" or
// "time(180ms)", choosing a bucket size such that roughly Resolution/3
// points cover the captured Duration. Sub-second buckets are emitted in
// milliseconds (floored at 1ms); larger buckets are rounded to seconds.
func (g *GroupByVar) String() string {
	// The function is: ((total_seconds * millisecond_conversion) / group_by) = pixels / 3
	// Number of points given the pixels
	pixels := float64(g.Resolution) / 3.0
	msPerPixel := float64(g.Duration/time.Millisecond) / pixels
	secPerPixel := float64(g.Duration/time.Second) / pixels
	// NOTE(review): if Resolution is 0 then pixels is 0 and the divisions
	// above yield +Inf/NaN, whose int64 conversion is platform-defined —
	// confirm callers guarantee a non-zero Resolution (cf. the UnmarshalJSON
	// TODO about zero-value Resolution).
	if secPerPixel < 1.0 {
		if msPerPixel < 1.0 {
			msPerPixel = 1.0
		}
		return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
	}
	// If groupby is more than 1 second round to the second
	return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
}
// Name returns the variable string as it appears in the query (e.g. ":interval:").
func (g *GroupByVar) Name() string {
	return g.Var
}
func (g *GroupByVar) Precedence() uint {
return 1
type TemplateVar struct {
Var string `json:"tempVar"` // Var is the string to replace within InfluxQL
Values []TemplateValue `json:"values"` // Values are the replacement values within InfluxQL
}
// TemplateID is the unique ID used to identify a template
@ -338,7 +148,7 @@ type TemplateID string
// Template represents a series of choices to replace TemplateVars within InfluxQL
type Template struct {
BasicTemplateVar
TemplateVar
ID TemplateID `json:"id"` // ID is the unique ID associated with this template
Type string `json:"type"` // Type can be fieldKeys, tagKeys, tagValues, CSV, constant, query, measurements, databases
Label string `json:"label"` // Label is a user-facing description of the Template
@ -347,69 +157,15 @@ type Template struct {
// Query retrieves a Response from a TimeSeries.
type Query struct {
Command string `json:"query"` // Command is the query itself
DB string `json:"db,omitempty"` // DB is optional and if empty will not be used.
RP string `json:"rp,omitempty"` // RP is a retention policy and optional; if empty will not be used.
TemplateVars TemplateVars `json:"tempVars,omitempty"` // TemplateVars are template variables to replace within an InfluxQL query
Wheres []string `json:"wheres,omitempty"` // Wheres restricts the query to certain attributes
GroupBys []string `json:"groupbys,omitempty"` // GroupBys collate the query by these tags
Resolution uint `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
Label string `json:"label,omitempty"` // Label is the Y-Axis label for the data
Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
}
// TemplateVars are a heterogeneous collection of different TemplateVariables
// with the capability to decode arbitrary JSON into the appropriate template
// variable type
type TemplateVars []TemplateVariable
func (t *TemplateVars) UnmarshalJSON(text []byte) error {
// TODO: Need to test that server throws an error when :interval:'s Resolution or ReportingInterval or zero-value
rawVars := bytes.NewReader(text)
dec := json.NewDecoder(rawVars)
// read open bracket
rawTok, err := dec.Token()
if err != nil {
return err
}
tok, isDelim := rawTok.(json.Delim)
if !isDelim || tok != '[' {
return errors.New("Expected JSON array, but found " + tok.String())
}
for dec.More() {
var halfBakedVar json.RawMessage
err := dec.Decode(&halfBakedVar)
if err != nil {
return err
}
var agb GroupByVar
err = json.Unmarshal(halfBakedVar, &agb)
if err != nil {
return err
}
// ensure that we really have a GroupByVar
if agb.Resolution != 0 {
(*t) = append(*t, &agb)
continue
}
var tvar BasicTemplateVar
err = json.Unmarshal(halfBakedVar, &tvar)
if err != nil {
return err
}
// ensure that we really have a BasicTemplateVar
if len(tvar.Values) != 0 {
(*t) = append(*t, tvar)
}
}
return nil
Command string `json:"query"` // Command is the query itself
DB string `json:"db,omitempty"` // DB is optional and if empty will not be used.
RP string `json:"rp,omitempty"` // RP is a retention policy and optional; if empty will not be used.
TemplateVars []TemplateVar `json:"tempVars,omitempty"` // TemplateVars are template variables to replace within an InfluxQL query
Wheres []string `json:"wheres,omitempty"` // Wheres restricts the query to certain attributes
GroupBys []string `json:"groupbys,omitempty"` // GroupBys collate the query by these tags
Resolution uint `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
Label string `json:"label,omitempty"` // Label is the Y-Axis label for the data
Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
}
// DashboardQuery includes state for the query builder. This is a transition
@ -420,6 +176,7 @@ type DashboardQuery struct {
Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
QueryConfig QueryConfig `json:"queryConfig,omitempty"` // QueryConfig represents the query state that is understood by the data explorer
Source string `json:"source"` // Source is the optional URI to the data source for this queryConfig
Shifts []TimeShift `json:"-"` // Shifts represents shifts to apply to an influxql query's time range. Clients expect the shift to be in the generated QueryConfig
}
// TemplateQuery is used to retrieve choices for template replacement
@ -535,6 +292,13 @@ type DurationRange struct {
Lower string `json:"lower"`
}
// TimeShift represents a shift to apply to an influxql query's time range
type TimeShift struct {
Label string `json:"label"` // Label user facing description
Unit string `json:"unit"` // Unit influxql time unit representation i.e. ms, s, m, h, d
Quantity string `json:"quantity"` // Quantity number of units
}
// QueryConfig represents UI query from the data explorer
type QueryConfig struct {
ID string `json:"id,omitempty"`
@ -548,6 +312,7 @@ type QueryConfig struct {
Fill string `json:"fill,omitempty"`
RawText *string `json:"rawText"`
Range *DurationRange `json:"range"`
Shifts []TimeShift `json:"shifts"`
}
// KapacitorNode adds arguments and properties to an alert

View File

@ -1,63 +0,0 @@
package chronograf_test
import (
"testing"
"github.com/influxdata/chronograf"
)
// Test_GroupByVar exercises GroupByVar.Exec followed by String over both
// relative and absolute time ranges, checking the computed group-by interval.
func Test_GroupByVar(t *testing.T) {
	gbvTests := []struct {
		name       string
		query      string
		want       string
		resolution uint // the screen resolution to render queries into
	}{
		{
			name:       "relative time only lower bound with one day of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
			resolution: 1000,
			want:       "time(259s)",
		},
		{
			name:       "relative time with relative upper bound with one minute of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
			resolution: 1000,
			want:       "time(180ms)",
		},
		{
			name:       "relative time with relative lower bound and now upper with one day of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
			resolution: 1000,
			want:       "time(259s)",
		},
		{
			name:       "absolute time with one minute of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
			resolution: 1000,
			want:       "time(180ms)",
		},
		{
			// was "zero duraiton" — typo fixed
			name:       "absolute time with nano seconds and zero duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
			resolution: 1000,
			want:       "time(1ms)",
		},
	}

	for _, test := range gbvTests {
		t.Run(test.name, func(t *testing.T) {
			gbv := chronograf.GroupByVar{
				Var:        ":interval:",
				Resolution: test.resolution,
			}
			gbv.Exec(test.query)
			got := gbv.String()
			if got != test.want {
				t.Fatalf("%q - durations not equal! Want: %s, Got: %s", test.name, test.want, got)
			}
		})
	}
}

View File

@ -51,13 +51,13 @@ type Client struct {
}
// NewClientWithTimeSeries initializes a Client with a known set of TimeSeries.
func NewClientWithTimeSeries(lg chronograf.Logger, mu, username, password string, tls bool, series ...chronograf.TimeSeries) (*Client, error) {
func NewClientWithTimeSeries(lg chronograf.Logger, mu string, authorizer influx.Authorizer, tls bool, series ...chronograf.TimeSeries) (*Client, error) {
metaURL, err := parseMetaURL(mu, tls)
if err != nil {
return nil, err
}
metaURL.User = url.UserPassword(username, password)
ctrl := NewMetaClient(metaURL)
ctrl := NewMetaClient(metaURL, authorizer)
c := &Client{
Ctrl: ctrl,
UsersStore: &UserStore{
@ -83,15 +83,15 @@ func NewClientWithTimeSeries(lg chronograf.Logger, mu, username, password string
// NewClientWithURL initializes an Enterprise client with a URL to a Meta Node.
// Acceptable URLs include host:port combinations as well as scheme://host:port
// varieties. TLS is used when the URL contains "https" or when the TLS
// parameter is set. The latter option is provided for host:port combinations
// Username and Password are used for Basic Auth
func NewClientWithURL(mu, username, password string, tls bool, lg chronograf.Logger) (*Client, error) {
// parameter is set. authorizer will add the correct `Authorization` headers
// on the out-bound request.
func NewClientWithURL(mu string, authorizer influx.Authorizer, tls bool, lg chronograf.Logger) (*Client, error) {
metaURL, err := parseMetaURL(mu, tls)
if err != nil {
return nil, err
}
metaURL.User = url.UserPassword(username, password)
ctrl := NewMetaClient(metaURL)
ctrl := NewMetaClient(metaURL, authorizer)
return &Client{
Ctrl: ctrl,
UsersStore: &UserStore{

View File

@ -9,6 +9,7 @@ import (
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/enterprise"
"github.com/influxdata/chronograf/influx"
"github.com/influxdata/chronograf/log"
)
@ -75,7 +76,16 @@ func Test_Enterprise_IssuesQueries(t *testing.T) {
func Test_Enterprise_AdvancesDataNodes(t *testing.T) {
m1 := NewMockTimeSeries("http://host-1.example.com:8086")
m2 := NewMockTimeSeries("http://host-2.example.com:8086")
cl, err := enterprise.NewClientWithTimeSeries(log.New(log.DebugLevel), "http://meta.example.com:8091", "marty", "thelake", false, chronograf.TimeSeries(m1), chronograf.TimeSeries(m2))
cl, err := enterprise.NewClientWithTimeSeries(
log.New(log.DebugLevel),
"http://meta.example.com:8091",
&influx.BasicAuth{
Username: "marty",
Password: "thelake",
},
false,
chronograf.TimeSeries(m1),
chronograf.TimeSeries(m2))
if err != nil {
t.Error("Unexpected error while initializing client: err:", err)
}
@ -124,7 +134,14 @@ func Test_Enterprise_NewClientWithURL(t *testing.T) {
}
for _, testURL := range urls {
_, err := enterprise.NewClientWithURL(testURL.url, testURL.username, testURL.password, testURL.tls, log.New(log.DebugLevel))
_, err := enterprise.NewClientWithURL(
testURL.url,
&influx.BasicAuth{
Username: testURL.username,
Password: testURL.password,
},
testURL.tls,
log.New(log.DebugLevel))
if err != nil && !testURL.shouldErr {
t.Errorf("Unexpected error creating Client with URL %s and TLS preference %t. err: %s", testURL.url, testURL.tls, err.Error())
} else if err == nil && testURL.shouldErr {
@ -135,7 +152,14 @@ func Test_Enterprise_NewClientWithURL(t *testing.T) {
func Test_Enterprise_ComplainsIfNotOpened(t *testing.T) {
m1 := NewMockTimeSeries("http://host-1.example.com:8086")
cl, err := enterprise.NewClientWithTimeSeries(log.New(log.DebugLevel), "http://meta.example.com:8091", "docbrown", "1.21 gigawatts", false, chronograf.TimeSeries(m1))
cl, err := enterprise.NewClientWithTimeSeries(
log.New(log.DebugLevel),
"http://meta.example.com:8091",
&influx.BasicAuth{
Username: "docbrown",
Password: "1.21 gigawatts",
},
false, chronograf.TimeSeries(m1))
if err != nil {
t.Error("Expected ErrUnitialized, but was this err:", err)
}

View File

@ -11,31 +11,32 @@ import (
"net/url"
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/influx"
)
type client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
// MetaClient represents a Meta node in an Influx Enterprise cluster
type MetaClient struct {
URL *url.URL
client client
URL *url.URL
client client
authorizer influx.Authorizer
}
type ClientBuilder func() client
// NewMetaClient represents a meta node in an Influx Enterprise cluster
func NewMetaClient(url *url.URL) *MetaClient {
func NewMetaClient(url *url.URL, authorizer influx.Authorizer) *MetaClient {
return &MetaClient{
URL: url,
client: &defaultClient{},
URL: url,
client: &defaultClient{},
authorizer: authorizer,
}
}
// ShowCluster returns the cluster configuration (not health)
func (m *MetaClient) ShowCluster(ctx context.Context) (*Cluster, error) {
res, err := m.Do(ctx, "GET", "/show-cluster", nil, nil)
res, err := m.Do(ctx, "/show-cluster", "GET", m.authorizer, nil, nil)
if err != nil {
return nil, err
}
@ -56,7 +57,7 @@ func (m *MetaClient) Users(ctx context.Context, name *string) (*Users, error) {
if name != nil {
params["name"] = *name
}
res, err := m.Do(ctx, "GET", "/user", params, nil)
res, err := m.Do(ctx, "/user", "GET", m.authorizer, params, nil)
if err != nil {
return nil, err
}
@ -118,39 +119,10 @@ func (m *MetaClient) DeleteUser(ctx context.Context, name string) error {
return m.Post(ctx, "/user", a, nil)
}
// RemoveAllUserPerms revokes all permissions for a user in Influx Enterprise
func (m *MetaClient) RemoveAllUserPerms(ctx context.Context, name string) error {
user, err := m.User(ctx, name)
if err != nil {
return err
}
// No permissions to remove
if len(user.Permissions) == 0 {
return nil
}
// RemoveUserPerms revokes permissions for a user in Influx Enterprise
func (m *MetaClient) RemoveUserPerms(ctx context.Context, name string, perms Permissions) error {
a := &UserAction{
Action: "remove-permissions",
User: user,
}
return m.Post(ctx, "/user", a, nil)
}
// SetUserPerms removes all permissions and then adds the requested perms
func (m *MetaClient) SetUserPerms(ctx context.Context, name string, perms Permissions) error {
err := m.RemoveAllUserPerms(ctx, name)
if err != nil {
return err
}
// No permissions to add, so, user is in the right state
if len(perms) == 0 {
return nil
}
a := &UserAction{
Action: "add-permissions",
User: &User{
Name: name,
Permissions: perms,
@ -159,6 +131,38 @@ func (m *MetaClient) SetUserPerms(ctx context.Context, name string, perms Permis
return m.Post(ctx, "/user", a, nil)
}
// SetUserPerms removes permissions not in set and then adds the requested perms
func (m *MetaClient) SetUserPerms(ctx context.Context, name string, perms Permissions) error {
	user, err := m.User(ctx, name)
	if err != nil {
		return err
	}

	revoke, add := permissionsDifference(perms, user.Permissions)

	// drop anything the user currently holds that is not requested ...
	if len(revoke) > 0 {
		if err := m.RemoveUserPerms(ctx, name, revoke); err != nil {
			return err
		}
	}

	// ... then grant whatever is requested but not yet held
	if len(add) == 0 {
		return nil
	}
	action := &UserAction{
		Action: "add-permissions",
		User: &User{
			Name:        name,
			Permissions: add,
		},
	}
	return m.Post(ctx, "/user", action, nil)
}
// UserRoles returns a map of users to all of their current roles
func (m *MetaClient) UserRoles(ctx context.Context) (map[string]Roles, error) {
res, err := m.Roles(ctx, nil)
@ -186,7 +190,7 @@ func (m *MetaClient) Roles(ctx context.Context, name *string) (*Roles, error) {
if name != nil {
params["name"] = *name
}
res, err := m.Do(ctx, "GET", "/role", params, nil)
res, err := m.Do(ctx, "/role", "GET", m.authorizer, params, nil)
if err != nil {
return nil, err
}
@ -235,39 +239,10 @@ func (m *MetaClient) DeleteRole(ctx context.Context, name string) error {
return m.Post(ctx, "/role", a, nil)
}
// RemoveAllRolePerms removes all permissions from a role
func (m *MetaClient) RemoveAllRolePerms(ctx context.Context, name string) error {
role, err := m.Role(ctx, name)
if err != nil {
return err
}
// No permissions to remove
if len(role.Permissions) == 0 {
return nil
}
// RemoveRolePerms revokes permissions from a role
func (m *MetaClient) RemoveRolePerms(ctx context.Context, name string, perms Permissions) error {
a := &RoleAction{
Action: "remove-permissions",
Role: role,
}
return m.Post(ctx, "/role", a, nil)
}
// SetRolePerms removes all permissions and then adds the requested perms to role
func (m *MetaClient) SetRolePerms(ctx context.Context, name string, perms Permissions) error {
err := m.RemoveAllRolePerms(ctx, name)
if err != nil {
return err
}
// No permissions to add, so, role is in the right state
if len(perms) == 0 {
return nil
}
a := &RoleAction{
Action: "add-permissions",
Role: &Role{
Name: name,
Permissions: perms,
@ -276,7 +251,39 @@ func (m *MetaClient) SetRolePerms(ctx context.Context, name string, perms Permis
return m.Post(ctx, "/role", a, nil)
}
// SetRoleUsers removes all users and then adds the requested users to role
// SetRolePerms removes permissions not in set and then adds the requested perms to role
func (m *MetaClient) SetRolePerms(ctx context.Context, name string, perms Permissions) error {
	role, err := m.Role(ctx, name)
	if err != nil {
		return err
	}

	revoke, add := permissionsDifference(perms, role.Permissions)

	// drop anything the role currently holds that is not requested ...
	if len(revoke) > 0 {
		if err := m.RemoveRolePerms(ctx, name, revoke); err != nil {
			return err
		}
	}

	// ... then grant whatever is requested but not yet held
	if len(add) == 0 {
		return nil
	}
	action := &RoleAction{
		Action: "add-permissions",
		Role: &Role{
			Name:        name,
			Permissions: add,
		},
	}
	return m.Post(ctx, "/role", action, nil)
}
// SetRoleUsers removes users not in role and then adds the requested users to role
func (m *MetaClient) SetRoleUsers(ctx context.Context, name string, users []string) error {
role, err := m.Role(ctx, name)
if err != nil {
@ -320,6 +327,29 @@ func Difference(wants []string, haves []string) (revoke []string, add []string)
return
}
// permissionsDifference splits the requested permission set against the
// currently-held one: revoke receives what is held but not wanted, add
// receives what is wanted but not yet held, keyed by scope.
//
// NOTE(review): for a scope present in both maps the (possibly empty)
// Difference results are always recorded, so revoke/add may contain scopes
// mapped to empty slices; callers gate only on len(map) > 0 — confirm empty
// permission lists are acceptable in the posted actions.
func permissionsDifference(wants Permissions, haves Permissions) (revoke Permissions, add Permissions) {
	revoke = make(Permissions)
	add = make(Permissions)

	for scope, wanted := range wants {
		held, exists := haves[scope]
		if !exists {
			// entirely new scope: everything wanted must be added
			add[scope] = wanted
			continue
		}
		r, a := Difference(wanted, held)
		revoke[scope] = r
		add[scope] = a
	}

	// scopes held but no longer wanted are revoked wholesale
	for scope, held := range haves {
		if _, exists := wants[scope]; !exists {
			revoke[scope] = held
		}
	}
	return
}
// AddRoleUsers updates a role to have additional users.
func (m *MetaClient) AddRoleUsers(ctx context.Context, name string, users []string) error {
// No permissions to add, so, role is in the right state
@ -361,7 +391,7 @@ func (m *MetaClient) Post(ctx context.Context, path string, action interface{},
return err
}
body := bytes.NewReader(b)
_, err = m.Do(ctx, "POST", path, params, body)
_, err = m.Do(ctx, path, "POST", m.authorizer, params, body)
if err != nil {
return err
}
@ -373,7 +403,7 @@ type defaultClient struct {
}
// Do is a helper function to interface with Influx Enterprise's Meta API
func (d *defaultClient) Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error) {
func (d *defaultClient) Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
p := url.Values{}
for k, v := range params {
p.Add(k, v)
@ -391,15 +421,23 @@ func (d *defaultClient) Do(URL *url.URL, path, method string, params map[string]
if err != nil {
return nil, err
}
if body != nil {
req.Header.Set("Content-Type", "application/json")
}
if authorizer != nil {
if err = authorizer.Set(req); err != nil {
return nil, err
}
}
// Meta servers will redirect (307) to leader. We need
// special handling to preserve authentication headers.
client := &http.Client{
CheckRedirect: d.AuthedCheckRedirect,
}
res, err := client.Do(req)
if err != nil {
return nil, err
@ -437,14 +475,14 @@ func (d *defaultClient) AuthedCheckRedirect(req *http.Request, via []*http.Reque
}
// Do is a cancelable function to interface with Influx Enterprise's Meta API
func (m *MetaClient) Do(ctx context.Context, method, path string, params map[string]string, body io.Reader) (*http.Response, error) {
func (m *MetaClient) Do(ctx context.Context, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
type result struct {
Response *http.Response
Err error
}
resps := make(chan (result))
go func() {
resp, err := m.client.Do(m.URL, path, method, params, body)
resp, err := m.client.Do(m.URL, path, method, authorizer, params, body)
resps <- result{resp, err}
}()

View File

@ -11,13 +11,16 @@ import (
"net/url"
"reflect"
"testing"
"time"
"github.com/influxdata/chronograf/influx"
)
func TestMetaClient_ShowCluster(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
tests := []struct {
@ -128,7 +131,7 @@ func TestMetaClient_Users(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -265,7 +268,7 @@ func TestMetaClient_User(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -366,7 +369,7 @@ func TestMetaClient_CreateUser(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -437,7 +440,7 @@ func TestMetaClient_ChangePassword(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -509,7 +512,7 @@ func TestMetaClient_DeleteUser(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -578,7 +581,7 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -595,7 +598,7 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
wantErr bool
}{
{
name: "Successful set permissions User",
name: "Remove all permissions for a user",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
@ -615,7 +618,7 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
wantRm: `{"action":"remove-permissions","user":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]}}}`,
},
{
name: "Successful set permissions User",
name: "Remove some permissions and add others",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
@ -699,7 +702,7 @@ func TestMetaClient_Roles(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -798,7 +801,7 @@ func TestMetaClient_Role(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -881,7 +884,7 @@ func TestMetaClient_UserRoles(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -985,7 +988,7 @@ func TestMetaClient_CreateRole(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -1051,7 +1054,7 @@ func TestMetaClient_DeleteRole(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -1120,7 +1123,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -1137,7 +1140,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
wantErr bool
}{
{
name: "Successful set permissions role",
name: "Remove all roles from user",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
@ -1154,10 +1157,10 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
ctx: context.Background(),
name: "admin",
},
wantRm: `{"action":"remove-permissions","role":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]},"users":["marty"]}}`,
wantRm: `{"action":"remove-permissions","role":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]}}}`,
},
{
name: "Successful set single permissions role",
name: "Remove some users and add permissions to other",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
@ -1179,7 +1182,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
},
},
},
wantRm: `{"action":"remove-permissions","role":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]},"users":["marty"]}}`,
wantRm: `{"action":"remove-permissions","role":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]}}}`,
wantAdd: `{"action":"add-permissions","role":{"name":"admin","permissions":{"telegraf":["ReadData"]}}}`,
},
}
@ -1218,7 +1221,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
got, _ := ioutil.ReadAll(prm.Body)
if string(got) != tt.wantRm {
t.Errorf("%q. MetaClient.SetRolePerms() = %v, want %v", tt.name, string(got), tt.wantRm)
t.Errorf("%q. MetaClient.SetRolePerms() removal = \n%v\n, want \n%v\n", tt.name, string(got), tt.wantRm)
}
if tt.wantAdd != "" {
prm := reqs[2]
@ -1231,7 +1234,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
got, _ := ioutil.ReadAll(prm.Body)
if string(got) != tt.wantAdd {
t.Errorf("%q. MetaClient.SetRolePerms() = %v, want %v", tt.name, string(got), tt.wantAdd)
t.Errorf("%q. MetaClient.SetRolePerms() addition = \n%v\n, want \n%v\n", tt.name, string(got), tt.wantAdd)
}
}
}
@ -1241,7 +1244,7 @@ func TestMetaClient_SetRoleUsers(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -1361,7 +1364,7 @@ func NewMockClient(code int, body []byte, headers http.Header, err error) *MockC
}
}
func (c *MockClient) Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error) {
func (c *MockClient) Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
if c == nil {
return nil, fmt.Errorf("NIL MockClient")
}
@ -1453,3 +1456,71 @@ func Test_AuthedCheckRedirect_Do(t *testing.T) {
t.Errorf("result = %q; want ok", got)
}
}
// Test_defaultClient_Do verifies that defaultClient.Do applies the supplied
// influx.Authorizer to outgoing requests by inspecting the Authorization
// header received by a test server.
func Test_defaultClient_Do(t *testing.T) {
	type args struct {
		path       string
		method     string
		authorizer influx.Authorizer
		params     map[string]string
		body       io.Reader
	}
	tests := []struct {
		name    string
		args    args
		want    string
		wantErr bool
	}{
		{
			name: "basic auth authorizer",
			args: args{
				path:   "/tictactoe",
				method: "GET",
				authorizer: &influx.BasicAuth{
					Username: "Steven Falken",
					Password: "JOSHUA",
				},
			},
			want: "Basic U3RldmVuIEZhbGtlbjpKT1NIVUE=",
		},
		{
			name: "bearer JWT authorizer",
			args: args{
				path:   "/tictactoe",
				method: "GET",
				authorizer: &influx.BearerJWT{
					Username:     "minifig",
					SharedSecret: "legos",
					// Pin "now" so the signed JWT is deterministic.
					Now: func() time.Time { return time.Time{} },
				},
			},
			want: "Bearer eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJleHAiOi02MjEzNTU5Njc0MCwidXNlcm5hbWUiOiJtaW5pZmlnIn0.uwFGBQ3MykqEmk9Zx0sBdJGefcESVEXG_qt0C1J8b_aS62EAES-Q1FwtURsbITNvSnfzMxYFnkbSG0AA1pEzWw",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
				// This handler runs on the server's goroutine; t.Fatal /
				// t.Fatalf must only be called from the test goroutine, so
				// report failures with Error and return instead.
				if r.URL.Path != "/tictactoe" {
					t.Errorf("Expected request to '/tictactoe' but was %s", r.URL.Path)
					return
				}
				got, ok := r.Header["Authorization"]
				if !ok {
					t.Error("No Authorization header")
					return
				}
				if got[0] != tt.want {
					t.Errorf("Expected auth %s got %s", tt.want, got[0])
				}
				rw.Write([]byte(`{}`))
			}))
			defer ts.Close()
			d := &defaultClient{}
			u, _ := url.Parse(ts.URL)
			_, err := d.Do(u, tt.args.path, tt.args.method, tt.args.authorizer, tt.args.params, tt.args.body)
			if (err != nil) != tt.wantErr {
				t.Errorf("defaultClient.Do() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
		})
	}
}

View File

@ -84,44 +84,49 @@ func (c *UserStore) Update(ctx context.Context, u *chronograf.User) error {
return c.Ctrl.ChangePassword(ctx, u.Name, u.Passwd)
}
// Make a list of the roles we want this user to have:
want := make([]string, len(u.Roles))
for i, r := range u.Roles {
want[i] = r.Name
}
if u.Roles != nil {
// Make a list of the roles we want this user to have:
want := make([]string, len(u.Roles))
for i, r := range u.Roles {
want[i] = r.Name
}
// Find the list of all roles this user is currently in
userRoles, err := c.UserRoles(ctx)
if err != nil {
return nil
}
// Make a list of the roles the user currently has
roles := userRoles[u.Name]
have := make([]string, len(roles.Roles))
for i, r := range roles.Roles {
have[i] = r.Name
}
// Find the list of all roles this user is currently in
userRoles, err := c.UserRoles(ctx)
if err != nil {
return nil
}
// Make a list of the roles the user currently has
roles := userRoles[u.Name]
have := make([]string, len(roles.Roles))
for i, r := range roles.Roles {
have[i] = r.Name
}
// Calculate the roles the user will be removed from and the roles the user
// will be added to.
revoke, add := Difference(want, have)
// Calculate the roles the user will be removed from and the roles the user
// will be added to.
revoke, add := Difference(want, have)
// First, add the user to the new roles
for _, role := range add {
if err := c.Ctrl.AddRoleUsers(ctx, role, []string{u.Name}); err != nil {
return err
// First, add the user to the new roles
for _, role := range add {
if err := c.Ctrl.AddRoleUsers(ctx, role, []string{u.Name}); err != nil {
return err
}
}
// ... and now remove the user from an extra roles
for _, role := range revoke {
if err := c.Ctrl.RemoveRoleUsers(ctx, role, []string{u.Name}); err != nil {
return err
}
}
}
// ... and now remove the user from an extra roles
for _, role := range revoke {
if err := c.Ctrl.RemoveRoleUsers(ctx, role, []string{u.Name}); err != nil {
return err
}
if u.Permissions != nil {
perms := ToEnterprise(u.Permissions)
return c.Ctrl.SetUserPerms(ctx, u.Name, perms)
}
perms := ToEnterprise(u.Permissions)
return c.Ctrl.SetUserPerms(ctx, u.Name, perms)
return nil
}
// All is all users in influx

View File

@ -54,6 +54,7 @@ func (b *BasicAuth) Set(r *http.Request) error {
type BearerJWT struct {
Username string
SharedSecret string
Now Now
}
// Set adds an Authorization Bearer to the request if has a shared secret
@ -70,7 +71,10 @@ func (b *BearerJWT) Set(r *http.Request) error {
// Token returns the expected InfluxDB JWT signed with the sharedSecret
func (b *BearerJWT) Token(username string) (string, error) {
return JWT(username, b.SharedSecret, time.Now)
if b.Now == nil {
b.Now = time.Now
}
return JWT(username, b.SharedSecret, b.Now)
}
// Now returns the current time

View File

@ -9,6 +9,7 @@ import (
"net/http"
"net/url"
"strings"
"time"
"github.com/influxdata/chronograf"
)
@ -55,7 +56,10 @@ func (c *Client) query(u *url.URL, q chronograf.Query) (chronograf.Response, err
command := q.Command
// TODO(timraymond): move this upper Query() function
if len(q.TemplateVars) > 0 {
command = TemplateReplace(q.Command, q.TemplateVars)
command, err = TemplateReplace(q.Command, q.TemplateVars, time.Now())
if err != nil {
return nil, err
}
}
logs := c.Logger.
WithField("component", "proxy").

View File

@ -276,11 +276,11 @@ func Test_Influx_HTTPS_InsecureSkipVerify(t *testing.T) {
called = false
q = ""
query = chronograf.Query{
Command: "select $field from cpu",
TemplateVars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
Command: "select :field: from cpu",
TemplateVars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: ":field:",
Values: []chronograf.TemplateValue{
{
Value: "usage_user",
Type: "fieldKey",

View File

@ -10,6 +10,52 @@ import (
"github.com/influxdata/influxdb/influxql"
)
// TimeRangeAsEpochNano extracts the lower and upper time bounds of expr as
// epoch nanoseconds. A missing lower bound falls back to influxql.MinTime;
// a missing upper bound falls back to now.
func TimeRangeAsEpochNano(expr influxql.Expr, now time.Time) (min, max int64, err error) {
	lower, upper, err := influxql.TimeRange(expr)
	if err != nil {
		return 0, 0, err
	}
	min = time.Unix(0, influxql.MinTime).UnixNano()
	if !lower.IsZero() {
		min = lower.UnixNano()
	}
	max = now.UnixNano()
	if !upper.IsZero() {
		max = upper.UnixNano()
	}
	return min, max, nil
}
// WhereToken marks the beginning of a WHERE clause in an InfluxQL query.
const WhereToken = "WHERE"

// ParseTime computes the width of the time range selected by the WHERE
// clause of influxQL, evaluated relative to now. It returns an error when
// the query has no WHERE clause or the clause cannot be parsed. A negative
// range is clamped to zero.
func ParseTime(influxQL string, now time.Time) (time.Duration, error) {
	idx := strings.Index(strings.ToUpper(influxQL), WhereToken)
	if idx == -1 {
		return 0, fmt.Errorf("not a relative duration")
	}
	clause := influxQL[idx+len(WhereToken):]
	cond, err := influxql.ParseExpr(clause)
	if err != nil {
		return 0, err
	}
	// Fold now() calls down to concrete timestamps before extracting bounds.
	cond = influxql.Reduce(cond, &influxql.NowValuer{Now: now})
	min, max, err := TimeRangeAsEpochNano(cond, now)
	if err != nil {
		return 0, err
	}
	if max <= min {
		return 0, nil
	}
	return time.Duration(max - min), nil
}
// Convert changes an InfluxQL query to a QueryConfig
func Convert(influxQL string) (chronograf.QueryConfig, error) {
itsDashboardTime := false

View File

@ -2,6 +2,7 @@ package influx
import (
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/influxdata/chronograf"
@ -767,3 +768,43 @@ func TestConvert(t *testing.T) {
})
}
}
// TestParseTime verifies that ParseTime extracts the width of the query's
// WHERE-clause time range relative to a fixed "now".
func TestParseTime(t *testing.T) {
	tests := []struct {
		name     string
		influxQL string
		now      string
		want     time.Duration
		wantErr  bool
	}{
		{
			name:     "time equal",
			now:      "2000-01-01T00:00:00Z",
			influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
			want:     0,
		},
		{
			name:     "time shifted by one hour",
			now:      "2000-01-01T00:00:00Z",
			influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
			// One hour minus the 2ns lost to bound truncation.
			want: 3599999999998,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			now, err := time.Parse(time.RFC3339, tt.now)
			if err != nil {
				t.Fatalf("%v", err)
			}
			got, err := ParseTime(tt.influxQL, now)
			if (err != nil) != tt.wantErr {
				t.Errorf("ParseTime() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if got != tt.want {
				t.Errorf("ParseTime() = %v, want %v", got, tt.want)
			}
		})
	}
}

View File

@ -1,40 +1,106 @@
package influx
import (
"sort"
"strconv"
"strings"
"time"
"github.com/influxdata/chronograf"
)
// TemplateReplace replaces templates with values within the query string
func TemplateReplace(query string, templates chronograf.TemplateVars) string {
tvarsByPrecedence := make(map[uint]chronograf.TemplateVars, len(templates))
maxPrecedence := uint(0)
for _, tmp := range templates {
precedence := tmp.Precedence()
if precedence > maxPrecedence {
maxPrecedence = precedence
}
tvarsByPrecedence[precedence] = append(tvarsByPrecedence[precedence], tmp)
}
replaced := query
for prc := uint(0); prc <= maxPrecedence; prc++ {
replacements := []string{}
for _, v := range tvarsByPrecedence[prc] {
if evar, ok := v.(chronograf.ExecutableVar); ok {
evar.Exec(replaced)
}
newVal := v.String()
if newVal != "" {
replacements = append(replacements, v.Name(), newVal)
}
func SortTemplates(ts []chronograf.TemplateVar) []chronograf.TemplateVar {
sort.Slice(ts, func(i, j int) bool {
if len(ts[i].Values) != len(ts[j].Values) {
return len(ts[i].Values) < len(ts[j].Values)
}
replacer := strings.NewReplacer(replacements...)
replaced = replacer.Replace(replaced)
}
if len(ts[i].Values) == 0 {
return i < j
}
return replaced
for k := range ts[i].Values {
if ts[i].Values[k].Type != ts[j].Values[k].Type {
return ts[i].Values[k].Type < ts[j].Values[k].Type
}
if ts[i].Values[k].Value != ts[j].Values[k].Value {
return ts[i].Values[k].Value < ts[j].Values[k].Value
}
}
return i < j
})
return ts
}
// RenderTemplate converts the template variable into a correct InfluxQL string based
// on its type. Variables whose type is not recognized leave the query unchanged.
func RenderTemplate(query string, t chronograf.TemplateVar, now time.Time) (string, error) {
	if len(t.Values) == 0 {
		return query, nil
	}

	// Single-valued variable types are substituted directly, with the
	// quoting InfluxQL expects for that kind of identifier or literal.
	switch t.Values[0].Type {
	case "tagKey", "fieldKey", "measurement", "database":
		return strings.Replace(query, t.Var, `"`+t.Values[0].Value+`"`, -1), nil
	case "tagValue", "timeStamp":
		return strings.Replace(query, t.Var, `'`+t.Values[0].Value+`'`, -1), nil
	case "csv", "constant":
		return strings.Replace(query, t.Var, t.Values[0].Value, -1), nil
	}

	// Otherwise treat the values as a property bag keyed by type
	// (e.g. ":interval:" carries resolution/pointsPerPixel pairs).
	props := make(map[string]string, len(t.Values))
	for _, v := range t.Values {
		props[v.Type] = v.Value
	}

	res, ok := props["resolution"]
	if !ok {
		return query, nil
	}
	resolution, err := strconv.ParseInt(res, 0, 64)
	if err != nil {
		return "", err
	}
	ppp, ok := props["pointsPerPixel"]
	if !ok {
		ppp = "3"
	}
	pixelsPerPoint, err := strconv.ParseInt(ppp, 0, 64)
	if err != nil {
		return "", err
	}
	// Derive the GROUP BY interval from the query's own time range.
	dur, err := ParseTime(query, now)
	if err != nil {
		return "", err
	}
	interval := AutoGroupBy(resolution, pixelsPerPoint, dur)
	return strings.Replace(query, t.Var, interval, -1), nil
}
func AutoGroupBy(resolution, pixelsPerPoint int64, duration time.Duration) string {
// The function is: ((total_seconds * millisecond_converstion) / group_by) = pixels / 3
// Number of points given the pixels
pixels := float64(resolution) / float64(pixelsPerPoint)
msPerPixel := float64(duration/time.Millisecond) / pixels
secPerPixel := float64(duration/time.Second) / pixels
if secPerPixel < 1.0 {
if msPerPixel < 1.0 {
msPerPixel = 1.0
}
return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
}
// If groupby is more than 1 second round to the second
return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
}
// TemplateReplace replaces templates with values within the query string.
// Variables are rendered in sorted order so the substitution is deterministic.
func TemplateReplace(query string, templates []chronograf.TemplateVar, now time.Time) (string, error) {
	rendered := query
	for _, tv := range SortTemplates(templates) {
		var err error
		rendered, err = RenderTemplate(rendered, tv, now)
		if err != nil {
			return "", err
		}
	}
	return rendered, nil
}

View File

@ -2,6 +2,7 @@ package influx
import (
"encoding/json"
"fmt"
"reflect"
"testing"
"time"
@ -13,43 +14,43 @@ func TestTemplateReplace(t *testing.T) {
tests := []struct {
name string
query string
vars chronograf.TemplateVars
vars []chronograf.TemplateVar
want string
}{
{
name: "select with parameters",
query: "$METHOD field1, $field FROM $measurement WHERE temperature > $temperature",
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
Var: "$temperature",
Values: []chronograf.BasicTemplateValue{
query: ":method: field1, :field: FROM :measurement: WHERE temperature > :temperature:",
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: ":temperature:",
Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: "10",
},
},
},
chronograf.BasicTemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
chronograf.TemplateVar{
Var: ":field:",
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
Value: "field2",
},
},
},
chronograf.BasicTemplateVar{
Var: "$METHOD",
Values: []chronograf.BasicTemplateValue{
chronograf.TemplateVar{
Var: ":method:",
Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: "SELECT",
},
},
},
chronograf.BasicTemplateVar{
Var: "$measurement",
Values: []chronograf.BasicTemplateValue{
chronograf.TemplateVar{
Var: ":measurement:",
Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: `"cpu"`,
@ -62,28 +63,28 @@ func TestTemplateReplace(t *testing.T) {
{
name: "select with parameters and aggregates",
query: `SELECT mean($field) FROM "cpu" WHERE $tag = $value GROUP BY $tag`,
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: "$value",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "tagValue",
Value: "howdy.com",
},
},
},
chronograf.BasicTemplateVar{
chronograf.TemplateVar{
Var: "$tag",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "tagKey",
Value: "host",
},
},
},
chronograf.BasicTemplateVar{
chronograf.TemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
Value: "field",
@ -101,8 +102,8 @@ func TestTemplateReplace(t *testing.T) {
{
name: "var without a value",
query: `SELECT $field FROM "cpu"`,
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: "$field",
},
},
@ -111,10 +112,10 @@ func TestTemplateReplace(t *testing.T) {
{
name: "var with unknown type",
query: `SELECT $field FROM "cpu"`,
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "who knows?",
Value: "field",
@ -127,42 +128,63 @@ func TestTemplateReplace(t *testing.T) {
{
name: "auto group by",
query: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by :interval:`,
vars: chronograf.TemplateVars{
&chronograf.GroupByVar{
Var: ":interval:",
Duration: 180 * 24 * time.Hour,
Resolution: 1000,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
},
want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46656s)`,
want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by without duration",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by :interval:`,
vars: chronograf.TemplateVars{
&chronograf.GroupByVar{
Var: ":interval:",
Duration: 0 * time.Minute,
Resolution: 1000,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
},
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by with :dashboardTime:",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > :dashboardTime: group by :interval:`,
vars: chronograf.TemplateVars{
&chronograf.GroupByVar{
Var: ":interval:",
Duration: 0 * time.Minute,
Resolution: 1000,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
&chronograf.BasicTemplateVar{
{
Var: ":dashboardTime:",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "constant",
Value: "now() - 4320h",
@ -170,20 +192,28 @@ func TestTemplateReplace(t *testing.T) {
},
},
},
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by failing condition",
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`,
vars: []chronograf.TemplateVariable{
&chronograf.GroupByVar{
Var: ":interval:",
Resolution: 115,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "115",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
chronograf.BasicTemplateVar{
{
Var: ":dashboardTime:",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Value: "now() - 1h",
Type: "constant",
@ -197,7 +227,14 @@ func TestTemplateReplace(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := TemplateReplace(tt.query, tt.vars)
now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
if err != nil {
t.Fatal(err)
}
got, err := TemplateReplace(tt.query, tt.vars, now)
if err != nil {
t.Fatalf("TestParse unexpected TemplateReplace error: %v", err)
}
if got != tt.want {
t.Errorf("TestParse %s =\n%s\nwant\n%s", tt.name, got, tt.want)
}
@ -209,8 +246,20 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
req := `[
{
"tempVar": ":interval:",
"resolution": 1000,
"reportingInterval": 10
"values": [
{
"value": "1000",
"type": "resolution"
},
{
"value": "3",
"type": "pointsPerPixel"
},
{
"value": "10",
"type": "reportingInterval"
}
]
},
{
"tempVar": ":cpu:",
@ -224,15 +273,27 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
}
]`
expected := []chronograf.TemplateVariable{
&chronograf.GroupByVar{
Var: ":interval:",
Resolution: 1000,
ReportingInterval: 10 * time.Nanosecond,
want := []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
{
Value: "10",
Type: "reportingInterval",
},
},
},
chronograf.BasicTemplateVar{
{
Var: ":cpu:",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Value: "cpu-total",
Type: "tagValue",
@ -242,65 +303,128 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
},
}
var tvars chronograf.TemplateVars
err := json.Unmarshal([]byte(req), &tvars)
var got []chronograf.TemplateVar
err := json.Unmarshal([]byte(req), &got)
if err != nil {
t.Fatal("Err unmarshaling:", err)
}
if len(tvars) != len(expected) {
t.Fatal("Expected", len(expected), "vars but found", len(tvars))
}
if !reflect.DeepEqual(*(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar))) {
t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", *(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar)))
}
if !reflect.DeepEqual(tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar)) {
t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar))
if !reflect.DeepEqual(got, want) {
t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", got, want)
}
}
func TestGroupByVarString(t *testing.T) {
func TestAutoGroupBy(t *testing.T) {
tests := []struct {
name string
tvar *chronograf.GroupByVar
want string
name string
resolution int64
pixelsPerPoint int64
duration time.Duration
want string
}{
{
name: "String() calculates the GROUP BY interval",
tvar: &chronograf.GroupByVar{
Resolution: 700,
ReportingInterval: 10 * time.Second,
Duration: 24 * time.Hour,
},
want: "time(370s)",
name: "String() calculates the GROUP BY interval",
resolution: 700,
pixelsPerPoint: 3,
duration: 24 * time.Hour,
want: "time(370s)",
},
{
name: "String() milliseconds if less than one second intervals",
tvar: &chronograf.GroupByVar{
Resolution: 100000,
ReportingInterval: 10 * time.Second,
Duration: time.Hour,
},
want: "time(107ms)",
name: "String() milliseconds if less than one second intervals",
resolution: 100000,
pixelsPerPoint: 3,
duration: time.Hour,
want: "time(107ms)",
},
{
name: "String() milliseconds if less than one millisecond",
tvar: &chronograf.GroupByVar{
Resolution: 100000,
ReportingInterval: 10 * time.Second,
Duration: time.Second,
},
want: "time(1ms)",
name: "String() milliseconds if less than one millisecond",
resolution: 100000,
pixelsPerPoint: 3,
duration: time.Second,
want: "time(1ms)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := tt.tvar.String()
got := AutoGroupBy(tt.resolution, tt.pixelsPerPoint, tt.duration)
if got != tt.want {
t.Errorf("TestGroupByVarString %s =\n%s\nwant\n%s", tt.name, got, tt.want)
t.Errorf("TestAutoGroupBy %s =\n%s\nwant\n%s", tt.name, got, tt.want)
}
})
}
}
// Test_RenderTemplate checks that an ":interval:" resolution variable is
// rendered into a GROUP BY time(...) interval derived from the query's
// WHERE-clause time range, for both relative and absolute bounds.
func Test_RenderTemplate(t *testing.T) {
	gbvTests := []struct {
		name       string
		query      string
		want       string
		resolution uint // the screen resolution to render queries into
	}{
		{
			name:       "relative time only lower bound with one day of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY time(259s)",
		},
		{
			name:       "relative time offset by week",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY time(259s)",
		},
		{
			name:       "relative time with relative upper bound with one minute of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY time(179ms)",
		},
		{
			name:       "relative time with relative lower bound and now upper with one day of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY time(259s)",
		},
		{
			name:       "absolute time with one minute of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY time(179ms)",
		},
		{
			name:       "absolute time with nano seconds and zero duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY time(1ms)",
		},
	}
	for _, tt := range gbvTests {
		t.Run(tt.name, func(t *testing.T) {
			// Fix "now" so relative bounds render deterministically.
			now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
			if err != nil {
				t.Fatal(err)
			}
			tvar := chronograf.TemplateVar{
				Var: ":interval:",
				Values: []chronograf.TemplateValue{
					{
						Value: fmt.Sprintf("%d", tt.resolution),
						Type:  "resolution",
					},
				},
			}
			got, err := RenderTemplate(tt.query, tvar, now)
			if err != nil {
				t.Fatalf("unexpected error rendering template %v", err)
			}
			if got != tt.want {
				t.Fatalf("%q - durations not equal! Want: %s, Got: %s", tt.name, tt.want, got)
			}
		})
	}
}
// SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h GROUP BY :interval: FILL(null);SELECT mean("numSeries") AS "mean_numSeries_shifted__1__h" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h AND time < now() - 1h GROUP BY :interval: FILL(null)

View File

@ -214,6 +214,9 @@ var trigger = data
.durationField(durationField)
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -300,6 +303,9 @@ var trigger = data
.durationField(durationField)
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -540,6 +546,9 @@ var trigger = data
.durationField(durationField)
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -623,6 +632,9 @@ var trigger = data
.durationField(durationField)
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -1376,6 +1388,9 @@ trigger
|eval(lambda: "emitted")
.as('value')
.keep('value', messageField, durationField)
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)

View File

@ -20,11 +20,14 @@ func InfluxOut(rule chronograf.AlertRule) (string, error) {
return fmt.Sprintf(`
trigger
%s
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
.retentionPolicy(outputRP)
.measurement(outputMeasurement)
.create()
.database(outputDB)
.retentionPolicy(outputRP)
.measurement(outputMeasurement)
.tag('alertName', name)
.tag('triggerType', triggerType)
`, rename), nil

View File

@ -14,6 +14,9 @@ func TestInfluxOut(t *testing.T) {
|eval(lambda: "emitted")
.as('value')
.keep('value', messageField, durationField)
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)

View File

@ -181,6 +181,9 @@ var trigger = data
.email()
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -323,6 +326,9 @@ var trigger = data
.email()
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -467,6 +473,9 @@ var trigger = data
.email()
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -620,6 +629,9 @@ var trigger = data
.email()
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -772,6 +784,9 @@ var trigger = data
.email()
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -924,6 +939,9 @@ var trigger = data
.email()
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -1059,6 +1077,9 @@ var trigger = data
.email()
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -1222,6 +1243,9 @@ var trigger = past
.email()
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -1385,6 +1409,9 @@ var trigger = past
.email()
trigger
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)
@ -1527,6 +1554,9 @@ trigger
|eval(lambda: "emitted")
.as('value')
.keep('value', messageField, durationField)
|eval(lambda: float("value"))
.as('value')
.keep()
|influxDBOut()
.create()
.database(outputDB)

View File

@ -76,10 +76,12 @@ func Vars(rule chronograf.AlertRule) (string, error) {
}
}
// NotEmpty is an error collector checking if strings are empty values
type NotEmpty struct {
Err error
}
// Valid checks if string s is empty and if so reports an error using name
func (n *NotEmpty) Valid(name, s string) error {
if n.Err != nil {
return n.Err
@ -91,6 +93,7 @@ func (n *NotEmpty) Valid(name, s string) error {
return n.Err
}
// Escape sanitizes strings with single quotes for kapacitor
func Escape(str string) string {
return strings.Replace(str, "'", `\'`, -1)
}
@ -251,5 +254,10 @@ func formatValue(value string) string {
if _, err := strconv.ParseFloat(value, 64); err == nil {
return value
}
return "'" + value + "'"
// If the value is a kapacitor boolean value perform no formatting
if value == "TRUE" || value == "FALSE" {
return value
}
return "'" + Escape(value) + "'"
}

View File

@ -49,3 +49,39 @@ func TestVarsCritStringEqual(t *testing.T) {
t.Errorf("Error validating alert: %v %s", err, tick)
}
}
// Test_formatValue exercises formatValue's quoting rules: numbers and
// kapacitor booleans pass through unquoted, everything else is
// single-quoted with embedded quotes escaped.
func Test_formatValue(t *testing.T) {
	cases := []struct {
		name  string
		value string
		want  string
	}{
		{name: "parses floats", value: "3.14", want: "3.14"},
		{name: "parses booleans", value: "TRUE", want: "TRUE"},
		{name: "single quotes for strings", value: "up", want: "'up'"},
		{name: "handles escaping of single quotes", value: "down's", want: "'down\\'s'"},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := formatValue(tc.value)
			if got != tc.want {
				t.Errorf("formatValue() = %v, want %v", got, tc.want)
			}
		})
	}
}

View File

@ -27,6 +27,7 @@ type Generic struct {
AuthURL string
TokenURL string
APIURL string // APIURL returns OpenID Userinfo
APIKey string // APIKey is the JSON key to lookup email address in APIURL response
Logger chronograf.Logger
}
@ -69,9 +70,7 @@ func (g *Generic) Config() *oauth2.Config {
// PrincipalID returns the email address of the user.
func (g *Generic) PrincipalID(provider *http.Client) (string, error) {
res := struct {
Email string `json:"email"`
}{}
res := map[string]interface{}{}
r, err := provider.Get(g.APIURL)
if err != nil {
@ -83,7 +82,11 @@ func (g *Generic) PrincipalID(provider *http.Client) (string, error) {
return "", err
}
email := res.Email
email := ""
value := res[g.APIKey]
if e, ok := value.(string); ok {
email = e
}
// If we did not receive an email address, try to lookup the email
// in a similar way as github

View File

@ -34,6 +34,7 @@ func TestGenericPrincipalID(t *testing.T) {
prov := oauth2.Generic{
Logger: logger,
APIURL: mockAPI.URL,
APIKey: "email",
}
tt, err := oauth2.NewTestTripper(logger, mockAPI, http.DefaultTransport)
if err != nil {

View File

@ -31,7 +31,6 @@ func newCellResponses(dID chronograf.DashboardID, dcells []chronograf.DashboardC
cells := make([]dashboardCellResponse, len(dcells))
for i, cell := range dcells {
newCell := chronograf.DashboardCell{}
newCell.Queries = make([]chronograf.DashboardQuery, len(cell.Queries))
copy(newCell.Queries, cell.Queries)
@ -70,7 +69,17 @@ func newCellResponses(dID chronograf.DashboardID, dcells []chronograf.DashboardC
// ValidDashboardCellRequest verifies that the dashboard cells have a query and
// have the correct axes specified
func ValidDashboardCellRequest(c *chronograf.DashboardCell) error {
if c == nil {
return fmt.Errorf("Chronograf dashboard cell was nil")
}
CorrectWidthHeight(c)
for _, q := range c.Queries {
if err := ValidateQueryConfig(&q.QueryConfig); err != nil {
return err
}
}
MoveTimeShift(c)
return HasCorrectAxes(c)
}
@ -115,12 +124,22 @@ func CorrectWidthHeight(c *chronograf.DashboardCell) {
}
}
// MoveTimeShift moves TimeShift from the QueryConfig to the DashboardQuery
func MoveTimeShift(c *chronograf.DashboardCell) {
for i, query := range c.Queries {
query.Shifts = query.QueryConfig.Shifts
c.Queries[i] = query
}
}
// AddQueryConfig updates a cell by converting InfluxQL into queryconfigs
// If influxql cannot be represented by a full query config, then, the
// query config's raw text is set to the command.
func AddQueryConfig(c *chronograf.DashboardCell) {
for i, q := range c.Queries {
qc := ToQueryConfig(q.Command)
qc.Shifts = append([]chronograf.TimeShift(nil), q.Shifts...)
q.Shifts = nil
q.QueryConfig = qc
c.Queries[i] = q
}

View File

@ -162,14 +162,14 @@ func Test_Service_DashboardCells(t *testing.T) {
http.StatusOK,
},
{
"cell axes should always be \"x\", \"y\", and \"y2\"",
&url.URL{
name: "cell axes should always be \"x\", \"y\", and \"y2\"",
reqURL: &url.URL{
Path: "/chronograf/v1/dashboards/1/cells",
},
map[string]string{
ctxParams: map[string]string{
"id": "1",
},
[]chronograf.DashboardCell{
mockResponse: []chronograf.DashboardCell{
{
ID: "3899be5a-f6eb-4347-b949-de2f4fbea859",
X: 0,
@ -182,7 +182,7 @@ func Test_Service_DashboardCells(t *testing.T) {
Axes: map[string]chronograf.Axis{},
},
},
[]chronograf.DashboardCell{
expected: []chronograf.DashboardCell{
{
ID: "3899be5a-f6eb-4347-b949-de2f4fbea859",
X: 0,
@ -205,7 +205,7 @@ func Test_Service_DashboardCells(t *testing.T) {
},
},
},
http.StatusOK,
expectedCode: http.StatusOK,
},
}
@ -217,7 +217,10 @@ func Test_Service_DashboardCells(t *testing.T) {
ctx := context.Background()
params := httprouter.Params{}
for k, v := range test.ctxParams {
params = append(params, httprouter.Param{k, v})
params = append(params, httprouter.Param{
Key: k,
Value: v,
})
}
ctx = httprouter.WithParams(ctx, params)

View File

@ -223,6 +223,13 @@ func Test_newDashboardResponse(t *testing.T) {
{
Source: "/chronograf/v1/sources/1",
Command: "SELECT donors from hill_valley_preservation_society where time > '1985-10-25 08:00:00'",
Shifts: []chronograf.TimeShift{
{
Label: "Best Week Evar",
Unit: "d",
Quantity: "7",
},
},
},
},
Axes: map[string]chronograf.Axis{
@ -272,6 +279,13 @@ func Test_newDashboardResponse(t *testing.T) {
},
Tags: make(map[string][]string, 0),
AreTagsAccepted: false,
Shifts: []chronograf.TimeShift{
{
Label: "Best Week Evar",
Unit: "d",
Quantity: "7",
},
},
},
},
},

View File

@ -215,12 +215,12 @@ func Test_KapacitorRulesGet(t *testing.T) {
bg := context.Background()
params := httprouter.Params{
{
"id",
"1",
Key: "id",
Value: "1",
},
{
"kid",
"1",
Key: "kid",
Value: "1",
},
}
ctx := httprouter.WithParams(bg, params)
@ -246,8 +246,8 @@ func Test_KapacitorRulesGet(t *testing.T) {
actual := make([]chronograf.AlertRule, len(frame.Rules))
for idx, _ := range frame.Rules {
actual[idx] = frame.Rules[idx].AlertRule
for i := range frame.Rules {
actual[i] = frame.Rules[i].AlertRule
}
if resp.StatusCode != http.StatusOK {

View File

@ -257,7 +257,7 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
// Encapsulate the router with OAuth2
var auth http.Handler
auth, allRoutes.AuthRoutes = AuthAPI(opts, router)
allRoutes.LogoutLink = "/oauth/logout"
allRoutes.LogoutLink = path.Join(opts.Basepath, "/oauth/logout")
// Create middleware that redirects to the appropriate provider logout
router.GET(allRoutes.LogoutLink, Logout("/", basepath, allRoutes.AuthRoutes))

View File

@ -4,6 +4,7 @@ import (
"encoding/json"
"fmt"
"net/http"
"time"
"golang.org/x/net/context"
@ -21,8 +22,8 @@ type QueryRequest struct {
// QueriesRequest converts all queries to queryConfigs with the help
// of the template variables
type QueriesRequest struct {
Queries []QueryRequest `json:"queries"`
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
Queries []QueryRequest `json:"queries"`
TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
}
// QueryResponse is the return result of a QueryRequest including
@ -33,7 +34,7 @@ type QueryResponse struct {
QueryConfig chronograf.QueryConfig `json:"queryConfig"`
QueryAST *queries.SelectStatement `json:"queryAST,omitempty"`
QueryTemplated *string `json:"queryTemplated,omitempty"`
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
}
// QueriesResponse is the response for a QueriesRequest
@ -72,12 +73,18 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
Query: q.Query,
}
query := influx.TemplateReplace(q.Query, req.TemplateVars)
query, err := influx.TemplateReplace(q.Query, req.TemplateVars, time.Now())
if err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
return
}
qc := ToQueryConfig(query)
if err := s.DefaultRP(ctx, &qc, &src); err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
return
}
qc.Shifts = []chronograf.TimeShift{}
qr.QueryConfig = qc
if stmt, err := queries.ParseSelect(query); err == nil {

View File

@ -60,7 +60,7 @@ func TestService_Queries(t *testing.T) {
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
]}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"db","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":null,"range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"db","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]}}]}
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"db","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":null,"range":{"upper":"","lower":"now() - 1m"},"shifts":[]},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"db","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]}}]}
`,
},
{
@ -81,7 +81,7 @@ func TestService_Queries(t *testing.T) {
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
]}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SHOW DATABASES","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SHOW DATABASES","range":null}}]}
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SHOW DATABASES","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SHOW DATABASES","range":null,"shifts":[]}}]}
`,
},
{
@ -98,7 +98,7 @@ func TestService_Queries(t *testing.T) {
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
"queries": [
{
"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > now() - 1m",
"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > :dashboardTime: AND time < :upperDashboardTime: GROUP BY :interval:",
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
],
@ -153,13 +153,20 @@ func TestService_Queries(t *testing.T) {
"id": "interval",
"type": "constant",
"tempVar": ":interval:",
"resolution": 1000,
"reportingInterval": 10000000000,
"values": []
"values": [
{
"value": "1000",
"type": "resolution"
},
{
"value": "3",
"type": "pointsPerPixel"
}
]
}
]
}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"_internal","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"_internal","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","tempVars":[{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":interval:","duration":60000000000,"resolution":1000,"reportingInterval":10000000000}]}]}
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","range":null,"shifts":[]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 15m AND time \u003c now() GROUP BY time(2s)","tempVars":[{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":interval:","values":[{"value":"1000","type":"resolution","selected":false},{"value":"3","type":"pointsPerPixel","selected":false}]}]}]}
`,
},
}

View File

@ -1,6 +1,8 @@
package server
import (
"fmt"
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/influx"
)
@ -22,3 +24,28 @@ func ToQueryConfig(query string) chronograf.QueryConfig {
Tags: make(map[string][]string, 0),
}
}
// validFieldTypes is the set of field/argument type strings accepted by
// ValidateQueryConfig; lookup of any other type string yields false.
var validFieldTypes = map[string]bool{
"func": true,
"field": true,
"integer": true,
"number": true,
"regex": true,
"wildcard": true,
}
// ValidateQueryConfig checks any query config input.
//
// Every field, and every argument of every field, must have a type listed in
// validFieldTypes. The first offending type encountered is reported in the
// returned error; nil is returned when the whole config is valid.
func ValidateQueryConfig(q *chronograf.QueryConfig) error {
	for _, fld := range q.Fields {
		if !validFieldTypes[fld.Type] {
			return fmt.Errorf(`invalid field type "%s"; expect func, field, integer, number, regex, wildcard`, fld.Type)
		}
		for _, arg := range fld.Args {
			if !validFieldTypes[arg.Type] {
				// Report the argument's own type; previously the (valid)
				// parent field's type was echoed here, which was misleading.
				return fmt.Errorf(`invalid field type "%s"; expect func, field, integer, number, regex, wildcard`, arg.Type)
			}
		}
	}
	return nil
}

View File

@ -0,0 +1,50 @@
package server
import (
"testing"
"github.com/influxdata/chronograf"
)
// TestValidateQueryConfig exercises ValidateQueryConfig with both accepted and
// rejected field/argument types. Positive cases are included so a regression
// that rejects every config cannot pass.
func TestValidateQueryConfig(t *testing.T) {
	tests := []struct {
		name    string
		q       *chronograf.QueryConfig
		wantErr bool
	}{
		{
			name: "valid field type",
			q: &chronograf.QueryConfig{
				Fields: []chronograf.Field{
					{
						Type: "field",
					},
				},
			},
			wantErr: false,
		},
		{
			name: "valid func with valid args",
			q: &chronograf.QueryConfig{
				Fields: []chronograf.Field{
					{
						Type: "func",
						Args: []chronograf.Field{
							{
								Type: "field",
							},
						},
					},
				},
			},
			wantErr: false,
		},
		{
			name: "invalid field type",
			q: &chronograf.QueryConfig{
				Fields: []chronograf.Field{
					{
						Type: "invalid",
					},
				},
			},
			wantErr: true,
		},
		{
			name: "invalid field args",
			q: &chronograf.QueryConfig{
				Fields: []chronograf.Field{
					{
						Type: "func",
						Args: []chronograf.Field{
							{
								Type: "invalid",
							},
						},
					},
				},
			},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if err := ValidateQueryConfig(tt.q); (err != nil) != tt.wantErr {
				t.Errorf("ValidateQueryConfig() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}

View File

@ -80,6 +80,7 @@ type Server struct {
GenericAuthURL string `long:"generic-auth-url" description:"OAuth 2.0 provider's authorization endpoint URL" env:"GENERIC_AUTH_URL"`
GenericTokenURL string `long:"generic-token-url" description:"OAuth 2.0 provider's token endpoint URL" env:"GENERIC_TOKEN_URL"`
GenericAPIURL string `long:"generic-api-url" description:"URL that returns OpenID UserInfo compatible information." env:"GENERIC_API_URL"`
GenericAPIKey string `long:"generic-api-key" description:"JSON lookup key into OpenID UserInfo. (Azure should be userPrincipalName)" default:"email" env:"GENERIC_API_KEY"`
Auth0Domain string `long:"auth0-domain" description:"Subdomain of auth0.com used for Auth0 OAuth2 authentication" env:"AUTH0_DOMAIN"`
Auth0ClientID string `long:"auth0-client-id" description:"Auth0 Client ID for OAuth2 support" env:"AUTH0_CLIENT_ID"`
@ -182,6 +183,7 @@ func (s *Server) genericOAuth(logger chronograf.Logger, auth oauth2.Authenticato
AuthURL: s.GenericAuthURL,
TokenURL: s.GenericTokenURL,
APIURL: s.GenericAPIURL,
APIKey: s.GenericAPIKey,
Logger: logger,
}
jwt := oauth2.NewJWT(s.TokenSecret)

View File

@ -48,7 +48,7 @@ func (c *InfluxClient) New(src chronograf.Source, logger chronograf.Logger) (chr
}
if src.Type == chronograf.InfluxEnterprise && src.MetaURL != "" {
tls := strings.Contains(src.MetaURL, "https")
return enterprise.NewClientWithTimeSeries(logger, src.MetaURL, src.Username, src.Password, tls, client)
return enterprise.NewClientWithTimeSeries(logger, src.MetaURL, influx.DefaultAuthorization(&src), tls, client)
}
return client, nil
}

View File

@ -55,7 +55,10 @@ func newSourceResponse(src chronograf.Source) sourceResponse {
},
}
if src.Type == chronograf.InfluxEnterprise {
// MetaURL is currently a string, but eventually, we'd like to change it
// to a slice. Checking len(src.MetaURL) is functionally equivalent to
// checking if it is equal to the empty string.
if src.Type == chronograf.InfluxEnterprise && len(src.MetaURL) != 0 {
res.Links.Roles = fmt.Sprintf("%s/%d/roles", httpAPISrcs, src.ID)
}
return res
@ -251,7 +254,9 @@ func (s *Service) UpdateSource(w http.ResponseWriter, r *http.Request) {
if req.URL != "" {
src.URL = req.URL
}
if req.MetaURL != "" {
// If the supplied MetaURL is different from the
// one supplied on the request, update the value
if req.MetaURL != src.MetaURL {
src.MetaURL = req.MetaURL
}
if req.Type != "" {

View File

@ -550,6 +550,7 @@
"patch": {
"tags": ["sources", "users"],
"summary": "Update user configuration",
"description": "Update one parameter at a time (one of password, permissions or roles)",
"parameters": [
{
"name": "id",

View File

@ -16,8 +16,8 @@ func TestValidTemplateRequest(t *testing.T) {
name: "Valid Template",
template: &chronograf.Template{
Type: "fieldKeys",
BasicTemplateVar: chronograf.BasicTemplateVar{
Values: []chronograf.BasicTemplateValue{
TemplateVar: chronograf.TemplateVar{
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
},
@ -30,8 +30,8 @@ func TestValidTemplateRequest(t *testing.T) {
wantErr: true,
template: &chronograf.Template{
Type: "Unknown Type",
BasicTemplateVar: chronograf.BasicTemplateVar{
Values: []chronograf.BasicTemplateValue{
TemplateVar: chronograf.TemplateVar{
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
},
@ -44,8 +44,8 @@ func TestValidTemplateRequest(t *testing.T) {
wantErr: true,
template: &chronograf.Template{
Type: "csv",
BasicTemplateVar: chronograf.BasicTemplateVar{
Values: []chronograf.BasicTemplateValue{
TemplateVar: chronograf.TemplateVar{
Values: []chronograf.TemplateValue{
{
Type: "unknown value",
},

View File

@ -48,7 +48,7 @@
'arrow-parens': 0,
'comma-dangle': [2, 'always-multiline'],
'no-cond-assign': 2,
'no-console': ['error', {allow: ['error']}],
'no-console': ['error', {allow: ['error', 'warn']}],
'no-constant-condition': 2,
'no-control-regex': 2,
'no-debugger': 2,

View File

@ -11,14 +11,14 @@
"scripts": {
"build": "yarn run clean && env NODE_ENV=production webpack --optimize-minimize --config ./webpack/prodConfig.js",
"build:dev": "webpack --config ./webpack/devConfig.js",
"start": "webpack --watch --config ./webpack/devConfig.js",
"start": "yarn run clean && webpack --watch --config ./webpack/devConfig.js",
"start:hmr": "webpack-dev-server --open --config ./webpack/devConfig.js",
"lint": "esw src/",
"test": "karma start",
"test:integration": "nightwatch tests --skip",
"test:lint": "yarn run lint; yarn run test",
"test:dev": "concurrently \"yarn run lint -- --watch\" \"yarn run test -- --no-single-run --reporters=verbose\"",
"clean": "rm -rf build",
"test:dev": "concurrently \"yarn run lint --watch\" \"yarn run test --no-single-run --reporters=verbose\"",
"clean": "rm -rf build/*",
"storybook": "node ./storybook.js",
"prettier": "prettier --single-quote --trailing-comma es5 --bracket-spacing false --semi false --write \"{src,spec}/**/*.js\"; eslint src --fix"
},

View File

@ -1,7 +1,9 @@
import reducer from 'src/data_explorer/reducers/queryConfigs'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
fill,
timeShift,
chooseTag,
groupByTag,
groupByTime,
@ -26,63 +28,63 @@ const fakeAddQueryAction = (panelID, queryID) => {
}
}
function buildInitialState(queryId, params) {
return Object.assign({}, defaultQueryConfig({id: queryId}), params)
function buildInitialState(queryID, params) {
return Object.assign({}, defaultQueryConfig({id: queryID}), params)
}
describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const queryId = 123
const queryID = 123
it('can add a query', () => {
const state = reducer({}, fakeAddQueryAction('blah', queryId))
const state = reducer({}, fakeAddQueryAction('blah', queryID))
const actual = state[queryId]
const expected = defaultQueryConfig({id: queryId})
const actual = state[queryID]
const expected = defaultQueryConfig({id: queryID})
expect(actual).to.deep.equal(expected)
})
describe('choosing db, rp, and measurement', () => {
let state
beforeEach(() => {
state = reducer({}, fakeAddQueryAction('any', queryId))
state = reducer({}, fakeAddQueryAction('any', queryID))
})
it('sets the db and rp', () => {
const newState = reducer(
state,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: 'telegraf',
retentionPolicy: 'monitor',
})
)
expect(newState[queryId].database).to.equal('telegraf')
expect(newState[queryId].retentionPolicy).to.equal('monitor')
expect(newState[queryID].database).to.equal('telegraf')
expect(newState[queryID].retentionPolicy).to.equal('monitor')
})
it('sets the measurement', () => {
const newState = reducer(state, chooseMeasurement(queryId, 'mem'))
const newState = reducer(state, chooseMeasurement(queryID, 'mem'))
expect(newState[queryId].measurement).to.equal('mem')
expect(newState[queryID].measurement).to.equal('mem')
})
})
describe('a query has measurements and fields', () => {
let state
beforeEach(() => {
const one = reducer({}, fakeAddQueryAction('any', queryId))
const one = reducer({}, fakeAddQueryAction('any', queryID))
const two = reducer(
one,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: '_internal',
retentionPolicy: 'daily',
})
)
const three = reducer(two, chooseMeasurement(queryId, 'disk'))
const three = reducer(two, chooseMeasurement(queryID, 'disk'))
state = reducer(
three,
addInitialField(queryId, {
addInitialField(queryID, {
value: 'a great field',
type: 'field',
})
@ -92,91 +94,91 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
describe('choosing a new namespace', () => {
it('clears out the old measurement and fields', () => {
// what about tags?
expect(state[queryId].measurement).to.equal('disk')
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].measurement).to.equal('disk')
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: 'newdb',
retentionPolicy: 'newrp',
})
)
expect(newState[queryId].measurement).to.be.null
expect(newState[queryId].fields.length).to.equal(0)
expect(newState[queryID].measurement).to.be.null
expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('choosing a new measurement', () => {
it('leaves the namespace and clears out the old fields', () => {
// what about tags?
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
chooseMeasurement(queryId, 'newmeasurement')
chooseMeasurement(queryID, 'newmeasurement')
)
expect(state[queryId].database).to.equal(newState[queryId].database)
expect(state[queryId].retentionPolicy).to.equal(
newState[queryId].retentionPolicy
expect(state[queryID].database).to.equal(newState[queryID].database)
expect(state[queryID].retentionPolicy).to.equal(
newState[queryID].retentionPolicy
)
expect(newState[queryId].fields.length).to.equal(0)
expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('DE_TOGGLE_FIELD', () => {
it('can toggle multiple fields', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
toggleField(queryId, {
toggleField(queryID, {
value: 'f2',
type: 'field',
})
)
expect(newState[queryId].fields.length).to.equal(2)
expect(newState[queryId].fields[1].alias).to.deep.equal('mean_f2')
expect(newState[queryId].fields[1].args).to.deep.equal([
expect(newState[queryID].fields.length).to.equal(2)
expect(newState[queryID].fields[1].alias).to.deep.equal('mean_f2')
expect(newState[queryID].fields[1].args).to.deep.equal([
{value: 'f2', type: 'field'},
])
expect(newState[queryId].fields[1].value).to.deep.equal('mean')
expect(newState[queryID].fields[1].value).to.deep.equal('mean')
})
it('applies a func to newly selected fields', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryId].fields[0].type).to.equal('func')
expect(state[queryId].fields[0].value).to.equal('mean')
expect(state[queryID].fields.length).to.equal(1)
expect(state[queryID].fields[0].type).to.equal('func')
expect(state[queryID].fields[0].value).to.equal('mean')
const newState = reducer(
state,
toggleField(queryId, {
toggleField(queryID, {
value: 'f2',
type: 'field',
})
)
expect(newState[queryId].fields[1].value).to.equal('mean')
expect(newState[queryId].fields[1].alias).to.equal('mean_f2')
expect(newState[queryId].fields[1].args).to.deep.equal([
expect(newState[queryID].fields[1].value).to.equal('mean')
expect(newState[queryID].fields[1].alias).to.equal('mean_f2')
expect(newState[queryID].fields[1].args).to.deep.equal([
{value: 'f2', type: 'field'},
])
expect(newState[queryId].fields[1].type).to.equal('func')
expect(newState[queryID].fields[1].type).to.equal('func')
})
it('adds the field property to query config if not found', () => {
delete state[queryId].fields
expect(state[queryId].fields).to.equal(undefined)
delete state[queryID].fields
expect(state[queryID].fields).to.equal(undefined)
const newState = reducer(
state,
toggleField(queryId, {value: 'fk1', type: 'field'})
toggleField(queryID, {value: 'fk1', type: 'field'})
)
expect(newState[queryId].fields.length).to.equal(1)
expect(newState[queryID].fields.length).to.equal(1)
})
})
})
@ -189,7 +191,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const f4 = {value: 'f4', type: 'field'}
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -201,7 +203,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
},
}
const action = applyFuncsToField(queryId, {
const action = applyFuncsToField(queryID, {
field: {value: 'f1', type: 'field'},
funcs: [
{value: 'fn3', type: 'func', args: []},
@ -211,7 +213,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const nextState = reducer(initialState, action)
expect(nextState[queryId].fields).to.deep.equal([
expect(nextState[queryID].fields).to.deep.equal([
{value: 'fn3', type: 'func', args: [f1], alias: `fn3_${f1.value}`},
{value: 'fn4', type: 'func', args: [f1], alias: `fn4_${f1.value}`},
{value: 'fn1', type: 'func', args: [f2], alias: `fn1_${f2.value}`},
@ -230,7 +232,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const groupBy = {time: '1m', tags: []}
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -239,35 +241,35 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
},
}
const action = removeFuncs(queryId, fields, groupBy)
const action = removeFuncs(queryID, fields, groupBy)
const nextState = reducer(initialState, action)
const actual = nextState[queryId].fields
const actual = nextState[queryID].fields
const expected = [f1, f2]
expect(actual).to.eql(expected)
expect(nextState[queryId].groupBy.time).to.equal(null)
expect(nextState[queryID].groupBy.time).to.equal(null)
})
})
describe('DE_CHOOSE_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {
k1: ['v0'],
k2: ['foo'],
},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
expect(nextState[queryID].tags).to.eql({
k1: ['v0', 'v1'],
k2: ['foo'],
})
@ -275,31 +277,31 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
it("creates a new entry if it's the first key", () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
expect(nextState[queryID].tags).to.eql({
k1: ['v1'],
})
})
it('removes a value that is already in the list', () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {
k1: ['v1'],
},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
@ -307,14 +309,14 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const nextState = reducer(initialState, action)
// TODO: this should probably remove the `k1` property entirely from the tags object
expect(nextState[queryId].tags).to.eql({})
expect(nextState[queryID].tags).to.eql({})
})
})
describe('DE_GROUP_BY_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -323,11 +325,11 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
groupBy: {tags: [], time: null},
},
}
const action = groupByTag(queryId, 'k1')
const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: ['k1'],
})
@ -335,7 +337,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
it('removes a tag if the given tag key is already in the GROUP BY list', () => {
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -344,11 +346,11 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
groupBy: {tags: ['k1'], time: null},
},
}
const action = groupByTag(queryId, 'k1')
const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: [],
})
@ -358,14 +360,14 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
describe('DE_TOGGLE_TAG_ACCEPTANCE', () => {
it('it toggles areTagsAccepted', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = toggleTagAcceptance(queryId)
const action = toggleTagAcceptance(queryID)
const nextState = reducer(initialState, action)
expect(nextState[queryId].areTagsAccepted).to.equal(
!initialState[queryId].areTagsAccepted
expect(nextState[queryID].areTagsAccepted).to.equal(
!initialState[queryID].areTagsAccepted
)
})
})
@ -374,99 +376,113 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
it('applys the appropriate group by time', () => {
const time = '100y'
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = groupByTime(queryId, time)
const action = groupByTime(queryID, time)
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy.time).to.equal(time)
expect(nextState[queryID].groupBy.time).to.equal(time)
})
})
it('updates entire config', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const expected = defaultQueryConfig({id: queryId}, {rawText: 'hello'})
const expected = defaultQueryConfig({id: queryID}, {rawText: 'hello'})
const action = updateQueryConfig(expected)
const nextState = reducer(initialState, action)
expect(nextState[queryId]).to.deep.equal(expected)
expect(nextState[queryID]).to.deep.equal(expected)
})
it("updates a query's raw text", () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const text = 'foo'
const action = updateRawQuery(queryId, text)
const action = updateRawQuery(queryID, text)
const nextState = reducer(initialState, action)
expect(nextState[queryId].rawText).to.equal('foo')
expect(nextState[queryID].rawText).to.equal('foo')
})
it("updates a query's raw status", () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const status = 'your query was sweet'
const action = editQueryStatus(queryId, status)
const action = editQueryStatus(queryID, status)
const nextState = reducer(initialState, action)
expect(nextState[queryId].status).to.equal(status)
expect(nextState[queryID].status).to.equal(status)
})
describe('DE_FILL', () => {
it('applies an explicit fill when group by time is used', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const time = '10s'
const action = groupByTime(queryId, time)
const action = groupByTime(queryID, time)
const nextState = reducer(initialState, action)
expect(nextState[queryId].fill).to.equal(NULL_STRING)
expect(nextState[queryID].fill).to.equal(NULL_STRING)
})
it('updates fill to non-null-string non-number string value', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = fill(queryId, LINEAR)
const action = fill(queryID, LINEAR)
const nextState = reducer(initialState, action)
expect(nextState[queryId].fill).to.equal(LINEAR)
expect(nextState[queryID].fill).to.equal(LINEAR)
})
it('updates fill to string integer value', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const INT_STRING = '1337'
const action = fill(queryId, INT_STRING)
const action = fill(queryID, INT_STRING)
const nextState = reducer(initialState, action)
expect(nextState[queryId].fill).to.equal(INT_STRING)
expect(nextState[queryID].fill).to.equal(INT_STRING)
})
it('updates fill to string float value', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const FLOAT_STRING = '1.337'
const action = fill(queryId, FLOAT_STRING)
const action = fill(queryID, FLOAT_STRING)
const nextState = reducer(initialState, action)
expect(nextState[queryId].fill).to.equal(FLOAT_STRING)
expect(nextState[queryID].fill).to.equal(FLOAT_STRING)
})
})
describe('DE_TIME_SHIFT', () => {
it('can shift the time', () => {
const initialState = {
[queryID]: buildInitialState(queryID),
}
const shift = {quantity: 1, unit: 'd', duration: '1d'}
const action = timeShift(queryID, shift)
const nextState = reducer(initialState, action)
expect(nextState[queryID].shifts).to.deep.equal([shift])
})
})
})

View File

@ -1,14 +1,15 @@
import reducer from 'src/kapacitor/reducers/queryConfigs'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
chooseTag,
timeShift,
groupByTag,
toggleField,
groupByTime,
chooseNamespace,
chooseMeasurement,
chooseTag,
groupByTag,
toggleTagAcceptance,
toggleField,
applyFuncsToField,
groupByTime,
toggleTagAcceptance,
} from 'src/kapacitor/actions/queryConfigs'
const fakeAddQueryAction = (panelID, queryID) => {
@ -18,142 +19,142 @@ const fakeAddQueryAction = (panelID, queryID) => {
}
}
function buildInitialState(queryId, params) {
function buildInitialState(queryID, params) {
return Object.assign(
{},
defaultQueryConfig({id: queryId, isKapacitorRule: true}),
defaultQueryConfig({id: queryID, isKapacitorRule: true}),
params
)
}
describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
const queryId = 123
const queryID = 123
it('can add a query', () => {
const state = reducer({}, fakeAddQueryAction('blah', queryId))
const state = reducer({}, fakeAddQueryAction('blah', queryID))
const actual = state[queryId]
const expected = defaultQueryConfig({id: queryId, isKapacitorRule: true})
const actual = state[queryID]
const expected = defaultQueryConfig({id: queryID, isKapacitorRule: true})
expect(actual).to.deep.equal(expected)
})
describe('choosing db, rp, and measurement', () => {
let state
beforeEach(() => {
state = reducer({}, fakeAddQueryAction('any', queryId))
state = reducer({}, fakeAddQueryAction('any', queryID))
})
it('sets the db and rp', () => {
const newState = reducer(
state,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: 'telegraf',
retentionPolicy: 'monitor',
})
)
expect(newState[queryId].database).to.equal('telegraf')
expect(newState[queryId].retentionPolicy).to.equal('monitor')
expect(newState[queryID].database).to.equal('telegraf')
expect(newState[queryID].retentionPolicy).to.equal('monitor')
})
it('sets the measurement', () => {
const newState = reducer(state, chooseMeasurement(queryId, 'mem'))
const newState = reducer(state, chooseMeasurement(queryID, 'mem'))
expect(newState[queryId].measurement).to.equal('mem')
expect(newState[queryID].measurement).to.equal('mem')
})
})
describe('a query has measurements and fields', () => {
let state
beforeEach(() => {
const one = reducer({}, fakeAddQueryAction('any', queryId))
const one = reducer({}, fakeAddQueryAction('any', queryID))
const two = reducer(
one,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: '_internal',
retentionPolicy: 'daily',
})
)
const three = reducer(two, chooseMeasurement(queryId, 'disk'))
const three = reducer(two, chooseMeasurement(queryID, 'disk'))
state = reducer(
three,
toggleField(queryId, {value: 'a great field', funcs: []})
toggleField(queryID, {value: 'a great field', funcs: []})
)
})
describe('choosing a new namespace', () => {
it('clears out the old measurement and fields', () => {
// what about tags?
expect(state[queryId].measurement).to.exist
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].measurement).to.exist
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: 'newdb',
retentionPolicy: 'newrp',
})
)
expect(newState[queryId].measurement).not.to.exist
expect(newState[queryId].fields.length).to.equal(0)
expect(newState[queryID].measurement).not.to.exist
expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('choosing a new measurement', () => {
it('leaves the namespace and clears out the old fields', () => {
// what about tags?
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
chooseMeasurement(queryId, 'newmeasurement')
chooseMeasurement(queryID, 'newmeasurement')
)
expect(state[queryId].database).to.equal(newState[queryId].database)
expect(state[queryId].retentionPolicy).to.equal(
newState[queryId].retentionPolicy
expect(state[queryID].database).to.equal(newState[queryID].database)
expect(state[queryID].retentionPolicy).to.equal(
newState[queryID].retentionPolicy
)
expect(newState[queryId].fields.length).to.equal(0)
expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('when the query is part of a kapacitor rule', () => {
it('only allows one field', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
toggleField(queryId, {value: 'a different field', type: 'field'})
toggleField(queryID, {value: 'a different field', type: 'field'})
)
expect(newState[queryId].fields.length).to.equal(1)
expect(newState[queryId].fields[0].value).to.equal('a different field')
expect(newState[queryID].fields.length).to.equal(1)
expect(newState[queryID].fields[0].value).to.equal('a different field')
})
})
describe('KAPA_TOGGLE_FIELD', () => {
it('cannot toggle multiple fields', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
toggleField(queryId, {value: 'a different field', type: 'field'})
toggleField(queryID, {value: 'a different field', type: 'field'})
)
expect(newState[queryId].fields.length).to.equal(1)
expect(newState[queryId].fields[0].value).to.equal('a different field')
expect(newState[queryID].fields.length).to.equal(1)
expect(newState[queryID].fields[0].value).to.equal('a different field')
})
it('applies no funcs to newly selected fields', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
toggleField(queryId, {value: 'a different field', type: 'field'})
toggleField(queryID, {value: 'a different field', type: 'field'})
)
expect(newState[queryId].fields[0].type).to.equal('field')
expect(newState[queryID].fields[0].type).to.equal('field')
})
})
})
@ -162,7 +163,7 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it('applies functions to a field without any existing functions', () => {
const f1 = {value: 'f1', type: 'field'}
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -174,13 +175,13 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
},
}
const action = applyFuncsToField(queryId, {
const action = applyFuncsToField(queryID, {
field: {value: 'f1', type: 'field'},
funcs: [{value: 'fn3', type: 'func'}, {value: 'fn4', type: 'func'}],
})
const nextState = reducer(initialState, action)
const actual = nextState[queryId].fields
const actual = nextState[queryID].fields
const expected = [
{value: 'fn3', type: 'func', args: [f1], alias: `fn3_${f1.value}`},
{value: 'fn4', type: 'func', args: [f1], alias: `fn4_${f1.value}`},
@ -193,21 +194,21 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
describe('KAPA_CHOOSE_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {
k1: ['v0'],
k2: ['foo'],
},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
expect(nextState[queryID].tags).to.eql({
k1: ['v0', 'v1'],
k2: ['foo'],
})
@ -215,31 +216,31 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it("creates a new entry if it's the first key", () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
expect(nextState[queryID].tags).to.eql({
k1: ['v1'],
})
})
it('removes a value that is already in the list', () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {
k1: ['v1'],
},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
@ -247,14 +248,14 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
const nextState = reducer(initialState, action)
// TODO: this should probably remove the `k1` property entirely from the tags object
expect(nextState[queryId].tags).to.eql({})
expect(nextState[queryID].tags).to.eql({})
})
})
describe('KAPA_GROUP_BY_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -263,11 +264,11 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
groupBy: {tags: [], time: null},
},
}
const action = groupByTag(queryId, 'k1')
const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: ['k1'],
})
@ -275,7 +276,7 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it('removes a tag if the given tag key is already in the GROUP BY list', () => {
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -284,11 +285,11 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
groupBy: {tags: ['k1'], time: null},
},
}
const action = groupByTag(queryId, 'k1')
const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: [],
})
@ -298,14 +299,14 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
describe('KAPA_TOGGLE_TAG_ACCEPTANCE', () => {
it('it toggles areTagsAccepted', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = toggleTagAcceptance(queryId)
const action = toggleTagAcceptance(queryID)
const nextState = reducer(initialState, action)
expect(nextState[queryId].areTagsAccepted).to.equal(
!initialState[queryId].areTagsAccepted
expect(nextState[queryID].areTagsAccepted).to.equal(
!initialState[queryID].areTagsAccepted
)
})
})
@ -314,14 +315,28 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it('applys the appropriate group by time', () => {
const time = '100y'
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = groupByTime(queryId, time)
const action = groupByTime(queryID, time)
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy.time).to.equal(time)
expect(nextState[queryID].groupBy.time).to.equal(time)
})
})
describe('KAPA_TIME_SHIFT', () => {
it('can shift the time', () => {
const initialState = {
[queryID]: buildInitialState(queryID),
}
const shift = {quantity: 1, unit: 'd', duration: '1d'}
const action = timeShift(queryID, shift)
const nextState = reducer(initialState, action)
expect(nextState[queryID].shifts).to.deep.equal([shift])
})
})
})

View File

@ -1,10 +1,15 @@
import {buildRoles, buildClusterAccounts} from 'shared/presenters'
import {
buildRoles,
buildClusterAccounts,
buildDefaultYLabel,
} from 'shared/presenters'
import defaultQueryConfig from 'utils/defaultQueryConfig'
describe('Presenters', function() {
describe('roles utils', function() {
describe('buildRoles', function() {
describe('when a role has no users', function() {
it("sets a role's users as an empty array", function() {
describe('Presenters', () => {
describe('roles utils', () => {
describe('buildRoles', () => {
describe('when a role has no users', () => {
it("sets a role's users as an empty array", () => {
const roles = [
{
name: 'Marketing',
@ -20,8 +25,8 @@ describe('Presenters', function() {
})
})
describe('when a role has no permissions', function() {
it("set's a roles permission as an empty array", function() {
describe('when a role has no permissions', () => {
it("set's a roles permission as an empty array", () => {
const roles = [
{
name: 'Marketing',
@ -35,9 +40,10 @@ describe('Presenters', function() {
})
})
describe('when a role has users and permissions', function() {
beforeEach(function() {
const roles = [
describe('when a role has users and permissions', () => {
let roles
beforeEach(() => {
const rs = [
{
name: 'Marketing',
permissions: {
@ -49,18 +55,18 @@ describe('Presenters', function() {
},
]
this.roles = buildRoles(roles)
roles = buildRoles(rs)
})
it('each role has a name and a list of users (if they exist)', function() {
const role = this.roles[0]
it('each role has a name and a list of users (if they exist)', () => {
const role = roles[0]
expect(role.name).to.equal('Marketing')
expect(role.users).to.contain('roley@influxdb.com')
expect(role.users).to.contain('will@influxdb.com')
})
it('transforms permissions into a list of objects and each permission has a list of resources', function() {
expect(this.roles[0].permissions).to.eql([
it('transforms permissions into a list of objects and each permission has a list of resources', () => {
expect(roles[0].permissions).to.eql([
{
name: 'ViewAdmin',
displayName: 'View Admin',
@ -85,10 +91,10 @@ describe('Presenters', function() {
})
})
describe('cluster utils', function() {
describe('buildClusterAccounts', function() {
describe('cluster utils', () => {
describe('buildClusterAccounts', () => {
// TODO: break down this test into smaller individual assertions.
it('adds role information to each cluster account and parses permissions', function() {
it('adds role information to each cluster account and parses permissions', () => {
const users = [
{
name: 'jon@example.com',
@ -192,7 +198,7 @@ describe('Presenters', function() {
expect(actual).to.eql(expected)
})
it('can handle empty results for users and roles', function() {
it('can handle empty results for users and roles', () => {
const users = undefined
const roles = undefined
@ -201,7 +207,7 @@ describe('Presenters', function() {
expect(actual).to.eql([])
})
it('sets roles to an empty array if a user has no roles', function() {
it('sets roles to an empty array if a user has no roles', () => {
const users = [
{
name: 'ned@example.com',
@ -216,4 +222,41 @@ describe('Presenters', function() {
})
})
})
describe('buildDefaultYLabel', () => {
it('can return the correct string for field', () => {
const query = defaultQueryConfig({id: 1})
const fields = [{value: 'usage_system', type: 'field'}]
const measurement = 'm1'
const queryConfig = {...query, measurement, fields}
const actual = buildDefaultYLabel(queryConfig)
expect(actual).to.equal('m1.usage_system')
})
it('can return the correct string for funcs with args', () => {
const query = defaultQueryConfig({id: 1})
const field = {value: 'usage_system', type: 'field'}
const args = {
value: 'mean',
type: 'func',
args: [field],
alias: '',
}
const f1 = {
value: 'derivative',
type: 'func',
args: [args],
alias: '',
}
const fields = [f1]
const measurement = 'm1'
const queryConfig = {...query, measurement, fields}
const actual = buildDefaultYLabel(queryConfig)
expect(actual).to.equal('m1.derivative_mean_usage_system')
})
})
})

View File

@ -0,0 +1,109 @@
import {timeRangeType, shiftTimeRange} from 'shared/query/helpers'
import moment from 'moment'
import {
INVALID,
ABSOLUTE,
INFLUXQL,
RELATIVE_LOWER,
RELATIVE_UPPER,
} from 'shared/constants/timeRange'
const format = INFLUXQL
describe('Shared.Query.Helpers', () => {
describe('timeRangeTypes', () => {
it('returns invalid if no upper and lower', () => {
const upper = null
const lower = null
const timeRange = {lower, upper}
expect(timeRangeType(timeRange)).to.equal(INVALID)
})
it('can detect absolute type', () => {
const tenMinutes = 600000
const upper = Date.now()
const lower = upper - tenMinutes
const timeRange = {lower, upper, format}
expect(timeRangeType(timeRange)).to.equal(ABSOLUTE)
})
it('can detect exclusive relative lower', () => {
const lower = 'now() - 15m'
const upper = null
const timeRange = {lower, upper, format}
expect(timeRangeType(timeRange)).to.equal(RELATIVE_LOWER)
})
it('can detect relative upper', () => {
const upper = 'now()'
const oneMinute = 60000
const lower = Date.now() - oneMinute
const timeRange = {lower, upper, format}
expect(timeRangeType(timeRange)).to.equal(RELATIVE_UPPER)
})
})
describe('timeRangeShift', () => {
it('can calculate the shift for absolute timeRanges', () => {
const upper = Date.now()
const oneMinute = 60000
const lower = Date.now() - oneMinute
const shift = {quantity: 7, unit: 'd'}
const timeRange = {upper, lower}
const type = timeRangeType(timeRange)
const actual = shiftTimeRange(timeRange, shift)
const expected = {
lower: `${lower} - 7d`,
upper: `${upper} - 7d`,
type: 'shifted',
}
expect(type).to.equal(ABSOLUTE)
expect(actual).to.deep.equal(expected)
})
it('can calculate the shift for relative lower timeRanges', () => {
const shift = {quantity: 7, unit: 'd'}
const lower = 'now() - 15m'
const timeRange = {lower, upper: null}
const type = timeRangeType(timeRange)
const actual = shiftTimeRange(timeRange, shift)
const expected = {
lower: `${lower} - 7d`,
upper: `now() - 7d`,
type: 'shifted',
}
expect(type).to.equal(RELATIVE_LOWER)
expect(actual).to.deep.equal(expected)
})
it('can calculate the shift for relative upper timeRanges', () => {
const upper = Date.now()
const oneMinute = 60000
const lower = Date.now() - oneMinute
const shift = {quantity: 7, unit: 'd'}
const timeRange = {upper, lower}
const type = timeRangeType(timeRange)
const actual = shiftTimeRange(timeRange, shift)
const expected = {
lower: `${lower} - 7d`,
upper: `${upper} - 7d`,
type: 'shifted',
}
expect(type).to.equal(ABSOLUTE)
expect(actual).to.deep.equal(expected)
})
})
})

View File

@ -228,11 +228,7 @@ describe('timeSeriesToDygraph', () => {
]
const isInDataExplorer = true
const actual = timeSeriesToDygraph(
influxResponse,
undefined,
isInDataExplorer
)
const actual = timeSeriesToDygraph(influxResponse, isInDataExplorer)
const expected = {}

View File

@ -1,58 +1,85 @@
import React, {PropTypes} from 'react'
import React, {PropTypes, Component} from 'react'
import Authorized, {EDITOR_ROLE} from 'src/auth/Authorized'
import DashboardsTable from 'src/dashboards/components/DashboardsTable'
import SearchBar from 'src/hosts/components/SearchBar'
import FancyScrollbar from 'shared/components/FancyScrollbar'
const DashboardsPageContents = ({
dashboards,
onDeleteDashboard,
onCreateDashboard,
dashboardLink,
}) => {
let tableHeader
if (dashboards === null) {
tableHeader = 'Loading Dashboards...'
} else if (dashboards.length === 1) {
tableHeader = '1 Dashboard'
} else {
tableHeader = `${dashboards.length} Dashboards`
class DashboardsPageContents extends Component {
constructor(props) {
super(props)
this.state = {
searchTerm: '',
}
}
return (
<FancyScrollbar className="page-contents">
<div className="container-fluid">
<div className="row">
<div className="col-md-12">
<div className="panel panel-minimal">
<div className="panel-heading u-flex u-ai-center u-jc-space-between">
<h2 className="panel-title">
{tableHeader}
</h2>
<Authorized requiredRole={EDITOR_ROLE}>
<button
className="btn btn-sm btn-primary"
onClick={onCreateDashboard}
>
<span className="icon plus" /> Create Dashboard
</button>
</Authorized>
</div>
<div className="panel-body">
<DashboardsTable
dashboards={dashboards}
onDeleteDashboard={onDeleteDashboard}
onCreateDashboard={onCreateDashboard}
dashboardLink={dashboardLink}
/>
filterDashboards = searchTerm => {
this.setState({searchTerm})
}
render() {
const {
dashboards,
onDeleteDashboard,
onCreateDashboard,
dashboardLink,
} = this.props
const {searchTerm} = this.state
let tableHeader
if (dashboards === null) {
tableHeader = 'Loading Dashboards...'
} else if (dashboards.length === 1) {
tableHeader = '1 Dashboard'
} else {
tableHeader = `${dashboards.length} Dashboards`
}
const filteredDashboards = dashboards.filter(d =>
d.name.toLowerCase().includes(searchTerm.toLowerCase())
)
return (
<FancyScrollbar className="page-contents">
<div className="container-fluid">
<div className="row">
<div className="col-md-12">
<div className="panel panel-minimal">
<div className="panel-heading u-flex u-ai-center u-jc-space-between">
<h2 className="panel-title">
{tableHeader}
</h2>
<div className="u-flex u-ai-center dashboards-page--actions">
<SearchBar
placeholder="Filter by Name..."
onSearch={this.filterDashboards}
/>
<Authorized requiredRole={EDITOR_ROLE}>
<button
className="btn btn-sm btn-primary"
onClick={onCreateDashboard}
>
<span className="icon plus" /> Create Dashboard
</button>
</Authorized>
</div>
</div>
<div className="panel-body">
<DashboardsTable
dashboards={filteredDashboards}
onDeleteDashboard={onDeleteDashboard}
onCreateDashboard={onCreateDashboard}
dashboardLink={dashboardLink}
/>
</div>
</div>
</div>
</div>
</div>
</div>
</FancyScrollbar>
)
</FancyScrollbar>
)
}
}
const {arrayOf, func, shape, string} = PropTypes

View File

@ -1,33 +1,30 @@
import React, {PropTypes} from 'react'
import DeleteConfirmButtons from 'shared/components/DeleteConfirmButtons'
const RowButtons = ({
onStartEdit,
isEditing,
onCancelEdit,
onDelete,
id,
selectedType,
}) => {
const RowButtons = ({onStartEdit, isEditing, onCancelEdit, onDelete, id}) => {
if (isEditing) {
return (
<div className="tvm-actions">
<button
className="btn btn-sm btn-info"
className="btn btn-sm btn-info btn-square"
type="button"
onClick={onCancelEdit}
>
Cancel
<span className="icon remove" />
</button>
<button className="btn btn-sm btn-success" type="submit">
{selectedType === 'csv' ? 'Save Values' : 'Get Values'}
<button className="btn btn-sm btn-success btn-square" type="submit">
<span className="icon checkmark" />
</button>
</div>
)
}
return (
<div className="tvm-actions">
<DeleteConfirmButtons onDelete={onDelete(id)} />
<DeleteConfirmButtons
onDelete={onDelete(id)}
icon="remove"
square={true}
/>
<button
className="btn btn-sm btn-info btn-edit btn-square"
type="button"

View File

@ -79,35 +79,29 @@ export const applyMasks = query => {
const maskForWholeTemplates = '😸$1😸'
return query.replace(matchWholeTemplates, maskForWholeTemplates)
}
export const insertTempVar = (query, tempVar) => {
return query.replace(MATCH_INCOMPLETE_TEMPLATES, tempVar)
}
export const unMask = query => {
return query.replace(/😸/g, ':')
}
export const removeUnselectedTemplateValues = templates => {
return templates.map(template => {
const selectedValues = template.values.filter(value => value.selected)
return {...template, values: selectedValues}
})
}
export const DISPLAY_OPTIONS = {
LINEAR: 'linear',
LOG: 'log',
BASE_2: '2',
BASE_10: '10',
}
export const TOOLTIP_CONTENT = {
FORMAT:
'<p><strong>K/M/B</strong> = Thousand / Million / Billion<br/><strong>K/M/G</strong> = Kilo / Mega / Giga </p>',
}
export const TYPE_QUERY_CONFIG = 'queryConfig'
export const TYPE_SHIFTED = 'shifted queryConfig'
export const TYPE_IFQL = 'ifql'
export const DASHBOARD_NAME_MAX_LENGTH = 50

View File

@ -2,8 +2,11 @@ import React, {PropTypes, Component} from 'react'
import {connect} from 'react-redux'
import {bindActionCreators} from 'redux'
import _ from 'lodash'
import Dygraph from 'src/external/dygraph'
import {isUserAuthorized, EDITOR_ROLE} from 'src/auth/Authorized'
import OverlayTechnologies from 'shared/components/OverlayTechnologies'
import CellEditorOverlay from 'src/dashboards/components/CellEditorOverlay'
import DashboardHeader from 'src/dashboards/components/DashboardHeader'
@ -34,24 +37,26 @@ class DashboardPage extends Component {
super(props)
this.state = {
dygraphs: [],
isEditMode: false,
selectedCell: null,
isTemplating: false,
zoomedTimeRange: {zoomedLower: null, zoomedUpper: null},
names: [],
}
}
dygraphs = []
async componentDidMount() {
const {
params: {dashboardID, sourceID},
params: {dashboardID},
dashboardActions: {
getDashboardsAsync,
updateTempVarValues,
putDashboardByID,
},
source,
meRole,
isUsingAuth,
} = this.props
const dashboards = await getDashboardsAsync()
@ -59,16 +64,13 @@ class DashboardPage extends Component {
d => d.id === idNormalizer(TYPE_ID, dashboardID)
)
// Refresh and persists influxql generated template variable values
await updateTempVarValues(source, dashboard)
await putDashboardByID(dashboardID)
const names = dashboards.map(d => ({
name: d.name,
link: `/sources/${sourceID}/dashboards/${d.id}`,
}))
this.setState({names})
// Refresh and persists influxql generated template variable values.
// If using auth and role is Viewer, temp vars will be stale until dashboard
// is refactored so as not to require a write operation (a PUT in this case)
if (!isUsingAuth || isUserAuthorized(meRole, EDITOR_ROLE)) {
await updateTempVarValues(source, dashboard)
await putDashboardByID(dashboardID)
}
}
handleOpenTemplateManager = () => {
@ -109,11 +111,16 @@ class DashboardPage extends Component {
}
handleUpdatePosition = cells => {
const {dashboardActions, dashboard} = this.props
const {dashboardActions, dashboard, meRole, isUsingAuth} = this.props
const newDashboard = {...dashboard, cells}
dashboardActions.updateDashboard(newDashboard)
dashboardActions.putDashboard(newDashboard)
// GridLayout invokes onLayoutChange on first load, which bubbles up to
// invoke handleUpdatePosition. If using auth, Viewer is not authorized to
// PUT, so until the need for PUT is removed, this is prevented.
if (!isUsingAuth || isUserAuthorized(meRole, EDITOR_ROLE)) {
dashboardActions.updateDashboard(newDashboard)
dashboardActions.putDashboard(newDashboard)
}
}
handleAddCell = () => {
@ -178,16 +185,19 @@ class DashboardPage extends Component {
}
synchronizer = dygraph => {
const dygraphs = [...this.state.dygraphs, dygraph].filter(d => d.graphDiv)
const dygraphs = [...this.dygraphs, dygraph].filter(d => d.graphDiv)
const {dashboards, params: {dashboardID}} = this.props
const dashboard = dashboards.find(
d => d.id === idNormalizer(TYPE_ID, dashboardID)
)
// Get only the graphs that can sync the hover line
const graphsToSync = dashboard.cells.filter(c => c.type !== 'single-stat')
if (
dashboard &&
dygraphs.length === dashboard.cells.length &&
dygraphs.length === graphsToSync.length &&
dashboard.cells.length > 1
) {
Dygraph.synchronize(dygraphs, {
@ -197,7 +207,7 @@ class DashboardPage extends Component {
})
}
this.setState({dygraphs})
this.dygraphs = dygraphs
}
handleToggleTempVarControls = () => {
@ -263,14 +273,23 @@ class DashboardPage extends Component {
],
}
// this controls the auto group by behavior
const interval = {
id: 'interval',
type: 'constant',
type: 'autoGroupBy',
tempVar: ':interval:',
resolution: 1000,
reportingInterval: 10000000000,
values: [],
label: 'automatically determine the best group by time',
values: [
{
value: '1000', // pixels
type: 'resolution',
selected: true,
},
{
value: '3',
type: 'pointsPerPixel',
selected: true,
},
],
}
let templatesIncludingDashTime
@ -285,7 +304,11 @@ class DashboardPage extends Component {
templatesIncludingDashTime = []
}
const {selectedCell, isEditMode, isTemplating, names} = this.state
const {selectedCell, isEditMode, isTemplating} = this.state
const names = dashboards.map(d => ({
name: d.name,
link: `/sources/${sourceID}/dashboards/${d.id}`,
}))
return (
<div className="page">
@ -431,6 +454,8 @@ DashboardPage.propTypes = {
errorThrown: func,
manualRefresh: number.isRequired,
onManualRefresh: func.isRequired,
meRole: string,
isUsingAuth: bool.isRequired,
}
const mapStateToProps = (state, {params: {dashboardID}}) => {
@ -442,7 +467,9 @@ const mapStateToProps = (state, {params: {dashboardID}}) => {
dashboardUI: {dashboards, cellQueryStatus},
sources,
dashTimeV1,
auth: {me, isUsingAuth},
} = state
const meRole = _.get(me, 'role', null)
const timeRange =
dashTimeV1.ranges.find(
@ -462,6 +489,8 @@ const mapStateToProps = (state, {params: {dashboardID}}) => {
inPresentationMode,
cellQueryStatus,
sources,
meRole,
isUsingAuth,
}
}

View File

@ -1,4 +1,4 @@
import React, {PropTypes} from 'react'
import React, {PropTypes, Component} from 'react'
import {withRouter} from 'react-router'
import {connect} from 'react-redux'
import {bindActionCreators} from 'redux'
@ -11,40 +11,20 @@ import {getDashboardsAsync, deleteDashboardAsync} from 'src/dashboards/actions'
import {NEW_DASHBOARD} from 'src/dashboards/constants'
const {arrayOf, func, string, shape} = PropTypes
const DashboardsPage = React.createClass({
propTypes: {
source: shape({
id: string.isRequired,
name: string.isRequired,
type: string,
links: shape({
proxy: string.isRequired,
}).isRequired,
telegraf: string.isRequired,
}),
router: shape({
push: func.isRequired,
}).isRequired,
handleGetDashboards: func.isRequired,
handleDeleteDashboard: func.isRequired,
dashboards: arrayOf(shape()),
},
class DashboardsPage extends Component {
componentDidMount() {
this.props.handleGetDashboards()
},
}
async handleCreateDashbord() {
handleCreateDashbord = async () => {
const {source: {id}, router: {push}} = this.props
const {data} = await createDashboard(NEW_DASHBOARD)
push(`/sources/${id}/dashboards/${data.id}`)
},
}
handleDeleteDashboard(dashboard) {
handleDeleteDashboard = dashboard => {
this.props.handleDeleteDashboard(dashboard)
},
}
render() {
const {dashboards} = this.props
@ -61,8 +41,28 @@ const DashboardsPage = React.createClass({
/>
</div>
)
},
})
}
}
const {arrayOf, func, string, shape} = PropTypes
DashboardsPage.propTypes = {
source: shape({
id: string.isRequired,
name: string.isRequired,
type: string,
links: shape({
proxy: string.isRequired,
}).isRequired,
telegraf: string.isRequired,
}),
router: shape({
push: func.isRequired,
}).isRequired,
handleGetDashboards: func.isRequired,
handleDeleteDashboard: func.isRequired,
dashboards: arrayOf(shape()),
}
const mapStateToProps = ({dashboardUI: {dashboards, dashboard}}) => ({
dashboards,

View File

@ -18,26 +18,26 @@ export const deleteQuery = queryID => ({
},
})
export const toggleField = (queryId, fieldFunc) => ({
export const toggleField = (queryID, fieldFunc) => ({
type: 'DE_TOGGLE_FIELD',
payload: {
queryId,
queryID,
fieldFunc,
},
})
export const groupByTime = (queryId, time) => ({
export const groupByTime = (queryID, time) => ({
type: 'DE_GROUP_BY_TIME',
payload: {
queryId,
queryID,
time,
},
})
export const fill = (queryId, value) => ({
export const fill = (queryID, value) => ({
type: 'DE_FILL',
payload: {
queryId,
queryID,
value,
},
})
@ -51,44 +51,44 @@ export const removeFuncs = (queryID, fields, groupBy) => ({
},
})
export const applyFuncsToField = (queryId, fieldFunc, groupBy) => ({
export const applyFuncsToField = (queryID, fieldFunc, groupBy) => ({
type: 'DE_APPLY_FUNCS_TO_FIELD',
payload: {
queryId,
queryID,
fieldFunc,
groupBy,
},
})
export const chooseTag = (queryId, tag) => ({
export const chooseTag = (queryID, tag) => ({
type: 'DE_CHOOSE_TAG',
payload: {
queryId,
queryID,
tag,
},
})
export const chooseNamespace = (queryId, {database, retentionPolicy}) => ({
export const chooseNamespace = (queryID, {database, retentionPolicy}) => ({
type: 'DE_CHOOSE_NAMESPACE',
payload: {
queryId,
queryID,
database,
retentionPolicy,
},
})
export const chooseMeasurement = (queryId, measurement) => ({
export const chooseMeasurement = (queryID, measurement) => ({
type: 'DE_CHOOSE_MEASUREMENT',
payload: {
queryId,
queryID,
measurement,
},
})
export const editRawText = (queryId, rawText) => ({
export const editRawText = (queryID, rawText) => ({
type: 'DE_EDIT_RAW_TEXT',
payload: {
queryId,
queryID,
rawText,
},
})
@ -100,18 +100,18 @@ export const setTimeRange = bounds => ({
},
})
export const groupByTag = (queryId, tagKey) => ({
export const groupByTag = (queryID, tagKey) => ({
type: 'DE_GROUP_BY_TAG',
payload: {
queryId,
queryID,
tagKey,
},
})
export const toggleTagAcceptance = queryId => ({
export const toggleTagAcceptance = queryID => ({
type: 'DE_TOGGLE_TAG_ACCEPTANCE',
payload: {
queryId,
queryID,
},
})
@ -147,6 +147,14 @@ export const editQueryStatus = (queryID, status) => ({
},
})
// Action creator: apply a relative time shift (e.g. a "-1h" offset) to the
// Data Explorer query identified by `queryID`.
export const timeShift = (queryID, shift) => {
  return {
    type: 'DE_TIME_SHIFT',
    payload: {queryID, shift},
  }
}
// Async actions
export const editRawTextAsync = (url, id, text) => async dispatch => {
try {

View File

@ -7,13 +7,10 @@ import Dropdown from 'shared/components/Dropdown'
import {AUTO_GROUP_BY} from 'shared/constants'
const {func, string, shape} = PropTypes
const isInRuleBuilder = pathname => pathname.includes('alert-rules')
const isInDataExplorer = pathname => pathname.includes('data-explorer')
const getOptions = pathname =>
isInDataExplorer(pathname) || isInRuleBuilder(pathname)
isInRuleBuilder(pathname)
? groupByTimeOptions.filter(({menuOption}) => menuOption !== AUTO_GROUP_BY)
: groupByTimeOptions
@ -37,6 +34,8 @@ const GroupByTimeDropdown = ({
/>
</div>
const {func, string, shape} = PropTypes
GroupByTimeDropdown.propTypes = {
location: shape({
pathname: string.isRequired,

View File

@ -7,6 +7,7 @@ import {Table, Column, Cell} from 'fixed-data-table'
import Dropdown from 'shared/components/Dropdown'
import CustomCell from 'src/data_explorer/components/CustomCell'
import TabItem from 'src/data_explorer/components/TableTabItem'
import {TEMPLATES} from 'src/data_explorer/constants'
import {fetchTimeSeriesAsync} from 'shared/actions/timeSeries'
@ -43,7 +44,11 @@ class ChronoTable extends Component {
this.setState({isLoading: true})
// second param is db, we want to leave this blank
try {
const {results} = await fetchTimeSeriesAsync({source: query.host, query})
const {results} = await fetchTimeSeriesAsync({
source: query.host,
query,
tempVars: TEMPLATES,
})
this.setState({isLoading: false})
let series = _.get(results, ['0', 'series'], [])

View File

@ -1,12 +1,12 @@
import React, {PropTypes, Component} from 'react'
import buildInfluxQLQuery from 'utils/influxql'
import classnames from 'classnames'
import VisHeader from 'src/data_explorer/components/VisHeader'
import VisView from 'src/data_explorer/components/VisView'
import {GRAPH, TABLE} from 'shared/constants'
import buildQueries from 'utils/buildQueriesForGraphs'
import _ from 'lodash'
const META_QUERY_REGEX = /^show/i
const META_QUERY_REGEX = /^(show|create|drop)/i
class Visualization extends Component {
constructor(props) {
@ -61,19 +61,11 @@ class Visualization extends Component {
resizerBottomHeight,
errorThrown,
} = this.props
const {source: {links: {proxy}}} = this.context
const {view} = this.state
const statements = queryConfigs.map(query => {
const text =
query.rawText || buildInfluxQLQuery(query.range || timeRange, query)
return {text, id: query.id, queryConfig: query}
})
const queries = statements.filter(s => s.text !== null).map(s => {
return {host: [proxy], text: s.text, id: s.id, queryConfig: s.queryConfig}
})
const queries = buildQueries(proxy, queryConfigs, timeRange)
const activeQuery = queries[activeQueryIndex]
const defaultQuery = queries[0]
const query = activeQuery || defaultQuery
@ -81,12 +73,12 @@ class Visualization extends Component {
return (
<div className="graph" style={{height}}>
<VisHeader
views={views}
view={view}
onToggleView={this.handleToggleView}
name={cellName}
views={views}
query={query}
name={cellName}
errorThrown={errorThrown}
onToggleView={this.handleToggleView}
/>
<div
className={classnames({

View File

@ -81,3 +81,16 @@ export const QUERY_TEMPLATES = [
{text: 'Show Stats', query: 'SHOW STATS'},
{text: 'Show Diagnostics', query: 'SHOW DIAGNOSTICS'},
]
// Built-in ":interval:" template variable made implicitly available to
// Data Explorer queries so the backend can auto-select a GROUP BY time.
// NOTE(review): the trailing "// pixels" suggests `resolution` is a target
// pixel count and `pointsPerPixel` a density hint — confirm against the
// query proxy's template-variable handling.
const interval = {
  id: 'interval',
  type: 'autoGroupBy',
  tempVar: ':interval:',
  label: 'automatically determine the best group by time',
  values: [
    {value: '1000', type: 'resolution', selected: true},
    {value: '3', type: 'pointsPerPixel', selected: true},
  ],
} // pixels

// Template variables always passed with Data Explorer queries.
export const TEMPLATES = [interval]

View File

@ -14,8 +14,8 @@ import ResizeContainer from 'shared/components/ResizeContainer'
import OverlayTechnologies from 'shared/components/OverlayTechnologies'
import ManualRefresh from 'src/shared/components/ManualRefresh'
import {VIS_VIEWS, INITIAL_GROUP_BY_TIME} from 'shared/constants'
import {MINIMUM_HEIGHTS, INITIAL_HEIGHTS} from '../constants'
import {VIS_VIEWS, AUTO_GROUP_BY} from 'shared/constants'
import {MINIMUM_HEIGHTS, INITIAL_HEIGHTS, TEMPLATES} from '../constants'
import {errorThrown} from 'shared/actions/errors'
import {setAutoRefresh} from 'shared/actions/app'
import * as dataExplorerActionCreators from 'src/data_explorer/actions/view'
@ -88,7 +88,6 @@ class DataExplorer extends Component {
const {showWriteForm} = this.state
const selectedDatabase = _.get(queryConfigs, ['0', 'database'], null)
return (
<div className="data-explorer">
{showWriteForm
@ -122,12 +121,13 @@ class DataExplorer extends Component {
actions={queryConfigActions}
timeRange={timeRange}
activeQuery={this.getActiveQuery()}
initialGroupByTime={INITIAL_GROUP_BY_TIME}
initialGroupByTime={AUTO_GROUP_BY}
/>
<Visualization
views={VIS_VIEWS}
activeQueryIndex={0}
timeRange={timeRange}
templates={TEMPLATES}
autoRefresh={autoRefresh}
queryConfigs={queryConfigs}
manualRefresh={manualRefresh}

View File

@ -3,6 +3,7 @@ import _ from 'lodash'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
fill,
timeShift,
chooseTag,
groupByTag,
removeFuncs,
@ -20,24 +21,24 @@ import {
const queryConfigs = (state = {}, action) => {
switch (action.type) {
case 'DE_CHOOSE_NAMESPACE': {
const {queryId, database, retentionPolicy} = action.payload
const nextQueryConfig = chooseNamespace(state[queryId], {
const {queryID, database, retentionPolicy} = action.payload
const nextQueryConfig = chooseNamespace(state[queryID], {
database,
retentionPolicy,
})
return Object.assign({}, state, {
[queryId]: Object.assign(nextQueryConfig, {rawText: null}),
[queryID]: Object.assign(nextQueryConfig, {rawText: null}),
})
}
case 'DE_CHOOSE_MEASUREMENT': {
const {queryId, measurement} = action.payload
const nextQueryConfig = chooseMeasurement(state[queryId], measurement)
const {queryID, measurement} = action.payload
const nextQueryConfig = chooseMeasurement(state[queryID], measurement)
return Object.assign({}, state, {
[queryId]: Object.assign(nextQueryConfig, {
rawText: state[queryId].rawText,
[queryID]: Object.assign(nextQueryConfig, {
rawText: state[queryID].rawText,
}),
})
}
@ -64,78 +65,78 @@ const queryConfigs = (state = {}, action) => {
}
case 'DE_EDIT_RAW_TEXT': {
const {queryId, rawText} = action.payload
const nextQueryConfig = editRawText(state[queryId], rawText)
const {queryID, rawText} = action.payload
const nextQueryConfig = editRawText(state[queryID], rawText)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_GROUP_BY_TIME': {
const {queryId, time} = action.payload
const nextQueryConfig = groupByTime(state[queryId], time)
const {queryID, time} = action.payload
const nextQueryConfig = groupByTime(state[queryID], time)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_TOGGLE_TAG_ACCEPTANCE': {
const {queryId} = action.payload
const nextQueryConfig = toggleTagAcceptance(state[queryId])
const {queryID} = action.payload
const nextQueryConfig = toggleTagAcceptance(state[queryID])
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_TOGGLE_FIELD': {
const {queryId, fieldFunc} = action.payload
const nextQueryConfig = toggleField(state[queryId], fieldFunc)
const {queryID, fieldFunc} = action.payload
const nextQueryConfig = toggleField(state[queryID], fieldFunc)
return Object.assign({}, state, {
[queryId]: {...nextQueryConfig, rawText: null},
[queryID]: {...nextQueryConfig, rawText: null},
})
}
case 'DE_APPLY_FUNCS_TO_FIELD': {
const {queryId, fieldFunc, groupBy} = action.payload
const {queryID, fieldFunc, groupBy} = action.payload
const nextQueryConfig = applyFuncsToField(
state[queryId],
state[queryID],
fieldFunc,
groupBy
)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_CHOOSE_TAG': {
const {queryId, tag} = action.payload
const nextQueryConfig = chooseTag(state[queryId], tag)
const {queryID, tag} = action.payload
const nextQueryConfig = chooseTag(state[queryID], tag)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_GROUP_BY_TAG': {
const {queryId, tagKey} = action.payload
const nextQueryConfig = groupByTag(state[queryId], tagKey)
const {queryID, tagKey} = action.payload
const nextQueryConfig = groupByTag(state[queryID], tagKey)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_FILL': {
const {queryId, value} = action.payload
const nextQueryConfig = fill(state[queryId], value)
const {queryID, value} = action.payload
const nextQueryConfig = fill(state[queryID], value)
return {
...state,
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
}
}
@ -171,6 +172,13 @@ const queryConfigs = (state = {}, action) => {
return {...state, [queryID]: nextQuery}
}
case 'DE_TIME_SHIFT': {
const {queryID, shift} = action.payload
const nextQuery = timeShift(state[queryID], shift)
return {...state, [queryID]: nextQuery}
}
}
return state
}

View File

@ -195,17 +195,21 @@ function parseSeries(series) {
function parseTag(s, obj) {
const match = tag.exec(s)
const kv = match[0]
const key = match[1]
const value = match[2]
if (match) {
const kv = match[0]
const key = match[1]
const value = match[2]
if (key) {
if (!obj.tags) {
obj.tags = {}
if (key) {
if (!obj.tags) {
obj.tags = {}
}
obj.tags[key] = value
}
obj.tags[key] = value
return s.slice(match.index + kv.length)
}
return s.slice(match.index + kv.length)
return ''
}
let workStr = series.slice()

View File

@ -103,7 +103,10 @@ class HostsTable extends Component {
<h2 className="panel-title">
{hostsTitle}
</h2>
<SearchBar onSearch={this.updateSearchTerm} />
<SearchBar
placeholder="Filter by Host..."
onSearch={this.updateSearchTerm}
/>
</div>
<div className="panel-body">
{hostCount > 0 && !hostsError.length

View File

@ -10,8 +10,7 @@ class SearchBar extends Component {
}
componentWillMount() {
const waitPeriod = 300
this.handleSearch = _.debounce(this.handleSearch, waitPeriod)
this.handleSearch = _.debounce(this.handleSearch, 50)
}
handleSearch = () => {
@ -23,12 +22,13 @@ class SearchBar extends Component {
}
render() {
const {placeholder} = this.props
return (
<div className="users__search-widget input-group">
<input
type="text"
className="form-control"
placeholder="Filter by Host..."
placeholder={placeholder}
ref="searchInput"
onChange={this.handleChange}
/>
@ -40,10 +40,11 @@ class SearchBar extends Component {
}
}
const {func} = PropTypes
const {func, string} = PropTypes
SearchBar.propTypes = {
onSearch: func.isRequired,
placeholder: string.isRequired,
}
export default SearchBar

View File

@ -1,4 +1,4 @@
import React, {PropTypes} from 'react'
import React, {PropTypes, Component} from 'react'
import _ from 'lodash'
import HostsTable from 'src/hosts/components/HostsTable'
@ -7,27 +7,16 @@ import SourceIndicator from 'shared/components/SourceIndicator'
import {getCpuAndLoadForHosts, getLayouts, getAppsForHosts} from '../apis'
export const HostsPage = React.createClass({
propTypes: {
source: PropTypes.shape({
id: PropTypes.string.isRequired,
name: PropTypes.string.isRequired,
type: PropTypes.string, // 'influx-enterprise'
links: PropTypes.shape({
proxy: PropTypes.string.isRequired,
}).isRequired,
telegraf: PropTypes.string.isRequired,
}),
addFlashMessage: PropTypes.func,
},
class HostsPage extends Component {
constructor(props) {
super(props)
getInitialState() {
return {
this.state = {
hosts: {},
hostsLoading: true,
hostsError: '',
}
},
}
componentDidMount() {
const {source, addFlashMessage} = this.props
@ -71,7 +60,7 @@ export const HostsPage = React.createClass({
// (like with a bogus proxy link). We should provide better messaging to the user in this catch after that's fixed.
console.error(reason) // eslint-disable-line no-console
})
},
}
render() {
const {source} = this.props
@ -104,7 +93,22 @@ export const HostsPage = React.createClass({
</FancyScrollbar>
</div>
)
},
})
}
}
const {func, shape, string} = PropTypes
HostsPage.propTypes = {
source: shape({
id: string.isRequired,
name: string.isRequired,
type: string, // 'influx-enterprise'
links: shape({
proxy: string.isRequired,
}).isRequired,
telegraf: string.isRequired,
}),
addFlashMessage: func,
}
export default HostsPage

View File

@ -1,63 +1,63 @@
export const chooseNamespace = (queryId, {database, retentionPolicy}) => ({
export const chooseNamespace = (queryID, {database, retentionPolicy}) => ({
type: 'KAPA_CHOOSE_NAMESPACE',
payload: {
queryId,
queryID,
database,
retentionPolicy,
},
})
export const chooseMeasurement = (queryId, measurement) => ({
export const chooseMeasurement = (queryID, measurement) => ({
type: 'KAPA_CHOOSE_MEASUREMENT',
payload: {
queryId,
queryID,
measurement,
},
})
export const chooseTag = (queryId, tag) => ({
export const chooseTag = (queryID, tag) => ({
type: 'KAPA_CHOOSE_TAG',
payload: {
queryId,
queryID,
tag,
},
})
export const groupByTag = (queryId, tagKey) => ({
export const groupByTag = (queryID, tagKey) => ({
type: 'KAPA_GROUP_BY_TAG',
payload: {
queryId,
queryID,
tagKey,
},
})
export const toggleTagAcceptance = queryId => ({
export const toggleTagAcceptance = queryID => ({
type: 'KAPA_TOGGLE_TAG_ACCEPTANCE',
payload: {
queryId,
queryID,
},
})
export const toggleField = (queryId, fieldFunc) => ({
export const toggleField = (queryID, fieldFunc) => ({
type: 'KAPA_TOGGLE_FIELD',
payload: {
queryId,
queryID,
fieldFunc,
},
})
export const applyFuncsToField = (queryId, fieldFunc) => ({
export const applyFuncsToField = (queryID, fieldFunc) => ({
type: 'KAPA_APPLY_FUNCS_TO_FIELD',
payload: {
queryId,
queryID,
fieldFunc,
},
})
export const groupByTime = (queryId, time) => ({
export const groupByTime = (queryID, time) => ({
type: 'KAPA_GROUP_BY_TIME',
payload: {
queryId,
queryID,
time,
},
})
@ -69,3 +69,11 @@ export const removeFuncs = (queryID, fields) => ({
fields,
},
})
// Action creator: apply a relative time shift to the Kapacitor rule
// builder query identified by `queryID`.
export const timeShift = (queryID, shift) => {
  return {
    type: 'KAPA_TIME_SHIFT',
    payload: {queryID, shift},
  }
}

View File

@ -66,7 +66,7 @@ export const getRule = (kapacitor, ruleID) => async dispatch => {
}
}
export function loadDefaultRule() {
export const loadDefaultRule = () => {
return dispatch => {
const queryID = uuid.v4()
dispatch({
@ -88,15 +88,13 @@ export const fetchRules = kapacitor => async dispatch => {
}
}
export function chooseTrigger(ruleID, trigger) {
return {
type: 'CHOOSE_TRIGGER',
payload: {
ruleID,
trigger,
},
}
}
export const chooseTrigger = (ruleID, trigger) => ({
type: 'CHOOSE_TRIGGER',
payload: {
ruleID,
trigger,
},
})
export const addEvery = (ruleID, frequency) => ({
type: 'ADD_EVERY',
@ -113,36 +111,30 @@ export const removeEvery = ruleID => ({
},
})
export function updateRuleValues(ruleID, trigger, values) {
return {
type: 'UPDATE_RULE_VALUES',
payload: {
ruleID,
trigger,
values,
},
}
}
export const updateRuleValues = (ruleID, trigger, values) => ({
type: 'UPDATE_RULE_VALUES',
payload: {
ruleID,
trigger,
values,
},
})
export function updateMessage(ruleID, message) {
return {
type: 'UPDATE_RULE_MESSAGE',
payload: {
ruleID,
message,
},
}
}
export const updateMessage = (ruleID, message) => ({
type: 'UPDATE_RULE_MESSAGE',
payload: {
ruleID,
message,
},
})
export function updateDetails(ruleID, details) {
return {
type: 'UPDATE_RULE_DETAILS',
payload: {
ruleID,
details,
},
}
}
export const updateDetails = (ruleID, details) => ({
type: 'UPDATE_RULE_DETAILS',
payload: {
ruleID,
details,
},
})
export const updateAlertProperty = (ruleID, alertNodeName, alertProperty) => ({
type: 'UPDATE_RULE_ALERT_PROPERTY',
@ -153,87 +145,73 @@ export const updateAlertProperty = (ruleID, alertNodeName, alertProperty) => ({
},
})
export function updateAlerts(ruleID, alerts) {
return {
type: 'UPDATE_RULE_ALERTS',
payload: {
ruleID,
alerts,
},
}
export const updateAlerts = (ruleID, alerts) => ({
type: 'UPDATE_RULE_ALERTS',
payload: {
ruleID,
alerts,
},
})
export const updateAlertNodes = (ruleID, alertNodeName, alertNodesText) => ({
type: 'UPDATE_RULE_ALERT_NODES',
payload: {
ruleID,
alertNodeName,
alertNodesText,
},
})
export const updateRuleName = (ruleID, name) => ({
type: 'UPDATE_RULE_NAME',
payload: {
ruleID,
name,
},
})
export const deleteRuleSuccess = ruleID => ({
type: 'DELETE_RULE_SUCCESS',
payload: {
ruleID,
},
})
export const updateRuleStatusSuccess = (ruleID, status) => ({
type: 'UPDATE_RULE_STATUS_SUCCESS',
payload: {
ruleID,
status,
},
})
export const deleteRule = rule => dispatch => {
deleteRuleAPI(rule)
.then(() => {
dispatch(deleteRuleSuccess(rule.id))
dispatch(
publishNotification('success', `${rule.name} deleted successfully`)
)
})
.catch(() => {
dispatch(
publishNotification('error', `${rule.name} could not be deleted`)
)
})
}
export function updateAlertNodes(ruleID, alertNodeName, alertNodesText) {
return {
type: 'UPDATE_RULE_ALERT_NODES',
payload: {
ruleID,
alertNodeName,
alertNodesText,
},
}
}
export function updateRuleName(ruleID, name) {
return {
type: 'UPDATE_RULE_NAME',
payload: {
ruleID,
name,
},
}
}
export function deleteRuleSuccess(ruleID) {
return {
type: 'DELETE_RULE_SUCCESS',
payload: {
ruleID,
},
}
}
export function updateRuleStatusSuccess(ruleID, status) {
return {
type: 'UPDATE_RULE_STATUS_SUCCESS',
payload: {
ruleID,
status,
},
}
}
export function deleteRule(rule) {
return dispatch => {
deleteRuleAPI(rule)
.then(() => {
dispatch(deleteRuleSuccess(rule.id))
dispatch(
publishNotification('success', `${rule.name} deleted successfully`)
)
})
.catch(() => {
dispatch(
publishNotification('error', `${rule.name} could not be deleted`)
)
})
}
}
export function updateRuleStatus(rule, status) {
return dispatch => {
updateRuleStatusAPI(rule, status)
.then(() => {
dispatch(
publishNotification('success', `${rule.name} ${status} successfully`)
)
})
.catch(() => {
dispatch(
publishNotification('error', `${rule.name} could not be ${status}`)
)
})
}
export const updateRuleStatus = (rule, status) => dispatch => {
updateRuleStatusAPI(rule, status)
.then(() => {
dispatch(
publishNotification('success', `${rule.name} ${status} successfully`)
)
})
.catch(() => {
dispatch(
publishNotification('error', `${rule.name} could not be ${status}`)
)
})
}
export const createTask = (

View File

@ -100,3 +100,41 @@ export const updateTask = async (
throw error
}
}
// Headers shared by the Kapacitor log-stream and ping requests below.
const kapacitorLogHeaders = {
  'Content-Type': 'application/json',
  Accept: 'application/json',
}
// Opens a streaming fetch of Kapacitor's full log feed through the
// Chronograf proxy; returns the fetch Promise for the caller to consume.
export const getLogStream = kapacitor => {
  const url = `${kapacitor.links.proxy}?path=/kapacitor/v1preview/logs`
  return fetch(url, {
    method: 'GET',
    headers: kapacitorLogHeaders,
    credentials: 'include',
  })
}
// Opens a streaming fetch of Kapacitor logs filtered to a single task
// (alert rule) through the Chronograf proxy.
export const getLogStreamByRuleID = (kapacitor, ruleID) => {
  const url = `${kapacitor.links
    .proxy}?path=/kapacitor/v1preview/logs?task=${ruleID}`
  return fetch(url, {
    method: 'GET',
    headers: kapacitorLogHeaders,
    credentials: 'include',
  })
}
// Pings Kapacitor through the proxy and returns the version string it
// reports via the `x-kapacitor-version` response header, or null when
// the header is present but empty. Logs and rethrows on failure.
export const pingKapacitorVersion = async kapacitor => {
  try {
    const {headers} = await AJAX({
      method: 'GET',
      url: `${kapacitor.links.proxy}?path=/kapacitor/v1preview/ping`,
      headers: kapacitorLogHeaders,
      credentials: 'include',
    })
    const version = headers['x-kapacitor-version']
    return version === '' ? null : version
  } catch (error) {
    console.error(error)
    throw error
  }
}

View File

@ -0,0 +1,32 @@
import React, {PropTypes} from 'react'
const LogItemHTTP = ({logItem}) =>
<div className="logs-table--row">
<div className="logs-table--divider">
<div className={`logs-table--level ${logItem.lvl}`} />
<div className="logs-table--timestamp">
{logItem.ts}
</div>
</div>
<div className="logs-table--details">
<div className="logs-table--service">HTTP Request</div>
<div className="logs-table--http">
{logItem.method} {logItem.username}@{logItem.host} ({logItem.duration})
</div>
</div>
</div>
const {shape, string} = PropTypes
LogItemHTTP.propTypes = {
logItem: shape({
lvl: string.isRequired,
ts: string.isRequired,
method: string.isRequired,
username: string.isRequired,
host: string.isRequired,
duration: string.isRequired,
}),
}
export default LogItemHTTP

View File

@ -0,0 +1,32 @@
import React, {PropTypes} from 'react'
const LogItemHTTPError = ({logItem}) =>
<div className="logs-table--row" key={logItem.key}>
<div className="logs-table--divider">
<div className={`logs-table--level ${logItem.lvl}`} />
<div className="logs-table--timestamp">
{logItem.ts}
</div>
</div>
<div className="logs-table--details">
<div className="logs-table--service error">HTTP Server</div>
<div className="logs-table--blah">
<div className="logs-table--key-values error">
ERROR: {logItem.msg}
</div>
</div>
</div>
</div>
const {shape, string} = PropTypes
LogItemHTTPError.propTypes = {
logItem: shape({
key: string.isRequired,
lvl: string.isRequired,
ts: string.isRequired,
msg: string.isRequired,
}),
}
export default LogItemHTTPError

View File

@ -0,0 +1,34 @@
import React, {PropTypes} from 'react'
const LogItemInfluxDBDebug = ({logItem}) =>
<div className="logs-table--row">
<div className="logs-table--divider">
<div className={`logs-table--level ${logItem.lvl}`} />
<div className="logs-table--timestamp">
{logItem.ts}
</div>
</div>
<div className="logs-table--details">
<div className="logs-table--service debug">InfluxDB</div>
<div className="logs-table--blah">
<div className="logs-table--key-values debug">
DEBUG: {logItem.msg}
<br />
Cluster: {logItem.cluster}
</div>
</div>
</div>
</div>
const {shape, string} = PropTypes
LogItemInfluxDBDebug.propTypes = {
logItem: shape({
lvl: string.isRequired,
ts: string.isRequired,
msg: string.isRequired,
cluster: string.isRequired,
}),
}
export default LogItemInfluxDBDebug

View File

@ -0,0 +1,31 @@
import React, {PropTypes} from 'react'
const LogItemKapacitorDebug = ({logItem}) =>
<div className="logs-table--row">
<div className="logs-table--divider">
<div className={`logs-table--level ${logItem.lvl}`} />
<div className="logs-table--timestamp">
{logItem.ts}
</div>
</div>
<div className="logs-table--details">
<div className="logs-table--service debug">Kapacitor</div>
<div className="logs-table--blah">
<div className="logs-table--key-values debug">
DEBUG: {logItem.msg}
</div>
</div>
</div>
</div>
const {shape, string} = PropTypes
LogItemKapacitorDebug.propTypes = {
logItem: shape({
lvl: string.isRequired,
ts: string.isRequired,
msg: string.isRequired,
}),
}
export default LogItemKapacitorDebug

View File

@ -0,0 +1,31 @@
import React, {PropTypes} from 'react'
const LogItemKapacitorError = ({logItem}) =>
<div className="logs-table--row">
<div className="logs-table--divider">
<div className={`logs-table--level ${logItem.lvl}`} />
<div className="logs-table--timestamp">
{logItem.ts}
</div>
</div>
<div className="logs-table--details">
<div className="logs-table--service error">Kapacitor</div>
<div className="logs-table--blah">
<div className="logs-table--key-values error">
ERROR: {logItem.msg}
</div>
</div>
</div>
</div>
const {shape, string} = PropTypes
LogItemKapacitorError.propTypes = {
logItem: shape({
lvl: string.isRequired,
ts: string.isRequired,
msg: string.isRequired,
}),
}
export default LogItemKapacitorError

View File

@ -0,0 +1,51 @@
import React, {PropTypes} from 'react'
const renderKeysAndValues = object => {
if (!object) {
return <span className="logs-table--empty-cell">--</span>
}
const objKeys = Object.keys(object)
const objValues = Object.values(object)
const objElements = objKeys.map((objKey, i) =>
<div key={i} className="logs-table--key-value">
{objKey}: <span>{objValues[i]}</span>
</div>
)
return objElements
}
const LogItemKapacitorPoint = ({logItem}) =>
<div className="logs-table--row">
<div className="logs-table--divider">
<div className={`logs-table--level ${logItem.lvl}`} />
<div className="logs-table--timestamp">
{logItem.ts}
</div>
</div>
<div className="logs-table--details">
<div className="logs-table--service">Kapacitor Point</div>
<div className="logs-table--blah">
<div className="logs-table--key-values">
TAGS<br />
{renderKeysAndValues(logItem.tag)}
</div>
<div className="logs-table--key-values">
FIELDS<br />
{renderKeysAndValues(logItem.field)}
</div>
</div>
</div>
</div>
const {shape, string} = PropTypes
LogItemKapacitorPoint.propTypes = {
logItem: shape({
lvl: string.isRequired,
ts: string.isRequired,
tag: shape.isRequired,
field: shape.isRequired,
}),
}
export default LogItemKapacitorPoint

View File

@ -0,0 +1,28 @@
import React, {PropTypes} from 'react'
const LogItemSession = ({logItem}) =>
<div className="logs-table--row">
<div className="logs-table--divider">
<div className={`logs-table--level ${logItem.lvl}`} />
<div className="logs-table--timestamp">
{logItem.ts}
</div>
</div>
<div className="logs-table--details">
<div className="logs-table--session">
{logItem.msg}
</div>
</div>
</div>
const {shape, string} = PropTypes
LogItemSession.propTypes = {
logItem: shape({
lvl: string.isRequired,
ts: string.isRequired,
msg: string.isRequired,
}),
}
export default LogItemSession

View File

@ -0,0 +1,38 @@
import React, {PropTypes} from 'react'
import FancyScrollbar from 'shared/components/FancyScrollbar'
import LogsTableRow from 'src/kapacitor/components/LogsTableRow'
const LogsTable = ({logs}) =>
<div className="logs-table--container">
<div className="logs-table--header">
<h2 className="panel-title">Logs</h2>
</div>
<FancyScrollbar
className="logs-table--panel fancy-scroll--kapacitor"
autoHide={false}
>
<div className="logs-table">
{logs.length
? logs.map((log, i) =>
<LogsTableRow key={log.key} logItem={log} index={i} />
)
: <div className="page-spinner" />}
</div>
</FancyScrollbar>
</div>
const {arrayOf, shape, string} = PropTypes
LogsTable.propTypes = {
logs: arrayOf(
shape({
key: string.isRequired,
ts: string.isRequired,
lvl: string.isRequired,
msg: string.isRequired,
})
).isRequired,
}
export default LogsTable

View File

@ -0,0 +1,68 @@
import React, {PropTypes} from 'react'
import LogItemSession from 'src/kapacitor/components/LogItemSession'
import LogItemHTTP from 'src/kapacitor/components/LogItemHTTP'
import LogItemHTTPError from 'src/kapacitor/components/LogItemHTTPError'
import LogItemKapacitorPoint from 'src/kapacitor/components/LogItemKapacitorPoint'
import LogItemKapacitorError from 'src/kapacitor/components/LogItemKapacitorError'
import LogItemKapacitorDebug from 'src/kapacitor/components/LogItemKapacitorDebug'
import LogItemInfluxDBDebug from 'src/kapacitor/components/LogItemInfluxDBDebug'
const LogsTableRow = ({logItem, index}) => {
if (logItem.service === 'sessions') {
return <LogItemSession logItem={logItem} key={index} />
}
if (logItem.service === 'http' && logItem.msg === 'http request') {
return <LogItemHTTP logItem={logItem} key={index} />
}
if (logItem.service === 'kapacitor' && logItem.msg === 'point') {
return <LogItemKapacitorPoint logItem={logItem} key={index} />
}
if (logItem.service === 'httpd_server_errors' && logItem.lvl === 'error') {
return <LogItemHTTPError logItem={logItem} key={index} />
}
if (logItem.service === 'kapacitor' && logItem.lvl === 'error') {
return <LogItemKapacitorError logItem={logItem} key={index} />
}
if (logItem.service === 'kapacitor' && logItem.lvl === 'debug') {
return <LogItemKapacitorDebug logItem={logItem} key={index} />
}
if (logItem.service === 'influxdb' && logItem.lvl === 'debug') {
return <LogItemInfluxDBDebug logItem={logItem} key={index} />
}
return (
<div className="logs-table--row" key={index}>
<div className="logs-table--divider">
<div className={`logs-table--level ${logItem.lvl}`} />
<div className="logs-table--timestamp">
{logItem.ts}
</div>
</div>
<div className="logs-table--details">
<div className="logs-table--service">
{logItem.service || '--'}
</div>
<div className="logs-table--blah">
<div className="logs-table--key-values">
{logItem.msg || '--'}
</div>
</div>
</div>
</div>
)
}
const {number, shape, string} = PropTypes
LogsTableRow.propTypes = {
logItem: shape({
key: string.isRequired,
ts: string.isRequired,
lvl: string.isRequired,
msg: string.isRequired,
}).isRequired,
index: number,
}
export default LogsTableRow

View File

@ -0,0 +1,26 @@
import React, {PropTypes} from 'react'
const LogsToggle = ({areLogsVisible, onToggleLogsVisbility}) =>
<ul className="nav nav-tablist nav-tablist-sm nav-tablist-malachite logs-toggle">
<li
className={areLogsVisible ? null : 'active'}
onClick={onToggleLogsVisbility}
>
Editor
</li>
<li
className={areLogsVisible ? 'active' : null}
onClick={onToggleLogsVisbility}
>
Editor + Logs
</li>
</ul>
const {bool, func} = PropTypes
LogsToggle.propTypes = {
areLogsVisible: bool,
onToggleLogsVisbility: func.isRequired,
}
export default LogsToggle

View File

@ -2,56 +2,63 @@ import React, {PropTypes} from 'react'
import TickscriptHeader from 'src/kapacitor/components/TickscriptHeader'
import TickscriptEditor from 'src/kapacitor/components/TickscriptEditor'
import TickscriptEditorControls from 'src/kapacitor/components/TickscriptEditorControls'
import TickscriptEditorConsole from 'src/kapacitor/components/TickscriptEditorConsole'
import LogsTable from 'src/kapacitor/components/LogsTable'
const Tickscript = ({
source,
onSave,
task,
logs,
validation,
onSelectDbrps,
onChangeScript,
onChangeType,
onChangeID,
isNewTickscript,
areLogsVisible,
areLogsEnabled,
onToggleLogsVisbility,
}) =>
<div className="page">
<TickscriptHeader
task={task}
source={source}
onSave={onSave}
onChangeID={onChangeID}
onChangeType={onChangeType}
onSelectDbrps={onSelectDbrps}
areLogsVisible={areLogsVisible}
areLogsEnabled={areLogsEnabled}
onToggleLogsVisbility={onToggleLogsVisbility}
isNewTickscript={isNewTickscript}
/>
<div className="page-contents">
<div className="tickscript-console">
<div className="tickscript-console--output">
{validation
? <p>
{validation}
</p>
: <p className="tickscript-console--default">
Save your TICKscript to validate it
</p>}
</div>
</div>
<div className="tickscript-editor">
<div className="page-contents--split">
<div className="tickscript">
<TickscriptEditorControls
isNewTickscript={isNewTickscript}
onSelectDbrps={onSelectDbrps}
onChangeType={onChangeType}
onChangeID={onChangeID}
task={task}
/>
<TickscriptEditorConsole validation={validation} />
<TickscriptEditor
script={task.tickscript}
onChangeScript={onChangeScript}
/>
</div>
{areLogsVisible ? <LogsTable logs={logs} /> : null}
</div>
</div>
const {arrayOf, bool, func, shape, string} = PropTypes
Tickscript.propTypes = {
logs: arrayOf(shape()).isRequired,
onSave: func.isRequired,
source: shape({
id: string,
}),
areLogsVisible: bool,
areLogsEnabled: bool,
onToggleLogsVisbility: func.isRequired,
task: shape({
id: string,
script: string,

View File

@ -21,7 +21,13 @@ class TickscriptEditor extends Component {
}
return (
<CodeMirror value={script} onChange={this.updateCode} options={options} />
<div className="tickscript-editor">
<CodeMirror
value={script}
onChange={this.updateCode}
options={options}
/>
</div>
)
}
}

View File

@ -0,0 +1,22 @@
import React, {PropTypes} from 'react'
const TickscriptEditorConsole = ({validation}) =>
<div className="tickscript-console">
<div className="tickscript-console--output">
{validation
? <p>
{validation}
</p>
: <p className="tickscript-console--default">
Save your TICKscript to validate it
</p>}
</div>
</div>
const {string} = PropTypes
TickscriptEditorConsole.propTypes = {
validation: string,
}
export default TickscriptEditorConsole

View File

@ -0,0 +1,44 @@
import React, {PropTypes} from 'react'
import TickscriptType from 'src/kapacitor/components/TickscriptType'
import MultiSelectDBDropdown from 'shared/components/MultiSelectDBDropdown'
import TickscriptID, {
TickscriptStaticID,
} from 'src/kapacitor/components/TickscriptID'
const addName = list => list.map(l => ({...l, name: `${l.db}.${l.rp}`}))
const TickscriptEditorControls = ({
isNewTickscript,
onSelectDbrps,
onChangeType,
onChangeID,
task,
}) =>
<div className="tickscript-controls">
{isNewTickscript
? <TickscriptID onChangeID={onChangeID} id={task.id} />
: <TickscriptStaticID id={task.name} />}
<div className="tickscript-controls--right">
<TickscriptType type={task.type} onChangeType={onChangeType} />
<MultiSelectDBDropdown
selectedItems={addName(task.dbrps)}
onApply={onSelectDbrps}
/>
</div>
</div>
const {arrayOf, bool, func, shape, string} = PropTypes
TickscriptEditorControls.propTypes = {
isNewTickscript: bool.isRequired,
onSelectDbrps: func.isRequired,
onChangeType: func.isRequired,
onChangeID: func.isRequired,
task: shape({
id: string,
script: string,
dbsrps: arrayOf(shape()),
}).isRequired,
}
export default TickscriptEditorControls

View File

@ -1,52 +1,36 @@
import React, {PropTypes} from 'react'
import {Link} from 'react-router'
import SourceIndicator from 'shared/components/SourceIndicator'
import TickscriptType from 'src/kapacitor/components/TickscriptType'
import MultiSelectDBDropdown from 'shared/components/MultiSelectDBDropdown'
import TickscriptID, {
TickscriptStaticID,
} from 'src/kapacitor/components/TickscriptID'
const addName = list => list.map(l => ({...l, name: `${l.db}.${l.rp}`}))
import LogsToggle from 'src/kapacitor/components/LogsToggle'
const TickscriptHeader = ({
task: {id, type, dbrps},
task,
source,
task: {id},
onSave,
onChangeType,
onChangeID,
onSelectDbrps,
areLogsVisible,
areLogsEnabled,
isNewTickscript,
onToggleLogsVisbility,
}) =>
<div className="page-header">
<div className="page-header full-width">
<div className="page-header__container">
<div className="page-header__left">
{isNewTickscript
? <TickscriptID onChangeID={onChangeID} id={id} />
: <TickscriptStaticID id={task.name} />}
<h1 className="page-header__title">TICKscript Editor</h1>
</div>
{areLogsEnabled &&
<LogsToggle
areLogsVisible={areLogsVisible}
areLogsEnabled={areLogsEnabled}
onToggleLogsVisbility={onToggleLogsVisbility}
/>}
<div className="page-header__right">
<SourceIndicator />
<TickscriptType type={type} onChangeType={onChangeType} />
<MultiSelectDBDropdown
selectedItems={addName(dbrps)}
onApply={onSelectDbrps}
/>
<Link
className="btn btn-sm btn-default"
to={`/sources/${source.id}/alert-rules`}
>
Cancel
</Link>
<button
className="btn btn-success btn-sm"
title={id ? '' : 'ID your TICKscript to save'}
onClick={onSave}
disabled={!id}
>
Save Rule
{isNewTickscript ? 'Save New TICKscript' : 'Save TICKscript'}
</button>
</div>
</div>
@ -55,11 +39,11 @@ const TickscriptHeader = ({
const {arrayOf, bool, func, shape, string} = PropTypes
TickscriptHeader.propTypes = {
isNewTickscript: bool,
onSave: func,
source: shape({
id: string,
}),
onSelectDbrps: func.isRequired,
areLogsVisible: bool,
areLogsEnabled: bool,
onToggleLogsVisbility: func.isRequired,
task: shape({
dbrps: arrayOf(
shape({
@ -68,9 +52,6 @@ TickscriptHeader.propTypes = {
})
),
}),
onChangeType: func.isRequired,
onChangeID: func.isRequired,
isNewTickscript: bool.isRequired,
}
export default TickscriptHeader

View File

@ -10,7 +10,7 @@ class TickscriptID extends Component {
return (
<input
className="page-header--editing kapacitor-theme"
className="form-control input-sm form-malachite"
autoFocus={true}
value={id}
onChange={onChangeID}
@ -23,10 +23,7 @@ class TickscriptID extends Component {
}
export const TickscriptStaticID = ({id}) =>
<h1
className="page-header--editing kapacitor-theme"
style={{display: 'flex', justifyContent: 'baseline'}}
>
<h1 className="tickscript-controls--name">
{id}
</h1>

View File

@ -61,13 +61,13 @@ class KapacitorRulePage extends Component {
render() {
const {
rules,
queryConfigs,
params,
ruleActions,
source,
queryConfigActions,
addFlashMessage,
router,
ruleActions,
queryConfigs,
addFlashMessage,
queryConfigActions,
} = this.props
const {enabledAlerts, kapacitor} = this.state
const rule = this.isEditing()
@ -80,17 +80,17 @@ class KapacitorRulePage extends Component {
}
return (
<KapacitorRule
source={source}
rule={rule}
query={query}
queryConfigs={queryConfigs}
queryConfigActions={queryConfigActions}
ruleActions={ruleActions}
addFlashMessage={addFlashMessage}
enabledAlerts={enabledAlerts}
isEditing={this.isEditing()}
router={router}
source={source}
kapacitor={kapacitor}
ruleActions={ruleActions}
queryConfigs={queryConfigs}
isEditing={this.isEditing()}
enabledAlerts={enabledAlerts}
addFlashMessage={addFlashMessage}
queryConfigActions={queryConfigActions}
/>
)
}

View File

@ -1,11 +1,14 @@
import React, {PropTypes, Component} from 'react'
import {connect} from 'react-redux'
import {bindActionCreators} from 'redux'
import uuid from 'node-uuid'
import Tickscript from 'src/kapacitor/components/Tickscript'
import * as kapactiorActionCreators from 'src/kapacitor/actions/view'
import * as errorActionCreators from 'shared/actions/errors'
import {getActiveKapacitor} from 'src/shared/apis'
import {getLogStreamByRuleID, pingKapacitorVersion} from 'src/kapacitor/apis'
import {publishNotification} from 'shared/actions/notifications'
class TickscriptPage extends Component {
constructor(props) {
@ -23,6 +26,96 @@ class TickscriptPage extends Component {
},
validation: '',
isEditingID: true,
logs: [],
areLogsEnabled: false,
failStr: '',
}
}
fetchChunkedLogs = async (kapacitor, ruleID) => {
const {notify} = this.props
try {
const version = await pingKapacitorVersion(kapacitor)
if (version && parseInt(version.split('.')[1], 10) < 4) {
this.setState({
areLogsEnabled: false,
})
notify(
'warning',
'Could not use logging, requires Kapacitor version 1.4'
)
return
}
if (this.state.logs.length === 0) {
this.setState({
areLogsEnabled: true,
logs: [
{
id: uuid.v4(),
key: uuid.v4(),
lvl: 'info',
msg: 'created log session',
service: 'sessions',
tags: 'nil',
ts: new Date().toISOString(),
},
],
})
}
const response = await getLogStreamByRuleID(kapacitor, ruleID)
const reader = await response.body.getReader()
const decoder = new TextDecoder()
let result
while (this.state.areLogsEnabled === true && !(result && result.done)) {
result = await reader.read()
const chunk = decoder.decode(result.value || new Uint8Array(), {
stream: !result.done,
})
const json = chunk.split('\n')
let logs = []
let failStr = this.state.failStr
try {
for (let objStr of json) {
objStr = failStr + objStr
failStr = objStr
const jsonStr = `[${objStr.split('}{').join('},{')}]`
logs = [
...logs,
...JSON.parse(jsonStr).map(log => ({
...log,
key: uuid.v4(),
})),
]
failStr = ''
}
this.setState({
logs: [...this.state.logs, ...logs],
failStr,
})
} catch (err) {
console.warn(err, failStr)
this.setState({
logs: [...this.state.logs, ...logs],
failStr,
})
}
}
} catch (error) {
console.error(error)
notify('error', error)
throw error
}
}
@ -50,9 +143,17 @@ class TickscriptPage extends Component {
this.setState({task: {tickscript, dbrps, type, status, name, id}})
}
this.fetchChunkedLogs(kapacitor, ruleID)
this.setState({kapacitor})
}
componentWillUnmount() {
this.setState({
areLogsEnabled: false,
})
}
handleSave = async () => {
const {kapacitor, task} = this.state
const {
@ -96,13 +197,18 @@ class TickscriptPage extends Component {
this.setState({task: {...this.state.task, id: e.target.value}})
}
handleToggleLogsVisbility = () => {
this.setState({areLogsVisible: !this.state.areLogsVisible})
}
render() {
const {source} = this.props
const {task, validation} = this.state
const {task, validation, logs, areLogsVisible, areLogsEnabled} = this.state
return (
<Tickscript
task={task}
logs={logs}
source={source}
validation={validation}
onSave={this.handleSave}
@ -111,6 +217,9 @@ class TickscriptPage extends Component {
onChangeScript={this.handleChangeScript}
onChangeType={this.handleChangeType}
onChangeID={this.handleChangeID}
areLogsVisible={areLogsVisible}
areLogsEnabled={areLogsEnabled}
onToggleLogsVisbility={this.handleToggleLogsVisbility}
/>
)
}
@ -142,6 +251,7 @@ TickscriptPage.propTypes = {
ruleID: string,
}).isRequired,
rules: arrayOf(shape()),
notify: func.isRequired,
}
const mapStateToProps = state => {
@ -153,6 +263,7 @@ const mapStateToProps = state => {
const mapDispatchToProps = dispatch => ({
kapacitorActions: bindActionCreators(kapactiorActionCreators, dispatch),
errorActions: bindActionCreators(errorActionCreators, dispatch),
notify: bindActionCreators(publishNotification, dispatch),
})
export default connect(mapStateToProps, mapDispatchToProps)(TickscriptPage)

View File

@ -1,13 +1,14 @@
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
applyFuncsToField,
chooseMeasurement,
chooseNamespace,
timeShift,
chooseTag,
groupByTag,
groupByTime,
removeFuncs,
chooseNamespace,
toggleKapaField,
applyFuncsToField,
chooseMeasurement,
toggleTagAcceptance,
} from 'src/utils/queryTransitions'
@ -34,9 +35,9 @@ const queryConfigs = (state = {}, action) => {
}
case 'KAPA_CHOOSE_NAMESPACE': {
const {queryId, database, retentionPolicy} = action.payload
const {queryID, database, retentionPolicy} = action.payload
const nextQueryConfig = chooseNamespace(
state[queryId],
state[queryID],
{
database,
retentionPolicy,
@ -45,75 +46,75 @@ const queryConfigs = (state = {}, action) => {
)
return Object.assign({}, state, {
[queryId]: Object.assign(nextQueryConfig, {rawText: null}),
[queryID]: Object.assign(nextQueryConfig, {rawText: null}),
})
}
case 'KAPA_CHOOSE_MEASUREMENT': {
const {queryId, measurement} = action.payload
const {queryID, measurement} = action.payload
const nextQueryConfig = chooseMeasurement(
state[queryId],
state[queryID],
measurement,
IS_KAPACITOR_RULE
)
return Object.assign({}, state, {
[queryId]: Object.assign(nextQueryConfig, {
rawText: state[queryId].rawText,
[queryID]: Object.assign(nextQueryConfig, {
rawText: state[queryID].rawText,
}),
})
}
case 'KAPA_CHOOSE_TAG': {
const {queryId, tag} = action.payload
const nextQueryConfig = chooseTag(state[queryId], tag)
const {queryID, tag} = action.payload
const nextQueryConfig = chooseTag(state[queryID], tag)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'KAPA_GROUP_BY_TAG': {
const {queryId, tagKey} = action.payload
const nextQueryConfig = groupByTag(state[queryId], tagKey)
const {queryID, tagKey} = action.payload
const nextQueryConfig = groupByTag(state[queryID], tagKey)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'KAPA_TOGGLE_TAG_ACCEPTANCE': {
const {queryId} = action.payload
const nextQueryConfig = toggleTagAcceptance(state[queryId])
const {queryID} = action.payload
const nextQueryConfig = toggleTagAcceptance(state[queryID])
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'KAPA_TOGGLE_FIELD': {
const {queryId, fieldFunc} = action.payload
const nextQueryConfig = toggleKapaField(state[queryId], fieldFunc)
const {queryID, fieldFunc} = action.payload
const nextQueryConfig = toggleKapaField(state[queryID], fieldFunc)
return {...state, [queryId]: {...nextQueryConfig, rawText: null}}
return {...state, [queryID]: {...nextQueryConfig, rawText: null}}
}
case 'KAPA_APPLY_FUNCS_TO_FIELD': {
const {queryId, fieldFunc} = action.payload
const {groupBy} = state[queryId]
const nextQueryConfig = applyFuncsToField(state[queryId], fieldFunc, {
const {queryID, fieldFunc} = action.payload
const {groupBy} = state[queryID]
const nextQueryConfig = applyFuncsToField(state[queryID], fieldFunc, {
...groupBy,
time: groupBy.time ? groupBy.time : '10s',
})
return {...state, [queryId]: nextQueryConfig}
return {...state, [queryID]: nextQueryConfig}
}
case 'KAPA_GROUP_BY_TIME': {
const {queryId, time} = action.payload
const nextQueryConfig = groupByTime(state[queryId], time)
const {queryID, time} = action.payload
const nextQueryConfig = groupByTime(state[queryID], time)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
@ -124,6 +125,13 @@ const queryConfigs = (state = {}, action) => {
// fields with no functions cannot have a group by time
return {...state, [queryID]: nextQuery}
}
case 'KAPA_TIME_SHIFT': {
const {queryID, shift} = action.payload
const nextQuery = timeShift(state[queryID], shift)
return {...state, [queryID]: nextQuery}
}
}
return state
}

View File

@ -24,8 +24,8 @@ export function showQueries(source, db) {
return proxy({source, query, db})
}
export function killQuery(source, queryId) {
const query = `KILL QUERY ${queryId}`
export function killQuery(source, queryID) {
const query = `KILL QUERY ${queryID}`
return proxy({source, query})
}

View File

@ -81,20 +81,35 @@ const AutoRefresh = ComposedComponent => {
const templatesWithResolution = templates.map(temp => {
if (temp.tempVar === ':interval:') {
if (resolution) {
return {...temp, resolution}
return {
...temp,
values: temp.values.map(
v => (temp.type === 'resolution' ? {...v, resolution} : v)
),
}
}
return {
...temp,
values: [
...temp.values,
{value: '1000', type: 'resolution', selected: true},
],
}
return {...temp, resolution: 1000}
}
return {...temp}
return temp
})
const tempVars = removeUnselectedTemplateValues(templatesWithResolution)
return fetchTimeSeriesAsync(
{
source: host,
db: database,
rp,
query,
tempVars: removeUnselectedTemplateValues(templatesWithResolution),
tempVars,
resolution,
},
editQueryStatus

View File

@ -45,12 +45,20 @@ const DatabaseList = React.createClass({
this.getDbRp()
},
componentDidUpdate(prevProps) {
if (_.isEqual(prevProps.querySource, this.props.querySource)) {
componentDidUpdate({querySource: prevSource, query: prevQuery}) {
const {querySource: nextSource, query: nextQuery} = this.props
const differentSource = !_.isEqual(prevSource, nextSource)
if (prevQuery.rawText === nextQuery.rawText) {
return
}
this.getDbRp()
const newMetaQuery =
nextQuery.rawText && nextQuery.rawText.match(/^(create|drop)/i)
if (differentSource || newMetaQuery) {
setTimeout(this.getDbRp, 100)
}
},
getDbRp() {

View File

@ -4,15 +4,24 @@ import classnames from 'classnames'
import OnClickOutside from 'shared/components/OnClickOutside'
import ConfirmButtons from 'shared/components/ConfirmButtons'
const DeleteButton = ({onClickDelete, buttonSize, text, disabled}) =>
const DeleteButton = ({
onClickDelete,
buttonSize,
icon,
square,
text,
disabled,
}) =>
<button
className={classnames('btn btn-danger table--show-on-row-hover', {
[buttonSize]: buttonSize,
'btn-square': square,
disabled,
})}
onClick={onClickDelete}
>
{text}
{icon ? <span className={`icon ${icon}`} /> : null}
{square ? null : text}
</button>
class DeleteConfirmButtons extends Component {
@ -38,9 +47,23 @@ class DeleteConfirmButtons extends Component {
}
render() {
const {onDelete, item, buttonSize, text, disabled} = this.props
const {
onDelete,
item,
buttonSize,
icon,
square,
text,
disabled,
} = this.props
const {isConfirming} = this.state
if (square && !icon) {
console.error(
'DeleteButton component requires both icon if passing in square.'
)
}
return isConfirming
? <ConfirmButtons
onConfirm={onDelete}
@ -52,6 +75,8 @@ class DeleteConfirmButtons extends Component {
text={text}
onClickDelete={disabled ? () => {} : this.handleClickDelete}
buttonSize={buttonSize}
icon={icon}
square={square}
disabled={disabled}
/>
}
@ -60,10 +85,12 @@ class DeleteConfirmButtons extends Component {
const {bool, func, oneOfType, shape, string} = PropTypes
DeleteButton.propTypes = {
text: string.isRequired,
onClickDelete: func.isRequired,
buttonSize: string,
icon: string,
square: bool,
disabled: bool,
text: string.isRequired,
}
DeleteButton.defaultProps = {
@ -75,6 +102,8 @@ DeleteConfirmButtons.propTypes = {
item: oneOfType([(string, shape())]),
onDelete: func.isRequired,
buttonSize: string,
square: bool,
icon: string,
disabled: bool,
}

View File

@ -355,7 +355,8 @@ export default class Dygraph extends Component {
}
highlightCallback = ({pageX}) => {
this.setState({isHidden: false, pageX})
this.pageX = pageX
this.setState({isHidden: false})
}
legendFormatter = legend => {
@ -381,7 +382,6 @@ export default class Dygraph extends Component {
render() {
const {
legend,
pageX,
sortType,
isHidden,
isSnipped,
@ -396,7 +396,7 @@ export default class Dygraph extends Component {
{...legend}
graph={this.graphRef}
legend={this.legendRef}
pageX={pageX}
pageX={this.pageX}
sortType={sortType}
onHide={this.handleHideLegend}
isHidden={isHidden}

View File

@ -1,9 +1,8 @@
import React, {PropTypes, Component} from 'react'
import _ from 'lodash'
import QueryOptions from 'shared/components/QueryOptions'
import FieldListItem from 'src/data_explorer/components/FieldListItem'
import GroupByTimeDropdown from 'src/data_explorer/components/GroupByTimeDropdown'
import FillQuery from 'shared/components/FillQuery'
import FancyScrollbar from 'shared/components/FancyScrollbar'
import {showFieldKeys} from 'shared/apis/metaQuery'
@ -107,6 +106,10 @@ class FieldList extends Component {
applyFuncsToField(fieldFunc, groupBy)
}
handleTimeShift = shift => {
this.props.onTimeShift(shift)
}
_getFields = () => {
const {database, measurement, retentionPolicy} = this.props.query
const {source} = this.context
@ -129,12 +132,11 @@ class FieldList extends Component {
render() {
const {
query: {database, measurement, fields = [], groupBy, fill},
query: {database, measurement, fields = [], groupBy, fill, shifts},
isKapacitorRule,
} = this.props
const hasAggregates = numFunctions(fields) > 0
const hasGroupByTime = groupBy.time
const noDBorMeas = !database || !measurement
return (
@ -142,16 +144,15 @@ class FieldList extends Component {
<div className="query-builder--heading">
<span>Fields</span>
{hasAggregates
? <div className="query-builder--groupby-fill-container">
<GroupByTimeDropdown
isOpen={!hasGroupByTime}
selected={groupBy.time}
onChooseGroupByTime={this.handleGroupByTime}
/>
{isKapacitorRule
? null
: <FillQuery value={fill} onChooseFill={this.handleFill} />}
</div>
? <QueryOptions
fill={fill}
shift={_.first(shifts)}
groupBy={groupBy}
onFill={this.handleFill}
isKapacitorRule={isKapacitorRule}
onTimeShift={this.handleTimeShift}
onGroupByTime={this.handleGroupByTime}
/>
: null}
</div>
{noDBorMeas
@ -192,7 +193,7 @@ class FieldList extends Component {
}
}
const {bool, func, shape, string} = PropTypes
const {arrayOf, bool, func, shape, string} = PropTypes
FieldList.defaultProps = {
isKapacitorRule: false,
@ -212,7 +213,15 @@ FieldList.propTypes = {
database: string,
retentionPolicy: string,
measurement: string,
shifts: arrayOf(
shape({
label: string,
unit: string,
quantity: string,
})
),
}).isRequired,
onTimeShift: func,
onToggleField: func.isRequired,
onGroupByTime: func.isRequired,
onFill: func,

View File

@ -2,7 +2,7 @@ import React, {Component, PropTypes} from 'react'
import WidgetCell from 'shared/components/WidgetCell'
import LayoutCell from 'shared/components/LayoutCell'
import RefreshingGraph from 'shared/components/RefreshingGraph'
import {buildQueriesForLayouts} from 'utils/influxql'
import {buildQueriesForLayouts} from 'utils/buildQueriesForLayouts'
import _ from 'lodash'

View File

@ -17,12 +17,8 @@ class LineGraph extends Component {
}
componentWillMount() {
const {data, activeQueryIndex, isInDataExplorer} = this.props
this._timeSeries = timeSeriesToDygraph(
data,
activeQueryIndex,
isInDataExplorer
)
const {data, isInDataExplorer} = this.props
this._timeSeries = timeSeriesToDygraph(data, isInDataExplorer)
}
componentWillUpdate(nextProps) {
@ -33,7 +29,6 @@ class LineGraph extends Component {
) {
this._timeSeries = timeSeriesToDygraph(
nextProps.data,
nextProps.activeQueryIndex,
nextProps.isInDataExplorer
)
}

View File

@ -0,0 +1,45 @@
import React, {PropTypes} from 'react'
import GroupByTimeDropdown from 'src/data_explorer/components/GroupByTimeDropdown'
import TimeShiftDropdown from 'src/shared/components/TimeShiftDropdown'
import FillQuery from 'shared/components/FillQuery'
const QueryOptions = ({
fill,
shift,
onFill,
groupBy,
onTimeShift,
onGroupByTime,
isKapacitorRule,
}) =>
<div className="query-builder--groupby-fill-container">
<GroupByTimeDropdown
selected={groupBy.time}
onChooseGroupByTime={onGroupByTime}
/>
{isKapacitorRule
? null
: <TimeShiftDropdown
selected={shift && shift.label}
onChooseTimeShift={onTimeShift}
/>}
{isKapacitorRule ? null : <FillQuery value={fill} onChooseFill={onFill} />}
</div>
const {bool, func, shape, string} = PropTypes
QueryOptions.propTypes = {
fill: string,
onFill: func.isRequired,
groupBy: shape({
time: string,
}).isRequired,
shift: shape({
label: string,
}),
onGroupByTime: func.isRequired,
isKapacitorRule: bool.isRequired,
onTimeShift: func.isRequired,
}
export default QueryOptions

View File

@ -97,7 +97,7 @@ class ResizeContainer extends Component {
render() {
const {bottomHeightPixels, topHeight, bottomHeight, isDragging} = this.state
const {containerClass, children} = this.props
const {containerClass, children, theme} = this.props
if (React.Children.count(children) > maximumNumChildren) {
console.error(
@ -122,6 +122,7 @@ class ResizeContainer extends Component {
})}
</div>
<ResizeHandle
theme={theme}
isDragging={isDragging}
onHandleStartDrag={this.handleStartDrag}
top={topHeight}
@ -149,6 +150,7 @@ ResizeContainer.propTypes = {
minBottomHeight: number,
initialTopHeight: string,
initialBottomHeight: string,
theme: string,
}
export default ResizeContainer

Some files were not shown because too many files have changed in this diff Show More