diff --git a/CHANGELOG.md b/CHANGELOG.md
index 191ae6f28..10f0d5365 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,6 @@
## v1.3.11.0 [unreleased]
### Bug Fixes
+1. [#2449](https://github.com/influxdata/chronograf/pull/2449): Fix .jsdep step fails when LDFLAGS is exported
1. [#2157](https://github.com/influxdata/chronograf/pull/2157): Fix logscale producing console errors when only one point in graph
1. [#2158](https://github.com/influxdata/chronograf/pull/2158): Fix 'Cannot connect to source' false error flag on Dashboard page
1. [#2167](https://github.com/influxdata/chronograf/pull/2167): Add fractions of seconds to time field in csv export
@@ -8,9 +10,32 @@
1. [#2291](https://github.com/influxdata/chronograf/pull/2291): Fix several kapacitor alert creation panics.
1. [#2303](https://github.com/influxdata/chronograf/pull/2303): Add shadow-utils to RPM release packages
1. [#2292](https://github.com/influxdata/chronograf/pull/2292): Source extra command line options from defaults file
+1. [#2327](https://github.com/influxdata/chronograf/pull/2327): After CREATE/DELETE queries, refresh list of databases in Data Explorer
+1. [#2327](https://github.com/influxdata/chronograf/pull/2327): Visualize CREATE/DELETE queries with Table view in Data Explorer
1. [#2329](https://github.com/influxdata/chronograf/pull/2329): Include tag values alongside measurement name in Data Explorer result tabs
+1. [#2386](https://github.com/influxdata/chronograf/pull/2386): Fix queries that include regex, numbers and wildcard
+1. [#2398](https://github.com/influxdata/chronograf/pull/2398): Fix apps on hosts page from parsing tags with null values
+1. [#2408](https://github.com/influxdata/chronograf/pull/2408): Fix updated Dashboard names not updating dashboard list
+1. [#2444](https://github.com/influxdata/chronograf/pull/2444): Fix create dashboard button
+1. [#2416](https://github.com/influxdata/chronograf/pull/2416): Fix default y-axis labels not displaying properly
+1. [#2423](https://github.com/influxdata/chronograf/pull/2423): Gracefully scale Template Variables Manager overlay on smaller displays
+1. [#2426](https://github.com/influxdata/chronograf/pull/2426): Fix Influx Enterprise users from deletion in race condition
+1. [#2467](https://github.com/influxdata/chronograf/pull/2467): Fix oauth2 logout link not having basepath
+1. [#2466](https://github.com/influxdata/chronograf/pull/2466): Fix supplying a role link to sources that do not have a metaURL
+1. [#2477](https://github.com/influxdata/chronograf/pull/2477): Fix hoverline intermittently not rendering
+1. [#2483](https://github.com/influxdata/chronograf/pull/2483): Update MySQL pre-canned dashboard to have query derivative correctly
### Features
+1. [#2188](https://github.com/influxdata/chronograf/pull/2188): Add Kapacitor logs to the TICKscript editor
+1. [#2384](https://github.com/influxdata/chronograf/pull/2384): Add filtering by name to Dashboard index page
+1. [#2385](https://github.com/influxdata/chronograf/pull/2385): Add time shift feature to DataExplorer and Dashboards
+1. [#2400](https://github.com/influxdata/chronograf/pull/2400): Allow override of generic oauth2 keys for email
+1. [#2426](https://github.com/influxdata/chronograf/pull/2426): Add auto group by time to Data Explorer
+1. [#2456](https://github.com/influxdata/chronograf/pull/2456): Add boolean thresholds for kapacitor threshold alerts
+1. [#2460](https://github.com/influxdata/chronograf/pull/2460): Update kapacitor alerts to cast to float before sending to influx
+1. [#2479](https://github.com/influxdata/chronograf/pull/2479): Support authentication for Enterprise Meta Nodes
+1. [#2477](https://github.com/influxdata/chronograf/pull/2477): Improve performance of hoverline rendering
+
### UI Improvements
## v1.3.10.0 [2017-10-24]
@@ -33,7 +58,7 @@
### UI Improvements
1. [#2111](https://github.com/influxdata/chronograf/pull/2111): Increase size of Cell Editor query tabs to reveal more of their query strings
1. [#2120](https://github.com/influxdata/chronograf/pull/2120): Improve appearance of Admin Page tabs on smaller screens
-1. [#2119](https://github.com/influxdata/chronograf/pull/2119): Add cancel button to Tickscript editor
+1. [#2119](https://github.com/influxdata/chronograf/pull/2119): Add cancel button to TICKscript editor
1. [#2104](https://github.com/influxdata/chronograf/pull/2104): Redesign dashboard naming & renaming interaction
1. [#2104](https://github.com/influxdata/chronograf/pull/2104): Redesign dashboard switching dropdown
@@ -53,7 +78,7 @@
### Features
1. [#1885](https://github.com/influxdata/chronograf/pull/1885): Add `fill` options to data explorer and dashboard queries
-1. [#1978](https://github.com/influxdata/chronograf/pull/1978): Support editing kapacitor TICKScript
+1. [#1978](https://github.com/influxdata/chronograf/pull/1978): Support editing kapacitor TICKscript
1. [#1721](https://github.com/influxdata/chronograf/pull/1721): Introduce the TICKscript editor UI
1. [#1992](https://github.com/influxdata/chronograf/pull/1992): Add .csv download button to data explorer
1. [#2082](https://github.com/influxdata/chronograf/pull/2082): Add Data Explorer InfluxQL query and location query synchronization, so queries can be shared via a a URL
diff --git a/Makefile b/Makefile
index 29cc23061..8f2b5be11 100644
--- a/Makefile
+++ b/Makefile
@@ -8,6 +8,7 @@ YARN := $(shell command -v yarn 2> /dev/null)
SOURCES := $(shell find . -name '*.go' ! -name '*_gen.go' -not -path "./vendor/*" )
UISOURCES := $(shell find ui -type f -not \( -path ui/build/\* -o -path ui/node_modules/\* -prune \) )
+unexport LDFLAGS
LDFLAGS=-ldflags "-s -X main.version=${VERSION} -X main.commit=${COMMIT}"
BINARY=chronograf
@@ -23,42 +24,14 @@ ${BINARY}: $(SOURCES) .bindata .jsdep .godep
go build -o ${BINARY} ${LDFLAGS} ./cmd/chronograf/main.go
define CHRONOGIRAFFE
- tLf iCf.
- .CCC. tCC:
- CGG; CGG:
-tG0Gt: GGGGGGGGGGGGGGGG1 .,:,
-LG1,,:1CC: .GGL;iLC1iii1LCi;GG1 .1GCL1iGG1
- LG1:::;i1CGGt;;;;;;L0t;;;;;;GGGC1;;::,iGC
- ,ii:. 1GG1iiii;;tfiC;;;;;;;GGCfCGCGGC,
- fGCiiiiGi1Lt;;iCLL,i;;;CGt
- fGG11iiii1C1iiiiiGt1;;;;;CGf
- .GGLLL1i1CitfiiL1iCi;;iLCGGt
- .CGL11LGCCCCCCCLLCGG1;1GG;
- CGL1tf1111iiiiiiL1ifGG,
- LGCff1fCt1tCfiiCiCGC
- LGGf111111111iCGGt
- fGGGGGGGGGGGGGGi
- ifii111111itL
- ;f1i11111iitf
- ;f1iiiiiii1tf
- :fi111iii11tf
- :fi111ii1i1tf
- :f111111ii1tt
- ,L111111ii1tt
- .Li1111i1111CCCCCCCCCCCCCCLt;
- L111ii11111ittttt1tttttittti1fC;
- f1111ii111i1ttttt1;iii1ittt1ttttCt.
- tt11ii111tti1ttt1tt1;11;;;;iitttifCCCL,
- 11i1i11ttttti;1t1;;;ttt1;;ii;itti;L,;CCL
- ;f;;;;1tttti;;ttti;;;;;;;;;;;1tt1ifi .CCi
- ,L;itti;;;it;;;;;tt1;;;t1;;;;;;ii;t; :CC,
- L;;;;iti;;;;;;;;;;;;;;;;;;;;;;;i;L, ;CC.
- ti;;;iLLfffi;;;;;ittt11i;;;;;;;;;L tCCfff;
- it;;;;;;L,ti;;;;;1Ltttft1t;;;;;;1t ;CCCL;
- :f;;;;;;L.ti;;;;;tftttf1,f;;;;;;f: ;CC1:
- .L;;;;;;L.t1;;;;;tt111fi,f;;;;;;L.
- 1Li;;iL1 :Ci;;;tL1i1fC, Lt;;;;Li
- .;tt; ifLt:;fLf; ;LCCt,
+ ._ o o
+ \_`-)|_
+ ,"" _\_
+ ," ## | 0 0.
+ ," ## ,-\__ `.
+ ," / `--._;) - "HAI, I'm Chronogiraffe. Let's be friends!"
+ ," ## /
+," ## /
endef
export CHRONOGIRAFFE
chronogiraffe: ${BINARY}
diff --git a/README.md b/README.md
index 8ae69adb6..05cf900a6 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,8 @@
# Chronograf
-Chronograf is an open-source web application written in Go and React.js that provides the tools to visualize your monitoring data and easily create alerting and automation rules.
+Chronograf is an open-source web application written in Go and React.js that
+provides the tools to visualize your monitoring data and easily create alerting
+and automation rules.
@@ -16,8 +18,11 @@ Chronograf is an open-source web application written in Go and React.js that pro
### Dashboard Templates
-Chronograf's [pre-canned dashboards](https://github.com/influxdata/chronograf/tree/master/canned) for the supported [Telegraf](https://github.com/influxdata/telegraf) input plugins.
-Currently, Chronograf offers dashboard templates for the following Telegraf input plugins:
+Chronograf's
+[pre-canned dashboards](https://github.com/influxdata/chronograf/tree/master/canned)
+for the supported [Telegraf](https://github.com/influxdata/telegraf) input
+plugins. Currently, Chronograf offers dashboard templates for the following
+Telegraf input plugins:
* [Apache](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/apache)
* [Consul](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/consul)
@@ -43,40 +48,49 @@ Currently, Chronograf offers dashboard templates for the following Telegraf inpu
* [Redis](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/redis)
* [Riak](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/riak)
* [System](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/SYSTEM_README.md)
- * [CPU](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/CPU_README.md)
- * [Disk](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/DISK_README.md)
- * [DiskIO](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/disk.go#L136)
- * [Memory](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/MEM_README.md)
- * [Net](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/net.go)
- * [Netstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/NETSTAT_README.md)
- * [Processes](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/PROCESSES_README.md)
- * [Procstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/procstat/README.md)
+ * [CPU](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/CPU_README.md)
+ * [Disk](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/DISK_README.md)
+ * [DiskIO](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/disk.go#L136)
+ * [Memory](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/MEM_README.md)
+ * [Net](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/net.go)
+ * [Netstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/NETSTAT_README.md)
+ * [Processes](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/PROCESSES_README.md)
+ * [Procstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/procstat/README.md)
* [Varnish](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/varnish)
* [Windows Performance Counters](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/win_perf_counters)
-> Note: If a `telegraf` instance isn't running the `system` and `cpu` plugins the canned dashboards from that instance won't be generated.
+> Note: If a `telegraf` instance isn't running the `system` and `cpu` plugins
+> the canned dashboards from that instance won't be generated.
### Data Explorer
-Chronograf's graphing tool that allows you to dig in and create personalized visualizations of your data.
+Chronograf's graphing tool that allows you to dig in and create personalized
+visualizations of your data.
-* Generate and edit [InfluxQL](https://docs.influxdata.com/influxdb/latest/query_language/) statements with the query editor
+* Generate and edit
+ [InfluxQL](https://docs.influxdata.com/influxdb/latest/query_language/)
+ statements with the query editor
* Use Chronograf's query templates to easily explore your data
* Create visualizations and view query results in tabular format
### Dashboards
-Create and edit customized dashboards. The dashboards support several visualization types including line graphs, stacked graphs, step plots, single statistic graphs, and line-single-statistic graphs.
+Create and edit customized dashboards. The dashboards support several
+visualization types including line graphs, stacked graphs, step plots, single
+statistic graphs, and line-single-statistic graphs.
-Use Chronograf's template variables to easily adjust the data that appear in your graphs and gain deeper insight into your data.
+Use Chronograf's template variables to easily adjust the data that appear in
+your graphs and gain deeper insight into your data.
### Kapacitor UI
-A UI for [Kapacitor](https://github.com/influxdata/kapacitor) alert creation and alert tracking.
+A UI for [Kapacitor](https://github.com/influxdata/kapacitor) alert creation and
+alert tracking.
* Simply generate threshold, relative, and deadman alerts
* Preview data and alert boundaries while creating an alert
-* Configure alert destinations - Currently, Chronograf supports sending alerts to:
+* Configure alert destinations - Currently, Chronograf supports sending alerts
+ to:
* [Alerta](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#alerta)
* [Exec](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#exec)
* [HipChat](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#hipchat)
@@ -96,45 +110,71 @@ A UI for [Kapacitor](https://github.com/influxdata/kapacitor) alert creation and
### User and Query Management
-Manage users, roles, permissions for [OSS InfluxDB](https://github.com/influxdata/influxdb) and InfluxData's [Enterprise](https://docs.influxdata.com/enterprise/v1.2/) product.
-View actively running queries and stop expensive queries on the Query Management page.
+Manage users, roles, permissions for
+[OSS InfluxDB](https://github.com/influxdata/influxdb) and InfluxData's
+[Enterprise](https://docs.influxdata.com/enterprise/v1.2/) product. View
+actively running queries and stop expensive queries on the Query Management
+page.
### TLS/HTTPS Support
-See [Chronograf with TLS](https://github.com/influxdata/chronograf/blob/master/docs/tls.md) for more information.
+
+See
+[Chronograf with TLS](https://github.com/influxdata/chronograf/blob/master/docs/tls.md)
+for more information.
### OAuth Login
-See [Chronograf with OAuth 2.0](https://github.com/influxdata/chronograf/blob/master/docs/auth.md) for more information.
+
+See
+[Chronograf with OAuth 2.0](https://github.com/influxdata/chronograf/blob/master/docs/auth.md)
+for more information.
### Advanced Routing
-Change the default root path of the Chronograf server with the `--basepath` option.
+
+Change the default root path of the Chronograf server with the `--basepath`
+option.
## Versions
-The most recent version of Chronograf is [v1.3.10.0](https://www.influxdata.com/downloads/).
+The most recent version of Chronograf is
+[v1.3.10.0](https://www.influxdata.com/downloads/).
-Spotted a bug or have a feature request?
-Please open [an issue](https://github.com/influxdata/chronograf/issues/new)!
+Spotted a bug or have a feature request? Please open
+[an issue](https://github.com/influxdata/chronograf/issues/new)!
### Known Issues
The Chronograf team has identified and is working on the following issues:
-* Chronograf requires users to run Telegraf's [CPU](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/CPU_README.md) and [system](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/SYSTEM_README.md) plugins to ensure that all Apps appear on the [HOST LIST](https://github.com/influxdata/chronograf/blob/master/docs/GETTING_STARTED.md#host-list) page.
+* Chronograf requires users to run Telegraf's
+ [CPU](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/CPU_README.md)
+ and
+ [system](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/SYSTEM_README.md)
+ plugins to ensure that all Apps appear on the
+ [HOST LIST](https://github.com/influxdata/chronograf/blob/master/docs/GETTING_STARTED.md#host-list)
+ page.
## Installation
-Check out the [INSTALLATION](https://docs.influxdata.com/chronograf/v1.3/introduction/installation/) guide to get up and running with Chronograf with as little configuration and code as possible.
+Check out the
+[INSTALLATION](https://docs.influxdata.com/chronograf/v1.3/introduction/installation/)
+guide to get up and running with Chronograf with as little configuration and
+code as possible.
-We recommend installing Chronograf using one of the [pre-built packages](https://influxdata.com/downloads/#chronograf). Then start Chronograf using:
+We recommend installing Chronograf using one of the
+[pre-built packages](https://influxdata.com/downloads/#chronograf). Then start
+Chronograf using:
-* `service chronograf start` if you have installed Chronograf using an official Debian or RPM package.
-* `systemctl start chronograf` if you have installed Chronograf using an official Debian or RPM package, and are running a distro with `systemd`. For example, Ubuntu 15 or later.
+* `service chronograf start` if you have installed Chronograf using an official
+ Debian or RPM package.
+* `systemctl start chronograf` if you have installed Chronograf using an
+ official Debian or RPM package, and are running a distro with `systemd`. For
+ example, Ubuntu 15 or later.
* `$GOPATH/bin/chronograf` if you have built Chronograf from source.
By default, chronograf runs on port `8888`.
-
### With Docker
+
To get started right away with Docker, you can pull down our latest release:
```sh
@@ -144,7 +184,8 @@ docker pull chronograf:1.3.10.0
### From Source
* Chronograf works with go 1.8.x, node 6.x/7.x, and yarn 0.18+.
-* Chronograf requires [Kapacitor](https://github.com/influxdata/kapacitor) 1.2.x+ to create and store alerts.
+* Chronograf requires [Kapacitor](https://github.com/influxdata/kapacitor)
+ 1.2.x+ to create and store alerts.
1. [Install Go](https://golang.org/doc/install)
1. [Install Node and NPM](https://nodejs.org/en/download/)
@@ -157,11 +198,23 @@ docker pull chronograf:1.3.10.0
## Documentation
-[Getting Started](https://docs.influxdata.com/chronograf/v1.3/introduction/getting-started/) will get you up and running with Chronograf with as little configuration and code as possible.
-See our [guides](https://docs.influxdata.com/chronograf/v1.3/guides/) to get familiar with Chronograf's main features.
+[Getting Started](https://docs.influxdata.com/chronograf/v1.3/introduction/getting-started/)
+will get you up and running with Chronograf with as little configuration and
+code as possible. See our
+[guides](https://docs.influxdata.com/chronograf/v1.3/guides/) to get familiar
+with Chronograf's main features.
-Documentation for Telegraf, InfluxDB, and Kapacitor are available at https://docs.influxdata.com/.
+Documentation for Telegraf, InfluxDB, and Kapacitor is available at
+https://docs.influxdata.com/.
+
+Chronograf uses
+[swagger](https://swagger.io/specification/) to
+document its REST interfaces. To reach the documentation, run the server and go
+to `/docs` — for example, http://localhost:8888/docs.
+
+The swagger JSON document is in `server/swagger.json`.
## Contributing
-Please see the [contributing guide](CONTRIBUTING.md) for details on contributing to Chronograf.
+Please see the [contributing guide](CONTRIBUTING.md) for details on contributing
+to Chronograf.
diff --git a/bolt/dashboards.go b/bolt/dashboards.go
index bf756dad9..f8fe0a6df 100644
--- a/bolt/dashboards.go
+++ b/bolt/dashboards.go
@@ -107,6 +107,7 @@ func (d *DashboardsStore) Add(ctx context.Context, src chronograf.Dashboard) (ch
id, _ := b.NextSequence()
src.ID = chronograf.DashboardID(id)
+ // TODO: use FormatInt
strID := strconv.Itoa(int(id))
for i, cell := range src.Cells {
cid, err := d.IDs.Generate()
@@ -116,12 +117,11 @@ func (d *DashboardsStore) Add(ctx context.Context, src chronograf.Dashboard) (ch
cell.ID = cid
src.Cells[i] = cell
}
- if v, err := internal.MarshalDashboard(src); err != nil {
- return err
- } else if err := b.Put([]byte(strID), v); err != nil {
+ v, err := internal.MarshalDashboard(src)
+ if err != nil {
return err
}
- return nil
+ return b.Put([]byte(strID), v)
}); err != nil {
return chronograf.Dashboard{}, err
}
diff --git a/bolt/internal/internal.go b/bolt/internal/internal.go
index 5a3e838f7..9cb243be0 100644
--- a/bolt/internal/internal.go
+++ b/bolt/internal/internal.go
@@ -197,12 +197,26 @@ func MarshalDashboard(d chronograf.Dashboard) ([]byte, error) {
if q.Range != nil {
r.Upper, r.Lower = q.Range.Upper, q.Range.Lower
}
+ q.Shifts = q.QueryConfig.Shifts
queries[j] = &Query{
Command: q.Command,
Label: q.Label,
Range: r,
Source: q.Source,
}
+
+ shifts := make([]*TimeShift, len(q.Shifts))
+ for k := range q.Shifts {
+ shift := &TimeShift{
+ Label: q.Shifts[k].Label,
+ Unit: q.Shifts[k].Unit,
+ Quantity: q.Shifts[k].Quantity,
+ }
+
+ shifts[k] = shift
+ }
+
+ queries[j].Shifts = shifts
}
axes := make(map[string]*Axis, len(c.Axes))
@@ -284,12 +298,26 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
Label: q.Label,
Source: q.Source,
}
+
if q.Range.Upper != q.Range.Lower {
queries[j].Range = &chronograf.Range{
Upper: q.Range.Upper,
Lower: q.Range.Lower,
}
}
+
+ shifts := make([]chronograf.TimeShift, len(q.Shifts))
+ for k := range q.Shifts {
+ shift := chronograf.TimeShift{
+ Label: q.Shifts[k].Label,
+ Unit: q.Shifts[k].Unit,
+ Quantity: q.Shifts[k].Quantity,
+ }
+
+ shifts[k] = shift
+ }
+
+ queries[j].Shifts = shifts
}
axes := make(map[string]chronograf.Axis, len(c.Axes))
@@ -337,9 +365,9 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
templates := make([]chronograf.Template, len(pb.Templates))
for i, t := range pb.Templates {
- vals := make([]chronograf.BasicTemplateValue, len(t.Values))
+ vals := make([]chronograf.TemplateValue, len(t.Values))
for j, v := range t.Values {
- vals[j] = chronograf.BasicTemplateValue{
+ vals[j] = chronograf.TemplateValue{
Selected: v.Selected,
Type: v.Type,
Value: v.Value,
@@ -348,7 +376,7 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
template := chronograf.Template{
ID: chronograf.TemplateID(t.ID),
- BasicTemplateVar: chronograf.BasicTemplateVar{
+ TemplateVar: chronograf.TemplateVar{
Var: t.TempVar,
Values: vals,
},
@@ -467,10 +495,7 @@ func UnmarshalUser(data []byte, u *chronograf.User) error {
// UnmarshalUserPB decodes a user from binary protobuf data.
// We are ignoring the password for now.
func UnmarshalUserPB(data []byte, u *User) error {
- if err := proto.Unmarshal(data, u); err != nil {
- return err
- }
- return nil
+ return proto.Unmarshal(data, u)
}
// MarshalRole encodes a role to binary protobuf format.
diff --git a/bolt/internal/internal.pb.go b/bolt/internal/internal.pb.go
index 675f7b5a6..01fbd30e3 100644
--- a/bolt/internal/internal.pb.go
+++ b/bolt/internal/internal.pb.go
@@ -20,6 +20,7 @@ It has these top-level messages:
Layout
Cell
Query
+ TimeShift
Range
AlertRule
User
@@ -64,97 +65,6 @@ func (m *Source) String() string { return proto.CompactTextString(m)
func (*Source) ProtoMessage() {}
func (*Source) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{0} }
-func (m *Source) GetID() int64 {
- if m != nil {
- return m.ID
- }
- return 0
-}
-
-func (m *Source) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
-
-func (m *Source) GetType() string {
- if m != nil {
- return m.Type
- }
- return ""
-}
-
-func (m *Source) GetUsername() string {
- if m != nil {
- return m.Username
- }
- return ""
-}
-
-func (m *Source) GetPassword() string {
- if m != nil {
- return m.Password
- }
- return ""
-}
-
-func (m *Source) GetURL() string {
- if m != nil {
- return m.URL
- }
- return ""
-}
-
-func (m *Source) GetDefault() bool {
- if m != nil {
- return m.Default
- }
- return false
-}
-
-func (m *Source) GetTelegraf() string {
- if m != nil {
- return m.Telegraf
- }
- return ""
-}
-
-func (m *Source) GetInsecureSkipVerify() bool {
- if m != nil {
- return m.InsecureSkipVerify
- }
- return false
-}
-
-func (m *Source) GetMetaURL() string {
- if m != nil {
- return m.MetaURL
- }
- return ""
-}
-
-func (m *Source) GetSharedSecret() string {
- if m != nil {
- return m.SharedSecret
- }
- return ""
-}
-
-func (m *Source) GetOrganization() string {
- if m != nil {
- return m.Organization
- }
- return ""
-}
-
-func (m *Source) GetRole() string {
- if m != nil {
- return m.Role
- }
- return ""
-}
-
type Dashboard struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@@ -168,20 +78,6 @@ func (m *Dashboard) String() string { return proto.CompactTextString(
func (*Dashboard) ProtoMessage() {}
func (*Dashboard) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{1} }
-func (m *Dashboard) GetID() int64 {
- if m != nil {
- return m.ID
- }
- return 0
-}
-
-func (m *Dashboard) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
-
func (m *Dashboard) GetCells() []*DashboardCell {
if m != nil {
return m.Cells
@@ -196,13 +92,6 @@ func (m *Dashboard) GetTemplates() []*Template {
return nil
}
-func (m *Dashboard) GetOrganization() string {
- if m != nil {
- return m.Organization
- }
- return ""
-}
-
type DashboardCell struct {
X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"`
Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"`
@@ -220,34 +109,6 @@ func (m *DashboardCell) String() string { return proto.CompactTextStr
func (*DashboardCell) ProtoMessage() {}
func (*DashboardCell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{2} }
-func (m *DashboardCell) GetX() int32 {
- if m != nil {
- return m.X
- }
- return 0
-}
-
-func (m *DashboardCell) GetY() int32 {
- if m != nil {
- return m.Y
- }
- return 0
-}
-
-func (m *DashboardCell) GetW() int32 {
- if m != nil {
- return m.W
- }
- return 0
-}
-
-func (m *DashboardCell) GetH() int32 {
- if m != nil {
- return m.H
- }
- return 0
-}
-
func (m *DashboardCell) GetQueries() []*Query {
if m != nil {
return m.Queries
@@ -255,27 +116,6 @@ func (m *DashboardCell) GetQueries() []*Query {
return nil
}
-func (m *DashboardCell) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
-
-func (m *DashboardCell) GetType() string {
- if m != nil {
- return m.Type
- }
- return ""
-}
-
-func (m *DashboardCell) GetID() string {
- if m != nil {
- return m.ID
- }
- return ""
-}
-
func (m *DashboardCell) GetAxes() map[string]*Axis {
if m != nil {
return m.Axes
@@ -284,7 +124,7 @@ func (m *DashboardCell) GetAxes() map[string]*Axis {
}
type Axis struct {
- LegacyBounds []int64 `protobuf:"varint,1,rep,packed,name=legacyBounds" json:"legacyBounds,omitempty"`
+ LegacyBounds []int64 `protobuf:"varint,1,rep,name=legacyBounds" json:"legacyBounds,omitempty"`
Bounds []string `protobuf:"bytes,2,rep,name=bounds" json:"bounds,omitempty"`
Label string `protobuf:"bytes,3,opt,name=label,proto3" json:"label,omitempty"`
Prefix string `protobuf:"bytes,4,opt,name=prefix,proto3" json:"prefix,omitempty"`
@@ -298,55 +138,6 @@ func (m *Axis) String() string { return proto.CompactTextString(m) }
func (*Axis) ProtoMessage() {}
func (*Axis) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{3} }
-func (m *Axis) GetLegacyBounds() []int64 {
- if m != nil {
- return m.LegacyBounds
- }
- return nil
-}
-
-func (m *Axis) GetBounds() []string {
- if m != nil {
- return m.Bounds
- }
- return nil
-}
-
-func (m *Axis) GetLabel() string {
- if m != nil {
- return m.Label
- }
- return ""
-}
-
-func (m *Axis) GetPrefix() string {
- if m != nil {
- return m.Prefix
- }
- return ""
-}
-
-func (m *Axis) GetSuffix() string {
- if m != nil {
- return m.Suffix
- }
- return ""
-}
-
-func (m *Axis) GetBase() string {
- if m != nil {
- return m.Base
- }
- return ""
-}
-
-func (m *Axis) GetScale() string {
- if m != nil {
- return m.Scale
- }
- return ""
-}
-
type Template struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
TempVar string `protobuf:"bytes,2,opt,name=temp_var,json=tempVar,proto3" json:"temp_var,omitempty"`
@@ -361,20 +152,6 @@ func (m *Template) String() string { return proto.CompactTextString(m
func (*Template) ProtoMessage() {}
func (*Template) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{4} }
-func (m *Template) GetID() string {
- if m != nil {
- return m.ID
- }
- return ""
-}
-
-func (m *Template) GetTempVar() string {
- if m != nil {
- return m.TempVar
- }
- return ""
-}
-
func (m *Template) GetValues() []*TemplateValue {
if m != nil {
return m.Values
@@ -382,20 +159,6 @@ func (m *Template) GetValues() []*TemplateValue {
return nil
}
-func (m *Template) GetType() string {
- if m != nil {
- return m.Type
- }
- return ""
-}
-
-func (m *Template) GetLabel() string {
- if m != nil {
- return m.Label
- }
- return ""
-}
-
func (m *Template) GetQuery() *TemplateQuery {
if m != nil {
return m.Query
@@ -414,27 +177,6 @@ func (m *TemplateValue) String() string { return proto.CompactTextStr
func (*TemplateValue) ProtoMessage() {}
func (*TemplateValue) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{5} }
-func (m *TemplateValue) GetType() string {
- if m != nil {
- return m.Type
- }
- return ""
-}
-
-func (m *TemplateValue) GetValue() string {
- if m != nil {
- return m.Value
- }
- return ""
-}
-
-func (m *TemplateValue) GetSelected() bool {
- if m != nil {
- return m.Selected
- }
- return false
-}
-
type TemplateQuery struct {
Command string `protobuf:"bytes,1,opt,name=command,proto3" json:"command,omitempty"`
Db string `protobuf:"bytes,2,opt,name=db,proto3" json:"db,omitempty"`
@@ -449,48 +191,6 @@ func (m *TemplateQuery) String() string { return proto.CompactTextStr
func (*TemplateQuery) ProtoMessage() {}
func (*TemplateQuery) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{6} }
-func (m *TemplateQuery) GetCommand() string {
- if m != nil {
- return m.Command
- }
- return ""
-}
-
-func (m *TemplateQuery) GetDb() string {
- if m != nil {
- return m.Db
- }
- return ""
-}
-
-func (m *TemplateQuery) GetRp() string {
- if m != nil {
- return m.Rp
- }
- return ""
-}
-
-func (m *TemplateQuery) GetMeasurement() string {
- if m != nil {
- return m.Measurement
- }
- return ""
-}
-
-func (m *TemplateQuery) GetTagKey() string {
- if m != nil {
- return m.TagKey
- }
- return ""
-}
-
-func (m *TemplateQuery) GetFieldKey() string {
- if m != nil {
- return m.FieldKey
- }
- return ""
-}
-
type Server struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@@ -507,62 +207,6 @@ func (m *Server) String() string { return proto.CompactTextString(m)
func (*Server) ProtoMessage() {}
func (*Server) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{7} }
-func (m *Server) GetID() int64 {
- if m != nil {
- return m.ID
- }
- return 0
-}
-
-func (m *Server) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
-
-func (m *Server) GetUsername() string {
- if m != nil {
- return m.Username
- }
- return ""
-}
-
-func (m *Server) GetPassword() string {
- if m != nil {
- return m.Password
- }
- return ""
-}
-
-func (m *Server) GetURL() string {
- if m != nil {
- return m.URL
- }
- return ""
-}
-
-func (m *Server) GetSrcID() int64 {
- if m != nil {
- return m.SrcID
- }
- return 0
-}
-
-func (m *Server) GetActive() bool {
- if m != nil {
- return m.Active
- }
- return false
-}
-
-func (m *Server) GetOrganization() string {
- if m != nil {
- return m.Organization
- }
- return ""
-}
-
type Layout struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
Application string `protobuf:"bytes,2,opt,name=Application,proto3" json:"Application,omitempty"`
@@ -576,27 +220,6 @@ func (m *Layout) String() string { return proto.CompactTextString(m)
func (*Layout) ProtoMessage() {}
func (*Layout) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{8} }
-func (m *Layout) GetID() string {
- if m != nil {
- return m.ID
- }
- return ""
-}
-
-func (m *Layout) GetApplication() string {
- if m != nil {
- return m.Application
- }
- return ""
-}
-
-func (m *Layout) GetMeasurement() string {
- if m != nil {
- return m.Measurement
- }
- return ""
-}
-
func (m *Layout) GetCells() []*Cell {
if m != nil {
return m.Cells
@@ -604,13 +227,6 @@ func (m *Layout) GetCells() []*Cell {
return nil
}
-func (m *Layout) GetAutoflow() bool {
- if m != nil {
- return m.Autoflow
- }
- return false
-}
-
type Cell struct {
X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"`
Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"`
@@ -619,7 +235,7 @@ type Cell struct {
Queries []*Query `protobuf:"bytes,5,rep,name=queries" json:"queries,omitempty"`
I string `protobuf:"bytes,6,opt,name=i,proto3" json:"i,omitempty"`
Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"`
- Yranges []int64 `protobuf:"varint,8,rep,packed,name=yranges" json:"yranges,omitempty"`
+ Yranges []int64 `protobuf:"varint,8,rep,name=yranges" json:"yranges,omitempty"`
Ylabels []string `protobuf:"bytes,9,rep,name=ylabels" json:"ylabels,omitempty"`
Type string `protobuf:"bytes,10,opt,name=type,proto3" json:"type,omitempty"`
Axes map[string]*Axis `protobuf:"bytes,11,rep,name=axes" json:"axes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
@@ -630,34 +246,6 @@ func (m *Cell) String() string { return proto.CompactTextString(m) }
func (*Cell) ProtoMessage() {}
func (*Cell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{9} }
-func (m *Cell) GetX() int32 {
- if m != nil {
- return m.X
- }
- return 0
-}
-
-func (m *Cell) GetY() int32 {
- if m != nil {
- return m.Y
- }
- return 0
-}
-
-func (m *Cell) GetW() int32 {
- if m != nil {
- return m.W
- }
- return 0
-}
-
-func (m *Cell) GetH() int32 {
- if m != nil {
- return m.H
- }
- return 0
-}
-
func (m *Cell) GetQueries() []*Query {
if m != nil {
return m.Queries
@@ -665,41 +253,6 @@ func (m *Cell) GetQueries() []*Query {
return nil
}
-func (m *Cell) GetI() string {
- if m != nil {
- return m.I
- }
- return ""
-}
-
-func (m *Cell) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
-
-func (m *Cell) GetYranges() []int64 {
- if m != nil {
- return m.Yranges
- }
- return nil
-}
-
-func (m *Cell) GetYlabels() []string {
- if m != nil {
- return m.Ylabels
- }
- return nil
-}
-
-func (m *Cell) GetType() string {
- if m != nil {
- return m.Type
- }
- return ""
-}
-
func (m *Cell) GetAxes() map[string]*Axis {
if m != nil {
return m.Axes
@@ -708,14 +261,15 @@ func (m *Cell) GetAxes() map[string]*Axis {
}
type Query struct {
- Command string `protobuf:"bytes,1,opt,name=Command,proto3" json:"Command,omitempty"`
- DB string `protobuf:"bytes,2,opt,name=DB,proto3" json:"DB,omitempty"`
- RP string `protobuf:"bytes,3,opt,name=RP,proto3" json:"RP,omitempty"`
- GroupBys []string `protobuf:"bytes,4,rep,name=GroupBys" json:"GroupBys,omitempty"`
- Wheres []string `protobuf:"bytes,5,rep,name=Wheres" json:"Wheres,omitempty"`
- Label string `protobuf:"bytes,6,opt,name=Label,proto3" json:"Label,omitempty"`
- Range *Range `protobuf:"bytes,7,opt,name=Range" json:"Range,omitempty"`
- Source string `protobuf:"bytes,8,opt,name=Source,proto3" json:"Source,omitempty"`
+ Command string `protobuf:"bytes,1,opt,name=Command,proto3" json:"Command,omitempty"`
+ DB string `protobuf:"bytes,2,opt,name=DB,proto3" json:"DB,omitempty"`
+ RP string `protobuf:"bytes,3,opt,name=RP,proto3" json:"RP,omitempty"`
+ GroupBys []string `protobuf:"bytes,4,rep,name=GroupBys" json:"GroupBys,omitempty"`
+ Wheres []string `protobuf:"bytes,5,rep,name=Wheres" json:"Wheres,omitempty"`
+ Label string `protobuf:"bytes,6,opt,name=Label,proto3" json:"Label,omitempty"`
+ Range *Range `protobuf:"bytes,7,opt,name=Range" json:"Range,omitempty"`
+ Source string `protobuf:"bytes,8,opt,name=Source,proto3" json:"Source,omitempty"`
+ Shifts []*TimeShift `protobuf:"bytes,9,rep,name=Shifts" json:"Shifts,omitempty"`
}
func (m *Query) Reset() { *m = Query{} }
@@ -723,48 +277,6 @@ func (m *Query) String() string { return proto.CompactTextString(m) }
func (*Query) ProtoMessage() {}
func (*Query) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{10} }
-func (m *Query) GetCommand() string {
- if m != nil {
- return m.Command
- }
- return ""
-}
-
-func (m *Query) GetDB() string {
- if m != nil {
- return m.DB
- }
- return ""
-}
-
-func (m *Query) GetRP() string {
- if m != nil {
- return m.RP
- }
- return ""
-}
-
-func (m *Query) GetGroupBys() []string {
- if m != nil {
- return m.GroupBys
- }
- return nil
-}
-
-func (m *Query) GetWheres() []string {
- if m != nil {
- return m.Wheres
- }
- return nil
-}
-
-func (m *Query) GetLabel() string {
- if m != nil {
- return m.Label
- }
- return ""
-}
-
func (m *Query) GetRange() *Range {
if m != nil {
return m.Range
@@ -772,13 +284,24 @@ func (m *Query) GetRange() *Range {
return nil
}
-func (m *Query) GetSource() string {
+func (m *Query) GetShifts() []*TimeShift {
if m != nil {
- return m.Source
+ return m.Shifts
}
- return ""
+ return nil
}
+type TimeShift struct {
+ Label string `protobuf:"bytes,1,opt,name=Label,proto3" json:"Label,omitempty"`
+ Unit string `protobuf:"bytes,2,opt,name=Unit,proto3" json:"Unit,omitempty"`
+ Quantity string `protobuf:"bytes,3,opt,name=Quantity,proto3" json:"Quantity,omitempty"`
+}
+
+func (m *TimeShift) Reset() { *m = TimeShift{} }
+func (m *TimeShift) String() string { return proto.CompactTextString(m) }
+func (*TimeShift) ProtoMessage() {}
+func (*TimeShift) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{11} }
+
type Range struct {
Upper int64 `protobuf:"varint,1,opt,name=Upper,proto3" json:"Upper,omitempty"`
Lower int64 `protobuf:"varint,2,opt,name=Lower,proto3" json:"Lower,omitempty"`
@@ -787,21 +310,7 @@ type Range struct {
func (m *Range) Reset() { *m = Range{} }
func (m *Range) String() string { return proto.CompactTextString(m) }
func (*Range) ProtoMessage() {}
-func (*Range) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{11} }
-
-func (m *Range) GetUpper() int64 {
- if m != nil {
- return m.Upper
- }
- return 0
-}
-
-func (m *Range) GetLower() int64 {
- if m != nil {
- return m.Lower
- }
- return 0
-}
+func (*Range) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{12} }
type AlertRule struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
@@ -813,35 +322,7 @@ type AlertRule struct {
func (m *AlertRule) Reset() { *m = AlertRule{} }
func (m *AlertRule) String() string { return proto.CompactTextString(m) }
func (*AlertRule) ProtoMessage() {}
-func (*AlertRule) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{12} }
-
-func (m *AlertRule) GetID() string {
- if m != nil {
- return m.ID
- }
- return ""
-}
-
-func (m *AlertRule) GetJSON() string {
- if m != nil {
- return m.JSON
- }
- return ""
-}
-
-func (m *AlertRule) GetSrcID() int64 {
- if m != nil {
- return m.SrcID
- }
- return 0
-}
-
-func (m *AlertRule) GetKapaID() int64 {
- if m != nil {
- return m.KapaID
- }
- return 0
-}
+func (*AlertRule) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{13} }
type User struct {
ID uint64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
@@ -855,35 +336,7 @@ type User struct {
func (m *User) Reset() { *m = User{} }
func (m *User) String() string { return proto.CompactTextString(m) }
func (*User) ProtoMessage() {}
-func (*User) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{13} }
-
-func (m *User) GetID() uint64 {
- if m != nil {
- return m.ID
- }
- return 0
-}
-
-func (m *User) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
-
-func (m *User) GetProvider() string {
- if m != nil {
- return m.Provider
- }
- return ""
-}
-
-func (m *User) GetScheme() string {
- if m != nil {
- return m.Scheme
- }
- return ""
-}
+func (*User) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{14} }
func (m *User) GetRoles() []*Role {
if m != nil {
@@ -892,13 +345,6 @@ func (m *User) GetRoles() []*Role {
return nil
}
-func (m *User) GetSuperAdmin() bool {
- if m != nil {
- return m.SuperAdmin
- }
- return false
-}
-
type Role struct {
Organization string `protobuf:"bytes,1,opt,name=Organization,proto3" json:"Organization,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@@ -907,21 +353,7 @@ type Role struct {
func (m *Role) Reset() { *m = Role{} }
func (m *Role) String() string { return proto.CompactTextString(m) }
func (*Role) ProtoMessage() {}
-func (*Role) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{14} }
-
-func (m *Role) GetOrganization() string {
- if m != nil {
- return m.Organization
- }
- return ""
-}
-
-func (m *Role) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
+func (*Role) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{15} }
type Organization struct {
ID uint64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
@@ -933,35 +365,7 @@ type Organization struct {
func (m *Organization) Reset() { *m = Organization{} }
func (m *Organization) String() string { return proto.CompactTextString(m) }
func (*Organization) ProtoMessage() {}
-func (*Organization) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{15} }
-
-func (m *Organization) GetID() uint64 {
- if m != nil {
- return m.ID
- }
- return 0
-}
-
-func (m *Organization) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
-}
-
-func (m *Organization) GetDefaultRole() string {
- if m != nil {
- return m.DefaultRole
- }
- return ""
-}
-
-func (m *Organization) GetPublic() bool {
- if m != nil {
- return m.Public
- }
- return false
-}
+func (*Organization) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{16} }
func init() {
proto.RegisterType((*Source)(nil), "internal.Source")
@@ -975,6 +379,7 @@ func init() {
proto.RegisterType((*Layout)(nil), "internal.Layout")
proto.RegisterType((*Cell)(nil), "internal.Cell")
proto.RegisterType((*Query)(nil), "internal.Query")
+ proto.RegisterType((*TimeShift)(nil), "internal.TimeShift")
proto.RegisterType((*Range)(nil), "internal.Range")
proto.RegisterType((*AlertRule)(nil), "internal.AlertRule")
proto.RegisterType((*User)(nil), "internal.User")
@@ -985,78 +390,81 @@ func init() {
func init() { proto.RegisterFile("internal.proto", fileDescriptorInternal) }
var fileDescriptorInternal = []byte{
- // 1155 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x51, 0x8f, 0xdb, 0xc4,
- 0x13, 0x97, 0xe3, 0x38, 0xb1, 0x27, 0xd7, 0xfb, 0xff, 0xb5, 0xaa, 0xa8, 0x29, 0x12, 0x0a, 0x16,
- 0x48, 0x41, 0xa2, 0x07, 0x6a, 0x85, 0x84, 0x78, 0x40, 0xca, 0x5d, 0x50, 0x75, 0xdc, 0xb5, 0x3d,
- 0x36, 0x77, 0xc7, 0x13, 0xaa, 0x36, 0xce, 0x24, 0xb1, 0xea, 0xc4, 0x66, 0x6d, 0xdf, 0xc5, 0x7c,
- 0x18, 0x24, 0x24, 0x9e, 0x78, 0x44, 0xbc, 0xf3, 0x8a, 0xfa, 0x41, 0xf8, 0x1c, 0x68, 0x76, 0xd7,
- 0x8e, 0xd3, 0x84, 0xea, 0x5e, 0xe0, 0x6d, 0x7f, 0x33, 0xbb, 0xb3, 0x3b, 0xbf, 0x99, 0xf9, 0x69,
- 0xe1, 0x30, 0x5a, 0xe5, 0x28, 0x57, 0x22, 0x3e, 0x4a, 0x65, 0x92, 0x27, 0xcc, 0xad, 0x70, 0xf0,
- 0x57, 0x0b, 0x3a, 0xe3, 0xa4, 0x90, 0x21, 0xb2, 0x43, 0x68, 0x9d, 0x8e, 0x7c, 0xab, 0x6f, 0x0d,
- 0x6c, 0xde, 0x3a, 0x1d, 0x31, 0x06, 0xed, 0xe7, 0x62, 0x89, 0x7e, 0xab, 0x6f, 0x0d, 0x3c, 0xae,
- 0xd6, 0x64, 0xbb, 0x2c, 0x53, 0xf4, 0x6d, 0x6d, 0xa3, 0x35, 0x7b, 0x08, 0xee, 0x55, 0x46, 0xd1,
- 0x96, 0xe8, 0xb7, 0x95, 0xbd, 0xc6, 0xe4, 0xbb, 0x10, 0x59, 0x76, 0x9b, 0xc8, 0xa9, 0xef, 0x68,
- 0x5f, 0x85, 0xd9, 0xff, 0xc1, 0xbe, 0xe2, 0xe7, 0x7e, 0x47, 0x99, 0x69, 0xc9, 0x7c, 0xe8, 0x8e,
- 0x70, 0x26, 0x8a, 0x38, 0xf7, 0xbb, 0x7d, 0x6b, 0xe0, 0xf2, 0x0a, 0x52, 0x9c, 0x4b, 0x8c, 0x71,
- 0x2e, 0xc5, 0xcc, 0x77, 0x75, 0x9c, 0x0a, 0xb3, 0x23, 0x60, 0xa7, 0xab, 0x0c, 0xc3, 0x42, 0xe2,
- 0xf8, 0x55, 0x94, 0x5e, 0xa3, 0x8c, 0x66, 0xa5, 0xef, 0xa9, 0x00, 0x7b, 0x3c, 0x74, 0xcb, 0x33,
- 0xcc, 0x05, 0xdd, 0x0d, 0x2a, 0x54, 0x05, 0x59, 0x00, 0x07, 0xe3, 0x85, 0x90, 0x38, 0x1d, 0x63,
- 0x28, 0x31, 0xf7, 0x7b, 0xca, 0xbd, 0x65, 0xa3, 0x3d, 0x2f, 0xe4, 0x5c, 0xac, 0xa2, 0x1f, 0x45,
- 0x1e, 0x25, 0x2b, 0xff, 0x40, 0xef, 0x69, 0xda, 0x88, 0x25, 0x9e, 0xc4, 0xe8, 0xdf, 0xd3, 0x2c,
- 0xd1, 0x3a, 0xf8, 0xdd, 0x02, 0x6f, 0x24, 0xb2, 0xc5, 0x24, 0x11, 0x72, 0x7a, 0x27, 0xae, 0x1f,
- 0x81, 0x13, 0x62, 0x1c, 0x67, 0xbe, 0xdd, 0xb7, 0x07, 0xbd, 0xc7, 0x0f, 0x8e, 0xea, 0x22, 0xd6,
- 0x71, 0x4e, 0x30, 0x8e, 0xb9, 0xde, 0xc5, 0x3e, 0x03, 0x2f, 0xc7, 0x65, 0x1a, 0x8b, 0x1c, 0x33,
- 0xbf, 0xad, 0x8e, 0xb0, 0xcd, 0x91, 0x4b, 0xe3, 0xe2, 0x9b, 0x4d, 0x3b, 0xa9, 0x38, 0xbb, 0xa9,
- 0x04, 0xbf, 0xb6, 0xe0, 0xde, 0xd6, 0x75, 0xec, 0x00, 0xac, 0xb5, 0x7a, 0xb9, 0xc3, 0xad, 0x35,
- 0xa1, 0x52, 0xbd, 0xda, 0xe1, 0x56, 0x49, 0xe8, 0x56, 0xf5, 0x86, 0xc3, 0xad, 0x5b, 0x42, 0x0b,
- 0xd5, 0x11, 0x0e, 0xb7, 0x16, 0xec, 0x63, 0xe8, 0xfe, 0x50, 0xa0, 0x8c, 0x30, 0xf3, 0x1d, 0xf5,
- 0xba, 0xff, 0x6d, 0x5e, 0xf7, 0x6d, 0x81, 0xb2, 0xe4, 0x95, 0x9f, 0xd8, 0x50, 0xdd, 0xa4, 0x5b,
- 0x43, 0xad, 0xc9, 0x96, 0x53, 0xe7, 0x75, 0xb5, 0x8d, 0xd6, 0x86, 0x45, 0xdd, 0x0f, 0xc4, 0xe2,
- 0xe7, 0xd0, 0x16, 0x6b, 0xcc, 0x7c, 0x4f, 0xc5, 0xff, 0xe0, 0x1f, 0x08, 0x3b, 0x1a, 0xae, 0x31,
- 0xfb, 0x7a, 0x95, 0xcb, 0x92, 0xab, 0xed, 0x0f, 0x9f, 0x82, 0x57, 0x9b, 0xa8, 0x2b, 0x5f, 0x61,
- 0xa9, 0x12, 0xf4, 0x38, 0x2d, 0xd9, 0x87, 0xe0, 0xdc, 0x88, 0xb8, 0xd0, 0xc5, 0xe9, 0x3d, 0x3e,
- 0xdc, 0x84, 0x1d, 0xae, 0xa3, 0x8c, 0x6b, 0xe7, 0x97, 0xad, 0x2f, 0xac, 0xe0, 0x37, 0x0b, 0xda,
- 0x64, 0x23, 0x66, 0x63, 0x9c, 0x8b, 0xb0, 0x3c, 0x4e, 0x8a, 0xd5, 0x34, 0xf3, 0xad, 0xbe, 0x3d,
- 0xb0, 0xf9, 0x96, 0x8d, 0xbd, 0x03, 0x9d, 0x89, 0xf6, 0xb6, 0xfa, 0xf6, 0xc0, 0xe3, 0x06, 0xb1,
- 0xfb, 0xe0, 0xc4, 0x62, 0x82, 0xb1, 0x99, 0x31, 0x0d, 0x68, 0x77, 0x2a, 0x71, 0x16, 0xad, 0xcd,
- 0x88, 0x19, 0x44, 0xf6, 0xac, 0x98, 0x91, 0x5d, 0x57, 0xcf, 0x20, 0xa2, 0x6b, 0x22, 0xb2, 0x9a,
- 0x42, 0x5a, 0x53, 0xe4, 0x2c, 0x14, 0x71, 0xc5, 0xa1, 0x06, 0xc1, 0x1f, 0x16, 0xcd, 0x96, 0xee,
- 0x89, 0x46, 0x5f, 0x6a, 0x46, 0xdf, 0x05, 0x97, 0xfa, 0xe5, 0xe5, 0x8d, 0x90, 0xa6, 0x37, 0xbb,
- 0x84, 0xaf, 0x85, 0x64, 0x9f, 0x42, 0x47, 0x65, 0xbe, 0xa7, 0x3f, 0xab, 0x70, 0xd7, 0xe4, 0xe7,
- 0x66, 0x5b, 0x5d, 0xc1, 0x76, 0xa3, 0x82, 0x75, 0xb2, 0x4e, 0x33, 0xd9, 0x47, 0xe0, 0x50, 0x2b,
- 0x94, 0xea, 0xf5, 0x7b, 0x23, 0xeb, 0x86, 0xd1, 0xbb, 0x82, 0x2b, 0xb8, 0xb7, 0x75, 0x63, 0x7d,
- 0x93, 0xb5, 0x7d, 0xd3, 0xa6, 0x8a, 0x9e, 0xa9, 0x1a, 0xe9, 0x4a, 0x86, 0x31, 0x86, 0x39, 0x4e,
- 0x15, 0xdf, 0x2e, 0xaf, 0x71, 0xf0, 0xb3, 0xb5, 0x89, 0xab, 0xee, 0x23, 0xe5, 0x08, 0x93, 0xe5,
- 0x52, 0xac, 0xa6, 0x26, 0x74, 0x05, 0x89, 0xb7, 0xe9, 0xc4, 0x84, 0x6e, 0x4d, 0x27, 0x84, 0x65,
- 0x6a, 0x2a, 0xd8, 0x92, 0x29, 0xeb, 0x43, 0x6f, 0x89, 0x22, 0x2b, 0x24, 0x2e, 0x71, 0x95, 0x1b,
- 0x0a, 0x9a, 0x26, 0xf6, 0x00, 0xba, 0xb9, 0x98, 0xbf, 0xa4, 0xde, 0x33, 0x95, 0xcc, 0xc5, 0xfc,
- 0x0c, 0x4b, 0xf6, 0x1e, 0x78, 0xb3, 0x08, 0xe3, 0xa9, 0x72, 0xe9, 0x72, 0xba, 0xca, 0x70, 0x86,
- 0x65, 0xf0, 0xa7, 0x05, 0x9d, 0x31, 0xca, 0x1b, 0x94, 0x77, 0x92, 0x94, 0xa6, 0x54, 0xdb, 0x6f,
- 0x91, 0xea, 0xf6, 0x7e, 0xa9, 0x76, 0x36, 0x52, 0x7d, 0x1f, 0x9c, 0xb1, 0x0c, 0x4f, 0x47, 0xea,
- 0x45, 0x36, 0xd7, 0x80, 0xba, 0x71, 0x18, 0xe6, 0xd1, 0x0d, 0x1a, 0xfd, 0x36, 0x68, 0x47, 0x69,
- 0xdc, 0x3d, 0x4a, 0xf3, 0x93, 0x05, 0x9d, 0x73, 0x51, 0x26, 0x45, 0xbe, 0xd3, 0x85, 0x7d, 0xe8,
- 0x0d, 0xd3, 0x34, 0x8e, 0x42, 0x7d, 0x5a, 0x67, 0xd4, 0x34, 0xd1, 0x8e, 0x67, 0x0d, 0x7e, 0x75,
- 0x6e, 0x4d, 0x13, 0x4d, 0xf1, 0x89, 0x52, 0x53, 0x2d, 0x8d, 0x8d, 0x29, 0xd6, 0x22, 0xaa, 0x9c,
- 0x44, 0xc2, 0xb0, 0xc8, 0x93, 0x59, 0x9c, 0xdc, 0xaa, 0x6c, 0x5d, 0x5e, 0xe3, 0xe0, 0x75, 0x0b,
- 0xda, 0xff, 0x95, 0x02, 0x1e, 0x80, 0x15, 0x99, 0x62, 0x5b, 0x51, 0xad, 0x87, 0xdd, 0x86, 0x1e,
- 0xfa, 0xd0, 0x2d, 0xa5, 0x58, 0xcd, 0x31, 0xf3, 0x5d, 0xa5, 0x2e, 0x15, 0x54, 0x1e, 0x35, 0x47,
- 0x5a, 0x08, 0x3d, 0x5e, 0xc1, 0x7a, 0x2e, 0xa0, 0x31, 0x17, 0x9f, 0x18, 0xcd, 0xec, 0xa9, 0x17,
- 0xf9, 0xdb, 0xb4, 0xfc, 0x7b, 0x52, 0xf9, 0xda, 0x02, 0xa7, 0x1e, 0xaa, 0x93, 0xed, 0xa1, 0x3a,
- 0xd9, 0x0c, 0xd5, 0xe8, 0xb8, 0x1a, 0xaa, 0xd1, 0x31, 0x61, 0x7e, 0x51, 0x0d, 0x15, 0xbf, 0xa0,
- 0x62, 0x3d, 0x95, 0x49, 0x91, 0x1e, 0x97, 0xba, 0xaa, 0x1e, 0xaf, 0x31, 0x75, 0xe2, 0x77, 0x0b,
- 0x94, 0x86, 0x6a, 0x8f, 0x1b, 0x44, 0x7d, 0x7b, 0xae, 0x04, 0x47, 0x93, 0xab, 0x01, 0xfb, 0x08,
- 0x1c, 0x4e, 0xe4, 0x29, 0x86, 0xb7, 0xea, 0xa2, 0xcc, 0x5c, 0x7b, 0x29, 0xa8, 0xfe, 0x2b, 0x99,
- 0x06, 0x36, 0x28, 0x78, 0x62, 0x8e, 0x53, 0xf4, 0xab, 0x34, 0x45, 0x69, 0xc6, 0x50, 0x03, 0x75,
- 0x67, 0x72, 0x8b, 0x5a, 0x41, 0x6d, 0xae, 0x41, 0xf0, 0x3d, 0x78, 0xc3, 0x18, 0x65, 0xce, 0x8b,
- 0x78, 0x57, 0x77, 0x19, 0xb4, 0xbf, 0x19, 0xbf, 0x78, 0x5e, 0x0d, 0x2f, 0xad, 0x37, 0x23, 0x67,
- 0xbf, 0x31, 0x72, 0x67, 0x22, 0x15, 0xa7, 0x23, 0xd5, 0x67, 0x36, 0x37, 0x28, 0xf8, 0xc5, 0x82,
- 0x36, 0xcd, 0x76, 0x23, 0x74, 0xfb, 0x6d, 0xba, 0x70, 0x21, 0x93, 0x9b, 0x68, 0x8a, 0xb2, 0xd2,
- 0x85, 0x0a, 0xab, 0xa4, 0xc3, 0x05, 0xd6, 0x9f, 0x3b, 0x83, 0xa8, 0xd6, 0xf4, 0xb1, 0xa9, 0x7a,
- 0xb9, 0x51, 0x6b, 0x32, 0x73, 0xed, 0x64, 0xef, 0x03, 0x8c, 0x8b, 0x14, 0xe5, 0x70, 0xba, 0x8c,
- 0x56, 0x8a, 0x74, 0x97, 0x37, 0x2c, 0xc1, 0x57, 0xfa, 0xab, 0xb4, 0xa3, 0x10, 0xd6, 0xfe, 0x6f,
- 0xd5, 0x9b, 0x2f, 0x0f, 0xe2, 0xed, 0x73, 0x77, 0xca, 0xb6, 0x0f, 0x3d, 0xf3, 0xaf, 0x54, 0xbf,
- 0x34, 0x23, 0x16, 0x0d, 0x13, 0xe5, 0x7c, 0x51, 0x4c, 0xe2, 0x28, 0x54, 0x39, 0xbb, 0xdc, 0xa0,
- 0x49, 0x47, 0x7d, 0x9f, 0x9f, 0xfc, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xa6, 0xde, 0x3f, 0x80, 0x50,
- 0x0b, 0x00, 0x00,
+ // 1207 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xbc, 0x56, 0x5f, 0x8f, 0xdb, 0x44,
+ 0x10, 0xd7, 0xc6, 0x71, 0x62, 0x4f, 0xae, 0x05, 0x2d, 0x15, 0x35, 0x45, 0x42, 0xc1, 0x02, 0xe9,
+ 0x10, 0xf4, 0x40, 0xad, 0x90, 0x10, 0x0f, 0x48, 0xb9, 0x0b, 0xaa, 0x8e, 0xfe, 0xbb, 0x6e, 0x7a,
+ 0xe5, 0x09, 0x55, 0x1b, 0x67, 0x72, 0xb1, 0xea, 0xd8, 0x66, 0x6d, 0xdf, 0x9d, 0xf9, 0x30, 0x48,
+ 0x48, 0x3c, 0xf1, 0x88, 0x78, 0xe7, 0x15, 0xf1, 0x41, 0xf8, 0x0a, 0xbc, 0xa2, 0xd9, 0x5d, 0x3b,
+ 0x4e, 0x2f, 0x54, 0x7d, 0x81, 0xb7, 0xfd, 0xcd, 0xac, 0x67, 0x77, 0x66, 0x7e, 0xf3, 0xf3, 0xc2,
+ 0xf5, 0x38, 0x2d, 0x51, 0xa5, 0x32, 0x39, 0xc8, 0x55, 0x56, 0x66, 0xdc, 0x6b, 0x70, 0xf8, 0x57,
+ 0x0f, 0x06, 0xb3, 0xac, 0x52, 0x11, 0xf2, 0xeb, 0xd0, 0x3b, 0x9e, 0x06, 0x6c, 0xcc, 0xf6, 0x1d,
+ 0xd1, 0x3b, 0x9e, 0x72, 0x0e, 0xfd, 0x47, 0x72, 0x8d, 0x41, 0x6f, 0xcc, 0xf6, 0x7d, 0xa1, 0xd7,
+ 0x64, 0x7b, 0x5a, 0xe7, 0x18, 0x38, 0xc6, 0x46, 0x6b, 0x7e, 0x0b, 0xbc, 0xd3, 0x82, 0xa2, 0xad,
+ 0x31, 0xe8, 0x6b, 0x7b, 0x8b, 0xc9, 0x77, 0x22, 0x8b, 0xe2, 0x22, 0x53, 0x8b, 0xc0, 0x35, 0xbe,
+ 0x06, 0xf3, 0x37, 0xc1, 0x39, 0x15, 0x0f, 0x82, 0x81, 0x36, 0xd3, 0x92, 0x07, 0x30, 0x9c, 0xe2,
+ 0x52, 0x56, 0x49, 0x19, 0x0c, 0xc7, 0x6c, 0xdf, 0x13, 0x0d, 0xa4, 0x38, 0x4f, 0x31, 0xc1, 0x33,
+ 0x25, 0x97, 0x81, 0x67, 0xe2, 0x34, 0x98, 0x1f, 0x00, 0x3f, 0x4e, 0x0b, 0x8c, 0x2a, 0x85, 0xb3,
+ 0x17, 0x71, 0xfe, 0x0c, 0x55, 0xbc, 0xac, 0x03, 0x5f, 0x07, 0xd8, 0xe1, 0xa1, 0x53, 0x1e, 0x62,
+ 0x29, 0xe9, 0x6c, 0xd0, 0xa1, 0x1a, 0xc8, 0x43, 0xd8, 0x9b, 0xad, 0xa4, 0xc2, 0xc5, 0x0c, 0x23,
+ 0x85, 0x65, 0x30, 0xd2, 0xee, 0x2d, 0x1b, 0xed, 0x79, 0xac, 0xce, 0x64, 0x1a, 0xff, 0x20, 0xcb,
+ 0x38, 0x4b, 0x83, 0x3d, 0xb3, 0xa7, 0x6b, 0xa3, 0x2a, 0x89, 0x2c, 0xc1, 0xe0, 0x9a, 0xa9, 0x12,
+ 0xad, 0xc3, 0xdf, 0x18, 0xf8, 0x53, 0x59, 0xac, 0xe6, 0x99, 0x54, 0x8b, 0xd7, 0xaa, 0xf5, 0x6d,
+ 0x70, 0x23, 0x4c, 0x92, 0x22, 0x70, 0xc6, 0xce, 0xfe, 0xe8, 0xce, 0xcd, 0x83, 0xb6, 0x89, 0x6d,
+ 0x9c, 0x23, 0x4c, 0x12, 0x61, 0x76, 0xf1, 0xcf, 0xc0, 0x2f, 0x71, 0x9d, 0x27, 0xb2, 0xc4, 0x22,
+ 0xe8, 0xeb, 0x4f, 0xf8, 0xe6, 0x93, 0xa7, 0xd6, 0x25, 0x36, 0x9b, 0xae, 0xa4, 0xe2, 0x5e, 0x4d,
+ 0x25, 0xfc, 0xa5, 0x07, 0xd7, 0xb6, 0x8e, 0xe3, 0x7b, 0xc0, 0x2e, 0xf5, 0xcd, 0x5d, 0xc1, 0x2e,
+ 0x09, 0xd5, 0xfa, 0xd6, 0xae, 0x60, 0x35, 0xa1, 0x0b, 0xcd, 0x0d, 0x57, 0xb0, 0x0b, 0x42, 0x2b,
+ 0xcd, 0x08, 0x57, 0xb0, 0x15, 0xff, 0x08, 0x86, 0xdf, 0x57, 0xa8, 0x62, 0x2c, 0x02, 0x57, 0xdf,
+ 0xee, 0x8d, 0xcd, 0xed, 0x9e, 0x54, 0xa8, 0x6a, 0xd1, 0xf8, 0xa9, 0x1a, 0x9a, 0x4d, 0x86, 0x1a,
+ 0x7a, 0x4d, 0xb6, 0x92, 0x98, 0x37, 0x34, 0x36, 0x5a, 0xdb, 0x2a, 0x1a, 0x3e, 0x50, 0x15, 0x3f,
+ 0x87, 0xbe, 0xbc, 0xc4, 0x22, 0xf0, 0x75, 0xfc, 0xf7, 0xff, 0xa5, 0x60, 0x07, 0x93, 0x4b, 0x2c,
+ 0xbe, 0x4e, 0x4b, 0x55, 0x0b, 0xbd, 0xfd, 0xd6, 0x3d, 0xf0, 0x5b, 0x13, 0xb1, 0xf2, 0x05, 0xd6,
+ 0x3a, 0x41, 0x5f, 0xd0, 0x92, 0x7f, 0x00, 0xee, 0xb9, 0x4c, 0x2a, 0xd3, 0x9c, 0xd1, 0x9d, 0xeb,
+ 0x9b, 0xb0, 0x93, 0xcb, 0xb8, 0x10, 0xc6, 0xf9, 0x65, 0xef, 0x0b, 0x16, 0xfe, 0xca, 0xa0, 0x4f,
+ 0x36, 0xaa, 0x6c, 0x82, 0x67, 0x32, 0xaa, 0x0f, 0xb3, 0x2a, 0x5d, 0x14, 0x01, 0x1b, 0x3b, 0xfb,
+ 0x8e, 0xd8, 0xb2, 0xf1, 0xb7, 0x61, 0x30, 0x37, 0xde, 0xde, 0xd8, 0xd9, 0xf7, 0x85, 0x45, 0xfc,
+ 0x06, 0xb8, 0x89, 0x9c, 0x63, 0x62, 0x67, 0xcc, 0x00, 0xda, 0x9d, 0x2b, 0x5c, 0xc6, 0x97, 0x76,
+ 0xc4, 0x2c, 0x22, 0x7b, 0x51, 0x2d, 0xc9, 0x6e, 0xba, 0x67, 0x11, 0x95, 0x6b, 0x2e, 0x8b, 0xb6,
+ 0x84, 0xb4, 0xa6, 0xc8, 0x45, 0x24, 0x93, 0xa6, 0x86, 0x06, 0x84, 0xbf, 0x33, 0x9a, 0x2d, 0xc3,
+ 0x89, 0x0e, 0x2f, 0x4d, 0x45, 0xdf, 0x01, 0x8f, 0xf8, 0xf2, 0xfc, 0x5c, 0x2a, 0xcb, 0xcd, 0x21,
+ 0xe1, 0x67, 0x52, 0xf1, 0x4f, 0x61, 0xa0, 0x33, 0xdf, 0xc1, 0xcf, 0x26, 0xdc, 0x33, 0xf2, 0x0b,
+ 0xbb, 0xad, 0xed, 0x60, 0xbf, 0xd3, 0xc1, 0x36, 0x59, 0xb7, 0x9b, 0xec, 0x6d, 0x70, 0x89, 0x0a,
+ 0xb5, 0xbe, 0xfd, 0xce, 0xc8, 0x86, 0x30, 0x66, 0x57, 0x78, 0x0a, 0xd7, 0xb6, 0x4e, 0x6c, 0x4f,
+ 0x62, 0xdb, 0x27, 0x6d, 0xba, 0xe8, 0xdb, 0xae, 0x91, 0xae, 0x14, 0x98, 0x60, 0x54, 0xe2, 0x42,
+ 0xd7, 0xdb, 0x13, 0x2d, 0x0e, 0x7f, 0x62, 0x9b, 0xb8, 0xfa, 0x3c, 0x52, 0x8e, 0x28, 0x5b, 0xaf,
+ 0x65, 0xba, 0xb0, 0xa1, 0x1b, 0x48, 0x75, 0x5b, 0xcc, 0x6d, 0xe8, 0xde, 0x62, 0x4e, 0x58, 0xe5,
+ 0xb6, 0x83, 0x3d, 0x95, 0xf3, 0x31, 0x8c, 0xd6, 0x28, 0x8b, 0x4a, 0xe1, 0x1a, 0xd3, 0xd2, 0x96,
+ 0xa0, 0x6b, 0xe2, 0x37, 0x61, 0x58, 0xca, 0xb3, 0xe7, 0xc4, 0x3d, 0xdb, 0xc9, 0x52, 0x9e, 0xdd,
+ 0xc7, 0x9a, 0xbf, 0x0b, 0xfe, 0x32, 0xc6, 0x64, 0xa1, 0x5d, 0xa6, 0x9d, 0x9e, 0x36, 0xdc, 0xc7,
+ 0x3a, 0xfc, 0x83, 0xc1, 0x60, 0x86, 0xea, 0x1c, 0xd5, 0x6b, 0x49, 0x4a, 0x57, 0xaa, 0x9d, 0x57,
+ 0x48, 0x75, 0x7f, 0xb7, 0x54, 0xbb, 0x1b, 0xa9, 0xbe, 0x01, 0xee, 0x4c, 0x45, 0xc7, 0x53, 0x7d,
+ 0x23, 0x47, 0x18, 0x40, 0x6c, 0x9c, 0x44, 0x65, 0x7c, 0x8e, 0x56, 0xbf, 0x2d, 0xba, 0xa2, 0x34,
+ 0xde, 0x0e, 0xa5, 0xf9, 0x91, 0xc1, 0xe0, 0x81, 0xac, 0xb3, 0xaa, 0xbc, 0xc2, 0xc2, 0x31, 0x8c,
+ 0x26, 0x79, 0x9e, 0xc4, 0x91, 0xf9, 0xda, 0x64, 0xd4, 0x35, 0xd1, 0x8e, 0x87, 0x9d, 0xfa, 0x9a,
+ 0xdc, 0xba, 0x26, 0x9a, 0xe2, 0x23, 0xad, 0xa6, 0x46, 0x1a, 0x3b, 0x53, 0x6c, 0x44, 0x54, 0x3b,
+ 0xa9, 0x08, 0x93, 0xaa, 0xcc, 0x96, 0x49, 0x76, 0xa1, 0xb3, 0xf5, 0x44, 0x8b, 0xc3, 0x3f, 0x7b,
+ 0xd0, 0xff, 0xbf, 0x14, 0x70, 0x0f, 0x58, 0x6c, 0x9b, 0xcd, 0xe2, 0x56, 0x0f, 0x87, 0x1d, 0x3d,
+ 0x0c, 0x60, 0x58, 0x2b, 0x99, 0x9e, 0x61, 0x11, 0x78, 0x5a, 0x5d, 0x1a, 0xa8, 0x3d, 0x7a, 0x8e,
+ 0x8c, 0x10, 0xfa, 0xa2, 0x81, 0xed, 0x5c, 0x40, 0x67, 0x2e, 0x3e, 0xb1, 0x9a, 0x39, 0xd2, 0x37,
+ 0x0a, 0xb6, 0xcb, 0xf2, 0xdf, 0x49, 0xe5, 0xdf, 0x0c, 0xdc, 0x76, 0xa8, 0x8e, 0xb6, 0x87, 0xea,
+ 0x68, 0x33, 0x54, 0xd3, 0xc3, 0x66, 0xa8, 0xa6, 0x87, 0x84, 0xc5, 0x49, 0x33, 0x54, 0xe2, 0x84,
+ 0x9a, 0x75, 0x4f, 0x65, 0x55, 0x7e, 0x58, 0x9b, 0xae, 0xfa, 0xa2, 0xc5, 0xc4, 0xc4, 0x6f, 0x57,
+ 0xa8, 0x6c, 0xa9, 0x7d, 0x61, 0x11, 0xf1, 0xf6, 0x81, 0x16, 0x1c, 0x53, 0x5c, 0x03, 0xf8, 0x87,
+ 0xe0, 0x0a, 0x2a, 0x9e, 0xae, 0xf0, 0x56, 0x5f, 0xb4, 0x59, 0x18, 0x2f, 0x05, 0x35, 0x6f, 0x25,
+ 0x4b, 0xe0, 0xe6, 0xe5, 0xf4, 0x31, 0x0c, 0x66, 0xab, 0x78, 0x59, 0x36, 0x7f, 0x9e, 0xb7, 0x3a,
+ 0x82, 0x15, 0xaf, 0x51, 0xfb, 0x84, 0xdd, 0x12, 0x3e, 0x01, 0xbf, 0x35, 0x6e, 0xae, 0xc3, 0xba,
+ 0xd7, 0xe1, 0xd0, 0x3f, 0x4d, 0xe3, 0xb2, 0x19, 0x5d, 0x5a, 0x53, 0xb2, 0x4f, 0x2a, 0x99, 0x96,
+ 0x71, 0x59, 0x37, 0xa3, 0xdb, 0xe0, 0xf0, 0xae, 0xbd, 0x3e, 0x85, 0x3b, 0xcd, 0x73, 0x54, 0x56,
+ 0x06, 0x0c, 0xd0, 0x87, 0x64, 0x17, 0x68, 0x14, 0xdc, 0x11, 0x06, 0x84, 0xdf, 0x81, 0x3f, 0x49,
+ 0x50, 0x95, 0xa2, 0x4a, 0xae, 0xea, 0x3e, 0x87, 0xfe, 0x37, 0xb3, 0xc7, 0x8f, 0x9a, 0x1b, 0xd0,
+ 0x7a, 0x33, 0xf2, 0xce, 0x4b, 0x23, 0x7f, 0x5f, 0xe6, 0xf2, 0x78, 0xaa, 0x79, 0xee, 0x08, 0x8b,
+ 0xc2, 0x9f, 0x19, 0xf4, 0x49, 0x5b, 0x3a, 0xa1, 0xfb, 0xaf, 0xd2, 0xa5, 0x13, 0x95, 0x9d, 0xc7,
+ 0x0b, 0x54, 0x4d, 0x72, 0x0d, 0xd6, 0x45, 0x8f, 0x56, 0xd8, 0x3e, 0x2e, 0x2d, 0x22, 0xae, 0xd1,
+ 0xc3, 0xaa, 0x99, 0xa5, 0x0e, 0xd7, 0xc8, 0x2c, 0x8c, 0x93, 0xbf, 0x07, 0x30, 0xab, 0x72, 0x54,
+ 0x93, 0xc5, 0x3a, 0x4e, 0x75, 0xd3, 0x3d, 0xd1, 0xb1, 0x84, 0x5f, 0x99, 0xa7, 0xda, 0x15, 0x85,
+ 0x62, 0xbb, 0x9f, 0x75, 0x2f, 0xdf, 0x3c, 0x4c, 0xb6, 0xbf, 0x7b, 0xad, 0x6c, 0xc7, 0x30, 0xb2,
+ 0xef, 0x5a, 0xfd, 0x4a, 0xb4, 0x62, 0xd5, 0x31, 0x51, 0xce, 0x27, 0xd5, 0x3c, 0x89, 0x23, 0x9d,
+ 0xb3, 0x27, 0x2c, 0x9a, 0x0f, 0xf4, 0xf3, 0xfd, 0xee, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xaa,
+ 0x43, 0x90, 0xf1, 0xd0, 0x0b, 0x00, 0x00,
}
diff --git a/bolt/internal/internal.proto b/bolt/internal/internal.proto
index 0a4b314dc..a56dd62a3 100644
--- a/bolt/internal/internal.proto
+++ b/bolt/internal/internal.proto
@@ -26,15 +26,15 @@ message Dashboard {
}
message DashboardCell {
- int32 x = 1; // X-coordinate of Cell in the Dashboard
- int32 y = 2; // Y-coordinate of Cell in the Dashboard
- int32 w = 3; // Width of Cell in the Dashboard
- int32 h = 4; // Height of Cell in the Dashboard
- repeated Query queries = 5; // Time-series data queries for Dashboard
- string name = 6; // User-facing name for this Dashboard
- string type = 7; // Dashboard visualization type
- string ID = 8; // id is the unique id of the dashboard. MIGRATED FIELD added in 1.2.0-beta6
- map axes = 9; // Axes represent the graphical viewport for a cell's visualizations
+ int32 x = 1; // X-coordinate of Cell in the Dashboard
+ int32 y = 2; // Y-coordinate of Cell in the Dashboard
+ int32 w = 3; // Width of Cell in the Dashboard
+ int32 h = 4; // Height of Cell in the Dashboard
+ repeated Query queries = 5; // Time-series data queries for Dashboard
+ string name = 6; // User-facing name for this Dashboard
+ string type = 7; // Dashboard visualization type
+ string ID = 8; // id is the unique id of the dashboard. MIGRATED FIELD added in 1.2.0-beta6
+ map axes = 9; // Axes represent the graphical viewport for a cell's visualizations
}
message Axis {
@@ -57,18 +57,18 @@ message Template {
}
message TemplateValue {
- string type = 1; // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
- string value = 2; // Value is the specific value used to replace a template in an InfluxQL query
- bool selected = 3; // Selected states that this variable has been picked to use for replacement
+ string type = 1; // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
+ string value = 2; // Value is the specific value used to replace a template in an InfluxQL query
+ bool selected = 3; // Selected states that this variable has been picked to use for replacement
}
message TemplateQuery {
- string command = 1; // Command is the query itself
- string db = 2; // DB the database for the query (optional)
- string rp = 3; // RP is a retention policy and optional;
- string measurement = 4; // Measurement is the optinally selected measurement for the query
- string tag_key = 5; // TagKey is the optionally selected tag key for the query
- string field_key = 6; // FieldKey is the optionally selected field key for the query
+ string command = 1; // Command is the query itself
+ string db = 2; // DB the database for the query (optional)
+ string rp = 3; // RP is a retention policy and is optional
+ string measurement = 4; // Measurement is the optionally selected measurement for the query
+ string tag_key = 5; // TagKey is the optionally selected tag key for the query
+ string field_key = 6; // FieldKey is the optionally selected field key for the query
}
message Server {
@@ -105,26 +105,33 @@ message Cell {
}
message Query {
- string Command = 1; // Command is the query itself
- string DB = 2; // DB the database for the query (optional)
- string RP = 3; // RP is a retention policy and optional;
- repeated string GroupBys= 4; // GroupBys define the groups to combine in the query
- repeated string Wheres = 5; // Wheres define the restrictions on the query
- string Label = 6; // Label is the name of the Y-Axis
- Range Range = 7; // Range is the upper and lower bound of the Y-Axis
- string Source = 8; // Source is the optional URI to the data source
+ string Command = 1; // Command is the query itself
+ string DB = 2; // DB the database for the query (optional)
+ string RP = 3; // RP is a retention policy and is optional
+ repeated string GroupBys = 4; // GroupBys define the groups to combine in the query
+ repeated string Wheres = 5; // Wheres define the restrictions on the query
+ string Label = 6; // Label is the name of the Y-Axis
+ Range Range = 7; // Range is the upper and lower bound of the Y-Axis
+ string Source = 8; // Source is the optional URI to the data source
+ repeated TimeShift Shifts = 9; // TimeShift represents a shift to apply to an influxql query's time range
+}
+
+message TimeShift {
+ string Label = 1; // Label is the user-facing description of the shift
+ string Unit = 2; // Unit is the InfluxQL time unit representation, e.g. ms, s, m, h, d
+ string Quantity = 3; // Quantity is the number of units
}
message Range {
- int64 Upper = 1; // Upper is the upper-bound of the range
- int64 Lower = 2; // Lower is the lower-bound of the range
+ int64 Upper = 1; // Upper is the upper-bound of the range
+ int64 Lower = 2; // Lower is the lower-bound of the range
}
message AlertRule {
- string ID = 1; // ID is the unique ID of this alert rule
- string JSON = 2; // JSON byte representation of the alert
- int64 SrcID = 3; // SrcID is the id of the source this alert is associated with
- int64 KapaID = 4; // KapaID is the id of the kapacitor this alert is associated with
+ string ID = 1; // ID is the unique ID of this alert rule
+ string JSON = 2; // JSON byte representation of the alert
+ int64 SrcID = 3; // SrcID is the id of the source this alert is associated with
+ int64 KapaID = 4; // KapaID is the id of the kapacitor this alert is associated with
}
message User {
diff --git a/bolt/internal/internal_test.go b/bolt/internal/internal_test.go
index 244dd471c..b94ea5c7e 100644
--- a/bolt/internal/internal_test.go
+++ b/bolt/internal/internal_test.go
@@ -163,6 +163,7 @@ func Test_MarshalDashboard(t *testing.T) {
Upper: int64(100),
},
Source: "/chronograf/v1/sources/1",
+ Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@@ -210,6 +211,7 @@ func Test_MarshalDashboard_WithLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
+ Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@@ -241,6 +243,7 @@ func Test_MarshalDashboard_WithLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
+ Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@@ -285,6 +288,7 @@ func Test_MarshalDashboard_WithEmptyLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
+ Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@@ -316,6 +320,7 @@ func Test_MarshalDashboard_WithEmptyLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
+ Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
diff --git a/canned/mysql.json b/canned/mysql.json
index 78f7eb167..a82ec98be 100644
--- a/canned/mysql.json
+++ b/canned/mysql.json
@@ -13,7 +13,7 @@
"name": "MySQL – Reads/Second",
"queries": [
{
- "query": "SELECT non_negative_derivative(max(\"commands_select\")) AS selects_per_second FROM mysql",
+ "query": "SELECT non_negative_derivative(last(\"commands_select\"), 1s) AS selects_per_second FROM mysql",
"groupbys": [
"\"server\""
],
@@ -30,7 +30,7 @@
"name": "MySQL – Writes/Second",
"queries": [
{
- "query": "SELECT non_negative_derivative(max(\"commands_insert\")) AS inserts_per_second, non_negative_derivative(max(\"commands_update\")) AS updates_per_second, non_negative_derivative(max(\"commands_delete\")) AS deletes_per_second FROM mysql",
+ "query": "SELECT non_negative_derivative(last(\"commands_insert\"), 1s) AS inserts_per_second, non_negative_derivative(last(\"commands_update\"), 1s) AS updates_per_second, non_negative_derivative(last(\"commands_delete\"), 1s) AS deletes_per_second FROM mysql",
"groupbys": [
"\"server\""
],
@@ -47,7 +47,7 @@
"name": "MySQL – Connections/Second",
"queries": [
{
- "query": "SELECT non_negative_derivative(max(\"threads_connected\")) AS cxn_per_second, non_negative_derivative(max(\"threads_running\")) AS threads_running_per_second FROM mysql",
+ "query": "SELECT non_negative_derivative(last(\"threads_connected\"), 1s) AS cxn_per_second, non_negative_derivative(last(\"threads_running\"), 1s) AS threads_running_per_second FROM mysql",
"groupbys": [
"\"server\""
],
@@ -64,7 +64,7 @@
"name": "MySQL – Connections Errors/Second",
"queries": [
{
- "query": "SELECT non_negative_derivative(max(\"connection_errors_max_connections\")) AS cxn_errors_per_second, non_negative_derivative(max(\"connection_errors_internal\")) AS internal_cxn_errors_per_second, non_negative_derivative(max(\"aborted_connects\")) AS cxn_aborted_per_second FROM mysql",
+ "query": "SELECT non_negative_derivative(last(\"connection_errors_max_connections\"), 1s) AS cxn_errors_per_second, non_negative_derivative(last(\"connection_errors_internal\"), 1s) AS internal_cxn_errors_per_second, non_negative_derivative(last(\"aborted_connects\"), 1s) AS cxn_aborted_per_second FROM mysql",
"groupbys": [
"\"server\""
],
diff --git a/chronograf.go b/chronograf.go
index 2d7073aa5..9dd3cb458 100644
--- a/chronograf.go
+++ b/chronograf.go
@@ -1,21 +1,10 @@
package chronograf
import (
- "bytes"
"context"
- "encoding/json"
- "errors"
- "fmt"
"io"
"net/http"
- "regexp"
- "strconv"
- "strings"
"time"
- "unicode"
- "unicode/utf8"
-
- "github.com/influxdata/influxdb/influxql"
)
// General errors.
@@ -141,196 +130,17 @@ type Range struct {
Lower int64 `json:"lower"` // Lower is the lower bound
}
-type TemplateVariable interface {
- fmt.Stringer
- Name() string // returns the variable name
- Precedence() uint // ordinal indicating precedence level for replacement
-}
-
-type ExecutableVar interface {
- Exec(string)
-}
-
// TemplateValue is a value use to replace a template in an InfluxQL query
-type BasicTemplateValue struct {
+type TemplateValue struct {
Value string `json:"value"` // Value is the specific value used to replace a template in an InfluxQL query
Type string `json:"type"` // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
Selected bool `json:"selected"` // Selected states that this variable has been picked to use for replacement
}
// TemplateVar is a named variable within an InfluxQL query to be replaced with Values
-type BasicTemplateVar struct {
- Var string `json:"tempVar"` // Var is the string to replace within InfluxQL
- Values []BasicTemplateValue `json:"values"` // Values are the replacement values within InfluxQL
-}
-
-func (t BasicTemplateVar) Name() string {
- return t.Var
-}
-
-// String converts the template variable into a correct InfluxQL string based
-// on its type
-func (t BasicTemplateVar) String() string {
- if len(t.Values) == 0 {
- return ""
- }
- switch t.Values[0].Type {
- case "tagKey", "fieldKey", "measurement", "database":
- return `"` + t.Values[0].Value + `"`
- case "tagValue", "timeStamp":
- return `'` + t.Values[0].Value + `'`
- case "csv", "constant":
- return t.Values[0].Value
- default:
- return ""
- }
-}
-
-func (t BasicTemplateVar) Precedence() uint {
- return 0
-}
-
-type GroupByVar struct {
- Var string `json:"tempVar"` // the name of the variable as present in the query
- Duration time.Duration `json:"duration,omitempty"` // the Duration supplied by the query
- Resolution uint `json:"resolution"` // the available screen resolution to render the results of this query
- ReportingInterval time.Duration `json:"reportingInterval,omitempty"` // the interval at which data is reported to this series
-}
-
-// Exec is responsible for extracting the Duration from the query
-func (g *GroupByVar) Exec(query string) {
- whereClause := "WHERE"
- start := strings.Index(query, whereClause)
- if start == -1 {
- // no where clause
- return
- }
-
- // reposition start to after the 'where' keyword
- durStr := query[start+len(whereClause):]
-
- // attempt to parse out a relative time range
- // locate duration literal start
- prefix := "time > now() - "
- lowerDuration, err := g.parseRelative(durStr, prefix)
- if err == nil {
- prefix := "time < now() - "
- upperDuration, err := g.parseRelative(durStr, prefix)
- if err != nil {
- g.Duration = lowerDuration
- return
- }
- g.Duration = lowerDuration - upperDuration
- if g.Duration < 0 {
- g.Duration = -g.Duration
- }
- }
-
- dur, err := g.parseAbsolute(durStr)
- if err == nil {
- // we found an absolute time range
- g.Duration = dur
- }
-}
-
-// parseRelative locates and extracts a duration value from a fragment of an
-// InfluxQL query following the "where" keyword. For example, in the fragment
-// "time > now() - 180d GROUP BY :interval:", parseRelative would return a
-// duration equal to 180d
-func (g *GroupByVar) parseRelative(fragment string, prefix string) (time.Duration, error) {
- start := strings.Index(fragment, prefix)
- if start == -1 {
- return time.Duration(0), errors.New("not a relative duration")
- }
-
- // reposition to duration literal
- durFragment := fragment[start+len(prefix):]
-
- // init counters
- pos := 0
-
- // locate end of duration literal
- for pos < len(durFragment) {
- rn, _ := utf8.DecodeRuneInString(durFragment[pos:])
- if unicode.IsSpace(rn) {
- break
- }
- pos++
- }
-
- // attempt to parse what we suspect is a duration literal
- dur, err := influxql.ParseDuration(durFragment[:pos])
- if err != nil {
- return dur, err
- }
-
- return dur, nil
-}
-
-// parseAbsolute will determine the duration between two absolute timestamps
-// found within an InfluxQL fragment following the "where" keyword. For
-// example, the fragement "time > '1985-10-25T00:01:21-0800 and time <
-// '1985-10-25T00:01:22-0800'" would yield a duration of 1m'
-func (g *GroupByVar) parseAbsolute(fragment string) (time.Duration, error) {
- timePtn := `time\s[>|<]\s'([0-9\-T\:\.Z]+)'` // Playground: http://gobular.com/x/208f66bd-1889-4269-ab47-1efdfeeb63f0
- re, err := regexp.Compile(timePtn)
- if err != nil {
- // this is a developer error and should complain loudly
- panic("Bad Regex: err:" + err.Error())
- }
-
- if !re.Match([]byte(fragment)) {
- return time.Duration(0), errors.New("absolute duration not found")
- }
-
- // extract at most two times
- matches := re.FindAll([]byte(fragment), 2)
-
- // parse out absolute times
- durs := make([]time.Time, 0, 2)
- for _, match := range matches {
- durStr := re.FindSubmatch(match)
- if tm, err := time.Parse(time.RFC3339Nano, string(durStr[1])); err == nil {
- durs = append(durs, tm)
- }
- }
-
- if len(durs) == 1 {
- durs = append(durs, time.Now())
- }
-
- // reject more than 2 times found
- if len(durs) != 2 {
- return time.Duration(0), errors.New("must provide exactly two absolute times")
- }
-
- dur := durs[1].Sub(durs[0])
-
- return dur, nil
-}
-
-func (g *GroupByVar) String() string {
- // The function is: ((total_seconds * millisecond_converstion) / group_by) = pixels / 3
- // Number of points given the pixels
- pixels := float64(g.Resolution) / 3.0
- msPerPixel := float64(g.Duration/time.Millisecond) / pixels
- secPerPixel := float64(g.Duration/time.Second) / pixels
- if secPerPixel < 1.0 {
- if msPerPixel < 1.0 {
- msPerPixel = 1.0
- }
- return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
- }
- // If groupby is more than 1 second round to the second
- return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
-}
-
-func (g *GroupByVar) Name() string {
- return g.Var
-}
-
-func (g *GroupByVar) Precedence() uint {
- return 1
+type TemplateVar struct {
+ Var string `json:"tempVar"` // Var is the string to replace within InfluxQL
+ Values []TemplateValue `json:"values"` // Values are the replacement values within InfluxQL
}
// TemplateID is the unique ID used to identify a template
@@ -338,7 +148,7 @@ type TemplateID string
// Template represents a series of choices to replace TemplateVars within InfluxQL
type Template struct {
- BasicTemplateVar
+ TemplateVar
ID TemplateID `json:"id"` // ID is the unique ID associated with this template
Type string `json:"type"` // Type can be fieldKeys, tagKeys, tagValues, CSV, constant, query, measurements, databases
Label string `json:"label"` // Label is a user-facing description of the Template
@@ -347,69 +157,15 @@ type Template struct {
// Query retrieves a Response from a TimeSeries.
type Query struct {
- Command string `json:"query"` // Command is the query itself
- DB string `json:"db,omitempty"` // DB is optional and if empty will not be used.
- RP string `json:"rp,omitempty"` // RP is a retention policy and optional; if empty will not be used.
- TemplateVars TemplateVars `json:"tempVars,omitempty"` // TemplateVars are template variables to replace within an InfluxQL query
- Wheres []string `json:"wheres,omitempty"` // Wheres restricts the query to certain attributes
- GroupBys []string `json:"groupbys,omitempty"` // GroupBys collate the query by these tags
- Resolution uint `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
- Label string `json:"label,omitempty"` // Label is the Y-Axis label for the data
- Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
-}
-
-// TemplateVars are a heterogeneous collection of different TemplateVariables
-// with the capability to decode arbitrary JSON into the appropriate template
-// variable type
-type TemplateVars []TemplateVariable
-
-func (t *TemplateVars) UnmarshalJSON(text []byte) error {
- // TODO: Need to test that server throws an error when :interval:'s Resolution or ReportingInterval or zero-value
- rawVars := bytes.NewReader(text)
- dec := json.NewDecoder(rawVars)
-
- // read open bracket
- rawTok, err := dec.Token()
- if err != nil {
- return err
- }
-
- tok, isDelim := rawTok.(json.Delim)
- if !isDelim || tok != '[' {
- return errors.New("Expected JSON array, but found " + tok.String())
- }
-
- for dec.More() {
- var halfBakedVar json.RawMessage
- err := dec.Decode(&halfBakedVar)
- if err != nil {
- return err
- }
-
- var agb GroupByVar
- err = json.Unmarshal(halfBakedVar, &agb)
- if err != nil {
- return err
- }
-
- // ensure that we really have a GroupByVar
- if agb.Resolution != 0 {
- (*t) = append(*t, &agb)
- continue
- }
-
- var tvar BasicTemplateVar
- err = json.Unmarshal(halfBakedVar, &tvar)
- if err != nil {
- return err
- }
-
- // ensure that we really have a BasicTemplateVar
- if len(tvar.Values) != 0 {
- (*t) = append(*t, tvar)
- }
- }
- return nil
+ Command string `json:"query"` // Command is the query itself
+ DB string `json:"db,omitempty"` // DB is optional and if empty will not be used.
+ RP string `json:"rp,omitempty"` // RP is a retention policy and optional; if empty will not be used.
+ TemplateVars []TemplateVar `json:"tempVars,omitempty"` // TemplateVars are template variables to replace within an InfluxQL query
+ Wheres []string `json:"wheres,omitempty"` // Wheres restricts the query to certain attributes
+ GroupBys []string `json:"groupbys,omitempty"` // GroupBys collate the query by these tags
+ Resolution uint `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
+ Label string `json:"label,omitempty"` // Label is the Y-Axis label for the data
+ Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
}
// DashboardQuery includes state for the query builder. This is a transition
@@ -420,6 +176,7 @@ type DashboardQuery struct {
Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
QueryConfig QueryConfig `json:"queryConfig,omitempty"` // QueryConfig represents the query state that is understood by the data explorer
Source string `json:"source"` // Source is the optional URI to the data source for this queryConfig
+ Shifts []TimeShift `json:"-"` // Shifts represents shifts to apply to an influxql query's time range. Clients expect the shift to be in the generated QueryConfig
}
// TemplateQuery is used to retrieve choices for template replacement
@@ -535,6 +292,13 @@ type DurationRange struct {
Lower string `json:"lower"`
}
+// TimeShift represents a shift to apply to an influxql query's time range
+type TimeShift struct {
+ Label string `json:"label"` // Label is the user-facing description of the shift
+ Unit string `json:"unit"` // Unit is the influxql time unit (e.g. ms, s, m, h, d)
+ Quantity string `json:"quantity"` // Quantity is the number of units to shift by
+}
+
// QueryConfig represents UI query from the data explorer
type QueryConfig struct {
ID string `json:"id,omitempty"`
@@ -548,6 +312,7 @@ type QueryConfig struct {
Fill string `json:"fill,omitempty"`
RawText *string `json:"rawText"`
Range *DurationRange `json:"range"`
+ Shifts []TimeShift `json:"shifts"`
}
// KapacitorNode adds arguments and properties to an alert
diff --git a/chronograf_test.go b/chronograf_test.go
deleted file mode 100644
index 850674a4d..000000000
--- a/chronograf_test.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package chronograf_test
-
-import (
- "testing"
-
- "github.com/influxdata/chronograf"
-)
-
-func Test_GroupByVar(t *testing.T) {
- gbvTests := []struct {
- name string
- query string
- want string
- resolution uint // the screen resolution to render queries into
- }{
- {
- name: "relative time only lower bound with one day of duration",
- query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
- resolution: 1000,
- want: "time(259s)",
- },
- {
- name: "relative time with relative upper bound with one minute of duration",
- query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
- resolution: 1000,
- want: "time(180ms)",
- },
- {
- name: "relative time with relative lower bound and now upper with one day of duration",
- query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
- resolution: 1000,
- want: "time(259s)",
- },
- {
- name: "absolute time with one minute of duration",
- query: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
- resolution: 1000,
- want: "time(180ms)",
- },
- {
- name: "absolute time with nano seconds and zero duraiton",
- query: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
- resolution: 1000,
- want: "time(1ms)",
- },
- }
-
- for _, test := range gbvTests {
- t.Run(test.name, func(t *testing.T) {
- gbv := chronograf.GroupByVar{
- Var: ":interval:",
- Resolution: test.resolution,
- }
-
- gbv.Exec(test.query)
- got := gbv.String()
-
- if got != test.want {
- t.Fatalf("%q - durations not equal! Want: %s, Got: %s", test.name, test.want, got)
- }
- })
- }
-}
diff --git a/enterprise/enterprise.go b/enterprise/enterprise.go
index 7776b89de..b1a7e5cf5 100644
--- a/enterprise/enterprise.go
+++ b/enterprise/enterprise.go
@@ -51,13 +51,13 @@ type Client struct {
}
// NewClientWithTimeSeries initializes a Client with a known set of TimeSeries.
-func NewClientWithTimeSeries(lg chronograf.Logger, mu, username, password string, tls bool, series ...chronograf.TimeSeries) (*Client, error) {
+func NewClientWithTimeSeries(lg chronograf.Logger, mu string, authorizer influx.Authorizer, tls bool, series ...chronograf.TimeSeries) (*Client, error) {
metaURL, err := parseMetaURL(mu, tls)
if err != nil {
return nil, err
}
- metaURL.User = url.UserPassword(username, password)
- ctrl := NewMetaClient(metaURL)
+
+ ctrl := NewMetaClient(metaURL, authorizer)
c := &Client{
Ctrl: ctrl,
UsersStore: &UserStore{
@@ -83,15 +83,15 @@ func NewClientWithTimeSeries(lg chronograf.Logger, mu, username, password string
// NewClientWithURL initializes an Enterprise client with a URL to a Meta Node.
// Acceptable URLs include host:port combinations as well as scheme://host:port
// varieties. TLS is used when the URL contains "https" or when the TLS
-// parameter is set. The latter option is provided for host:port combinations
-// Username and Password are used for Basic Auth
-func NewClientWithURL(mu, username, password string, tls bool, lg chronograf.Logger) (*Client, error) {
+// parameter is set. authorizer will add the correct `Authorization` headers
+// on the out-bound request.
+func NewClientWithURL(mu string, authorizer influx.Authorizer, tls bool, lg chronograf.Logger) (*Client, error) {
metaURL, err := parseMetaURL(mu, tls)
if err != nil {
return nil, err
}
- metaURL.User = url.UserPassword(username, password)
- ctrl := NewMetaClient(metaURL)
+
+ ctrl := NewMetaClient(metaURL, authorizer)
return &Client{
Ctrl: ctrl,
UsersStore: &UserStore{
diff --git a/enterprise/enterprise_test.go b/enterprise/enterprise_test.go
index 20aa0c873..033a51872 100644
--- a/enterprise/enterprise_test.go
+++ b/enterprise/enterprise_test.go
@@ -9,6 +9,7 @@ import (
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/enterprise"
+ "github.com/influxdata/chronograf/influx"
"github.com/influxdata/chronograf/log"
)
@@ -75,7 +76,16 @@ func Test_Enterprise_IssuesQueries(t *testing.T) {
func Test_Enterprise_AdvancesDataNodes(t *testing.T) {
m1 := NewMockTimeSeries("http://host-1.example.com:8086")
m2 := NewMockTimeSeries("http://host-2.example.com:8086")
- cl, err := enterprise.NewClientWithTimeSeries(log.New(log.DebugLevel), "http://meta.example.com:8091", "marty", "thelake", false, chronograf.TimeSeries(m1), chronograf.TimeSeries(m2))
+ cl, err := enterprise.NewClientWithTimeSeries(
+ log.New(log.DebugLevel),
+ "http://meta.example.com:8091",
+ &influx.BasicAuth{
+ Username: "marty",
+ Password: "thelake",
+ },
+ false,
+ chronograf.TimeSeries(m1),
+ chronograf.TimeSeries(m2))
if err != nil {
t.Error("Unexpected error while initializing client: err:", err)
}
@@ -124,7 +134,14 @@ func Test_Enterprise_NewClientWithURL(t *testing.T) {
}
for _, testURL := range urls {
- _, err := enterprise.NewClientWithURL(testURL.url, testURL.username, testURL.password, testURL.tls, log.New(log.DebugLevel))
+ _, err := enterprise.NewClientWithURL(
+ testURL.url,
+ &influx.BasicAuth{
+ Username: testURL.username,
+ Password: testURL.password,
+ },
+ testURL.tls,
+ log.New(log.DebugLevel))
if err != nil && !testURL.shouldErr {
t.Errorf("Unexpected error creating Client with URL %s and TLS preference %t. err: %s", testURL.url, testURL.tls, err.Error())
} else if err == nil && testURL.shouldErr {
@@ -135,7 +152,14 @@ func Test_Enterprise_NewClientWithURL(t *testing.T) {
func Test_Enterprise_ComplainsIfNotOpened(t *testing.T) {
m1 := NewMockTimeSeries("http://host-1.example.com:8086")
- cl, err := enterprise.NewClientWithTimeSeries(log.New(log.DebugLevel), "http://meta.example.com:8091", "docbrown", "1.21 gigawatts", false, chronograf.TimeSeries(m1))
+ cl, err := enterprise.NewClientWithTimeSeries(
+ log.New(log.DebugLevel),
+ "http://meta.example.com:8091",
+ &influx.BasicAuth{
+ Username: "docbrown",
+ Password: "1.21 gigawatts",
+ },
+ false, chronograf.TimeSeries(m1))
if err != nil {
t.Error("Expected ErrUnitialized, but was this err:", err)
}
diff --git a/enterprise/meta.go b/enterprise/meta.go
index f27012224..984c802c2 100644
--- a/enterprise/meta.go
+++ b/enterprise/meta.go
@@ -11,31 +11,32 @@ import (
"net/url"
"github.com/influxdata/chronograf"
+ "github.com/influxdata/chronograf/influx"
)
type client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
// MetaClient represents a Meta node in an Influx Enterprise cluster
type MetaClient struct {
- URL *url.URL
- client client
+ URL *url.URL
+ client client
+ authorizer influx.Authorizer
}
-type ClientBuilder func() client
-
// NewMetaClient represents a meta node in an Influx Enterprise cluster
-func NewMetaClient(url *url.URL) *MetaClient {
+func NewMetaClient(url *url.URL, authorizer influx.Authorizer) *MetaClient {
return &MetaClient{
- URL: url,
- client: &defaultClient{},
+ URL: url,
+ client: &defaultClient{},
+ authorizer: authorizer,
}
}
// ShowCluster returns the cluster configuration (not health)
func (m *MetaClient) ShowCluster(ctx context.Context) (*Cluster, error) {
- res, err := m.Do(ctx, "GET", "/show-cluster", nil, nil)
+ res, err := m.Do(ctx, "/show-cluster", "GET", m.authorizer, nil, nil)
if err != nil {
return nil, err
}
@@ -56,7 +57,7 @@ func (m *MetaClient) Users(ctx context.Context, name *string) (*Users, error) {
if name != nil {
params["name"] = *name
}
- res, err := m.Do(ctx, "GET", "/user", params, nil)
+ res, err := m.Do(ctx, "/user", "GET", m.authorizer, params, nil)
if err != nil {
return nil, err
}
@@ -118,39 +119,10 @@ func (m *MetaClient) DeleteUser(ctx context.Context, name string) error {
return m.Post(ctx, "/user", a, nil)
}
-// RemoveAllUserPerms revokes all permissions for a user in Influx Enterprise
-func (m *MetaClient) RemoveAllUserPerms(ctx context.Context, name string) error {
- user, err := m.User(ctx, name)
- if err != nil {
- return err
- }
-
- // No permissions to remove
- if len(user.Permissions) == 0 {
- return nil
- }
-
+// RemoveUserPerms revokes permissions for a user in Influx Enterprise
+func (m *MetaClient) RemoveUserPerms(ctx context.Context, name string, perms Permissions) error {
a := &UserAction{
Action: "remove-permissions",
- User: user,
- }
- return m.Post(ctx, "/user", a, nil)
-}
-
-// SetUserPerms removes all permissions and then adds the requested perms
-func (m *MetaClient) SetUserPerms(ctx context.Context, name string, perms Permissions) error {
- err := m.RemoveAllUserPerms(ctx, name)
- if err != nil {
- return err
- }
-
- // No permissions to add, so, user is in the right state
- if len(perms) == 0 {
- return nil
- }
-
- a := &UserAction{
- Action: "add-permissions",
User: &User{
Name: name,
Permissions: perms,
@@ -159,6 +131,38 @@ func (m *MetaClient) SetUserPerms(ctx context.Context, name string, perms Permis
return m.Post(ctx, "/user", a, nil)
}
+// SetUserPerms removes permissions not in set and then adds the requested perms
+func (m *MetaClient) SetUserPerms(ctx context.Context, name string, perms Permissions) error {
+ user, err := m.User(ctx, name)
+ if err != nil {
+ return err
+ }
+
+ revoke, add := permissionsDifference(perms, user.Permissions)
+
+ // first, revoke all the permissions the user currently has but
+ // shouldn't...
+ if len(revoke) > 0 {
+ err := m.RemoveUserPerms(ctx, name, revoke)
+ if err != nil {
+ return err
+ }
+ }
+
+ // ... next, add any permissions the user should have
+ if len(add) > 0 {
+ a := &UserAction{
+ Action: "add-permissions",
+ User: &User{
+ Name: name,
+ Permissions: add,
+ },
+ }
+ return m.Post(ctx, "/user", a, nil)
+ }
+ return nil
+}
+
// UserRoles returns a map of users to all of their current roles
func (m *MetaClient) UserRoles(ctx context.Context) (map[string]Roles, error) {
res, err := m.Roles(ctx, nil)
@@ -186,7 +190,7 @@ func (m *MetaClient) Roles(ctx context.Context, name *string) (*Roles, error) {
if name != nil {
params["name"] = *name
}
- res, err := m.Do(ctx, "GET", "/role", params, nil)
+ res, err := m.Do(ctx, "/role", "GET", m.authorizer, params, nil)
if err != nil {
return nil, err
}
@@ -235,39 +239,10 @@ func (m *MetaClient) DeleteRole(ctx context.Context, name string) error {
return m.Post(ctx, "/role", a, nil)
}
-// RemoveAllRolePerms removes all permissions from a role
-func (m *MetaClient) RemoveAllRolePerms(ctx context.Context, name string) error {
- role, err := m.Role(ctx, name)
- if err != nil {
- return err
- }
-
- // No permissions to remove
- if len(role.Permissions) == 0 {
- return nil
- }
-
+// RemoveRolePerms revokes permissions from a role
+func (m *MetaClient) RemoveRolePerms(ctx context.Context, name string, perms Permissions) error {
a := &RoleAction{
Action: "remove-permissions",
- Role: role,
- }
- return m.Post(ctx, "/role", a, nil)
-}
-
-// SetRolePerms removes all permissions and then adds the requested perms to role
-func (m *MetaClient) SetRolePerms(ctx context.Context, name string, perms Permissions) error {
- err := m.RemoveAllRolePerms(ctx, name)
- if err != nil {
- return err
- }
-
- // No permissions to add, so, role is in the right state
- if len(perms) == 0 {
- return nil
- }
-
- a := &RoleAction{
- Action: "add-permissions",
Role: &Role{
Name: name,
Permissions: perms,
@@ -276,7 +251,39 @@ func (m *MetaClient) SetRolePerms(ctx context.Context, name string, perms Permis
return m.Post(ctx, "/role", a, nil)
}
-// SetRoleUsers removes all users and then adds the requested users to role
+// SetRolePerms removes permissions not in set and then adds the requested perms to role
+func (m *MetaClient) SetRolePerms(ctx context.Context, name string, perms Permissions) error {
+ role, err := m.Role(ctx, name)
+ if err != nil {
+ return err
+ }
+
+ revoke, add := permissionsDifference(perms, role.Permissions)
+
+ // first, revoke all the permissions the role currently has but
+ // shouldn't...
+ if len(revoke) > 0 {
+ err := m.RemoveRolePerms(ctx, name, revoke)
+ if err != nil {
+ return err
+ }
+ }
+
+ // ... next, add any permissions the role should have
+ if len(add) > 0 {
+ a := &RoleAction{
+ Action: "add-permissions",
+ Role: &Role{
+ Name: name,
+ Permissions: add,
+ },
+ }
+ return m.Post(ctx, "/role", a, nil)
+ }
+ return nil
+}
+
+// SetRoleUsers removes users not in role and then adds the requested users to role
func (m *MetaClient) SetRoleUsers(ctx context.Context, name string, users []string) error {
role, err := m.Role(ctx, name)
if err != nil {
@@ -320,6 +327,29 @@ func Difference(wants []string, haves []string) (revoke []string, add []string)
return
}
+func permissionsDifference(wants Permissions, haves Permissions) (revoke Permissions, add Permissions) {
+ revoke = make(Permissions)
+ add = make(Permissions)
+ for scope, want := range wants {
+ have, ok := haves[scope]
+ if ok {
+ r, a := Difference(want, have)
+ revoke[scope] = r
+ add[scope] = a
+ } else {
+ add[scope] = want
+ }
+ }
+
+ for scope, have := range haves {
+ _, ok := wants[scope]
+ if !ok {
+ revoke[scope] = have
+ }
+ }
+ return
+}
+
// AddRoleUsers updates a role to have additional users.
func (m *MetaClient) AddRoleUsers(ctx context.Context, name string, users []string) error {
// No permissions to add, so, role is in the right state
@@ -361,7 +391,7 @@ func (m *MetaClient) Post(ctx context.Context, path string, action interface{},
return err
}
body := bytes.NewReader(b)
- _, err = m.Do(ctx, "POST", path, params, body)
+ _, err = m.Do(ctx, path, "POST", m.authorizer, params, body)
if err != nil {
return err
}
@@ -373,7 +403,7 @@ type defaultClient struct {
}
// Do is a helper function to interface with Influx Enterprise's Meta API
-func (d *defaultClient) Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error) {
+func (d *defaultClient) Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
p := url.Values{}
for k, v := range params {
p.Add(k, v)
@@ -391,15 +421,23 @@ func (d *defaultClient) Do(URL *url.URL, path, method string, params map[string]
if err != nil {
return nil, err
}
+
if body != nil {
req.Header.Set("Content-Type", "application/json")
}
+ if authorizer != nil {
+ if err = authorizer.Set(req); err != nil {
+ return nil, err
+ }
+ }
+
// Meta servers will redirect (307) to leader. We need
// special handling to preserve authentication headers.
client := &http.Client{
CheckRedirect: d.AuthedCheckRedirect,
}
+
res, err := client.Do(req)
if err != nil {
return nil, err
@@ -437,14 +475,14 @@ func (d *defaultClient) AuthedCheckRedirect(req *http.Request, via []*http.Reque
}
// Do is a cancelable function to interface with Influx Enterprise's Meta API
-func (m *MetaClient) Do(ctx context.Context, method, path string, params map[string]string, body io.Reader) (*http.Response, error) {
+func (m *MetaClient) Do(ctx context.Context, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
type result struct {
Response *http.Response
Err error
}
resps := make(chan (result))
go func() {
- resp, err := m.client.Do(m.URL, path, method, params, body)
+ resp, err := m.client.Do(m.URL, path, method, authorizer, params, body)
resps <- result{resp, err}
}()
diff --git a/enterprise/meta_test.go b/enterprise/meta_test.go
index fd1a22301..7cdd6fa81 100644
--- a/enterprise/meta_test.go
+++ b/enterprise/meta_test.go
@@ -11,13 +11,16 @@ import (
"net/url"
"reflect"
"testing"
+ "time"
+
+ "github.com/influxdata/chronograf/influx"
)
func TestMetaClient_ShowCluster(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
tests := []struct {
@@ -128,7 +131,7 @@ func TestMetaClient_Users(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -265,7 +268,7 @@ func TestMetaClient_User(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -366,7 +369,7 @@ func TestMetaClient_CreateUser(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -437,7 +440,7 @@ func TestMetaClient_ChangePassword(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -509,7 +512,7 @@ func TestMetaClient_DeleteUser(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -578,7 +581,7 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -595,7 +598,7 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
wantErr bool
}{
{
- name: "Successful set permissions User",
+ name: "Remove all permissions for a user",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
@@ -615,7 +618,7 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
wantRm: `{"action":"remove-permissions","user":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]}}}`,
},
{
- name: "Successful set permissions User",
+ name: "Remove some permissions and add others",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
@@ -699,7 +702,7 @@ func TestMetaClient_Roles(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -798,7 +801,7 @@ func TestMetaClient_Role(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -881,7 +884,7 @@ func TestMetaClient_UserRoles(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -985,7 +988,7 @@ func TestMetaClient_CreateRole(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -1051,7 +1054,7 @@ func TestMetaClient_DeleteRole(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -1120,7 +1123,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -1137,7 +1140,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
wantErr bool
}{
{
- name: "Successful set permissions role",
+ name: "Remove all permissions for a role",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
@@ -1154,10 +1157,10 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
ctx: context.Background(),
name: "admin",
},
- wantRm: `{"action":"remove-permissions","role":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]},"users":["marty"]}}`,
+ wantRm: `{"action":"remove-permissions","role":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]}}}`,
},
{
- name: "Successful set single permissions role",
+ name: "Remove some users and add permissions to others",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
@@ -1179,7 +1182,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
},
},
},
- wantRm: `{"action":"remove-permissions","role":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]},"users":["marty"]}}`,
+ wantRm: `{"action":"remove-permissions","role":{"name":"admin","permissions":{"":["ViewAdmin","ViewChronograf"]}}}`,
wantAdd: `{"action":"add-permissions","role":{"name":"admin","permissions":{"telegraf":["ReadData"]}}}`,
},
}
@@ -1218,7 +1221,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
got, _ := ioutil.ReadAll(prm.Body)
if string(got) != tt.wantRm {
- t.Errorf("%q. MetaClient.SetRolePerms() = %v, want %v", tt.name, string(got), tt.wantRm)
+ t.Errorf("%q. MetaClient.SetRolePerms() removal = \n%v\n, want \n%v\n", tt.name, string(got), tt.wantRm)
}
if tt.wantAdd != "" {
prm := reqs[2]
@@ -1231,7 +1234,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
got, _ := ioutil.ReadAll(prm.Body)
if string(got) != tt.wantAdd {
- t.Errorf("%q. MetaClient.SetRolePerms() = %v, want %v", tt.name, string(got), tt.wantAdd)
+ t.Errorf("%q. MetaClient.SetRolePerms() addition = \n%v\n, want \n%v\n", tt.name, string(got), tt.wantAdd)
}
}
}
@@ -1241,7 +1244,7 @@ func TestMetaClient_SetRoleUsers(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
- Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error)
+ Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@@ -1361,7 +1364,7 @@ func NewMockClient(code int, body []byte, headers http.Header, err error) *MockC
}
}
-func (c *MockClient) Do(URL *url.URL, path, method string, params map[string]string, body io.Reader) (*http.Response, error) {
+func (c *MockClient) Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
if c == nil {
return nil, fmt.Errorf("NIL MockClient")
}
@@ -1453,3 +1456,71 @@ func Test_AuthedCheckRedirect_Do(t *testing.T) {
t.Errorf("result = %q; want ok", got)
}
}
+
+func Test_defaultClient_Do(t *testing.T) {
+ type args struct {
+ path string
+ method string
+ authorizer influx.Authorizer
+ params map[string]string
+ body io.Reader
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ wantErr bool
+ }{
+ {
+ name: "test BasicAuth authorizer",
+ args: args{
+ path: "/tictactoe",
+ method: "GET",
+ authorizer: &influx.BasicAuth{
+ Username: "Steven Falken",
+ Password: "JOSHUA",
+ },
+ },
+ want: "Basic U3RldmVuIEZhbGtlbjpKT1NIVUE=",
+ },
+ {
+ name: "test BearerJWT authorizer",
+ args: args{
+ path: "/tictactoe",
+ method: "GET",
+ authorizer: &influx.BearerJWT{
+ Username: "minifig",
+ SharedSecret: "legos",
+ Now: func() time.Time { return time.Time{} },
+ },
+ },
+ want: "Bearer eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJleHAiOi02MjEzNTU5Njc0MCwidXNlcm5hbWUiOiJtaW5pZmlnIn0.uwFGBQ3MykqEmk9Zx0sBdJGefcESVEXG_qt0C1J8b_aS62EAES-Q1FwtURsbITNvSnfzMxYFnkbSG0AA1pEzWw",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ if r.URL.Path != "/tictactoe" {
+ t.Fatal("Expected request to '/tictactoe' but was", r.URL.Path)
+ }
+ got, ok := r.Header["Authorization"]
+ if !ok {
+ t.Fatal("No Authorization header")
+ }
+ if got[0] != tt.want {
+ t.Fatalf("Expected auth %s got %s", tt.want, got)
+ }
+ rw.Write([]byte(`{}`))
+ }))
+ defer ts.Close()
+
+ d := &defaultClient{}
+ u, _ := url.Parse(ts.URL)
+ _, err := d.Do(u, tt.args.path, tt.args.method, tt.args.authorizer, tt.args.params, tt.args.body)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("defaultClient.Do() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ })
+ }
+}
diff --git a/enterprise/users.go b/enterprise/users.go
index 2ecfaaf7b..03ad17a8d 100644
--- a/enterprise/users.go
+++ b/enterprise/users.go
@@ -84,44 +84,49 @@ func (c *UserStore) Update(ctx context.Context, u *chronograf.User) error {
return c.Ctrl.ChangePassword(ctx, u.Name, u.Passwd)
}
- // Make a list of the roles we want this user to have:
- want := make([]string, len(u.Roles))
- for i, r := range u.Roles {
- want[i] = r.Name
- }
+ if u.Roles != nil {
+ // Make a list of the roles we want this user to have:
+ want := make([]string, len(u.Roles))
+ for i, r := range u.Roles {
+ want[i] = r.Name
+ }
- // Find the list of all roles this user is currently in
- userRoles, err := c.UserRoles(ctx)
- if err != nil {
- return nil
- }
- // Make a list of the roles the user currently has
- roles := userRoles[u.Name]
- have := make([]string, len(roles.Roles))
- for i, r := range roles.Roles {
- have[i] = r.Name
- }
+ // Find the list of all roles this user is currently in
+ userRoles, err := c.UserRoles(ctx)
+ if err != nil {
+ return err
+ }
+ // Make a list of the roles the user currently has
+ roles := userRoles[u.Name]
+ have := make([]string, len(roles.Roles))
+ for i, r := range roles.Roles {
+ have[i] = r.Name
+ }
- // Calculate the roles the user will be removed from and the roles the user
- // will be added to.
- revoke, add := Difference(want, have)
+ // Calculate the roles the user will be removed from and the roles the user
+ // will be added to.
+ revoke, add := Difference(want, have)
- // First, add the user to the new roles
- for _, role := range add {
- if err := c.Ctrl.AddRoleUsers(ctx, role, []string{u.Name}); err != nil {
- return err
+ // First, add the user to the new roles
+ for _, role := range add {
+ if err := c.Ctrl.AddRoleUsers(ctx, role, []string{u.Name}); err != nil {
+ return err
+ }
+ }
+
+ // ... and now remove the user from any extra roles
+ for _, role := range revoke {
+ if err := c.Ctrl.RemoveRoleUsers(ctx, role, []string{u.Name}); err != nil {
+ return err
+ }
}
}
- // ... and now remove the user from an extra roles
- for _, role := range revoke {
- if err := c.Ctrl.RemoveRoleUsers(ctx, role, []string{u.Name}); err != nil {
- return err
- }
+ if u.Permissions != nil {
+ perms := ToEnterprise(u.Permissions)
+ return c.Ctrl.SetUserPerms(ctx, u.Name, perms)
}
-
- perms := ToEnterprise(u.Permissions)
- return c.Ctrl.SetUserPerms(ctx, u.Name, perms)
+ return nil
}
// All is all users in influx
diff --git a/influx/authorization.go b/influx/authorization.go
index 86ed48665..2f19bf06e 100644
--- a/influx/authorization.go
+++ b/influx/authorization.go
@@ -54,6 +54,7 @@ func (b *BasicAuth) Set(r *http.Request) error {
type BearerJWT struct {
Username string
SharedSecret string
+ Now Now
}
// Set adds an Authorization Bearer to the request if has a shared secret
@@ -70,7 +71,10 @@ func (b *BearerJWT) Set(r *http.Request) error {
// Token returns the expected InfluxDB JWT signed with the sharedSecret
func (b *BearerJWT) Token(username string) (string, error) {
- return JWT(username, b.SharedSecret, time.Now)
+ if b.Now == nil {
+ b.Now = time.Now
+ }
+ return JWT(username, b.SharedSecret, b.Now)
}
// Now returns the current time
diff --git a/influx/influx.go b/influx/influx.go
index 520d2614b..1106b96ea 100644
--- a/influx/influx.go
+++ b/influx/influx.go
@@ -9,6 +9,7 @@ import (
"net/http"
"net/url"
"strings"
+ "time"
"github.com/influxdata/chronograf"
)
@@ -55,7 +56,10 @@ func (c *Client) query(u *url.URL, q chronograf.Query) (chronograf.Response, err
command := q.Command
// TODO(timraymond): move this upper Query() function
if len(q.TemplateVars) > 0 {
- command = TemplateReplace(q.Command, q.TemplateVars)
+ command, err = TemplateReplace(q.Command, q.TemplateVars, time.Now())
+ if err != nil {
+ return nil, err
+ }
}
logs := c.Logger.
WithField("component", "proxy").
diff --git a/influx/influx_test.go b/influx/influx_test.go
index a6dce9e11..d165ccb86 100644
--- a/influx/influx_test.go
+++ b/influx/influx_test.go
@@ -276,11 +276,11 @@ func Test_Influx_HTTPS_InsecureSkipVerify(t *testing.T) {
called = false
q = ""
query = chronograf.Query{
- Command: "select $field from cpu",
- TemplateVars: chronograf.TemplateVars{
- chronograf.BasicTemplateVar{
- Var: "$field",
- Values: []chronograf.BasicTemplateValue{
+ Command: "select :field: from cpu",
+ TemplateVars: []chronograf.TemplateVar{
+ chronograf.TemplateVar{
+ Var: ":field:",
+ Values: []chronograf.TemplateValue{
{
Value: "usage_user",
Type: "fieldKey",
diff --git a/influx/query.go b/influx/query.go
index 2346fde7c..15b0a62bc 100644
--- a/influx/query.go
+++ b/influx/query.go
@@ -10,6 +10,52 @@ import (
"github.com/influxdata/influxdb/influxql"
)
+func TimeRangeAsEpochNano(expr influxql.Expr, now time.Time) (min, max int64, err error) {
+ tmin, tmax, err := influxql.TimeRange(expr)
+ if err != nil {
+ return 0, 0, err
+ }
+ if tmin.IsZero() {
+ min = time.Unix(0, influxql.MinTime).UnixNano()
+ } else {
+ min = tmin.UnixNano()
+ }
+ if tmax.IsZero() {
+ max = now.UnixNano()
+ } else {
+ max = tmax.UnixNano()
+ }
+ return
+}
+
+const WhereToken = "WHERE"
+
+func ParseTime(influxQL string, now time.Time) (time.Duration, error) {
+ start := strings.Index(strings.ToUpper(influxQL), WhereToken)
+ if start == -1 {
+ return 0, fmt.Errorf("not a relative duration")
+ }
+ start += len(WhereToken)
+ where := influxQL[start:]
+ cond, err := influxql.ParseExpr(where)
+ if err != nil {
+ return 0, err
+ }
+ nowVal := &influxql.NowValuer{
+ Now: now,
+ }
+ cond = influxql.Reduce(cond, nowVal)
+ min, max, err := TimeRangeAsEpochNano(cond, now)
+ if err != nil {
+ return 0, err
+ }
+ dur := time.Duration(max - min)
+ if dur < 0 {
+ dur = 0
+ }
+ return dur, nil
+}
+
// Convert changes an InfluxQL query to a QueryConfig
func Convert(influxQL string) (chronograf.QueryConfig, error) {
itsDashboardTime := false
diff --git a/influx/query_test.go b/influx/query_test.go
index d01a46fb5..dba50dfc7 100644
--- a/influx/query_test.go
+++ b/influx/query_test.go
@@ -2,6 +2,7 @@ package influx
import (
"testing"
+ "time"
"github.com/google/go-cmp/cmp"
"github.com/influxdata/chronograf"
@@ -767,3 +768,43 @@ func TestConvert(t *testing.T) {
})
}
}
+
+func TestParseTime(t *testing.T) {
+ tests := []struct {
+ name string
+ influxQL string
+ now string
+ want time.Duration
+ wantErr bool
+ }{
+ {
+ name: "time equal",
+ now: "2000-01-01T00:00:00Z",
+ influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
+ want: 0,
+ },
+ {
+ name: "time shifted by one hour",
+ now: "2000-01-01T00:00:00Z",
+ influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
+ want: 3599999999998,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ now, err := time.Parse(time.RFC3339, tt.now)
+ if err != nil {
+ t.Fatalf("%v", err)
+ }
+ got, err := ParseTime(tt.influxQL, now)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("ParseTime() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if got != tt.want {
+ t.Logf("%d", got)
+ t.Errorf("ParseTime() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/influx/templates.go b/influx/templates.go
index c006f7fa1..8c0ad0e28 100644
--- a/influx/templates.go
+++ b/influx/templates.go
@@ -1,40 +1,106 @@
package influx
import (
+ "sort"
+ "strconv"
"strings"
+ "time"
"github.com/influxdata/chronograf"
)
-// TemplateReplace replaces templates with values within the query string
-func TemplateReplace(query string, templates chronograf.TemplateVars) string {
- tvarsByPrecedence := make(map[uint]chronograf.TemplateVars, len(templates))
- maxPrecedence := uint(0)
- for _, tmp := range templates {
- precedence := tmp.Precedence()
- if precedence > maxPrecedence {
- maxPrecedence = precedence
- }
- tvarsByPrecedence[precedence] = append(tvarsByPrecedence[precedence], tmp)
- }
-
- replaced := query
- for prc := uint(0); prc <= maxPrecedence; prc++ {
- replacements := []string{}
-
- for _, v := range tvarsByPrecedence[prc] {
- if evar, ok := v.(chronograf.ExecutableVar); ok {
- evar.Exec(replaced)
- }
- newVal := v.String()
- if newVal != "" {
- replacements = append(replacements, v.Name(), newVal)
- }
+func SortTemplates(ts []chronograf.TemplateVar) []chronograf.TemplateVar {
+ sort.Slice(ts, func(i, j int) bool {
+ if len(ts[i].Values) != len(ts[j].Values) {
+ return len(ts[i].Values) < len(ts[j].Values)
}
- replacer := strings.NewReplacer(replacements...)
- replaced = replacer.Replace(replaced)
- }
+ if len(ts[i].Values) == 0 {
+ return i < j
+ }
- return replaced
+ for k := range ts[i].Values {
+ if ts[i].Values[k].Type != ts[j].Values[k].Type {
+ return ts[i].Values[k].Type < ts[j].Values[k].Type
+ }
+ if ts[i].Values[k].Value != ts[j].Values[k].Value {
+ return ts[i].Values[k].Value < ts[j].Values[k].Value
+ }
+ }
+ return i < j
+ })
+ return ts
+}
+
+// RenderTemplate converts the template variable into a correct InfluxQL string based
+// on its type
+func RenderTemplate(query string, t chronograf.TemplateVar, now time.Time) (string, error) {
+ if len(t.Values) == 0 {
+ return query, nil
+ }
+ switch t.Values[0].Type {
+ case "tagKey", "fieldKey", "measurement", "database":
+ return strings.Replace(query, t.Var, `"`+t.Values[0].Value+`"`, -1), nil
+ case "tagValue", "timeStamp":
+ return strings.Replace(query, t.Var, `'`+t.Values[0].Value+`'`, -1), nil
+ case "csv", "constant":
+ return strings.Replace(query, t.Var, t.Values[0].Value, -1), nil
+ }
+
+ tv := map[string]string{}
+ for i := range t.Values {
+ tv[t.Values[i].Type] = t.Values[i].Value
+ }
+
+ if res, ok := tv["resolution"]; ok {
+ resolution, err := strconv.ParseInt(res, 0, 64)
+ if err != nil {
+ return "", err
+ }
+ ppp, ok := tv["pointsPerPixel"]
+ if !ok {
+ ppp = "3"
+ }
+ pixelsPerPoint, err := strconv.ParseInt(ppp, 0, 64)
+ if err != nil {
+ return "", err
+ }
+
+ dur, err := ParseTime(query, now)
+ if err != nil {
+ return "", err
+ }
+ interval := AutoGroupBy(resolution, pixelsPerPoint, dur)
+ return strings.Replace(query, t.Var, interval, -1), nil
+ }
+ return query, nil
+}
+
+func AutoGroupBy(resolution, pixelsPerPoint int64, duration time.Duration) string {
+ // The function is: ((total_seconds * millisecond_conversion) / group_by) = pixels / 3
+ // Number of points given the pixels
+ pixels := float64(resolution) / float64(pixelsPerPoint)
+ msPerPixel := float64(duration/time.Millisecond) / pixels
+ secPerPixel := float64(duration/time.Second) / pixels
+ if secPerPixel < 1.0 {
+ if msPerPixel < 1.0 {
+ msPerPixel = 1.0
+ }
+ return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
+ }
+ // If groupby is more than 1 second round to the second
+ return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
+}
+
+// TemplateReplace replaces templates with values within the query string
+func TemplateReplace(query string, templates []chronograf.TemplateVar, now time.Time) (string, error) {
+ templates = SortTemplates(templates)
+ for i := range templates {
+ var err error
+ query, err = RenderTemplate(query, templates[i], now)
+ if err != nil {
+ return "", err
+ }
+ }
+ return query, nil
}
diff --git a/influx/templates_test.go b/influx/templates_test.go
index bb6ffc4bb..482a16dc5 100644
--- a/influx/templates_test.go
+++ b/influx/templates_test.go
@@ -2,6 +2,7 @@ package influx
import (
"encoding/json"
+ "fmt"
"reflect"
"testing"
"time"
@@ -13,43 +14,43 @@ func TestTemplateReplace(t *testing.T) {
tests := []struct {
name string
query string
- vars chronograf.TemplateVars
+ vars []chronograf.TemplateVar
want string
}{
{
name: "select with parameters",
- query: "$METHOD field1, $field FROM $measurement WHERE temperature > $temperature",
- vars: chronograf.TemplateVars{
- chronograf.BasicTemplateVar{
- Var: "$temperature",
- Values: []chronograf.BasicTemplateValue{
+ query: ":method: field1, :field: FROM :measurement: WHERE temperature > :temperature:",
+ vars: []chronograf.TemplateVar{
+ chronograf.TemplateVar{
+ Var: ":temperature:",
+ Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: "10",
},
},
},
- chronograf.BasicTemplateVar{
- Var: "$field",
- Values: []chronograf.BasicTemplateValue{
+ chronograf.TemplateVar{
+ Var: ":field:",
+ Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
Value: "field2",
},
},
},
- chronograf.BasicTemplateVar{
- Var: "$METHOD",
- Values: []chronograf.BasicTemplateValue{
+ chronograf.TemplateVar{
+ Var: ":method:",
+ Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: "SELECT",
},
},
},
- chronograf.BasicTemplateVar{
- Var: "$measurement",
- Values: []chronograf.BasicTemplateValue{
+ chronograf.TemplateVar{
+ Var: ":measurement:",
+ Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: `"cpu"`,
@@ -62,28 +63,28 @@ func TestTemplateReplace(t *testing.T) {
{
name: "select with parameters and aggregates",
query: `SELECT mean($field) FROM "cpu" WHERE $tag = $value GROUP BY $tag`,
- vars: chronograf.TemplateVars{
- chronograf.BasicTemplateVar{
+ vars: []chronograf.TemplateVar{
+ chronograf.TemplateVar{
Var: "$value",
- Values: []chronograf.BasicTemplateValue{
+ Values: []chronograf.TemplateValue{
{
Type: "tagValue",
Value: "howdy.com",
},
},
},
- chronograf.BasicTemplateVar{
+ chronograf.TemplateVar{
Var: "$tag",
- Values: []chronograf.BasicTemplateValue{
+ Values: []chronograf.TemplateValue{
{
Type: "tagKey",
Value: "host",
},
},
},
- chronograf.BasicTemplateVar{
+ chronograf.TemplateVar{
Var: "$field",
- Values: []chronograf.BasicTemplateValue{
+ Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
Value: "field",
@@ -101,8 +102,8 @@ func TestTemplateReplace(t *testing.T) {
{
name: "var without a value",
query: `SELECT $field FROM "cpu"`,
- vars: chronograf.TemplateVars{
- chronograf.BasicTemplateVar{
+ vars: []chronograf.TemplateVar{
+ chronograf.TemplateVar{
Var: "$field",
},
},
@@ -111,10 +112,10 @@ func TestTemplateReplace(t *testing.T) {
{
name: "var with unknown type",
query: `SELECT $field FROM "cpu"`,
- vars: chronograf.TemplateVars{
- chronograf.BasicTemplateVar{
+ vars: []chronograf.TemplateVar{
+ chronograf.TemplateVar{
Var: "$field",
- Values: []chronograf.BasicTemplateValue{
+ Values: []chronograf.TemplateValue{
{
Type: "who knows?",
Value: "field",
@@ -127,42 +128,63 @@ func TestTemplateReplace(t *testing.T) {
{
name: "auto group by",
query: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by :interval:`,
- vars: chronograf.TemplateVars{
- &chronograf.GroupByVar{
- Var: ":interval:",
- Duration: 180 * 24 * time.Hour,
- Resolution: 1000,
- ReportingInterval: 10 * time.Second,
+ vars: []chronograf.TemplateVar{
+ {
+ Var: ":interval:",
+ Values: []chronograf.TemplateValue{
+ {
+ Value: "1000",
+ Type: "resolution",
+ },
+ {
+ Value: "3",
+ Type: "pointsPerPixel",
+ },
+ },
},
},
- want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46656s)`,
+ want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by without duration",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by :interval:`,
- vars: chronograf.TemplateVars{
- &chronograf.GroupByVar{
- Var: ":interval:",
- Duration: 0 * time.Minute,
- Resolution: 1000,
- ReportingInterval: 10 * time.Second,
+ vars: []chronograf.TemplateVar{
+ {
+ Var: ":interval:",
+ Values: []chronograf.TemplateValue{
+ {
+ Value: "1000",
+ Type: "resolution",
+ },
+ {
+ Value: "3",
+ Type: "pointsPerPixel",
+ },
+ },
},
},
- want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
+ want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by with :dashboardTime:",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > :dashboardTime: group by :interval:`,
- vars: chronograf.TemplateVars{
- &chronograf.GroupByVar{
- Var: ":interval:",
- Duration: 0 * time.Minute,
- Resolution: 1000,
- ReportingInterval: 10 * time.Second,
+ vars: []chronograf.TemplateVar{
+ {
+ Var: ":interval:",
+ Values: []chronograf.TemplateValue{
+ {
+ Value: "1000",
+ Type: "resolution",
+ },
+ {
+ Value: "3",
+ Type: "pointsPerPixel",
+ },
+ },
},
- &chronograf.BasicTemplateVar{
+ {
Var: ":dashboardTime:",
- Values: []chronograf.BasicTemplateValue{
+ Values: []chronograf.TemplateValue{
{
Type: "constant",
Value: "now() - 4320h",
@@ -170,20 +192,28 @@ func TestTemplateReplace(t *testing.T) {
},
},
},
- want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
+ want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by failing condition",
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`,
- vars: []chronograf.TemplateVariable{
- &chronograf.GroupByVar{
- Var: ":interval:",
- Resolution: 115,
- ReportingInterval: 10 * time.Second,
+ vars: []chronograf.TemplateVar{
+ {
+ Var: ":interval:",
+ Values: []chronograf.TemplateValue{
+ {
+ Value: "115",
+ Type: "resolution",
+ },
+ {
+ Value: "3",
+ Type: "pointsPerPixel",
+ },
+ },
},
- chronograf.BasicTemplateVar{
+ {
Var: ":dashboardTime:",
- Values: []chronograf.BasicTemplateValue{
+ Values: []chronograf.TemplateValue{
{
Value: "now() - 1h",
Type: "constant",
@@ -197,7 +227,14 @@ func TestTemplateReplace(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- got := TemplateReplace(tt.query, tt.vars)
+ now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
+ if err != nil {
+ t.Fatal(err)
+ }
+ got, err := TemplateReplace(tt.query, tt.vars, now)
+ if err != nil {
+ t.Fatalf("TestParse unexpected TemplateReplace error: %v", err)
+ }
if got != tt.want {
t.Errorf("TestParse %s =\n%s\nwant\n%s", tt.name, got, tt.want)
}
@@ -209,8 +246,20 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
req := `[
{
"tempVar": ":interval:",
- "resolution": 1000,
- "reportingInterval": 10
+ "values": [
+ {
+ "value": "1000",
+ "type": "resolution"
+ },
+ {
+ "value": "3",
+ "type": "pointsPerPixel"
+ },
+ {
+ "value": "10",
+ "type": "reportingInterval"
+ }
+ ]
},
{
"tempVar": ":cpu:",
@@ -224,15 +273,27 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
}
]`
- expected := []chronograf.TemplateVariable{
- &chronograf.GroupByVar{
- Var: ":interval:",
- Resolution: 1000,
- ReportingInterval: 10 * time.Nanosecond,
+ want := []chronograf.TemplateVar{
+ {
+ Var: ":interval:",
+ Values: []chronograf.TemplateValue{
+ {
+ Value: "1000",
+ Type: "resolution",
+ },
+ {
+ Value: "3",
+ Type: "pointsPerPixel",
+ },
+ {
+ Value: "10",
+ Type: "reportingInterval",
+ },
+ },
},
- chronograf.BasicTemplateVar{
+ {
Var: ":cpu:",
- Values: []chronograf.BasicTemplateValue{
+ Values: []chronograf.TemplateValue{
{
Value: "cpu-total",
Type: "tagValue",
@@ -242,65 +303,128 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
},
}
- var tvars chronograf.TemplateVars
- err := json.Unmarshal([]byte(req), &tvars)
+ var got []chronograf.TemplateVar
+ err := json.Unmarshal([]byte(req), &got)
if err != nil {
t.Fatal("Err unmarshaling:", err)
}
- if len(tvars) != len(expected) {
- t.Fatal("Expected", len(expected), "vars but found", len(tvars))
- }
-
- if !reflect.DeepEqual(*(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar))) {
- t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", *(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar)))
- }
-
- if !reflect.DeepEqual(tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar)) {
- t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar))
+ if !reflect.DeepEqual(got, want) {
+ t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", got, want)
}
}
-func TestGroupByVarString(t *testing.T) {
+func TestAutoGroupBy(t *testing.T) {
tests := []struct {
- name string
- tvar *chronograf.GroupByVar
- want string
+ name string
+ resolution int64
+ pixelsPerPoint int64
+ duration time.Duration
+ want string
}{
{
- name: "String() calculates the GROUP BY interval",
- tvar: &chronograf.GroupByVar{
- Resolution: 700,
- ReportingInterval: 10 * time.Second,
- Duration: 24 * time.Hour,
- },
- want: "time(370s)",
+ name: "String() calculates the GROUP BY interval",
+ resolution: 700,
+ pixelsPerPoint: 3,
+ duration: 24 * time.Hour,
+ want: "time(370s)",
},
{
- name: "String() milliseconds if less than one second intervals",
- tvar: &chronograf.GroupByVar{
- Resolution: 100000,
- ReportingInterval: 10 * time.Second,
- Duration: time.Hour,
- },
- want: "time(107ms)",
+ name: "String() milliseconds if less than one second intervals",
+ resolution: 100000,
+ pixelsPerPoint: 3,
+ duration: time.Hour,
+ want: "time(107ms)",
},
{
- name: "String() milliseconds if less than one millisecond",
- tvar: &chronograf.GroupByVar{
- Resolution: 100000,
- ReportingInterval: 10 * time.Second,
- Duration: time.Second,
- },
- want: "time(1ms)",
+ name: "String() milliseconds if less than one millisecond",
+ resolution: 100000,
+ pixelsPerPoint: 3,
+ duration: time.Second,
+ want: "time(1ms)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- got := tt.tvar.String()
+ got := AutoGroupBy(tt.resolution, tt.pixelsPerPoint, tt.duration)
if got != tt.want {
- t.Errorf("TestGroupByVarString %s =\n%s\nwant\n%s", tt.name, got, tt.want)
+ t.Errorf("TestAutoGroupBy %s =\n%s\nwant\n%s", tt.name, got, tt.want)
}
})
}
}
+
+func Test_RenderTemplate(t *testing.T) {
+ gbvTests := []struct {
+ name string
+ query string
+ want string
+ resolution uint // the screen resolution to render queries into
+ }{
+ {
+ name: "relative time only lower bound with one day of duration",
+ query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
+ resolution: 1000,
+ want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY time(259s)",
+ },
+ {
+ name: "relative time offset by week",
+ query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY :interval:",
+ resolution: 1000,
+ want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY time(259s)",
+ },
+ {
+ name: "relative time with relative upper bound with one minute of duration",
+ query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
+ resolution: 1000,
+ want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY time(179ms)",
+ },
+ {
+ name: "relative time with relative lower bound and now upper with one day of duration",
+ query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
+ resolution: 1000,
+ want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY time(259s)",
+ },
+ {
+ name: "absolute time with one minute of duration",
+ query: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
+ resolution: 1000,
+ want: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY time(179ms)",
+ },
+ {
+ name: "absolute time with nanoseconds and zero duration",
+ query: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
+ resolution: 1000,
+ want: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY time(1ms)",
+ },
+ }
+
+ for _, tt := range gbvTests {
+ t.Run(tt.name, func(t *testing.T) {
+ now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
+ if err != nil {
+ t.Fatal(err)
+ }
+ tvar := chronograf.TemplateVar{
+ Var: ":interval:",
+ Values: []chronograf.TemplateValue{
+ {
+ Value: fmt.Sprintf("%d", tt.resolution),
+ Type: "resolution",
+ },
+ },
+ }
+
+ got, err := RenderTemplate(tt.query, tvar, now)
+ if err != nil {
+ t.Fatalf("unexpected error rendering template %v", err)
+ }
+
+ if got != tt.want {
+ t.Fatalf("%q - durations not equal! Want: %s, Got: %s", tt.name, tt.want, got)
+ }
+ })
+ }
+}
+
+// SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h GROUP BY :interval: FILL(null);SELECT mean("numSeries") AS "mean_numSeries_shifted__1__h" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h AND time < now() - 1h GROUP BY :interval: FILL(null)
diff --git a/kapacitor/client_test.go b/kapacitor/client_test.go
index ee82e2d23..88bc04e79 100644
--- a/kapacitor/client_test.go
+++ b/kapacitor/client_test.go
@@ -214,6 +214,9 @@ var trigger = data
.durationField(durationField)
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -300,6 +303,9 @@ var trigger = data
.durationField(durationField)
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -540,6 +546,9 @@ var trigger = data
.durationField(durationField)
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -623,6 +632,9 @@ var trigger = data
.durationField(durationField)
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -1376,6 +1388,9 @@ trigger
|eval(lambda: "emitted")
.as('value')
.keep('value', messageField, durationField)
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
diff --git a/kapacitor/influxout.go b/kapacitor/influxout.go
index f7e1fca89..b0cfe0719 100644
--- a/kapacitor/influxout.go
+++ b/kapacitor/influxout.go
@@ -20,11 +20,14 @@ func InfluxOut(rule chronograf.AlertRule) (string, error) {
return fmt.Sprintf(`
trigger
%s
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
- .create()
- .database(outputDB)
- .retentionPolicy(outputRP)
- .measurement(outputMeasurement)
+ .create()
+ .database(outputDB)
+ .retentionPolicy(outputRP)
+ .measurement(outputMeasurement)
.tag('alertName', name)
.tag('triggerType', triggerType)
`, rename), nil
diff --git a/kapacitor/influxout_test.go b/kapacitor/influxout_test.go
index 87b6ee7c3..39489750d 100644
--- a/kapacitor/influxout_test.go
+++ b/kapacitor/influxout_test.go
@@ -14,6 +14,9 @@ func TestInfluxOut(t *testing.T) {
|eval(lambda: "emitted")
.as('value')
.keep('value', messageField, durationField)
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
diff --git a/kapacitor/tickscripts_test.go b/kapacitor/tickscripts_test.go
index 4af735616..eb78e3056 100644
--- a/kapacitor/tickscripts_test.go
+++ b/kapacitor/tickscripts_test.go
@@ -181,6 +181,9 @@ var trigger = data
.email()
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -323,6 +326,9 @@ var trigger = data
.email()
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -467,6 +473,9 @@ var trigger = data
.email()
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -620,6 +629,9 @@ var trigger = data
.email()
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -772,6 +784,9 @@ var trigger = data
.email()
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -924,6 +939,9 @@ var trigger = data
.email()
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -1059,6 +1077,9 @@ var trigger = data
.email()
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -1222,6 +1243,9 @@ var trigger = past
.email()
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -1385,6 +1409,9 @@ var trigger = past
.email()
trigger
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
@@ -1527,6 +1554,9 @@ trigger
|eval(lambda: "emitted")
.as('value')
.keep('value', messageField, durationField)
+ |eval(lambda: float("value"))
+ .as('value')
+ .keep()
|influxDBOut()
.create()
.database(outputDB)
diff --git a/kapacitor/vars.go b/kapacitor/vars.go
index 4ef867a2d..182150872 100644
--- a/kapacitor/vars.go
+++ b/kapacitor/vars.go
@@ -76,10 +76,12 @@ func Vars(rule chronograf.AlertRule) (string, error) {
}
}
+// NotEmpty is an error collector checking if strings are empty values
type NotEmpty struct {
Err error
}
+// Valid checks if string s is empty and if so reports an error using name
func (n *NotEmpty) Valid(name, s string) error {
if n.Err != nil {
return n.Err
@@ -91,6 +93,7 @@ func (n *NotEmpty) Valid(name, s string) error {
return n.Err
}
+// Escape sanitizes strings with single quotes for kapacitor
func Escape(str string) string {
return strings.Replace(str, "'", `\'`, -1)
}
@@ -251,5 +254,10 @@ func formatValue(value string) string {
if _, err := strconv.ParseFloat(value, 64); err == nil {
return value
}
- return "'" + value + "'"
+
+ // If the value is a kapacitor boolean value perform no formatting
+ if value == "TRUE" || value == "FALSE" {
+ return value
+ }
+ return "'" + Escape(value) + "'"
}
diff --git a/kapacitor/vars_test.go b/kapacitor/vars_test.go
index 90d79390a..305685ad7 100644
--- a/kapacitor/vars_test.go
+++ b/kapacitor/vars_test.go
@@ -49,3 +49,39 @@ func TestVarsCritStringEqual(t *testing.T) {
t.Errorf("Error validating alert: %v %s", err, tick)
}
}
+
+func Test_formatValue(t *testing.T) {
+ tests := []struct {
+ name string
+ value string
+ want string
+ }{
+ {
+ name: "parses floats",
+ value: "3.14",
+ want: "3.14",
+ },
+ {
+ name: "parses booleans",
+ value: "TRUE",
+ want: "TRUE",
+ },
+ {
+ name: "single quotes for strings",
+ value: "up",
+ want: "'up'",
+ },
+ {
+ name: "handles escaping of single quotes",
+ value: "down's",
+ want: "'down\\'s'",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := formatValue(tt.value); got != tt.want {
+ t.Errorf("formatValue() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/oauth2/generic.go b/oauth2/generic.go
index aa18e716f..0172c70af 100644
--- a/oauth2/generic.go
+++ b/oauth2/generic.go
@@ -27,6 +27,7 @@ type Generic struct {
AuthURL string
TokenURL string
APIURL string // APIURL returns OpenID Userinfo
+ APIKey string // APIKey is the JSON key to lookup email address in APIURL response
Logger chronograf.Logger
}
@@ -69,9 +70,7 @@ func (g *Generic) Config() *oauth2.Config {
// PrincipalID returns the email address of the user.
func (g *Generic) PrincipalID(provider *http.Client) (string, error) {
- res := struct {
- Email string `json:"email"`
- }{}
+ res := map[string]interface{}{}
r, err := provider.Get(g.APIURL)
if err != nil {
@@ -83,7 +82,11 @@ func (g *Generic) PrincipalID(provider *http.Client) (string, error) {
return "", err
}
- email := res.Email
+ email := ""
+ value := res[g.APIKey]
+ if e, ok := value.(string); ok {
+ email = e
+ }
// If we did not receive an email address, try to lookup the email
// in a similar way as github
diff --git a/oauth2/generic_test.go b/oauth2/generic_test.go
index a773c686a..f33cc9ef4 100644
--- a/oauth2/generic_test.go
+++ b/oauth2/generic_test.go
@@ -34,6 +34,7 @@ func TestGenericPrincipalID(t *testing.T) {
prov := oauth2.Generic{
Logger: logger,
APIURL: mockAPI.URL,
+ APIKey: "email",
}
tt, err := oauth2.NewTestTripper(logger, mockAPI, http.DefaultTransport)
if err != nil {
diff --git a/server/cells.go b/server/cells.go
index ea9dbf4a3..344f99b64 100644
--- a/server/cells.go
+++ b/server/cells.go
@@ -31,7 +31,6 @@ func newCellResponses(dID chronograf.DashboardID, dcells []chronograf.DashboardC
cells := make([]dashboardCellResponse, len(dcells))
for i, cell := range dcells {
newCell := chronograf.DashboardCell{}
-
newCell.Queries = make([]chronograf.DashboardQuery, len(cell.Queries))
copy(newCell.Queries, cell.Queries)
@@ -70,7 +69,17 @@ func newCellResponses(dID chronograf.DashboardID, dcells []chronograf.DashboardC
// ValidDashboardCellRequest verifies that the dashboard cells have a query and
// have the correct axes specified
func ValidDashboardCellRequest(c *chronograf.DashboardCell) error {
+ if c == nil {
+ return fmt.Errorf("Chronograf dashboard cell was nil")
+ }
+
CorrectWidthHeight(c)
+ for _, q := range c.Queries {
+ if err := ValidateQueryConfig(&q.QueryConfig); err != nil {
+ return err
+ }
+ }
+ MoveTimeShift(c)
return HasCorrectAxes(c)
}
@@ -115,12 +124,22 @@ func CorrectWidthHeight(c *chronograf.DashboardCell) {
}
}
+// MoveTimeShift moves TimeShift from the QueryConfig to the DashboardQuery
+func MoveTimeShift(c *chronograf.DashboardCell) {
+ for i, query := range c.Queries {
+ query.Shifts = query.QueryConfig.Shifts
+ c.Queries[i] = query
+ }
+}
+
// AddQueryConfig updates a cell by converting InfluxQL into queryconfigs
// If influxql cannot be represented by a full query config, then, the
// query config's raw text is set to the command.
func AddQueryConfig(c *chronograf.DashboardCell) {
for i, q := range c.Queries {
qc := ToQueryConfig(q.Command)
+ qc.Shifts = append([]chronograf.TimeShift(nil), q.Shifts...)
+ q.Shifts = nil
q.QueryConfig = qc
c.Queries[i] = q
}
diff --git a/server/cells_test.go b/server/cells_test.go
index 0a694b9e1..546a0902c 100644
--- a/server/cells_test.go
+++ b/server/cells_test.go
@@ -162,14 +162,14 @@ func Test_Service_DashboardCells(t *testing.T) {
http.StatusOK,
},
{
- "cell axes should always be \"x\", \"y\", and \"y2\"",
- &url.URL{
+ name: "cell axes should always be \"x\", \"y\", and \"y2\"",
+ reqURL: &url.URL{
Path: "/chronograf/v1/dashboards/1/cells",
},
- map[string]string{
+ ctxParams: map[string]string{
"id": "1",
},
- []chronograf.DashboardCell{
+ mockResponse: []chronograf.DashboardCell{
{
ID: "3899be5a-f6eb-4347-b949-de2f4fbea859",
X: 0,
@@ -182,7 +182,7 @@ func Test_Service_DashboardCells(t *testing.T) {
Axes: map[string]chronograf.Axis{},
},
},
- []chronograf.DashboardCell{
+ expected: []chronograf.DashboardCell{
{
ID: "3899be5a-f6eb-4347-b949-de2f4fbea859",
X: 0,
@@ -205,7 +205,7 @@ func Test_Service_DashboardCells(t *testing.T) {
},
},
},
- http.StatusOK,
+ expectedCode: http.StatusOK,
},
}
@@ -217,7 +217,10 @@ func Test_Service_DashboardCells(t *testing.T) {
ctx := context.Background()
params := httprouter.Params{}
for k, v := range test.ctxParams {
- params = append(params, httprouter.Param{k, v})
+ params = append(params, httprouter.Param{
+ Key: k,
+ Value: v,
+ })
}
ctx = httprouter.WithParams(ctx, params)
diff --git a/server/dashboards_test.go b/server/dashboards_test.go
index 4d7a65172..a33a6d856 100644
--- a/server/dashboards_test.go
+++ b/server/dashboards_test.go
@@ -223,6 +223,13 @@ func Test_newDashboardResponse(t *testing.T) {
{
Source: "/chronograf/v1/sources/1",
Command: "SELECT donors from hill_valley_preservation_society where time > '1985-10-25 08:00:00'",
+ Shifts: []chronograf.TimeShift{
+ {
+ Label: "Best Week Evar",
+ Unit: "d",
+ Quantity: "7",
+ },
+ },
},
},
Axes: map[string]chronograf.Axis{
@@ -272,6 +279,13 @@ func Test_newDashboardResponse(t *testing.T) {
},
Tags: make(map[string][]string, 0),
AreTagsAccepted: false,
+ Shifts: []chronograf.TimeShift{
+ {
+ Label: "Best Week Evar",
+ Unit: "d",
+ Quantity: "7",
+ },
+ },
},
},
},
diff --git a/server/kapacitors_test.go b/server/kapacitors_test.go
index 63f7ddefb..0979beca9 100644
--- a/server/kapacitors_test.go
+++ b/server/kapacitors_test.go
@@ -215,12 +215,12 @@ func Test_KapacitorRulesGet(t *testing.T) {
bg := context.Background()
params := httprouter.Params{
{
- "id",
- "1",
+ Key: "id",
+ Value: "1",
},
{
- "kid",
- "1",
+ Key: "kid",
+ Value: "1",
},
}
ctx := httprouter.WithParams(bg, params)
@@ -246,8 +246,8 @@ func Test_KapacitorRulesGet(t *testing.T) {
actual := make([]chronograf.AlertRule, len(frame.Rules))
- for idx, _ := range frame.Rules {
- actual[idx] = frame.Rules[idx].AlertRule
+ for i := range frame.Rules {
+ actual[i] = frame.Rules[i].AlertRule
}
if resp.StatusCode != http.StatusOK {
diff --git a/server/mux.go b/server/mux.go
index aa1847105..2848fc593 100644
--- a/server/mux.go
+++ b/server/mux.go
@@ -257,7 +257,7 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
// Encapsulate the router with OAuth2
var auth http.Handler
auth, allRoutes.AuthRoutes = AuthAPI(opts, router)
- allRoutes.LogoutLink = "/oauth/logout"
+ allRoutes.LogoutLink = path.Join(opts.Basepath, "/oauth/logout")
// Create middleware that redirects to the appropriate provider logout
router.GET(allRoutes.LogoutLink, Logout("/", basepath, allRoutes.AuthRoutes))
diff --git a/server/queries.go b/server/queries.go
index 0f1e73a44..fbd20635b 100644
--- a/server/queries.go
+++ b/server/queries.go
@@ -4,6 +4,7 @@ import (
"encoding/json"
"fmt"
"net/http"
+ "time"
"golang.org/x/net/context"
@@ -21,8 +22,8 @@ type QueryRequest struct {
// QueriesRequest converts all queries to queryConfigs with the help
// of the template variables
type QueriesRequest struct {
- Queries []QueryRequest `json:"queries"`
- TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
+ Queries []QueryRequest `json:"queries"`
+ TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
}
// QueryResponse is the return result of a QueryRequest including
@@ -33,7 +34,7 @@ type QueryResponse struct {
QueryConfig chronograf.QueryConfig `json:"queryConfig"`
QueryAST *queries.SelectStatement `json:"queryAST,omitempty"`
QueryTemplated *string `json:"queryTemplated,omitempty"`
- TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
+ TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
}
// QueriesResponse is the response for a QueriesRequest
@@ -72,12 +73,18 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
Query: q.Query,
}
- query := influx.TemplateReplace(q.Query, req.TemplateVars)
+ query, err := influx.TemplateReplace(q.Query, req.TemplateVars, time.Now())
+ if err != nil {
+ Error(w, http.StatusBadRequest, err.Error(), s.Logger)
+ return
+ }
+
qc := ToQueryConfig(query)
if err := s.DefaultRP(ctx, &qc, &src); err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
return
}
+ qc.Shifts = []chronograf.TimeShift{}
qr.QueryConfig = qc
if stmt, err := queries.ParseSelect(query); err == nil {
diff --git a/server/queries_test.go b/server/queries_test.go
index bd107775d..d161db677 100644
--- a/server/queries_test.go
+++ b/server/queries_test.go
@@ -60,7 +60,7 @@ func TestService_Queries(t *testing.T) {
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
]}`))),
- want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"db","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":null,"range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"db","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]}}]}
+ want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"db","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":null,"range":{"upper":"","lower":"now() - 1m"},"shifts":[]},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"db","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]}}]}
`,
},
{
@@ -81,7 +81,7 @@ func TestService_Queries(t *testing.T) {
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
]}`))),
- want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SHOW DATABASES","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SHOW DATABASES","range":null}}]}
+ want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SHOW DATABASES","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SHOW DATABASES","range":null,"shifts":[]}}]}
`,
},
{
@@ -98,7 +98,7 @@ func TestService_Queries(t *testing.T) {
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
"queries": [
{
- "query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > now() - 1m",
+ "query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > :dashboardTime: AND time < :upperDashboardTime: GROUP BY :interval:",
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
],
@@ -153,13 +153,20 @@ func TestService_Queries(t *testing.T) {
"id": "interval",
"type": "constant",
"tempVar": ":interval:",
- "resolution": 1000,
- "reportingInterval": 10000000000,
- "values": []
+ "values": [
+ {
+ "value": "1000",
+ "type": "resolution"
+ },
+ {
+ "value": "3",
+ "type": "pointsPerPixel"
+ }
+ ]
}
]
}`))),
- want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"_internal","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"_internal","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","tempVars":[{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":interval:","duration":60000000000,"resolution":1000,"reportingInterval":10000000000}]}]}
+ want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","range":null,"shifts":[]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 15m AND time \u003c now() GROUP BY time(2s)","tempVars":[{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":interval:","values":[{"value":"1000","type":"resolution","selected":false},{"value":"3","type":"pointsPerPixel","selected":false}]}]}]}
`,
},
}
diff --git a/server/queryconfig.go b/server/queryconfig.go
index 8ec22be8f..6370986ce 100644
--- a/server/queryconfig.go
+++ b/server/queryconfig.go
@@ -1,6 +1,8 @@
package server
import (
+ "fmt"
+
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/influx"
)
@@ -22,3 +24,28 @@ func ToQueryConfig(query string) chronograf.QueryConfig {
Tags: make(map[string][]string, 0),
}
}
+
+var validFieldTypes = map[string]bool{
+ "func": true,
+ "field": true,
+ "integer": true,
+ "number": true,
+ "regex": true,
+ "wildcard": true,
+}
+
+// ValidateQueryConfig checks any query config input
+func ValidateQueryConfig(q *chronograf.QueryConfig) error {
+ for _, fld := range q.Fields {
+ invalid := fmt.Errorf(`invalid field type "%s" ; expect func, field, integer, number, regex, wildcard`, fld.Type)
+ if !validFieldTypes[fld.Type] {
+ return invalid
+ }
+ for _, arg := range fld.Args {
+ if !validFieldTypes[arg.Type] {
+ return invalid
+ }
+ }
+ }
+ return nil
+}
diff --git a/server/queryconfig_test.go b/server/queryconfig_test.go
new file mode 100644
index 000000000..03b84558e
--- /dev/null
+++ b/server/queryconfig_test.go
@@ -0,0 +1,50 @@
+package server
+
+import (
+ "testing"
+
+ "github.com/influxdata/chronograf"
+)
+
+func TestValidateQueryConfig(t *testing.T) {
+ tests := []struct {
+ name string
+ q *chronograf.QueryConfig
+ wantErr bool
+ }{
+ {
+ name: "invalid field type",
+ q: &chronograf.QueryConfig{
+ Fields: []chronograf.Field{
+ {
+ Type: "invalid",
+ },
+ },
+ },
+ wantErr: true,
+ },
+ {
+ name: "invalid field args",
+ q: &chronograf.QueryConfig{
+ Fields: []chronograf.Field{
+ {
+ Type: "func",
+ Args: []chronograf.Field{
+ {
+ Type: "invalid",
+ },
+ },
+ },
+ },
+ },
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if err := ValidateQueryConfig(tt.q); (err != nil) != tt.wantErr {
+ t.Errorf("ValidateQueryConfig() error = %v, wantErr %v", err, tt.wantErr)
+ }
+ })
+ }
+}
diff --git a/server/server.go b/server/server.go
index dd3f675c0..7687a4f87 100644
--- a/server/server.go
+++ b/server/server.go
@@ -80,6 +80,7 @@ type Server struct {
GenericAuthURL string `long:"generic-auth-url" description:"OAuth 2.0 provider's authorization endpoint URL" env:"GENERIC_AUTH_URL"`
GenericTokenURL string `long:"generic-token-url" description:"OAuth 2.0 provider's token endpoint URL" env:"GENERIC_TOKEN_URL"`
GenericAPIURL string `long:"generic-api-url" description:"URL that returns OpenID UserInfo compatible information." env:"GENERIC_API_URL"`
+ GenericAPIKey string `long:"generic-api-key" description:"JSON lookup key into OpenID UserInfo. (Azure should be userPrincipalName)" default:"email" env:"GENERIC_API_KEY"`
Auth0Domain string `long:"auth0-domain" description:"Subdomain of auth0.com used for Auth0 OAuth2 authentication" env:"AUTH0_DOMAIN"`
Auth0ClientID string `long:"auth0-client-id" description:"Auth0 Client ID for OAuth2 support" env:"AUTH0_CLIENT_ID"`
@@ -182,6 +183,7 @@ func (s *Server) genericOAuth(logger chronograf.Logger, auth oauth2.Authenticato
AuthURL: s.GenericAuthURL,
TokenURL: s.GenericTokenURL,
APIURL: s.GenericAPIURL,
+ APIKey: s.GenericAPIKey,
Logger: logger,
}
jwt := oauth2.NewJWT(s.TokenSecret)
diff --git a/server/service.go b/server/service.go
index 06a2d525f..3b127835d 100644
--- a/server/service.go
+++ b/server/service.go
@@ -48,7 +48,7 @@ func (c *InfluxClient) New(src chronograf.Source, logger chronograf.Logger) (chr
}
if src.Type == chronograf.InfluxEnterprise && src.MetaURL != "" {
tls := strings.Contains(src.MetaURL, "https")
- return enterprise.NewClientWithTimeSeries(logger, src.MetaURL, src.Username, src.Password, tls, client)
+ return enterprise.NewClientWithTimeSeries(logger, src.MetaURL, influx.DefaultAuthorization(&src), tls, client)
}
return client, nil
}
diff --git a/server/sources.go b/server/sources.go
index 40eb69148..3793aea51 100644
--- a/server/sources.go
+++ b/server/sources.go
@@ -55,7 +55,10 @@ func newSourceResponse(src chronograf.Source) sourceResponse {
},
}
- if src.Type == chronograf.InfluxEnterprise {
+ // MetaURL is currently a string, but eventually, we'd like to change it
+ // to a slice. Checking len(src.MetaURL) is functionally equivalent to
+ // checking if it is equal to the empty string.
+ if src.Type == chronograf.InfluxEnterprise && len(src.MetaURL) != 0 {
res.Links.Roles = fmt.Sprintf("%s/%d/roles", httpAPISrcs, src.ID)
}
return res
@@ -251,7 +254,9 @@ func (s *Service) UpdateSource(w http.ResponseWriter, r *http.Request) {
if req.URL != "" {
src.URL = req.URL
}
- if req.MetaURL != "" {
+ // If the supplied MetaURL is different from the
+ // one supplied on the request, update the value
+ if req.MetaURL != src.MetaURL {
src.MetaURL = req.MetaURL
}
if req.Type != "" {
diff --git a/server/swagger.json b/server/swagger.json
index 0580955f6..b8a1c34e7 100644
--- a/server/swagger.json
+++ b/server/swagger.json
@@ -550,6 +550,7 @@
"patch": {
"tags": ["sources", "users"],
"summary": "Update user configuration",
+ "description": "Update one parameter at a time (one of password, permissions or roles)",
"parameters": [
{
"name": "id",
diff --git a/server/templates_test.go b/server/templates_test.go
index afd220afe..8a9bec46f 100644
--- a/server/templates_test.go
+++ b/server/templates_test.go
@@ -16,8 +16,8 @@ func TestValidTemplateRequest(t *testing.T) {
name: "Valid Template",
template: &chronograf.Template{
Type: "fieldKeys",
- BasicTemplateVar: chronograf.BasicTemplateVar{
- Values: []chronograf.BasicTemplateValue{
+ TemplateVar: chronograf.TemplateVar{
+ Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
},
@@ -30,8 +30,8 @@ func TestValidTemplateRequest(t *testing.T) {
wantErr: true,
template: &chronograf.Template{
Type: "Unknown Type",
- BasicTemplateVar: chronograf.BasicTemplateVar{
- Values: []chronograf.BasicTemplateValue{
+ TemplateVar: chronograf.TemplateVar{
+ Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
},
@@ -44,8 +44,8 @@ func TestValidTemplateRequest(t *testing.T) {
wantErr: true,
template: &chronograf.Template{
Type: "csv",
- BasicTemplateVar: chronograf.BasicTemplateVar{
- Values: []chronograf.BasicTemplateValue{
+ TemplateVar: chronograf.TemplateVar{
+ Values: []chronograf.TemplateValue{
{
Type: "unknown value",
},
diff --git a/ui/.eslintrc b/ui/.eslintrc
index 5e8a1a3cb..b2bea4ad0 100644
--- a/ui/.eslintrc
+++ b/ui/.eslintrc
@@ -48,7 +48,7 @@
'arrow-parens': 0,
'comma-dangle': [2, 'always-multiline'],
'no-cond-assign': 2,
- 'no-console': ['error', {allow: ['error']}],
+ 'no-console': ['error', {allow: ['error', 'warn']}],
'no-constant-condition': 2,
'no-control-regex': 2,
'no-debugger': 2,
diff --git a/ui/package.json b/ui/package.json
index 13dc39531..6bb2a3bfd 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -11,14 +11,14 @@
"scripts": {
"build": "yarn run clean && env NODE_ENV=production webpack --optimize-minimize --config ./webpack/prodConfig.js",
"build:dev": "webpack --config ./webpack/devConfig.js",
- "start": "webpack --watch --config ./webpack/devConfig.js",
+ "start": "yarn run clean && webpack --watch --config ./webpack/devConfig.js",
"start:hmr": "webpack-dev-server --open --config ./webpack/devConfig.js",
"lint": "esw src/",
"test": "karma start",
"test:integration": "nightwatch tests --skip",
"test:lint": "yarn run lint; yarn run test",
- "test:dev": "concurrently \"yarn run lint -- --watch\" \"yarn run test -- --no-single-run --reporters=verbose\"",
- "clean": "rm -rf build",
+ "test:dev": "concurrently \"yarn run lint --watch\" \"yarn run test --no-single-run --reporters=verbose\"",
+ "clean": "rm -rf build/*",
"storybook": "node ./storybook.js",
"prettier": "prettier --single-quote --trailing-comma es5 --bracket-spacing false --semi false --write \"{src,spec}/**/*.js\"; eslint src --fix"
},
diff --git a/ui/spec/data_explorer/reducers/queryConfigSpec.js b/ui/spec/data_explorer/reducers/queryConfigSpec.js
index 7dfa1399f..7830393d9 100644
--- a/ui/spec/data_explorer/reducers/queryConfigSpec.js
+++ b/ui/spec/data_explorer/reducers/queryConfigSpec.js
@@ -1,7 +1,9 @@
import reducer from 'src/data_explorer/reducers/queryConfigs'
+
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
fill,
+ timeShift,
chooseTag,
groupByTag,
groupByTime,
@@ -26,63 +28,63 @@ const fakeAddQueryAction = (panelID, queryID) => {
}
}
-function buildInitialState(queryId, params) {
- return Object.assign({}, defaultQueryConfig({id: queryId}), params)
+function buildInitialState(queryID, params) {
+ return Object.assign({}, defaultQueryConfig({id: queryID}), params)
}
describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
- const queryId = 123
+ const queryID = 123
it('can add a query', () => {
- const state = reducer({}, fakeAddQueryAction('blah', queryId))
+ const state = reducer({}, fakeAddQueryAction('blah', queryID))
- const actual = state[queryId]
- const expected = defaultQueryConfig({id: queryId})
+ const actual = state[queryID]
+ const expected = defaultQueryConfig({id: queryID})
expect(actual).to.deep.equal(expected)
})
describe('choosing db, rp, and measurement', () => {
let state
beforeEach(() => {
- state = reducer({}, fakeAddQueryAction('any', queryId))
+ state = reducer({}, fakeAddQueryAction('any', queryID))
})
it('sets the db and rp', () => {
const newState = reducer(
state,
- chooseNamespace(queryId, {
+ chooseNamespace(queryID, {
database: 'telegraf',
retentionPolicy: 'monitor',
})
)
- expect(newState[queryId].database).to.equal('telegraf')
- expect(newState[queryId].retentionPolicy).to.equal('monitor')
+ expect(newState[queryID].database).to.equal('telegraf')
+ expect(newState[queryID].retentionPolicy).to.equal('monitor')
})
it('sets the measurement', () => {
- const newState = reducer(state, chooseMeasurement(queryId, 'mem'))
+ const newState = reducer(state, chooseMeasurement(queryID, 'mem'))
- expect(newState[queryId].measurement).to.equal('mem')
+ expect(newState[queryID].measurement).to.equal('mem')
})
})
describe('a query has measurements and fields', () => {
let state
beforeEach(() => {
- const one = reducer({}, fakeAddQueryAction('any', queryId))
+ const one = reducer({}, fakeAddQueryAction('any', queryID))
const two = reducer(
one,
- chooseNamespace(queryId, {
+ chooseNamespace(queryID, {
database: '_internal',
retentionPolicy: 'daily',
})
)
- const three = reducer(two, chooseMeasurement(queryId, 'disk'))
+ const three = reducer(two, chooseMeasurement(queryID, 'disk'))
state = reducer(
three,
- addInitialField(queryId, {
+ addInitialField(queryID, {
value: 'a great field',
type: 'field',
})
@@ -92,91 +94,91 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
describe('choosing a new namespace', () => {
it('clears out the old measurement and fields', () => {
// what about tags?
- expect(state[queryId].measurement).to.equal('disk')
- expect(state[queryId].fields.length).to.equal(1)
+ expect(state[queryID].measurement).to.equal('disk')
+ expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
- chooseNamespace(queryId, {
+ chooseNamespace(queryID, {
database: 'newdb',
retentionPolicy: 'newrp',
})
)
- expect(newState[queryId].measurement).to.be.null
- expect(newState[queryId].fields.length).to.equal(0)
+ expect(newState[queryID].measurement).to.be.null
+ expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('choosing a new measurement', () => {
it('leaves the namespace and clears out the old fields', () => {
// what about tags?
- expect(state[queryId].fields.length).to.equal(1)
+ expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
- chooseMeasurement(queryId, 'newmeasurement')
+ chooseMeasurement(queryID, 'newmeasurement')
)
- expect(state[queryId].database).to.equal(newState[queryId].database)
- expect(state[queryId].retentionPolicy).to.equal(
- newState[queryId].retentionPolicy
+ expect(state[queryID].database).to.equal(newState[queryID].database)
+ expect(state[queryID].retentionPolicy).to.equal(
+ newState[queryID].retentionPolicy
)
- expect(newState[queryId].fields.length).to.equal(0)
+ expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('DE_TOGGLE_FIELD', () => {
it('can toggle multiple fields', () => {
- expect(state[queryId].fields.length).to.equal(1)
+ expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
- toggleField(queryId, {
+ toggleField(queryID, {
value: 'f2',
type: 'field',
})
)
- expect(newState[queryId].fields.length).to.equal(2)
- expect(newState[queryId].fields[1].alias).to.deep.equal('mean_f2')
- expect(newState[queryId].fields[1].args).to.deep.equal([
+ expect(newState[queryID].fields.length).to.equal(2)
+ expect(newState[queryID].fields[1].alias).to.deep.equal('mean_f2')
+ expect(newState[queryID].fields[1].args).to.deep.equal([
{value: 'f2', type: 'field'},
])
- expect(newState[queryId].fields[1].value).to.deep.equal('mean')
+ expect(newState[queryID].fields[1].value).to.deep.equal('mean')
})
it('applies a func to newly selected fields', () => {
- expect(state[queryId].fields.length).to.equal(1)
- expect(state[queryId].fields[0].type).to.equal('func')
- expect(state[queryId].fields[0].value).to.equal('mean')
+ expect(state[queryID].fields.length).to.equal(1)
+ expect(state[queryID].fields[0].type).to.equal('func')
+ expect(state[queryID].fields[0].value).to.equal('mean')
const newState = reducer(
state,
- toggleField(queryId, {
+ toggleField(queryID, {
value: 'f2',
type: 'field',
})
)
- expect(newState[queryId].fields[1].value).to.equal('mean')
- expect(newState[queryId].fields[1].alias).to.equal('mean_f2')
- expect(newState[queryId].fields[1].args).to.deep.equal([
+ expect(newState[queryID].fields[1].value).to.equal('mean')
+ expect(newState[queryID].fields[1].alias).to.equal('mean_f2')
+ expect(newState[queryID].fields[1].args).to.deep.equal([
{value: 'f2', type: 'field'},
])
- expect(newState[queryId].fields[1].type).to.equal('func')
+ expect(newState[queryID].fields[1].type).to.equal('func')
})
it('adds the field property to query config if not found', () => {
- delete state[queryId].fields
- expect(state[queryId].fields).to.equal(undefined)
+ delete state[queryID].fields
+ expect(state[queryID].fields).to.equal(undefined)
const newState = reducer(
state,
- toggleField(queryId, {value: 'fk1', type: 'field'})
+ toggleField(queryID, {value: 'fk1', type: 'field'})
)
- expect(newState[queryId].fields.length).to.equal(1)
+ expect(newState[queryID].fields.length).to.equal(1)
})
})
})
@@ -189,7 +191,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const f4 = {value: 'f4', type: 'field'}
const initialState = {
- [queryId]: {
+ [queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@@ -201,7 +203,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
},
}
- const action = applyFuncsToField(queryId, {
+ const action = applyFuncsToField(queryID, {
field: {value: 'f1', type: 'field'},
funcs: [
{value: 'fn3', type: 'func', args: []},
@@ -211,7 +213,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const nextState = reducer(initialState, action)
- expect(nextState[queryId].fields).to.deep.equal([
+ expect(nextState[queryID].fields).to.deep.equal([
{value: 'fn3', type: 'func', args: [f1], alias: `fn3_${f1.value}`},
{value: 'fn4', type: 'func', args: [f1], alias: `fn4_${f1.value}`},
{value: 'fn1', type: 'func', args: [f2], alias: `fn1_${f2.value}`},
@@ -230,7 +232,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const groupBy = {time: '1m', tags: []}
const initialState = {
- [queryId]: {
+ [queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@@ -239,35 +241,35 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
},
}
- const action = removeFuncs(queryId, fields, groupBy)
+ const action = removeFuncs(queryID, fields, groupBy)
const nextState = reducer(initialState, action)
- const actual = nextState[queryId].fields
+ const actual = nextState[queryID].fields
const expected = [f1, f2]
expect(actual).to.eql(expected)
- expect(nextState[queryId].groupBy.time).to.equal(null)
+ expect(nextState[queryID].groupBy.time).to.equal(null)
})
})
describe('DE_CHOOSE_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
- [queryId]: buildInitialState(queryId, {
+ [queryID]: buildInitialState(queryID, {
tags: {
k1: ['v0'],
k2: ['foo'],
},
}),
}
- const action = chooseTag(queryId, {
+ const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
- expect(nextState[queryId].tags).to.eql({
+ expect(nextState[queryID].tags).to.eql({
k1: ['v0', 'v1'],
k2: ['foo'],
})
@@ -275,31 +277,31 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
it("creates a new entry if it's the first key", () => {
const initialState = {
- [queryId]: buildInitialState(queryId, {
+ [queryID]: buildInitialState(queryID, {
tags: {},
}),
}
- const action = chooseTag(queryId, {
+ const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
- expect(nextState[queryId].tags).to.eql({
+ expect(nextState[queryID].tags).to.eql({
k1: ['v1'],
})
})
it('removes a value that is already in the list', () => {
const initialState = {
- [queryId]: buildInitialState(queryId, {
+ [queryID]: buildInitialState(queryID, {
tags: {
k1: ['v1'],
},
}),
}
- const action = chooseTag(queryId, {
+ const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
@@ -307,14 +309,14 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const nextState = reducer(initialState, action)
// TODO: this should probably remove the `k1` property entirely from the tags object
- expect(nextState[queryId].tags).to.eql({})
+ expect(nextState[queryID].tags).to.eql({})
})
})
describe('DE_GROUP_BY_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
- [queryId]: {
+ [queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@@ -323,11 +325,11 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
groupBy: {tags: [], time: null},
},
}
- const action = groupByTag(queryId, 'k1')
+ const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
- expect(nextState[queryId].groupBy).to.eql({
+ expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: ['k1'],
})
@@ -335,7 +337,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
it('removes a tag if the given tag key is already in the GROUP BY list', () => {
const initialState = {
- [queryId]: {
+ [queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@@ -344,11 +346,11 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
groupBy: {tags: ['k1'], time: null},
},
}
- const action = groupByTag(queryId, 'k1')
+ const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
- expect(nextState[queryId].groupBy).to.eql({
+ expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: [],
})
@@ -358,14 +360,14 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
describe('DE_TOGGLE_TAG_ACCEPTANCE', () => {
it('it toggles areTagsAccepted', () => {
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
- const action = toggleTagAcceptance(queryId)
+ const action = toggleTagAcceptance(queryID)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].areTagsAccepted).to.equal(
- !initialState[queryId].areTagsAccepted
+ expect(nextState[queryID].areTagsAccepted).to.equal(
+ !initialState[queryID].areTagsAccepted
)
})
})
@@ -374,99 +376,113 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
it('applys the appropriate group by time', () => {
const time = '100y'
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
- const action = groupByTime(queryId, time)
+ const action = groupByTime(queryID, time)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].groupBy.time).to.equal(time)
+ expect(nextState[queryID].groupBy.time).to.equal(time)
})
})
it('updates entire config', () => {
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
- const expected = defaultQueryConfig({id: queryId}, {rawText: 'hello'})
+ const expected = defaultQueryConfig({id: queryID}, {rawText: 'hello'})
const action = updateQueryConfig(expected)
const nextState = reducer(initialState, action)
- expect(nextState[queryId]).to.deep.equal(expected)
+ expect(nextState[queryID]).to.deep.equal(expected)
})
it("updates a query's raw text", () => {
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
const text = 'foo'
- const action = updateRawQuery(queryId, text)
+ const action = updateRawQuery(queryID, text)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].rawText).to.equal('foo')
+ expect(nextState[queryID].rawText).to.equal('foo')
})
it("updates a query's raw status", () => {
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
const status = 'your query was sweet'
- const action = editQueryStatus(queryId, status)
+ const action = editQueryStatus(queryID, status)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].status).to.equal(status)
+ expect(nextState[queryID].status).to.equal(status)
})
describe('DE_FILL', () => {
it('applies an explicit fill when group by time is used', () => {
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
const time = '10s'
- const action = groupByTime(queryId, time)
+ const action = groupByTime(queryID, time)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].fill).to.equal(NULL_STRING)
+ expect(nextState[queryID].fill).to.equal(NULL_STRING)
})
it('updates fill to non-null-string non-number string value', () => {
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
- const action = fill(queryId, LINEAR)
+ const action = fill(queryID, LINEAR)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].fill).to.equal(LINEAR)
+ expect(nextState[queryID].fill).to.equal(LINEAR)
})
it('updates fill to string integer value', () => {
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
const INT_STRING = '1337'
- const action = fill(queryId, INT_STRING)
+ const action = fill(queryID, INT_STRING)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].fill).to.equal(INT_STRING)
+ expect(nextState[queryID].fill).to.equal(INT_STRING)
})
it('updates fill to string float value', () => {
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
const FLOAT_STRING = '1.337'
- const action = fill(queryId, FLOAT_STRING)
+ const action = fill(queryID, FLOAT_STRING)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].fill).to.equal(FLOAT_STRING)
+ expect(nextState[queryID].fill).to.equal(FLOAT_STRING)
+ })
+ })
+
+ describe('DE_TIME_SHIFT', () => {
+ it('can shift the time', () => {
+ const initialState = {
+ [queryID]: buildInitialState(queryID),
+ }
+
+ const shift = {quantity: 1, unit: 'd', duration: '1d'}
+ const action = timeShift(queryID, shift)
+ const nextState = reducer(initialState, action)
+
+ expect(nextState[queryID].shifts).to.deep.equal([shift])
})
})
})
diff --git a/ui/spec/kapacitor/reducers/queryConfigSpec.js b/ui/spec/kapacitor/reducers/queryConfigSpec.js
index f21b04649..a654e4efd 100644
--- a/ui/spec/kapacitor/reducers/queryConfigSpec.js
+++ b/ui/spec/kapacitor/reducers/queryConfigSpec.js
@@ -1,14 +1,15 @@
import reducer from 'src/kapacitor/reducers/queryConfigs'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
+ chooseTag,
+ timeShift,
+ groupByTag,
+ toggleField,
+ groupByTime,
chooseNamespace,
chooseMeasurement,
- chooseTag,
- groupByTag,
- toggleTagAcceptance,
- toggleField,
applyFuncsToField,
- groupByTime,
+ toggleTagAcceptance,
} from 'src/kapacitor/actions/queryConfigs'
const fakeAddQueryAction = (panelID, queryID) => {
@@ -18,142 +19,142 @@ const fakeAddQueryAction = (panelID, queryID) => {
}
}
-function buildInitialState(queryId, params) {
+function buildInitialState(queryID, params) {
return Object.assign(
{},
- defaultQueryConfig({id: queryId, isKapacitorRule: true}),
+ defaultQueryConfig({id: queryID, isKapacitorRule: true}),
params
)
}
describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
- const queryId = 123
+ const queryID = 123
it('can add a query', () => {
- const state = reducer({}, fakeAddQueryAction('blah', queryId))
+ const state = reducer({}, fakeAddQueryAction('blah', queryID))
- const actual = state[queryId]
- const expected = defaultQueryConfig({id: queryId, isKapacitorRule: true})
+ const actual = state[queryID]
+ const expected = defaultQueryConfig({id: queryID, isKapacitorRule: true})
expect(actual).to.deep.equal(expected)
})
describe('choosing db, rp, and measurement', () => {
let state
beforeEach(() => {
- state = reducer({}, fakeAddQueryAction('any', queryId))
+ state = reducer({}, fakeAddQueryAction('any', queryID))
})
it('sets the db and rp', () => {
const newState = reducer(
state,
- chooseNamespace(queryId, {
+ chooseNamespace(queryID, {
database: 'telegraf',
retentionPolicy: 'monitor',
})
)
- expect(newState[queryId].database).to.equal('telegraf')
- expect(newState[queryId].retentionPolicy).to.equal('monitor')
+ expect(newState[queryID].database).to.equal('telegraf')
+ expect(newState[queryID].retentionPolicy).to.equal('monitor')
})
it('sets the measurement', () => {
- const newState = reducer(state, chooseMeasurement(queryId, 'mem'))
+ const newState = reducer(state, chooseMeasurement(queryID, 'mem'))
- expect(newState[queryId].measurement).to.equal('mem')
+ expect(newState[queryID].measurement).to.equal('mem')
})
})
describe('a query has measurements and fields', () => {
let state
beforeEach(() => {
- const one = reducer({}, fakeAddQueryAction('any', queryId))
+ const one = reducer({}, fakeAddQueryAction('any', queryID))
const two = reducer(
one,
- chooseNamespace(queryId, {
+ chooseNamespace(queryID, {
database: '_internal',
retentionPolicy: 'daily',
})
)
- const three = reducer(two, chooseMeasurement(queryId, 'disk'))
+ const three = reducer(two, chooseMeasurement(queryID, 'disk'))
state = reducer(
three,
- toggleField(queryId, {value: 'a great field', funcs: []})
+ toggleField(queryID, {value: 'a great field', funcs: []})
)
})
describe('choosing a new namespace', () => {
it('clears out the old measurement and fields', () => {
// what about tags?
- expect(state[queryId].measurement).to.exist
- expect(state[queryId].fields.length).to.equal(1)
+ expect(state[queryID].measurement).to.exist
+ expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
- chooseNamespace(queryId, {
+ chooseNamespace(queryID, {
database: 'newdb',
retentionPolicy: 'newrp',
})
)
- expect(newState[queryId].measurement).not.to.exist
- expect(newState[queryId].fields.length).to.equal(0)
+ expect(newState[queryID].measurement).not.to.exist
+ expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('choosing a new measurement', () => {
it('leaves the namespace and clears out the old fields', () => {
// what about tags?
- expect(state[queryId].fields.length).to.equal(1)
+ expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
- chooseMeasurement(queryId, 'newmeasurement')
+ chooseMeasurement(queryID, 'newmeasurement')
)
- expect(state[queryId].database).to.equal(newState[queryId].database)
- expect(state[queryId].retentionPolicy).to.equal(
- newState[queryId].retentionPolicy
+ expect(state[queryID].database).to.equal(newState[queryID].database)
+ expect(state[queryID].retentionPolicy).to.equal(
+ newState[queryID].retentionPolicy
)
- expect(newState[queryId].fields.length).to.equal(0)
+ expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('when the query is part of a kapacitor rule', () => {
it('only allows one field', () => {
- expect(state[queryId].fields.length).to.equal(1)
+ expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
- toggleField(queryId, {value: 'a different field', type: 'field'})
+ toggleField(queryID, {value: 'a different field', type: 'field'})
)
- expect(newState[queryId].fields.length).to.equal(1)
- expect(newState[queryId].fields[0].value).to.equal('a different field')
+ expect(newState[queryID].fields.length).to.equal(1)
+ expect(newState[queryID].fields[0].value).to.equal('a different field')
})
})
describe('KAPA_TOGGLE_FIELD', () => {
it('cannot toggle multiple fields', () => {
- expect(state[queryId].fields.length).to.equal(1)
+ expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
- toggleField(queryId, {value: 'a different field', type: 'field'})
+ toggleField(queryID, {value: 'a different field', type: 'field'})
)
- expect(newState[queryId].fields.length).to.equal(1)
- expect(newState[queryId].fields[0].value).to.equal('a different field')
+ expect(newState[queryID].fields.length).to.equal(1)
+ expect(newState[queryID].fields[0].value).to.equal('a different field')
})
it('applies no funcs to newly selected fields', () => {
- expect(state[queryId].fields.length).to.equal(1)
+ expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
- toggleField(queryId, {value: 'a different field', type: 'field'})
+ toggleField(queryID, {value: 'a different field', type: 'field'})
)
- expect(newState[queryId].fields[0].type).to.equal('field')
+ expect(newState[queryID].fields[0].type).to.equal('field')
})
})
})
@@ -162,7 +163,7 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it('applies functions to a field without any existing functions', () => {
const f1 = {value: 'f1', type: 'field'}
const initialState = {
- [queryId]: {
+ [queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@@ -174,13 +175,13 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
},
}
- const action = applyFuncsToField(queryId, {
+ const action = applyFuncsToField(queryID, {
field: {value: 'f1', type: 'field'},
funcs: [{value: 'fn3', type: 'func'}, {value: 'fn4', type: 'func'}],
})
const nextState = reducer(initialState, action)
- const actual = nextState[queryId].fields
+ const actual = nextState[queryID].fields
const expected = [
{value: 'fn3', type: 'func', args: [f1], alias: `fn3_${f1.value}`},
{value: 'fn4', type: 'func', args: [f1], alias: `fn4_${f1.value}`},
@@ -193,21 +194,21 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
describe('KAPA_CHOOSE_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
- [queryId]: buildInitialState(queryId, {
+ [queryID]: buildInitialState(queryID, {
tags: {
k1: ['v0'],
k2: ['foo'],
},
}),
}
- const action = chooseTag(queryId, {
+ const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
- expect(nextState[queryId].tags).to.eql({
+ expect(nextState[queryID].tags).to.eql({
k1: ['v0', 'v1'],
k2: ['foo'],
})
@@ -215,31 +216,31 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it("creates a new entry if it's the first key", () => {
const initialState = {
- [queryId]: buildInitialState(queryId, {
+ [queryID]: buildInitialState(queryID, {
tags: {},
}),
}
- const action = chooseTag(queryId, {
+ const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
- expect(nextState[queryId].tags).to.eql({
+ expect(nextState[queryID].tags).to.eql({
k1: ['v1'],
})
})
it('removes a value that is already in the list', () => {
const initialState = {
- [queryId]: buildInitialState(queryId, {
+ [queryID]: buildInitialState(queryID, {
tags: {
k1: ['v1'],
},
}),
}
- const action = chooseTag(queryId, {
+ const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
@@ -247,14 +248,14 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
const nextState = reducer(initialState, action)
// TODO: this should probably remove the `k1` property entirely from the tags object
- expect(nextState[queryId].tags).to.eql({})
+ expect(nextState[queryID].tags).to.eql({})
})
})
describe('KAPA_GROUP_BY_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
- [queryId]: {
+ [queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@@ -263,11 +264,11 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
groupBy: {tags: [], time: null},
},
}
- const action = groupByTag(queryId, 'k1')
+ const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
- expect(nextState[queryId].groupBy).to.eql({
+ expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: ['k1'],
})
@@ -275,7 +276,7 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it('removes a tag if the given tag key is already in the GROUP BY list', () => {
const initialState = {
- [queryId]: {
+ [queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@@ -284,11 +285,11 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
groupBy: {tags: ['k1'], time: null},
},
}
- const action = groupByTag(queryId, 'k1')
+ const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
- expect(nextState[queryId].groupBy).to.eql({
+ expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: [],
})
@@ -298,14 +299,14 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
describe('KAPA_TOGGLE_TAG_ACCEPTANCE', () => {
it('it toggles areTagsAccepted', () => {
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
- const action = toggleTagAcceptance(queryId)
+ const action = toggleTagAcceptance(queryID)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].areTagsAccepted).to.equal(
- !initialState[queryId].areTagsAccepted
+ expect(nextState[queryID].areTagsAccepted).to.equal(
+ !initialState[queryID].areTagsAccepted
)
})
})
@@ -314,14 +315,28 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it('applys the appropriate group by time', () => {
const time = '100y'
const initialState = {
- [queryId]: buildInitialState(queryId),
+ [queryID]: buildInitialState(queryID),
}
- const action = groupByTime(queryId, time)
+ const action = groupByTime(queryID, time)
const nextState = reducer(initialState, action)
- expect(nextState[queryId].groupBy.time).to.equal(time)
+ expect(nextState[queryID].groupBy.time).to.equal(time)
+ })
+ })
+
+ describe('KAPA_TIME_SHIFT', () => {
+ it('can shift the time', () => {
+ const initialState = {
+ [queryID]: buildInitialState(queryID),
+ }
+
+ const shift = {quantity: 1, unit: 'd', duration: '1d'}
+ const action = timeShift(queryID, shift)
+ const nextState = reducer(initialState, action)
+
+ expect(nextState[queryID].shifts).to.deep.equal([shift])
})
})
})
diff --git a/ui/spec/shared/presenters/presentersSpec.js b/ui/spec/shared/presenters/presentersSpec.js
index 2cd1d7347..d751e9cd4 100644
--- a/ui/spec/shared/presenters/presentersSpec.js
+++ b/ui/spec/shared/presenters/presentersSpec.js
@@ -1,10 +1,15 @@
-import {buildRoles, buildClusterAccounts} from 'shared/presenters'
+import {
+ buildRoles,
+ buildClusterAccounts,
+ buildDefaultYLabel,
+} from 'shared/presenters'
+import defaultQueryConfig from 'utils/defaultQueryConfig'
-describe('Presenters', function() {
- describe('roles utils', function() {
- describe('buildRoles', function() {
- describe('when a role has no users', function() {
- it("sets a role's users as an empty array", function() {
+describe('Presenters', () => {
+ describe('roles utils', () => {
+ describe('buildRoles', () => {
+ describe('when a role has no users', () => {
+ it("sets a role's users as an empty array", () => {
const roles = [
{
name: 'Marketing',
@@ -20,8 +25,8 @@ describe('Presenters', function() {
})
})
- describe('when a role has no permissions', function() {
- it("set's a roles permission as an empty array", function() {
+ describe('when a role has no permissions', () => {
+ it("set's a roles permission as an empty array", () => {
const roles = [
{
name: 'Marketing',
@@ -35,9 +40,10 @@ describe('Presenters', function() {
})
})
- describe('when a role has users and permissions', function() {
- beforeEach(function() {
- const roles = [
+ describe('when a role has users and permissions', () => {
+ let roles
+ beforeEach(() => {
+ const rs = [
{
name: 'Marketing',
permissions: {
@@ -49,18 +55,18 @@ describe('Presenters', function() {
},
]
- this.roles = buildRoles(roles)
+ roles = buildRoles(rs)
})
- it('each role has a name and a list of users (if they exist)', function() {
- const role = this.roles[0]
+ it('each role has a name and a list of users (if they exist)', () => {
+ const role = roles[0]
expect(role.name).to.equal('Marketing')
expect(role.users).to.contain('roley@influxdb.com')
expect(role.users).to.contain('will@influxdb.com')
})
- it('transforms permissions into a list of objects and each permission has a list of resources', function() {
- expect(this.roles[0].permissions).to.eql([
+ it('transforms permissions into a list of objects and each permission has a list of resources', () => {
+ expect(roles[0].permissions).to.eql([
{
name: 'ViewAdmin',
displayName: 'View Admin',
@@ -85,10 +91,10 @@ describe('Presenters', function() {
})
})
- describe('cluster utils', function() {
- describe('buildClusterAccounts', function() {
+ describe('cluster utils', () => {
+ describe('buildClusterAccounts', () => {
// TODO: break down this test into smaller individual assertions.
- it('adds role information to each cluster account and parses permissions', function() {
+ it('adds role information to each cluster account and parses permissions', () => {
const users = [
{
name: 'jon@example.com',
@@ -192,7 +198,7 @@ describe('Presenters', function() {
expect(actual).to.eql(expected)
})
- it('can handle empty results for users and roles', function() {
+ it('can handle empty results for users and roles', () => {
const users = undefined
const roles = undefined
@@ -201,7 +207,7 @@ describe('Presenters', function() {
expect(actual).to.eql([])
})
- it('sets roles to an empty array if a user has no roles', function() {
+ it('sets roles to an empty array if a user has no roles', () => {
const users = [
{
name: 'ned@example.com',
@@ -216,4 +222,41 @@ describe('Presenters', function() {
})
})
})
+
+ describe('buildDefaultYLabel', () => {
+ it('can return the correct string for field', () => {
+ const query = defaultQueryConfig({id: 1})
+ const fields = [{value: 'usage_system', type: 'field'}]
+ const measurement = 'm1'
+ const queryConfig = {...query, measurement, fields}
+ const actual = buildDefaultYLabel(queryConfig)
+
+ expect(actual).to.equal('m1.usage_system')
+ })
+
+ it('can return the correct string for funcs with args', () => {
+ const query = defaultQueryConfig({id: 1})
+ const field = {value: 'usage_system', type: 'field'}
+ const args = {
+ value: 'mean',
+ type: 'func',
+ args: [field],
+ alias: '',
+ }
+
+ const f1 = {
+ value: 'derivative',
+ type: 'func',
+ args: [args],
+ alias: '',
+ }
+
+ const fields = [f1]
+ const measurement = 'm1'
+ const queryConfig = {...query, measurement, fields}
+ const actual = buildDefaultYLabel(queryConfig)
+
+ expect(actual).to.equal('m1.derivative_mean_usage_system')
+ })
+ })
})
diff --git a/ui/spec/shared/query/helpersSpec.js b/ui/spec/shared/query/helpersSpec.js
new file mode 100644
index 000000000..c21c248a7
--- /dev/null
+++ b/ui/spec/shared/query/helpersSpec.js
@@ -0,0 +1,109 @@
+import {timeRangeType, shiftTimeRange} from 'shared/query/helpers'
+import moment from 'moment'
+import {
+ INVALID,
+ ABSOLUTE,
+ INFLUXQL,
+ RELATIVE_LOWER,
+ RELATIVE_UPPER,
+} from 'shared/constants/timeRange'
+const format = INFLUXQL
+
+describe('Shared.Query.Helpers', () => {
+ describe('timeRangeTypes', () => {
+ it('returns invalid if no upper and lower', () => {
+ const upper = null
+ const lower = null
+
+ const timeRange = {lower, upper}
+
+ expect(timeRangeType(timeRange)).to.equal(INVALID)
+ })
+
+ it('can detect absolute type', () => {
+ const tenMinutes = 600000
+ const upper = Date.now()
+ const lower = upper - tenMinutes
+
+ const timeRange = {lower, upper, format}
+
+ expect(timeRangeType(timeRange)).to.equal(ABSOLUTE)
+ })
+
+ it('can detect exclusive relative lower', () => {
+ const lower = 'now() - 15m'
+ const upper = null
+
+ const timeRange = {lower, upper, format}
+
+ expect(timeRangeType(timeRange)).to.equal(RELATIVE_LOWER)
+ })
+
+ it('can detect relative upper', () => {
+ const upper = 'now()'
+ const oneMinute = 60000
+ const lower = Date.now() - oneMinute
+
+ const timeRange = {lower, upper, format}
+
+ expect(timeRangeType(timeRange)).to.equal(RELATIVE_UPPER)
+ })
+ })
+
+ describe('timeRangeShift', () => {
+ it('can calculate the shift for absolute timeRanges', () => {
+ const upper = Date.now()
+ const oneMinute = 60000
+ const lower = Date.now() - oneMinute
+ const shift = {quantity: 7, unit: 'd'}
+ const timeRange = {upper, lower}
+
+ const type = timeRangeType(timeRange)
+ const actual = shiftTimeRange(timeRange, shift)
+ const expected = {
+ lower: `${lower} - 7d`,
+ upper: `${upper} - 7d`,
+ type: 'shifted',
+ }
+
+ expect(type).to.equal(ABSOLUTE)
+ expect(actual).to.deep.equal(expected)
+ })
+
+ it('can calculate the shift for relative lower timeRanges', () => {
+ const shift = {quantity: 7, unit: 'd'}
+ const lower = 'now() - 15m'
+ const timeRange = {lower, upper: null}
+
+ const type = timeRangeType(timeRange)
+ const actual = shiftTimeRange(timeRange, shift)
+ const expected = {
+ lower: `${lower} - 7d`,
+ upper: `now() - 7d`,
+ type: 'shifted',
+ }
+
+ expect(type).to.equal(RELATIVE_LOWER)
+ expect(actual).to.deep.equal(expected)
+ })
+
+ it('can calculate the shift for relative upper timeRanges', () => {
+ const upper = Date.now()
+ const oneMinute = 60000
+ const lower = Date.now() - oneMinute
+ const shift = {quantity: 7, unit: 'd'}
+ const timeRange = {upper, lower}
+
+ const type = timeRangeType(timeRange)
+ const actual = shiftTimeRange(timeRange, shift)
+ const expected = {
+ lower: `${lower} - 7d`,
+ upper: `${upper} - 7d`,
+ type: 'shifted',
+ }
+
+ expect(type).to.equal(ABSOLUTE)
+ expect(actual).to.deep.equal(expected)
+ })
+ })
+})
diff --git a/ui/spec/utils/timeSeriesToDygraphSpec.js b/ui/spec/utils/timeSeriesToDygraphSpec.js
index 167ed2036..9dd78cfcb 100644
--- a/ui/spec/utils/timeSeriesToDygraphSpec.js
+++ b/ui/spec/utils/timeSeriesToDygraphSpec.js
@@ -228,11 +228,7 @@ describe('timeSeriesToDygraph', () => {
]
const isInDataExplorer = true
- const actual = timeSeriesToDygraph(
- influxResponse,
- undefined,
- isInDataExplorer
- )
+ const actual = timeSeriesToDygraph(influxResponse, isInDataExplorer)
const expected = {}
diff --git a/ui/src/dashboards/components/DashboardsPageContents.js b/ui/src/dashboards/components/DashboardsPageContents.js
index 88fac7551..1adeef209 100644
--- a/ui/src/dashboards/components/DashboardsPageContents.js
+++ b/ui/src/dashboards/components/DashboardsPageContents.js
@@ -1,58 +1,85 @@
-import React, {PropTypes} from 'react'
+import React, {PropTypes, Component} from 'react'
import Authorized, {EDITOR_ROLE} from 'src/auth/Authorized'
import DashboardsTable from 'src/dashboards/components/DashboardsTable'
+import SearchBar from 'src/hosts/components/SearchBar'
import FancyScrollbar from 'shared/components/FancyScrollbar'
-const DashboardsPageContents = ({
- dashboards,
- onDeleteDashboard,
- onCreateDashboard,
- dashboardLink,
-}) => {
- let tableHeader
- if (dashboards === null) {
- tableHeader = 'Loading Dashboards...'
- } else if (dashboards.length === 1) {
- tableHeader = '1 Dashboard'
- } else {
- tableHeader = `${dashboards.length} Dashboards`
+class DashboardsPageContents extends Component {
+ constructor(props) {
+ super(props)
+
+ this.state = {
+ searchTerm: '',
+ }
}
- return (
-
-