55
CHANGELOG.md
|
@ -1,13 +1,56 @@
|
|||
## v1.2.0 [unreleased]
|
||||
|
||||
### Upcoming Bug Fixes
|
||||
### Bug Fixes
|
||||
1. [#882](https://github.com/influxdata/chronograf/pull/882): Fix y-axis graph padding
|
||||
2. [#907](https://github.com/influxdata/chronograf/pull/907): Fix react-router warning
|
||||
|
||||
### Upcoming Features
|
||||
1. [#838](https://github.com/influxdata/chronograf/issues/838): Add detail node to kapacitor alerts
|
||||
2. [#853](https://github.com/influxdata/chronograf/issues/853): Updated builds to use yarn over npm install
|
||||
### Features
|
||||
1. [#873](https://github.com/influxdata/chronograf/pull/873): Add [TLS](https://github.com/influxdata/chronograf/blob/master/docs/tls.md) support
|
||||
2. [#885](https://github.com/influxdata/chronograf/issues/885): Add presentation mode to dashboard page
|
||||
3. [#891](https://github.com/influxdata/chronograf/issues/891): Make dashboard visualizations draggable
|
||||
4. [#892](https://github.com/influxdata/chronograf/issues/891): Make dashboard visualizations resizable
|
||||
5. [#893](https://github.com/influxdata/chronograf/issues/893): Persist dashboard visualization position
|
||||
6. [#922](https://github.com/influxdata/chronograf/issues/922): Additional OAuth2 support for Heroku and Google
|
||||
|
||||
### Upcoming UI Improvements
|
||||
1. [#851](https://github.com/influxdata/chronograf/pull/851): Add field for meta server information in connection config
|
||||
### UI Improvements
|
||||
1. [#905](https://github.com/influxdata/chronograf/pull/905): Make scroll bar thumb element bigger
|
||||
2. [#917](https://github.com/influxdata/chronograf/pull/917): Simplify side navigation
|
||||
3. [#920](https://github.com/influxdata/chronograf/pull/920): Display stacked and step plot graphs
|
||||
4. [#851](https://github.com/influxdata/chronograf/pull/851): Add configuration for Influx Enterprise Meta nodes
|
||||
|
||||
## v1.2.0-beta3 [2017-02-15]
|
||||
|
||||
### Bug Fixes
|
||||
1. [#879](https://github.com/influxdata/chronograf/pull/879): Fix several Kapacitor configuration page state bugs: [#875](https://github.com/influxdata/chronograf/issues/875), [#876](https://github.com/influxdata/chronograf/issues/876), [#878](https://github.com/influxdata/chronograf/issues/878)
|
||||
2. [#872](https://github.com/influxdata/chronograf/pull/872): Fix incorrect data source response
|
||||
|
||||
### Features
|
||||
1. [#896](https://github.com/influxdata/chronograf/pull/896) Add more docker stats
|
||||
|
||||
## v1.2.0-beta2 [2017-02-10]
|
||||
|
||||
### Bug Fixes
|
||||
1. [#865](https://github.com/influxdata/chronograf/issues/865): Support for String fields compare Kapacitor rules in Chronograf UI
|
||||
|
||||
### Features
|
||||
1. [#838](https://github.com/influxdata/chronograf/issues/838): Add [detail node](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#details) to Kapacitor alerts
|
||||
2. [#847](https://github.com/influxdata/chronograf/issues/847): Enable and disable Kapacitor alerts from the alert manager page
|
||||
3. [#853](https://github.com/influxdata/chronograf/issues/853): Update builds to use yarn over npm install
|
||||
4. [#860](https://github.com/influxdata/chronograf/issues/860): Add gzip encoding and caching of static assets to server
|
||||
5. [#864](https://github.com/influxdata/chronograf/issues/864): Add support to Kapacitor rule alert configuration for:
|
||||
- HTTP
|
||||
- TCP
|
||||
- Exec
|
||||
- SMTP
|
||||
- Alerta
|
||||
|
||||
### UI Improvements
|
||||
1. [#822](https://github.com/influxdata/chronograf/issues/822): Simplify and improve the layout of the Data Explorer
|
||||
- The Data Explorer's intention and purpose has always been the ad hoc and ephemeral exploration of your schema and data.
|
||||
The concept of `Exploration` sessions and `Panels` betrayed this initial intention. The DE turned into a "poor man's"
|
||||
dashboarding tool. In turn, this introduced complexity in the code and the UI. In the future if I want to save, manipulate,
|
||||
and view multiple visualizations this will be done more efficiently and effectively in our dashboarding solution.
|
||||
>>>>>>> master
|
||||
|
||||
## v1.2.0-beta1 [2017-01-27]
|
||||
|
||||
|
|
2
Godeps
|
@ -1,3 +1,4 @@
|
|||
github.com/NYTimes/gziphandler 6710af535839f57c687b62c4c23d649f9545d885
|
||||
github.com/Sirupsen/logrus 3ec0642a7fb6488f65b06f9040adc67e3990296a
|
||||
github.com/boltdb/bolt 5cc10bbbc5c141029940133bb33c9e969512a698
|
||||
github.com/bouk/httprouter ee8b3818a7f51fbc94cc709b5744b52c2c725e91
|
||||
|
@ -14,3 +15,4 @@ github.com/sergi/go-diff 1d28411638c1e67fe1930830df207bef72496ae9
|
|||
github.com/tylerb/graceful 50a48b6e73fcc75b45e22c05b79629a67c79e938
|
||||
golang.org/x/net 749a502dd1eaf3e5bfd4f8956748c502357c0bbe
|
||||
golang.org/x/oauth2 1e695b1c8febf17aad3bfa7bf0a819ef94b98ad5
|
||||
google.golang.org/api bc20c61134e1d25265dd60049f5735381e79b631
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
### Go
|
||||
* github.com/NYTimes/gziphandler [APACHE-2.0](https://github.com/NYTimes/gziphandler/blob/master/LICENSE.md)
|
||||
* github.com/Sirupsen/logrus [MIT](https://github.com/Sirupsen/logrus/blob/master/LICENSE)
|
||||
* github.com/boltdb/bolt [MIT](https://github.com/boltdb/bolt/blob/master/LICENSE)
|
||||
* github.com/bouk/httprouter [BSD](https://github.com/bouk/httprouter/blob/master/LICENSE)
|
||||
|
@ -15,6 +16,7 @@
|
|||
* github.com/tylerb/graceful [MIT](https://github.com/tylerb/graceful/blob/master/LICENSE)
|
||||
* golang.org/x/net [BSD](https://github.com/golang/net/blob/master/LICENSE)
|
||||
* golang.org/x/oauth2 [BSD](https://github.com/golang/oauth2/blob/master/LICENSE)
|
||||
* google.golang.org/api/oauth2/v2 [BSD](https://github.com/google/google-api-go-client/blob/master/LICENSE)
|
||||
|
||||
### Javascript
|
||||
* Base64 0.2.1 [WTFPL](http://github.com/davidchambers/Base64.js)
|
||||
|
|
5
Makefile
|
@ -1,4 +1,4 @@
|
|||
.PHONY: assets dep clean test gotest gotestrace jstest run run-dev
|
||||
.PHONY: assets dep clean test gotest gotestrace jstest run run-dev ctags
|
||||
|
||||
VERSION ?= $(shell git describe --always --tags)
|
||||
COMMIT ?= $(shell git rev-parse --short=8 HEAD)
|
||||
|
@ -102,3 +102,6 @@ clean:
|
|||
cd ui && rm -rf node_modules
|
||||
rm -f dist/dist_gen.go canned/bin_gen.go server/swagger_gen.go
|
||||
@rm -f .godep .jsdep .jssrc .dev-jssrc .bindata
|
||||
|
||||
ctags:
|
||||
ctags -R --languages="Go" --exclude=.git --exclude=ui .
|
||||
|
|
72
README.md
|
@ -17,27 +17,27 @@ Chronograf is an open-source web application written in Go and React.js that pro
|
|||
Chronograf's [pre-canned dashboards](https://github.com/influxdata/chronograf/tree/master/canned) for the supported [Telegraf](https://github.com/influxdata/telegraf) input plugins.
|
||||
Currently, Chronograf offers dashboard templates for the following Telegraf input plugins:
|
||||
|
||||
* Apache
|
||||
* Consul
|
||||
* Docker
|
||||
* Elastic
|
||||
* [Apache](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/apache)
|
||||
* [Consul](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/consul)
|
||||
* [Docker](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/docker)
|
||||
* [Elastic](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/elasticsearch)
|
||||
* etcd
|
||||
* HAProxy
|
||||
* [HAProxy](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/haproxy)
|
||||
* IIS
|
||||
* InfluxDB
|
||||
* Kubernetes
|
||||
* Memcached
|
||||
* MongoDB
|
||||
* MySQL
|
||||
* [InfluxDB](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/influxdb)
|
||||
* [Kubernetes](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/kubernetes)
|
||||
* [Memcached](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/memcached)
|
||||
* [MongoDB](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/mongodb)
|
||||
* [MySQL](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/mysql)
|
||||
* Network
|
||||
* NGINX
|
||||
* NSQ
|
||||
* Ping
|
||||
* PostgreSQL
|
||||
* [NGINX](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/nginx)
|
||||
* [NSQ](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/nsq)
|
||||
* [Ping](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/ping)
|
||||
* [PostgreSQL](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/postgresql)
|
||||
* Processes
|
||||
* RabbitMQ
|
||||
* Redis
|
||||
* Riak
|
||||
* [RabbitMQ](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/rabbitmq)
|
||||
* [Redis](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/redis)
|
||||
* [Riak](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/riak)
|
||||
* [System](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/SYSTEM_README.md)
|
||||
* [CPU](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/CPU_README.md)
|
||||
* [Disk](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/DISK_README.md)
|
||||
|
@ -47,8 +47,8 @@ Currently, Chronograf offers dashboard templates for the following Telegraf inpu
|
|||
* [Netstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/NETSTAT_README.md)
|
||||
* [Processes](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/PROCESSES_README.md)
|
||||
* [Procstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/procstat/README.md)
|
||||
* Varnish
|
||||
* Windows Performance Counters
|
||||
* [Varnish](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/varnish)
|
||||
* [Windows Performance Counters](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/win_perf_counters)
|
||||
|
||||
> Note: If a `telegraf` instance isn't running the `system` and `cpu` plugins the canned dashboards from that instance won't be generated.
|
||||
|
||||
|
@ -62,7 +62,7 @@ Chronograf's graphing tool that allows you to dig in and create personalized vis
|
|||
|
||||
### Dashboards
|
||||
|
||||
While there is an API and presentation layer for dashboards released in version 1.2.0-beta1, it is not recommended that you try to use Chronograf as a general purpose dashboard solution. The visualization around editing is under way and will be in a future release. Meanwhile, if you would like to try it out you can use `curl` or other HTTP tools to push dashboard definitions directly to the API. If you do so, they should be shown when selected in the application.
|
||||
While there is an API and presentation layer for dashboards released in version 1.2.0-beta1+, it is not recommended that you try to use Chronograf as a general purpose dashboard solution. The visualization around editing is under way and will be in a future release. Meanwhile, if you would like to try it out you can use `curl` or other HTTP tools to push dashboard definitions directly to the API. If you do so, they should be shown when selected in the application.
|
||||
|
||||
Example:
|
||||
```
|
||||
|
@ -91,20 +91,26 @@ A UI for [Kapacitor](https://github.com/influxdata/kapacitor) alert creation and
|
|||
* Simply generate threshold, relative, and deadman alerts
|
||||
* Preview data and alert boundaries while creating an alert
|
||||
* Configure alert destinations - Currently, Chronograf supports sending alerts to:
|
||||
* HTTP/Post
|
||||
* HipChat
|
||||
* OpsGenie
|
||||
* PagerDuty
|
||||
* Sensu
|
||||
* Slack
|
||||
* SMTP/email
|
||||
* Talk
|
||||
* Telegram
|
||||
* TCP
|
||||
* VictorOps
|
||||
* [Alerta](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#alerta)
|
||||
* [Exec](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#exec)
|
||||
* [HipChat](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#hipchat)
|
||||
* [HTTP/Post](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#post)
|
||||
* [OpsGenie](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#opsgenie)
|
||||
* [PagerDuty](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#pagerduty)
|
||||
* [Sensu](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#sensu)
|
||||
* [Slack](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#slack)
|
||||
* [SMTP/Email](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#email)
|
||||
* [Talk](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#talk)
|
||||
* [Telegram](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#telegram)
|
||||
* [TCP](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#tcp)
|
||||
* [VictorOps](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#victorops)
|
||||
* View all active alerts at a glance on the alerting dashboard
|
||||
* Enable and disable existing alert rules with the check of a box
|
||||
|
||||
### GitHub OAuth Login
|
||||
### TLS/HTTPS support
|
||||
See [Chronograf with TLS](https://github.com/influxdata/chronograf/blob/master/docs/tls.md) for more information.
|
||||
|
||||
### OAuth Login
|
||||
See [Chronograf with OAuth 2.0](https://github.com/influxdata/chronograf/blob/master/docs/auth.md) for more information.
|
||||
|
||||
### Advanced Routing
|
||||
|
@ -112,7 +118,7 @@ Change the default root path of the Chronograf server with the `--basepath` opti
|
|||
|
||||
## Versions
|
||||
|
||||
Chronograf v1.2.0-beta1 is a beta release.
|
||||
Chronograf v1.2.0-beta3 is a beta release.
|
||||
We will be iterating quickly based on user feedback and recommend using the [nightly builds](https://www.influxdata.com/downloads/) for the time being.
|
||||
|
||||
Spotted a bug or have a feature request?
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"id": "0e980b97-c162-487b-a815-3f955df6243f",
|
||||
"measurement": "docker",
|
||||
"app": "docker",
|
||||
"measurement": "docker",
|
||||
"autoflow": true,
|
||||
"cells": [
|
||||
{
|
||||
|
@ -16,10 +16,10 @@
|
|||
"query": "SELECT mean(\"usage_percent\") AS \"usage_percent\" FROM \"docker_container_cpu\"",
|
||||
"groupbys": [
|
||||
"\"container_name\""
|
||||
],
|
||||
"wheres": []
|
||||
]
|
||||
}
|
||||
]
|
||||
],
|
||||
"type": "line-stacked"
|
||||
},
|
||||
{
|
||||
"x": 0,
|
||||
|
@ -33,10 +33,73 @@
|
|||
"query": "SELECT mean(\"usage\") AS \"usage\" FROM \"docker_container_mem\"",
|
||||
"groupbys": [
|
||||
"\"container_name\""
|
||||
],
|
||||
"wheres": []
|
||||
]
|
||||
}
|
||||
]
|
||||
],
|
||||
"type": "line-stepplot"
|
||||
},
|
||||
{
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"w": 4,
|
||||
"h": 4,
|
||||
"i": "4c79cefb-5152-410c-9b88-74f9bff7ef01",
|
||||
"name": "Docker - Containers",
|
||||
"queries": [
|
||||
{
|
||||
"query": "SELECT max(\"n_containers\") AS \"max_n_containers\" FROM \"docker\"",
|
||||
"groupbys": [
|
||||
"\"host\""
|
||||
]
|
||||
}
|
||||
],
|
||||
"type": "single-stat"
|
||||
},
|
||||
{
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"w": 4,
|
||||
"h": 4,
|
||||
"i": "4c79cefb-5152-410c-9b88-74f9bff7ef02",
|
||||
"name": "Docker - Images",
|
||||
"queries": [
|
||||
{
|
||||
"query": "SELECT max(\"n_images\") AS \"max_n_images\" FROM \"docker\"",
|
||||
"groupbys": [
|
||||
"\"host\""
|
||||
]
|
||||
}
|
||||
],
|
||||
"type": "single-stat"
|
||||
},
|
||||
{
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"w": 4,
|
||||
"h": 4,
|
||||
"i": "4c79cefb-5152-410c-9b88-74f9bff7ef03",
|
||||
"name": "Docker - Container State",
|
||||
"queries": [
|
||||
{
|
||||
"query": "SELECT max(\"n_containers_running\") AS \"max_n_containers_running\" FROM \"docker\"",
|
||||
"groupbys": [
|
||||
"\"host\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "SELECT max(\"n_containers_stopped\") AS \"max_n_containers_stopped\" FROM \"docker\"",
|
||||
"groupbys": [
|
||||
"\"host\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "SELECT max(\"n_containers_paused\") AS \"max_n_containers_paused\" FROM \"docker\"",
|
||||
"groupbys": [
|
||||
"\"host\""
|
||||
]
|
||||
}
|
||||
],
|
||||
"type": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
@ -0,0 +1,46 @@
|
|||
{
|
||||
"id": "0e980b97-c162-487b-a815-3f955df62440",
|
||||
"measurement": "docker_container_blkio",
|
||||
"app": "docker",
|
||||
"autoflow": true,
|
||||
"cells": [
|
||||
{
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"w": 4,
|
||||
"h": 4,
|
||||
"i": "4c79cefb-5152-410c-9b88-74f9bff7ef50",
|
||||
"name": "Docker - Container Block IO",
|
||||
"queries": [
|
||||
{
|
||||
"query": "SELECT max(\"io_serviced_recursive_read\") AS \"max_io_read\" FROM \"docker_container_blkio\"",
|
||||
"groupbys": [
|
||||
"\"container_name\""
|
||||
],
|
||||
"wheres": []
|
||||
},
|
||||
{
|
||||
"query": "SELECT max(\"io_serviced_recursive_sync\") AS \"max_io_sync\" FROM \"docker_container_blkio\"",
|
||||
"groupbys": [
|
||||
"\"container_name\""
|
||||
],
|
||||
"wheres": []
|
||||
},
|
||||
{
|
||||
"query": "SELECT max(\"io_serviced_recursive_write\") AS \"max_io_write\" FROM \"docker_container_blkio\"",
|
||||
"groupbys": [
|
||||
"\"container_name\""
|
||||
],
|
||||
"wheres": []
|
||||
},
|
||||
{
|
||||
"query": "SELECT max(\"io_serviced_recursive_total\") AS \"max_io_total\" FROM \"docker_container_blkio\"",
|
||||
"groupbys": [
|
||||
"\"container_name\""
|
||||
],
|
||||
"wheres": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,32 @@
|
|||
{
|
||||
"id": "0e980b97-c162-487b-a815-3f955df62430",
|
||||
"measurement": "docker_container_net",
|
||||
"app": "docker",
|
||||
"autoflow": true,
|
||||
"cells": [
|
||||
{
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"w": 4,
|
||||
"h": 4,
|
||||
"i": "4c79cefb-5152-410c-9b88-74f9bff7ef23",
|
||||
"name": "Docker - Container Network",
|
||||
"queries": [
|
||||
{
|
||||
"query": "SELECT derivative(mean(\"tx_bytes\"), 10s) AS \"net_tx_bytes\" FROM \"docker_container_net\"",
|
||||
"groupbys": [
|
||||
"\"container_name\""
|
||||
],
|
||||
"wheres": []
|
||||
},
|
||||
{
|
||||
"query": "SELECT derivative(mean(\"rx_bytes\"), 10s) AS \"net_rx_bytes\" FROM \"docker_container_net\"",
|
||||
"groupbys": [
|
||||
"\"container_name\""
|
||||
],
|
||||
"wheres": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
|
@ -3,7 +3,6 @@ package chronograf
|
|||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// General errors.
|
||||
|
@ -16,7 +15,6 @@ const (
|
|||
ErrUserNotFound = Error("user not found")
|
||||
ErrLayoutInvalid = Error("layout is invalid")
|
||||
ErrAlertNotFound = Error("alert not found")
|
||||
ErrAuthentication = Error("user not authenticated")
|
||||
)
|
||||
|
||||
// Error is a domain error encountered while processing chronograf requests
|
||||
|
@ -311,25 +309,3 @@ type LayoutStore interface {
|
|||
// Update the dashboard in the store.
|
||||
Update(context.Context, Layout) error
|
||||
}
|
||||
|
||||
// Principal is any entity that can be authenticated
|
||||
type Principal string
|
||||
|
||||
// PrincipalKey is used to pass principal
|
||||
// via context.Context to request-scoped
|
||||
// functions.
|
||||
const PrincipalKey Principal = "principal"
|
||||
|
||||
// Authenticator represents a service for authenticating users.
|
||||
type Authenticator interface {
|
||||
// Authenticate returns User associated with token if successful.
|
||||
Authenticate(ctx context.Context, token string) (Principal, error)
|
||||
// Token generates a valid token for Principal lasting a duration
|
||||
Token(context.Context, Principal, time.Duration) (string, error)
|
||||
}
|
||||
|
||||
// TokenExtractor extracts tokens from http requests
|
||||
type TokenExtractor interface {
|
||||
// Extract will return the token or an error.
|
||||
Extract(r *http.Request) (string, error)
|
||||
}
|
||||
|
|
|
@ -3,6 +3,7 @@ package dist
|
|||
//go:generate go-bindata -o dist_gen.go -ignore 'map|go' -pkg dist ../ui/build/...
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/elazarl/go-bindata-assetfs"
|
||||
|
@ -32,6 +33,21 @@ func (b *BindataAssets) Handler() http.Handler {
|
|||
return b
|
||||
}
|
||||
|
||||
// addCacheHeaders requests an hour of Cache-Control and sets an ETag based on file size and modtime
|
||||
func (b *BindataAssets) addCacheHeaders(filename string, w http.ResponseWriter) error {
|
||||
w.Header().Add("Cache-Control", "public, max-age=3600")
|
||||
fi, err := AssetInfo(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
hour, minute, second := fi.ModTime().Clock()
|
||||
etag := fmt.Sprintf(`"%d%d%d%d%d"`, fi.Size(), fi.ModTime().Day(), hour, minute, second)
|
||||
|
||||
w.Header().Set("ETag", etag)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ServeHTTP wraps http.FileServer by returning a default asset if the asset
|
||||
// doesn't exist. This supports single-page react-apps with its own
|
||||
// built-in router. Additionally, we override the content-type if the
|
||||
|
@ -52,8 +68,14 @@ func (b *BindataAssets) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
|||
// Additionally, because we know we are returning the default asset,
|
||||
// we need to set the default asset's content-type.
|
||||
w.Header().Set("Content-Type", b.DefaultContentType)
|
||||
if err := b.addCacheHeaders(b.Default, w); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return Asset(b.Default)
|
||||
}
|
||||
if err := b.addCacheHeaders(name, w); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return octets, nil
|
||||
}
|
||||
var dir http.FileSystem = &assetfs.AssetFS{
|
||||
|
|
|
@ -89,3 +89,22 @@ See all active alerts on the `ALERTING` page, and filter them by `Name`,
|
|||
`Level`, and `Host`:
|
||||
|
||||

|
||||
|
||||
### Alerta TICKscript Parser
|
||||
|
||||
Chronograf offers a parser for TICKscripts that use the [Alerta](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#alerta) output.
|
||||
This is a new feature in version 1.2.0-beta2.
|
||||
|
||||
To use the TICKscript parser:
|
||||
|
||||
* Select Alerta as the output when creating or editing an alert rule
|
||||
* Paste your existing TICKscript in the text input (spacing doesn't matter!)
|
||||
* Save your rule
|
||||
|
||||
You're good to go! The system automatically parses your TICKscript and creates a
|
||||
Chronograf-friendly alert rule.
|
||||
|
||||
> **Notes:**
|
||||
>
|
||||
* Currently, the Alerta TICKscript parser requires users to **paste** their existing TICKscript in the text input. The parser does not support manually entering or editing a TICKscript.
|
||||
* The parser requires users to whitespace delimit any services listed in the TICKscript's [`.services()` attribute](https://docs.influxdata.com/kapacitor/latest/nodes/alert_node/#alerta-services).
|
||||
|
|
|
@ -88,8 +88,8 @@ This is a known issue.
|
|||
|
||||
#### 1. Download and Install Telegraf
|
||||
```
|
||||
wget https://dl.influxdata.com/telegraf/releases/telegraf_1.2.0_amd64.deb
|
||||
sudo dpkg -i telegraf_1.2.0_amd64.deb
|
||||
wget https://dl.influxdata.com/telegraf/releases/telegraf_1.2.1_amd64.deb
|
||||
sudo dpkg -i telegraf_1.2.1_amd64.deb
|
||||
```
|
||||
|
||||
#### 2. Start Telegraf
|
||||
|
|
Before Width: | Height: | Size: 59 KiB |
120
docs/auth.md
|
@ -7,6 +7,18 @@ OAuth 2.0 Style Authentication
|
|||
|
||||
To use authentication in Chronograf, both Github OAuth and JWT signature need to be configured.
|
||||
|
||||
#### Configuring JWT signature
|
||||
|
||||
Set a [JWT](https://tools.ietf.org/html/rfc7519) signature to a random string. This is needed for all OAuth2 providers that you choose to configure. *Keep this random string around!*
|
||||
|
||||
You'll need it each time you start a chronograf server because it is used to verify user authorization. If you are running multiple chronograf servers in an HA configuration set the `TOKEN_SECRET` on each to allow users to stay logged in.
|
||||
|
||||
```sh
|
||||
export TOKEN_SECRET=supersupersecret
|
||||
```
|
||||
|
||||
# Github
|
||||
|
||||
#### Creating Github OAuth Application
|
||||
|
||||
To create a Github OAuth Application follow the [Register your app](https://developer.github.com/guides/basics-of-authentication/#registering-your-app) instructions.
|
||||
|
@ -29,18 +41,6 @@ export GH_CLIENT_ID=b339dd4fddd95abec9aa
|
|||
export GH_CLIENT_SECRET=260041897d3252c146ece6b46ba39bc1e54416dc
|
||||
```
|
||||
|
||||
#### Configuring JWT signature
|
||||
|
||||
Set a [JWT](https://tools.ietf.org/html/rfc7519) signature to a random string.
|
||||
*Keep this random string around!*
|
||||
|
||||
You'll need it each time you start a chronograf server because it is used to verify
|
||||
user authorization. If you are running multiple chronograf servers in an HA configuration set the `TOKEN_SECRET` on each to allow users to stay logged in.
|
||||
|
||||
```sh
|
||||
export TOKEN_SECRET=supersupersecret
|
||||
```
|
||||
|
||||
#### Optional Github Organizations
|
||||
|
||||
To require an organization membership for a user, set the `GH_ORGS` environment variables
|
||||
|
@ -56,72 +56,50 @@ To support multiple organizations use a comma delimted list like so:
|
|||
export GH_ORGS=hill-valley-preservation-sociey,the-pinheads
|
||||
```
|
||||
|
||||
### Design
|
||||
# Google
|
||||
|
||||
The Chronograf authentication scheme is a standard [web application](https://developer.github.com/v3/oauth/#web-application-flow) OAuth flow.
|
||||
#### Creating Google OAuth Application
|
||||
|
||||

|
||||
You will need to obtain a client ID and an application secret by following the steps under "Basic Steps" [here](https://developers.google.com/identity/protocols/OAuth2). Chronograf will also need to be publicly accessible via a fully qualified domain name so that Google properly redirects users back to the application.
|
||||
|
||||
The browser receives a cookie from Chronograf, authorizing it. The contents of the cookie is a JWT whose "sub" claim is the user's primary
|
||||
github email address.
|
||||
This information should be set in the following ENVs:
|
||||
|
||||
On each request to Chronograf, the JWT contained in the cookie will be validated against the `TOKEN_SECRET` signature and checked for expiration.
|
||||
The JWT's "sub" becomes the [principal](https://en.wikipedia.org/wiki/Principal_(computer_security)) used for authorization to resources.
|
||||
* `GOOGLE_CLIENT_ID`
|
||||
* `GOOGLE_CLIENT_SECRET`
|
||||
* `PUBLIC_URL`
|
||||
|
||||
The API provides three endpoints `/oauth`, `/oauth/logout` and `/oauth/github/callback`.
|
||||
Alternatively, this can also be set using the command line switches:
|
||||
|
||||
#### /oauth
|
||||
* `--google-client-id`
|
||||
* `--google-client-secret`
|
||||
* `--public-url`
|
||||
|
||||
The `/oauth` endpoint redirects to Github for OAuth. Chronograf sets the OAuth `state` request parameter to a JWT with a random "sub". Using $TOKEN_SECRET `/oauth/github/callback`
|
||||
can validate the `state` parameter without needing `state` to be saved.
|
||||
#### Optional Google Domains
|
||||
|
||||
#### /oauth/github/callback
|
||||
Similar to Github's organization restriction, Google authentication can be restricted to permit access to Chronograf from only specific domains. These are configured using the `GOOGLE_DOMAINS` ENV or the `--google-domains` switch. Multiple domains are separated with a comma. For example, if we wanted to permit access only from biffspleasurepalace.com and savetheclocktower.com the ENV would be set as follows:
|
||||
|
||||
The `/oauth/github/callback` receives the OAuth `authorization code` and `state`.
|
||||
|
||||
First, it will validate the `state` JWT from the `/oauth` endpoint. `JWT` validation
|
||||
only requires access to the signature token. Therefore, there is no need for `state`
|
||||
to be saved. Additionally, multiple Chronograf servers will not need to share third
|
||||
party storage to synchronize `state`. If this validation fails, the request
|
||||
will be redirected to `/login`.
|
||||
|
||||
Secondly, the endpoint will use the `authorization code` to retrieve a valid OAuth token
|
||||
with the `user:email` scope. If unable to get a token from Github, the request will
|
||||
be redirected to `/login`.
|
||||
|
||||
Finally, the endpoint will attempt to get the primary email address of the Github user.
|
||||
Again, if not successful, the request will redirect to `/login`.
|
||||
|
||||
The email address is used as the subject claim for a new JWT. This JWT becomes the
|
||||
value of the cookie sent back to the browser. The cookie is valid for thirty days.
|
||||
|
||||
Next, the request is redirected to `/`.
|
||||
|
||||
For all API calls to `/chronograf/v1`, the server checks for the existence and validity
|
||||
of the JWT within the cookie value.
|
||||
If the request did not have a valid JWT, the API returns `HTTP/1.1 401 Unauthorized`.
|
||||
|
||||
#### /oauth/logout
|
||||
|
||||
Simply expires the session cookie and redirects to `/`.
|
||||
|
||||
### Authorization
|
||||
|
||||
After successful validation of the JWT, each API endpoint of `/chronograf/v1` receives the
|
||||
JWT subject within the `http.Request` as a `context.Context` value.
|
||||
|
||||
Within the Go API code all interfaces take `context.Context`. This means that each
|
||||
interface can use the value as a principal. The design allows for authorization to happen
|
||||
at the level of design most closely related to the problem.
|
||||
|
||||
An example usage in Go would be:
|
||||
|
||||
```go
|
||||
func ShallIPass(ctx context.Context) (string, error) {
|
||||
principal := ctx.Value(chronograf.PrincipalKey).(chronograf.Principal)
|
||||
if principal != "gandolf@moria.misty.mt" {
|
||||
return "you shall not pass", chronograf.ErrAuthentication
|
||||
}
|
||||
return "run you fools", nil
|
||||
}
|
||||
```sh
|
||||
export GOOGLE_DOMAINS=biffspleasurepalance.com,savetheclocktower.com
|
||||
```
|
||||
|
||||
# Heroku
|
||||
|
||||
#### Creating Heroku Application
|
||||
|
||||
To obtain a client ID and application secret for Heroku, you will need to follow the guide posted [here](https://devcenter.heroku.com/articles/oauth#register-client). Once your application has been created, those two values should be inserted into the following ENVs:
|
||||
|
||||
* `HEROKU_CLIENT_ID`
|
||||
* `HEROKU_SECRET`
|
||||
|
||||
The equivalent command line switches are:
|
||||
|
||||
* `--heroku-client-id`
|
||||
* `--heroku-secret`
|
||||
|
||||
#### Optional Heroku Organizations
|
||||
|
||||
Like the other OAuth2 providers, access to Chronograf via Heroku can be restricted to members of specific Heroku organizations. This is controlled using the `HEROKU_ORGS` ENV or the `--heroku-organizations` switch and is comma-separated. If we wanted to permit access from the `hill-valley-preservation-society` orgization and `the-pinheads` organization, we would use the following ENV:
|
||||
|
||||
```sh
|
||||
export HEROKU_ORGS=hill-valley-preservation-sociey,the-pinheads
|
||||
```
|
||||
|
|
Before Width: | Height: | Size: 55 KiB After Width: | Height: | Size: 71 KiB |
Before Width: | Height: | Size: 86 KiB After Width: | Height: | Size: 163 KiB |
Before Width: | Height: | Size: 30 KiB After Width: | Height: | Size: 33 KiB |
Before Width: | Height: | Size: 66 KiB After Width: | Height: | Size: 82 KiB |
|
@ -0,0 +1,69 @@
|
|||
## Chronograf TLS
|
||||
|
||||
Chronograf supports TLS to securely communicate between the browser and server via
|
||||
HTTPS.
|
||||
|
||||
We recommend using HTTPS with Chronograf. If you are not using a TLS termination proxy,
|
||||
you can run Chronograf's server with TLS connections.
|
||||
### TL;DR
|
||||
|
||||
```sh
|
||||
chronograf --cert=my.crt --key=my.key
|
||||
```
|
||||
|
||||
### Running Chronograf with TLS
|
||||
|
||||
Chronograf server has command line and environment variable options to specify
|
||||
the certificate and key files. The server reads and parses a public/private key
|
||||
pair from these files. The files must contain PEM encoded data.
|
||||
|
||||
In Chronograf all command line options also have a corresponding environment
|
||||
variable.
|
||||
|
||||
To specify the certificate file either use the `--cert` CLI option or `TLS_CERTIFICATE`
|
||||
environment variable.
|
||||
|
||||
To specify the key file either use the `--key` CLI option or `TLS_PRIVATE_KEY`
|
||||
environment variable.
|
||||
|
||||
To specify the certificate and key if both are in the same file either use the `--cert`
|
||||
CLI option or `TLS_CERTIFICATE` environment variable.
|
||||
|
||||
#### Example with CLI options
|
||||
```sh
|
||||
chronograf --cert=my.crt --key=my.key
|
||||
```
|
||||
|
||||
#### Example with environment variables
|
||||
```sh
|
||||
TLS_CERTIFICATE=my.crt TLS_PRIVATE_KEY=my.key chronograf
|
||||
```
|
||||
|
||||
#### Docker example with environment variables
|
||||
```sh
|
||||
docker run -v /host/path/to/certs:/certs -e TLS_CERTIFICATE=/certs/my.crt -e TLS_PRIVATE_KEY=/certs/my.key quay.io/influxdb/chronograf:latest
|
||||
```
|
||||
|
||||
### Testing with self-signed certificates
|
||||
In a production environment you should not use self-signed certificates. However,
|
||||
for testing it is fast to create your own certs.
|
||||
|
||||
To create a cert and key in one file with openssl:
|
||||
|
||||
```sh
|
||||
openssl req -x509 -newkey rsa:4096 -sha256 -nodes -keyout testing.pem -out testing.pem -subj "/CN=localhost" -days 365
|
||||
```
|
||||
|
||||
Next, set the environment variable `TLS_CERTIFICATE`:
|
||||
```sh
|
||||
export TLS_CERTIFICATE=$PWD/testing.pem
|
||||
```
|
||||
|
||||
Run chronograf:
|
||||
|
||||
```sh
|
||||
./chronograf
|
||||
INFO[0000] Serving chronograf at https://[::]:8888 component=server
|
||||
```
|
||||
|
||||
In the first log message you should see `https` rather than `http`.
|
|
@ -119,6 +119,44 @@ func (c *Client) Enable(ctx context.Context, href string) (*Task, error) {
|
|||
return c.updateStatus(ctx, href, client.Enabled)
|
||||
}
|
||||
|
||||
// AllStatus returns the status of all tasks in kapacitor
|
||||
func (c *Client) AllStatus(ctx context.Context) (map[string]string, error) {
|
||||
kapa, err := c.kapaClient(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Only get the status, id and link section back
|
||||
opts := &client.ListTasksOptions{
|
||||
Fields: []string{"status"},
|
||||
}
|
||||
tasks, err := kapa.ListTasks(opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
taskStatuses := map[string]string{}
|
||||
for _, task := range tasks {
|
||||
taskStatuses[task.ID] = task.Status.String()
|
||||
}
|
||||
|
||||
return taskStatuses, nil
|
||||
}
|
||||
|
||||
// Status returns the status of a task in kapacitor
|
||||
func (c *Client) Status(ctx context.Context, href string) (string, error) {
|
||||
kapa, err := c.kapaClient(ctx)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
task, err := kapa.Task(client.Link{Href: href}, nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return task.Status.String(), nil
|
||||
}
|
||||
|
||||
// Update changes the tickscript of a given id.
|
||||
func (c *Client) Update(ctx context.Context, href string, rule chronograf.AlertRule) (*Task, error) {
|
||||
kapa, err := c.kapaClient(ctx)
|
||||
|
|
|
@ -199,6 +199,280 @@ trigger
|
|||
}
|
||||
}
|
||||
|
||||
func TestThresholdStringCrit(t *testing.T) {
|
||||
alert := chronograf.AlertRule{
|
||||
Name: "haproxy",
|
||||
Trigger: "threshold",
|
||||
Alerts: []string{"email"},
|
||||
TriggerValues: chronograf.TriggerValues{
|
||||
Operator: "equal to",
|
||||
Value: "DOWN",
|
||||
},
|
||||
Every: "10s",
|
||||
Message: `Haproxy monitor : {{.ID}} : {{ index .Tags "server" }} : {{ index .Tags "pxname" }} is {{ .Level }} `,
|
||||
Details: "Email template",
|
||||
Query: chronograf.QueryConfig{
|
||||
Database: "influxdb",
|
||||
RetentionPolicy: "autogen",
|
||||
Measurement: "haproxy",
|
||||
Fields: []chronograf.Field{
|
||||
{
|
||||
Field: "status",
|
||||
Funcs: []string{"last"},
|
||||
},
|
||||
},
|
||||
GroupBy: chronograf.GroupBy{
|
||||
Time: "10s",
|
||||
Tags: []string{"pxname"},
|
||||
},
|
||||
AreTagsAccepted: true,
|
||||
},
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
alert chronograf.AlertRule
|
||||
want chronograf.TICKScript
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "Test valid template alert",
|
||||
alert: alert,
|
||||
want: `var db = 'influxdb'
|
||||
|
||||
var rp = 'autogen'
|
||||
|
||||
var measurement = 'haproxy'
|
||||
|
||||
var groupBy = ['pxname']
|
||||
|
||||
var whereFilter = lambda: TRUE
|
||||
|
||||
var period = 10s
|
||||
|
||||
var every = 10s
|
||||
|
||||
var name = 'haproxy'
|
||||
|
||||
var idVar = name + ':{{.Group}}'
|
||||
|
||||
var message = 'Haproxy monitor : {{.ID}} : {{ index .Tags "server" }} : {{ index .Tags "pxname" }} is {{ .Level }} '
|
||||
|
||||
var idTag = 'alertID'
|
||||
|
||||
var levelTag = 'level'
|
||||
|
||||
var messageField = 'message'
|
||||
|
||||
var durationField = 'duration'
|
||||
|
||||
var outputDB = 'chronograf'
|
||||
|
||||
var outputRP = 'autogen'
|
||||
|
||||
var outputMeasurement = 'alerts'
|
||||
|
||||
var triggerType = 'threshold'
|
||||
|
||||
var details = 'Email template'
|
||||
|
||||
var crit = 'DOWN'
|
||||
|
||||
var data = stream
|
||||
|from()
|
||||
.database(db)
|
||||
.retentionPolicy(rp)
|
||||
.measurement(measurement)
|
||||
.groupBy(groupBy)
|
||||
.where(whereFilter)
|
||||
|window()
|
||||
.period(period)
|
||||
.every(every)
|
||||
.align()
|
||||
|last('status')
|
||||
.as('value')
|
||||
|
||||
var trigger = data
|
||||
|alert()
|
||||
.crit(lambda: "value" == crit)
|
||||
.stateChangesOnly()
|
||||
.message(message)
|
||||
.id(idVar)
|
||||
.idTag(idTag)
|
||||
.levelTag(levelTag)
|
||||
.messageField(messageField)
|
||||
.durationField(durationField)
|
||||
.details(details)
|
||||
.email()
|
||||
|
||||
trigger
|
||||
|influxDBOut()
|
||||
.create()
|
||||
.database(outputDB)
|
||||
.retentionPolicy(outputRP)
|
||||
.measurement(outputMeasurement)
|
||||
.tag('alertName', name)
|
||||
.tag('triggerType', triggerType)
|
||||
|
||||
trigger
|
||||
|httpOut('output')
|
||||
`,
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
gen := Alert{}
|
||||
got, err := gen.Generate(tt.alert)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("%q. Threshold() error = %v, wantErr %v", tt.name, err, tt.wantErr)
|
||||
continue
|
||||
}
|
||||
if got != tt.want {
|
||||
diff := diffmatchpatch.New()
|
||||
delta := diff.DiffMain(string(tt.want), string(got), true)
|
||||
t.Errorf("%q\n%s", tt.name, diff.DiffPrettyText(delta))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Check with Nathaniel if kapacitor can do inequalities on strings
|
||||
// If it cannot, I think we should add operator checks.
|
||||
func TestThresholdStringCritGreater(t *testing.T) {
|
||||
alert := chronograf.AlertRule{
|
||||
Name: "haproxy",
|
||||
Trigger: "threshold",
|
||||
Alerts: []string{"email"},
|
||||
TriggerValues: chronograf.TriggerValues{
|
||||
Operator: "greater than",
|
||||
Value: "DOWN",
|
||||
},
|
||||
Every: "10s",
|
||||
Message: `Haproxy monitor : {{.ID}} : {{ index .Tags "server" }} : {{ index .Tags "pxname" }} is {{ .Level }} `,
|
||||
Details: "Email template",
|
||||
Query: chronograf.QueryConfig{
|
||||
Database: "influxdb",
|
||||
RetentionPolicy: "autogen",
|
||||
Measurement: "haproxy",
|
||||
Fields: []chronograf.Field{
|
||||
{
|
||||
Field: "status",
|
||||
Funcs: []string{"last"},
|
||||
},
|
||||
},
|
||||
GroupBy: chronograf.GroupBy{
|
||||
Time: "10s",
|
||||
Tags: []string{"pxname"},
|
||||
},
|
||||
AreTagsAccepted: true,
|
||||
},
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
alert chronograf.AlertRule
|
||||
want chronograf.TICKScript
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "Test valid template alert",
|
||||
alert: alert,
|
||||
want: `var db = 'influxdb'
|
||||
|
||||
var rp = 'autogen'
|
||||
|
||||
var measurement = 'haproxy'
|
||||
|
||||
var groupBy = ['pxname']
|
||||
|
||||
var whereFilter = lambda: TRUE
|
||||
|
||||
var period = 10s
|
||||
|
||||
var every = 10s
|
||||
|
||||
var name = 'haproxy'
|
||||
|
||||
var idVar = name + ':{{.Group}}'
|
||||
|
||||
var message = 'Haproxy monitor : {{.ID}} : {{ index .Tags "server" }} : {{ index .Tags "pxname" }} is {{ .Level }} '
|
||||
|
||||
var idTag = 'alertID'
|
||||
|
||||
var levelTag = 'level'
|
||||
|
||||
var messageField = 'message'
|
||||
|
||||
var durationField = 'duration'
|
||||
|
||||
var outputDB = 'chronograf'
|
||||
|
||||
var outputRP = 'autogen'
|
||||
|
||||
var outputMeasurement = 'alerts'
|
||||
|
||||
var triggerType = 'threshold'
|
||||
|
||||
var details = 'Email template'
|
||||
|
||||
var crit = 'DOWN'
|
||||
|
||||
var data = stream
|
||||
|from()
|
||||
.database(db)
|
||||
.retentionPolicy(rp)
|
||||
.measurement(measurement)
|
||||
.groupBy(groupBy)
|
||||
.where(whereFilter)
|
||||
|window()
|
||||
.period(period)
|
||||
.every(every)
|
||||
.align()
|
||||
|last('status')
|
||||
.as('value')
|
||||
|
||||
var trigger = data
|
||||
|alert()
|
||||
.crit(lambda: "value" > crit)
|
||||
.stateChangesOnly()
|
||||
.message(message)
|
||||
.id(idVar)
|
||||
.idTag(idTag)
|
||||
.levelTag(levelTag)
|
||||
.messageField(messageField)
|
||||
.durationField(durationField)
|
||||
.details(details)
|
||||
.email()
|
||||
|
||||
trigger
|
||||
|influxDBOut()
|
||||
.create()
|
||||
.database(outputDB)
|
||||
.retentionPolicy(outputRP)
|
||||
.measurement(outputMeasurement)
|
||||
.tag('alertName', name)
|
||||
.tag('triggerType', triggerType)
|
||||
|
||||
trigger
|
||||
|httpOut('output')
|
||||
`,
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
gen := Alert{}
|
||||
got, err := gen.Generate(tt.alert)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("%q. Threshold() error = %v, wantErr %v", tt.name, err, tt.wantErr)
|
||||
continue
|
||||
}
|
||||
if got != tt.want {
|
||||
diff := diffmatchpatch.New()
|
||||
delta := diff.DiffMain(string(tt.want), string(got), true)
|
||||
t.Errorf("%q\n%s", tt.name, diff.DiffPrettyText(delta))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestThresholdDetail(t *testing.T) {
|
||||
alert := chronograf.AlertRule{
|
||||
Name: "name",
|
||||
|
|
|
@ -3,6 +3,7 @@ package kapacitor
|
|||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
|
@ -39,15 +40,13 @@ func Vars(rule chronograf.AlertRule) (string, error) {
|
|||
%s
|
||||
var crit = %s
|
||||
`
|
||||
return fmt.Sprintf(vars,
|
||||
common,
|
||||
rule.TriggerValues.Value), nil
|
||||
return fmt.Sprintf(vars, common, formatValue(rule.TriggerValues.Value)), nil
|
||||
} else {
|
||||
vars := `
|
||||
%s
|
||||
var lower = %s
|
||||
var upper = %s
|
||||
`
|
||||
`
|
||||
return fmt.Sprintf(vars,
|
||||
common,
|
||||
rule.TriggerValues.Value,
|
||||
|
@ -178,3 +177,13 @@ func whereFilter(q chronograf.QueryConfig) string {
|
|||
|
||||
return "lambda: TRUE"
|
||||
}
|
||||
|
||||
// formatValue return the same string if a numeric type or if it is a string
|
||||
// will return it as a kapacitor formatted single-quoted string
|
||||
func formatValue(value string) string {
|
||||
// Test if numeric if it can be converted to a float
|
||||
if _, err := strconv.ParseFloat(value, 64); err == nil {
|
||||
return value
|
||||
}
|
||||
return "'" + value + "'"
|
||||
}
|
||||
|
|
|
@ -0,0 +1,50 @@
|
|||
package kapacitor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
)
|
||||
|
||||
func TestVarsCritStringEqual(t *testing.T) {
|
||||
alert := chronograf.AlertRule{
|
||||
Name: "name",
|
||||
Trigger: "threshold",
|
||||
TriggerValues: chronograf.TriggerValues{
|
||||
Operator: "equal to",
|
||||
Value: "DOWN",
|
||||
},
|
||||
Every: "30s",
|
||||
Query: chronograf.QueryConfig{
|
||||
Database: "telegraf",
|
||||
Measurement: "haproxy",
|
||||
RetentionPolicy: "autogen",
|
||||
Fields: []chronograf.Field{
|
||||
{
|
||||
Field: "status",
|
||||
},
|
||||
},
|
||||
GroupBy: chronograf.GroupBy{
|
||||
Time: "10m",
|
||||
Tags: []string{"pxname"},
|
||||
},
|
||||
AreTagsAccepted: true,
|
||||
},
|
||||
}
|
||||
|
||||
raw, err := Vars(alert)
|
||||
if err != nil {
|
||||
fmt.Printf("%s", raw)
|
||||
t.Fatalf("Error generating alert: %v %s", err, raw)
|
||||
}
|
||||
|
||||
tick, err := formatTick(raw)
|
||||
if err != nil {
|
||||
t.Errorf("Error formatting alert: %v %s", err, raw)
|
||||
}
|
||||
|
||||
if err := validateTick(tick); err != nil {
|
||||
t.Errorf("Error validating alert: %v %s", err, tick)
|
||||
}
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package server
|
||||
package oauth2
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
@ -17,7 +17,7 @@ type CookieExtractor struct {
|
|||
func (c *CookieExtractor) Extract(r *http.Request) (string, error) {
|
||||
cookie, err := r.Cookie(c.Name)
|
||||
if err != nil {
|
||||
return "", chronograf.ErrAuthentication
|
||||
return "", ErrAuthentication
|
||||
}
|
||||
return cookie.Value, nil
|
||||
}
|
||||
|
@ -29,14 +29,14 @@ type BearerExtractor struct{}
|
|||
func (b *BearerExtractor) Extract(r *http.Request) (string, error) {
|
||||
s := r.Header.Get("Authorization")
|
||||
if s == "" {
|
||||
return "", chronograf.ErrAuthentication
|
||||
return "", ErrAuthentication
|
||||
}
|
||||
|
||||
// Check for Bearer token.
|
||||
strs := strings.Split(s, " ")
|
||||
|
||||
if len(strs) != 2 || strs[0] != "Bearer" {
|
||||
return "", chronograf.ErrAuthentication
|
||||
return "", ErrAuthentication
|
||||
}
|
||||
return strs[1], nil
|
||||
}
|
||||
|
@ -45,7 +45,7 @@ func (b *BearerExtractor) Extract(r *http.Request) (string, error) {
|
|||
// will be run. The principal will be sent to the next handler via the request's
|
||||
// Context. It is up to the next handler to determine if the principal has access.
|
||||
// On failure, will return http.StatusUnauthorized.
|
||||
func AuthorizedToken(auth chronograf.Authenticator, te chronograf.TokenExtractor, logger chronograf.Logger, next http.Handler) http.HandlerFunc {
|
||||
func AuthorizedToken(auth Authenticator, te TokenExtractor, logger chronograf.Logger, next http.Handler) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
log := logger.
|
||||
WithField("component", "auth").
|
||||
|
@ -55,12 +55,13 @@ func AuthorizedToken(auth chronograf.Authenticator, te chronograf.TokenExtractor
|
|||
|
||||
token, err := te.Extract(r)
|
||||
if err != nil {
|
||||
log.Error("Unable to extract token")
|
||||
// Happens when Provider okays authentication, but Token is bad
|
||||
log.Info("Unauthenticated user")
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
// We do not check the validity of the principal. Those
|
||||
// server further down the chain should do so.
|
||||
// served further down the chain should do so.
|
||||
principal, err := auth.Authenticate(r.Context(), token)
|
||||
if err != nil {
|
||||
log.Error("Invalid token")
|
||||
|
@ -69,7 +70,7 @@ func AuthorizedToken(auth chronograf.Authenticator, te chronograf.TokenExtractor
|
|||
}
|
||||
|
||||
// Send the principal to the next handler
|
||||
ctx := context.WithValue(r.Context(), chronograf.PrincipalKey, principal)
|
||||
ctx := context.WithValue(r.Context(), PrincipalKey, principal)
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
return
|
||||
})
|
|
@ -1,4 +1,4 @@
|
|||
package server_test
|
||||
package oauth2_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
@ -8,9 +8,8 @@ import (
|
|||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
clog "github.com/influxdata/chronograf/log"
|
||||
"github.com/influxdata/chronograf/server"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
)
|
||||
|
||||
func TestCookieExtractor(t *testing.T) {
|
||||
|
@ -28,7 +27,7 @@ func TestCookieExtractor(t *testing.T) {
|
|||
Value: "reallyimportant",
|
||||
Lookup: "Doesntexist",
|
||||
Expected: "",
|
||||
Err: chronograf.ErrAuthentication,
|
||||
Err: oauth2.ErrAuthentication,
|
||||
},
|
||||
{
|
||||
Desc: "Cookie token extracted",
|
||||
|
@ -46,7 +45,7 @@ func TestCookieExtractor(t *testing.T) {
|
|||
Value: test.Value,
|
||||
})
|
||||
|
||||
var e chronograf.TokenExtractor = &server.CookieExtractor{
|
||||
var e oauth2.TokenExtractor = &oauth2.CookieExtractor{
|
||||
Name: test.Lookup,
|
||||
}
|
||||
actual, err := e.Extract(req)
|
||||
|
@ -74,21 +73,21 @@ func TestBearerExtractor(t *testing.T) {
|
|||
Header: "Doesntexist",
|
||||
Value: "reallyimportant",
|
||||
Expected: "",
|
||||
Err: chronograf.ErrAuthentication,
|
||||
Err: oauth2.ErrAuthentication,
|
||||
},
|
||||
{
|
||||
Desc: "Auth header doesn't have Bearer",
|
||||
Header: "Authorization",
|
||||
Value: "Bad Value",
|
||||
Expected: "",
|
||||
Err: chronograf.ErrAuthentication,
|
||||
Err: oauth2.ErrAuthentication,
|
||||
},
|
||||
{
|
||||
Desc: "Auth header doesn't have Bearer token",
|
||||
Header: "Authorization",
|
||||
Value: "Bearer",
|
||||
Expected: "",
|
||||
Err: chronograf.ErrAuthentication,
|
||||
Err: oauth2.ErrAuthentication,
|
||||
},
|
||||
{
|
||||
Desc: "Authorization Bearer token success",
|
||||
|
@ -102,7 +101,7 @@ func TestBearerExtractor(t *testing.T) {
|
|||
req, _ := http.NewRequest("", "http://howdy.com", nil)
|
||||
req.Header.Add(test.Header, test.Value)
|
||||
|
||||
var e chronograf.TokenExtractor = &server.BearerExtractor{}
|
||||
var e oauth2.TokenExtractor = &oauth2.BearerExtractor{}
|
||||
actual, err := e.Extract(req)
|
||||
if err != test.Err {
|
||||
t.Errorf("Bearer extract error; expected %v actual %v", test.Err, err)
|
||||
|
@ -123,15 +122,15 @@ func (m *MockExtractor) Extract(*http.Request) (string, error) {
|
|||
}
|
||||
|
||||
type MockAuthenticator struct {
|
||||
Principal chronograf.Principal
|
||||
Principal oauth2.Principal
|
||||
Err error
|
||||
}
|
||||
|
||||
func (m *MockAuthenticator) Authenticate(context.Context, string) (chronograf.Principal, error) {
|
||||
func (m *MockAuthenticator) Authenticate(context.Context, string) (oauth2.Principal, error) {
|
||||
return m.Principal, m.Err
|
||||
}
|
||||
|
||||
func (m *MockAuthenticator) Token(context.Context, chronograf.Principal, time.Duration) (string, error) {
|
||||
func (m *MockAuthenticator) Token(context.Context, oauth2.Principal, time.Duration) (string, error) {
|
||||
return "", m.Err
|
||||
}
|
||||
|
||||
|
@ -139,7 +138,7 @@ func TestAuthorizedToken(t *testing.T) {
|
|||
var tests = []struct {
|
||||
Desc string
|
||||
Code int
|
||||
Principal chronograf.Principal
|
||||
Principal oauth2.Principal
|
||||
ExtractorErr error
|
||||
AuthErr error
|
||||
Expected string
|
||||
|
@ -155,19 +154,21 @@ func TestAuthorizedToken(t *testing.T) {
|
|||
AuthErr: errors.New("error"),
|
||||
},
|
||||
{
|
||||
Desc: "Authorized ok",
|
||||
Code: http.StatusOK,
|
||||
Principal: "Principal Strickland",
|
||||
Expected: "Principal Strickland",
|
||||
Desc: "Authorized ok",
|
||||
Code: http.StatusOK,
|
||||
Principal: oauth2.Principal{
|
||||
Subject: "Principal Strickland",
|
||||
},
|
||||
Expected: "Principal Strickland",
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
// next is a sentinel StatusOK and
|
||||
// principal recorder.
|
||||
var principal chronograf.Principal
|
||||
var principal oauth2.Principal
|
||||
next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
principal = r.Context().Value(chronograf.PrincipalKey).(chronograf.Principal)
|
||||
principal = r.Context().Value(oauth2.PrincipalKey).(oauth2.Principal)
|
||||
})
|
||||
req, _ := http.NewRequest("GET", "", nil)
|
||||
w := httptest.NewRecorder()
|
||||
|
@ -181,7 +182,7 @@ func TestAuthorizedToken(t *testing.T) {
|
|||
}
|
||||
|
||||
logger := clog.New(clog.DebugLevel)
|
||||
handler := server.AuthorizedToken(a, e, logger, next)
|
||||
handler := oauth2.AuthorizedToken(a, e, logger, next)
|
||||
handler.ServeHTTP(w, req)
|
||||
if w.Code != test.Code {
|
||||
t.Errorf("Status code expected: %d actual %d", test.Code, w.Code)
|
|
@ -0,0 +1,140 @@
|
|||
// The oauth2 package provides http.Handlers necessary for implementing Oauth2
|
||||
// authentication with multiple Providers.
|
||||
//
|
||||
// This is how the pieces of this package fit together:
|
||||
//
|
||||
// ┌────────────────────────────────────────┐
|
||||
// │github.com/influxdata/chronograf/oauth2 │
|
||||
// ├────────────────────────────────────────┴────────────────────────────────────┐
|
||||
// │┌────────────────────┐ │
|
||||
// ││ <<interface>> │ ┌─────────────────────────┐ │
|
||||
// ││ Authenticator │ │ CookieMux │ │
|
||||
// │├────────────────────┤ ├─────────────────────────┤ │
|
||||
// ││Authenticate() │ Auth │+SuccessURL : string │ │
|
||||
// ││Token() ◀────────│+FailureURL : string │──────────┐ │
|
||||
// │└──────────△─────────┘ │+Now : func() time.Time │ │ │
|
||||
// │ │ └─────────────────────────┘ │ │
|
||||
// │ │ │ │ │
|
||||
// │ │ │ │ │
|
||||
// │ │ Provider│ │ │
|
||||
// │ │ ┌───┘ │ │
|
||||
// │┌──────────┴────────────┐ │ ▽ │
|
||||
// ││ JWT │ │ ┌───────────────┐ │
|
||||
// │├───────────────────────┤ ▼ │ <<interface>> │ │
|
||||
// ││+Secret : string │ ┌───────────────┐ │ OAuth2Mux │ │
|
||||
// ││+Now : func() time.Time│ │ <<interface>> │ ├───────────────┤ │
|
||||
// │└───────────────────────┘ │ Provider │ │Login() │ │
|
||||
// │ ├───────────────┤ │Logout() │ │
|
||||
// │ │ID() │ │Callback() │ │
|
||||
// │ │Scopes() │ └───────────────┘ │
|
||||
// │ │Secret() │ │
|
||||
// │ │Authenticator()│ │
|
||||
// │ └───────────────┘ │
|
||||
// │ △ │
|
||||
// │ │ │
|
||||
// │ ┌─────────────────────────┼─────────────────────────┐ │
|
||||
// │ │ │ │ │
|
||||
// │ │ │ │ │
|
||||
// │ │ │ │ │
|
||||
// │ ┌───────────────────────┐ ┌──────────────────────┐ ┌──────────────────────┐│
|
||||
// │ │ Github │ │ Google │ │ Heroku ││
|
||||
// │ ├───────────────────────┤ ├──────────────────────┤ ├──────────────────────┤│
|
||||
// │ │+ClientID : string │ │+ClientID : string │ │+ClientID : string ││
|
||||
// │ │+ClientSecret : string │ │+ClientSecret : string│ │+ClientSecret : string││
|
||||
// │ │+Orgs : []string │ │+Domains : []string │ └──────────────────────┘│
|
||||
// │ └───────────────────────┘ │+RedirectURL : string │ │
|
||||
// │ └──────────────────────┘ │
|
||||
// └─────────────────────────────────────────────────────────────────────────────┘
|
||||
//
|
||||
// The design focuses on an Authenticator, a Provider, and an OAuth2Mux. Their
|
||||
// responsibilities, respectively, are to decode and encode secrets received
|
||||
// from a Provider, to perform Provider specific operations in order to extract
|
||||
// information about a user, and to produce the handlers which persist secrets.
|
||||
// To add a new provider, You need only implement the Provider interface, and
|
||||
// add its endpoints to the server Mux.
|
||||
//
|
||||
// The Oauth2 flow between a browser, backend, and a Provider that this package
|
||||
// implements is pictured below for reference.
|
||||
//
|
||||
// ┌─────────┐ ┌───────────┐ ┌────────┐
|
||||
// │ Browser │ │Chronograf │ │Provider│
|
||||
// └─────────┘ └───────────┘ └────────┘
|
||||
// │ │ │
|
||||
// ├─────── GET /auth ─────────▶ │
|
||||
// │ │ │
|
||||
// │ │ │
|
||||
// ◀ ─ ─ ─302 to Provider ─ ─ ┤ │
|
||||
// │ │ │
|
||||
// │ │ │
|
||||
// ├──────────────── GET /auth w/ callback ─────────────────────▶
|
||||
// │ │ │
|
||||
// │ │ │
|
||||
// ◀─ ─ ─ ─ ─ ─ ─ 302 to Chronograf Callback ─ ─ ─ ─ ─ ─ ─ ─ ┤
|
||||
// │ │ │
|
||||
// │ Code and State from │ │
|
||||
// │ Provider │ │
|
||||
// ├───────────────────────────▶ Request token w/ code & │
|
||||
// │ │ state │
|
||||
// │ ├────────────────────────────────▶
|
||||
// │ │ │
|
||||
// │ │ Response with │
|
||||
// │ │ Token │
|
||||
// │ Set cookie, Redirect │◀ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┤
|
||||
// │ to / │ │
|
||||
// ◀───────────────────────────┤ │
|
||||
// │ │ │
|
||||
// │ │ │
|
||||
// │ │ │
|
||||
// │ │ │
|
||||
//
|
||||
// The browser ultimately receives a cookie from Chronograf, authorizing it.
|
||||
// Its contents are encoded as a JWT whose "sub" claim is the user's email
|
||||
// address for whatever provider they have authenticated with. Each request to
|
||||
// Chronograf will validate the contents of this JWT against the `TOKEN_SECRET`
|
||||
// and checked for expiration. The JWT's "sub" becomes the
|
||||
// https://en.wikipedia.org/wiki/Principal_(computer_security) used for
|
||||
// authorization to resources.
|
||||
//
|
||||
// The Mux is responsible for providing three http.Handlers for servicing the
|
||||
// above interaction. These are mounted at specific endpoints by convention
|
||||
// shared with the front end. Any future Provider routes should follow the same
|
||||
// convention to ensure compatibility with the front end logic. These routes
|
||||
// and their responsibilities are:
|
||||
//
|
||||
// /oauth/{provider}/login
|
||||
//
|
||||
// The `/oauth` endpoint redirects to the Provider for OAuth. Chronograf sets
|
||||
// the OAuth `state` request parameter to a JWT with a random "sub". Using
|
||||
// $TOKEN_SECRET `/oauth/github/callback` can validate the `state` parameter
|
||||
// without needing `state` to be saved.
|
||||
//
|
||||
// /oauth/{provider}/callback
|
||||
//
|
||||
// The `/oauth/github/callback` receives the OAuth `authorization code` and `state`.
|
||||
//
|
||||
// First, it will validate the `state` JWT from the `/oauth` endpoint. `JWT` validation
|
||||
// only requires access to the signature token. Therefore, there is no need for `state`
|
||||
// to be saved. Additionally, multiple Chronograf servers will not need to share third
|
||||
// party storage to synchronize `state`. If this validation fails, the request
|
||||
// will be redirected to `/login`.
|
||||
//
|
||||
// Secondly, the endpoint will use the `authorization code` to retrieve a valid OAuth token
|
||||
// with the `user:email` scope. If unable to get a token from Github, the request will
|
||||
// be redirected to `/login`.
|
||||
//
|
||||
// Finally, the endpoint will attempt to get the primary email address of the
|
||||
// user. Again, if not successful, the request will redirect to `/login`.
|
||||
//
|
||||
// The email address is used as the subject claim for a new JWT. This JWT becomes the
|
||||
// value of the cookie sent back to the browser. The cookie is valid for thirty days.
|
||||
//
|
||||
// Next, the request is redirected to `/`.
|
||||
//
|
||||
// For all API calls to `/chronograf/v1`, the server checks for the existence and validity
|
||||
// of the JWT within the cookie value.
|
||||
// If the request did not have a valid JWT, the API returns `HTTP/1.1 401 Unauthorized`.
|
||||
//
|
||||
// /oauth/{provider}/logout
|
||||
//
|
||||
// Simply expires the session cookie and redirects to `/`.
|
||||
package oauth2
|
|
@ -0,0 +1,162 @@
|
|||
package oauth2
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/google/go-github/github"
|
||||
"github.com/influxdata/chronograf"
|
||||
"golang.org/x/oauth2"
|
||||
ogh "golang.org/x/oauth2/github"
|
||||
)
|
||||
|
||||
var _ Provider = &Github{}
|
||||
|
||||
// Github provides OAuth Login and Callback server. Callback will set
|
||||
// an authentication cookie. This cookie's value is a JWT containing
|
||||
// the user's primary Github email address.
|
||||
type Github struct {
|
||||
ClientID string
|
||||
ClientSecret string
|
||||
Orgs []string // Optional github organization checking
|
||||
Logger chronograf.Logger
|
||||
}
|
||||
|
||||
// Name is the name of the provider
|
||||
func (g *Github) Name() string {
|
||||
return "github"
|
||||
}
|
||||
|
||||
// ID returns the github application client id
|
||||
func (g *Github) ID() string {
|
||||
return g.ClientID
|
||||
}
|
||||
|
||||
// Secret returns the github application client secret
|
||||
func (g *Github) Secret() string {
|
||||
return g.ClientSecret
|
||||
}
|
||||
|
||||
// Scopes for github is only the email addres and possible organizations if
|
||||
// we are filtering by organizations.
|
||||
func (g *Github) Scopes() []string {
|
||||
scopes := []string{"user:email"}
|
||||
if len(g.Orgs) > 0 {
|
||||
scopes = append(scopes, "read:org")
|
||||
}
|
||||
return scopes
|
||||
}
|
||||
|
||||
// Config is the Github OAuth2 exchange information and endpoints
|
||||
func (g *Github) Config() *oauth2.Config {
|
||||
return &oauth2.Config{
|
||||
ClientID: g.ID(),
|
||||
ClientSecret: g.Secret(),
|
||||
Scopes: g.Scopes(),
|
||||
Endpoint: ogh.Endpoint,
|
||||
}
|
||||
}
|
||||
|
||||
// PrincipalID returns the github email address of the user.
|
||||
func (g *Github) PrincipalID(provider *http.Client) (string, error) {
|
||||
client := github.NewClient(provider)
|
||||
// If we need to restrict to a set of organizations, we first get the org
|
||||
// and filter.
|
||||
if len(g.Orgs) > 0 {
|
||||
orgs, err := getOrganizations(client, g.Logger)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
// Not a member, so, deny permission
|
||||
if ok := isMember(g.Orgs, orgs); !ok {
|
||||
g.Logger.Error("Not a member of required github organization")
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
email, err := getPrimaryEmail(client, g.Logger)
|
||||
if err != nil {
|
||||
return "", nil
|
||||
}
|
||||
return email, nil
|
||||
}
|
||||
|
||||
func randomString(length int) string {
|
||||
k := make([]byte, length)
|
||||
if _, err := io.ReadFull(rand.Reader, k); err != nil {
|
||||
return ""
|
||||
}
|
||||
return base64.StdEncoding.EncodeToString(k)
|
||||
}
|
||||
|
||||
func logResponseError(log chronograf.Logger, resp *github.Response, err error) {
|
||||
switch resp.StatusCode {
|
||||
case http.StatusUnauthorized, http.StatusForbidden:
|
||||
log.Error("OAuth access to email address forbidden ", err.Error())
|
||||
default:
|
||||
log.Error("Unable to retrieve Github email ", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
// isMember makes sure that the user is in one of the required organizations
|
||||
func isMember(requiredOrgs []string, userOrgs []*github.Organization) bool {
|
||||
for _, requiredOrg := range requiredOrgs {
|
||||
for _, userOrg := range userOrgs {
|
||||
if userOrg.Login != nil && *userOrg.Login == requiredOrg {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// getOrganizations gets all organization for the currently authenticated user
|
||||
func getOrganizations(client *github.Client, log chronograf.Logger) ([]*github.Organization, error) {
|
||||
// Get all pages of results
|
||||
var allOrgs []*github.Organization
|
||||
for {
|
||||
opt := &github.ListOptions{
|
||||
PerPage: 10,
|
||||
}
|
||||
// Get the organizations for the current authenticated user.
|
||||
orgs, resp, err := client.Organizations.List("", opt)
|
||||
if err != nil {
|
||||
logResponseError(log, resp, err)
|
||||
return nil, err
|
||||
}
|
||||
allOrgs = append(allOrgs, orgs...)
|
||||
if resp.NextPage == 0 {
|
||||
break
|
||||
}
|
||||
opt.Page = resp.NextPage
|
||||
}
|
||||
return allOrgs, nil
|
||||
}
|
||||
|
||||
// getPrimaryEmail gets the primary email account for the authenticated user.
|
||||
func getPrimaryEmail(client *github.Client, log chronograf.Logger) (string, error) {
|
||||
emails, resp, err := client.Users.ListEmails(nil)
|
||||
if err != nil {
|
||||
logResponseError(log, resp, err)
|
||||
return "", err
|
||||
}
|
||||
|
||||
email, err := primaryEmail(emails)
|
||||
if err != nil {
|
||||
log.Error("Unable to retrieve primary Github email ", err.Error())
|
||||
return "", err
|
||||
}
|
||||
return email, nil
|
||||
}
|
||||
|
||||
// primaryEmail returns the first email in the list whose Email, Primary, and
// Verified fields are all populated (non-nil pointers).
//
// NOTE(review): despite its name, this never inspects the *values* of
// Primary or Verified — only that the pointers are set — so the first fully
// populated address wins even when it is neither primary nor verified. The
// test in this package depends on that (Verified is false there); confirm
// whether *m.Primary should be required before changing it.
func primaryEmail(emails []*github.UserEmail) (string, error) {
	for _, m := range emails {
		if m != nil && m.Primary != nil && m.Verified != nil && m.Email != nil {
			return *m.Email, nil
		}
	}
	return "", errors.New("No primary email address")
}
|
|
@ -0,0 +1,113 @@
|
|||
package oauth2_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
clog "github.com/influxdata/chronograf/log"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
)
|
||||
|
||||
func TestGithubPrincipalID(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
expected := []struct {
|
||||
Email string `json:"email"`
|
||||
Primary bool `json:"primary"`
|
||||
Verified bool `json:"verified"`
|
||||
}{
|
||||
{"martymcfly@example.com", true, false},
|
||||
}
|
||||
mockAPI := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path != "/user/emails" {
|
||||
rw.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
enc := json.NewEncoder(rw)
|
||||
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
_ = enc.Encode(expected)
|
||||
}))
|
||||
defer mockAPI.Close()
|
||||
|
||||
logger := clog.New(clog.ParseLevel("debug"))
|
||||
prov := oauth2.Github{
|
||||
Logger: logger,
|
||||
}
|
||||
tt, err := NewTestTripper(logger, mockAPI, http.DefaultTransport)
|
||||
if err != nil {
|
||||
t.Fatal("Error initializing TestTripper: err:", err)
|
||||
}
|
||||
|
||||
tc := &http.Client{
|
||||
Transport: tt,
|
||||
}
|
||||
|
||||
email, err := prov.PrincipalID(tc)
|
||||
if err != nil {
|
||||
t.Fatal("Unexpected error while retrieiving PrincipalID: err:", err)
|
||||
}
|
||||
|
||||
if email != expected[0].Email {
|
||||
t.Fatal("Retrieved email was not as expected. Want:", expected[0].Email, "Got:", email)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGithubPrincipalIDOrganization(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
expectedUser := []struct {
|
||||
Email string `json:"email"`
|
||||
Primary bool `json:"primary"`
|
||||
Verified bool `json:"verified"`
|
||||
}{
|
||||
{"martymcfly@example.com", true, false},
|
||||
}
|
||||
expectedOrg := []struct {
|
||||
Login string `json:"login"`
|
||||
}{
|
||||
{"Hill Valley Preservation Society"},
|
||||
}
|
||||
|
||||
mockAPI := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/user/emails" {
|
||||
enc := json.NewEncoder(rw)
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
_ = enc.Encode(expectedUser)
|
||||
return
|
||||
}
|
||||
if r.URL.Path == "/user/orgs" {
|
||||
enc := json.NewEncoder(rw)
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
_ = enc.Encode(expectedOrg)
|
||||
return
|
||||
}
|
||||
rw.WriteHeader(http.StatusNotFound)
|
||||
}))
|
||||
defer mockAPI.Close()
|
||||
|
||||
logger := clog.New(clog.ParseLevel("debug"))
|
||||
prov := oauth2.Github{
|
||||
Logger: logger,
|
||||
Orgs: []string{"Hill Valley Preservation Society"},
|
||||
}
|
||||
tt, err := NewTestTripper(logger, mockAPI, http.DefaultTransport)
|
||||
if err != nil {
|
||||
t.Fatal("Error initializing TestTripper: err:", err)
|
||||
}
|
||||
|
||||
tc := &http.Client{
|
||||
Transport: tt,
|
||||
}
|
||||
|
||||
email, err := prov.PrincipalID(tc)
|
||||
if err != nil {
|
||||
t.Fatal("Unexpected error while retrieiving PrincipalID: err:", err)
|
||||
}
|
||||
|
||||
if email != expectedUser[0].Email {
|
||||
t.Fatal("Retrieved email was not as expected. Want:", expectedUser[0].Email, "Got:", email)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,89 @@
|
|||
package oauth2
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
|
||||
"golang.org/x/oauth2"
|
||||
goauth2 "google.golang.org/api/oauth2/v2"
|
||||
)
|
||||
|
||||
// GoogleEndpoint is Google's OAuth 2.0 endpoint.
// Copied here to remove tons of package dependencies.
var GoogleEndpoint = oauth2.Endpoint{
	AuthURL:  "https://accounts.google.com/o/oauth2/auth",
	TokenURL: "https://accounts.google.com/o/oauth2/token",
}

// Ensure at compile time that Google implements the Provider interface.
var _ Provider = &Google{}
|
||||
|
||||
// Google is an OAuth2 Provider that authenticates users against Google
// accounts, optionally restricted to a set of email domains.
type Google struct {
	ClientID     string   // OAuth2 application client ID
	ClientSecret string   // OAuth2 application client secret
	RedirectURL  string   // URL Google redirects back to after authorization
	Domains      []string // Optional google email domain checking
	Logger       chronograf.Logger
}
|
||||
|
||||
// Name is the name of the provider ("google"); used as the Principal issuer.
func (g *Google) Name() string {
	return "google"
}
|
||||
|
||||
// ID returns the google application client id.
func (g *Google) ID() string {
	return g.ClientID
}
|
||||
|
||||
// Secret returns the google application client secret.
func (g *Google) Secret() string {
	return g.ClientSecret
}
|
||||
|
||||
// Scopes for google are the user's email address and basic profile (the
// comment previously said "only the email address", but two scopes are
// requested).
// Documentation is here: https://developers.google.com/+/web/api/rest/oauth#email
func (g *Google) Scopes() []string {
	return []string{
		goauth2.UserinfoEmailScope,
		goauth2.UserinfoProfileScope,
	}
}
|
||||
|
||||
// Config is the Google OAuth2 exchange information and endpoints.
func (g *Google) Config() *oauth2.Config {
	return &oauth2.Config{
		ClientID:     g.ID(),
		ClientSecret: g.Secret(),
		Scopes:       g.Scopes(),
		Endpoint:     GoogleEndpoint,
		RedirectURL:  g.RedirectURL,
	}
}
|
||||
|
||||
// PrincipalID returns the google email address of the user. When g.Domains
// is non-empty, the account's hosted domain must match one of the configured
// domains or authentication is denied.
func (g *Google) PrincipalID(provider *http.Client) (string, error) {
	srv, err := goauth2.New(provider)
	if err != nil {
		g.Logger.Error("Unable to communicate with Google ", err.Error())
		return "", err
	}
	info, err := srv.Userinfo.Get().Do()
	if err != nil {
		g.Logger.Error("Unable to retrieve Google email ", err.Error())
		return "", err
	}
	// No domain filtering required, so, the user is authenticated.
	if len(g.Domains) == 0 {
		return info.Email, nil
	}

	// Check if the account domain is acceptable.
	// NOTE(review): this compares the userinfo Hd field against the
	// configured domains — confirm Hd is populated for all account types
	// that should be admitted.
	for _, requiredDomain := range g.Domains {
		if info.Hd == requiredDomain {
			return info.Email, nil
		}
	}
	g.Logger.Error("Domain '", info.Hd, "' is not a member of required Google domain(s): ", g.Domains)
	return "", fmt.Errorf("Not in required domain")
}
|
|
@ -0,0 +1,102 @@
|
|||
package oauth2_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
clog "github.com/influxdata/chronograf/log"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
)
|
||||
|
||||
func TestGooglePrincipalID(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
expected := struct {
|
||||
Email string `json:"email"`
|
||||
}{
|
||||
"martymcfly@example.com",
|
||||
}
|
||||
mockAPI := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path != "/oauth2/v2/userinfo" {
|
||||
rw.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
enc := json.NewEncoder(rw)
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
_ = enc.Encode(expected)
|
||||
}))
|
||||
defer mockAPI.Close()
|
||||
|
||||
logger := clog.New(clog.ParseLevel("debug"))
|
||||
prov := oauth2.Google{
|
||||
Logger: logger,
|
||||
}
|
||||
tt, err := NewTestTripper(logger, mockAPI, http.DefaultTransport)
|
||||
if err != nil {
|
||||
t.Fatal("Error initializing TestTripper: err:", err)
|
||||
}
|
||||
|
||||
tc := &http.Client{
|
||||
Transport: tt,
|
||||
}
|
||||
|
||||
email, err := prov.PrincipalID(tc)
|
||||
if err != nil {
|
||||
t.Fatal("Unexpected error while retrieiving PrincipalID: err:", err)
|
||||
}
|
||||
|
||||
if email != expected.Email {
|
||||
t.Fatal("Retrieved email was not as expected. Want:", expected.Email, "Got:", email)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGooglePrincipalIDDomain(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
expectedUser := struct {
|
||||
Email string `json:"email"`
|
||||
Hd string `json:"hd"`
|
||||
}{
|
||||
"martymcfly@example.com",
|
||||
"Hill Valley Preservation Society",
|
||||
}
|
||||
//a := goauth2.Userinfoplus{}
|
||||
|
||||
mockAPI := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path != "/oauth2/v2/userinfo" {
|
||||
rw.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
enc := json.NewEncoder(rw)
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
_ = enc.Encode(expectedUser)
|
||||
}))
|
||||
defer mockAPI.Close()
|
||||
|
||||
logger := clog.New(clog.ParseLevel("debug"))
|
||||
prov := oauth2.Google{
|
||||
Logger: logger,
|
||||
Domains: []string{"Hill Valley Preservation Society"},
|
||||
}
|
||||
tt, err := NewTestTripper(logger, mockAPI, http.DefaultTransport)
|
||||
if err != nil {
|
||||
t.Fatal("Error initializing TestTripper: err:", err)
|
||||
}
|
||||
|
||||
tc := &http.Client{
|
||||
Transport: tt,
|
||||
}
|
||||
|
||||
email, err := prov.PrincipalID(tc)
|
||||
if err != nil {
|
||||
t.Fatal("Unexpected error while retrieiving PrincipalID: err:", err)
|
||||
}
|
||||
|
||||
if email != expectedUser.Email {
|
||||
t.Fatal("Retrieved email was not as expected. Want:", expectedUser.Email, "Got:", email)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,101 @@
|
|||
package oauth2
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
|
||||
"golang.org/x/oauth2"
|
||||
hrk "golang.org/x/oauth2/heroku"
|
||||
)
|
||||
|
||||
// Ensure that Heroku is an oauth2.Provider.
var _ Provider = &Heroku{}

const (
	// Routes required for interacting with Heroku API.
	// HEROKU_ACCOUNT_ROUTE returns the authenticated user's account
	// details (email address and default organization).
	HEROKU_ACCOUNT_ROUTE string = "https://api.heroku.com/account"
)
|
||||
|
||||
// Heroku is an OAuth2 Provider allowing users to authenticate with Heroku to
// gain access to Chronograf.
type Heroku struct {
	// OAuth2 Secrets
	ClientID     string
	ClientSecret string

	Organizations []string // set of organizations permitted to access the protected resource. Empty means "all"

	Logger chronograf.Logger
}
|
||||
|
||||
// Config returns the OAuth2 exchange information and endpoints.
// Note: no RedirectURL is set here; the callback URL registered with Heroku
// is used as-is.
func (h *Heroku) Config() *oauth2.Config {
	return &oauth2.Config{
		ClientID:     h.ID(),
		ClientSecret: h.Secret(),
		Scopes:       h.Scopes(),
		Endpoint:     hrk.Endpoint,
	}
}
|
||||
|
||||
// ID returns the Heroku application client ID.
func (h *Heroku) ID() string {
	return h.ClientID
}
|
||||
|
||||
// Name returns the name of this provider ("heroku"); used as the Principal
// issuer.
func (h *Heroku) Name() string {
	return "heroku"
}
|
||||
|
||||
// PrincipalID returns the Heroku email address of the user.
|
||||
func (h *Heroku) PrincipalID(provider *http.Client) (string, error) {
|
||||
type DefaultOrg struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
type Account struct {
|
||||
Email string `json:"email"`
|
||||
DefaultOrganization DefaultOrg `json:"default_organization"`
|
||||
}
|
||||
|
||||
resp, err := provider.Get(HEROKU_ACCOUNT_ROUTE)
|
||||
if err != nil {
|
||||
h.Logger.Error("Unable to communicate with Heroku. err:", err)
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
d := json.NewDecoder(resp.Body)
|
||||
var account Account
|
||||
if err := d.Decode(&account); err != nil {
|
||||
h.Logger.Error("Unable to decode response from Heroku. err:", err)
|
||||
return "", err
|
||||
}
|
||||
|
||||
// check if member of org
|
||||
if len(h.Organizations) > 0 {
|
||||
for _, org := range h.Organizations {
|
||||
if account.DefaultOrganization.Name == org {
|
||||
return account.Email, nil
|
||||
}
|
||||
}
|
||||
h.Logger.Error(ErrOrgMembership)
|
||||
return "", ErrOrgMembership
|
||||
} else {
|
||||
return account.Email, nil
|
||||
}
|
||||
}
|
||||
|
||||
// Scopes for heroku is "identity" which grants access to user account
// information. This will grant us access to the user's email address which is
// used as the Principal's identifier.
func (h *Heroku) Scopes() []string {
	return []string{"identity"}
}
|
||||
|
||||
// Secret returns the Heroku application client secret.
func (h *Heroku) Secret() string {
	return h.ClientSecret
}
|
|
@ -0,0 +1,102 @@
|
|||
package oauth2_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
clog "github.com/influxdata/chronograf/log"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
)
|
||||
|
||||
func Test_Heroku_PrincipalID_ExtractsEmailAddress(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
expected := struct {
|
||||
Email string `json:"email"`
|
||||
}{
|
||||
"martymcfly@example.com",
|
||||
}
|
||||
|
||||
mockAPI := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path != "/account" {
|
||||
rw.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
enc := json.NewEncoder(rw)
|
||||
|
||||
rw.WriteHeader(http.StatusOK)
|
||||
_ = enc.Encode(expected)
|
||||
}))
|
||||
defer mockAPI.Close()
|
||||
|
||||
logger := clog.New(clog.ParseLevel("debug"))
|
||||
prov := oauth2.Heroku{
|
||||
Logger: logger,
|
||||
}
|
||||
tt, err := NewTestTripper(logger, mockAPI, http.DefaultTransport)
|
||||
if err != nil {
|
||||
t.Fatal("Error initializing TestTripper: err:", err)
|
||||
}
|
||||
|
||||
tc := &http.Client{
|
||||
Transport: tt,
|
||||
}
|
||||
|
||||
email, err := prov.PrincipalID(tc)
|
||||
if err != nil {
|
||||
t.Fatal("Unexpected error while retrieiving PrincipalID: err:", err)
|
||||
}
|
||||
|
||||
if email != expected.Email {
|
||||
t.Fatal("Retrieved email was not as expected. Want:", expected.Email, "Got:", email)
|
||||
}
|
||||
}
|
||||
|
||||
// Test_Heroku_PrincipalID_RestrictsByOrganization verifies that PrincipalID
// rejects a user whose default organization is not in the configured set.
func Test_Heroku_PrincipalID_RestrictsByOrganization(t *testing.T) {
	t.Parallel()

	// The mock account's default organization deliberately does NOT match
	// the provider's configured organization below.
	expected := struct {
		Email               string            `json:"email"`
		DefaultOrganization map[string]string `json:"default_organization"`
	}{
		"martymcfly@example.com",
		map[string]string{
			"id":   "a85eac89-56cc-498e-9a89-d8f49f6aed71",
			"name": "hill-valley-preservation-society",
		},
	}

	mockAPI := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
		if r.URL.Path != "/account" {
			rw.WriteHeader(http.StatusNotFound)
			return
		}
		enc := json.NewEncoder(rw)

		rw.WriteHeader(http.StatusOK)
		_ = enc.Encode(expected)
	}))
	defer mockAPI.Close()

	logger := clog.New(clog.ParseLevel("debug"))
	prov := oauth2.Heroku{
		Logger:        logger,
		Organizations: []string{"enchantment-under-the-sea-dance-committee"},
	}

	tt, err := NewTestTripper(logger, mockAPI, http.DefaultTransport)
	if err != nil {
		t.Fatal("Error initializing TestTripper: err:", err)
	}

	tc := &http.Client{
		Transport: tt,
	}

	_, err = prov.PrincipalID(tc)
	if err == nil {
		t.Fatal("Expected error while authenticating user with mismatched orgs, but received none")
	}
}
|
|
@ -1,4 +1,4 @@
|
|||
package jwt
|
||||
package oauth2
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
@ -6,11 +6,10 @@ import (
|
|||
"time"
|
||||
|
||||
gojwt "github.com/dgrijalva/jwt-go"
|
||||
"github.com/influxdata/chronograf"
|
||||
)
|
||||
|
||||
// Test if JWT implements Authenticator
|
||||
var _ chronograf.Authenticator = &JWT{}
|
||||
var _ Authenticator = &JWT{}
|
||||
|
||||
// JWT represents a javascript web token that can be validated or marshaled into string.
|
||||
type JWT struct {
|
||||
|
@ -45,7 +44,7 @@ func (c *Claims) Valid() error {
|
|||
}
|
||||
|
||||
// Authenticate checks if the jwtToken is signed correctly and validates with Claims.
|
||||
func (j *JWT) Authenticate(ctx context.Context, jwtToken string) (chronograf.Principal, error) {
|
||||
func (j *JWT) Authenticate(ctx context.Context, jwtToken string) (Principal, error) {
|
||||
gojwt.TimeFunc = j.Now
|
||||
|
||||
// Check for expected signing method.
|
||||
|
@ -62,27 +61,31 @@ func (j *JWT) Authenticate(ctx context.Context, jwtToken string) (chronograf.Pri
|
|||
// 4. Check if subject is not empty
|
||||
token, err := gojwt.ParseWithClaims(jwtToken, &Claims{}, alg)
|
||||
if err != nil {
|
||||
return "", err
|
||||
return Principal{}, err
|
||||
} else if !token.Valid {
|
||||
return "", err
|
||||
return Principal{}, err
|
||||
}
|
||||
|
||||
claims, ok := token.Claims.(*Claims)
|
||||
if !ok {
|
||||
return "", fmt.Errorf("unable to convert claims to standard claims")
|
||||
return Principal{}, fmt.Errorf("unable to convert claims to standard claims")
|
||||
}
|
||||
|
||||
return chronograf.Principal(claims.Subject), nil
|
||||
return Principal{
|
||||
Subject: claims.Subject,
|
||||
Issuer: claims.Issuer,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Token creates a signed JWT token from user that expires at Now + duration
|
||||
func (j *JWT) Token(ctx context.Context, user chronograf.Principal, duration time.Duration) (string, error) {
|
||||
func (j *JWT) Token(ctx context.Context, user Principal, duration time.Duration) (string, error) {
|
||||
// Create a new token object, specifying signing method and the claims
|
||||
// you would like it to contain.
|
||||
now := j.Now().UTC()
|
||||
claims := &Claims{
|
||||
gojwt.StandardClaims{
|
||||
Subject: string(user),
|
||||
Subject: user.Subject,
|
||||
Issuer: user.Issuer,
|
||||
ExpiresAt: now.Add(duration).Unix(),
|
||||
IssuedAt: now.Unix(),
|
||||
NotBefore: now.Unix(),
|
|
@ -1,4 +1,4 @@
|
|||
package jwt_test
|
||||
package oauth2_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
@ -6,8 +6,7 @@ import (
|
|||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
"github.com/influxdata/chronograf/jwt"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
)
|
||||
|
||||
func TestAuthenticate(t *testing.T) {
|
||||
|
@ -15,46 +14,56 @@ func TestAuthenticate(t *testing.T) {
|
|||
Desc string
|
||||
Secret string
|
||||
Token string
|
||||
User chronograf.Principal
|
||||
User oauth2.Principal
|
||||
Err error
|
||||
}{
|
||||
{
|
||||
Desc: "Test bad jwt token",
|
||||
Secret: "secret",
|
||||
Token: "badtoken",
|
||||
User: "",
|
||||
Err: errors.New("token contains an invalid number of segments"),
|
||||
User: oauth2.Principal{
|
||||
Subject: "",
|
||||
},
|
||||
Err: errors.New("token contains an invalid number of segments"),
|
||||
},
|
||||
{
|
||||
Desc: "Test valid jwt token",
|
||||
Secret: "secret",
|
||||
Token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIvY2hyb25vZ3JhZi92MS91c2Vycy8xIiwibmFtZSI6IkRvYyBCcm93biIsImlhdCI6LTQ0Njc3NDQwMCwiZXhwIjotNDQ2Nzc0NDAwLCJuYmYiOi00NDY3NzQ0MDB9._rZ4gOIei9PizHOABH6kLcJTA3jm8ls0YnDxtz1qeUI",
|
||||
User: "/chronograf/v1/users/1",
|
||||
User: oauth2.Principal{
|
||||
Subject: "/chronograf/v1/users/1",
|
||||
},
|
||||
},
|
||||
{
|
||||
Desc: "Test expired jwt token",
|
||||
Secret: "secret",
|
||||
Token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIvY2hyb25vZ3JhZi92MS91c2Vycy8xIiwibmFtZSI6IkRvYyBCcm93biIsImlhdCI6LTQ0Njc3NDQwMCwiZXhwIjotNDQ2Nzc0NDAxLCJuYmYiOi00NDY3NzQ0MDB9.vWXdm0-XQ_pW62yBpSISFFJN_yz0vqT9_INcUKTp5Q8",
|
||||
User: "",
|
||||
Err: errors.New("token is expired by 1s"),
|
||||
User: oauth2.Principal{
|
||||
Subject: "",
|
||||
},
|
||||
Err: errors.New("token is expired by 1s"),
|
||||
},
|
||||
{
|
||||
Desc: "Test jwt token not before time",
|
||||
Secret: "secret",
|
||||
Token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIvY2hyb25vZ3JhZi92MS91c2Vycy8xIiwibmFtZSI6IkRvYyBCcm93biIsImlhdCI6LTQ0Njc3NDQwMCwiZXhwIjotNDQ2Nzc0NDAwLCJuYmYiOi00NDY3NzQzOTl9.TMGAhv57u1aosjc4ywKC7cElP1tKyQH7GmRF2ToAxlE",
|
||||
User: "",
|
||||
Err: errors.New("token is not valid yet"),
|
||||
User: oauth2.Principal{
|
||||
Subject: "",
|
||||
},
|
||||
Err: errors.New("token is not valid yet"),
|
||||
},
|
||||
{
|
||||
Desc: "Test jwt with empty subject is invalid",
|
||||
Secret: "secret",
|
||||
Token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOi00NDY3NzQ0MDAsImV4cCI6LTQ0Njc3NDQwMCwibmJmIjotNDQ2Nzc0NDAwfQ.gxsA6_Ei3s0f2I1TAtrrb8FmGiO25OqVlktlF_ylhX4",
|
||||
User: "",
|
||||
Err: errors.New("claim has no subject"),
|
||||
User: oauth2.Principal{
|
||||
Subject: "",
|
||||
},
|
||||
Err: errors.New("claim has no subject"),
|
||||
},
|
||||
}
|
||||
for i, test := range tests {
|
||||
j := jwt.JWT{
|
||||
j := oauth2.JWT{
|
||||
Secret: test.Secret,
|
||||
Now: func() time.Time {
|
||||
return time.Unix(-446774400, 0)
|
||||
|
@ -77,13 +86,16 @@ func TestAuthenticate(t *testing.T) {
|
|||
func TestToken(t *testing.T) {
|
||||
duration := time.Second
|
||||
expected := "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOi00NDY3NzQzOTksImlhdCI6LTQ0Njc3NDQwMCwibmJmIjotNDQ2Nzc0NDAwLCJzdWIiOiIvY2hyb25vZ3JhZi92MS91c2Vycy8xIn0.ofQM6yTmrmve5JeEE0RcK4_euLXuZ_rdh6bLAbtbC9M"
|
||||
j := jwt.JWT{
|
||||
j := oauth2.JWT{
|
||||
Secret: "secret",
|
||||
Now: func() time.Time {
|
||||
return time.Unix(-446774400, 0)
|
||||
},
|
||||
}
|
||||
if token, err := j.Token(context.Background(), chronograf.Principal("/chronograf/v1/users/1"), duration); err != nil {
|
||||
p := oauth2.Principal{
|
||||
Subject: "/chronograf/v1/users/1",
|
||||
}
|
||||
if token, err := j.Token(context.Background(), p, duration); err != nil {
|
||||
t.Errorf("Error creating token for user: %v", err)
|
||||
} else if token != expected {
|
||||
t.Errorf("Error creating token; expected: %s actual: %s", "", token)
|
|
@ -0,0 +1,169 @@
|
|||
package oauth2
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
const (
	// DefaultCookieName is the name of the stored cookie
	DefaultCookieName = "session"
	// DefaultCookieDuration is the length of time the cookie is valid
	DefaultCookieDuration = time.Hour * 24 * 30
)

// cookie describes the name and expiration duration of the session cookies
// issued by CookieMux. (Unexported; values are set by NewCookieMux.)
type cookie struct {
	Name     string
	Duration time.Duration
}
|
||||
|
||||
// Check to ensure CookieMux is an oauth2.Mux.
var _ Mux = &CookieMux{}

// NewCookieMux constructs a CookieMux for the given provider and
// authenticator with defaults: redirect to "/" on success and "/login" on
// failure, the default month-long session cookie, and the real clock.
func NewCookieMux(p Provider, a Authenticator, l chronograf.Logger) *CookieMux {
	return &CookieMux{
		Provider:   p,
		Auth:       a,
		Logger:     l,
		SuccessURL: "/",
		FailureURL: "/login",
		Now:        time.Now,

		cookie: cookie{
			Name:     DefaultCookieName,
			Duration: DefaultCookieDuration,
		},
	}
}
|
||||
|
||||
// CookieMux services an Oauth2 interaction with a provider and browser and
// stores the resultant token in the user's browser as a cookie. The benefit of
// this is that the cookie's authenticity can be verified independently by any
// Chronograf instance as long as the Authenticator has no external
// dependencies (e.g. on a Database).
type CookieMux struct {
	Provider   Provider          // Provider performs the upstream OAuth2 flow
	Auth       Authenticator     // Auth signs and validates session/state tokens
	cookie     cookie            // cookie holds the session cookie's name and lifetime
	Logger     chronograf.Logger
	SuccessURL string           // SuccessURL is redirect location after successful authorization
	FailureURL string           // FailureURL is redirect location after authorization failure
	Now        func() time.Time // Now returns the current time
}
|
||||
|
||||
// Login returns a handler that redirects the browser to the provider's OAuth
// login page.
//
// Uses a Cookie with a random string as the state validation method. JWTs are
// a good choice here for encoding because they can be validated without
// storing state.
func (j *CookieMux) Login() http.Handler {
	conf := j.Provider.Config()
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// We are creating a token with an encoded random string to prevent CSRF attacks
		// This token will be validated during the OAuth callback.
		// We'll give our users 10 minutes from this point to type in their github password.
		// If the callback is not received within 10 minutes, then authorization will fail.
		csrf := randomString(32) // 32 is not important... just long
		p := Principal{
			Subject: csrf,
		}
		state, err := j.Auth.Token(r.Context(), p, 10*time.Minute)
		// This is likely an internal server error
		if err != nil {
			j.Logger.
				WithField("component", "auth").
				WithField("remote_addr", r.RemoteAddr).
				WithField("method", r.Method).
				WithField("url", r.URL).
				Error("Internal authentication error: ", err.Error())
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		url := conf.AuthCodeURL(state, oauth2.AccessTypeOnline)
		http.Redirect(w, r, url, http.StatusTemporaryRedirect)
	})
}
|
||||
|
||||
// Callback is used by OAuth2 provider after authorization is granted. If
// granted, Callback will set a cookie with a month-long expiration. It is
// recommended that the value of the cookie be encoded as a JWT because the JWT
// can be validated without the need for saving state. The JWT contains the
// principal's identifier (e.g. email address).
func (j *CookieMux) Callback() http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		log := j.Logger.
			WithField("component", "auth").
			WithField("remote_addr", r.RemoteAddr).
			WithField("method", r.Method).
			WithField("url", r.URL)

		state := r.FormValue("state")
		// Check if the OAuth state token is valid to prevent CSRF
		_, err := j.Auth.Authenticate(r.Context(), state)
		if err != nil {
			log.Error("Invalid OAuth state received: ", err.Error())
			http.Redirect(w, r, j.FailureURL, http.StatusTemporaryRedirect)
			return
		}

		// Exchange the code back with the provider to get the token
		conf := j.Provider.Config()
		code := r.FormValue("code")
		token, err := conf.Exchange(r.Context(), code)
		if err != nil {
			log.Error("Unable to exchange code for token ", err.Error())
			http.Redirect(w, r, j.FailureURL, http.StatusTemporaryRedirect)
			return
		}

		// Using the token get the principal identifier from the provider
		oauthClient := conf.Client(r.Context(), token)
		id, err := j.Provider.PrincipalID(oauthClient)
		if err != nil {
			log.Error("Unable to get principal identifier ", err.Error())
			http.Redirect(w, r, j.FailureURL, http.StatusTemporaryRedirect)
			return
		}

		p := Principal{
			Subject: id,
			Issuer:  j.Provider.Name(),
		}
		// We create an auth token that will be used by all other endpoints to validate the principal has a claim
		authToken, err := j.Auth.Token(r.Context(), p, j.cookie.Duration)
		if err != nil {
			log.Error("Unable to create cookie auth token ", err.Error())
			http.Redirect(w, r, j.FailureURL, http.StatusTemporaryRedirect)
			return
		}

		expireCookie := j.Now().UTC().Add(j.cookie.Duration)
		cookie := http.Cookie{
			Name:     j.cookie.Name,
			Value:    authToken,
			Expires:  expireCookie,
			HttpOnly: true,
			Path:     "/",
		}
		log.Info("User ", id, " is authenticated")
		http.SetCookie(w, &cookie)
		http.Redirect(w, r, j.SuccessURL, http.StatusTemporaryRedirect)
	})
}
|
||||
|
||||
// Logout handler will expire our authentication cookie and redirect to the successURL
|
||||
func (j *CookieMux) Logout() http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
deleteCookie := http.Cookie{
|
||||
Name: j.cookie.Name,
|
||||
Value: "none",
|
||||
Expires: j.Now().UTC().Add(-1 * time.Hour),
|
||||
HttpOnly: true,
|
||||
Path: "/",
|
||||
}
|
||||
http.SetCookie(w, &deleteCookie)
|
||||
http.Redirect(w, r, j.SuccessURL, http.StatusTemporaryRedirect)
|
||||
})
|
||||
}
|
|
@ -0,0 +1,157 @@
|
|||
package oauth2_test
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/cookiejar"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
clog "github.com/influxdata/chronograf/log"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
)
|
||||
|
||||
// testTime is the fixed "now" used by the CookieMux under test so cookie
// expirations are deterministic.
var testTime time.Time = time.Date(1985, time.October, 25, 18, 0, 0, 0, time.UTC)

// setupMuxTest produces an http.Client and an httptest.Server configured to
// use a particular http.Handler selected from a CookieMux. As this selection is
// done during the setup process, this configuration is performed by providing
// a function, and returning the desired handler. Cleanup is still the
// responsibility of the test writer, so the httptest.Server's Close() method
// should be deferred.
func setupMuxTest(selector func(*oauth2.CookieMux) http.Handler) (*http.Client, *httptest.Server, *httptest.Server) {
	provider := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
		rw.WriteHeader(http.StatusOK)
	}))

	// MockProvider and YesManAuthenticator are test doubles — presumably
	// defined in this package's shared test helpers; verify when moving.
	mp := &MockProvider{"biff@example.com", provider.URL}

	jm := oauth2.NewCookieMux(mp, &YesManAuthenticator{}, clog.New(clog.ParseLevel("debug")))

	// Pin the mux's clock for deterministic cookie expirations.
	jm.Now = func() time.Time {
		return testTime
	}

	ts := httptest.NewServer(selector(jm))

	jar, _ := cookiejar.New(nil)

	// Don't follow redirects so tests can assert on the 3xx responses.
	hc := http.Client{
		Jar: jar,
		CheckRedirect: func(r *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}
	return &hc, ts, provider
}
|
||||
|
||||
// teardownMuxTest cleans up any resources created by setupMuxTest. This should
// be deferred in your test after setupMuxTest is called. The http.Client
// argument is accepted for symmetry with setupMuxTest's return values; it
// needs no cleanup.
func teardownMuxTest(hc *http.Client, backend *httptest.Server, provider *httptest.Server) {
	provider.Close()
	backend.Close()
}
|
||||
|
||||
func Test_CookieMux_Logout_DeletesSessionCookie(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
hc, ts, prov := setupMuxTest(func(j *oauth2.CookieMux) http.Handler {
|
||||
return j.Logout()
|
||||
})
|
||||
defer teardownMuxTest(hc, ts, prov)
|
||||
|
||||
tsUrl, _ := url.Parse(ts.URL)
|
||||
|
||||
hc.Jar.SetCookies(tsUrl, []*http.Cookie{
|
||||
&http.Cookie{
|
||||
Name: oauth2.DefaultCookieName,
|
||||
Value: "",
|
||||
},
|
||||
})
|
||||
|
||||
resp, err := hc.Get(ts.URL)
|
||||
if err != nil {
|
||||
t.Fatal("Error communicating with Logout() handler: err:", err)
|
||||
}
|
||||
|
||||
if resp.StatusCode < 300 || resp.StatusCode >= 400 {
|
||||
t.Fatal("Expected to be redirected, but received status code", resp.StatusCode)
|
||||
}
|
||||
|
||||
cookies := resp.Cookies()
|
||||
if len(cookies) != 1 {
|
||||
t.Fatal("Expected that cookie would be present but wasn't")
|
||||
}
|
||||
|
||||
c := cookies[0]
|
||||
if c.Name != oauth2.DefaultCookieName || c.Expires != testTime.Add(-1*time.Hour) {
|
||||
t.Fatal("Expected cookie to be expired but wasn't")
|
||||
}
|
||||
}
|
||||
|
||||
func Test_CookieMux_Login_RedirectsToCorrectURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
hc, ts, prov := setupMuxTest(func(j *oauth2.CookieMux) http.Handler {
|
||||
return j.Login() // Use Login handler for httptest server.
|
||||
})
|
||||
defer teardownMuxTest(hc, ts, prov)
|
||||
|
||||
resp, err := hc.Get(ts.URL)
|
||||
if err != nil {
|
||||
t.Fatal("Error communicating with Login() handler: err:", err)
|
||||
}
|
||||
|
||||
// Ensure we were redirected
|
||||
if resp.StatusCode < 300 || resp.StatusCode >= 400 {
|
||||
t.Fatal("Expected to be redirected, but received status code", resp.StatusCode)
|
||||
}
|
||||
|
||||
loc, err := resp.Location()
|
||||
if err != nil {
|
||||
t.Fatal("Expected a location to be redirected to, but wasn't present")
|
||||
}
|
||||
|
||||
if state := loc.Query().Get("state"); state != "HELLO?!MCFLY?!ANYONEINTHERE?!" {
|
||||
t.Fatal("Expected state to be set but was", state)
|
||||
}
|
||||
}
|
||||
|
||||
func Test_CookieMux_Callback_SetsCookie(t *testing.T) {
|
||||
hc, ts, prov := setupMuxTest(func(j *oauth2.CookieMux) http.Handler {
|
||||
return j.Callback()
|
||||
})
|
||||
defer teardownMuxTest(hc, ts, prov)
|
||||
|
||||
tsURL, _ := url.Parse(ts.URL)
|
||||
|
||||
v := url.Values{
|
||||
"code": {"4815162342"},
|
||||
"state": {"foobar"},
|
||||
}
|
||||
|
||||
tsURL.RawQuery = v.Encode()
|
||||
|
||||
resp, err := hc.Get(tsURL.String())
|
||||
if err != nil {
|
||||
t.Fatal("Error communicating with Callback() handler: err", err)
|
||||
}
|
||||
|
||||
// Ensure we were redirected
|
||||
if resp.StatusCode < 300 || resp.StatusCode >= 400 {
|
||||
t.Fatal("Expected to be redirected, but received status code", resp.StatusCode)
|
||||
}
|
||||
|
||||
// Check that cookie was set
|
||||
cookies := resp.Cookies()
|
||||
if count := len(cookies); count != 1 {
|
||||
t.Fatal("Expected exactly one cookie to be set but found", count)
|
||||
}
|
||||
|
||||
c := cookies[0]
|
||||
|
||||
if c.Name != oauth2.DefaultCookieName {
|
||||
t.Fatal("Expected cookie to be named", oauth2.DefaultCookieName, "but was", c.Name)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
package oauth2
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
/* Constants */
|
||||
const (
|
||||
// PrincipalKey is used to pass principal
|
||||
// via context.Context to request-scoped
|
||||
// functions.
|
||||
PrincipalKey string = "principal"
|
||||
)
|
||||
|
||||
var (
|
||||
/* Errors */
|
||||
ErrAuthentication = errors.New("user not authenticated")
|
||||
ErrOrgMembership = errors.New("Not a member of the required organization")
|
||||
)
|
||||
|
||||
/* Types */
|
||||
|
||||
// Principal is any entity that can be authenticated
|
||||
type Principal struct {
|
||||
Subject string
|
||||
Issuer string
|
||||
}
|
||||
|
||||
/* Interfaces */
|
||||
|
||||
// Provider are the common parameters for all providers (RFC 6749)
|
||||
type Provider interface {
|
||||
// ID is issued to the registered client by the authorization (RFC 6749 Section 2.2)
|
||||
ID() string
|
||||
// Secret associated is with the ID (Section 2.2)
|
||||
Secret() string
|
||||
// Scopes is used by the authorization server to "scope" responses (Section 3.3)
|
||||
Scopes() []string
|
||||
// Config is the OAuth2 configuration settings for this provider
|
||||
Config() *oauth2.Config
|
||||
// PrincipalID with fetch the identifier to be associated with the principal.
|
||||
PrincipalID(provider *http.Client) (string, error)
|
||||
|
||||
// Name is the name of the Provider
|
||||
Name() string
|
||||
}
|
||||
|
||||
// Mux is a collection of handlers responsible for servicing an Oauth2 interaction between a browser and a provider
|
||||
type Mux interface {
|
||||
Login() http.Handler
|
||||
Logout() http.Handler
|
||||
Callback() http.Handler
|
||||
}
|
||||
|
||||
// Authenticator represents a service for authenticating users.
|
||||
type Authenticator interface {
|
||||
// Authenticate returns User associated with token if successful.
|
||||
Authenticate(ctx context.Context, token string) (Principal, error)
|
||||
// Token generates a valid token for Principal lasting a duration
|
||||
Token(context.Context, Principal, time.Duration) (string, error)
|
||||
}
|
||||
|
||||
// TokenExtractor extracts tokens from http requests
|
||||
type TokenExtractor interface {
|
||||
// Extract will return the token or an error.
|
||||
Extract(r *http.Request) (string, error)
|
||||
}
|
|
@ -0,0 +1,100 @@
|
|||
package oauth2_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
goauth "golang.org/x/oauth2"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
)
|
||||
|
||||
var _ oauth2.Provider = &MockProvider{}
|
||||
|
||||
type MockProvider struct {
|
||||
Email string
|
||||
|
||||
ProviderURL string
|
||||
}
|
||||
|
||||
func (mp *MockProvider) Config() *goauth.Config {
|
||||
return &goauth.Config{
|
||||
RedirectURL: "http://www.example.com",
|
||||
ClientID: "4815162342",
|
||||
ClientSecret: "8675309",
|
||||
Endpoint: goauth.Endpoint{
|
||||
mp.ProviderURL + "/oauth/auth",
|
||||
mp.ProviderURL + "/oauth/token",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (mp *MockProvider) ID() string {
|
||||
return "8675309"
|
||||
}
|
||||
|
||||
func (mp *MockProvider) Name() string {
|
||||
return "mockly"
|
||||
}
|
||||
|
||||
func (mp *MockProvider) PrincipalID(provider *http.Client) (string, error) {
|
||||
return mp.Email, nil
|
||||
}
|
||||
|
||||
func (mp *MockProvider) Scopes() []string {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
func (mp *MockProvider) Secret() string {
|
||||
return "4815162342"
|
||||
}
|
||||
|
||||
var _ oauth2.Authenticator = &YesManAuthenticator{}
|
||||
|
||||
type YesManAuthenticator struct{}
|
||||
|
||||
func (y *YesManAuthenticator) Authenticate(ctx context.Context, token string) (oauth2.Principal, error) {
|
||||
return oauth2.Principal{
|
||||
Subject: "biff@example.com",
|
||||
Issuer: "Biff Tannen's Pleasure Paradise",
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (y *YesManAuthenticator) Token(ctx context.Context, p oauth2.Principal, t time.Duration) (string, error) {
|
||||
return "HELLO?!MCFLY?!ANYONEINTHERE?!", nil
|
||||
}
|
||||
|
||||
func NewTestTripper(log chronograf.Logger, ts *httptest.Server, rt http.RoundTripper) (*TestTripper, error) {
|
||||
url, err := url.Parse(ts.URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &TestTripper{log, rt, url}, nil
|
||||
}
|
||||
|
||||
type TestTripper struct {
|
||||
Log chronograf.Logger
|
||||
|
||||
rt http.RoundTripper
|
||||
tsURL *url.URL
|
||||
}
|
||||
|
||||
// RoundTrip modifies the Hostname of the incoming request to be directed to the
|
||||
// test server.
|
||||
func (tt *TestTripper) RoundTrip(r *http.Request) (*http.Response, error) {
|
||||
tt.Log.
|
||||
WithField("component", "test").
|
||||
WithField("remote_addr", r.RemoteAddr).
|
||||
WithField("method", r.Method).
|
||||
WithField("url", r.URL).
|
||||
Info("Request")
|
||||
|
||||
r.URL.Host = tt.tsURL.Host
|
||||
r.URL.Scheme = tt.tsURL.Scheme
|
||||
|
||||
return tt.rt.RoundTrip(r)
|
||||
}
|
|
@ -10,6 +10,13 @@ import (
|
|||
"github.com/influxdata/chronograf"
|
||||
)
|
||||
|
||||
const (
|
||||
// DefaultWidth is used if not specified
|
||||
DefaultWidth = 4
|
||||
// DefaultHeight is used if not specified
|
||||
DefaultHeight = 4
|
||||
)
|
||||
|
||||
type dashboardLinks struct {
|
||||
Self string `json:"self"` // Self link mapping to this resource
|
||||
}
|
||||
|
@ -25,6 +32,7 @@ type getDashboardsResponse struct {
|
|||
|
||||
func newDashboardResponse(d chronograf.Dashboard) dashboardResponse {
|
||||
base := "/chronograf/v1/dashboards"
|
||||
DashboardDefaults(&d)
|
||||
return dashboardResponse{
|
||||
Dashboard: d,
|
||||
Links: dashboardLinks{
|
||||
|
@ -80,7 +88,7 @@ func (s *Service) NewDashboard(w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
if err := ValidDashboardRequest(dashboard); err != nil {
|
||||
if err := ValidDashboardRequest(&dashboard); err != nil {
|
||||
invalidData(w, err, s.Logger)
|
||||
return
|
||||
}
|
||||
|
@ -119,8 +127,8 @@ func (s *Service) RemoveDashboard(w http.ResponseWriter, r *http.Request) {
|
|||
w.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
|
||||
// UpdateDashboard replaces a dashboard
|
||||
func (s *Service) UpdateDashboard(w http.ResponseWriter, r *http.Request) {
|
||||
// ReplaceDashboard completely replaces a dashboard
|
||||
func (s *Service) ReplaceDashboard(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
idParam, err := strconv.Atoi(httprouter.GetParamFromContext(ctx, "id"))
|
||||
if err != nil {
|
||||
|
@ -142,7 +150,7 @@ func (s *Service) UpdateDashboard(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
req.ID = id
|
||||
|
||||
if err := ValidDashboardRequest(req); err != nil {
|
||||
if err := ValidDashboardRequest(&req); err != nil {
|
||||
invalidData(w, err, s.Logger)
|
||||
return
|
||||
}
|
||||
|
@ -157,17 +165,85 @@ func (s *Service) UpdateDashboard(w http.ResponseWriter, r *http.Request) {
|
|||
encodeJSON(w, http.StatusOK, res, s.Logger)
|
||||
}
|
||||
|
||||
// UpdateDashboard completely updates either the dashboard name or the cells
|
||||
func (s *Service) UpdateDashboard(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
idParam, err := strconv.Atoi(httprouter.GetParamFromContext(ctx, "id"))
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("Could not parse dashboard ID: %s", err)
|
||||
Error(w, http.StatusInternalServerError, msg, s.Logger)
|
||||
}
|
||||
id := chronograf.DashboardID(idParam)
|
||||
|
||||
orig, err := s.DashboardsStore.Get(ctx, id)
|
||||
if err != nil {
|
||||
Error(w, http.StatusNotFound, fmt.Sprintf("ID %d not found", id), s.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
var req chronograf.Dashboard
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
invalidJSON(w, s.Logger)
|
||||
return
|
||||
}
|
||||
req.ID = id
|
||||
|
||||
if req.Name != "" {
|
||||
orig.Name = req.Name
|
||||
} else if len(req.Cells) > 0 {
|
||||
if err := ValidDashboardRequest(&req); err != nil {
|
||||
invalidData(w, err, s.Logger)
|
||||
return
|
||||
}
|
||||
orig.Cells = req.Cells
|
||||
} else {
|
||||
invalidData(w, fmt.Errorf("Update must include either name or cells"), s.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
if err := s.DashboardsStore.Update(ctx, orig); err != nil {
|
||||
msg := fmt.Sprintf("Error updating dashboard ID %d: %v", id, err)
|
||||
Error(w, http.StatusInternalServerError, msg, s.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
res := newDashboardResponse(orig)
|
||||
encodeJSON(w, http.StatusOK, res, s.Logger)
|
||||
}
|
||||
|
||||
// ValidDashboardRequest verifies that the dashboard cells have a query
|
||||
func ValidDashboardRequest(d chronograf.Dashboard) error {
|
||||
func ValidDashboardRequest(d *chronograf.Dashboard) error {
|
||||
if len(d.Cells) == 0 {
|
||||
return fmt.Errorf("cells are required")
|
||||
}
|
||||
|
||||
for _, c := range d.Cells {
|
||||
if (len(c.Queries) == 0) {
|
||||
for i, c := range d.Cells {
|
||||
if len(c.Queries) == 0 {
|
||||
return fmt.Errorf("query required")
|
||||
}
|
||||
CorrectWidthHeight(&c)
|
||||
d.Cells[i] = c
|
||||
}
|
||||
|
||||
DashboardDefaults(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
// DashboardDefaults updates the dashboard with the default values
|
||||
// if none are specified
|
||||
func DashboardDefaults(d *chronograf.Dashboard) {
|
||||
for i, c := range d.Cells {
|
||||
CorrectWidthHeight(&c)
|
||||
d.Cells[i] = c
|
||||
}
|
||||
}
|
||||
|
||||
// CorrectWidthHeight changes the cell to have at least the
|
||||
// minimum width and height
|
||||
func CorrectWidthHeight(c *chronograf.DashboardCell) {
|
||||
if c.W < 1 {
|
||||
c.W = DefaultWidth
|
||||
}
|
||||
if c.H < 1 {
|
||||
c.H = DefaultHeight
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,299 @@
|
|||
package server
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
)
|
||||
|
||||
func TestCorrectWidthHeight(t *testing.T) {
|
||||
t.Parallel()
|
||||
tests := []struct {
|
||||
name string
|
||||
cell chronograf.DashboardCell
|
||||
want chronograf.DashboardCell
|
||||
}{
|
||||
{
|
||||
name: "updates width",
|
||||
cell: chronograf.DashboardCell{
|
||||
W: 0,
|
||||
H: 4,
|
||||
},
|
||||
want: chronograf.DashboardCell{
|
||||
W: 4,
|
||||
H: 4,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "updates height",
|
||||
cell: chronograf.DashboardCell{
|
||||
W: 4,
|
||||
H: 0,
|
||||
},
|
||||
want: chronograf.DashboardCell{
|
||||
W: 4,
|
||||
H: 4,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "updates both",
|
||||
cell: chronograf.DashboardCell{
|
||||
W: 0,
|
||||
H: 0,
|
||||
},
|
||||
want: chronograf.DashboardCell{
|
||||
W: 4,
|
||||
H: 4,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "updates neither",
|
||||
cell: chronograf.DashboardCell{
|
||||
W: 4,
|
||||
H: 4,
|
||||
},
|
||||
want: chronograf.DashboardCell{
|
||||
W: 4,
|
||||
H: 4,
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
if CorrectWidthHeight(&tt.cell); !reflect.DeepEqual(tt.cell, tt.want) {
|
||||
t.Errorf("%q. CorrectWidthHeight() = %v, want %v", tt.name, tt.cell, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDashboardDefaults(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
d chronograf.Dashboard
|
||||
want chronograf.Dashboard
|
||||
}{
|
||||
{
|
||||
name: "Updates all cell widths/heights",
|
||||
d: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 0,
|
||||
H: 0,
|
||||
},
|
||||
{
|
||||
W: 2,
|
||||
H: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
want: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 4,
|
||||
H: 4,
|
||||
},
|
||||
{
|
||||
W: 2,
|
||||
H: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Updates no cell",
|
||||
d: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 4,
|
||||
H: 4,
|
||||
}, {
|
||||
W: 2,
|
||||
H: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
want: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 4,
|
||||
H: 4,
|
||||
},
|
||||
{
|
||||
W: 2,
|
||||
H: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
if DashboardDefaults(&tt.d); !reflect.DeepEqual(tt.d, tt.want) {
|
||||
t.Errorf("%q. DashboardDefaults() = %v, want %v", tt.name, tt.d, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidDashboardRequest(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
d chronograf.Dashboard
|
||||
want chronograf.Dashboard
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "Updates all cell widths/heights",
|
||||
d: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 0,
|
||||
H: 0,
|
||||
Queries: []chronograf.Query{
|
||||
{
|
||||
Command: "SELECT donors from hill_valley_preservation_society where time > 1985-10-25T08:00:00",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
W: 2,
|
||||
H: 2,
|
||||
Queries: []chronograf.Query{
|
||||
{
|
||||
Command: "SELECT winning_horses from grays_sports_alamanc where time > 1955-11-1T00:00:00",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
want: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 4,
|
||||
H: 4,
|
||||
Queries: []chronograf.Query{
|
||||
{
|
||||
Command: "SELECT donors from hill_valley_preservation_society where time > 1985-10-25T08:00:00",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
W: 2,
|
||||
H: 2,
|
||||
Queries: []chronograf.Query{
|
||||
{
|
||||
Command: "SELECT winning_horses from grays_sports_alamanc where time > 1955-11-1T00:00:00",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "No queries",
|
||||
d: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 2,
|
||||
H: 2,
|
||||
Queries: []chronograf.Query{},
|
||||
},
|
||||
},
|
||||
},
|
||||
want: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 2,
|
||||
H: 2,
|
||||
Queries: []chronograf.Query{},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "Empty Cells",
|
||||
d: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{},
|
||||
},
|
||||
want: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{},
|
||||
},
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
err := ValidDashboardRequest(&tt.d)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("%q. ValidDashboardRequest() error = %v, wantErr %v", tt.name, err, tt.wantErr)
|
||||
continue
|
||||
}
|
||||
if !reflect.DeepEqual(tt.d, tt.want) {
|
||||
t.Errorf("%q. ValidDashboardRequest() = %v, want %v", tt.name, tt.d, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Test_newDashboardResponse(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
d chronograf.Dashboard
|
||||
want dashboardResponse
|
||||
}{
|
||||
{
|
||||
name: "Updates all cell widths/heights",
|
||||
d: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 0,
|
||||
H: 0,
|
||||
Queries: []chronograf.Query{
|
||||
{
|
||||
Command: "SELECT donors from hill_valley_preservation_society where time > 1985-10-25T08:00:00",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
W: 0,
|
||||
H: 0,
|
||||
Queries: []chronograf.Query{
|
||||
{
|
||||
Command: "SELECT winning_horses from grays_sports_alamanc where time > 1955-11-1T00:00:00",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
want: dashboardResponse{
|
||||
Dashboard: chronograf.Dashboard{
|
||||
Cells: []chronograf.DashboardCell{
|
||||
{
|
||||
W: 4,
|
||||
H: 4,
|
||||
Queries: []chronograf.Query{
|
||||
{
|
||||
Command: "SELECT donors from hill_valley_preservation_society where time > 1985-10-25T08:00:00",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
W: 4,
|
||||
H: 4,
|
||||
Queries: []chronograf.Query{
|
||||
{
|
||||
Command: "SELECT winning_horses from grays_sports_alamanc where time > 1955-11-1T00:00:00",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Links: dashboardLinks{
|
||||
Self: "/chronograf/v1/dashboards/0",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
if got := newDashboardResponse(tt.d); !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("%q. newDashboardResponse() = %v, want %v", tt.name, got, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
279
server/github.go
|
@ -1,279 +0,0 @@
|
|||
package server
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/google/go-github/github"
|
||||
"github.com/influxdata/chronograf"
|
||||
"golang.org/x/oauth2"
|
||||
ogh "golang.org/x/oauth2/github"
|
||||
)
|
||||
|
||||
const (
|
||||
// DefaultCookieName is the name of the stored cookie
|
||||
DefaultCookieName = "session"
|
||||
// DefaultCookieDuration is the length of time the cookie is valid
|
||||
DefaultCookieDuration = time.Hour * 24 * 30
|
||||
)
|
||||
|
||||
// Cookie represents the location and expiration time of new cookies.
|
||||
type Cookie struct {
|
||||
Name string
|
||||
Duration time.Duration
|
||||
}
|
||||
|
||||
// NewCookie creates a Cookie with DefaultCookieName and DefaultCookieDuration
|
||||
func NewCookie() Cookie {
|
||||
return Cookie{
|
||||
Name: DefaultCookieName,
|
||||
Duration: DefaultCookieDuration,
|
||||
}
|
||||
}
|
||||
|
||||
// Github provides OAuth Login and Callback server. Callback will set
|
||||
// an authentication cookie. This cookie's value is a JWT containing
|
||||
// the user's primary Github email address.
|
||||
type Github struct {
|
||||
Cookie Cookie
|
||||
Authenticator chronograf.Authenticator
|
||||
ClientID string
|
||||
ClientSecret string
|
||||
Scopes []string
|
||||
SuccessURL string // SuccessURL is redirect location after successful authorization
|
||||
FailureURL string // FailureURL is redirect location after authorization failure
|
||||
Orgs []string // Optional github organization checking
|
||||
Now func() time.Time
|
||||
Logger chronograf.Logger
|
||||
}
|
||||
|
||||
// NewGithub constructs a Github with default cookie behavior and scopes.
|
||||
func NewGithub(clientID, clientSecret, successURL, failureURL string, orgs []string, auth chronograf.Authenticator, log chronograf.Logger) Github {
|
||||
scopes := []string{"user:email"}
|
||||
if len(orgs) > 0 {
|
||||
scopes = append(scopes, "read:org")
|
||||
}
|
||||
return Github{
|
||||
ClientID: clientID,
|
||||
ClientSecret: clientSecret,
|
||||
Cookie: NewCookie(),
|
||||
Scopes: scopes,
|
||||
Orgs: orgs,
|
||||
SuccessURL: successURL,
|
||||
FailureURL: failureURL,
|
||||
Authenticator: auth,
|
||||
Now: time.Now,
|
||||
Logger: log,
|
||||
}
|
||||
}
|
||||
|
||||
func (g *Github) config() *oauth2.Config {
|
||||
return &oauth2.Config{
|
||||
ClientID: g.ClientID,
|
||||
ClientSecret: g.ClientSecret,
|
||||
Scopes: g.Scopes,
|
||||
Endpoint: ogh.Endpoint,
|
||||
}
|
||||
}
|
||||
|
||||
// Login returns a handler that redirects to Github's OAuth login.
|
||||
// Uses JWT with a random string as the state validation method.
|
||||
// JWTs are used because they can be validated without storing
|
||||
// state.
|
||||
func (g *Github) Login() http.HandlerFunc {
|
||||
conf := g.config()
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// We are creating a token with an encoded random string to prevent CSRF attacks
|
||||
// This token will be validated during the OAuth callback.
|
||||
// We'll give our users 10 minutes from this point to type in their github password.
|
||||
// If the callback is not received within 10 minutes, then authorization will fail.
|
||||
csrf := randomString(32) // 32 is not important... just long
|
||||
state, err := g.Authenticator.Token(r.Context(), chronograf.Principal(csrf), 10*time.Minute)
|
||||
// This is likely an internal server error
|
||||
if err != nil {
|
||||
g.Logger.
|
||||
WithField("component", "auth").
|
||||
WithField("remote_addr", r.RemoteAddr).
|
||||
WithField("method", r.Method).
|
||||
WithField("url", r.URL).
|
||||
Error("Internal authentication error: ", err.Error())
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
url := conf.AuthCodeURL(state, oauth2.AccessTypeOnline)
|
||||
http.Redirect(w, r, url, http.StatusTemporaryRedirect)
|
||||
})
|
||||
}
|
||||
|
||||
// Logout will expire our authentication cookie and redirect to the SuccessURL
|
||||
func (g *Github) Logout() http.HandlerFunc {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
deleteCookie := http.Cookie{
|
||||
Name: g.Cookie.Name,
|
||||
Value: "none",
|
||||
Expires: g.Now().UTC().Add(-1 * time.Hour),
|
||||
HttpOnly: true,
|
||||
Path: "/",
|
||||
}
|
||||
http.SetCookie(w, &deleteCookie)
|
||||
http.Redirect(w, r, g.SuccessURL, http.StatusTemporaryRedirect)
|
||||
})
|
||||
}
|
||||
|
||||
// Callback used by github callback after authorization is granted. If
|
||||
// granted, Callback will set a cookie with a month-long expiration. The
|
||||
// value of the cookie is a JWT because the JWT can be validated without
|
||||
// the need for saving state. The JWT contains the Github user's primary
|
||||
// email address.
|
||||
func (g *Github) Callback() http.HandlerFunc {
|
||||
conf := g.config()
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
log := g.Logger.
|
||||
WithField("component", "auth").
|
||||
WithField("remote_addr", r.RemoteAddr).
|
||||
WithField("method", r.Method).
|
||||
WithField("url", r.URL)
|
||||
|
||||
state := r.FormValue("state")
|
||||
// Check if the OAuth state token is valid to prevent CSRF
|
||||
_, err := g.Authenticator.Authenticate(r.Context(), state)
|
||||
if err != nil {
|
||||
log.Error("Invalid OAuth state received: ", err.Error())
|
||||
http.Redirect(w, r, g.FailureURL, http.StatusTemporaryRedirect)
|
||||
return
|
||||
}
|
||||
|
||||
code := r.FormValue("code")
|
||||
token, err := conf.Exchange(r.Context(), code)
|
||||
if err != nil {
|
||||
log.Error("Unable to exchange code for token ", err.Error())
|
||||
http.Redirect(w, r, g.FailureURL, http.StatusTemporaryRedirect)
|
||||
return
|
||||
}
|
||||
|
||||
oauthClient := conf.Client(r.Context(), token)
|
||||
client := github.NewClient(oauthClient)
|
||||
// If we need to restrict to a set of organizations, we first get the org
|
||||
// and filter.
|
||||
if len(g.Orgs) > 0 {
|
||||
orgs, err := getOrganizations(client, log)
|
||||
if err != nil {
|
||||
http.Redirect(w, r, g.FailureURL, http.StatusTemporaryRedirect)
|
||||
return
|
||||
}
|
||||
// Not a member, so, deny permission
|
||||
if ok := isMember(g.Orgs, orgs); !ok {
|
||||
log.Error("Not a member of required github organization")
|
||||
http.Redirect(w, r, g.FailureURL, http.StatusTemporaryRedirect)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
email, err := getPrimaryEmail(client, log)
|
||||
if err != nil {
|
||||
http.Redirect(w, r, g.FailureURL, http.StatusTemporaryRedirect)
|
||||
return
|
||||
}
|
||||
|
||||
// We create an auth token that will be used by all other endpoints to validate the principal has a claim
|
||||
authToken, err := g.Authenticator.Token(r.Context(), chronograf.Principal(email), g.Cookie.Duration)
|
||||
if err != nil {
|
||||
log.Error("Unable to create cookie auth token ", err.Error())
|
||||
http.Redirect(w, r, g.FailureURL, http.StatusTemporaryRedirect)
|
||||
return
|
||||
}
|
||||
|
||||
expireCookie := time.Now().UTC().Add(g.Cookie.Duration)
|
||||
cookie := http.Cookie{
|
||||
Name: g.Cookie.Name,
|
||||
Value: authToken,
|
||||
Expires: expireCookie,
|
||||
HttpOnly: true,
|
||||
Path: "/",
|
||||
}
|
||||
log.Info("User ", email, " is authenticated")
|
||||
http.SetCookie(w, &cookie)
|
||||
http.Redirect(w, r, g.SuccessURL, http.StatusTemporaryRedirect)
|
||||
})
|
||||
}
|
||||
|
||||
func randomString(length int) string {
|
||||
k := make([]byte, length)
|
||||
if _, err := io.ReadFull(rand.Reader, k); err != nil {
|
||||
return ""
|
||||
}
|
||||
return base64.StdEncoding.EncodeToString(k)
|
||||
}
|
||||
|
||||
func logResponseError(log chronograf.Logger, resp *github.Response, err error) {
|
||||
switch resp.StatusCode {
|
||||
case http.StatusUnauthorized, http.StatusForbidden:
|
||||
log.Error("OAuth access to email address forbidden ", err.Error())
|
||||
default:
|
||||
log.Error("Unable to retrieve Github email ", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
// isMember makes sure that the user is in one of the required organizations
|
||||
func isMember(requiredOrgs []string, userOrgs []*github.Organization) bool {
|
||||
for _, requiredOrg := range requiredOrgs {
|
||||
for _, userOrg := range userOrgs {
|
||||
if userOrg.Login != nil && *userOrg.Login == requiredOrg {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// getOrganizations gets all organization for the currently authenticated user
|
||||
func getOrganizations(client *github.Client, log chronograf.Logger) ([]*github.Organization, error) {
|
||||
// Get all pages of results
|
||||
var allOrgs []*github.Organization
|
||||
for {
|
||||
opt := &github.ListOptions{
|
||||
PerPage: 10,
|
||||
}
|
||||
// Get the organizations for the current authenticated user.
|
||||
orgs, resp, err := client.Organizations.List("", opt)
|
||||
if err != nil {
|
||||
logResponseError(log, resp, err)
|
||||
return nil, err
|
||||
}
|
||||
allOrgs = append(allOrgs, orgs...)
|
||||
if resp.NextPage == 0 {
|
||||
break
|
||||
}
|
||||
opt.Page = resp.NextPage
|
||||
}
|
||||
return allOrgs, nil
|
||||
}
|
||||
|
||||
// getPrimaryEmail gets the primary email account for the authenticated user.
|
||||
func getPrimaryEmail(client *github.Client, log chronograf.Logger) (string, error) {
|
||||
emails, resp, err := client.Users.ListEmails(nil)
|
||||
if err != nil {
|
||||
logResponseError(log, resp, err)
|
||||
return "", err
|
||||
}
|
||||
|
||||
email, err := primaryEmail(emails)
|
||||
if err != nil {
|
||||
log.Error("Unable to retrieve primary Github email ", err.Error())
|
||||
return "", err
|
||||
}
|
||||
return email, nil
|
||||
}
|
||||
|
||||
func primaryEmail(emails []*github.UserEmail) (string, error) {
|
||||
for _, m := range emails {
|
||||
if m != nil && m.Primary != nil && m.Verified != nil && m.Email != nil {
|
||||
return *m.Email, nil
|
||||
}
|
||||
}
|
||||
return "", errors.New("No primary email address")
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
package server
|
||||
|
||||
import "net/http"
|
||||
|
||||
// HSTS add HTTP Strict Transport Security header with a max-age of two years
|
||||
// Inspired from https://blog.bracebin.com/achieving-perfect-ssl-labs-score-with-go
|
||||
func HSTS(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Add("Strict-Transport-Security", "max-age=63072000; includeSubDomains")
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
|
@ -349,6 +349,7 @@ func (h *Service) KapacitorRulesPost(w http.ResponseWriter, r *http.Request) {
|
|||
Output: fmt.Sprintf("/chronograf/v1/sources/%d/kapacitors/%d/proxy?path=%s", srv.SrcID, srv.ID, url.QueryEscape(task.HrefOutput)),
|
||||
},
|
||||
TICKScript: string(task.TICKScript),
|
||||
Status: "enabled",
|
||||
}
|
||||
|
||||
w.Header().Add("Location", res.Links.Self)
|
||||
|
@ -364,6 +365,7 @@ type alertLinks struct {
|
|||
type alertResponse struct {
|
||||
chronograf.AlertRule
|
||||
TICKScript string `json:"tickscript"`
|
||||
Status string `json:"status"`
|
||||
Links alertLinks `json:"links"`
|
||||
}
|
||||
|
||||
|
@ -438,6 +440,92 @@ func (h *Service) KapacitorRulesPut(w http.ResponseWriter, r *http.Request) {
|
|||
Output: fmt.Sprintf("/chronograf/v1/sources/%d/kapacitors/%d/proxy?path=%s", srv.SrcID, srv.ID, url.QueryEscape(task.HrefOutput)),
|
||||
},
|
||||
TICKScript: string(task.TICKScript),
|
||||
Status: "enabled",
|
||||
}
|
||||
encodeJSON(w, http.StatusOK, res, h.Logger)
|
||||
}
|
||||
|
||||
type KapacitorStatus struct {
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
func (k *KapacitorStatus) Valid() error {
|
||||
if k.Status == "enabled" || k.Status == "disabled" {
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("Invalid Kapacitor status: %s", k.Status)
|
||||
}
|
||||
|
||||
// KapacitorRulesStatus proxies PATCH to kapacitor to enable/disable tasks
|
||||
func (h *Service) KapacitorRulesStatus(w http.ResponseWriter, r *http.Request) {
|
||||
id, err := paramID("kid", r)
|
||||
if err != nil {
|
||||
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
srcID, err := paramID("id", r)
|
||||
if err != nil {
|
||||
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
ctx := r.Context()
|
||||
srv, err := h.ServersStore.Get(ctx, id)
|
||||
if err != nil || srv.SrcID != srcID {
|
||||
notFound(w, id, h.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
tid := httprouter.GetParamFromContext(ctx, "tid")
|
||||
c := kapa.Client{
|
||||
URL: srv.URL,
|
||||
Username: srv.Username,
|
||||
Password: srv.Password,
|
||||
Ticker: &kapa.Alert{},
|
||||
}
|
||||
var req KapacitorStatus
|
||||
if err = json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
invalidJSON(w, h.Logger)
|
||||
return
|
||||
}
|
||||
if err := req.Valid(); err != nil {
|
||||
invalidData(w, err, h.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if the rule exists and is scoped correctly
|
||||
alert, err := h.AlertRulesStore.Get(ctx, srcID, id, tid)
|
||||
if err != nil {
|
||||
if err == chronograf.ErrAlertNotFound {
|
||||
notFound(w, id, h.Logger)
|
||||
return
|
||||
}
|
||||
Error(w, http.StatusInternalServerError, err.Error(), h.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
var task *kapa.Task
|
||||
if req.Status == "enabled" {
|
||||
task, err = c.Enable(ctx, c.Href(tid))
|
||||
} else {
|
||||
task, err = c.Disable(ctx, c.Href(tid))
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
Error(w, http.StatusInternalServerError, err.Error(), h.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
res := alertResponse{
|
||||
AlertRule: alert,
|
||||
Links: alertLinks{
|
||||
Self: fmt.Sprintf("/chronograf/v1/sources/%d/kapacitors/%d/rules/%s", srv.SrcID, srv.ID, task.ID),
|
||||
Kapacitor: fmt.Sprintf("/chronograf/v1/sources/%d/kapacitors/%d/proxy?path=%s", srv.SrcID, srv.ID, url.QueryEscape(task.Href)),
|
||||
Output: fmt.Sprintf("/chronograf/v1/sources/%d/kapacitors/%d/proxy?path=%s", srv.SrcID, srv.ID, url.QueryEscape(task.HrefOutput)),
|
||||
},
|
||||
TICKScript: string(task.TICKScript),
|
||||
Status: req.Status,
|
||||
}
|
||||
encodeJSON(w, http.StatusOK, res, h.Logger)
|
||||
}
|
||||
|
@ -470,7 +558,18 @@ func (h *Service) KapacitorRulesGet(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
ticker := &kapa.Alert{}
|
||||
c := kapa.Client{}
|
||||
c := kapa.Client{
|
||||
URL: srv.URL,
|
||||
Username: srv.Username,
|
||||
Password: srv.Password,
|
||||
Ticker: ticker,
|
||||
}
|
||||
statuses, err := c.AllStatus(ctx)
|
||||
if err != nil {
|
||||
Error(w, http.StatusInternalServerError, err.Error(), h.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
res := allAlertsResponse{
|
||||
Rules: []alertResponse{},
|
||||
}
|
||||
|
@ -481,6 +580,11 @@ func (h *Service) KapacitorRulesGet(w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
status, ok := statuses[rule.ID]
|
||||
// The defined rule is not actually in kapacitor
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
ar := alertResponse{
|
||||
AlertRule: rule,
|
||||
Links: alertLinks{
|
||||
|
@ -489,6 +593,7 @@ func (h *Service) KapacitorRulesGet(w http.ResponseWriter, r *http.Request) {
|
|||
Output: fmt.Sprintf("/chronograf/v1/sources/%d/kapacitors/%d/proxy?path=%s", srv.SrcID, srv.ID, url.QueryEscape(c.HrefOutput(rule.ID))),
|
||||
},
|
||||
TICKScript: string(tickscript),
|
||||
Status: status,
|
||||
}
|
||||
res.Rules = append(res.Rules, ar)
|
||||
}
|
||||
|
@ -532,13 +637,24 @@ func (h *Service) KapacitorRulesID(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
ticker := &kapa.Alert{}
|
||||
c := kapa.Client{}
|
||||
c := kapa.Client{
|
||||
URL: srv.URL,
|
||||
Username: srv.Username,
|
||||
Password: srv.Password,
|
||||
Ticker: ticker,
|
||||
}
|
||||
tickscript, err := ticker.Generate(rule)
|
||||
if err != nil {
|
||||
Error(w, http.StatusInternalServerError, err.Error(), h.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
status, err := c.Status(ctx, c.Href(rule.ID))
|
||||
if err != nil {
|
||||
Error(w, http.StatusInternalServerError, err.Error(), h.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
res := alertResponse{
|
||||
AlertRule: rule,
|
||||
Links: alertLinks{
|
||||
|
@ -547,6 +663,7 @@ func (h *Service) KapacitorRulesID(w http.ResponseWriter, r *http.Request) {
|
|||
Output: fmt.Sprintf("/chronograf/v1/sources/%d/kapacitors/%d/proxy?path=%s", srv.SrcID, srv.ID, url.QueryEscape(c.HrefOutput(rule.ID))),
|
||||
},
|
||||
TICKScript: string(tickscript),
|
||||
Status: status,
|
||||
}
|
||||
encodeJSON(w, http.StatusOK, res, h.Logger)
|
||||
}
|
||||
|
|
|
@ -0,0 +1,21 @@
|
|||
package server
|
||||
|
||||
import "net/http"
|
||||
|
||||
// Logout chooses the correct provider logout route and redirects to it
|
||||
func Logout(nextURL string, routes AuthRoutes) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
principal, err := getPrincipal(ctx)
|
||||
if err != nil {
|
||||
http.Redirect(w, r, nextURL, http.StatusTemporaryRedirect)
|
||||
return
|
||||
}
|
||||
route, ok := routes.Lookup(principal.Issuer)
|
||||
if !ok {
|
||||
http.Redirect(w, r, nextURL, http.StatusTemporaryRedirect)
|
||||
return
|
||||
}
|
||||
http.Redirect(w, r, route.Logout, http.StatusTemporaryRedirect)
|
||||
}
|
||||
}
|
|
@ -7,9 +7,10 @@ import (
|
|||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/NYTimes/gziphandler"
|
||||
"github.com/bouk/httprouter"
|
||||
"github.com/influxdata/chronograf" // When julienschmidt/httprouter v2 w/ context is out, switch
|
||||
"github.com/influxdata/chronograf/jwt"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
)
|
||||
|
||||
const (
|
||||
|
@ -19,13 +20,13 @@ const (
|
|||
|
||||
// MuxOpts are the options for the router. Mostly related to auth.
|
||||
type MuxOpts struct {
|
||||
Logger chronograf.Logger
|
||||
Develop bool // Develop loads assets from filesystem instead of bindata
|
||||
UseAuth bool // UseAuth turns on Github OAuth and JWT
|
||||
TokenSecret string // TokenSecret is the JWT secret
|
||||
GithubClientID string // GithubClientID is the GH OAuth id
|
||||
GithubClientSecret string // GithubClientSecret is the GH OAuth secret
|
||||
GithubOrgs []string // GithubOrgs is the list of organizations a user my be a member of
|
||||
Logger chronograf.Logger
|
||||
Develop bool // Develop loads assets from filesystem instead of bindata
|
||||
Basepath string // URL path prefix under which all chronograf routes will be mounted
|
||||
UseAuth bool // UseAuth turns on Github OAuth and JWT
|
||||
TokenSecret string
|
||||
|
||||
ProviderFuncs []func(func(oauth2.Provider, oauth2.Mux))
|
||||
}
|
||||
|
||||
// NewMux attaches all the route handlers; handler returned servers chronograf.
|
||||
|
@ -41,19 +42,19 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
|
|||
// Prefix any URLs found in the React assets with any configured basepath
|
||||
prefixedAssets := NewDefaultURLPrefixer(basepath, assets, opts.Logger)
|
||||
|
||||
// Compress the assets with gzip if an accepted encoding
|
||||
compressed := gziphandler.GzipHandler(prefixedAssets)
|
||||
|
||||
// The react application handles all the routing if the server does not
|
||||
// know about the route. This means that we never have unknown
|
||||
// routes on the server.
|
||||
router.NotFound = prefixedAssets
|
||||
router.NotFound = compressed
|
||||
|
||||
/* Documentation */
|
||||
router.GET("/swagger.json", Spec())
|
||||
router.GET("/docs", Redoc("/swagger.json"))
|
||||
|
||||
/* API */
|
||||
// Root Routes returns all top-level routes in the API
|
||||
router.GET("/chronograf/v1/", AllRoutes(opts.Logger))
|
||||
|
||||
// Sources
|
||||
router.GET("/chronograf/v1/sources", service.Sources)
|
||||
router.POST("/chronograf/v1/sources", service.NewSource)
|
||||
|
@ -79,6 +80,7 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
|
|||
|
||||
router.GET("/chronograf/v1/sources/:id/kapacitors/:kid/rules/:tid", service.KapacitorRulesID)
|
||||
router.PUT("/chronograf/v1/sources/:id/kapacitors/:kid/rules/:tid", service.KapacitorRulesPut)
|
||||
router.PATCH("/chronograf/v1/sources/:id/kapacitors/:kid/rules/:tid", service.KapacitorRulesStatus)
|
||||
router.DELETE("/chronograf/v1/sources/:id/kapacitors/:kid/rules/:tid", service.KapacitorRulesDelete)
|
||||
|
||||
// Kapacitor Proxy
|
||||
|
@ -112,45 +114,65 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
|
|||
|
||||
router.GET("/chronograf/v1/dashboards/:id", service.DashboardID)
|
||||
router.DELETE("/chronograf/v1/dashboards/:id", service.RemoveDashboard)
|
||||
router.PUT("/chronograf/v1/dashboards/:id", service.UpdateDashboard)
|
||||
router.PUT("/chronograf/v1/dashboards/:id", service.ReplaceDashboard)
|
||||
router.PATCH("/chronograf/v1/dashboards/:id", service.UpdateDashboard)
|
||||
|
||||
var authRoutes AuthRoutes
|
||||
|
||||
var out http.Handler
|
||||
/* Authentication */
|
||||
if opts.UseAuth {
|
||||
auth := AuthAPI(opts, router)
|
||||
return Logger(opts.Logger, auth)
|
||||
// Encapsulate the router with OAuth2
|
||||
var auth http.Handler
|
||||
auth, authRoutes = AuthAPI(opts, router)
|
||||
|
||||
// Create middleware to redirect to the appropriate provider logout
|
||||
targetURL := "/"
|
||||
router.GET("/oauth/logout", Logout(targetURL, authRoutes))
|
||||
|
||||
out = Logger(opts.Logger, auth)
|
||||
} else {
|
||||
out = Logger(opts.Logger, router)
|
||||
}
|
||||
return Logger(opts.Logger, router)
|
||||
|
||||
router.GET("/chronograf/v1/", AllRoutes(authRoutes, opts.Logger))
|
||||
router.GET("/chronograf/v1", AllRoutes(authRoutes, opts.Logger))
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// AuthAPI adds the OAuth routes if auth is enabled.
|
||||
func AuthAPI(opts MuxOpts, router *httprouter.Router) http.Handler {
|
||||
auth := jwt.NewJWT(opts.TokenSecret)
|
||||
// TODO: this function is not great. Would be good if providers added their routes.
|
||||
func AuthAPI(opts MuxOpts, router *httprouter.Router) (http.Handler, AuthRoutes) {
|
||||
auth := oauth2.NewJWT(opts.TokenSecret)
|
||||
routes := AuthRoutes{}
|
||||
for _, pf := range opts.ProviderFuncs {
|
||||
pf(func(p oauth2.Provider, m oauth2.Mux) {
|
||||
loginPath := fmt.Sprintf("%s/oauth/%s/login", opts.Basepath, strings.ToLower(p.Name()))
|
||||
logoutPath := fmt.Sprintf("%s/oauth/%s/logout", opts.Basepath, strings.ToLower(p.Name()))
|
||||
callbackPath := fmt.Sprintf("%s/oauth/%s/callback", opts.Basepath, strings.ToLower(p.Name()))
|
||||
router.Handler("GET", loginPath, m.Login())
|
||||
router.Handler("GET", logoutPath, m.Logout())
|
||||
router.Handler("GET", callbackPath, m.Callback())
|
||||
routes = append(routes, AuthRoute{
|
||||
Name: p.Name(),
|
||||
Label: strings.Title(p.Name()),
|
||||
Login: loginPath,
|
||||
Logout: logoutPath,
|
||||
Callback: callbackPath,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
successURL := "/"
|
||||
failureURL := "/login"
|
||||
gh := NewGithub(
|
||||
opts.GithubClientID,
|
||||
opts.GithubClientSecret,
|
||||
successURL,
|
||||
failureURL,
|
||||
opts.GithubOrgs,
|
||||
&auth,
|
||||
opts.Logger,
|
||||
)
|
||||
|
||||
router.GET("/oauth/github", gh.Login())
|
||||
router.GET("/oauth/logout", gh.Logout())
|
||||
router.GET("/oauth/github/callback", gh.Callback())
|
||||
|
||||
tokenMiddleware := AuthorizedToken(&auth, &CookieExtractor{Name: "session"}, opts.Logger, router)
|
||||
tokenMiddleware := oauth2.AuthorizedToken(&auth, &oauth2.CookieExtractor{Name: "session"}, opts.Logger, router)
|
||||
// Wrap the API with token validation middleware.
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if strings.HasPrefix(r.URL.Path, "/chronograf/v1/") {
|
||||
if strings.HasPrefix(r.URL.Path, "/chronograf/v1/") || r.URL.Path == "/oauth/logout" {
|
||||
tokenMiddleware.ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
router.ServeHTTP(w, r)
|
||||
})
|
||||
}), routes
|
||||
}
|
||||
|
||||
func encodeJSON(w http.ResponseWriter, status int, v interface{}, logger chronograf.Logger) {
|
||||
|
|
|
@ -6,17 +6,40 @@ import (
|
|||
"github.com/influxdata/chronograf"
|
||||
)
|
||||
|
||||
// AuthRoute are the routes for each type of OAuth2 provider
|
||||
type AuthRoute struct {
|
||||
Name string `json:"name"` // Name uniquely identifies the provider
|
||||
Label string `json:"label"` // Label is a user-facing string to present in the UI
|
||||
Login string `json:"login"` // Login is the route to the login redirect path
|
||||
Logout string `json:"logout"` // Logout is the route to the logout redirect path
|
||||
Callback string `json:"callback"` // Callback is the route the provider calls to exchange the code/state
|
||||
}
|
||||
|
||||
// AuthRoutes contains all OAuth2 provider routes.
|
||||
type AuthRoutes []AuthRoute
|
||||
|
||||
// Lookup searches all the routes for a specific provider
|
||||
func (r *AuthRoutes) Lookup(provider string) (AuthRoute, bool) {
|
||||
for _, route := range *r {
|
||||
if route.Name == provider {
|
||||
return route, true
|
||||
}
|
||||
}
|
||||
return AuthRoute{}, false
|
||||
}
|
||||
|
||||
type getRoutesResponse struct {
|
||||
Layouts string `json:"layouts"` // Location of the layouts endpoint
|
||||
Mappings string `json:"mappings"` // Location of the application mappings endpoint
|
||||
Sources string `json:"sources"` // Location of the sources endpoint
|
||||
Users string `json:"users"` // Location of the users endpoint
|
||||
Me string `json:"me"` // Location of the me endpoint
|
||||
Dashboards string `json:"dashboards"` // Location of the dashboards endpoint
|
||||
Layouts string `json:"layouts"` // Location of the layouts endpoint
|
||||
Mappings string `json:"mappings"` // Location of the application mappings endpoint
|
||||
Sources string `json:"sources"` // Location of the sources endpoint
|
||||
Users string `json:"users"` // Location of the users endpoint
|
||||
Me string `json:"me"` // Location of the me endpoint
|
||||
Dashboards string `json:"dashboards"` // Location of the dashboards endpoint
|
||||
Auth []AuthRoute `json:"auth"` // Location of all auth routes.
|
||||
}
|
||||
|
||||
// AllRoutes returns all top level routes within chronograf
|
||||
func AllRoutes(logger chronograf.Logger) http.HandlerFunc {
|
||||
func AllRoutes(authRoutes []AuthRoute, logger chronograf.Logger) http.HandlerFunc {
|
||||
routes := getRoutesResponse{
|
||||
Sources: "/chronograf/v1/sources",
|
||||
Layouts: "/chronograf/v1/layouts",
|
||||
|
@ -24,6 +47,11 @@ func AllRoutes(logger chronograf.Logger) http.HandlerFunc {
|
|||
Me: "/chronograf/v1/me",
|
||||
Mappings: "/chronograf/v1/mappings",
|
||||
Dashboards: "/chronograf/v1/dashboards",
|
||||
Auth: make([]AuthRoute, len(authRoutes)),
|
||||
}
|
||||
|
||||
for i, route := range authRoutes {
|
||||
routes.Auth[i] = route
|
||||
}
|
||||
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
|
@ -31,3 +59,33 @@ func AllRoutes(logger chronograf.Logger) http.HandlerFunc {
|
|||
return
|
||||
})
|
||||
}
|
||||
|
||||
func NewGithubRoute() AuthRoute {
|
||||
return AuthRoute{
|
||||
Name: "github",
|
||||
Label: "GitHub",
|
||||
Login: "/oauth/github/login",
|
||||
Logout: "/oauth/github/logout",
|
||||
Callback: "/oauth/github/callback",
|
||||
}
|
||||
}
|
||||
|
||||
func NewGoogleRoute() AuthRoute {
|
||||
return AuthRoute{
|
||||
Name: "google",
|
||||
Label: "Google",
|
||||
Login: "/oauth/google/login",
|
||||
Logout: "/oauth/google/logout",
|
||||
Callback: "/oauth/google/callback",
|
||||
}
|
||||
}
|
||||
|
||||
func NewHerokuRoute() AuthRoute {
|
||||
return AuthRoute{
|
||||
Name: "heroku",
|
||||
Label: "Heroku",
|
||||
Login: "/oauth/heroku/login",
|
||||
Logout: "/oauth/heroku/logout",
|
||||
Callback: "/oauth/heroku/callback",
|
||||
}
|
||||
}
|
||||
|
|
184
server/server.go
|
@ -1,6 +1,7 @@
|
|||
package server
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"math/rand"
|
||||
"net"
|
||||
"net/http"
|
||||
|
@ -14,8 +15,10 @@ import (
|
|||
"github.com/influxdata/chronograf/influx"
|
||||
"github.com/influxdata/chronograf/layouts"
|
||||
clog "github.com/influxdata/chronograf/log"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
"github.com/influxdata/chronograf/uuid"
|
||||
client "github.com/influxdata/usage-client/v1"
|
||||
flags "github.com/jessevdk/go-flags"
|
||||
"github.com/tylerb/graceful"
|
||||
)
|
||||
|
||||
|
@ -30,30 +33,94 @@ func init() {
|
|||
|
||||
// Server for the chronograf API
|
||||
type Server struct {
|
||||
Host string `long:"host" description:"the IP to listen on" default:"0.0.0.0" env:"HOST"`
|
||||
Port int `long:"port" description:"the port to listen on for insecure connections, defaults to a random value" default:"8888" env:"PORT"`
|
||||
Host string `long:"host" description:"The IP to listen on" default:"0.0.0.0" env:"HOST"`
|
||||
Port int `long:"port" description:"The port to listen on for insecure connections, defaults to a random value" default:"8888" env:"PORT"`
|
||||
|
||||
/* TODO: add in support for TLS
|
||||
TLSHost string `long:"tls-host" description:"the IP to listen on for tls, when not specified it's the same as --host" env:"TLS_HOST"`
|
||||
TLSPort int `long:"tls-port" description:"the port to listen on for secure connections, defaults to a random value" env:"TLS_PORT"`
|
||||
TLSCertificate flags.Filename `long:"tls-certificate" description:"the certificate to use for secure connections" env:"TLS_CERTIFICATE"`
|
||||
TLSCertificateKey flags.Filename `long:"tls-key" description:"the private key to use for secure conections" env:"TLS_PRIVATE_KEY"`
|
||||
*/
|
||||
Cert flags.Filename `long:"cert" description:"Path to PEM encoded public key certificate. " env:"TLS_CERTIFICATE"`
|
||||
Key flags.Filename `long:"key" description:"Path to private key associated with given certificate. " env:"TLS_PRIVATE_KEY"`
|
||||
|
||||
Develop bool `short:"d" long:"develop" description:"Run server in develop mode."`
|
||||
BoltPath string `short:"b" long:"bolt-path" description:"Full path to boltDB file (/var/lib/chronograf/chronograf-v1.db)" env:"BOLT_PATH" default:"chronograf-v1.db"`
|
||||
CannedPath string `short:"c" long:"canned-path" description:"Path to directory of pre-canned application layouts (/usr/share/chronograf/canned)" env:"CANNED_PATH" default:"canned"`
|
||||
TokenSecret string `short:"t" long:"token-secret" description:"Secret to sign tokens" env:"TOKEN_SECRET"`
|
||||
|
||||
Develop bool `short:"d" long:"develop" description:"Run server in develop mode."`
|
||||
BoltPath string `short:"b" long:"bolt-path" description:"Full path to boltDB file (/var/lib/chronograf/chronograf-v1.db)" env:"BOLT_PATH" default:"chronograf-v1.db"`
|
||||
CannedPath string `short:"c" long:"canned-path" description:"Path to directory of pre-canned application layouts (/usr/share/chronograf/canned)" env:"CANNED_PATH" default:"canned"`
|
||||
TokenSecret string `short:"t" long:"token-secret" description:"Secret to sign tokens" env:"TOKEN_SECRET"`
|
||||
GithubClientID string `short:"i" long:"github-client-id" description:"Github Client ID for OAuth 2 support" env:"GH_CLIENT_ID"`
|
||||
GithubClientSecret string `short:"s" long:"github-client-secret" description:"Github Client Secret for OAuth 2 support" env:"GH_CLIENT_SECRET"`
|
||||
GithubOrgs []string `short:"o" long:"github-organization" description:"Github organization user is required to have active membership" env:"GH_ORGS" env-delim:","`
|
||||
ReportingDisabled bool `short:"r" long:"reporting-disabled" description:"Disable reporting of usage stats (os,arch,version,cluster_id,uptime) once every 24hr" env:"REPORTING_DISABLED"`
|
||||
LogLevel string `short:"l" long:"log-level" value-name:"choice" choice:"debug" choice:"info" choice:"warn" choice:"error" choice:"fatal" choice:"panic" default:"info" description:"Set the logging level" env:"LOG_LEVEL"`
|
||||
Basepath string `short:"p" long:"basepath" description:"A URL path prefix under which all chronograf routes will be mounted" env:"BASE_PATH"`
|
||||
ShowVersion bool `short:"v" long:"version" description:"Show Chronograf version info"`
|
||||
BuildInfo BuildInfo
|
||||
Listener net.Listener
|
||||
handler http.Handler
|
||||
|
||||
GoogleClientID string `long:"google-client-id" description:"Google Client ID for OAuth 2 support" env:"GOOGLE_CLIENT_ID"`
|
||||
GoogleClientSecret string `long:"google-client-secret" description:"Google Client Secret for OAuth 2 support" env:"GOGGLE_CLIENT_SECRET"`
|
||||
GoogleDomains []string `long:"google-domains" description:"Google email domain user is required to have active membership" env:"GOOGLE_DOMAINS" env-delim:","`
|
||||
PublicURL string `long:"public-url" description:"Full public URL used to access Chronograf from a web browser. Used for Google OAuth2 authentication. (http://localhost:8888)" env:"PUBLIC_URL"`
|
||||
|
||||
HerokuClientID string `long:"heroku-client-id" description:"Heroku Client ID for OAuth 2 support" env:"HEROKU_CLIENT_ID"`
|
||||
HerokuSecret string `long:"heroku-secret" description:"Heroku Secret for OAuth 2 support" env:"HEROKU_SECRET"`
|
||||
HerokuOrganizations []string `long:"heroku-organization" description:"Heroku Organization Memberships a user is required to have for access to Chronograf (comma separated)" env:"HEROKU_ORGS" env-delim:","`
|
||||
|
||||
ReportingDisabled bool `short:"r" long:"reporting-disabled" description:"Disable reporting of usage stats (os,arch,version,cluster_id,uptime) once every 24hr" env:"REPORTING_DISABLED"`
|
||||
LogLevel string `short:"l" long:"log-level" value-name:"choice" choice:"debug" choice:"info" choice:"warn" choice:"error" choice:"fatal" choice:"panic" default:"info" description:"Set the logging level" env:"LOG_LEVEL"`
|
||||
Basepath string `short:"p" long:"basepath" description:"A URL path prefix under which all chronograf routes will be mounted" env:"BASE_PATH"`
|
||||
ShowVersion bool `short:"v" long:"version" description:"Show Chronograf version info"`
|
||||
BuildInfo BuildInfo
|
||||
Listener net.Listener
|
||||
handler http.Handler
|
||||
}
|
||||
|
||||
func provide(p oauth2.Provider, m oauth2.Mux, ok func() bool) func(func(oauth2.Provider, oauth2.Mux)) {
|
||||
return func(configure func(oauth2.Provider, oauth2.Mux)) {
|
||||
if ok() {
|
||||
configure(p, m)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) UseGithub() bool {
|
||||
return s.TokenSecret != "" && s.GithubClientID != "" && s.GithubClientSecret != ""
|
||||
}
|
||||
|
||||
func (s *Server) UseGoogle() bool {
|
||||
return s.TokenSecret != "" && s.GoogleClientID != "" && s.GoogleClientSecret != "" && s.PublicURL != ""
|
||||
}
|
||||
|
||||
func (s *Server) UseHeroku() bool {
|
||||
return s.TokenSecret != "" && s.HerokuClientID != "" && s.HerokuSecret != ""
|
||||
}
|
||||
|
||||
func (s *Server) githubOAuth(logger chronograf.Logger, auth oauth2.Authenticator) (oauth2.Provider, oauth2.Mux, func() bool) {
|
||||
gh := oauth2.Github{
|
||||
ClientID: s.GithubClientID,
|
||||
ClientSecret: s.GithubClientSecret,
|
||||
Orgs: s.GithubOrgs,
|
||||
Logger: logger,
|
||||
}
|
||||
ghMux := oauth2.NewCookieMux(&gh, auth, logger)
|
||||
return &gh, ghMux, s.UseGithub
|
||||
}
|
||||
|
||||
func (s *Server) googleOAuth(logger chronograf.Logger, auth oauth2.Authenticator) (oauth2.Provider, oauth2.Mux, func() bool) {
|
||||
redirectURL := s.PublicURL + s.Basepath + "/oauth/google/callback"
|
||||
google := oauth2.Google{
|
||||
ClientID: s.GoogleClientID,
|
||||
ClientSecret: s.GoogleClientSecret,
|
||||
Domains: s.GoogleDomains,
|
||||
RedirectURL: redirectURL,
|
||||
Logger: logger,
|
||||
}
|
||||
|
||||
goMux := oauth2.NewCookieMux(&google, auth, logger)
|
||||
return &google, goMux, s.UseGoogle
|
||||
}
|
||||
|
||||
func (s *Server) herokuOAuth(logger chronograf.Logger, auth oauth2.Authenticator) (oauth2.Provider, oauth2.Mux, func() bool) {
|
||||
heroku := oauth2.Heroku{
|
||||
ClientID: s.HerokuClientID,
|
||||
ClientSecret: s.HerokuSecret,
|
||||
Organizations: s.HerokuOrganizations,
|
||||
Logger: logger,
|
||||
}
|
||||
|
||||
hMux := oauth2.NewCookieMux(&heroku, auth, logger)
|
||||
return &heroku, hMux, s.UseHeroku
|
||||
}
|
||||
|
||||
// BuildInfo is sent to the usage client to track versions and commits
|
||||
|
@ -63,7 +130,45 @@ type BuildInfo struct {
|
|||
}
|
||||
|
||||
func (s *Server) useAuth() bool {
|
||||
return s.TokenSecret != "" && s.GithubClientID != "" && s.GithubClientSecret != ""
|
||||
gh := s.TokenSecret != "" && s.GithubClientID != "" && s.GithubClientSecret != ""
|
||||
google := s.TokenSecret != "" && s.GoogleClientID != "" && s.GoogleClientSecret != "" && s.PublicURL != ""
|
||||
heroku := s.TokenSecret != "" && s.HerokuClientID != "" && s.HerokuSecret != ""
|
||||
return gh || google || heroku
|
||||
}
|
||||
|
||||
func (s *Server) useTLS() bool {
|
||||
return s.Cert != ""
|
||||
}
|
||||
|
||||
// NewListener will an http or https listener depending useTLS()
|
||||
func (s *Server) NewListener() (net.Listener, error) {
|
||||
addr := net.JoinHostPort(s.Host, strconv.Itoa(s.Port))
|
||||
if !s.useTLS() {
|
||||
listener, err := net.Listen("tcp", addr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return listener, nil
|
||||
}
|
||||
|
||||
// If no key specified, therefore, we assume it is in the cert
|
||||
if s.Key == "" {
|
||||
s.Key = s.Cert
|
||||
}
|
||||
|
||||
cert, err := tls.LoadX509KeyPair(string(s.Cert), string(s.Key))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
listener, err := tls.Listen("tcp", addr, &tls.Config{
|
||||
Certificates: []tls.Certificate{cert},
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return listener, nil
|
||||
}
|
||||
|
||||
// Serve starts and runs the chronograf server
|
||||
|
@ -71,26 +176,38 @@ func (s *Server) Serve() error {
|
|||
logger := clog.New(clog.ParseLevel(s.LogLevel))
|
||||
service := openService(s.BoltPath, s.CannedPath, logger, s.useAuth())
|
||||
basepath = s.Basepath
|
||||
|
||||
providerFuncs := []func(func(oauth2.Provider, oauth2.Mux)){}
|
||||
|
||||
auth := oauth2.NewJWT(s.TokenSecret)
|
||||
providerFuncs = append(providerFuncs, provide(s.githubOAuth(logger, &auth)))
|
||||
providerFuncs = append(providerFuncs, provide(s.googleOAuth(logger, &auth)))
|
||||
providerFuncs = append(providerFuncs, provide(s.herokuOAuth(logger, &auth)))
|
||||
|
||||
s.handler = NewMux(MuxOpts{
|
||||
Develop: s.Develop,
|
||||
TokenSecret: s.TokenSecret,
|
||||
GithubClientID: s.GithubClientID,
|
||||
GithubClientSecret: s.GithubClientSecret,
|
||||
GithubOrgs: s.GithubOrgs,
|
||||
Logger: logger,
|
||||
UseAuth: s.useAuth(),
|
||||
Develop: s.Develop,
|
||||
TokenSecret: s.TokenSecret,
|
||||
Logger: logger,
|
||||
UseAuth: s.useAuth(),
|
||||
ProviderFuncs: providerFuncs,
|
||||
}, service)
|
||||
|
||||
// Add chronograf's version header to all requests
|
||||
s.handler = Version(s.BuildInfo.Version, s.handler)
|
||||
|
||||
var err error
|
||||
s.Listener, err = net.Listen("tcp", net.JoinHostPort(s.Host, strconv.Itoa(s.Port)))
|
||||
if s.useTLS() {
|
||||
// Add HSTS to instruct all browsers to change from http to https
|
||||
s.handler = HSTS(s.handler)
|
||||
}
|
||||
|
||||
listener, err := s.NewListener()
|
||||
if err != nil {
|
||||
logger.
|
||||
WithField("component", "server").
|
||||
Error(err)
|
||||
return err
|
||||
}
|
||||
s.Listener = listener
|
||||
|
||||
httpServer := &graceful.Server{Server: new(http.Server)}
|
||||
httpServer.SetKeepAlivesEnabled(true)
|
||||
|
@ -100,10 +217,13 @@ func (s *Server) Serve() error {
|
|||
if !s.ReportingDisabled {
|
||||
go reportUsageStats(s.BuildInfo, logger)
|
||||
}
|
||||
|
||||
scheme := "http"
|
||||
if s.useTLS() {
|
||||
scheme = "https"
|
||||
}
|
||||
logger.
|
||||
WithField("component", "server").
|
||||
Info("Serving chronograf at http://", s.Listener.Addr())
|
||||
Info("Serving chronograf at ", scheme, "://", s.Listener.Addr())
|
||||
|
||||
if err := httpServer.Serve(s.Listener); err != nil {
|
||||
logger.
|
||||
|
@ -114,7 +234,7 @@ func (s *Server) Serve() error {
|
|||
|
||||
logger.
|
||||
WithField("component", "server").
|
||||
Info("Stopped serving chronograf at http://", s.Listener.Addr())
|
||||
Info("Stopped serving chronograf at ", scheme, "://", s.Listener.Addr())
|
||||
|
||||
return nil
|
||||
}
|
||||
|
|
|
@ -27,6 +27,9 @@ func newSourceResponse(src chronograf.Source) sourceResponse {
|
|||
src.Telegraf = "telegraf"
|
||||
}
|
||||
|
||||
// Omit the password on response
|
||||
src.Password = ""
|
||||
|
||||
httpAPISrcs := "/chronograf/v1/sources"
|
||||
return sourceResponse{
|
||||
Source: src,
|
||||
|
|
|
@ -1519,6 +1519,51 @@
|
|||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"patch": {
|
||||
"tags": [
|
||||
"layouts"
|
||||
],
|
||||
"summary": "Update dashboard information.",
|
||||
"description": "Update either the dashboard name or the dashboard cells",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"type": "integer",
|
||||
"description": "ID of a dashboard",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"name": "config",
|
||||
"in": "body",
|
||||
"description": "dashboard configuration update parameters. Must be either name or cells",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/Dashboard"
|
||||
},
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Dashboard has been updated and the new dashboard is returned.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/Dashboard"
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Happens when trying to access a non-existent dashboard.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/Error"
|
||||
}
|
||||
},
|
||||
"default": {
|
||||
"description": "A processing or an unexpected error.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/Error"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -1851,6 +1896,14 @@
|
|||
"type": "string",
|
||||
"description": "TICKscript representing this rule"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"description": "Represents if this rule is enabled or disabled in kapacitor",
|
||||
"enum": [
|
||||
"enabled",
|
||||
"disabled"
|
||||
]
|
||||
},
|
||||
"links": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
|
@ -2245,12 +2298,16 @@
|
|||
"w": {
|
||||
"description": "Width of Cell in the Dashboard",
|
||||
"type": "integer",
|
||||
"format": "int32"
|
||||
"format": "int32",
|
||||
"minimum": 1,
|
||||
"default": 4
|
||||
},
|
||||
"h": {
|
||||
"description": "Height of Cell in the Dashboard",
|
||||
"type": "integer",
|
||||
"format": "int32"
|
||||
"format": "int32",
|
||||
"minimum": 1,
|
||||
"default": 4
|
||||
},
|
||||
"queries": {
|
||||
"description": "Time-series data queries for Cell.",
|
||||
|
@ -2265,7 +2322,9 @@
|
|||
"enum": [
|
||||
"single-stat",
|
||||
"line",
|
||||
"line-plus-single-stat"
|
||||
"line-plus-single-stat",
|
||||
"line-stacked",
|
||||
"line-stepplot"
|
||||
],
|
||||
"default": "line"
|
||||
}
|
||||
|
|
|
@ -22,16 +22,27 @@ type wrapResponseWriter struct {
|
|||
Substitute *io.PipeWriter
|
||||
|
||||
headerWritten bool
|
||||
dupHeader http.Header
|
||||
dupHeader *http.Header
|
||||
}
|
||||
|
||||
func (wrw wrapResponseWriter) Write(p []byte) (int, error) {
|
||||
func (wrw *wrapResponseWriter) Write(p []byte) (int, error) {
|
||||
return wrw.Substitute.Write(p)
|
||||
}
|
||||
|
||||
func (wrw wrapResponseWriter) WriteHeader(code int) {
|
||||
func (wrw *wrapResponseWriter) WriteHeader(code int) {
|
||||
if !wrw.headerWritten {
|
||||
wrw.ResponseWriter.Header().Set("Content-Type", wrw.Header().Get("Content-Type"))
|
||||
wrw.ResponseWriter.Header().Set("Content-Type", wrw.dupHeader.Get("Content-Type"))
|
||||
header := wrw.ResponseWriter.Header()
|
||||
// Filter out content length header to prevent stopping writing
|
||||
if wrw.dupHeader != nil {
|
||||
for k, v := range *wrw.dupHeader {
|
||||
if k == "Content-Length" {
|
||||
continue
|
||||
}
|
||||
header[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
wrw.headerWritten = true
|
||||
}
|
||||
wrw.ResponseWriter.WriteHeader(code)
|
||||
|
@ -39,13 +50,16 @@ func (wrw wrapResponseWriter) WriteHeader(code int) {
|
|||
|
||||
// Header() copies the Header map from the underlying ResponseWriter to prevent
|
||||
// modifications to it by callers
|
||||
func (wrw wrapResponseWriter) Header() http.Header {
|
||||
wrw.dupHeader = http.Header{}
|
||||
origHeader := wrw.ResponseWriter.Header()
|
||||
for k, v := range origHeader {
|
||||
wrw.dupHeader[k] = v
|
||||
func (wrw *wrapResponseWriter) Header() http.Header {
|
||||
if wrw.dupHeader == nil {
|
||||
h := http.Header{}
|
||||
origHeader := wrw.ResponseWriter.Header()
|
||||
for k, v := range origHeader {
|
||||
h[k] = v
|
||||
}
|
||||
wrw.dupHeader = &h
|
||||
}
|
||||
return wrw.dupHeader
|
||||
return *wrw.dupHeader
|
||||
}
|
||||
|
||||
const CHUNK_SIZE int = 512
|
||||
|
@ -73,7 +87,7 @@ func (up *URLPrefixer) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
|
|||
nextRead, nextWrite := io.Pipe()
|
||||
go func() {
|
||||
defer nextWrite.Close()
|
||||
up.Next.ServeHTTP(wrapResponseWriter{ResponseWriter: rw, Substitute: nextWrite}, r)
|
||||
up.Next.ServeHTTP(&wrapResponseWriter{ResponseWriter: rw, Substitute: nextWrite}, r)
|
||||
}()
|
||||
|
||||
// setup a buffer which is the max length of our target attrs
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
|
|||
"golang.org/x/net/context"
|
||||
|
||||
"github.com/influxdata/chronograf"
|
||||
"github.com/influxdata/chronograf/oauth2"
|
||||
)
|
||||
|
||||
type userLinks struct {
|
||||
|
@ -19,14 +20,19 @@ type userResponse struct {
|
|||
Links userLinks `json:"links"`
|
||||
}
|
||||
|
||||
// If new user response is nil, return an empty userResponse because it
|
||||
// indicates authentication is not needed
|
||||
func newUserResponse(usr *chronograf.User) userResponse {
|
||||
base := "/chronograf/v1/users"
|
||||
return userResponse{
|
||||
User: usr,
|
||||
Links: userLinks{
|
||||
Self: fmt.Sprintf("%s/%d", base, usr.ID),
|
||||
},
|
||||
if usr != nil {
|
||||
return userResponse{
|
||||
User: usr,
|
||||
Links: userLinks{
|
||||
Self: fmt.Sprintf("%s/%d", base, usr.ID),
|
||||
},
|
||||
}
|
||||
}
|
||||
return userResponse{}
|
||||
}
|
||||
|
||||
// NewUser adds a new valid user to the store
|
||||
|
@ -135,19 +141,32 @@ func ValidUserRequest(s *chronograf.User) error {
|
|||
}
|
||||
|
||||
func getEmail(ctx context.Context) (string, error) {
|
||||
principal := ctx.Value(chronograf.PrincipalKey).(chronograf.Principal)
|
||||
if principal == "" {
|
||||
principal, err := getPrincipal(ctx)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if principal.Subject == "" {
|
||||
return "", fmt.Errorf("Token not found")
|
||||
}
|
||||
return string(principal), nil
|
||||
return principal.Subject, nil
|
||||
}
|
||||
|
||||
func getPrincipal(ctx context.Context) (oauth2.Principal, error) {
|
||||
principal, ok := ctx.Value(oauth2.PrincipalKey).(oauth2.Principal)
|
||||
if !ok {
|
||||
return oauth2.Principal{}, fmt.Errorf("Token not found")
|
||||
}
|
||||
|
||||
return principal, nil
|
||||
}
|
||||
|
||||
// Me does a findOrCreate based on the email in the context
|
||||
func (h *Service) Me(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
if !h.UseAuth {
|
||||
// Using status code to signal no need for authentication
|
||||
w.WriteHeader(http.StatusTeapot)
|
||||
// If there's no authentication, return an empty user
|
||||
res := newUserResponse(nil)
|
||||
encodeJSON(w, http.StatusOK, res, h.Logger)
|
||||
return
|
||||
}
|
||||
email, err := getEmail(ctx)
|
||||
|
|
|
@ -182,7 +182,7 @@
|
|||
'quote-props': [2, 'as-needed', {keywords: true, numbers: false }],
|
||||
'require-jsdoc': 0,
|
||||
'semi-spacing': [2, {before: false, after: true}],
|
||||
'semi': [2, 'always'],
|
||||
// 'semi': [2, 'always'],
|
||||
'sort-vars': 0,
|
||||
'keyword-spacing': 'error',
|
||||
'space-before-blocks': [2, 'always'],
|
||||
|
@ -194,7 +194,7 @@
|
|||
'wrap-regex': 0,
|
||||
'arrow-body-style': 0,
|
||||
'arrow-spacing': [2, {before: true, after: true}],
|
||||
'no-confusing-arrow': 2,
|
||||
'no-confusing-arrow': 0,
|
||||
'no-class-assign': 2,
|
||||
'no-const-assign': 2,
|
||||
'no-dupe-class-members': 2,
|
||||
|
|
|
@ -0,0 +1,52 @@
|
|||
import reducer from 'src/dashboards/reducers/ui'
|
||||
import timeRanges from 'hson!src/shared/data/timeRanges.hson';
|
||||
|
||||
import {
|
||||
loadDashboards,
|
||||
setDashboard,
|
||||
setTimeRange,
|
||||
setEditMode,
|
||||
} from 'src/dashboards/actions'
|
||||
|
||||
const noopAction = () => {
|
||||
return {type: 'NOOP'}
|
||||
}
|
||||
|
||||
let state = undefined
|
||||
const timeRange = timeRanges[1];
|
||||
const d1 = {id: 1, cells: [], name: "d1"}
|
||||
const d2 = {id: 2, cells: [], name: "d2"}
|
||||
const dashboards = [d1, d2]
|
||||
|
||||
describe('DataExplorer.Reducers.UI', () => {
|
||||
it('can load the dashboards', () => {
|
||||
const actual = reducer(state, loadDashboards(dashboards, d1.id))
|
||||
const expected = {
|
||||
dashboards,
|
||||
dashboard: d1,
|
||||
}
|
||||
|
||||
expect(actual.dashboards).to.deep.equal(expected.dashboards)
|
||||
expect(actual.dashboard).to.deep.equal(expected.dashboard)
|
||||
})
|
||||
|
||||
it('can set a dashboard', () => {
|
||||
const loadedState = reducer(state, loadDashboards(dashboards, d1.id))
|
||||
const actual = reducer(loadedState, setDashboard(d2.id))
|
||||
|
||||
expect(actual.dashboard).to.deep.equal(d2)
|
||||
})
|
||||
|
||||
it('can set the time range', () => {
|
||||
const expected = {upper: null, lower: 'now() - 1h'}
|
||||
const actual = reducer(state, setTimeRange(expected))
|
||||
|
||||
expect(actual.timeRange).to.deep.equal(expected)
|
||||
})
|
||||
|
||||
it('can set edit mode', () => {
|
||||
const isEditMode = true
|
||||
const actual = reducer(state, setEditMode(isEditMode))
|
||||
expect(actual.isEditMode).to.equal(isEditMode)
|
||||
})
|
||||
})
|
|
@ -1,11 +0,0 @@
|
|||
import reducer from 'src/data_explorer/reducers/dataExplorerUI';
|
||||
import {activatePanel} from 'src/data_explorer/actions/view';
|
||||
|
||||
describe('DataExplorer.Reducers.UI', () => {
|
||||
it('can set the active panel', () => {
|
||||
const activePanel = 123;
|
||||
const actual = reducer({}, activatePanel(activePanel));
|
||||
|
||||
expect(actual).to.deep.equal({activePanel});
|
||||
});
|
||||
});
|
|
@ -1,34 +0,0 @@
|
|||
import reducer from 'src/data_explorer/reducers/panels';
|
||||
import {deletePanel} from 'src/data_explorer/actions/view';
|
||||
|
||||
const fakeAddPanelAction = (panelID, queryID) => {
|
||||
return {
|
||||
type: 'CREATE_PANEL',
|
||||
payload: {panelID, queryID},
|
||||
};
|
||||
};
|
||||
|
||||
describe('Chronograf.Reducers.Panel', () => {
|
||||
let state;
|
||||
const panelID = 123;
|
||||
const queryID = 456;
|
||||
|
||||
beforeEach(() => {
|
||||
state = reducer({}, fakeAddPanelAction(panelID, queryID));
|
||||
});
|
||||
|
||||
it('can add a panel', () => {
|
||||
const actual = state[panelID];
|
||||
expect(actual).to.deep.equal({
|
||||
id: panelID,
|
||||
queryIds: [queryID],
|
||||
});
|
||||
});
|
||||
|
||||
it('can delete a panel', () => {
|
||||
const nextState = reducer(state, deletePanel(panelID));
|
||||
|
||||
const actual = nextState[panelID];
|
||||
expect(actual).to.equal(undefined);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,31 @@
|
|||
import reducer from 'src/data_explorer/reducers/timeRange';
|
||||
|
||||
import {
|
||||
setTimeRange,
|
||||
} from 'src/data_explorer/actions/view';
|
||||
|
||||
const noopAction = () => {
|
||||
return {type: 'NOOP'};
|
||||
}
|
||||
|
||||
describe('DataExplorer.Reducers.TimeRange', () => {
|
||||
it('it sets the default timeRange', () => {
|
||||
const state = reducer(undefined, noopAction());
|
||||
const expected = {
|
||||
lower: 'now() - 15m',
|
||||
upper: null,
|
||||
};
|
||||
|
||||
expect(state).to.deep.equal(expected);
|
||||
});
|
||||
|
||||
it('it can set the time range', () => {
|
||||
const timeRange = {
|
||||
lower: 'now() - 5m',
|
||||
upper: null,
|
||||
};
|
||||
const expected = reducer(undefined, setTimeRange(timeRange));
|
||||
|
||||
expect(timeRange).to.deep.equal(expected);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,40 @@
|
|||
import reducer from 'src/data_explorer/reducers/ui';
|
||||
|
||||
import {
|
||||
addQuery,
|
||||
deleteQuery,
|
||||
} from 'src/data_explorer/actions/view';
|
||||
|
||||
const noopAction = () => {
|
||||
return {type: 'NOOP'};
|
||||
}
|
||||
|
||||
let state = undefined;
|
||||
|
||||
describe('DataExplorer.Reducers.UI', () => {
|
||||
it('it sets the default state for UI', () => {
|
||||
const actual = reducer(state, noopAction());
|
||||
const expected = {
|
||||
queryIDs: [],
|
||||
};
|
||||
|
||||
expect(actual).to.deep.equal(expected);
|
||||
});
|
||||
|
||||
it('it can add a query', () => {
|
||||
const actual = reducer(state, addQuery());
|
||||
expect(actual.queryIDs.length).to.equal(1);
|
||||
});
|
||||
|
||||
it('it can delete a query', () => {
|
||||
const queryID = '123';
|
||||
state = {queryIDs: ['456', queryID]};
|
||||
|
||||
const actual = reducer(state, deleteQuery(queryID));
|
||||
const expected = {
|
||||
queryIDs: ['456'],
|
||||
};
|
||||
|
||||
expect(actual).to.deep.equal(expected);
|
||||
});
|
||||
});
|
|
@ -1,5 +1,6 @@
|
|||
import reducer from 'src/kapacitor/reducers/rules';
|
||||
import {defaultRuleConfigs} from 'src/kapacitor/constants';
|
||||
import {ALERT_NODES_ACCESSORS} from 'src/kapacitor/constants';
|
||||
|
||||
import {
|
||||
chooseTrigger,
|
||||
|
@ -7,8 +8,10 @@ import {
|
|||
updateDetails,
|
||||
updateMessage,
|
||||
updateAlerts,
|
||||
updateAlertNodes,
|
||||
updateRuleName,
|
||||
deleteRuleSuccess,
|
||||
updateRuleStatusSuccess,
|
||||
} from 'src/kapacitor/actions/view';
|
||||
|
||||
describe('Kapacitor.Reducers.rules', () => {
|
||||
|
@ -86,6 +89,40 @@ describe('Kapacitor.Reducers.rules', () => {
|
|||
expect(newState[ruleID].alerts).to.equal(alerts);
|
||||
});
|
||||
|
||||
it('can update an alerta alert', () => {
|
||||
const ruleID = 1;
|
||||
const initialState = {
|
||||
[ruleID]: {
|
||||
id: ruleID,
|
||||
queryID: 988,
|
||||
alerts: [],
|
||||
alertNodes: [],
|
||||
}
|
||||
};
|
||||
|
||||
const tickScript = `stream
|
||||
|alert()
|
||||
.alerta()
|
||||
.resource('Hostname or service')
|
||||
.event('Something went wrong')
|
||||
.environment('Development')
|
||||
.group('Dev. Servers')
|
||||
.services('a b c')
|
||||
`;
|
||||
|
||||
let newState = reducer(initialState, updateAlertNodes(ruleID, 'alerta', tickScript));
|
||||
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`;
|
||||
let actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID]);
|
||||
|
||||
// Test both data structure and accessor string
|
||||
expect(actualStr).to.equal(expectedStr);
|
||||
|
||||
// Test that accessor string is the same if fed back in
|
||||
newState = reducer(newState, updateAlertNodes(ruleID, 'alerta', actualStr));
|
||||
actualStr = ALERT_NODES_ACCESSORS.alerta(newState[ruleID]);
|
||||
expect(actualStr).to.equal(expectedStr);
|
||||
});
|
||||
|
||||
it('can update the name', () => {
|
||||
const ruleID = 1;
|
||||
const name = 'New name'
|
||||
|
@ -134,4 +171,20 @@ describe('Kapacitor.Reducers.rules', () => {
|
|||
const newState = reducer(initialState, updateDetails(ruleID, details));
|
||||
expect(newState[ruleID].details).to.equal(details);
|
||||
});
|
||||
|
||||
it('can update status', () => {
|
||||
const ruleID = 1;
|
||||
const status = 'enabled';
|
||||
|
||||
const initialState = {
|
||||
[ruleID]: {
|
||||
id: ruleID,
|
||||
queryID: 988,
|
||||
status: 'disabled',
|
||||
}
|
||||
};
|
||||
|
||||
const newState = reducer(initialState, updateRuleStatusSuccess(ruleID, status));
|
||||
expect(newState[ruleID].status).to.equal(status);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -40,4 +40,48 @@ describe('getRangeForDygraphSpec', () => {
|
|||
|
||||
expect(actual).to.deep.equal(expected);
|
||||
});
|
||||
|
||||
describe('when user provides a rule value', () => {
|
||||
const defaultMax = 20;
|
||||
const defaultMin = -10;
|
||||
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]];
|
||||
|
||||
it('can pad positive values', () => {
|
||||
const value = 20;
|
||||
const [min, max] = getRange(timeSeries, undefined, value);
|
||||
|
||||
expect(min).to.equal(defaultMin);
|
||||
expect(max).to.be.above(defaultMax);
|
||||
});
|
||||
|
||||
it('can pad negative values', () => {
|
||||
const value = -10;
|
||||
const [min, max] = getRange(timeSeries, undefined, value);
|
||||
|
||||
expect(min).to.be.below(defaultMin);
|
||||
expect(max).to.equal(defaultMax);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when user provides a rule range value', () => {
|
||||
const defaultMax = 20;
|
||||
const defaultMin = -10;
|
||||
const timeSeries = [[new Date(1000), defaultMax], [new Date(2000), 1], [new Date(3000), defaultMin]];
|
||||
|
||||
it('can pad positive values', () => {
|
||||
const rangeValue = 20;
|
||||
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue);
|
||||
|
||||
expect(min).to.equal(defaultMin);
|
||||
expect(max).to.be.above(defaultMax);
|
||||
});
|
||||
|
||||
it('can pad negative values', () => {
|
||||
const rangeValue = -10;
|
||||
const [min, max] = getRange(timeSeries, undefined, 0, rangeValue);
|
||||
|
||||
expect(min).to.be.below(defaultMin);
|
||||
expect(max).to.equal(defaultMax);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -0,0 +1,58 @@
|
|||
import {parseAlerta} from 'src/shared/parsing/parseAlerta';
|
||||
|
||||
it('can parse an alerta tick script', () => {
|
||||
const tickScript = `stream
|
||||
|alert()
|
||||
.alerta()
|
||||
.resource('Hostname or service')
|
||||
.event('Something went wrong')
|
||||
.environment('Development')
|
||||
.group('Dev. Servers')
|
||||
.services('a b c')
|
||||
`;
|
||||
|
||||
let actualObj = parseAlerta(tickScript);
|
||||
|
||||
const expectedObj = [
|
||||
{
|
||||
"name": "resource",
|
||||
"args": [
|
||||
"Hostname or service"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "event",
|
||||
"args": [
|
||||
"Something went wrong"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "environment",
|
||||
"args": [
|
||||
"Development"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "group",
|
||||
"args": [
|
||||
"Dev. Servers"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "services",
|
||||
"args": [
|
||||
"a",
|
||||
"b",
|
||||
"c"
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
// Test data structure
|
||||
expect(actualObj).to.deep.equal(expectedObj);
|
||||
|
||||
// Test that data structure is the same if fed back in
|
||||
const expectedStr = `alerta().resource('Hostname or service').event('Something went wrong').environment('Development').group('Dev. Servers').services('a b c')`;
|
||||
actualObj = parseAlerta(expectedStr);
|
||||
expect(actualObj).to.deep.equal(expectedObj);
|
||||
});
|
|
@ -8,22 +8,29 @@ import {
|
|||
dismissAllNotifications as dismissAllNotificationsAction,
|
||||
} from 'src/shared/actions/notifications';
|
||||
|
||||
const {
|
||||
node,
|
||||
shape,
|
||||
string,
|
||||
func,
|
||||
} = PropTypes
|
||||
|
||||
const App = React.createClass({
|
||||
propTypes: {
|
||||
children: PropTypes.node.isRequired,
|
||||
location: PropTypes.shape({
|
||||
pathname: PropTypes.string,
|
||||
children: node.isRequired,
|
||||
location: shape({
|
||||
pathname: string,
|
||||
}),
|
||||
params: PropTypes.shape({
|
||||
sourceID: PropTypes.string.isRequired,
|
||||
params: shape({
|
||||
sourceID: string.isRequired,
|
||||
}).isRequired,
|
||||
publishNotification: PropTypes.func.isRequired,
|
||||
dismissNotification: PropTypes.func.isRequired,
|
||||
dismissAllNotifications: PropTypes.func.isRequired,
|
||||
notifications: PropTypes.shape({
|
||||
success: PropTypes.string,
|
||||
error: PropTypes.string,
|
||||
warning: PropTypes.string,
|
||||
publishNotification: func.isRequired,
|
||||
dismissNotification: func.isRequired,
|
||||
dismissAllNotifications: func.isRequired,
|
||||
notifications: shape({
|
||||
success: string,
|
||||
error: string,
|
||||
warning: string,
|
||||
}),
|
||||
},
|
||||
|
||||
|
@ -46,11 +53,15 @@ const App = React.createClass({
|
|||
},
|
||||
|
||||
render() {
|
||||
const {sourceID} = this.props.params;
|
||||
const {params: {sourceID}} = this.props;
|
||||
|
||||
return (
|
||||
<div className="chronograf-root">
|
||||
<SideNavContainer sourceID={sourceID} addFlashMessage={this.handleNotification} currentLocation={this.props.location.pathname} />
|
||||
<SideNavContainer
|
||||
sourceID={sourceID}
|
||||
addFlashMessage={this.handleNotification}
|
||||
currentLocation={this.props.location.pathname}
|
||||
/>
|
||||
{this.renderNotifications()}
|
||||
{this.props.children && React.cloneElement(this.props.children, {
|
||||
addFlashMessage: this.handleNotification,
|
||||
|
|
|
@ -1,21 +1,33 @@
|
|||
import React from 'react';
|
||||
import {withRouter} from 'react-router';
|
||||
/* global VERSION */
|
||||
import React, {PropTypes} from 'react'
|
||||
import {connect} from 'react-redux'
|
||||
|
||||
const Login = React.createClass({
|
||||
render() {
|
||||
return (
|
||||
<div className="auth-page">
|
||||
<div className="auth-box">
|
||||
<div className="auth-logo"></div>
|
||||
<h1 className="auth-text-logo">Chronograf</h1>
|
||||
<p><strong>v1.1</strong> / Time-Series Data Visualization</p>
|
||||
<a className="btn btn-primary" href="/oauth/github"><span className="icon github"></span> Login with GitHub</a>
|
||||
</div>
|
||||
<p className="auth-credits">Made by <span className="icon cubo-uniform"></span>InfluxData</p>
|
||||
<div className="auth-image"></div>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
});
|
||||
const {array} = PropTypes
|
||||
|
||||
export default withRouter(Login);
|
||||
const Login = ({auth}) => (
|
||||
<div className="auth-page">
|
||||
<div className="auth-box">
|
||||
<div className="auth-logo"></div>
|
||||
<h1 className="auth-text-logo">Chronograf</h1>
|
||||
<p><strong>{VERSION}</strong> / Time-Series Data Visualization</p>
|
||||
{auth.map(({name, login, label}) => (
|
||||
<a key={name} className="btn btn-primary" href={login}>
|
||||
<span className={`icon ${name}`}></span>
|
||||
Login with {label}
|
||||
</a>
|
||||
))}
|
||||
</div>
|
||||
<p className="auth-credits">Made by <span className="icon cubo-uniform"></span>InfluxData</p>
|
||||
<div className="auth-image"></div>
|
||||
</div>
|
||||
)
|
||||
|
||||
Login.propTypes = {
|
||||
auth: array.isRequired,
|
||||
}
|
||||
|
||||
const mapStateToProps = (state) => ({
|
||||
auth: state.auth,
|
||||
})
|
||||
|
||||
export default connect(mapStateToProps)(Login)
|
||||
|
|
|
@ -0,0 +1,66 @@
|
|||
import {
|
||||
getDashboards as getDashboardsAJAX,
|
||||
updateDashboard as updateDashboardAJAX,
|
||||
} from 'src/dashboards/apis'
|
||||
|
||||
export function loadDashboards(dashboards, dashboardID) {
|
||||
return {
|
||||
type: 'LOAD_DASHBOARDS',
|
||||
payload: {
|
||||
dashboards,
|
||||
dashboardID,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function setDashboard(dashboardID) {
|
||||
return {
|
||||
type: 'SET_DASHBOARD',
|
||||
payload: {
|
||||
dashboardID,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function setTimeRange(timeRange) {
|
||||
return {
|
||||
type: 'SET_DASHBOARD_TIME_RANGE',
|
||||
payload: {
|
||||
timeRange,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function setEditMode(isEditMode) {
|
||||
return {
|
||||
type: 'SET_EDIT_MODE',
|
||||
payload: {
|
||||
isEditMode,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function getDashboards(dashboardID) {
|
||||
return (dispatch) => {
|
||||
getDashboardsAJAX().then(({data: {dashboards}}) => {
|
||||
dispatch(loadDashboards(dashboards, dashboardID))
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export function putDashboard(dashboard) {
|
||||
return (dispatch) => {
|
||||
updateDashboardAJAX(dashboard).then(({data}) => {
|
||||
dispatch(updateDashboard(data))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export function updateDashboard(dashboard) {
|
||||
return {
|
||||
type: 'UPDATE_DASHBOARD',
|
||||
payload: {
|
||||
dashboard,
|
||||
},
|
||||
}
|
||||
}
|
|
@ -3,6 +3,14 @@ import AJAX from 'utils/ajax';
|
|||
export function getDashboards() {
|
||||
return AJAX({
|
||||
method: 'GET',
|
||||
url: `/chronograf/v1/dashboards`,
|
||||
resource: 'dashboards',
|
||||
});
|
||||
}
|
||||
|
||||
export function updateDashboard(dashboard) {
|
||||
return AJAX({
|
||||
method: 'PUT',
|
||||
url: dashboard.links.self,
|
||||
data: dashboard,
|
||||
});
|
||||
}
|
||||
|
|
|
@ -0,0 +1,72 @@
|
|||
import React, {PropTypes} from 'react'
|
||||
import classnames from 'classnames'
|
||||
|
||||
import LayoutRenderer from 'shared/components/LayoutRenderer'
|
||||
import Visualizations from 'src/dashboards/components/VisualizationSelector'
|
||||
|
||||
const Dashboard = ({
|
||||
dashboard,
|
||||
isEditMode,
|
||||
inPresentationMode,
|
||||
onPositionChange,
|
||||
source,
|
||||
timeRange,
|
||||
}) => {
|
||||
if (dashboard.id === 0) {
|
||||
return null
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={classnames({'page-contents': true, 'presentation-mode': inPresentationMode})}>
|
||||
<div className={classnames('container-fluid full-width dashboard', {'dashboard-edit': isEditMode})}>
|
||||
{isEditMode ? <Visualizations/> : null}
|
||||
{Dashboard.renderDashboard(dashboard, timeRange, source, onPositionChange)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
Dashboard.renderDashboard = (dashboard, timeRange, source, onPositionChange) => {
|
||||
const autoRefreshMs = 15000
|
||||
const cells = dashboard.cells.map((cell, i) => {
|
||||
i = `${i}`
|
||||
const dashboardCell = {...cell, i}
|
||||
dashboardCell.queries.forEach((q) => {
|
||||
q.text = q.query;
|
||||
q.database = source.telegraf;
|
||||
});
|
||||
return dashboardCell;
|
||||
})
|
||||
|
||||
return (
|
||||
<LayoutRenderer
|
||||
timeRange={timeRange}
|
||||
cells={cells}
|
||||
autoRefreshMs={autoRefreshMs}
|
||||
source={source.links.proxy}
|
||||
onPositionChange={onPositionChange}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
const {
|
||||
bool,
|
||||
func,
|
||||
shape,
|
||||
string,
|
||||
} = PropTypes
|
||||
|
||||
Dashboard.propTypes = {
|
||||
dashboard: shape({}).isRequired,
|
||||
isEditMode: bool,
|
||||
inPresentationMode: bool,
|
||||
onPositionChange: func,
|
||||
source: shape({
|
||||
links: shape({
|
||||
proxy: string,
|
||||
}).isRequired,
|
||||
}).isRequired,
|
||||
timeRange: shape({}).isRequired,
|
||||
}
|
||||
|
||||
export default Dashboard
|
|
@ -0,0 +1,77 @@
|
|||
import React, {PropTypes} from 'react'
|
||||
import ReactTooltip from 'react-tooltip'
|
||||
import {Link} from 'react-router';
|
||||
|
||||
import TimeRangeDropdown from 'shared/components/TimeRangeDropdown'
|
||||
|
||||
const DashboardHeader = ({
|
||||
children,
|
||||
buttonText,
|
||||
dashboard,
|
||||
headerText,
|
||||
timeRange,
|
||||
isHidden,
|
||||
handleChooseTimeRange,
|
||||
handleClickPresentationButton,
|
||||
sourceID,
|
||||
}) => isHidden ? null : (
|
||||
<div className="page-header full-width">
|
||||
<div className="page-header__container">
|
||||
<div className="page-header__left">
|
||||
{buttonText &&
|
||||
<div className="dropdown page-header-dropdown">
|
||||
<button className="dropdown-toggle" type="button" data-toggle="dropdown">
|
||||
<span className="button-text">{buttonText}</span>
|
||||
<span className="caret"></span>
|
||||
</button>
|
||||
<ul className="dropdown-menu" aria-labelledby="dropdownMenu1">
|
||||
{children}
|
||||
</ul>
|
||||
</div>
|
||||
}
|
||||
{headerText &&
|
||||
<h1>Kubernetes Dashboard</h1>
|
||||
}
|
||||
</div>
|
||||
<div className="page-header__right">
|
||||
{sourceID ?
|
||||
<Link className="btn btn-info btn-sm" to={`/sources/${sourceID}/dashboards/${dashboard && dashboard.id}/edit`} >
|
||||
<span className="icon pencil" />
|
||||
Edit
|
||||
</Link> : null
|
||||
}
|
||||
<div className="btn btn-info btn-sm" data-for="graph-tips-tooltip" data-tip="<p><code>Click + Drag</code> Zoom in (X or Y)</p><p><code>Shift + Click</code> Pan Graph Window</p><p><code>Double Click</code> Reset Graph Window</p>">
|
||||
<span className="icon heart"></span>
|
||||
Graph Tips
|
||||
</div>
|
||||
<ReactTooltip id="graph-tips-tooltip" effect="solid" html={true} offset={{top: 2}} place="bottom" class="influx-tooltip place-bottom" />
|
||||
<TimeRangeDropdown onChooseTimeRange={handleChooseTimeRange} selected={timeRange.inputValue} />
|
||||
<div className="btn btn-info btn-sm" onClick={handleClickPresentationButton}>
|
||||
<span className="icon keynote" style={{margin: 0}}></span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
const {
|
||||
shape,
|
||||
array,
|
||||
string,
|
||||
func,
|
||||
bool,
|
||||
} = PropTypes
|
||||
|
||||
DashboardHeader.propTypes = {
|
||||
sourceID: string,
|
||||
children: array,
|
||||
buttonText: string,
|
||||
dashboard: shape({}),
|
||||
headerText: string,
|
||||
timeRange: shape({}).isRequired,
|
||||
isHidden: bool.isRequired,
|
||||
handleChooseTimeRange: func.isRequired,
|
||||
handleClickPresentationButton: func.isRequired,
|
||||
}
|
||||
|
||||
export default DashboardHeader
|
|
@ -0,0 +1,36 @@
|
|||
import React, {PropTypes} from 'react'
|
||||
|
||||
const DashboardEditHeader = ({
|
||||
dashboard,
|
||||
onSave,
|
||||
}) => (
|
||||
<div className="page-header full-width">
|
||||
<div className="page-header__container">
|
||||
<div className="page-header__left">
|
||||
<input
|
||||
className="chronograf-header__editing"
|
||||
autoFocus={true}
|
||||
defaultValue={dashboard && dashboard.name}
|
||||
placeholder="Dashboard name"
|
||||
/>
|
||||
</div>
|
||||
<div className="page-header__right">
|
||||
<div className="btn btn-sm btn-success" onClick={onSave}>
|
||||
Save
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
const {
|
||||
shape,
|
||||
func,
|
||||
} = PropTypes
|
||||
|
||||
DashboardEditHeader.propTypes = {
|
||||
dashboard: shape({}),
|
||||
onSave: func.isRequired,
|
||||
}
|
||||
|
||||
export default DashboardEditHeader
|
|
@ -0,0 +1,24 @@
|
|||
import React from 'react'
|
||||
|
||||
const VisualizationSelector = () => (
|
||||
<div className="" style={{
|
||||
display: 'flex',
|
||||
width: '100%',
|
||||
background: '#676978',
|
||||
padding: '10px',
|
||||
borderRadius: '3px',
|
||||
marginBottom: '10px',
|
||||
}}>
|
||||
<div className="">
|
||||
VISUALIZATIONS
|
||||
<div className="btn btn-info" style={{margin: "0 5px 0 5px"}}>
|
||||
Line Graph
|
||||
</div>
|
||||
<div className="btn btn-info" style={{margin: "0 5px 0 5px"}}>
|
||||
SingleStat
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
export default VisualizationSelector
|
|
@ -0,0 +1,12 @@
|
|||
export const EMPTY_DASHBOARD = {
|
||||
id: 0,
|
||||
name: '',
|
||||
cells: [
|
||||
{
|
||||
x: 0,
|
||||
y: 0,
|
||||
queries: [],
|
||||
name: 'Loading...',
|
||||
},
|
||||
],
|
||||
}
|
|
@ -1,130 +1,169 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
import ReactTooltip from 'react-tooltip';
|
||||
import {Link} from 'react-router';
|
||||
import _ from 'lodash';
|
||||
import React, {PropTypes} from 'react'
|
||||
import {Link} from 'react-router'
|
||||
import {connect} from 'react-redux'
|
||||
import {bindActionCreators} from 'redux'
|
||||
|
||||
import LayoutRenderer from 'shared/components/LayoutRenderer';
|
||||
import TimeRangeDropdown from '../../shared/components/TimeRangeDropdown';
|
||||
import timeRanges from 'hson!../../shared/data/timeRanges.hson';
|
||||
import Header from 'src/dashboards/components/DashboardHeader'
|
||||
import EditHeader from 'src/dashboards/components/DashboardHeaderEdit'
|
||||
import Dashboard from 'src/dashboards/components/Dashboard'
|
||||
import timeRanges from 'hson!../../shared/data/timeRanges.hson'
|
||||
|
||||
import {getDashboards} from '../apis';
|
||||
import {getSource} from 'shared/apis';
|
||||
import * as dashboardActionCreators from 'src/dashboards/actions'
|
||||
|
||||
import {presentationButtonDispatcher} from 'shared/dispatchers'
|
||||
|
||||
const {
|
||||
arrayOf,
|
||||
bool,
|
||||
func,
|
||||
number,
|
||||
shape,
|
||||
string,
|
||||
} = PropTypes
|
||||
|
||||
const DashboardPage = React.createClass({
|
||||
propTypes: {
|
||||
params: PropTypes.shape({
|
||||
sourceID: PropTypes.string.isRequired,
|
||||
dashboardID: PropTypes.string.isRequired,
|
||||
source: PropTypes.shape({
|
||||
links: PropTypes.shape({
|
||||
proxy: PropTypes.string,
|
||||
self: PropTypes.string,
|
||||
}),
|
||||
}),
|
||||
params: shape({
|
||||
sourceID: string.isRequired,
|
||||
dashboardID: string.isRequired,
|
||||
}).isRequired,
|
||||
},
|
||||
|
||||
getInitialState() {
|
||||
const fifteenMinutesIndex = 1;
|
||||
|
||||
return {
|
||||
dashboards: [],
|
||||
timeRange: timeRanges[fifteenMinutesIndex],
|
||||
};
|
||||
location: shape({
|
||||
pathname: string.isRequired,
|
||||
}).isRequired,
|
||||
dashboardActions: shape({
|
||||
putDashboard: func.isRequired,
|
||||
getDashboards: func.isRequired,
|
||||
setDashboard: func.isRequired,
|
||||
setTimeRange: func.isRequired,
|
||||
setEditMode: func.isRequired,
|
||||
}).isRequired,
|
||||
dashboards: arrayOf(shape({
|
||||
id: number.isRequired,
|
||||
cells: arrayOf(shape({})).isRequired,
|
||||
})).isRequired,
|
||||
dashboard: shape({
|
||||
id: number.isRequired,
|
||||
cells: arrayOf(shape({})).isRequired,
|
||||
}).isRequired,
|
||||
timeRange: shape({}).isRequired,
|
||||
inPresentationMode: bool.isRequired,
|
||||
isEditMode: bool.isRequired,
|
||||
handleClickPresentationButton: func,
|
||||
},
|
||||
|
||||
componentDidMount() {
|
||||
getDashboards().then((resp) => {
|
||||
getSource(this.props.params.sourceID).then(({data: source}) => {
|
||||
this.setState({
|
||||
dashboards: resp.data.dashboards,
|
||||
source,
|
||||
});
|
||||
});
|
||||
});
|
||||
const {
|
||||
params: {dashboardID},
|
||||
dashboardActions: {getDashboards},
|
||||
} = this.props;
|
||||
|
||||
getDashboards(dashboardID)
|
||||
},
|
||||
|
||||
currentDashboard(dashboards, dashboardID) {
|
||||
return _.find(dashboards, (d) => d.id.toString() === dashboardID);
|
||||
},
|
||||
componentWillReceiveProps(nextProps) {
|
||||
const {location: {pathname}} = this.props
|
||||
const {
|
||||
location: {pathname: nextPathname},
|
||||
params: {dashboardID: nextID},
|
||||
dashboardActions: {setDashboard, setEditMode},
|
||||
} = nextProps
|
||||
|
||||
renderDashboard(dashboard) {
|
||||
const autoRefreshMs = 15000;
|
||||
const {timeRange} = this.state;
|
||||
const {source} = this.state;
|
||||
if (nextPathname.pathname === pathname) {
|
||||
return
|
||||
}
|
||||
|
||||
const cellWidth = 4;
|
||||
const cellHeight = 4;
|
||||
|
||||
const cells = dashboard.cells.map((cell, i) => {
|
||||
const dashboardCell = Object.assign(cell, {
|
||||
w: cellWidth,
|
||||
h: cellHeight,
|
||||
queries: cell.queries,
|
||||
i: i.toString(),
|
||||
});
|
||||
|
||||
dashboardCell.queries.forEach((q) => {
|
||||
q.text = q.query;
|
||||
q.database = source.telegraf;
|
||||
});
|
||||
return dashboardCell;
|
||||
});
|
||||
|
||||
return (
|
||||
<LayoutRenderer
|
||||
timeRange={timeRange}
|
||||
cells={cells}
|
||||
autoRefreshMs={autoRefreshMs}
|
||||
source={source.links.proxy}
|
||||
/>
|
||||
);
|
||||
setDashboard(nextID)
|
||||
setEditMode(nextPathname.includes('/edit'))
|
||||
},
|
||||
|
||||
handleChooseTimeRange({lower}) {
|
||||
const timeRange = timeRanges.find((range) => range.queryValue === lower);
|
||||
this.setState({timeRange});
|
||||
this.props.dashboardActions.setTimeRange(timeRange)
|
||||
},
|
||||
|
||||
handleUpdatePosition(cells) {
|
||||
this.props.dashboardActions.putDashboard({...this.props.dashboard, cells})
|
||||
},
|
||||
|
||||
render() {
|
||||
const {dashboards, timeRange} = this.state;
|
||||
const dashboard = this.currentDashboard(dashboards, this.props.params.dashboardID);
|
||||
const {
|
||||
dashboards,
|
||||
dashboard,
|
||||
params: {sourceID},
|
||||
inPresentationMode,
|
||||
isEditMode,
|
||||
handleClickPresentationButton,
|
||||
source,
|
||||
timeRange,
|
||||
} = this.props
|
||||
|
||||
return (
|
||||
<div className="page">
|
||||
<div className="page-header full-width">
|
||||
<div className="page-header__container">
|
||||
<div className="page-header__left">
|
||||
<div className="dropdown page-header-dropdown">
|
||||
<button className="dropdown-toggle" type="button" data-toggle="dropdown">
|
||||
<span className="button-text">{dashboard ? dashboard.name : ''}</span>
|
||||
<span className="caret"></span>
|
||||
</button>
|
||||
<ul className="dropdown-menu" aria-labelledby="dropdownMenu1">
|
||||
{(dashboards).map((d, i) => {
|
||||
return (
|
||||
<li key={i}>
|
||||
<Link to={`/sources/${this.props.params.sourceID}/dashboards/${d.id}`} className="role-option">
|
||||
{d.name}
|
||||
</Link>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div className="page-header__right">
|
||||
<div className="btn btn-info btn-sm" data-for="graph-tips-tooltip" data-tip="<p><code>Click + Drag</code> Zoom in (X or Y)</p><p><code>Shift + Click</code> Pan Graph Window</p><p><code>Double Click</code> Reset Graph Window</p>">
|
||||
<span className="icon heart"></span>
|
||||
Graph Tips
|
||||
</div>
|
||||
<ReactTooltip id="graph-tips-tooltip" effect="solid" html={true} offset={{top: 2}} place="bottom" class="influx-tooltip place-bottom" />
|
||||
<TimeRangeDropdown onChooseTimeRange={this.handleChooseTimeRange} selected={timeRange.inputValue} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="page-contents">
|
||||
<div className="container-fluid full-width">
|
||||
{ dashboard ? this.renderDashboard(dashboard) : '' }
|
||||
</div>
|
||||
</div>
|
||||
{
|
||||
isEditMode ?
|
||||
<EditHeader dashboard={dashboard} onSave={() => {}} /> :
|
||||
<Header
|
||||
buttonText={dashboard ? dashboard.name : ''}
|
||||
timeRange={timeRange}
|
||||
handleChooseTimeRange={this.handleChooseTimeRange}
|
||||
isHidden={inPresentationMode}
|
||||
handleClickPresentationButton={handleClickPresentationButton}
|
||||
dashboard={dashboard}
|
||||
sourceID={sourceID}
|
||||
>
|
||||
{(dashboards).map((d, i) => {
|
||||
return (
|
||||
<li key={i}>
|
||||
<Link to={`/sources/${sourceID}/dashboards/${d.id}`} className="role-option">
|
||||
{d.name}
|
||||
</Link>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</Header>
|
||||
}
|
||||
<Dashboard
|
||||
dashboard={dashboard}
|
||||
isEditMode={isEditMode}
|
||||
inPresentationMode={inPresentationMode}
|
||||
source={source}
|
||||
timeRange={timeRange}
|
||||
onPositionChange={this.handleUpdatePosition}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
export default DashboardPage;
|
||||
const mapStateToProps = (state) => {
|
||||
const {
|
||||
appUI,
|
||||
dashboardUI: {
|
||||
dashboards,
|
||||
dashboard,
|
||||
timeRange,
|
||||
isEditMode,
|
||||
},
|
||||
} = state
|
||||
|
||||
return {
|
||||
inPresentationMode: appUI.presentationMode,
|
||||
dashboards,
|
||||
dashboard,
|
||||
timeRange,
|
||||
isEditMode,
|
||||
}
|
||||
}
|
||||
|
||||
const mapDispatchToProps = (dispatch) => ({
|
||||
handleClickPresentationButton: presentationButtonDispatcher(dispatch),
|
||||
dashboardActions: bindActionCreators(dashboardActionCreators, dispatch),
|
||||
})
|
||||
|
||||
export default connect(mapStateToProps, mapDispatchToProps)(DashboardPage);
|
||||
|
|
|
@ -34,13 +34,14 @@ const DashboardsPage = React.createClass({
|
|||
},
|
||||
|
||||
render() {
|
||||
const dashboardLink = `/sources/${this.props.source.id}`;
|
||||
let tableHeader;
|
||||
if (this.state.waiting) {
|
||||
tableHeader = "Loading Dashboards...";
|
||||
} else if (this.state.dashboards.length === 0) {
|
||||
tableHeader = "No Dashboards";
|
||||
tableHeader = "1 Dashboard";
|
||||
} else {
|
||||
tableHeader = `${this.state.dashboards.length} Dashboards`;
|
||||
tableHeader = `${this.state.dashboards.length + 1} Dashboards`;
|
||||
}
|
||||
|
||||
return (
|
||||
|
@ -75,7 +76,7 @@ const DashboardsPage = React.createClass({
|
|||
return (
|
||||
<tr key={dashboard.id}>
|
||||
<td className="monotype">
|
||||
<Link to={`/sources/${this.props.source.id}/dashboards/${dashboard.id}`}>
|
||||
<Link to={`${dashboardLink}/dashboards/${dashboard.id}`}>
|
||||
{dashboard.name}
|
||||
</Link>
|
||||
</td>
|
||||
|
@ -83,6 +84,13 @@ const DashboardsPage = React.createClass({
|
|||
);
|
||||
})
|
||||
}
|
||||
<tr>
|
||||
<td className="monotype">
|
||||
<Link to={`${dashboardLink}/kubernetes`}>
|
||||
{'Kubernetes'}
|
||||
</Link>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
import _ from 'lodash';
|
||||
import {EMPTY_DASHBOARD} from 'src/dashboards/constants'
|
||||
import timeRanges from 'hson!../../shared/data/timeRanges.hson';
|
||||
|
||||
const initialState = {
|
||||
dashboards: [],
|
||||
dashboard: EMPTY_DASHBOARD,
|
||||
timeRange: timeRanges[1],
|
||||
isEditMode: false,
|
||||
};
|
||||
|
||||
export default function ui(state = initialState, action) {
|
||||
switch (action.type) {
|
||||
case 'LOAD_DASHBOARDS': {
|
||||
const {dashboards, dashboardID} = action.payload;
|
||||
const newState = {
|
||||
dashboards,
|
||||
dashboard: _.find(dashboards, (d) => d.id === +dashboardID),
|
||||
};
|
||||
|
||||
return {...state, ...newState};
|
||||
}
|
||||
|
||||
case 'SET_DASHBOARD': {
|
||||
const {dashboardID} = action.payload
|
||||
const newState = {
|
||||
dashboard: _.find(state.dashboards, (d) => d.id === +dashboardID),
|
||||
};
|
||||
|
||||
return {...state, ...newState}
|
||||
}
|
||||
|
||||
case 'SET_DASHBOARD_TIME_RANGE': {
|
||||
const {timeRange} = action.payload
|
||||
|
||||
return {...state, timeRange};
|
||||
}
|
||||
|
||||
case 'SET_EDIT_MODE': {
|
||||
const {isEditMode} = action.payload
|
||||
return {...state, isEditMode}
|
||||
}
|
||||
|
||||
case 'UPDATE_DASHBOARD': {
|
||||
const {dashboard} = action.payload
|
||||
const newState = {
|
||||
dashboard,
|
||||
dashboards: state.dashboards.map((d) => d.id === dashboard.id ? dashboard : d),
|
||||
}
|
||||
|
||||
return {...state, ...newState}
|
||||
}
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
|
@ -1,51 +1,20 @@
|
|||
import uuid from 'node-uuid';
|
||||
|
||||
export function createPanel() {
|
||||
return {
|
||||
type: 'CREATE_PANEL',
|
||||
payload: {
|
||||
panelID: uuid.v4(), // for the default Panel
|
||||
queryID: uuid.v4(), // for the default Query
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function renamePanel(panelId, name) {
|
||||
return {
|
||||
type: 'RENAME_PANEL',
|
||||
payload: {
|
||||
panelId,
|
||||
name,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function deletePanel(panelId) {
|
||||
return {
|
||||
type: 'DELETE_PANEL',
|
||||
payload: {
|
||||
panelId,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function addQuery(panelId, options) {
|
||||
export function addQuery(options = {}) {
|
||||
return {
|
||||
type: 'ADD_QUERY',
|
||||
payload: {
|
||||
panelId,
|
||||
queryId: uuid.v4(),
|
||||
queryID: uuid.v4(),
|
||||
options,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function deleteQuery(panelId, queryId) {
|
||||
export function deleteQuery(queryID) {
|
||||
return {
|
||||
type: 'DELETE_QUERY',
|
||||
payload: {
|
||||
queryId,
|
||||
panelId,
|
||||
queryID,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
@ -159,12 +128,3 @@ export function updateRawQuery(queryID, text) {
|
|||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function activatePanel(panelID) {
|
||||
return {
|
||||
type: 'ACTIVATE_PANEL',
|
||||
payload: {
|
||||
panelID,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
|
|
@ -60,18 +60,21 @@ const DatabaseList = React.createClass({
|
|||
const {onChooseNamespace, query} = this.props;
|
||||
|
||||
return (
|
||||
<ul className="qeditor--list">
|
||||
{this.state.namespaces.map((namespace) => {
|
||||
const {database, retentionPolicy} = namespace;
|
||||
const isActive = database === query.database && retentionPolicy === query.retentionPolicy;
|
||||
<div className="query-builder--column">
|
||||
<div className="query-builder--column-heading">Databases</div>
|
||||
<ul className="qeditor--list">
|
||||
{this.state.namespaces.map((namespace) => {
|
||||
const {database, retentionPolicy} = namespace;
|
||||
const isActive = database === query.database && retentionPolicy === query.retentionPolicy;
|
||||
|
||||
return (
|
||||
<li className={classNames('qeditor--list-item qeditor--list-radio', {active: isActive})} key={`${database}..${retentionPolicy}`} onClick={_.wrap(namespace, onChooseNamespace)}>
|
||||
{database}.{retentionPolicy}
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
return (
|
||||
<li className={classNames('qeditor--list-item qeditor--list-radio', {active: isActive})} key={`${database}..${retentionPolicy}`} onClick={_.wrap(namespace, onChooseNamespace)}>
|
||||
{database}.{retentionPolicy}
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
});
|
||||
|
|
|
@ -41,25 +41,26 @@ const FieldList = React.createClass({
|
|||
},
|
||||
|
||||
componentDidMount() {
|
||||
const {database, measurement, retentionPolicy} = this.props.query;
|
||||
const {database, measurement} = this.props.query;
|
||||
if (!database || !measurement) {
|
||||
return;
|
||||
}
|
||||
|
||||
const {source} = this.context;
|
||||
const proxySource = source.links.proxy;
|
||||
showFieldKeys(proxySource, database, measurement, retentionPolicy).then((resp) => {
|
||||
const {errors, fieldSets} = showFieldKeysParser(resp.data);
|
||||
if (errors.length) {
|
||||
// TODO: do something
|
||||
}
|
||||
this._getFields();
|
||||
},
|
||||
|
||||
this.setState({
|
||||
fields: fieldSets[measurement].map((f) => {
|
||||
return {field: f, funcs: []};
|
||||
}),
|
||||
});
|
||||
});
|
||||
componentDidUpdate(prevProps) {
|
||||
const {database, measurement, retentionPolicy} = this.props.query;
|
||||
const {database: prevDB, measurement: prevMeas, retentionPolicy: prevRP} = prevProps.query;
|
||||
if (!database || !measurement) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (database === prevDB && measurement === prevMeas && retentionPolicy === prevRP) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._getFields();
|
||||
},
|
||||
|
||||
handleGroupByTime(groupBy) {
|
||||
|
@ -72,7 +73,8 @@ const FieldList = React.createClass({
|
|||
const hasGroupByTime = query.groupBy.time;
|
||||
|
||||
return (
|
||||
<div>
|
||||
<div className="query-builder--column">
|
||||
<div className="query-builder--column-heading">Fields</div>
|
||||
{
|
||||
hasAggregates ?
|
||||
<div className="qeditor--list-header">
|
||||
|
@ -94,23 +96,43 @@ const FieldList = React.createClass({
|
|||
return <div className="qeditor--empty">No <strong>Measurement</strong> selected</div>;
|
||||
}
|
||||
|
||||
return (<ul className="qeditor--list">
|
||||
{this.state.fields.map((fieldFunc) => {
|
||||
const selectedField = this.props.query.fields.find((f) => f.field === fieldFunc.field);
|
||||
return (
|
||||
<FieldListItem
|
||||
key={fieldFunc.field}
|
||||
onToggleField={this.props.onToggleField}
|
||||
onApplyFuncsToField={this.props.applyFuncsToField}
|
||||
isSelected={!!selectedField}
|
||||
fieldFunc={selectedField || fieldFunc}
|
||||
isKapacitorRule={this.props.isKapacitorRule}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
return (
|
||||
<ul className="qeditor--list">
|
||||
{this.state.fields.map((fieldFunc) => {
|
||||
const selectedField = this.props.query.fields.find((f) => f.field === fieldFunc.field);
|
||||
return (
|
||||
<FieldListItem
|
||||
key={fieldFunc.field}
|
||||
onToggleField={this.props.onToggleField}
|
||||
onApplyFuncsToField={this.props.applyFuncsToField}
|
||||
isSelected={!!selectedField}
|
||||
fieldFunc={selectedField || fieldFunc}
|
||||
isKapacitorRule={this.props.isKapacitorRule}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
);
|
||||
},
|
||||
|
||||
_getFields() {
|
||||
const {database, measurement, retentionPolicy} = this.props.query;
|
||||
const {source} = this.context;
|
||||
const proxySource = source.links.proxy;
|
||||
|
||||
showFieldKeys(proxySource, database, measurement, retentionPolicy).then((resp) => {
|
||||
const {errors, fieldSets} = showFieldKeysParser(resp.data);
|
||||
if (errors.length) {
|
||||
// TODO: do something
|
||||
}
|
||||
|
||||
this.setState({
|
||||
fields: fieldSets[measurement].map((f) => {
|
||||
return {field: f, funcs: []};
|
||||
}),
|
||||
});
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export default FieldList;
|
||||
|
|
|
@ -34,19 +34,21 @@ const MeasurementList = React.createClass({
|
|||
return;
|
||||
}
|
||||
|
||||
const {source} = this.context;
|
||||
const proxy = source.links.proxy;
|
||||
showMeasurements(proxy, this.props.query.database).then((resp) => {
|
||||
const {errors, measurementSets} = showMeasurementsParser(resp.data);
|
||||
if (errors.length) {
|
||||
// TODO: display errors in the UI.
|
||||
return console.error('InfluxDB returned error(s): ', errors); // eslint-disable-line no-console
|
||||
}
|
||||
this._getMeasurements();
|
||||
},
|
||||
|
||||
this.setState({
|
||||
measurements: measurementSets[0].measurements,
|
||||
});
|
||||
});
|
||||
componentDidUpdate(prevProps) {
|
||||
const {query} = this.props;
|
||||
|
||||
if (!query.database) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (prevProps.query.database === query.database) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._getMeasurements();
|
||||
},
|
||||
|
||||
handleFilterText(e) {
|
||||
|
@ -69,9 +71,10 @@ const MeasurementList = React.createClass({
|
|||
|
||||
render() {
|
||||
return (
|
||||
<div>
|
||||
<div className="query-builder--column">
|
||||
<div className="query-builder--column-heading">Measurements</div>
|
||||
{this.props.query.database ? <div className="qeditor--list-header">
|
||||
<input className="qeditor--filter" ref="filterText" placeholder="Filter Measurements..." type="text" value={this.state.filterText} onChange={this.handleFilterText} onKeyUp={this.handleEscape} />
|
||||
<input className="qeditor--filter" ref="filterText" placeholder="Filter" type="text" value={this.state.filterText} onChange={this.handleFilterText} onKeyUp={this.handleEscape} />
|
||||
<span className="icon search"></span>
|
||||
</div> : null }
|
||||
{this.renderList()}
|
||||
|
@ -97,6 +100,23 @@ const MeasurementList = React.createClass({
|
|||
</ul>
|
||||
);
|
||||
},
|
||||
|
||||
_getMeasurements() {
|
||||
const {source} = this.context;
|
||||
const proxy = source.links.proxy;
|
||||
showMeasurements(proxy, this.props.query.database).then((resp) => {
|
||||
const {errors, measurementSets} = showMeasurementsParser(resp.data);
|
||||
if (errors.length) {
|
||||
// TODO: display errors in the UI.
|
||||
return console.error('InfluxDB returned error(s): ', errors); // eslint-disable-line no-console
|
||||
}
|
||||
|
||||
this.setState({
|
||||
measurements: measurementSets[0].measurements,
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
});
|
||||
|
||||
export default MeasurementList;
|
||||
|
|
|
@ -2,7 +2,14 @@ import React, {PropTypes} from 'react';
|
|||
import Table from './Table';
|
||||
import classNames from 'classnames';
|
||||
|
||||
const {bool, string, shape, arrayOf, func} = PropTypes;
|
||||
const {
|
||||
arrayOf,
|
||||
bool,
|
||||
func,
|
||||
number,
|
||||
shape,
|
||||
string,
|
||||
} = PropTypes;
|
||||
|
||||
const MultiTable = React.createClass({
|
||||
propTypes: {
|
||||
|
@ -10,6 +17,7 @@ const MultiTable = React.createClass({
|
|||
host: arrayOf(string.isRequired).isRequired,
|
||||
text: string.isRequired,
|
||||
})),
|
||||
height: number,
|
||||
},
|
||||
|
||||
getInitialState() {
|
||||
|
@ -40,13 +48,14 @@ const MultiTable = React.createClass({
|
|||
},
|
||||
|
||||
renderTable() {
|
||||
const {height} = this.props;
|
||||
const query = this.getActiveQuery();
|
||||
const noQuery = !query || !query.text;
|
||||
if (noQuery) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return <Table key={query.text} query={query} />;
|
||||
return <Table key={query.text} query={query} height={height} />;
|
||||
},
|
||||
|
||||
renderTabs() {
|
||||
|
|
|
@ -1,183 +0,0 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
import classNames from 'classnames';
|
||||
import QueryEditor from './QueryEditor';
|
||||
import QueryTabItem from './QueryTabItem';
|
||||
import RenamePanelModal from './RenamePanelModal';
|
||||
import SimpleDropdown from 'src/shared/components/SimpleDropdown';
|
||||
|
||||
const Panel = React.createClass({
|
||||
propTypes: {
|
||||
panel: PropTypes.shape({
|
||||
id: PropTypes.string.isRequired,
|
||||
}).isRequired,
|
||||
queries: PropTypes.arrayOf(PropTypes.shape({})).isRequired,
|
||||
timeRange: PropTypes.shape({
|
||||
upper: PropTypes.string,
|
||||
lower: PropTypes.string,
|
||||
}).isRequired,
|
||||
isExpanded: PropTypes.bool.isRequired,
|
||||
onTogglePanel: PropTypes.func.isRequired,
|
||||
actions: PropTypes.shape({
|
||||
chooseNamespace: PropTypes.func.isRequired,
|
||||
chooseMeasurement: PropTypes.func.isRequired,
|
||||
chooseTag: PropTypes.func.isRequired,
|
||||
groupByTag: PropTypes.func.isRequired,
|
||||
addQuery: PropTypes.func.isRequired,
|
||||
deleteQuery: PropTypes.func.isRequired,
|
||||
toggleField: PropTypes.func.isRequired,
|
||||
groupByTime: PropTypes.func.isRequired,
|
||||
toggleTagAcceptance: PropTypes.func.isRequired,
|
||||
applyFuncsToField: PropTypes.func.isRequired,
|
||||
deletePanel: PropTypes.func.isRequired,
|
||||
renamePanel: PropTypes.func.isRequired,
|
||||
}).isRequired,
|
||||
setActiveQuery: PropTypes.func.isRequired,
|
||||
activeQueryID: PropTypes.string,
|
||||
},
|
||||
|
||||
handleSetActiveQuery(query) {
|
||||
this.props.setActiveQuery(query.id);
|
||||
},
|
||||
|
||||
handleAddQuery() {
|
||||
this.props.actions.addQuery();
|
||||
},
|
||||
|
||||
handleAddRawQuery() {
|
||||
this.props.actions.addQuery({rawText: `SELECT "fields" from "db"."rp"."measurement"`});
|
||||
},
|
||||
|
||||
handleDeleteQuery(query) {
|
||||
this.props.actions.deleteQuery(query.id);
|
||||
},
|
||||
|
||||
handleSelectPanel() {
|
||||
this.props.onTogglePanel(this.props.panel);
|
||||
},
|
||||
|
||||
handleDeletePanel(e) {
|
||||
e.stopPropagation();
|
||||
this.props.actions.deletePanel(this.props.panel.id);
|
||||
},
|
||||
|
||||
getActiveQuery() {
|
||||
const {queries, activeQueryID} = this.props;
|
||||
const activeQuery = queries.find((query) => query.id === activeQueryID);
|
||||
const defaultQuery = queries[0];
|
||||
|
||||
return activeQuery || defaultQuery;
|
||||
},
|
||||
|
||||
openRenamePanelModal(e) {
|
||||
e.stopPropagation();
|
||||
$(`#renamePanelModal-${this.props.panel.id}`).modal('show'); // eslint-disable-line no-undef
|
||||
},
|
||||
|
||||
handleRename(newName) {
|
||||
this.props.actions.renamePanel(this.props.panel.id, newName);
|
||||
},
|
||||
|
||||
|
||||
render() {
|
||||
const {panel, isExpanded} = this.props;
|
||||
|
||||
return (
|
||||
<div className={classNames('panel', {active: isExpanded})}>
|
||||
<div className="panel--header" onClick={this.handleSelectPanel}>
|
||||
<div className="panel--name">
|
||||
<span className="icon caret-right"></span>
|
||||
{panel.name || "Graph"}
|
||||
</div>
|
||||
<div className="panel--actions">
|
||||
{/* <div title="Export Queries to Dashboard" className="panel--action"><span className="icon export"></span></div> */}
|
||||
<div title="Rename Graph" className="panel--action" onClick={this.openRenamePanelModal}><span className="icon pencil"></span></div>
|
||||
<div title="Delete Graph" className="panel--action" onClick={this.handleDeletePanel}><span className="icon trash"></span></div>
|
||||
</div>
|
||||
</div>
|
||||
{this.renderQueryTabList()}
|
||||
{this.renderQueryEditor()}
|
||||
<RenamePanelModal panel={panel} onConfirm={this.handleRename} />
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
||||
renderQueryEditor() {
|
||||
if (!this.props.isExpanded) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const {timeRange, actions} = this.props;
|
||||
const query = this.getActiveQuery();
|
||||
|
||||
if (!query) {
|
||||
return (
|
||||
<div className="qeditor--empty">
|
||||
<h5>This Graph has no Queries</h5>
|
||||
<br/>
|
||||
<div className="btn btn-primary" role="button" onClick={this.handleAddQuery}>Add a Query</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<QueryEditor
|
||||
timeRange={timeRange}
|
||||
query={this.getActiveQuery()}
|
||||
actions={actions}
|
||||
onAddQuery={this.handleAddQuery}
|
||||
/>
|
||||
);
|
||||
},
|
||||
|
||||
renderQueryTabList() {
|
||||
const {isExpanded, queries} = this.props;
|
||||
if (!isExpanded) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<div className="panel--tabs">
|
||||
{queries.map((q) => {
|
||||
let queryTabText;
|
||||
if (q.rawText) {
|
||||
queryTabText = 'InfluxQL';
|
||||
} else {
|
||||
queryTabText = (q.measurement && q.fields.length !== 0) ? `${q.measurement}.${q.fields[0].field}` : 'Query';
|
||||
}
|
||||
return (
|
||||
<QueryTabItem
|
||||
isActive={this.getActiveQuery().id === q.id}
|
||||
key={q.id}
|
||||
query={q}
|
||||
onSelect={this.handleSetActiveQuery}
|
||||
onDelete={this.handleDeleteQuery}
|
||||
queryTabText={queryTabText}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
|
||||
{this.renderAddQuery()}
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
||||
onChoose(item) {
|
||||
switch (item.text) {
|
||||
case 'Query Builder':
|
||||
this.handleAddQuery();
|
||||
break;
|
||||
case 'InfluxQL':
|
||||
this.handleAddRawQuery();
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
||||
renderAddQuery() {
|
||||
return (
|
||||
<SimpleDropdown onChoose={this.onChoose} items={[{text: 'Query Builder'}, {text: 'InfluxQL'}]} className="panel--tab-new">
|
||||
<span className="icon plus"></span>
|
||||
</SimpleDropdown>
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
export default Panel;
|
|
@ -1,63 +0,0 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
import {connect} from 'react-redux';
|
||||
import {bindActionCreators} from 'redux';
|
||||
import PanelList from './PanelList';
|
||||
import * as viewActions from '../actions/view';
|
||||
|
||||
const {string, func} = PropTypes;
|
||||
const PanelBuilder = React.createClass({
|
||||
propTypes: {
|
||||
width: string,
|
||||
actions: PropTypes.shape({
|
||||
activatePanel: func.isRequired,
|
||||
createPanel: func.isRequired,
|
||||
deleteQuery: func.isRequired,
|
||||
addQuery: func.isRequired,
|
||||
editRawText: func.isRequired,
|
||||
chooseNamespace: func.isRequired,
|
||||
chooseMeasurement: func.isRequired,
|
||||
toggleField: func.isRequired,
|
||||
groupByTime: func.isRequired,
|
||||
applyFuncsToField: func.isRequired,
|
||||
chooseTag: func.isRequired,
|
||||
groupByTag: func.isRequired,
|
||||
toggleTagAcceptance: func.isRequired,
|
||||
deletePanel: func.isRequired,
|
||||
}).isRequired,
|
||||
setActiveQuery: func.isRequired,
|
||||
activePanelID: string,
|
||||
activeQueryID: string,
|
||||
},
|
||||
|
||||
handleCreateExplorer() {
|
||||
this.props.actions.createPanel();
|
||||
},
|
||||
|
||||
render() {
|
||||
const {width, actions, setActiveQuery, activePanelID, activeQueryID} = this.props;
|
||||
|
||||
return (
|
||||
<div className="panel-builder" style={{width}}>
|
||||
<div className="btn btn-block btn-primary" onClick={this.handleCreateExplorer}><span className="icon graphline"></span> Create Graph</div>
|
||||
<PanelList
|
||||
actions={actions}
|
||||
setActiveQuery={setActiveQuery}
|
||||
activePanelID={activePanelID}
|
||||
activeQueryID={activeQueryID}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
function mapStateToProps() {
|
||||
return {};
|
||||
}
|
||||
|
||||
function mapDispatchToProps(dispatch) {
|
||||
return {
|
||||
actions: bindActionCreators(viewActions, dispatch),
|
||||
};
|
||||
}
|
||||
|
||||
export default connect(mapStateToProps, mapDispatchToProps)(PanelBuilder);
|
|
@ -1,76 +0,0 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
import {connect} from 'react-redux';
|
||||
import _ from 'lodash';
|
||||
|
||||
import Panel from './Panel';
|
||||
|
||||
const {func, string, shape} = PropTypes;
|
||||
const PanelList = React.createClass({
|
||||
propTypes: {
|
||||
timeRange: shape({
|
||||
upper: string,
|
||||
lower: string,
|
||||
}).isRequired,
|
||||
panels: shape({}).isRequired,
|
||||
queryConfigs: PropTypes.shape({}),
|
||||
actions: shape({
|
||||
activatePanel: func.isRequired,
|
||||
deleteQuery: func.isRequired,
|
||||
addQuery: func.isRequired,
|
||||
}).isRequired,
|
||||
setActiveQuery: func.isRequired,
|
||||
activePanelID: string,
|
||||
activeQueryID: string,
|
||||
},
|
||||
|
||||
handleTogglePanel(panel) {
|
||||
const panelID = panel.id === this.props.activePanelID ? null : panel.id;
|
||||
this.props.actions.activatePanel(panelID);
|
||||
|
||||
// Reset the activeQueryID when toggling Exporations
|
||||
this.props.setActiveQuery(null);
|
||||
},
|
||||
|
||||
render() {
|
||||
const {actions, panels, timeRange, queryConfigs, setActiveQuery, activeQueryID, activePanelID} = this.props;
|
||||
|
||||
return (
|
||||
<div>
|
||||
{Object.keys(panels).map((panelID) => {
|
||||
const panel = panels[panelID];
|
||||
const queries = panel.queryIds.map((configId) => queryConfigs[configId]);
|
||||
const deleteQueryFromPanel = _.partial(actions.deleteQuery, panelID);
|
||||
const addQueryToPanel = _.partial(actions.addQuery, panelID);
|
||||
const allActions = Object.assign({}, actions, {
|
||||
addQuery: addQueryToPanel,
|
||||
deleteQuery: deleteQueryFromPanel,
|
||||
});
|
||||
|
||||
return (
|
||||
<Panel
|
||||
key={panelID}
|
||||
panel={panel}
|
||||
queries={queries}
|
||||
timeRange={timeRange}
|
||||
onTogglePanel={this.handleTogglePanel}
|
||||
setActiveQuery={setActiveQuery}
|
||||
isExpanded={panelID === activePanelID}
|
||||
actions={allActions}
|
||||
activeQueryID={activeQueryID}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
function mapStateToProps(state) {
|
||||
return {
|
||||
timeRange: state.timeRange,
|
||||
panels: state.panels,
|
||||
queryConfigs: state.queryConfigs,
|
||||
};
|
||||
}
|
||||
|
||||
export default connect(mapStateToProps)(PanelList);
|
|
@ -0,0 +1,160 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
import {bindActionCreators} from 'redux';
|
||||
import {connect} from 'react-redux';
|
||||
|
||||
import QueryEditor from './QueryEditor';
|
||||
import QueryTabItem from './QueryTabItem';
|
||||
import SimpleDropdown from 'src/shared/components/SimpleDropdown';
|
||||
|
||||
import * as viewActions from '../actions/view';
|
||||
const {
|
||||
arrayOf,
|
||||
func,
|
||||
shape,
|
||||
string,
|
||||
} = PropTypes;
|
||||
|
||||
const QueryBuilder = React.createClass({
|
||||
propTypes: {
|
||||
queries: arrayOf(shape({})).isRequired,
|
||||
timeRange: shape({
|
||||
upper: string,
|
||||
lower: string,
|
||||
}).isRequired,
|
||||
actions: shape({
|
||||
chooseNamespace: func.isRequired,
|
||||
chooseMeasurement: func.isRequired,
|
||||
chooseTag: func.isRequired,
|
||||
groupByTag: func.isRequired,
|
||||
addQuery: func.isRequired,
|
||||
deleteQuery: func.isRequired,
|
||||
toggleField: func.isRequired,
|
||||
groupByTime: func.isRequired,
|
||||
toggleTagAcceptance: func.isRequired,
|
||||
applyFuncsToField: func.isRequired,
|
||||
}).isRequired,
|
||||
height: string,
|
||||
top: string,
|
||||
setActiveQuery: func.isRequired,
|
||||
activeQueryID: string,
|
||||
},
|
||||
|
||||
handleSetActiveQuery(query) {
|
||||
this.props.setActiveQuery(query.id);
|
||||
},
|
||||
|
||||
handleAddQuery() {
|
||||
this.props.actions.addQuery();
|
||||
},
|
||||
|
||||
handleAddRawQuery() {
|
||||
this.props.actions.addQuery({rawText: `SELECT "fields" from "db"."rp"."measurement"`});
|
||||
},
|
||||
|
||||
handleDeleteQuery(query) {
|
||||
this.props.actions.deleteQuery(query.id);
|
||||
},
|
||||
|
||||
getActiveQuery() {
|
||||
const {queries, activeQueryID} = this.props;
|
||||
const activeQuery = queries.find((query) => query.id === activeQueryID);
|
||||
const defaultQuery = queries[0];
|
||||
|
||||
return activeQuery || defaultQuery;
|
||||
},
|
||||
|
||||
render() {
|
||||
const {height, top} = this.props;
|
||||
return (
|
||||
<div className="query-builder" style={{height, top}}>
|
||||
{this.renderQueryTabList()}
|
||||
{this.renderQueryEditor()}
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
||||
renderQueryEditor() {
|
||||
const {timeRange, actions} = this.props;
|
||||
const query = this.getActiveQuery();
|
||||
|
||||
if (!query) {
|
||||
return (
|
||||
<div className="qeditor--empty">
|
||||
<h5>This Graph has no Queries</h5>
|
||||
<br/>
|
||||
<div className="btn btn-primary" role="button" onClick={this.handleAddQuery}>Add a Query</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<QueryEditor
|
||||
timeRange={timeRange}
|
||||
query={this.getActiveQuery()}
|
||||
actions={actions}
|
||||
onAddQuery={this.handleAddQuery}
|
||||
/>
|
||||
);
|
||||
},
|
||||
|
||||
renderQueryTabList() {
|
||||
const {queries} = this.props;
|
||||
return (
|
||||
<div className="query-builder--tabs">
|
||||
<div className="query-builder--tabs-heading">
|
||||
<h1>Queries</h1>
|
||||
{this.renderAddQuery()}
|
||||
</div>
|
||||
{queries.map((q, i) => {
|
||||
let queryTabText;
|
||||
if (q.rawText) {
|
||||
queryTabText = 'InfluxQL';
|
||||
} else {
|
||||
queryTabText = (q.measurement && q.fields.length !== 0) ? `${q.measurement}.${q.fields[0].field}` : 'Query';
|
||||
}
|
||||
return (
|
||||
<QueryTabItem
|
||||
isActive={this.getActiveQuery().id === q.id}
|
||||
key={q.id + i}
|
||||
query={q}
|
||||
onSelect={this.handleSetActiveQuery}
|
||||
onDelete={this.handleDeleteQuery}
|
||||
queryTabText={queryTabText}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
||||
onChoose(item) {
|
||||
switch (item.text) {
|
||||
case 'Query Builder':
|
||||
this.handleAddQuery();
|
||||
break;
|
||||
case 'InfluxQL':
|
||||
this.handleAddRawQuery();
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
||||
renderAddQuery() {
|
||||
return (
|
||||
<SimpleDropdown onChoose={this.onChoose} items={[{text: 'Query Builder'}, {text: 'InfluxQL'}]} className="panel--tab-new">
|
||||
<span className="icon plus"></span>
|
||||
</SimpleDropdown>
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
function mapStateToProps() {
|
||||
return {};
|
||||
}
|
||||
|
||||
function mapDispatchToProps(dispatch) {
|
||||
return {
|
||||
actions: bindActionCreators(viewActions, dispatch),
|
||||
};
|
||||
}
|
||||
|
||||
export default connect(mapStateToProps, mapDispatchToProps)(QueryBuilder);
|
|
@ -1,6 +1,4 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
import classNames from 'classnames';
|
||||
import _ from 'lodash';
|
||||
import selectStatement from '../utils/influxql/select';
|
||||
|
||||
import DatabaseList from './DatabaseList';
|
||||
|
@ -9,12 +7,11 @@ import FieldList from './FieldList';
|
|||
import TagList from './TagList';
|
||||
import RawQueryEditor from './RawQueryEditor';
|
||||
|
||||
const DB_TAB = 'databases';
|
||||
const MEASUREMENTS_TAB = 'measurments';
|
||||
const FIELDS_TAB = 'fields';
|
||||
const TAGS_TAB = 'tags';
|
||||
|
||||
const {string, shape, func} = PropTypes;
|
||||
const {
|
||||
string,
|
||||
shape,
|
||||
func,
|
||||
} = PropTypes;
|
||||
const QueryEditor = React.createClass({
|
||||
propTypes: {
|
||||
query: shape({
|
||||
|
@ -38,29 +35,17 @@ const QueryEditor = React.createClass({
|
|||
|
||||
getInitialState() {
|
||||
return {
|
||||
activeTab: DB_TAB,
|
||||
database: null,
|
||||
measurement: null,
|
||||
};
|
||||
},
|
||||
|
||||
componentWillReceiveProps(nextProps) {
|
||||
const changingQueries = this.props.query.id !== nextProps.query.id;
|
||||
if (changingQueries) {
|
||||
this.setState({activeTab: DB_TAB});
|
||||
}
|
||||
},
|
||||
|
||||
handleChooseNamespace(namespace) {
|
||||
this.props.actions.chooseNamespace(this.props.query.id, namespace);
|
||||
|
||||
this.setState({activeTab: MEASUREMENTS_TAB});
|
||||
},
|
||||
|
||||
handleChooseMeasurement(measurement) {
|
||||
this.props.actions.chooseMeasurement(this.props.query.id, measurement);
|
||||
|
||||
this.setState({activeTab: FIELDS_TAB});
|
||||
},
|
||||
|
||||
handleToggleField(field) {
|
||||
|
@ -91,15 +76,13 @@ const QueryEditor = React.createClass({
|
|||
this.props.actions.editRawText(this.props.query.id, text);
|
||||
},
|
||||
|
||||
handleClickTab(tab) {
|
||||
this.setState({activeTab: tab});
|
||||
},
|
||||
|
||||
render() {
|
||||
return (
|
||||
<div className="panel--tab-contents">
|
||||
{this.renderQuery()}
|
||||
{this.renderLists()}
|
||||
<div className="query-builder--tab-contents">
|
||||
<div>
|
||||
{this.renderQuery()}
|
||||
{this.renderLists()}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
@ -110,7 +93,7 @@ const QueryEditor = React.createClass({
|
|||
|
||||
if (!query.rawText) {
|
||||
return (
|
||||
<div className="qeditor--query-preview">
|
||||
<div className="query-builder--query-preview">
|
||||
<pre><code>{statement}</code></pre>
|
||||
</div>
|
||||
);
|
||||
|
@ -120,60 +103,32 @@ const QueryEditor = React.createClass({
|
|||
},
|
||||
|
||||
renderLists() {
|
||||
const {activeTab} = this.state;
|
||||
return (
|
||||
<div>
|
||||
<div className="qeditor--tabs">
|
||||
<div className="qeditor--tabs-heading">Schema Explorer</div>
|
||||
<div onClick={_.wrap(DB_TAB, this.handleClickTab)} className={classNames("qeditor--tab", {active: activeTab === DB_TAB})}>Databases</div>
|
||||
<div onClick={_.wrap(MEASUREMENTS_TAB, this.handleClickTab)} className={classNames("qeditor--tab", {active: activeTab === MEASUREMENTS_TAB})}>Measurements</div>
|
||||
<div onClick={_.wrap(FIELDS_TAB, this.handleClickTab)} className={classNames("qeditor--tab", {active: activeTab === FIELDS_TAB})}>Fields</div>
|
||||
<div onClick={_.wrap(TAGS_TAB, this.handleClickTab)} className={classNames("qeditor--tab", {active: activeTab === TAGS_TAB})}>Tags</div>
|
||||
</div>
|
||||
{this.renderList()}
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
||||
renderList() {
|
||||
const {query} = this.props;
|
||||
|
||||
switch (this.state.activeTab) {
|
||||
case DB_TAB:
|
||||
return (
|
||||
<DatabaseList
|
||||
query={query}
|
||||
onChooseNamespace={this.handleChooseNamespace}
|
||||
/>
|
||||
);
|
||||
case MEASUREMENTS_TAB:
|
||||
return (
|
||||
<MeasurementList
|
||||
query={query}
|
||||
onChooseMeasurement={this.handleChooseMeasurement}
|
||||
/>
|
||||
);
|
||||
case FIELDS_TAB:
|
||||
return (
|
||||
<FieldList
|
||||
query={query}
|
||||
onToggleField={this.handleToggleField}
|
||||
onGroupByTime={this.handleGroupByTime}
|
||||
applyFuncsToField={this.handleApplyFuncsToField}
|
||||
/>
|
||||
);
|
||||
case TAGS_TAB:
|
||||
return (
|
||||
<TagList
|
||||
query={query}
|
||||
onChooseTag={this.handleChooseTag}
|
||||
onGroupByTag={this.handleGroupByTag}
|
||||
onToggleTagAcceptance={this.handleToggleTagAcceptance}
|
||||
/>
|
||||
);
|
||||
default:
|
||||
return <ul className="qeditor--list"></ul>;
|
||||
}
|
||||
return (
|
||||
<div className="query-builder--columns">
|
||||
<DatabaseList
|
||||
query={query}
|
||||
onChooseNamespace={this.handleChooseNamespace}
|
||||
/>
|
||||
<MeasurementList
|
||||
query={query}
|
||||
onChooseMeasurement={this.handleChooseMeasurement}
|
||||
/>
|
||||
<FieldList
|
||||
query={query}
|
||||
onToggleField={this.handleToggleField}
|
||||
onGroupByTime={this.handleGroupByTime}
|
||||
applyFuncsToField={this.handleApplyFuncsToField}
|
||||
/>
|
||||
<TagList
|
||||
query={query}
|
||||
onChooseTag={this.handleChooseTag}
|
||||
onGroupByTag={this.handleGroupByTag}
|
||||
onToggleTagAcceptance={this.handleToggleTagAcceptance}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -23,9 +23,9 @@ const QueryTabItem = React.createClass({
|
|||
|
||||
render() {
|
||||
return (
|
||||
<div className={classNames('panel--tab', {active: this.props.isActive})} onClick={this.handleSelect}>
|
||||
<span className="panel--tab-label">{this.props.queryTabText}</span>
|
||||
<span className="panel--tab-delete" onClick={this.handleDelete}></span>
|
||||
<div className={classNames('query-builder--tab', {active: this.props.isActive})} onClick={this.handleSelect}>
|
||||
<span className="query-builder--tab-label">{this.props.queryTabText}</span>
|
||||
<span className="query-builder--tab-delete" onClick={this.handleDelete}></span>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
|
|
@ -1,67 +0,0 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
|
||||
const RenamePanelModal = React.createClass({
|
||||
propTypes: {
|
||||
onConfirm: PropTypes.func.isRequired,
|
||||
panel: PropTypes.shape({
|
||||
id: PropTypes.string.isRequired,
|
||||
}),
|
||||
},
|
||||
|
||||
getInitialState() {
|
||||
return {error: null};
|
||||
},
|
||||
|
||||
componentDidMount() {
|
||||
this.refs.name.focus();
|
||||
},
|
||||
|
||||
render() {
|
||||
const {panel} = this.props;
|
||||
|
||||
return (
|
||||
<div className="modal fade in" id={`renamePanelModal-${panel.id}`} tabIndex="-1" role="dialog">
|
||||
<div className="modal-dialog">
|
||||
<div className="modal-content">
|
||||
<div className="modal-header">
|
||||
<button type="button" className="close" data-dismiss="modal" aria-label="Close">
|
||||
<span aria-hidden="true">×</span>
|
||||
</button>
|
||||
<h4 className="modal-title">Rename Panel</h4>
|
||||
</div>
|
||||
<div className="modal-body">
|
||||
{this.state.error ?
|
||||
<div className="alert alert-danger" role="alert">{this.state.error}</div>
|
||||
: null}
|
||||
<div className="form-grid padding-top">
|
||||
<div className="form-group col-md-8 col-md-offset-2">
|
||||
<input ref="name" name="renameExplorer" type="text" placeholder={panel.name} className="form-control input-lg" id="renameExplorer" required={true} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="modal-footer">
|
||||
<button className="btn btn-info" data-dismiss="modal">Cancel</button>
|
||||
<button onClick={this.handleConfirm} className="btn btn-success">Rename</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
||||
handleConfirm() {
|
||||
const name = this.refs.name.value;
|
||||
|
||||
if (name === '') {
|
||||
this.setState({error: "Name can't be blank"});
|
||||
return;
|
||||
}
|
||||
|
||||
$(`#renamePanelModal-${this.props.panel.id}`).modal('hide'); // eslint-disable-line no-undef
|
||||
this.refs.name.value = '';
|
||||
this.setState({error: null});
|
||||
this.props.onConfirm(name);
|
||||
},
|
||||
});
|
||||
|
||||
export default RenamePanelModal;
|
|
@ -33,6 +33,7 @@ const ChronoTable = React.createClass({
|
|||
text: string.isRequired,
|
||||
}),
|
||||
containerWidth: number.isRequired,
|
||||
height: number,
|
||||
},
|
||||
|
||||
getInitialState() {
|
||||
|
@ -45,6 +46,12 @@ const ChronoTable = React.createClass({
|
|||
};
|
||||
},
|
||||
|
||||
getDefaultProps() {
|
||||
return {
|
||||
height: 600,
|
||||
};
|
||||
},
|
||||
|
||||
fetchCellData(query) {
|
||||
this.setState({isLoading: true});
|
||||
// second param is db, we want to leave this blank
|
||||
|
@ -81,30 +88,33 @@ const ChronoTable = React.createClass({
|
|||
|
||||
// Table data as a list of array.
|
||||
render() {
|
||||
const {containerWidth} = this.props;
|
||||
const {containerWidth, height} = this.props;
|
||||
const {cellData, columnWidths, isLoading} = this.state;
|
||||
const {columns, values} = cellData;
|
||||
|
||||
const ownerHeight = 300;
|
||||
// adjust height to proper value by subtracting the heights of the UI around it
|
||||
// tab height, graph-container vertical padding, graph-heading height, multitable-header height
|
||||
const stylePixelOffset = 136;
|
||||
|
||||
const rowHeight = 34;
|
||||
const height = 300;
|
||||
const width = 200;
|
||||
const headerHeight = 40;
|
||||
const headerHeight = 30;
|
||||
const minWidth = 70;
|
||||
const styleAdjustedHeight = height - stylePixelOffset;
|
||||
|
||||
if (!isLoading && !values.length) {
|
||||
return <div>Your query returned no data</div>;
|
||||
return <div className="generic-empty-state">Your query returned no data</div>;
|
||||
}
|
||||
|
||||
return (
|
||||
<Table
|
||||
onColumnResizeEndCallback={this.handleColumnResize}
|
||||
isColumnResizing={false}
|
||||
ownerHeight={ownerHeight}
|
||||
rowHeight={rowHeight}
|
||||
rowsCount={values.length}
|
||||
width={containerWidth}
|
||||
height={height}
|
||||
ownerHeight={styleAdjustedHeight}
|
||||
height={styleAdjustedHeight}
|
||||
headerHeight={headerHeight}>
|
||||
{columns.map((columnName, colIndex) => {
|
||||
return (
|
||||
|
|
|
@ -36,14 +36,11 @@ const TagList = React.createClass({
|
|||
};
|
||||
},
|
||||
|
||||
componentDidMount() {
|
||||
_getTags() {
|
||||
const {database, measurement, retentionPolicy} = this.props.query;
|
||||
const {source} = this.context;
|
||||
if (!database || !measurement || !retentionPolicy) {
|
||||
return;
|
||||
}
|
||||
|
||||
const sourceProxy = source.links.proxy;
|
||||
|
||||
showTagKeys({source: sourceProxy, database, retentionPolicy, measurement}).then((resp) => {
|
||||
const {errors, tagKeys} = showTagKeysParser(resp.data);
|
||||
if (errors.length) {
|
||||
|
@ -61,6 +58,29 @@ const TagList = React.createClass({
|
|||
});
|
||||
},
|
||||
|
||||
componentDidMount() {
|
||||
const {database, measurement, retentionPolicy} = this.props.query;
|
||||
if (!database || !measurement || !retentionPolicy) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._getTags();
|
||||
},
|
||||
|
||||
componentDidUpdate(prevProps) {
|
||||
const {database, measurement, retentionPolicy} = this.props.query;
|
||||
const {database: prevDB, measurement: prevMeas, retentionPolicy: prevRP} = prevProps.query;
|
||||
if (!database || !measurement || !retentionPolicy) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (database === prevDB && measurement === prevMeas && retentionPolicy === prevRP) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._getTags();
|
||||
},
|
||||
|
||||
handleAcceptReject(e) {
|
||||
e.stopPropagation();
|
||||
this.props.onToggleTagAcceptance();
|
||||
|
@ -70,11 +90,12 @@ const TagList = React.createClass({
|
|||
const {query} = this.props;
|
||||
|
||||
return (
|
||||
<div>
|
||||
<div className="query-builder--column">
|
||||
<div className="query-builder--column-heading">Tags</div>
|
||||
{(!query.database || !query.measurement || !query.retentionPolicy) ? null : <div className="qeditor--list-header">
|
||||
<div className="toggle toggle-sm">
|
||||
<div onClick={this.handleAcceptReject} className={cx("toggle-btn", {active: query.areTagsAccepted})}>Accept</div>
|
||||
<div onClick={this.handleAcceptReject} className={cx("toggle-btn", {active: !query.areTagsAccepted})}>Reject</div>
|
||||
<div onClick={this.handleAcceptReject} className={cx("toggle-btn", {active: query.areTagsAccepted})}>=</div>
|
||||
<div onClick={this.handleAcceptReject} className={cx("toggle-btn", {active: !query.areTagsAccepted})}>!=</div>
|
||||
</div>
|
||||
</div>}
|
||||
{this.renderList()}
|
||||
|
|
|
@ -81,7 +81,10 @@ const TagListItem = React.createClass({
|
|||
},
|
||||
|
||||
render() {
|
||||
const itemClasses = classNames("qeditor--list-item tag-list__item", {open: this.state.isOpen});
|
||||
const {tagKey, tagValues} = this.props;
|
||||
const {isOpen} = this.state;
|
||||
const itemClasses = classNames("qeditor--list-item tag-list__item", {open: isOpen});
|
||||
|
||||
return (
|
||||
<div>
|
||||
<li className={itemClasses} onClick={this.handleClickKey}>
|
||||
|
@ -89,12 +92,15 @@ const TagListItem = React.createClass({
|
|||
<div className="tag-list__caret">
|
||||
<div className="icon caret-right"></div>
|
||||
</div>
|
||||
{this.props.tagKey}
|
||||
<span className="badge">{this.props.tagValues.length}</span>
|
||||
{tagKey}
|
||||
<span className="badge">{tagValues.length}</span>
|
||||
</div>
|
||||
<div
|
||||
className={classNames('btn btn-info btn-xs tag-list__group-by', {active: this.props.isUsingGroupBy})}
|
||||
onClick={this.handleGroupBy}>Group By {tagKey}
|
||||
</div>
|
||||
<div className={classNames('btn btn-info btn-xs tag-list__group-by', {active: this.props.isUsingGroupBy})} onClick={this.handleGroupBy}>Group By</div>
|
||||
</li>
|
||||
{this.state.isOpen ? this.renderTagValues() : null}
|
||||
{isOpen ? this.renderTagValues() : null}
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
|
|
@ -6,16 +6,24 @@ import LineGraph from 'shared/components/LineGraph';
|
|||
import MultiTable from './MultiTable';
|
||||
const RefreshingLineGraph = AutoRefresh(LineGraph);
|
||||
|
||||
const {
|
||||
arrayOf,
|
||||
number,
|
||||
shape,
|
||||
string,
|
||||
} = PropTypes;
|
||||
|
||||
const Visualization = React.createClass({
|
||||
propTypes: {
|
||||
timeRange: PropTypes.shape({
|
||||
upper: PropTypes.string,
|
||||
lower: PropTypes.string,
|
||||
timeRange: shape({
|
||||
upper: string,
|
||||
lower: string,
|
||||
}).isRequired,
|
||||
queryConfigs: PropTypes.arrayOf(PropTypes.shape({})).isRequired,
|
||||
isActive: PropTypes.bool.isRequired,
|
||||
name: PropTypes.string,
|
||||
activeQueryIndex: PropTypes.number,
|
||||
queryConfigs: arrayOf(shape({})).isRequired,
|
||||
name: string,
|
||||
activeQueryIndex: number,
|
||||
height: string,
|
||||
heightPixels: number,
|
||||
},
|
||||
|
||||
contextTypes: {
|
||||
|
@ -32,20 +40,12 @@ const Visualization = React.createClass({
|
|||
};
|
||||
},
|
||||
|
||||
componentDidUpdate() {
|
||||
if (this.props.isActive) {
|
||||
this.panel.scrollIntoView();
|
||||
// scrollIntoView scrolls slightly *too* far, so this adds some top offset.
|
||||
this.panel.parentNode.scrollTop -= 10;
|
||||
}
|
||||
},
|
||||
|
||||
handleToggleView() {
|
||||
this.setState({isGraphInView: !this.state.isGraphInView});
|
||||
},
|
||||
|
||||
render() {
|
||||
const {queryConfigs, timeRange, isActive, name, activeQueryIndex} = this.props;
|
||||
const {queryConfigs, timeRange, activeQueryIndex, height, heightPixels} = this.props;
|
||||
const {source} = this.context;
|
||||
const proxyLink = source.links.proxy;
|
||||
|
||||
|
@ -61,7 +61,7 @@ const Visualization = React.createClass({
|
|||
const isInDataExplorer = true;
|
||||
|
||||
return (
|
||||
<div ref={(p) => this.panel = p} className={classNames("graph", {active: isActive})}>
|
||||
<div className={classNames("graph", {active: true})} style={{height}}>
|
||||
<div className="graph-heading">
|
||||
<div className="graph-title">
|
||||
{name || "Graph"}
|
||||
|
@ -73,7 +73,7 @@ const Visualization = React.createClass({
|
|||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div className="graph-container">
|
||||
<div className={classNames({"graph-container": isGraphInView, "table-container": !isGraphInView})}>
|
||||
{isGraphInView ? (
|
||||
<RefreshingLineGraph
|
||||
queries={queries}
|
||||
|
@ -81,7 +81,7 @@ const Visualization = React.createClass({
|
|||
activeQueryIndex={activeQueryIndex}
|
||||
isInDataExplorer={isInDataExplorer}
|
||||
/>
|
||||
) : <MultiTable queries={queries} />}
|
||||
) : <MultiTable queries={queries} height={heightPixels} />}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
|
|
@ -1,61 +0,0 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
import {connect} from 'react-redux';
|
||||
import Visualization from './Visualization';
|
||||
|
||||
const {shape, string} = PropTypes;
|
||||
|
||||
const Visualizations = React.createClass({
|
||||
propTypes: {
|
||||
timeRange: shape({
|
||||
upper: string,
|
||||
lower: string,
|
||||
}).isRequired,
|
||||
panels: shape({}).isRequired,
|
||||
queryConfigs: shape({}).isRequired,
|
||||
width: string,
|
||||
activePanelID: string,
|
||||
activeQueryID: string,
|
||||
},
|
||||
|
||||
render() {
|
||||
const {panels, queryConfigs, timeRange, width, activePanelID} = this.props;
|
||||
|
||||
const visualizations = Object.keys(panels).map((panelID) => {
|
||||
const panel = panels[panelID];
|
||||
const queries = panel.queryIds.map((id) => queryConfigs[id]);
|
||||
const isActive = panelID === activePanelID;
|
||||
|
||||
return <Visualization activeQueryIndex={this.getActiveQueryIndex(panelID)} name={panel.name} key={panelID} queryConfigs={queries} timeRange={timeRange} isActive={isActive} />;
|
||||
});
|
||||
|
||||
return (
|
||||
<div className="panels" style={{width}}>
|
||||
{visualizations}
|
||||
</div>
|
||||
);
|
||||
},
|
||||
|
||||
getActiveQueryIndex(panelID) {
|
||||
const {activeQueryID, activePanelID, panels} = this.props;
|
||||
const isPanelActive = panelID === activePanelID;
|
||||
|
||||
if (!isPanelActive) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (activeQueryID === null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return panels[panelID].queryIds.indexOf(activeQueryID);
|
||||
},
|
||||
});
|
||||
|
||||
function mapStateToProps(state) {
|
||||
return {
|
||||
panels: state.panels,
|
||||
queryConfigs: state.queryConfigs,
|
||||
};
|
||||
}
|
||||
|
||||
export default connect(mapStateToProps)(Visualizations);
|
|
@ -1,15 +1,16 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
import {connect} from 'react-redux';
|
||||
import PanelBuilder from '../components/PanelBuilder';
|
||||
import Visualizations from '../components/Visualizations';
|
||||
import QueryBuilder from '../components/QueryBuilder';
|
||||
import Visualization from '../components/Visualization';
|
||||
import Header from '../containers/Header';
|
||||
import ResizeContainer from 'shared/components/ResizeContainer';
|
||||
import ResizeContainer from 'src/shared/components/ResizeContainer';
|
||||
|
||||
import {
|
||||
setTimeRange as setTimeRangeAction,
|
||||
} from '../actions/view';
|
||||
|
||||
const {
|
||||
arrayOf,
|
||||
func,
|
||||
shape,
|
||||
string,
|
||||
|
@ -23,12 +24,15 @@ const DataExplorer = React.createClass({
|
|||
self: string.isRequired,
|
||||
}).isRequired,
|
||||
}).isRequired,
|
||||
queryConfigs: PropTypes.shape({}),
|
||||
timeRange: shape({
|
||||
upper: string,
|
||||
lower: string,
|
||||
}).isRequired,
|
||||
activePanel: string,
|
||||
setTimeRange: func.isRequired,
|
||||
dataExplorer: shape({
|
||||
queryIDs: arrayOf(string).isRequired,
|
||||
}).isRequired,
|
||||
},
|
||||
|
||||
childContextTypes: {
|
||||
|
@ -55,7 +59,9 @@ const DataExplorer = React.createClass({
|
|||
},
|
||||
|
||||
render() {
|
||||
const {timeRange, setTimeRange, activePanel} = this.props;
|
||||
const {timeRange, setTimeRange, queryConfigs, dataExplorer} = this.props;
|
||||
const {activeQueryID} = this.state;
|
||||
const queries = dataExplorer.queryIDs.map((qid) => queryConfigs[qid]);
|
||||
|
||||
return (
|
||||
<div className="data-explorer">
|
||||
|
@ -64,16 +70,17 @@ const DataExplorer = React.createClass({
|
|||
timeRange={timeRange}
|
||||
/>
|
||||
<ResizeContainer>
|
||||
<PanelBuilder
|
||||
<Visualization
|
||||
timeRange={timeRange}
|
||||
activePanelID={activePanel}
|
||||
queryConfigs={queries}
|
||||
activeQueryID={this.state.activeQueryID}
|
||||
setActiveQuery={this.handleSetActiveQuery}
|
||||
activeQueryIndex={0}
|
||||
/>
|
||||
<Visualizations
|
||||
<QueryBuilder
|
||||
queries={queries}
|
||||
timeRange={timeRange}
|
||||
activePanelID={activePanel}
|
||||
activeQueryID={this.state.activeQueryID}
|
||||
setActiveQuery={this.handleSetActiveQuery}
|
||||
activeQueryID={activeQueryID}
|
||||
/>
|
||||
</ResizeContainer>
|
||||
</div>
|
||||
|
@ -82,11 +89,12 @@ const DataExplorer = React.createClass({
|
|||
});
|
||||
|
||||
function mapStateToProps(state) {
|
||||
const {timeRange, dataExplorerUI} = state;
|
||||
const {timeRange, queryConfigs, dataExplorer} = state;
|
||||
|
||||
return {
|
||||
timeRange,
|
||||
activePanel: dataExplorerUI.activePanel,
|
||||
queryConfigs,
|
||||
dataExplorer,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
export default function dataExplorerUI(state = {}, action) {
|
||||
switch (action.type) {
|
||||
case 'ACTIVATE_PANEL':
|
||||
case 'CREATE_PANEL': {
|
||||
const {panelID} = action.payload;
|
||||
return {...state, activePanel: panelID};
|
||||
}
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
|
@ -1,11 +1,9 @@
|
|||
import queryConfigs from './queryConfigs';
|
||||
import panels from './panels';
|
||||
import timeRange from './timeRange';
|
||||
import dataExplorerUI from './dataExplorerUI';
|
||||
import dataExplorer from './ui';
|
||||
|
||||
export {
|
||||
queryConfigs,
|
||||
panels,
|
||||
timeRange,
|
||||
dataExplorerUI,
|
||||
dataExplorer,
|
||||
};
|
||||
|
|
|
@ -1,51 +0,0 @@
|
|||
import update from 'react-addons-update';
|
||||
|
||||
export default function panels(state = {}, action) {
|
||||
switch (action.type) {
|
||||
case 'CREATE_PANEL': {
|
||||
const {panelID, queryID} = action.payload;
|
||||
return {
|
||||
...state,
|
||||
[panelID]: {id: panelID, queryIds: [queryID]},
|
||||
};
|
||||
}
|
||||
|
||||
case 'RENAME_PANEL': {
|
||||
const {panelId, name} = action.payload;
|
||||
return update(state, {
|
||||
[panelId]: {
|
||||
name: {$set: name},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
case 'DELETE_PANEL': {
|
||||
const {panelId} = action.payload;
|
||||
return update(state, {$apply: (p) => {
|
||||
const panelsCopy = Object.assign({}, p);
|
||||
delete panelsCopy[panelId];
|
||||
return panelsCopy;
|
||||
}});
|
||||
}
|
||||
|
||||
case 'ADD_QUERY': {
|
||||
const {panelId, queryId} = action.payload;
|
||||
return update(state, {
|
||||
[panelId]: {
|
||||
queryIds: {$push: [queryId]},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
case 'DELETE_QUERY': {
|
||||
const {panelId, queryId} = action.payload;
|
||||
return update(state, {
|
||||
[panelId]: {
|
||||
queryIds: {$set: state[panelId].queryIds.filter((id) => id !== queryId)},
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
|
@ -46,7 +46,6 @@ export default function queryConfigs(state = {}, action) {
|
|||
return nextState;
|
||||
}
|
||||
|
||||
case 'CREATE_PANEL':
|
||||
case 'ADD_KAPACITOR_QUERY':
|
||||
case 'ADD_QUERY': {
|
||||
const {queryID, options} = action.payload;
|
||||
|
@ -94,9 +93,9 @@ export default function queryConfigs(state = {}, action) {
|
|||
}
|
||||
|
||||
case 'DELETE_QUERY': {
|
||||
const {queryId} = action.payload;
|
||||
const {queryID} = action.payload;
|
||||
const nextState = update(state, {$apply: (configs) => {
|
||||
delete configs[queryId];
|
||||
delete configs[queryID];
|
||||
return configs;
|
||||
}});
|
||||
|
||||
|
|
|
@ -1,14 +1,18 @@
|
|||
import update from 'react-addons-update';
|
||||
const initialState = {
|
||||
upper: null,
|
||||
lower: 'now() - 15m',
|
||||
};
|
||||
|
||||
export default function timeRange(state = {}, action) {
|
||||
export default function timeRange(state = initialState, action) {
|
||||
switch (action.type) {
|
||||
case 'SET_TIME_RANGE': {
|
||||
const {upper, lower} = action.payload;
|
||||
const newState = {
|
||||
upper,
|
||||
lower,
|
||||
};
|
||||
|
||||
return update(state, {
|
||||
['lower']: {$set: lower},
|
||||
['upper']: {$set: upper},
|
||||
});
|
||||
return {...state, ...newState};
|
||||
}
|
||||
}
|
||||
return state;
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
const initialState = {
|
||||
queryIDs: [],
|
||||
};
|
||||
|
||||
export default function ui(state = initialState, action) {
|
||||
switch (action.type) {
|
||||
case 'ADD_QUERY': {
|
||||
const {queryID} = action.payload;
|
||||
const newState = {
|
||||
queryIDs: state.queryIDs.concat(queryID),
|
||||
};
|
||||
|
||||
return {...state, ...newState};
|
||||
}
|
||||
|
||||
case 'DELETE_QUERY': {
|
||||
const {queryID} = action.payload;
|
||||
const newState = {
|
||||
queryIDs: state.queryIDs.filter(id => id !== queryID),
|
||||
};
|
||||
|
||||
return {...state, ...newState};
|
||||
}
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
|
@ -87,7 +87,7 @@ export async function getAllHosts(proxyLink, telegrafDB) {
|
|||
export function getMappings() {
|
||||
return AJAX({
|
||||
method: 'GET',
|
||||
url: `/chronograf/v1/mappings`,
|
||||
resource: 'mappings',
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -1,31 +1,42 @@
|
|||
import React, {PropTypes} from 'react';
|
||||
import ReactTooltip from 'react-tooltip';
|
||||
import {Link} from 'react-router';
|
||||
import _ from 'lodash';
|
||||
import React, {PropTypes} from 'react'
|
||||
import {Link} from 'react-router'
|
||||
import {connect} from 'react-redux'
|
||||
import _ from 'lodash'
|
||||
import classnames from 'classnames';
|
||||
|
||||
import LayoutRenderer from 'shared/components/LayoutRenderer';
|
||||
import TimeRangeDropdown from '../../shared/components/TimeRangeDropdown';
|
||||
import DashboardHeader from 'src/dashboards/components/DashboardHeader';
|
||||
import timeRanges from 'hson!../../shared/data/timeRanges.hson';
|
||||
import {getMappings, getAppsForHosts, getMeasurementsForHost, getAllHosts} from 'src/hosts/apis';
|
||||
import {fetchLayouts} from 'shared/apis';
|
||||
import {presentationButtonDispatcher} from 'shared/dispatchers'
|
||||
|
||||
const {
|
||||
shape,
|
||||
string,
|
||||
bool,
|
||||
func,
|
||||
} = PropTypes
|
||||
|
||||
export const HostPage = React.createClass({
|
||||
propTypes: {
|
||||
source: PropTypes.shape({
|
||||
links: PropTypes.shape({
|
||||
proxy: PropTypes.string.isRequired,
|
||||
source: shape({
|
||||
links: shape({
|
||||
proxy: string.isRequired,
|
||||
}).isRequired,
|
||||
telegraf: PropTypes.string.isRequired,
|
||||
id: PropTypes.string.isRequired,
|
||||
telegraf: string.isRequired,
|
||||
id: string.isRequired,
|
||||
}),
|
||||
params: PropTypes.shape({
|
||||
hostID: PropTypes.string.isRequired,
|
||||
params: shape({
|
||||
hostID: string.isRequired,
|
||||
}).isRequired,
|
||||
location: PropTypes.shape({
|
||||
query: PropTypes.shape({
|
||||
app: PropTypes.string,
|
||||
location: shape({
|
||||
query: shape({
|
||||
app: string,
|
||||
}),
|
||||
}),
|
||||
inPresentationMode: bool,
|
||||
handleClickPresentationButton: func,
|
||||
},
|
||||
|
||||
getInitialState() {
|
||||
|
@ -134,45 +145,34 @@ export const HostPage = React.createClass({
|
|||
},
|
||||
|
||||
render() {
|
||||
const hostID = this.props.params.hostID;
|
||||
const {layouts, timeRange, hosts} = this.state;
|
||||
const appParam = this.props.location.query.app ? `?app=${this.props.location.query.app}` : '';
|
||||
const {params: {hostID}, location: {query: {app}}, source: {id}, inPresentationMode, handleClickPresentationButton} = this.props
|
||||
const {layouts, timeRange, hosts} = this.state
|
||||
const appParam = app ? `?app=${app}` : ''
|
||||
|
||||
return (
|
||||
<div className="page">
|
||||
<div className="page-header full-width">
|
||||
<div className="page-header__container">
|
||||
<div className="page-header__left">
|
||||
<div className="dropdown page-header-dropdown">
|
||||
<button className="dropdown-toggle" type="button" data-toggle="dropdown">
|
||||
<span className="button-text">{hostID}</span>
|
||||
<span className="caret"></span>
|
||||
</button>
|
||||
<ul className="dropdown-menu" aria-labelledby="dropdownMenu1">
|
||||
{Object.keys(hosts).map((host, i) => {
|
||||
return (
|
||||
<li key={i}>
|
||||
<Link to={`/sources/${this.props.source.id}/hosts/${host + appParam}`} className="role-option">
|
||||
{host}
|
||||
</Link>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div className="page-header__right">
|
||||
<div className="btn btn-info btn-sm" data-for="graph-tips-tooltip" data-tip="<p><code>Click + Drag</code> Zoom in (X or Y)</p><p><code>Shift + Click</code> Pan Graph Window</p><p><code>Double Click</code> Reset Graph Window</p>">
|
||||
<span className="icon heart"></span>
|
||||
Graph Tips
|
||||
</div>
|
||||
<ReactTooltip id="graph-tips-tooltip" effect="solid" html={true} offset={{top: 2}} place="bottom" class="influx-tooltip place-bottom" />
|
||||
<TimeRangeDropdown onChooseTimeRange={this.handleChooseTimeRange} selected={timeRange.inputValue} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="page-contents">
|
||||
<div className="container-fluid full-width">
|
||||
<DashboardHeader
|
||||
buttonText={hostID}
|
||||
timeRange={timeRange}
|
||||
isHidden={inPresentationMode}
|
||||
handleChooseTimeRange={this.handleChooseTimeRange}
|
||||
handleClickPresentationButton={handleClickPresentationButton}
|
||||
>
|
||||
{Object.keys(hosts).map((host, i) => {
|
||||
return (
|
||||
<li key={i}>
|
||||
<Link to={`/sources/${id}/hosts/${host + appParam}`} className="role-option">
|
||||
{host}
|
||||
</Link>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</DashboardHeader>
|
||||
<div className={classnames({
|
||||
'page-contents': true,
|
||||
'presentation-mode': inPresentationMode,
|
||||
})}>
|
||||
<div className="container-fluid full-width dashboard">
|
||||
{ (layouts.length > 0) ? this.renderLayouts(layouts) : '' }
|
||||
</div>
|
||||
</div>
|
||||
|
@ -181,4 +181,12 @@ export const HostPage = React.createClass({
|
|||
},
|
||||
});
|
||||
|
||||
export default HostPage;
|
||||
const mapStateToProps = (state) => ({
|
||||
inPresentationMode: state.appUI.presentationMode,
|
||||
})
|
||||
|
||||
const mapDispatchToProps = (dispatch) => ({
|
||||
handleClickPresentationButton: presentationButtonDispatcher(dispatch),
|
||||
})
|
||||
|
||||
export default connect(mapStateToProps, mapDispatchToProps)(HostPage)
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import React from 'react';
|
||||
import {render} from 'react-dom';
|
||||
import {Provider} from 'react-redux';
|
||||
import {Router, Route, Redirect} from 'react-router';
|
||||
import {createHistory, useBasename} from 'history';
|
||||
import {Router, Route, Redirect, useRouterHistory} from 'react-router';
|
||||
import {createHistory} from 'history';
|
||||
|
||||
import App from 'src/App';
|
||||
import AlertsApp from 'src/alerts';
|
||||
|
@ -18,6 +18,8 @@ import NotFound from 'src/shared/components/NotFound';
|
|||
import configureStore from 'src/store/configureStore';
|
||||
import {getMe, getSources} from 'shared/apis';
|
||||
import {receiveMe} from 'shared/actions/me';
|
||||
import {receiveAuth} from 'shared/actions/auth';
|
||||
import {disablePresentationMode} from 'shared/actions/ui';
|
||||
import {loadLocalStorage} from './localStorage';
|
||||
|
||||
import 'src/style/chronograf.scss';
|
||||
|
@ -29,15 +31,21 @@ let browserHistory;
|
|||
const basepath = rootNode.dataset.basepath;
|
||||
window.basepath = basepath;
|
||||
if (basepath) {
|
||||
browserHistory = useBasename(createHistory)({
|
||||
browserHistory = useRouterHistory(createHistory)({
|
||||
basename: basepath, // this is written in when available by the URL prefixer middleware
|
||||
});
|
||||
} else {
|
||||
browserHistory = useBasename(createHistory)({
|
||||
browserHistory = useRouterHistory(createHistory)({
|
||||
basename: "",
|
||||
});
|
||||
}
|
||||
|
||||
window.addEventListener('keyup', (event) => {
|
||||
if (event.key === 'Escape') {
|
||||
store.dispatch(disablePresentationMode())
|
||||
}
|
||||
})
|
||||
|
||||
const Root = React.createClass({
|
||||
getInitialState() {
|
||||
return {
|
||||
|
@ -69,14 +77,13 @@ const Root = React.createClass({
|
|||
if (store.getState().me.links) {
|
||||
return this.setState({loggedIn: true});
|
||||
}
|
||||
getMe().then(({data: me}) => {
|
||||
getMe().then(({data: me, auth}) => {
|
||||
store.dispatch(receiveMe(me));
|
||||
store.dispatch(receiveAuth(auth));
|
||||
this.setState({loggedIn: true});
|
||||
}).catch((err) => {
|
||||
const AUTH_DISABLED = 418;
|
||||
if (err.response.status === AUTH_DISABLED) {
|
||||
return this.setState({loggedIn: true});
|
||||
// Could store a boolean indicating auth is not set up
|
||||
}).catch((error) => {
|
||||
if (error.auth) {
|
||||
store.dispatch(receiveAuth(error.auth));
|
||||
}
|
||||
|
||||
this.setState({loggedIn: false});
|
||||
|
@ -116,12 +123,13 @@ const Root = React.createClass({
|
|||
<Route path="alerts" component={AlertsApp} />
|
||||
<Route path="dashboards" component={DashboardsPage} />
|
||||
<Route path="dashboards/:dashboardID" component={DashboardPage} />
|
||||
<Route path="dashboards/:dashboardID/edit" component={DashboardPage} />
|
||||
<Route path="alert-rules" component={KapacitorRulesPage} />
|
||||
<Route path="alert-rules/:ruleID" component={KapacitorRulePage} />
|
||||
<Route path="alert-rules/new" component={KapacitorRulePage} />
|
||||
</Route>
|
||||
<Route path="*" component={NotFound} />
|
||||
</Route>
|
||||
<Route path="*" component={NotFound} />
|
||||
</Router>
|
||||
</Provider>
|
||||
);
|
||||
|
|
|
@ -1,7 +1,12 @@
|
|||
import uuid from 'node-uuid';
|
||||
import {getRules, getRule, deleteRule as deleteRuleAPI} from 'src/kapacitor/apis';
|
||||
import {getKapacitor} from 'src/shared/apis';
|
||||
import {publishNotification} from 'src/shared/actions/notifications';
|
||||
import {
|
||||
getRules,
|
||||
getRule,
|
||||
deleteRule as deleteRuleAPI,
|
||||
updateRuleStatus as updateRuleStatusAPI,
|
||||
} from 'src/kapacitor/apis';
|
||||
|
||||
export function fetchRule(source, ruleID) {
|
||||
return (dispatch) => {
|
||||
|
@ -107,6 +112,17 @@ export function updateAlerts(ruleID, alerts) {
|
|||
};
|
||||
}
|
||||
|
||||
export function updateAlertNodes(ruleID, alertType, alertNodesText) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_ALERT_NODES',
|
||||
payload: {
|
||||
ruleID,
|
||||
alertType,
|
||||
alertNodesText,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function updateRuleName(ruleID, name) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_NAME',
|
||||
|
@ -126,6 +142,16 @@ export function deleteRuleSuccess(ruleID) {
|
|||
};
|
||||
}
|
||||
|
||||
export function updateRuleStatusSuccess(ruleID, status) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_STATUS_SUCCESS',
|
||||
payload: {
|
||||
ruleID,
|
||||
status,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function deleteRule(rule) {
|
||||
return (dispatch) => {
|
||||
deleteRuleAPI(rule).then(() => {
|
||||
|
@ -136,3 +162,14 @@ export function deleteRule(rule) {
|
|||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function updateRuleStatus(rule, {status}) {
|
||||
return (dispatch) => {
|
||||
updateRuleStatusAPI(rule, status).then(() => {
|
||||
dispatch(publishNotification('success', `${rule.name} ${status} successfully`));
|
||||
}).catch(() => {
|
||||
dispatch(updateRuleStatusSuccess(rule.id, status));
|
||||
dispatch(publishNotification('error', `${rule.name} could not be ${status}`));
|
||||
});
|
||||
};
|
||||
}
|
||||
|
|