Merge branch 'master' into feature/tickscript-logging
commit d462226099

CHANGELOG.md (12 changes)
@@ -4,9 +4,21 @@
1. [#2158](https://github.com/influxdata/chronograf/pull/2158): Fix 'Cannot connect to source' false error flag on Dashboard page
1. [#2167](https://github.com/influxdata/chronograf/pull/2167): Add fractions of seconds to time field in csv export
1. [#1077](https://github.com/influxdata/chronograf/pull/2087): Fix Chronograf requiring Telegraf's CPU and system plugins to ensure that all Apps appear on the HOST LIST page.
1. [#2222](https://github.com/influxdata/chronograf/pull/2222): Fix template variables in dashboard query building.
1. [#2291](https://github.com/influxdata/chronograf/pull/2291): Fix several kapacitor alert creation panics.
1. [#2303](https://github.com/influxdata/chronograf/pull/2303): Add shadow-utils to RPM release packages
1. [#2292](https://github.com/influxdata/chronograf/pull/2292): Source extra command line options from defaults file
1. [#2327](https://github.com/influxdata/chronograf/pull/2327): After CREATE/DELETE queries, refresh list of databases in Data Explorer
1. [#2327](https://github.com/influxdata/chronograf/pull/2327): Visualize CREATE/DELETE queries with Table view in Data Explorer
1. [#2329](https://github.com/influxdata/chronograf/pull/2329): Include tag values alongside measurement name in Data Explorer result tabs
1. [#2386](https://github.com/influxdata/chronograf/pull/2386): Fix queries that include regex, numbers and wildcards
1. [#2398](https://github.com/influxdata/chronograf/pull/2398): Fix apps on hosts page from parsing tags with null values
1. [#2408](https://github.com/influxdata/chronograf/pull/2408): Fix updated Dashboard names not updating dashboard list

### Features

1. [#2188](https://github.com/influxdata/chronograf/pull/2188): Add Kapacitor logs to the TICKscript editor
1. [#2384](https://github.com/influxdata/chronograf/pull/2384): Add filtering by name to Dashboard index page
1. [#2385](https://github.com/influxdata/chronograf/pull/2385): Add time shift feature to DataExplorer and Dashboards

### UI Improvements
Gopkg.lock

@@ -49,7 +49,7 @@
 [[projects]]
   name = "github.com/google/go-cmp"
-  packages = ["cmp"]
+  packages = ["cmp","cmp/cmpopts"]
   revision = "79b2d888f100ec053545168aa94bcfb322e8bfc8"

 [[projects]]
@@ -140,6 +140,6 @@
 [solve-meta]
   analyzer-name = "dep"
   analyzer-version = 1
-  inputs-digest = "f34fb88755292baba8b52c14bf5b9a028daff96a763368a7cf1de90004d33695"
+  inputs-digest = "85a5451fc9e0596e486a676204eb2de0b12900522341ee0804cf9ec86fb2765e"
   solver-name = "gps-cdcl"
   solver-version = 1
Makefile (5 changes)
@@ -1,4 +1,4 @@
-.PHONY: assets dep clean test gotest gotestrace jstest run run-dev ctags continuous
+.PHONY: assets dep clean test gotest gotestrace jstest run run-dev run-hmr ctags

 VERSION ?= $(shell git describe --always --tags)
 COMMIT ?= $(shell git rev-parse --short=8 HEAD)
@@ -106,6 +106,9 @@ run: ${BINARY}
 run-dev: chronogiraffe
 	./chronograf -d --log-level=debug

+run-hmr:
+	cd ui && npm run start:hmr
+
 clean:
 	if [ -f ${BINARY} ] ; then rm ${BINARY} ; fi
 	cd ui && yarn run clean
bolt/dashboards.go

@@ -86,6 +86,7 @@ func (d *DashboardsStore) Add(ctx context.Context, src chronograf.Dashboard) (ch
 		id, _ := b.NextSequence()
+
 		src.ID = chronograf.DashboardID(id)
 		// TODO: use FormatInt
 		strID := strconv.Itoa(int(id))
 		for i, cell := range src.Cells {
 			cid, err := d.IDs.Generate()
@@ -95,12 +96,11 @@ func (d *DashboardsStore) Add(ctx context.Context, src chronograf.Dashboard) (ch
 			cell.ID = cid
 			src.Cells[i] = cell
 		}
-		if v, err := internal.MarshalDashboard(src); err != nil {
-			return err
-		} else if err := b.Put([]byte(strID), v); err != nil {
+		v, err := internal.MarshalDashboard(src)
+		if err != nil {
 			return err
 		}
-		return nil
+		return b.Put([]byte(strID), v)
 	}); err != nil {
 		return chronograf.Dashboard{}, err
 	}
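A note on the refactor above: the old code used Go's `if v, err := ...; err != nil { ... } else if ...` chaining, which forced a trailing `return nil`; the new shape checks each error as it occurs and returns the final `Put` error directly. A minimal standalone sketch of the same idiom — `marshal` and `put` are hypothetical stand-ins, not chronograf's API:

package main

import (
	"errors"
	"fmt"
)

// marshal and put are illustrative stand-ins for internal.MarshalDashboard
// and bolt's bucket.Put; they exist only to show the control flow.
func marshal(s string) ([]byte, error) {
	if s == "" {
		return nil, errors.New("empty dashboard")
	}
	return []byte(s), nil
}

func put(key, value []byte) error { return nil }

// save shows the refactored shape: check each error as it occurs, then
// return the final call's error directly instead of a trailing `return nil`.
func save(s string) error {
	v, err := marshal(s)
	if err != nil {
		return err
	}
	return put([]byte("id"), v)
}

func main() {
	fmt.Println(save("mydash")) // <nil>
}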
bolt/internal/internal.go

@@ -191,12 +191,26 @@ func MarshalDashboard(d chronograf.Dashboard) ([]byte, error) {
 			if q.Range != nil {
 				r.Upper, r.Lower = q.Range.Upper, q.Range.Lower
 			}
+			q.Shifts = q.QueryConfig.Shifts
 			queries[j] = &Query{
 				Command: q.Command,
 				Label:   q.Label,
 				Range:   r,
 				Source:  q.Source,
 			}
+
+			shifts := make([]*TimeShift, len(q.Shifts))
+			for k := range q.Shifts {
+				shift := &TimeShift{
+					Label:    q.Shifts[k].Label,
+					Unit:     q.Shifts[k].Unit,
+					Quantity: q.Shifts[k].Quantity,
+				}
+
+				shifts[k] = shift
+			}
+
+			queries[j].Shifts = shifts
 		}

 		axes := make(map[string]*Axis, len(c.Axes))
@@ -277,12 +291,26 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
 				Label:   q.Label,
 				Source:  q.Source,
 			}

 			if q.Range.Upper != q.Range.Lower {
 				queries[j].Range = &chronograf.Range{
 					Upper: q.Range.Upper,
 					Lower: q.Range.Lower,
 				}
 			}
+
+			shifts := make([]chronograf.TimeShift, len(q.Shifts))
+			for k := range q.Shifts {
+				shift := chronograf.TimeShift{
+					Label:    q.Shifts[k].Label,
+					Unit:     q.Shifts[k].Unit,
+					Quantity: q.Shifts[k].Quantity,
+				}
+
+				shifts[k] = shift
+			}
+
+			queries[j].Shifts = shifts
 		}

 		axes := make(map[string]chronograf.Axis, len(c.Axes))
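Together with the marshal-side hunk above, time shifts now survive a bolt storage round trip. A hedged sketch of that symmetry using this file's exported helpers (the helper itself is illustrative, not part of the diff):

// shiftsSurviveRoundTrip marshals a dashboard and unmarshals it back;
// because the unmarshal path always allocates Shifts with make, the
// reconstructed queries carry an empty-or-populated, never-nil slice.
func shiftsSurviveRoundTrip(d chronograf.Dashboard) (chronograf.Dashboard, error) {
	data, err := MarshalDashboard(d)
	if err != nil {
		return chronograf.Dashboard{}, err
	}
	var out chronograf.Dashboard
	err = UnmarshalDashboard(data, &out)
	return out, err
}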
@@ -330,9 +358,9 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {

 	templates := make([]chronograf.Template, len(pb.Templates))
 	for i, t := range pb.Templates {
-		vals := make([]chronograf.BasicTemplateValue, len(t.Values))
+		vals := make([]chronograf.TemplateValue, len(t.Values))
 		for j, v := range t.Values {
-			vals[j] = chronograf.BasicTemplateValue{
+			vals[j] = chronograf.TemplateValue{
 				Selected: v.Selected,
 				Type:     v.Type,
 				Value:    v.Value,
@@ -341,7 +369,7 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {

 		template := chronograf.Template{
 			ID: chronograf.TemplateID(t.ID),
-			BasicTemplateVar: chronograf.BasicTemplateVar{
+			TemplateVar: chronograf.TemplateVar{
 				Var:    t.TempVar,
 				Values: vals,
 			},
@@ -434,8 +462,5 @@ func UnmarshalUser(data []byte, u *chronograf.User) error {
 // UnmarshalUserPB decodes a user from binary protobuf data.
 // We are ignoring the password for now.
 func UnmarshalUserPB(data []byte, u *User) error {
-	if err := proto.Unmarshal(data, u); err != nil {
-		return err
-	}
-	return nil
+	return proto.Unmarshal(data, u)
 }
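The collapsed body is behavior-preserving: proto.Unmarshal either fills `u` and returns nil, or returns its error, exactly as the old explicit check did. A sketch of a call site (the `data` byte slice is assumed to come from a bolt bucket; this snippet is illustrative, not from the diff):

// Decode a stored user; errors from proto.Unmarshal surface to the
// caller unchanged by the refactor.
u := &User{}
if err := UnmarshalUserPB(data, u); err != nil {
	return nil, err
}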
bolt/internal/internal.pb.go

@@ -20,6 +20,7 @@ It has these top-level messages:
 	Layout
 	Cell
 	Query
+	TimeShift
 	Range
 	AlertRule
 	User
@@ -60,6 +61,83 @@ func (m *Source) String() string { return proto.CompactTextString(m) }
 func (*Source) ProtoMessage()    {}
 func (*Source) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{0} }

+func (m *Source) GetID() int64 {
+	if m != nil {
+		return m.ID
+	}
+	return 0
+}
+
+func (m *Source) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *Source) GetType() string {
+	if m != nil {
+		return m.Type
+	}
+	return ""
+}
+
+func (m *Source) GetUsername() string {
+	if m != nil {
+		return m.Username
+	}
+	return ""
+}
+
+func (m *Source) GetPassword() string {
+	if m != nil {
+		return m.Password
+	}
+	return ""
+}
+
+func (m *Source) GetURL() string {
+	if m != nil {
+		return m.URL
+	}
+	return ""
+}
+
+func (m *Source) GetDefault() bool {
+	if m != nil {
+		return m.Default
+	}
+	return false
+}
+
+func (m *Source) GetTelegraf() string {
+	if m != nil {
+		return m.Telegraf
+	}
+	return ""
+}
+
+func (m *Source) GetInsecureSkipVerify() bool {
+	if m != nil {
+		return m.InsecureSkipVerify
+	}
+	return false
+}
+
+func (m *Source) GetMetaURL() string {
+	if m != nil {
+		return m.MetaURL
+	}
+	return ""
+}
+
+func (m *Source) GetSharedSecret() string {
+	if m != nil {
+		return m.SharedSecret
+	}
+	return ""
+}
+
 type Dashboard struct {
 	ID   int64  `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
 	Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@@ -72,6 +150,20 @@ func (m *Dashboard) String() string { return proto.CompactTextString(m) }
 func (*Dashboard) ProtoMessage()    {}
 func (*Dashboard) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{1} }

+func (m *Dashboard) GetID() int64 {
+	if m != nil {
+		return m.ID
+	}
+	return 0
+}
+
+func (m *Dashboard) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
 func (m *Dashboard) GetCells() []*DashboardCell {
 	if m != nil {
 		return m.Cells
@@ -103,6 +195,34 @@ func (m *DashboardCell) String() string { return proto.CompactTextStr
 func (*DashboardCell) ProtoMessage()    {}
 func (*DashboardCell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{2} }

+func (m *DashboardCell) GetX() int32 {
+	if m != nil {
+		return m.X
+	}
+	return 0
+}
+
+func (m *DashboardCell) GetY() int32 {
+	if m != nil {
+		return m.Y
+	}
+	return 0
+}
+
+func (m *DashboardCell) GetW() int32 {
+	if m != nil {
+		return m.W
+	}
+	return 0
+}
+
+func (m *DashboardCell) GetH() int32 {
+	if m != nil {
+		return m.H
+	}
+	return 0
+}
+
 func (m *DashboardCell) GetQueries() []*Query {
 	if m != nil {
 		return m.Queries
@@ -110,6 +230,27 @@ func (m *DashboardCell) GetQueries() []*Query {
 	return nil
 }

+func (m *DashboardCell) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *DashboardCell) GetType() string {
+	if m != nil {
+		return m.Type
+	}
+	return ""
+}
+
+func (m *DashboardCell) GetID() string {
+	if m != nil {
+		return m.ID
+	}
+	return ""
+}
+
 func (m *DashboardCell) GetAxes() map[string]*Axis {
 	if m != nil {
 		return m.Axes
@@ -118,7 +259,7 @@ func (m *DashboardCell) GetAxes() map[string]*Axis {
 }

 type Axis struct {
-	LegacyBounds []int64  `protobuf:"varint,1,rep,name=legacyBounds" json:"legacyBounds,omitempty"`
+	LegacyBounds []int64  `protobuf:"varint,1,rep,packed,name=legacyBounds" json:"legacyBounds,omitempty"`
 	Bounds       []string `protobuf:"bytes,2,rep,name=bounds" json:"bounds,omitempty"`
 	Label        string   `protobuf:"bytes,3,opt,name=label,proto3" json:"label,omitempty"`
 	Prefix       string   `protobuf:"bytes,4,opt,name=prefix,proto3" json:"prefix,omitempty"`
@@ -132,6 +273,55 @@ func (m *Axis) String() string { return proto.CompactTextString(m) }
 func (*Axis) ProtoMessage()    {}
 func (*Axis) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{3} }

+func (m *Axis) GetLegacyBounds() []int64 {
+	if m != nil {
+		return m.LegacyBounds
+	}
+	return nil
+}
+
+func (m *Axis) GetBounds() []string {
+	if m != nil {
+		return m.Bounds
+	}
+	return nil
+}
+
+func (m *Axis) GetLabel() string {
+	if m != nil {
+		return m.Label
+	}
+	return ""
+}
+
+func (m *Axis) GetPrefix() string {
+	if m != nil {
+		return m.Prefix
+	}
+	return ""
+}
+
+func (m *Axis) GetSuffix() string {
+	if m != nil {
+		return m.Suffix
+	}
+	return ""
+}
+
+func (m *Axis) GetBase() string {
+	if m != nil {
+		return m.Base
+	}
+	return ""
+}
+
+func (m *Axis) GetScale() string {
+	if m != nil {
+		return m.Scale
+	}
+	return ""
+}
+
 type Template struct {
 	ID      string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
 	TempVar string `protobuf:"bytes,2,opt,name=temp_var,json=tempVar,proto3" json:"temp_var,omitempty"`
@@ -146,6 +336,20 @@ func (m *Template) String() string { return proto.CompactTextString(m
 func (*Template) ProtoMessage()    {}
 func (*Template) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{4} }

+func (m *Template) GetID() string {
+	if m != nil {
+		return m.ID
+	}
+	return ""
+}
+
+func (m *Template) GetTempVar() string {
+	if m != nil {
+		return m.TempVar
+	}
+	return ""
+}
+
 func (m *Template) GetValues() []*TemplateValue {
 	if m != nil {
 		return m.Values
@@ -153,6 +357,20 @@ func (m *Template) GetValues() []*TemplateValue {
 	return nil
 }

+func (m *Template) GetType() string {
+	if m != nil {
+		return m.Type
+	}
+	return ""
+}
+
+func (m *Template) GetLabel() string {
+	if m != nil {
+		return m.Label
+	}
+	return ""
+}
+
 func (m *Template) GetQuery() *TemplateQuery {
 	if m != nil {
 		return m.Query
@@ -171,6 +389,27 @@ func (m *TemplateValue) String() string { return proto.CompactTextStr
 func (*TemplateValue) ProtoMessage()    {}
 func (*TemplateValue) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{5} }

+func (m *TemplateValue) GetType() string {
+	if m != nil {
+		return m.Type
+	}
+	return ""
+}
+
+func (m *TemplateValue) GetValue() string {
+	if m != nil {
+		return m.Value
+	}
+	return ""
+}
+
+func (m *TemplateValue) GetSelected() bool {
+	if m != nil {
+		return m.Selected
+	}
+	return false
+}
+
 type TemplateQuery struct {
 	Command string `protobuf:"bytes,1,opt,name=command,proto3" json:"command,omitempty"`
 	Db      string `protobuf:"bytes,2,opt,name=db,proto3" json:"db,omitempty"`
@@ -185,6 +424,48 @@ func (m *TemplateQuery) String() string { return proto.CompactTextStr
 func (*TemplateQuery) ProtoMessage()    {}
 func (*TemplateQuery) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{6} }

+func (m *TemplateQuery) GetCommand() string {
+	if m != nil {
+		return m.Command
+	}
+	return ""
+}
+
+func (m *TemplateQuery) GetDb() string {
+	if m != nil {
+		return m.Db
+	}
+	return ""
+}
+
+func (m *TemplateQuery) GetRp() string {
+	if m != nil {
+		return m.Rp
+	}
+	return ""
+}
+
+func (m *TemplateQuery) GetMeasurement() string {
+	if m != nil {
+		return m.Measurement
+	}
+	return ""
+}
+
+func (m *TemplateQuery) GetTagKey() string {
+	if m != nil {
+		return m.TagKey
+	}
+	return ""
+}
+
+func (m *TemplateQuery) GetFieldKey() string {
+	if m != nil {
+		return m.FieldKey
+	}
+	return ""
+}
+
 type Server struct {
 	ID   int64  `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
 	Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@@ -200,6 +481,55 @@ func (m *Server) String() string { return proto.CompactTextString(m)
 func (*Server) ProtoMessage()    {}
 func (*Server) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{7} }

+func (m *Server) GetID() int64 {
+	if m != nil {
+		return m.ID
+	}
+	return 0
+}
+
+func (m *Server) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *Server) GetUsername() string {
+	if m != nil {
+		return m.Username
+	}
+	return ""
+}
+
+func (m *Server) GetPassword() string {
+	if m != nil {
+		return m.Password
+	}
+	return ""
+}
+
+func (m *Server) GetURL() string {
+	if m != nil {
+		return m.URL
+	}
+	return ""
+}
+
+func (m *Server) GetSrcID() int64 {
+	if m != nil {
+		return m.SrcID
+	}
+	return 0
+}
+
+func (m *Server) GetActive() bool {
+	if m != nil {
+		return m.Active
+	}
+	return false
+}
+
 type Layout struct {
 	ID          string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
 	Application string `protobuf:"bytes,2,opt,name=Application,proto3" json:"Application,omitempty"`
@@ -213,6 +543,27 @@ func (m *Layout) String() string { return proto.CompactTextString(m)
 func (*Layout) ProtoMessage()    {}
 func (*Layout) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{8} }

+func (m *Layout) GetID() string {
+	if m != nil {
+		return m.ID
+	}
+	return ""
+}
+
+func (m *Layout) GetApplication() string {
+	if m != nil {
+		return m.Application
+	}
+	return ""
+}
+
+func (m *Layout) GetMeasurement() string {
+	if m != nil {
+		return m.Measurement
+	}
+	return ""
+}
+
 func (m *Layout) GetCells() []*Cell {
 	if m != nil {
 		return m.Cells
@@ -220,6 +571,13 @@ func (m *Layout) GetCells() []*Cell {
 	return nil
 }

+func (m *Layout) GetAutoflow() bool {
+	if m != nil {
+		return m.Autoflow
+	}
+	return false
+}
+
 type Cell struct {
 	X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"`
 	Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"`
@@ -228,7 +586,7 @@ type Cell struct {
 	Queries []*Query         `protobuf:"bytes,5,rep,name=queries" json:"queries,omitempty"`
 	I       string           `protobuf:"bytes,6,opt,name=i,proto3" json:"i,omitempty"`
 	Name    string           `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"`
-	Yranges []int64          `protobuf:"varint,8,rep,name=yranges" json:"yranges,omitempty"`
+	Yranges []int64          `protobuf:"varint,8,rep,packed,name=yranges" json:"yranges,omitempty"`
 	Ylabels []string         `protobuf:"bytes,9,rep,name=ylabels" json:"ylabels,omitempty"`
 	Type    string           `protobuf:"bytes,10,opt,name=type,proto3" json:"type,omitempty"`
 	Axes    map[string]*Axis `protobuf:"bytes,11,rep,name=axes" json:"axes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
@@ -239,6 +597,34 @@ func (m *Cell) String() string { return proto.CompactTextString(m) }
 func (*Cell) ProtoMessage()    {}
 func (*Cell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{9} }

+func (m *Cell) GetX() int32 {
+	if m != nil {
+		return m.X
+	}
+	return 0
+}
+
+func (m *Cell) GetY() int32 {
+	if m != nil {
+		return m.Y
+	}
+	return 0
+}
+
+func (m *Cell) GetW() int32 {
+	if m != nil {
+		return m.W
+	}
+	return 0
+}
+
+func (m *Cell) GetH() int32 {
+	if m != nil {
+		return m.H
+	}
+	return 0
+}
+
 func (m *Cell) GetQueries() []*Query {
 	if m != nil {
 		return m.Queries
@@ -246,6 +632,41 @@ func (m *Cell) GetQueries() []*Query {
 	return nil
 }

+func (m *Cell) GetI() string {
+	if m != nil {
+		return m.I
+	}
+	return ""
+}
+
+func (m *Cell) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+func (m *Cell) GetYranges() []int64 {
+	if m != nil {
+		return m.Yranges
+	}
+	return nil
+}
+
+func (m *Cell) GetYlabels() []string {
+	if m != nil {
+		return m.Ylabels
+	}
+	return nil
+}
+
+func (m *Cell) GetType() string {
+	if m != nil {
+		return m.Type
+	}
+	return ""
+}
+
 func (m *Cell) GetAxes() map[string]*Axis {
 	if m != nil {
 		return m.Axes
@@ -254,14 +675,15 @@ func (m *Cell) GetAxes() map[string]*Axis {
 }

 type Query struct {
 	Command  string       `protobuf:"bytes,1,opt,name=Command,proto3" json:"Command,omitempty"`
 	DB       string       `protobuf:"bytes,2,opt,name=DB,proto3" json:"DB,omitempty"`
 	RP       string       `protobuf:"bytes,3,opt,name=RP,proto3" json:"RP,omitempty"`
 	GroupBys []string     `protobuf:"bytes,4,rep,name=GroupBys" json:"GroupBys,omitempty"`
 	Wheres   []string     `protobuf:"bytes,5,rep,name=Wheres" json:"Wheres,omitempty"`
 	Label    string       `protobuf:"bytes,6,opt,name=Label,proto3" json:"Label,omitempty"`
 	Range    *Range       `protobuf:"bytes,7,opt,name=Range" json:"Range,omitempty"`
 	Source   string       `protobuf:"bytes,8,opt,name=Source,proto3" json:"Source,omitempty"`
+	Shifts   []*TimeShift `protobuf:"bytes,9,rep,name=Shifts" json:"Shifts,omitempty"`
 }

 func (m *Query) Reset() { *m = Query{} }
@@ -269,6 +691,48 @@ func (m *Query) String() string { return proto.CompactTextString(m) }
 func (*Query) ProtoMessage()    {}
 func (*Query) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{10} }

+func (m *Query) GetCommand() string {
+	if m != nil {
+		return m.Command
+	}
+	return ""
+}
+
+func (m *Query) GetDB() string {
+	if m != nil {
+		return m.DB
+	}
+	return ""
+}
+
+func (m *Query) GetRP() string {
+	if m != nil {
+		return m.RP
+	}
+	return ""
+}
+
+func (m *Query) GetGroupBys() []string {
+	if m != nil {
+		return m.GroupBys
+	}
+	return nil
+}
+
+func (m *Query) GetWheres() []string {
+	if m != nil {
+		return m.Wheres
+	}
+	return nil
+}
+
+func (m *Query) GetLabel() string {
+	if m != nil {
+		return m.Label
+	}
+	return ""
+}
+
 func (m *Query) GetRange() *Range {
 	if m != nil {
 		return m.Range
@@ -276,6 +740,52 @@ func (m *Query) GetRange() *Range {
 	return nil
 }

+func (m *Query) GetSource() string {
+	if m != nil {
+		return m.Source
+	}
+	return ""
+}
+
+func (m *Query) GetShifts() []*TimeShift {
+	if m != nil {
+		return m.Shifts
+	}
+	return nil
+}
+
+type TimeShift struct {
+	Label    string `protobuf:"bytes,1,opt,name=Label,proto3" json:"Label,omitempty"`
+	Unit     string `protobuf:"bytes,2,opt,name=Unit,proto3" json:"Unit,omitempty"`
+	Quantity string `protobuf:"bytes,3,opt,name=Quantity,proto3" json:"Quantity,omitempty"`
+}
+
+func (m *TimeShift) Reset()                    { *m = TimeShift{} }
+func (m *TimeShift) String() string            { return proto.CompactTextString(m) }
+func (*TimeShift) ProtoMessage()               {}
+func (*TimeShift) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{11} }
+
+func (m *TimeShift) GetLabel() string {
+	if m != nil {
+		return m.Label
+	}
+	return ""
+}
+
+func (m *TimeShift) GetUnit() string {
+	if m != nil {
+		return m.Unit
+	}
+	return ""
+}
+
+func (m *TimeShift) GetQuantity() string {
+	if m != nil {
+		return m.Quantity
+	}
+	return ""
+}
+
 type Range struct {
 	Upper int64 `protobuf:"varint,1,opt,name=Upper,proto3" json:"Upper,omitempty"`
 	Lower int64 `protobuf:"varint,2,opt,name=Lower,proto3" json:"Lower,omitempty"`
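To see the new generated types in use, a small sketch that attaches a one-week shift to a Query and round-trips it through the protobuf wire format. It assumes it lives in this package so the generated types and the proto import are in scope; the helper itself is hypothetical, not part of this diff:

// exampleShiftRoundTrip builds a Query carrying a one-week TimeShift,
// marshals it, and decodes it back through the generated protobuf code.
func exampleShiftRoundTrip() (*Query, error) {
	q := &Query{
		Command: `SELECT "usage_user" FROM "cpu"`,
		Shifts: []*TimeShift{
			{Label: "1 week", Unit: "w", Quantity: "1"},
		},
	}
	data, err := proto.Marshal(q)
	if err != nil {
		return nil, err
	}
	out := &Query{}
	return out, proto.Unmarshal(data, out)
}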
@@ -284,7 +794,21 @@ type Range struct {
 func (m *Range) Reset()         { *m = Range{} }
 func (m *Range) String() string { return proto.CompactTextString(m) }
 func (*Range) ProtoMessage()    {}
-func (*Range) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{11} }
+func (*Range) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{12} }
+
+func (m *Range) GetUpper() int64 {
+	if m != nil {
+		return m.Upper
+	}
+	return 0
+}
+
+func (m *Range) GetLower() int64 {
+	if m != nil {
+		return m.Lower
+	}
+	return 0
+}

 type AlertRule struct {
 	ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
@@ -296,7 +820,35 @@ type AlertRule struct {
 func (m *AlertRule) Reset()         { *m = AlertRule{} }
 func (m *AlertRule) String() string { return proto.CompactTextString(m) }
 func (*AlertRule) ProtoMessage()    {}
-func (*AlertRule) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{12} }
+func (*AlertRule) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{13} }
+
+func (m *AlertRule) GetID() string {
+	if m != nil {
+		return m.ID
+	}
+	return ""
+}
+
+func (m *AlertRule) GetJSON() string {
+	if m != nil {
+		return m.JSON
+	}
+	return ""
+}
+
+func (m *AlertRule) GetSrcID() int64 {
+	if m != nil {
+		return m.SrcID
+	}
+	return 0
+}
+
+func (m *AlertRule) GetKapaID() int64 {
+	if m != nil {
+		return m.KapaID
+	}
+	return 0
+}

 type User struct {
 	ID uint64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
@@ -306,7 +858,21 @@ type User struct {
 func (m *User) Reset()         { *m = User{} }
 func (m *User) String() string { return proto.CompactTextString(m) }
 func (*User) ProtoMessage()    {}
-func (*User) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{13} }
+func (*User) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{14} }
+
+func (m *User) GetID() uint64 {
+	if m != nil {
+		return m.ID
+	}
+	return 0
+}
+
+func (m *User) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}

 func init() {
 	proto.RegisterType((*Source)(nil), "internal.Source")
@@ -320,6 +886,7 @@ func init() {
 	proto.RegisterType((*Layout)(nil), "internal.Layout")
 	proto.RegisterType((*Cell)(nil), "internal.Cell")
 	proto.RegisterType((*Query)(nil), "internal.Query")
+	proto.RegisterType((*TimeShift)(nil), "internal.TimeShift")
 	proto.RegisterType((*Range)(nil), "internal.Range")
 	proto.RegisterType((*AlertRule)(nil), "internal.AlertRule")
 	proto.RegisterType((*User)(nil), "internal.User")
@@ -328,70 +895,73 @@ func init() {
 func init() { proto.RegisterFile("internal.proto", fileDescriptorInternal) }

 var fileDescriptorInternal = []byte{
-	// 1028 bytes of a gzipped FileDescriptorProto
-	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xbc, 0x56, 0x4f, 0x6f, 0xe3, 0x44,
-	0x14, 0xd7, 0xf8, 0x4f, 0x12, 0xbf, 0x74, 0x0b, 0x1a, 0xad, 0x58, 0xb3, 0x5c, 0x82, 0x05, 0x52,
-	0x40, 0x6c, 0x41, 0xbb, 0x42, 0x42, 0xdc, 0xd2, 0x06, 0xad, 0x4a, 0xbb, 0x4b, 0x99, 0xb4, 0xe5,
-	0x84, 0x56, 0x13, 0xe7, 0xa5, 0xb5, 0xd6, 0x89, 0xcd, 0xd8, 0x6e, 0xe3, 0x6f, 0xc1, 0x27, 0x40,
-	0x42, 0xe2, 0xc4, 0x81, 0x03, 0x5f, 0x80, 0xfb, 0x7e, 0x2a, 0xf4, 0x66, 0xc6, 0x8e, 0xc3, 0x76,
-	0xd1, 0x5e, 0xe0, 0x36, 0xbf, 0xf7, 0xc6, 0x6f, 0x66, 0xde, 0xef, 0xfd, 0x7e, 0x09, 0xec, 0x27,
-	0xeb, 0x12, 0xd5, 0x5a, 0xa6, 0x07, 0xb9, 0xca, 0xca, 0x8c, 0x0f, 0x1a, 0x1c, 0xfd, 0xe1, 0x40,
-	0x6f, 0x96, 0x55, 0x2a, 0x46, 0xbe, 0x0f, 0xce, 0xf1, 0x34, 0x64, 0x23, 0x36, 0x76, 0x85, 0x73,
-	0x3c, 0xe5, 0x1c, 0xbc, 0xe7, 0x72, 0x85, 0xa1, 0x33, 0x62, 0xe3, 0x40, 0xe8, 0x35, 0xc5, 0xce,
-	0xeb, 0x1c, 0x43, 0xd7, 0xc4, 0x68, 0xcd, 0x1f, 0xc2, 0xe0, 0xa2, 0xa0, 0x6a, 0x2b, 0x0c, 0x3d,
-	0x1d, 0x6f, 0x31, 0xe5, 0xce, 0x64, 0x51, 0xdc, 0x66, 0x6a, 0x11, 0xfa, 0x26, 0xd7, 0x60, 0xfe,
-	0x2e, 0xb8, 0x17, 0xe2, 0x34, 0xec, 0xe9, 0x30, 0x2d, 0x79, 0x08, 0xfd, 0x29, 0x2e, 0x65, 0x95,
-	0x96, 0x61, 0x7f, 0xc4, 0xc6, 0x03, 0xd1, 0x40, 0xaa, 0x73, 0x8e, 0x29, 0x5e, 0x29, 0xb9, 0x0c,
-	0x07, 0xa6, 0x4e, 0x83, 0xf9, 0x01, 0xf0, 0xe3, 0x75, 0x81, 0x71, 0xa5, 0x70, 0xf6, 0x32, 0xc9,
-	0x2f, 0x51, 0x25, 0xcb, 0x3a, 0x0c, 0x74, 0x81, 0x3b, 0x32, 0x74, 0xca, 0x33, 0x2c, 0x25, 0x9d,
-	0x0d, 0xba, 0x54, 0x03, 0x79, 0x04, 0x7b, 0xb3, 0x6b, 0xa9, 0x70, 0x31, 0xc3, 0x58, 0x61, 0x19,
-	0x0e, 0x75, 0x7a, 0x27, 0x16, 0xfd, 0xcc, 0x20, 0x98, 0xca, 0xe2, 0x7a, 0x9e, 0x49, 0xb5, 0x78,
-	0xab, 0x9e, 0x3d, 0x02, 0x3f, 0xc6, 0x34, 0x2d, 0x42, 0x77, 0xe4, 0x8e, 0x87, 0x8f, 0x1f, 0x1c,
-	0xb4, 0x64, 0xb4, 0x75, 0x8e, 0x30, 0x4d, 0x85, 0xd9, 0xc5, 0xbf, 0x80, 0xa0, 0xc4, 0x55, 0x9e,
-	0xca, 0x12, 0x8b, 0xd0, 0xd3, 0x9f, 0xf0, 0xed, 0x27, 0xe7, 0x36, 0x25, 0xb6, 0x9b, 0xa2, 0xdf,
-	0x1d, 0xb8, 0xb7, 0x53, 0x8a, 0xef, 0x01, 0xdb, 0xe8, 0x5b, 0xf9, 0x82, 0x6d, 0x08, 0xd5, 0xfa,
-	0x46, 0xbe, 0x60, 0x35, 0xa1, 0x5b, 0xcd, 0x9f, 0x2f, 0xd8, 0x2d, 0xa1, 0x6b, 0xcd, 0x9a, 0x2f,
-	0xd8, 0x35, 0xff, 0x04, 0xfa, 0x3f, 0x55, 0xa8, 0x12, 0x2c, 0x42, 0x5f, 0x9f, 0xfc, 0xce, 0xf6,
-	0xe4, 0xef, 0x2b, 0x54, 0xb5, 0x68, 0xf2, 0xf4, 0x52, 0xcd, 0xb8, 0xa1, 0x4f, 0xaf, 0x29, 0x56,
-	0xd2, 0x74, 0xf4, 0x4d, 0x8c, 0xd6, 0xb6, 0x43, 0x86, 0x33, 0xea, 0xd0, 0x97, 0xe0, 0xc9, 0x0d,
-	0x16, 0x61, 0xa0, 0xeb, 0x7f, 0xf8, 0x86, 0x66, 0x1c, 0x4c, 0x36, 0x58, 0x7c, 0xb3, 0x2e, 0x55,
-	0x2d, 0xf4, 0xf6, 0x87, 0x4f, 0x21, 0x68, 0x43, 0x34, 0x39, 0x2f, 0xb1, 0xd6, 0x0f, 0x0c, 0x04,
-	0x2d, 0xf9, 0x47, 0xe0, 0xdf, 0xc8, 0xb4, 0x32, 0x8d, 0x1f, 0x3e, 0xde, 0xdf, 0x96, 0x9d, 0x6c,
-	0x92, 0x42, 0x98, 0xe4, 0xd7, 0xce, 0x57, 0x2c, 0xfa, 0x93, 0x81, 0x47, 0x31, 0x22, 0x3b, 0xc5,
-	0x2b, 0x19, 0xd7, 0x87, 0x59, 0xb5, 0x5e, 0x14, 0x21, 0x1b, 0xb9, 0x63, 0x57, 0xec, 0xc4, 0xf8,
-	0x7b, 0xd0, 0x9b, 0x9b, 0xac, 0x33, 0x72, 0xc7, 0x81, 0xb0, 0x88, 0xdf, 0x07, 0x3f, 0x95, 0x73,
-	0x4c, 0xad, 0x0e, 0x0c, 0xa0, 0xdd, 0xb9, 0xc2, 0x65, 0xb2, 0xb1, 0x32, 0xb0, 0x88, 0xe2, 0x45,
-	0xb5, 0xa4, 0xb8, 0x91, 0x80, 0x45, 0xd4, 0xae, 0xb9, 0x2c, 0xda, 0x16, 0xd2, 0x9a, 0x2a, 0x17,
-	0xb1, 0x4c, 0x9b, 0x1e, 0x1a, 0x10, 0xfd, 0xc5, 0x68, 0xfe, 0x0d, 0xdf, 0x9d, 0x99, 0x33, 0x1d,
-	0x7d, 0x1f, 0x06, 0x34, 0x0b, 0x2f, 0x6e, 0xa4, 0xb2, 0x73, 0xd7, 0x27, 0x7c, 0x29, 0x15, 0xff,
-	0x1c, 0x7a, 0xfa, 0xe5, 0x77, 0xcc, 0x5e, 0x53, 0xee, 0x92, 0xf2, 0xc2, 0x6e, 0x6b, 0x19, 0xf4,
-	0x3a, 0x0c, 0xb6, 0x8f, 0xf5, 0xbb, 0x8f, 0x7d, 0x04, 0x3e, 0x8d, 0x42, 0xad, 0x6f, 0x7f, 0x67,
-	0x65, 0x33, 0x30, 0x66, 0x57, 0x74, 0x01, 0xf7, 0x76, 0x4e, 0x6c, 0x4f, 0x62, 0xbb, 0x27, 0x6d,
-	0x59, 0x0c, 0x2c, 0x6b, 0xa4, 0xfd, 0x02, 0x53, 0x8c, 0x4b, 0x5c, 0xe8, 0x7e, 0x0f, 0x44, 0x8b,
-	0xa3, 0x5f, 0xd9, 0xb6, 0xae, 0x3e, 0x8f, 0xd4, 0x1d, 0x67, 0xab, 0x95, 0x5c, 0x2f, 0x6c, 0xe9,
-	0x06, 0x52, 0xdf, 0x16, 0x73, 0x5b, 0xda, 0x59, 0xcc, 0x09, 0xab, 0xdc, 0x32, 0xe8, 0xa8, 0x9c,
-	0x8f, 0x60, 0xb8, 0x42, 0x59, 0x54, 0x0a, 0x57, 0xb8, 0x2e, 0x6d, 0x0b, 0xba, 0x21, 0xfe, 0x00,
-	0xfa, 0xa5, 0xbc, 0x7a, 0x41, 0xb3, 0x67, 0x99, 0x2c, 0xe5, 0xd5, 0x09, 0xd6, 0xfc, 0x03, 0x08,
-	0x96, 0x09, 0xa6, 0x0b, 0x9d, 0x32, 0x74, 0x0e, 0x74, 0xe0, 0x04, 0xeb, 0xe8, 0x37, 0x06, 0xbd,
-	0x19, 0xaa, 0x1b, 0x54, 0x6f, 0x65, 0x17, 0x5d, 0x3b, 0x75, 0xff, 0xc5, 0x4e, 0xbd, 0xbb, 0xed,
-	0xd4, 0xdf, 0xda, 0xe9, 0x7d, 0xf0, 0x67, 0x2a, 0x3e, 0x9e, 0xea, 0x1b, 0xb9, 0xc2, 0x00, 0x9a,
-	0xc6, 0x49, 0x5c, 0x26, 0x37, 0x68, 0x3d, 0xd6, 0xa2, 0xe8, 0x17, 0x06, 0xbd, 0x53, 0x59, 0x67,
-	0x55, 0xf9, 0xda, 0x84, 0x8d, 0x60, 0x38, 0xc9, 0xf3, 0x34, 0x89, 0x65, 0x99, 0x64, 0x6b, 0x7b,
-	0xdb, 0x6e, 0x88, 0x76, 0x3c, 0xeb, 0xf4, 0xce, 0xdc, 0xbb, 0x1b, 0x22, 0x85, 0x1e, 0x69, 0x17,
-	0x34, 0x96, 0xd6, 0x51, 0xa8, 0x31, 0x3f, 0x9d, 0xa4, 0x07, 0x4e, 0xaa, 0x32, 0x5b, 0xa6, 0xd9,
-	0xad, 0x7e, 0xc9, 0x40, 0xb4, 0x38, 0x7a, 0xe5, 0x80, 0xf7, 0x7f, 0xb9, 0xdb, 0x1e, 0xb0, 0xc4,
-	0x12, 0xc9, 0x92, 0xd6, 0xeb, 0xfa, 0x1d, 0xaf, 0x0b, 0xa1, 0x5f, 0x2b, 0xb9, 0xbe, 0xc2, 0x22,
-	0x1c, 0x68, 0xe7, 0x68, 0xa0, 0xce, 0x68, 0x8d, 0x18, 0x93, 0x0b, 0x44, 0x03, 0xdb, 0x99, 0x87,
-	0xce, 0xcc, 0x7f, 0x66, 0xfd, 0x70, 0xa8, 0x6f, 0x14, 0xee, 0xb6, 0xe5, 0xbf, 0xb3, 0xc1, 0x57,
-	0x0c, 0xfc, 0x56, 0x30, 0x47, 0xbb, 0x82, 0x39, 0xda, 0x0a, 0x66, 0x7a, 0xd8, 0x08, 0x66, 0x7a,
-	0x48, 0x58, 0x9c, 0x35, 0x82, 0x11, 0x67, 0x44, 0xd6, 0x53, 0x95, 0x55, 0xf9, 0x61, 0x6d, 0x58,
-	0x0d, 0x44, 0x8b, 0x69, 0xca, 0x7e, 0xb8, 0x46, 0x65, 0x5b, 0x1d, 0x08, 0x8b, 0x68, 0x26, 0x4f,
-	0xb5, 0x99, 0x98, 0xe6, 0x1a, 0xc0, 0x3f, 0x06, 0x5f, 0x50, 0xf3, 0x74, 0x87, 0x77, 0x78, 0xd1,
-	0x61, 0x61, 0xb2, 0x54, 0xd4, 0xfc, 0x57, 0xb1, 0xbf, 0x27, 0x16, 0x45, 0x4f, 0xec, 0xe7, 0x54,
-	0xfd, 0x22, 0xcf, 0x51, 0x59, 0x89, 0x19, 0xa0, 0xcf, 0xcc, 0x6e, 0xd1, 0xb8, 0xa3, 0x2b, 0x0c,
-	0x88, 0x7e, 0x84, 0x60, 0x92, 0xa2, 0x2a, 0x45, 0x95, 0xbe, 0xee, 0xa9, 0x1c, 0xbc, 0x6f, 0x67,
-	0xdf, 0x3d, 0x6f, 0x84, 0x49, 0xeb, 0xad, 0x9c, 0xdc, 0x7f, 0xc8, 0xe9, 0x44, 0xe6, 0xf2, 0x78,
-	0xaa, 0xe7, 0xcc, 0x15, 0x16, 0x45, 0x9f, 0x82, 0x47, 0xb2, 0xed, 0x54, 0xf6, 0xde, 0x24, 0xf9,
-	0x79, 0x4f, 0xff, 0x2b, 0x7b, 0xf2, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb7, 0x59, 0x2e, 0xc0,
-	0xa7, 0x09, 0x00, 0x00,
+	// 1082 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xcf, 0x8e, 0xe3, 0xc4,
+	0x13, 0x96, 0x63, 0x3b, 0x89, 0x2b, 0xb3, 0xf3, 0xfb, 0xa9, 0x59, 0xb1, 0x66, 0xb9, 0x04, 0x0b,
+	0xa4, 0xf0, 0x67, 0x07, 0xb4, 0x2b, 0x24, 0xc4, 0x2d, 0x33, 0x41, 0xab, 0x61, 0x66, 0x97, 0x99,
+	0xce, 0xcc, 0x70, 0x42, 0xab, 0x4e, 0x52, 0x99, 0x58, 0xeb, 0xd8, 0xa6, 0xdd, 0x9e, 0x89, 0xdf,
+	0x82, 0x27, 0x40, 0x42, 0xe2, 0xc4, 0x81, 0x03, 0x2f, 0xc0, 0x9d, 0x17, 0xe2, 0x8a, 0xaa, 0xbb,
+	0xed, 0x38, 0xec, 0x2c, 0xda, 0x0b, 0xdc, 0xfa, 0xab, 0xea, 0x54, 0x55, 0xd7, 0x57, 0xf5, 0xc5,
+	0xb0, 0x1f, 0xa7, 0x0a, 0x65, 0x2a, 0x92, 0x83, 0x5c, 0x66, 0x2a, 0x63, 0xfd, 0x1a, 0x47, 0xbf,
+	0x76, 0xa0, 0x3b, 0xcd, 0x4a, 0x39, 0x47, 0xb6, 0x0f, 0x9d, 0xe3, 0x49, 0xe8, 0x0c, 0x9d, 0x91,
+	0xcb, 0x3b, 0xc7, 0x13, 0xc6, 0xc0, 0x7b, 0x2e, 0xd6, 0x18, 0x76, 0x86, 0xce, 0x28, 0xe0, 0xfa,
+	0x4c, 0xb6, 0x8b, 0x2a, 0xc7, 0xd0, 0x35, 0x36, 0x3a, 0xb3, 0x87, 0xd0, 0xbf, 0x2c, 0x28, 0xda,
+	0x1a, 0x43, 0x4f, 0xdb, 0x1b, 0x4c, 0xbe, 0x33, 0x51, 0x14, 0xb7, 0x99, 0x5c, 0x84, 0xbe, 0xf1,
+	0xd5, 0x98, 0xfd, 0x1f, 0xdc, 0x4b, 0x7e, 0x1a, 0x76, 0xb5, 0x99, 0x8e, 0x2c, 0x84, 0xde, 0x04,
+	0x97, 0xa2, 0x4c, 0x54, 0xd8, 0x1b, 0x3a, 0xa3, 0x3e, 0xaf, 0x21, 0xc5, 0xb9, 0xc0, 0x04, 0xaf,
+	0xa5, 0x58, 0x86, 0x7d, 0x13, 0xa7, 0xc6, 0xec, 0x00, 0xd8, 0x71, 0x5a, 0xe0, 0xbc, 0x94, 0x38,
+	0x7d, 0x19, 0xe7, 0x57, 0x28, 0xe3, 0x65, 0x15, 0x06, 0x3a, 0xc0, 0x1d, 0x1e, 0xca, 0xf2, 0x0c,
+	0x95, 0xa0, 0xdc, 0xa0, 0x43, 0xd5, 0x90, 0x45, 0xb0, 0x37, 0x5d, 0x09, 0x89, 0x8b, 0x29, 0xce,
+	0x25, 0xaa, 0x70, 0xa0, 0xdd, 0x3b, 0xb6, 0xe8, 0x07, 0x07, 0x82, 0x89, 0x28, 0x56, 0xb3, 0x4c,
+	0xc8, 0xc5, 0x1b, 0xf5, 0xec, 0x11, 0xf8, 0x73, 0x4c, 0x92, 0x22, 0x74, 0x87, 0xee, 0x68, 0xf0,
+	0xf8, 0xc1, 0x41, 0x43, 0x46, 0x13, 0xe7, 0x08, 0x93, 0x84, 0x9b, 0x5b, 0xec, 0x33, 0x08, 0x14,
+	0xae, 0xf3, 0x44, 0x28, 0x2c, 0x42, 0x4f, 0xff, 0x84, 0x6d, 0x7f, 0x72, 0x61, 0x5d, 0x7c, 0x7b,
+	0x29, 0xfa, 0xa5, 0x03, 0xf7, 0x76, 0x42, 0xb1, 0x3d, 0x70, 0x36, 0xba, 0x2a, 0x9f, 0x3b, 0x1b,
+	0x42, 0x95, 0xae, 0xc8, 0xe7, 0x4e, 0x45, 0xe8, 0x56, 0xf3, 0xe7, 0x73, 0xe7, 0x96, 0xd0, 0x4a,
+	0xb3, 0xe6, 0x73, 0x67, 0xc5, 0x3e, 0x84, 0xde, 0xf7, 0x25, 0xca, 0x18, 0x8b, 0xd0, 0xd7, 0x99,
+	0xff, 0xb7, 0xcd, 0x7c, 0x5e, 0xa2, 0xac, 0x78, 0xed, 0xa7, 0x97, 0x6a, 0xc6, 0x0d, 0x7d, 0xfa,
+	0x4c, 0x36, 0x45, 0xd3, 0xd1, 0x33, 0x36, 0x3a, 0xdb, 0x0e, 0x19, 0xce, 0xa8, 0x43, 0x9f, 0x83,
+	0x27, 0x36, 0x58, 0x84, 0x81, 0x8e, 0xff, 0xde, 0x6b, 0x9a, 0x71, 0x30, 0xde, 0x60, 0xf1, 0x55,
+	0xaa, 0x64, 0xc5, 0xf5, 0xf5, 0x87, 0x4f, 0x21, 0x68, 0x4c, 0x34, 0x39, 0x2f, 0xb1, 0xd2, 0x0f,
+	0x0c, 0x38, 0x1d, 0xd9, 0xfb, 0xe0, 0xdf, 0x88, 0xa4, 0x34, 0x8d, 0x1f, 0x3c, 0xde, 0xdf, 0x86,
+	0x1d, 0x6f, 0xe2, 0x82, 0x1b, 0xe7, 0x97, 0x9d, 0x2f, 0x9c, 0xe8, 0x37, 0x07, 0x3c, 0xb2, 0x11,
+	0xd9, 0x09, 0x5e, 0x8b, 0x79, 0x75, 0x98, 0x95, 0xe9, 0xa2, 0x08, 0x9d, 0xa1, 0x3b, 0x72, 0xf9,
+	0x8e, 0x8d, 0xbd, 0x0d, 0xdd, 0x99, 0xf1, 0x76, 0x86, 0xee, 0x28, 0xe0, 0x16, 0xb1, 0xfb, 0xe0,
+	0x27, 0x62, 0x86, 0x89, 0xdd, 0x03, 0x03, 0xe8, 0x76, 0x2e, 0x71, 0x19, 0x6f, 0xec, 0x1a, 0x58,
+	0x44, 0xf6, 0xa2, 0x5c, 0x92, 0xdd, 0xac, 0x80, 0x45, 0xd4, 0xae, 0x99, 0x28, 0x9a, 0x16, 0xd2,
+	0x99, 0x22, 0x17, 0x73, 0x91, 0xd4, 0x3d, 0x34, 0x20, 0xfa, 0xdd, 0xa1, 0xf9, 0x37, 0x7c, 0xb7,
+	0x66, 0xce, 0x74, 0xf4, 0x1d, 0xe8, 0xd3, 0x2c, 0xbc, 0xb8, 0x11, 0xd2, 0xce, 0x5d, 0x8f, 0xf0,
+	0x95, 0x90, 0xec, 0x53, 0xe8, 0xea, 0x97, 0xdf, 0x31, 0x7b, 0x75, 0xb8, 0x2b, 0xf2, 0x73, 0x7b,
+	0xad, 0x61, 0xd0, 0x6b, 0x31, 0xd8, 0x3c, 0xd6, 0x6f, 0x3f, 0xf6, 0x11, 0xf8, 0x34, 0x0a, 0x95,
+	0xae, 0xfe, 0xce, 0xc8, 0x66, 0x60, 0xcc, 0xad, 0xe8, 0x12, 0xee, 0xed, 0x64, 0x6c, 0x32, 0x39,
+	0xbb, 0x99, 0xb6, 0x2c, 0x06, 0x96, 0x35, 0xda, 0xfd, 0x02, 0x13, 0x9c, 0x2b, 0x5c, 0xe8, 0x7e,
+	0xf7, 0x79, 0x83, 0xa3, 0x9f, 0x9c, 0x6d, 0x5c, 0x9d, 0x8f, 0xb6, 0x7b, 0x9e, 0xad, 0xd7, 0x22,
+	0x5d, 0xd8, 0xd0, 0x35, 0xa4, 0xbe, 0x2d, 0x66, 0x36, 0x74, 0x67, 0x31, 0x23, 0x2c, 0x73, 0xcb,
+	0x60, 0x47, 0xe6, 0x6c, 0x08, 0x83, 0x35, 0x8a, 0xa2, 0x94, 0xb8, 0xc6, 0x54, 0xd9, 0x16, 0xb4,
+	0x4d, 0xec, 0x01, 0xf4, 0x94, 0xb8, 0x7e, 0x41, 0xb3, 0x67, 0x99, 0x54, 0xe2, 0xfa, 0x04, 0x2b,
+	0xf6, 0x2e, 0x04, 0xcb, 0x18, 0x93, 0x85, 0x76, 0x19, 0x3a, 0xfb, 0xda, 0x70, 0x82, 0x55, 0xf4,
+	0xb3, 0x03, 0xdd, 0x29, 0xca, 0x1b, 0x94, 0x6f, 0x24, 0x17, 0x6d, 0x39, 0x75, 0xff, 0x41, 0x4e,
+	0xbd, 0xbb, 0xe5, 0xd4, 0xdf, 0xca, 0xe9, 0x7d, 0xf0, 0xa7, 0x72, 0x7e, 0x3c, 0xd1, 0x15, 0xb9,
+	0xdc, 0x00, 0x9a, 0xc6, 0xf1, 0x5c, 0xc5, 0x37, 0x68, 0x35, 0xd6, 0xa2, 0xe8, 0x47, 0x07, 0xba,
+	0xa7, 0xa2, 0xca, 0x4a, 0xf5, 0xca, 0x84, 0x0d, 0x61, 0x30, 0xce, 0xf3, 0x24, 0x9e, 0x0b, 0x15,
+	0x67, 0xa9, 0xad, 0xb6, 0x6d, 0xa2, 0x1b, 0xcf, 0x5a, 0xbd, 0x33, 0x75, 0xb7, 0x4d, 0xb4, 0xa1,
+	0x47, 0x5a, 0x05, 0x8d, 0xa4, 0xb5, 0x36, 0xd4, 0x88, 0x9f, 0x76, 0xd2, 0x03, 0xc7, 0xa5, 0xca,
+	0x96, 0x49, 0x76, 0xab, 0x5f, 0xd2, 0xe7, 0x0d, 0x8e, 0xfe, 0xe8, 0x80, 0xf7, 0x5f, 0xa9, 0xdb,
+	0x1e, 0x38, 0xb1, 0x25, 0xd2, 0x89, 0x1b, 0xad, 0xeb, 0xb5, 0xb4, 0x2e, 0x84, 0x5e, 0x25, 0x45,
+	0x7a, 0x8d, 0x45, 0xd8, 0xd7, 0xca, 0x51, 0x43, 0xed, 0xd1, 0x3b, 0x62, 0x44, 0x2e, 0xe0, 0x35,
+	0x6c, 0x66, 0x1e, 0x5a, 0x33, 0xff, 0x89, 0xd5, 0xc3, 0x81, 0xae, 0x28, 0xdc, 0x6d, 0xcb, 0xbf,
+	0x27, 0x83, 0x7f, 0x3a, 0xe0, 0x37, 0x0b, 0x73, 0xb4, 0xbb, 0x30, 0x47, 0xdb, 0x85, 0x99, 0x1c,
+	0xd6, 0x0b, 0x33, 0x39, 0x24, 0xcc, 0xcf, 0xea, 0x85, 0xe1, 0x67, 0x44, 0xd6, 0x53, 0x99, 0x95,
+	0xf9, 0x61, 0x65, 0x58, 0x0d, 0x78, 0x83, 0x69, 0xca, 0xbe, 0x5d, 0xa1, 0xb4, 0xad, 0x0e, 0xb8,
+	0x45, 0x34, 0x93, 0xa7, 0x5a, 0x4c, 0x4c, 0x73, 0x0d, 0x60, 0x1f, 0x80, 0xcf, 0xa9, 0x79, 0xba,
+	0xc3, 0x3b, 0xbc, 0x68, 0x33, 0x37, 0x5e, 0x0a, 0x6a, 0xbe, 0x55, 0xec, 0xff, 0x49, 0xfd, 0xe5,
+	0xf2, 0x31, 0x74, 0xa7, 0xab, 0x78, 0xa9, 0xea, 0x7f, 0x95, 0xb7, 0x5a, 0x62, 0x14, 0xaf, 0x51,
+	0xfb, 0xb8, 0xbd, 0x12, 0x9d, 0x43, 0xd0, 0x18, 0xb7, 0xe5, 0x38, 0xed, 0x72, 0x18, 0x78, 0x97,
+	0x69, 0xac, 0xea, 0xb5, 0xa4, 0x33, 0x3d, 0xf6, 0xbc, 0x14, 0xa9, 0x8a, 0x55, 0x55, 0xaf, 0x65,
+	0x8d, 0xa3, 0x27, 0xb6, 0x7c, 0x0a, 0x77, 0x99, 0xe7, 0x28, 0xed, 0x8a, 0x1b, 0xa0, 0x93, 0x64,
+	0xb7, 0x68, 0xd4, 0xd9, 0xe5, 0x06, 0x44, 0xdf, 0x41, 0x30, 0x4e, 0x50, 0x2a, 0x5e, 0x26, 0xaf,
+	0x6a, 0x3a, 0x03, 0xef, 0xeb, 0xe9, 0x37, 0xcf, 0xeb, 0x0a, 0xe8, 0xbc, 0x5d, 0x67, 0xf7, 0x6f,
+	0xeb, 0x7c, 0x22, 0x72, 0x71, 0x3c, 0xd1, 0x73, 0xee, 0x72, 0x8b, 0xa2, 0x8f, 0xc0, 0x23, 0xd9,
+	0x68, 0x45, 0xf6, 0x5e, 0x27, 0x39, 0xb3, 0xae, 0xfe, 0x2a, 0x7c, 0xf2, 0x57, 0x00, 0x00, 0x00,
+	0xff, 0xff, 0xda, 0x20, 0xfc, 0x99, 0x27, 0x0a, 0x00, 0x00,
 }
bolt/internal/internal.proto

@@ -23,15 +23,15 @@ message Dashboard {
 }

 message DashboardCell {
 	int32 x = 1;                // X-coordinate of Cell in the Dashboard
 	int32 y = 2;                // Y-coordinate of Cell in the Dashboard
 	int32 w = 3;                // Width of Cell in the Dashboard
 	int32 h = 4;                // Height of Cell in the Dashboard
 	repeated Query queries = 5; // Time-series data queries for Dashboard
 	string name = 6;            // User-facing name for this Dashboard
 	string type = 7;            // Dashboard visualization type
 	string ID = 8;              // id is the unique id of the dashboard. MIGRATED FIELD added in 1.2.0-beta6
 	map<string, Axis> axes = 9; // Axes represent the graphical viewport for a cell's visualizations
 }

 message Axis {
@@ -54,18 +54,18 @@ message Template {
 }

 message TemplateValue {
 	string type = 1;        // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
 	string value = 2;       // Value is the specific value used to replace a template in an InfluxQL query
 	bool selected = 3;      // Selected states that this variable has been picked to use for replacement
 }

 message TemplateQuery {
 	string command = 1;     // Command is the query itself
 	string db = 2;          // DB the database for the query (optional)
 	string rp = 3;          // RP is a retention policy and optional;
 	string measurement = 4; // Measurement is the optionally selected measurement for the query
 	string tag_key = 5;     // TagKey is the optionally selected tag key for the query
 	string field_key = 6;   // FieldKey is the optionally selected field key for the query
 }

 message Server {
@@ -101,31 +101,38 @@ message Cell {
 }

 message Query {
 	string Command = 1;            // Command is the query itself
 	string DB = 2;                 // DB the database for the query (optional)
 	string RP = 3;                 // RP is a retention policy and optional;
 	repeated string GroupBys = 4;  // GroupBys define the groups to combine in the query
 	repeated string Wheres = 5;    // Wheres define the restrictions on the query
 	string Label = 6;              // Label is the name of the Y-Axis
 	Range Range = 7;               // Range is the upper and lower bound of the Y-Axis
 	string Source = 8;             // Source is the optional URI to the data source
+	repeated TimeShift Shifts = 9; // TimeShift represents a shift to apply to an influxql query's time range
 }

+message TimeShift {
+	string Label = 1;    // Label user facing description
+	string Unit = 2;     // Unit influxql time unit representation i.e. ms, s, m, h, d
+	string Quantity = 3; // Quantity number of units
+}
+
 message Range {
 	int64 Upper = 1; // Upper is the upper-bound of the range
 	int64 Lower = 2; // Lower is the lower-bound of the range
 }

 message AlertRule {
 	string ID = 1;    // ID is the unique ID of this alert rule
 	string JSON = 2;  // JSON byte representation of the alert
 	int64 SrcID = 3;  // SrcID is the id of the source this alert is associated with
 	int64 KapaID = 4; // KapaID is the id of the kapacitor this alert is associated with
 }

 message User {
 	uint64 ID = 1;   // ID is the unique ID of this user
 	string Name = 2; // Name is the user's login name
 }

 // The following is a vim modeline, it autoconfigures vim to have the
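Reading the TimeShift field comments together: a shift appears to be rendered by concatenating Quantity and Unit into an InfluxQL duration offset. A minimal sketch of that assumption (not code from this diff):

// offsetLiteral turns a TimeShift into the duration literal its comments
// describe, e.g. Quantity "1" + Unit "w" -> "1w", as used in a clause
// like `time > now() - 1w`. This rendering rule is inferred from the
// field comments above, not excerpted from chronograf.
func offsetLiteral(quantity, unit string) string {
	return quantity + unit
}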
bolt/internal/internal_test.go

@@ -163,6 +163,7 @@ func Test_MarshalDashboard(t *testing.T) {
 						Upper: int64(100),
 					},
 					Source: "/chronograf/v1/sources/1",
+					Shifts: []chronograf.TimeShift{},
 				},
 			},
 			Axes: map[string]chronograf.Axis{
@@ -210,6 +211,7 @@ func Test_MarshalDashboard_WithLegacyBounds(t *testing.T) {
 					Range: &chronograf.Range{
 						Upper: int64(100),
 					},
+					Shifts: []chronograf.TimeShift{},
 				},
 			},
 			Axes: map[string]chronograf.Axis{
@@ -241,6 +243,7 @@ func Test_MarshalDashboard_WithLegacyBounds(t *testing.T) {
 					Range: &chronograf.Range{
 						Upper: int64(100),
 					},
+					Shifts: []chronograf.TimeShift{},
 				},
 			},
 			Axes: map[string]chronograf.Axis{
@@ -285,6 +288,7 @@ func Test_MarshalDashboard_WithEmptyLegacyBounds(t *testing.T) {
 					Range: &chronograf.Range{
 						Upper: int64(100),
 					},
+					Shifts: []chronograf.TimeShift{},
 				},
 			},
 			Axes: map[string]chronograf.Axis{
@@ -316,6 +320,7 @@ func Test_MarshalDashboard_WithEmptyLegacyBounds(t *testing.T) {
 					Range: &chronograf.Range{
 						Upper: int64(100),
 					},
+					Shifts: []chronograf.TimeShift{},
 				},
 			},
 			Axes: map[string]chronograf.Axis{
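These fixtures expect Shifts to be an empty, non-nil slice because the unmarshal path always allocates one with make. A sketch of why the distinction matters, assuming the table tests compare results with reflect.DeepEqual (common in Go table tests, but an assumption here):

package main

import (
	"fmt"
	"reflect"
)

func main() {
	var nilShifts []string           // nil slice: what an untouched fixture would hold
	emptyShifts := make([]string, 0) // what make(..., len(q.Shifts)) produces for zero shifts

	// DeepEqual treats nil and empty slices as different, so the expected
	// fixtures must spell out the empty slice explicitly.
	fmt.Println(reflect.DeepEqual(nilShifts, emptyShifts)) // false
}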
chronograf.go (281 changes)
@@ -1,21 +1,10 @@
 package chronograf

 import (
 	"bytes"
 	"context"
 	"encoding/json"
 	"errors"
 	"fmt"
 	"io"
 	"net/http"
 	"regexp"
 	"strconv"
 	"strings"
 	"time"
 	"unicode"
 	"unicode/utf8"

 	"github.com/influxdata/influxdb/influxql"
 )

 // General errors.
@ -136,196 +125,17 @@ type Range struct {
|
|||
Lower int64 `json:"lower"` // Lower is the lower bound
|
||||
}
|
||||
|
||||
type TemplateVariable interface {
|
||||
fmt.Stringer
|
||||
Name() string // returns the variable name
|
||||
Precedence() uint // ordinal indicating precedence level for replacement
|
||||
}
|
||||
|
||||
type ExecutableVar interface {
|
||||
Exec(string)
|
||||
}
|
||||
|
||||
// TemplateValue is a value use to replace a template in an InfluxQL query
|
||||
type BasicTemplateValue struct {
|
||||
type TemplateValue struct {
|
||||
Value string `json:"value"` // Value is the specific value used to replace a template in an InfluxQL query
|
||||
Type string `json:"type"` // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
|
||||
Selected bool `json:"selected"` // Selected states that this variable has been picked to use for replacement
|
||||
}
|
||||
|
||||
// TemplateVar is a named variable within an InfluxQL query to be replaced with Values
|
||||
type BasicTemplateVar struct {
|
||||
Var string `json:"tempVar"` // Var is the string to replace within InfluxQL
|
||||
Values []BasicTemplateValue `json:"values"` // Values are the replacement values within InfluxQL
|
||||
}
|
||||
|
||||
func (t BasicTemplateVar) Name() string {
|
||||
return t.Var
|
||||
}
|
||||
|
||||
// String converts the template variable into a correct InfluxQL string based
|
||||
// on its type
|
||||
func (t BasicTemplateVar) String() string {
|
||||
if len(t.Values) == 0 {
|
||||
return ""
|
||||
}
|
||||
switch t.Values[0].Type {
|
||||
case "tagKey", "fieldKey", "measurement", "database":
|
||||
return `"` + t.Values[0].Value + `"`
|
||||
case "tagValue", "timeStamp":
|
||||
return `'` + t.Values[0].Value + `'`
|
||||
case "csv", "constant":
|
||||
return t.Values[0].Value
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
func (t BasicTemplateVar) Precedence() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
type GroupByVar struct {
|
||||
Var string `json:"tempVar"` // the name of the variable as present in the query
|
||||
Duration time.Duration `json:"duration,omitempty"` // the Duration supplied by the query
|
||||
Resolution uint `json:"resolution"` // the available screen resolution to render the results of this query
|
||||
ReportingInterval time.Duration `json:"reportingInterval,omitempty"` // the interval at which data is reported to this series
|
||||
}
|
||||
|
||||
// Exec is responsible for extracting the Duration from the query
|
||||
func (g *GroupByVar) Exec(query string) {
|
||||
whereClause := "WHERE"
|
||||
start := strings.Index(query, whereClause)
|
||||
if start == -1 {
|
||||
// no where clause
|
||||
return
|
||||
}
|
||||
|
||||
// reposition start to after the 'where' keyword
|
||||
durStr := query[start+len(whereClause):]
|
||||
|
||||
// attempt to parse out a relative time range
|
||||
// locate duration literal start
|
||||
prefix := "time > now() - "
|
||||
lowerDuration, err := g.parseRelative(durStr, prefix)
|
||||
if err == nil {
|
||||
prefix := "time < now() - "
|
||||
upperDuration, err := g.parseRelative(durStr, prefix)
|
||||
if err != nil {
|
||||
g.Duration = lowerDuration
|
||||
return
|
||||
}
|
||||
g.Duration = lowerDuration - upperDuration
|
||||
if g.Duration < 0 {
|
||||
g.Duration = -g.Duration
|
||||
}
|
||||
}
|
||||
|
||||
dur, err := g.parseAbsolute(durStr)
|
||||
if err == nil {
|
||||
// we found an absolute time range
|
||||
g.Duration = dur
|
||||
}
|
||||
}
|
||||
|
||||
// parseRelative locates and extracts a duration value from a fragment of an
|
||||
// InfluxQL query following the "where" keyword. For example, in the fragment
|
||||
// "time > now() - 180d GROUP BY :interval:", parseRelative would return a
|
||||
// duration equal to 180d
|
||||
func (g *GroupByVar) parseRelative(fragment string, prefix string) (time.Duration, error) {
|
||||
start := strings.Index(fragment, prefix)
|
||||
if start == -1 {
|
||||
return time.Duration(0), errors.New("not a relative duration")
|
||||
}
|
||||
|
||||
// reposition to duration literal
|
||||
durFragment := fragment[start+len(prefix):]
|
||||
|
||||
// init counters
|
||||
pos := 0
|
||||
|
||||
// locate end of duration literal
|
||||
for pos < len(durFragment) {
|
||||
rn, _ := utf8.DecodeRuneInString(durFragment[pos:])
|
||||
if unicode.IsSpace(rn) {
|
||||
break
|
||||
}
|
||||
pos++
|
||||
}
|
||||
|
||||
// attempt to parse what we suspect is a duration literal
|
||||
dur, err := influxql.ParseDuration(durFragment[:pos])
|
||||
if err != nil {
|
||||
return dur, err
|
||||
}
|
||||
|
||||
return dur, nil
|
||||
}
|
||||
|
||||
// parseAbsolute will determine the duration between two absolute timestamps
|
||||
// found within an InfluxQL fragment following the "where" keyword. For
|
||||
// example, the fragement "time > '1985-10-25T00:01:21-0800 and time <
|
||||
// '1985-10-25T00:01:22-0800'" would yield a duration of 1m'
|
||||
func (g *GroupByVar) parseAbsolute(fragment string) (time.Duration, error) {
|
||||
timePtn := `time\s[>|<]\s'([0-9\-T\:\.Z]+)'` // Playground: http://gobular.com/x/208f66bd-1889-4269-ab47-1efdfeeb63f0
|
||||
re, err := regexp.Compile(timePtn)
|
||||
if err != nil {
|
||||
// this is a developer error and should complain loudly
|
||||
panic("Bad Regex: err:" + err.Error())
|
||||
}
|
||||
|
||||
if !re.Match([]byte(fragment)) {
|
||||
return time.Duration(0), errors.New("absolute duration not found")
|
||||
}
|
||||
|
||||
// extract at most two times
|
||||
matches := re.FindAll([]byte(fragment), 2)
|
||||
|
||||
// parse out absolute times
|
||||
durs := make([]time.Time, 0, 2)
|
||||
for _, match := range matches {
|
||||
durStr := re.FindSubmatch(match)
|
||||
if tm, err := time.Parse(time.RFC3339Nano, string(durStr[1])); err == nil {
|
||||
durs = append(durs, tm)
|
||||
}
|
||||
}
|
||||
|
||||
if len(durs) == 1 {
|
||||
durs = append(durs, time.Now())
|
||||
}
|
||||
|
||||
// reject more than 2 times found
|
||||
if len(durs) != 2 {
|
||||
return time.Duration(0), errors.New("must provide exactly two absolute times")
|
||||
}
|
||||
|
||||
dur := durs[1].Sub(durs[0])
|
||||
|
||||
return dur, nil
|
||||
}
|
||||
|
||||
func (g *GroupByVar) String() string {
	// The function is: ((total_seconds * millisecond_conversion) / group_by) = pixels / 3
	// Number of points given the pixels
	pixels := float64(g.Resolution) / 3.0
	msPerPixel := float64(g.Duration/time.Millisecond) / pixels
	secPerPixel := float64(g.Duration/time.Second) / pixels
	if secPerPixel < 1.0 {
		if msPerPixel < 1.0 {
			msPerPixel = 1.0
		}
		return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
	}
	// If the group by is more than 1 second, round to the second
	return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
}

func (g *GroupByVar) Name() string {
	return g.Var
}

func (g *GroupByVar) Precedence() uint {
	return 1
}

type TemplateVar struct {
	Var    string          `json:"tempVar"` // Var is the string to replace within InfluxQL
	Values []TemplateValue `json:"values"`  // Values are the replacement values within InfluxQL
}

// TemplateID is the unique ID used to identify a template

@ -333,7 +143,7 @@ type TemplateID string

// Template represents a series of choices to replace TemplateVars within InfluxQL
type Template struct {
	BasicTemplateVar
	TemplateVar
	ID    TemplateID `json:"id"`    // ID is the unique ID associated with this template
	Type  string     `json:"type"`  // Type can be fieldKeys, tagKeys, tagValues, CSV, constant, query, measurements, databases
	Label string     `json:"label"` // Label is a user-facing description of the Template
@ -342,69 +152,15 @@ type Template struct

// Query retrieves a Response from a TimeSeries.
type Query struct {
	Command      string       `json:"query"`                // Command is the query itself
	DB           string       `json:"db,omitempty"`         // DB is optional and if empty will not be used.
	RP           string       `json:"rp,omitempty"`         // RP is a retention policy and optional; if empty will not be used.
	TemplateVars TemplateVars `json:"tempVars,omitempty"`   // TemplateVars are template variables to replace within an InfluxQL query
	Wheres       []string     `json:"wheres,omitempty"`     // Wheres restricts the query to certain attributes
	GroupBys     []string     `json:"groupbys,omitempty"`   // GroupBys collate the query by these tags
	Resolution   uint         `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
	Label        string       `json:"label,omitempty"`      // Label is the Y-Axis label for the data
	Range        *Range       `json:"range,omitempty"`      // Range is the default Y-Axis range for the data
}

// TemplateVars are a heterogeneous collection of different TemplateVariables
// with the capability to decode arbitrary JSON into the appropriate template
// variable type
type TemplateVars []TemplateVariable

func (t *TemplateVars) UnmarshalJSON(text []byte) error {
	// TODO: Need to test that server throws an error when :interval:'s Resolution or ReportingInterval are zero-valued
	rawVars := bytes.NewReader(text)
	dec := json.NewDecoder(rawVars)

	// read open bracket
	rawTok, err := dec.Token()
	if err != nil {
		return err
	}

	tok, isDelim := rawTok.(json.Delim)
	if !isDelim || tok != '[' {
		return errors.New("Expected JSON array, but found " + tok.String())
	}

	for dec.More() {
		var halfBakedVar json.RawMessage
		err := dec.Decode(&halfBakedVar)
		if err != nil {
			return err
		}

		var agb GroupByVar
		err = json.Unmarshal(halfBakedVar, &agb)
		if err != nil {
			return err
		}

		// ensure that we really have a GroupByVar
		if agb.Resolution != 0 {
			(*t) = append(*t, &agb)
			continue
		}

		var tvar BasicTemplateVar
		err = json.Unmarshal(halfBakedVar, &tvar)
		if err != nil {
			return err
		}

		// ensure that we really have a BasicTemplateVar
		if len(tvar.Values) != 0 {
			(*t) = append(*t, tvar)
		}
	}
	return nil
}
	Command      string        `json:"query"`                // Command is the query itself
	DB           string        `json:"db,omitempty"`         // DB is optional and if empty will not be used.
	RP           string        `json:"rp,omitempty"`         // RP is a retention policy and optional; if empty will not be used.
	TemplateVars []TemplateVar `json:"tempVars,omitempty"`   // TemplateVars are template variables to replace within an InfluxQL query
	Wheres       []string      `json:"wheres,omitempty"`     // Wheres restricts the query to certain attributes
	GroupBys     []string      `json:"groupbys,omitempty"`   // GroupBys collate the query by these tags
	Resolution   uint          `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
	Label        string        `json:"label,omitempty"`      // Label is the Y-Axis label for the data
	Range        *Range        `json:"range,omitempty"`      // Range is the default Y-Axis range for the data
}

// DashboardQuery includes state for the query builder. This is a transition

@ -415,6 +171,7 @@ type DashboardQuery struct
	Range       *Range      `json:"range,omitempty"`       // Range is the default Y-Axis range for the data
	QueryConfig QueryConfig `json:"queryConfig,omitempty"` // QueryConfig represents the query state that is understood by the data explorer
	Source      string      `json:"source"`                // Source is the optional URI to the data source for this queryConfig
	Shifts      []TimeShift `json:"-"`                     // Shifts represents shifts to apply to an influxql query's time range. Clients expect the shift to be in the generated QueryConfig
}

// TemplateQuery is used to retrieve choices for template replacement

@ -528,6 +285,13 @@ type DurationRange struct
	Lower string `json:"lower"`
}

// TimeShift represents a shift to apply to an influxql query's time range
type TimeShift struct {
	Label    string `json:"label"`    // Label is the user-facing description
	Unit     string `json:"unit"`     // Unit is the influxql time unit representation, i.e. ms, s, m, h, d
	Quantity string `json:"quantity"` // Quantity is the number of units
}
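
For orientation, a TimeShift marshals to the JSON shape the UI exchanges. A minimal sketch (the field values are borrowed from the dashboard test fixtures later in this diff; the surrounding main function is illustrative only):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/influxdata/chronograf"
)

func main() {
	// A seven-day shift, as a dashboard cell would carry it.
	shift := chronograf.TimeShift{Label: "Best Week Evar", Unit: "d", Quantity: "7"}
	b, err := json.Marshal(shift)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"label":"Best Week Evar","unit":"d","quantity":"7"}
}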

// QueryConfig represents a UI query from the data explorer
type QueryConfig struct {
	ID string `json:"id,omitempty"`

@ -541,6 +305,7 @@ type QueryConfig struct
	Fill    string         `json:"fill,omitempty"`
	RawText *string        `json:"rawText"`
	Range   *DurationRange `json:"range"`
	Shifts  []TimeShift    `json:"shifts"`
}

// KapacitorNode adds arguments and properties to an alert

@ -1,63 +0,0 @@
package chronograf_test

import (
	"testing"

	"github.com/influxdata/chronograf"
)

func Test_GroupByVar(t *testing.T) {
	gbvTests := []struct {
		name       string
		query      string
		want       string
		resolution uint // the screen resolution to render queries into
	}{
		{
			name:       "relative time only lower bound with one day of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
			resolution: 1000,
			want:       "time(259s)",
		},
		{
			name:       "relative time with relative upper bound with one minute of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
			resolution: 1000,
			want:       "time(180ms)",
		},
		{
			name:       "relative time with relative lower bound and now upper with one day of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
			resolution: 1000,
			want:       "time(259s)",
		},
		{
			name:       "absolute time with one minute of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
			resolution: 1000,
			want:       "time(180ms)",
		},
		{
			name:       "absolute time with nanoseconds and zero duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
			resolution: 1000,
			want:       "time(1ms)",
		},
	}

	for _, test := range gbvTests {
		t.Run(test.name, func(t *testing.T) {
			gbv := chronograf.GroupByVar{
				Var:        ":interval:",
				Resolution: test.resolution,
			}

			gbv.Exec(test.query)
			got := gbv.String()

			if got != test.want {
				t.Fatalf("%q - durations not equal! Want: %s, Got: %s", test.name, test.want, got)
			}
		})
	}
}

@ -674,7 +674,7 @@ def package(build_output, pkg_name, version, nightly=False, iteration=1, static=
package_build_root,
current_location)
if package_type == "rpm":
fpm_command += "--depends coreutils"
fpm_command += "--depends coreutils --depends shadow-utils"
# TODO: Check for changelog
# elif package_type == "deb":
#     fpm_command += "--deb-changelog {} ".format(os.path.join(os.getcwd(), "CHANGELOG.md"))

@ -9,7 +9,7 @@ After=network-online.target
User=chronograf
Group=chronograf
EnvironmentFile=-/etc/default/chronograf
ExecStart=/usr/bin/chronograf --host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned
ExecStart=/usr/bin/chronograf --host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned $CHRONOGRAF_OPTS
KillMode=control-group
Restart=on-failure

@ -13,7 +13,8 @@
# Script to execute when starting
SCRIPT="/usr/bin/chronograf"
# Options to pass to the script on startup
SCRIPT_OPTS="--host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned"
. /etc/default/chronograf
SCRIPT_OPTS="--host 0.0.0.0 --port 8888 -b /var/lib/chronograf/chronograf-v1.db -c /usr/share/chronograf/canned ${CHRONOGRAF_OPTS}"

# User to run the process under
RUNAS=chronograf

@ -9,6 +9,7 @@ import (
	"net/http"
	"net/url"
	"strings"
	"time"

	"github.com/influxdata/chronograf"
)

@ -55,7 +56,10 @@ func (c *Client) query(u *url.URL, q chronograf.Query) (chronograf.Response, err
	command := q.Command
	// TODO(timraymond): move this up to the Query() function
	if len(q.TemplateVars) > 0 {
		command = TemplateReplace(q.Command, q.TemplateVars)
		command, err = TemplateReplace(q.Command, q.TemplateVars, time.Now())
		if err != nil {
			return nil, err
		}
	}
	logs := c.Logger.
		WithField("component", "proxy").

@ -276,11 +276,11 @@ func Test_Influx_HTTPS_InsecureSkipVerify(t *testing.T) {
	called = false
	q = ""
	query = chronograf.Query{
		Command: "select $field from cpu",
		TemplateVars: chronograf.TemplateVars{
			chronograf.BasicTemplateVar{
				Var: "$field",
				Values: []chronograf.BasicTemplateValue{
		Command: "select :field: from cpu",
		TemplateVars: []chronograf.TemplateVar{
			chronograf.TemplateVar{
				Var: ":field:",
				Values: []chronograf.TemplateValue{
					{
						Value: "usage_user",
						Type:  "fieldKey",

@ -10,6 +10,52 @@ import (
	"github.com/influxdata/influxdb/influxql"
)

func TimeRangeAsEpochNano(expr influxql.Expr, now time.Time) (min, max int64, err error) {
	tmin, tmax, err := influxql.TimeRange(expr)
	if err != nil {
		return 0, 0, err
	}
	if tmin.IsZero() {
		min = time.Unix(0, influxql.MinTime).UnixNano()
	} else {
		min = tmin.UnixNano()
	}
	if tmax.IsZero() {
		max = now.UnixNano()
	} else {
		max = tmax.UnixNano()
	}
	return
}
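
A minimal usage sketch (not part of the diff), assuming the exported influx package of this repo and the influxql import above: parse a time condition, substitute now() as ParseTime does below, then read the window bounds in epoch nanoseconds.

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/chronograf/influx"
	"github.com/influxdata/influxdb/influxql"
)

func main() {
	now := time.Now()
	cond, err := influxql.ParseExpr("time > now() - 1h")
	if err != nil {
		panic(err)
	}
	// Substitute now() before asking for the range.
	cond = influxql.Reduce(cond, &influxql.NowValuer{Now: now})
	min, max, err := influx.TimeRangeAsEpochNano(cond, now)
	if err != nil {
		panic(err)
	}
	// Roughly one hour in nanoseconds; the open upper bound falls back to now.
	fmt.Println(max - min)
}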

const WhereToken = "WHERE"

func ParseTime(influxQL string, now time.Time) (time.Duration, error) {
	start := strings.Index(strings.ToUpper(influxQL), WhereToken)
	if start == -1 {
		return 0, fmt.Errorf("not a relative duration")
	}
	start += len(WhereToken)
	where := influxQL[start:]
	cond, err := influxql.ParseExpr(where)
	if err != nil {
		return 0, err
	}
	nowVal := &influxql.NowValuer{
		Now: now,
	}
	cond = influxql.Reduce(cond, nowVal)
	min, max, err := TimeRangeAsEpochNano(cond, now)
	if err != nil {
		return 0, err
	}
	dur := time.Duration(max - min)
	if dur < 0 {
		dur = 0
	}
	return dur, nil
}
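
ParseTime in use, a small sketch: the result is the width of the WHERE time window with now() substituted. Per TestParseTime further down, influxql.TimeRange nudges exclusive bounds by a nanosecond, so the window comes out a hair under its nominal width.

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/chronograf/influx"
)

func main() {
	now, _ := time.Parse(time.RFC3339, "2000-01-01T00:00:00Z")
	dur, err := influx.ParseTime(`SELECT "usage_idle" FROM "cpu" WHERE time > now() - 1h`, now)
	if err != nil {
		panic(err)
	}
	fmt.Println(dur) // just under an hour
}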

// Convert changes an InfluxQL query to a QueryConfig
func Convert(influxQL string) (chronograf.QueryConfig, error) {
	itsDashboardTime := false

@ -2,6 +2,7 @@ package influx

import (
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/influxdata/chronograf"

@ -767,3 +768,43 @@ func TestConvert(t *testing.T) {
		})
	}
}

func TestParseTime(t *testing.T) {
	tests := []struct {
		name     string
		influxQL string
		now      string
		want     time.Duration
		wantErr  bool
	}{
		{
			name:     "time equal",
			now:      "2000-01-01T00:00:00Z",
			influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
			want:     0,
		},
		{
			name:     "time shifted by one hour",
			now:      "2000-01-01T00:00:00Z",
			influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
			want:     3599999999998,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			now, err := time.Parse(time.RFC3339, tt.now)
			if err != nil {
				t.Fatalf("%v", err)
			}
			got, err := ParseTime(tt.influxQL, now)
			if (err != nil) != tt.wantErr {
				t.Errorf("ParseTime() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if got != tt.want {
				t.Logf("%d", got)
				t.Errorf("ParseTime() = %v, want %v", got, tt.want)
			}
		})
	}
}

@ -1,40 +1,106 @@
package influx

import (
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/influxdata/chronograf"
)

// TemplateReplace replaces templates with values within the query string
func TemplateReplace(query string, templates chronograf.TemplateVars) string {
	tvarsByPrecedence := make(map[uint]chronograf.TemplateVars, len(templates))
	maxPrecedence := uint(0)
	for _, tmp := range templates {
		precedence := tmp.Precedence()
		if precedence > maxPrecedence {
			maxPrecedence = precedence
		}
		tvarsByPrecedence[precedence] = append(tvarsByPrecedence[precedence], tmp)
	}

	replaced := query
	for prc := uint(0); prc <= maxPrecedence; prc++ {
		replacements := []string{}

		for _, v := range tvarsByPrecedence[prc] {
			if evar, ok := v.(chronograf.ExecutableVar); ok {
				evar.Exec(replaced)
			}
			newVal := v.String()
			if newVal != "" {
				replacements = append(replacements, v.Name(), newVal)
			}
func SortTemplates(ts []chronograf.TemplateVar) []chronograf.TemplateVar {
	sort.Slice(ts, func(i, j int) bool {
		if len(ts[i].Values) != len(ts[j].Values) {
			return len(ts[i].Values) < len(ts[j].Values)
		}

		replacer := strings.NewReplacer(replacements...)
		replaced = replacer.Replace(replaced)
	}
		if len(ts[i].Values) == 0 {
			return i < j
		}

	return replaced
		for k := range ts[i].Values {
			if ts[i].Values[k].Type != ts[j].Values[k].Type {
				return ts[i].Values[k].Type < ts[j].Values[k].Type
			}
			if ts[i].Values[k].Value != ts[j].Values[k].Value {
				return ts[i].Values[k].Value < ts[j].Values[k].Value
			}
		}
		return i < j
	})
	return ts
}
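
SortTemplates only imposes a deterministic rendering order (by value count, then value type and value). A sketch of the effect, with hypothetical variables:

package main

import (
	"fmt"

	"github.com/influxdata/chronograf"
	"github.com/influxdata/chronograf/influx"
)

func main() {
	// The variable with fewer values sorts first, so substitution order
	// no longer depends on the order the client sent them in.
	ts := []chronograf.TemplateVar{
		{Var: ":b:", Values: []chronograf.TemplateValue{
			{Type: "csv", Value: "1"}, {Type: "csv", Value: "2"},
		}},
		{Var: ":a:", Values: []chronograf.TemplateValue{
			{Type: "csv", Value: "9"},
		}},
	}
	ts = influx.SortTemplates(ts)
	fmt.Println(ts[0].Var, ts[1].Var) // :a: :b:
}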

// RenderTemplate converts the template variable into a correct InfluxQL string based
// on its type
func RenderTemplate(query string, t chronograf.TemplateVar, now time.Time) (string, error) {
	if len(t.Values) == 0 {
		return query, nil
	}
	switch t.Values[0].Type {
	case "tagKey", "fieldKey", "measurement", "database":
		return strings.Replace(query, t.Var, `"`+t.Values[0].Value+`"`, -1), nil
	case "tagValue", "timeStamp":
		return strings.Replace(query, t.Var, `'`+t.Values[0].Value+`'`, -1), nil
	case "csv", "constant":
		return strings.Replace(query, t.Var, t.Values[0].Value, -1), nil
	}

	tv := map[string]string{}
	for i := range t.Values {
		tv[t.Values[i].Type] = t.Values[i].Value
	}

	if res, ok := tv["resolution"]; ok {
		resolution, err := strconv.ParseInt(res, 0, 64)
		if err != nil {
			return "", err
		}
		ppp, ok := tv["pointsPerPixel"]
		if !ok {
			ppp = "3"
		}
		pixelsPerPoint, err := strconv.ParseInt(ppp, 0, 64)
		if err != nil {
			return "", err
		}

		dur, err := ParseTime(query, now)
		if err != nil {
			return "", err
		}
		interval := AutoGroupBy(resolution, pixelsPerPoint, dur)
		return strings.Replace(query, t.Var, interval, -1), nil
	}
	return query, nil
}
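
How RenderTemplate quotes by type, in a minimal sketch: identifiers (tagKey, fieldKey, measurement, database) get double quotes, tag values and timestamps get single quotes, and csv/constant values are spliced in verbatim. The variable names here are hypothetical.

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/chronograf"
	"github.com/influxdata/chronograf/influx"
)

func main() {
	now := time.Now()
	q := `SELECT mean(:field:) FROM "cpu" WHERE host = :host:`
	// Errors are ignored here only to keep the sketch short.
	q, _ = influx.RenderTemplate(q, chronograf.TemplateVar{
		Var:    ":field:",
		Values: []chronograf.TemplateValue{{Type: "fieldKey", Value: "usage_user"}},
	}, now)
	q, _ = influx.RenderTemplate(q, chronograf.TemplateVar{
		Var:    ":host:",
		Values: []chronograf.TemplateValue{{Type: "tagValue", Value: "server01"}},
	}, now)
	fmt.Println(q) // SELECT mean("usage_user") FROM "cpu" WHERE host = 'server01'
}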

func AutoGroupBy(resolution, pixelsPerPoint int64, duration time.Duration) string {
	// The function is: ((total_seconds * millisecond_conversion) / group_by) = pixels / 3
	// Number of points given the pixels
	pixels := float64(resolution) / float64(pixelsPerPoint)
	msPerPixel := float64(duration/time.Millisecond) / pixels
	secPerPixel := float64(duration/time.Second) / pixels
	if secPerPixel < 1.0 {
		if msPerPixel < 1.0 {
			msPerPixel = 1.0
		}
		return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
	}
	// If the group by is more than 1 second, round to the second
	return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
}
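
The arithmetic, worked through for the first TestAutoGroupBy case further down:

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/chronograf/influx"
)

func main() {
	// 700px at 3 pixels per point leaves ~233.3 points to draw; a 24h window
	// then needs 86400s / 233.3 ≈ 370.3s per point, truncated to the second.
	fmt.Println(influx.AutoGroupBy(700, 3, 24*time.Hour)) // time(370s)
}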

// TemplateReplace replaces templates with values within the query string
func TemplateReplace(query string, templates []chronograf.TemplateVar, now time.Time) (string, error) {
	templates = SortTemplates(templates)
	for i := range templates {
		var err error
		query, err = RenderTemplate(query, templates[i], now)
		if err != nil {
			return "", err
		}
	}
	return query, nil
}
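
Putting the pieces together, a sketch: TemplateReplace sorts the variables, then renders each one against the progressively substituted query; now is threaded through so :interval: can be computed from the WHERE clause. The expected output matches the Test_RenderTemplate case below.

package main

import (
	"fmt"
	"time"

	"github.com/influxdata/chronograf"
	"github.com/influxdata/chronograf/influx"
)

func main() {
	now, _ := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
	query, err := influx.TemplateReplace(
		`SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:`,
		[]chronograf.TemplateVar{{
			Var:    ":interval:",
			Values: []chronograf.TemplateValue{{Value: "1000", Type: "resolution"}},
		}},
		now,
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(query)
	// SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY time(259s)
}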

@ -2,6 +2,7 @@ package influx

import (
	"encoding/json"
	"fmt"
	"reflect"
	"testing"
	"time"

@ -13,43 +14,43 @@ func TestTemplateReplace(t *testing.T) {
	tests := []struct {
		name  string
		query string
		vars chronograf.TemplateVars
		vars  []chronograf.TemplateVar
		want  string
	}{
		{
			name:  "select with parameters",
			query: "$METHOD field1, $field FROM $measurement WHERE temperature > $temperature",
			vars: chronograf.TemplateVars{
				chronograf.BasicTemplateVar{
					Var: "$temperature",
					Values: []chronograf.BasicTemplateValue{
			query: ":method: field1, :field: FROM :measurement: WHERE temperature > :temperature:",
			vars: []chronograf.TemplateVar{
				chronograf.TemplateVar{
					Var: ":temperature:",
					Values: []chronograf.TemplateValue{
						{
							Type:  "csv",
							Value: "10",
						},
					},
				},
				chronograf.BasicTemplateVar{
					Var: "$field",
					Values: []chronograf.BasicTemplateValue{
				chronograf.TemplateVar{
					Var: ":field:",
					Values: []chronograf.TemplateValue{
						{
							Type:  "fieldKey",
							Value: "field2",
						},
					},
				},
				chronograf.BasicTemplateVar{
					Var: "$METHOD",
					Values: []chronograf.BasicTemplateValue{
				chronograf.TemplateVar{
					Var: ":method:",
					Values: []chronograf.TemplateValue{
						{
							Type:  "csv",
							Value: "SELECT",
						},
					},
				},
				chronograf.BasicTemplateVar{
					Var: "$measurement",
					Values: []chronograf.BasicTemplateValue{
				chronograf.TemplateVar{
					Var: ":measurement:",
					Values: []chronograf.TemplateValue{
						{
							Type:  "csv",
							Value: `"cpu"`,

@ -62,28 +63,28 @@ func TestTemplateReplace(t *testing.T) {
		{
			name:  "select with parameters and aggregates",
			query: `SELECT mean($field) FROM "cpu" WHERE $tag = $value GROUP BY $tag`,
			vars: chronograf.TemplateVars{
				chronograf.BasicTemplateVar{
			vars: []chronograf.TemplateVar{
				chronograf.TemplateVar{
					Var: "$value",
					Values: []chronograf.BasicTemplateValue{
					Values: []chronograf.TemplateValue{
						{
							Type:  "tagValue",
							Value: "howdy.com",
						},
					},
				},
				chronograf.BasicTemplateVar{
				chronograf.TemplateVar{
					Var: "$tag",
					Values: []chronograf.BasicTemplateValue{
					Values: []chronograf.TemplateValue{
						{
							Type:  "tagKey",
							Value: "host",
						},
					},
				},
				chronograf.BasicTemplateVar{
				chronograf.TemplateVar{
					Var: "$field",
					Values: []chronograf.BasicTemplateValue{
					Values: []chronograf.TemplateValue{
						{
							Type:  "fieldKey",
							Value: "field",

@ -101,8 +102,8 @@ func TestTemplateReplace(t *testing.T) {
		{
			name:  "var without a value",
			query: `SELECT $field FROM "cpu"`,
			vars: chronograf.TemplateVars{
				chronograf.BasicTemplateVar{
			vars: []chronograf.TemplateVar{
				chronograf.TemplateVar{
					Var: "$field",
				},
			},

@ -111,10 +112,10 @@ func TestTemplateReplace(t *testing.T) {
		{
			name:  "var with unknown type",
			query: `SELECT $field FROM "cpu"`,
			vars: chronograf.TemplateVars{
				chronograf.BasicTemplateVar{
			vars: []chronograf.TemplateVar{
				chronograf.TemplateVar{
					Var: "$field",
					Values: []chronograf.BasicTemplateValue{
					Values: []chronograf.TemplateValue{
						{
							Type:  "who knows?",
							Value: "field",

@ -127,42 +128,63 @@ func TestTemplateReplace(t *testing.T) {
		{
			name:  "auto group by",
			query: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by :interval:`,
			vars: chronograf.TemplateVars{
				&chronograf.GroupByVar{
					Var:               ":interval:",
					Duration:          180 * 24 * time.Hour,
					Resolution:        1000,
					ReportingInterval: 10 * time.Second,
			vars: []chronograf.TemplateVar{
				{
					Var: ":interval:",
					Values: []chronograf.TemplateValue{
						{
							Value: "1000",
							Type:  "resolution",
						},
						{
							Value: "3",
							Type:  "pointsPerPixel",
						},
					},
				},
			},
			want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46656s)`,
			want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46655s)`,
		},
		{
			name:  "auto group by without duration",
			query: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by :interval:`,
			vars: chronograf.TemplateVars{
				&chronograf.GroupByVar{
					Var:               ":interval:",
					Duration:          0 * time.Minute,
					Resolution:        1000,
					ReportingInterval: 10 * time.Second,
			vars: []chronograf.TemplateVar{
				{
					Var: ":interval:",
					Values: []chronograf.TemplateValue{
						{
							Value: "1000",
							Type:  "resolution",
						},
						{
							Value: "3",
							Type:  "pointsPerPixel",
						},
					},
				},
			},
			want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
			want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
		},
		{
			name:  "auto group by with :dashboardTime:",
			query: `SELECT mean(usage_idle) from "cpu" WHERE time > :dashboardTime: group by :interval:`,
			vars: chronograf.TemplateVars{
				&chronograf.GroupByVar{
					Var:               ":interval:",
					Duration:          0 * time.Minute,
					Resolution:        1000,
					ReportingInterval: 10 * time.Second,
			vars: []chronograf.TemplateVar{
				{
					Var: ":interval:",
					Values: []chronograf.TemplateValue{
						{
							Value: "1000",
							Type:  "resolution",
						},
						{
							Value: "3",
							Type:  "pointsPerPixel",
						},
					},
				},
				&chronograf.BasicTemplateVar{
				{
					Var: ":dashboardTime:",
					Values: []chronograf.BasicTemplateValue{
					Values: []chronograf.TemplateValue{
						{
							Type:  "constant",
							Value: "now() - 4320h",

@ -170,20 +192,28 @@ func TestTemplateReplace(t *testing.T) {
					},
				},
			},
			want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
			want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
		},
		{
			name:  "auto group by failing condition",
			query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`,
			vars: []chronograf.TemplateVariable{
				&chronograf.GroupByVar{
					Var:               ":interval:",
					Resolution:        115,
					ReportingInterval: 10 * time.Second,
			vars: []chronograf.TemplateVar{
				{
					Var: ":interval:",
					Values: []chronograf.TemplateValue{
						{
							Value: "115",
							Type:  "resolution",
						},
						{
							Value: "3",
							Type:  "pointsPerPixel",
						},
					},
				},
				chronograf.BasicTemplateVar{
				{
					Var: ":dashboardTime:",
					Values: []chronograf.BasicTemplateValue{
					Values: []chronograf.TemplateValue{
						{
							Value: "now() - 1h",
							Type:  "constant",

@ -197,7 +227,14 @@ func TestTemplateReplace(t *testing.T) {
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := TemplateReplace(tt.query, tt.vars)
			now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
			if err != nil {
				t.Fatal(err)
			}
			got, err := TemplateReplace(tt.query, tt.vars, now)
			if err != nil {
				t.Fatalf("TestParse unexpected TemplateReplace error: %v", err)
			}
			if got != tt.want {
				t.Errorf("TestParse %s =\n%s\nwant\n%s", tt.name, got, tt.want)
			}

@ -209,8 +246,20 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
	req := `[
		{
			"tempVar": ":interval:",
			"resolution": 1000,
			"reportingInterval": 10
			"values": [
				{
					"value": "1000",
					"type": "resolution"
				},
				{
					"value": "3",
					"type": "pointsPerPixel"
				},
				{
					"value": "10",
					"type": "reportingInterval"
				}
			]
		},
		{
			"tempVar": ":cpu:",

@ -224,15 +273,27 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
		}
	]`

	expected := []chronograf.TemplateVariable{
		&chronograf.GroupByVar{
			Var:               ":interval:",
			Resolution:        1000,
			ReportingInterval: 10 * time.Nanosecond,
	want := []chronograf.TemplateVar{
		{
			Var: ":interval:",
			Values: []chronograf.TemplateValue{
				{
					Value: "1000",
					Type:  "resolution",
				},
				{
					Value: "3",
					Type:  "pointsPerPixel",
				},
				{
					Value: "10",
					Type:  "reportingInterval",
				},
			},
		},
		chronograf.BasicTemplateVar{
		{
			Var: ":cpu:",
			Values: []chronograf.BasicTemplateValue{
			Values: []chronograf.TemplateValue{
				{
					Value: "cpu-total",
					Type:  "tagValue",

@ -242,65 +303,128 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
		},
	}

	var tvars chronograf.TemplateVars
	err := json.Unmarshal([]byte(req), &tvars)
	var got []chronograf.TemplateVar
	err := json.Unmarshal([]byte(req), &got)
	if err != nil {
		t.Fatal("Err unmarshaling:", err)
	}

	if len(tvars) != len(expected) {
		t.Fatal("Expected", len(expected), "vars but found", len(tvars))
	}

	if !reflect.DeepEqual(*(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar))) {
		t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", *(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar)))
	}

	if !reflect.DeepEqual(tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar)) {
		t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar))
	if !reflect.DeepEqual(got, want) {
		t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", got, want)
	}
}

func TestGroupByVarString(t *testing.T) {
func TestAutoGroupBy(t *testing.T) {
	tests := []struct {
		name string
		tvar *chronograf.GroupByVar
		want string
		name           string
		resolution     int64
		pixelsPerPoint int64
		duration       time.Duration
		want           string
	}{
		{
			name: "String() calculates the GROUP BY interval",
			tvar: &chronograf.GroupByVar{
				Resolution:        700,
				ReportingInterval: 10 * time.Second,
				Duration:          24 * time.Hour,
			},
			want: "time(370s)",
			name:           "String() calculates the GROUP BY interval",
			resolution:     700,
			pixelsPerPoint: 3,
			duration:       24 * time.Hour,
			want:           "time(370s)",
		},
		{
			name: "String() milliseconds if less than one second intervals",
			tvar: &chronograf.GroupByVar{
				Resolution:        100000,
				ReportingInterval: 10 * time.Second,
				Duration:          time.Hour,
			},
			want: "time(107ms)",
			name:           "String() milliseconds if less than one second intervals",
			resolution:     100000,
			pixelsPerPoint: 3,
			duration:       time.Hour,
			want:           "time(107ms)",
		},
		{
			name: "String() milliseconds if less than one millisecond",
			tvar: &chronograf.GroupByVar{
				Resolution:        100000,
				ReportingInterval: 10 * time.Second,
				Duration:          time.Second,
			},
			want: "time(1ms)",
			name:           "String() milliseconds if less than one millisecond",
			resolution:     100000,
			pixelsPerPoint: 3,
			duration:       time.Second,
			want:           "time(1ms)",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := tt.tvar.String()
			got := AutoGroupBy(tt.resolution, tt.pixelsPerPoint, tt.duration)
			if got != tt.want {
				t.Errorf("TestGroupByVarString %s =\n%s\nwant\n%s", tt.name, got, tt.want)
				t.Errorf("TestAutoGroupBy %s =\n%s\nwant\n%s", tt.name, got, tt.want)
			}
		})
	}
}

func Test_RenderTemplate(t *testing.T) {
	gbvTests := []struct {
		name       string
		query      string
		want       string
		resolution uint // the screen resolution to render queries into
	}{
		{
			name:       "relative time only lower bound with one day of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY time(259s)",
		},
		{
			name:       "relative time offset by week",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY time(259s)",
		},
		{
			name:       "relative time with relative upper bound with one minute of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY time(179ms)",
		},
		{
			name:       "relative time with relative lower bound and now upper with one day of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY time(259s)",
		},
		{
			name:       "absolute time with one minute of duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY time(179ms)",
		},
		{
			name:       "absolute time with nanoseconds and zero duration",
			query:      "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
			resolution: 1000,
			want:       "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY time(1ms)",
		},
	}

	for _, tt := range gbvTests {
		t.Run(tt.name, func(t *testing.T) {
			now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
			if err != nil {
				t.Fatal(err)
			}
			tvar := chronograf.TemplateVar{
				Var: ":interval:",
				Values: []chronograf.TemplateValue{
					{
						Value: fmt.Sprintf("%d", tt.resolution),
						Type:  "resolution",
					},
				},
			}

			got, err := RenderTemplate(tt.query, tvar, now)
			if err != nil {
				t.Fatalf("unexpected error rendering template %v", err)
			}

			if got != tt.want {
				t.Fatalf("%q - durations not equal! Want: %s, Got: %s", tt.name, tt.want, got)
			}
		})
	}
}

// SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h GROUP BY :interval: FILL(null);SELECT mean("numSeries") AS "mean_numSeries_shifted__1__h" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h AND time < now() - 1h GROUP BY :interval: FILL(null)

@ -319,6 +319,9 @@ func (c *Client) Update(ctx context.Context, href string, rule chronograf.AlertR
	} else {
		opt, err = c.updateFromTick(rule)
	}
	if err != nil {
		return nil, err
	}

	task, err := kapa.UpdateTask(client.Link{Href: href}, *opt)
	if err != nil {

@ -7,6 +7,7 @@ import (
	"testing"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
	"github.com/influxdata/chronograf"
	client "github.com/influxdata/kapacitor/client/v1"
)

@ -945,10 +946,22 @@ func TestClient_Update(t *testing.T) {
			ctx:  context.Background(),
			href: "/kapacitor/v1/tasks/howdy",
			rule: chronograf.AlertRule{
				ID: "howdy",
				ID:   "howdy",
				Name: "myname",
				Query: &chronograf.QueryConfig{
					Database:        "db",
					RetentionPolicy: "rp",
					Measurement:     "meas",
					Fields: []chronograf.Field{
						{
							Type:  "field",
							Value: "usage_user",
						},
					},
				},
				Trigger: "threshold",
				TriggerValues: chronograf.TriggerValues{
					Operator: greaterThan,
				},
			},
		},

@ -1009,10 +1022,22 @@ func TestClient_Update(t *testing.T) {
			ctx:  context.Background(),
			href: "/kapacitor/v1/tasks/howdy",
			rule: chronograf.AlertRule{
				ID: "howdy",
				ID:   "howdy",
				Name: "myname",
				Query: &chronograf.QueryConfig{
					Database:        "db",
					RetentionPolicy: "rp",
					Measurement:     "meas",
					Fields: []chronograf.Field{
						{
							Type:  "field",
							Value: "usage_user",
						},
					},
				},
				Trigger: "threshold",
				TriggerValues: chronograf.TriggerValues{
					Operator: greaterThan,
				},
			},
		},

@ -1061,6 +1086,135 @@ func TestClient_Update(t *testing.T) {
			},
			wantStatus: client.Disabled,
		},
		{
			name:    "error because relative cannot have inside range",
			wantErr: true,
			fields: fields{
				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
					return kapa, nil
				},
				Ticker: &Alert{},
			},
			args: args{
				ctx:  context.Background(),
				href: "/kapacitor/v1/tasks/error",
				rule: chronograf.AlertRule{
					ID: "error",
					Query: &chronograf.QueryConfig{
						Database:        "db",
						RetentionPolicy: "rp",
						Fields: []chronograf.Field{
							{
								Value: "usage_user",
								Type:  "field",
							},
						},
					},
					Trigger: Relative,
					TriggerValues: chronograf.TriggerValues{
						Operator: InsideRange,
					},
				},
			},
		},
		{
			name:    "error because rule has an unknown trigger mechanism",
			wantErr: true,
			fields: fields{
				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
					return kapa, nil
				},
				Ticker: &Alert{},
			},
			args: args{
				ctx:  context.Background(),
				href: "/kapacitor/v1/tasks/error",
				rule: chronograf.AlertRule{
					ID: "error",
					Query: &chronograf.QueryConfig{
						Database:        "db",
						RetentionPolicy: "rp",
					},
				},
			},
		},
		{
			name:    "error because query has no fields",
			wantErr: true,
			fields: fields{
				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
					return kapa, nil
				},
				Ticker: &Alert{},
			},
			args: args{
				ctx:  context.Background(),
				href: "/kapacitor/v1/tasks/error",
				rule: chronograf.AlertRule{
					ID:      "error",
					Trigger: Threshold,
					TriggerValues: chronograf.TriggerValues{
						Period: "1d",
					},
					Name: "myname",
					Query: &chronograf.QueryConfig{
						Database:        "db",
						RetentionPolicy: "rp",
						Measurement:     "meas",
					},
				},
			},
		},
		{
			name:    "error because alert has no name",
			wantErr: true,
			fields: fields{
				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
					return kapa, nil
				},
				Ticker: &Alert{},
			},
			args: args{
				ctx:  context.Background(),
				href: "/kapacitor/v1/tasks/error",
				rule: chronograf.AlertRule{
					ID:      "error",
					Trigger: Deadman,
					TriggerValues: chronograf.TriggerValues{
						Period: "1d",
					},
					Query: &chronograf.QueryConfig{
						Database:        "db",
						RetentionPolicy: "rp",
						Measurement:     "meas",
					},
				},
			},
		},
		{
			name:    "error because alert period cannot be an empty string in deadman alert",
			wantErr: true,
			fields: fields{
				kapaClient: func(url, username, password string, insecureSkipVerify bool) (KapaClient, error) {
					return kapa, nil
				},
				Ticker: &Alert{},
			},
			args: args{
				ctx:  context.Background(),
				href: "/kapacitor/v1/tasks/error",
				rule: chronograf.AlertRule{
					ID:      "error",
					Name:    "myname",
					Trigger: Deadman,
					Query: &chronograf.QueryConfig{
						Database:        "db",
						RetentionPolicy: "rp",
						Measurement:     "meas",
					},
				},
			},
		},
	}
	for _, tt := range tests {
		kapa.ResTask = tt.resTask

@ -1079,11 +1233,17 @@ func TestClient_Update(t *testing.T) {
			t.Errorf("Client.Update() error = %v, wantErr %v", err, tt.wantErr)
			return
		}
		if tt.wantErr {
			return
		}
		if !cmp.Equal(got, tt.want) {
			t.Errorf("%q. Client.Update() = -got/+want %s", tt.name, cmp.Diff(got, tt.want))
		}
		if !reflect.DeepEqual(kapa.UpdateTaskOptions, tt.updateTaskOptions) {
			t.Errorf("Client.Update() = %v, want %v", kapa.UpdateTaskOptions, tt.updateTaskOptions)
		var cmpOptions = cmp.Options{
			cmpopts.IgnoreFields(client.UpdateTaskOptions{}, "TICKscript"),
		}
		if !cmp.Equal(kapa.UpdateTaskOptions, tt.updateTaskOptions, cmpOptions...) {
			t.Errorf("Client.Update() = %s", cmp.Diff(got, tt.updateTaskOptions, cmpOptions...))
		}
		if tt.wantStatus != kapa.LastStatus {
			t.Errorf("Client.Update() = %v, want %v", kapa.LastStatus, tt.wantStatus)

@ -1130,10 +1290,16 @@ func TestClient_Create(t *testing.T) {
			args: args{
				ctx: context.Background(),
				rule: chronograf.AlertRule{
					ID: "howdy",
					ID:   "howdy",
					Name: "myname's",
					Query: &chronograf.QueryConfig{
						Database:        "db",
						RetentionPolicy: "rp",
						Measurement:     "meas",
					},
					Trigger: Deadman,
					TriggerValues: chronograf.TriggerValues{
						Period: "1d",
					},
				},
			},

@ -1152,10 +1318,79 @@ func TestClient_Create(t *testing.T) {
				},
			},
			createTaskOptions: &client.CreateTaskOptions{
				TICKscript: "",
				ID:         "chronograf-v1-howdy",
				Type:       client.StreamTask,
				Status:     client.Enabled,
				TICKscript: `var db = 'db'

var rp = 'rp'

var measurement = 'meas'

var groupBy = []

var whereFilter = lambda: TRUE

var period = 1d

var name = 'myname\'s'

var idVar = name + ':{{.Group}}'

var message = ''

var idTag = 'alertID'

var levelTag = 'level'

var messageField = 'message'

var durationField = 'duration'

var outputDB = 'chronograf'

var outputRP = 'autogen'

var outputMeasurement = 'alerts'

var triggerType = 'deadman'

var threshold = 0.0

var data = stream
    |from()
        .database(db)
        .retentionPolicy(rp)
        .measurement(measurement)
        .groupBy(groupBy)
        .where(whereFilter)

var trigger = data
    |deadman(threshold, period)
        .stateChangesOnly()
        .message(message)
        .id(idVar)
        .idTag(idTag)
        .levelTag(levelTag)
        .messageField(messageField)
        .durationField(durationField)

trigger
    |eval(lambda: "emitted")
        .as('value')
        .keep('value', messageField, durationField)
    |influxDBOut()
        .create()
        .database(outputDB)
        .retentionPolicy(outputRP)
        .measurement(outputMeasurement)
        .tag('alertName', name)
        .tag('triggerType', triggerType)

trigger
    |httpOut('output')
`,

				ID:     "chronograf-v1-howdy",
				Type:   client.StreamTask,
				Status: client.Enabled,
				DBRPs: []client.DBRP{
					{
						Database: "db",

@ -1205,10 +1440,9 @@ func TestClient_Create(t *testing.T) {
			},
			resError: fmt.Errorf("error"),
			createTaskOptions: &client.CreateTaskOptions{
				TICKscript: "",
				ID:         "chronograf-v1-howdy",
				Type:       client.StreamTask,
				Status:     client.Enabled,
				ID:     "chronograf-v1-howdy",
				Type:   client.StreamTask,
				Status: client.Enabled,
				DBRPs: []client.DBRP{
					{
						Database: "db",

@ -1236,6 +1470,9 @@ func TestClient_Create(t *testing.T) {
			t.Errorf("Client.Create() error = %v, wantErr %v", err, tt.wantErr)
			return
		}
		if tt.wantErr {
			return
		}
		if !cmp.Equal(got, tt.want) {
			t.Errorf("%q. Client.Create() = -got/+want %s", tt.name, cmp.Diff(got, tt.want))
		}

@ -1,6 +1,8 @@
package kapacitor

import "fmt"
import (
	"fmt"
)

const (
	greaterThan = "greater than"

@ -15,11 +15,11 @@ type Alert struct{}
func (a *Alert) Generate(rule chronograf.AlertRule) (chronograf.TICKScript, error) {
	vars, err := Vars(rule)
	if err != nil {
		return "", nil
		return "", err
	}
	data, err := Data(rule)
	if err != nil {
		return "", nil
		return "", err
	}
	trigger, err := Trigger(rule)
	if err != nil {

@ -1,7 +1,10 @@
package kapacitor

import "github.com/influxdata/chronograf"
import "fmt"
import (
	"fmt"

	"github.com/influxdata/chronograf"
)

const (
	// Deadman triggers when data is missing for a period of time

@ -76,7 +76,41 @@ func Vars(rule chronograf.AlertRule) (string, error) {
	}
}

type NotEmpty struct {
	Err error
}

func (n *NotEmpty) Valid(name, s string) error {
	if n.Err != nil {
		return n.Err
	}
	if s == "" {
		n.Err = fmt.Errorf("%s cannot be an empty string", name)
	}
	return n.Err
}

func Escape(str string) string {
	return strings.Replace(str, "'", `\'`, -1)
}
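
A sketch of how these two helpers behave (not part of the diff): NotEmpty latches the first failure, so a chain of Valid calls needs only one error check at the end, and Escape keeps user-supplied single quotes from breaking the '...' string literals interpolated into the TICKscript.

package main

import (
	"fmt"

	"github.com/influxdata/chronograf/kapacitor"
)

func main() {
	n := new(kapacitor.NotEmpty)
	n.Valid("measurement", "")
	n.Valid("database", "") // no-op: the first error is kept
	fmt.Println(n.Err) // measurement cannot be an empty string

	fmt.Println(kapacitor.Escape("myname's")) // myname\'s
}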

func commonVars(rule chronograf.AlertRule) (string, error) {
	n := new(NotEmpty)
	n.Valid("database", rule.Query.Database)
	n.Valid("retention policy", rule.Query.RetentionPolicy)
	n.Valid("measurement", rule.Query.Measurement)
	n.Valid("alert name", rule.Name)
	n.Valid("trigger type", rule.Trigger)
	if n.Err != nil {
		return "", n.Err
	}

	wind, err := window(rule)
	if err != nil {
		return "", err
	}

	common := `
var db = '%s'
var rp = '%s'

@ -99,14 +133,14 @@ func commonVars(rule chronograf.AlertRule) (string, error) {
var triggerType = '%s'
`
	res := fmt.Sprintf(common,
		rule.Query.Database,
		rule.Query.RetentionPolicy,
		rule.Query.Measurement,
		Escape(rule.Query.Database),
		Escape(rule.Query.RetentionPolicy),
		Escape(rule.Query.Measurement),
		groupBy(rule.Query),
		whereFilter(rule.Query),
		window(rule),
		rule.Name,
		rule.Message,
		wind,
		Escape(rule.Name),
		Escape(rule.Message),
		IDTag,
		LevelTag,
		MessageField,

@ -127,17 +161,27 @@ func commonVars(rule chronograf.AlertRule) (string, error) {

// window is only used if deadman or threshold/relative with aggregate. Will return empty
// if no period.
func window(rule chronograf.AlertRule) string {
func window(rule chronograf.AlertRule) (string, error) {
	if rule.Trigger == Deadman {
		return fmt.Sprintf("var period = %s", rule.TriggerValues.Period)
		if rule.TriggerValues.Period == "" {
			return "", fmt.Errorf("period cannot be an empty string in deadman alert")
		}
		return fmt.Sprintf("var period = %s", rule.TriggerValues.Period), nil
	}
	// Period only makes sense if the field has been grouped via a time duration.
	for _, field := range rule.Query.Fields {
		if field.Type == "func" {
			return fmt.Sprintf("var period = %s\nvar every = %s", rule.Query.GroupBy.Time, rule.Every)
			n := new(NotEmpty)
			n.Valid("group by time", rule.Query.GroupBy.Time)
			n.Valid("every", rule.Every)
			if n.Err != nil {
				return "", n.Err
			}
			return fmt.Sprintf("var period = %s\nvar every = %s", rule.Query.GroupBy.Time, rule.Every), nil
		}
	}
	return ""
	return "", nil
}

func groupBy(q *chronograf.QueryConfig) string {

@ -31,7 +31,6 @@ func newCellResponses(dID chronograf.DashboardID, dcells []chronograf.DashboardC
	cells := make([]dashboardCellResponse, len(dcells))
	for i, cell := range dcells {
		newCell := chronograf.DashboardCell{}

		newCell.Queries = make([]chronograf.DashboardQuery, len(cell.Queries))
		copy(newCell.Queries, cell.Queries)

@ -70,7 +69,17 @@ func newCellResponses(dID chronograf.DashboardID, dcells []chronograf.DashboardC
// ValidDashboardCellRequest verifies that the dashboard cells have a query and
// have the correct axes specified
func ValidDashboardCellRequest(c *chronograf.DashboardCell) error {
	if c == nil {
		return fmt.Errorf("Chronograf dashboard cell was nil")
	}

	CorrectWidthHeight(c)
	for _, q := range c.Queries {
		if err := ValidateQueryConfig(&q.QueryConfig); err != nil {
			return err
		}
	}
	MoveTimeShift(c)
	return HasCorrectAxes(c)
}

@ -115,12 +124,22 @@ func CorrectWidthHeight(c *chronograf.DashboardCell) {
	}
}

// MoveTimeShift moves TimeShift from the QueryConfig to the DashboardQuery
func MoveTimeShift(c *chronograf.DashboardCell) {
	for i, query := range c.Queries {
		query.Shifts = query.QueryConfig.Shifts
		c.Queries[i] = query
	}
}
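
MoveTimeShift and AddQueryConfig (just below) are roughly inverses around persistence: shifts ride on the DashboardQuery while stored, and are folded back into the generated QueryConfig when responding. A sketch under those assumptions; the "Last Week" label is hypothetical.

package main

import (
	"fmt"

	"github.com/influxdata/chronograf"
	"github.com/influxdata/chronograf/server"
)

func main() {
	cell := &chronograf.DashboardCell{
		Queries: []chronograf.DashboardQuery{{
			Command: `SELECT "pingReq" FROM "db"."monitor"."httpd"`,
			QueryConfig: chronograf.QueryConfig{
				Shifts: []chronograf.TimeShift{{Label: "Last Week", Unit: "d", Quantity: "7"}},
			},
		}},
	}
	server.MoveTimeShift(cell) // hoist shifts onto the DashboardQuery before storing
	fmt.Println(cell.Queries[0].Shifts[0].Unit) // d
	server.AddQueryConfig(cell) // rebuild QueryConfig from Command; shifts fold back in
	fmt.Println(cell.Queries[0].QueryConfig.Shifts[0].Quantity) // 7
}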

// AddQueryConfig updates a cell by converting InfluxQL into queryconfigs
// If influxql cannot be represented by a full query config, then the
// query config's raw text is set to the command.
func AddQueryConfig(c *chronograf.DashboardCell) {
	for i, q := range c.Queries {
		qc := ToQueryConfig(q.Command)
		qc.Shifts = append([]chronograf.TimeShift(nil), q.Shifts...)
		q.Shifts = nil
		q.QueryConfig = qc
		c.Queries[i] = q
	}

@ -162,14 +162,14 @@ func Test_Service_DashboardCells(t *testing.T) {
			http.StatusOK,
		},
		{
			"cell axes should always be \"x\", \"y\", and \"y2\"",
			&url.URL{
			name: "cell axes should always be \"x\", \"y\", and \"y2\"",
			reqURL: &url.URL{
				Path: "/chronograf/v1/dashboards/1/cells",
			},
			map[string]string{
			ctxParams: map[string]string{
				"id": "1",
			},
			[]chronograf.DashboardCell{
			mockResponse: []chronograf.DashboardCell{
				{
					ID: "3899be5a-f6eb-4347-b949-de2f4fbea859",
					X:  0,

@ -182,7 +182,7 @@ func Test_Service_DashboardCells(t *testing.T) {
					Axes: map[string]chronograf.Axis{},
				},
			},
			[]chronograf.DashboardCell{
			expected: []chronograf.DashboardCell{
				{
					ID: "3899be5a-f6eb-4347-b949-de2f4fbea859",
					X:  0,

@ -205,7 +205,7 @@ func Test_Service_DashboardCells(t *testing.T) {
				},
			},
		},
		http.StatusOK,
		expectedCode: http.StatusOK,
		},
	}

@ -219,6 +219,13 @@ func Test_newDashboardResponse(t *testing.T) {
				{
					Source:  "/chronograf/v1/sources/1",
					Command: "SELECT donors from hill_valley_preservation_society where time > '1985-10-25 08:00:00'",
					Shifts: []chronograf.TimeShift{
						{
							Label:    "Best Week Evar",
							Unit:     "d",
							Quantity: "7",
						},
					},
				},
			},
			Axes: map[string]chronograf.Axis{

@ -267,6 +274,13 @@ func Test_newDashboardResponse(t *testing.T) {
					},
					Tags:            make(map[string][]string, 0),
					AreTagsAccepted: false,
					Shifts: []chronograf.TimeShift{
						{
							Label:    "Best Week Evar",
							Unit:     "d",
							Quantity: "7",
						},
					},
				},
			},
		},

@ -4,6 +4,7 @@ import (
	"encoding/json"
	"fmt"
	"net/http"
	"time"

	"golang.org/x/net/context"

@ -12,30 +13,36 @@ import (
	"github.com/influxdata/chronograf/influx/queries"
)

// QueryRequest is a query that will be converted to a queryConfig
type QueryRequest struct {
|
||||
ID string `json:"id"`
|
||||
Query string `json:"query"`
|
||||
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
|
||||
ID string `json:"id"`
|
||||
Query string `json:"query"`
|
||||
}
|
||||
|
||||
// QueriesRequest converts all queries to queryConfigs with the help
|
||||
// of the template variables
|
||||
type QueriesRequest struct {
|
||||
Queries []QueryRequest `json:"queries"`
|
||||
Queries []QueryRequest `json:"queries"`
|
||||
TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
|
||||
}
|
||||
|
||||
// QueryResponse is the return result of a QueryRequest including
|
||||
// the raw query, the templated query, the queryConfig and the queryAST
|
||||
type QueryResponse struct {
|
||||
ID string `json:"id"`
|
||||
Query string `json:"query"`
|
||||
QueryConfig chronograf.QueryConfig `json:"queryConfig"`
|
||||
QueryAST *queries.SelectStatement `json:"queryAST,omitempty"`
|
||||
QueryTemplated *string `json:"queryTemplated,omitempty"`
|
||||
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
|
||||
TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
|
||||
}
|
||||
|
||||
// QueriesResponse is the response for a QueriesRequest
|
||||
type QueriesResponse struct {
|
||||
Queries []QueryResponse `json:"queries"`
|
||||
}
|
||||
|
||||
// Queries parses InfluxQL and returns the JSON
|
||||
// Queries analyzes InfluxQL to produce front-end friendly QueryConfig
|
||||
func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
|
||||
srcID, err := paramID("id", r)
|
||||
if err != nil {
|
||||
|
@ -66,10 +73,10 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
|
|||
Query: q.Query,
|
||||
}
|
||||
|
||||
query := q.Query
|
||||
if len(q.TemplateVars) > 0 {
|
||||
query = influx.TemplateReplace(query, q.TemplateVars)
|
||||
qr.QueryTemplated = &query
|
||||
query, err := influx.TemplateReplace(q.Query, req.TemplateVars, time.Now())
|
||||
if err != nil {
|
||||
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
|
||||
return
|
||||
}
|
||||
|
||||
qc := ToQueryConfig(query)
|
||||
|
@ -77,15 +84,17 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
|
|||
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
|
||||
return
|
||||
}
|
||||
qc.Shifts = []chronograf.TimeShift{}
|
||||
qr.QueryConfig = qc
|
||||
|
||||
if stmt, err := queries.ParseSelect(query); err == nil {
|
||||
qr.QueryAST = stmt
|
||||
}
|
||||
|
||||
if len(q.TemplateVars) > 0 {
|
||||
qr.TemplateVars = q.TemplateVars
|
||||
if len(req.TemplateVars) > 0 {
|
||||
qr.TemplateVars = req.TemplateVars
|
||||
qr.QueryConfig.RawText = &qr.Query
|
||||
qr.QueryTemplated = &query
|
||||
}
|
||||
|
||||
qr.QueryConfig.ID = q.ID
|
||||
|
|
|
@ -0,0 +1,194 @@
package server

import (
	"bytes"
	"context"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/bouk/httprouter"
	"github.com/influxdata/chronograf"
	"github.com/influxdata/chronograf/mocks"
)

func TestService_Queries(t *testing.T) {
	tests := []struct {
		name         string
		SourcesStore chronograf.SourcesStore
		ID           string
		w            *httptest.ResponseRecorder
		r            *http.Request
		want         string
	}{
		{
			name: "bad json",
			SourcesStore: &mocks.SourcesStore{
				GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
					return chronograf.Source{
						ID: ID,
					}, nil
				},
			},
			ID:   "1",
			w:    httptest.NewRecorder(),
			r:    httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`howdy`))),
			want: `{"code":400,"message":"Unparsable JSON"}`,
		},
		{
			name: "bad id",
			ID:   "howdy",
			w:    httptest.NewRecorder(),
			r:    httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte{})),
			want: `{"code":422,"message":"Error converting ID howdy"}`,
		},
		{
			name: "query with no template vars",
			SourcesStore: &mocks.SourcesStore{
				GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
					return chronograf.Source{
						ID: ID,
					}, nil
				},
			},
			ID: "1",
			w:  httptest.NewRecorder(),
			r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
				"queries": [
					{
						"query": "SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time > now() - 1m",
						"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
					}
				]}`))),
			want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"db","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":null,"range":{"upper":"","lower":"now() - 1m"},"shifts":[]},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"db","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]}}]}
`,
		},
		{
			name: "query with unparsable query",
			SourcesStore: &mocks.SourcesStore{
				GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
					return chronograf.Source{
						ID: ID,
					}, nil
				},
			},
			ID: "1",
			w:  httptest.NewRecorder(),
			r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
				"queries": [
					{
						"query": "SHOW DATABASES",
						"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
					}
				]}`))),
			want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SHOW DATABASES","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SHOW DATABASES","range":null,"shifts":[]}}]}
`,
		},
		{
			name: "query with template vars",
			SourcesStore: &mocks.SourcesStore{
				GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
					return chronograf.Source{
						ID: ID,
					}, nil
				},
			},
			ID: "1",
			w:  httptest.NewRecorder(),
			r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
				"queries": [
					{
						"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > :dashboardTime: AND time < :upperDashboardTime: GROUP BY :interval:",
						"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
					}
				],
				"tempVars": [
					{
						"tempVar": ":dbs:",
						"values": [
							{
								"value": "_internal",
								"type": "database",
								"selected": true
							}
						],
						"id": "792eda0d-2bb2-4de6-a86f-1f652889b044",
						"type": "databases",
						"label": "",
						"query": {
							"influxql": "SHOW DATABASES",
							"measurement": "",
							"tagKey": "",
							"fieldKey": ""
						},
						"links": {
							"self": "/chronograf/v1/dashboards/1/templates/792eda0d-2bb2-4de6-a86f-1f652889b044"
						}
					},
					{
						"id": "dashtime",
						"tempVar": ":dashboardTime:",
						"type": "constant",
						"values": [
							{
								"value": "now() - 15m",
								"type": "constant",
								"selected": true
							}
						]
					},
					{
						"id": "upperdashtime",
						"tempVar": ":upperDashboardTime:",
						"type": "constant",
						"values": [
							{
								"value": "now()",
								"type": "constant",
								"selected": true
							}
						]
					},
					{
						"id": "interval",
						"type": "constant",
						"tempVar": ":interval:",
						"values": [
							{
								"value": "1000",
								"type": "resolution"
							},
							{
								"value": "3",
								"type": "pointsPerPixel"
							}
						]
					}
				]
			}`))),
			want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","range":null,"shifts":[]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 15m AND time \u003c now() GROUP BY time(2s)","tempVars":[{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":interval:","values":[{"value":"1000","type":"resolution","selected":false},{"value":"3","type":"pointsPerPixel","selected":false}]}]}]}
`,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.r = tt.r.WithContext(httprouter.WithParams(
				context.Background(),
				httprouter.Params{
					{
						Key:   "id",
						Value: tt.ID,
					},
				}))
			s := &Service{
				SourcesStore: tt.SourcesStore,
				Logger:       &mocks.TestLogger{},
			}
			s.Queries(tt.w, tt.r)
			got := tt.w.Body.String()
			if got != tt.want {
				t.Errorf("got:\n%s\nwant:\n%s\n", got, tt.want)
			}
		})
	}
}
@ -1,6 +1,8 @@
package server

import (
	"fmt"

	"github.com/influxdata/chronograf"
	"github.com/influxdata/chronograf/influx"
)

@ -22,3 +24,28 @@ func ToQueryConfig(query string) chronograf.QueryConfig {
		Tags: make(map[string][]string, 0),
	}
}

var validFieldTypes = map[string]bool{
	"func":     true,
	"field":    true,
	"integer":  true,
	"number":   true,
	"regex":    true,
	"wildcard": true,
}

// ValidateQueryConfig checks any query config input
func ValidateQueryConfig(q *chronograf.QueryConfig) error {
	for _, fld := range q.Fields {
		invalid := fmt.Errorf(`invalid field type "%s" ; expect func, field, integer, number, regex, wildcard`, fld.Type)
		if !validFieldTypes[fld.Type] {
			return invalid
		}
		for _, arg := range fld.Args {
			if !validFieldTypes[arg.Type] {
				return invalid
			}
		}
	}
	return nil
}
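For reference, here is a fields array (written as the JSON object the frontend would send) that passes ValidateQueryConfig; the field names are illustrative, only the type values matter.

// Illustrative queryConfig fields that satisfy ValidateQueryConfig:
// each type and each args[].type is one of func, field, integer,
// number, regex, or wildcard.
const fields = [
  {
    value: 'mean',
    type: 'func',
    alias: 'mean_usage_idle',
    args: [{value: 'usage_idle', type: 'field'}],
  },
  {value: '/cpu[0-9]*/', type: 'regex'},
  {value: '*', type: 'wildcard'},
]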
@ -0,0 +1,50 @@
package server

import (
	"testing"

	"github.com/influxdata/chronograf"
)

func TestValidateQueryConfig(t *testing.T) {
	tests := []struct {
		name    string
		q       *chronograf.QueryConfig
		wantErr bool
	}{
		{
			name: "invalid field type",
			q: &chronograf.QueryConfig{
				Fields: []chronograf.Field{
					{
						Type: "invalid",
					},
				},
			},
			wantErr: true,
		},
		{
			name: "invalid field args",
			q: &chronograf.QueryConfig{
				Fields: []chronograf.Field{
					{
						Type: "func",
						Args: []chronograf.Field{
							{
								Type: "invalid",
							},
						},
					},
				},
			},
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if err := ValidateQueryConfig(tt.q); (err != nil) != tt.wantErr {
				t.Errorf("ValidateQueryConfig() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
@ -16,8 +16,8 @@ func TestValidTemplateRequest(t *testing.T) {
			name: "Valid Template",
			template: &chronograf.Template{
				Type: "fieldKeys",
				TemplateVar: chronograf.TemplateVar{
					Values: []chronograf.TemplateValue{
						{
							Type: "fieldKey",
						},

@ -30,8 +30,8 @@ func TestValidTemplateRequest(t *testing.T) {
			wantErr: true,
			template: &chronograf.Template{
				Type: "Unknown Type",
				TemplateVar: chronograf.TemplateVar{
					Values: []chronograf.TemplateValue{
						{
							Type: "fieldKey",
						},

@ -44,8 +44,8 @@ func TestValidTemplateRequest(t *testing.T) {
			wantErr: true,
			template: &chronograf.Template{
				Type: "csv",
				TemplateVar: chronograf.TemplateVar{
					Values: []chronograf.TemplateValue{
						{
							Type: "unknown value",
						},
@ -12,11 +12,12 @@
    "build": "yarn run clean && env NODE_ENV=production webpack --optimize-minimize --config ./webpack/prodConfig.js",
    "build:dev": "webpack --config ./webpack/devConfig.js",
    "start": "webpack --watch --config ./webpack/devConfig.js",
    "start:hmr": "webpack-dev-server --open --config ./webpack/devConfig.js",
    "lint": "esw src/",
    "test": "karma start",
    "test:integration": "nightwatch tests --skip",
    "test:lint": "yarn run lint; yarn run test",
    "test:dev": "concurrently \"yarn run lint --watch\" \"yarn run test --no-single-run --reporters=verbose\"",
    "clean": "rm -rf build",
    "storybook": "node ./storybook.js",
    "prettier": "prettier --single-quote --trailing-comma es5 --bracket-spacing false --semi false --write \"{src,spec}/**/*.js\"; eslint src --fix"

@ -77,6 +78,7 @@
    "mocha-loader": "^0.7.1",
    "mustache": "^2.2.1",
    "node-sass": "^4.5.3",
    "on-build-webpack": "^0.1.0",
    "postcss-browser-reporter": "^0.4.0",
    "postcss-calc": "^5.2.0",
    "postcss-loader": "^0.8.0",
@ -1,7 +1,9 @@
import reducer from 'src/data_explorer/reducers/queryConfigs'

import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
  fill,
  timeShift,
  chooseTag,
  groupByTag,
  groupByTime,

@ -26,63 +28,63 @@ const fakeAddQueryAction = (panelID, queryID) => {
  }
}

function buildInitialState(queryID, params) {
  return Object.assign({}, defaultQueryConfig({id: queryID}), params)
}

describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
  const queryID = 123

  it('can add a query', () => {
    const state = reducer({}, fakeAddQueryAction('blah', queryID))

    const actual = state[queryID]
    const expected = defaultQueryConfig({id: queryID})
    expect(actual).to.deep.equal(expected)
  })

  describe('choosing db, rp, and measurement', () => {
    let state
    beforeEach(() => {
      state = reducer({}, fakeAddQueryAction('any', queryID))
    })

    it('sets the db and rp', () => {
      const newState = reducer(
        state,
        chooseNamespace(queryID, {
          database: 'telegraf',
          retentionPolicy: 'monitor',
        })
      )

      expect(newState[queryID].database).to.equal('telegraf')
      expect(newState[queryID].retentionPolicy).to.equal('monitor')
    })

    it('sets the measurement', () => {
      const newState = reducer(state, chooseMeasurement(queryID, 'mem'))

      expect(newState[queryID].measurement).to.equal('mem')
    })
  })

  describe('a query has measurements and fields', () => {
    let state
    beforeEach(() => {
      const one = reducer({}, fakeAddQueryAction('any', queryID))
      const two = reducer(
        one,
        chooseNamespace(queryID, {
          database: '_internal',
          retentionPolicy: 'daily',
        })
      )
      const three = reducer(two, chooseMeasurement(queryID, 'disk'))

      state = reducer(
        three,
        addInitialField(queryID, {
          value: 'a great field',
          type: 'field',
        })

@ -92,91 +94,91 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
    describe('choosing a new namespace', () => {
      it('clears out the old measurement and fields', () => {
        // what about tags?
        expect(state[queryID].measurement).to.equal('disk')
        expect(state[queryID].fields.length).to.equal(1)

        const newState = reducer(
          state,
          chooseNamespace(queryID, {
            database: 'newdb',
            retentionPolicy: 'newrp',
          })
        )

        expect(newState[queryID].measurement).to.be.null
        expect(newState[queryID].fields.length).to.equal(0)
      })
    })

    describe('choosing a new measurement', () => {
      it('leaves the namespace and clears out the old fields', () => {
        // what about tags?
        expect(state[queryID].fields.length).to.equal(1)

        const newState = reducer(
          state,
          chooseMeasurement(queryID, 'newmeasurement')
        )

        expect(state[queryID].database).to.equal(newState[queryID].database)
        expect(state[queryID].retentionPolicy).to.equal(
          newState[queryID].retentionPolicy
        )
        expect(newState[queryID].fields.length).to.equal(0)
      })
    })

    describe('DE_TOGGLE_FIELD', () => {
      it('can toggle multiple fields', () => {
        expect(state[queryID].fields.length).to.equal(1)

        const newState = reducer(
          state,
          toggleField(queryID, {
            value: 'f2',
            type: 'field',
          })
        )

        expect(newState[queryID].fields.length).to.equal(2)
        expect(newState[queryID].fields[1].alias).to.deep.equal('mean_f2')
        expect(newState[queryID].fields[1].args).to.deep.equal([
          {value: 'f2', type: 'field'},
        ])
        expect(newState[queryID].fields[1].value).to.deep.equal('mean')
      })

      it('applies a func to newly selected fields', () => {
        expect(state[queryID].fields.length).to.equal(1)
        expect(state[queryID].fields[0].type).to.equal('func')
        expect(state[queryID].fields[0].value).to.equal('mean')

        const newState = reducer(
          state,
          toggleField(queryID, {
            value: 'f2',
            type: 'field',
          })
        )

        expect(newState[queryID].fields[1].value).to.equal('mean')
        expect(newState[queryID].fields[1].alias).to.equal('mean_f2')
        expect(newState[queryID].fields[1].args).to.deep.equal([
          {value: 'f2', type: 'field'},
        ])
        expect(newState[queryID].fields[1].type).to.equal('func')
      })

      it('adds the field property to query config if not found', () => {
        delete state[queryID].fields
        expect(state[queryID].fields).to.equal(undefined)

        const newState = reducer(
          state,
          toggleField(queryID, {value: 'fk1', type: 'field'})
        )

        expect(newState[queryID].fields.length).to.equal(1)
      })
    })
  })

@ -189,7 +191,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
    const f4 = {value: 'f4', type: 'field'}

    const initialState = {
      [queryID]: {
        id: 123,
        database: 'db1',
        measurement: 'm1',

@ -201,7 +203,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
      },
    }

    const action = applyFuncsToField(queryID, {
      field: {value: 'f1', type: 'field'},
      funcs: [
        {value: 'fn3', type: 'func', args: []},

@ -211,7 +213,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {

    const nextState = reducer(initialState, action)

    expect(nextState[queryID].fields).to.deep.equal([
      {value: 'fn3', type: 'func', args: [f1], alias: `fn3_${f1.value}`},
      {value: 'fn4', type: 'func', args: [f1], alias: `fn4_${f1.value}`},
      {value: 'fn1', type: 'func', args: [f2], alias: `fn1_${f2.value}`},

@ -230,7 +232,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
    const groupBy = {time: '1m', tags: []}

    const initialState = {
      [queryID]: {
        id: 123,
        database: 'db1',
        measurement: 'm1',

@ -239,35 +241,35 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
      },
    }

    const action = removeFuncs(queryID, fields, groupBy)

    const nextState = reducer(initialState, action)
    const actual = nextState[queryID].fields
    const expected = [f1, f2]

    expect(actual).to.eql(expected)
    expect(nextState[queryID].groupBy.time).to.equal(null)
  })
})

describe('DE_CHOOSE_TAG', () => {
  it('adds a tag key/value to the query', () => {
    const initialState = {
      [queryID]: buildInitialState(queryID, {
        tags: {
          k1: ['v0'],
          k2: ['foo'],
        },
      }),
    }
    const action = chooseTag(queryID, {
      key: 'k1',
      value: 'v1',
    })

    const nextState = reducer(initialState, action)

    expect(nextState[queryID].tags).to.eql({
      k1: ['v0', 'v1'],
      k2: ['foo'],
    })

@ -275,31 +277,31 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {

  it("creates a new entry if it's the first key", () => {
    const initialState = {
      [queryID]: buildInitialState(queryID, {
        tags: {},
      }),
    }
    const action = chooseTag(queryID, {
      key: 'k1',
      value: 'v1',
    })

    const nextState = reducer(initialState, action)

    expect(nextState[queryID].tags).to.eql({
      k1: ['v1'],
    })
  })

  it('removes a value that is already in the list', () => {
    const initialState = {
      [queryID]: buildInitialState(queryID, {
        tags: {
          k1: ['v1'],
        },
      }),
    }
    const action = chooseTag(queryID, {
      key: 'k1',
      value: 'v1',
    })

@ -307,14 +309,14 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
    const nextState = reducer(initialState, action)

    // TODO: this should probably remove the `k1` property entirely from the tags object
    expect(nextState[queryID].tags).to.eql({})
  })
})

describe('DE_GROUP_BY_TAG', () => {
  it('adds a tag key/value to the query', () => {
    const initialState = {
      [queryID]: {
        id: 123,
        database: 'db1',
        measurement: 'm1',

@ -323,11 +325,11 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
        groupBy: {tags: [], time: null},
      },
    }
    const action = groupByTag(queryID, 'k1')

    const nextState = reducer(initialState, action)

    expect(nextState[queryID].groupBy).to.eql({
      time: null,
      tags: ['k1'],
    })

@ -335,7 +337,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {

  it('removes a tag if the given tag key is already in the GROUP BY list', () => {
    const initialState = {
      [queryID]: {
        id: 123,
        database: 'db1',
        measurement: 'm1',

@ -344,11 +346,11 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
        groupBy: {tags: ['k1'], time: null},
      },
    }
    const action = groupByTag(queryID, 'k1')

    const nextState = reducer(initialState, action)

    expect(nextState[queryID].groupBy).to.eql({
      time: null,
      tags: [],
    })

@ -358,14 +360,14 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
  describe('DE_TOGGLE_TAG_ACCEPTANCE', () => {
    it('it toggles areTagsAccepted', () => {
      const initialState = {
        [queryID]: buildInitialState(queryID),
      }
      const action = toggleTagAcceptance(queryID)

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].areTagsAccepted).to.equal(
        !initialState[queryID].areTagsAccepted
      )
    })
  })

@ -374,99 +376,113 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
    it('applys the appropriate group by time', () => {
      const time = '100y'
      const initialState = {
        [queryID]: buildInitialState(queryID),
      }

      const action = groupByTime(queryID, time)

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].groupBy.time).to.equal(time)
    })
  })

  it('updates entire config', () => {
    const initialState = {
      [queryID]: buildInitialState(queryID),
    }
    const expected = defaultQueryConfig({id: queryID}, {rawText: 'hello'})
    const action = updateQueryConfig(expected)

    const nextState = reducer(initialState, action)

    expect(nextState[queryID]).to.deep.equal(expected)
  })

  it("updates a query's raw text", () => {
    const initialState = {
      [queryID]: buildInitialState(queryID),
    }
    const text = 'foo'
    const action = updateRawQuery(queryID, text)

    const nextState = reducer(initialState, action)

    expect(nextState[queryID].rawText).to.equal('foo')
  })

  it("updates a query's raw status", () => {
    const initialState = {
      [queryID]: buildInitialState(queryID),
    }
    const status = 'your query was sweet'
    const action = editQueryStatus(queryID, status)

    const nextState = reducer(initialState, action)

    expect(nextState[queryID].status).to.equal(status)
  })

  describe('DE_FILL', () => {
    it('applies an explicit fill when group by time is used', () => {
      const initialState = {
        [queryID]: buildInitialState(queryID),
      }
      const time = '10s'
      const action = groupByTime(queryID, time)

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].fill).to.equal(NULL_STRING)
    })

    it('updates fill to non-null-string non-number string value', () => {
      const initialState = {
        [queryID]: buildInitialState(queryID),
      }
      const action = fill(queryID, LINEAR)

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].fill).to.equal(LINEAR)
    })

    it('updates fill to string integer value', () => {
      const initialState = {
        [queryID]: buildInitialState(queryID),
      }
      const INT_STRING = '1337'
      const action = fill(queryID, INT_STRING)

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].fill).to.equal(INT_STRING)
    })

    it('updates fill to string float value', () => {
      const initialState = {
        [queryID]: buildInitialState(queryID),
      }
      const FLOAT_STRING = '1.337'
      const action = fill(queryID, FLOAT_STRING)

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].fill).to.equal(FLOAT_STRING)
    })
  })

  describe('DE_TIME_SHIFT', () => {
    it('can shift the time', () => {
      const initialState = {
        [queryID]: buildInitialState(queryID),
      }

      const shift = {quantity: 1, unit: 'd', duration: '1d'}
      const action = timeShift(queryID, shift)
      const nextState = reducer(initialState, action)

      expect(nextState[queryID].shifts).to.deep.equal([shift])
    })
  })
})
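The new DE_TIME_SHIFT test pins down the reducer contract: the action's shift payload becomes the query config's shifts array. A minimal sketch of a reducer case that satisfies the expectation above follows; the shipped reducer may differ in detail.

// Sketch only: the smallest DE_TIME_SHIFT case consistent with the test.
const queryConfigs = (state = {}, action) => {
  switch (action.type) {
    case 'DE_TIME_SHIFT': {
      const {queryID, shift} = action.payload
      // replace the config's shifts with the newly selected shift
      return {...state, [queryID]: {...state[queryID], shifts: [shift]}}
    }
    default:
      return state
  }
}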
@ -1,14 +1,15 @@
import reducer from 'src/kapacitor/reducers/queryConfigs'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
  chooseTag,
  timeShift,
  groupByTag,
  toggleField,
  groupByTime,
  chooseNamespace,
  chooseMeasurement,
  applyFuncsToField,
  toggleTagAcceptance,
} from 'src/kapacitor/actions/queryConfigs'

const fakeAddQueryAction = (panelID, queryID) => {

@ -18,142 +19,142 @@ const fakeAddQueryAction = (panelID, queryID) => {
  }
}

function buildInitialState(queryID, params) {
  return Object.assign(
    {},
    defaultQueryConfig({id: queryID, isKapacitorRule: true}),
    params
  )
}

describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
  const queryID = 123

  it('can add a query', () => {
    const state = reducer({}, fakeAddQueryAction('blah', queryID))

    const actual = state[queryID]
    const expected = defaultQueryConfig({id: queryID, isKapacitorRule: true})
    expect(actual).to.deep.equal(expected)
  })

  describe('choosing db, rp, and measurement', () => {
    let state
    beforeEach(() => {
      state = reducer({}, fakeAddQueryAction('any', queryID))
    })

    it('sets the db and rp', () => {
      const newState = reducer(
        state,
        chooseNamespace(queryID, {
          database: 'telegraf',
          retentionPolicy: 'monitor',
        })
      )

      expect(newState[queryID].database).to.equal('telegraf')
      expect(newState[queryID].retentionPolicy).to.equal('monitor')
    })

    it('sets the measurement', () => {
      const newState = reducer(state, chooseMeasurement(queryID, 'mem'))

      expect(newState[queryID].measurement).to.equal('mem')
    })
  })

  describe('a query has measurements and fields', () => {
    let state
    beforeEach(() => {
      const one = reducer({}, fakeAddQueryAction('any', queryID))
      const two = reducer(
        one,
        chooseNamespace(queryID, {
          database: '_internal',
          retentionPolicy: 'daily',
        })
      )
      const three = reducer(two, chooseMeasurement(queryID, 'disk'))
      state = reducer(
        three,
        toggleField(queryID, {value: 'a great field', funcs: []})
      )
    })

    describe('choosing a new namespace', () => {
      it('clears out the old measurement and fields', () => {
        // what about tags?
        expect(state[queryID].measurement).to.exist
        expect(state[queryID].fields.length).to.equal(1)

        const newState = reducer(
          state,
          chooseNamespace(queryID, {
            database: 'newdb',
            retentionPolicy: 'newrp',
          })
        )

        expect(newState[queryID].measurement).not.to.exist
        expect(newState[queryID].fields.length).to.equal(0)
      })
    })

    describe('choosing a new measurement', () => {
      it('leaves the namespace and clears out the old fields', () => {
        // what about tags?
        expect(state[queryID].fields.length).to.equal(1)

        const newState = reducer(
          state,
          chooseMeasurement(queryID, 'newmeasurement')
        )

        expect(state[queryID].database).to.equal(newState[queryID].database)
        expect(state[queryID].retentionPolicy).to.equal(
          newState[queryID].retentionPolicy
        )
        expect(newState[queryID].fields.length).to.equal(0)
      })
    })

    describe('when the query is part of a kapacitor rule', () => {
      it('only allows one field', () => {
        expect(state[queryID].fields.length).to.equal(1)

        const newState = reducer(
          state,
          toggleField(queryID, {value: 'a different field', type: 'field'})
        )

        expect(newState[queryID].fields.length).to.equal(1)
        expect(newState[queryID].fields[0].value).to.equal('a different field')
      })
    })

    describe('KAPA_TOGGLE_FIELD', () => {
      it('cannot toggle multiple fields', () => {
        expect(state[queryID].fields.length).to.equal(1)

        const newState = reducer(
          state,
          toggleField(queryID, {value: 'a different field', type: 'field'})
        )

        expect(newState[queryID].fields.length).to.equal(1)
        expect(newState[queryID].fields[0].value).to.equal('a different field')
      })

      it('applies no funcs to newly selected fields', () => {
        expect(state[queryID].fields.length).to.equal(1)

        const newState = reducer(
          state,
          toggleField(queryID, {value: 'a different field', type: 'field'})
        )

        expect(newState[queryID].fields[0].type).to.equal('field')
      })
    })
  })

@ -162,7 +163,7 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
  it('applies functions to a field without any existing functions', () => {
    const f1 = {value: 'f1', type: 'field'}
    const initialState = {
      [queryID]: {
        id: 123,
        database: 'db1',
        measurement: 'm1',

@ -174,13 +175,13 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
      },
    }

    const action = applyFuncsToField(queryID, {
      field: {value: 'f1', type: 'field'},
      funcs: [{value: 'fn3', type: 'func'}, {value: 'fn4', type: 'func'}],
    })

    const nextState = reducer(initialState, action)
    const actual = nextState[queryID].fields
    const expected = [
      {value: 'fn3', type: 'func', args: [f1], alias: `fn3_${f1.value}`},
      {value: 'fn4', type: 'func', args: [f1], alias: `fn4_${f1.value}`},

@ -193,21 +194,21 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
  describe('KAPA_CHOOSE_TAG', () => {
    it('adds a tag key/value to the query', () => {
      const initialState = {
        [queryID]: buildInitialState(queryID, {
          tags: {
            k1: ['v0'],
            k2: ['foo'],
          },
        }),
      }
      const action = chooseTag(queryID, {
        key: 'k1',
        value: 'v1',
      })

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].tags).to.eql({
        k1: ['v0', 'v1'],
        k2: ['foo'],
      })

@ -215,31 +216,31 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {

    it("creates a new entry if it's the first key", () => {
      const initialState = {
        [queryID]: buildInitialState(queryID, {
          tags: {},
        }),
      }
      const action = chooseTag(queryID, {
        key: 'k1',
        value: 'v1',
      })

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].tags).to.eql({
        k1: ['v1'],
      })
    })

    it('removes a value that is already in the list', () => {
      const initialState = {
        [queryID]: buildInitialState(queryID, {
          tags: {
            k1: ['v1'],
          },
        }),
      }
      const action = chooseTag(queryID, {
        key: 'k1',
        value: 'v1',
      })

@ -247,14 +248,14 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
      const nextState = reducer(initialState, action)

      // TODO: this should probably remove the `k1` property entirely from the tags object
      expect(nextState[queryID].tags).to.eql({})
    })
  })

  describe('KAPA_GROUP_BY_TAG', () => {
    it('adds a tag key/value to the query', () => {
      const initialState = {
        [queryID]: {
          id: 123,
          database: 'db1',
          measurement: 'm1',

@ -263,11 +264,11 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
          groupBy: {tags: [], time: null},
        },
      }
      const action = groupByTag(queryID, 'k1')

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].groupBy).to.eql({
        time: null,
        tags: ['k1'],
      })

@ -275,7 +276,7 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {

    it('removes a tag if the given tag key is already in the GROUP BY list', () => {
      const initialState = {
        [queryID]: {
          id: 123,
          database: 'db1',
          measurement: 'm1',

@ -284,11 +285,11 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
          groupBy: {tags: ['k1'], time: null},
        },
      }
      const action = groupByTag(queryID, 'k1')

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].groupBy).to.eql({
        time: null,
        tags: [],
      })

@ -298,14 +299,14 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
    describe('KAPA_TOGGLE_TAG_ACCEPTANCE', () => {
      it('it toggles areTagsAccepted', () => {
        const initialState = {
          [queryID]: buildInitialState(queryID),
        }
        const action = toggleTagAcceptance(queryID)

        const nextState = reducer(initialState, action)

        expect(nextState[queryID].areTagsAccepted).to.equal(
          !initialState[queryID].areTagsAccepted
        )
      })
    })

@ -314,14 +315,28 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
    it('applys the appropriate group by time', () => {
      const time = '100y'
      const initialState = {
        [queryID]: buildInitialState(queryID),
      }

      const action = groupByTime(queryID, time)

      const nextState = reducer(initialState, action)

      expect(nextState[queryID].groupBy.time).to.equal(time)
    })
  })

  describe('KAPA_TIME_SHIFT', () => {
    it('can shift the time', () => {
      const initialState = {
        [queryID]: buildInitialState(queryID),
      }

      const shift = {quantity: 1, unit: 'd', duration: '1d'}
      const action = timeShift(queryID, shift)
      const nextState = reducer(initialState, action)

      expect(nextState[queryID].shifts).to.deep.equal([shift])
    })
  })
})
@ -0,0 +1,109 @@
import {timeRangeType, shiftTimeRange} from 'shared/query/helpers'
import moment from 'moment'
import {
  INVALID,
  ABSOLUTE,
  INFLUXQL,
  RELATIVE_LOWER,
  RELATIVE_UPPER,
} from 'shared/constants/timeRange'
const format = INFLUXQL

describe('Shared.Query.Helpers', () => {
  describe('timeRangeTypes', () => {
    it('returns invalid if no upper and lower', () => {
      const upper = null
      const lower = null

      const timeRange = {lower, upper}

      expect(timeRangeType(timeRange)).to.equal(INVALID)
    })

    it('can detect absolute type', () => {
      const tenMinutes = 600000
      const upper = Date.now()
      const lower = upper - tenMinutes

      const timeRange = {lower, upper, format}

      expect(timeRangeType(timeRange)).to.equal(ABSOLUTE)
    })

    it('can detect exclusive relative lower', () => {
      const lower = 'now() - 15m'
      const upper = null

      const timeRange = {lower, upper, format}

      expect(timeRangeType(timeRange)).to.equal(RELATIVE_LOWER)
    })

    it('can detect relative upper', () => {
      const upper = 'now()'
      const oneMinute = 60000
      const lower = Date.now() - oneMinute

      const timeRange = {lower, upper, format}

      expect(timeRangeType(timeRange)).to.equal(RELATIVE_UPPER)
    })
  })

  describe('timeRangeShift', () => {
    it('can calculate the shift for absolute timeRanges', () => {
      const upper = Date.now()
      const oneMinute = 60000
      const lower = Date.now() - oneMinute
      const shift = {quantity: 7, unit: 'd'}
      const timeRange = {upper, lower}

      const type = timeRangeType(timeRange)
      const actual = shiftTimeRange(timeRange, shift)
      const expected = {
        lower: `${lower} - 7d`,
        upper: `${upper} - 7d`,
        type: 'shifted',
      }

      expect(type).to.equal(ABSOLUTE)
      expect(actual).to.deep.equal(expected)
    })

    it('can calculate the shift for relative lower timeRanges', () => {
      const shift = {quantity: 7, unit: 'd'}
      const lower = 'now() - 15m'
      const timeRange = {lower, upper: null}

      const type = timeRangeType(timeRange)
      const actual = shiftTimeRange(timeRange, shift)
      const expected = {
        lower: `${lower} - 7d`,
        upper: `now() - 7d`,
        type: 'shifted',
      }

      expect(type).to.equal(RELATIVE_LOWER)
      expect(actual).to.deep.equal(expected)
    })

    it('can calculate the shift for relative upper timeRanges', () => {
      const upper = Date.now()
      const oneMinute = 60000
      const lower = Date.now() - oneMinute
      const shift = {quantity: 7, unit: 'd'}
      const timeRange = {upper, lower}

      const type = timeRangeType(timeRange)
      const actual = shiftTimeRange(timeRange, shift)
      const expected = {
        lower: `${lower} - 7d`,
        upper: `${upper} - 7d`,
        type: 'shifted',
      }

      expect(type).to.equal(ABSOLUTE)
      expect(actual).to.deep.equal(expected)
    })
  })
})
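A sketch of shiftTimeRange consistent with the expectations above; the shipped helper may differ in detail. Both bounds are pushed back by appending the shift duration, and a missing upper bound is treated as now().

// Sketch, not the shipped implementation: shifts both bounds back by
// "<quantity><unit>", defaulting a missing upper bound to now().
const shiftTimeRange = (timeRange, {quantity, unit}) => {
  const {lower, upper} = timeRange
  const duration = `${quantity}${unit}`
  return {
    lower: `${lower} - ${duration}`,
    upper: `${upper || 'now()'} - ${duration}`,
    type: 'shifted',
  }
}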
@ -228,11 +228,7 @@ describe('timeSeriesToDygraph', () => {
    ]

    const isInDataExplorer = true
    const actual = timeSeriesToDygraph(influxResponse, isInDataExplorer)

    const expected = {}

@ -1,54 +1,81 @@
import React, {PropTypes, Component} from 'react'

import DashboardsTable from 'src/dashboards/components/DashboardsTable'
import SearchBar from 'src/hosts/components/SearchBar'
import FancyScrollbar from 'shared/components/FancyScrollbar'

class DashboardsPageContents extends Component {
  constructor(props) {
    super(props)

    this.state = {
      searchTerm: '',
    }
  }

  filterDashboards = searchTerm => {
    this.setState({searchTerm})
  }

  render() {
    const {
      dashboards,
      onDeleteDashboard,
      onCreateDashboard,
      dashboardLink,
    } = this.props

    let tableHeader
    if (dashboards === null) {
      tableHeader = 'Loading Dashboards...'
    } else if (dashboards.length === 1) {
      tableHeader = '1 Dashboard'
    } else {
      tableHeader = `${dashboards.length} Dashboards`
    }

    const filteredDashboards = dashboards.filter(d =>
      d.name.includes(this.state.searchTerm)
    )

    return (
      <FancyScrollbar className="page-contents">
        <div className="container-fluid">
          <div className="row">
            <div className="col-md-12">
              <div className="panel panel-minimal">
                <div className="panel-heading u-flex u-ai-center u-jc-space-between">
                  <h2 className="panel-title">
                    {tableHeader}
                  </h2>
                  <div className="u-flex u-ai-center dashboards-page--actions">
                    <SearchBar
                      placeholder="Filter by Name..."
                      onSearch={this.filterDashboards}
                    />
                    <button
                      className="btn btn-sm btn-primary"
                      onClick={onCreateDashboard}
                    >
                      <span className="icon plus" /> Create Dashboard
                    </button>
                  </div>
                </div>
                <div className="panel-body">
                  <DashboardsTable
                    dashboards={filteredDashboards}
                    onDeleteDashboard={onDeleteDashboard}
                    onCreateDashboard={onCreateDashboard}
                    dashboardLink={dashboardLink}
                  />
                </div>
              </div>
            </div>
          </div>
        </div>
      </FancyScrollbar>
    )
  }
}

const {arrayOf, func, shape, string} = PropTypes
@ -79,35 +79,29 @@ export const applyMasks = query => {
  const maskForWholeTemplates = '😸$1😸'
  return query.replace(matchWholeTemplates, maskForWholeTemplates)
}

export const insertTempVar = (query, tempVar) => {
  return query.replace(MATCH_INCOMPLETE_TEMPLATES, tempVar)
}

export const unMask = query => {
  return query.replace(/😸/g, ':')
}

export const removeUnselectedTemplateValues = templates => {
  return templates.map(template => {
    const selectedValues = template.values.filter(value => value.selected)
    return {...template, values: selectedValues}
  })
}

export const DISPLAY_OPTIONS = {
  LINEAR: 'linear',
  LOG: 'log',
  BASE_2: '2',
  BASE_10: '10',
}

export const TOOLTIP_CONTENT = {
  FORMAT:
    '<p><strong>K/M/B</strong> = Thousand / Million / Billion<br/><strong>K/M/G</strong> = Kilo / Mega / Giga </p>',
}

export const TYPE_QUERY_CONFIG = 'queryConfig'
export const TYPE_SHIFTED = 'shifted queryConfig'
export const TYPE_IFQL = 'ifql'

export const DASHBOARD_NAME_MAX_LENGTH = 50
@ -39,13 +39,12 @@ class DashboardPage extends Component {
      selectedCell: null,
      isTemplating: false,
      zoomedTimeRange: {zoomedLower: null, zoomedUpper: null},
    }
  }

  async componentDidMount() {
    const {
      params: {dashboardID},
      dashboardActions: {
        getDashboardsAsync,
        updateTempVarValues,

@ -62,13 +61,6 @@ class DashboardPage extends Component {
    // Refresh and persists influxql generated template variable values
    await updateTempVarValues(source, dashboard)
    await putDashboardByID(dashboardID)
  }

  handleOpenTemplateManager = () => {

@ -263,14 +255,23 @@ class DashboardPage extends Component {
      ],
    }

    // this controls the auto group by behavior
    const interval = {
      id: 'interval',
      type: 'autoGroupBy',
      tempVar: ':interval:',
      label: 'automatically determine the best group by time',
      values: [
        {
          value: '1000', // pixels
          type: 'resolution',
          selected: true,
        },
        {
          value: '3',
          type: 'pointsPerPixel',
          selected: true,
        },
      ],
    }

    let templatesIncludingDashTime

@ -285,7 +286,11 @@ class DashboardPage extends Component {
      templatesIncludingDashTime = []
    }

    const {selectedCell, isEditMode, isTemplating} = this.state
    const names = dashboards.map(d => ({
      name: d.name,
      link: `/sources/${sourceID}/dashboards/${d.id}`,
    }))

    return (
      <div className="page">
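The :interval: variable now describes auto group-by inputs (a pixel resolution and a points-per-pixel target) instead of a fixed constant. How the backend derives the GROUP BY time from these is not shown in this diff; one reading consistent with the handler test earlier in this change (a 15m window with resolution 1000 and 3 points per pixel templates to time(2s)) is window / (resolution / pointsPerPixel), truncated:

// Illustrative only; the real derivation lives server-side. With a 900s
// window, resolution 1000 and pointsPerPixel 3 this yields time(2s),
// matching the templated query in the handler test above.
const autoGroupBy = (windowSeconds, resolution, pointsPerPixel) => {
  const maxPoints = resolution / pointsPerPixel // drawable points
  const seconds = Math.floor(windowSeconds / maxPoints)
  return `time(${Math.max(seconds, 1)}s)`
}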
@ -1,4 +1,4 @@
import React, {PropTypes, Component} from 'react'
import {withRouter} from 'react-router'
import {connect} from 'react-redux'
import {bindActionCreators} from 'redux'

@ -11,40 +11,20 @@ import {getDashboardsAsync, deleteDashboardAsync} from 'src/dashboards/actions'

import {NEW_DASHBOARD} from 'src/dashboards/constants'

class DashboardsPage extends Component {
  componentDidMount() {
    this.props.handleGetDashboards()
  }

  async handleCreateDashbord() {
    const {source: {id}, router: {push}} = this.props
    const {data} = await createDashboard(NEW_DASHBOARD)
    push(`/sources/${id}/dashboards/${data.id}`)
  }

  handleDeleteDashboard(dashboard) {
    this.props.handleDeleteDashboard(dashboard)
  }

  render() {
    const {dashboards} = this.props

@ -61,8 +41,28 @@ class DashboardsPage extends Component {
      />
    </div>
    )
  }
}

const {arrayOf, func, string, shape} = PropTypes

DashboardsPage.propTypes = {
  source: shape({
    id: string.isRequired,
    name: string.isRequired,
    type: string,
    links: shape({
      proxy: string.isRequired,
    }).isRequired,
    telegraf: string.isRequired,
  }),
  router: shape({
    push: func.isRequired,
  }).isRequired,
  handleGetDashboards: func.isRequired,
  handleDeleteDashboard: func.isRequired,
  dashboards: arrayOf(shape()),
}

const mapStateToProps = ({dashboardUI: {dashboards, dashboard}}) => ({
  dashboards,
@ -18,26 +18,26 @@ export const deleteQuery = queryID => ({
  },
})

export const toggleField = (queryID, fieldFunc) => ({
  type: 'DE_TOGGLE_FIELD',
  payload: {
    queryID,
    fieldFunc,
  },
})

export const groupByTime = (queryID, time) => ({
  type: 'DE_GROUP_BY_TIME',
  payload: {
    queryID,
    time,
  },
})

export const fill = (queryID, value) => ({
  type: 'DE_FILL',
  payload: {
    queryID,
    value,
  },
})

@ -51,44 +51,44 @@ export const removeFuncs = (queryID, fields, groupBy) => ({
  },
})

export const applyFuncsToField = (queryID, fieldFunc, groupBy) => ({
  type: 'DE_APPLY_FUNCS_TO_FIELD',
  payload: {
    queryID,
    fieldFunc,
    groupBy,
  },
})

export const chooseTag = (queryID, tag) => ({
  type: 'DE_CHOOSE_TAG',
  payload: {
    queryID,
    tag,
  },
})

export const chooseNamespace = (queryID, {database, retentionPolicy}) => ({
  type: 'DE_CHOOSE_NAMESPACE',
  payload: {
    queryID,
    database,
    retentionPolicy,
  },
})

export const chooseMeasurement = (queryID, measurement) => ({
  type: 'DE_CHOOSE_MEASUREMENT',
  payload: {
    queryID,
    measurement,
  },
})

export const editRawText = (queryID, rawText) => ({
  type: 'DE_EDIT_RAW_TEXT',
  payload: {
    queryID,
    rawText,
  },
})

@ -100,18 +100,18 @@ export const setTimeRange = bounds => ({
  },
})

export const groupByTag = (queryID, tagKey) => ({
  type: 'DE_GROUP_BY_TAG',
  payload: {
    queryID,
    tagKey,
  },
})

export const toggleTagAcceptance = queryID => ({
  type: 'DE_TOGGLE_TAG_ACCEPTANCE',
  payload: {
    queryID,
  },
})

@ -147,6 +147,14 @@ export const editQueryStatus = (queryID, status) => ({
  },
})

export const timeShift = (queryID, shift) => ({
  type: 'DE_TIME_SHIFT',
  payload: {
    queryID,
    shift,
  },
})

// Async actions
export const editRawTextAsync = (url, id, text) => async dispatch => {
  try {
@ -7,8 +7,6 @@ import Dropdown from 'shared/components/Dropdown'
|
|||
|
||||
import {AUTO_GROUP_BY} from 'shared/constants'
|
||||
|
||||
const {func, string, shape} = PropTypes
|
||||
|
||||
const isInRuleBuilder = pathname => pathname.includes('alert-rules')
|
||||
const isInDataExplorer = pathname => pathname.includes('data-explorer')
|
||||
|
||||
|
@ -37,6 +35,8 @@ const GroupByTimeDropdown = ({
|
|||
/>
|
||||
</div>
|
||||
|
||||
const {func, string, shape} = PropTypes
|
||||
|
||||
GroupByTimeDropdown.propTypes = {
|
||||
location: shape({
|
||||
pathname: string.isRequired,
|
||||
|
|
|
@ -88,7 +88,14 @@ class ChronoTable extends Component {
|
|||
)
|
||||
}
|
||||
|
||||
makeTabName = ({name, tags}) => (tags ? `${name}.${tags[name]}` : name)
|
||||
makeTabName = ({name, tags}) => {
|
||||
if (!tags) {
|
||||
return name
|
||||
}
|
||||
const tagKeys = Object.keys(tags).sort()
|
||||
const tagValues = tagKeys.map(key => tags[key]).join('.')
|
||||
return `${name}.${tagValues}`
|
||||
}
|
||||
|
||||
render() {
|
||||
const {containerWidth, height, query} = this.props
|
||||
|
@ -135,9 +142,13 @@ class ChronoTable extends Component {
|
|||
</div>
|
||||
: <Dropdown
|
||||
className="dropdown-160 table--tabs-dropdown"
|
||||
items={series.map((s, index) => ({...s, text: s.name, index}))}
|
||||
items={series.map((s, index) => ({
|
||||
...s,
|
||||
text: this.makeTabName(s),
|
||||
index,
|
||||
}))}
|
||||
onChoose={this.handleClickDropdown}
|
||||
selected={series[activeSeriesIndex].name}
|
||||
selected={this.makeTabName(series[activeSeriesIndex])}
|
||||
buttonSize="btn-xs"
|
||||
/>}
|
||||
<div className="table--tabs-content">
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
import React, {PropTypes, Component} from 'react'
|
||||
import buildInfluxQLQuery from 'utils/influxql'
|
||||
import classnames from 'classnames'
|
||||
import VisHeader from 'src/data_explorer/components/VisHeader'
|
||||
import VisView from 'src/data_explorer/components/VisView'
|
||||
import {GRAPH, TABLE} from 'shared/constants'
|
||||
import buildQueries from 'utils/buildQueriesForGraphs'
|
||||
import _ from 'lodash'
|
||||
|
||||
const META_QUERY_REGEX = /^show/i
|
||||
const META_QUERY_REGEX = /^(show|create|drop)/i
|
||||
|
||||
class Visualization extends Component {
|
||||
constructor(props) {
|
||||
|
@ -61,19 +61,11 @@ class Visualization extends Component {
|
|||
resizerBottomHeight,
|
||||
errorThrown,
|
||||
} = this.props
|
||||
|
||||
const {source: {links: {proxy}}} = this.context
|
||||
const {view} = this.state
|
||||
|
||||
const statements = queryConfigs.map(query => {
|
||||
const text =
|
||||
query.rawText || buildInfluxQLQuery(query.range || timeRange, query)
|
||||
return {text, id: query.id, queryConfig: query}
|
||||
})
|
||||
|
||||
const queries = statements.filter(s => s.text !== null).map(s => {
|
||||
return {host: [proxy], text: s.text, id: s.id, queryConfig: s.queryConfig}
|
||||
})
|
||||
|
||||
const queries = buildQueries(proxy, queryConfigs, timeRange)
|
||||
const activeQuery = queries[activeQueryIndex]
|
||||
const defaultQuery = queries[0]
|
||||
const query = activeQuery || defaultQuery
|
||||
|
@ -81,12 +73,12 @@ class Visualization extends Component {
|
|||
return (
|
||||
<div className="graph" style={{height}}>
|
||||
<VisHeader
|
||||
views={views}
|
||||
view={view}
|
||||
onToggleView={this.handleToggleView}
|
||||
name={cellName}
|
||||
views={views}
|
||||
query={query}
|
||||
name={cellName}
|
||||
errorThrown={errorThrown}
|
||||
onToggleView={this.handleToggleView}
|
||||
/>
|
||||
<div
|
||||
className={classnames({
|
||||
|
|
|
@ -3,6 +3,7 @@ import _ from 'lodash'
|
|||
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
|
||||
import {
|
||||
fill,
|
||||
timeShift,
|
||||
chooseTag,
|
||||
groupByTag,
|
||||
removeFuncs,
|
||||
|
@ -20,24 +21,24 @@ import {
|
|||
const queryConfigs = (state = {}, action) => {
|
||||
switch (action.type) {
|
||||
case 'DE_CHOOSE_NAMESPACE': {
|
||||
const {queryId, database, retentionPolicy} = action.payload
|
||||
const nextQueryConfig = chooseNamespace(state[queryId], {
|
||||
const {queryID, database, retentionPolicy} = action.payload
|
||||
const nextQueryConfig = chooseNamespace(state[queryID], {
|
||||
database,
|
||||
retentionPolicy,
|
||||
})
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: Object.assign(nextQueryConfig, {rawText: null}),
|
||||
[queryID]: Object.assign(nextQueryConfig, {rawText: null}),
|
||||
})
|
||||
}
|
||||
|
||||
case 'DE_CHOOSE_MEASUREMENT': {
|
||||
const {queryId, measurement} = action.payload
|
||||
const nextQueryConfig = chooseMeasurement(state[queryId], measurement)
|
||||
const {queryID, measurement} = action.payload
|
||||
const nextQueryConfig = chooseMeasurement(state[queryID], measurement)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: Object.assign(nextQueryConfig, {
|
||||
rawText: state[queryId].rawText,
|
||||
[queryID]: Object.assign(nextQueryConfig, {
|
||||
rawText: state[queryID].rawText,
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
@ -64,78 +65,78 @@ const queryConfigs = (state = {}, action) => {
|
|||
}
|
||||
|
||||
case 'DE_EDIT_RAW_TEXT': {
|
||||
const {queryId, rawText} = action.payload
|
||||
const nextQueryConfig = editRawText(state[queryId], rawText)
|
||||
const {queryID, rawText} = action.payload
|
||||
const nextQueryConfig = editRawText(state[queryID], rawText)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
case 'DE_GROUP_BY_TIME': {
|
||||
const {queryId, time} = action.payload
|
||||
const nextQueryConfig = groupByTime(state[queryId], time)
|
||||
const {queryID, time} = action.payload
|
||||
const nextQueryConfig = groupByTime(state[queryID], time)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
case 'DE_TOGGLE_TAG_ACCEPTANCE': {
|
||||
const {queryId} = action.payload
|
||||
const nextQueryConfig = toggleTagAcceptance(state[queryId])
|
||||
const {queryID} = action.payload
|
||||
const nextQueryConfig = toggleTagAcceptance(state[queryID])
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
case 'DE_TOGGLE_FIELD': {
|
||||
const {queryId, fieldFunc} = action.payload
|
||||
const nextQueryConfig = toggleField(state[queryId], fieldFunc)
|
||||
const {queryID, fieldFunc} = action.payload
|
||||
const nextQueryConfig = toggleField(state[queryID], fieldFunc)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: {...nextQueryConfig, rawText: null},
|
||||
[queryID]: {...nextQueryConfig, rawText: null},
|
||||
})
|
||||
}
|
||||
|
||||
case 'DE_APPLY_FUNCS_TO_FIELD': {
|
||||
const {queryId, fieldFunc, groupBy} = action.payload
|
||||
const {queryID, fieldFunc, groupBy} = action.payload
|
||||
const nextQueryConfig = applyFuncsToField(
|
||||
state[queryId],
|
||||
state[queryID],
|
||||
fieldFunc,
|
||||
groupBy
|
||||
)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
case 'DE_CHOOSE_TAG': {
|
||||
const {queryId, tag} = action.payload
|
||||
const nextQueryConfig = chooseTag(state[queryId], tag)
|
||||
const {queryID, tag} = action.payload
|
||||
const nextQueryConfig = chooseTag(state[queryID], tag)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
case 'DE_GROUP_BY_TAG': {
|
||||
const {queryId, tagKey} = action.payload
|
||||
const nextQueryConfig = groupByTag(state[queryId], tagKey)
|
||||
const {queryID, tagKey} = action.payload
|
||||
const nextQueryConfig = groupByTag(state[queryID], tagKey)
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
case 'DE_FILL': {
|
||||
const {queryId, value} = action.payload
|
||||
const nextQueryConfig = fill(state[queryId], value)
|
||||
const {queryID, value} = action.payload
|
||||
const nextQueryConfig = fill(state[queryID], value)
|
||||
|
||||
return {
|
||||
...state,
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -171,6 +172,13 @@ const queryConfigs = (state = {}, action) => {
|
|||
|
||||
return {...state, [queryID]: nextQuery}
|
||||
}
|
||||
|
||||
case 'DE_TIME_SHIFT': {
|
||||
const {queryID, shift} = action.payload
|
||||
const nextQuery = timeShift(state[queryID], shift)
|
||||
|
||||
return {...state, [queryID]: nextQuery}
|
||||
}
|
||||
}
|
||||
return state
|
||||
}
|
||||
|
|
|
@ -196,17 +196,21 @@ function parseSeries(series) {
|
|||
function parseTag(s, obj) {
|
||||
const match = tag.exec(s)
|
||||
|
||||
const kv = match[0]
|
||||
const key = match[1]
|
||||
const value = match[2]
|
||||
if (match) {
|
||||
const kv = match[0]
|
||||
const key = match[1]
|
||||
const value = match[2]
|
||||
|
||||
if (key) {
|
||||
if (!obj.tags) {
|
||||
obj.tags = {}
|
||||
if (key) {
|
||||
if (!obj.tags) {
|
||||
obj.tags = {}
|
||||
}
|
||||
obj.tags[key] = value
|
||||
}
|
||||
obj.tags[key] = value
|
||||
return s.slice(match.index + kv.length)
|
||||
}
|
||||
return s.slice(match.index + kv.length)
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
let workStr = series.slice()
|
||||
|
|
|
@ -103,7 +103,10 @@ class HostsTable extends Component {
|
|||
<h2 className="panel-title">
|
||||
{hostsTitle}
|
||||
</h2>
|
||||
<SearchBar onSearch={this.updateSearchTerm} />
|
||||
<SearchBar
|
||||
placeholder="Filter by Host..."
|
||||
onSearch={this.updateSearchTerm}
|
||||
/>
|
||||
</div>
|
||||
<div className="panel-body">
|
||||
{hostCount > 0 && !hostsError.length
|
||||
|
|
|
@ -10,8 +10,7 @@ class SearchBar extends Component {
|
|||
}
|
||||
|
||||
componentWillMount() {
|
||||
const waitPeriod = 300
|
||||
this.handleSearch = _.debounce(this.handleSearch, waitPeriod)
|
||||
this.handleSearch = _.debounce(this.handleSearch, 50)
|
||||
}
|
||||
|
||||
handleSearch = () => {
|
||||
|
@ -23,12 +22,13 @@ class SearchBar extends Component {
|
|||
}
|
||||
|
||||
render() {
|
||||
const {placeholder} = this.props
|
||||
return (
|
||||
<div className="users__search-widget input-group">
|
||||
<input
|
||||
type="text"
|
||||
className="form-control"
|
||||
placeholder="Filter by Host..."
|
||||
placeholder={placeholder}
|
||||
ref="searchInput"
|
||||
onChange={this.handleChange}
|
||||
/>
|
||||
|
@ -40,10 +40,11 @@ class SearchBar extends Component {
|
|||
}
|
||||
}
|
||||
|
||||
const {func} = PropTypes
|
||||
const {func, string} = PropTypes
|
||||
|
||||
SearchBar.propTypes = {
|
||||
onSearch: func.isRequired,
|
||||
placeholder: string.isRequired,
|
||||
}
|
||||
|
||||
export default SearchBar
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import React, {PropTypes} from 'react'
|
||||
import React, {PropTypes, Component} from 'react'
|
||||
import _ from 'lodash'
|
||||
|
||||
import HostsTable from 'src/hosts/components/HostsTable'
|
||||
|
@ -7,27 +7,16 @@ import SourceIndicator from 'shared/components/SourceIndicator'
|
|||
|
||||
import {getCpuAndLoadForHosts, getMappings, getAppsForHosts} from '../apis'
|
||||
|
||||
export const HostsPage = React.createClass({
|
||||
propTypes: {
|
||||
source: PropTypes.shape({
|
||||
id: PropTypes.string.isRequired,
|
||||
name: PropTypes.string.isRequired,
|
||||
type: PropTypes.string, // 'influx-enterprise'
|
||||
links: PropTypes.shape({
|
||||
proxy: PropTypes.string.isRequired,
|
||||
}).isRequired,
|
||||
telegraf: PropTypes.string.isRequired,
|
||||
}),
|
||||
addFlashMessage: PropTypes.func,
|
||||
},
|
||||
class HostsPage extends Component {
|
||||
constructor(props) {
|
||||
super(props)
|
||||
|
||||
getInitialState() {
|
||||
return {
|
||||
this.state = {
|
||||
hosts: {},
|
||||
hostsLoading: true,
|
||||
hostsError: '',
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
componentDidMount() {
|
||||
const {source, addFlashMessage} = this.props
|
||||
|
@ -71,7 +60,7 @@ export const HostsPage = React.createClass({
|
|||
// (like with a bogus proxy link). We should provide better messaging to the user in this catch after that's fixed.
|
||||
console.error(reason) // eslint-disable-line no-console
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
render() {
|
||||
const {source} = this.props
|
||||
|
@ -104,7 +93,22 @@ export const HostsPage = React.createClass({
|
|||
</FancyScrollbar>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const {func, shape, string} = PropTypes
|
||||
|
||||
HostsPage.propTypes = {
|
||||
source: shape({
|
||||
id: string.isRequired,
|
||||
name: string.isRequired,
|
||||
type: string, // 'influx-enterprise'
|
||||
links: shape({
|
||||
proxy: string.isRequired,
|
||||
}).isRequired,
|
||||
telegraf: string.isRequired,
|
||||
}),
|
||||
addFlashMessage: func,
|
||||
}
|
||||
|
||||
export default HostsPage
|
||||
|
|
|
@ -1,63 +1,63 @@
|
|||
export const chooseNamespace = (queryId, {database, retentionPolicy}) => ({
|
||||
export const chooseNamespace = (queryID, {database, retentionPolicy}) => ({
|
||||
type: 'KAPA_CHOOSE_NAMESPACE',
|
||||
payload: {
|
||||
queryId,
|
||||
queryID,
|
||||
database,
|
||||
retentionPolicy,
|
||||
},
|
||||
})
|
||||
|
||||
export const chooseMeasurement = (queryId, measurement) => ({
|
||||
export const chooseMeasurement = (queryID, measurement) => ({
|
||||
type: 'KAPA_CHOOSE_MEASUREMENT',
|
||||
payload: {
|
||||
queryId,
|
||||
queryID,
|
||||
measurement,
|
||||
},
|
||||
})
|
||||
|
||||
export const chooseTag = (queryId, tag) => ({
|
||||
export const chooseTag = (queryID, tag) => ({
|
||||
type: 'KAPA_CHOOSE_TAG',
|
||||
payload: {
|
||||
queryId,
|
||||
queryID,
|
||||
tag,
|
||||
},
|
||||
})
|
||||
|
||||
export const groupByTag = (queryId, tagKey) => ({
|
||||
export const groupByTag = (queryID, tagKey) => ({
|
||||
type: 'KAPA_GROUP_BY_TAG',
|
||||
payload: {
|
||||
queryId,
|
||||
queryID,
|
||||
tagKey,
|
||||
},
|
||||
})
|
||||
|
||||
export const toggleTagAcceptance = queryId => ({
|
||||
export const toggleTagAcceptance = queryID => ({
|
||||
type: 'KAPA_TOGGLE_TAG_ACCEPTANCE',
|
||||
payload: {
|
||||
queryId,
|
||||
queryID,
|
||||
},
|
||||
})
|
||||
|
||||
export const toggleField = (queryId, fieldFunc) => ({
|
||||
export const toggleField = (queryID, fieldFunc) => ({
|
||||
type: 'KAPA_TOGGLE_FIELD',
|
||||
payload: {
|
||||
queryId,
|
||||
queryID,
|
||||
fieldFunc,
|
||||
},
|
||||
})
|
||||
|
||||
export const applyFuncsToField = (queryId, fieldFunc) => ({
|
||||
export const applyFuncsToField = (queryID, fieldFunc) => ({
|
||||
type: 'KAPA_APPLY_FUNCS_TO_FIELD',
|
||||
payload: {
|
||||
queryId,
|
||||
queryID,
|
||||
fieldFunc,
|
||||
},
|
||||
})
|
||||
|
||||
export const groupByTime = (queryId, time) => ({
|
||||
export const groupByTime = (queryID, time) => ({
|
||||
type: 'KAPA_GROUP_BY_TIME',
|
||||
payload: {
|
||||
queryId,
|
||||
queryID,
|
||||
time,
|
||||
},
|
||||
})
|
||||
|
@ -69,3 +69,11 @@ export const removeFuncs = (queryID, fields) => ({
|
|||
fields,
|
||||
},
|
||||
})
|
||||
|
||||
export const timeShift = (queryID, shift) => ({
|
||||
type: 'KAPA_TIME_SHIFT',
|
||||
payload: {
|
||||
queryID,
|
||||
shift,
|
||||
},
|
||||
})
|
||||
|
|
|
@ -66,7 +66,7 @@ export const getRule = (kapacitor, ruleID) => async dispatch => {
|
|||
}
|
||||
}
|
||||
|
||||
export function loadDefaultRule() {
|
||||
export const loadDefaultRule = () => {
|
||||
return dispatch => {
|
||||
const queryID = uuid.v4()
|
||||
dispatch({
|
||||
|
@ -88,15 +88,13 @@ export const fetchRules = kapacitor => async dispatch => {
|
|||
}
|
||||
}
|
||||
|
||||
export function chooseTrigger(ruleID, trigger) {
|
||||
return {
|
||||
type: 'CHOOSE_TRIGGER',
|
||||
payload: {
|
||||
ruleID,
|
||||
trigger,
|
||||
},
|
||||
}
|
||||
}
|
||||
export const chooseTrigger = (ruleID, trigger) => ({
|
||||
type: 'CHOOSE_TRIGGER',
|
||||
payload: {
|
||||
ruleID,
|
||||
trigger,
|
||||
},
|
||||
})
|
||||
|
||||
export const addEvery = (ruleID, frequency) => ({
|
||||
type: 'ADD_EVERY',
|
||||
|
@ -113,36 +111,30 @@ export const removeEvery = ruleID => ({
|
|||
},
|
||||
})
|
||||
|
||||
export function updateRuleValues(ruleID, trigger, values) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_VALUES',
|
||||
payload: {
|
||||
ruleID,
|
||||
trigger,
|
||||
values,
|
||||
},
|
||||
}
|
||||
}
|
||||
export const updateRuleValues = (ruleID, trigger, values) => ({
|
||||
type: 'UPDATE_RULE_VALUES',
|
||||
payload: {
|
||||
ruleID,
|
||||
trigger,
|
||||
values,
|
||||
},
|
||||
})
|
||||
|
||||
export function updateMessage(ruleID, message) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_MESSAGE',
|
||||
payload: {
|
||||
ruleID,
|
||||
message,
|
||||
},
|
||||
}
|
||||
}
|
||||
export const updateMessage = (ruleID, message) => ({
|
||||
type: 'UPDATE_RULE_MESSAGE',
|
||||
payload: {
|
||||
ruleID,
|
||||
message,
|
||||
},
|
||||
})
|
||||
|
||||
export function updateDetails(ruleID, details) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_DETAILS',
|
||||
payload: {
|
||||
ruleID,
|
||||
details,
|
||||
},
|
||||
}
|
||||
}
|
||||
export const updateDetails = (ruleID, details) => ({
|
||||
type: 'UPDATE_RULE_DETAILS',
|
||||
payload: {
|
||||
ruleID,
|
||||
details,
|
||||
},
|
||||
})
|
||||
|
||||
export const updateAlertProperty = (ruleID, alertNodeName, alertProperty) => ({
|
||||
type: 'UPDATE_RULE_ALERT_PROPERTY',
|
||||
|
@ -153,87 +145,73 @@ export const updateAlertProperty = (ruleID, alertNodeName, alertProperty) => ({
|
|||
},
|
||||
})
|
||||
|
||||
export function updateAlerts(ruleID, alerts) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_ALERTS',
|
||||
payload: {
|
||||
ruleID,
|
||||
alerts,
|
||||
},
|
||||
}
|
||||
export const updateAlerts = (ruleID, alerts) => ({
|
||||
type: 'UPDATE_RULE_ALERTS',
|
||||
payload: {
|
||||
ruleID,
|
||||
alerts,
|
||||
},
|
||||
})
|
||||
|
||||
export const updateAlertNodes = (ruleID, alertNodeName, alertNodesText) => ({
|
||||
type: 'UPDATE_RULE_ALERT_NODES',
|
||||
payload: {
|
||||
ruleID,
|
||||
alertNodeName,
|
||||
alertNodesText,
|
||||
},
|
||||
})
|
||||
|
||||
export const updateRuleName = (ruleID, name) => ({
|
||||
type: 'UPDATE_RULE_NAME',
|
||||
payload: {
|
||||
ruleID,
|
||||
name,
|
||||
},
|
||||
})
|
||||
|
||||
export const deleteRuleSuccess = ruleID => ({
|
||||
type: 'DELETE_RULE_SUCCESS',
|
||||
payload: {
|
||||
ruleID,
|
||||
},
|
||||
})
|
||||
|
||||
export const updateRuleStatusSuccess = (ruleID, status) => ({
|
||||
type: 'UPDATE_RULE_STATUS_SUCCESS',
|
||||
payload: {
|
||||
ruleID,
|
||||
status,
|
||||
},
|
||||
})
|
||||
|
||||
export const deleteRule = rule => dispatch => {
|
||||
deleteRuleAPI(rule)
|
||||
.then(() => {
|
||||
dispatch(deleteRuleSuccess(rule.id))
|
||||
dispatch(
|
||||
publishNotification('success', `${rule.name} deleted successfully`)
|
||||
)
|
||||
})
|
||||
.catch(() => {
|
||||
dispatch(
|
||||
publishNotification('error', `${rule.name} could not be deleted`)
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
export function updateAlertNodes(ruleID, alertNodeName, alertNodesText) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_ALERT_NODES',
|
||||
payload: {
|
||||
ruleID,
|
||||
alertNodeName,
|
||||
alertNodesText,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function updateRuleName(ruleID, name) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_NAME',
|
||||
payload: {
|
||||
ruleID,
|
||||
name,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function deleteRuleSuccess(ruleID) {
|
||||
return {
|
||||
type: 'DELETE_RULE_SUCCESS',
|
||||
payload: {
|
||||
ruleID,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function updateRuleStatusSuccess(ruleID, status) {
|
||||
return {
|
||||
type: 'UPDATE_RULE_STATUS_SUCCESS',
|
||||
payload: {
|
||||
ruleID,
|
||||
status,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function deleteRule(rule) {
|
||||
return dispatch => {
|
||||
deleteRuleAPI(rule)
|
||||
.then(() => {
|
||||
dispatch(deleteRuleSuccess(rule.id))
|
||||
dispatch(
|
||||
publishNotification('success', `${rule.name} deleted successfully`)
|
||||
)
|
||||
})
|
||||
.catch(() => {
|
||||
dispatch(
|
||||
publishNotification('error', `${rule.name} could not be deleted`)
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export function updateRuleStatus(rule, status) {
|
||||
return dispatch => {
|
||||
updateRuleStatusAPI(rule, status)
|
||||
.then(() => {
|
||||
dispatch(
|
||||
publishNotification('success', `${rule.name} ${status} successfully`)
|
||||
)
|
||||
})
|
||||
.catch(() => {
|
||||
dispatch(
|
||||
publishNotification('error', `${rule.name} could not be ${status}`)
|
||||
)
|
||||
})
|
||||
}
|
||||
export const updateRuleStatus = (rule, status) => dispatch => {
|
||||
updateRuleStatusAPI(rule, status)
|
||||
.then(() => {
|
||||
dispatch(
|
||||
publishNotification('success', `${rule.name} ${status} successfully`)
|
||||
)
|
||||
})
|
||||
.catch(() => {
|
||||
dispatch(
|
||||
publishNotification('error', `${rule.name} could not be ${status}`)
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
export const createTask = (
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
import React, {PropTypes} from 'react'
|
||||
import {CHANGES, OPERATORS, SHIFTS} from 'src/kapacitor/constants'
|
||||
import {CHANGES, RELATIVE_OPERATORS, SHIFTS} from 'src/kapacitor/constants'
|
||||
import Dropdown from 'shared/components/Dropdown'
|
||||
|
||||
const mapToItems = (arr, type) => arr.map(text => ({text, type}))
|
||||
const changes = mapToItems(CHANGES, 'change')
|
||||
const shifts = mapToItems(SHIFTS, 'shift')
|
||||
const operators = mapToItems(OPERATORS, 'operator')
|
||||
const operators = mapToItems(RELATIVE_OPERATORS, 'operator')
|
||||
|
||||
const Relative = ({
|
||||
onRuleTypeInputChange,
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
import React, {PropTypes} from 'react'
|
||||
import {OPERATORS} from 'src/kapacitor/constants'
|
||||
import {THRESHOLD_OPERATORS} from 'src/kapacitor/constants'
|
||||
import Dropdown from 'shared/components/Dropdown'
|
||||
import _ from 'lodash'
|
||||
|
||||
const mapToItems = (arr, type) => arr.map(text => ({text, type}))
|
||||
const operators = mapToItems(OPERATORS, 'operator')
|
||||
const operators = mapToItems(THRESHOLD_OPERATORS, 'operator')
|
||||
const noopSubmit = e => e.preventDefault()
|
||||
const getField = ({fields}) => {
|
||||
const alias = _.get(fields, ['0', 'alias'], false)
|
||||
|
|
|
@ -31,7 +31,7 @@ export const OUTSIDE_RANGE = 'outside range'
|
|||
export const EQUAL_TO_OR_GREATER_THAN = 'equal to or greater'
|
||||
export const EQUAL_TO_OR_LESS_THAN = 'equal to or less than'
|
||||
|
||||
export const OPERATORS = [
|
||||
export const THRESHOLD_OPERATORS = [
|
||||
GREATER_THAN,
|
||||
EQUAL_TO_OR_GREATER_THAN,
|
||||
EQUAL_TO_OR_LESS_THAN,
|
||||
|
@ -42,6 +42,15 @@ export const OPERATORS = [
|
|||
OUTSIDE_RANGE,
|
||||
]
|
||||
|
||||
export const RELATIVE_OPERATORS = [
|
||||
GREATER_THAN,
|
||||
EQUAL_TO_OR_GREATER_THAN,
|
||||
EQUAL_TO_OR_LESS_THAN,
|
||||
LESS_THAN,
|
||||
EQUAL_TO,
|
||||
NOT_EQUAL_TO,
|
||||
]
|
||||
|
||||
// export const RELATIONS = ['once', 'more than ', 'less than'];
|
||||
export const PERIODS = ['1m', '5m', '10m', '30m', '1h', '2h', '24h']
|
||||
export const CHANGES = ['change', '% change']
|
||||
|
|
|
@ -61,13 +61,13 @@ class KapacitorRulePage extends Component {
|
|||
render() {
|
||||
const {
|
||||
rules,
|
||||
queryConfigs,
|
||||
params,
|
||||
ruleActions,
|
||||
source,
|
||||
queryConfigActions,
|
||||
addFlashMessage,
|
||||
router,
|
||||
ruleActions,
|
||||
queryConfigs,
|
||||
addFlashMessage,
|
||||
queryConfigActions,
|
||||
} = this.props
|
||||
const {enabledAlerts, kapacitor} = this.state
|
||||
const rule = this.isEditing()
|
||||
|
@ -80,17 +80,17 @@ class KapacitorRulePage extends Component {
|
|||
}
|
||||
return (
|
||||
<KapacitorRule
|
||||
source={source}
|
||||
rule={rule}
|
||||
query={query}
|
||||
queryConfigs={queryConfigs}
|
||||
queryConfigActions={queryConfigActions}
|
||||
ruleActions={ruleActions}
|
||||
addFlashMessage={addFlashMessage}
|
||||
enabledAlerts={enabledAlerts}
|
||||
isEditing={this.isEditing()}
|
||||
router={router}
|
||||
source={source}
|
||||
kapacitor={kapacitor}
|
||||
ruleActions={ruleActions}
|
||||
queryConfigs={queryConfigs}
|
||||
isEditing={this.isEditing()}
|
||||
enabledAlerts={enabledAlerts}
|
||||
addFlashMessage={addFlashMessage}
|
||||
queryConfigActions={queryConfigActions}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
|
||||
import {
|
||||
applyFuncsToField,
|
||||
chooseMeasurement,
|
||||
chooseNamespace,
|
||||
timeShift,
|
||||
chooseTag,
|
||||
groupByTag,
|
||||
groupByTime,
|
||||
removeFuncs,
|
||||
chooseNamespace,
|
||||
toggleKapaField,
|
||||
applyFuncsToField,
|
||||
chooseMeasurement,
|
||||
toggleTagAcceptance,
|
||||
} from 'src/utils/queryTransitions'
|
||||
|
||||
|
@ -34,9 +35,9 @@ const queryConfigs = (state = {}, action) => {
|
|||
}
|
||||
|
||||
case 'KAPA_CHOOSE_NAMESPACE': {
|
||||
const {queryId, database, retentionPolicy} = action.payload
|
||||
const {queryID, database, retentionPolicy} = action.payload
|
||||
const nextQueryConfig = chooseNamespace(
|
||||
state[queryId],
|
||||
state[queryID],
|
||||
{
|
||||
database,
|
||||
retentionPolicy,
|
||||
|
@ -45,75 +46,75 @@ const queryConfigs = (state = {}, action) => {
|
|||
)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: Object.assign(nextQueryConfig, {rawText: null}),
|
||||
[queryID]: Object.assign(nextQueryConfig, {rawText: null}),
|
||||
})
|
||||
}
|
||||
|
||||
case 'KAPA_CHOOSE_MEASUREMENT': {
|
||||
const {queryId, measurement} = action.payload
|
||||
const {queryID, measurement} = action.payload
|
||||
const nextQueryConfig = chooseMeasurement(
|
||||
state[queryId],
|
||||
state[queryID],
|
||||
measurement,
|
||||
IS_KAPACITOR_RULE
|
||||
)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: Object.assign(nextQueryConfig, {
|
||||
rawText: state[queryId].rawText,
|
||||
[queryID]: Object.assign(nextQueryConfig, {
|
||||
rawText: state[queryID].rawText,
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
case 'KAPA_CHOOSE_TAG': {
|
||||
const {queryId, tag} = action.payload
|
||||
const nextQueryConfig = chooseTag(state[queryId], tag)
|
||||
const {queryID, tag} = action.payload
|
||||
const nextQueryConfig = chooseTag(state[queryID], tag)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
case 'KAPA_GROUP_BY_TAG': {
|
||||
const {queryId, tagKey} = action.payload
|
||||
const nextQueryConfig = groupByTag(state[queryId], tagKey)
|
||||
const {queryID, tagKey} = action.payload
|
||||
const nextQueryConfig = groupByTag(state[queryID], tagKey)
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
case 'KAPA_TOGGLE_TAG_ACCEPTANCE': {
|
||||
const {queryId} = action.payload
|
||||
const nextQueryConfig = toggleTagAcceptance(state[queryId])
|
||||
const {queryID} = action.payload
|
||||
const nextQueryConfig = toggleTagAcceptance(state[queryID])
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
case 'KAPA_TOGGLE_FIELD': {
|
||||
const {queryId, fieldFunc} = action.payload
|
||||
const nextQueryConfig = toggleKapaField(state[queryId], fieldFunc)
|
||||
const {queryID, fieldFunc} = action.payload
|
||||
const nextQueryConfig = toggleKapaField(state[queryID], fieldFunc)
|
||||
|
||||
return {...state, [queryId]: {...nextQueryConfig, rawText: null}}
|
||||
return {...state, [queryID]: {...nextQueryConfig, rawText: null}}
|
||||
}
|
||||
|
||||
case 'KAPA_APPLY_FUNCS_TO_FIELD': {
|
||||
const {queryId, fieldFunc} = action.payload
|
||||
const {groupBy} = state[queryId]
|
||||
const nextQueryConfig = applyFuncsToField(state[queryId], fieldFunc, {
|
||||
const {queryID, fieldFunc} = action.payload
|
||||
const {groupBy} = state[queryID]
|
||||
const nextQueryConfig = applyFuncsToField(state[queryID], fieldFunc, {
|
||||
...groupBy,
|
||||
time: groupBy.time ? groupBy.time : '10s',
|
||||
})
|
||||
|
||||
return {...state, [queryId]: nextQueryConfig}
|
||||
return {...state, [queryID]: nextQueryConfig}
|
||||
}
|
||||
|
||||
case 'KAPA_GROUP_BY_TIME': {
|
||||
const {queryId, time} = action.payload
|
||||
const nextQueryConfig = groupByTime(state[queryId], time)
|
||||
const {queryID, time} = action.payload
|
||||
const nextQueryConfig = groupByTime(state[queryID], time)
|
||||
|
||||
return Object.assign({}, state, {
|
||||
[queryId]: nextQueryConfig,
|
||||
[queryID]: nextQueryConfig,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -124,6 +125,13 @@ const queryConfigs = (state = {}, action) => {
|
|||
// fields with no functions cannot have a group by time
|
||||
return {...state, [queryID]: nextQuery}
|
||||
}
|
||||
|
||||
case 'KAPA_TIME_SHIFT': {
|
||||
const {queryID, shift} = action.payload
|
||||
const nextQuery = timeShift(state[queryID], shift)
|
||||
|
||||
return {...state, [queryID]: nextQuery}
|
||||
}
|
||||
}
|
||||
return state
|
||||
}
|
||||
|
|
|
@ -24,8 +24,8 @@ export function showQueries(source, db) {
|
|||
return proxy({source, query, db})
|
||||
}
|
||||
|
||||
export function killQuery(source, queryId) {
|
||||
const query = `KILL QUERY ${queryId}`
|
||||
export function killQuery(source, queryID) {
|
||||
const query = `KILL QUERY ${queryID}`
|
||||
|
||||
return proxy({source, query})
|
||||
}
|
||||
|
|
|
@ -81,20 +81,35 @@ const AutoRefresh = ComposedComponent => {
|
|||
const templatesWithResolution = templates.map(temp => {
|
||||
if (temp.tempVar === ':interval:') {
|
||||
if (resolution) {
|
||||
return {...temp, resolution}
|
||||
return {
|
||||
...temp,
|
||||
values: temp.values.map(
|
||||
v => (temp.type === 'resolution' ? {...v, resolution} : v)
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...temp,
|
||||
values: [
|
||||
...temp.values,
|
||||
{value: '1000', type: 'resolution', selected: true},
|
||||
],
|
||||
}
|
||||
return {...temp, resolution: 1000}
|
||||
}
|
||||
return {...temp}
|
||||
|
||||
return temp
|
||||
})
|
||||
|
||||
const tempVars = removeUnselectedTemplateValues(templatesWithResolution)
|
||||
|
||||
return fetchTimeSeriesAsync(
|
||||
{
|
||||
source: host,
|
||||
db: database,
|
||||
rp,
|
||||
query,
|
||||
tempVars: removeUnselectedTemplateValues(templatesWithResolution),
|
||||
tempVars,
|
||||
resolution,
|
||||
},
|
||||
editQueryStatus
|
||||
|
@ -136,13 +151,6 @@ const AutoRefresh = ComposedComponent => {
|
|||
return this.renderFetching(timeSeries)
|
||||
}
|
||||
|
||||
if (
|
||||
!this._resultsForQuery(timeSeries) ||
|
||||
!this.state.lastQuerySuccessful
|
||||
) {
|
||||
return this.renderNoResults()
|
||||
}
|
||||
|
||||
return (
|
||||
<ComposedComponent
|
||||
{...this.props}
|
||||
|
@ -169,14 +177,6 @@ const AutoRefresh = ComposedComponent => {
|
|||
)
|
||||
}
|
||||
|
||||
renderNoResults = () => {
|
||||
return (
|
||||
<div className="graph-empty">
|
||||
<p data-test="data-explorer-no-results">No Results</p>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
_resultsForQuery = data =>
|
||||
data.length
|
||||
? data.every(({response}) =>
|
||||
|
|
|
@ -45,12 +45,17 @@ const DatabaseList = React.createClass({
|
|||
this.getDbRp()
|
||||
},
|
||||
|
||||
componentDidUpdate(prevProps) {
|
||||
if (_.isEqual(prevProps.querySource, this.props.querySource)) {
|
||||
return
|
||||
}
|
||||
componentDidUpdate({querySource: prevSource, query: prevQuery}) {
|
||||
const {querySource: nextSource, query: nextQuery} = this.props
|
||||
const differentSource = !_.isEqual(prevSource, nextSource)
|
||||
|
||||
this.getDbRp()
|
||||
const newMetaQuery =
|
||||
prevQuery.rawText !== nextQuery.rawText &&
|
||||
nextQuery.rawText.match(/^(create|drop)/i)
|
||||
|
||||
if (differentSource || newMetaQuery) {
|
||||
setTimeout(this.getDbRp, 100)
|
||||
}
|
||||
},
|
||||
|
||||
getDbRp() {
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import React, {PropTypes} from 'react'
|
||||
import _ from 'lodash'
|
||||
import classnames from 'classnames'
|
||||
import uuid from 'node-uuid'
|
||||
|
||||
import {makeLegendStyles} from 'shared/graphs/helpers'
|
||||
|
||||
|
@ -68,7 +69,6 @@ const DygraphLegend = ({
|
|||
<div className="sort-btn--bottom">9</div>
|
||||
</button>
|
||||
)
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`dygraph-legend ${hidden}`}
|
||||
|
@ -118,7 +118,7 @@ const DygraphLegend = ({
|
|||
? 'dygraph-legend--row highlight'
|
||||
: 'dygraph-legend--row'
|
||||
return (
|
||||
<div key={label + color} className={seriesClass}>
|
||||
<div key={uuid.v4()} className={seriesClass}>
|
||||
<span style={{color}}>
|
||||
{isSnipped ? removeMeasurement(label) : label}
|
||||
</span>
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
import React, {PropTypes, Component} from 'react'
|
||||
import _ from 'lodash'
|
||||
|
||||
import QueryOptions from 'shared/components/QueryOptions'
|
||||
import FieldListItem from 'src/data_explorer/components/FieldListItem'
|
||||
import GroupByTimeDropdown from 'src/data_explorer/components/GroupByTimeDropdown'
|
||||
import FillQuery from 'shared/components/FillQuery'
|
||||
import FancyScrollbar from 'shared/components/FancyScrollbar'
|
||||
|
||||
import {showFieldKeys} from 'shared/apis/metaQuery'
|
||||
|
@ -107,6 +106,10 @@ class FieldList extends Component {
|
|||
applyFuncsToField(fieldFunc, groupBy)
|
||||
}
|
||||
|
||||
handleTimeShift = shift => {
|
||||
this.props.onTimeShift(shift)
|
||||
}
|
||||
|
||||
_getFields = () => {
|
||||
const {database, measurement, retentionPolicy} = this.props.query
|
||||
const {source} = this.context
|
||||
|
@ -129,12 +132,11 @@ class FieldList extends Component {
|
|||
|
||||
render() {
|
||||
const {
|
||||
query: {database, measurement, fields = [], groupBy, fill},
|
||||
query: {database, measurement, fields = [], groupBy, fill, shifts},
|
||||
isKapacitorRule,
|
||||
} = this.props
|
||||
|
||||
const hasAggregates = numFunctions(fields) > 0
|
||||
const hasGroupByTime = groupBy.time
|
||||
const noDBorMeas = !database || !measurement
|
||||
|
||||
return (
|
||||
|
@ -142,16 +144,15 @@ class FieldList extends Component {
|
|||
<div className="query-builder--heading">
|
||||
<span>Fields</span>
|
||||
{hasAggregates
|
||||
? <div className="query-builder--groupby-fill-container">
|
||||
<GroupByTimeDropdown
|
||||
isOpen={!hasGroupByTime}
|
||||
selected={groupBy.time}
|
||||
onChooseGroupByTime={this.handleGroupByTime}
|
||||
/>
|
||||
{isKapacitorRule
|
||||
? null
|
||||
: <FillQuery value={fill} onChooseFill={this.handleFill} />}
|
||||
</div>
|
||||
? <QueryOptions
|
||||
fill={fill}
|
||||
shift={_.first(shifts)}
|
||||
groupBy={groupBy}
|
||||
onFill={this.handleFill}
|
||||
isKapacitorRule={isKapacitorRule}
|
||||
onTimeShift={this.handleTimeShift}
|
||||
onGroupByTime={this.handleGroupByTime}
|
||||
/>
|
||||
: null}
|
||||
</div>
|
||||
{noDBorMeas
|
||||
|
@ -192,7 +193,7 @@ class FieldList extends Component {
|
|||
}
|
||||
}
|
||||
|
||||
const {bool, func, shape, string} = PropTypes
|
||||
const {arrayOf, bool, func, shape, string} = PropTypes
|
||||
|
||||
FieldList.defaultProps = {
|
||||
isKapacitorRule: false,
|
||||
|
@ -212,7 +213,15 @@ FieldList.propTypes = {
|
|||
database: string,
|
||||
retentionPolicy: string,
|
||||
measurement: string,
|
||||
shifts: arrayOf(
|
||||
shape({
|
||||
label: string,
|
||||
unit: string,
|
||||
quantity: string,
|
||||
})
|
||||
),
|
||||
}).isRequired,
|
||||
onTimeShift: func,
|
||||
onToggleField: func.isRequired,
|
||||
onGroupByTime: func.isRequired,
|
||||
onFill: func,
|
||||
|
|
|
@ -2,7 +2,7 @@ import React, {Component, PropTypes} from 'react'
|
|||
import WidgetCell from 'shared/components/WidgetCell'
|
||||
import LayoutCell from 'shared/components/LayoutCell'
|
||||
import RefreshingGraph from 'shared/components/RefreshingGraph'
|
||||
import {buildQueriesForLayouts} from 'utils/influxql'
|
||||
import {buildQueriesForLayouts} from 'utils/buildQueriesForLayouts'
|
||||
|
||||
import _ from 'lodash'
|
||||
|
||||
|
|
|
@ -17,12 +17,8 @@ class LineGraph extends Component {
|
|||
}
|
||||
|
||||
componentWillMount() {
|
||||
const {data, activeQueryIndex, isInDataExplorer} = this.props
|
||||
this._timeSeries = timeSeriesToDygraph(
|
||||
data,
|
||||
activeQueryIndex,
|
||||
isInDataExplorer
|
||||
)
|
||||
const {data, isInDataExplorer} = this.props
|
||||
this._timeSeries = timeSeriesToDygraph(data, isInDataExplorer)
|
||||
}
|
||||
|
||||
componentWillUpdate(nextProps) {
|
||||
|
@ -33,7 +29,6 @@ class LineGraph extends Component {
|
|||
) {
|
||||
this._timeSeries = timeSeriesToDygraph(
|
||||
nextProps.data,
|
||||
nextProps.activeQueryIndex,
|
||||
nextProps.isInDataExplorer
|
||||
)
|
||||
}
|
||||
|
|
|
@ -0,0 +1,45 @@
|
|||
import React, {PropTypes} from 'react'
|
||||
import GroupByTimeDropdown from 'src/data_explorer/components/GroupByTimeDropdown'
|
||||
import TimeShiftDropdown from 'src/shared/components/TimeShiftDropdown'
|
||||
import FillQuery from 'shared/components/FillQuery'
|
||||
|
||||
const QueryOptions = ({
|
||||
fill,
|
||||
shift,
|
||||
onFill,
|
||||
groupBy,
|
||||
onTimeShift,
|
||||
onGroupByTime,
|
||||
isKapacitorRule,
|
||||
}) =>
|
||||
<div className="query-builder--groupby-fill-container">
|
||||
<GroupByTimeDropdown
|
||||
selected={groupBy.time}
|
||||
onChooseGroupByTime={onGroupByTime}
|
||||
/>
|
||||
{isKapacitorRule
|
||||
? null
|
||||
: <TimeShiftDropdown
|
||||
selected={shift && shift.label}
|
||||
onChooseTimeShift={onTimeShift}
|
||||
/>}
|
||||
{isKapacitorRule ? null : <FillQuery value={fill} onChooseFill={onFill} />}
|
||||
</div>
|
||||
|
||||
const {bool, func, shape, string} = PropTypes
|
||||
|
||||
QueryOptions.propTypes = {
|
||||
fill: string,
|
||||
onFill: func.isRequired,
|
||||
groupBy: shape({
|
||||
time: string,
|
||||
}).isRequired,
|
||||
shift: shape({
|
||||
label: string,
|
||||
}),
|
||||
onGroupByTime: func.isRequired,
|
||||
isKapacitorRule: bool.isRequired,
|
||||
onTimeShift: func.isRequired,
|
||||
}
|
||||
|
||||
export default QueryOptions
|
|
@ -13,6 +13,7 @@ const SchemaExplorer = ({
|
|||
initialGroupByTime,
|
||||
actions: {
|
||||
fill,
|
||||
timeShift,
|
||||
chooseTag,
|
||||
groupByTag,
|
||||
groupByTime,
|
||||
|
@ -44,13 +45,14 @@ const SchemaExplorer = ({
|
|||
source={source}
|
||||
query={query}
|
||||
querySource={source}
|
||||
initialGroupByTime={initialGroupByTime}
|
||||
onToggleField={actionBinder(id, toggleField)}
|
||||
onFill={actionBinder(id, fill)}
|
||||
onGroupByTime={actionBinder(id, groupByTime)}
|
||||
applyFuncsToField={actionBinder(id, applyFuncsToField)}
|
||||
initialGroupByTime={initialGroupByTime}
|
||||
onTimeShift={actionBinder(id, timeShift)}
|
||||
removeFuncs={actionBinder(id, removeFuncs)}
|
||||
onToggleField={actionBinder(id, toggleField)}
|
||||
onGroupByTime={actionBinder(id, groupByTime)}
|
||||
addInitialField={actionBinder(id, addInitialField)}
|
||||
applyFuncsToField={actionBinder(id, applyFuncsToField)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
import React, {PropTypes} from 'react'
|
||||
import Dropdown from 'shared/components/Dropdown'
|
||||
import {TIME_SHIFTS} from 'shared/constants/timeShift'
|
||||
|
||||
const TimeShiftDropdown = ({selected, onChooseTimeShift}) =>
|
||||
<div className="group-by-time">
|
||||
<label className="group-by-time--label">Compare:</label>
|
||||
<Dropdown
|
||||
className="group-by-time--dropdown"
|
||||
buttonColor="btn-info"
|
||||
items={TIME_SHIFTS}
|
||||
onChoose={onChooseTimeShift}
|
||||
selected={selected || 'none'}
|
||||
/>
|
||||
</div>
|
||||
|
||||
const {func, string} = PropTypes
|
||||
|
||||
TimeShiftDropdown.propTypes = {
|
||||
selected: string,
|
||||
onChooseTimeShift: func.isRequired,
|
||||
}
|
||||
|
||||
export default TimeShiftDropdown
|
|
@ -0,0 +1,4 @@
|
|||
export const ABSOLUTE = 'absolute'
|
||||
export const INVALID = 'invalid'
|
||||
export const RELATIVE_LOWER = 'relative lower'
|
||||
export const RELATIVE_UPPER = 'relative upper'
|
|
@ -0,0 +1,10 @@
|
|||
export const TIME_SHIFTS = [
|
||||
{label: 'none', text: 'none', quantity: null, unit: null},
|
||||
{label: '1m', text: '1m', quantity: '1', unit: 'm'},
|
||||
{label: '1h', text: '1h', quantity: '1', unit: 'h'},
|
||||
{label: '12h', text: '12h', quantity: '12', unit: 'h'},
|
||||
{label: '1d', text: '1d', quantity: '1', unit: 'd'},
|
||||
{label: '7d', text: '7d', quantity: '7', unit: 'd'},
|
||||
{label: '30d', text: '30d', quantity: '30', unit: 'd'},
|
||||
{label: '365d', text: '365d', quantity: '365', unit: 'd'},
|
||||
]
|
|
@ -37,7 +37,8 @@ const errorsMiddleware = store => next => action => {
|
|||
} else if (altText) {
|
||||
store.dispatch(notify(alertType, altText))
|
||||
} else {
|
||||
store.dispatch(notify(alertType, 'Cannot communicate with server.'))
|
||||
// TODO: actually do proper error handling
|
||||
// store.dispatch(notify(alertType, 'Cannot communicate with server.'))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,110 @@
|
|||
import moment from 'moment'
|
||||
import {
|
||||
INFLUXQL,
|
||||
ABSOLUTE,
|
||||
INVALID,
|
||||
RELATIVE_LOWER,
|
||||
RELATIVE_UPPER,
|
||||
} from 'shared/constants/timeRange'
|
||||
const now = /^now/
|
||||
|
||||
export const timeRangeType = ({upper, lower, type}) => {
|
||||
if (!upper && !lower) {
|
||||
return INVALID
|
||||
}
|
||||
|
||||
if (type && type !== INFLUXQL) {
|
||||
return INVALID
|
||||
}
|
||||
|
||||
const isUpperValid = moment(upper).isValid()
|
||||
const isLowerValid = moment(lower).isValid()
|
||||
|
||||
// {lower: <Date>, upper: <Date>}
|
||||
if (isLowerValid && isUpperValid) {
|
||||
return ABSOLUTE
|
||||
}
|
||||
|
||||
// {lower: now - <Duration>, upper: <empty>}
|
||||
if (now.test(lower) && !upper) {
|
||||
return RELATIVE_LOWER
|
||||
}
|
||||
|
||||
// {lower: <Date>, upper: now() - <Duration>}
|
||||
if (isLowerValid && now.test(upper)) {
|
||||
return RELATIVE_UPPER
|
||||
}
|
||||
|
||||
return INVALID
|
||||
}
|
||||
|
||||
export const shiftTimeRange = (timeRange, shift) => {
|
||||
const {upper, lower} = timeRange
|
||||
const {quantity, unit} = shift
|
||||
const trType = timeRangeType(timeRange)
|
||||
const duration = `${quantity}${unit}`
|
||||
const type = 'shifted'
|
||||
|
||||
switch (trType) {
|
||||
case RELATIVE_UPPER:
|
||||
case ABSOLUTE: {
|
||||
return {
|
||||
lower: `${lower} - ${duration}`,
|
||||
upper: `${upper} - ${duration}`,
|
||||
type,
|
||||
}
|
||||
}
|
||||
|
||||
case RELATIVE_LOWER: {
|
||||
return {
|
||||
lower: `${lower} - ${duration}`,
|
||||
upper: `now() - ${duration}`,
|
||||
type,
|
||||
}
|
||||
}
|
||||
|
||||
default: {
|
||||
return {lower, upper, type: 'unshifted'}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const getMomentUnit = unit => {
|
||||
switch (unit) {
|
||||
case 'ms': {
|
||||
return 'milliseconds' // (1 thousandth of a second)
|
||||
}
|
||||
|
||||
case 's': {
|
||||
return 'seconds'
|
||||
}
|
||||
|
||||
case 'm': {
|
||||
return 'minute'
|
||||
}
|
||||
|
||||
case 'h': {
|
||||
return 'hour'
|
||||
}
|
||||
|
||||
case 'd': {
|
||||
return 'day'
|
||||
}
|
||||
|
||||
case 'w': {
|
||||
return 'week'
|
||||
}
|
||||
|
||||
default: {
|
||||
return unit
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const shiftDate = (date, quantity, unit) => {
|
||||
if (!date && !quantity && !unit) {
|
||||
return moment(date)
|
||||
}
|
||||
|
||||
return moment(date).add(quantity, getMomentUnit(unit))
|
||||
}
|
|
@ -138,7 +138,7 @@ $graph-gutter: 16px;
|
|||
font-size: 20px;
|
||||
font-weight: 400;
|
||||
margin: 0;
|
||||
text-align: left;
|
||||
text-align: center;
|
||||
color: $g8-storm;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
|
|
|
@ -56,6 +56,14 @@ $dash-graph-options-arrow: 8px;
|
|||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Dashboard Index Page
|
||||
------------------------------------------------------
|
||||
*/
|
||||
.dashboards-page--actions .users__search-widget {
|
||||
margin-right: 8px;
|
||||
}
|
||||
|
||||
/*
|
||||
Default Dashboard Mode
|
||||
------------------------------------------------------
|
||||
|
|
|
@ -1,28 +1,46 @@
|
|||
import {buildQuery} from 'utils/influxql'
|
||||
import {TYPE_QUERY_CONFIG} from 'src/dashboards/constants'
|
||||
import {TYPE_QUERY_CONFIG, TYPE_SHIFTED} from 'src/dashboards/constants'
|
||||
|
||||
const buildQueries = (proxy, queryConfigs, timeRange) => {
|
||||
const buildQueries = (proxy, queryConfigs, tR) => {
|
||||
const statements = queryConfigs.map(query => {
|
||||
const text =
|
||||
query.rawText ||
|
||||
buildQuery(TYPE_QUERY_CONFIG, query.range || timeRange, query)
|
||||
return {text, id: query.id, queryConfig: query}
|
||||
})
|
||||
const {rawText, range, id, shifts, database, measurement, fields} = query
|
||||
const timeRange = range || tR
|
||||
const text = rawText || buildQuery(TYPE_QUERY_CONFIG, timeRange, query)
|
||||
const isParsable = database && measurement && fields.length
|
||||
|
||||
const queries = statements.filter(s => s.text !== null).map(s => {
|
||||
let queryProxy = ''
|
||||
if (s.queryConfig.source) {
|
||||
queryProxy = `${s.queryConfig.source.links.proxy}`
|
||||
if (shifts && shifts.length && isParsable) {
|
||||
const shiftedQueries = shifts
|
||||
.filter(s => s.unit)
|
||||
.map(s => buildQuery(TYPE_SHIFTED, timeRange, query, s))
|
||||
|
||||
return {
|
||||
text: `${text};${shiftedQueries.join(';')}`,
|
||||
id,
|
||||
queryConfig: query,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
host: [queryProxy || proxy],
|
||||
text: s.text,
|
||||
id: s.id,
|
||||
queryConfig: s.queryConfig,
|
||||
}
|
||||
return {text, id, queryConfig: query}
|
||||
})
|
||||
|
||||
const queries = statements
|
||||
.filter(s => s.text !== null)
|
||||
.map(({queryConfig, text, id}) => {
|
||||
let queryProxy = ''
|
||||
if (queryConfig.source) {
|
||||
queryProxy = `${queryConfig.source.links.proxy}`
|
||||
}
|
||||
|
||||
const host = [queryProxy || proxy]
|
||||
|
||||
return {
|
||||
host,
|
||||
text,
|
||||
id,
|
||||
queryConfig,
|
||||
}
|
||||
})
|
||||
|
||||
return queries
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,72 @@
|
|||
import {buildQuery} from 'utils/influxql'
|
||||
import {TYPE_SHIFTED, TYPE_QUERY_CONFIG} from 'src/dashboards/constants'
|
||||
import timeRanges from 'hson!shared/data/timeRanges.hson'
|
||||
|
||||
const buildCannedDashboardQuery = (query, {lower, upper}, host) => {
|
||||
const {defaultGroupBy} = timeRanges.find(range => range.lower === lower) || {
|
||||
defaultGroupBy: '5m',
|
||||
}
|
||||
const {wheres, groupbys} = query
|
||||
|
||||
let text = query.text
|
||||
|
||||
if (upper) {
|
||||
text += ` where time > '${lower}' AND time < '${upper}'`
|
||||
} else {
|
||||
text += ` where time > ${lower}`
|
||||
}
|
||||
|
||||
if (host) {
|
||||
text += ` and \"host\" = '${host}'`
|
||||
}
|
||||
|
||||
if (wheres && wheres.length > 0) {
|
||||
text += ` and ${wheres.join(' and ')}`
|
||||
}
|
||||
|
||||
if (groupbys) {
|
||||
if (groupbys.find(g => g.includes('time'))) {
|
||||
text += ` group by ${groupbys.join(',')}`
|
||||
} else if (groupbys.length > 0) {
|
||||
text += ` group by time(${defaultGroupBy}),${groupbys.join(',')}`
|
||||
} else {
|
||||
text += ` group by time(${defaultGroupBy})`
|
||||
}
|
||||
} else {
|
||||
text += ` group by time(${defaultGroupBy})`
|
||||
}
|
||||
|
||||
return text
|
||||
}
|
||||
|
||||
export const buildQueriesForLayouts = (cell, source, timeRange, host) => {
|
||||
return cell.queries.map(query => {
|
||||
let queryText
|
||||
// Canned dashboards use an different a schema different from queryConfig.
|
||||
if (query.queryConfig) {
|
||||
const {
|
||||
queryConfig: {database, measurement, fields, shifts, rawText, range},
|
||||
} = query
|
||||
const tR = range || {
|
||||
upper: ':upperDashboardTime:',
|
||||
lower: ':dashboardTime:',
|
||||
}
|
||||
|
||||
queryText =
|
||||
rawText || buildQuery(TYPE_QUERY_CONFIG, tR, query.queryConfig)
|
||||
const isParsable = database && measurement && fields.length
|
||||
|
||||
if (shifts && shifts.length && isParsable) {
|
||||
const shiftedQueries = shifts
|
||||
.filter(s => s.unit)
|
||||
.map(s => buildQuery(TYPE_SHIFTED, timeRange, query.queryConfig, s))
|
||||
|
||||
queryText = `${queryText};${shiftedQueries.join(';')}`
|
||||
}
|
||||
} else {
|
||||
queryText = buildCannedDashboardQuery(query, timeRange, host)
|
||||
}
|
||||
|
||||
return {...query, host: source.links.proxy, text: queryText}
|
||||
})
|
||||
}
|
|
@ -15,6 +15,7 @@ const defaultQueryConfig = ({id, isKapacitorRule = false}) => {
|
|||
areTagsAccepted: true,
|
||||
rawText: null,
|
||||
status: null,
|
||||
shifts: [],
|
||||
}
|
||||
|
||||
return isKapacitorRule ? queryConfig : {...queryConfig, fill: NULL_STRING}
|
||||
|
|
|
@ -2,8 +2,12 @@ import _ from 'lodash'
|
|||
|
||||
import {TEMP_VAR_INTERVAL, AUTO_GROUP_BY} from 'shared/constants'
|
||||
import {NULL_STRING} from 'shared/constants/queryFillOptions'
|
||||
import {TYPE_QUERY_CONFIG, TYPE_IFQL} from 'src/dashboards/constants'
|
||||
import timeRanges from 'hson!shared/data/timeRanges.hson'
|
||||
import {
|
||||
TYPE_QUERY_CONFIG,
|
||||
TYPE_SHIFTED,
|
||||
TYPE_IFQL,
|
||||
} from 'src/dashboards/constants'
|
||||
import {shiftTimeRange} from 'shared/query/helpers'
|
||||
|
||||
/* eslint-disable quotes */
|
||||
export const quoteIfTimestamp = ({lower, upper}) => {
|
||||
|
@ -19,11 +23,11 @@ export const quoteIfTimestamp = ({lower, upper}) => {
|
|||
}
|
||||
/* eslint-enable quotes */
|
||||
|
||||
export default function buildInfluxQLQuery(timeRange, config) {
|
||||
export default function buildInfluxQLQuery(timeRange, config, shift) {
|
||||
const {groupBy, fill = NULL_STRING, tags, areTagsAccepted} = config
|
||||
const {upper, lower} = quoteIfTimestamp(timeRange)
|
||||
|
||||
const select = _buildSelect(config)
|
||||
const select = _buildSelect(config, shift)
|
||||
if (select === null) {
|
||||
return null
|
||||
}
|
||||
|
@ -35,26 +39,35 @@ export default function buildInfluxQLQuery(timeRange, config) {
|
|||
return `${select}${condition}${dimensions}${fillClause}`
|
||||
}
|
||||
|
||||
function _buildSelect({fields, database, retentionPolicy, measurement}) {
|
||||
function _buildSelect({fields, database, retentionPolicy, measurement}, shift) {
|
||||
  if (!database || !measurement || !fields || !fields.length) {
    return null
  }

  const rpSegment = retentionPolicy ? `"${retentionPolicy}"` : ''
  const fieldsClause = _buildFields(fields)
  const fieldsClause = _buildFields(fields, shift)
  const fullyQualifiedMeasurement = `"${database}".${rpSegment}."${measurement}"`
  const statement = `SELECT ${fieldsClause} FROM ${fullyQualifiedMeasurement}`
  return statement
}

// type arg will reason about new query types i.e. IFQL, GraphQL, or queryConfig
export const buildQuery = (type, timeRange, config) => {
export const buildQuery = (type, timeRange, config, shift) => {
  switch (type) {
    case `${TYPE_QUERY_CONFIG}`: {
    case TYPE_QUERY_CONFIG: {
      return buildInfluxQLQuery(timeRange, config)
    }

    case `${TYPE_IFQL}`: {
    case TYPE_SHIFTED: {
      const {quantity, unit} = shift
      return buildInfluxQLQuery(
        shiftTimeRange(timeRange, shift),
        config,
        `_shifted__${quantity}__${unit}`
      )
    }

    case TYPE_IFQL: {
      // build query using IFQL here
    }
  }

@ -66,7 +79,7 @@ export function buildSelectStatement(config) {
  return _buildSelect(config)
}

function _buildFields(fieldFuncs) {
function _buildFields(fieldFuncs, shift = '') {
  if (!fieldFuncs) {
    return ''
  }

@ -77,9 +90,21 @@ function _buildFields(fieldFuncs) {
    case 'field': {
      return f.value === '*' ? '*' : `"${f.value}"`
    }
    case 'wildcard': {
      return '*'
    }
    case 'regex': {
      return `/${f.value}/`
    }
    case 'number': {
      return `${f.value}`
    }
    case 'integer': {
      return `${f.value}`
    }
    case 'func': {
      const args = _buildFields(f.args)
      const alias = f.alias ? ` AS "${f.alias}"` : ''
      const alias = f.alias ? ` AS "${f.alias}${shift}"` : ''
      return `${f.value}(${args})${alias}`
    }
  }

@ -155,61 +180,5 @@ function _buildFill(fill) {
  return ` FILL(${fill})`
}

const buildCannedDashboardQuery = (query, {lower, upper}, host) => {
  const {defaultGroupBy} = timeRanges.find(range => range.lower === lower) || {
    defaultGroupBy: '5m',
  }
  const {wheres, groupbys} = query

  let text = query.text

  if (upper) {
    text += ` where time > '${lower}' AND time < '${upper}'`
  } else {
    text += ` where time > ${lower}`
  }

  if (host) {
    text += ` and \"host\" = '${host}'`
  }

  if (wheres && wheres.length > 0) {
    text += ` and ${wheres.join(' and ')}`
  }

  if (groupbys) {
    if (groupbys.find(g => g.includes('time'))) {
      text += ` group by ${groupbys.join(',')}`
    } else if (groupbys.length > 0) {
      text += ` group by time(${defaultGroupBy}),${groupbys.join(',')}`
    } else {
      text += ` group by time(${defaultGroupBy})`
    }
  } else {
    text += ` group by time(${defaultGroupBy})`
  }

  return text
}

export const buildQueriesForLayouts = (cell, source, timeRange, host) => {
  return cell.queries.map(query => {
    let queryText
    // Canned dashboards use a schema different from queryConfig.
    if (query.queryConfig) {
      const {queryConfig: {rawText, range}} = query
      const tR = range || {
        upper: ':upperDashboardTime:',
        lower: ':dashboardTime:',
      }
      queryText = rawText || buildInfluxQLQuery(tR, query.queryConfig)
    } else {
      queryText = buildCannedDashboardQuery(query, timeRange, host)
    }

    return {...query, host: source.links.proxy, text: queryText}
  })
}

export const buildRawText = (q, timeRange) =>
  q.rawText || buildInfluxQLQuery(timeRange, q) || ''

@ -108,7 +108,7 @@ export const toggleField = (query, {value}) => {
  }
}

export function groupByTime(query, time) {
export const groupByTime = (query, time) => {
  return Object.assign({}, query, {
    groupBy: Object.assign({}, query.groupBy, {
      time,

@ -118,7 +118,7 @@ export function groupByTime(query, time) {

export const fill = (query, value) => ({...query, fill: value})

export function toggleTagAcceptance(query) {
export const toggleTagAcceptance = query => {
  return Object.assign({}, query, {
    areTagsAccepted: !query.areTagsAccepted,
  })

@ -185,13 +185,13 @@ export const applyFuncsToField = (query, {field, funcs = []}, groupBy) => {
  }
}

export function updateRawQuery(query, rawText) {
export const updateRawQuery = (query, rawText) => {
  return Object.assign({}, query, {
    rawText,
  })
}

export function groupByTag(query, tagKey) {
export const groupByTag = (query, tagKey) => {
  const oldTags = query.groupBy.tags
  let newTags

@ -209,7 +209,7 @@ export function groupByTag(query, tagKey) {
  })
}

export function chooseTag(query, tag) {
export const chooseTag = (query, tag) => {
  const tagValues = query.tags[tag.key]
  const shouldRemoveTag =
    tagValues && tagValues.length === 1 && tagValues[0] === tag.value

@ -219,6 +219,14 @@ export function chooseTag(query, tag) {
    return Object.assign({}, query, {tags: newTags})
  }

  const updateTagValues = newTagValues => {
    return Object.assign({}, query, {
      tags: Object.assign({}, query.tags, {
        [tag.key]: newTagValues,
      }),
    })
  }

  const oldTagValues = query.tags[tag.key]
  if (!oldTagValues) {
    return updateTagValues([tag.value])

@ -233,12 +241,6 @@ export function chooseTag(query, tag) {
  }

  return updateTagValues(query.tags[tag.key].concat(tag.value))

  function updateTagValues(newTagValues) {
    return Object.assign({}, query, {
      tags: Object.assign({}, query.tags, {
        [tag.key]: newTagValues,
      }),
    })
  }
}

export const timeShift = (query, shift) => ({...query, shifts: [shift]})

@ -1,4 +1,5 @@
import _ from 'lodash'
import {shiftDate} from 'shared/query/helpers'
import {map, reduce, forEach, concat, clone} from 'fast.js'

/**

@ -15,12 +16,7 @@ const cells = {
  responseIndex: new Array(DEFAULT_SIZE),
}

// activeQueryIndex is an optional argument that indicates which query's series we want highlighted.
export default function timeSeriesToDygraph(
  raw = [],
  activeQueryIndex,
  isInDataExplorer
) {
export default function timeSeriesToDygraph(raw = [], isInDataExplorer) {
  // collect results from each influx response
  const results = reduce(
    raw,

@ -115,11 +111,16 @@ export default function timeSeriesToDygraph(

  const timeSeries = []
  for (let i = 0; i < size; i++) {
    const time = cells.time[i]
    let time = cells.time[i]
    const value = cells.value[i]
    const label = cells.label[i]
    const seriesIndex = cells.seriesIndex[i]

    if (label.includes('_shifted__')) {
      const [, quantity, duration] = label.split('__')
      time = +shiftDate(time, quantity, duration).format('x')
    }

    let existingRowIndex = tsMemo[time]

    if (existingRowIndex === undefined) {

@ -1,11 +1,16 @@
var webpack = require('webpack');
var path = require('path');
var ExtractTextPlugin = require("extract-text-webpack-plugin");
var HtmlWebpackPlugin = require("html-webpack-plugin");
var package = require('../package.json');
var dependencies = package.dependencies;
var webpack = require('webpack')
var path = require('path')
var ExtractTextPlugin = require('extract-text-webpack-plugin')
var HtmlWebpackPlugin = require('html-webpack-plugin')
var package = require('../package.json')
const WebpackOnBuildPlugin = require('on-build-webpack')
const fs = require('fs')
var dependencies = package.dependencies

const buildDir = path.resolve(__dirname, '../build')

module.exports = {
  watch: true,
  devtool: 'source-map',
  entry: {
    app: path.resolve(__dirname, '..', 'src', 'index.js'),

@ -14,7 +19,7 @@ module.exports = {
  output: {
    publicPath: '/',
    path: path.resolve(__dirname, '../build'),
    filename: '[name].[chunkhash].dev.js',
    filename: '[name].[hash].dev.js',
  },
  resolve: {
    alias: {

@ -48,15 +53,21 @@ module.exports = {
      },
      {
        test: /\.scss$/,
        loader: ExtractTextPlugin.extract('style-loader', 'css-loader!sass-loader!resolve-url!sass?sourceMap'),
        loader: ExtractTextPlugin.extract(
          'style-loader',
          'css-loader!sass-loader!resolve-url!sass?sourceMap'
        ),
      },
      {
        test: /\.css$/,
        loader: ExtractTextPlugin.extract('style-loader', 'css-loader!postcss-loader'),
        loader: ExtractTextPlugin.extract(
          'style-loader',
          'css-loader!postcss-loader'
        ),
      },
      {
        test : /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
        loader : 'file',
        test: /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
        loader: 'file',
      },
      {
        test: /\.js$/,

@ -70,18 +81,19 @@ module.exports = {
    ],
  },
  sassLoader: {
    includePaths: [path.resolve(__dirname, "node_modules")],
    includePaths: [path.resolve(__dirname, 'node_modules')],
  },
  eslint: {
    failOnWarning: false,
    failOnError: false,
  },
  plugins: [
    new webpack.HotModuleReplacementPlugin(),
    new webpack.ProvidePlugin({
      $: "jquery",
      jQuery: "jquery",
      $: 'jquery',
      jQuery: 'jquery',
    }),
    new ExtractTextPlugin("chronograf.css"),
    new ExtractTextPlugin('chronograf.css'),
    new HtmlWebpackPlugin({
      template: path.resolve(__dirname, '..', 'src', 'index.template.html'),
      inject: 'body',

@ -93,7 +105,45 @@ module.exports = {
    new webpack.DefinePlugin({
      VERSION: JSON.stringify(require('../package.json').version),
    }),
    new WebpackOnBuildPlugin(function(stats) {
      const newlyCreatedAssets = stats.compilation.assets

      const unlinked = []
      fs.readdir(path.resolve(buildDir), (err, files) => {
        files.forEach(file => {
          if (!newlyCreatedAssets[file]) {
            const del = path.resolve(buildDir + file)
            fs.stat(del, function(err, stat) {
              if (err == null) {
                try {
                  fs.unlink(path.resolve(buildDir + file))
                  unlinked.push(file)
                } catch (e) {}
              }
            })
          }
        })
      })
    }),
  ],
  postcss: require('./postcss'),
  target: 'web',
};
  devServer: {
    hot: true,
    historyApiFallback: true,
    clientLogLevel: 'info',
    stats: {colors: true},
    contentBase: 'build',
    quiet: false,
    watchOptions: {
      aggregateTimeout: 300,
      poll: 1000,
    },
    proxy: {
      '/chronograf/v1': {
        target: 'http://localhost:8888',
        secure: false,
      },
    },
  },
}

@ -5134,6 +5134,10 @@ object.values@^1.0.3:
    function-bind "^1.1.0"
    has "^1.0.1"

on-build-webpack@^0.1.0:
  version "0.1.0"
  resolved "https://registry.yarnpkg.com/on-build-webpack/-/on-build-webpack-0.1.0.tgz#a287c0e17766e6141926e5f2cbb0d8bb53b76814"

on-finished@~2.3.0:
  version "2.3.0"
  resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947"

@ -0,0 +1,89 @@
// Copyright 2017, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

// Package cmpopts provides common options for the cmp package.
package cmpopts

import (
	"math"
	"reflect"

	"github.com/google/go-cmp/cmp"
)

func equateAlways(_, _ interface{}) bool { return true }

// EquateEmpty returns a Comparer option that determines all maps and slices
// with a length of zero to be equal, regardless of whether they are nil.
//
// EquateEmpty can be used in conjunction with SortSlices and SortMaps.
func EquateEmpty() cmp.Option {
	return cmp.FilterValues(isEmpty, cmp.Comparer(equateAlways))
}

func isEmpty(x, y interface{}) bool {
	vx, vy := reflect.ValueOf(x), reflect.ValueOf(y)
	return (x != nil && y != nil && vx.Type() == vy.Type()) &&
		(vx.Kind() == reflect.Slice || vx.Kind() == reflect.Map) &&
		(vx.Len() == 0 && vy.Len() == 0)
}
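
A minimal usage sketch of EquateEmpty (editor's addition, not part of the diff; it assumes the vendored import path github.com/google/go-cmp/cmp/cmpopts and mirrors the EquateEmpty cases in the test table later in this change):

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	x := []int{} // empty but non-nil
	var y []int  // nil
	fmt.Println(cmp.Equal(x, y))                        // false: nil and empty slices differ
	fmt.Println(cmp.Equal(x, y, cmpopts.EquateEmpty())) // true: both have length zero
}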

// EquateApprox returns a Comparer option that determines float32 or float64
// values to be equal if they are within a relative fraction or absolute margin.
// This option is not used when either x or y is NaN or infinite.
//
// The fraction determines that the difference of two values must be within the
// smaller fraction of the two values, while the margin determines that the two
// values must be within some absolute margin.
// To express only a fraction or only a margin, use 0 for the other parameter.
// The fraction and margin must be non-negative.
//
// The mathematical expression used is equivalent to:
//	|x-y| ≤ max(fraction*min(|x|, |y|), margin)
//
// EquateApprox can be used in conjunction with EquateNaNs.
func EquateApprox(fraction, margin float64) cmp.Option {
	if margin < 0 || fraction < 0 || math.IsNaN(margin) || math.IsNaN(fraction) {
		panic("margin or fraction must be a non-negative number")
	}
	a := approximator{fraction, margin}
	return cmp.Options{
		cmp.FilterValues(areRealF64s, cmp.Comparer(a.compareF64)),
		cmp.FilterValues(areRealF32s, cmp.Comparer(a.compareF32)),
	}
}
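
A short sketch of the fraction/margin behavior (editor's addition, not part of the diff; the numbers are taken from the EquateApprox cases in the test table below):

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	// |3.09 - 3.10| = 0.01, so a margin of 0.011 matches while 0.009 does not.
	fmt.Println(cmp.Equal(3.09, 3.10, cmpopts.EquateApprox(0, 0.011))) // true
	fmt.Println(cmp.Equal(3.09, 3.10, cmpopts.EquateApprox(0, 0.009))) // false
	// A fraction of 0.004 allows up to 0.004*min(3.09, 3.10) ≈ 0.0124.
	fmt.Println(cmp.Equal(3.09, 3.10, cmpopts.EquateApprox(0.004, 0))) // true
}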

type approximator struct{ frac, marg float64 }

func areRealF64s(x, y float64) bool {
	return !math.IsNaN(x) && !math.IsNaN(y) && !math.IsInf(x, 0) && !math.IsInf(y, 0)
}
func areRealF32s(x, y float32) bool {
	return areRealF64s(float64(x), float64(y))
}
func (a approximator) compareF64(x, y float64) bool {
	relMarg := a.frac * math.Min(math.Abs(x), math.Abs(y))
	return math.Abs(x-y) <= math.Max(a.marg, relMarg)
}
func (a approximator) compareF32(x, y float32) bool {
	return a.compareF64(float64(x), float64(y))
}

// EquateNaNs returns a Comparer option that determines float32 and float64
// NaN values to be equal.
//
// EquateNaNs can be used in conjunction with EquateApprox.
func EquateNaNs() cmp.Option {
	return cmp.Options{
		cmp.FilterValues(areNaNsF64s, cmp.Comparer(equateAlways)),
		cmp.FilterValues(areNaNsF32s, cmp.Comparer(equateAlways)),
	}
}
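
A minimal sketch of EquateNaNs (editor's addition, not part of the diff; the NaN-in-slice case mirrors the EquateNaNs entries in the test table below):

package main

import (
	"fmt"
	"math"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	x := []float64{1.0, math.NaN()}
	y := []float64{1.0, math.NaN()}
	fmt.Println(cmp.Equal(x, y))                       // false: NaN != NaN under ==
	fmt.Println(cmp.Equal(x, y, cmpopts.EquateNaNs())) // true: NaNs are equated
}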

func areNaNsF64s(x, y float64) bool {
	return math.IsNaN(x) && math.IsNaN(y)
}
func areNaNsF32s(x, y float32) bool {
	return areNaNsF64s(float64(x), float64(y))
}

@ -0,0 +1,148 @@
// Copyright 2017, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

package cmpopts

import (
	"fmt"
	"reflect"
	"unicode"
	"unicode/utf8"

	"github.com/google/go-cmp/cmp"
)

// IgnoreFields returns an Option that ignores exported fields of the
// given names on a single struct type.
// The struct type is specified by passing in a value of that type.
//
// The name may be a dot-delimited string (e.g., "Foo.Bar") to ignore a
// specific sub-field that is embedded or nested within the parent struct.
//
// This does not handle unexported fields; use IgnoreUnexported instead.
func IgnoreFields(typ interface{}, names ...string) cmp.Option {
	sf := newStructFilter(typ, names...)
	return cmp.FilterPath(sf.filter, cmp.Ignore())
}
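
A minimal usage sketch of IgnoreFields (editor's addition, not part of the diff; the User type is hypothetical and exists only for this illustration):

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

// User is a hypothetical type used only for this illustration.
type User struct {
	Name     string
	LastSeen int64
}

func main() {
	x := User{Name: "alice", LastSeen: 100}
	y := User{Name: "alice", LastSeen: 200}
	fmt.Println(cmp.Equal(x, y))                                           // false: LastSeen differs
	fmt.Println(cmp.Equal(x, y, cmpopts.IgnoreFields(User{}, "LastSeen"))) // true: field ignored
}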

// IgnoreTypes returns an Option that ignores all values assignable to
// certain types, which are specified by passing in a value of each type.
func IgnoreTypes(typs ...interface{}) cmp.Option {
	tf := newTypeFilter(typs...)
	return cmp.FilterPath(tf.filter, cmp.Ignore())
}

type typeFilter []reflect.Type

func newTypeFilter(typs ...interface{}) (tf typeFilter) {
	for _, typ := range typs {
		t := reflect.TypeOf(typ)
		if t == nil {
			// This occurs if someone tries to pass in sync.Locker(nil)
			panic("cannot determine type; consider using IgnoreInterfaces")
		}
		tf = append(tf, t)
	}
	return tf
}
func (tf typeFilter) filter(p cmp.Path) bool {
	if len(p) < 1 {
		return false
	}
	t := p[len(p)-1].Type()
	for _, ti := range tf {
		if t.AssignableTo(ti) {
			return true
		}
	}
	return false
}

// IgnoreInterfaces returns an Option that ignores all values or references of
// values assignable to certain interface types. These interfaces are specified
// by passing in an anonymous struct with the interface types embedded in it.
// For example, to ignore sync.Locker, pass in struct{sync.Locker}{}.
func IgnoreInterfaces(ifaces interface{}) cmp.Option {
	tf := newIfaceFilter(ifaces)
	return cmp.FilterPath(tf.filter, cmp.Ignore())
}
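
A minimal sketch of IgnoreInterfaces (editor's addition, not part of the diff; the guarded type is hypothetical, and the sync.Locker case is taken from the IgnoreInterfaces entries in the test table below):

package main

import (
	"fmt"
	"sync"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

// guarded is hypothetical; comparing it without an option would panic because
// sync.Mutex has unexported fields.
type guarded struct{ mu sync.Mutex }

func main() {
	opt := cmpopts.IgnoreInterfaces(struct{ sync.Locker }{})
	// The mutex field is ignored because *sync.Mutex implements sync.Locker.
	fmt.Println(cmp.Equal(guarded{}, guarded{}, opt)) // true
}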

type ifaceFilter []reflect.Type

func newIfaceFilter(ifaces interface{}) (tf ifaceFilter) {
	t := reflect.TypeOf(ifaces)
	if ifaces == nil || t.Name() != "" || t.Kind() != reflect.Struct {
		panic("input must be an anonymous struct")
	}
	for i := 0; i < t.NumField(); i++ {
		fi := t.Field(i)
		switch {
		case !fi.Anonymous:
			panic("struct cannot have named fields")
		case fi.Type.Kind() != reflect.Interface:
			panic("embedded field must be an interface type")
		case fi.Type.NumMethod() == 0:
			// This matches everything; why would you ever want this?
			panic("cannot ignore empty interface")
		default:
			tf = append(tf, fi.Type)
		}
	}
	return tf
}
func (tf ifaceFilter) filter(p cmp.Path) bool {
	if len(p) < 1 {
		return false
	}
	t := p[len(p)-1].Type()
	for _, ti := range tf {
		if t.AssignableTo(ti) {
			return true
		}
		if t.Kind() != reflect.Ptr && reflect.PtrTo(t).AssignableTo(ti) {
			return true
		}
	}
	return false
}

// IgnoreUnexported returns an Option that only ignores the immediate unexported
// fields of a struct, including anonymous fields of unexported types.
// In particular, unexported fields within the struct's exported fields
// of struct types, including anonymous fields, will not be ignored unless the
// type of the field itself is also passed to IgnoreUnexported.
func IgnoreUnexported(typs ...interface{}) cmp.Option {
	ux := newUnexportedFilter(typs...)
	return cmp.FilterPath(ux.filter, cmp.Ignore())
}
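
A minimal sketch of IgnoreUnexported (editor's addition, not part of the diff; the record type is hypothetical and mirrors the ParentStruct cases in the test table below):

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

// record is a hypothetical type used only for this illustration.
type record struct {
	Public  int
	private int
}

func main() {
	x := record{Public: 1, private: 2}
	y := record{Public: 1, private: -2}
	// Without an option, cmp.Equal panics on the unexported field.
	fmt.Println(cmp.Equal(x, y, cmpopts.IgnoreUnexported(record{}))) // true: private is ignored
}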

type unexportedFilter struct{ m map[reflect.Type]bool }

func newUnexportedFilter(typs ...interface{}) unexportedFilter {
	ux := unexportedFilter{m: make(map[reflect.Type]bool)}
	for _, typ := range typs {
		t := reflect.TypeOf(typ)
		if t == nil || t.Kind() != reflect.Struct {
			panic(fmt.Sprintf("invalid struct type: %T", typ))
		}
		ux.m[t] = true
	}
	return ux
}
func (xf unexportedFilter) filter(p cmp.Path) bool {
	if len(p) < 2 {
		return false
	}
	sf, ok := p[len(p)-1].(cmp.StructField)
	if !ok {
		return false
	}
	return xf.m[p[len(p)-2].Type()] && !isExported(sf.Name())
}

// isExported reports whether the identifier is exported.
func isExported(id string) bool {
	r, _ := utf8.DecodeRuneInString(id)
	return unicode.IsUpper(r)
}

@ -0,0 +1,155 @@
// Copyright 2017, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

package cmpopts

import (
	"fmt"
	"reflect"

	"github.com/google/go-cmp/cmp"
)

// SortSlices returns a Transformer option that sorts all []V.
// The less function must be of the form "func(T, T) bool" which is used to
// sort any slice with element type V that is assignable to T.
//
// The less function must be:
//	• Deterministic: less(x, y) == less(x, y)
//	• Irreflexive: !less(x, x)
//	• Transitive: if !less(x, y) and !less(y, z), then !less(x, z)
//
// The less function does not have to be "total". That is, if !less(x, y) and
// !less(y, x) for two elements x and y, their relative order is maintained.
//
// SortSlices can be used in conjunction with EquateEmpty.
func SortSlices(less interface{}) cmp.Option {
	vf := reflect.ValueOf(less)
	if !isTTBoolFunc(vf.Type()) || vf.IsNil() {
		panic(fmt.Sprintf("invalid less function: %T", less))
	}
	ss := sliceSorter{vf.Type().In(0), vf}
	return cmp.FilterValues(ss.filter, cmp.Transformer("Sort", ss.sort))
}
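
A minimal sketch of SortSlices (editor's addition, not part of the diff; it mirrors the int SortSlices cases in the test table below):

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	x := []int{0, 1, 2, 3}
	y := []int{3, 1, 0, 2}
	fmt.Println(cmp.Equal(x, y)) // false: element order differs
	lessInt := func(a, b int) bool { return a < b }
	fmt.Println(cmp.Equal(x, y, cmpopts.SortSlices(lessInt))) // true: both sort to {0, 1, 2, 3}
}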

type sliceSorter struct {
	in  reflect.Type  // T
	fnc reflect.Value // func(T, T) bool
}

func (ss sliceSorter) filter(x, y interface{}) bool {
	vx, vy := reflect.ValueOf(x), reflect.ValueOf(y)
	if !(x != nil && y != nil && vx.Type() == vy.Type()) ||
		!(vx.Kind() == reflect.Slice && vx.Type().Elem().AssignableTo(ss.in)) ||
		(vx.Len() <= 1 && vy.Len() <= 1) {
		return false
	}
	// Check whether the slices are already sorted to avoid an infinite
	// recursion cycle applying the same transform to itself.
	ok1 := sliceIsSorted(x, func(i, j int) bool { return ss.less(vx, i, j) })
	ok2 := sliceIsSorted(y, func(i, j int) bool { return ss.less(vy, i, j) })
	return !ok1 || !ok2
}
func (ss sliceSorter) sort(x interface{}) interface{} {
	src := reflect.ValueOf(x)
	dst := reflect.MakeSlice(src.Type(), src.Len(), src.Len())
	for i := 0; i < src.Len(); i++ {
		dst.Index(i).Set(src.Index(i))
	}
	sortSliceStable(dst.Interface(), func(i, j int) bool { return ss.less(dst, i, j) })
	ss.checkSort(dst)
	return dst.Interface()
}
func (ss sliceSorter) checkSort(v reflect.Value) {
	start := -1 // Start of a sequence of equal elements.
	for i := 1; i < v.Len(); i++ {
		if ss.less(v, i-1, i) {
			// Check that first and last elements in v[start:i] are equal.
			if start >= 0 && (ss.less(v, start, i-1) || ss.less(v, i-1, start)) {
				panic(fmt.Sprintf("incomparable values detected: want equal elements: %v", v.Slice(start, i)))
			}
			start = -1
		} else if start == -1 {
			start = i
		}
	}
}
func (ss sliceSorter) less(v reflect.Value, i, j int) bool {
	vx, vy := v.Index(i), v.Index(j)
	return ss.fnc.Call([]reflect.Value{vx, vy})[0].Bool()
}

// SortMaps returns a Transformer option that flattens map[K]V types to be a
// sorted []struct{K, V}. The less function must be of the form
// "func(T, T) bool" which is used to sort any map with key K that is
// assignable to T.
//
// Flattening the map into a slice has the property that cmp.Equal is able to
// use Comparers on K or the K.Equal method if it exists.
//
// The less function must be:
//	• Deterministic: less(x, y) == less(x, y)
//	• Irreflexive: !less(x, x)
//	• Transitive: if !less(x, y) and !less(y, z), then !less(x, z)
//	• Total: if x != y, then either less(x, y) or less(y, x)
//
// SortMaps can be used in conjunction with EquateEmpty.
func SortMaps(less interface{}) cmp.Option {
	vf := reflect.ValueOf(less)
	if !isTTBoolFunc(vf.Type()) || vf.IsNil() {
		panic(fmt.Sprintf("invalid less function: %T", less))
	}
	ms := mapSorter{vf.Type().In(0), vf}
	return cmp.FilterValues(ms.filter, cmp.Transformer("Sort", ms.sort))
}
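
A minimal sketch of SortMaps (editor's addition, not part of the diff; the time.Time keys and reasoning come from the SortMaps cases in the test table below):

package main

import (
	"fmt"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	utc := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
	x := map[time.Time]string{utc: "0th birthday"}
	y := map[time.Time]string{utc.In(time.Local): "0th birthday"}
	// Flattening the map lets Time.Equal apply to the keys, so the same
	// instant expressed in different zones still matches.
	before := func(a, b time.Time) bool { return a.Before(b) }
	fmt.Println(cmp.Equal(x, y, cmpopts.SortMaps(before))) // true
}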

type mapSorter struct {
	in  reflect.Type  // T
	fnc reflect.Value // func(T, T) bool
}

func (ms mapSorter) filter(x, y interface{}) bool {
	vx, vy := reflect.ValueOf(x), reflect.ValueOf(y)
	return (x != nil && y != nil && vx.Type() == vy.Type()) &&
		(vx.Kind() == reflect.Map && vx.Type().Key().AssignableTo(ms.in)) &&
		(vx.Len() != 0 || vy.Len() != 0)
}
func (ms mapSorter) sort(x interface{}) interface{} {
	src := reflect.ValueOf(x)
	outType := mapEntryType(src.Type())
	dst := reflect.MakeSlice(reflect.SliceOf(outType), src.Len(), src.Len())
	for i, k := range src.MapKeys() {
		v := reflect.New(outType).Elem()
		v.Field(0).Set(k)
		v.Field(1).Set(src.MapIndex(k))
		dst.Index(i).Set(v)
	}
	sortSlice(dst.Interface(), func(i, j int) bool { return ms.less(dst, i, j) })
	ms.checkSort(dst)
	return dst.Interface()
}
func (ms mapSorter) checkSort(v reflect.Value) {
	for i := 1; i < v.Len(); i++ {
		if !ms.less(v, i-1, i) {
			panic(fmt.Sprintf("partial order detected: want %v < %v", v.Index(i-1), v.Index(i)))
		}
	}
}
func (ms mapSorter) less(v reflect.Value, i, j int) bool {
	vx, vy := v.Index(i).Field(0), v.Index(j).Field(0)
	if !hasReflectStructOf {
		vx, vy = vx.Elem(), vy.Elem()
	}
	return ms.fnc.Call([]reflect.Value{vx, vy})[0].Bool()
}

var boolType = reflect.TypeOf(true)

// isTTBoolFunc reports whether f is of the form: func(T, T) bool.
func isTTBoolFunc(t reflect.Type) bool {
	if t == nil || t.Kind() != reflect.Func || t.IsVariadic() {
		return false
	}
	return t.NumIn() == 2 && t.NumOut() == 1 && t.In(0) == t.In(1) && t.Out(0) == boolType
}

@ -0,0 +1,46 @@
// Copyright 2017, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

// +build !go1.8

package cmpopts

import (
	"reflect"
	"sort"
)

const hasReflectStructOf = false

func mapEntryType(reflect.Type) reflect.Type {
	return reflect.TypeOf(struct{ K, V interface{} }{})
}

func sliceIsSorted(slice interface{}, less func(i, j int) bool) bool {
	return sort.IsSorted(reflectSliceSorter{reflect.ValueOf(slice), less})
}
func sortSlice(slice interface{}, less func(i, j int) bool) {
	sort.Sort(reflectSliceSorter{reflect.ValueOf(slice), less})
}
func sortSliceStable(slice interface{}, less func(i, j int) bool) {
	sort.Stable(reflectSliceSorter{reflect.ValueOf(slice), less})
}

type reflectSliceSorter struct {
	slice reflect.Value
	less  func(i, j int) bool
}

func (ss reflectSliceSorter) Len() int {
	return ss.slice.Len()
}
func (ss reflectSliceSorter) Less(i, j int) bool {
	return ss.less(i, j)
}
func (ss reflectSliceSorter) Swap(i, j int) {
	vi := ss.slice.Index(i).Interface()
	vj := ss.slice.Index(j).Interface()
	ss.slice.Index(i).Set(reflect.ValueOf(vj))
	ss.slice.Index(j).Set(reflect.ValueOf(vi))
}

@ -0,0 +1,31 @@
// Copyright 2017, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

// +build go1.8

package cmpopts

import (
	"reflect"
	"sort"
)

const hasReflectStructOf = true

func mapEntryType(t reflect.Type) reflect.Type {
	return reflect.StructOf([]reflect.StructField{
		{Name: "K", Type: t.Key()},
		{Name: "V", Type: t.Elem()},
	})
}
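
A small sketch of what this produces (editor's addition, not part of the diff; reflect.StructOf is available from Go 1.8, which is why the pre-1.8 variant above falls back to interface{} fields):

package main

import (
	"fmt"
	"reflect"
)

func main() {
	t := reflect.TypeOf(map[string]int{})
	// Build the flattened entry type that SortMaps sorts into.
	entry := reflect.StructOf([]reflect.StructField{
		{Name: "K", Type: t.Key()},
		{Name: "V", Type: t.Elem()},
	})
	fmt.Println(entry) // struct { K string; V int }
}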

func sliceIsSorted(slice interface{}, less func(i, j int) bool) bool {
	return sort.SliceIsSorted(slice, less)
}
func sortSlice(slice interface{}, less func(i, j int) bool) {
	sort.Slice(slice, less)
}
func sortSliceStable(slice interface{}, less func(i, j int) bool) {
	sort.SliceStable(slice, less)
}

@ -0,0 +1,182 @@
// Copyright 2017, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

package cmpopts

import (
	"fmt"
	"reflect"
	"strings"

	"github.com/google/go-cmp/cmp"
)

// filterField returns a new Option where opt is only evaluated on paths that
// include a specific exported field on a single struct type.
// The struct type is specified by passing in a value of that type.
//
// The name may be a dot-delimited string (e.g., "Foo.Bar") to select a
// specific sub-field that is embedded or nested within the parent struct.
func filterField(typ interface{}, name string, opt cmp.Option) cmp.Option {
	// TODO: This is currently unexported over concerns of how helper filters
	// can be composed together easily.
	// TODO: Add tests for FilterField.

	sf := newStructFilter(typ, name)
	return cmp.FilterPath(sf.filter, opt)
}

type structFilter struct {
	t  reflect.Type // The root struct type to match on
	ft fieldTree    // Tree of fields to match on
}

func newStructFilter(typ interface{}, names ...string) structFilter {
	// TODO: Perhaps allow * as a special identifier to allow ignoring any
	// number of path steps until the next field match?
	// This could be useful when a concrete struct gets transformed into
	// an anonymous struct where it is not possible to specify that by type,
	// but the transformer happens to provide guarantees about the names of
	// the transformed fields.

	t := reflect.TypeOf(typ)
	if t == nil || t.Kind() != reflect.Struct {
		panic(fmt.Sprintf("%T must be a struct", typ))
	}
	var ft fieldTree
	for _, name := range names {
		cname, err := canonicalName(t, name)
		if err != nil {
			panic(fmt.Sprintf("%s: %v", strings.Join(cname, "."), err))
		}
		ft.insert(cname)
	}
	return structFilter{t, ft}
}

func (sf structFilter) filter(p cmp.Path) bool {
	for i, ps := range p {
		if ps.Type().AssignableTo(sf.t) && sf.ft.matchPrefix(p[i+1:]) {
			return true
		}
	}
	return false
}

// fieldTree represents a set of dot-separated identifiers.
//
// For example, inserting the following selectors:
//	Foo
//	Foo.Bar.Baz
//	Foo.Buzz
//	Nuka.Cola.Quantum
//
// Results in a tree of the form:
//	{sub: {
//		"Foo": {ok: true, sub: {
//			"Bar": {sub: {
//				"Baz": {ok: true},
//			}},
//			"Buzz": {ok: true},
//		}},
//		"Nuka": {sub: {
//			"Cola": {sub: {
//				"Quantum": {ok: true},
//			}},
//		}},
//	}}
type fieldTree struct {
	ok  bool                 // Whether this is a specified node
	sub map[string]fieldTree // The sub-tree of fields under this node
}

// insert inserts a sequence of field accesses into the tree.
func (ft *fieldTree) insert(cname []string) {
	if ft.sub == nil {
		ft.sub = make(map[string]fieldTree)
	}
	if len(cname) == 0 {
		ft.ok = true
		return
	}
	sub := ft.sub[cname[0]]
	sub.insert(cname[1:])
	ft.sub[cname[0]] = sub
}

// matchPrefix reports whether any selector in the fieldTree matches
// the start of path p.
func (ft fieldTree) matchPrefix(p cmp.Path) bool {
	for _, ps := range p {
		switch ps := ps.(type) {
		case cmp.StructField:
			ft = ft.sub[ps.Name()]
			if ft.ok {
				return true
			}
			if len(ft.sub) == 0 {
				return false
			}
		case cmp.Indirect:
		default:
			return false
		}
	}
	return false
}

// canonicalName returns a list of identifiers where any struct field access
// through an embedded field is expanded to include the names of the embedded
// types themselves.
//
// For example, suppose field "Foo" is not directly in the parent struct,
// but actually from an embedded struct of type "Bar". Then, the canonical name
// of "Foo" is actually "Bar.Foo".
//
// Suppose field "Foo" is not directly in the parent struct, but actually
// a field in two different embedded structs of types "Bar" and "Baz".
// Then the selector "Foo" causes a panic since it is ambiguous which one it
// refers to. The user must specify either "Bar.Foo" or "Baz.Foo".
func canonicalName(t reflect.Type, sel string) ([]string, error) {
	var name string
	sel = strings.TrimPrefix(sel, ".")
	if sel == "" {
		return nil, fmt.Errorf("name must not be empty")
	}
	if i := strings.IndexByte(sel, '.'); i < 0 {
		name, sel = sel, ""
	} else {
		name, sel = sel[:i], sel[i:]
	}

	// Type must be a struct or pointer to struct.
	if t.Kind() == reflect.Ptr {
		t = t.Elem()
	}
	if t.Kind() != reflect.Struct {
		return nil, fmt.Errorf("%v must be a struct", t)
	}

	// Find the canonical name for this current field name.
	// If the field exists in an embedded struct, then it will be expanded.
	if !isExported(name) {
		// Disallow unexported fields:
		//	* To discourage people from actually touching unexported fields
		//	* FieldByName is buggy (https://golang.org/issue/4876)
		return []string{name}, fmt.Errorf("name must be exported")
	}
	sf, ok := t.FieldByName(name)
	if !ok {
		return []string{name}, fmt.Errorf("does not exist")
	}
	var ss []string
	for i := range sf.Index {
		ss = append(ss, t.FieldByIndex(sf.Index[:i+1]).Name)
	}
	if sel == "" {
		return ss, nil
	}
	ssPost, err := canonicalName(sf.Type, sel)
	return append(ss, ssPost...), err
}
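
A minimal sketch of the canonical-name expansion in practice (editor's addition, not part of the diff; Inner and Outer are hypothetical types, and the behavior mirrors the embedded-field IgnoreFields cases in the test table below):

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

// Hypothetical types: Inner is embedded in Outer, so the promoted field "ID"
// has the canonical name "Inner.ID".
type Inner struct{ ID int }

type Outer struct {
	Inner
	Name string
}

func main() {
	x := Outer{Inner: Inner{ID: 1}, Name: "a"}
	y := Outer{Inner: Inner{ID: 2}, Name: "a"}
	// Both the promoted name and the fully-qualified name select the field.
	fmt.Println(cmp.Equal(x, y, cmpopts.IgnoreFields(Outer{}, "ID")))       // true
	fmt.Println(cmp.Equal(x, y, cmpopts.IgnoreFields(Outer{}, "Inner.ID"))) // true
}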

@ -0,0 +1,996 @@
// Copyright 2017, The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

package cmpopts

import (
	"bytes"
	"fmt"
	"io"
	"math"
	"reflect"
	"strings"
	"sync"
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
)

type (
	MyInt    int
	MyFloat  float32
	MyTime   struct{ time.Time }
	MyStruct struct {
		A, B []int
		C, D map[time.Time]string
	}

	Foo1 struct{ Alpha, Bravo, Charlie int }
	Foo2 struct{ *Foo1 }
	Foo3 struct{ *Foo2 }
	Bar1 struct{ Foo3 }
	Bar2 struct {
		Bar1
		*Foo3
		Bravo float32
	}
	Bar3 struct {
		Bar1
		Bravo *Bar2
		Delta struct{ Echo Foo1 }
		*Foo3
		Alpha string
	}

	privateStruct struct{ Public, private int }
	PublicStruct  struct{ Public, private int }
	ParentStruct  struct {
		*privateStruct
		*PublicStruct
		Public  int
		private int
	}

	Everything struct {
		MyInt
		MyFloat
		MyTime
		MyStruct
		Bar3
		ParentStruct
	}

	EmptyInterface interface{}
)

func TestOptions(t *testing.T) {
	createBar3X := func() *Bar3 {
		return &Bar3{
			Bar1: Bar1{Foo3{&Foo2{&Foo1{Bravo: 2}}}},
			Bravo: &Bar2{
				Bar1:  Bar1{Foo3{&Foo2{&Foo1{Charlie: 7}}}},
				Foo3:  &Foo3{&Foo2{&Foo1{Bravo: 5}}},
				Bravo: 4,
			},
			Delta: struct{ Echo Foo1 }{Foo1{Charlie: 3}},
			Foo3:  &Foo3{&Foo2{&Foo1{Alpha: 1}}},
			Alpha: "alpha",
		}
	}
	createBar3Y := func() *Bar3 {
		return &Bar3{
			Bar1: Bar1{Foo3{&Foo2{&Foo1{Bravo: 3}}}},
			Bravo: &Bar2{
				Bar1:  Bar1{Foo3{&Foo2{&Foo1{Charlie: 8}}}},
				Foo3:  &Foo3{&Foo2{&Foo1{Bravo: 6}}},
				Bravo: 5,
			},
			Delta: struct{ Echo Foo1 }{Foo1{Charlie: 4}},
			Foo3:  &Foo3{&Foo2{&Foo1{Alpha: 2}}},
			Alpha: "ALPHA",
		}
	}

	tests := []struct {
		label     string       // Test name
		x, y      interface{}  // Input values to compare
		opts      []cmp.Option // Input options
		wantEqual bool         // Whether the inputs are equal
		wantPanic bool         // Whether Equal should panic
		reason    string       // The reason for the expected outcome
	}{{
		label:     "EquateEmpty",
		x:         []int{},
		y:         []int(nil),
		wantEqual: false,
		reason:    "not equal because empty non-nil and nil slice differ",
	}, {
		label:     "EquateEmpty",
		x:         []int{},
		y:         []int(nil),
		opts:      []cmp.Option{EquateEmpty()},
		wantEqual: true,
		reason:    "equal because EquateEmpty equates empty slices",
	}, {
		label:     "SortSlices",
		x:         []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
		y:         []int{1, 0, 5, 2, 8, 9, 4, 3, 6, 7},
		wantEqual: false,
		reason:    "not equal because element order differs",
	}, {
		label:     "SortSlices",
		x:         []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
		y:         []int{1, 0, 5, 2, 8, 9, 4, 3, 6, 7},
		opts:      []cmp.Option{SortSlices(func(x, y int) bool { return x < y })},
		wantEqual: true,
		reason:    "equal because SortSlices sorts the slices",
	}, {
		label:     "SortSlices",
		x:         []MyInt{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
		y:         []MyInt{1, 0, 5, 2, 8, 9, 4, 3, 6, 7},
		opts:      []cmp.Option{SortSlices(func(x, y int) bool { return x < y })},
		wantEqual: false,
		reason:    "not equal because MyInt is not the same type as int",
	}, {
		label:     "SortSlices",
		x:         []float64{0, 1, 1, 2, 2, 2},
		y:         []float64{2, 0, 2, 1, 2, 1},
		opts:      []cmp.Option{SortSlices(func(x, y float64) bool { return x < y })},
		wantEqual: true,
		reason:    "equal even when sorted with duplicate elements",
	}, {
		label:     "SortSlices",
		x:         []float64{0, 1, 1, 2, 2, 2, math.NaN(), 3, 3, 3, 3, 4, 4, 4, 4},
		y:         []float64{2, 0, 4, 4, 3, math.NaN(), 4, 1, 3, 2, 3, 3, 4, 1, 2},
		opts:      []cmp.Option{SortSlices(func(x, y float64) bool { return x < y })},
		wantPanic: true,
		reason:    "panics because SortSlices used with non-transitive less function",
	}, {
		label: "SortSlices",
		x:     []float64{0, 1, 1, 2, 2, 2, math.NaN(), 3, 3, 3, 3, 4, 4, 4, 4},
		y:     []float64{2, 0, 4, 4, 3, math.NaN(), 4, 1, 3, 2, 3, 3, 4, 1, 2},
		opts: []cmp.Option{SortSlices(func(x, y float64) bool {
			return (!math.IsNaN(x) && math.IsNaN(y)) || x < y
		})},
		wantEqual: false,
		reason:    "no panics because SortSlices used with valid less function; not equal because NaN != NaN",
	}, {
		label: "SortSlices+EquateNaNs",
		x:     []float64{0, 1, 1, 2, 2, 2, math.NaN(), 3, 3, 3, math.NaN(), 3, 4, 4, 4, 4},
		y:     []float64{2, 0, 4, 4, 3, math.NaN(), 4, 1, 3, 2, 3, 3, 4, 1, math.NaN(), 2},
		opts: []cmp.Option{
			EquateNaNs(),
			SortSlices(func(x, y float64) bool {
				return (!math.IsNaN(x) && math.IsNaN(y)) || x < y
			}),
		},
		wantEqual: true,
		reason:    "no panics because SortSlices used with valid less function; equal because EquateNaNs is used",
	}, {
		label: "SortMaps",
		x: map[time.Time]string{
			time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC): "0th birthday",
			time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC): "1st birthday",
			time.Date(2011, time.November, 10, 23, 0, 0, 0, time.UTC): "2nd birthday",
		},
		y: map[time.Time]string{
			time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local): "0th birthday",
			time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local): "1st birthday",
			time.Date(2011, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local): "2nd birthday",
		},
		wantEqual: false,
		reason:    "not equal because timezones differ",
	}, {
		label: "SortMaps",
		x: map[time.Time]string{
			time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC): "0th birthday",
			time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC): "1st birthday",
			time.Date(2011, time.November, 10, 23, 0, 0, 0, time.UTC): "2nd birthday",
		},
		y: map[time.Time]string{
			time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local): "0th birthday",
			time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local): "1st birthday",
			time.Date(2011, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local): "2nd birthday",
		},
		opts:      []cmp.Option{SortMaps(func(x, y time.Time) bool { return x.Before(y) })},
		wantEqual: true,
		reason:    "equal because SortMaps flattens to a slice where Time.Equal can be used",
	}, {
		label: "SortMaps",
		x: map[MyTime]string{
			{time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)}: "0th birthday",
			{time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC)}: "1st birthday",
			{time.Date(2011, time.November, 10, 23, 0, 0, 0, time.UTC)}: "2nd birthday",
		},
		y: map[MyTime]string{
			{time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local)}: "0th birthday",
			{time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local)}: "1st birthday",
			{time.Date(2011, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local)}: "2nd birthday",
		},
		opts:      []cmp.Option{SortMaps(func(x, y time.Time) bool { return x.Before(y) })},
		wantEqual: false,
		reason:    "not equal because MyTime is not assignable to time.Time",
	}, {
		label: "SortMaps",
		x:     map[int]string{-3: "", -2: "", -1: "", 0: "", 1: "", 2: "", 3: ""},
		// => {0, 1, 2, 3, -1, -2, -3},
		y: map[int]string{300: "", 200: "", 100: "", 0: "", 1: "", 2: "", 3: ""},
		// => {0, 1, 2, 3, 100, 200, 300},
		opts: []cmp.Option{SortMaps(func(a, b int) bool {
			if -10 < a && a <= 0 {
				a *= -100
			}
			if -10 < b && b <= 0 {
				b *= -100
			}
			return a < b
		})},
		wantEqual: false,
		reason:    "not equal because values differ even though SortMap provides valid ordering",
	}, {
		label: "SortMaps",
		x:     map[int]string{-3: "", -2: "", -1: "", 0: "", 1: "", 2: "", 3: ""},
		// => {0, 1, 2, 3, -1, -2, -3},
		y: map[int]string{300: "", 200: "", 100: "", 0: "", 1: "", 2: "", 3: ""},
		// => {0, 1, 2, 3, 100, 200, 300},
		opts: []cmp.Option{
			SortMaps(func(x, y int) bool {
				if -10 < x && x <= 0 {
					x *= -100
				}
				if -10 < y && y <= 0 {
					y *= -100
				}
				return x < y
			}),
			cmp.Comparer(func(x, y int) bool {
				if -10 < x && x <= 0 {
					x *= -100
				}
				if -10 < y && y <= 0 {
					y *= -100
				}
				return x == y
			}),
		},
		wantEqual: true,
		reason:    "equal because Comparer used to equate differences",
	}, {
		label: "SortMaps",
		x:     map[int]string{-3: "", -2: "", -1: "", 0: "", 1: "", 2: "", 3: ""},
		y:     map[int]string{},
		opts: []cmp.Option{SortMaps(func(x, y int) bool {
			return x < y && x >= 0 && y >= 0
		})},
		wantPanic: true,
		reason:    "panics because SortMaps used with non-transitive less function",
	}, {
		label: "SortMaps",
		x:     map[int]string{-3: "", -2: "", -1: "", 0: "", 1: "", 2: "", 3: ""},
		y:     map[int]string{},
		opts: []cmp.Option{SortMaps(func(x, y int) bool {
			return math.Abs(float64(x)) < math.Abs(float64(y))
		})},
		wantPanic: true,
		reason:    "panics because SortMaps used with partial less function",
	}, {
		label: "EquateEmpty+SortSlices+SortMaps",
		x: MyStruct{
			A: []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
			C: map[time.Time]string{
				time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC): "0th birthday",
				time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC): "1st birthday",
			},
			D: map[time.Time]string{},
		},
		y: MyStruct{
			A: []int{1, 0, 5, 2, 8, 9, 4, 3, 6, 7},
			B: []int{},
			C: map[time.Time]string{
				time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local): "0th birthday",
				time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC).In(time.Local): "1st birthday",
			},
		},
		opts: []cmp.Option{
			EquateEmpty(),
			SortSlices(func(x, y int) bool { return x < y }),
			SortMaps(func(x, y time.Time) bool { return x.Before(y) }),
		},
		wantEqual: true,
		reason:    "no panics because EquateEmpty should compose with the sort options",
	}, {
		label:     "EquateApprox",
		x:         3.09,
		y:         3.10,
		wantEqual: false,
		reason:    "not equal because floats do not exactly match",
	}, {
		label:     "EquateApprox",
		x:         3.09,
		y:         3.10,
		opts:      []cmp.Option{EquateApprox(0, 0)},
		wantEqual: false,
		reason:    "not equal because EquateApprox(0, 0) is equivalent to using ==",
	}, {
		label:     "EquateApprox",
		x:         3.09,
		y:         3.10,
		opts:      []cmp.Option{EquateApprox(0.003, 0.009)},
		wantEqual: false,
		reason:    "not equal because EquateApprox is too strict",
	}, {
		label:     "EquateApprox",
		x:         3.09,
		y:         3.10,
		opts:      []cmp.Option{EquateApprox(0, 0.011)},
		wantEqual: true,
		reason:    "equal because margin is loose enough to match",
	}, {
		label:     "EquateApprox",
		x:         3.09,
		y:         3.10,
		opts:      []cmp.Option{EquateApprox(0.004, 0)},
		wantEqual: true,
		reason:    "equal because fraction is loose enough to match",
	}, {
		label:     "EquateApprox",
		x:         3.09,
		y:         3.10,
		opts:      []cmp.Option{EquateApprox(0.004, 0.011)},
		wantEqual: true,
		reason:    "equal because both the margin and fraction are loose enough to match",
	}, {
		label:     "EquateApprox",
		x:         float32(3.09),
		y:         float64(3.10),
		opts:      []cmp.Option{EquateApprox(0.004, 0)},
		wantEqual: false,
		reason:    "not equal because the types differ",
	}, {
		label:     "EquateApprox",
		x:         float32(3.09),
		y:         float32(3.10),
		opts:      []cmp.Option{EquateApprox(0.004, 0)},
		wantEqual: true,
		reason:    "equal because EquateApprox also applies on float32s",
	}, {
		label:     "EquateApprox",
		x:         []float64{math.Inf(+1), math.Inf(-1)},
		y:         []float64{math.Inf(+1), math.Inf(-1)},
		opts:      []cmp.Option{EquateApprox(0, 1)},
		wantEqual: true,
		reason:    "equal because we fall back on == which matches Inf (EquateApprox does not apply on Inf)",
	}, {
		label:     "EquateApprox",
		x:         []float64{math.Inf(+1), -1e100},
		y:         []float64{+1e100, math.Inf(-1)},
		opts:      []cmp.Option{EquateApprox(0, 1)},
		wantEqual: false,
		reason:    "not equal because we fall back on == where Inf != 1e100 (EquateApprox does not apply on Inf)",
	}, {
		label:     "EquateApprox",
		x:         float64(+1e100),
		y:         float64(-1e100),
		opts:      []cmp.Option{EquateApprox(math.Inf(+1), 0)},
		wantEqual: true,
		reason:    "equal because infinite fraction matches everything",
	}, {
		label:     "EquateApprox",
		x:         float64(+1e100),
		y:         float64(-1e100),
		opts:      []cmp.Option{EquateApprox(0, math.Inf(+1))},
		wantEqual: true,
		reason:    "equal because infinite margin matches everything",
	}, {
		label:     "EquateApprox",
		x:         math.Pi,
		y:         math.Pi,
		opts:      []cmp.Option{EquateApprox(0, 0)},
		wantEqual: true,
		reason:    "equal because EquateApprox(0, 0) is equivalent to ==",
	}, {
		label:     "EquateApprox",
		x:         math.Pi,
		y:         math.Nextafter(math.Pi, math.Inf(+1)),
		opts:      []cmp.Option{EquateApprox(0, 0)},
		wantEqual: false,
		reason:    "not equal because EquateApprox(0, 0) is equivalent to ==",
	}, {
		label:     "EquateNaNs",
		x:         []float64{1.0, math.NaN(), math.E, -0.0, +0.0, math.Inf(+1), math.Inf(-1)},
		y:         []float64{1.0, math.NaN(), math.E, -0.0, +0.0, math.Inf(+1), math.Inf(-1)},
		wantEqual: false,
		reason:    "not equal because NaN != NaN",
	}, {
		label:     "EquateNaNs",
		x:         []float64{1.0, math.NaN(), math.E, -0.0, +0.0, math.Inf(+1), math.Inf(-1)},
		y:         []float64{1.0, math.NaN(), math.E, -0.0, +0.0, math.Inf(+1), math.Inf(-1)},
		opts:      []cmp.Option{EquateNaNs()},
		wantEqual: true,
		reason:    "equal because EquateNaNs allows NaN == NaN",
	}, {
		label:     "EquateNaNs",
		x:         []float32{1.0, float32(math.NaN()), math.E, -0.0, +0.0},
		y:         []float32{1.0, float32(math.NaN()), math.E, -0.0, +0.0},
		opts:      []cmp.Option{EquateNaNs()},
		wantEqual: true,
		reason:    "equal because EquateNaNs operates on float32",
	}, {
		label: "EquateApprox+EquateNaNs",
		x:     []float64{1.0, math.NaN(), math.E, -0.0, +0.0, math.Inf(+1), math.Inf(-1), 1.01, 5001},
		y:     []float64{1.0, math.NaN(), math.E, -0.0, +0.0, math.Inf(+1), math.Inf(-1), 1.02, 5002},
		opts: []cmp.Option{
			EquateNaNs(),
			EquateApprox(0.01, 0),
		},
		wantEqual: true,
		reason:    "equal because EquateNaNs and EquateApprox compose together",
	}, {
		label: "EquateApprox+EquateNaNs",
		x:     []MyFloat{1.0, MyFloat(math.NaN()), MyFloat(math.E), -0.0, +0.0, MyFloat(math.Inf(+1)), MyFloat(math.Inf(-1)), 1.01, 5001},
		y:     []MyFloat{1.0, MyFloat(math.NaN()), MyFloat(math.E), -0.0, +0.0, MyFloat(math.Inf(+1)), MyFloat(math.Inf(-1)), 1.02, 5002},
		opts: []cmp.Option{
			EquateNaNs(),
			EquateApprox(0.01, 0),
		},
		wantEqual: false,
		reason:    "not equal because EquateApprox and EquateNaNs do not apply on a named type",
	}, {
		label: "EquateApprox+EquateNaNs+Transform",
		x:     []MyFloat{1.0, MyFloat(math.NaN()), MyFloat(math.E), -0.0, +0.0, MyFloat(math.Inf(+1)), MyFloat(math.Inf(-1)), 1.01, 5001},
		y:     []MyFloat{1.0, MyFloat(math.NaN()), MyFloat(math.E), -0.0, +0.0, MyFloat(math.Inf(+1)), MyFloat(math.Inf(-1)), 1.02, 5002},
		opts: []cmp.Option{
			cmp.Transformer("", func(x MyFloat) float64 { return float64(x) }),
			EquateNaNs(),
			EquateApprox(0.01, 0),
		},
		wantEqual: true,
		reason:    "equal because named type is transformed to float64",
	}, {
		label:     "IgnoreFields",
		x:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 5}}}},
		y:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 6}}}},
		wantEqual: false,
		reason:    "not equal because values do not match in deeply embedded field",
	}, {
		label:     "IgnoreFields",
		x:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 5}}}},
		y:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 6}}}},
		opts:      []cmp.Option{IgnoreFields(Bar1{}, "Alpha")},
		wantEqual: true,
		reason:    "equal because IgnoreField ignores deeply embedded field: Alpha",
	}, {
		label:     "IgnoreFields",
		x:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 5}}}},
		y:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 6}}}},
		opts:      []cmp.Option{IgnoreFields(Bar1{}, "Foo1.Alpha")},
		wantEqual: true,
		reason:    "equal because IgnoreField ignores deeply embedded field: Foo1.Alpha",
	}, {
		label:     "IgnoreFields",
		x:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 5}}}},
		y:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 6}}}},
		opts:      []cmp.Option{IgnoreFields(Bar1{}, "Foo2.Alpha")},
		wantEqual: true,
		reason:    "equal because IgnoreField ignores deeply embedded field: Foo2.Alpha",
	}, {
		label:     "IgnoreFields",
		x:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 5}}}},
		y:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 6}}}},
		opts:      []cmp.Option{IgnoreFields(Bar1{}, "Foo3.Alpha")},
		wantEqual: true,
		reason:    "equal because IgnoreField ignores deeply embedded field: Foo3.Alpha",
	}, {
		label:     "IgnoreFields",
		x:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 5}}}},
		y:         Bar1{Foo3{&Foo2{&Foo1{Alpha: 6}}}},
		opts:      []cmp.Option{IgnoreFields(Bar1{}, "Foo3.Foo2.Alpha")},
		wantEqual: true,
		reason:    "equal because IgnoreField ignores deeply embedded field: Foo3.Foo2.Alpha",
	}, {
		label:     "IgnoreFields",
		x:         createBar3X(),
		y:         createBar3Y(),
		wantEqual: false,
		reason:    "not equal because many deeply nested or embedded fields differ",
	}, {
		label:     "IgnoreFields",
		x:         createBar3X(),
		y:         createBar3Y(),
		opts:      []cmp.Option{IgnoreFields(Bar3{}, "Bar1", "Bravo", "Delta", "Foo3", "Alpha")},
		wantEqual: true,
		reason:    "equal because IgnoreFields ignores fields at the highest levels",
	}, {
		label: "IgnoreFields",
		x:     createBar3X(),
		y:     createBar3Y(),
		opts: []cmp.Option{
			IgnoreFields(Bar3{},
				"Bar1.Foo3.Bravo",
				"Bravo.Bar1.Foo3.Foo2.Foo1.Charlie",
				"Bravo.Foo3.Foo2.Foo1.Bravo",
				"Bravo.Bravo",
				"Delta.Echo.Charlie",
				"Foo3.Foo2.Foo1.Alpha",
				"Alpha",
			),
		},
		wantEqual: true,
		reason:    "equal because IgnoreFields ignores fields using fully-qualified field",
	}, {
		label: "IgnoreFields",
		x:     createBar3X(),
		y:     createBar3Y(),
		opts: []cmp.Option{
			IgnoreFields(Bar3{},
				"Bar1.Foo3.Bravo",
				"Bravo.Foo3.Foo2.Foo1.Bravo",
				"Bravo.Bravo",
				"Delta.Echo.Charlie",
				"Foo3.Foo2.Foo1.Alpha",
				"Alpha",
			),
		},
		wantEqual: false,
		reason:    "not equal because one fully-qualified field is not ignored: Bravo.Bar1.Foo3.Foo2.Foo1.Charlie",
	}, {
		label:     "IgnoreFields",
		x:         createBar3X(),
		y:         createBar3Y(),
		opts:      []cmp.Option{IgnoreFields(Bar3{}, "Bar1", "Bravo", "Delta", "Alpha")},
		wantEqual: false,
		reason:    "not equal because highest-level field is not ignored: Foo3",
	}, {
		label:     "IgnoreTypes",
		x:         []interface{}{5, "same"},
		y:         []interface{}{6, "same"},
		wantEqual: false,
		reason:    "not equal because 5 != 6",
	}, {
		label:     "IgnoreTypes",
		x:         []interface{}{5, "same"},
		y:         []interface{}{6, "same"},
		opts:      []cmp.Option{IgnoreTypes(0)},
		wantEqual: true,
		reason:    "equal because ints are ignored",
	}, {
		label:     "IgnoreTypes+IgnoreInterfaces",
		x:         []interface{}{5, "same", new(bytes.Buffer)},
		y:         []interface{}{6, "same", new(bytes.Buffer)},
		opts:      []cmp.Option{IgnoreTypes(0)},
		wantPanic: true,
		reason:    "panics because bytes.Buffer has unexported fields",
	}, {
		label: "IgnoreTypes+IgnoreInterfaces",
		x:     []interface{}{5, "same", new(bytes.Buffer)},
		y:     []interface{}{6, "diff", new(bytes.Buffer)},
		opts: []cmp.Option{
			IgnoreTypes(0, ""),
			IgnoreInterfaces(struct{ io.Reader }{}),
		},
		wantEqual: true,
		reason:    "equal because bytes.Buffer is ignored by match on interface type",
	}, {
		label: "IgnoreTypes+IgnoreInterfaces",
		x:     []interface{}{5, "same", new(bytes.Buffer)},
		y:     []interface{}{6, "same", new(bytes.Buffer)},
		opts: []cmp.Option{
			IgnoreTypes(0, ""),
			IgnoreInterfaces(struct {
				io.Reader
				io.Writer
				fmt.Stringer
			}{}),
		},
		wantEqual: true,
		reason:    "equal because bytes.Buffer is ignored by match on multiple interface types",
	}, {
		label:     "IgnoreInterfaces",
		x:         struct{ mu sync.Mutex }{},
		y:         struct{ mu sync.Mutex }{},
		wantPanic: true,
		reason:    "panics because sync.Mutex has unexported fields",
	}, {
		label:     "IgnoreInterfaces",
		x:         struct{ mu sync.Mutex }{},
		y:         struct{ mu sync.Mutex }{},
		opts:      []cmp.Option{IgnoreInterfaces(struct{ sync.Locker }{})},
		wantEqual: true,
		reason:    "equal because IgnoreInterfaces applies on values (with pointer receiver)",
	}, {
		label:     "IgnoreInterfaces",
		x:         struct{ mu *sync.Mutex }{},
		y:         struct{ mu *sync.Mutex }{},
		opts:      []cmp.Option{IgnoreInterfaces(struct{ sync.Locker }{})},
		wantEqual: true,
		reason:    "equal because IgnoreInterfaces applies on pointers",
	}, {
		label:     "IgnoreUnexported",
		x:         ParentStruct{Public: 1, private: 2},
		y:         ParentStruct{Public: 1, private: -2},
		opts:      []cmp.Option{cmp.AllowUnexported(ParentStruct{})},
		wantEqual: false,
		reason:    "not equal because ParentStruct.private differs with AllowUnexported",
	}, {
		label:     "IgnoreUnexported",
		x:         ParentStruct{Public: 1, private: 2},
		y:         ParentStruct{Public: 1, private: -2},
		opts:      []cmp.Option{IgnoreUnexported(ParentStruct{})},
		wantEqual: true,
		reason:    "equal because IgnoreUnexported ignored ParentStruct.private",
	}, {
		label: "IgnoreUnexported",
		x:     ParentStruct{Public: 1, private: 2, PublicStruct: &PublicStruct{Public: 3, private: 4}},
		y:     ParentStruct{Public: 1, private: -2, PublicStruct: &PublicStruct{Public: 3, private: 4}},
		opts: []cmp.Option{
			cmp.AllowUnexported(PublicStruct{}),
			IgnoreUnexported(ParentStruct{}),
		},
		wantEqual: true,
		reason:    "equal because ParentStruct.private is ignored",
	}, {
		label: "IgnoreUnexported",
		x:     ParentStruct{Public: 1, private: 2, PublicStruct: &PublicStruct{Public: 3, private: 4}},
		y:     ParentStruct{Public: 1, private: -2, PublicStruct: &PublicStruct{Public: 3, private: -4}},
		opts: []cmp.Option{
			cmp.AllowUnexported(PublicStruct{}),
			IgnoreUnexported(ParentStruct{}),
		},
		wantEqual: false,
		reason:    "not equal because ParentStruct.PublicStruct.private differs and not ignored by IgnoreUnexported(ParentStruct{})",
	}, {
		label: "IgnoreUnexported",
		x:     ParentStruct{Public: 1, private: 2, PublicStruct: &PublicStruct{Public: 3, private: 4}},
		y:     ParentStruct{Public: 1, private: -2, PublicStruct: &PublicStruct{Public: 3, private: -4}},
		opts: []cmp.Option{
			IgnoreUnexported(ParentStruct{}, PublicStruct{}),
		},
		wantEqual: true,
		reason:    "equal because both ParentStruct.PublicStruct and ParentStruct.PublicStruct.private are ignored",
	}, {
		label: "IgnoreUnexported",
		x:     ParentStruct{Public: 1, private: 2, privateStruct: &privateStruct{Public: 3, private: 4}},
		y:     ParentStruct{Public: 1, private: 2, privateStruct: &privateStruct{Public: -3, private: -4}},
		opts: []cmp.Option{
			cmp.AllowUnexported(privateStruct{}, PublicStruct{}, ParentStruct{}),
		},
		wantEqual: false,
		reason:    "not equal since ParentStruct.privateStruct differs",
	}, {
		label: "IgnoreUnexported",
		x:     ParentStruct{Public: 1, private: 2, privateStruct: &privateStruct{Public: 3, private: 4}},
		y:     ParentStruct{Public: 1, private: 2, privateStruct: &privateStruct{Public: -3, private: -4}},
		opts: []cmp.Option{
			cmp.AllowUnexported(privateStruct{}, PublicStruct{}),
			IgnoreUnexported(ParentStruct{}),
		},
		wantEqual: true,
		reason:    "equal because ParentStruct.privateStruct ignored by IgnoreUnexported(ParentStruct{})",
	}, {
		label: "IgnoreUnexported",
		x:     ParentStruct{Public: 1, private: 2, privateStruct: &privateStruct{Public: 3, private: 4}},
		y:     ParentStruct{Public: 1, private: 2, privateStruct: &privateStruct{Public: 3, private: -4}},
		opts: []cmp.Option{
			cmp.AllowUnexported(PublicStruct{}, ParentStruct{}),
			IgnoreUnexported(privateStruct{}),
		},
		wantEqual: true,
		reason:    "equal because privateStruct.private ignored by IgnoreUnexported(privateStruct{})",
	}, {
		label: "IgnoreUnexported",
		x:     ParentStruct{Public: 1, private: 2, privateStruct: &privateStruct{Public: 3, private: 4}},
		y:     ParentStruct{Public: 1, private: 2, privateStruct: &privateStruct{Public: -3, private: -4}},
		opts: []cmp.Option{
			cmp.AllowUnexported(PublicStruct{}, ParentStruct{}),
			IgnoreUnexported(privateStruct{}),
		},
		wantEqual: false,
		reason:    "not equal because privateStruct.Public differs and not ignored by IgnoreUnexported(privateStruct{})",
	}, {
		label: "IgnoreFields+IgnoreTypes+IgnoreUnexported",
		x: &Everything{
			MyInt:   5,
			MyFloat: 3.3,
			MyTime:  MyTime{time.Now()},
			Bar3:    *createBar3X(),
			ParentStruct: ParentStruct{
				Public: 1, private: 2, PublicStruct: &PublicStruct{Public: 3, private: 4},
			},
		},
		y: &Everything{
			MyInt:   -5,
			MyFloat: 3.3,
			MyTime:  MyTime{time.Now()},
			Bar3:    *createBar3Y(),
			ParentStruct: ParentStruct{
				Public: 1, private: -2, PublicStruct: &PublicStruct{Public: -3, private: -4},
			},
		},
		opts: []cmp.Option{
			IgnoreFields(Everything{}, "MyTime", "Bar3.Foo3"),
			IgnoreFields(Bar3{}, "Bar1", "Bravo", "Delta", "Alpha"),
			IgnoreTypes(MyInt(0), PublicStruct{}),
			IgnoreUnexported(ParentStruct{}),
		},
		wantEqual: true,
		reason:    "equal because all Ignore options can be composed together",
	}}

	for _, tt := range tests {
		tRun(t, tt.label, func(t *testing.T) {
			var gotEqual bool
			var gotPanic string
			func() {
				defer func() {
					if ex := recover(); ex != nil {
						gotPanic = fmt.Sprint(ex)
					}
				}()
				gotEqual = cmp.Equal(tt.x, tt.y, tt.opts...)
			}()
			switch {
case gotPanic == "" && tt.wantPanic:
|
||||
t.Errorf("expected Equal panic\nreason: %s", tt.reason)
|
||||
case gotPanic != "" && !tt.wantPanic:
|
||||
t.Errorf("unexpected Equal panic: got %v\nreason: %v", gotPanic, tt.reason)
|
||||
case gotEqual != tt.wantEqual:
|
||||
t.Errorf("Equal = %v, want %v\nreason: %v", gotEqual, tt.wantEqual, tt.reason)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
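The table above drives cmp.Equal through each Ignore option and their combinations. As a minimal sketch of the same options called from application code (the cmp and cmpopts import paths are the vendored go-cmp packages; the config type and its fields are invented for illustration):

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

type config struct {
	Name    string
	Retries int
	secret  string // unexported: cmp panics on it unless ignored or allowed
}

func main() {
	x := config{Name: "a", Retries: 3, secret: "s1"}
	y := config{Name: "a", Retries: 5, secret: "s2"}
	opts := []cmp.Option{
		cmpopts.IgnoreUnexported(config{}),        // skip config.secret
		cmpopts.IgnoreFields(config{}, "Retries"), // skip config.Retries
	}
	fmt.Println(cmp.Equal(x, y, opts...)) // true: every differing field is ignored
}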

func TestPanic(t *testing.T) {
	args := func(x ...interface{}) []interface{} { return x }
	tests := []struct {
		label     string        // Test name
		fnc       interface{}   // Option function to call
		args      []interface{} // Arguments to pass in
		wantPanic string        // Expected panic message
		reason    string        // The reason for the expected outcome
	}{{
		label:  "EquateApprox",
		fnc:    EquateApprox,
		args:   args(0.0, 0.0),
		reason: "zero margin and fraction is equivalent to exact equality",
	}, {
		label:     "EquateApprox",
		fnc:       EquateApprox,
		args:      args(-0.1, 0.0),
		wantPanic: "margin or fraction must be a non-negative number",
		reason:    "negative inputs are invalid",
	}, {
		label:     "EquateApprox",
		fnc:       EquateApprox,
		args:      args(0.0, -0.1),
		wantPanic: "margin or fraction must be a non-negative number",
		reason:    "negative inputs are invalid",
	}, {
		label:     "EquateApprox",
		fnc:       EquateApprox,
		args:      args(math.NaN(), 0.0),
		wantPanic: "margin or fraction must be a non-negative number",
		reason:    "NaN inputs are invalid",
	}, {
		label:  "EquateApprox",
		fnc:    EquateApprox,
		args:   args(1.0, 0.0),
		reason: "fraction of 1.0 or greater is valid",
	}, {
		label:  "EquateApprox",
		fnc:    EquateApprox,
		args:   args(0.0, math.Inf(+1)),
		reason: "margin of infinity is valid",
	}, {
		label:     "SortSlices",
		fnc:       SortSlices,
		args:      args(strings.Compare),
		wantPanic: "invalid less function",
		reason:    "func(x, y string) int is wrong signature for less",
	}, {
		label:     "SortSlices",
		fnc:       SortSlices,
		args:      args((func(_, _ int) bool)(nil)),
		wantPanic: "invalid less function",
		reason:    "nil value is not valid",
	}, {
		label:     "SortMaps",
		fnc:       SortMaps,
		args:      args(strings.Compare),
		wantPanic: "invalid less function",
		reason:    "func(x, y string) int is wrong signature for less",
	}, {
		label:     "SortMaps",
		fnc:       SortMaps,
		args:      args((func(_, _ int) bool)(nil)),
		wantPanic: "invalid less function",
		reason:    "nil value is not valid",
	}, {
		label:     "IgnoreFields",
		fnc:       IgnoreFields,
		args:      args(Foo1{}, ""),
		wantPanic: "name must not be empty",
		reason:    "empty selector is invalid",
	}, {
		label:     "IgnoreFields",
		fnc:       IgnoreFields,
		args:      args(Foo1{}, "."),
		wantPanic: "name must not be empty",
		reason:    "single dot selector is invalid",
	}, {
		label:  "IgnoreFields",
		fnc:    IgnoreFields,
		args:   args(Foo1{}, ".Alpha"),
		reason: "dot-prefix is okay since Foo1.Alpha reads naturally",
	}, {
		label:     "IgnoreFields",
		fnc:       IgnoreFields,
		args:      args(Foo1{}, "Alpha."),
		wantPanic: "name must not be empty",
		reason:    "dot-suffix is invalid",
	}, {
		label:     "IgnoreFields",
		fnc:       IgnoreFields,
		args:      args(Foo1{}, "Alpha "),
		wantPanic: "does not exist",
		reason:    "identifiers must not have spaces",
	}, {
		label:     "IgnoreFields",
		fnc:       IgnoreFields,
		args:      args(Foo1{}, "Zulu"),
		wantPanic: "does not exist",
		reason:    "name of non-existent field is invalid",
	}, {
		label:     "IgnoreFields",
		fnc:       IgnoreFields,
		args:      args(Foo1{}, "Alpha.NoExist"),
		wantPanic: "must be a struct",
		reason:    "cannot select into a non-struct",
	}, {
		label:     "IgnoreFields",
		fnc:       IgnoreFields,
		args:      args(&Foo1{}, "Alpha"),
		wantPanic: "must be a struct",
		reason:    "the type must be a struct (not pointer to a struct)",
	}, {
		label:     "IgnoreFields",
		fnc:       IgnoreFields,
		args:      args(Foo1{}, "unexported"),
		wantPanic: "name must be exported",
		reason:    "unexported fields must not be specified",
	}, {
		label:  "IgnoreTypes",
		fnc:    IgnoreTypes,
		reason: "empty input is valid",
	}, {
		label:     "IgnoreTypes",
		fnc:       IgnoreTypes,
		args:      args(nil),
		wantPanic: "cannot determine type",
		reason:    "input must not be nil value",
	}, {
		label:  "IgnoreTypes",
		fnc:    IgnoreTypes,
		args:   args(0, 0, 0),
		reason: "duplicate inputs of the same type is valid",
	}, {
		label:     "IgnoreInterfaces",
		fnc:       IgnoreInterfaces,
		args:      args(nil),
		wantPanic: "input must be an anonymous struct",
		reason:    "input must not be nil value",
	}, {
		label:     "IgnoreInterfaces",
		fnc:       IgnoreInterfaces,
		args:      args(Foo1{}),
		wantPanic: "input must be an anonymous struct",
		reason:    "input must not be a named struct type",
	}, {
		label:     "IgnoreInterfaces",
		fnc:       IgnoreInterfaces,
		args:      args(struct{ _ io.Reader }{}),
		wantPanic: "struct cannot have named fields",
		reason:    "input must not have named fields",
	}, {
		label:     "IgnoreInterfaces",
		fnc:       IgnoreInterfaces,
		args:      args(struct{ Foo1 }{}),
		wantPanic: "embedded field must be an interface type",
		reason:    "field types must be interfaces",
	}, {
		label:     "IgnoreInterfaces",
		fnc:       IgnoreInterfaces,
		args:      args(struct{ EmptyInterface }{}),
		wantPanic: "cannot ignore empty interface",
		reason:    "field types must not be the empty interface",
	}, {
		label: "IgnoreInterfaces",
		fnc:   IgnoreInterfaces,
		args: args(struct {
			io.Reader
			io.Writer
			io.Closer
			io.ReadWriteCloser
		}{}),
		reason: "multiple interfaces may be specified, even if they overlap",
	}, {
		label:  "IgnoreUnexported",
		fnc:    IgnoreUnexported,
		reason: "empty input is valid",
	}, {
		label:     "IgnoreUnexported",
		fnc:       IgnoreUnexported,
		args:      args(nil),
		wantPanic: "invalid struct type",
		reason:    "input must not be nil value",
	}, {
		label:     "IgnoreUnexported",
		fnc:       IgnoreUnexported,
		args:      args(&Foo1{}),
		wantPanic: "invalid struct type",
		reason:    "input must be a struct type (not a pointer to a struct)",
	}, {
		label:  "IgnoreUnexported",
		fnc:    IgnoreUnexported,
		args:   args(Foo1{}, struct{ x, X int }{}),
		reason: "input may be named or unnamed structs",
	}}

	for _, tt := range tests {
		tRun(t, tt.label, func(t *testing.T) {
			// Prepare function arguments.
			vf := reflect.ValueOf(tt.fnc)
			var vargs []reflect.Value
			for i, arg := range tt.args {
				if arg == nil {
					tf := vf.Type()
					if i == tf.NumIn()-1 && tf.IsVariadic() {
						vargs = append(vargs, reflect.Zero(tf.In(i).Elem()))
					} else {
						vargs = append(vargs, reflect.Zero(tf.In(i)))
					}
				} else {
					vargs = append(vargs, reflect.ValueOf(arg))
				}
			}

			// Call the function and capture any panics.
			var gotPanic string
			func() {
				defer func() {
					if ex := recover(); ex != nil {
						if s, ok := ex.(string); ok {
							gotPanic = s
						} else {
							panic(ex)
						}
					}
				}()
				vf.Call(vargs)
			}()

			switch {
			case tt.wantPanic == "" && gotPanic != "":
				t.Errorf("unexpected panic message: %s\nreason: %s", gotPanic, tt.reason)
			case tt.wantPanic != "" && !strings.Contains(gotPanic, tt.wantPanic):
				t.Errorf("panic message:\ngot: %s\nwant: %s\nreason: %s", gotPanic, tt.wantPanic, tt.reason)
			}
		})
	}
}
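TestPanic invokes each option constructor through reflection so one table covers the argument validation of all of them. The recover pattern it relies on can be lifted out as a standalone sketch; the mustPanic helper below is invented for illustration, while cmpopts.EquateApprox is the vendored constructor, which per the table rejects negative inputs:

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp/cmpopts"
)

// mustPanic reports whether f panicked and captures the panic message,
// mirroring the deferred-recover closures used in the tests above.
func mustPanic(f func()) (msg string, panicked bool) {
	defer func() {
		if ex := recover(); ex != nil {
			msg, panicked = fmt.Sprint(ex), true
		}
	}()
	f()
	return
}

func main() {
	// Per the table above, a negative fraction panics at construction time.
	msg, ok := mustPanic(func() { cmpopts.EquateApprox(-0.1, 0) })
	fmt.Println(ok, msg) // true "margin or fraction must be a non-negative number"
}

Unlike this sketch, the test itself re-panics on non-string values so that unrelated crashes still fail loudly rather than being swallowed.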

// TODO: Delete this hack when we drop Go1.6 support.
func tRun(t *testing.T, name string, f func(t *testing.T)) {
	type runner interface {
		Run(string, func(t *testing.T)) bool
	}
	var ti interface{} = t
	if r, ok := ti.(runner); ok {
		r.Run(name, f)
	} else {
		t.Logf("Test: %s", name)
		f(t)
	}
}
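The tRun shim exists because subtests (t.Run) only shipped in Go 1.7; under Go 1.6 the runner type assertion fails and the body runs inline after the Logf banner. Once Go 1.6 support is dropped, call sites reduce to the standard form (sketch in a hypothetical _test.go file):

package cmpopts_test

import "testing"

// On Go 1.7+, tRun(t, name, f) is equivalent to t.Run(name, f).
func TestExample(t *testing.T) {
	t.Run("label", func(t *testing.T) {
		// test body, exactly as would be passed to tRun
	})
}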