Merge branch 'master' into refresh-list-after-metas

pull/2327/head
Andrew Watkins 2017-11-27 17:17:07 -05:00 committed by GitHub
commit 8caf145e97
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
57 changed files with 2333 additions and 1217 deletions

View File

@ -10,8 +10,13 @@
1. [#2292](https://github.com/influxdata/chronograf/pull/2292): Source extra command line options from defaults file
1. [#2327](https://github.com/influxdata/chronograf/pull/2327): After CREATE/DELETE queries, refresh list of databases in Data Explorer
1. [#2327](https://github.com/influxdata/chronograf/pull/2327): Visualize CREATE/DELETE queries with Table view in Data Explorer
1. [#2329](https://github.com/influxdata/chronograf/pull/2329): Include tag values alongside measurement name in Data Explorer result tabs
1. [#2386](https://github.com/influxdata/chronograf/pull/2386): Fix queries that include regex, numbers and wildcard
1. [#2408](https://github.com/influxdata/chronograf/pull/2408): Fix updated Dashboard names not updating dashboard list
### Features
1. [#2385](https://github.com/influxdata/chronograf/pull/2385): Add time shift feature to DataExplorer and Dashboards
### UI Improvements
## v1.3.10.0 [2017-10-24]

View File

@ -23,42 +23,14 @@ ${BINARY}: $(SOURCES) .bindata .jsdep .godep
go build -o ${BINARY} ${LDFLAGS} ./cmd/chronograf/main.go
define CHRONOGIRAFFE
tLf iCf.
.CCC. tCC:
CGG; CGG:
tG0Gt: GGGGGGGGGGGGGGGG1 .,:,
LG1,,:1CC: .GGL;iLC1iii1LCi;GG1 .1GCL1iGG1
LG1:::;i1CGGt;;;;;;L0t;;;;;;GGGC1;;::,iGC
,ii:. 1GG1iiii;;tfiC;;;;;;;GGCfCGCGGC,
fGCiiiiGi1Lt;;iCLL,i;;;CGt
fGG11iiii1C1iiiiiGt1;;;;;CGf
.GGLLL1i1CitfiiL1iCi;;iLCGGt
.CGL11LGCCCCCCCLLCGG1;1GG;
CGL1tf1111iiiiiiL1ifGG,
LGCff1fCt1tCfiiCiCGC
LGGf111111111iCGGt
fGGGGGGGGGGGGGGi
ifii111111itL
;f1i11111iitf
;f1iiiiiii1tf
:fi111iii11tf
:fi111ii1i1tf
:f111111ii1tt
,L111111ii1tt
.Li1111i1111CCCCCCCCCCCCCCLt;
L111ii11111ittttt1tttttittti1fC;
f1111ii111i1ttttt1;iii1ittt1ttttCt.
tt11ii111tti1ttt1tt1;11;;;;iitttifCCCL,
11i1i11ttttti;1t1;;;ttt1;;ii;itti;L,;CCL
;f;;;;1tttti;;ttti;;;;;;;;;;;1tt1ifi .CCi
,L;itti;;;it;;;;;tt1;;;t1;;;;;;ii;t; :CC,
L;;;;iti;;;;;;;;;;;;;;;;;;;;;;;i;L, ;CC.
ti;;;iLLfffi;;;;;ittt11i;;;;;;;;;L tCCfff;
it;;;;;;L,ti;;;;;1Ltttft1t;;;;;;1t ;CCCL;
:f;;;;;;L.ti;;;;;tftttf1,f;;;;;;f: ;CC1:
.L;;;;;;L.t1;;;;;tt111fi,f;;;;;;L.
1Li;;iL1 :Ci;;;tL1i1fC, Lt;;;;Li
.;tt; ifLt:;fLf; ;LCCt,
._ o o
\_`-)|_
,"" _\_
," ## | 0 0.
," ## ,-\__ `.
," / `--._;) - "HAI, I'm Chronogiraffe. Let's be friends!"
," ## /
," ## /
endef
export CHRONOGIRAFFE
chronogiraffe: ${BINARY}

View File

@ -86,6 +86,7 @@ func (d *DashboardsStore) Add(ctx context.Context, src chronograf.Dashboard) (ch
id, _ := b.NextSequence()
src.ID = chronograf.DashboardID(id)
// TODO: use FormatInt
strID := strconv.Itoa(int(id))
for i, cell := range src.Cells {
cid, err := d.IDs.Generate()
@ -95,12 +96,11 @@ func (d *DashboardsStore) Add(ctx context.Context, src chronograf.Dashboard) (ch
cell.ID = cid
src.Cells[i] = cell
}
if v, err := internal.MarshalDashboard(src); err != nil {
return err
} else if err := b.Put([]byte(strID), v); err != nil {
v, err := internal.MarshalDashboard(src)
if err != nil {
return err
}
return nil
return b.Put([]byte(strID), v)
}); err != nil {
return chronograf.Dashboard{}, err
}

View File

@ -191,12 +191,26 @@ func MarshalDashboard(d chronograf.Dashboard) ([]byte, error) {
if q.Range != nil {
r.Upper, r.Lower = q.Range.Upper, q.Range.Lower
}
q.Shifts = q.QueryConfig.Shifts
queries[j] = &Query{
Command: q.Command,
Label: q.Label,
Range: r,
Source: q.Source,
}
shifts := make([]*TimeShift, len(q.Shifts))
for k := range q.Shifts {
shift := &TimeShift{
Label: q.Shifts[k].Label,
Unit: q.Shifts[k].Unit,
Quantity: q.Shifts[k].Quantity,
}
shifts[k] = shift
}
queries[j].Shifts = shifts
}
axes := make(map[string]*Axis, len(c.Axes))
@ -277,12 +291,26 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
Label: q.Label,
Source: q.Source,
}
if q.Range.Upper != q.Range.Lower {
queries[j].Range = &chronograf.Range{
Upper: q.Range.Upper,
Lower: q.Range.Lower,
}
}
shifts := make([]chronograf.TimeShift, len(q.Shifts))
for k := range q.Shifts {
shift := chronograf.TimeShift{
Label: q.Shifts[k].Label,
Unit: q.Shifts[k].Unit,
Quantity: q.Shifts[k].Quantity,
}
shifts[k] = shift
}
queries[j].Shifts = shifts
}
axes := make(map[string]chronograf.Axis, len(c.Axes))
@ -330,9 +358,9 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
templates := make([]chronograf.Template, len(pb.Templates))
for i, t := range pb.Templates {
vals := make([]chronograf.BasicTemplateValue, len(t.Values))
vals := make([]chronograf.TemplateValue, len(t.Values))
for j, v := range t.Values {
vals[j] = chronograf.BasicTemplateValue{
vals[j] = chronograf.TemplateValue{
Selected: v.Selected,
Type: v.Type,
Value: v.Value,
@ -341,7 +369,7 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
template := chronograf.Template{
ID: chronograf.TemplateID(t.ID),
BasicTemplateVar: chronograf.BasicTemplateVar{
TemplateVar: chronograf.TemplateVar{
Var: t.TempVar,
Values: vals,
},
@ -434,8 +462,5 @@ func UnmarshalUser(data []byte, u *chronograf.User) error {
// UnmarshalUserPB decodes a user from binary protobuf data.
// We are ignoring the password for now.
func UnmarshalUserPB(data []byte, u *User) error {
	// Delegate directly to proto.Unmarshal; wrapping its error added nothing.
	// (The mangled merge had left both the old wrapped form and this direct
	// return in place, making the second return unreachable.)
	return proto.Unmarshal(data, u)
}

View File

@ -20,6 +20,7 @@ It has these top-level messages:
Layout
Cell
Query
TimeShift
Range
AlertRule
User
@ -60,6 +61,83 @@ func (m *Source) String() string { return proto.CompactTextString(m)
func (*Source) ProtoMessage() {}
func (*Source) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{0} }
// Nil-safe accessors for Source (protoc-gen-go convention): each Get* returns
// the field value, or the field type's zero value when the receiver is nil.
func (m *Source) GetID() int64 {
	if m != nil {
		return m.ID
	}
	return 0
}

func (m *Source) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *Source) GetType() string {
	if m != nil {
		return m.Type
	}
	return ""
}

func (m *Source) GetUsername() string {
	if m != nil {
		return m.Username
	}
	return ""
}

func (m *Source) GetPassword() string {
	if m != nil {
		return m.Password
	}
	return ""
}

func (m *Source) GetURL() string {
	if m != nil {
		return m.URL
	}
	return ""
}

func (m *Source) GetDefault() bool {
	if m != nil {
		return m.Default
	}
	return false
}

func (m *Source) GetTelegraf() string {
	if m != nil {
		return m.Telegraf
	}
	return ""
}

func (m *Source) GetInsecureSkipVerify() bool {
	if m != nil {
		return m.InsecureSkipVerify
	}
	return false
}

func (m *Source) GetMetaURL() string {
	if m != nil {
		return m.MetaURL
	}
	return ""
}

func (m *Source) GetSharedSecret() string {
	if m != nil {
		return m.SharedSecret
	}
	return ""
}
type Dashboard struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@ -72,6 +150,20 @@ func (m *Dashboard) String() string { return proto.CompactTextString(
func (*Dashboard) ProtoMessage() {}
func (*Dashboard) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{1} }
// Nil-safe accessors for Dashboard: return the field, or its zero value when
// the receiver is nil (generated accessor convention).
func (m *Dashboard) GetID() int64 {
	if m != nil {
		return m.ID
	}
	return 0
}

func (m *Dashboard) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}
func (m *Dashboard) GetCells() []*DashboardCell {
if m != nil {
return m.Cells
@ -103,6 +195,34 @@ func (m *DashboardCell) String() string { return proto.CompactTextStr
func (*DashboardCell) ProtoMessage() {}
func (*DashboardCell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{2} }
// Nil-safe accessors for the cell's layout rectangle (x/y position and
// w/h extent); each returns 0 when the receiver is nil.
func (m *DashboardCell) GetX() int32 {
	if m != nil {
		return m.X
	}
	return 0
}

func (m *DashboardCell) GetY() int32 {
	if m != nil {
		return m.Y
	}
	return 0
}

func (m *DashboardCell) GetW() int32 {
	if m != nil {
		return m.W
	}
	return 0
}

func (m *DashboardCell) GetH() int32 {
	if m != nil {
		return m.H
	}
	return 0
}
func (m *DashboardCell) GetQueries() []*Query {
if m != nil {
return m.Queries
@ -110,6 +230,27 @@ func (m *DashboardCell) GetQueries() []*Query {
return nil
}
// Nil-safe accessors for the cell's name, visualization type, and ID;
// each returns "" when the receiver is nil.
func (m *DashboardCell) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *DashboardCell) GetType() string {
	if m != nil {
		return m.Type
	}
	return ""
}

func (m *DashboardCell) GetID() string {
	if m != nil {
		return m.ID
	}
	return ""
}
func (m *DashboardCell) GetAxes() map[string]*Axis {
if m != nil {
return m.Axes
@ -118,7 +259,7 @@ func (m *DashboardCell) GetAxes() map[string]*Axis {
}
type Axis struct {
LegacyBounds []int64 `protobuf:"varint,1,rep,name=legacyBounds" json:"legacyBounds,omitempty"`
LegacyBounds []int64 `protobuf:"varint,1,rep,packed,name=legacyBounds" json:"legacyBounds,omitempty"`
Bounds []string `protobuf:"bytes,2,rep,name=bounds" json:"bounds,omitempty"`
Label string `protobuf:"bytes,3,opt,name=label,proto3" json:"label,omitempty"`
Prefix string `protobuf:"bytes,4,opt,name=prefix,proto3" json:"prefix,omitempty"`
@ -132,6 +273,55 @@ func (m *Axis) String() string { return proto.CompactTextString(m) }
func (*Axis) ProtoMessage() {}
func (*Axis) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{3} }
// Nil-safe accessors for Axis: slices come back nil and strings come back ""
// when the receiver is nil (generated accessor convention).
func (m *Axis) GetLegacyBounds() []int64 {
	if m != nil {
		return m.LegacyBounds
	}
	return nil
}

func (m *Axis) GetBounds() []string {
	if m != nil {
		return m.Bounds
	}
	return nil
}

func (m *Axis) GetLabel() string {
	if m != nil {
		return m.Label
	}
	return ""
}

func (m *Axis) GetPrefix() string {
	if m != nil {
		return m.Prefix
	}
	return ""
}

func (m *Axis) GetSuffix() string {
	if m != nil {
		return m.Suffix
	}
	return ""
}

func (m *Axis) GetBase() string {
	if m != nil {
		return m.Base
	}
	return ""
}

func (m *Axis) GetScale() string {
	if m != nil {
		return m.Scale
	}
	return ""
}
type Template struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
TempVar string `protobuf:"bytes,2,opt,name=temp_var,json=tempVar,proto3" json:"temp_var,omitempty"`
@ -146,6 +336,20 @@ func (m *Template) String() string { return proto.CompactTextString(m
func (*Template) ProtoMessage() {}
func (*Template) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{4} }
// Nil-safe accessors for Template's ID and template-variable name;
// each returns "" when the receiver is nil.
func (m *Template) GetID() string {
	if m != nil {
		return m.ID
	}
	return ""
}

func (m *Template) GetTempVar() string {
	if m != nil {
		return m.TempVar
	}
	return ""
}
func (m *Template) GetValues() []*TemplateValue {
if m != nil {
return m.Values
@ -153,6 +357,20 @@ func (m *Template) GetValues() []*TemplateValue {
return nil
}
// Nil-safe accessors for Template's type and display label;
// each returns "" when the receiver is nil.
func (m *Template) GetType() string {
	if m != nil {
		return m.Type
	}
	return ""
}

func (m *Template) GetLabel() string {
	if m != nil {
		return m.Label
	}
	return ""
}
func (m *Template) GetQuery() *TemplateQuery {
if m != nil {
return m.Query
@ -171,6 +389,27 @@ func (m *TemplateValue) String() string { return proto.CompactTextStr
func (*TemplateValue) ProtoMessage() {}
func (*TemplateValue) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{5} }
// Nil-safe accessors for TemplateValue; zero values ("" / false) are
// returned when the receiver is nil.
func (m *TemplateValue) GetType() string {
	if m != nil {
		return m.Type
	}
	return ""
}

func (m *TemplateValue) GetValue() string {
	if m != nil {
		return m.Value
	}
	return ""
}

func (m *TemplateValue) GetSelected() bool {
	if m != nil {
		return m.Selected
	}
	return false
}
type TemplateQuery struct {
Command string `protobuf:"bytes,1,opt,name=command,proto3" json:"command,omitempty"`
Db string `protobuf:"bytes,2,opt,name=db,proto3" json:"db,omitempty"`
@ -185,6 +424,48 @@ func (m *TemplateQuery) String() string { return proto.CompactTextStr
func (*TemplateQuery) ProtoMessage() {}
func (*TemplateQuery) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{6} }
// Nil-safe accessors for TemplateQuery; each returns "" when the receiver
// is nil (generated accessor convention).
func (m *TemplateQuery) GetCommand() string {
	if m != nil {
		return m.Command
	}
	return ""
}

func (m *TemplateQuery) GetDb() string {
	if m != nil {
		return m.Db
	}
	return ""
}

func (m *TemplateQuery) GetRp() string {
	if m != nil {
		return m.Rp
	}
	return ""
}

func (m *TemplateQuery) GetMeasurement() string {
	if m != nil {
		return m.Measurement
	}
	return ""
}

func (m *TemplateQuery) GetTagKey() string {
	if m != nil {
		return m.TagKey
	}
	return ""
}

func (m *TemplateQuery) GetFieldKey() string {
	if m != nil {
		return m.FieldKey
	}
	return ""
}
type Server struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@ -200,6 +481,55 @@ func (m *Server) String() string { return proto.CompactTextString(m)
func (*Server) ProtoMessage() {}
func (*Server) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{7} }
// Nil-safe accessors for Server; each returns the field, or the field
// type's zero value when the receiver is nil.
func (m *Server) GetID() int64 {
	if m != nil {
		return m.ID
	}
	return 0
}

func (m *Server) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *Server) GetUsername() string {
	if m != nil {
		return m.Username
	}
	return ""
}

func (m *Server) GetPassword() string {
	if m != nil {
		return m.Password
	}
	return ""
}

func (m *Server) GetURL() string {
	if m != nil {
		return m.URL
	}
	return ""
}

func (m *Server) GetSrcID() int64 {
	if m != nil {
		return m.SrcID
	}
	return 0
}

func (m *Server) GetActive() bool {
	if m != nil {
		return m.Active
	}
	return false
}
type Layout struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
Application string `protobuf:"bytes,2,opt,name=Application,proto3" json:"Application,omitempty"`
@ -213,6 +543,27 @@ func (m *Layout) String() string { return proto.CompactTextString(m)
func (*Layout) ProtoMessage() {}
func (*Layout) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{8} }
// Nil-safe accessors for Layout's string fields; each returns "" when the
// receiver is nil.
func (m *Layout) GetID() string {
	if m != nil {
		return m.ID
	}
	return ""
}

func (m *Layout) GetApplication() string {
	if m != nil {
		return m.Application
	}
	return ""
}

func (m *Layout) GetMeasurement() string {
	if m != nil {
		return m.Measurement
	}
	return ""
}
func (m *Layout) GetCells() []*Cell {
if m != nil {
return m.Cells
@ -220,6 +571,13 @@ func (m *Layout) GetCells() []*Cell {
return nil
}
// GetAutoflow returns the Autoflow flag, or false when the receiver is nil.
func (m *Layout) GetAutoflow() bool {
	if m != nil {
		return m.Autoflow
	}
	return false
}
type Cell struct {
X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"`
Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"`
@ -228,7 +586,7 @@ type Cell struct {
Queries []*Query `protobuf:"bytes,5,rep,name=queries" json:"queries,omitempty"`
I string `protobuf:"bytes,6,opt,name=i,proto3" json:"i,omitempty"`
Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"`
Yranges []int64 `protobuf:"varint,8,rep,name=yranges" json:"yranges,omitempty"`
Yranges []int64 `protobuf:"varint,8,rep,packed,name=yranges" json:"yranges,omitempty"`
Ylabels []string `protobuf:"bytes,9,rep,name=ylabels" json:"ylabels,omitempty"`
Type string `protobuf:"bytes,10,opt,name=type,proto3" json:"type,omitempty"`
Axes map[string]*Axis `protobuf:"bytes,11,rep,name=axes" json:"axes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
@ -239,6 +597,34 @@ func (m *Cell) String() string { return proto.CompactTextString(m) }
func (*Cell) ProtoMessage() {}
func (*Cell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{9} }
// Nil-safe accessors for the cell's layout rectangle (x/y position and
// w/h extent); each returns 0 when the receiver is nil.
func (m *Cell) GetX() int32 {
	if m != nil {
		return m.X
	}
	return 0
}

func (m *Cell) GetY() int32 {
	if m != nil {
		return m.Y
	}
	return 0
}

func (m *Cell) GetW() int32 {
	if m != nil {
		return m.W
	}
	return 0
}

func (m *Cell) GetH() int32 {
	if m != nil {
		return m.H
	}
	return 0
}
func (m *Cell) GetQueries() []*Query {
if m != nil {
return m.Queries
@ -246,6 +632,41 @@ func (m *Cell) GetQueries() []*Query {
return nil
}
// Nil-safe accessors for Cell: strings come back "" and slices come back
// nil when the receiver is nil (generated accessor convention).
func (m *Cell) GetI() string {
	if m != nil {
		return m.I
	}
	return ""
}

func (m *Cell) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *Cell) GetYranges() []int64 {
	if m != nil {
		return m.Yranges
	}
	return nil
}

func (m *Cell) GetYlabels() []string {
	if m != nil {
		return m.Ylabels
	}
	return nil
}

func (m *Cell) GetType() string {
	if m != nil {
		return m.Type
	}
	return ""
}
func (m *Cell) GetAxes() map[string]*Axis {
if m != nil {
return m.Axes
@ -254,14 +675,15 @@ func (m *Cell) GetAxes() map[string]*Axis {
}
// Query represents a single InfluxQL statement attached to a dashboard cell,
// together with the metadata needed to rebuild it in the UI. The mangled merge
// had left both the pre- and post-change field lists in the struct (duplicate
// field declarations); this keeps the new list, which adds Shifts (field 9)
// carrying the time-shift comparisons introduced in PR #2385.
type Query struct {
	Command  string       `protobuf:"bytes,1,opt,name=Command,proto3" json:"Command,omitempty"`
	DB       string       `protobuf:"bytes,2,opt,name=DB,proto3" json:"DB,omitempty"`
	RP       string       `protobuf:"bytes,3,opt,name=RP,proto3" json:"RP,omitempty"`
	GroupBys []string     `protobuf:"bytes,4,rep,name=GroupBys" json:"GroupBys,omitempty"`
	Wheres   []string     `protobuf:"bytes,5,rep,name=Wheres" json:"Wheres,omitempty"`
	Label    string       `protobuf:"bytes,6,opt,name=Label,proto3" json:"Label,omitempty"`
	Range    *Range       `protobuf:"bytes,7,opt,name=Range" json:"Range,omitempty"`
	Source   string       `protobuf:"bytes,8,opt,name=Source,proto3" json:"Source,omitempty"`
	Shifts   []*TimeShift `protobuf:"bytes,9,rep,name=Shifts" json:"Shifts,omitempty"`
}
func (m *Query) Reset() { *m = Query{} }
@ -269,6 +691,48 @@ func (m *Query) String() string { return proto.CompactTextString(m) }
func (*Query) ProtoMessage() {}
func (*Query) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{10} }
// Nil-safe accessors for Query: strings come back "" and slices come back
// nil when the receiver is nil (generated accessor convention).
func (m *Query) GetCommand() string {
	if m != nil {
		return m.Command
	}
	return ""
}

func (m *Query) GetDB() string {
	if m != nil {
		return m.DB
	}
	return ""
}

func (m *Query) GetRP() string {
	if m != nil {
		return m.RP
	}
	return ""
}

func (m *Query) GetGroupBys() []string {
	if m != nil {
		return m.GroupBys
	}
	return nil
}

func (m *Query) GetWheres() []string {
	if m != nil {
		return m.Wheres
	}
	return nil
}

func (m *Query) GetLabel() string {
	if m != nil {
		return m.Label
	}
	return ""
}
func (m *Query) GetRange() *Range {
if m != nil {
return m.Range
@ -276,6 +740,52 @@ func (m *Query) GetRange() *Range {
return nil
}
// Nil-safe accessors for Query's source and time shifts; GetShifts returns
// nil (not an empty slice) when the receiver is nil.
func (m *Query) GetSource() string {
	if m != nil {
		return m.Source
	}
	return ""
}

func (m *Query) GetShifts() []*TimeShift {
	if m != nil {
		return m.Shifts
	}
	return nil
}
// TimeShift describes a relative offset applied to a query's time range so a
// series can be compared against an earlier window. Quantity and Unit are
// stored as strings; presumably Unit is a duration unit (e.g. "m", "h", "d")
// and Quantity its count — TODO confirm against the query builder.
type TimeShift struct {
	Label    string `protobuf:"bytes,1,opt,name=Label,proto3" json:"Label,omitempty"`
	Unit     string `protobuf:"bytes,2,opt,name=Unit,proto3" json:"Unit,omitempty"`
	Quantity string `protobuf:"bytes,3,opt,name=Quantity,proto3" json:"Quantity,omitempty"`
}

// Standard protobuf message plumbing; TimeShift occupies descriptor slot 11.
func (m *TimeShift) Reset()                    { *m = TimeShift{} }
func (m *TimeShift) String() string            { return proto.CompactTextString(m) }
func (*TimeShift) ProtoMessage()               {}
func (*TimeShift) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{11} }

// Nil-safe accessors: each returns "", not a panic, when the receiver is nil.
func (m *TimeShift) GetLabel() string {
	if m != nil {
		return m.Label
	}
	return ""
}

func (m *TimeShift) GetUnit() string {
	if m != nil {
		return m.Unit
	}
	return ""
}

func (m *TimeShift) GetQuantity() string {
	if m != nil {
		return m.Quantity
	}
	return ""
}
type Range struct {
Upper int64 `protobuf:"varint,1,opt,name=Upper,proto3" json:"Upper,omitempty"`
Lower int64 `protobuf:"varint,2,opt,name=Lower,proto3" json:"Lower,omitempty"`
@ -284,7 +794,21 @@ type Range struct {
// Standard protobuf message plumbing for Range. The mangled merge had left
// two Descriptor definitions in place (the old slot {11} and the new {12});
// only the new index is kept — TimeShift now occupies slot 11.
func (m *Range) Reset()                    { *m = Range{} }
func (m *Range) String() string            { return proto.CompactTextString(m) }
func (*Range) ProtoMessage()               {}
func (*Range) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{12} }
// Nil-safe accessors for the Range bounds; each returns 0 when the receiver
// is nil.
func (m *Range) GetUpper() int64 {
	if m != nil {
		return m.Upper
	}
	return 0
}

func (m *Range) GetLower() int64 {
	if m != nil {
		return m.Lower
	}
	return 0
}
type AlertRule struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
@ -296,7 +820,35 @@ type AlertRule struct {
// Standard protobuf message plumbing for AlertRule. The mangled merge had
// left two Descriptor definitions in place (old slot {12}, new {13}); only
// the new index is kept, shifted by the TimeShift message's insertion.
func (m *AlertRule) Reset()                    { *m = AlertRule{} }
func (m *AlertRule) String() string            { return proto.CompactTextString(m) }
func (*AlertRule) ProtoMessage()               {}
func (*AlertRule) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{13} }
// Nil-safe accessors for AlertRule; zero values ("" / 0) are returned when
// the receiver is nil.
func (m *AlertRule) GetID() string {
	if m != nil {
		return m.ID
	}
	return ""
}

func (m *AlertRule) GetJSON() string {
	if m != nil {
		return m.JSON
	}
	return ""
}

func (m *AlertRule) GetSrcID() int64 {
	if m != nil {
		return m.SrcID
	}
	return 0
}

func (m *AlertRule) GetKapaID() int64 {
	if m != nil {
		return m.KapaID
	}
	return 0
}
type User struct {
ID uint64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
@ -306,7 +858,21 @@ type User struct {
// Standard protobuf message plumbing for User. The mangled merge had left
// two Descriptor definitions in place (old slot {13}, new {14}); only the
// new index is kept, shifted by the TimeShift message's insertion.
func (m *User) Reset()                    { *m = User{} }
func (m *User) String() string            { return proto.CompactTextString(m) }
func (*User) ProtoMessage()               {}
func (*User) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{14} }
// Nil-safe accessors for User; each returns the field's zero value when the
// receiver is nil.
func (m *User) GetID() uint64 {
	if m != nil {
		return m.ID
	}
	return 0
}

func (m *User) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}
func init() {
proto.RegisterType((*Source)(nil), "internal.Source")
@ -320,6 +886,7 @@ func init() {
proto.RegisterType((*Layout)(nil), "internal.Layout")
proto.RegisterType((*Cell)(nil), "internal.Cell")
proto.RegisterType((*Query)(nil), "internal.Query")
proto.RegisterType((*TimeShift)(nil), "internal.TimeShift")
proto.RegisterType((*Range)(nil), "internal.Range")
proto.RegisterType((*AlertRule)(nil), "internal.AlertRule")
proto.RegisterType((*User)(nil), "internal.User")
@ -328,70 +895,73 @@ func init() {
func init() { proto.RegisterFile("internal.proto", fileDescriptorInternal) }
var fileDescriptorInternal = []byte{
// 1028 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xbc, 0x56, 0x4f, 0x6f, 0xe3, 0x44,
0x14, 0xd7, 0xf8, 0x4f, 0x12, 0xbf, 0x74, 0x0b, 0x1a, 0xad, 0x58, 0xb3, 0x5c, 0x82, 0x05, 0x52,
0x40, 0x6c, 0x41, 0xbb, 0x42, 0x42, 0xdc, 0xd2, 0x06, 0xad, 0x4a, 0xbb, 0x4b, 0x99, 0xb4, 0xe5,
0x84, 0x56, 0x13, 0xe7, 0xa5, 0xb5, 0xd6, 0x89, 0xcd, 0xd8, 0x6e, 0xe3, 0x6f, 0xc1, 0x27, 0x40,
0x42, 0xe2, 0xc4, 0x81, 0x03, 0x5f, 0x80, 0xfb, 0x7e, 0x2a, 0xf4, 0x66, 0xc6, 0x8e, 0xc3, 0x76,
0xd1, 0x5e, 0xe0, 0x36, 0xbf, 0xf7, 0xc6, 0x6f, 0x66, 0xde, 0xef, 0xfd, 0x7e, 0x09, 0xec, 0x27,
0xeb, 0x12, 0xd5, 0x5a, 0xa6, 0x07, 0xb9, 0xca, 0xca, 0x8c, 0x0f, 0x1a, 0x1c, 0xfd, 0xe1, 0x40,
0x6f, 0x96, 0x55, 0x2a, 0x46, 0xbe, 0x0f, 0xce, 0xf1, 0x34, 0x64, 0x23, 0x36, 0x76, 0x85, 0x73,
0x3c, 0xe5, 0x1c, 0xbc, 0xe7, 0x72, 0x85, 0xa1, 0x33, 0x62, 0xe3, 0x40, 0xe8, 0x35, 0xc5, 0xce,
0xeb, 0x1c, 0x43, 0xd7, 0xc4, 0x68, 0xcd, 0x1f, 0xc2, 0xe0, 0xa2, 0xa0, 0x6a, 0x2b, 0x0c, 0x3d,
0x1d, 0x6f, 0x31, 0xe5, 0xce, 0x64, 0x51, 0xdc, 0x66, 0x6a, 0x11, 0xfa, 0x26, 0xd7, 0x60, 0xfe,
0x2e, 0xb8, 0x17, 0xe2, 0x34, 0xec, 0xe9, 0x30, 0x2d, 0x79, 0x08, 0xfd, 0x29, 0x2e, 0x65, 0x95,
0x96, 0x61, 0x7f, 0xc4, 0xc6, 0x03, 0xd1, 0x40, 0xaa, 0x73, 0x8e, 0x29, 0x5e, 0x29, 0xb9, 0x0c,
0x07, 0xa6, 0x4e, 0x83, 0xf9, 0x01, 0xf0, 0xe3, 0x75, 0x81, 0x71, 0xa5, 0x70, 0xf6, 0x32, 0xc9,
0x2f, 0x51, 0x25, 0xcb, 0x3a, 0x0c, 0x74, 0x81, 0x3b, 0x32, 0x74, 0xca, 0x33, 0x2c, 0x25, 0x9d,
0x0d, 0xba, 0x54, 0x03, 0x79, 0x04, 0x7b, 0xb3, 0x6b, 0xa9, 0x70, 0x31, 0xc3, 0x58, 0x61, 0x19,
0x0e, 0x75, 0x7a, 0x27, 0x16, 0xfd, 0xcc, 0x20, 0x98, 0xca, 0xe2, 0x7a, 0x9e, 0x49, 0xb5, 0x78,
0xab, 0x9e, 0x3d, 0x02, 0x3f, 0xc6, 0x34, 0x2d, 0x42, 0x77, 0xe4, 0x8e, 0x87, 0x8f, 0x1f, 0x1c,
0xb4, 0x64, 0xb4, 0x75, 0x8e, 0x30, 0x4d, 0x85, 0xd9, 0xc5, 0xbf, 0x80, 0xa0, 0xc4, 0x55, 0x9e,
0xca, 0x12, 0x8b, 0xd0, 0xd3, 0x9f, 0xf0, 0xed, 0x27, 0xe7, 0x36, 0x25, 0xb6, 0x9b, 0xa2, 0xdf,
0x1d, 0xb8, 0xb7, 0x53, 0x8a, 0xef, 0x01, 0xdb, 0xe8, 0x5b, 0xf9, 0x82, 0x6d, 0x08, 0xd5, 0xfa,
0x46, 0xbe, 0x60, 0x35, 0xa1, 0x5b, 0xcd, 0x9f, 0x2f, 0xd8, 0x2d, 0xa1, 0x6b, 0xcd, 0x9a, 0x2f,
0xd8, 0x35, 0xff, 0x04, 0xfa, 0x3f, 0x55, 0xa8, 0x12, 0x2c, 0x42, 0x5f, 0x9f, 0xfc, 0xce, 0xf6,
0xe4, 0xef, 0x2b, 0x54, 0xb5, 0x68, 0xf2, 0xf4, 0x52, 0xcd, 0xb8, 0xa1, 0x4f, 0xaf, 0x29, 0x56,
0xd2, 0x74, 0xf4, 0x4d, 0x8c, 0xd6, 0xb6, 0x43, 0x86, 0x33, 0xea, 0xd0, 0x97, 0xe0, 0xc9, 0x0d,
0x16, 0x61, 0xa0, 0xeb, 0x7f, 0xf8, 0x86, 0x66, 0x1c, 0x4c, 0x36, 0x58, 0x7c, 0xb3, 0x2e, 0x55,
0x2d, 0xf4, 0xf6, 0x87, 0x4f, 0x21, 0x68, 0x43, 0x34, 0x39, 0x2f, 0xb1, 0xd6, 0x0f, 0x0c, 0x04,
0x2d, 0xf9, 0x47, 0xe0, 0xdf, 0xc8, 0xb4, 0x32, 0x8d, 0x1f, 0x3e, 0xde, 0xdf, 0x96, 0x9d, 0x6c,
0x92, 0x42, 0x98, 0xe4, 0xd7, 0xce, 0x57, 0x2c, 0xfa, 0x93, 0x81, 0x47, 0x31, 0x22, 0x3b, 0xc5,
0x2b, 0x19, 0xd7, 0x87, 0x59, 0xb5, 0x5e, 0x14, 0x21, 0x1b, 0xb9, 0x63, 0x57, 0xec, 0xc4, 0xf8,
0x7b, 0xd0, 0x9b, 0x9b, 0xac, 0x33, 0x72, 0xc7, 0x81, 0xb0, 0x88, 0xdf, 0x07, 0x3f, 0x95, 0x73,
0x4c, 0xad, 0x0e, 0x0c, 0xa0, 0xdd, 0xb9, 0xc2, 0x65, 0xb2, 0xb1, 0x32, 0xb0, 0x88, 0xe2, 0x45,
0xb5, 0xa4, 0xb8, 0x91, 0x80, 0x45, 0xd4, 0xae, 0xb9, 0x2c, 0xda, 0x16, 0xd2, 0x9a, 0x2a, 0x17,
0xb1, 0x4c, 0x9b, 0x1e, 0x1a, 0x10, 0xfd, 0xc5, 0x68, 0xfe, 0x0d, 0xdf, 0x9d, 0x99, 0x33, 0x1d,
0x7d, 0x1f, 0x06, 0x34, 0x0b, 0x2f, 0x6e, 0xa4, 0xb2, 0x73, 0xd7, 0x27, 0x7c, 0x29, 0x15, 0xff,
0x1c, 0x7a, 0xfa, 0xe5, 0x77, 0xcc, 0x5e, 0x53, 0xee, 0x92, 0xf2, 0xc2, 0x6e, 0x6b, 0x19, 0xf4,
0x3a, 0x0c, 0xb6, 0x8f, 0xf5, 0xbb, 0x8f, 0x7d, 0x04, 0x3e, 0x8d, 0x42, 0xad, 0x6f, 0x7f, 0x67,
0x65, 0x33, 0x30, 0x66, 0x57, 0x74, 0x01, 0xf7, 0x76, 0x4e, 0x6c, 0x4f, 0x62, 0xbb, 0x27, 0x6d,
0x59, 0x0c, 0x2c, 0x6b, 0xa4, 0xfd, 0x02, 0x53, 0x8c, 0x4b, 0x5c, 0xe8, 0x7e, 0x0f, 0x44, 0x8b,
0xa3, 0x5f, 0xd9, 0xb6, 0xae, 0x3e, 0x8f, 0xd4, 0x1d, 0x67, 0xab, 0x95, 0x5c, 0x2f, 0x6c, 0xe9,
0x06, 0x52, 0xdf, 0x16, 0x73, 0x5b, 0xda, 0x59, 0xcc, 0x09, 0xab, 0xdc, 0x32, 0xe8, 0xa8, 0x9c,
0x8f, 0x60, 0xb8, 0x42, 0x59, 0x54, 0x0a, 0x57, 0xb8, 0x2e, 0x6d, 0x0b, 0xba, 0x21, 0xfe, 0x00,
0xfa, 0xa5, 0xbc, 0x7a, 0x41, 0xb3, 0x67, 0x99, 0x2c, 0xe5, 0xd5, 0x09, 0xd6, 0xfc, 0x03, 0x08,
0x96, 0x09, 0xa6, 0x0b, 0x9d, 0x32, 0x74, 0x0e, 0x74, 0xe0, 0x04, 0xeb, 0xe8, 0x37, 0x06, 0xbd,
0x19, 0xaa, 0x1b, 0x54, 0x6f, 0x65, 0x17, 0x5d, 0x3b, 0x75, 0xff, 0xc5, 0x4e, 0xbd, 0xbb, 0xed,
0xd4, 0xdf, 0xda, 0xe9, 0x7d, 0xf0, 0x67, 0x2a, 0x3e, 0x9e, 0xea, 0x1b, 0xb9, 0xc2, 0x00, 0x9a,
0xc6, 0x49, 0x5c, 0x26, 0x37, 0x68, 0x3d, 0xd6, 0xa2, 0xe8, 0x17, 0x06, 0xbd, 0x53, 0x59, 0x67,
0x55, 0xf9, 0xda, 0x84, 0x8d, 0x60, 0x38, 0xc9, 0xf3, 0x34, 0x89, 0x65, 0x99, 0x64, 0x6b, 0x7b,
0xdb, 0x6e, 0x88, 0x76, 0x3c, 0xeb, 0xf4, 0xce, 0xdc, 0xbb, 0x1b, 0x22, 0x85, 0x1e, 0x69, 0x17,
0x34, 0x96, 0xd6, 0x51, 0xa8, 0x31, 0x3f, 0x9d, 0xa4, 0x07, 0x4e, 0xaa, 0x32, 0x5b, 0xa6, 0xd9,
0xad, 0x7e, 0xc9, 0x40, 0xb4, 0x38, 0x7a, 0xe5, 0x80, 0xf7, 0x7f, 0xb9, 0xdb, 0x1e, 0xb0, 0xc4,
0x12, 0xc9, 0x92, 0xd6, 0xeb, 0xfa, 0x1d, 0xaf, 0x0b, 0xa1, 0x5f, 0x2b, 0xb9, 0xbe, 0xc2, 0x22,
0x1c, 0x68, 0xe7, 0x68, 0xa0, 0xce, 0x68, 0x8d, 0x18, 0x93, 0x0b, 0x44, 0x03, 0xdb, 0x99, 0x87,
0xce, 0xcc, 0x7f, 0x66, 0xfd, 0x70, 0xa8, 0x6f, 0x14, 0xee, 0xb6, 0xe5, 0xbf, 0xb3, 0xc1, 0x57,
0x0c, 0xfc, 0x56, 0x30, 0x47, 0xbb, 0x82, 0x39, 0xda, 0x0a, 0x66, 0x7a, 0xd8, 0x08, 0x66, 0x7a,
0x48, 0x58, 0x9c, 0x35, 0x82, 0x11, 0x67, 0x44, 0xd6, 0x53, 0x95, 0x55, 0xf9, 0x61, 0x6d, 0x58,
0x0d, 0x44, 0x8b, 0x69, 0xca, 0x7e, 0xb8, 0x46, 0x65, 0x5b, 0x1d, 0x08, 0x8b, 0x68, 0x26, 0x4f,
0xb5, 0x99, 0x98, 0xe6, 0x1a, 0xc0, 0x3f, 0x06, 0x5f, 0x50, 0xf3, 0x74, 0x87, 0x77, 0x78, 0xd1,
0x61, 0x61, 0xb2, 0x54, 0xd4, 0xfc, 0x57, 0xb1, 0xbf, 0x27, 0x16, 0x45, 0x4f, 0xec, 0xe7, 0x54,
0xfd, 0x22, 0xcf, 0x51, 0x59, 0x89, 0x19, 0xa0, 0xcf, 0xcc, 0x6e, 0xd1, 0xb8, 0xa3, 0x2b, 0x0c,
0x88, 0x7e, 0x84, 0x60, 0x92, 0xa2, 0x2a, 0x45, 0x95, 0xbe, 0xee, 0xa9, 0x1c, 0xbc, 0x6f, 0x67,
0xdf, 0x3d, 0x6f, 0x84, 0x49, 0xeb, 0xad, 0x9c, 0xdc, 0x7f, 0xc8, 0xe9, 0x44, 0xe6, 0xf2, 0x78,
0xaa, 0xe7, 0xcc, 0x15, 0x16, 0x45, 0x9f, 0x82, 0x47, 0xb2, 0xed, 0x54, 0xf6, 0xde, 0x24, 0xf9,
0x79, 0x4f, 0xff, 0x2b, 0x7b, 0xf2, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb7, 0x59, 0x2e, 0xc0,
0xa7, 0x09, 0x00, 0x00,
// 1082 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xcf, 0x8e, 0xe3, 0xc4,
0x13, 0x96, 0x63, 0x3b, 0x89, 0x2b, 0xb3, 0xf3, 0xfb, 0xa9, 0x59, 0xb1, 0x66, 0xb9, 0x04, 0x0b,
0xa4, 0xf0, 0x67, 0x07, 0xb4, 0x2b, 0x24, 0xc4, 0x2d, 0x33, 0x41, 0xab, 0x61, 0x66, 0x97, 0x99,
0xce, 0xcc, 0x70, 0x42, 0xab, 0x4e, 0x52, 0x99, 0x58, 0xeb, 0xd8, 0xa6, 0xdd, 0x9e, 0x89, 0xdf,
0x82, 0x27, 0x40, 0x42, 0xe2, 0xc4, 0x81, 0x03, 0x2f, 0xc0, 0x9d, 0x17, 0xe2, 0x8a, 0xaa, 0xbb,
0xed, 0x38, 0xec, 0x2c, 0xda, 0x0b, 0xdc, 0xfa, 0xab, 0xea, 0x54, 0x55, 0xd7, 0x57, 0xf5, 0xc5,
0xb0, 0x1f, 0xa7, 0x0a, 0x65, 0x2a, 0x92, 0x83, 0x5c, 0x66, 0x2a, 0x63, 0xfd, 0x1a, 0x47, 0xbf,
0x76, 0xa0, 0x3b, 0xcd, 0x4a, 0x39, 0x47, 0xb6, 0x0f, 0x9d, 0xe3, 0x49, 0xe8, 0x0c, 0x9d, 0x91,
0xcb, 0x3b, 0xc7, 0x13, 0xc6, 0xc0, 0x7b, 0x2e, 0xd6, 0x18, 0x76, 0x86, 0xce, 0x28, 0xe0, 0xfa,
0x4c, 0xb6, 0x8b, 0x2a, 0xc7, 0xd0, 0x35, 0x36, 0x3a, 0xb3, 0x87, 0xd0, 0xbf, 0x2c, 0x28, 0xda,
0x1a, 0x43, 0x4f, 0xdb, 0x1b, 0x4c, 0xbe, 0x33, 0x51, 0x14, 0xb7, 0x99, 0x5c, 0x84, 0xbe, 0xf1,
0xd5, 0x98, 0xfd, 0x1f, 0xdc, 0x4b, 0x7e, 0x1a, 0x76, 0xb5, 0x99, 0x8e, 0x2c, 0x84, 0xde, 0x04,
0x97, 0xa2, 0x4c, 0x54, 0xd8, 0x1b, 0x3a, 0xa3, 0x3e, 0xaf, 0x21, 0xc5, 0xb9, 0xc0, 0x04, 0xaf,
0xa5, 0x58, 0x86, 0x7d, 0x13, 0xa7, 0xc6, 0xec, 0x00, 0xd8, 0x71, 0x5a, 0xe0, 0xbc, 0x94, 0x38,
0x7d, 0x19, 0xe7, 0x57, 0x28, 0xe3, 0x65, 0x15, 0x06, 0x3a, 0xc0, 0x1d, 0x1e, 0xca, 0xf2, 0x0c,
0x95, 0xa0, 0xdc, 0xa0, 0x43, 0xd5, 0x90, 0x45, 0xb0, 0x37, 0x5d, 0x09, 0x89, 0x8b, 0x29, 0xce,
0x25, 0xaa, 0x70, 0xa0, 0xdd, 0x3b, 0xb6, 0xe8, 0x07, 0x07, 0x82, 0x89, 0x28, 0x56, 0xb3, 0x4c,
0xc8, 0xc5, 0x1b, 0xf5, 0xec, 0x11, 0xf8, 0x73, 0x4c, 0x92, 0x22, 0x74, 0x87, 0xee, 0x68, 0xf0,
0xf8, 0xc1, 0x41, 0x43, 0x46, 0x13, 0xe7, 0x08, 0x93, 0x84, 0x9b, 0x5b, 0xec, 0x33, 0x08, 0x14,
0xae, 0xf3, 0x44, 0x28, 0x2c, 0x42, 0x4f, 0xff, 0x84, 0x6d, 0x7f, 0x72, 0x61, 0x5d, 0x7c, 0x7b,
0x29, 0xfa, 0xa5, 0x03, 0xf7, 0x76, 0x42, 0xb1, 0x3d, 0x70, 0x36, 0xba, 0x2a, 0x9f, 0x3b, 0x1b,
0x42, 0x95, 0xae, 0xc8, 0xe7, 0x4e, 0x45, 0xe8, 0x56, 0xf3, 0xe7, 0x73, 0xe7, 0x96, 0xd0, 0x4a,
0xb3, 0xe6, 0x73, 0x67, 0xc5, 0x3e, 0x84, 0xde, 0xf7, 0x25, 0xca, 0x18, 0x8b, 0xd0, 0xd7, 0x99,
0xff, 0xb7, 0xcd, 0x7c, 0x5e, 0xa2, 0xac, 0x78, 0xed, 0xa7, 0x97, 0x6a, 0xc6, 0x0d, 0x7d, 0xfa,
0x4c, 0x36, 0x45, 0xd3, 0xd1, 0x33, 0x36, 0x3a, 0xdb, 0x0e, 0x19, 0xce, 0xa8, 0x43, 0x9f, 0x83,
0x27, 0x36, 0x58, 0x84, 0x81, 0x8e, 0xff, 0xde, 0x6b, 0x9a, 0x71, 0x30, 0xde, 0x60, 0xf1, 0x55,
0xaa, 0x64, 0xc5, 0xf5, 0xf5, 0x87, 0x4f, 0x21, 0x68, 0x4c, 0x34, 0x39, 0x2f, 0xb1, 0xd2, 0x0f,
0x0c, 0x38, 0x1d, 0xd9, 0xfb, 0xe0, 0xdf, 0x88, 0xa4, 0x34, 0x8d, 0x1f, 0x3c, 0xde, 0xdf, 0x86,
0x1d, 0x6f, 0xe2, 0x82, 0x1b, 0xe7, 0x97, 0x9d, 0x2f, 0x9c, 0xe8, 0x37, 0x07, 0x3c, 0xb2, 0x11,
0xd9, 0x09, 0x5e, 0x8b, 0x79, 0x75, 0x98, 0x95, 0xe9, 0xa2, 0x08, 0x9d, 0xa1, 0x3b, 0x72, 0xf9,
0x8e, 0x8d, 0xbd, 0x0d, 0xdd, 0x99, 0xf1, 0x76, 0x86, 0xee, 0x28, 0xe0, 0x16, 0xb1, 0xfb, 0xe0,
0x27, 0x62, 0x86, 0x89, 0xdd, 0x03, 0x03, 0xe8, 0x76, 0x2e, 0x71, 0x19, 0x6f, 0xec, 0x1a, 0x58,
0x44, 0xf6, 0xa2, 0x5c, 0x92, 0xdd, 0xac, 0x80, 0x45, 0xd4, 0xae, 0x99, 0x28, 0x9a, 0x16, 0xd2,
0x99, 0x22, 0x17, 0x73, 0x91, 0xd4, 0x3d, 0x34, 0x20, 0xfa, 0xdd, 0xa1, 0xf9, 0x37, 0x7c, 0xb7,
0x66, 0xce, 0x74, 0xf4, 0x1d, 0xe8, 0xd3, 0x2c, 0xbc, 0xb8, 0x11, 0xd2, 0xce, 0x5d, 0x8f, 0xf0,
0x95, 0x90, 0xec, 0x53, 0xe8, 0xea, 0x97, 0xdf, 0x31, 0x7b, 0x75, 0xb8, 0x2b, 0xf2, 0x73, 0x7b,
0xad, 0x61, 0xd0, 0x6b, 0x31, 0xd8, 0x3c, 0xd6, 0x6f, 0x3f, 0xf6, 0x11, 0xf8, 0x34, 0x0a, 0x95,
0xae, 0xfe, 0xce, 0xc8, 0x66, 0x60, 0xcc, 0xad, 0xe8, 0x12, 0xee, 0xed, 0x64, 0x6c, 0x32, 0x39,
0xbb, 0x99, 0xb6, 0x2c, 0x06, 0x96, 0x35, 0xda, 0xfd, 0x02, 0x13, 0x9c, 0x2b, 0x5c, 0xe8, 0x7e,
0xf7, 0x79, 0x83, 0xa3, 0x9f, 0x9c, 0x6d, 0x5c, 0x9d, 0x8f, 0xb6, 0x7b, 0x9e, 0xad, 0xd7, 0x22,
0x5d, 0xd8, 0xd0, 0x35, 0xa4, 0xbe, 0x2d, 0x66, 0x36, 0x74, 0x67, 0x31, 0x23, 0x2c, 0x73, 0xcb,
0x60, 0x47, 0xe6, 0x6c, 0x08, 0x83, 0x35, 0x8a, 0xa2, 0x94, 0xb8, 0xc6, 0x54, 0xd9, 0x16, 0xb4,
0x4d, 0xec, 0x01, 0xf4, 0x94, 0xb8, 0x7e, 0x41, 0xb3, 0x67, 0x99, 0x54, 0xe2, 0xfa, 0x04, 0x2b,
0xf6, 0x2e, 0x04, 0xcb, 0x18, 0x93, 0x85, 0x76, 0x19, 0x3a, 0xfb, 0xda, 0x70, 0x82, 0x55, 0xf4,
0xb3, 0x03, 0xdd, 0x29, 0xca, 0x1b, 0x94, 0x6f, 0x24, 0x17, 0x6d, 0x39, 0x75, 0xff, 0x41, 0x4e,
0xbd, 0xbb, 0xe5, 0xd4, 0xdf, 0xca, 0xe9, 0x7d, 0xf0, 0xa7, 0x72, 0x7e, 0x3c, 0xd1, 0x15, 0xb9,
0xdc, 0x00, 0x9a, 0xc6, 0xf1, 0x5c, 0xc5, 0x37, 0x68, 0x35, 0xd6, 0xa2, 0xe8, 0x47, 0x07, 0xba,
0xa7, 0xa2, 0xca, 0x4a, 0xf5, 0xca, 0x84, 0x0d, 0x61, 0x30, 0xce, 0xf3, 0x24, 0x9e, 0x0b, 0x15,
0x67, 0xa9, 0xad, 0xb6, 0x6d, 0xa2, 0x1b, 0xcf, 0x5a, 0xbd, 0x33, 0x75, 0xb7, 0x4d, 0xb4, 0xa1,
0x47, 0x5a, 0x05, 0x8d, 0xa4, 0xb5, 0x36, 0xd4, 0x88, 0x9f, 0x76, 0xd2, 0x03, 0xc7, 0xa5, 0xca,
0x96, 0x49, 0x76, 0xab, 0x5f, 0xd2, 0xe7, 0x0d, 0x8e, 0xfe, 0xe8, 0x80, 0xf7, 0x5f, 0xa9, 0xdb,
0x1e, 0x38, 0xb1, 0x25, 0xd2, 0x89, 0x1b, 0xad, 0xeb, 0xb5, 0xb4, 0x2e, 0x84, 0x5e, 0x25, 0x45,
0x7a, 0x8d, 0x45, 0xd8, 0xd7, 0xca, 0x51, 0x43, 0xed, 0xd1, 0x3b, 0x62, 0x44, 0x2e, 0xe0, 0x35,
0x6c, 0x66, 0x1e, 0x5a, 0x33, 0xff, 0x89, 0xd5, 0xc3, 0x81, 0xae, 0x28, 0xdc, 0x6d, 0xcb, 0xbf,
0x27, 0x83, 0x7f, 0x3a, 0xe0, 0x37, 0x0b, 0x73, 0xb4, 0xbb, 0x30, 0x47, 0xdb, 0x85, 0x99, 0x1c,
0xd6, 0x0b, 0x33, 0x39, 0x24, 0xcc, 0xcf, 0xea, 0x85, 0xe1, 0x67, 0x44, 0xd6, 0x53, 0x99, 0x95,
0xf9, 0x61, 0x65, 0x58, 0x0d, 0x78, 0x83, 0x69, 0xca, 0xbe, 0x5d, 0xa1, 0xb4, 0xad, 0x0e, 0xb8,
0x45, 0x34, 0x93, 0xa7, 0x5a, 0x4c, 0x4c, 0x73, 0x0d, 0x60, 0x1f, 0x80, 0xcf, 0xa9, 0x79, 0xba,
0xc3, 0x3b, 0xbc, 0x68, 0x33, 0x37, 0x5e, 0x0a, 0x6a, 0xbe, 0x55, 0xec, 0xff, 0x49, 0xfd, 0xe5,
0xf2, 0x31, 0x74, 0xa7, 0xab, 0x78, 0xa9, 0xea, 0x7f, 0x95, 0xb7, 0x5a, 0x62, 0x14, 0xaf, 0x51,
0xfb, 0xb8, 0xbd, 0x12, 0x9d, 0x43, 0xd0, 0x18, 0xb7, 0xe5, 0x38, 0xed, 0x72, 0x18, 0x78, 0x97,
0x69, 0xac, 0xea, 0xb5, 0xa4, 0x33, 0x3d, 0xf6, 0xbc, 0x14, 0xa9, 0x8a, 0x55, 0x55, 0xaf, 0x65,
0x8d, 0xa3, 0x27, 0xb6, 0x7c, 0x0a, 0x77, 0x99, 0xe7, 0x28, 0xed, 0x8a, 0x1b, 0xa0, 0x93, 0x64,
0xb7, 0x68, 0xd4, 0xd9, 0xe5, 0x06, 0x44, 0xdf, 0x41, 0x30, 0x4e, 0x50, 0x2a, 0x5e, 0x26, 0xaf,
0x6a, 0x3a, 0x03, 0xef, 0xeb, 0xe9, 0x37, 0xcf, 0xeb, 0x0a, 0xe8, 0xbc, 0x5d, 0x67, 0xf7, 0x6f,
0xeb, 0x7c, 0x22, 0x72, 0x71, 0x3c, 0xd1, 0x73, 0xee, 0x72, 0x8b, 0xa2, 0x8f, 0xc0, 0x23, 0xd9,
0x68, 0x45, 0xf6, 0x5e, 0x27, 0x39, 0xb3, 0xae, 0xfe, 0x2a, 0x7c, 0xf2, 0x57, 0x00, 0x00, 0x00,
0xff, 0xff, 0xda, 0x20, 0xfc, 0x99, 0x27, 0x0a, 0x00, 0x00,
}

View File

@ -23,15 +23,15 @@ message Dashboard {
}
message DashboardCell {
int32 x = 1; // X-coordinate of Cell in the Dashboard
int32 y = 2; // Y-coordinate of Cell in the Dashboard
int32 w = 3; // Width of Cell in the Dashboard
int32 h = 4; // Height of Cell in the Dashboard
repeated Query queries = 5; // Time-series data queries for Dashboard
string name = 6; // User-facing name for this Dashboard
string type = 7; // Dashboard visualization type
string ID = 8; // id is the unique id of the dashboard. MIGRATED FIELD added in 1.2.0-beta6
map<string, Axis> axes = 9; // Axes represent the graphical viewport for a cell's visualizations
int32 x = 1; // X-coordinate of Cell in the Dashboard
int32 y = 2; // Y-coordinate of Cell in the Dashboard
int32 w = 3; // Width of Cell in the Dashboard
int32 h = 4; // Height of Cell in the Dashboard
repeated Query queries = 5; // Time-series data queries for Dashboard
string name = 6; // User-facing name for this Dashboard
string type = 7; // Dashboard visualization type
string ID = 8; // id is the unique id of the dashboard. MIGRATED FIELD added in 1.2.0-beta6
map<string, Axis> axes = 9; // Axes represent the graphical viewport for a cell's visualizations
}
message Axis {
@ -54,18 +54,18 @@ message Template {
}
message TemplateValue {
string type = 1; // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
string value = 2; // Value is the specific value used to replace a template in an InfluxQL query
bool selected = 3; // Selected states that this variable has been picked to use for replacement
string type = 1; // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
string value = 2; // Value is the specific value used to replace a template in an InfluxQL query
bool selected = 3; // Selected states that this variable has been picked to use for replacement
}
message TemplateQuery {
string command = 1; // Command is the query itself
string db = 2; // DB the database for the query (optional)
string rp = 3; // RP is a retention policy and optional;
string measurement = 4; // Measurement is the optinally selected measurement for the query
string tag_key = 5; // TagKey is the optionally selected tag key for the query
string field_key = 6; // FieldKey is the optionally selected field key for the query
string command = 1; // Command is the query itself
string db = 2; // DB the database for the query (optional)
string rp = 3; // RP is a retention policy and optional;
string measurement = 4; // Measurement is the optinally selected measurement for the query
string tag_key = 5; // TagKey is the optionally selected tag key for the query
string field_key = 6; // FieldKey is the optionally selected field key for the query
}
message Server {
@ -101,31 +101,38 @@ message Cell {
}
message Query {
string Command = 1; // Command is the query itself
string DB = 2; // DB the database for the query (optional)
string RP = 3; // RP is a retention policy and optional;
repeated string GroupBys= 4; // GroupBys define the groups to combine in the query
repeated string Wheres = 5; // Wheres define the restrictions on the query
string Label = 6; // Label is the name of the Y-Axis
Range Range = 7; // Range is the upper and lower bound of the Y-Axis
string Source = 8; // Source is the optional URI to the data source
string Command = 1; // Command is the query itself
string DB = 2; // DB the database for the query (optional)
string RP = 3; // RP is a retention policy and optional;
repeated string GroupBys = 4; // GroupBys define the groups to combine in the query
repeated string Wheres = 5; // Wheres define the restrictions on the query
string Label = 6; // Label is the name of the Y-Axis
Range Range = 7; // Range is the upper and lower bound of the Y-Axis
string Source = 8; // Source is the optional URI to the data source
repeated TimeShift Shifts = 9; // TimeShift represents a shift to apply to an influxql query's time range
}
message TimeShift {
string Label = 1; // Label user facing description
string Unit = 2; // Unit influxql time unit representation i.e. ms, s, m, h, d
string Quantity = 3; // Quantity number of units
}
message Range {
int64 Upper = 1; // Upper is the upper-bound of the range
int64 Lower = 2; // Lower is the lower-bound of the range
int64 Upper = 1; // Upper is the upper-bound of the range
int64 Lower = 2; // Lower is the lower-bound of the range
}
message AlertRule {
string ID = 1; // ID is the unique ID of this alert rule
string JSON = 2; // JSON byte representation of the alert
int64 SrcID = 3; // SrcID is the id of the source this alert is associated with
int64 KapaID = 4; // KapaID is the id of the kapacitor this alert is associated with
string ID = 1; // ID is the unique ID of this alert rule
string JSON = 2; // JSON byte representation of the alert
int64 SrcID = 3; // SrcID is the id of the source this alert is associated with
int64 KapaID = 4; // KapaID is the id of the kapacitor this alert is associated with
}
message User {
uint64 ID = 1; // ID is the unique ID of this user
string Name = 2; // Name is the user's login name
uint64 ID = 1; // ID is the unique ID of this user
string Name = 2; // Name is the user's login name
}
// The following is a vim modeline, it autoconfigures vim to have the

View File

@ -163,6 +163,7 @@ func Test_MarshalDashboard(t *testing.T) {
Upper: int64(100),
},
Source: "/chronograf/v1/sources/1",
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@ -210,6 +211,7 @@ func Test_MarshalDashboard_WithLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@ -241,6 +243,7 @@ func Test_MarshalDashboard_WithLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@ -285,6 +288,7 @@ func Test_MarshalDashboard_WithEmptyLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{
@ -316,6 +320,7 @@ func Test_MarshalDashboard_WithEmptyLegacyBounds(t *testing.T) {
Range: &chronograf.Range{
Upper: int64(100),
},
Shifts: []chronograf.TimeShift{},
},
},
Axes: map[string]chronograf.Axis{

View File

@ -1,21 +1,10 @@
package chronograf
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"regexp"
"strconv"
"strings"
"time"
"unicode"
"unicode/utf8"
"github.com/influxdata/influxdb/influxql"
)
// General errors.
@ -136,196 +125,17 @@ type Range struct {
Lower int64 `json:"lower"` // Lower is the lower bound
}
type TemplateVariable interface {
fmt.Stringer
Name() string // returns the variable name
Precedence() uint // ordinal indicating precedence level for replacement
}
type ExecutableVar interface {
Exec(string)
}
// TemplateValue is a value use to replace a template in an InfluxQL query
type BasicTemplateValue struct {
type TemplateValue struct {
Value string `json:"value"` // Value is the specific value used to replace a template in an InfluxQL query
Type string `json:"type"` // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
Selected bool `json:"selected"` // Selected states that this variable has been picked to use for replacement
}
// TemplateVar is a named variable within an InfluxQL query to be replaced with Values
type BasicTemplateVar struct {
Var string `json:"tempVar"` // Var is the string to replace within InfluxQL
Values []BasicTemplateValue `json:"values"` // Values are the replacement values within InfluxQL
}
func (t BasicTemplateVar) Name() string {
return t.Var
}
// String converts the template variable into a correct InfluxQL string based
// on its type
func (t BasicTemplateVar) String() string {
if len(t.Values) == 0 {
return ""
}
switch t.Values[0].Type {
case "tagKey", "fieldKey", "measurement", "database":
return `"` + t.Values[0].Value + `"`
case "tagValue", "timeStamp":
return `'` + t.Values[0].Value + `'`
case "csv", "constant":
return t.Values[0].Value
default:
return ""
}
}
func (t BasicTemplateVar) Precedence() uint {
return 0
}
type GroupByVar struct {
Var string `json:"tempVar"` // the name of the variable as present in the query
Duration time.Duration `json:"duration,omitempty"` // the Duration supplied by the query
Resolution uint `json:"resolution"` // the available screen resolution to render the results of this query
ReportingInterval time.Duration `json:"reportingInterval,omitempty"` // the interval at which data is reported to this series
}
// Exec is responsible for extracting the Duration from the query
func (g *GroupByVar) Exec(query string) {
whereClause := "WHERE"
start := strings.Index(query, whereClause)
if start == -1 {
// no where clause
return
}
// reposition start to after the 'where' keyword
durStr := query[start+len(whereClause):]
// attempt to parse out a relative time range
// locate duration literal start
prefix := "time > now() - "
lowerDuration, err := g.parseRelative(durStr, prefix)
if err == nil {
prefix := "time < now() - "
upperDuration, err := g.parseRelative(durStr, prefix)
if err != nil {
g.Duration = lowerDuration
return
}
g.Duration = lowerDuration - upperDuration
if g.Duration < 0 {
g.Duration = -g.Duration
}
}
dur, err := g.parseAbsolute(durStr)
if err == nil {
// we found an absolute time range
g.Duration = dur
}
}
// parseRelative locates and extracts a duration value from a fragment of an
// InfluxQL query following the "where" keyword. For example, in the fragment
// "time > now() - 180d GROUP BY :interval:", parseRelative would return a
// duration equal to 180d
func (g *GroupByVar) parseRelative(fragment string, prefix string) (time.Duration, error) {
start := strings.Index(fragment, prefix)
if start == -1 {
return time.Duration(0), errors.New("not a relative duration")
}
// reposition to duration literal
durFragment := fragment[start+len(prefix):]
// init counters
pos := 0
// locate end of duration literal
for pos < len(durFragment) {
rn, _ := utf8.DecodeRuneInString(durFragment[pos:])
if unicode.IsSpace(rn) {
break
}
pos++
}
// attempt to parse what we suspect is a duration literal
dur, err := influxql.ParseDuration(durFragment[:pos])
if err != nil {
return dur, err
}
return dur, nil
}
// parseAbsolute will determine the duration between two absolute timestamps
// found within an InfluxQL fragment following the "where" keyword. For
// example, the fragement "time > '1985-10-25T00:01:21-0800 and time <
// '1985-10-25T00:01:22-0800'" would yield a duration of 1m'
func (g *GroupByVar) parseAbsolute(fragment string) (time.Duration, error) {
timePtn := `time\s[>|<]\s'([0-9\-T\:\.Z]+)'` // Playground: http://gobular.com/x/208f66bd-1889-4269-ab47-1efdfeeb63f0
re, err := regexp.Compile(timePtn)
if err != nil {
// this is a developer error and should complain loudly
panic("Bad Regex: err:" + err.Error())
}
if !re.Match([]byte(fragment)) {
return time.Duration(0), errors.New("absolute duration not found")
}
// extract at most two times
matches := re.FindAll([]byte(fragment), 2)
// parse out absolute times
durs := make([]time.Time, 0, 2)
for _, match := range matches {
durStr := re.FindSubmatch(match)
if tm, err := time.Parse(time.RFC3339Nano, string(durStr[1])); err == nil {
durs = append(durs, tm)
}
}
if len(durs) == 1 {
durs = append(durs, time.Now())
}
// reject more than 2 times found
if len(durs) != 2 {
return time.Duration(0), errors.New("must provide exactly two absolute times")
}
dur := durs[1].Sub(durs[0])
return dur, nil
}
func (g *GroupByVar) String() string {
// The function is: ((total_seconds * millisecond_converstion) / group_by) = pixels / 3
// Number of points given the pixels
pixels := float64(g.Resolution) / 3.0
msPerPixel := float64(g.Duration/time.Millisecond) / pixels
secPerPixel := float64(g.Duration/time.Second) / pixels
if secPerPixel < 1.0 {
if msPerPixel < 1.0 {
msPerPixel = 1.0
}
return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
}
// If groupby is more than 1 second round to the second
return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
}
func (g *GroupByVar) Name() string {
return g.Var
}
func (g *GroupByVar) Precedence() uint {
return 1
type TemplateVar struct {
Var string `json:"tempVar"` // Var is the string to replace within InfluxQL
Values []TemplateValue `json:"values"` // Values are the replacement values within InfluxQL
}
// TemplateID is the unique ID used to identify a template
@ -333,7 +143,7 @@ type TemplateID string
// Template represents a series of choices to replace TemplateVars within InfluxQL
type Template struct {
BasicTemplateVar
TemplateVar
ID TemplateID `json:"id"` // ID is the unique ID associated with this template
Type string `json:"type"` // Type can be fieldKeys, tagKeys, tagValues, CSV, constant, query, measurements, databases
Label string `json:"label"` // Label is a user-facing description of the Template
@ -342,69 +152,15 @@ type Template struct {
// Query retrieves a Response from a TimeSeries.
type Query struct {
Command string `json:"query"` // Command is the query itself
DB string `json:"db,omitempty"` // DB is optional and if empty will not be used.
RP string `json:"rp,omitempty"` // RP is a retention policy and optional; if empty will not be used.
TemplateVars TemplateVars `json:"tempVars,omitempty"` // TemplateVars are template variables to replace within an InfluxQL query
Wheres []string `json:"wheres,omitempty"` // Wheres restricts the query to certain attributes
GroupBys []string `json:"groupbys,omitempty"` // GroupBys collate the query by these tags
Resolution uint `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
Label string `json:"label,omitempty"` // Label is the Y-Axis label for the data
Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
}
// TemplateVars are a heterogeneous collection of different TemplateVariables
// with the capability to decode arbitrary JSON into the appropriate template
// variable type
type TemplateVars []TemplateVariable
func (t *TemplateVars) UnmarshalJSON(text []byte) error {
// TODO: Need to test that server throws an error when :interval:'s Resolution or ReportingInterval or zero-value
rawVars := bytes.NewReader(text)
dec := json.NewDecoder(rawVars)
// read open bracket
rawTok, err := dec.Token()
if err != nil {
return err
}
tok, isDelim := rawTok.(json.Delim)
if !isDelim || tok != '[' {
return errors.New("Expected JSON array, but found " + tok.String())
}
for dec.More() {
var halfBakedVar json.RawMessage
err := dec.Decode(&halfBakedVar)
if err != nil {
return err
}
var agb GroupByVar
err = json.Unmarshal(halfBakedVar, &agb)
if err != nil {
return err
}
// ensure that we really have a GroupByVar
if agb.Resolution != 0 {
(*t) = append(*t, &agb)
continue
}
var tvar BasicTemplateVar
err = json.Unmarshal(halfBakedVar, &tvar)
if err != nil {
return err
}
// ensure that we really have a BasicTemplateVar
if len(tvar.Values) != 0 {
(*t) = append(*t, tvar)
}
}
return nil
Command string `json:"query"` // Command is the query itself
DB string `json:"db,omitempty"` // DB is optional and if empty will not be used.
RP string `json:"rp,omitempty"` // RP is a retention policy and optional; if empty will not be used.
TemplateVars []TemplateVar `json:"tempVars,omitempty"` // TemplateVars are template variables to replace within an InfluxQL query
Wheres []string `json:"wheres,omitempty"` // Wheres restricts the query to certain attributes
GroupBys []string `json:"groupbys,omitempty"` // GroupBys collate the query by these tags
Resolution uint `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
Label string `json:"label,omitempty"` // Label is the Y-Axis label for the data
Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
}
// DashboardQuery includes state for the query builder. This is a transition
@ -415,6 +171,7 @@ type DashboardQuery struct {
Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
QueryConfig QueryConfig `json:"queryConfig,omitempty"` // QueryConfig represents the query state that is understood by the data explorer
Source string `json:"source"` // Source is the optional URI to the data source for this queryConfig
Shifts []TimeShift `json:"-"` // Shifts represents shifts to apply to an influxql query's time range. Clients expect the shift to be in the generated QueryConfig
}
// TemplateQuery is used to retrieve choices for template replacement
@ -528,6 +285,13 @@ type DurationRange struct {
Lower string `json:"lower"`
}
// TimeShift represents a shift to apply to an influxql query's time range
type TimeShift struct {
Label string `json:"label"` // Label user facing description
Unit string `json:"unit"` // Unit influxql time unit representation i.e. ms, s, m, h, d
Quantity string `json:"quantity"` // Quantity number of units
}
// QueryConfig represents UI query from the data explorer
type QueryConfig struct {
ID string `json:"id,omitempty"`
@ -541,6 +305,7 @@ type QueryConfig struct {
Fill string `json:"fill,omitempty"`
RawText *string `json:"rawText"`
Range *DurationRange `json:"range"`
Shifts []TimeShift `json:"shifts"`
}
// KapacitorNode adds arguments and properties to an alert

View File

@ -1,63 +0,0 @@
package chronograf_test
import (
"testing"
"github.com/influxdata/chronograf"
)
func Test_GroupByVar(t *testing.T) {
gbvTests := []struct {
name string
query string
want string
resolution uint // the screen resolution to render queries into
}{
{
name: "relative time only lower bound with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
resolution: 1000,
want: "time(259s)",
},
{
name: "relative time with relative upper bound with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
resolution: 1000,
want: "time(180ms)",
},
{
name: "relative time with relative lower bound and now upper with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
resolution: 1000,
want: "time(259s)",
},
{
name: "absolute time with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
resolution: 1000,
want: "time(180ms)",
},
{
name: "absolute time with nano seconds and zero duraiton",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
resolution: 1000,
want: "time(1ms)",
},
}
for _, test := range gbvTests {
t.Run(test.name, func(t *testing.T) {
gbv := chronograf.GroupByVar{
Var: ":interval:",
Resolution: test.resolution,
}
gbv.Exec(test.query)
got := gbv.String()
if got != test.want {
t.Fatalf("%q - durations not equal! Want: %s, Got: %s", test.name, test.want, got)
}
})
}
}

View File

@ -9,6 +9,7 @@ import (
"net/http"
"net/url"
"strings"
"time"
"github.com/influxdata/chronograf"
)
@ -55,7 +56,10 @@ func (c *Client) query(u *url.URL, q chronograf.Query) (chronograf.Response, err
command := q.Command
// TODO(timraymond): move this upper Query() function
if len(q.TemplateVars) > 0 {
command = TemplateReplace(q.Command, q.TemplateVars)
command, err = TemplateReplace(q.Command, q.TemplateVars, time.Now())
if err != nil {
return nil, err
}
}
logs := c.Logger.
WithField("component", "proxy").

View File

@ -276,11 +276,11 @@ func Test_Influx_HTTPS_InsecureSkipVerify(t *testing.T) {
called = false
q = ""
query = chronograf.Query{
Command: "select $field from cpu",
TemplateVars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
Command: "select :field: from cpu",
TemplateVars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: ":field:",
Values: []chronograf.TemplateValue{
{
Value: "usage_user",
Type: "fieldKey",

View File

@ -10,6 +10,52 @@ import (
"github.com/influxdata/influxdb/influxql"
)
func TimeRangeAsEpochNano(expr influxql.Expr, now time.Time) (min, max int64, err error) {
tmin, tmax, err := influxql.TimeRange(expr)
if err != nil {
return 0, 0, err
}
if tmin.IsZero() {
min = time.Unix(0, influxql.MinTime).UnixNano()
} else {
min = tmin.UnixNano()
}
if tmax.IsZero() {
max = now.UnixNano()
} else {
max = tmax.UnixNano()
}
return
}
const WhereToken = "WHERE"
func ParseTime(influxQL string, now time.Time) (time.Duration, error) {
start := strings.Index(strings.ToUpper(influxQL), WhereToken)
if start == -1 {
return 0, fmt.Errorf("not a relative duration")
}
start += len(WhereToken)
where := influxQL[start:]
cond, err := influxql.ParseExpr(where)
if err != nil {
return 0, err
}
nowVal := &influxql.NowValuer{
Now: now,
}
cond = influxql.Reduce(cond, nowVal)
min, max, err := TimeRangeAsEpochNano(cond, now)
if err != nil {
return 0, err
}
dur := time.Duration(max - min)
if dur < 0 {
dur = 0
}
return dur, nil
}
// Convert changes an InfluxQL query to a QueryConfig
func Convert(influxQL string) (chronograf.QueryConfig, error) {
itsDashboardTime := false

View File

@ -2,6 +2,7 @@ package influx
import (
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/influxdata/chronograf"
@ -767,3 +768,43 @@ func TestConvert(t *testing.T) {
})
}
}
func TestParseTime(t *testing.T) {
tests := []struct {
name string
influxQL string
now string
want time.Duration
wantErr bool
}{
{
name: "time equal",
now: "2000-01-01T00:00:00Z",
influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
want: 0,
},
{
name: "time shifted by one hour",
now: "2000-01-01T00:00:00Z",
influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
want: 3599999999998,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
now, err := time.Parse(time.RFC3339, tt.now)
if err != nil {
t.Fatalf("%v", err)
}
got, err := ParseTime(tt.influxQL, now)
if (err != nil) != tt.wantErr {
t.Errorf("ParseTime() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Logf("%d", got)
t.Errorf("ParseTime() = %v, want %v", got, tt.want)
}
})
}
}

View File

@ -1,40 +1,106 @@
package influx
import (
"sort"
"strconv"
"strings"
"time"
"github.com/influxdata/chronograf"
)
// TemplateReplace replaces templates with values within the query string
func TemplateReplace(query string, templates chronograf.TemplateVars) string {
tvarsByPrecedence := make(map[uint]chronograf.TemplateVars, len(templates))
maxPrecedence := uint(0)
for _, tmp := range templates {
precedence := tmp.Precedence()
if precedence > maxPrecedence {
maxPrecedence = precedence
}
tvarsByPrecedence[precedence] = append(tvarsByPrecedence[precedence], tmp)
}
replaced := query
for prc := uint(0); prc <= maxPrecedence; prc++ {
replacements := []string{}
for _, v := range tvarsByPrecedence[prc] {
if evar, ok := v.(chronograf.ExecutableVar); ok {
evar.Exec(replaced)
}
newVal := v.String()
if newVal != "" {
replacements = append(replacements, v.Name(), newVal)
}
func SortTemplates(ts []chronograf.TemplateVar) []chronograf.TemplateVar {
sort.Slice(ts, func(i, j int) bool {
if len(ts[i].Values) != len(ts[j].Values) {
return len(ts[i].Values) < len(ts[j].Values)
}
replacer := strings.NewReplacer(replacements...)
replaced = replacer.Replace(replaced)
}
if len(ts[i].Values) == 0 {
return i < j
}
return replaced
for k := range ts[i].Values {
if ts[i].Values[k].Type != ts[j].Values[k].Type {
return ts[i].Values[k].Type < ts[j].Values[k].Type
}
if ts[i].Values[k].Value != ts[j].Values[k].Value {
return ts[i].Values[k].Value < ts[j].Values[k].Value
}
}
return i < j
})
return ts
}
// RenderTemplate converts the template variable into a correct InfluxQL string based
// on its type
func RenderTemplate(query string, t chronograf.TemplateVar, now time.Time) (string, error) {
if len(t.Values) == 0 {
return query, nil
}
switch t.Values[0].Type {
case "tagKey", "fieldKey", "measurement", "database":
return strings.Replace(query, t.Var, `"`+t.Values[0].Value+`"`, -1), nil
case "tagValue", "timeStamp":
return strings.Replace(query, t.Var, `'`+t.Values[0].Value+`'`, -1), nil
case "csv", "constant":
return strings.Replace(query, t.Var, t.Values[0].Value, -1), nil
}
tv := map[string]string{}
for i := range t.Values {
tv[t.Values[i].Type] = t.Values[i].Value
}
if res, ok := tv["resolution"]; ok {
resolution, err := strconv.ParseInt(res, 0, 64)
if err != nil {
return "", err
}
ppp, ok := tv["pointsPerPixel"]
if !ok {
ppp = "3"
}
pixelsPerPoint, err := strconv.ParseInt(ppp, 0, 64)
if err != nil {
return "", err
}
dur, err := ParseTime(query, now)
if err != nil {
return "", err
}
interval := AutoGroupBy(resolution, pixelsPerPoint, dur)
return strings.Replace(query, t.Var, interval, -1), nil
}
return query, nil
}
func AutoGroupBy(resolution, pixelsPerPoint int64, duration time.Duration) string {
// The function is: ((total_seconds * millisecond_converstion) / group_by) = pixels / 3
// Number of points given the pixels
pixels := float64(resolution) / float64(pixelsPerPoint)
msPerPixel := float64(duration/time.Millisecond) / pixels
secPerPixel := float64(duration/time.Second) / pixels
if secPerPixel < 1.0 {
if msPerPixel < 1.0 {
msPerPixel = 1.0
}
return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
}
// If groupby is more than 1 second round to the second
return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
}
// TemplateReplace replaces templates with values within the query string
func TemplateReplace(query string, templates []chronograf.TemplateVar, now time.Time) (string, error) {
templates = SortTemplates(templates)
for i := range templates {
var err error
query, err = RenderTemplate(query, templates[i], now)
if err != nil {
return "", err
}
}
return query, nil
}

View File

@ -2,6 +2,7 @@ package influx
import (
"encoding/json"
"fmt"
"reflect"
"testing"
"time"
@ -13,43 +14,43 @@ func TestTemplateReplace(t *testing.T) {
tests := []struct {
name string
query string
vars chronograf.TemplateVars
vars []chronograf.TemplateVar
want string
}{
{
name: "select with parameters",
query: "$METHOD field1, $field FROM $measurement WHERE temperature > $temperature",
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
Var: "$temperature",
Values: []chronograf.BasicTemplateValue{
query: ":method: field1, :field: FROM :measurement: WHERE temperature > :temperature:",
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: ":temperature:",
Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: "10",
},
},
},
chronograf.BasicTemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
chronograf.TemplateVar{
Var: ":field:",
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
Value: "field2",
},
},
},
chronograf.BasicTemplateVar{
Var: "$METHOD",
Values: []chronograf.BasicTemplateValue{
chronograf.TemplateVar{
Var: ":method:",
Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: "SELECT",
},
},
},
chronograf.BasicTemplateVar{
Var: "$measurement",
Values: []chronograf.BasicTemplateValue{
chronograf.TemplateVar{
Var: ":measurement:",
Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: `"cpu"`,
@ -62,28 +63,28 @@ func TestTemplateReplace(t *testing.T) {
{
name: "select with parameters and aggregates",
query: `SELECT mean($field) FROM "cpu" WHERE $tag = $value GROUP BY $tag`,
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: "$value",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "tagValue",
Value: "howdy.com",
},
},
},
chronograf.BasicTemplateVar{
chronograf.TemplateVar{
Var: "$tag",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "tagKey",
Value: "host",
},
},
},
chronograf.BasicTemplateVar{
chronograf.TemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
Value: "field",
@ -101,8 +102,8 @@ func TestTemplateReplace(t *testing.T) {
{
name: "var without a value",
query: `SELECT $field FROM "cpu"`,
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: "$field",
},
},
@ -111,10 +112,10 @@ func TestTemplateReplace(t *testing.T) {
{
name: "var with unknown type",
query: `SELECT $field FROM "cpu"`,
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "who knows?",
Value: "field",
@ -127,42 +128,63 @@ func TestTemplateReplace(t *testing.T) {
{
name: "auto group by",
query: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by :interval:`,
vars: chronograf.TemplateVars{
&chronograf.GroupByVar{
Var: ":interval:",
Duration: 180 * 24 * time.Hour,
Resolution: 1000,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
},
want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46656s)`,
want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by without duration",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by :interval:`,
vars: chronograf.TemplateVars{
&chronograf.GroupByVar{
Var: ":interval:",
Duration: 0 * time.Minute,
Resolution: 1000,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
},
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by with :dashboardTime:",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > :dashboardTime: group by :interval:`,
vars: chronograf.TemplateVars{
&chronograf.GroupByVar{
Var: ":interval:",
Duration: 0 * time.Minute,
Resolution: 1000,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
&chronograf.BasicTemplateVar{
{
Var: ":dashboardTime:",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "constant",
Value: "now() - 4320h",
@ -170,20 +192,28 @@ func TestTemplateReplace(t *testing.T) {
},
},
},
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by failing condition",
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`,
vars: []chronograf.TemplateVariable{
&chronograf.GroupByVar{
Var: ":interval:",
Resolution: 115,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "115",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
chronograf.BasicTemplateVar{
{
Var: ":dashboardTime:",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Value: "now() - 1h",
Type: "constant",
@ -197,7 +227,14 @@ func TestTemplateReplace(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := TemplateReplace(tt.query, tt.vars)
now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
if err != nil {
t.Fatal(err)
}
got, err := TemplateReplace(tt.query, tt.vars, now)
if err != nil {
t.Fatalf("TestParse unexpected TemplateReplace error: %v", err)
}
if got != tt.want {
t.Errorf("TestParse %s =\n%s\nwant\n%s", tt.name, got, tt.want)
}
@ -209,8 +246,20 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
req := `[
{
"tempVar": ":interval:",
"resolution": 1000,
"reportingInterval": 10
"values": [
{
"value": "1000",
"type": "resolution"
},
{
"value": "3",
"type": "pointsPerPixel"
},
{
"value": "10",
"type": "reportingInterval"
}
]
},
{
"tempVar": ":cpu:",
@ -224,15 +273,27 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
}
]`
expected := []chronograf.TemplateVariable{
&chronograf.GroupByVar{
Var: ":interval:",
Resolution: 1000,
ReportingInterval: 10 * time.Nanosecond,
want := []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
{
Value: "10",
Type: "reportingInterval",
},
},
},
chronograf.BasicTemplateVar{
{
Var: ":cpu:",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Value: "cpu-total",
Type: "tagValue",
@ -242,65 +303,128 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
},
}
var tvars chronograf.TemplateVars
err := json.Unmarshal([]byte(req), &tvars)
var got []chronograf.TemplateVar
err := json.Unmarshal([]byte(req), &got)
if err != nil {
t.Fatal("Err unmarshaling:", err)
}
if len(tvars) != len(expected) {
t.Fatal("Expected", len(expected), "vars but found", len(tvars))
}
if !reflect.DeepEqual(*(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar))) {
t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", *(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar)))
}
if !reflect.DeepEqual(tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar)) {
t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar))
if !reflect.DeepEqual(got, want) {
t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", got, want)
}
}
func TestGroupByVarString(t *testing.T) {
func TestAutoGroupBy(t *testing.T) {
tests := []struct {
name string
tvar *chronograf.GroupByVar
want string
name string
resolution int64
pixelsPerPoint int64
duration time.Duration
want string
}{
{
name: "String() calculates the GROUP BY interval",
tvar: &chronograf.GroupByVar{
Resolution: 700,
ReportingInterval: 10 * time.Second,
Duration: 24 * time.Hour,
},
want: "time(370s)",
name: "String() calculates the GROUP BY interval",
resolution: 700,
pixelsPerPoint: 3,
duration: 24 * time.Hour,
want: "time(370s)",
},
{
name: "String() milliseconds if less than one second intervals",
tvar: &chronograf.GroupByVar{
Resolution: 100000,
ReportingInterval: 10 * time.Second,
Duration: time.Hour,
},
want: "time(107ms)",
name: "String() milliseconds if less than one second intervals",
resolution: 100000,
pixelsPerPoint: 3,
duration: time.Hour,
want: "time(107ms)",
},
{
name: "String() milliseconds if less than one millisecond",
tvar: &chronograf.GroupByVar{
Resolution: 100000,
ReportingInterval: 10 * time.Second,
Duration: time.Second,
},
want: "time(1ms)",
name: "String() milliseconds if less than one millisecond",
resolution: 100000,
pixelsPerPoint: 3,
duration: time.Second,
want: "time(1ms)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := tt.tvar.String()
got := AutoGroupBy(tt.resolution, tt.pixelsPerPoint, tt.duration)
if got != tt.want {
t.Errorf("TestGroupByVarString %s =\n%s\nwant\n%s", tt.name, got, tt.want)
t.Errorf("TestAutoGroupBy %s =\n%s\nwant\n%s", tt.name, got, tt.want)
}
})
}
}
func Test_RenderTemplate(t *testing.T) {
gbvTests := []struct {
name string
query string
want string
resolution uint // the screen resolution to render queries into
}{
{
name: "relative time only lower bound with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY time(259s)",
},
{
name: "relative time offset by week",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY time(259s)",
},
{
name: "relative time with relative upper bound with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY time(179ms)",
},
{
name: "relative time with relative lower bound and now upper with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY time(259s)",
},
{
name: "absolute time with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY time(179ms)",
},
{
name: "absolute time with nano seconds and zero duraiton",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY time(1ms)",
},
}
for _, tt := range gbvTests {
t.Run(tt.name, func(t *testing.T) {
now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
if err != nil {
t.Fatal(err)
}
tvar := chronograf.TemplateVar{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: fmt.Sprintf("%d", tt.resolution),
Type: "resolution",
},
},
}
got, err := RenderTemplate(tt.query, tvar, now)
if err != nil {
t.Fatalf("unexpected error rendering template %v", err)
}
if got != tt.want {
t.Fatalf("%q - durations not equal! Want: %s, Got: %s", tt.name, tt.want, got)
}
})
}
}
// SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h GROUP BY :interval: FILL(null);SELECT mean("numSeries") AS "mean_numSeries_shifted__1__h" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h AND time < now() - 1h GROUP BY :interval: FILL(null)

View File

@ -31,7 +31,6 @@ func newCellResponses(dID chronograf.DashboardID, dcells []chronograf.DashboardC
cells := make([]dashboardCellResponse, len(dcells))
for i, cell := range dcells {
newCell := chronograf.DashboardCell{}
newCell.Queries = make([]chronograf.DashboardQuery, len(cell.Queries))
copy(newCell.Queries, cell.Queries)
@ -70,7 +69,17 @@ func newCellResponses(dID chronograf.DashboardID, dcells []chronograf.DashboardC
// ValidDashboardCellRequest verifies that the dashboard cells have a query and
// have the correct axes specified
func ValidDashboardCellRequest(c *chronograf.DashboardCell) error {
if c == nil {
return fmt.Errorf("Chronograf dashboard cell was nil")
}
CorrectWidthHeight(c)
for _, q := range c.Queries {
if err := ValidateQueryConfig(&q.QueryConfig); err != nil {
return err
}
}
MoveTimeShift(c)
return HasCorrectAxes(c)
}
@ -115,12 +124,22 @@ func CorrectWidthHeight(c *chronograf.DashboardCell) {
}
}
// MoveTimeShift moves TimeShift from the QueryConfig to the DashboardQuery
func MoveTimeShift(c *chronograf.DashboardCell) {
for i, query := range c.Queries {
query.Shifts = query.QueryConfig.Shifts
c.Queries[i] = query
}
}
// AddQueryConfig updates a cell by converting InfluxQL into queryconfigs
// If influxql cannot be represented by a full query config, then, the
// query config's raw text is set to the command.
func AddQueryConfig(c *chronograf.DashboardCell) {
for i, q := range c.Queries {
qc := ToQueryConfig(q.Command)
qc.Shifts = append([]chronograf.TimeShift(nil), q.Shifts...)
q.Shifts = nil
q.QueryConfig = qc
c.Queries[i] = q
}

View File

@ -162,14 +162,14 @@ func Test_Service_DashboardCells(t *testing.T) {
http.StatusOK,
},
{
"cell axes should always be \"x\", \"y\", and \"y2\"",
&url.URL{
name: "cell axes should always be \"x\", \"y\", and \"y2\"",
reqURL: &url.URL{
Path: "/chronograf/v1/dashboards/1/cells",
},
map[string]string{
ctxParams: map[string]string{
"id": "1",
},
[]chronograf.DashboardCell{
mockResponse: []chronograf.DashboardCell{
{
ID: "3899be5a-f6eb-4347-b949-de2f4fbea859",
X: 0,
@ -182,7 +182,7 @@ func Test_Service_DashboardCells(t *testing.T) {
Axes: map[string]chronograf.Axis{},
},
},
[]chronograf.DashboardCell{
expected: []chronograf.DashboardCell{
{
ID: "3899be5a-f6eb-4347-b949-de2f4fbea859",
X: 0,
@ -205,7 +205,7 @@ func Test_Service_DashboardCells(t *testing.T) {
},
},
},
http.StatusOK,
expectedCode: http.StatusOK,
},
}

View File

@ -219,6 +219,13 @@ func Test_newDashboardResponse(t *testing.T) {
{
Source: "/chronograf/v1/sources/1",
Command: "SELECT donors from hill_valley_preservation_society where time > '1985-10-25 08:00:00'",
Shifts: []chronograf.TimeShift{
{
Label: "Best Week Evar",
Unit: "d",
Quantity: "7",
},
},
},
},
Axes: map[string]chronograf.Axis{
@ -267,6 +274,13 @@ func Test_newDashboardResponse(t *testing.T) {
},
Tags: make(map[string][]string, 0),
AreTagsAccepted: false,
Shifts: []chronograf.TimeShift{
{
Label: "Best Week Evar",
Unit: "d",
Quantity: "7",
},
},
},
},
},

View File

@ -4,6 +4,7 @@ import (
"encoding/json"
"fmt"
"net/http"
"time"
"golang.org/x/net/context"
@ -21,8 +22,8 @@ type QueryRequest struct {
// QueriesRequest converts all queries to queryConfigs with the help
// of the template variables
type QueriesRequest struct {
Queries []QueryRequest `json:"queries"`
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
Queries []QueryRequest `json:"queries"`
TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
}
// QueryResponse is the return result of a QueryRequest including
@ -33,7 +34,7 @@ type QueryResponse struct {
QueryConfig chronograf.QueryConfig `json:"queryConfig"`
QueryAST *queries.SelectStatement `json:"queryAST,omitempty"`
QueryTemplated *string `json:"queryTemplated,omitempty"`
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
}
// QueriesResponse is the response for a QueriesRequest
@ -72,12 +73,18 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
Query: q.Query,
}
query := influx.TemplateReplace(q.Query, req.TemplateVars)
query, err := influx.TemplateReplace(q.Query, req.TemplateVars, time.Now())
if err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
return
}
qc := ToQueryConfig(query)
if err := s.DefaultRP(ctx, &qc, &src); err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
return
}
qc.Shifts = []chronograf.TimeShift{}
qr.QueryConfig = qc
if stmt, err := queries.ParseSelect(query); err == nil {

View File

@ -60,7 +60,7 @@ func TestService_Queries(t *testing.T) {
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
]}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"db","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":null,"range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"db","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]}}]}
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM db.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"db","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":null,"range":{"upper":"","lower":"now() - 1m"},"shifts":[]},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"db","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]}}]}
`,
},
{
@ -81,7 +81,7 @@ func TestService_Queries(t *testing.T) {
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
]}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SHOW DATABASES","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SHOW DATABASES","range":null}}]}
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SHOW DATABASES","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SHOW DATABASES","range":null,"shifts":[]}}]}
`,
},
{
@ -98,7 +98,7 @@ func TestService_Queries(t *testing.T) {
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
"queries": [
{
"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > now() - 1m",
"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > :dashboardTime: AND time < :upperDashboardTime: GROUP BY :interval:",
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
],
@ -153,13 +153,20 @@ func TestService_Queries(t *testing.T) {
"id": "interval",
"type": "constant",
"tempVar": ":interval:",
"resolution": 1000,
"reportingInterval": 10000000000,
"values": []
"values": [
{
"value": "1000",
"type": "resolution"
},
{
"value": "3",
"type": "pointsPerPixel"
}
]
}
]
}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"_internal","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"_internal","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","tempVars":[{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":interval:","duration":60000000000,"resolution":1000,"reportingInterval":10000000000}]}]}
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","range":null,"shifts":[]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 15m AND time \u003c now() GROUP BY time(2s)","tempVars":[{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":interval:","values":[{"value":"1000","type":"resolution","selected":false},{"value":"3","type":"pointsPerPixel","selected":false}]}]}]}
`,
},
}

View File

@ -1,6 +1,8 @@
package server
import (
"fmt"
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/influx"
)
@ -22,3 +24,28 @@ func ToQueryConfig(query string) chronograf.QueryConfig {
Tags: make(map[string][]string, 0),
}
}
var validFieldTypes = map[string]bool{
"func": true,
"field": true,
"integer": true,
"number": true,
"regex": true,
"wildcard": true,
}
// ValidateQueryConfig checks any query config input
func ValidateQueryConfig(q *chronograf.QueryConfig) error {
for _, fld := range q.Fields {
invalid := fmt.Errorf(`invalid field type "%s" ; expect func, field, integer, number, regex, wildcard`, fld.Type)
if !validFieldTypes[fld.Type] {
return invalid
}
for _, arg := range fld.Args {
if !validFieldTypes[arg.Type] {
return invalid
}
}
}
return nil
}

View File

@ -0,0 +1,50 @@
package server
import (
"testing"
"github.com/influxdata/chronograf"
)
func TestValidateQueryConfig(t *testing.T) {
tests := []struct {
name string
q *chronograf.QueryConfig
wantErr bool
}{
{
name: "invalid field type",
q: &chronograf.QueryConfig{
Fields: []chronograf.Field{
{
Type: "invalid",
},
},
},
wantErr: true,
},
{
name: "invalid field args",
q: &chronograf.QueryConfig{
Fields: []chronograf.Field{
{
Type: "func",
Args: []chronograf.Field{
{
Type: "invalid",
},
},
},
},
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if err := ValidateQueryConfig(tt.q); (err != nil) != tt.wantErr {
t.Errorf("ValidateQueryConfig() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}

View File

@ -16,8 +16,8 @@ func TestValidTemplateRequest(t *testing.T) {
name: "Valid Template",
template: &chronograf.Template{
Type: "fieldKeys",
BasicTemplateVar: chronograf.BasicTemplateVar{
Values: []chronograf.BasicTemplateValue{
TemplateVar: chronograf.TemplateVar{
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
},
@ -30,8 +30,8 @@ func TestValidTemplateRequest(t *testing.T) {
wantErr: true,
template: &chronograf.Template{
Type: "Unknown Type",
BasicTemplateVar: chronograf.BasicTemplateVar{
Values: []chronograf.BasicTemplateValue{
TemplateVar: chronograf.TemplateVar{
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
},
@ -44,8 +44,8 @@ func TestValidTemplateRequest(t *testing.T) {
wantErr: true,
template: &chronograf.Template{
Type: "csv",
BasicTemplateVar: chronograf.BasicTemplateVar{
Values: []chronograf.BasicTemplateValue{
TemplateVar: chronograf.TemplateVar{
Values: []chronograf.TemplateValue{
{
Type: "unknown value",
},

View File

@ -17,7 +17,7 @@
"test": "karma start",
"test:integration": "nightwatch tests --skip",
"test:lint": "yarn run lint; yarn run test",
"test:dev": "concurrently \"yarn run lint -- --watch\" \"yarn run test -- --no-single-run --reporters=verbose\"",
"test:dev": "concurrently \"yarn run lint --watch\" \"yarn run test --no-single-run --reporters=verbose\"",
"clean": "rm -rf build",
"storybook": "node ./storybook.js",
"prettier": "prettier --single-quote --trailing-comma es5 --bracket-spacing false --semi false --write \"{src,spec}/**/*.js\"; eslint src --fix"

View File

@ -1,7 +1,9 @@
import reducer from 'src/data_explorer/reducers/queryConfigs'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
fill,
timeShift,
chooseTag,
groupByTag,
groupByTime,
@ -26,63 +28,63 @@ const fakeAddQueryAction = (panelID, queryID) => {
}
}
function buildInitialState(queryId, params) {
return Object.assign({}, defaultQueryConfig({id: queryId}), params)
function buildInitialState(queryID, params) {
return Object.assign({}, defaultQueryConfig({id: queryID}), params)
}
describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const queryId = 123
const queryID = 123
it('can add a query', () => {
const state = reducer({}, fakeAddQueryAction('blah', queryId))
const state = reducer({}, fakeAddQueryAction('blah', queryID))
const actual = state[queryId]
const expected = defaultQueryConfig({id: queryId})
const actual = state[queryID]
const expected = defaultQueryConfig({id: queryID})
expect(actual).to.deep.equal(expected)
})
describe('choosing db, rp, and measurement', () => {
let state
beforeEach(() => {
state = reducer({}, fakeAddQueryAction('any', queryId))
state = reducer({}, fakeAddQueryAction('any', queryID))
})
it('sets the db and rp', () => {
const newState = reducer(
state,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: 'telegraf',
retentionPolicy: 'monitor',
})
)
expect(newState[queryId].database).to.equal('telegraf')
expect(newState[queryId].retentionPolicy).to.equal('monitor')
expect(newState[queryID].database).to.equal('telegraf')
expect(newState[queryID].retentionPolicy).to.equal('monitor')
})
it('sets the measurement', () => {
const newState = reducer(state, chooseMeasurement(queryId, 'mem'))
const newState = reducer(state, chooseMeasurement(queryID, 'mem'))
expect(newState[queryId].measurement).to.equal('mem')
expect(newState[queryID].measurement).to.equal('mem')
})
})
describe('a query has measurements and fields', () => {
let state
beforeEach(() => {
const one = reducer({}, fakeAddQueryAction('any', queryId))
const one = reducer({}, fakeAddQueryAction('any', queryID))
const two = reducer(
one,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: '_internal',
retentionPolicy: 'daily',
})
)
const three = reducer(two, chooseMeasurement(queryId, 'disk'))
const three = reducer(two, chooseMeasurement(queryID, 'disk'))
state = reducer(
three,
addInitialField(queryId, {
addInitialField(queryID, {
value: 'a great field',
type: 'field',
})
@ -92,91 +94,91 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
describe('choosing a new namespace', () => {
it('clears out the old measurement and fields', () => {
// what about tags?
expect(state[queryId].measurement).to.equal('disk')
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].measurement).to.equal('disk')
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: 'newdb',
retentionPolicy: 'newrp',
})
)
expect(newState[queryId].measurement).to.be.null
expect(newState[queryId].fields.length).to.equal(0)
expect(newState[queryID].measurement).to.be.null
expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('choosing a new measurement', () => {
it('leaves the namespace and clears out the old fields', () => {
// what about tags?
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
chooseMeasurement(queryId, 'newmeasurement')
chooseMeasurement(queryID, 'newmeasurement')
)
expect(state[queryId].database).to.equal(newState[queryId].database)
expect(state[queryId].retentionPolicy).to.equal(
newState[queryId].retentionPolicy
expect(state[queryID].database).to.equal(newState[queryID].database)
expect(state[queryID].retentionPolicy).to.equal(
newState[queryID].retentionPolicy
)
expect(newState[queryId].fields.length).to.equal(0)
expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('DE_TOGGLE_FIELD', () => {
it('can toggle multiple fields', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
toggleField(queryId, {
toggleField(queryID, {
value: 'f2',
type: 'field',
})
)
expect(newState[queryId].fields.length).to.equal(2)
expect(newState[queryId].fields[1].alias).to.deep.equal('mean_f2')
expect(newState[queryId].fields[1].args).to.deep.equal([
expect(newState[queryID].fields.length).to.equal(2)
expect(newState[queryID].fields[1].alias).to.deep.equal('mean_f2')
expect(newState[queryID].fields[1].args).to.deep.equal([
{value: 'f2', type: 'field'},
])
expect(newState[queryId].fields[1].value).to.deep.equal('mean')
expect(newState[queryID].fields[1].value).to.deep.equal('mean')
})
it('applies a func to newly selected fields', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryId].fields[0].type).to.equal('func')
expect(state[queryId].fields[0].value).to.equal('mean')
expect(state[queryID].fields.length).to.equal(1)
expect(state[queryID].fields[0].type).to.equal('func')
expect(state[queryID].fields[0].value).to.equal('mean')
const newState = reducer(
state,
toggleField(queryId, {
toggleField(queryID, {
value: 'f2',
type: 'field',
})
)
expect(newState[queryId].fields[1].value).to.equal('mean')
expect(newState[queryId].fields[1].alias).to.equal('mean_f2')
expect(newState[queryId].fields[1].args).to.deep.equal([
expect(newState[queryID].fields[1].value).to.equal('mean')
expect(newState[queryID].fields[1].alias).to.equal('mean_f2')
expect(newState[queryID].fields[1].args).to.deep.equal([
{value: 'f2', type: 'field'},
])
expect(newState[queryId].fields[1].type).to.equal('func')
expect(newState[queryID].fields[1].type).to.equal('func')
})
it('adds the field property to query config if not found', () => {
delete state[queryId].fields
expect(state[queryId].fields).to.equal(undefined)
delete state[queryID].fields
expect(state[queryID].fields).to.equal(undefined)
const newState = reducer(
state,
toggleField(queryId, {value: 'fk1', type: 'field'})
toggleField(queryID, {value: 'fk1', type: 'field'})
)
expect(newState[queryId].fields.length).to.equal(1)
expect(newState[queryID].fields.length).to.equal(1)
})
})
})
@ -189,7 +191,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const f4 = {value: 'f4', type: 'field'}
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -201,7 +203,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
},
}
const action = applyFuncsToField(queryId, {
const action = applyFuncsToField(queryID, {
field: {value: 'f1', type: 'field'},
funcs: [
{value: 'fn3', type: 'func', args: []},
@ -211,7 +213,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const nextState = reducer(initialState, action)
expect(nextState[queryId].fields).to.deep.equal([
expect(nextState[queryID].fields).to.deep.equal([
{value: 'fn3', type: 'func', args: [f1], alias: `fn3_${f1.value}`},
{value: 'fn4', type: 'func', args: [f1], alias: `fn4_${f1.value}`},
{value: 'fn1', type: 'func', args: [f2], alias: `fn1_${f2.value}`},
@ -230,7 +232,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const groupBy = {time: '1m', tags: []}
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -239,35 +241,35 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
},
}
const action = removeFuncs(queryId, fields, groupBy)
const action = removeFuncs(queryID, fields, groupBy)
const nextState = reducer(initialState, action)
const actual = nextState[queryId].fields
const actual = nextState[queryID].fields
const expected = [f1, f2]
expect(actual).to.eql(expected)
expect(nextState[queryId].groupBy.time).to.equal(null)
expect(nextState[queryID].groupBy.time).to.equal(null)
})
})
describe('DE_CHOOSE_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {
k1: ['v0'],
k2: ['foo'],
},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
expect(nextState[queryID].tags).to.eql({
k1: ['v0', 'v1'],
k2: ['foo'],
})
@ -275,31 +277,31 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
it("creates a new entry if it's the first key", () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
expect(nextState[queryID].tags).to.eql({
k1: ['v1'],
})
})
it('removes a value that is already in the list', () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {
k1: ['v1'],
},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
@ -307,14 +309,14 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
const nextState = reducer(initialState, action)
// TODO: this should probably remove the `k1` property entirely from the tags object
expect(nextState[queryId].tags).to.eql({})
expect(nextState[queryID].tags).to.eql({})
})
})
describe('DE_GROUP_BY_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -323,11 +325,11 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
groupBy: {tags: [], time: null},
},
}
const action = groupByTag(queryId, 'k1')
const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: ['k1'],
})
@ -335,7 +337,7 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
it('removes a tag if the given tag key is already in the GROUP BY list', () => {
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -344,11 +346,11 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
groupBy: {tags: ['k1'], time: null},
},
}
const action = groupByTag(queryId, 'k1')
const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: [],
})
@ -358,14 +360,14 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
describe('DE_TOGGLE_TAG_ACCEPTANCE', () => {
it('it toggles areTagsAccepted', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = toggleTagAcceptance(queryId)
const action = toggleTagAcceptance(queryID)
const nextState = reducer(initialState, action)
expect(nextState[queryId].areTagsAccepted).to.equal(
!initialState[queryId].areTagsAccepted
expect(nextState[queryID].areTagsAccepted).to.equal(
!initialState[queryID].areTagsAccepted
)
})
})
@ -374,99 +376,113 @@ describe('Chronograf.Reducers.DataExplorer.queryConfigs', () => {
it('applys the appropriate group by time', () => {
const time = '100y'
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = groupByTime(queryId, time)
const action = groupByTime(queryID, time)
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy.time).to.equal(time)
expect(nextState[queryID].groupBy.time).to.equal(time)
})
})
it('updates entire config', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const expected = defaultQueryConfig({id: queryId}, {rawText: 'hello'})
const expected = defaultQueryConfig({id: queryID}, {rawText: 'hello'})
const action = updateQueryConfig(expected)
const nextState = reducer(initialState, action)
expect(nextState[queryId]).to.deep.equal(expected)
expect(nextState[queryID]).to.deep.equal(expected)
})
it("updates a query's raw text", () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const text = 'foo'
const action = updateRawQuery(queryId, text)
const action = updateRawQuery(queryID, text)
const nextState = reducer(initialState, action)
expect(nextState[queryId].rawText).to.equal('foo')
expect(nextState[queryID].rawText).to.equal('foo')
})
it("updates a query's raw status", () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const status = 'your query was sweet'
const action = editQueryStatus(queryId, status)
const action = editQueryStatus(queryID, status)
const nextState = reducer(initialState, action)
expect(nextState[queryId].status).to.equal(status)
expect(nextState[queryID].status).to.equal(status)
})
describe('DE_FILL', () => {
it('applies an explicit fill when group by time is used', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const time = '10s'
const action = groupByTime(queryId, time)
const action = groupByTime(queryID, time)
const nextState = reducer(initialState, action)
expect(nextState[queryId].fill).to.equal(NULL_STRING)
expect(nextState[queryID].fill).to.equal(NULL_STRING)
})
it('updates fill to non-null-string non-number string value', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = fill(queryId, LINEAR)
const action = fill(queryID, LINEAR)
const nextState = reducer(initialState, action)
expect(nextState[queryId].fill).to.equal(LINEAR)
expect(nextState[queryID].fill).to.equal(LINEAR)
})
it('updates fill to string integer value', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const INT_STRING = '1337'
const action = fill(queryId, INT_STRING)
const action = fill(queryID, INT_STRING)
const nextState = reducer(initialState, action)
expect(nextState[queryId].fill).to.equal(INT_STRING)
expect(nextState[queryID].fill).to.equal(INT_STRING)
})
it('updates fill to string float value', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const FLOAT_STRING = '1.337'
const action = fill(queryId, FLOAT_STRING)
const action = fill(queryID, FLOAT_STRING)
const nextState = reducer(initialState, action)
expect(nextState[queryId].fill).to.equal(FLOAT_STRING)
expect(nextState[queryID].fill).to.equal(FLOAT_STRING)
})
})
// Covers the new DE_TIME_SHIFT action: the reducer should record the
// shift descriptor on the query config's `shifts` list.
describe('DE_TIME_SHIFT', () => {
  it('can shift the time', () => {
    const initialState = {
      [queryID]: buildInitialState(queryID),
    }

    // shift descriptor: quantity + unit, with the precomputed duration string
    const shift = {quantity: 1, unit: 'd', duration: '1d'}
    const action = timeShift(queryID, shift)
    const nextState = reducer(initialState, action)

    // NOTE(review): asserts replacement, not accumulation — applying a shift
    // yields exactly [shift] regardless of any prior shifts. Confirm intended.
    expect(nextState[queryID].shifts).to.deep.equal([shift])
  })
})
})

View File

@ -1,14 +1,15 @@
import reducer from 'src/kapacitor/reducers/queryConfigs'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
chooseTag,
timeShift,
groupByTag,
toggleField,
groupByTime,
chooseNamespace,
chooseMeasurement,
chooseTag,
groupByTag,
toggleTagAcceptance,
toggleField,
applyFuncsToField,
groupByTime,
toggleTagAcceptance,
} from 'src/kapacitor/actions/queryConfigs'
const fakeAddQueryAction = (panelID, queryID) => {
@ -18,142 +19,142 @@ const fakeAddQueryAction = (panelID, queryID) => {
}
}
function buildInitialState(queryId, params) {
function buildInitialState(queryID, params) {
return Object.assign(
{},
defaultQueryConfig({id: queryId, isKapacitorRule: true}),
defaultQueryConfig({id: queryID, isKapacitorRule: true}),
params
)
}
describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
const queryId = 123
const queryID = 123
it('can add a query', () => {
const state = reducer({}, fakeAddQueryAction('blah', queryId))
const state = reducer({}, fakeAddQueryAction('blah', queryID))
const actual = state[queryId]
const expected = defaultQueryConfig({id: queryId, isKapacitorRule: true})
const actual = state[queryID]
const expected = defaultQueryConfig({id: queryID, isKapacitorRule: true})
expect(actual).to.deep.equal(expected)
})
describe('choosing db, rp, and measurement', () => {
let state
beforeEach(() => {
state = reducer({}, fakeAddQueryAction('any', queryId))
state = reducer({}, fakeAddQueryAction('any', queryID))
})
it('sets the db and rp', () => {
const newState = reducer(
state,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: 'telegraf',
retentionPolicy: 'monitor',
})
)
expect(newState[queryId].database).to.equal('telegraf')
expect(newState[queryId].retentionPolicy).to.equal('monitor')
expect(newState[queryID].database).to.equal('telegraf')
expect(newState[queryID].retentionPolicy).to.equal('monitor')
})
it('sets the measurement', () => {
const newState = reducer(state, chooseMeasurement(queryId, 'mem'))
const newState = reducer(state, chooseMeasurement(queryID, 'mem'))
expect(newState[queryId].measurement).to.equal('mem')
expect(newState[queryID].measurement).to.equal('mem')
})
})
describe('a query has measurements and fields', () => {
let state
beforeEach(() => {
const one = reducer({}, fakeAddQueryAction('any', queryId))
const one = reducer({}, fakeAddQueryAction('any', queryID))
const two = reducer(
one,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: '_internal',
retentionPolicy: 'daily',
})
)
const three = reducer(two, chooseMeasurement(queryId, 'disk'))
const three = reducer(two, chooseMeasurement(queryID, 'disk'))
state = reducer(
three,
toggleField(queryId, {value: 'a great field', funcs: []})
toggleField(queryID, {value: 'a great field', funcs: []})
)
})
describe('choosing a new namespace', () => {
it('clears out the old measurement and fields', () => {
// what about tags?
expect(state[queryId].measurement).to.exist
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].measurement).to.exist
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
chooseNamespace(queryId, {
chooseNamespace(queryID, {
database: 'newdb',
retentionPolicy: 'newrp',
})
)
expect(newState[queryId].measurement).not.to.exist
expect(newState[queryId].fields.length).to.equal(0)
expect(newState[queryID].measurement).not.to.exist
expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('choosing a new measurement', () => {
it('leaves the namespace and clears out the old fields', () => {
// what about tags?
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
chooseMeasurement(queryId, 'newmeasurement')
chooseMeasurement(queryID, 'newmeasurement')
)
expect(state[queryId].database).to.equal(newState[queryId].database)
expect(state[queryId].retentionPolicy).to.equal(
newState[queryId].retentionPolicy
expect(state[queryID].database).to.equal(newState[queryID].database)
expect(state[queryID].retentionPolicy).to.equal(
newState[queryID].retentionPolicy
)
expect(newState[queryId].fields.length).to.equal(0)
expect(newState[queryID].fields.length).to.equal(0)
})
})
describe('when the query is part of a kapacitor rule', () => {
it('only allows one field', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
toggleField(queryId, {value: 'a different field', type: 'field'})
toggleField(queryID, {value: 'a different field', type: 'field'})
)
expect(newState[queryId].fields.length).to.equal(1)
expect(newState[queryId].fields[0].value).to.equal('a different field')
expect(newState[queryID].fields.length).to.equal(1)
expect(newState[queryID].fields[0].value).to.equal('a different field')
})
})
describe('KAPA_TOGGLE_FIELD', () => {
it('cannot toggle multiple fields', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
toggleField(queryId, {value: 'a different field', type: 'field'})
toggleField(queryID, {value: 'a different field', type: 'field'})
)
expect(newState[queryId].fields.length).to.equal(1)
expect(newState[queryId].fields[0].value).to.equal('a different field')
expect(newState[queryID].fields.length).to.equal(1)
expect(newState[queryID].fields[0].value).to.equal('a different field')
})
it('applies no funcs to newly selected fields', () => {
expect(state[queryId].fields.length).to.equal(1)
expect(state[queryID].fields.length).to.equal(1)
const newState = reducer(
state,
toggleField(queryId, {value: 'a different field', type: 'field'})
toggleField(queryID, {value: 'a different field', type: 'field'})
)
expect(newState[queryId].fields[0].type).to.equal('field')
expect(newState[queryID].fields[0].type).to.equal('field')
})
})
})
@ -162,7 +163,7 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it('applies functions to a field without any existing functions', () => {
const f1 = {value: 'f1', type: 'field'}
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -174,13 +175,13 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
},
}
const action = applyFuncsToField(queryId, {
const action = applyFuncsToField(queryID, {
field: {value: 'f1', type: 'field'},
funcs: [{value: 'fn3', type: 'func'}, {value: 'fn4', type: 'func'}],
})
const nextState = reducer(initialState, action)
const actual = nextState[queryId].fields
const actual = nextState[queryID].fields
const expected = [
{value: 'fn3', type: 'func', args: [f1], alias: `fn3_${f1.value}`},
{value: 'fn4', type: 'func', args: [f1], alias: `fn4_${f1.value}`},
@ -193,21 +194,21 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
describe('KAPA_CHOOSE_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {
k1: ['v0'],
k2: ['foo'],
},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
expect(nextState[queryID].tags).to.eql({
k1: ['v0', 'v1'],
k2: ['foo'],
})
@ -215,31 +216,31 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it("creates a new entry if it's the first key", () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
const nextState = reducer(initialState, action)
expect(nextState[queryId].tags).to.eql({
expect(nextState[queryID].tags).to.eql({
k1: ['v1'],
})
})
it('removes a value that is already in the list', () => {
const initialState = {
[queryId]: buildInitialState(queryId, {
[queryID]: buildInitialState(queryID, {
tags: {
k1: ['v1'],
},
}),
}
const action = chooseTag(queryId, {
const action = chooseTag(queryID, {
key: 'k1',
value: 'v1',
})
@ -247,14 +248,14 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
const nextState = reducer(initialState, action)
// TODO: this should probably remove the `k1` property entirely from the tags object
expect(nextState[queryId].tags).to.eql({})
expect(nextState[queryID].tags).to.eql({})
})
})
describe('KAPA_GROUP_BY_TAG', () => {
it('adds a tag key/value to the query', () => {
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -263,11 +264,11 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
groupBy: {tags: [], time: null},
},
}
const action = groupByTag(queryId, 'k1')
const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: ['k1'],
})
@ -275,7 +276,7 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it('removes a tag if the given tag key is already in the GROUP BY list', () => {
const initialState = {
[queryId]: {
[queryID]: {
id: 123,
database: 'db1',
measurement: 'm1',
@ -284,11 +285,11 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
groupBy: {tags: ['k1'], time: null},
},
}
const action = groupByTag(queryId, 'k1')
const action = groupByTag(queryID, 'k1')
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy).to.eql({
expect(nextState[queryID].groupBy).to.eql({
time: null,
tags: [],
})
@ -298,14 +299,14 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
describe('KAPA_TOGGLE_TAG_ACCEPTANCE', () => {
it('it toggles areTagsAccepted', () => {
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = toggleTagAcceptance(queryId)
const action = toggleTagAcceptance(queryID)
const nextState = reducer(initialState, action)
expect(nextState[queryId].areTagsAccepted).to.equal(
!initialState[queryId].areTagsAccepted
expect(nextState[queryID].areTagsAccepted).to.equal(
!initialState[queryID].areTagsAccepted
)
})
})
@ -314,14 +315,28 @@ describe('Chronograf.Reducers.Kapacitor.queryConfigs', () => {
it('applys the appropriate group by time', () => {
const time = '100y'
const initialState = {
[queryId]: buildInitialState(queryId),
[queryID]: buildInitialState(queryID),
}
const action = groupByTime(queryId, time)
const action = groupByTime(queryID, time)
const nextState = reducer(initialState, action)
expect(nextState[queryId].groupBy.time).to.equal(time)
expect(nextState[queryID].groupBy.time).to.equal(time)
})
})
// Covers the new KAPA_TIME_SHIFT action: mirrors the Data Explorer's
// DE_TIME_SHIFT behavior for kapacitor rule query configs.
describe('KAPA_TIME_SHIFT', () => {
  it('can shift the time', () => {
    const initialState = {
      [queryID]: buildInitialState(queryID),
    }

    // shift descriptor: quantity + unit, with the precomputed duration string
    const shift = {quantity: 1, unit: 'd', duration: '1d'}
    const action = timeShift(queryID, shift)
    const nextState = reducer(initialState, action)

    // NOTE(review): asserts replacement, not accumulation — applying a shift
    // yields exactly [shift] regardless of any prior shifts. Confirm intended.
    expect(nextState[queryID].shifts).to.deep.equal([shift])
  })
})
})

View File

@ -0,0 +1,109 @@
// Tests for the time-range helpers backing the time-shift feature:
// timeRangeType classifies a {lower, upper} range and shiftTimeRange
// rewrites its bounds by a {quantity, unit} shift.
import {timeRangeType, shiftTimeRange} from 'shared/query/helpers'
// NOTE(review): 'moment' appears unused in this file — confirm and remove.
import moment from 'moment'
import {
  INVALID,
  ABSOLUTE,
  INFLUXQL,
  RELATIVE_LOWER,
  RELATIVE_UPPER,
} from 'shared/constants/timeRange'

const format = INFLUXQL

describe('Shared.Query.Helpers', () => {
  describe('timeRangeTypes', () => {
    it('returns invalid if no upper and lower', () => {
      const upper = null
      const lower = null
      const timeRange = {lower, upper}

      expect(timeRangeType(timeRange)).to.equal(INVALID)
    })

    it('can detect absolute type', () => {
      // two numeric epoch-ms bounds => absolute range
      const tenMinutes = 600000
      const upper = Date.now()
      const lower = upper - tenMinutes
      const timeRange = {lower, upper, format}

      expect(timeRangeType(timeRange)).to.equal(ABSOLUTE)
    })

    it('can detect exclusive relative lower', () => {
      // influxql 'now() - <dur>' lower with no upper => relative-lower range
      const lower = 'now() - 15m'
      const upper = null
      const timeRange = {lower, upper, format}

      expect(timeRangeType(timeRange)).to.equal(RELATIVE_LOWER)
    })

    it('can detect relative upper', () => {
      // literal 'now()' upper with a numeric lower => relative-upper range
      const upper = 'now()'
      const oneMinute = 60000
      const lower = Date.now() - oneMinute
      const timeRange = {lower, upper, format}

      expect(timeRangeType(timeRange)).to.equal(RELATIVE_UPPER)
    })
  })

  describe('timeRangeShift', () => {
    it('can calculate the shift for absolute timeRanges', () => {
      const upper = Date.now()
      const oneMinute = 60000
      const lower = Date.now() - oneMinute
      const shift = {quantity: 7, unit: 'd'}
      const timeRange = {upper, lower}

      const type = timeRangeType(timeRange)
      const actual = shiftTimeRange(timeRange, shift)
      // both bounds get ' - 7d' appended and the result is tagged 'shifted'
      const expected = {
        lower: `${lower} - 7d`,
        upper: `${upper} - 7d`,
        type: 'shifted',
      }

      expect(type).to.equal(ABSOLUTE)
      expect(actual).to.deep.equal(expected)
    })

    it('can calculate the shift for relative lower timeRanges', () => {
      const shift = {quantity: 7, unit: 'd'}
      const lower = 'now() - 15m'
      const timeRange = {lower, upper: null}

      const type = timeRangeType(timeRange)
      const actual = shiftTimeRange(timeRange, shift)
      // the missing upper becomes an explicit shifted 'now()'
      const expected = {
        lower: `${lower} - 7d`,
        upper: `now() - 7d`,
        type: 'shifted',
      }

      expect(type).to.equal(RELATIVE_LOWER)
      expect(actual).to.deep.equal(expected)
    })

    // NOTE(review): despite its title, this test builds a numeric upper bound,
    // so the assertion below pins the ABSOLUTE path again and the test
    // duplicates the first case. For a relative-upper range, upper should be
    // the literal 'now()' — confirm intent and fix the fixture or the title.
    it('can calculate the shift for relative upper timeRanges', () => {
      const upper = Date.now()
      const oneMinute = 60000
      const lower = Date.now() - oneMinute
      const shift = {quantity: 7, unit: 'd'}
      const timeRange = {upper, lower}

      const type = timeRangeType(timeRange)
      const actual = shiftTimeRange(timeRange, shift)
      const expected = {
        lower: `${lower} - 7d`,
        upper: `${upper} - 7d`,
        type: 'shifted',
      }

      expect(type).to.equal(ABSOLUTE)
      expect(actual).to.deep.equal(expected)
    })
  })
})

View File

@ -228,11 +228,7 @@ describe('timeSeriesToDygraph', () => {
]
const isInDataExplorer = true
const actual = timeSeriesToDygraph(
influxResponse,
undefined,
isInDataExplorer
)
const actual = timeSeriesToDygraph(influxResponse, isInDataExplorer)
const expected = {}

View File

@ -79,35 +79,29 @@ export const applyMasks = query => {
const maskForWholeTemplates = '😸$1😸'
return query.replace(matchWholeTemplates, maskForWholeTemplates)
}
// Replaces the in-progress (incomplete) template token in `query` with
// `tempVar`, using the module-level MATCH_INCOMPLETE_TEMPLATES pattern.
export const insertTempVar = (query, tempVar) =>
  query.replace(MATCH_INCOMPLETE_TEMPLATES, tempVar)
// Reverses applyMasks: turns every 😸 placeholder back into ':'.
export const unMask = query => query.replace(/😸/g, ':')
// Returns a copy of `templates` in which each template's `values` list is
// narrowed to only the entries marked `selected`. Input is not mutated.
export const removeUnselectedTemplateValues = templates =>
  templates.map(t => ({
    ...t,
    values: t.values.filter(v => v.selected),
  }))
// Y-axis scale choices offered in graph display options; the log bases
// are stored as strings.
export const DISPLAY_OPTIONS = {
  LINEAR: 'linear',
  LOG: 'log',
  BASE_2: '2',
  BASE_10: '10',
}

// Tooltip copy (HTML) shown beside the y-axis format control.
export const TOOLTIP_CONTENT = {
  FORMAT:
    '<p><strong>K/M/B</strong> = Thousand / Million / Billion<br/><strong>K/M/G</strong> = Kilo / Mega / Giga </p>',
}

// Discriminators for the kinds of query handed to visualizations:
// a structured queryConfig, a time-shifted copy of one, or raw IFQL.
export const TYPE_QUERY_CONFIG = 'queryConfig'
export const TYPE_SHIFTED = 'shifted queryConfig'
export const TYPE_IFQL = 'ifql'

// Maximum length enforced when naming/renaming a dashboard.
export const DASHBOARD_NAME_MAX_LENGTH = 50

View File

@ -39,13 +39,12 @@ class DashboardPage extends Component {
selectedCell: null,
isTemplating: false,
zoomedTimeRange: {zoomedLower: null, zoomedUpper: null},
names: [],
}
}
async componentDidMount() {
const {
params: {dashboardID, sourceID},
params: {dashboardID},
dashboardActions: {
getDashboardsAsync,
updateTempVarValues,
@ -62,13 +61,6 @@ class DashboardPage extends Component {
// Refresh and persists influxql generated template variable values
await updateTempVarValues(source, dashboard)
await putDashboardByID(dashboardID)
const names = dashboards.map(d => ({
name: d.name,
link: `/sources/${sourceID}/dashboards/${d.id}`,
}))
this.setState({names})
}
handleOpenTemplateManager = () => {
@ -263,14 +255,23 @@ class DashboardPage extends Component {
],
}
// this controls the auto group by behavior
const interval = {
id: 'interval',
type: 'constant',
type: 'autoGroupBy',
tempVar: ':interval:',
resolution: 1000,
reportingInterval: 10000000000,
values: [],
label: 'automatically determine the best group by time',
values: [
{
value: '1000', // pixels
type: 'resolution',
selected: true,
},
{
value: '3',
type: 'pointsPerPixel',
selected: true,
},
],
}
let templatesIncludingDashTime
@ -285,7 +286,11 @@ class DashboardPage extends Component {
templatesIncludingDashTime = []
}
const {selectedCell, isEditMode, isTemplating, names} = this.state
const {selectedCell, isEditMode, isTemplating} = this.state
const names = dashboards.map(d => ({
name: d.name,
link: `/sources/${sourceID}/dashboards/${d.id}`,
}))
return (
<div className="page">

View File

@ -18,26 +18,26 @@ export const deleteQuery = queryID => ({
},
})
export const toggleField = (queryId, fieldFunc) => ({
export const toggleField = (queryID, fieldFunc) => ({
type: 'DE_TOGGLE_FIELD',
payload: {
queryId,
queryID,
fieldFunc,
},
})
export const groupByTime = (queryId, time) => ({
export const groupByTime = (queryID, time) => ({
type: 'DE_GROUP_BY_TIME',
payload: {
queryId,
queryID,
time,
},
})
export const fill = (queryId, value) => ({
export const fill = (queryID, value) => ({
type: 'DE_FILL',
payload: {
queryId,
queryID,
value,
},
})
@ -51,44 +51,44 @@ export const removeFuncs = (queryID, fields, groupBy) => ({
},
})
export const applyFuncsToField = (queryId, fieldFunc, groupBy) => ({
export const applyFuncsToField = (queryID, fieldFunc, groupBy) => ({
type: 'DE_APPLY_FUNCS_TO_FIELD',
payload: {
queryId,
queryID,
fieldFunc,
groupBy,
},
})
export const chooseTag = (queryId, tag) => ({
export const chooseTag = (queryID, tag) => ({
type: 'DE_CHOOSE_TAG',
payload: {
queryId,
queryID,
tag,
},
})
export const chooseNamespace = (queryId, {database, retentionPolicy}) => ({
export const chooseNamespace = (queryID, {database, retentionPolicy}) => ({
type: 'DE_CHOOSE_NAMESPACE',
payload: {
queryId,
queryID,
database,
retentionPolicy,
},
})
export const chooseMeasurement = (queryId, measurement) => ({
export const chooseMeasurement = (queryID, measurement) => ({
type: 'DE_CHOOSE_MEASUREMENT',
payload: {
queryId,
queryID,
measurement,
},
})
export const editRawText = (queryId, rawText) => ({
export const editRawText = (queryID, rawText) => ({
type: 'DE_EDIT_RAW_TEXT',
payload: {
queryId,
queryID,
rawText,
},
})
@ -100,18 +100,18 @@ export const setTimeRange = bounds => ({
},
})
export const groupByTag = (queryId, tagKey) => ({
export const groupByTag = (queryID, tagKey) => ({
type: 'DE_GROUP_BY_TAG',
payload: {
queryId,
queryID,
tagKey,
},
})
export const toggleTagAcceptance = queryId => ({
export const toggleTagAcceptance = queryID => ({
type: 'DE_TOGGLE_TAG_ACCEPTANCE',
payload: {
queryId,
queryID,
},
})
@ -147,6 +147,14 @@ export const editQueryStatus = (queryID, status) => ({
},
})
// Action creator: apply time-shift descriptor `shift` to the Data Explorer
// query identified by `queryID`.
export const timeShift = (queryID, shift) => {
  return {
    type: 'DE_TIME_SHIFT',
    payload: {queryID, shift},
  }
}
// Async actions
export const editRawTextAsync = (url, id, text) => async dispatch => {
try {

View File

@ -7,8 +7,6 @@ import Dropdown from 'shared/components/Dropdown'
import {AUTO_GROUP_BY} from 'shared/constants'
const {func, string, shape} = PropTypes
const isInRuleBuilder = pathname => pathname.includes('alert-rules')
const isInDataExplorer = pathname => pathname.includes('data-explorer')
@ -37,6 +35,8 @@ const GroupByTimeDropdown = ({
/>
</div>
const {func, string, shape} = PropTypes
GroupByTimeDropdown.propTypes = {
location: shape({
pathname: string.isRequired,

View File

@ -88,7 +88,14 @@ class ChronoTable extends Component {
)
}
makeTabName = ({name, tags}) => (tags ? `${name}.${tags[name]}` : name)
makeTabName = ({name, tags}) => {
if (!tags) {
return name
}
const tagKeys = Object.keys(tags).sort()
const tagValues = tagKeys.map(key => tags[key]).join('.')
return `${name}.${tagValues}`
}
render() {
const {containerWidth, height, query} = this.props
@ -135,9 +142,13 @@ class ChronoTable extends Component {
</div>
: <Dropdown
className="dropdown-160 table--tabs-dropdown"
items={series.map((s, index) => ({...s, text: s.name, index}))}
items={series.map((s, index) => ({
...s,
text: this.makeTabName(s),
index,
}))}
onChoose={this.handleClickDropdown}
selected={series[activeSeriesIndex].name}
selected={this.makeTabName(series[activeSeriesIndex])}
buttonSize="btn-xs"
/>}
<div className="table--tabs-content">

View File

@ -1,9 +1,9 @@
import React, {PropTypes, Component} from 'react'
import buildInfluxQLQuery from 'utils/influxql'
import classnames from 'classnames'
import VisHeader from 'src/data_explorer/components/VisHeader'
import VisView from 'src/data_explorer/components/VisView'
import {GRAPH, TABLE} from 'shared/constants'
import buildQueries from 'utils/buildQueriesForGraphs'
import _ from 'lodash'
const META_QUERY_REGEX = /^(show|create|drop)/i
@ -61,19 +61,11 @@ class Visualization extends Component {
resizerBottomHeight,
errorThrown,
} = this.props
const {source: {links: {proxy}}} = this.context
const {view} = this.state
const statements = queryConfigs.map(query => {
const text =
query.rawText || buildInfluxQLQuery(query.range || timeRange, query)
return {text, id: query.id, queryConfig: query}
})
const queries = statements.filter(s => s.text !== null).map(s => {
return {host: [proxy], text: s.text, id: s.id, queryConfig: s.queryConfig}
})
const queries = buildQueries(proxy, queryConfigs, timeRange)
const activeQuery = queries[activeQueryIndex]
const defaultQuery = queries[0]
const query = activeQuery || defaultQuery
@ -81,12 +73,12 @@ class Visualization extends Component {
return (
<div className="graph" style={{height}}>
<VisHeader
views={views}
view={view}
onToggleView={this.handleToggleView}
name={cellName}
views={views}
query={query}
name={cellName}
errorThrown={errorThrown}
onToggleView={this.handleToggleView}
/>
<div
className={classnames({

View File

@ -3,6 +3,7 @@ import _ from 'lodash'
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
fill,
timeShift,
chooseTag,
groupByTag,
removeFuncs,
@ -20,24 +21,24 @@ import {
const queryConfigs = (state = {}, action) => {
switch (action.type) {
case 'DE_CHOOSE_NAMESPACE': {
const {queryId, database, retentionPolicy} = action.payload
const nextQueryConfig = chooseNamespace(state[queryId], {
const {queryID, database, retentionPolicy} = action.payload
const nextQueryConfig = chooseNamespace(state[queryID], {
database,
retentionPolicy,
})
return Object.assign({}, state, {
[queryId]: Object.assign(nextQueryConfig, {rawText: null}),
[queryID]: Object.assign(nextQueryConfig, {rawText: null}),
})
}
case 'DE_CHOOSE_MEASUREMENT': {
const {queryId, measurement} = action.payload
const nextQueryConfig = chooseMeasurement(state[queryId], measurement)
const {queryID, measurement} = action.payload
const nextQueryConfig = chooseMeasurement(state[queryID], measurement)
return Object.assign({}, state, {
[queryId]: Object.assign(nextQueryConfig, {
rawText: state[queryId].rawText,
[queryID]: Object.assign(nextQueryConfig, {
rawText: state[queryID].rawText,
}),
})
}
@ -64,78 +65,78 @@ const queryConfigs = (state = {}, action) => {
}
case 'DE_EDIT_RAW_TEXT': {
const {queryId, rawText} = action.payload
const nextQueryConfig = editRawText(state[queryId], rawText)
const {queryID, rawText} = action.payload
const nextQueryConfig = editRawText(state[queryID], rawText)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_GROUP_BY_TIME': {
const {queryId, time} = action.payload
const nextQueryConfig = groupByTime(state[queryId], time)
const {queryID, time} = action.payload
const nextQueryConfig = groupByTime(state[queryID], time)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_TOGGLE_TAG_ACCEPTANCE': {
const {queryId} = action.payload
const nextQueryConfig = toggleTagAcceptance(state[queryId])
const {queryID} = action.payload
const nextQueryConfig = toggleTagAcceptance(state[queryID])
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_TOGGLE_FIELD': {
const {queryId, fieldFunc} = action.payload
const nextQueryConfig = toggleField(state[queryId], fieldFunc)
const {queryID, fieldFunc} = action.payload
const nextQueryConfig = toggleField(state[queryID], fieldFunc)
return Object.assign({}, state, {
[queryId]: {...nextQueryConfig, rawText: null},
[queryID]: {...nextQueryConfig, rawText: null},
})
}
case 'DE_APPLY_FUNCS_TO_FIELD': {
const {queryId, fieldFunc, groupBy} = action.payload
const {queryID, fieldFunc, groupBy} = action.payload
const nextQueryConfig = applyFuncsToField(
state[queryId],
state[queryID],
fieldFunc,
groupBy
)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_CHOOSE_TAG': {
const {queryId, tag} = action.payload
const nextQueryConfig = chooseTag(state[queryId], tag)
const {queryID, tag} = action.payload
const nextQueryConfig = chooseTag(state[queryID], tag)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_GROUP_BY_TAG': {
const {queryId, tagKey} = action.payload
const nextQueryConfig = groupByTag(state[queryId], tagKey)
const {queryID, tagKey} = action.payload
const nextQueryConfig = groupByTag(state[queryID], tagKey)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'DE_FILL': {
const {queryId, value} = action.payload
const nextQueryConfig = fill(state[queryId], value)
const {queryID, value} = action.payload
const nextQueryConfig = fill(state[queryID], value)
return {
...state,
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
}
}
@ -171,6 +172,13 @@ const queryConfigs = (state = {}, action) => {
return {...state, [queryID]: nextQuery}
}
case 'DE_TIME_SHIFT': {
const {queryID, shift} = action.payload
const nextQuery = timeShift(state[queryID], shift)
return {...state, [queryID]: nextQuery}
}
}
return state
}

View File

@ -1,63 +1,63 @@
export const chooseNamespace = (queryId, {database, retentionPolicy}) => ({
export const chooseNamespace = (queryID, {database, retentionPolicy}) => ({
type: 'KAPA_CHOOSE_NAMESPACE',
payload: {
queryId,
queryID,
database,
retentionPolicy,
},
})
export const chooseMeasurement = (queryId, measurement) => ({
export const chooseMeasurement = (queryID, measurement) => ({
type: 'KAPA_CHOOSE_MEASUREMENT',
payload: {
queryId,
queryID,
measurement,
},
})
export const chooseTag = (queryId, tag) => ({
export const chooseTag = (queryID, tag) => ({
type: 'KAPA_CHOOSE_TAG',
payload: {
queryId,
queryID,
tag,
},
})
export const groupByTag = (queryId, tagKey) => ({
export const groupByTag = (queryID, tagKey) => ({
type: 'KAPA_GROUP_BY_TAG',
payload: {
queryId,
queryID,
tagKey,
},
})
export const toggleTagAcceptance = queryId => ({
export const toggleTagAcceptance = queryID => ({
type: 'KAPA_TOGGLE_TAG_ACCEPTANCE',
payload: {
queryId,
queryID,
},
})
export const toggleField = (queryId, fieldFunc) => ({
export const toggleField = (queryID, fieldFunc) => ({
type: 'KAPA_TOGGLE_FIELD',
payload: {
queryId,
queryID,
fieldFunc,
},
})
export const applyFuncsToField = (queryId, fieldFunc) => ({
export const applyFuncsToField = (queryID, fieldFunc) => ({
type: 'KAPA_APPLY_FUNCS_TO_FIELD',
payload: {
queryId,
queryID,
fieldFunc,
},
})
export const groupByTime = (queryId, time) => ({
export const groupByTime = (queryID, time) => ({
type: 'KAPA_GROUP_BY_TIME',
payload: {
queryId,
queryID,
time,
},
})
@ -69,3 +69,11 @@ export const removeFuncs = (queryID, fields) => ({
fields,
},
})
// Action creator: apply time-shift descriptor `shift` to the kapacitor rule
// query identified by `queryID`.
export const timeShift = (queryID, shift) => {
  const payload = {queryID, shift}
  return {type: 'KAPA_TIME_SHIFT', payload}
}

View File

@ -66,7 +66,7 @@ export const getRule = (kapacitor, ruleID) => async dispatch => {
}
}
export function loadDefaultRule() {
export const loadDefaultRule = () => {
return dispatch => {
const queryID = uuid.v4()
dispatch({
@ -88,15 +88,13 @@ export const fetchRules = kapacitor => async dispatch => {
}
}
export function chooseTrigger(ruleID, trigger) {
return {
type: 'CHOOSE_TRIGGER',
payload: {
ruleID,
trigger,
},
}
}
// Action creator: select `trigger` as the trigger type for rule `ruleID`.
export const chooseTrigger = (ruleID, trigger) => {
  return {
    type: 'CHOOSE_TRIGGER',
    payload: {ruleID, trigger},
  }
}
export const addEvery = (ruleID, frequency) => ({
type: 'ADD_EVERY',
@ -113,36 +111,30 @@ export const removeEvery = ruleID => ({
},
})
export function updateRuleValues(ruleID, trigger, values) {
return {
type: 'UPDATE_RULE_VALUES',
payload: {
ruleID,
trigger,
values,
},
}
}
// Action creator: replace the trigger `values` of rule `ruleID` for the
// given `trigger` type.
export const updateRuleValues = (ruleID, trigger, values) => {
  const payload = {ruleID, trigger, values}
  return {type: 'UPDATE_RULE_VALUES', payload}
}
export function updateMessage(ruleID, message) {
return {
type: 'UPDATE_RULE_MESSAGE',
payload: {
ruleID,
message,
},
}
}
export const updateMessage = (ruleID, message) => ({
type: 'UPDATE_RULE_MESSAGE',
payload: {
ruleID,
message,
},
})
export function updateDetails(ruleID, details) {
return {
type: 'UPDATE_RULE_DETAILS',
payload: {
ruleID,
details,
},
}
}
export const updateDetails = (ruleID, details) => ({
type: 'UPDATE_RULE_DETAILS',
payload: {
ruleID,
details,
},
})
export const updateAlertProperty = (ruleID, alertNodeName, alertProperty) => ({
type: 'UPDATE_RULE_ALERT_PROPERTY',
@ -153,87 +145,73 @@ export const updateAlertProperty = (ruleID, alertNodeName, alertProperty) => ({
},
})
export function updateAlerts(ruleID, alerts) {
return {
type: 'UPDATE_RULE_ALERTS',
payload: {
ruleID,
alerts,
},
}
export const updateAlerts = (ruleID, alerts) => ({
type: 'UPDATE_RULE_ALERTS',
payload: {
ruleID,
alerts,
},
})
export const updateAlertNodes = (ruleID, alertNodeName, alertNodesText) => ({
type: 'UPDATE_RULE_ALERT_NODES',
payload: {
ruleID,
alertNodeName,
alertNodesText,
},
})
export const updateRuleName = (ruleID, name) => ({
type: 'UPDATE_RULE_NAME',
payload: {
ruleID,
name,
},
})
export const deleteRuleSuccess = ruleID => ({
type: 'DELETE_RULE_SUCCESS',
payload: {
ruleID,
},
})
export const updateRuleStatusSuccess = (ruleID, status) => ({
type: 'UPDATE_RULE_STATUS_SUCCESS',
payload: {
ruleID,
status,
},
})
export const deleteRule = rule => dispatch => {
deleteRuleAPI(rule)
.then(() => {
dispatch(deleteRuleSuccess(rule.id))
dispatch(
publishNotification('success', `${rule.name} deleted successfully`)
)
})
.catch(() => {
dispatch(
publishNotification('error', `${rule.name} could not be deleted`)
)
})
}
export function updateAlertNodes(ruleID, alertNodeName, alertNodesText) {
return {
type: 'UPDATE_RULE_ALERT_NODES',
payload: {
ruleID,
alertNodeName,
alertNodesText,
},
}
}
export function updateRuleName(ruleID, name) {
return {
type: 'UPDATE_RULE_NAME',
payload: {
ruleID,
name,
},
}
}
export function deleteRuleSuccess(ruleID) {
return {
type: 'DELETE_RULE_SUCCESS',
payload: {
ruleID,
},
}
}
export function updateRuleStatusSuccess(ruleID, status) {
return {
type: 'UPDATE_RULE_STATUS_SUCCESS',
payload: {
ruleID,
status,
},
}
}
export function deleteRule(rule) {
return dispatch => {
deleteRuleAPI(rule)
.then(() => {
dispatch(deleteRuleSuccess(rule.id))
dispatch(
publishNotification('success', `${rule.name} deleted successfully`)
)
})
.catch(() => {
dispatch(
publishNotification('error', `${rule.name} could not be deleted`)
)
})
}
}
export function updateRuleStatus(rule, status) {
return dispatch => {
updateRuleStatusAPI(rule, status)
.then(() => {
dispatch(
publishNotification('success', `${rule.name} ${status} successfully`)
)
})
.catch(() => {
dispatch(
publishNotification('error', `${rule.name} could not be ${status}`)
)
})
}
export const updateRuleStatus = (rule, status) => dispatch => {
updateRuleStatusAPI(rule, status)
.then(() => {
dispatch(
publishNotification('success', `${rule.name} ${status} successfully`)
)
})
.catch(() => {
dispatch(
publishNotification('error', `${rule.name} could not be ${status}`)
)
})
}
export const createTask = (

View File

@ -61,13 +61,13 @@ class KapacitorRulePage extends Component {
render() {
const {
rules,
queryConfigs,
params,
ruleActions,
source,
queryConfigActions,
addFlashMessage,
router,
ruleActions,
queryConfigs,
addFlashMessage,
queryConfigActions,
} = this.props
const {enabledAlerts, kapacitor} = this.state
const rule = this.isEditing()
@ -80,17 +80,17 @@ class KapacitorRulePage extends Component {
}
return (
<KapacitorRule
source={source}
rule={rule}
query={query}
queryConfigs={queryConfigs}
queryConfigActions={queryConfigActions}
ruleActions={ruleActions}
addFlashMessage={addFlashMessage}
enabledAlerts={enabledAlerts}
isEditing={this.isEditing()}
router={router}
source={source}
kapacitor={kapacitor}
ruleActions={ruleActions}
queryConfigs={queryConfigs}
isEditing={this.isEditing()}
enabledAlerts={enabledAlerts}
addFlashMessage={addFlashMessage}
queryConfigActions={queryConfigActions}
/>
)
}

View File

@ -1,13 +1,14 @@
import defaultQueryConfig from 'src/utils/defaultQueryConfig'
import {
applyFuncsToField,
chooseMeasurement,
chooseNamespace,
timeShift,
chooseTag,
groupByTag,
groupByTime,
removeFuncs,
chooseNamespace,
toggleKapaField,
applyFuncsToField,
chooseMeasurement,
toggleTagAcceptance,
} from 'src/utils/queryTransitions'
@ -34,9 +35,9 @@ const queryConfigs = (state = {}, action) => {
}
case 'KAPA_CHOOSE_NAMESPACE': {
const {queryId, database, retentionPolicy} = action.payload
const {queryID, database, retentionPolicy} = action.payload
const nextQueryConfig = chooseNamespace(
state[queryId],
state[queryID],
{
database,
retentionPolicy,
@ -45,75 +46,75 @@ const queryConfigs = (state = {}, action) => {
)
return Object.assign({}, state, {
[queryId]: Object.assign(nextQueryConfig, {rawText: null}),
[queryID]: Object.assign(nextQueryConfig, {rawText: null}),
})
}
case 'KAPA_CHOOSE_MEASUREMENT': {
const {queryId, measurement} = action.payload
const {queryID, measurement} = action.payload
const nextQueryConfig = chooseMeasurement(
state[queryId],
state[queryID],
measurement,
IS_KAPACITOR_RULE
)
return Object.assign({}, state, {
[queryId]: Object.assign(nextQueryConfig, {
rawText: state[queryId].rawText,
[queryID]: Object.assign(nextQueryConfig, {
rawText: state[queryID].rawText,
}),
})
}
case 'KAPA_CHOOSE_TAG': {
const {queryId, tag} = action.payload
const nextQueryConfig = chooseTag(state[queryId], tag)
const {queryID, tag} = action.payload
const nextQueryConfig = chooseTag(state[queryID], tag)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'KAPA_GROUP_BY_TAG': {
const {queryId, tagKey} = action.payload
const nextQueryConfig = groupByTag(state[queryId], tagKey)
const {queryID, tagKey} = action.payload
const nextQueryConfig = groupByTag(state[queryID], tagKey)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'KAPA_TOGGLE_TAG_ACCEPTANCE': {
const {queryId} = action.payload
const nextQueryConfig = toggleTagAcceptance(state[queryId])
const {queryID} = action.payload
const nextQueryConfig = toggleTagAcceptance(state[queryID])
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
case 'KAPA_TOGGLE_FIELD': {
const {queryId, fieldFunc} = action.payload
const nextQueryConfig = toggleKapaField(state[queryId], fieldFunc)
const {queryID, fieldFunc} = action.payload
const nextQueryConfig = toggleKapaField(state[queryID], fieldFunc)
return {...state, [queryId]: {...nextQueryConfig, rawText: null}}
return {...state, [queryID]: {...nextQueryConfig, rawText: null}}
}
case 'KAPA_APPLY_FUNCS_TO_FIELD': {
const {queryId, fieldFunc} = action.payload
const {groupBy} = state[queryId]
const nextQueryConfig = applyFuncsToField(state[queryId], fieldFunc, {
const {queryID, fieldFunc} = action.payload
const {groupBy} = state[queryID]
const nextQueryConfig = applyFuncsToField(state[queryID], fieldFunc, {
...groupBy,
time: groupBy.time ? groupBy.time : '10s',
})
return {...state, [queryId]: nextQueryConfig}
return {...state, [queryID]: nextQueryConfig}
}
case 'KAPA_GROUP_BY_TIME': {
const {queryId, time} = action.payload
const nextQueryConfig = groupByTime(state[queryId], time)
const {queryID, time} = action.payload
const nextQueryConfig = groupByTime(state[queryID], time)
return Object.assign({}, state, {
[queryId]: nextQueryConfig,
[queryID]: nextQueryConfig,
})
}
@ -124,6 +125,13 @@ const queryConfigs = (state = {}, action) => {
// fields with no functions cannot have a group by time
return {...state, [queryID]: nextQuery}
}
case 'KAPA_TIME_SHIFT': {
const {queryID, shift} = action.payload
const nextQuery = timeShift(state[queryID], shift)
return {...state, [queryID]: nextQuery}
}
}
return state
}

View File

@ -24,8 +24,8 @@ export function showQueries(source, db) {
return proxy({source, query, db})
}
export function killQuery(source, queryId) {
const query = `KILL QUERY ${queryId}`
export function killQuery(source, queryID) {
const query = `KILL QUERY ${queryID}`
return proxy({source, query})
}

View File

@ -81,20 +81,35 @@ const AutoRefresh = ComposedComponent => {
const templatesWithResolution = templates.map(temp => {
if (temp.tempVar === ':interval:') {
if (resolution) {
return {...temp, resolution}
return {
...temp,
values: temp.values.map(
v => (temp.type === 'resolution' ? {...v, resolution} : v)
),
}
}
return {
...temp,
values: [
...temp.values,
{value: '1000', type: 'resolution', selected: true},
],
}
return {...temp, resolution: 1000}
}
return {...temp}
return temp
})
const tempVars = removeUnselectedTemplateValues(templatesWithResolution)
return fetchTimeSeriesAsync(
{
source: host,
db: database,
rp,
query,
tempVars: removeUnselectedTemplateValues(templatesWithResolution),
tempVars,
resolution,
},
editQueryStatus

View File

@ -1,9 +1,8 @@
import React, {PropTypes, Component} from 'react'
import _ from 'lodash'
import QueryOptions from 'shared/components/QueryOptions'
import FieldListItem from 'src/data_explorer/components/FieldListItem'
import GroupByTimeDropdown from 'src/data_explorer/components/GroupByTimeDropdown'
import FillQuery from 'shared/components/FillQuery'
import FancyScrollbar from 'shared/components/FancyScrollbar'
import {showFieldKeys} from 'shared/apis/metaQuery'
@ -107,6 +106,10 @@ class FieldList extends Component {
applyFuncsToField(fieldFunc, groupBy)
}
handleTimeShift = shift => {
this.props.onTimeShift(shift)
}
_getFields = () => {
const {database, measurement, retentionPolicy} = this.props.query
const {source} = this.context
@ -129,12 +132,11 @@ class FieldList extends Component {
render() {
const {
query: {database, measurement, fields = [], groupBy, fill},
query: {database, measurement, fields = [], groupBy, fill, shifts},
isKapacitorRule,
} = this.props
const hasAggregates = numFunctions(fields) > 0
const hasGroupByTime = groupBy.time
const noDBorMeas = !database || !measurement
return (
@ -142,16 +144,15 @@ class FieldList extends Component {
<div className="query-builder--heading">
<span>Fields</span>
{hasAggregates
? <div className="query-builder--groupby-fill-container">
<GroupByTimeDropdown
isOpen={!hasGroupByTime}
selected={groupBy.time}
onChooseGroupByTime={this.handleGroupByTime}
/>
{isKapacitorRule
? null
: <FillQuery value={fill} onChooseFill={this.handleFill} />}
</div>
? <QueryOptions
fill={fill}
shift={_.first(shifts)}
groupBy={groupBy}
onFill={this.handleFill}
isKapacitorRule={isKapacitorRule}
onTimeShift={this.handleTimeShift}
onGroupByTime={this.handleGroupByTime}
/>
: null}
</div>
{noDBorMeas
@ -192,7 +193,7 @@ class FieldList extends Component {
}
}
const {bool, func, shape, string} = PropTypes
const {arrayOf, bool, func, shape, string} = PropTypes
FieldList.defaultProps = {
isKapacitorRule: false,
@ -212,7 +213,15 @@ FieldList.propTypes = {
database: string,
retentionPolicy: string,
measurement: string,
shifts: arrayOf(
shape({
label: string,
unit: string,
quantity: string,
})
),
}).isRequired,
onTimeShift: func,
onToggleField: func.isRequired,
onGroupByTime: func.isRequired,
onFill: func,

View File

@ -2,7 +2,7 @@ import React, {Component, PropTypes} from 'react'
import WidgetCell from 'shared/components/WidgetCell'
import LayoutCell from 'shared/components/LayoutCell'
import RefreshingGraph from 'shared/components/RefreshingGraph'
import {buildQueriesForLayouts} from 'utils/influxql'
import {buildQueriesForLayouts} from 'utils/buildQueriesForLayouts'
import _ from 'lodash'

View File

@ -17,12 +17,8 @@ class LineGraph extends Component {
}
componentWillMount() {
const {data, activeQueryIndex, isInDataExplorer} = this.props
this._timeSeries = timeSeriesToDygraph(
data,
activeQueryIndex,
isInDataExplorer
)
const {data, isInDataExplorer} = this.props
this._timeSeries = timeSeriesToDygraph(data, isInDataExplorer)
}
componentWillUpdate(nextProps) {
@ -33,7 +29,6 @@ class LineGraph extends Component {
) {
this._timeSeries = timeSeriesToDygraph(
nextProps.data,
nextProps.activeQueryIndex,
nextProps.isInDataExplorer
)
}

View File

@ -0,0 +1,45 @@
import React, {PropTypes} from 'react'
import GroupByTimeDropdown from 'src/data_explorer/components/GroupByTimeDropdown'
import TimeShiftDropdown from 'src/shared/components/TimeShiftDropdown'
import FillQuery from 'shared/components/FillQuery'
const QueryOptions = ({
fill,
shift,
onFill,
groupBy,
onTimeShift,
onGroupByTime,
isKapacitorRule,
}) =>
<div className="query-builder--groupby-fill-container">
<GroupByTimeDropdown
selected={groupBy.time}
onChooseGroupByTime={onGroupByTime}
/>
{isKapacitorRule
? null
: <TimeShiftDropdown
selected={shift && shift.label}
onChooseTimeShift={onTimeShift}
/>}
{isKapacitorRule ? null : <FillQuery value={fill} onChooseFill={onFill} />}
</div>
const {bool, func, shape, string} = PropTypes
QueryOptions.propTypes = {
fill: string,
onFill: func.isRequired,
groupBy: shape({
time: string,
}).isRequired,
shift: shape({
label: string,
}),
onGroupByTime: func.isRequired,
isKapacitorRule: bool.isRequired,
onTimeShift: func.isRequired,
}
export default QueryOptions

View File

@ -13,6 +13,7 @@ const SchemaExplorer = ({
initialGroupByTime,
actions: {
fill,
timeShift,
chooseTag,
groupByTag,
groupByTime,
@ -44,13 +45,14 @@ const SchemaExplorer = ({
source={source}
query={query}
querySource={source}
initialGroupByTime={initialGroupByTime}
onToggleField={actionBinder(id, toggleField)}
onFill={actionBinder(id, fill)}
onGroupByTime={actionBinder(id, groupByTime)}
applyFuncsToField={actionBinder(id, applyFuncsToField)}
initialGroupByTime={initialGroupByTime}
onTimeShift={actionBinder(id, timeShift)}
removeFuncs={actionBinder(id, removeFuncs)}
onToggleField={actionBinder(id, toggleField)}
onGroupByTime={actionBinder(id, groupByTime)}
addInitialField={actionBinder(id, addInitialField)}
applyFuncsToField={actionBinder(id, applyFuncsToField)}
/>
</div>

View File

@ -0,0 +1,24 @@
import React, {PropTypes} from 'react'
import Dropdown from 'shared/components/Dropdown'
import {TIME_SHIFTS} from 'shared/constants/timeShift'
const TimeShiftDropdown = ({selected, onChooseTimeShift}) =>
<div className="group-by-time">
<label className="group-by-time--label">Compare:</label>
<Dropdown
className="group-by-time--dropdown"
buttonColor="btn-info"
items={TIME_SHIFTS}
onChoose={onChooseTimeShift}
selected={selected || 'none'}
/>
</div>
const {func, string} = PropTypes
TimeShiftDropdown.propTypes = {
selected: string,
onChooseTimeShift: func.isRequired,
}
export default TimeShiftDropdown

View File

@ -0,0 +1,4 @@
export const ABSOLUTE = 'absolute'
export const INVALID = 'invalid'
export const RELATIVE_LOWER = 'relative lower'
export const RELATIVE_UPPER = 'relative upper'

View File

@ -0,0 +1,10 @@
export const TIME_SHIFTS = [
{label: 'none', text: 'none', quantity: null, unit: null},
{label: '1m', text: '1m', quantity: '1', unit: 'm'},
{label: '1h', text: '1h', quantity: '1', unit: 'h'},
{label: '12h', text: '12h', quantity: '12', unit: 'h'},
{label: '1d', text: '1d', quantity: '1', unit: 'd'},
{label: '7d', text: '7d', quantity: '7', unit: 'd'},
{label: '30d', text: '30d', quantity: '30', unit: 'd'},
{label: '365d', text: '365d', quantity: '365', unit: 'd'},
]

View File

@ -0,0 +1,110 @@
import moment from 'moment'
import {
INFLUXQL,
ABSOLUTE,
INVALID,
RELATIVE_LOWER,
RELATIVE_UPPER,
} from 'shared/constants/timeRange'
const now = /^now/
export const timeRangeType = ({upper, lower, type}) => {
if (!upper && !lower) {
return INVALID
}
if (type && type !== INFLUXQL) {
return INVALID
}
const isUpperValid = moment(upper).isValid()
const isLowerValid = moment(lower).isValid()
// {lower: <Date>, upper: <Date>}
if (isLowerValid && isUpperValid) {
return ABSOLUTE
}
// {lower: now - <Duration>, upper: <empty>}
if (now.test(lower) && !upper) {
return RELATIVE_LOWER
}
// {lower: <Date>, upper: now() - <Duration>}
if (isLowerValid && now.test(upper)) {
return RELATIVE_UPPER
}
return INVALID
}
export const shiftTimeRange = (timeRange, shift) => {
const {upper, lower} = timeRange
const {quantity, unit} = shift
const trType = timeRangeType(timeRange)
const duration = `${quantity}${unit}`
const type = 'shifted'
switch (trType) {
case RELATIVE_UPPER:
case ABSOLUTE: {
return {
lower: `${lower} - ${duration}`,
upper: `${upper} - ${duration}`,
type,
}
}
case RELATIVE_LOWER: {
return {
lower: `${lower} - ${duration}`,
upper: `now() - ${duration}`,
type,
}
}
default: {
return {lower, upper, type: 'unshifted'}
}
}
}
const getMomentUnit = unit => {
switch (unit) {
case 'ms': {
return 'milliseconds' // (1 thousandth of a second)
}
case 's': {
return 'seconds'
}
case 'm': {
return 'minute'
}
case 'h': {
return 'hour'
}
case 'd': {
return 'day'
}
case 'w': {
return 'week'
}
default: {
return unit
}
}
}
export const shiftDate = (date, quantity, unit) => {
if (!date && !quantity && !unit) {
return moment(date)
}
return moment(date).add(quantity, getMomentUnit(unit))
}

View File

@ -138,7 +138,7 @@ $graph-gutter: 16px;
font-size: 20px;
font-weight: 400;
margin: 0;
text-align: left;
text-align: center;
color: $g8-storm;
white-space: pre-wrap;
}

View File

@ -1,28 +1,46 @@
import {buildQuery} from 'utils/influxql'
import {TYPE_QUERY_CONFIG} from 'src/dashboards/constants'
import {TYPE_QUERY_CONFIG, TYPE_SHIFTED} from 'src/dashboards/constants'
const buildQueries = (proxy, queryConfigs, timeRange) => {
const buildQueries = (proxy, queryConfigs, tR) => {
const statements = queryConfigs.map(query => {
const text =
query.rawText ||
buildQuery(TYPE_QUERY_CONFIG, query.range || timeRange, query)
return {text, id: query.id, queryConfig: query}
})
const {rawText, range, id, shifts, database, measurement, fields} = query
const timeRange = range || tR
const text = rawText || buildQuery(TYPE_QUERY_CONFIG, timeRange, query)
const isParsable = database && measurement && fields.length
const queries = statements.filter(s => s.text !== null).map(s => {
let queryProxy = ''
if (s.queryConfig.source) {
queryProxy = `${s.queryConfig.source.links.proxy}`
if (shifts && shifts.length && isParsable) {
const shiftedQueries = shifts
.filter(s => s.unit)
.map(s => buildQuery(TYPE_SHIFTED, timeRange, query, s))
return {
text: `${text};${shiftedQueries.join(';')}`,
id,
queryConfig: query,
}
}
return {
host: [queryProxy || proxy],
text: s.text,
id: s.id,
queryConfig: s.queryConfig,
}
return {text, id, queryConfig: query}
})
const queries = statements
.filter(s => s.text !== null)
.map(({queryConfig, text, id}) => {
let queryProxy = ''
if (queryConfig.source) {
queryProxy = `${queryConfig.source.links.proxy}`
}
const host = [queryProxy || proxy]
return {
host,
text,
id,
queryConfig,
}
})
return queries
}

View File

@ -0,0 +1,72 @@
import {buildQuery} from 'utils/influxql'
import {TYPE_SHIFTED, TYPE_QUERY_CONFIG} from 'src/dashboards/constants'
import timeRanges from 'hson!shared/data/timeRanges.hson'
const buildCannedDashboardQuery = (query, {lower, upper}, host) => {
const {defaultGroupBy} = timeRanges.find(range => range.lower === lower) || {
defaultGroupBy: '5m',
}
const {wheres, groupbys} = query
let text = query.text
if (upper) {
text += ` where time > '${lower}' AND time < '${upper}'`
} else {
text += ` where time > ${lower}`
}
if (host) {
text += ` and \"host\" = '${host}'`
}
if (wheres && wheres.length > 0) {
text += ` and ${wheres.join(' and ')}`
}
if (groupbys) {
if (groupbys.find(g => g.includes('time'))) {
text += ` group by ${groupbys.join(',')}`
} else if (groupbys.length > 0) {
text += ` group by time(${defaultGroupBy}),${groupbys.join(',')}`
} else {
text += ` group by time(${defaultGroupBy})`
}
} else {
text += ` group by time(${defaultGroupBy})`
}
return text
}
export const buildQueriesForLayouts = (cell, source, timeRange, host) => {
return cell.queries.map(query => {
let queryText
// Canned dashboards use an different a schema different from queryConfig.
if (query.queryConfig) {
const {
queryConfig: {database, measurement, fields, shifts, rawText, range},
} = query
const tR = range || {
upper: ':upperDashboardTime:',
lower: ':dashboardTime:',
}
queryText =
rawText || buildQuery(TYPE_QUERY_CONFIG, tR, query.queryConfig)
const isParsable = database && measurement && fields.length
if (shifts && shifts.length && isParsable) {
const shiftedQueries = shifts
.filter(s => s.unit)
.map(s => buildQuery(TYPE_SHIFTED, timeRange, query.queryConfig, s))
queryText = `${queryText};${shiftedQueries.join(';')}`
}
} else {
queryText = buildCannedDashboardQuery(query, timeRange, host)
}
return {...query, host: source.links.proxy, text: queryText}
})
}

View File

@ -15,6 +15,7 @@ const defaultQueryConfig = ({id, isKapacitorRule = false}) => {
areTagsAccepted: true,
rawText: null,
status: null,
shifts: [],
}
return isKapacitorRule ? queryConfig : {...queryConfig, fill: NULL_STRING}

View File

@ -2,8 +2,12 @@ import _ from 'lodash'
import {TEMP_VAR_INTERVAL, AUTO_GROUP_BY} from 'shared/constants'
import {NULL_STRING} from 'shared/constants/queryFillOptions'
import {TYPE_QUERY_CONFIG, TYPE_IFQL} from 'src/dashboards/constants'
import timeRanges from 'hson!shared/data/timeRanges.hson'
import {
TYPE_QUERY_CONFIG,
TYPE_SHIFTED,
TYPE_IFQL,
} from 'src/dashboards/constants'
import {shiftTimeRange} from 'shared/query/helpers'
/* eslint-disable quotes */
export const quoteIfTimestamp = ({lower, upper}) => {
@ -19,11 +23,11 @@ export const quoteIfTimestamp = ({lower, upper}) => {
}
/* eslint-enable quotes */
export default function buildInfluxQLQuery(timeRange, config) {
export default function buildInfluxQLQuery(timeRange, config, shift) {
const {groupBy, fill = NULL_STRING, tags, areTagsAccepted} = config
const {upper, lower} = quoteIfTimestamp(timeRange)
const select = _buildSelect(config)
const select = _buildSelect(config, shift)
if (select === null) {
return null
}
@ -35,26 +39,35 @@ export default function buildInfluxQLQuery(timeRange, config) {
return `${select}${condition}${dimensions}${fillClause}`
}
function _buildSelect({fields, database, retentionPolicy, measurement}) {
function _buildSelect({fields, database, retentionPolicy, measurement}, shift) {
if (!database || !measurement || !fields || !fields.length) {
return null
}
const rpSegment = retentionPolicy ? `"${retentionPolicy}"` : ''
const fieldsClause = _buildFields(fields)
const fieldsClause = _buildFields(fields, shift)
const fullyQualifiedMeasurement = `"${database}".${rpSegment}."${measurement}"`
const statement = `SELECT ${fieldsClause} FROM ${fullyQualifiedMeasurement}`
return statement
}
// type arg will reason about new query types i.e. IFQL, GraphQL, or queryConfig
export const buildQuery = (type, timeRange, config) => {
export const buildQuery = (type, timeRange, config, shift) => {
switch (type) {
case `${TYPE_QUERY_CONFIG}`: {
case TYPE_QUERY_CONFIG: {
return buildInfluxQLQuery(timeRange, config)
}
case `${TYPE_IFQL}`: {
case TYPE_SHIFTED: {
const {quantity, unit} = shift
return buildInfluxQLQuery(
shiftTimeRange(timeRange, shift),
config,
`_shifted__${quantity}__${unit}`
)
}
case TYPE_IFQL: {
// build query usining IFQL here
}
}
@ -66,7 +79,7 @@ export function buildSelectStatement(config) {
return _buildSelect(config)
}
function _buildFields(fieldFuncs) {
function _buildFields(fieldFuncs, shift = '') {
if (!fieldFuncs) {
return ''
}
@ -77,9 +90,21 @@ function _buildFields(fieldFuncs) {
case 'field': {
return f.value === '*' ? '*' : `"${f.value}"`
}
case 'wildcard': {
return '*'
}
case 'regex': {
return `/${f.value}/`
}
case 'number': {
return `${f.value}`
}
case 'integer': {
return `${f.value}`
}
case 'func': {
const args = _buildFields(f.args)
const alias = f.alias ? ` AS "${f.alias}"` : ''
const alias = f.alias ? ` AS "${f.alias}${shift}"` : ''
return `${f.value}(${args})${alias}`
}
}
@ -155,61 +180,5 @@ function _buildFill(fill) {
return ` FILL(${fill})`
}
const buildCannedDashboardQuery = (query, {lower, upper}, host) => {
const {defaultGroupBy} = timeRanges.find(range => range.lower === lower) || {
defaultGroupBy: '5m',
}
const {wheres, groupbys} = query
let text = query.text
if (upper) {
text += ` where time > '${lower}' AND time < '${upper}'`
} else {
text += ` where time > ${lower}`
}
if (host) {
text += ` and \"host\" = '${host}'`
}
if (wheres && wheres.length > 0) {
text += ` and ${wheres.join(' and ')}`
}
if (groupbys) {
if (groupbys.find(g => g.includes('time'))) {
text += ` group by ${groupbys.join(',')}`
} else if (groupbys.length > 0) {
text += ` group by time(${defaultGroupBy}),${groupbys.join(',')}`
} else {
text += ` group by time(${defaultGroupBy})`
}
} else {
text += ` group by time(${defaultGroupBy})`
}
return text
}
export const buildQueriesForLayouts = (cell, source, timeRange, host) => {
return cell.queries.map(query => {
let queryText
// Canned dashboards use an different a schema different from queryConfig.
if (query.queryConfig) {
const {queryConfig: {rawText, range}} = query
const tR = range || {
upper: ':upperDashboardTime:',
lower: ':dashboardTime:',
}
queryText = rawText || buildInfluxQLQuery(tR, query.queryConfig)
} else {
queryText = buildCannedDashboardQuery(query, timeRange, host)
}
return {...query, host: source.links.proxy, text: queryText}
})
}
export const buildRawText = (q, timeRange) =>
q.rawText || buildInfluxQLQuery(timeRange, q) || ''

View File

@ -108,7 +108,7 @@ export const toggleField = (query, {value}) => {
}
}
export function groupByTime(query, time) {
export const groupByTime = (query, time) => {
return Object.assign({}, query, {
groupBy: Object.assign({}, query.groupBy, {
time,
@ -118,7 +118,7 @@ export function groupByTime(query, time) {
export const fill = (query, value) => ({...query, fill: value})
export function toggleTagAcceptance(query) {
export const toggleTagAcceptance = query => {
return Object.assign({}, query, {
areTagsAccepted: !query.areTagsAccepted,
})
@ -185,13 +185,13 @@ export const applyFuncsToField = (query, {field, funcs = []}, groupBy) => {
}
}
export function updateRawQuery(query, rawText) {
export const updateRawQuery = (query, rawText) => {
return Object.assign({}, query, {
rawText,
})
}
export function groupByTag(query, tagKey) {
export const groupByTag = (query, tagKey) => {
const oldTags = query.groupBy.tags
let newTags
@ -209,7 +209,7 @@ export function groupByTag(query, tagKey) {
})
}
export function chooseTag(query, tag) {
export const chooseTag = (query, tag) => {
const tagValues = query.tags[tag.key]
const shouldRemoveTag =
tagValues && tagValues.length === 1 && tagValues[0] === tag.value
@ -219,6 +219,14 @@ export function chooseTag(query, tag) {
return Object.assign({}, query, {tags: newTags})
}
const updateTagValues = newTagValues => {
return Object.assign({}, query, {
tags: Object.assign({}, query.tags, {
[tag.key]: newTagValues,
}),
})
}
const oldTagValues = query.tags[tag.key]
if (!oldTagValues) {
return updateTagValues([tag.value])
@ -233,12 +241,6 @@ export function chooseTag(query, tag) {
}
return updateTagValues(query.tags[tag.key].concat(tag.value))
function updateTagValues(newTagValues) {
return Object.assign({}, query, {
tags: Object.assign({}, query.tags, {
[tag.key]: newTagValues,
}),
})
}
}
export const timeShift = (query, shift) => ({...query, shifts: [shift]})

View File

@ -1,4 +1,5 @@
import _ from 'lodash'
import {shiftDate} from 'shared/query/helpers'
import {map, reduce, forEach, concat, clone} from 'fast.js'
/**
@ -15,12 +16,7 @@ const cells = {
responseIndex: new Array(DEFAULT_SIZE),
}
// activeQueryIndex is an optional argument that indicated which query's series we want highlighted.
export default function timeSeriesToDygraph(
raw = [],
activeQueryIndex,
isInDataExplorer
) {
export default function timeSeriesToDygraph(raw = [], isInDataExplorer) {
// collect results from each influx response
const results = reduce(
raw,
@ -115,11 +111,16 @@ export default function timeSeriesToDygraph(
const timeSeries = []
for (let i = 0; i < size; i++) {
const time = cells.time[i]
let time = cells.time[i]
const value = cells.value[i]
const label = cells.label[i]
const seriesIndex = cells.seriesIndex[i]
if (label.includes('_shifted__')) {
const [, quantity, duration] = label.split('__')
time = +shiftDate(time, quantity, duration).format('x')
}
let existingRowIndex = tsMemo[time]
if (existingRowIndex === undefined) {