Merge branch 'multiple-event-handlers' of github.com:influxdata/chronograf into multiple-event-handlers

pull/10616/head
deniz kusefoglu 2017-11-21 13:04:01 -08:00
commit cb55dadb9c
22 changed files with 870 additions and 545 deletions

10
Gopkg.lock generated
View File

@ -65,13 +65,13 @@
[[projects]]
name = "github.com/influxdata/influxdb"
packages = ["influxql","influxql/internal","influxql/neldermead","models","pkg/escape"]
revision = "af72d9b0e4ebe95be30e89b160f43eabaf0529ed"
revision = "cd9363b52cac452113b95554d98a6be51beda24e"
version = "v1.1.5"
[[projects]]
name = "github.com/influxdata/kapacitor"
packages = ["client/v1","pipeline","services/k8s/client","tick","tick/ast","tick/stateful","udf/agent"]
revision = "3b5512f7276483326577907803167e4bb213c613"
version = "v1.3.1"
packages = ["client/v1","pipeline","pipeline/tick","services/k8s/client","tick","tick/ast","tick/stateful","udf/agent"]
revision = "291ca33f5d7b8b277cbb9a7afb65397d1769a99e"
[[projects]]
name = "github.com/influxdata/usage-client"
@ -140,6 +140,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "85a5451fc9e0596e486a676204eb2de0b12900522341ee0804cf9ec86fb2765e"
inputs-digest = "46184c2d3fedb48dad6649bb1a97237bc5eef1f48ee1f4b69373e99783a2a47f"
solver-name = "gps-cdcl"
solver-version = 1

View File

@ -32,14 +32,6 @@ required = ["github.com/jteeuwen/go-bindata","github.com/gogo/protobuf/proto","g
name = "github.com/google/go-github"
revision = "1bc362c7737e51014af7299e016444b654095ad9"
[[constraint]]
name = "github.com/influxdata/influxdb"
revision = "af72d9b0e4ebe95be30e89b160f43eabaf0529ed"
[[constraint]]
name = "github.com/influxdata/kapacitor"
version = "^1.2.0"
[[constraint]]
name = "github.com/influxdata/usage-client"
revision = "6d3895376368aa52a3a81d2a16e90f0f52371967"
@ -75,3 +67,12 @@ required = ["github.com/jteeuwen/go-bindata","github.com/gogo/protobuf/proto","g
[[constraint]]
name = "google.golang.org/api"
revision = "bc20c61134e1d25265dd60049f5735381e79b631"
[[constraint]]
name = "github.com/influxdata/influxdb"
version = "~1.1.0"
[[constraint]]
name = "github.com/influxdata/kapacitor"
revision = "291ca33f5d7b8b277cbb9a7afb65397d1769a99e"

View File

@ -23,42 +23,23 @@ ${BINARY}: $(SOURCES) .bindata .jsdep .godep
go build -o ${BINARY} ${LDFLAGS} ./cmd/chronograf/main.go
define CHRONOGIRAFFE
tLf iCf.
.CCC. tCC:
CGG; CGG:
tG0Gt: GGGGGGGGGGGGGGGG1 .,:,
LG1,,:1CC: .GGL;iLC1iii1LCi;GG1 .1GCL1iGG1
LG1:::;i1CGGt;;;;;;L0t;;;;;;GGGC1;;::,iGC
,ii:. 1GG1iiii;;tfiC;;;;;;;GGCfCGCGGC,
fGCiiiiGi1Lt;;iCLL,i;;;CGt
fGG11iiii1C1iiiiiGt1;;;;;CGf
.GGLLL1i1CitfiiL1iCi;;iLCGGt
.CGL11LGCCCCCCCLLCGG1;1GG;
CGL1tf1111iiiiiiL1ifGG,
LGCff1fCt1tCfiiCiCGC
LGGf111111111iCGGt
fGGGGGGGGGGGGGGi
ifii111111itL
;f1i11111iitf
;f1iiiiiii1tf
:fi111iii11tf
:fi111ii1i1tf
:f111111ii1tt
,L111111ii1tt
.Li1111i1111CCCCCCCCCCCCCCLt;
L111ii11111ittttt1tttttittti1fC;
f1111ii111i1ttttt1;iii1ittt1ttttCt.
tt11ii111tti1ttt1tt1;11;;;;iitttifCCCL,
11i1i11ttttti;1t1;;;ttt1;;ii;itti;L,;CCL
;f;;;;1tttti;;ttti;;;;;;;;;;;1tt1ifi .CCi
,L;itti;;;it;;;;;tt1;;;t1;;;;;;ii;t; :CC,
L;;;;iti;;;;;;;;;;;;;;;;;;;;;;;i;L, ;CC.
ti;;;iLLfffi;;;;;ittt11i;;;;;;;;;L tCCfff;
it;;;;;;L,ti;;;;;1Ltttft1t;;;;;;1t ;CCCL;
:f;;;;;;L.ti;;;;;tftttf1,f;;;;;;f: ;CC1:
.L;;;;;;L.t1;;;;;tt111fi,f;;;;;;L.
1Li;;iL1 :Ci;;;tL1i1fC, Lt;;;;Li
.;tt; ifLt:;fLf; ;LCCt,
.-. .-.
| \/ |
/, ,_ `'-.
.-|\ /`\ '.
.' 0/ | 0\ \_ `".
.-' _,/ '--'.'|#''---'
`--' | / \#
| / \#
\ ;|\ .\#
|' ' // \ ::\#
\ /` \ ':\#
`"` \.. \#
\::. \#
\:: \#
\' .:\#
\ :::\#
\ '::\#
endef
export CHRONOGIRAFFE
chronogiraffe: ${BINARY}

View File

@ -330,9 +330,9 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
templates := make([]chronograf.Template, len(pb.Templates))
for i, t := range pb.Templates {
vals := make([]chronograf.BasicTemplateValue, len(t.Values))
vals := make([]chronograf.TemplateValue, len(t.Values))
for j, v := range t.Values {
vals[j] = chronograf.BasicTemplateValue{
vals[j] = chronograf.TemplateValue{
Selected: v.Selected,
Type: v.Type,
Value: v.Value,
@ -341,7 +341,7 @@ func UnmarshalDashboard(data []byte, d *chronograf.Dashboard) error {
template := chronograf.Template{
ID: chronograf.TemplateID(t.ID),
BasicTemplateVar: chronograf.BasicTemplateVar{
TemplateVar: chronograf.TemplateVar{
Var: t.TempVar,
Values: vals,
},
@ -434,8 +434,5 @@ func UnmarshalUser(data []byte, u *chronograf.User) error {
// UnmarshalUserPB decodes a user from binary protobuf data.
// We are ignoring the password for now.
func UnmarshalUserPB(data []byte, u *User) error {
if err := proto.Unmarshal(data, u); err != nil {
return err
}
return nil
return proto.Unmarshal(data, u)
}

View File

@ -1,21 +1,10 @@
package chronograf
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"regexp"
"strconv"
"strings"
"time"
"unicode"
"unicode/utf8"
"github.com/influxdata/influxdb/influxql"
)
// General errors.
@ -136,196 +125,17 @@ type Range struct {
Lower int64 `json:"lower"` // Lower is the lower bound
}
type TemplateVariable interface {
fmt.Stringer
Name() string // returns the variable name
Precedence() uint // ordinal indicating precedence level for replacement
}
type ExecutableVar interface {
Exec(string)
}
// TemplateValue is a value used to replace a template in an InfluxQL query
type BasicTemplateValue struct {
type TemplateValue struct {
Value string `json:"value"` // Value is the specific value used to replace a template in an InfluxQL query
Type string `json:"type"` // Type can be tagKey, tagValue, fieldKey, csv, measurement, database, constant
Selected bool `json:"selected"` // Selected states that this variable has been picked to use for replacement
}
// TemplateVar is a named variable within an InfluxQL query to be replaced with Values
type BasicTemplateVar struct {
Var string `json:"tempVar"` // Var is the string to replace within InfluxQL
Values []BasicTemplateValue `json:"values"` // Values are the replacement values within InfluxQL
}
func (t BasicTemplateVar) Name() string {
return t.Var
}
// String converts the template variable into a correct InfluxQL string based
// on its type
func (t BasicTemplateVar) String() string {
if len(t.Values) == 0 {
return ""
}
switch t.Values[0].Type {
case "tagKey", "fieldKey", "measurement", "database":
return `"` + t.Values[0].Value + `"`
case "tagValue", "timeStamp":
return `'` + t.Values[0].Value + `'`
case "csv", "constant":
return t.Values[0].Value
default:
return ""
}
}
func (t BasicTemplateVar) Precedence() uint {
return 0
}
type GroupByVar struct {
Var string `json:"tempVar"` // the name of the variable as present in the query
Duration time.Duration `json:"duration,omitempty"` // the Duration supplied by the query
Resolution uint `json:"resolution"` // the available screen resolution to render the results of this query
ReportingInterval time.Duration `json:"reportingInterval,omitempty"` // the interval at which data is reported to this series
}
// Exec is responsible for extracting the Duration from the query
func (g *GroupByVar) Exec(query string) {
whereClause := "WHERE"
start := strings.Index(query, whereClause)
if start == -1 {
// no where clause
return
}
// reposition start to after the 'where' keyword
durStr := query[start+len(whereClause):]
// attempt to parse out a relative time range
// locate duration literal start
prefix := "time > now() - "
lowerDuration, err := g.parseRelative(durStr, prefix)
if err == nil {
prefix := "time < now() - "
upperDuration, err := g.parseRelative(durStr, prefix)
if err != nil {
g.Duration = lowerDuration
return
}
g.Duration = lowerDuration - upperDuration
if g.Duration < 0 {
g.Duration = -g.Duration
}
}
dur, err := g.parseAbsolute(durStr)
if err == nil {
// we found an absolute time range
g.Duration = dur
}
}
// parseRelative locates and extracts a duration value from a fragment of an
// InfluxQL query following the "where" keyword. For example, in the fragment
// "time > now() - 180d GROUP BY :interval:", parseRelative would return a
// duration equal to 180d
func (g *GroupByVar) parseRelative(fragment string, prefix string) (time.Duration, error) {
start := strings.Index(fragment, prefix)
if start == -1 {
return time.Duration(0), errors.New("not a relative duration")
}
// reposition to duration literal
durFragment := fragment[start+len(prefix):]
// init counters
pos := 0
// locate end of duration literal
for pos < len(durFragment) {
rn, _ := utf8.DecodeRuneInString(durFragment[pos:])
if unicode.IsSpace(rn) {
break
}
pos++
}
// attempt to parse what we suspect is a duration literal
dur, err := influxql.ParseDuration(durFragment[:pos])
if err != nil {
return dur, err
}
return dur, nil
}
// parseAbsolute will determine the duration between two absolute timestamps
// found within an InfluxQL fragment following the "where" keyword. For
// example, the fragment "time > '1985-10-25T00:01:21-0800' and time <
// '1985-10-25T00:01:22-0800'" would yield a duration of 1s
func (g *GroupByVar) parseAbsolute(fragment string) (time.Duration, error) {
timePtn := `time\s[>|<]\s'([0-9\-T\:\.Z]+)'` // Playground: http://gobular.com/x/208f66bd-1889-4269-ab47-1efdfeeb63f0
re, err := regexp.Compile(timePtn)
if err != nil {
// this is a developer error and should complain loudly
panic("Bad Regex: err:" + err.Error())
}
if !re.Match([]byte(fragment)) {
return time.Duration(0), errors.New("absolute duration not found")
}
// extract at most two times
matches := re.FindAll([]byte(fragment), 2)
// parse out absolute times
durs := make([]time.Time, 0, 2)
for _, match := range matches {
durStr := re.FindSubmatch(match)
if tm, err := time.Parse(time.RFC3339Nano, string(durStr[1])); err == nil {
durs = append(durs, tm)
}
}
if len(durs) == 1 {
durs = append(durs, time.Now())
}
// reject more than 2 times found
if len(durs) != 2 {
return time.Duration(0), errors.New("must provide exactly two absolute times")
}
dur := durs[1].Sub(durs[0])
return dur, nil
}
func (g *GroupByVar) String() string {
// The function is: ((total_seconds * millisecond_conversion) / group_by) = pixels / 3
// Number of points given the pixels
pixels := float64(g.Resolution) / 3.0
msPerPixel := float64(g.Duration/time.Millisecond) / pixels
secPerPixel := float64(g.Duration/time.Second) / pixels
if secPerPixel < 1.0 {
if msPerPixel < 1.0 {
msPerPixel = 1.0
}
return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
}
// If groupby is more than 1 second round to the second
return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
}
func (g *GroupByVar) Name() string {
return g.Var
}
func (g *GroupByVar) Precedence() uint {
return 1
type TemplateVar struct {
Var string `json:"tempVar"` // Var is the string to replace within InfluxQL
Values []TemplateValue `json:"values"` // Values are the replacement values within InfluxQL
}
// TemplateID is the unique ID used to identify a template
@ -333,7 +143,7 @@ type TemplateID string
// Template represents a series of choices to replace TemplateVars within InfluxQL
type Template struct {
BasicTemplateVar
TemplateVar
ID TemplateID `json:"id"` // ID is the unique ID associated with this template
Type string `json:"type"` // Type can be fieldKeys, tagKeys, tagValues, CSV, constant, query, measurements, databases
Label string `json:"label"` // Label is a user-facing description of the Template
@ -342,69 +152,15 @@ type Template struct {
// Query retrieves a Response from a TimeSeries.
type Query struct {
Command string `json:"query"` // Command is the query itself
DB string `json:"db,omitempty"` // DB is optional and if empty will not be used.
RP string `json:"rp,omitempty"` // RP is a retention policy and optional; if empty will not be used.
TemplateVars TemplateVars `json:"tempVars,omitempty"` // TemplateVars are template variables to replace within an InfluxQL query
Wheres []string `json:"wheres,omitempty"` // Wheres restricts the query to certain attributes
GroupBys []string `json:"groupbys,omitempty"` // GroupBys collate the query by these tags
Resolution uint `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
Label string `json:"label,omitempty"` // Label is the Y-Axis label for the data
Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
}
// TemplateVars are a heterogeneous collection of different TemplateVariables
// with the capability to decode arbitrary JSON into the appropriate template
// variable type
type TemplateVars []TemplateVariable
func (t *TemplateVars) UnmarshalJSON(text []byte) error {
// TODO: Need to test that the server throws an error when :interval:'s Resolution or ReportingInterval are zero-valued
rawVars := bytes.NewReader(text)
dec := json.NewDecoder(rawVars)
// read open bracket
rawTok, err := dec.Token()
if err != nil {
return err
}
tok, isDelim := rawTok.(json.Delim)
if !isDelim || tok != '[' {
return errors.New("Expected JSON array, but found " + tok.String())
}
for dec.More() {
var halfBakedVar json.RawMessage
err := dec.Decode(&halfBakedVar)
if err != nil {
return err
}
var agb GroupByVar
err = json.Unmarshal(halfBakedVar, &agb)
if err != nil {
return err
}
// ensure that we really have a GroupByVar
if agb.Resolution != 0 {
(*t) = append(*t, &agb)
continue
}
var tvar BasicTemplateVar
err = json.Unmarshal(halfBakedVar, &tvar)
if err != nil {
return err
}
// ensure that we really have a BasicTemplateVar
if len(tvar.Values) != 0 {
(*t) = append(*t, tvar)
}
}
return nil
Command string `json:"query"` // Command is the query itself
DB string `json:"db,omitempty"` // DB is optional and if empty will not be used.
RP string `json:"rp,omitempty"` // RP is a retention policy and optional; if empty will not be used.
TemplateVars []TemplateVar `json:"tempVars,omitempty"` // TemplateVars are template variables to replace within an InfluxQL query
Wheres []string `json:"wheres,omitempty"` // Wheres restricts the query to certain attributes
GroupBys []string `json:"groupbys,omitempty"` // GroupBys collate the query by these tags
Resolution uint `json:"resolution,omitempty"` // Resolution is the available screen resolution to render query results
Label string `json:"label,omitempty"` // Label is the Y-Axis label for the data
Range *Range `json:"range,omitempty"` // Range is the default Y-Axis range for the data
}
// DashboardQuery includes state for the query builder. This is a transition

View File

@ -1,63 +0,0 @@
package chronograf_test
import (
"testing"
"github.com/influxdata/chronograf"
)
func Test_GroupByVar(t *testing.T) {
gbvTests := []struct {
name string
query string
want string
resolution uint // the screen resolution to render queries into
}{
{
name: "relative time only lower bound with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
resolution: 1000,
want: "time(259s)",
},
{
name: "relative time with relative upper bound with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
resolution: 1000,
want: "time(180ms)",
},
{
name: "relative time with relative lower bound and now upper with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
resolution: 1000,
want: "time(259s)",
},
{
name: "absolute time with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
resolution: 1000,
want: "time(180ms)",
},
{
name: "absolute time with nano seconds and zero duraiton",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
resolution: 1000,
want: "time(1ms)",
},
}
for _, test := range gbvTests {
t.Run(test.name, func(t *testing.T) {
gbv := chronograf.GroupByVar{
Var: ":interval:",
Resolution: test.resolution,
}
gbv.Exec(test.query)
got := gbv.String()
if got != test.want {
t.Fatalf("%q - durations not equal! Want: %s, Got: %s", test.name, test.want, got)
}
})
}
}

View File

@ -9,6 +9,7 @@ import (
"net/http"
"net/url"
"strings"
"time"
"github.com/influxdata/chronograf"
)
@ -55,7 +56,10 @@ func (c *Client) query(u *url.URL, q chronograf.Query) (chronograf.Response, err
command := q.Command
// TODO(timraymond): move this to the upper Query() function
if len(q.TemplateVars) > 0 {
command = TemplateReplace(q.Command, q.TemplateVars)
command, err = TemplateReplace(q.Command, q.TemplateVars, time.Now())
if err != nil {
return nil, err
}
}
logs := c.Logger.
WithField("component", "proxy").

View File

@ -276,11 +276,11 @@ func Test_Influx_HTTPS_InsecureSkipVerify(t *testing.T) {
called = false
q = ""
query = chronograf.Query{
Command: "select $field from cpu",
TemplateVars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
Command: "select :field: from cpu",
TemplateVars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: ":field:",
Values: []chronograf.TemplateValue{
{
Value: "usage_user",
Type: "fieldKey",

View File

@ -10,6 +10,52 @@ import (
"github.com/influxdata/influxdb/influxql"
)
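// TimeRangeAsEpochNano returns the lower and upper time bounds of expr in epoch
// nanoseconds; an unbounded lower bound falls back to influxql.MinTime and an
// unbounded upper bound falls back to now.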
func TimeRangeAsEpochNano(expr influxql.Expr, now time.Time) (min, max int64, err error) {
tmin, tmax, err := influxql.TimeRange(expr)
if err != nil {
return 0, 0, err
}
if tmin.IsZero() {
min = time.Unix(0, influxql.MinTime).UnixNano()
} else {
min = tmin.UnixNano()
}
if tmax.IsZero() {
max = now.UnixNano()
} else {
max = tmax.UnixNano()
}
return
}
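// WhereToken is the keyword that begins the condition clause of an InfluxQL statement.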
const WhereToken = "WHERE"
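// ParseTime reduces the WHERE clause of influxQL relative to now and returns the
// total duration covered by its time condition.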
func ParseTime(influxQL string, now time.Time) (time.Duration, error) {
start := strings.Index(strings.ToUpper(influxQL), WhereToken)
if start == -1 {
return 0, fmt.Errorf("not a relative duration")
}
start += len(WhereToken)
where := influxQL[start:]
cond, err := influxql.ParseExpr(where)
if err != nil {
return 0, err
}
nowVal := &influxql.NowValuer{
Now: now,
}
cond = influxql.Reduce(cond, nowVal)
min, max, err := TimeRangeAsEpochNano(cond, now)
if err != nil {
return 0, err
}
dur := time.Duration(max - min)
if dur < 0 {
dur = 0
}
return dur, nil
}
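// A minimal usage sketch (illustrative; not part of this commit):
//
//	now, _ := time.Parse(time.RFC3339, "2000-01-01T00:00:00Z")
//	dur, err := ParseTime(`SELECT "usage_idle" FROM "cpu" WHERE time > now() - 1h GROUP BY :interval:`, now)
//	// err is nil and dur is roughly one hour; a query with no WHERE clause returns an error instead.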
// Convert changes an InfluxQL query to a QueryConfig
func Convert(influxQL string) (chronograf.QueryConfig, error) {
itsDashboardTime := false

View File

@ -2,6 +2,7 @@ package influx
import (
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/influxdata/chronograf"
@ -767,3 +768,43 @@ func TestConvert(t *testing.T) {
})
}
}
func TestParseTime(t *testing.T) {
tests := []struct {
name string
influxQL string
now string
want time.Duration
wantErr bool
}{
{
name: "time equal",
now: "2000-01-01T00:00:00Z",
influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
want: 0,
},
{
name: "time shifted by one hour",
now: "2000-01-01T00:00:00Z",
influxQL: `SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h and time < now() - 1h GROUP BY :interval: FILL(null);`,
want: 3599999999998,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
now, err := time.Parse(time.RFC3339, tt.now)
if err != nil {
t.Fatalf("%v", err)
}
got, err := ParseTime(tt.influxQL, now)
if (err != nil) != tt.wantErr {
t.Errorf("ParseTime() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Logf("%d", got)
t.Errorf("ParseTime() = %v, want %v", got, tt.want)
}
})
}
}

View File

@ -1,40 +1,106 @@
package influx
import (
"sort"
"strconv"
"strings"
"time"
"github.com/influxdata/chronograf"
)
// TemplateReplace replaces templates with values within the query string
func TemplateReplace(query string, templates chronograf.TemplateVars) string {
tvarsByPrecedence := make(map[uint]chronograf.TemplateVars, len(templates))
maxPrecedence := uint(0)
for _, tmp := range templates {
precedence := tmp.Precedence()
if precedence > maxPrecedence {
maxPrecedence = precedence
}
tvarsByPrecedence[precedence] = append(tvarsByPrecedence[precedence], tmp)
}
replaced := query
for prc := uint(0); prc <= maxPrecedence; prc++ {
replacements := []string{}
for _, v := range tvarsByPrecedence[prc] {
if evar, ok := v.(chronograf.ExecutableVar); ok {
evar.Exec(replaced)
}
newVal := v.String()
if newVal != "" {
replacements = append(replacements, v.Name(), newVal)
}
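// SortTemplates orders template variables deterministically: first by the number
// of values they carry, then by each value's type and value.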
func SortTemplates(ts []chronograf.TemplateVar) []chronograf.TemplateVar {
sort.Slice(ts, func(i, j int) bool {
if len(ts[i].Values) != len(ts[j].Values) {
return len(ts[i].Values) < len(ts[j].Values)
}
replacer := strings.NewReplacer(replacements...)
replaced = replacer.Replace(replaced)
}
if len(ts[i].Values) == 0 {
return i < j
}
return replaced
for k := range ts[i].Values {
if ts[i].Values[k].Type != ts[j].Values[k].Type {
return ts[i].Values[k].Type < ts[j].Values[k].Type
}
if ts[i].Values[k].Value != ts[j].Values[k].Value {
return ts[i].Values[k].Value < ts[j].Values[k].Value
}
}
return i < j
})
return ts
}
// RenderTemplate converts the template variable into a correct InfluxQL string based
// on its type
func RenderTemplate(query string, t chronograf.TemplateVar, now time.Time) (string, error) {
if len(t.Values) == 0 {
return query, nil
}
switch t.Values[0].Type {
case "tagKey", "fieldKey", "measurement", "database":
return strings.Replace(query, t.Var, `"`+t.Values[0].Value+`"`, -1), nil
case "tagValue", "timeStamp":
return strings.Replace(query, t.Var, `'`+t.Values[0].Value+`'`, -1), nil
case "csv", "constant":
return strings.Replace(query, t.Var, t.Values[0].Value, -1), nil
}
tv := map[string]string{}
for i := range t.Values {
tv[t.Values[i].Type] = t.Values[i].Value
}
if res, ok := tv["resolution"]; ok {
resolution, err := strconv.ParseInt(res, 0, 64)
if err != nil {
return "", err
}
ppp, ok := tv["pointsPerPixel"]
if !ok {
ppp = "3"
}
pixelsPerPoint, err := strconv.ParseInt(ppp, 0, 64)
if err != nil {
return "", err
}
dur, err := ParseTime(query, now)
if err != nil {
return "", err
}
interval := AutoGroupBy(resolution, pixelsPerPoint, dur)
return strings.Replace(query, t.Var, interval, -1), nil
}
return query, nil
}
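// AutoGroupBy computes the GROUP BY time interval needed to plot roughly
// resolution/pixelsPerPoint points across the given duration.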
func AutoGroupBy(resolution, pixelsPerPoint int64, duration time.Duration) string {
// The function is: ((total_seconds * millisecond_conversion) / group_by) = pixels / 3
// Number of points given the pixels
pixels := float64(resolution) / float64(pixelsPerPoint)
msPerPixel := float64(duration/time.Millisecond) / pixels
secPerPixel := float64(duration/time.Second) / pixels
if secPerPixel < 1.0 {
if msPerPixel < 1.0 {
msPerPixel = 1.0
}
return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
}
// If groupby is more than 1 second round to the second
return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
}
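// A quick check of the arithmetic above (illustrative, not part of this commit):
// with resolution = 1000, pixelsPerPoint = 3, and a 24h duration,
// pixels = 1000/3 ≈ 333.3 and secPerPixel = 86400/333.3 ≈ 259.2, so the
// rendered interval is "time(259s)", matching the test expectations below.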
// TemplateReplace replaces templates with values within the query string
func TemplateReplace(query string, templates []chronograf.TemplateVar, now time.Time) (string, error) {
templates = SortTemplates(templates)
for i := range templates {
var err error
query, err = RenderTemplate(query, templates[i], now)
if err != nil {
return "", err
}
}
return query, nil
}
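A minimal usage sketch of the reworked template API, framed as a test so it is self-contained within this package; the function name, query, and variable below are illustrative and not taken from this commit:
func TestTemplateReplaceSketch(t *testing.T) {
	vars := []chronograf.TemplateVar{
		{
			Var:    ":field:",
			Values: []chronograf.TemplateValue{{Type: "fieldKey", Value: "usage_user"}},
		},
	}
	got, err := TemplateReplace(`SELECT :field: FROM "cpu"`, vars, time.Now())
	if err != nil {
		// only the auto group by path (type "resolution") can return an error
		t.Fatal(err)
	}
	want := `SELECT "usage_user" FROM "cpu"` // fieldKey values are rendered double-quoted
	if got != want {
		t.Errorf("got %s, want %s", got, want)
	}
}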

View File

@ -2,6 +2,7 @@ package influx
import (
"encoding/json"
"fmt"
"reflect"
"testing"
"time"
@ -13,43 +14,43 @@ func TestTemplateReplace(t *testing.T) {
tests := []struct {
name string
query string
vars chronograf.TemplateVars
vars []chronograf.TemplateVar
want string
}{
{
name: "select with parameters",
query: "$METHOD field1, $field FROM $measurement WHERE temperature > $temperature",
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
Var: "$temperature",
Values: []chronograf.BasicTemplateValue{
query: ":method: field1, :field: FROM :measurement: WHERE temperature > :temperature:",
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: ":temperature:",
Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: "10",
},
},
},
chronograf.BasicTemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
chronograf.TemplateVar{
Var: ":field:",
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
Value: "field2",
},
},
},
chronograf.BasicTemplateVar{
Var: "$METHOD",
Values: []chronograf.BasicTemplateValue{
chronograf.TemplateVar{
Var: ":method:",
Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: "SELECT",
},
},
},
chronograf.BasicTemplateVar{
Var: "$measurement",
Values: []chronograf.BasicTemplateValue{
chronograf.TemplateVar{
Var: ":measurement:",
Values: []chronograf.TemplateValue{
{
Type: "csv",
Value: `"cpu"`,
@ -62,28 +63,28 @@ func TestTemplateReplace(t *testing.T) {
{
name: "select with parameters and aggregates",
query: `SELECT mean($field) FROM "cpu" WHERE $tag = $value GROUP BY $tag`,
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: "$value",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "tagValue",
Value: "howdy.com",
},
},
},
chronograf.BasicTemplateVar{
chronograf.TemplateVar{
Var: "$tag",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "tagKey",
Value: "host",
},
},
},
chronograf.BasicTemplateVar{
chronograf.TemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
Value: "field",
@ -101,8 +102,8 @@ func TestTemplateReplace(t *testing.T) {
{
name: "var without a value",
query: `SELECT $field FROM "cpu"`,
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: "$field",
},
},
@ -111,10 +112,10 @@ func TestTemplateReplace(t *testing.T) {
{
name: "var with unknown type",
query: `SELECT $field FROM "cpu"`,
vars: chronograf.TemplateVars{
chronograf.BasicTemplateVar{
vars: []chronograf.TemplateVar{
chronograf.TemplateVar{
Var: "$field",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "who knows?",
Value: "field",
@ -127,42 +128,63 @@ func TestTemplateReplace(t *testing.T) {
{
name: "auto group by",
query: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by :interval:`,
vars: chronograf.TemplateVars{
&chronograf.GroupByVar{
Var: ":interval:",
Duration: 180 * 24 * time.Hour,
Resolution: 1000,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
},
want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46656s)`,
want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by without duration",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by :interval:`,
vars: chronograf.TemplateVars{
&chronograf.GroupByVar{
Var: ":interval:",
Duration: 0 * time.Minute,
Resolution: 1000,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
},
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by with :dashboardTime:",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > :dashboardTime: group by :interval:`,
vars: chronograf.TemplateVars{
&chronograf.GroupByVar{
Var: ":interval:",
Duration: 0 * time.Minute,
Resolution: 1000,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
&chronograf.BasicTemplateVar{
{
Var: ":dashboardTime:",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Type: "constant",
Value: "now() - 4320h",
@ -170,20 +192,28 @@ func TestTemplateReplace(t *testing.T) {
},
},
},
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46656s)`,
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
},
{
name: "auto group by failing condition",
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`,
vars: []chronograf.TemplateVariable{
&chronograf.GroupByVar{
Var: ":interval:",
Resolution: 115,
ReportingInterval: 10 * time.Second,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "115",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
},
},
chronograf.BasicTemplateVar{
{
Var: ":dashboardTime:",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Value: "now() - 1h",
Type: "constant",
@ -197,7 +227,14 @@ func TestTemplateReplace(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := TemplateReplace(tt.query, tt.vars)
now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
if err != nil {
t.Fatal(err)
}
got, err := TemplateReplace(tt.query, tt.vars, now)
if err != nil {
t.Fatalf("TestParse unexpected TemplateReplace error: %v", err)
}
if got != tt.want {
t.Errorf("TestParse %s =\n%s\nwant\n%s", tt.name, got, tt.want)
}
@ -209,8 +246,20 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
req := `[
{
"tempVar": ":interval:",
"resolution": 1000,
"reportingInterval": 10
"values": [
{
"value": "1000",
"type": "resolution"
},
{
"value": "3",
"type": "pointsPerPixel"
},
{
"value": "10",
"type": "reportingInterval"
}
]
},
{
"tempVar": ":cpu:",
@ -224,15 +273,27 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
}
]`
expected := []chronograf.TemplateVariable{
&chronograf.GroupByVar{
Var: ":interval:",
Resolution: 1000,
ReportingInterval: 10 * time.Nanosecond,
want := []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
},
{
Value: "10",
Type: "reportingInterval",
},
},
},
chronograf.BasicTemplateVar{
{
Var: ":cpu:",
Values: []chronograf.BasicTemplateValue{
Values: []chronograf.TemplateValue{
{
Value: "cpu-total",
Type: "tagValue",
@ -242,65 +303,128 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
},
}
var tvars chronograf.TemplateVars
err := json.Unmarshal([]byte(req), &tvars)
var got []chronograf.TemplateVar
err := json.Unmarshal([]byte(req), &got)
if err != nil {
t.Fatal("Err unmarshaling:", err)
}
if len(tvars) != len(expected) {
t.Fatal("Expected", len(expected), "vars but found", len(tvars))
}
if !reflect.DeepEqual(*(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar))) {
t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", *(tvars[0].(*chronograf.GroupByVar)), *(expected[0].(*chronograf.GroupByVar)))
}
if !reflect.DeepEqual(tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar)) {
t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", tvars[1].(chronograf.BasicTemplateVar), expected[1].(chronograf.BasicTemplateVar))
if !reflect.DeepEqual(got, want) {
t.Errorf("UnmarshalJSON() = \n%#v\n want \n%#v\n", got, want)
}
}
func TestGroupByVarString(t *testing.T) {
func TestAutoGroupBy(t *testing.T) {
tests := []struct {
name string
tvar *chronograf.GroupByVar
want string
name string
resolution int64
pixelsPerPoint int64
duration time.Duration
want string
}{
{
name: "String() calculates the GROUP BY interval",
tvar: &chronograf.GroupByVar{
Resolution: 700,
ReportingInterval: 10 * time.Second,
Duration: 24 * time.Hour,
},
want: "time(370s)",
name: "String() calculates the GROUP BY interval",
resolution: 700,
pixelsPerPoint: 3,
duration: 24 * time.Hour,
want: "time(370s)",
},
{
name: "String() milliseconds if less than one second intervals",
tvar: &chronograf.GroupByVar{
Resolution: 100000,
ReportingInterval: 10 * time.Second,
Duration: time.Hour,
},
want: "time(107ms)",
name: "String() milliseconds if less than one second intervals",
resolution: 100000,
pixelsPerPoint: 3,
duration: time.Hour,
want: "time(107ms)",
},
{
name: "String() milliseconds if less than one millisecond",
tvar: &chronograf.GroupByVar{
Resolution: 100000,
ReportingInterval: 10 * time.Second,
Duration: time.Second,
},
want: "time(1ms)",
name: "String() milliseconds if less than one millisecond",
resolution: 100000,
pixelsPerPoint: 3,
duration: time.Second,
want: "time(1ms)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := tt.tvar.String()
got := AutoGroupBy(tt.resolution, tt.pixelsPerPoint, tt.duration)
if got != tt.want {
t.Errorf("TestGroupByVarString %s =\n%s\nwant\n%s", tt.name, got, tt.want)
t.Errorf("TestAutoGroupBy %s =\n%s\nwant\n%s", tt.name, got, tt.want)
}
})
}
}
func Test_RenderTemplate(t *testing.T) {
gbvTests := []struct {
name string
query string
want string
resolution uint // the screen resolution to render queries into
}{
{
name: "relative time only lower bound with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY time(259s)",
},
{
name: "relative time offset by week",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY time(259s)",
},
{
name: "relative time with relative upper bound with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY time(179ms)",
},
{
name: "relative time with relative lower bound and now upper with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY time(259s)",
},
{
name: "absolute time with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY time(179ms)",
},
{
name: "absolute time with nano seconds and zero duraiton",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY time(1ms)",
},
}
for _, tt := range gbvTests {
t.Run(tt.name, func(t *testing.T) {
now, err := time.Parse(time.RFC3339, "1985-10-25T00:01:00Z")
if err != nil {
t.Fatal(err)
}
tvar := chronograf.TemplateVar{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: fmt.Sprintf("%d", tt.resolution),
Type: "resolution",
},
},
}
got, err := RenderTemplate(tt.query, tvar, now)
if err != nil {
t.Fatalf("unexpected error rendering template %v", err)
}
if got != tt.want {
t.Fatalf("%q - durations not equal! Want: %s, Got: %s", tt.name, tt.want, got)
}
})
}
}
// SELECT mean("numSeries") AS "mean_numSeries" FROM "_internal"."monitor"."database" WHERE time > now() - 1h GROUP BY :interval: FILL(null);SELECT mean("numSeries") AS "mean_numSeries_shifted__1__h" FROM "_internal"."monitor"."database" WHERE time > now() - 1h - 1h AND time < now() - 1h GROUP BY :interval: FILL(null)

View File

@ -131,7 +131,7 @@ func TestClient_All(t *testing.T) {
ID: "howdy",
Name: "howdy",
TICKScript: "",
Type: "unknown TaskType 0",
Type: "invalid",
Status: "enabled",
DBRPs: []chronograf.DBRP{},
},

37
kapacitor/pipeline.go Normal file
View File

@ -0,0 +1,37 @@
package kapacitor
import (
"bytes"
"encoding/json"
"github.com/influxdata/chronograf"
"github.com/influxdata/kapacitor/pipeline"
totick "github.com/influxdata/kapacitor/pipeline/tick"
)
// MarshalTICK converts tickscript to JSON representation
func MarshalTICK(script string) ([]byte, error) {
pipeline, err := newPipeline(chronograf.TICKScript(script))
if err != nil {
return nil, err
}
return json.MarshalIndent(pipeline, "", " ")
}
// UnmarshalTICK converts JSON to tickscript
func UnmarshalTICK(octets []byte) (string, error) {
pipe := &pipeline.Pipeline{}
if err := pipe.Unmarshal(octets); err != nil {
return "", err
}
ast := totick.AST{}
err := ast.Build(pipe)
if err != nil {
return "", err
}
var buf bytes.Buffer
ast.Program.Format(&buf, "", false)
return buf.String(), nil
}
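A minimal round-trip sketch, framed as a test so it is self-contained within this package; the function name and the TICKscript literal are illustrative and not taken from this commit:
func TestMarshalTICKRoundTripSketch(t *testing.T) {
	script := `stream
    |from()
        .measurement('cpu')
    |httpOut('output')
`
	// TICKscript -> JSON pipeline representation
	octets, err := MarshalTICK(script)
	if err != nil {
		t.Fatal(err)
	}
	// JSON pipeline -> canonical, formatted TICKscript
	formatted, err := UnmarshalTICK(octets)
	if err != nil {
		t.Fatal(err)
	}
	if formatted == "" {
		t.Error("expected a non-empty formatted TICKscript")
	}
}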

288
kapacitor/pipeline_test.go Normal file
View File

@ -0,0 +1,288 @@
package kapacitor
import (
"fmt"
"testing"
"github.com/sergi/go-diff/diffmatchpatch"
)
func TestPipelineJSON(t *testing.T) {
script := `var db = 'telegraf'
var rp = 'autogen'
var measurement = 'cpu'
var groupBy = ['host', 'cluster_id']
var whereFilter = lambda: ("cpu" == 'cpu_total') AND ("host" == 'acc-0eabc309-eu-west-1-data-3' OR "host" == 'prod')
var period = 10m
var every = 30s
var name = 'name'
var idVar = name + ':{{.Group}}'
var message = 'message'
var idTag = 'alertID'
var levelTag = 'level'
var messageField = 'message'
var durationField = 'duration'
var outputDB = 'chronograf'
var outputRP = 'autogen'
var outputMeasurement = 'alerts'
var triggerType = 'threshold'
var crit = 90
var data = stream
|from()
.database(db)
.retentionPolicy(rp)
.measurement(measurement)
.groupBy(groupBy)
.where(whereFilter)
|window()
.period(period)
.every(every)
.align()
|mean('usage_user')
.as('value')
var trigger = data
|alert()
.crit(lambda: "value" > crit)
.stateChangesOnly()
.message(message)
.id(idVar)
.idTag(idTag)
.levelTag(levelTag)
.messageField(messageField)
.durationField(durationField)
.slack()
.victorOps()
.email()
trigger
|influxDBOut()
.create()
.database(outputDB)
.retentionPolicy(outputRP)
.measurement(outputMeasurement)
.tag('alertName', name)
.tag('triggerType', triggerType)
trigger
|httpOut('output')
`
want := `var alert4 = stream
|from()
.database('telegraf')
.retentionPolicy('autogen')
.measurement('cpu')
.where(lambda: "cpu" == 'cpu_total' AND "host" == 'acc-0eabc309-eu-west-1-data-3' OR "host" == 'prod')
.groupBy('host', 'cluster_id')
|window()
.period(10m)
.every(30s)
.align()
|mean('usage_user')
.as('value')
|alert()
.id('name:{{.Group}}')
.message('message')
.details('{{ json . }}')
.crit(lambda: "value" > 90)
.history(21)
.levelTag('level')
.messageField('message')
.durationField('duration')
.idTag('alertID')
.stateChangesOnly()
.email()
.victorOps()
.slack()
alert4
|httpOut('output')
alert4
|influxDBOut()
.database('chronograf')
.retentionPolicy('autogen')
.measurement('alerts')
.buffer(1000)
.flushInterval(10s)
.create()
.tag('alertName', 'name')
.tag('triggerType', 'threshold')
`
octets, err := MarshalTICK(script)
if err != nil {
t.Fatalf("MarshalTICK unexpected error %v", err)
}
got, err := UnmarshalTICK(octets)
if err != nil {
t.Fatalf("UnmarshalTICK unexpected error %v", err)
}
if got != want {
fmt.Println(got)
diff := diffmatchpatch.New()
delta := diff.DiffMain(want, got, true)
t.Errorf("%s", diff.DiffPrettyText(delta))
}
}
func TestPipelineJSONDeadman(t *testing.T) {
script := `var db = 'telegraf'
var rp = 'autogen'
var measurement = 'cpu'
var groupBy = ['host', 'cluster_id']
var whereFilter = lambda: ("cpu" == 'cpu_total') AND ("host" == 'acc-0eabc309-eu-west-1-data-3' OR "host" == 'prod')
var period = 10m
var name = 'name'
var idVar = name + ':{{.Group}}'
var message = 'message'
var idTag = 'alertID'
var levelTag = 'level'
var messageField = 'message'
var durationField = 'duration'
var outputDB = 'chronograf'
var outputRP = 'autogen'
var outputMeasurement = 'alerts'
var triggerType = 'deadman'
var threshold = 0.0
var data = stream
|from()
.database(db)
.retentionPolicy(rp)
.measurement(measurement)
.groupBy(groupBy)
.where(whereFilter)
var trigger = data
|deadman(threshold, period)
.stateChangesOnly()
.message(message)
.id(idVar)
.idTag(idTag)
.levelTag(levelTag)
.messageField(messageField)
.durationField(durationField)
.slack()
.victorOps()
.email()
trigger
|eval(lambda: "emitted")
.as('value')
.keep('value', messageField, durationField)
|influxDBOut()
.create()
.database(outputDB)
.retentionPolicy(outputRP)
.measurement(outputMeasurement)
.tag('alertName', name)
.tag('triggerType', triggerType)
trigger
|httpOut('output')
`
want := `var from1 = stream
|from()
.database('telegraf')
.retentionPolicy('autogen')
.measurement('cpu')
.where(lambda: "cpu" == 'cpu_total' AND "host" == 'acc-0eabc309-eu-west-1-data-3' OR "host" == 'prod')
.groupBy('host', 'cluster_id')
var alert5 = from1
|stats(10m)
.align()
|derivative('emitted')
.as('emitted')
.unit(10m)
.nonNegative()
|alert()
.id('name:{{.Group}}')
.message('message')
.details('{{ json . }}')
.crit(lambda: "emitted" <= 0.0)
.history(21)
.levelTag('level')
.messageField('message')
.durationField('duration')
.idTag('alertID')
.stateChangesOnly()
.email()
.victorOps()
.slack()
alert5
|httpOut('output')
alert5
|eval(lambda: "emitted")
.as('value')
.tags()
.keep('value', 'message', 'duration')
|influxDBOut()
.database('chronograf')
.retentionPolicy('autogen')
.measurement('alerts')
.buffer(1000)
.flushInterval(10s)
.create()
.tag('alertName', 'name')
.tag('triggerType', 'deadman')
`
octets, err := MarshalTICK(script)
if err != nil {
t.Fatalf("MarshalTICK unexpected error %v", err)
}
got, err := UnmarshalTICK(octets)
if err != nil {
t.Fatalf("UnmarshalTICK unexpected error %v", err)
}
if got != want {
fmt.Println(got)
diff := diffmatchpatch.New()
delta := diff.DiffMain(want, got, true)
t.Errorf("%s", diff.DiffPrettyText(delta))
}
}

View File

@ -3,6 +3,7 @@ package kapacitor
import (
"bytes"
"fmt"
"strings"
"time"
"github.com/influxdata/chronograf"
@ -33,10 +34,19 @@ func formatTick(tickscript string) (chronograf.TICKScript, error) {
}
func validateTick(script chronograf.TICKScript) error {
_, err := newPipeline(script)
return err
}
func newPipeline(script chronograf.TICKScript) (*pipeline.Pipeline, error) {
edge := pipeline.StreamEdge
if strings.Contains(string(script), "batch") {
edge = pipeline.BatchEdge
}
scope := stateful.NewScope()
predefinedVars := map[string]tick.Var{}
_, err := pipeline.CreatePipeline(string(script), pipeline.StreamEdge, scope, &deadman{}, predefinedVars)
return err
return pipeline.CreatePipeline(string(script), edge, scope, &deadman{}, predefinedVars)
}
// deadman is an empty implementation of a kapacitor DeadmanService to allow CreatePipeline

View File

@ -4,6 +4,7 @@ import (
"encoding/json"
"fmt"
"net/http"
"time"
"golang.org/x/net/context"
@ -21,8 +22,8 @@ type QueryRequest struct {
// QueriesRequest converts all queries to queryConfigs with the help
// of the template variables
type QueriesRequest struct {
Queries []QueryRequest `json:"queries"`
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
Queries []QueryRequest `json:"queries"`
TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
}
// QueryResponse is the return result of a QueryRequest including
@ -33,7 +34,7 @@ type QueryResponse struct {
QueryConfig chronograf.QueryConfig `json:"queryConfig"`
QueryAST *queries.SelectStatement `json:"queryAST,omitempty"`
QueryTemplated *string `json:"queryTemplated,omitempty"`
TemplateVars chronograf.TemplateVars `json:"tempVars,omitempty"`
TemplateVars []chronograf.TemplateVar `json:"tempVars,omitempty"`
}
// QueriesResponse is the response for a QueriesRequest
@ -72,7 +73,12 @@ func (s *Service) Queries(w http.ResponseWriter, r *http.Request) {
Query: q.Query,
}
query := influx.TemplateReplace(q.Query, req.TemplateVars)
query, err := influx.TemplateReplace(q.Query, req.TemplateVars, time.Now())
if err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
return
}
qc := ToQueryConfig(query)
if err := s.DefaultRP(ctx, &qc, &src); err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)

View File

@ -98,7 +98,7 @@ func TestService_Queries(t *testing.T) {
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
"queries": [
{
"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > now() - 1m",
"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > :dashboardTime: AND time < :upperDashboardTime: GROUP BY :interval:",
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
],
@ -153,13 +153,20 @@ func TestService_Queries(t *testing.T) {
"id": "interval",
"type": "constant",
"tempVar": ":interval:",
"resolution": 1000,
"reportingInterval": 10000000000,
"values": []
"values": [
{
"value": "1000",
"type": "resolution"
},
{
"value": "3",
"type": "pointsPerPixel"
}
]
}
]
}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"_internal","measurement":"httpd","retentionPolicy":"monitor","fields":[{"value":"pingReq","type":"field","alias":""}],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","range":{"upper":"","lower":"now() - 1m"}},"queryAST":{"condition":{"expr":"binary","op":"\u003e","lhs":{"expr":"reference","val":"time"},"rhs":{"expr":"binary","op":"-","lhs":{"expr":"call","name":"now"},"rhs":{"expr":"literal","val":"1m","type":"duration"}}},"fields":[{"column":{"expr":"reference","val":"pingReq"}}],"sources":[{"database":"_internal","retentionPolicy":"monitor","name":"httpd","type":"measurement"}]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 1m","tempVars":[{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":interval:","duration":60000000000,"resolution":1000,"reportingInterval":10000000000}]}]}
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","range":null},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 15m AND time \u003c now() GROUP BY time(2s)","tempVars":[{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":interval:","values":[{"value":"1000","type":"resolution","selected":false},{"value":"3","type":"pointsPerPixel","selected":false}]}]}]}
`,
},
}

View File

@ -16,8 +16,8 @@ func TestValidTemplateRequest(t *testing.T) {
name: "Valid Template",
template: &chronograf.Template{
Type: "fieldKeys",
BasicTemplateVar: chronograf.BasicTemplateVar{
Values: []chronograf.BasicTemplateValue{
TemplateVar: chronograf.TemplateVar{
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
},
@ -30,8 +30,8 @@ func TestValidTemplateRequest(t *testing.T) {
wantErr: true,
template: &chronograf.Template{
Type: "Unknown Type",
BasicTemplateVar: chronograf.BasicTemplateVar{
Values: []chronograf.BasicTemplateValue{
TemplateVar: chronograf.TemplateVar{
Values: []chronograf.TemplateValue{
{
Type: "fieldKey",
},
@ -44,8 +44,8 @@ func TestValidTemplateRequest(t *testing.T) {
wantErr: true,
template: &chronograf.Template{
Type: "csv",
BasicTemplateVar: chronograf.BasicTemplateVar{
Values: []chronograf.BasicTemplateValue{
TemplateVar: chronograf.TemplateVar{
Values: []chronograf.TemplateValue{
{
Type: "unknown value",
},

View File

@ -263,14 +263,23 @@ class DashboardPage extends Component {
],
}
// this controls the auto group by behavior
const interval = {
id: 'interval',
type: 'constant',
type: 'autoGroupBy',
tempVar: ':interval:',
resolution: 1000,
reportingInterval: 10000000000,
values: [],
label: 'automatically determine the best group by time',
values: [
{
value: '1000', // pixels
type: 'resolution',
selected: true,
},
{
value: '3',
type: 'pointsPerPixel',
selected: true,
},
],
}
let templatesIncludingDashTime

View File

@ -81,20 +81,35 @@ const AutoRefresh = ComposedComponent => {
const templatesWithResolution = templates.map(temp => {
if (temp.tempVar === ':interval:') {
if (resolution) {
return {...temp, resolution}
return {
...temp,
values: temp.values.map(
v => (temp.type === 'resolution' ? {...v, resolution} : v)
),
}
}
return {
...temp,
values: [
...temp.values,
{value: '1000', type: 'resolution', selected: true},
],
}
return {...temp, resolution: 1000}
}
return {...temp}
return temp
})
const tempVars = removeUnselectedTemplateValues(templatesWithResolution)
return fetchTimeSeriesAsync(
{
source: host,
db: database,
rp,
query,
tempVars: removeUnselectedTemplateValues(templatesWithResolution),
tempVars,
resolution,
},
editQueryStatus

View File

@ -138,7 +138,7 @@ $graph-gutter: 16px;
font-size: 20px;
font-weight: 400;
margin: 0;
text-align: left;
text-align: center;
color: $g8-storm;
white-space: pre-wrap;
}