Merge pull request #3120 from influxdata/change-interval-to-duration

:interval: now represents a raw influxql duration
lukevmorris 2018-04-04 09:39:49 -07:00 committed by GitHub
commit 465f80944b
19 changed files with 365 additions and 235 deletions
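In practice, a cell query that previously read SELECT mean("usage_idle") FROM "cpu" GROUP BY :interval: is now written (and migrated to) SELECT mean("usage_idle") FROM "cpu" GROUP BY time(:interval:), and :interval: can also be used anywhere a raw duration is accepted, for example non_negative_derivative(mean("usage_idle"), :interval:).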


@ -74,7 +74,6 @@
1. [#2837](https://github.com/influxdata/chronograf/pull/2837): Prevent execution of queries in cells that are not in view on the dashboard page
1. [#2829](https://github.com/influxdata/chronograf/pull/2829): Add an optional persistent legend which can toggle series visibility to dashboard cells
1. [#2846](https://github.com/influxdata/chronograf/pull/2846): Allow user to annotate graphs via UI or API
### UI Improvements
1. [#2848](https://github.com/influxdata/chronograf/pull/2848): Add ability to set a prefix and suffix on Single Stat and Gauge cell types

bolt/base.go (new file, 92 lines)

@ -0,0 +1,92 @@
package bolt
import (
"time"
"github.com/boltdb/bolt"
)
// SchemaVersionBucket stores ids of completed migrations
var SchemaVersionBucket = []byte("SchemaVersions")
// IsMigrationComplete checks for the presence of a particular migration id
func IsMigrationComplete(db *bolt.DB, id string) (bool, error) {
complete := false
if err := db.View(func(tx *bolt.Tx) error {
migration := tx.Bucket(SchemaVersionBucket).Get([]byte(id))
if migration != nil {
complete = true
}
return nil
}); err != nil {
return true, err
}
return complete, nil
}
// MarkMigrationAsComplete adds the migration id to the schema bucket
func MarkMigrationAsComplete(db *bolt.DB, id string) error {
if err := db.Update(func(tx *bolt.Tx) error {
now := time.Now().UTC().Format(time.RFC3339)
return tx.Bucket(SchemaVersionBucket).Put([]byte(id), []byte(now))
}); err != nil {
return err
}
return nil
}
// Migration defines a database state/schema transition
// ID: After the migration is run, this id is stored in the database.
// We don't want to run a state transition twice
// Up: The forward-transition function. After a version upgrade, a number
// of these will run on database startup in order to bring a user's
// schema in line with struct definitions in the new version.
// Down: The backward-transition function. We don't expect these to be
// run on a user's database -- if the user needs to rollback
// to a previous version, it will be easier for them to replace
// their current database with one of their backups. The primary
// purpose of a Down() function is to help contributors move across
// development branches that have different schema definitions.
type Migration struct {
ID string
Up func(db *bolt.DB) error
Down func(db *bolt.DB) error
}
// Migrate runs one migration's Up() function, if it has not already been run
func (m Migration) Migrate(client *Client) error {
complete, err := IsMigrationComplete(client.db, m.ID)
if err != nil {
return err
}
if complete {
return nil
}
client.logger.Info("Running migration ", m.ID, "")
if err = m.Up(client.db); err != nil {
return err
}
return MarkMigrationAsComplete(client.db, m.ID)
}
// MigrateAll iterates through all known migrations and runs them in order
func MigrateAll(client *Client) error {
for _, m := range migrations {
err := m.Migrate(client)
if err != nil {
return err
}
}
return nil
}
var migrations = []Migration{
changeIntervalToDuration,
}
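As an aside, a hypothetical later schema change would plug into this framework by defining another Migration in the bolt package and appending it to the slice above; the ID and name below are made up, and changeIntervalToDuration remains the only migration registered in this PR.

// addSomeField is a hypothetical future migration, shown only to illustrate
// the registration pattern used above.
var addSomeField = Migration{
	ID: "hypothetical-unique-id", // any stable, unique identifier
	Up: func(db *bolt.DB) error {
		// Rewrite stored records to the new shape here.
		return nil
	},
	Down: func(db *bolt.DB) error { return nil },
}

MigrateAll, wired into client.migrate further down, would then run it once on startup and record its ID in the SchemaVersions bucket.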


@ -0,0 +1,127 @@
package bolt
import (
"log"
"strings"
"github.com/boltdb/bolt"
"github.com/gogo/protobuf/proto"
)
// changeIntervalToDuration
// Before, we supported queries that included `GROUP BY :interval:`
// After, we only support queries with `GROUP BY time(:interval:)`
// thereby allowing non_negative_derivative(_____, :interval:)
var changeIntervalToDuration = Migration{
ID: "59b0cda4fc7909ff84ee5c4f9cb4b655b6a26620",
Up: up,
Down: down,
}
func updateDashboard(board *Dashboard) {
for _, cell := range board.Cells {
for _, query := range cell.Queries {
query.Command = strings.Replace(query.Command, ":interval:", "time(:interval:)", -1)
}
}
}
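Concretely, the rewrite applied to every stored cell query looks like this (the query text is illustrative, not taken from a real dashboard):

	old := `SELECT mean("usage_idle") FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`
	migrated := strings.Replace(old, ":interval:", "time(:interval:)", -1)
	// migrated == `SELECT mean("usage_idle") FROM "cpu" WHERE time > :dashboardTime: GROUP BY time(:interval:)`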
var up = func(db *bolt.DB) error {
// For each dashboard
err := db.Update(func(tx *bolt.Tx) error {
bucket := tx.Bucket(dashboardBucket)
err := bucket.ForEach(func(id, data []byte) error {
board := &Dashboard{}
err := proto.Unmarshal(data, board)
if err != nil {
log.Fatal("unmarshaling error: ", err)
}
// Migrate the dashboard
updateDashboard(board)
data, err = proto.Marshal(board)
if err != nil {
log.Fatal("marshaling error: ", err)
}
err = bucket.Put(id, data)
if err != nil {
log.Fatal("error updating dashboard: ", err)
}
return nil
})
if err != nil {
log.Fatal("error updating dashboards: ", err)
}
return nil
})
if err != nil {
return err
}
return nil
}
var down = func(db *bolt.DB) error {
return nil
}
/*
Import protobuf types and bucket names that are pertinent to this migration.
This isolates the migration from the codebase, and prevents a future change
to a type definition from invalidating the migration functions.
*/
var dashboardBucket = []byte("Dashoard") // NB: matches the existing dashboards bucket name, misspelling included
type Dashboard struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
Cells []*DashboardCell `protobuf:"bytes,3,rep,name=cells" json:"cells,omitempty"`
Templates string `protobuf:"bytes,4,rep,name=templates" json:"templates,omitempty"`
Organization string `protobuf:"bytes,5,opt,name=Organization,proto3" json:"Organization,omitempty"`
}
func (*Dashboard) ProtoMessage() {}
func (m *Dashboard) Reset() { *m = Dashboard{} }
func (m *Dashboard) String() string { return proto.CompactTextString(m) }
type DashboardCell struct {
X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"`
Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"`
W int32 `protobuf:"varint,3,opt,name=w,proto3" json:"w,omitempty"`
H int32 `protobuf:"varint,4,opt,name=h,proto3" json:"h,omitempty"`
Queries []*Query `protobuf:"bytes,5,rep,name=queries" json:"queries,omitempty"`
Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"`
Type string `protobuf:"bytes,7,opt,name=type,proto3" json:"type,omitempty"`
ID string `protobuf:"bytes,8,opt,name=ID,proto3" json:"ID,omitempty"`
Axes string `protobuf:"bytes,9,rep,name=axes" json:"axes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value"`
Colors string `protobuf:"bytes,10,rep,name=colors" json:"colors,omitempty"`
Legend string `protobuf:"bytes,11,opt,name=legend" json:"legend,omitempty"`
TableOptions string `protobuf:"bytes,12,opt,name=tableOptions" json:"tableOptions,omitempty"`
}
func (m *DashboardCell) Reset() { *m = DashboardCell{} }
func (m *DashboardCell) String() string { return proto.CompactTextString(m) }
func (*DashboardCell) ProtoMessage() {}
type Query struct {
Command string `protobuf:"bytes,1,opt,name=Command,proto3" json:"Command,omitempty"`
DB string `protobuf:"bytes,2,opt,name=DB,proto3" json:"DB,omitempty"`
RP string `protobuf:"bytes,3,opt,name=RP,proto3" json:"RP,omitempty"`
GroupBys string `protobuf:"bytes,4,rep,name=GroupBys" json:"GroupBys,omitempty"`
Wheres string `protobuf:"bytes,5,rep,name=Wheres" json:"Wheres,omitempty"`
Label string `protobuf:"bytes,6,opt,name=Label,proto3" json:"Label,omitempty"`
Range string `protobuf:"bytes,7,opt,name=Range" json:"Range,omitempty"`
Source string `protobuf:"bytes,8,opt,name=Source,proto3" json:"Source,omitempty"`
Shifts string `protobuf:"bytes,9,rep,name=Shifts" json:"Shifts,omitempty"`
}
func (m *Query) Reset() { *m = Query{} }
func (m *Query) String() string { return proto.CompactTextString(m) }
func (*Query) ProtoMessage() {}


@ -121,6 +121,10 @@ func (c *Client) Open(ctx context.Context, logger chronograf.Logger, build chron
// initialize creates Buckets that are missing
func (c *Client) initialize(ctx context.Context) error {
if err := c.db.Update(func(tx *bolt.Tx) error {
// Always create SchemaVersions bucket.
if _, err := tx.CreateBucketIfNotExists(SchemaVersionBucket); err != nil {
return err
}
// Always create Organizations bucket.
if _, err := tx.CreateBucketIfNotExists(OrganizationsBucket); err != nil {
return err
@ -193,6 +197,8 @@ func (c *Client) migrate(ctx context.Context, build chronograf.BuildInfo) error
if err := c.MappingsStore.Migrate(ctx); err != nil {
return err
}
MigrateAll(c)
}
return nil
}


@ -63,8 +63,9 @@ func ParseTime(influxQL string, now time.Time) (time.Duration, error) {
func Convert(influxQL string) (chronograf.QueryConfig, error) {
itsDashboardTime := false
intervalTime := false
if strings.Contains(influxQL, ":interval:") {
influxQL = strings.Replace(influxQL, ":interval:", "time(1234s)", 1)
influxQL = strings.Replace(influxQL, ":interval:", "8675309ns", -1)
intervalTime = true
}
@ -83,7 +84,7 @@ func Convert(influxQL string) (chronograf.QueryConfig, error) {
}
if intervalTime {
influxQL = strings.Replace(influxQL, "time(1234s)", ":interval:", 1)
influxQL = strings.Replace(influxQL, "8675309ns", ":interval:", -1)
}
raw := chronograf.QueryConfig{

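The placeholder change above exists because :interval: may now appear anywhere a raw InfluxQL duration is legal, for example as the second argument to non_negative_derivative, so wrapping a single occurrence in time(...) is no longer enough. A minimal sketch of the round trip, reusing the same strings calls:

	q := `SELECT non_negative_derivative(mean("count"), :interval:) FROM "req" GROUP BY time(:interval:)`
	q = strings.Replace(q, ":interval:", "8675309ns", -1) // every occurrence becomes a parseable duration
	// ... hand q to the InfluxQL parser and build the QueryConfig ...
	q = strings.Replace(q, "8675309ns", ":interval:", -1) // restore the template variable afterwards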

@ -12,6 +12,10 @@ import (
// SortTemplates the templates by size, then type, then value.
func SortTemplates(ts []chronograf.TemplateVar) []chronograf.TemplateVar {
sort.Slice(ts, func(i, j int) bool {
if ts[i].Var == ":interval:" {
return false
}
if len(ts[i].Values) != len(ts[j].Values) {
return len(ts[i].Values) < len(ts[j].Values)
}
@ -59,16 +63,8 @@ func RenderTemplate(query string, t chronograf.TemplateVar, now time.Time) (stri
tv[t.Values[i].Type] = t.Values[i].Value
}
if res, ok := tv["resolution"]; ok {
resolution, err := strconv.ParseInt(res, 0, 64)
if err != nil {
return "", err
}
ppp, ok := tv["pointsPerPixel"]
if !ok {
ppp = "3"
}
pixelsPerPoint, err := strconv.ParseInt(ppp, 0, 64)
if pts, ok := tv["points"]; ok {
points, err := strconv.ParseInt(pts, 0, 64)
if err != nil {
return "", err
}
@ -77,28 +73,27 @@ func RenderTemplate(query string, t chronograf.TemplateVar, now time.Time) (stri
if err != nil {
return "", err
}
interval := AutoGroupBy(resolution, pixelsPerPoint, dur)
interval := AutoInterval(points, dur)
return strings.Replace(query, t.Var, interval, -1), nil
}
return query, nil
}
// AutoGroupBy generates the time to group by in order to decimate the number of
// points returned in a query
func AutoGroupBy(resolution, pixelsPerPoint int64, duration time.Duration) string {
func AutoInterval(points int64, duration time.Duration) string {
// The function is: ((total_seconds * millisecond_conversion) / group_by) = pixels / 3
// Number of points given the pixels
pixels := float64(resolution) / float64(pixelsPerPoint)
pixels := float64(points)
msPerPixel := float64(duration/time.Millisecond) / pixels
secPerPixel := float64(duration/time.Second) / pixels
if secPerPixel < 1.0 {
if msPerPixel < 1.0 {
msPerPixel = 1.0
}
return "time(" + strconv.FormatInt(int64(msPerPixel), 10) + "ms)"
return strconv.FormatInt(int64(msPerPixel), 10) + "ms"
}
// If groupby is more than 1 second round to the second
return "time(" + strconv.FormatInt(int64(secPerPixel), 10) + "s)"
return strconv.FormatInt(int64(secPerPixel), 10) + "s"
}
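To tie this to the expected values in the tests below: a 4320h range with 333 points gives 15,552,000s / 333 ≈ 46,702, hence "46702s"; a 1h range with 38 points gives 3,600s / 38 ≈ 94, hence "94s"; and a one-minute range with 333 points drops below one second per point, so the millisecond branch returns 60,000ms / 333 ≈ 180, hence "180ms".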
// TemplateReplace replaces templates with values within the query string


@ -126,59 +126,79 @@ func TestTemplateReplace(t *testing.T) {
want: `SELECT :field: FROM "cpu"`,
},
{
name: "auto group by",
query: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by :interval:`,
name: "auto interval",
query: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(:interval:)`,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
Value: "333",
Type: "points",
},
},
},
},
want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46655s)`,
want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46702s)`,
},
{
name: "auto interval",
query: `SELECT derivative(mean(usage_idle),:interval:) from "cpu" where time > now() - 4320h group by time(:interval:)`,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "333",
Type: "points",
},
},
},
},
want: `SELECT derivative(mean(usage_idle),46702s) from "cpu" where time > now() - 4320h group by time(46702s)`,
},
{
name: "auto group by",
query: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(:interval:)`,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "333",
Type: "points",
},
},
},
},
want: `SELECT mean(usage_idle) from "cpu" where time > now() - 4320h group by time(46702s)`,
},
{
name: "auto group by without duration",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by :interval:`,
query: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(:interval:)`,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
Value: "333",
Type: "points",
},
},
},
},
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46702s)`,
},
{
name: "auto group by with :dashboardTime:",
query: `SELECT mean(usage_idle) from "cpu" WHERE time > :dashboardTime: group by :interval:`,
query: `SELECT mean(usage_idle) from "cpu" WHERE time > :dashboardTime: group by time(:interval:)`,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
Value: "333",
Type: "points",
},
},
},
@ -192,22 +212,18 @@ func TestTemplateReplace(t *testing.T) {
},
},
},
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46655s)`,
want: `SELECT mean(usage_idle) from "cpu" WHERE time > now() - 4320h group by time(46702s)`,
},
{
name: "auto group by failing condition",
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`,
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY time(:interval:)`,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "115",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
Value: "38",
Type: "points",
},
},
},
@ -222,27 +238,23 @@ func TestTemplateReplace(t *testing.T) {
},
},
},
want: `SELECT mean(usage_idle) FROM "cpu" WHERE time > now() - 1h GROUP BY time(93s)`,
want: `SELECT mean(usage_idle) FROM "cpu" WHERE time > now() - 1h GROUP BY time(94s)`,
},
{
name: "no template variables specified",
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`,
want: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`,
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY time(:interval:)`,
want: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY time(:interval:)`,
},
{
name: "auto group by failing condition",
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY :interval:`,
query: `SELECT mean(usage_idle) FROM "cpu" WHERE time > :dashboardTime: GROUP BY time(:interval:)`,
vars: []chronograf.TemplateVar{
{
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "115",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
Value: "38",
Type: "points",
},
},
},
@ -257,7 +269,7 @@ func TestTemplateReplace(t *testing.T) {
},
},
},
want: `SELECT mean(usage_idle) FROM "cpu" WHERE time > now() - 1h GROUP BY time(93s)`,
want: `SELECT mean(usage_idle) FROM "cpu" WHERE time > now() - 1h GROUP BY time(94s)`,
},
{
name: "query with no template variables contained should return query",
@ -268,11 +280,7 @@ func TestTemplateReplace(t *testing.T) {
Values: []chronograf.TemplateValue{
{
Value: "115",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
Type: "points",
},
},
},
@ -313,12 +321,8 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
"tempVar": ":interval:",
"values": [
{
"value": "1000",
"type": "resolution"
},
{
"value": "3",
"type": "pointsPerPixel"
"value": "333",
"type": "points"
},
{
"value": "10",
@ -343,12 +347,8 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
Var: ":interval:",
Values: []chronograf.TemplateValue{
{
Value: "1000",
Type: "resolution",
},
{
Value: "3",
Type: "pointsPerPixel",
Value: "333",
Type: "points",
},
{
Value: "10",
@ -379,46 +379,6 @@ func Test_TemplateVarsUnmarshalling(t *testing.T) {
}
}
func TestAutoGroupBy(t *testing.T) {
tests := []struct {
name string
resolution int64
pixelsPerPoint int64
duration time.Duration
want string
}{
{
name: "String() calculates the GROUP BY interval",
resolution: 700,
pixelsPerPoint: 3,
duration: 24 * time.Hour,
want: "time(370s)",
},
{
name: "String() milliseconds if less than one second intervals",
resolution: 100000,
pixelsPerPoint: 3,
duration: time.Hour,
want: "time(107ms)",
},
{
name: "String() milliseconds if less than one millisecond",
resolution: 100000,
pixelsPerPoint: 3,
duration: time.Second,
want: "time(1ms)",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := AutoGroupBy(tt.resolution, tt.pixelsPerPoint, tt.duration)
if got != tt.want {
t.Errorf("TestAutoGroupBy %s =\n%s\nwant\n%s", tt.name, got, tt.want)
}
})
}
}
func Test_RenderTemplate(t *testing.T) {
gbvTests := []struct {
name string
@ -428,38 +388,38 @@ func Test_RenderTemplate(t *testing.T) {
}{
{
name: "relative time only lower bound with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY :interval:",
resolution: 1000,
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY time(:interval:)",
resolution: 333,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d GROUP BY time(259s)",
},
{
name: "relative time offset by week",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY :interval:",
resolution: 1000,
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY time(:interval:)",
resolution: 333,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d - 7d AND time < now() - 7d GROUP BY time(259s)",
},
{
name: "relative time with relative upper bound with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY time(179ms)",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY time(:interval:)",
resolution: 333,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 3m AND time < now() - 2m GROUP BY time(180ms)",
},
{
name: "relative time with relative lower bound and now upper with one day of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY :interval:",
resolution: 1000,
query: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY time(:interval:)",
resolution: 333,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > now() - 1d AND time < now() GROUP BY time(259s)",
},
{
name: "absolute time with one minute of duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY :interval:",
resolution: 1000,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY time(179ms)",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY time(:interval:)",
resolution: 333,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > '1985-10-25T00:01:00Z' and time < '1985-10-25T00:02:00Z' GROUP BY time(180ms)",
},
{
name: "absolute time with nano seconds and zero duration",
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY :interval:",
resolution: 1000,
query: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY time(:interval:)",
resolution: 333,
want: "SELECT mean(usage_idle) FROM cpu WHERE time > '2017-07-24T15:33:42.994Z' and time < '2017-07-24T15:33:42.994Z' GROUP BY time(1ms)",
},
{
@ -480,7 +440,7 @@ func Test_RenderTemplate(t *testing.T) {
Values: []chronograf.TemplateValue{
{
Value: fmt.Sprintf("%d", tt.resolution),
Type: "resolution",
Type: "points",
},
},
}


@ -463,7 +463,7 @@ func TestServer(t *testing.T) {
"name": "Untitled Cell",
"queries": [
{
"query": "SELECT mean(\"value\") AS \"mean_value\" FROM \"telegraf\".\"autogen\".\"cpg\" WHERE time > :dashboardTime: GROUP BY :interval: FILL(null)",
"query": "SELECT mean(\"value\") AS \"mean_value\" FROM \"telegraf\".\"autogen\".\"cpg\" WHERE time > :dashboardTime: GROUP BY time(:interval:) FILL(null)",
"queryConfig": {
"database": "telegraf",
"measurement": "cpg",
@ -718,7 +718,7 @@ func TestServer(t *testing.T) {
"name": "Untitled Cell",
"queries": [
{
"query": "SELECT mean(\"value\") AS \"mean_value\" FROM \"telegraf\".\"autogen\".\"cpg\" WHERE time > :dashboardTime: GROUP BY :interval: FILL(null)",
"query": "SELECT mean(\"value\") AS \"mean_value\" FROM \"telegraf\".\"autogen\".\"cpg\" WHERE time > :dashboardTime: GROUP BY time(:interval:) FILL(null)",
"queryConfig": {
"database": "telegraf",
"measurement": "cpg",


@ -10,7 +10,7 @@
"name": "Untitled Cell",
"queries": [
{
"query": "SELECT mean(\"value\") AS \"mean_value\" FROM \"telegraf\".\"autogen\".\"cpg\" WHERE time \u003e :dashboardTime: GROUP BY :interval: FILL(null)",
"query": "SELECT mean(\"value\") AS \"mean_value\" FROM \"telegraf\".\"autogen\".\"cpg\" WHERE time \u003e :dashboardTime: GROUP BY time(:interval:) FILL(null)",
"queryConfig": {
"id": "b20baa61-bacb-4a17-b27d-b904a0d18114",
"database": "telegraf",


@ -98,7 +98,7 @@ func TestService_Queries(t *testing.T) {
r: httptest.NewRequest("POST", "/queries", bytes.NewReader([]byte(`{
"queries": [
{
"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > :dashboardTime: AND time < :upperDashboardTime: GROUP BY :interval:",
"query": "SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time > :dashboardTime: AND time < :upperDashboardTime: GROUP BY time(:interval:)",
"id": "82b60d37-251e-4afe-ac93-ca20a3642b11"
}
],
@ -155,18 +155,14 @@ func TestService_Queries(t *testing.T) {
"tempVar": ":interval:",
"values": [
{
"value": "1000",
"type": "resolution"
},
{
"value": "3",
"type": "pointsPerPixel"
"value": "333",
"type": "points"
}
]
}
]
}`))),
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY :interval:","range":null,"shifts":[]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 15m AND time \u003c now() GROUP BY time(2s)","tempVars":[{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":interval:","values":[{"value":"1000","type":"resolution","selected":false},{"value":"3","type":"pointsPerPixel","selected":false}]}]}]}
want: `{"queries":[{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","query":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY time(:interval:)","queryConfig":{"id":"82b60d37-251e-4afe-ac93-ca20a3642b11","database":"","measurement":"","retentionPolicy":"","fields":[],"tags":{},"groupBy":{"time":"","tags":[]},"areTagsAccepted":false,"rawText":"SELECT \"pingReq\" FROM :dbs:.\"monitor\".\"httpd\" WHERE time \u003e :dashboardTime: AND time \u003c :upperDashboardTime: GROUP BY time(:interval:)","range":null,"shifts":[]},"queryTemplated":"SELECT \"pingReq\" FROM \"_internal\".\"monitor\".\"httpd\" WHERE time \u003e now() - 15m AND time \u003c now() GROUP BY time(2s)","tempVars":[{"tempVar":":upperDashboardTime:","values":[{"value":"now()","type":"constant","selected":true}]},{"tempVar":":dashboardTime:","values":[{"value":"now() - 15m","type":"constant","selected":true}]},{"tempVar":":dbs:","values":[{"value":"_internal","type":"database","selected":true}]},{"tempVar":":interval:","values":[{"value":"333","type":"points","selected":false}]}]}]}
`,
},
}


@ -35,7 +35,7 @@ import {
templateControlBarVisibilityToggled as templateControlBarVisibilityToggledAction,
} from 'shared/actions/app'
import {presentationButtonDispatcher} from 'shared/dispatchers'
import {DASHBOARD_LAYOUT_ROW_HEIGHT} from 'shared/constants'
import {interval, DASHBOARD_LAYOUT_ROW_HEIGHT} from 'shared/constants'
import {notifyDashboardNotFound} from 'shared/copy/notifications'
import {colorsStringSchema, colorsNumberSchema} from 'shared/schemas'
@ -334,25 +334,6 @@ class DashboardPage extends Component {
],
}
const interval = {
id: 'interval',
type: 'autoGroupBy',
tempVar: ':interval:',
label: 'automatically determine the best group by time',
values: [
{
value: '1000', // pixels
type: 'resolution',
selected: true,
},
{
value: '3',
type: 'pointsPerPixel',
selected: true,
},
],
}
let templatesIncludingDashTime
if (dashboard) {
templatesIncludingDashTime = [


@ -8,7 +8,7 @@ import {Table, Column, Cell} from 'fixed-data-table'
import Dropdown from 'shared/components/Dropdown'
import CustomCell from 'src/data_explorer/components/CustomCell'
import TabItem from 'src/data_explorer/components/TableTabItem'
import {TEMPLATES} from 'src/data_explorer/constants'
import {TEMPLATES} from 'src/shared/constants'
import {fetchTimeSeriesAsync} from 'shared/actions/timeSeries'


@ -6,7 +6,7 @@ import _ from 'lodash'
import {fetchTimeSeriesAsync} from 'shared/actions/timeSeries'
import {resultsToCSV} from 'src/shared/parsing/resultsToCSV.js'
import download from 'src/external/download.js'
import {TEMPLATES} from 'src/data_explorer/constants'
import {TEMPLATES} from 'src/shared/constants'
const getCSV = (query, errorThrown) => async () => {
try {


@ -81,16 +81,3 @@ export const QUERY_TEMPLATES = [
{text: 'Show Stats', query: 'SHOW STATS'},
{text: 'Show Diagnostics', query: 'SHOW DIAGNOSTICS'},
]
const interval = {
id: 'interval',
type: 'autoGroupBy',
tempVar: ':interval:',
label: 'automatically determine the best group by time',
values: [
{value: '1000', type: 'resolution', selected: true},
{value: '3', type: 'pointsPerPixel', selected: true},
],
} // pixels
export const TEMPLATES = [interval]


@ -7,16 +7,16 @@ import queryString from 'query-string'
import _ from 'lodash'
import QueryMaker from '../components/QueryMaker'
import Visualization from '../components/Visualization'
import QueryMaker from 'src/data_explorer/components/QueryMaker'
import Visualization from 'src/data_explorer/components/Visualization'
import WriteDataForm from 'src/data_explorer/components/WriteDataForm'
import Header from '../containers/Header'
import ResizeContainer from 'shared/components/ResizeContainer'
import OverlayTechnologies from 'shared/components/OverlayTechnologies'
import Header from 'src/data_explorer/containers/Header'
import ResizeContainer from 'src/shared/components/ResizeContainer'
import OverlayTechnologies from 'src/shared/components/OverlayTechnologies'
import ManualRefresh from 'src/shared/components/ManualRefresh'
import {VIS_VIEWS, AUTO_GROUP_BY} from 'shared/constants'
import {MINIMUM_HEIGHTS, INITIAL_HEIGHTS, TEMPLATES} from '../constants'
import {VIS_VIEWS, AUTO_GROUP_BY, TEMPLATES} from 'src/shared/constants'
import {MINIMUM_HEIGHTS, INITIAL_HEIGHTS} from 'src/data_explorer/constants'
import {errorThrown} from 'shared/actions/errors'
import {setAutoRefresh} from 'shared/actions/app'
import * as dataExplorerActionCreators from 'src/data_explorer/actions/view'


@ -4,6 +4,7 @@ import _ from 'lodash'
import {fetchTimeSeriesAsync} from 'shared/actions/timeSeries'
import {removeUnselectedTemplateValues} from 'src/dashboards/constants'
import {intervalValuesPoints} from 'src/shared/constants'
const AutoRefresh = ComposedComponent => {
class wrapper extends Component {
@ -97,31 +98,30 @@ const AutoRefresh = ComposedComponent => {
const timeSeriesPromises = queries.map(query => {
const {host, database, rp} = query
const templatesWithResolution = templates.map(temp => {
const templatesWithIntervalVals = templates.map(temp => {
if (temp.tempVar === ':interval:') {
if (resolution) {
// resize event
return {
...temp,
values: temp.values.map(
v => (temp.type === 'resolution' ? {...v, resolution} : v)
),
values: temp.values.map(v => ({
...v,
value: `${_.toInteger(Number(resolution) / 3)}`,
})),
}
}
return {
...temp,
values: [
...temp.values,
{value: '1000', type: 'resolution', selected: true},
],
values: intervalValuesPoints,
}
}
return temp
})
const tempVars = removeUnselectedTemplateValues(templatesWithResolution)
const tempVars = removeUnselectedTemplateValues(
templatesWithIntervalVals
)
return fetchTimeSeriesAsync(
{
source: host,


@ -427,6 +427,20 @@ export const DEFAULT_SOURCE = {
metaUrl: '',
}
export const intervalValuesPoints = [
{value: '333', type: 'points', selected: true},
]
export const interval = {
id: 'interval',
type: 'autoGroupBy',
tempVar: ':interval:',
label: 'automatically determine the best group by time',
values: intervalValuesPoints,
}
export const TEMPLATES = [interval]
export const IS_STATIC_LEGEND = legend =>
_.get(legend, 'type', false) === 'static'
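The 333-point default is the old behaviour restated: the removed template sent a 1000-pixel resolution at 3 pixels per point, and 1000 / 3 ≈ 333. On a live resize, AutoRefresh above likewise now sends ⌊resolution / 3⌋ as the points value instead of forwarding the raw pixel resolution.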


@ -157,9 +157,9 @@ function _buildGroupByTime(groupBy) {
return ''
}
return ` GROUP BY ${
groupBy.time === AUTO_GROUP_BY ? TEMP_VAR_INTERVAL : `time(${groupBy.time})`
}`
return ` GROUP BY time(${
groupBy.time === AUTO_GROUP_BY ? TEMP_VAR_INTERVAL : `${groupBy.time}`
})`
}
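Either way the clause is now emitted with the time() wrapper: an auto group-by renders as GROUP BY time(:interval:), while an explicit interval such as 10s still renders as GROUP BY time(10s).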
function _buildGroupByTags(groupBy) {


@ -1854,7 +1854,7 @@ combined-stream@1.0.6, combined-stream@^1.0.5, combined-stream@~1.0.5:
dependencies:
delayed-stream "~1.0.0"
commander@2.14.x, commander@^2.13.0, commander@^2.5.0, commander@^2.9.0, commander@~2.14.1:
commander@2.14.x, commander@~2.14.1:
version "2.14.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-2.14.1.tgz#2235123e37af8ca3c65df45b026dbd357b01b9aa"
@ -1868,7 +1868,7 @@ commander@2.9.0:
dependencies:
graceful-readlink ">= 1.0.0"
commander@^2.12.1:
commander@^2.12.1, commander@^2.13.0, commander@^2.5.0, commander@^2.9.0:
version "2.15.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-2.15.1.tgz#df46e867d0fc2aec66a34662b406a9ccafff5b0f"
@ -4552,29 +4552,13 @@ istanbul-lib-coverage@^1.1.1, istanbul-lib-coverage@^1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.1.2.tgz#4113c8ff6b7a40a1ef7350b01016331f63afde14"
istanbul-lib-coverage@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.0.tgz#f7d8f2e42b97e37fe796114cb0f9d68b5e3a4341"
istanbul-lib-hook@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.1.0.tgz#8538d970372cb3716d53e55523dd54b557a8d89b"
dependencies:
append-transform "^0.4.0"
istanbul-lib-instrument@^1.7.2:
version "1.10.1"
resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.1.tgz#724b4b6caceba8692d3f1f9d0727e279c401af7b"
dependencies:
babel-generator "^6.18.0"
babel-template "^6.16.0"
babel-traverse "^6.18.0"
babel-types "^6.18.0"
babylon "^6.18.0"
istanbul-lib-coverage "^1.2.0"
semver "^5.3.0"
istanbul-lib-instrument@^1.7.5, istanbul-lib-instrument@^1.8.0, istanbul-lib-instrument@^1.9.2:
istanbul-lib-instrument@^1.7.2, istanbul-lib-instrument@^1.7.5, istanbul-lib-instrument@^1.8.0, istanbul-lib-instrument@^1.9.2:
version "1.9.2"
resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.9.2.tgz#84905bf47f7e0b401d6b840da7bad67086b4aab6"
dependencies:
@ -7049,19 +7033,13 @@ react-dom@^15.0.2:
object-assign "^4.1.0"
prop-types "^15.5.10"
react-draggable@3.x:
react-draggable@3.x, "react-draggable@^2.2.6 || ^3.0.3":
version "3.0.5"
resolved "https://registry.yarnpkg.com/react-draggable/-/react-draggable-3.0.5.tgz#c031e0ed4313531f9409d6cd84c8ebcec0ddfe2d"
dependencies:
classnames "^2.2.5"
prop-types "^15.6.0"
"react-draggable@^2.2.6 || ^3.0.3":
version "2.2.6"
resolved "https://registry.yarnpkg.com/react-draggable/-/react-draggable-2.2.6.tgz#3a806e10f2da6babfea4136be6510e89b0d76901"
dependencies:
classnames "^2.2.5"
react-grid-layout@^0.16.6:
version "0.16.6"
resolved "https://registry.yarnpkg.com/react-grid-layout/-/react-grid-layout-0.16.6.tgz#9b2407a2b946c2260ebaf66f13b556e1da4efeb2"
@ -7524,18 +7502,12 @@ resolve@1.1.7:
version "1.1.7"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b"
resolve@^1.1.6, resolve@^1.1.7, resolve@^1.5.0:
resolve@^1.1.6, resolve@^1.1.7, resolve@^1.3.2, resolve@^1.5.0:
version "1.5.0"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.5.0.tgz#1f09acce796c9a762579f31b2c1cc4c3cddf9f36"
dependencies:
path-parse "^1.0.5"
resolve@^1.3.2:
version "1.6.0"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.6.0.tgz#0fbd21278b27b4004481c395349e7aba60a9ff5c"
dependencies:
path-parse "^1.0.5"
restore-cursor@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-1.0.1.tgz#34661f46886327fed2991479152252df92daa541"